author     Dan Willemsen <dwillemsen@google.com>  2022-03-26 03:42:10 +0000
committer  Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>  2022-03-26 03:42:10 +0000
commit     5a8b6adcfa54d474a9ad1a772d65bf6716217993 (patch)
tree       690b5fb2994f47324dd4298e676335d4555ee267
parent     df3578c2a35ac125ee592a3f800065e72f6e77c9 (diff)
parent     d25f34ce2b67cf462c2936561c8873a66492532e (diff)
download   setuptools-5a8b6adcfa54d474a9ad1a772d65bf6716217993.tar.gz
Upgrade to setuptools 61.1.0 am: adad21eb06 am: 30dd327dfb am: d25f34ce2b
Original change: https://android-review.googlesource.com/c/platform/external/python/setuptools/+/2045746

Change-Id: Ic6335d01130b6b75885d30275ed206a098f42d25
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
-rw-r--r--Android.bp6
-rw-r--r--CHANGES.rst2850
-rw-r--r--LICENSE24
-rw-r--r--MANIFEST.in9
-rw-r--r--METADATA6
-rw-r--r--PKG-INFO130
-rw-r--r--[-rwxr-xr-x]README.rst69
-rw-r--r--_distutils_hack/__init__.py187
-rw-r--r--_distutils_hack/override.py1
-rw-r--r--bootstrap.py64
-rw-r--r--changelog.d/.gitignore0
-rw-r--r--changelog.d/README.rst93
-rw-r--r--conftest.py42
-rw-r--r--docs/Makefile75
-rw-r--r--docs/_templates/indexsidebar.html8
-rw-r--r--docs/_theme/nature/static/nature.css_t237
-rw-r--r--docs/_theme/nature/static/pygments.css54
-rw-r--r--docs/_theme/nature/theme.conf4
-rw-r--r--docs/artwork.rst119
-rw-r--r--docs/build_meta.rst176
-rw-r--r--docs/conf.py236
-rw-r--r--docs/deprecated/distutils-legacy.rst36
-rw-r--r--docs/deprecated/distutils/_setuptools_disclaimer.rst5
-rw-r--r--docs/deprecated/distutils/apiref.rst2053
-rw-r--r--docs/deprecated/distutils/builtdist.rst479
-rw-r--r--docs/deprecated/distutils/commandref.rst106
-rw-r--r--docs/deprecated/distutils/configfile.rst144
-rw-r--r--docs/deprecated/distutils/examples.rst340
-rw-r--r--docs/deprecated/distutils/extending.rst98
-rw-r--r--docs/deprecated/distutils/index.rst42
-rw-r--r--docs/deprecated/distutils/introduction.rst214
-rw-r--r--docs/deprecated/distutils/packageindex.rst16
-rw-r--r--docs/deprecated/distutils/setupscript.rst715
-rw-r--r--docs/deprecated/distutils/sourcedist.rst242
-rw-r--r--docs/deprecated/distutils/uploading.rst8
-rw-r--r--docs/deprecated/easy_install.rst (renamed from docs/easy_install.txt)567
-rw-r--r--docs/deprecated/functionalities.rst33
-rw-r--r--docs/deprecated/index.rst20
-rw-r--r--docs/deprecated/python_eggs.rst (renamed from docs/formats.txt)11
-rw-r--r--docs/development/developer-guide.rst (renamed from docs/developer-guide.txt)92
-rw-r--r--docs/development/index.rst (renamed from docs/development.txt)3
-rw-r--r--docs/development/releases.rst (renamed from docs/releases.txt)25
-rw-r--r--docs/history.rst (renamed from docs/history.txt)11
-rw-r--r--docs/index.rst31
-rw-r--r--docs/index.txt25
-rw-r--r--docs/pkg_resources.rst (renamed from docs/pkg_resources.txt)360
-rw-r--r--docs/python 2 sunset.rst69
-rw-r--r--docs/python3.txt94
-rw-r--r--docs/references/keywords.rst336
-rw-r--r--docs/requirements.txt5
-rw-r--r--docs/roadmap.rst7
-rw-r--r--docs/roadmap.txt6
-rw-r--r--docs/setuptools.rst228
-rw-r--r--docs/setuptools.txt2775
-rw-r--r--docs/userguide/commands.rst566
-rw-r--r--docs/userguide/datafiles.rst206
-rw-r--r--docs/userguide/declarative_config.rst282
-rw-r--r--docs/userguide/dependency_management.rst443
-rw-r--r--docs/userguide/development_mode.rst52
-rw-r--r--docs/userguide/distribution.rst251
-rw-r--r--docs/userguide/entry_point.rst181
-rw-r--r--docs/userguide/extension.rst250
-rw-r--r--docs/userguide/functionalities_rewrite.rst9
-rw-r--r--docs/userguide/index.rst38
-rw-r--r--docs/userguide/keywords.rst163
-rw-r--r--docs/userguide/miscellaneous.rst159
-rw-r--r--docs/userguide/package_discovery.rst576
-rw-r--r--docs/userguide/pyproject_config.rst218
-rw-r--r--docs/userguide/quickstart.rst414
-rwxr-xr-xeasy_install.py5
-rw-r--r--exercises.py6
-rw-r--r--[-rwxr-xr-x]launcher.c1
-rw-r--r--pavement.py62
-rw-r--r--pkg_resources/__init__.py498
-rw-r--r--pkg_resources/_vendor/appdirs-1.4.3.dist-info/top_level.txt1
-rw-r--r--pkg_resources/_vendor/appdirs.py86
-rw-r--r--pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/top_level.txt1
-rw-r--r--pkg_resources/_vendor/importlib_resources/__init__.py36
-rw-r--r--pkg_resources/_vendor/importlib_resources/_adapters.py170
-rw-r--r--pkg_resources/_vendor/importlib_resources/_common.py104
-rw-r--r--pkg_resources/_vendor/importlib_resources/_compat.py98
-rw-r--r--pkg_resources/_vendor/importlib_resources/_itertools.py35
-rw-r--r--pkg_resources/_vendor/importlib_resources/_legacy.py121
-rw-r--r--pkg_resources/_vendor/importlib_resources/abc.py137
-rw-r--r--pkg_resources/_vendor/importlib_resources/readers.py122
-rw-r--r--pkg_resources/_vendor/importlib_resources/simple.py116
-rw-r--r--pkg_resources/_vendor/importlib_resources/tests/__init__.py0
-rw-r--r--pkg_resources/_vendor/importlib_resources/tests/_compat.py19
-rw-r--r--pkg_resources/_vendor/importlib_resources/tests/data01/__init__.py0
-rw-r--r--pkg_resources/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py0
-rw-r--r--pkg_resources/_vendor/importlib_resources/tests/data02/__init__.py0
-rw-r--r--pkg_resources/_vendor/importlib_resources/tests/data02/one/__init__.py0
-rw-r--r--pkg_resources/_vendor/importlib_resources/tests/data02/one/resource1.txt1
-rw-r--r--pkg_resources/_vendor/importlib_resources/tests/data02/two/__init__.py0
-rw-r--r--pkg_resources/_vendor/importlib_resources/tests/data02/two/resource2.txt1
-rw-r--r--pkg_resources/_vendor/importlib_resources/tests/test_compatibilty_files.py102
-rw-r--r--pkg_resources/_vendor/importlib_resources/tests/test_contents.py43
-rw-r--r--pkg_resources/_vendor/importlib_resources/tests/test_files.py46
-rw-r--r--pkg_resources/_vendor/importlib_resources/tests/test_open.py81
-rw-r--r--pkg_resources/_vendor/importlib_resources/tests/test_path.py64
-rw-r--r--pkg_resources/_vendor/importlib_resources/tests/test_read.py76
-rw-r--r--pkg_resources/_vendor/importlib_resources/tests/test_reader.py128
-rw-r--r--pkg_resources/_vendor/importlib_resources/tests/test_resource.py252
-rw-r--r--pkg_resources/_vendor/importlib_resources/tests/update-zips.py53
-rw-r--r--pkg_resources/_vendor/importlib_resources/tests/util.py178
-rw-r--r--pkg_resources/_vendor/importlib_resources/tests/zipdata01/__init__.py0
-rw-r--r--pkg_resources/_vendor/importlib_resources/tests/zipdata02/__init__.py0
-rw-r--r--pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/top_level.txt1
-rw-r--r--pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/top_level.txt1
-rw-r--r--pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/top_level.txt1
-rw-r--r--pkg_resources/_vendor/jaraco/__init__.py0
-rw-r--r--pkg_resources/_vendor/jaraco/context.py213
-rw-r--r--pkg_resources/_vendor/jaraco/functools.py525
-rw-r--r--pkg_resources/_vendor/jaraco/text/Lorem ipsum.txt2
-rw-r--r--pkg_resources/_vendor/jaraco/text/__init__.py599
-rw-r--r--pkg_resources/_vendor/more_itertools-8.12.0.dist-info/top_level.txt1
-rw-r--r--pkg_resources/_vendor/more_itertools/__init__.py4
-rw-r--r--pkg_resources/_vendor/more_itertools/more.py4316
-rw-r--r--pkg_resources/_vendor/more_itertools/recipes.py698
-rw-r--r--pkg_resources/_vendor/packaging-21.3.dist-info/top_level.txt1
-rw-r--r--pkg_resources/_vendor/packaging/__about__.py17
-rw-r--r--pkg_resources/_vendor/packaging/__init__.py21
-rw-r--r--pkg_resources/_vendor/packaging/_compat.py30
-rw-r--r--pkg_resources/_vendor/packaging/_manylinux.py301
-rw-r--r--pkg_resources/_vendor/packaging/_musllinux.py136
-rw-r--r--pkg_resources/_vendor/packaging/_structures.py49
-rw-r--r--pkg_resources/_vendor/packaging/markers.py197
-rw-r--r--pkg_resources/_vendor/packaging/requirements.py95
-rw-r--r--pkg_resources/_vendor/packaging/specifiers.py376
-rw-r--r--pkg_resources/_vendor/packaging/tags.py487
-rw-r--r--pkg_resources/_vendor/packaging/utils.py128
-rw-r--r--pkg_resources/_vendor/packaging/version.py335
-rw-r--r--pkg_resources/_vendor/pyparsing-2.2.1.dist-info/LICENSE.txt18
-rw-r--r--pkg_resources/_vendor/pyparsing-2.2.1.dist-info/top_level.txt1
-rw-r--r--pkg_resources/_vendor/pyparsing.py96
-rw-r--r--pkg_resources/_vendor/six.py868
-rw-r--r--pkg_resources/_vendor/vendored.txt12
-rw-r--r--pkg_resources/_vendor/zipp-3.7.0.dist-info/top_level.txt1
-rw-r--r--pkg_resources/_vendor/zipp.py329
-rw-r--r--pkg_resources/api_tests.txt6
-rw-r--r--pkg_resources/extern/__init__.py39
-rw-r--r--pkg_resources/py31compat.py22
-rw-r--r--pkg_resources/tests/data/my-test-package-source/setup.cfg0
-rw-r--r--pkg_resources/tests/data/my-test-package-source/setup.py6
-rw-r--r--pkg_resources/tests/data/my-test-package-zip/my-test-package.zipbin0 -> 1809 bytes
-rw-r--r--pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/PKG-INFO10
-rw-r--r--pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/SOURCES.txt7
-rw-r--r--pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/dependency_links.txt (renamed from setuptools.egg-info/zip-safe)0
-rw-r--r--pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/top_level.txt1
-rw-r--r--pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/zip-safe1
-rw-r--r--pkg_resources/tests/data/my-test-package_zipped-egg/my_test_package-1.0-py3.7.eggbin0 -> 843 bytes
-rw-r--r--pkg_resources/tests/test_find_distributions.py67
-rw-r--r--pkg_resources/tests/test_pkg_resources.py240
-rw-r--r--pkg_resources/tests/test_resources.py72
-rw-r--r--pkg_resources/tests/test_working_set.py10
-rw-r--r--pyproject.toml59
-rw-r--r--[-rwxr-xr-x]pytest.ini66
-rw-r--r--[-rwxr-xr-x]setup.cfg163
-rwxr-xr-xsetup.py192
-rw-r--r--setuptools.egg-info/PKG-INFO130
-rw-r--r--setuptools.egg-info/SOURCES.txt396
-rw-r--r--setuptools.egg-info/dependency_links.txt3
-rw-r--r--setuptools.egg-info/entry_points.txt18
-rw-r--r--setuptools.egg-info/requires.txt52
-rw-r--r--setuptools.egg-info/top_level.txt2
-rw-r--r--setuptools/__init__.py185
-rw-r--r--setuptools/_deprecation_warning.py7
-rw-r--r--setuptools/_distutils/__init__.py24
-rw-r--r--setuptools/_distutils/_collections.py56
-rw-r--r--setuptools/_distutils/_macos_compat.py12
-rw-r--r--setuptools/_distutils/_msvccompiler.py561
-rw-r--r--setuptools/_distutils/archive_util.py256
-rw-r--r--setuptools/_distutils/bcppcompiler.py393
-rw-r--r--setuptools/_distutils/ccompiler.py1123
-rw-r--r--setuptools/_distutils/cmd.py403
-rw-r--r--setuptools/_distutils/command/__init__.py31
-rw-r--r--setuptools/_distutils/command/bdist.py143
-rw-r--r--setuptools/_distutils/command/bdist_dumb.py123
-rw-r--r--setuptools/_distutils/command/bdist_msi.py749
-rw-r--r--setuptools/_distutils/command/bdist_rpm.py579
-rw-r--r--setuptools/_distutils/command/bdist_wininst.py377
-rw-r--r--setuptools/_distutils/command/build.py157
-rw-r--r--setuptools/_distutils/command/build_clib.py209
-rw-r--r--setuptools/_distutils/command/build_ext.py755
-rw-r--r--setuptools/_distutils/command/build_py.py392
-rw-r--r--setuptools/_distutils/command/build_scripts.py152
-rw-r--r--setuptools/_distutils/command/check.py148
-rw-r--r--setuptools/_distutils/command/clean.py76
-rw-r--r--setuptools/_distutils/command/config.py344
-rw-r--r--setuptools/_distutils/command/install.py781
-rw-r--r--setuptools/_distutils/command/install_data.py79
-rw-r--r--setuptools/_distutils/command/install_egg_info.py84
-rw-r--r--setuptools/_distutils/command/install_headers.py47
-rw-r--r--setuptools/_distutils/command/install_lib.py217
-rw-r--r--setuptools/_distutils/command/install_scripts.py60
-rw-r--r--setuptools/_distutils/command/py37compat.py30
-rw-r--r--setuptools/_distutils/command/register.py304
-rw-r--r--setuptools/_distutils/command/sdist.py494
-rw-r--r--setuptools/_distutils/command/upload.py214
-rw-r--r--setuptools/_distutils/command/wininst-10.0-amd64.exebin0 -> 222208 bytes
-rw-r--r--setuptools/_distutils/command/wininst-10.0.exebin0 -> 190976 bytes
-rw-r--r--setuptools/_distutils/command/wininst-14.0-amd64.exebin0 -> 587776 bytes
-rw-r--r--setuptools/_distutils/command/wininst-14.0.exebin0 -> 458240 bytes
-rw-r--r--setuptools/_distutils/command/wininst-6.0.exebin0 -> 61440 bytes
-rw-r--r--setuptools/_distutils/command/wininst-7.1.exebin0 -> 65536 bytes
-rw-r--r--setuptools/_distutils/command/wininst-8.0.exebin0 -> 61440 bytes
-rw-r--r--setuptools/_distutils/command/wininst-9.0-amd64.exebin0 -> 224256 bytes
-rw-r--r--setuptools/_distutils/command/wininst-9.0.exebin0 -> 196096 bytes
-rw-r--r--setuptools/_distutils/config.py130
-rw-r--r--setuptools/_distutils/core.py249
-rw-r--r--setuptools/_distutils/cygwinccompiler.py362
-rw-r--r--setuptools/_distutils/debug.py5
-rw-r--r--setuptools/_distutils/dep_util.py92
-rw-r--r--setuptools/_distutils/dir_util.py210
-rw-r--r--setuptools/_distutils/dist.py1257
-rw-r--r--setuptools/_distutils/errors.py97
-rw-r--r--setuptools/_distutils/extension.py240
-rw-r--r--setuptools/_distutils/fancy_getopt.py457
-rw-r--r--setuptools/_distutils/file_util.py238
-rw-r--r--setuptools/_distutils/filelist.py355
-rw-r--r--setuptools/_distutils/log.py81
-rw-r--r--setuptools/_distutils/msvc9compiler.py788
-rw-r--r--setuptools/_distutils/msvccompiler.py643
-rw-r--r--setuptools/_distutils/py35compat.py19
-rw-r--r--setuptools/_distutils/py38compat.py7
-rw-r--r--setuptools/_distutils/spawn.py106
-rw-r--r--setuptools/_distutils/sysconfig.py469
-rw-r--r--setuptools/_distutils/tests/__init__.py42
-rw-r--r--setuptools/_distutils/tests/py35compat.py77
-rw-r--r--setuptools/_distutils/tests/py38compat.py62
-rw-r--r--setuptools/_distutils/tests/support.py210
-rw-r--r--setuptools/_distutils/tests/test_archive_util.py393
-rw-r--r--setuptools/_distutils/tests/test_bdist.py57
-rw-r--r--setuptools/_distutils/tests/test_bdist_dumb.py97
-rw-r--r--setuptools/_distutils/tests/test_bdist_msi.py28
-rw-r--r--setuptools/_distutils/tests/test_bdist_rpm.py138
-rw-r--r--setuptools/_distutils/tests/test_bdist_wininst.py40
-rw-r--r--setuptools/_distutils/tests/test_build.py56
-rw-r--r--setuptools/_distutils/tests/test_build_clib.py136
-rw-r--r--setuptools/_distutils/tests/test_build_ext.py550
-rw-r--r--setuptools/_distutils/tests/test_build_py.py179
-rw-r--r--setuptools/_distutils/tests/test_build_scripts.py112
-rw-r--r--setuptools/_distutils/tests/test_check.py163
-rw-r--r--setuptools/_distutils/tests/test_clean.py49
-rw-r--r--setuptools/_distutils/tests/test_cmd.py126
-rw-r--r--setuptools/_distutils/tests/test_config.py141
-rw-r--r--setuptools/_distutils/tests/test_config_cmd.py98
-rw-r--r--setuptools/_distutils/tests/test_core.py165
-rw-r--r--setuptools/_distutils/tests/test_cygwinccompiler.py96
-rw-r--r--setuptools/_distutils/tests/test_dep_util.py80
-rw-r--r--setuptools/_distutils/tests/test_dir_util.py139
-rw-r--r--setuptools/_distutils/tests/test_dist.py533
-rw-r--r--setuptools/_distutils/tests/test_extension.py71
-rw-r--r--setuptools/_distutils/tests/test_file_util.py124
-rw-r--r--setuptools/_distutils/tests/test_filelist.py353
-rw-r--r--setuptools/_distutils/tests/test_install.py263
-rw-r--r--setuptools/_distutils/tests/test_install_data.py75
-rw-r--r--setuptools/_distutils/tests/test_install_headers.py39
-rw-r--r--setuptools/_distutils/tests/test_install_lib.py115
-rw-r--r--setuptools/_distutils/tests/test_install_scripts.py82
-rw-r--r--setuptools/_distutils/tests/test_log.py46
-rw-r--r--setuptools/_distutils/tests/test_msvc9compiler.py184
-rw-r--r--setuptools/_distutils/tests/test_msvccompiler.py138
-rw-r--r--setuptools/_distutils/tests/test_register.py325
-rw-r--r--setuptools/_distutils/tests/test_sdist.py489
-rw-r--r--setuptools/_distutils/tests/test_spawn.py139
-rw-r--r--setuptools/_distutils/tests/test_sysconfig.py309
-rw-r--r--setuptools/_distutils/tests/test_text_file.py107
-rw-r--r--setuptools/_distutils/tests/test_unixccompiler.py289
-rw-r--r--setuptools/_distutils/tests/test_upload.py223
-rw-r--r--setuptools/_distutils/tests/test_util.py224
-rw-r--r--setuptools/_distutils/tests/test_version.py95
-rw-r--r--setuptools/_distutils/tests/test_versionpredicate.py13
-rw-r--r--setuptools/_distutils/tests/unix_compat.py16
-rw-r--r--setuptools/_distutils/text_file.py286
-rw-r--r--setuptools/_distutils/unixccompiler.py370
-rw-r--r--setuptools/_distutils/util.py496
-rw-r--r--setuptools/_distutils/version.py363
-rw-r--r--setuptools/_distutils/versionpredicate.py169
-rw-r--r--setuptools/_entry_points.py86
-rw-r--r--setuptools/_imp.py82
-rw-r--r--setuptools/_importlib.py36
-rw-r--r--setuptools/_itertools.py23
-rw-r--r--setuptools/_path.py7
-rw-r--r--setuptools/_reqs.py19
-rw-r--r--setuptools/_vendor/__pycache__/__init__.cpython-36.pycbin148 -> 0 bytes
-rw-r--r--setuptools/_vendor/__pycache__/six.cpython-36.pycbin24445 -> 0 bytes
-rw-r--r--setuptools/_vendor/_validate_pyproject/__init__.py34
-rw-r--r--setuptools/_vendor/_validate_pyproject/error_reporting.py318
-rw-r--r--setuptools/_vendor/_validate_pyproject/extra_validations.py36
-rw-r--r--setuptools/_vendor/_validate_pyproject/fastjsonschema_exceptions.py51
-rw-r--r--setuptools/_vendor/_validate_pyproject/fastjsonschema_validations.py1001
-rw-r--r--setuptools/_vendor/_validate_pyproject/formats.py252
-rw-r--r--setuptools/_vendor/importlib_metadata-4.11.1.dist-info/top_level.txt1
-rw-r--r--setuptools/_vendor/importlib_metadata/__init__.py1047
-rw-r--r--setuptools/_vendor/importlib_metadata/_adapters.py68
-rw-r--r--setuptools/_vendor/importlib_metadata/_collections.py30
-rw-r--r--setuptools/_vendor/importlib_metadata/_compat.py71
-rw-r--r--setuptools/_vendor/importlib_metadata/_functools.py104
-rw-r--r--setuptools/_vendor/importlib_metadata/_itertools.py73
-rw-r--r--setuptools/_vendor/importlib_metadata/_meta.py48
-rw-r--r--setuptools/_vendor/importlib_metadata/_text.py99
-rw-r--r--setuptools/_vendor/importlib_resources-5.4.0.dist-info/top_level.txt1
-rw-r--r--setuptools/_vendor/importlib_resources/__init__.py36
-rw-r--r--setuptools/_vendor/importlib_resources/_adapters.py170
-rw-r--r--setuptools/_vendor/importlib_resources/_common.py104
-rw-r--r--setuptools/_vendor/importlib_resources/_compat.py98
-rw-r--r--setuptools/_vendor/importlib_resources/_itertools.py35
-rw-r--r--setuptools/_vendor/importlib_resources/_legacy.py121
-rw-r--r--setuptools/_vendor/importlib_resources/abc.py137
-rw-r--r--setuptools/_vendor/importlib_resources/readers.py122
-rw-r--r--setuptools/_vendor/importlib_resources/simple.py116
-rw-r--r--setuptools/_vendor/importlib_resources/tests/__init__.py0
-rw-r--r--setuptools/_vendor/importlib_resources/tests/_compat.py19
-rw-r--r--setuptools/_vendor/importlib_resources/tests/data01/__init__.py0
-rw-r--r--setuptools/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py0
-rw-r--r--setuptools/_vendor/importlib_resources/tests/data02/__init__.py0
-rw-r--r--setuptools/_vendor/importlib_resources/tests/data02/one/__init__.py0
-rw-r--r--setuptools/_vendor/importlib_resources/tests/data02/one/resource1.txt1
-rw-r--r--setuptools/_vendor/importlib_resources/tests/data02/two/__init__.py0
-rw-r--r--setuptools/_vendor/importlib_resources/tests/data02/two/resource2.txt1
-rw-r--r--setuptools/_vendor/importlib_resources/tests/test_compatibilty_files.py102
-rw-r--r--setuptools/_vendor/importlib_resources/tests/test_contents.py43
-rw-r--r--setuptools/_vendor/importlib_resources/tests/test_files.py46
-rw-r--r--setuptools/_vendor/importlib_resources/tests/test_open.py81
-rw-r--r--setuptools/_vendor/importlib_resources/tests/test_path.py64
-rw-r--r--setuptools/_vendor/importlib_resources/tests/test_read.py76
-rw-r--r--setuptools/_vendor/importlib_resources/tests/test_reader.py128
-rw-r--r--setuptools/_vendor/importlib_resources/tests/test_resource.py252
-rw-r--r--setuptools/_vendor/importlib_resources/tests/update-zips.py53
-rw-r--r--setuptools/_vendor/importlib_resources/tests/util.py178
-rw-r--r--setuptools/_vendor/importlib_resources/tests/zipdata01/__init__.py0
-rw-r--r--setuptools/_vendor/importlib_resources/tests/zipdata02/__init__.py0
-rw-r--r--setuptools/_vendor/jaraco.context-4.1.1.dist-info/top_level.txt1
-rw-r--r--setuptools/_vendor/jaraco.functools-3.5.0.dist-info/top_level.txt1
-rw-r--r--setuptools/_vendor/jaraco.text-3.7.0.dist-info/top_level.txt1
-rw-r--r--setuptools/_vendor/jaraco/__init__.py0
-rw-r--r--setuptools/_vendor/jaraco/context.py213
-rw-r--r--setuptools/_vendor/jaraco/functools.py525
-rw-r--r--setuptools/_vendor/jaraco/text/Lorem ipsum.txt2
-rw-r--r--setuptools/_vendor/jaraco/text/__init__.py599
-rw-r--r--setuptools/_vendor/more_itertools-8.8.0.dist-info/top_level.txt1
-rw-r--r--setuptools/_vendor/more_itertools/__init__.py4
-rw-r--r--setuptools/_vendor/more_itertools/more.py3824
-rw-r--r--setuptools/_vendor/more_itertools/recipes.py620
-rw-r--r--setuptools/_vendor/nspektr-0.3.0.dist-info/top_level.txt1
-rw-r--r--setuptools/_vendor/nspektr/__init__.py145
-rw-r--r--setuptools/_vendor/nspektr/_compat.py21
-rw-r--r--setuptools/_vendor/ordered_set-3.1.1.dist-info/top_level.txt1
-rw-r--r--setuptools/_vendor/ordered_set.py488
-rw-r--r--setuptools/_vendor/packaging-21.3.dist-info/top_level.txt1
-rw-r--r--setuptools/_vendor/packaging/__about__.py17
-rw-r--r--setuptools/_vendor/packaging/__init__.py21
-rw-r--r--setuptools/_vendor/packaging/__pycache__/__about__.cpython-36.pycbin684 -> 0 bytes
-rw-r--r--setuptools/_vendor/packaging/__pycache__/__init__.cpython-36.pycbin522 -> 0 bytes
-rw-r--r--setuptools/_vendor/packaging/__pycache__/_compat.cpython-36.pycbin969 -> 0 bytes
-rw-r--r--setuptools/_vendor/packaging/__pycache__/_structures.cpython-36.pycbin2826 -> 0 bytes
-rw-r--r--setuptools/_vendor/packaging/__pycache__/specifiers.cpython-36.pycbin19788 -> 0 bytes
-rw-r--r--setuptools/_vendor/packaging/__pycache__/version.cpython-36.pycbin10563 -> 0 bytes
-rw-r--r--setuptools/_vendor/packaging/_compat.py30
-rw-r--r--setuptools/_vendor/packaging/_manylinux.py301
-rw-r--r--setuptools/_vendor/packaging/_musllinux.py136
-rw-r--r--setuptools/_vendor/packaging/_structures.py49
-rw-r--r--setuptools/_vendor/packaging/markers.py197
-rw-r--r--setuptools/_vendor/packaging/requirements.py95
-rw-r--r--setuptools/_vendor/packaging/specifiers.py376
-rw-r--r--setuptools/_vendor/packaging/tags.py487
-rw-r--r--setuptools/_vendor/packaging/utils.py128
-rw-r--r--setuptools/_vendor/packaging/version.py335
-rw-r--r--setuptools/_vendor/pyparsing-2.2.1.dist-info/LICENSE.txt18
-rw-r--r--setuptools/_vendor/pyparsing-2.2.1.dist-info/top_level.txt1
-rw-r--r--setuptools/_vendor/pyparsing.py96
-rw-r--r--setuptools/_vendor/six.py868
-rw-r--r--setuptools/_vendor/tomli/__init__.py11
-rw-r--r--setuptools/_vendor/tomli/_parser.py691
-rw-r--r--setuptools/_vendor/tomli/_re.py107
-rw-r--r--setuptools/_vendor/tomli/_types.py10
-rw-r--r--setuptools/_vendor/typing_extensions.py2296
-rw-r--r--setuptools/_vendor/vendored.txt17
-rw-r--r--setuptools/_vendor/zipp-3.7.0.dist-info/top_level.txt1
-rw-r--r--setuptools/_vendor/zipp.py329
-rw-r--r--[-rwxr-xr-x]setuptools/archive_util.py102
-rw-r--r--setuptools/build_meta.py302
-rw-r--r--setuptools/cli-arm64.exebin0 -> 137216 bytes
-rw-r--r--setuptools/command/__init__.py10
-rw-r--r--[-rwxr-xr-x]setuptools/command/alias.py2
-rw-r--r--setuptools/command/bdist_egg.py71
-rw-r--r--[-rwxr-xr-x]setuptools/command/bdist_rpm.py23
-rwxr-xr-xsetuptools/command/bdist_wininst.py21
-rw-r--r--setuptools/command/build_clib.py61
-rw-r--r--setuptools/command/build_ext.py47
-rw-r--r--setuptools/command/build_py.py84
-rw-r--r--[-rwxr-xr-x]setuptools/command/develop.py65
-rw-r--r--[-rwxr-xr-x]setuptools/command/easy_install.py626
-rw-r--r--[-rwxr-xr-x]setuptools/command/egg_info.py293
-rw-r--r--setuptools/command/install.py9
-rw-r--r--[-rwxr-xr-x]setuptools/command/install_egg_info.py3
-rw-r--r--setuptools/command/install_lib.py7
-rw-r--r--[-rwxr-xr-x]setuptools/command/install_scripts.py11
-rw-r--r--setuptools/command/py36compat.py4
-rw-r--r--[-rwxr-xr-x]setuptools/command/register.py16
-rw-r--r--[-rwxr-xr-x]setuptools/command/rotate.py8
-rw-r--r--[-rwxr-xr-x]setuptools/command/saveopts.py0
-rw-r--r--[-rwxr-xr-x]setuptools/command/sdist.py84
-rw-r--r--[-rwxr-xr-x]setuptools/command/setopt.py4
-rw-r--r--setuptools/command/test.py95
-rw-r--r--setuptools/command/upload.py45
-rw-r--r--setuptools/command/upload_docs.py46
-rw-r--r--setuptools/config/__init__.py35
-rw-r--r--setuptools/config/_apply_pyprojecttoml.py249
-rw-r--r--setuptools/config/expand.py458
-rw-r--r--setuptools/config/pyprojecttoml.py338
-rw-r--r--setuptools/config/setupcfg.py (renamed from setuptools/config.py)396
-rw-r--r--setuptools/dep_util.py4
-rw-r--r--setuptools/depends.py60
-rw-r--r--setuptools/discovery.py588
-rw-r--r--setuptools/dist.py1054
-rw-r--r--setuptools/errors.py58
-rw-r--r--setuptools/extension.py4
-rw-r--r--setuptools/extern/__init__.py40
-rw-r--r--setuptools/glibc.py86
-rw-r--r--setuptools/glob.py31
-rw-r--r--setuptools/gui-arm64.exebin0 -> 137728 bytes
-rw-r--r--setuptools/installer.py104
-rw-r--r--setuptools/launch.py3
-rw-r--r--setuptools/lib2to3_ex.py62
-rw-r--r--setuptools/logging.py36
-rw-r--r--setuptools/monkey.py20
-rw-r--r--setuptools/msvc.py1249
-rw-r--r--[-rwxr-xr-x]setuptools/namespaces.py20
-rw-r--r--[-rwxr-xr-x]setuptools/package_index.py225
-rw-r--r--setuptools/pep425tags.py317
-rw-r--r--setuptools/py27compat.py28
-rw-r--r--setuptools/py31compat.py41
-rw-r--r--setuptools/py33compat.py54
-rw-r--r--setuptools/py34compat.py13
-rw-r--r--setuptools/py36compat.py82
-rw-r--r--[-rwxr-xr-x]setuptools/sandbox.py129
-rw-r--r--setuptools/script (dev).tmpl3
-rw-r--r--setuptools/site-patch.py74
-rw-r--r--setuptools/ssl_support.py260
-rw-r--r--setuptools/tests/__init__.py4
-rw-r--r--setuptools/tests/config/__init__.py0
-rw-r--r--setuptools/tests/config/test_apply_pyprojecttoml.py254
-rw-r--r--setuptools/tests/config/test_expand.py155
-rw-r--r--setuptools/tests/config/test_pyprojecttoml.py350
-rw-r--r--setuptools/tests/config/test_setupcfg.py924
-rw-r--r--setuptools/tests/contexts.py33
-rw-r--r--setuptools/tests/environment.py35
-rw-r--r--setuptools/tests/files.py39
-rw-r--r--setuptools/tests/fixtures.py123
-rw-r--r--setuptools/tests/integration/__init__.py0
-rw-r--r--setuptools/tests/integration/helpers.py75
-rw-r--r--setuptools/tests/integration/test_pip_install_sdist.py219
-rw-r--r--setuptools/tests/namespaces.py2
-rw-r--r--setuptools/tests/server.py46
-rw-r--r--setuptools/tests/test_archive_util.py6
-rw-r--r--setuptools/tests/test_bdist_deprecations.py27
-rw-r--r--setuptools/tests/test_bdist_egg.py5
-rw-r--r--setuptools/tests/test_build_clib.py12
-rw-r--r--setuptools/tests/test_build_ext.py111
-rw-r--r--setuptools/tests/test_build_meta.py754
-rw-r--r--setuptools/tests/test_build_py.py67
-rw-r--r--setuptools/tests/test_config.py583
-rw-r--r--setuptools/tests/test_config_discovery.py581
-rw-r--r--setuptools/tests/test_depends.py18
-rw-r--r--setuptools/tests/test_develop.py109
-rw-r--r--setuptools/tests/test_dist.py384
-rw-r--r--setuptools/tests/test_dist_info.py4
-rw-r--r--setuptools/tests/test_distutils_adoption.py158
-rw-r--r--setuptools/tests/test_easy_install.py577
-rw-r--r--setuptools/tests/test_editable_install.py113
-rw-r--r--setuptools/tests/test_egg_info.py618
-rw-r--r--setuptools/tests/test_extern.py20
-rw-r--r--setuptools/tests/test_find_packages.py85
-rw-r--r--setuptools/tests/test_find_py_modules.py81
-rw-r--r--setuptools/tests/test_glob.py34
-rw-r--r--setuptools/tests/test_install_scripts.py8
-rw-r--r--setuptools/tests/test_integration.py62
-rw-r--r--setuptools/tests/test_logging.py36
-rw-r--r--setuptools/tests/test_manifest.py143
-rw-r--r--setuptools/tests/test_msvc.py11
-rw-r--r--setuptools/tests/test_msvc14.py82
-rw-r--r--setuptools/tests/test_namespaces.py49
-rw-r--r--setuptools/tests/test_packageindex.py159
-rw-r--r--setuptools/tests/test_register.py22
-rw-r--r--setuptools/tests/test_sandbox.py3
-rw-r--r--setuptools/tests/test_sdist.py280
-rw-r--r--setuptools/tests/test_setopt.py41
-rw-r--r--setuptools/tests/test_setuptools.py122
-rw-r--r--setuptools/tests/test_test.py137
-rw-r--r--setuptools/tests/test_upload.py22
-rw-r--r--setuptools/tests/test_upload_docs.py71
-rw-r--r--setuptools/tests/test_virtualenv.py144
-rw-r--r--setuptools/tests/test_wheel.py124
-rw-r--r--setuptools/tests/test_windows_wrappers.py32
-rw-r--r--setuptools/tests/text.py5
-rw-r--r--setuptools/tests/textwrap.py2
-rw-r--r--setuptools/unicode_utils.py6
-rw-r--r--setuptools/wheel.py244
-rw-r--r--setuptools/windows_support.py2
-rw-r--r--tests/manual_test.py98
-rw-r--r--tests/test_pypi.py82
-rw-r--r--tools/finalize.py102
-rw-r--r--tools/msvc-build-launcher-arm64.cmd19
-rw-r--r--tools/msvc-build-launcher.cmd (renamed from msvc-build-launcher.cmd)0
-rw-r--r--tools/ppc64le-patch.py28
-rw-r--r--tools/towncrier_template.rst35
-rw-r--r--tools/vendored.py187
-rw-r--r--tox.ini112
510 files changed, 78673 insertions, 13096 deletions
diff --git a/Android.bp b/Android.bp
index 8f17fe4..d5d0229 100644
--- a/Android.bp
+++ b/Android.bp
@@ -48,13 +48,7 @@ python_library {
host_supported: true,
srcs: [
"pkg_resources/__init__.py",
- "pkg_resources/py31compat.py",
"pkg_resources/extern/**/*.py",
"pkg_resources/_vendor/**/*.py",
],
- version: {
- py2: {
- enabled: true,
- },
- },
}
diff --git a/CHANGES.rst b/CHANGES.rst
index d8b5b49..2c76dad 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,3 +1,2265 @@
+v61.1.0
+-------
+
+
+Deprecations
+^^^^^^^^^^^^
+* #3206: Changed ``setuptools.convert_path`` to an internal function that is not exposed
+ as part of the setuptools API.
+ Future releases of ``setuptools`` are likely to remove this function.
+
+Changes
+^^^^^^^
+* #3202: Changed behaviour of auto-discovery to not explicitly expand ``package_dir``
+ for flat-layouts and to not use relative paths starting with ``./``.
+* #3203: Prevented ``pyproject.toml`` parsing from overwriting
+ ``dist.include_package_data`` explicitly set in ``setup.py`` with the default
+ value.
+* #3208: Added a warning for non-existent files listed with the ``file`` directive in
+ ``setup.cfg`` and ``pyproject.toml``.
+* #3208: Added a default value for dynamic ``classifiers`` in ``pyproject.toml`` when
+ files are missing and errors are being ignored.
+* #3211: Disabled auto-discovery when the distribution class has a ``configuration``
+ attribute (e.g. when the ``setup.py`` script contains ``setup(...,
+ configuration=...)``). This is done to ensure extension-only packages created
+ with ``numpy.distutils.misc_util.Configuration`` are not broken by the
+ safeguard behaviour that avoids accidental multiple top-level packages in a
+ flat-layout.
+
+ .. note::
+ Users that don't set ``packages``, ``py_modules``, or ``configuration`` are
+ still likely to observe the auto-discovery behavior, which may halt the
+ build if the project contains multiple directories and/or multiple Python
+ files directly under the project root.
+
+ To disable auto-discovery please explicitly set either ``packages`` or
+ ``py_modules``. Alternatively you can also configure :ref:`custom-discovery`.
+
+
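The note above suggests opting out of auto-discovery by setting ``packages`` or
``py_modules`` explicitly. A minimal sketch of what that looks like in
``setup.py`` (project and package names are hypothetical placeholders)::

    # Listing packages (or py_modules) explicitly disables auto-discovery,
    # so stray top-level directories and scripts are never picked up.
    from setuptools import setup

    setup(
        name="example-project",        # hypothetical name
        version="0.0.1",
        packages=["example_project"],  # explicit list => no auto-discovery
        # py_modules=["example_module"],  # alternative for single-module projects
    )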
+v61.0.0
+-------
+
+
+Deprecations
+^^^^^^^^^^^^
+* #3068: Deprecated ``setuptools.config.read_configuration``,
+ ``setuptools.config.parse_configuration`` and other functions or classes
+ from ``setuptools.config``.
+
+ Users that still need to parse and process configuration from ``setup.cfg`` can
+ import a direct replacement from ``setuptools.config.setupcfg``, however this
+ module is transitional and might be removed in the future
+ (the ``setup.cfg`` configuration format itself is likely to be deprecated in the future).
+
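For code that still needs to parse ``setup.cfg``, the transitional replacement
mentioned in #3068 can be used roughly as follows; a hedged sketch, keeping in
mind that the module itself may eventually be removed::

    # Parse and expand a setup.cfg file with the transitional module.
    from setuptools.config.setupcfg import read_configuration

    config = read_configuration("setup.cfg")  # nested dict of sections
    metadata = config.get("metadata", {})
    options = config.get("options", {})
    print(metadata.get("name"), metadata.get("version"))
    print(options.get("install_requires", []))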
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #2894: If you purposefully want to create an *"empty distribution"*, please be aware
+ that some Python files (or general folders) might be automatically detected and
+ included.
+
+ Projects that currently don't specify either ``packages`` or ``py_modules`` in their
+ configuration and contain extra folders or Python files (not meant for distribution)
+ might see those files included in the wheel archive, or may even see the build fail.
+
+ You can check details about the automatic discovery (and how to configure a
+ different behaviour) in :doc:`/userguide/package_discovery`.
+* #3067: If the file ``pyproject.toml`` exists and it includes project
+ metadata/config (via ``[project]`` table or ``[tool.setuptools]``),
+ a series of new behaviors that are not backward compatible may take place:
+
+ - The default value of ``include_package_data`` will be considered to be ``True``.
+ - Setuptools will attempt to validate the ``pyproject.toml`` file according
+ to PEP 621 specification.
+ - The values specified in ``pyproject.toml`` will take precedence over those
+ specified in ``setup.cfg`` or ``setup.py``.
+
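A rough way to check whether a given project triggers the #3067 behaviours
described above; the condition mirrors the prose, and using :pypi:`tomli` here
is an assumption (any TOML parser works)::

    # Does pyproject.toml exist and carry project metadata/config?
    import os
    import tomli  # vendored by setuptools 61; also installable from PyPI

    new_behaviors = False
    if os.path.exists("pyproject.toml"):
        with open("pyproject.toml", "rb") as f:
            data = tomli.load(f)
        new_behaviors = "project" in data or "setuptools" in data.get("tool", {})
    print("PEP 621-style behaviours active:", new_behaviors)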
+Changes
+^^^^^^^
+* #2887: **[EXPERIMENTAL]** Added automatic discovery for ``py_modules`` and ``packages``
+ -- by :user:`abravalheri`.
+
+ Setuptools will try to find these values assuming that the package uses either
+ the *src-layout* (a ``src`` directory containing all the packages or modules),
+ the *flat-layout* (package directories directly under the project root),
+ or the *single-module* approach (an isolated Python file, directly under
+ the project root).
+
+ The automatic discovery will also respect layouts that are explicitly
+ configured using the ``package_dir`` option.
+
+ For backward-compatibility, this behavior will be observed **only if both**
+ ``py_modules`` **and** ``packages`` **are not set**.
+ (**Note**: specifying ``ext_modules`` might also prevent auto-discovery from
+ taking place)
+
+ If setuptools detects modules or packages that are not supposed to be in the
+ distribution, please manually set ``py_modules`` and ``packages`` in your
+ ``setup.cfg`` or ``setup.py`` file.
+ If you are using a *flat-layout*, you can also consider switching to
+ *src-layout*.
+* #2887: **[EXPERIMENTAL]** Added automatic configuration for the ``name`` metadata
+ -- by :user:`abravalheri`.
+
+ Setuptools will adopt the name of the top-level package (or module in the case
+ of single-module distributions), **only when** ``name`` **is not explicitly
+ provided**.
+
+ Please note that it is not possible to automatically derive a single name when
+ the distribution consists of multiple top-level packages or modules.
+* #3066: Added vendored dependencies for :pypi:`tomli`, :pypi:`validate-pyproject`.
+
+ These dependencies are used to read ``pyproject.toml`` files and validate them.
+* #3067: **[EXPERIMENTAL]** When using ``pyproject.toml`` metadata,
+ the default value of ``include_package_data`` is changed to ``True``.
+* #3068: **[EXPERIMENTAL]** Add support for ``pyproject.toml`` configuration
+ (as introduced by :pep:`621`). Configuration parameters not covered by
+ standards are handled in the ``[tool.setuptools]`` sub-table.
+
+ In the future, existing ``setup.cfg`` configuration
+ may be automatically converted into the ``pyproject.toml`` equivalent before taking effect
+ (as proposed in #1688). Meanwhile users can use automated tools like
+ :pypi:`ini2toml` to help in the transition.
+
+ Please note that the legacy backend is not guaranteed to work with
+ ``pyproject.toml`` configuration.
+
+ -- by :user:`abravalheri`
+* #3125: Implicit namespaces (as introduced in :pep:`420`) are now considered by default
+ during :doc:`package discovery </userguide/package_discovery>`, when
+ ``setuptools`` configuration and project metadata are added to the
+ ``pyproject.toml`` file.
+
+ To disable this behaviour, use ``namespaces = False`` when explicitly setting
+ the ``[tool.setuptools.packages.find]`` section in ``pyproject.toml``.
+
+ This change is backwards compatible and does not affect the behaviour of
+ configuration done in ``setup.cfg`` or ``setup.py``.
+* #3152: **[EXPERIMENTAL]** Added support for ``attr:`` and ``cmdclass`` configurations
+ in ``setup.cfg`` and ``pyproject.toml`` when ``package_dir`` is implicitly
+ found via auto-discovery.
+* #3178: Postponed importing ``ctypes`` when hiding files on Windows.
+ This helps to prevent errors in systems that might not have `libffi` installed.
+* #3179: Merge with pypa/distutils@267dbd25ac
+
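For the #3125 entry above: projects configured through ``setup.cfg``/``setup.py``
keep the old behaviour, but they can opt in to :pep:`420` implicit namespaces
explicitly. A hedged sketch using the long-standing ``find_namespace_packages``
helper (names are placeholders)::

    # Explicitly include implicit-namespace packages instead of relying on
    # the pyproject.toml-driven discovery described above.
    from setuptools import setup, find_namespace_packages

    setup(
        name="example-namespace-dist",  # placeholder
        packages=find_namespace_packages(include=["example_ns", "example_ns.*"]),
    )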
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #3172: Added initial documentation about configuring ``setuptools`` via ``pyproject.toml``
+ (using standard project metadata).
+
+Misc
+^^^^
+* #3065: Refactored ``setuptools.config`` by separating configuration parsing (specific
+ to the configuration file format, e.g. ``setup.cfg``) and post-processing
+ (which includes directives such as ``file:`` that can be used across different
+ configuration formats).
+
+
+v60.10.0
+--------
+
+
+Changes
+^^^^^^^
+* #2971: Deprecated upload_docs command, to be removed in the future.
+* #3137: Use samefile from stdlib, supported on Windows since Python 3.2.
+* #3170: Adopt nspektr (vendored) to implement Distribution._install_dependencies.
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #3144: Added documentation on using console_scripts from setup.py, which was previously only shown in setup.cfg -- by :user:`xhlulu`
+* #3148: Added clarifications about ``MANIFEST.in`` that include links to the PyPUG docs
+ and more prominent mentions of using a revision control system plugin as an
+ alternative.
+* #3148: Removed the mention of ``pkg_resources`` as the recommended way of accessing data
+ files, in favour of importlib.resources.
+ Additionally, more emphasis was put on the fact that *package data files* reside
+ **inside** the *package directory* (and therefore should be *read-only*).
+
+Misc
+^^^^
+* #3120: Added workaround for intermittent failures of backend tests on PyPy.
+ These tests now are marked with `XFAIL
+ <https://docs.pytest.org/en/stable/how-to/skipping.html>`_, instead of erroring
+ out directly.
+* #3124: Improved configuration for :pypi:`rst-linker` (extension used to build the
+ changelog).
+* #3133: Enhanced isolation of tests using virtual environments - PYTHONPATH is not leaking to spawned subprocesses -- by :user:`befeleme`
+* #3147: Added options to provide a pre-built ``setuptools`` wheel or sdist to be
+ used during tests with virtual environments.
+ Paths for these pre-built distribution files can now be set via the environment
+ variables: ``PRE_BUILT_SETUPTOOLS_SDIST`` and ``PRE_BUILT_SETUPTOOLS_WHEEL``.
+
+
+v60.9.3
+-------
+
+
+Misc
+^^^^
+* #3093: Repaired automated release process.
+
+
+v60.9.2
+-------
+
+
+Misc
+^^^^
+* #3035: When loading distutils from the vendored copy, rewrite ``__name__`` to ensure consistent importing from inside and out.
+
+
+v60.9.1
+-------
+
+
+Misc
+^^^^
+* #3102: Prevent vendored importlib_metadata from loading distributions from older importlib_metadata.
+* #3103: Fixed issue where string-based entry points would be omitted.
+* #3107: Bump importlib_metadata to 4.11.1 addressing issue with parsing requirements in egg-info as found in PyPy.
+
+
+v60.9.0
+-------
+
+
+Changes
+^^^^^^^
+* #2876: In the build backend, allow single config settings to be supplied.
+* #2993: Removed workaround in distutils hack for get-pip now that pypa/get-pip#137 is closed.
+* #3085: Setuptools no longer relies on ``pkg_resources`` for entry point handling.
+* #3098: Bump vendored packaging to 21.3.
+* Removed bootstrap script.
+
+
+v60.8.2
+-------
+
+
+Misc
+^^^^
+* #3091: Make ``concurrent.futures`` import lazy in vendored ``more_itertools``
+ package to avoid importing threading as a side effect (which caused
+ `gevent/gevent#1865 <https://github.com/gevent/gevent/issues/1865>`__).
+ -- by :user:`maciejp-ro`
+
+
+v60.8.1
+-------
+
+
+Misc
+^^^^
+* #3084: When vendoring jaraco packages, ensure the namespace package is converted to a simple package to support zip importer.
+
+
+v60.8.0
+-------
+
+
+Changes
+^^^^^^^
+* #3085: Setuptools now vendors importlib_resources and importlib_metadata and jaraco.text. Setuptools no longer relies on pkg_resources for ensure_directory nor parse_requirements.
+
+
+v60.7.1
+-------
+
+
+Misc
+^^^^
+* #3072: Remove lorem_ipsum from jaraco.text when vendored.
+
+
+v60.7.0
+-------
+
+
+Changes
+^^^^^^^
+* #3061: Vendored jaraco.text and use line processing from that library in pkg_resources.
+
+Misc
+^^^^
+* #3070: Avoid AttributeError in easy_install.create_home_path when sysconfig.get_config_vars values are not strings.
+
+
+v60.6.0
+-------
+
+
+Changes
+^^^^^^^
+* #3043: Merge with pypa/distutils@bb018f1ac3 including consolidated behavior in sysconfig.get_platform (pypa/distutils#104).
+* #3057: Don't include optional ``Home-page`` in metadata if no ``url`` is specified. -- by :user:`cdce8p`
+* #3062: Merge with pypa/distutils@b53a824ec3 including improved support for lib directories on non-x64 Windows builds.
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #2897: Added documentation about wrapping ``setuptools.build_meta`` in an in-tree
+ custom backend. This is a :pep:`517`-compliant way of dynamically specifying
+ build dependencies (e.g. when platform, OS and other markers are not enough).
+ -- by :user:`abravalheri`
+* #3034: Replaced occurrences of the defunct distutils-sig mailing list with pointers
+ to GitHub Discussions.
+ -- by :user:`ashemedai`
+* #3056: The documentation has stopped suggesting to add ``wheel`` to
+ :pep:`517` requirements -- by :user:`webknjaz`
+
+Misc
+^^^^
+* #3054: Used Py3 syntax ``super().__init__()`` -- by :user:`imba-tjd`
+
+
+v60.5.4
+-------
+
+
+Misc
+^^^^
+* #3009: Remove filtering of distutils warnings.
+* #3031: Suppress distutils replacement when building or testing CPython.
+
+
+v60.5.3
+-------
+
+
+Misc
+^^^^
+* #3026: Honor sysconfig variables in easy_install.
+
+
+v60.5.2
+-------
+
+
+Misc
+^^^^
+* #2993: In _distutils_hack, for get-pip, simulate existence of setuptools.
+
+
+v60.5.1
+-------
+
+
+Misc
+^^^^
+* #2918: Correct support for Python 3 native loaders.
+
+
+v60.5.0
+-------
+
+
+Changes
+^^^^^^^
+* #2990: Set the ``.origin`` attribute of the ``distutils`` module to the module's ``__file__``.
+
+
+v60.4.0
+-------
+
+
+Changes
+^^^^^^^
+* #2839: Removed ``requires`` sorting when installing wheels as an egg dir.
+* #2953: Fixed a bug where ``easy_install`` incorrectly parsed the Python 3.10 version string.
+* #3006: Fixed startup performance issue of Python interpreter due to imports of
+ costly modules in ``_distutils_hack`` -- by :user:`tiran`
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #2674: Added link to additional resources on packaging in Quickstart guide
+* #3008: "In-tree" Sphinx extension for "favicons" replaced with ``sphinx-favicon``.
+* #3008: SVG images (logo, banners, ...) optimised with the help of the ``scour``
+ package.
+
+Misc
+^^^^
+* #2862: Added integration tests that focus on building and installing some packages in
+ the Python ecosystem via ``pip`` -- by :user:`abravalheri`
+* #2952: Modified "vendoring" logic to keep license files.
+* #2968: Improved isolation for some tests that were inadvertently using the project
+ root for builds, and therefore creating directories (e.g. ``build``, ``dist``,
+ ``*.egg-info``) that could interfere with the outcome of other tests
+ -- by :user:`abravalheri`.
+* #2968: Introduced new test fixtures ``venv``, ``venv_without_setuptools``,
+ ``bare_venv`` that rely on the ``jaraco.envs`` package.
+ These new test fixtures were also used to remove the (currently problematic)
+ dependency on the ``pytest_virtualenv`` plugin.
+* #2968: Removed ``tmp_src`` test fixture. Previously this fixture was copying all the
+ files and folders under the project root, including the ``.git`` directory,
+ which is error prone and increases testing time.
+
+ Since ``tmp_src`` was used to populate virtual environments (installing the
+ version of ``setuptools`` under test via the source tree), it was replaced by
+ the new ``setuptools_sdist`` and ``setuptools_wheel`` fixtures (that are built
+ only once per test session and can be shared between all the workers for
+ read-only usage).
+
+
+v60.3.1
+-------
+
+
+Misc
+^^^^
+* #3002: Suppress AttributeError when detecting get-pip.
+
+
+v60.3.0
+-------
+
+
+Changes
+^^^^^^^
+* #2993: In _distutils_hack, bypass the distutils exception for pip when get-pip is being invoked, because it imports setuptools.
+
+Misc
+^^^^
+* #2989: Merge with pypa/distutils@788cc159. Includes fix for config vars missing from sysconfig.
+
+
+v60.2.0
+-------
+
+
+Changes
+^^^^^^^
+* #2974: Setuptools now relies on the Python logging infrastructure to log messages. Instead of using ``distutils.log.*``, use ``logging.getLogger(name).*``.
+* #2987: Sync with pypa/distutils@2def21c5d74fdd2fe7996ee4030ac145a9d751bd, including fix for missing get_versions attribute (#2969), more reliance on sysconfig from stdlib.
+
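The #2974 entry implies that plugins and custom commands should emit messages
through the standard library rather than ``distutils.log``; a minimal sketch
(the logger name is arbitrary)::

    import logging

    log = logging.getLogger(__name__)

    # Messages now flow through the ordinary logging machinery, so handlers,
    # levels and formatting configured by the calling tool also apply here.
    log.info("building extension modules")
    log.warning("falling back to default build options")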
+Misc
+^^^^
+* #2962: Avoid attempting to use local distutils when the presiding version of Setuptools on the path doesn't have one.
+* #2983: Restore 'add_shim' as the way to invoke the hook. Avoids compatibility issues between different versions of Setuptools with the distutils local implementation.
+
+
+v60.1.1
+-------
+
+
+Misc
+^^^^
+* #2980: Bypass distutils loader when setuptools module is no longer available on sys.path.
+
+
+v60.1.0
+-------
+
+
+Changes
+^^^^^^^
+* #2958: In distutils_hack, only add the metadata finder once. In ensure_local_distutils, rely on a context manager for reliable manipulation.
+* #2963: Merge with pypa/distutils@a5af364910. Includes revisited fix for pypa/distutils#15 and improved MinGW/Cygwin support from pypa/distutils#77.
+
+
+v60.0.5
+-------
+
+
+Misc
+^^^^
+* #2960: Install schemes fall back to default scheme for headers.
+
+
+v60.0.4
+-------
+
+
+Misc
+^^^^
+* #2954: Merge with pypa/distutils@eba2bcd310. Adds platsubdir to config vars available for substitution.
+
+
+v60.0.3
+-------
+
+
+Misc
+^^^^
+* #2940: Avoid KeyError in distutils hack when pip is imported during ensurepip.
+
+
+v60.0.2
+-------
+
+
+Misc
+^^^^
+* #2938: Select 'posix_user' for the scheme unless falling back to stdlib, then use 'unix_user'.
+
+
+v60.0.1
+-------
+
+
+Misc
+^^^^
+* #2944: Add support for extended install schemes in easy_install.
+
+
+v60.0.0
+-------
+
+
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #2896: Setuptools once again makes its local copy of distutils the default. To override, set SETUPTOOLS_USE_DISTUTILS=stdlib.
+
+
+v59.8.0
+-------
+
+
+Changes
+^^^^^^^
+* #2935: Merge pypa/distutils@460b59f0e68dba17e2465e8dd421bbc14b994d1f.
+
+
+v59.7.0
+-------
+
+
+Changes
+^^^^^^^
+* #2930: Require Python 3.7
+
+
+v59.6.0
+-------
+
+
+Changes
+^^^^^^^
+* #2925: Merge with pypa/distutils@92082ee42c including introduction of deprecation warning on Version classes.
+
+
+v59.5.0
+-------
+
+
+Changes
+^^^^^^^
+* #2914: Merge with pypa/distutils@8f2df0bf6.
+
+
+v59.4.0
+-------
+
+
+Changes
+^^^^^^^
+* #2893: Restore deprecated support for newlines in the Summary field.
+
+
+v59.3.0
+-------
+
+
+Changes
+^^^^^^^
+* #2902: Merge with pypa/distutils@85db7a41242.
+
+Misc
+^^^^
+* #2906: In ensure_local_distutils, re-use DistutilsMetaFinder to load the module. Avoids race conditions when _distutils_system_mod is employed.
+
+
+v59.2.0
+-------
+
+
+Changes
+^^^^^^^
+* #2875: Introduce changes from pypa/distutils@514e9d0, including support for overrides from Debian and pkgsrc, unlocking the possibility of making SETUPTOOLS_USE_DISTUTILS=local the default again.
+
+
+v59.1.1
+-------
+
+
+Misc
+^^^^
+* #2885: Fixed errors when encountering LegacyVersions.
+
+
+v59.1.0
+-------
+
+
+Changes
+^^^^^^^
+* #2497: Update packaging to 21.2.
+* #2877: Backed out the deprecation of setup_requires, replacing it with a deprecation of setuptools.installer and fetch_build_egg. Now setup_requires is still supported when installed as part of a PEP 517 build, but is deprecated when an unsatisfied requirement is encountered.
+* #2879: Bump packaging to 21.2.
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #2867: PNG/ICO images replaced with SVG in the docs.
+* #2867: Added support to SVG "favicons" via "in-tree" Sphinx extension.
+
+
+v59.0.1
+-------
+
+
+Misc
+^^^^
+* #2880: Removed URL requirement for ``pytest-virtualenv`` in ``setup.cfg``.
+ PyPI rejects packages with dependencies external to itself.
+ Instead the test dependency was overridden via ``tox.ini``.
+
+
+v59.0.0
+-------
+
+
+Deprecations
+^^^^^^^^^^^^
+* #2856: Support for custom commands that inherit directly from ``distutils`` is
+ **deprecated**. Users should extend classes provided by setuptools instead.
+
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #2870: Started raising :class:`ValueError` on invalid inline descriptions containing line breaks -- by :user:`webknjaz`
+
+Changes
+^^^^^^^
+* #2698: Exposed exception classes from ``distutils.errors`` via ``setuptools.errors``.
+* #2866: Incorporate changes from pypa/distutils@f1b0a2b.
+
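A hedged sketch of the #2698 change: custom commands can import the exception
classes from ``setuptools.errors`` instead of reaching into ``distutils.errors``
(the command below is a made-up example)::

    import os

    from setuptools import Command
    from setuptools.errors import FileError  # re-exported distutils error

    class check_assets(Command):
        """Hypothetical command that fails cleanly when assets are missing."""

        user_options = []

        def initialize_options(self):
            self.assets_dir = "assets"

        def finalize_options(self):
            pass

        def run(self):
            if not os.path.isdir(self.assets_dir):
                raise FileError(f"missing directory: {self.assets_dir}")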
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #2227: Added sphinx theme customisations to display the new logo in the sidebar and
+ use its colours as "accent" in the documentation -- by :user:`abravalheri`
+* #2227: Added new setuptools logo, including editable files and artwork documentation
+ -- by :user:`abravalheri`
+* #2698: Added mentions to ``setuptools.errors`` as a way of handling custom command
+ errors.
+* #2698: Added instructions to migrate from ``distutils.commands`` and
+ ``distutils.errors`` in the porting guide.
+* #2871: Added a note to the docs that it is possible to install
+ ``setup.py``-less projects in editable mode with :doc:`pip v21.1+
+ <pip:index>`, only having ``setup.cfg`` and ``pyproject.toml`` in
+ project root -- by :user:`webknjaz`
+
+
+v58.5.3
+-------
+
+
+Misc
+^^^^
+* #2849: Add fallback for custom ``build_py`` commands inheriting directly from
+ :mod:`distutils`, while still handling ``include_package_data=True`` for
+ ``sdist``.
+
+
+v58.5.2
+-------
+
+
+Misc
+^^^^
+* #2847: Suppress 'setup.py install' warning under bdist_wheel.
+
+
+v58.5.1
+-------
+
+
+Misc
+^^^^
+* #2846: Move PkgResourcesDeprecationWarning above implicitly-called function so that it's in the namespace when version warnings are generated in an environment that contains them.
+
+
+v58.5.0
+-------
+
+
+Changes
+^^^^^^^
+* #1461: Fix inconsistency with ``include_package_data`` and ``package_data`` in sdist
+ by replacing the loop breaking mechanism between the ``sdist`` and
+ ``egg_info`` commands -- by :user:`abravalheri`
+
+
+v58.4.0
+-------
+
+
+Changes
+^^^^^^^
+* #2497: Officially deprecated PEP 440 non-compliant versions.
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #2832: Removed the deprecated ``data_files`` option from the example in the
+ declarative configuration docs -- by :user:`abravalheri`
+* #2832: Change type of ``data_files`` option from ``dict`` to ``section`` in
+ declarative configuration docs (to match previous example) -- by
+ :user:`abravalheri`
+
+
+v58.3.0
+-------
+
+
+Changes
+^^^^^^^
+* #917: ``setup.py install`` and ``easy_install`` commands are now officially deprecated. Use other standards-based installers (like pip) and builders (like build). Workloads reliant on this behavior should pin to this major version of Setuptools. See `Why you shouldn't invoke setup.py directly <https://blog.ganssle.io/articles/2021/10/setup-py-deprecated.html>`_ for more background.
+* #1988: Deprecated the ``bdist_rpm`` command. Binary packages should be built as wheels instead.
+ -- by :user:`hugovk`
+* #2785: Replace ``configparser``'s ``readfp`` with ``read_file``, deprecated since Python 3.2.
+ -- by :user:`hugovk`
+* #2823: Officially deprecated support for ``setup_requires``. Users are encouraged instead to migrate to PEP 518 ``build-system.requires`` in ``pyproject.toml``. Users reliant on ``setup_requires`` should consider pinning to this major version to avoid disruption.
+
+Misc
+^^^^
+* #2762: Changed codecov.yml to configure the threshold to be lower
+ -- by :user:`tanvimoharir`
+
+
+v58.2.0
+-------
+
+
+Changes
+^^^^^^^
+* #2757: Add windows arm64 launchers for scripts generated by easy_install.
+* #2800: Added ``--owner`` and ``--group`` options to the ``sdist`` command,
+ for specifying file ownership within the produced tarball (similarly
+ to the corresponding distutils ``sdist`` options).
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #2792: Document how the legacy and non-legacy versions are compared, and reference to the PEP 440 scheme.
+
+
+v58.1.0
+-------
+
+
+Changes
+^^^^^^^
+* #2796: Merge with pypa/distutils@02e9f65ab0
+
+
+v58.0.4
+-------
+
+
+Misc
+^^^^
+* #2773: Retain case in setup.cfg during sdist.
+
+
+v58.0.3
+-------
+
+
+Misc
+^^^^
+* #2777: Build does not fail fast when ``use_2to3`` is supplied but set to a false value.
+
+
+v58.0.2
+-------
+
+
+Misc
+^^^^
+* #2769: Build now fails fast when ``use_2to3`` is supplied.
+
+
+v58.0.1
+-------
+
+
+Misc
+^^^^
+* #2765: In Distribution.finalize_options, suppress known removed entry points to avoid issues with older Setuptools.
+
+
+v58.0.0
+-------
+
+
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #2086: Removed support for 2to3 during builds. Projects should port to a unified codebase or pin to an older version of Setuptools using PEP 518 build-requires.
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #2746: add python_requires example
+
+
+v57.5.0
+-------
+
+
+Changes
+^^^^^^^
+* #2712: Added implicit globbing support for ``[options.data_files]`` values.
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #2737: fix various syntax and style errors in code snippets in docs
+
+
+v57.4.0
+-------
+
+
+Changes
+^^^^^^^
+* #2722: Added support for ``SETUPTOOLS_EXT_SUFFIX`` environment variable to override the suffix normally detected from the ``sysconfig`` module.
+
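A small sketch of how the #2722 override relates to the value normally detected
(the replacement suffix shown is purely illustrative)::

    import os
    import sysconfig

    # Suffix setuptools would normally use for extension modules,
    # e.g. ".cpython-310-x86_64-linux-gnu.so" on CPython 3.10 / Linux.
    print(sysconfig.get_config_var("EXT_SUFFIX"))

    # Exporting SETUPTOOLS_EXT_SUFFIX before the build overrides that value
    # for extensions built by setuptools (illustrative value).
    os.environ["SETUPTOOLS_EXT_SUFFIX"] = ".so"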
+
+v57.3.0
+-------
+
+
+Changes
+^^^^^^^
+* #2465: Documentation is now published using the Furo theme.
+
+
+v57.2.0
+-------
+
+
+Changes
+^^^^^^^
+* #2724: Added detection of Windows ARM64 build environments using the ``VSCMD_ARG_TGT_ARCH`` environment variable.
+
+
+v57.1.0
+-------
+
+
+Changes
+^^^^^^^
+* #2692: Globs are now sorted in 'license_files' restoring reproducibility by eliminating variance from disk order.
+* #2714: Update to distutils at pypa/distutils@e2627b7.
+* #2715: Removed reliance on deprecated ssl.match_hostname by removing the ssl support. Now any index operations rely on the native SSL implementation.
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #2604: Revamped the backward/cross-tool compatibility section to remove
+ some confusion.
+ Added some examples and noted the version since which ``entry_points`` are
+ supported in declarative configuration.
+ Tried to make the reading flow a bit leaner and to gather some information
+ that was a bit dispersed.
+
+
+v57.0.0
+-------
+
+
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #2645: License files excluded via the ``MANIFEST.in`` but matched by either
+ the ``license_file`` (deprecated) or ``license_files`` options
+ will nevertheless be included in the source distribution. - by :user:`cdce8p`
+
+Changes
+^^^^^^^
+* #2628: Write long description in message payload of PKG-INFO file. - by :user:`cdce8p`
+* #2645: Added ``License-File`` (multiple) to the output package metadata.
+ The field will contain the path of a license file, matched by the
+ ``license_file`` (deprecated) and ``license_files`` options,
+ relative to ``.dist-info``. - by :user:`cdce8p`
+* #2678: Moved Setuptools' own entry points into declarative config.
+* #2680: Vendored :pypi:`more_itertools` for Setuptools.
+* #2681: Setuptools own setup.py no longer declares setup_requires, but instead expects wheel to be installed as declared by pyproject.toml.
+
+Misc
+^^^^
+* #2650: Updated the docs build tooling to support the latest version of
+ Towncrier and show the previews of not-yet-released setuptools versions
+ in the changelog -- :user:`webknjaz`
+
+
+v56.2.0
+-------
+
+
+Changes
+^^^^^^^
+* #2640: Fixed handling of multiline license strings. - by :user:`cdce8p`
+* #2641: Setuptools will now always try to use the latest supported
+ metadata version for ``PKG-INFO``. - by :user:`cdce8p`
+
+
+v56.1.0
+-------
+
+
+Changes
+^^^^^^^
+* #2653: Incorporated assorted changes from pypa/distutils.
+* #2657: Adopted docs from distutils.
+* #2663: Added Visual Studio Express 2017 support -- by :user:`dofuuz`
+
+Misc
+^^^^
+* #2644: Fixed ``DeprecationWarning`` due to ``threading.Thread.setDaemon`` in tests -- by :user:`tirkarthi`
+* #2654: Made the changelog generator compatible
+ with Towncrier >= 19.9 -- :user:`webknjaz`
+* #2664: Relax the deprecation message in the distutils hack.
+
+
+v56.0.0
+-------
+
+
+Deprecations
+^^^^^^^^^^^^
+* #2620: The ``license_file`` option is now marked as deprecated.
+ Use ``license_files`` instead. -- by :user:`cdce8p`
+
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #2620: If neither ``license_file`` nor ``license_files`` is specified, the ``sdist``
+ option will now auto-include files that match the following patterns:
+ ``LICEN[CS]E*``, ``COPYING*``, ``NOTICE*``, ``AUTHORS*``.
+ This matches the behavior of ``bdist_wheel``. -- by :user:`cdce8p`
+
+Changes
+^^^^^^^
+* #2620: The ``license_file`` and ``license_files`` options now support glob patterns
+  (see the ``setup.cfg`` sketch after this list). -- by :user:`cdce8p`
+* #2632: Implemented ``VendorImporter.find_spec()`` method to get rid
+ of ``ImportWarning`` that Python 3.10 emits when only the old-style
+ importer hooks are present -- by :user:`webknjaz`
+
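+An illustrative ``setup.cfg`` fragment for the glob support mentioned above
+(the file names are hypothetical)::
+
+    [metadata]
+    license_files =
+        LICENSE*
+        licenses/*.txt
+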
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #2620: Added documentation for the ``license_files`` option. -- by :user:`cdce8p`
+
+
+v55.0.0
+-------
+
+
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #2566: Remove the deprecated ``bdist_wininst`` command. Binary packages should be built as wheels instead. -- by :user:`hroncok`
+
+
+v54.2.0
+-------
+
+
+Changes
+^^^^^^^
+* #2608: Added informative error message to PEP 517 build failures owing to
+ an empty ``setup.py`` -- by :user:`layday`
+
+
+v54.1.3
+-------
+
+No significant changes.
+
+
+v54.1.2
+-------
+
+
+Misc
+^^^^
+* #2595: Reduced scope of dash deprecation warning to Setuptools/distutils only -- by :user:`melissa-kun-li`
+
+
+v54.1.1
+-------
+
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #2584: Added ``sphinx-inline-tabs`` extension to allow for comparison of ``setup.py`` and its equivalent ``setup.cfg`` -- by :user:`amy-lei`
+
+Misc
+^^^^
+* #2592: Made option keys in the ``[metadata]`` section of ``setup.cfg`` case-sensitive. Users with
+  uppercase option spellings will get a warning suggesting they change them to lowercase
+  -- by :user:`melissa-kun-li`
+
+
+v54.1.0
+-------
+
+
+Changes
+^^^^^^^
+* #1608: Removed the conversion of dashes to underscores in the :code:`extras_require` and :code:`data_files` of :code:`setup.cfg` to support the usage of dashes. The method will warn users when they use a dash-separated key, which in the future will only be allowed with an underscore. Note: the method still performs the dash-to-underscore conversion to preserve compatibility, but future versions will no longer support it -- by :user:`melissa-kun-li`
+
+
+v54.0.0
+-------
+
+
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #2582: Simplified build-from-source story by providing bootstrapping metadata in a separate egg-info directory. Build requirements no longer include setuptools itself. Sdist once again includes the pyproject.toml. Project can no longer be installed from source on pip 19.x, but install from source is still supported on pip < 19 and pip >= 20 and install from wheel is still supported with pip >= 9.
+
+Changes
+^^^^^^^
+* #1932: Handled :code:`AttributeError` by raising :code:`DistutilsSetupError` in :code:`dist.check_specifier()` when specifier is not a string -- by :user:`melissa-kun-li`
+* #2570: Correctly parse cmdclass in setup.cfg.
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #2553: Added userguide example for markers in extras_require -- by :user:`pwoolvett`
+
+
+v53.1.0
+-------
+
+
+Changes
+^^^^^^^
+* #1937: Preserved case-sensitivity of keys in setup.cfg so that entry point names are case-sensitive. Changed sensitivity of configparser. NOTE: Any projects relying on case-insensitivity will need to adapt to accept the original case as published. -- by :user:`melissa-kun-li`
+* #2573: Fixed an error when uploading a Sphinx doc with the :code:`upload_docs` command. An HTML builder will be used.
+  Note: :code:`upload_docs` is deprecated for PyPI, but is supported for other sites -- by :user:`melissa-kun-li`
+
+
+v53.0.0
+-------
+
+
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #1527: Removed the bootstrap script. Setuptools now requires pip or another PEP 517-compliant builder such as 'build' to build. Setuptools can now be installed from the GitHub main branch.
+
+
+v52.0.0
+-------
+
+
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #2537: Remove fallback support for fetch_build_eggs using easy_install. Now pip is required for setup_requires to succeed.
+* #2544: Removed 'easy_install' top-level module (runpy entry point) and 'easy_install' console script.
+* #2545: Removed support for eggsecutables.
+
+Changes
+^^^^^^^
+* #2459: Tests now run in parallel via pytest-xdist, completing in about half the time. Special thanks to :user:`webknjaz` for hard work implementing test isolation. To run without parallelization, disable the plugin with ``tox -- -p no:xdist``.
+
+
+v51.3.3
+-------
+
+
+Misc
+^^^^
+* #2539: Fix AttributeError in Description validation.
+
+
+v51.3.2
+-------
+
+
+Misc
+^^^^
+* #1390: Validation of the Description field is now more lenient, emitting a warning and mangling the value to be valid (replacing newlines with spaces).
+
+
+v51.3.1
+-------
+
+
+Misc
+^^^^
+* #2536: Reverted tag deduplication handling.
+
+
+v51.3.0
+-------
+
+
+Changes
+^^^^^^^
+* #1390: Newlines in metadata description/Summary now trigger a ValueError.
+* #2481: Define ``create_module()`` and ``exec_module()`` methods in ``VendorImporter``
+ to get rid of ``ImportWarning`` -- by :user:`hroncok`
+* #2489: ``pkg_resources`` behavior for zipimport now matches the regular behavior, and finds
+ ``.egg-info`` (previously would only find ``.dist-info``) -- by :user:`thatch`
+* #2529: Fixed an issue where version tags may be added multiple times
+
+
+v51.2.0
+-------
+
+
+Changes
+^^^^^^^
+* #2493: Use importlib.import_module() rather than the deprecated loader.load_module()
+ in pkg_resources namespace declaration -- by :user:`encukou`
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #2525: Fix typo in the document page about entry point. -- by :user:`jtr109`
+
+Misc
+^^^^
+* #2534: Avoid hitting network during test_easy_install.
+
+
+v51.1.2
+-------
+
+
+Misc
+^^^^
+* #2505: Disable inclusion of package data as it causes 'tests' to be included as data.
+
+
+v51.1.1
+-------
+
+
+Misc
+^^^^
+* #2534: Avoid hitting network during test_virtualenv.test_test_command.
+
+
+v51.1.0
+-------
+
+
+Changes
+^^^^^^^
+* #2486: Project adopts jaraco/skeleton for shared package maintenance.
+
+Misc
+^^^^
+* #2477: Restore inclusion of rst files in sdist.
+* #2484: Setuptools has replaced the master branch with the main branch.
+* #2485: Fixed failing test when pip 20.3+ is present.
+ -- by :user:`yan12125`
+* #2487: Fix tests with pytest 6.2
+ -- by :user:`yan12125`
+
+
+v51.0.0
+-------
+
+
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #2435: Require Python 3.6 or later.
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #2430: Fixed inconsistent RST title nesting levels caused by #2399
+ -- by :user:`webknjaz`
+* #2430: Fixed a typo in Sphinx docs that made docs dev section disappear
+ as a result of PR #2426 -- by :user:`webknjaz`
+
+Misc
+^^^^
+* #2471: Removed the tests that guarantee that the vendored dependencies can be built by distutils.
+
+
+v50.3.2
+-------
+
+
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #2394: Extended towncrier news template to include change note categories.
+  This makes it possible to see what types of changes a given version introduces
+  -- by :user:`webknjaz`
+* #2427: Started enforcing strict syntax and reference validation
+ in the Sphinx docs -- by :user:`webknjaz`
+* #2428: Removed redundant Sphinx ``Makefile`` support -- by :user:`webknjaz`
+
+Misc
+^^^^
+* #2401: Enabled test results reporting in AppVeyor CI
+ -- by :user:`webknjaz`
+* #2420: Replace Python 3.9.0 beta with 3.9.0 final on GitHub Actions.
+* #2421: Python 3.9 Trove classifier got added to the dist metadata
+ -- by :user:`webknjaz`
+
+
+v50.3.1
+-------
+
+
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #2093: Finalized doc revamp.
+* #2097: doc: simplify index and group deprecated files
+* #2102: doc overhaul step 2: break main doc into multiple sections
+* #2111: doc overhaul step 3: update userguide
+* #2395: Added a ``:user:`` role to Sphinx config -- by :user:`webknjaz`
+* #2395: Added an illustrative explanation about the change notes to fragments dir -- by :user:`webknjaz`
+
+Misc
+^^^^
+* #2379: Travis CI test suite now tests against PPC64.
+* #2413: Suppress EOF errors (and other exceptions) when importing lib2to3.
+
+
+v50.3.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #2368: In distutils, restore support for monkeypatched CCompiler.spawn per pypa/distutils#15.
+
+
+v50.2.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #2355: When pip is imported as part of a build, leave distutils patched.
+* #2380: There are some setuptools specific changes in the
+ ``setuptools.command.bdist_rpm`` module that are no longer needed, because
+ they are part of the ``bdist_rpm`` module in distutils in Python
+ 3.5.0. Therefore, code was removed from ``setuptools.command.bdist_rpm``.
+
+
+v50.1.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #2350: Setuptools reverts using the included distutils by default. Platform maintainers and system integrators and others are *strongly* encouraged to set ``SETUPTOOLS_USE_DISTUTILS=local`` to help identify and work through the reported issues with distutils adoption, mainly to file issues and pull requests with pypa/distutils such that distutils performs as needed across every supported environment.
+
+
+v50.0.3
+-------
+
+
+
+Misc
+^^^^
+* #2363: Restore link_libpython support on Python 3.7 and earlier (see pypa/distutils#9).
+
+
+v50.0.2
+-------
+
+
+
+Misc
+^^^^
+* #2352: In distutils hack, use absolute import rather than relative to avoid bpo-30876.
+
+
+v50.0.1
+-------
+
+
+
+Misc
+^^^^
+* #2357: Restored Python 3.5 support in distutils.util for missing ``subprocess._optim_args_from_interpreter_flags``.
+* #2358: Restored AIX support on Python 3.8 and earlier.
+* #2361: Add Python 3.10 support to _distutils_hack. Get the 'Loader' abstract class
+ from importlib.abc rather than importlib.util.abc (alias removed in Python
+ 3.10).
+
+
+v50.0.0
+-------
+
+
+
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #2232: Once again, Setuptools overrides the stdlib distutils on import. For environments or invocations where this behavior is undesirable, users are provided with a temporary escape hatch. If the environment variable ``SETUPTOOLS_USE_DISTUTILS`` is set to ``stdlib``, Setuptools will fall back to the legacy behavior. Use of this escape hatch is discouraged, but it is provided to ease the transition while proper fixes for edge cases can be addressed.
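+
+A conceptual sketch of how such a switch can be consulted (not the real
+``_distutils_hack`` code)::
+
+    import os
+
+    # "stdlib" selects the standard-library distutils (the escape hatch
+    # described above); anything else keeps the setuptools-provided copy.
+    use_stdlib = os.environ.get("SETUPTOOLS_USE_DISTUTILS") == "stdlib"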
+
+Changes
+^^^^^^^
+* #2334: In MSVC module, refine text in error message.
+
+
+v49.6.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #2129: In pkg_resources, no longer detect any pathname ending in .egg as a Python egg. Now the path must be an unpacked egg or a zip file.
+
+
+v49.5.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #2306: When running as a PEP 517 backend, setuptools does not try to install
+ ``setup_requires`` itself. They are reported as build requirements for the
+ frontend to install.
+
+
+v49.4.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #2310: Updated vendored packaging version to 20.4.
+
+
+v49.3.2
+-------
+
+
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #2300: Improve the ``safe_version`` function documentation
+
+Misc
+^^^^
+* #2297: Once again, in stubs prefer exec_module to the deprecated load_module.
+
+
+v49.3.1
+-------
+
+
+
+Changes
+^^^^^^^
+* #2316: Removed warning when ``distutils`` is imported before ``setuptools`` when ``distutils`` replacement is not enabled.
+
+
+v49.3.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #2259: Setuptools now provides a .pth file (except for editable installs of setuptools) to the target environment to ensure that when enabled, the setuptools-provided distutils is preferred before setuptools has been imported (and even if setuptools is never imported). Honors the SETUPTOOLS_USE_DISTUTILS environment variable.
+
+
+v49.2.1
+-------
+
+
+
+Misc
+^^^^
+* #2257: Fixed two flaws in distutils._msvccompiler.MSVCCompiler.spawn.
+
+
+v49.2.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #2230: Now warn the user when setuptools is imported after distutils modules have been loaded (exempting PyPy for 3.6), directing the users of packages to import setuptools first.
+
+
+v49.1.3
+-------
+
+
+
+Misc
+^^^^
+* #2212: (Distutils) Allow spawn to accept environment. Avoid monkey-patching global state.
+* #2249: Fix extension loading technique in stubs.
+
+
+v49.1.2
+-------
+
+
+
+Changes
+^^^^^^^
+* #2232: In preparation for re-enabling a local copy of distutils, Setuptools now honors an environment variable, SETUPTOOLS_USE_DISTUTILS. If set to 'stdlib' (current default), distutils will be used from the standard library. If set to 'local' (default in an imminent backward-incompatible release), the local copy of distutils will be used.
+
+
+v49.1.1
+-------
+
+
+
+Misc
+^^^^
+* #2094: Removed pkg_resources.py2_warn module, which is no longer reachable.
+
+
+v49.0.1
+-------
+
+
+
+Misc
+^^^^
+* #2228: Applied fix for pypa/distutils#3, restoring expectation that spawn will raise a DistutilsExecError when attempting to execute a missing file.
+
+
+v49.1.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #2228: Disabled distutils adoption for now while emergent issues are addressed.
+
+
+v49.0.0
+-------
+
+
+
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #2165: Setuptools no longer installs a site.py file during easy_install or develop installs. As a result, .eggs on PYTHONPATH will no longer take precedence over other packages on sys.path. If this issue affects your production environment, please reach out to the maintainers at #2165.
+
+Changes
+^^^^^^^
+* #2137: Removed (private) pkg_resources.RequirementParseError, now replaced by packaging.requirements.InvalidRequirement. Kept the name for compatibility, but users should catch InvalidRequirement instead.
+* #2180: Update vendored packaging in pkg_resources to 19.2.
+
+Misc
+^^^^
+* #2199: Fix exception causes all over the codebase by using ``raise new_exception from old_exception``
+
+
+v48.0.0
+-------
+
+
+
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #2143: Setuptools adopts distutils from the Python 3.9 standard library and no longer depends on distutils in the standard library. When importing ``setuptools`` or ``setuptools.distutils_patch``, Setuptools will expose its bundled version as a top-level ``distutils`` package (and unload any previously-imported top-level distutils package), retaining the expectation that ``distutils``' objects are actually Setuptools objects.
+ To avoid getting any legacy behavior from the standard library, projects are advised to always "import setuptools" prior to importing anything from distutils. This behavior happens by default when using ``pip install`` or ``pep517.build``. Workflows that rely on ``setup.py (anything)`` will need to first ensure setuptools is imported. One way to achieve this behavior without modifying code is to invoke Python thus: ``python -c "import setuptools; exec(open('setup.py').read())" (anything)``.
+
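+In practice that advice amounts to ordering imports in ``setup.py`` like so
+(a schematic example; the project name and sources are placeholders)::
+
+    import setuptools  # import first so the bundled distutils is used
+    from distutils.core import Extension  # now resolves to the bundled copy
+
+    setuptools.setup(
+        name="example",
+        ext_modules=[Extension("example._ext", ["src/ext.c"])],
+    )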
+
+v47.3.2
+-------
+
+
+
+Misc
+^^^^
+* #2071: Replaced references to the deprecated imp package with references to importlib
+
+
+v47.3.1
+-------
+
+
+
+Misc
+^^^^
+* #1973: Removed ``pkg_resources.py31compat.makedirs`` in favor of the stdlib. Use ``os.makedirs()`` instead.
+* #2198: Restore ``__requires__`` directive in easy-install wrapper scripts.
+
+
+v47.3.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #2197: Console script wrapper for editable installs now has a unified template and honors importlib_metadata if present for faster script execution on older Pythons.
+
+Misc
+^^^^
+* #2195: Fix broken entry points generated by easy-install (pip editable installs).
+
+
+v47.2.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #2194: Editable-installed entry points now load significantly faster on Python versions 3.8+.
+* #1471: Incidentally fixed by #2194 on Python 3.8 or when importlib_metadata is present.
+
+
+v47.1.1
+-------
+
+
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #2156: Update mailing list pointer in developer docs
+
+Incorporate changes from v44.1.1:
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+* #2158: Avoid loading working set during ``Distribution.finalize_options`` prior to invoking ``_install_setup_requires``, broken since v42.0.0.
+
+
+v44.1.1
+-------
+
+
+
+Misc
+^^^^
+* #2158: Avoid loading working set during ``Distribution.finalize_options`` prior to invoking ``_install_setup_requires``, broken since v42.0.0.
+
+
+v47.1.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #2070: In wheel-to-egg conversion, use simple pkg_resources-style namespace declaration for packages that declare namespace_packages.
+
+
+v47.0.0
+-------
+
+
+
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #2094: Setuptools now actively crashes under Python 2. Python 3.5 or later is required. Users of Python 2 should use ``setuptools<45``.
+
+Changes
+^^^^^^^
+* #1700: Document all supported keywords by migrating the ones from distutils.
+
+
+v46.4.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #1753: ``attr:`` now extracts variables through rudimentary examination of the AST,
+ thereby supporting modules with third-party imports. If examining the AST
+ fails to find the variable, ``attr:`` falls back to the old behavior of
+ importing the module. Works on Python 3 only.
+
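+For reference, the directive in question is the ``attr:`` form of ``setup.cfg``
+(the package name below is a placeholder)::
+
+    [metadata]
+    name = example
+    version = attr: example.__version__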
+
+v46.3.1
+-------
+
+No significant changes.
+
+
+v46.3.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #2089: Package index functionality no longer attempts to remove an md5 fragment from the index URL. This functionality, added for distribute #163, is no longer relevant.
+
+Misc
+^^^^
+* #2041: Preserve file modes during package file copying, but clear the read-only flag for the target afterwards.
+* #2105: Filter ``2to3`` deprecation warnings from ``TestDevelop.test_2to3_user_mode``.
+
+
+v46.2.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #2040: Deprecated the ``bdist_wininst`` command. Binary packages should be built as wheels instead.
+* #2062: Change 'Mac OS X' to 'macOS' in code.
+* #2075: Stop recognizing files ending with ``.dist-info`` as distribution metadata.
+* #2086: Deprecate 'use_2to3' functionality. Packagers are encouraged to use single-source solutions or build tool chains to manage conversions outside of setuptools.
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #1698: Added documentation for ``build_meta`` (a bare minimum, not completed).
+
+Misc
+^^^^
+* #2082: Filter ``lib2to3`` ``PendingDeprecationWarning`` and ``DeprecationWarning`` in tests,
+ because ``lib2to3`` is `deprecated in Python 3.9 <https://bugs.python.org/issue40360>`_.
+
+
+v46.1.3
+-------
+
+No significant changes.
+
+
+v46.1.2
+-------
+
+
+
+Misc
+^^^^
+* #1458: Added template for reporting Python 2 incompatibilities.
+
+
+v46.1.1
+-------
+
+No significant changes.
+
+
+v46.1.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #308: Allow version number normalization to be bypassed by wrapping in a 'setuptools.sic()' call (see the example after this list).
+* #1424: No longer preserve file modes when building ``package_data``; preserving them may break a build if the user's package data has the read-only flag set.
+* #1431: In ``easy_install.check_site_dir``, ensure the installation directory exists.
+* #1563: In ``pkg_resources`` prefer ``find_spec`` (PEP 451) to ``find_module``.
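+
+The ``setuptools.sic()`` escape hatch mentioned above can be used roughly like
+this (the project name and version string are just examples)::
+
+    import setuptools
+
+    setuptools.setup(
+        name="example",
+        version=setuptools.sic("2021.04.01-dev"),  # kept verbatim, not normalized
+    )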
+
+Incorporate changes from v44.1.0:
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+* #1704: Set sys.argv[0] in setup script run by build_meta.__legacy__
+* #1959: Fix for Python 4: replace unsafe six.PY3 with six.PY2
+* #1994: Fixed a bug in the "setuptools.finalize_distribution_options" hook that led to ignoring the order attribute of entry points managed by this hook.
+
+
+v44.1.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #1704: Set sys.argv[0] in setup script run by build_meta.__legacy__
+* #1959: Fix for Python 4: replace unsafe six.PY3 with six.PY2
+* #1994: Fixed a bug in the "setuptools.finalize_distribution_options" hook that led to ignoring the order attribute of entry points managed by this hook.
+
+
+v46.0.0
+-------
+
+
+
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #65: Once again as in 3.0, removed the Features feature.
+
+Changes
+^^^^^^^
+* #1890: Fix vendored dependencies so importing ``setuptools.extern.some_module`` gives the same object as ``setuptools._vendor.some_module``. This makes Metadata picklable again.
+* #1899: Test suite now fails on warnings.
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #2011: Fix broken link to distutils docs on package_data
+
+Misc
+^^^^
+* #1991: Include pkg_resources test data in sdist, so tests can be executed from it.
+
+
+v45.3.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #1557: Deprecated eggsecutable scripts and updated docs.
+* #1904: Update msvc.py to use CPython 3.8.0 mechanism to find msvc 14+
+
+
+v45.2.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #1905: Fixed defect in _imp, introduced in 41.6.0 when the 'tests' directory is not present.
+* #1941: Improve editable installs with PEP 518 build isolation:
+
+ * The ``--user`` option is now always available. A warning is issued if the user site directory is not available.
+ * The error shown when the install directory is not in ``PYTHONPATH`` has been turned into a warning.
+* #1981: Setuptools now declares its ``tests`` and ``docs`` dependencies in metadata (extras).
+* #1985: Add support for installing scripts in environments where bdist_wininst is missing (i.e. Python 3.9).
+
+Misc
+^^^^
+* #1968: Add flake8-2020 to check for misuse of sys.version or sys.version_info.
+
+
+v45.1.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #1458: Add minimum sunset date and preamble to Python 2 warning.
+* #1704: Set sys.argv[0] in setup script run by build_meta.__legacy__
+* #1974: Add Python 3 Only Trove Classifier and remove universal wheel declaration for more complete transition from Python 2.
+
+
+v45.0.0
+-------
+
+
+
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #1458: Drop support for Python 2. Setuptools now requires Python 3.5 or later. Install setuptools using pip >=9 or pin to Setuptools <45 to maintain 2.7 support.
+
+Changes
+^^^^^^^
+* #1959: Fix for Python 4: replace unsafe six.PY3 with six.PY2
+
+
+v44.0.0
+-------
+
+
+
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #1908: Drop support for Python 3.4.
+
+
+v43.0.0
+-------
+
+
+
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #1634: Include ``pyproject.toml`` in source distribution by default. Projects relying on the previous behavior where ``pyproject.toml`` was excluded by default should stop relying on that behavior or add ``exclude pyproject.toml`` to their MANIFEST.in file.
+
+Changes
+^^^^^^^
+* #1927: Setuptools once again declares 'setuptools' in the ``build-system.requires`` and adds PEP 517 build support by declaring itself as the ``build-backend``. It additionally specifies ``build-system.backend-path`` to rely on itself for those builders that support it.
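+
+For a project that consumes setuptools as its build backend, the corresponding
+``pyproject.toml`` declaration looks roughly like this (``backend-path`` is only
+used by setuptools itself for self-bootstrapping)::
+
+    [build-system]
+    requires = ["setuptools", "wheel"]
+    build-backend = "setuptools.build_meta"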
+
+
+v42.0.2
+-------
+
+Changes
+^^^^^^^
+
+* #1921: Fix support for easy_install's ``find-links`` option in ``setup.cfg``.
+* #1922: Build dependencies (setup_requires and tests_require) now install transitive dependencies indicated by extras.
+
+
+v42.0.1
+-------
+
+
+
+Changes
+^^^^^^^
+* #1918: Fix regression in handling wheels compatibility tags.
+
+
+v42.0.0
+-------
+
+
+
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #1830, #1909: Mark the easy_install script and setuptools command as deprecated, and use `pip <https://pip.pypa.io/en/stable/>`_ when available to fetch/build wheels for missing ``setup_requires``/``tests_require`` requirements, with the following differences in behavior:
+ * support for ``python_requires``
+ * better support for wheels (proper handling of priority with respect to PEP 425 tags)
+ * PEP 517/518 support
+ * eggs are not supported
+ * no support for the ``allow_hosts`` easy_install option (``index_url``/``find_links`` are still honored)
+ * pip environment variables are honored (and take precedence over easy_install options)
+* #1898: Removed the "upload" and "register" commands in favor of :pypi:`twine`.
+
+Changes
+^^^^^^^
+* #1767: Add support for the ``license_files`` option in ``setup.cfg`` to automatically
+ include multiple license files in a source distribution.
+* #1829: Update handling of wheels compatibility tags:
+ * add support for manylinux2010
+ * fix use of removed 'm' ABI flag in Python 3.8 on Windows
+* #1861: Fix empty namespace package installation from wheel.
+* #1877: Setuptools now exposes a new entry point hook "setuptools.finalize_distribution_options", enabling plugins like :pypi:`setuptools_scm` to configure options on the distribution at finalization time.
+
+
+v41.6.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #479: Replace usage of deprecated ``imp`` module with local re-implementation in ``setuptools._imp``.
+
+
+v41.5.1
+-------
+
+
+
+Changes
+^^^^^^^
+* #1891: Fix code for detecting Visual Studio's version on Windows under Python 2.
+
+
+v41.5.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #1811: Improve Visual C++ 14.X support, mainly for Visual Studio 2017 and 2019.
+* #1814: Fix ``pkg_resources.Requirement`` hash/equality implementation: take PEP 508 direct URL into account.
+* #1824: Fix tests when running under ``python3.10``.
+* #1878: Formally deprecated the ``test`` command, with the recommendation that users migrate to ``tox``.
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #1860: Update documentation to mention the egg format is not supported by pip and dependency links support was dropped starting with pip 19.0.
+* #1862: Drop ez_setup documentation: deprecated for some time (last updated in 2016), and still relying on easy_install (deprecated too).
+* #1868: Drop most documentation references to (deprecated) EasyInstall.
+* #1884: Added a trove classifier to document support for Python 3.8.
+
+Misc
+^^^^
+* #1886: Added Python 3.8 release to the Travis test matrix.
+
+
+v41.4.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #1847: In declarative config, now traps errors when invalid ``python_requires`` values are supplied.
+
+
+v41.3.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #1690: When storing extras, rely on OrderedSet to retain order of extras as indicated by the packager, which will also be deterministic on Python 2.7 (with PYTHONHASHSEED unset) and Python 3.6+.
+
+Misc
+^^^^
+* #1858: Fixed failing integration test triggered by 'long_description_content_type' in packaging.
+
+
+v41.2.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #479: Remove some usage of the deprecated ``imp`` module.
+
+Misc
+^^^^
+* #1565: Changed html_sidebars from string to list of string as per
+ https://www.sphinx-doc.org/en/master/changes.html#id58
+
+
+v41.1.0
+-------
+
+
+
+Misc
+^^^^
+* #1697: Moved most of the constants from setup.py to setup.cfg
+* #1749: Fixed issue with the PEP 517 backend where building a source distribution would fail if any tarball existed in the destination directory.
+* #1750: Fixed an issue with PEP 517 backend where wheel builds would fail if the destination directory did not already exist.
+* #1756: Force metadata-version >= 1.2 when project URLs are present.
+* #1769: Improve ``package_data`` check: ensure the dictionary values are lists/tuples of strings.
+* #1788: Changed compatibility fallback logic for ``html.unescape`` to avoid accessing ``HTMLParser.unescape`` when not necessary. ``HTMLParser.unescape`` is deprecated and will be removed in Python 3.9.
+* #1790: Added the file path to the error message when a ``UnicodeDecodeError`` occurs while reading a metadata file.
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #1776: Use license classifiers rather than the license field.
+
+
+v41.0.1
+-------
+
+
+
+Changes
+^^^^^^^
+* #1671: Fixed issue with the PEP 517 backend that prevented building a wheel when the ``dist/`` directory contained existing ``.whl`` files.
+* #1709: In test.paths_on_python_path, avoid adding unnecessary duplicates to the PYTHONPATH.
+* #1741: In package_index, now honor "current directory" during a checkout of git and hg repositories under Windows
+
+
+v41.0.0
+-------
+
+
+
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #1735: When parsing setup.cfg files, setuptools now requires the files to be encoded as UTF-8. Any other encoding will lead to a UnicodeDecodeError. This change removes support for specifying an encoding using a 'coding: ' directive in the header of the file, a feature that was introduced in 40.7. Given the recent release of the aforementioned feature, it is assumed that few if any projects are utilizing the feature to specify an encoding other than UTF-8.
+
+
+v40.9.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #1675: Added support for ``setup.cfg``-only projects when using the ``setuptools.build_meta`` backend. Projects that have enabled PEP 517 no longer need to have a ``setup.py`` and can use the purely declarative ``setup.cfg`` configuration file instead.
+* #1720: Added support for ``pkg_resources.parse_requirements``-style requirements in ``setup_requires`` when ``setup.py`` is invoked from the ``setuptools.build_meta`` build backend.
+* #1664: Added the path to the ``PKG-INFO`` or ``METADATA`` file in the exception
+ text when the ``Version:`` header can't be found.
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #1705: Removed some placeholder documentation sections referring to deprecated features.
+
+
+v40.8.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #1652: Added the ``build_meta:__legacy__`` backend, a "compatibility mode" PEP 517 backend that can be used as the default when ``build-backend`` is left unspecified in ``pyproject.toml``.
+* #1635: Resource paths passed to ``pkg_resources.resource_string`` and similar no longer accept paths that traverse parents or that begin with a leading ``/``. Violations of this expectation raise DeprecationWarnings and will become errors. Additionally, any paths that are absolute on Windows are strictly disallowed and will raise ValueErrors.
+* #1536: ``setuptools`` will now automatically include licenses if ``setup.cfg`` contains a ``license_file`` attribute, unless this file is manually excluded inside ``MANIFEST.in``.
+
+
+v40.7.3
+-------
+
+
+
+Changes
+^^^^^^^
+* #1670: In package_index, revert to using a copy of splituser from Python 3.8. Attempts to use ``urllib.parse.urlparse`` led to problems as reported in #1663 and #1668. This change serves as an alternative to #1499 and fixes #1668.
+
+
+v40.7.2
+-------
+
+
+
+Changes
+^^^^^^^
+* #1666: Restore port in URL handling in package_index.
+
+
+v40.7.1
+-------
+
+
+
+Changes
+^^^^^^^
+* #1660: On Python 2, when reading config files, downcast options from text to bytes to satisfy distutils expectations.
+
+
+v40.7.0
+-------
+
+
+
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #1551: File inputs for the ``license`` field in ``setup.cfg`` files now explicitly raise an error.
+
+Changes
+^^^^^^^
+* #1180: Add support for non-ASCII in setup.cfg (#1062). Add support for native strings on some parameters (#1136).
+* #1499: ``setuptools.package_index`` no longer relies on the deprecated ``urllib.parse.splituser`` per Python #27485.
+* #1544: Added tests for PackageIndex.download (for git URLs).
+* #1625: In PEP 517 build_meta builder, ensure that sdists are built as gztar per the spec.
+
+
+v40.6.3
+-------
+
+
+
+Changes
+^^^^^^^
+* #1594: PEP 517 backend no longer declares setuptools as a dependency as it can be assumed.
+
+
+v40.6.2
+-------
+
+
+
+Changes
+^^^^^^^
+* #1592: Fix invalid dependency on external six module (instead of vendored version).
+
+
+v40.6.1
+-------
+
+
+
+Changes
+^^^^^^^
+* #1590: Fixed regression where packages without ``author`` or ``author_email`` fields generated malformed package metadata.
+
+
+v40.6.0
+-------
+
+
+
+Deprecations
+^^^^^^^^^^^^
+* #1541: Officially deprecated the ``requires`` parameter in ``setup()``.
+
+Changes
+^^^^^^^
+* #1519: In ``pkg_resources.normalize_path``, additional path normalization is now performed to ensure that path values referring to the same directory are always equal, preventing false positives when checking that scripts have a consistent prefix to set up on Windows.
+* #1545: Changed the warning class of all deprecation warnings; deprecation warning classes are no longer derived from ``DeprecationWarning`` and are thus visible by default.
+* #1554: ``build_meta.build_sdist`` now includes ``setup.py`` in source distributions by default.
+* #1576: Started monkey-patching ``get_metadata_version`` and ``read_pkg_file`` onto ``distutils.DistributionMetadata`` to retain the correct version on the ``PKG-INFO`` file in the (deprecated) ``upload`` command.
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #1395: Changed Pyrex references to Cython in the documentation.
+* #1456: Documented that the ``rpmbuild`` package is required for the ``bdist_rpm`` command.
+* #1537: Documented how to use ``setup.cfg`` for ``src/`` layouts.
+* #1539: Added minimum version column in ``setup.cfg`` metadata table.
+* #1552: Fixed a minor typo in the python 2/3 compatibility documentation.
+* #1553: Updated installation instructions to point to ``pip install`` instead of ``ez_setup.py``.
+* #1560: Updated ``setuptools`` distribution documentation to remove some outdated information.
+* #1564: Documented ``setup.cfg`` minimum version for version and project_urls.
+
+Misc
+^^^^
+* #1533: Restricted the ``recursive-include setuptools/_vendor`` to contain only .py and .txt files.
+* #1572: Added the ``concurrent.futures`` backport ``futures`` to the Python 2.7 test suite requirements.
+
+
+v40.5.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #1335: In ``pkg_resources.normalize_path``, fix issue on Cygwin when cwd contains symlinks.
+* #1502: Deprecated support for downloads from Subversion in package_index/easy_install.
+* #1517: Dropped use of six.u in favor of ``u""`` literals.
+* #1520: Added support for ``data_files`` in ``setup.cfg``.
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #1525: Fixed rendering of the deprecation warning in easy_install doc.
+
+
+v40.4.3
+-------
+
+
+
+Changes
+^^^^^^^
+* #1480: Bump vendored pyparsing in pkg_resources to 2.2.1.
+
+
+v40.4.2
+-------
+
+
+
+Misc
+^^^^
+* #1497: Updated gitignore in repo.
+
+
+v40.4.1
+-------
+
+
+
+Changes
+^^^^^^^
+* #1480: Bump vendored pyparsing to 2.2.1.
+
+
+v40.4.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #1481: Join the sdist ``--dist-dir`` and the ``build_meta`` sdist directory argument to point to the same target (meaning the build frontend no longer needs to clean manually the dist dir to avoid multiple sdist presence, and setuptools no longer needs to handle conflicts between the two).
+
+
+v40.3.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #1402: Fixed a bug with namespace packages under Python 3.6 when one package in the
+  current directory hides another which is installed.
+* #1427: Set timestamp of ``.egg-info`` directory whenever ``egg_info`` command is run.
+* #1474: ``build_meta.get_requires_for_build_sdist`` now does not include the ``wheel`` package anymore.
+* #1486: Suppress warnings in pkg_resources.handle_ns.
+
+Misc
+^^^^
+* #1479: Remove internal use of six.binary_type.
+
+
+v40.2.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #1466: Fix handling of Unicode arguments in PEP 517 backend
+
+
+v40.1.1
+--------
+
+
+
+Changes
+^^^^^^^
+* #1465: Fix regression with ``egg_info`` command when tagging is used.
+
+
+v40.1.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #1410: Deprecated ``upload`` and ``register`` commands.
+* #1312: Introduced find_namespace_packages() to find PEP 420 namespace packages.
+* #1420: Added find_namespace: directive to config parser.
+* #1418: Solved a race condition when creating egg cache directories.
+* #1450: Upgraded vendored PyParsing from 2.1.10 to 2.2.0.
+* #1451: Upgraded vendored appdirs from 1.4.0 to 1.4.3.
+* #1388: Fixed "Microsoft Visual C++ Build Tools" link in exception when Visual C++ not found.
+* #1389: Added support for scripts which have unicode content.
+* #1416: Moved several Python version checks over to using ``six.PY2`` and ``six.PY3``.
+
+Misc
+^^^^
+* #1441: Removed spurious executable permissions from files that don't need them.
+
+
+v40.0.0
+-------
+
+
+
+Breaking Changes
+^^^^^^^^^^^^^^^^
+* #1342: Drop support for Python 3.3.
+
+Changes
+^^^^^^^
+* #1366: In package_index, fixed handling of encoded entities in URLs.
+* #1383: In pkg_resources VendorImporter, avoid removing packages imported from the root.
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #1379: Minor doc fixes after actually using the new release process.
+* #1385: Removed section on non-package data files.
+* #1403: Fix developer's guide.
+
+Misc
+^^^^
+* #1404: Fix PEP 518 configuration: set build requirements in ``pyproject.toml`` to ``["wheel"]``.
+
+
+v39.2.0
+-------
+
+
+
+Changes
+^^^^^^^
+* #1359: Support using "file:" to load a PEP 440-compliant package version from
+ a text file.
+* #1360: Fixed issue with a mismatch between the name of the package and the
+ name of the .dist-info file in wheel files
+* #1364: Add ``__dir__()`` implementation to ``pkg_resources.Distribution()`` that
+ includes the attributes in the ``_provider`` instance variable.
+* #1365: Take the package_dir option into account when loading the version from
+ a module attribute.
+
+Documentation changes
+^^^^^^^^^^^^^^^^^^^^^
+* #1353: Added coverage badge to README.
+* #1356: Made small fixes to the developer guide documentation.
+* #1357: Fixed warnings in documentation builds and started enforcing that the
+ docs build without warnings in tox.
+* #1376: Updated release process docs.
+
+Misc
+^^^^
+* #1343: The ``setuptools`` specific ``long_description_content_type``,
+ ``project_urls`` and ``provides_extras`` fields are now set consistently
+ after any ``distutils`` ``setup_keywords`` calls, allowing them to override
+ values.
+* #1352: Added ``tox`` environment for documentation builds.
+* #1354: Added ``towncrier`` for changelog management.
+* #1355: Add PR template.
+* #1368: Fixed tests which failed without network connectivity.
+* #1369: Added unit tests for PEP 425 compatibility tags support.
+* #1372: Stop testing Python 3.3 in Travis CI, now that the latest version of
+ ``wheel`` no longer installs on it.
+
v39.1.0
-------
@@ -88,7 +2350,7 @@ v38.2.5
v38.2.4
-------
-* #1220: Fix `data_files` handling when installing from wheel.
+* #1220: Fix ``data_files`` handling when installing from wheel.
v38.2.3
-------
@@ -590,7 +2852,7 @@ v30.3.0
* #394 via #862: Added support for `declarative package
config in a setup.cfg file
- <https://setuptools.readthedocs.io/en/latest/setuptools.html#configuring-setup-using-setup-cfg-files>`_.
+ <https://setuptools.pypa.io/en/latest/setuptools.html#configuring-setup-using-setup-cfg-files>`_.
v30.2.1
-------
@@ -818,7 +3080,7 @@ v26.1.0
-------
* #763: ``pkg_resources.get_default_cache`` now defers to the
- `appdirs project <https://pypi.org/project/appdirs>`_ to
+ :pypi:`appdirs` project to
resolve the cache directory. Adds a vendored dependency on
appdirs to pkg_resources.
@@ -852,7 +3114,7 @@ v25.4.0
v25.3.0
-------
-* #739 Fix unquoted libpaths by fixing compatibility between `numpy.distutils` and `distutils._msvccompiler` for numpy < 1.11.2 (Fix issue #728, error also fixed in Numpy).
+* #739 Fix unquoted libpaths by fixing compatibility between ``numpy.distutils`` and ``distutils._msvccompiler`` for numpy < 1.11.2 (Fix issue #728, error also fixed in Numpy).
* #731: Bump certifi.
@@ -869,13 +3131,13 @@ v25.2.0
v25.1.6
-------
-* #725: revert `library_dir_option` patch (Error is related to `numpy.distutils` and make errors on non Numpy users).
+* #725: revert ``library_dir_option`` patch (Error is related to ``numpy.distutils`` and make errors on non Numpy users).
v25.1.5
-------
* #720
-* #723: Improve patch for `library_dir_option`.
+* #723: Improve patch for ``library_dir_option``.
v25.1.4
-------
@@ -883,7 +3145,7 @@ v25.1.4
* #717
* #713
* #707: Fix Python 2 compatibility for MSVC by catching errors properly.
-* #715: Fix unquoted libpaths by patching `library_dir_option`.
+* #715: Fix unquoted libpaths by patching ``library_dir_option``.
v25.1.3
-------
@@ -1051,7 +3313,7 @@ v23.0.0
about, please comment in the ticket.
* #604: Removed docs building support. The project
now relies on documentation hosted at
- https://setuptools.readthedocs.io/.
+ https://setuptools.pypa.io/.
v22.0.5
-------
@@ -1201,7 +3463,7 @@ v20.6.0
`semver <https://semver.org>`_ precisely.
The 'v' prefix on version numbers now also allows
version numbers to be referenced in the changelog,
- e.g. http://setuptools.readthedocs.io/en/latest/history.html#v20-6-0.
+ e.g. http://setuptools.pypa.io/en/latest/history.html#v20-6-0.
20.5
----
@@ -1281,7 +3543,7 @@ v20.6.0
* Added support for using passwords from keyring in the upload
command. See `the upload docs
- <https://setuptools.readthedocs.io/en/latest/setuptools.html#upload-upload-source-and-or-egg-distributions-to-pypi>`_
+ <https://setuptools.pypa.io/en/latest/setuptools.html#upload-upload-source-and-or-egg-distributions-to-pypi>`_
for details.
20.0
@@ -1294,8 +3556,8 @@ v20.6.0
19.7
----
-* `Off-project PR <https://github.com/jaraco/setuptools/pull/32>`_:
- For FreeBSD, also honor root certificates from ca_root_nss.
+* Off-project PR: `0dcee79 <https://github.com/pypa/setuptools/commit/0dcee791dfdcfacddaaec79b29f30a347a147413>`_ and `f9bd9b9 <https://github.com/pypa/setuptools/commit/f9bd9b9f5df54ef5a0bf8d16c3a889ab8c640580>`_
+ For FreeBSD, also `honor root certificates from ca_root_nss <https://github.com/pypa/setuptools/commit/3ae46c30225eb46e1f5aada1a19e88b79f04dc72>`_.
19.6.2
------
@@ -1459,9 +3721,9 @@ v20.6.0
now logged when pkg_resources is imported on Python 3.2 or earlier
Python 3 versions.
* `Add support for python_platform_implementation environment marker
- <https://github.com/jaraco/setuptools/pull/28>`_.
+ <https://github.com/pypa/setuptools/commit/94416707fd59a65f4a8f7f70541d6b3fc018b626>`_.
* `Fix dictionary mutation during iteration
- <https://github.com/jaraco/setuptools/pull/29>`_.
+ <https://github.com/pypa/setuptools/commit/57ebfa41e0f96b97e599ecd931b7ae8a143e096e>`_.
18.4
----
@@ -1810,8 +4072,7 @@ process to fail and PyPI uploads no longer accept files for 13.0.
* Issue #313: Removed built-in support for subversion. Projects wishing to
retain support for subversion will need to use a third party library. The
- extant implementation is being ported to `setuptools_svn
- <https://pypi.org/project/setuptools_svn/>`_.
+ extant implementation is being ported to :pypi:`setuptools_svn`.
* Issue #315: Updated setuptools to hide its own loaded modules during
installation of another package. This change will enable setuptools to
upgrade (or downgrade) itself even when its own metadata and implementation
@@ -1821,7 +4082,7 @@ process to fail and PyPI uploads no longer accept files for 13.0.
---
* Prefer vendored packaging library `as recommended
- <https://github.com/jaraco/setuptools/commit/170657b68f4b92e7e1bf82f5e19a831f5744af67#commitcomment-9109448>`_.
+ <https://github.com/pypa/setuptools/commit/170657b68f4b92e7e1bf82f5e19a831f5744af67>`_.
9.0.1
-----
@@ -2035,7 +4296,7 @@ process to fail and PyPI uploads no longer accept files for 13.0.
---
* Added a `Developer Guide
- <https://setuptools.readthedocs.io/en/latest/developer-guide.html>`_ to the official
+ <https://setuptools.pypa.io/en/latest/developer-guide.html>`_ to the official
documentation.
* Some code refactoring and cleanup was done with no intended behavioral
changes.
@@ -2315,8 +4576,7 @@ process to fail and PyPI uploads no longer accept files for 13.0.
* Address security vulnerability in SSL match_hostname check as reported in
Python #17997.
-* Prefer `backports.ssl_match_hostname
- <https://pypi.org/project/backports.ssl_match_hostname/>`_ for backport
+* Prefer :pypi:`backports.ssl_match_hostname` for backport
implementation if present.
* Correct NameError in ``ssl_support`` module (``socket.error``).
@@ -2404,17 +4664,17 @@ process to fail and PyPI uploads no longer accept files for 13.0.
connection.
Backward-Incompatible Changes
-=============================
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
This release includes a couple of backward-incompatible changes, but most if
not all users will find 1.0 a drop-in replacement for 0.9.
* Issue #50: Normalized API of environment marker support. Specifically,
removed line number and filename from SyntaxErrors when returned from
- `pkg_resources.invalid_marker`. Any clients depending on the specific
+ ``pkg_resources.invalid_marker``. Any clients depending on the specific
string representation of exceptions returned by that function may need to
be updated to account for this change.
-* Issue #50: SyntaxErrors generated by `pkg_resources.invalid_marker` are
+* Issue #50: SyntaxErrors generated by ``pkg_resources.invalid_marker`` are
normalized for cross-implementation consistency.
* Removed ``--ignore-conflicts-at-my-risk`` and ``--delete-conflicting``
options to easy_install. These options have been deprecated since 0.6a11.
@@ -2422,13 +4682,13 @@ not all users will find 1.0 a drop-in replacement for 0.9.
0.9.8
-----
-* Issue #53: Fix NameErrors in `_vcs_split_rev_from_url`.
+* Issue #53: Fix NameErrors in ``_vcs_split_rev_from_url``.
0.9.7
-----
* Issue #49: Correct AttributeError on PyPy where a hashlib.HASH object does
- not have a `.name` attribute.
+ not have a ``.name`` attribute.
* Issue #34: Documentation now refers to bootstrap script in code repository
referenced by bookmark.
* Add underscore-separated keys to environment markers (markerlib).
@@ -2436,7 +4696,7 @@ not all users will find 1.0 a drop-in replacement for 0.9.
0.9.6
-----
-* Issue #44: Test failure on Python 2.4 when MD5 hash doesn't have a `.name`
+* Issue #44: Test failure on Python 2.4 when MD5 hash doesn't have a ``.name``
attribute.
0.9.5
@@ -2470,7 +4730,7 @@ not all users will find 1.0 a drop-in replacement for 0.9.
0.9
---
-* `package_index` now validates hashes other than MD5 in download links.
+* ``package_index`` now validates hashes other than MD5 in download links.
0.8
---
@@ -2517,7 +4777,7 @@ not all users will find 1.0 a drop-in replacement for 0.9.
0.7.2
-----
-* Issue #14: Use markerlib when the `parser` module is not available.
+* Issue #14: Use markerlib when the ``parser`` module is not available.
* Issue #10: ``ez_setup.py`` now uses HTTPS to download setuptools from PyPI.
0.7.1
@@ -2601,7 +4861,7 @@ Added several features that were slated for setuptools 0.6c12:
------
* Distribute #27: Use public api for loading resources from zip files rather than
- the private method `_zip_directory_cache`.
+ the private method ``_zip_directory_cache``.
* Added a new function ``easy_install.get_win_launcher`` which may be used by
third-party libraries such as buildout to get a suitable script launcher.
@@ -2667,7 +4927,7 @@ how it parses version numbers.
* Fix 2 errors with Jython 2.5.
* Fix 1 failure with Jython 2.5 and 2.7.
* Disable workaround for Jython scripts on Linux systems.
-* Distribute #336: `setup.py` no longer masks failure exit code when tests fail.
+* Distribute #336: ``setup.py`` no longer masks failure exit code when tests fail.
* Fix issue in pkg_resources where try/except around a platform-dependent
import would trigger hook load failures on Mercurial. See pull request 32
for details.
@@ -2678,7 +4938,7 @@ how it parses version numbers.
* Fix test suite with Python 2.6.
* Fix some DeprecationWarnings and ResourceWarnings.
-* Distribute #335: Backed out `setup_requires` superceding installed requirements
+* Distribute #335: Backed out ``setup_requires`` superseding installed requirements
until regression can be addressed.
0.6.31
@@ -2688,16 +4948,15 @@ how it parses version numbers.
* Distribute #329: Properly close files created by tests for compatibility with
Jython.
* Work around Jython #1980 and Jython #1981.
-* Distribute #334: Provide workaround for packages that reference `sys.__stdout__`
- such as numpy does. This change should address
- `virtualenv #359 <https://github.com/pypa/virtualenv/issues/359>`_ as long
+* Distribute #334: Provide workaround for packages that reference ``sys.__stdout__``
+ such as numpy does. This change should address pypa/virtualenv#359 as long
as the system encoding is UTF-8 or the IO encoding is specified in the
environment, i.e.::
PYTHONIOENCODING=utf8 pip install numpy
* Fix for encoding issue when installing from Windows executable on Python 3.
-* Distribute #323: Allow `setup_requires` requirements to supercede installed
+* Distribute #323: Allow ``setup_requires`` requirements to supersede installed
requirements. Added some new keyword arguments to existing pkg_resources
methods. Also had to updated how __path__ is handled for namespace packages
to ensure that when a new egg distribution containing a namespace package is
@@ -2716,17 +4975,17 @@ how it parses version numbers.
* BB Pull Request #14: Honor file permissions in zip files.
* Distribute #327: Merged pull request #24 to fix a dependency problem with pip.
-* Merged pull request #23 to fix https://github.com/pypa/virtualenv/issues/301.
-* If Sphinx is installed, the `upload_docs` command now runs `build_sphinx`
+* Merged pull request #23 to fix pypa/virtualenv#301.
+* If Sphinx is installed, the ``upload_docs`` command now runs ``build_sphinx``
to produce uploadable documentation.
-* Distribute #326: `upload_docs` provided mangled auth credentials under Python 3.
-* Distribute #320: Fix check for "createable" in distribute_setup.py.
+* Distribute #326: ``upload_docs`` provided mangled auth credentials under Python 3.
+* Distribute #320: Fix check for "creatable" in distribute_setup.py.
* Distribute #305: Remove a warning that was triggered during normal operations.
* Distribute #311: Print metadata in UTF-8 independent of platform.
* Distribute #303: Read manifest file with UTF-8 encoding under Python 3.
* Distribute #301: Allow to run tests of namespace packages when using 2to3.
* Distribute #304: Prevent import loop in site.py under Python 3.3.
-* Distribute #283: Reenable scanning of `*.pyc` / `*.pyo` files on Python 3.3.
+* Distribute #283: Re-enable scanning of ``*.pyc`` / ``*.pyo`` files on Python 3.3.
* Distribute #299: The develop command didn't work on Python 3, when using 2to3,
as the egg link would go to the Python 2 source. Linking to the 2to3'd code
in build/lib makes it work, although you will have to rebuild the module
@@ -2736,10 +4995,10 @@ how it parses version numbers.
* Distribute #313: Support for sdist subcommands (Python 2.7)
* Distribute #314: test_local_index() would fail an OS X.
* Distribute #310: Non-ascii characters in a namespace __init__.py causes errors.
-* Distribute #218: Improved documentation on behavior of `package_data` and
- `include_package_data`. Files indicated by `package_data` are now included
+* Distribute #218: Improved documentation on behavior of ``package_data`` and
+ ``include_package_data``. Files indicated by ``package_data`` are now included
in the manifest.
-* `distribute_setup.py` now allows a `--download-base` argument for retrieving
+* ``distribute_setup.py`` now allows a ``--download-base`` argument for retrieving
distribute from a specified location.
0.6.28
@@ -2748,7 +5007,7 @@ how it parses version numbers.
* Distribute #294: setup.py can now be invoked from any directory.
* Scripts are now installed honoring the umask.
* Added support for .dist-info directories.
-* Distribute #283: Fix and disable scanning of `*.pyc` / `*.pyo` files on
+* Distribute #283: Fix and disable scanning of ``*.pyc`` / ``*.pyo`` files on
Python 3.3.
0.6.27
@@ -2920,7 +5179,7 @@ how it parses version numbers.
(platform.mac_ver() fails)
* Distribute #103: test_get_script_header_jython_workaround not run
anymore under py3 with C or POSIX local. Contributed by Arfrever.
-* Distribute #104: remvoved the assertion when the installation fails,
+* Distribute #104: removed the assertion when the installation fails,
with a nicer message for the end user.
* Distribute #100: making sure there's no SandboxViolation when
the setup script patches setuptools.
@@ -2982,7 +5241,7 @@ how it parses version numbers.
0.6.4
-----
-* Added the generation of `distribute_setup_3k.py` during the release.
+* Added the generation of ``distribute_setup_3k.py`` during the release.
This closes Distribute #52.
* Added an upload_docs command to easily upload project documentation to
@@ -2994,12 +5253,12 @@ how it parses version numbers.
-----
setuptools
-==========
+^^^^^^^^^^
* Fixed a bunch of calls to file() that caused crashes on Python 3.
bootstrapping
-=============
+^^^^^^^^^^^^^
* Fixed a bug in sorting that caused bootstrap to fail on Python 3.
@@ -3007,7 +5266,7 @@ bootstrapping
-----
setuptools
-==========
+^^^^^^^^^^
* Added Python 3 support; see docs/python3.txt.
This closes Old Setuptools #39.
@@ -3025,7 +5284,7 @@ setuptools
This closes Old Setuptools #41.
bootstrapping
-=============
+^^^^^^^^^^^^^
* Fixed bootstrap not working on Windows. This closes issue Distribute #49.
@@ -3038,7 +5297,7 @@ bootstrapping
-----
setuptools
-==========
+^^^^^^^^^^
* package_index.urlopen now catches BadStatusLine and malformed url errors.
This closes Distribute #16 and Distribute #18.
@@ -3055,9 +5314,9 @@ setuptools
bootstrapping
-=============
+^^^^^^^^^^^^^
-* The boostrap process leave setuptools alone if detected in the system
+* The bootstrap process leaves setuptools alone if detected in the system
and --root or --prefix is provided, but is not in the same location.
This closes Distribute #10.
@@ -3065,7 +5324,7 @@ bootstrapping
---
setuptools
-==========
+^^^^^^^^^^
* Packages required at build time where not fully present at install time.
This closes Distribute #12.
@@ -3082,7 +5341,7 @@ setuptools
* Added compatibility with Subversion 1.6. This references Distribute #1.
pkg_resources
-=============
+^^^^^^^^^^^^^
* Avoid a call to /usr/bin/sw_vers on OSX and use the official platform API
instead. Based on a patch from ronaldoussoren. This closes issue #5.
@@ -3099,7 +5358,7 @@ pkg_resources
* Immediately close all file handles. This closes Distribute #3.
easy_install
-============
+^^^^^^^^^^^^
* Immediately close all file handles. This closes Distribute #3.
@@ -3143,6 +5402,37 @@ easy_install
gracefully under Google App Engine (with an ``ImportError`` loading the
C-based module, instead of getting a ``NameError``).
+ * Fixed ``win32.exe`` support for .pth files, so unnecessary directory nesting
+ is flattened out in the resulting egg. (There was a case-sensitivity
+ problem that affected some distributions, notably ``pywin32``.)
+
+ * Prevent ``--help-commands`` and other junk from showing under Python 2.5
+ when running ``easy_install --help``.
+
+ * Fixed GUI scripts sometimes not executing on Windows
+
+ * Fixed not picking up dependency links from recursive dependencies.
+
+ * Only make ``.py``, ``.dll`` and ``.so`` files executable when unpacking eggs
+
+ * Changes for Jython compatibility
+
+ * Improved error message when a requirement is also a directory name, but the
+ specified directory is not a source package.
+
+ * Fixed ``--allow-hosts`` option blocking ``file:`` URLs
+
+ * Fixed HTTP SVN detection failing when the page title included a project
+ name (e.g. on SourceForge-hosted SVN)
+
+ * Fix Jython script installation to handle ``#!`` lines better when
+ ``sys.executable`` is a script.
+
+ * Removed use of deprecated ``md5`` module if ``hashlib`` is available
+
+ * Keep site directories (e.g. ``site-packages``) from being included in
+ ``.pth`` files.
+
0.6c7
-----
@@ -3153,6 +5443,11 @@ easy_install
``--root`` or ``--single-version-externally-managed``, due to the
parent package not having the child package as an attribute.
+ * ``ftp:`` download URLs now work correctly.
+
+ * The default ``--index-url`` is now ``https://pypi.python.org/simple``, to use
+ the Python Package Index's new simpler (and faster!) REST API.
+
0.6c6
-----
@@ -3176,6 +5471,18 @@ easy_install
* Fix ``find_packages()`` treating ``ez_setup`` and directories with ``.`` in
their names as packages.
+ * EasyInstall no longer aborts the installation process if a URL it wants to
+ retrieve can't be downloaded, unless the URL is an actual package download.
+ Instead, it issues a warning and tries to keep going.
+
+ * Fixed distutils-style scripts originally built on Windows having their line
+ endings doubled when installed on any platform.
+
+ * Added ``--local-snapshots-ok`` flag, to allow building eggs from projects
+ installed using ``setup.py develop``.
+
+ * Fixed not HTML-decoding URLs scraped from web pages
+
0.6c5
-----
@@ -3185,6 +5492,9 @@ easy_install
* Fix uploaded ``bdist_wininst`` packages being described as suitable for
"any" version by Python 2.5, even if a ``--target-version`` was specified.
+ * Fixed ``.dll`` files on Cygwin not having executable permissions when an egg
+ is installed unzipped.
+
0.6c4
-----
@@ -3214,11 +5524,34 @@ easy_install
listed a namespace package ``foo.bar`` without explicitly listing ``foo``
as a namespace package.
+ * Added support for HTTP "Basic" authentication using ``http://user:pass@host``
+ URLs. If a password-protected page contains links to the same host (and
+ protocol), those links will inherit the credentials used to access the
+ original page.
+
+ * Removed all special support for Sourceforge mirrors, as Sourceforge's
+ mirror system now works well for non-browser downloads.
+
+ * Fixed not recognizing ``win32.exe`` installers that included a custom
+ bitmap.
+
+ * Fixed not allowing ``os.open()`` of paths outside the sandbox, even if they
+ are opened read-only (e.g. reading ``/dev/urandom`` for random numbers, as
+ is done by ``os.urandom()`` on some platforms).
+
+ * Fixed a problem with ``.pth`` testing on Windows when ``sys.executable``
+ has a space in it (e.g., the user installed Python to a ``Program Files``
+ directory).
+
0.6c3
-----
* Fixed breakages caused by Subversion 1.4's new "working copy" format
+ * You can once again use "python -m easy_install" with Python 2.4 and above.
+
+ * Python 2.5 compatibility fixes added.
+
0.6c2
-----
@@ -3229,6 +5562,19 @@ easy_install
* Running ``setup.py develop`` on a setuptools-using project will now install
setuptools if needed, instead of only downloading the egg.
+ * Windows script wrappers now support quoted arguments and arguments
+ containing spaces. (Patch contributed by Jim Fulton.)
+
+ * The ``ez_setup.py`` script now actually works when you put a setuptools
+ ``.egg`` alongside it for bootstrapping an offline machine.
+
+ * A writable installation directory on ``sys.path`` is no longer required to
+ download and extract a source distribution using ``--editable``.
+
+ * Generated scripts now use ``-x`` on the ``#!`` line when ``sys.executable``
+ contains non-ASCII characters, to prevent deprecation warnings about an
+ unspecified encoding when the script is run.
+
0.6c1
-----
@@ -3252,6 +5598,9 @@ easy_install
the version was overridden on the command line that built the source
distribution.)
+ * EasyInstall now includes setuptools version information in the
+ ``User-Agent`` string sent to websites it visits.
+
0.6b4
-----
@@ -3264,6 +5613,27 @@ easy_install
* Fixed redundant warnings about missing ``README`` file(s); the warning should now
appear only when you are actually building a source distribution.
+ * Fix creating Python wrappers for non-Python scripts
+
+ * Fix ``ftp://`` directory listing URLs from causing a crash when used in the
+ "Home page" or "Download URL" slots on PyPI.
+
+ * Fix ``sys.path_importer_cache`` not being updated when an existing zipfile
+ or directory is deleted/overwritten.
+
+ * Fix not recognizing HTML 404 pages from package indexes.
+
+ * Allow ``file://`` URLs to be used as a package index. URLs that refer to
+ directories will use an internally-generated directory listing if there is
+ no ``index.html`` file in the directory.
+
+ * Allow external links in a package index to be specified using
+ ``rel="homepage"`` or ``rel="download"``, without needing the old
+ PyPI-specific visible markup.
+
+ * Suppressed warning message about possibly-misspelled project name, if an egg
+ or link for that project name has already been seen.
+
0.6b3
-----
@@ -3274,6 +5644,28 @@ easy_install
``include_package_data`` and ``package_data`` are used to refer to the same
files.
+ * Fix local ``--find-links`` eggs not being copied except with
+ ``--always-copy``.
+
+ * Fix sometimes not detecting local packages installed outside of "site"
+ directories.
+
+ * Fix mysterious errors during initial ``setuptools`` install, caused by
+ ``ez_setup`` trying to run ``easy_install`` twice, due to a code fallthru
+ after deleting the egg from which it's running.
+
+0.6b2
+-----
+
+ * Don't install or update a ``site.py`` patch when installing to a
+ ``PYTHONPATH`` directory with ``--multi-version``, unless an
+ ``easy-install.pth`` file is already in use there.
+
+ * Construct ``.pth`` file paths in such a way that installing an egg whose
+ name begins with ``import`` doesn't cause a syntax error.
+
+ * Fixed a bogus warning message that wasn't updated since the 0.5 versions.
+
0.6b1
-----
@@ -3281,6 +5673,21 @@ easy_install
the name of a ``.py`` loader/wrapper. (Python's import machinery ignores
this suffix when searching for an extension module.)
+ * Better ambiguity management: accept ``#egg`` name/version even if processing
+ what appears to be a correctly-named distutils file, and ignore ``.egg``
+ files with no ``-``, since valid Python ``.egg`` files always have a version
+ number (but Scheme eggs often don't).
+
+ * Support ``file://`` links to directories in ``--find-links``, so that
+ easy_install can build packages from local source checkouts.
+
+ * Added automatic retry for Sourceforge mirrors. The new download process is
+ to first just try dl.sourceforge.net, then randomly select mirror IPs and
+ remove ones that fail, until something works. The removed IPs stay removed
+ for the remainder of the run.
+
+ * Ignore bdist_dumb distributions when looking at download URLs.
+
0.6a11
------
@@ -3314,11 +5721,75 @@ easy_install
it. Previously, the file could be left open and the actual error would be
masked by problems trying to remove the open file on Windows systems.
+ * Process ``dependency_links.txt`` if found in a distribution, by adding the
+ URLs to the list for scanning.
+
+ * Use relative paths in ``.pth`` files when eggs are being installed to the
+ same directory as the ``.pth`` file. This maximizes portability of the
+ target directory when building applications that contain eggs.
+
+ * Added ``easy_install-N.N`` script(s) for convenience when using multiple
+ Python versions.
+
+ * Added automatic handling of installation conflicts. Eggs are now shifted to
+ the front of sys.path, in an order consistent with where they came from,
+ making EasyInstall seamlessly co-operate with system package managers.
+
+ The ``--delete-conflicting`` and ``--ignore-conflicts-at-my-risk`` options
+ are now no longer necessary, and will generate warnings at the end of a
+ run if you use them.
+
+ * Don't recursively traverse subdirectories given to ``--find-links``.
+
0.6a10
------
* Fixed the ``develop`` command ignoring ``--find-links``.
+ * Added exhaustive testing of the install directory, including a spawn test
+ for ``.pth`` file support, and directory writability/existence checks. This
+ should virtually eliminate the need to set or configure ``--site-dirs``.
+
+ * Added ``--prefix`` option for more do-what-I-mean-ishness in the absence of
+ RTFM-ing. :)
+
+ * Enhanced ``PYTHONPATH`` support so that you don't have to put any eggs on it
+ manually to make it work. ``--multi-version`` is no longer a silent
+ default; you must explicitly use it if installing to a non-PYTHONPATH,
+ non-"site" directory.
+
+ * Expand ``$variables`` used in the ``--site-dirs``, ``--build-directory``,
+ ``--install-dir``, and ``--script-dir`` options, whether on the command line
+ or in configuration files.
+
+ * Improved SourceForge mirror processing to work faster and be less affected
+ by transient HTML changes made by SourceForge.
+
+ * PyPI searches now use the exact spelling of requirements specified on the
+ command line or in a project's ``install_requires``. Previously, a
+ normalized form of the name was used, which could lead to unnecessary
+ full-index searches when a project's name had an underscore (``_``) in it.
+
+ * EasyInstall can now download bare ``.py`` files and wrap them in an egg,
+ as long as you include an ``#egg=name-version`` suffix on the URL, or if
+ the ``.py`` file is listed as the "Download URL" on the project's PyPI page.
+ This allows third parties to "package" trivial Python modules just by
+ linking to them (e.g. from within their own PyPI page or download links
+ page).
+
+ * The ``--always-copy`` option now skips "system" and "development" eggs since
+ they can't be reliably copied. Note that this may cause EasyInstall to
+ choose an older version of a package than what you expected, or it may cause
+ downloading and installation of a fresh version of what's already installed.
+
+ * The ``--find-links`` option previously scanned all supplied URLs and
+ directories as early as possible, but now only directories and direct
+ archive links are scanned immediately. URLs are not retrieved unless a
+ package search was already going to go online due to a package not being
+ available locally, or due to the use of the ``--update`` or ``-U`` option.
+
+ * Fixed the annoying ``--help-commands`` wart.
+
0.6a9
-----
@@ -3369,6 +5840,22 @@ easy_install
and entry-point wrapper scripts), and ``easy_install`` can turn the .exe
back into an ``.egg`` file or directory and install it as such.
+ * Fixed ``.pth`` file processing picking up nested eggs (i.e. ones inside
+ "baskets") when they weren't explicitly listed in the ``.pth`` file.
+
+ * If more than one URL appears to describe the exact same distribution, prefer
+ the shortest one. This helps to avoid "table of contents" CGI URLs like the
+ ones on effbot.org.
+
+ * Quote arguments to python.exe (including python's path) to avoid problems
+ when Python (or a script) is installed in a directory whose name contains
+ spaces on Windows.
+
+ * Support full roundtrip translation of eggs to and from ``bdist_wininst``
+ format. Running ``bdist_wininst`` on a setuptools-based package wraps the
+ egg in an .exe that will safely install it as an egg (i.e., with metadata
+ and entry-point wrapper scripts), and ``easy_install`` can turn the .exe
+ back into an ``.egg`` file or directory and install it as such.
0.6a8
-----
@@ -3397,6 +5884,45 @@ easy_install
metadata cache to pretend that the egg has valid version information, until
it has a chance to make it actually be so (via the ``egg_info`` command).
+ * Update for changed SourceForge mirror format
+
+ * Fixed not installing dependencies for some packages fetched via Subversion
+
+ * Fixed dependency installation with ``--always-copy`` not using the same
+ dependency resolution procedure as other operations.
+
+ * Fixed not fully removing temporary directories on Windows, if a Subversion
+ checkout left read-only files behind
+
+ * Fixed some problems building extensions when Pyrex was installed, especially
+ with Python 2.4 and/or packages using SWIG.
+
+0.6a7
+-----
+
+ * Fixed not being able to install Windows script wrappers using Python 2.3
+
+0.6a6
+-----
+
+ * Added support for "traditional" PYTHONPATH-based non-root installation, and
+ also the convenient ``virtual-python.py`` script, based on a contribution
+ by Ian Bicking. The setuptools egg now contains a hacked ``site`` module
+ that makes the PYTHONPATH-based approach work with .pth files, so that you
+ can get the full EasyInstall feature set on such installations.
+
+ * Added ``--no-deps`` and ``--allow-hosts`` options.
+
+ * Improved Windows ``.exe`` script wrappers so that the script can have the
+ same name as a module without confusing Python.
+
+ * Changed dependency processing so that it's breadth-first, allowing a
+ depender's preferences to override those of a dependee, to prevent conflicts
+ when a lower version is acceptable to the dependee, but not the depender.
+ Also, ensure that currently installed/selected packages aren't given
+ precedence over ones desired by a package being installed, which could
+ cause conflict errors.
+
0.6a5
-----
@@ -3409,6 +5935,17 @@ easy_install
on Windows and other platforms. (The special handling is only for Windows;
other platforms are treated the same as for ``console_scripts``.)
+ * Improved error message when trying to use old ways of running
+ ``easy_install``. Removed the ability to run via ``python -m`` or by
+ running ``easy_install.py``; ``easy_install`` is the command to run on all
+ supported platforms.
+
+ * Improved wrapper script generation and runtime initialization so that a
+ VersionConflict doesn't occur if you later install a competing version of a
+ needed package as the default version of that package.
+
+ * Fixed a problem parsing version numbers in ``#egg=`` links.
+
0.6a2
-----
@@ -3417,6 +5954,15 @@ easy_install
scripts get an ``.exe`` wrapper so you can just type their name. On other
platforms, the scripts are written without a file extension.
+ * EasyInstall can now install "console_scripts" defined by packages that use
+ ``setuptools`` and define appropriate entry points. On Windows, console
+ scripts get an ``.exe`` wrapper so you can just type their name. On other
+ platforms, the scripts are installed without a file extension.
+
+ * Using ``python -m easy_install`` or running ``easy_install.py`` is now
+ DEPRECATED, since an ``easy_install`` wrapper is now available on all
+ platforms.
+
0.6a1
-----
@@ -3455,13 +6001,62 @@ easy_install
* ``setuptools`` now finds its commands, ``setup()`` argument validators, and
metadata writers using entry points, so that they can be extended by
third-party packages. See `Creating distutils Extensions
- <https://setuptools.readthedocs.io/en/latest/setuptools.html#creating-distutils-extensions>`_
+ <https://setuptools.pypa.io/en/latest/setuptools.html#creating-distutils-extensions>`_
for more details.
* The vestigial ``depends`` command has been removed. It was never finished
or documented, and never would have worked without EasyInstall - which it
pre-dated and was never compatible with.
+ * EasyInstall now does MD5 validation of downloads from PyPI, or from any link
+ that has an "#md5=..." trailer with a 32-digit lowercase hex md5 digest.
+
+ * EasyInstall now handles symlinks in target directories by removing the link,
+ rather than attempting to overwrite the link's destination. This makes it
+ easier to set up an alternate Python "home" directory (as described in
+ the Non-Root Installation section of the docs).
+
+ * Added support for handling MacOS platform information in ``.egg`` filenames,
+ based on a contribution by Kevin Dangoor. You may wish to delete and
+ reinstall any eggs whose filename includes "darwin" and "Power_Macintosh",
+ because the format for this platform information has changed so that minor
+ OS X upgrades (such as 10.4.1 to 10.4.2) do not cause eggs built with a
+ previous OS version to become obsolete.
+
+ * easy_install's dependency processing algorithms have changed. When using
+ ``--always-copy``, it now ensures that dependencies are copied too. When
+ not using ``--always-copy``, it tries to use a single resolution loop,
+ rather than recursing.
+
+ * Fixed installing extra ``.pyc`` or ``.pyo`` files for scripts with ``.py``
+ extensions.
+
+ * Added ``--site-dirs`` option to allow adding custom "site" directories.
+ Made ``easy-install.pth`` work in platform-specific alternate site
+ directories (e.g. ``~/Library/Python/2.x/site-packages`` on Mac OS X).
+
+ * If you manually delete the current version of a package, the next run of
+ EasyInstall against the target directory will now remove the stray entry
+ from the ``easy-install.pth`` file.
+
+ * EasyInstall now recognizes URLs with a ``#egg=project_name`` fragment ID
+ as pointing to the named project's source checkout. Such URLs have a lower
+ match precedence than any other kind of distribution, so they'll only be
+ used if they have a higher version number than any other available
+ distribution, or if you use the ``--editable`` option. The ``#egg``
+ fragment can contain a version if it's formatted as ``#egg=proj-ver``,
+ where ``proj`` is the project name, and ``ver`` is the version number. You
+ *must* use the format for these values that the ``bdist_egg`` command uses;
+ i.e., all non-alphanumeric runs must be condensed to single underscore
+ characters.
+
+ * Added the ``--editable`` option; see Editing and Viewing Source Packages
+ in the docs. Also, slightly changed the behavior of the
+ ``--build-directory`` option.
+
+ * Fixed the setup script sandbox facility not recognizing certain paths as
+ valid on case-insensitive platforms.
+
0.5a12
------
@@ -3469,12 +6064,28 @@ easy_install
``python -m``, and marks them as unsafe for zipping, since Python 2.4 can't
handle ``-m`` on zipped modules.
+ * Fix ``python -m easy_install`` not working due to setuptools being installed
+ as a zipfile. Update safety scanner to check for modules that might be used
+ as ``python -m`` scripts.
+
+ * Misc. fixes for win32.exe support, including changes to support Python 2.4's
+ changed ``bdist_wininst`` format.
+
0.5a11
------
* Fix breakage of the "develop" command that was caused by the addition of
``--always-unzip`` to the ``easy_install`` command.
+0.5a10
+------
+
+ * Put the ``easy_install`` module back in as a module, as it's needed for
+ ``python -m`` to run it!
+
+ * Allow ``--find-links/-f`` to accept local directories or filenames as well
+ as URLs.
+
0.5a9
-----
@@ -3509,6 +6120,31 @@ easy_install
* Fixed the swapped ``-d`` and ``-b`` options of ``bdist_egg``.
+ * EasyInstall now automatically detects when an "unmanaged" package or
+ module is going to be on ``sys.path`` ahead of a package you're installing,
+ thereby preventing the newer version from being imported. By default, it
+ will abort installation to alert you of the problem, but there are also
+ new options (``--delete-conflicting`` and ``--ignore-conflicts-at-my-risk``)
+ available to change the default behavior. (Note: this new feature doesn't
+ take effect for egg files that were built with older ``setuptools``
+ versions, because they lack the new metadata file required to implement it.)
+
+ * The ``easy_install`` distutils command now uses ``DistutilsError`` as its
+ base error type for errors that should just issue a message to stderr and
+ exit the program without a traceback.
+
+ * EasyInstall can now be given a path to a directory containing a setup
+ script, and it will attempt to build and install the package there.
+
+ * EasyInstall now performs a safety analysis on module contents to determine
+ whether a package is likely to run in zipped form, and displays
+ information about what modules may be doing introspection that would break
+ when running as a zipfile.
+
+ * Added the ``--always-unzip/-Z`` option, to force unzipping of packages that
+ would ordinarily be considered safe to unzip, and changed the meaning of
+ ``--zip-ok/-z`` to "always leave everything zipped".
+
0.5a8
-----
@@ -3536,6 +6172,9 @@ easy_install
* Added a "setopt" command that sets a single option in a specified distutils
configuration file.
+ * There is now a separate documentation page for setuptools; revision
+ history that's not specific to EasyInstall has been moved to that page.
+
0.5a7
-----
@@ -3603,6 +6242,39 @@ easy_install
* Setup scripts using setuptools now always install using ``easy_install``
internally, for ease of uninstallation and upgrading.
+ * Added ``--always-copy/-a`` option to always copy needed packages to the
+ installation directory, even if they're already present elsewhere on
+ sys.path. (In previous versions, this was the default behavior, but now
+ you must request it.)
+
+ * Added ``--upgrade/-U`` option to force checking PyPI for latest available
+ version(s) of all packages requested by name and version, even if a matching
+ version is available locally.
+
+ * Added automatic installation of dependencies declared by a distribution
+ being installed. These dependencies must be listed in the distribution's
+ ``EGG-INFO`` directory, so the distribution has to have declared its
+ dependencies by using setuptools. If a package has requirements it didn't
+ declare, you'll still have to deal with them yourself. (E.g., by asking
+ EasyInstall to find and install them.)
+
+ * Added the ``--record`` option to ``easy_install`` for the benefit of tools
+ that run ``setup.py install --record=filename`` on behalf of another
+ packaging system.
+
+0.5a3
+-----
+
+ * Fixed not setting script permissions to allow execution.
+
+ * Improved sandboxing so that setup scripts that want a temporary directory
+ (e.g. pychecker) can still run in the sandbox.
+
+0.5a2
+-----
+
+ * Fix stupid stupid refactoring-at-the-last-minute typos. :(
+
0.5a1
-----
@@ -3617,6 +6289,29 @@ easy_install
from setuptools import setup
# etc...
+ * Added support for converting ``.win32.exe`` installers to eggs on the fly.
+ EasyInstall will now recognize such files by name and install them.
+
+ * Fixed a problem with picking the "best" version to install (versions were
+ being sorted as strings, rather than as parsed values)
+
+0.4a4
+-----
+
+ * Added support for the distutils "verbose/quiet" and "dry-run" options, as
+ well as the "optimize" flag.
+
+ * Support downloading packages that were uploaded to PyPI (by scanning all
+ links on package pages, not just the homepage/download links).
+
+0.4a3
+-----
+
+ * Add progress messages to the search/download process so that you can tell
+ what URLs it's reading to find download links. (Hopefully, this will help
+ people report out-of-date and broken links to package authors, and to tell
+ when they've asked for a package that doesn't exist.)
+
0.4a2
-----
@@ -3644,6 +6339,44 @@ easy_install
their ``command_consumes_arguments`` attribute to ``True`` in order to
receive an ``args`` option containing the rest of the command line.
+ * Added support for installing scripts
+
+ * Added support for setting options via distutils configuration files, and
+ using distutils' default options as a basis for EasyInstall's defaults.
+
+ * Renamed ``--scan-url/-s`` to ``--find-links/-f`` to free up ``-s`` for the
+ script installation directory option.
+
+ * Use ``urllib2`` instead of ``urllib``, to allow use of ``https:`` URLs if
+ Python includes SSL support.
+
+0.4a1
+-----
+
+ * Added ``--scan-url`` and ``--index-url`` options, to scan download pages
+ and search PyPI for needed packages.
+
+0.3a4
+-----
+
+ * Restrict ``--build-directory=DIR/-b DIR`` option to only be used with single
+ URL installs, to avoid running the wrong setup.py.
+
+0.3a3
+-----
+
+ * Added ``--build-directory=DIR/-b DIR`` option.
+
+ * Added "installation report" that explains how to use 'require()' when doing
+ a multiversion install or alternate installation directory.
+
+ * Added SourceForge mirror auto-select (Contributed by Ian Bicking)
+
+ * Added "sandboxing" that stops a setup script from running if it attempts to
+ write to the filesystem outside of the build area
+
+ * Added more workarounds for packages with quirky ``install_data`` hacks
+
0.3a2
-----
@@ -3651,6 +6384,9 @@ easy_install
with a subversion revision number, the current date, or an explicit tag
value. Run ``setup.py bdist_egg --help`` to get more information.
+ * Added subversion download support for ``svn:`` and ``svn+`` URLs, as well as
+ automatic recognition of HTTP subversion URLs (Contributed by Ian Bicking)
+
* Misc. bug fixes
0.3a1
diff --git a/LICENSE b/LICENSE
index 6e0693b..353924b 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,19 +1,19 @@
-Copyright (C) 2016 Jason R Coombs <jaraco@jaraco.com>
+Copyright Jason R. Coombs
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-of the Software, and to permit persons to whom the Software is furnished to do
-so, subject to the following conditions:
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/MANIFEST.in b/MANIFEST.in
index 325bbed..3e8f09d 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,9 +1,12 @@
-recursive-include setuptools *.py *.exe *.xml
+recursive-include setuptools *.py *.exe *.xml *.tmpl
recursive-include tests *.py
recursive-include setuptools/tests *.html
-recursive-include docs *.py *.txt *.conf *.css *.css_t Makefile indexsidebar.html
-recursive-include setuptools/_vendor *
+recursive-include docs *.py *.txt *.rst *.conf *.css *.css_t Makefile indexsidebar.html
+recursive-include setuptools/_vendor *.py *.txt
recursive-include pkg_resources *.py *.txt
+recursive-include pkg_resources/tests/data *
+recursive-include tools *
+recursive-include changelog.d *
include *.py
include *.rst
include MANIFEST.in
diff --git a/METADATA b/METADATA
index cfd0a77..350b785 100644
--- a/METADATA
+++ b/METADATA
@@ -9,9 +9,9 @@ third_party {
}
url {
type: ARCHIVE
- value: "https://files.pythonhosted.org/packages/a6/5b/f399fcffb9128d642387133dc3aa9bb81f127b949cd4d9f63e5602ad1d71/setuptools-39.1.0.zip"
+ value: "https://files.pythonhosted.org/packages/27/01/6d721dbe597bd8234d978270b99717d017cafb6b1115e0eaafd373eaaa68/setuptools-61.1.0.tar.gz"
}
- version: "v39.1.0"
+ version: "v61.1.0"
license_type: NOTICE
- last_upgrade_date { year: 2018 month: 5 day: 23 }
+ last_upgrade_date { year: 2022 month: 3 day: 25 }
}
diff --git a/PKG-INFO b/PKG-INFO
index 51e6da6..542cb26 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,65 +1,103 @@
Metadata-Version: 2.1
Name: setuptools
-Version: 39.1.0
+Version: 61.1.0
Summary: Easily download, build, install, upgrade, and uninstall Python packages
Home-page: https://github.com/pypa/setuptools
Author: Python Packaging Authority
Author-email: distutils-sig@python.org
License: UNKNOWN
-Project-URL: Documentation, https://setuptools.readthedocs.io/
-Description: .. image:: https://img.shields.io/pypi/v/setuptools.svg
- :target: https://pypi.org/project/setuptools
-
- .. image:: https://readthedocs.org/projects/setuptools/badge/?version=latest
- :target: https://setuptools.readthedocs.io
-
- .. image:: https://img.shields.io/travis/pypa/setuptools/master.svg?label=Linux%20build%20%40%20Travis%20CI
- :target: https://travis-ci.org/pypa/setuptools
-
- .. image:: https://img.shields.io/appveyor/ci/jaraco/setuptools/master.svg?label=Windows%20build%20%40%20Appveyor
- :target: https://ci.appveyor.com/project/jaraco/setuptools/branch/master
-
- .. image:: https://img.shields.io/pypi/pyversions/setuptools.svg
-
- See the `Installation Instructions
- <https://packaging.python.org/installing/>`_ in the Python Packaging
- User's Guide for instructions on installing, upgrading, and uninstalling
- Setuptools.
-
- The project is `maintained at GitHub <https://github.com/pypa/setuptools>`_.
-
- Questions and comments should be directed to the `distutils-sig
- mailing list <http://mail.python.org/pipermail/distutils-sig/>`_.
- Bug reports and especially tested patches may be
- submitted directly to the `bug tracker
- <https://github.com/pypa/setuptools/issues>`_.
-
-
- Code of Conduct
- ---------------
-
- Everyone interacting in the setuptools project's codebases, issue trackers,
- chat rooms, and mailing lists is expected to follow the
- `PyPA Code of Conduct <https://www.pypa.io/en/latest/code-of-conduct/>`_.
-
+Project-URL: Documentation, https://setuptools.pypa.io/
Keywords: CPAN PyPI distutils eggs package management
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
-Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python :: 2
-Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.3
-Classifier: Programming Language :: Python :: 3.4
-Classifier: Programming Language :: Python :: 3.5
-Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: System :: Archiving :: Packaging
Classifier: Topic :: System :: Systems Administration
Classifier: Topic :: Utilities
-Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*
-Description-Content-Type: text/x-rst; charset=UTF-8
+Requires-Python: >=3.7
+Provides-Extra: testing
+Provides-Extra: testing-integration
+Provides-Extra: docs
Provides-Extra: ssl
Provides-Extra: certs
+License-File: LICENSE
+
+.. image:: https://raw.githubusercontent.com/pypa/setuptools/main/docs/images/banner-640x320.svg
+ :align: center
+
+|
+
+.. image:: https://img.shields.io/pypi/v/setuptools.svg
+ :target: `PyPI link`_
+
+.. image:: https://img.shields.io/pypi/pyversions/setuptools.svg
+ :target: `PyPI link`_
+
+.. _PyPI link: https://pypi.org/project/setuptools
+
+.. image:: https://github.com/pypa/setuptools/workflows/tests/badge.svg
+ :target: https://github.com/pypa/setuptools/actions?query=workflow%3A%22tests%22
+ :alt: tests
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Code style: Black
+
+.. image:: https://img.shields.io/readthedocs/setuptools/latest.svg
+ :target: https://setuptools.pypa.io
+
+.. image:: https://img.shields.io/badge/skeleton-2022-informational
+ :target: https://blog.jaraco.com/skeleton
+
+.. image:: https://img.shields.io/codecov/c/github/pypa/setuptools/master.svg?logo=codecov&logoColor=white
+ :target: https://codecov.io/gh/pypa/setuptools
+
+.. image:: https://tidelift.com/badges/github/pypa/setuptools?style=flat
+ :target: https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=readme
+
+.. image:: https://img.shields.io/discord/803025117553754132
+ :target: https://discord.com/channels/803025117553754132/815945031150993468
+ :alt: Discord
+
+See the `Installation Instructions
+<https://packaging.python.org/installing/>`_ in the Python Packaging
+User's Guide for instructions on installing, upgrading, and uninstalling
+Setuptools.
+
+Questions and comments should be directed to `GitHub Discussions
+<https://github.com/pypa/setuptools/discussions>`_.
+Bug reports and especially tested patches may be
+submitted directly to the `bug tracker
+<https://github.com/pypa/setuptools/issues>`_.
+
+
+Code of Conduct
+===============
+
+Everyone interacting in the setuptools project's codebases, issue trackers,
+chat rooms, and fora is expected to follow the
+`PSF Code of Conduct <https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md>`_.
+
+
+For Enterprise
+==============
+
+Available as part of the Tidelift Subscription.
+
+Setuptools and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
+
+`Learn more <https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=referral&utm_campaign=github>`_.
+
+
+Security Contact
+================
+
+To report a security vulnerability, please use the
+`Tidelift security contact <https://tidelift.com/security>`_.
+Tidelift will coordinate the fix and disclosure.
+
+
diff --git a/README.rst b/README.rst
index f754d96..e421de5 100755..100644
--- a/README.rst
+++ b/README.rst
@@ -1,34 +1,73 @@
+.. image:: https://raw.githubusercontent.com/pypa/setuptools/main/docs/images/banner-640x320.svg
+ :align: center
+
+|
+
.. image:: https://img.shields.io/pypi/v/setuptools.svg
- :target: https://pypi.org/project/setuptools
+ :target: `PyPI link`_
-.. image:: https://readthedocs.org/projects/setuptools/badge/?version=latest
- :target: https://setuptools.readthedocs.io
+.. image:: https://img.shields.io/pypi/pyversions/setuptools.svg
+ :target: `PyPI link`_
-.. image:: https://img.shields.io/travis/pypa/setuptools/master.svg?label=Linux%20build%20%40%20Travis%20CI
- :target: https://travis-ci.org/pypa/setuptools
+.. _PyPI link: https://pypi.org/project/setuptools
-.. image:: https://img.shields.io/appveyor/ci/jaraco/setuptools/master.svg?label=Windows%20build%20%40%20Appveyor
- :target: https://ci.appveyor.com/project/jaraco/setuptools/branch/master
+.. image:: https://github.com/pypa/setuptools/workflows/tests/badge.svg
+ :target: https://github.com/pypa/setuptools/actions?query=workflow%3A%22tests%22
+ :alt: tests
-.. image:: https://img.shields.io/pypi/pyversions/setuptools.svg
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Code style: Black
+
+.. image:: https://img.shields.io/readthedocs/setuptools/latest.svg
+ :target: https://setuptools.pypa.io
+
+.. image:: https://img.shields.io/badge/skeleton-2022-informational
+ :target: https://blog.jaraco.com/skeleton
+
+.. image:: https://img.shields.io/codecov/c/github/pypa/setuptools/master.svg?logo=codecov&logoColor=white
+ :target: https://codecov.io/gh/pypa/setuptools
+
+.. image:: https://tidelift.com/badges/github/pypa/setuptools?style=flat
+ :target: https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=readme
+
+.. image:: https://img.shields.io/discord/803025117553754132
+ :target: https://discord.com/channels/803025117553754132/815945031150993468
+ :alt: Discord
See the `Installation Instructions
<https://packaging.python.org/installing/>`_ in the Python Packaging
User's Guide for instructions on installing, upgrading, and uninstalling
Setuptools.
-The project is `maintained at GitHub <https://github.com/pypa/setuptools>`_.
-
-Questions and comments should be directed to the `distutils-sig
-mailing list <http://mail.python.org/pipermail/distutils-sig/>`_.
+Questions and comments should be directed to `GitHub Discussions
+<https://github.com/pypa/setuptools/discussions>`_.
Bug reports and especially tested patches may be
submitted directly to the `bug tracker
<https://github.com/pypa/setuptools/issues>`_.
Code of Conduct
----------------
+===============
Everyone interacting in the setuptools project's codebases, issue trackers,
-chat rooms, and mailing lists is expected to follow the
-`PyPA Code of Conduct <https://www.pypa.io/en/latest/code-of-conduct/>`_.
+chat rooms, and fora is expected to follow the
+`PSF Code of Conduct <https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md>`_.
+
+
+For Enterprise
+==============
+
+Available as part of the Tidelift Subscription.
+
+Setuptools and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
+
+`Learn more <https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=referral&utm_campaign=github>`_.
+
+
+Security Contact
+================
+
+To report a security vulnerability, please use the
+`Tidelift security contact <https://tidelift.com/security>`_.
+Tidelift will coordinate the fix and disclosure.
diff --git a/_distutils_hack/__init__.py b/_distutils_hack/__init__.py
new file mode 100644
index 0000000..605a6ed
--- /dev/null
+++ b/_distutils_hack/__init__.py
@@ -0,0 +1,187 @@
+# don't import any costly modules
+import sys
+import os
+
+
+is_pypy = '__pypy__' in sys.builtin_module_names
+
+
+def warn_distutils_present():
+ if 'distutils' not in sys.modules:
+ return
+ if is_pypy and sys.version_info < (3, 7):
+ # PyPy for 3.6 unconditionally imports distutils, so bypass the warning
+ # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
+ return
+ import warnings
+ warnings.warn(
+ "Distutils was imported before Setuptools, but importing Setuptools "
+ "also replaces the `distutils` module in `sys.modules`. This may lead "
+ "to undesirable behaviors or errors. To avoid these issues, avoid "
+ "using distutils directly, ensure that setuptools is installed in the "
+ "traditional way (e.g. not an editable install), and/or make sure "
+ "that setuptools is always imported before distutils.")
+
+
+def clear_distutils():
+ if 'distutils' not in sys.modules:
+ return
+ import warnings
+ warnings.warn("Setuptools is replacing distutils.")
+ mods = [
+ name for name in sys.modules
+ if name == "distutils" or name.startswith("distutils.")
+ ]
+ for name in mods:
+ del sys.modules[name]
+
+
+def enabled():
+ """
+ Allow selection of distutils by environment variable.
+ """
+ which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local')
+ return which == 'local'
+
+
+def ensure_local_distutils():
+ import importlib
+ clear_distutils()
+
+ # With the DistutilsMetaFinder in place,
+ # perform an import to cause distutils to be
+ # loaded from setuptools._distutils. Ref #2906.
+ with shim():
+ importlib.import_module('distutils')
+
+ # check that submodules load as expected
+ core = importlib.import_module('distutils.core')
+ assert '_distutils' in core.__file__, core.__file__
+ assert 'setuptools._distutils.log' not in sys.modules
+
+
+def do_override():
+ """
+ Ensure that the local copy of distutils is preferred over stdlib.
+
+ See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
+ for more motivation.
+ """
+ if enabled():
+ warn_distutils_present()
+ ensure_local_distutils()
+
+
+class _TrivialRe:
+ def __init__(self, *patterns):
+ self._patterns = patterns
+
+ def match(self, string):
+ return all(pat in string for pat in self._patterns)
+
+
+class DistutilsMetaFinder:
+ def find_spec(self, fullname, path, target=None):
+ if path is not None:
+ return
+
+ method_name = 'spec_for_{fullname}'.format(**locals())
+ method = getattr(self, method_name, lambda: None)
+ return method()
+
+ def spec_for_distutils(self):
+ if self.is_cpython():
+ return
+
+ import importlib
+ import importlib.abc
+ import importlib.util
+
+ try:
+ mod = importlib.import_module('setuptools._distutils')
+ except Exception:
+ # There are a couple of cases where setuptools._distutils
+ # may not be present:
+ # - An older Setuptools without a local distutils is
+ # taking precedence. Ref #2957.
+ # - Path manipulation during sitecustomize removes
+ # setuptools from the path but only after the hook
+ # has been loaded. Ref #2980.
+ # In either case, fall back to stdlib behavior.
+ return
+
+ class DistutilsLoader(importlib.abc.Loader):
+
+ def create_module(self, spec):
+ mod.__name__ = 'distutils'
+ return mod
+
+ def exec_module(self, module):
+ pass
+
+ return importlib.util.spec_from_loader(
+ 'distutils', DistutilsLoader(), origin=mod.__file__
+ )
+
+ @staticmethod
+ def is_cpython():
+ """
+ Suppress supplying distutils for CPython (build and tests).
+ Ref #2965 and #3007.
+ """
+ return os.path.isfile('pybuilddir.txt')
+
+ def spec_for_pip(self):
+ """
+ Ensure stdlib distutils when running under pip.
+ See pypa/pip#8761 for rationale.
+ """
+ if self.pip_imported_during_build():
+ return
+ clear_distutils()
+ self.spec_for_distutils = lambda: None
+
+ @classmethod
+ def pip_imported_during_build(cls):
+ """
+ Detect if pip is being imported in a build script. Ref #2355.
+ """
+ import traceback
+ return any(
+ cls.frame_file_is_setup(frame)
+ for frame, line in traceback.walk_stack(None)
+ )
+
+ @staticmethod
+ def frame_file_is_setup(frame):
+ """
+ Return True if the indicated frame suggests a setup.py file.
+ """
+ # some frames may not have __file__ (#2940)
+ return frame.f_globals.get('__file__', '').endswith('setup.py')
+
+
+DISTUTILS_FINDER = DistutilsMetaFinder()
+
+
+def add_shim():
+ DISTUTILS_FINDER in sys.meta_path or insert_shim()
+
+
+class shim:
+ def __enter__(self):
+ insert_shim()
+
+ def __exit__(self, exc, value, tb):
+ remove_shim()
+
+
+def insert_shim():
+ sys.meta_path.insert(0, DISTUTILS_FINDER)
+
+
+def remove_shim():
+ try:
+ sys.meta_path.remove(DISTUTILS_FINDER)
+ except ValueError:
+ pass
diff --git a/_distutils_hack/override.py b/_distutils_hack/override.py
new file mode 100644
index 0000000..2cc433a
--- /dev/null
+++ b/_distutils_hack/override.py
@@ -0,0 +1 @@
+__import__('_distutils_hack').do_override()
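Taken together, the new ``_distutils_hack`` package and the one-line ``override``
module implement the opt-in swap of the standard-library ``distutils`` for the copy
bundled with setuptools: ``do_override()`` consults ``SETUPTOOLS_USE_DISTUTILS``
(default ``local``), clears any previously imported ``distutils`` modules, and
installs the ``DistutilsMetaFinder`` shim just long enough to re-import
``distutils`` from ``setuptools._distutils``. A minimal sketch of exercising this
by hand follows; it assumes a fresh interpreter in which setuptools (and therefore
``setuptools._distutils``) is importable and ``distutils`` has not already been
loaded during site initialization.

.. code-block:: python

    import os

    # 'local' (the default) selects setuptools' bundled copy; 'stdlib' opts out.
    os.environ.setdefault('SETUPTOOLS_USE_DISTUTILS', 'local')

    import _distutils_hack
    _distutils_hack.do_override()   # warn if distutils was already imported, then swap it

    import distutils.core
    # Expected to point inside setuptools/_distutils/ rather than the stdlib.
    print(distutils.core.__file__)

In a normal installation this swap is typically wired up at interpreter startup
(for example via a ``.pth`` file that imports ``_distutils_hack``), so end users
rarely need to call ``do_override()`` themselves.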
diff --git a/bootstrap.py b/bootstrap.py
deleted file mode 100644
index 8c7d7fc..0000000
--- a/bootstrap.py
+++ /dev/null
@@ -1,64 +0,0 @@
-"""
-If setuptools is not already installed in the environment, it's not possible
-to invoke setuptools' own commands. This routine will bootstrap this local
-environment by creating a minimal egg-info directory and then invoking the
-egg-info command to flesh out the egg-info directory.
-"""
-
-from __future__ import unicode_literals
-
-import os
-import sys
-import textwrap
-import subprocess
-import io
-
-
-minimal_egg_info = textwrap.dedent("""
- [distutils.commands]
- egg_info = setuptools.command.egg_info:egg_info
-
- [distutils.setup_keywords]
- include_package_data = setuptools.dist:assert_bool
- install_requires = setuptools.dist:check_requirements
- extras_require = setuptools.dist:check_extras
- entry_points = setuptools.dist:check_entry_points
-
- [egg_info.writers]
- dependency_links.txt = setuptools.command.egg_info:overwrite_arg
- entry_points.txt = setuptools.command.egg_info:write_entries
- requires.txt = setuptools.command.egg_info:write_requirements
- """)
-
-
-def ensure_egg_info():
- if os.path.exists('setuptools.egg-info'):
- return
- print("adding minimal entry_points")
- build_egg_info()
-
-
-def build_egg_info():
- """
- Build a minimal egg-info, enough to invoke egg_info
- """
-
- os.mkdir('setuptools.egg-info')
- with io.open('setuptools.egg-info/entry_points.txt', 'w') as ep:
- ep.write(minimal_egg_info)
-
-
-def run_egg_info():
- cmd = [sys.executable, 'setup.py', 'egg_info']
- print("Regenerating egg_info")
- subprocess.check_call(cmd)
- print("...and again.")
- subprocess.check_call(cmd)
-
-
-def main():
- ensure_egg_info()
- run_egg_info()
-
-
-__name__ == '__main__' and main()
diff --git a/changelog.d/.gitignore b/changelog.d/.gitignore
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/changelog.d/.gitignore
diff --git a/changelog.d/README.rst b/changelog.d/README.rst
new file mode 100644
index 0000000..6def76b
--- /dev/null
+++ b/changelog.d/README.rst
@@ -0,0 +1,93 @@
+.. _Adding change notes with your PRs:
+
+Adding change notes with your PRs
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+It is very important to maintain a log for news of how
+updating to the new version of the software will affect
+end-users. This is why we enforce collection of the change
+fragment files in pull requests as per `Towncrier philosophy`_.
+
+The idea is that when somebody makes a change, they must record
+the bits that would affect end-users, including only information
+that would be useful to them. Then, when the maintainers publish
+a new release, they'll automatically use these records to compose
+a change log for the respective version. It is important to
+understand that including unnecessary low-level implementation
+related details generates noise that is not particularly useful
+to the end-users most of the time. Such details should instead be
+recorded in the Git history rather than a changelog.
+
+Alright! So how to add a news fragment?
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+``setuptools`` uses :pypi:`towncrier` for changelog management.
+To submit a change note about your PR, add a text file into the
+``changelog.d/`` folder. It should contain an
+explanation of what applying this PR will change in the way
+end-users interact with the project. One sentence is usually
+enough but feel free to add as many details as you feel necessary
+for the users to understand what it means.
+
+**Use the past tense** for the text in your fragment because,
+combined with others, it will be a part of the "news digest"
+telling the readers **what changed** in a specific version of
+the library *since the previous version*. You should also use
+reStructuredText syntax for highlighting code (inline or block),
+linking parts of the docs or external sites.
+If you wish to sign your change, feel free to add ``-- by
+:user:`github-username``` at the end (replace ``github-username``
+with your own!).
+
+Finally, name your file following the convention that Towncrier
+understands: it should start with the number of an issue or a
+PR followed by a dot, then add a patch type, like ``change``,
+``doc``, ``misc`` etc., and add ``.rst`` as a suffix. If you
+need to add more than one fragment, you may add an optional
+sequence number (delimited with another period) between the type
+and the suffix.
+
+In general, the name will follow the ``<pr_number>.<category>.rst`` pattern,
+where the categories are:
+
+- ``change``: Any backwards compatible code change
+- ``breaking``: Any backwards-compatibility breaking change
+- ``doc``: A change to the documentation
+- ``misc``: Changes internal to the repo like CI, test and build changes
+- ``deprecation``: For deprecations of an existing feature or behavior
+
+A pull request may have more than one of these components, for example
+a code change may introduce a new feature that deprecates an old
+feature, in which case two fragments should be added. It is not
+necessary to make a separate documentation fragment for documentation
+changes accompanying the relevant code changes.
+
+Examples for adding changelog entries to your Pull Requests
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+File :file:`changelog.d/2395.doc.1.rst`:
+
+.. code-block:: rst
+
+ Added a ``:user:`` role to Sphinx config -- by :user:`webknjaz`
+
+File :file:`changelog.d/1354.misc.rst`:
+
+.. code-block:: rst
+
+ Added ``towncrier`` for changelog management -- by :user:`pganssle`
+
+File :file:`changelog.d/2355.change.rst`:
+
+.. code-block:: rst
+
+ When pip is imported as part of a build, leave :py:mod:`distutils`
+ patched -- by :user:`jaraco`
+
+.. tip::
+
+ See :file:`pyproject.toml` for all available categories
+ (``tool.towncrier.type``).
+
+.. _Towncrier philosophy:
+ https://towncrier.readthedocs.io/en/actual-freaking-docs/#philosophy
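As a further illustration of the naming pattern above, a fragment for one of the
categories not covered by the examples (``deprecation``) follows the same shape;
the PR number, option names, and author below are purely hypothetical.

File :file:`changelog.d/1234.deprecation.rst`:

.. code-block:: rst

    Deprecated the hypothetical ``xyz`` keyword of ``setup()`` in favour of
    ``abc`` -- by :user:`example-user`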
diff --git a/conftest.py b/conftest.py
index 3cccfe1..723e5b4 100644
--- a/conftest.py
+++ b/conftest.py
@@ -1,3 +1,8 @@
+import sys
+
+import pytest
+
+
pytest_plugins = 'setuptools.tests.fixtures'
@@ -6,3 +11,40 @@ def pytest_addoption(parser):
"--package_name", action="append", default=[],
help="list of package_name to pass to test functions",
)
+ parser.addoption(
+ "--integration", action="store_true", default=False,
+ help="run integration tests (only)"
+ )
+
+
+def pytest_configure(config):
+ config.addinivalue_line("markers", "integration: integration tests")
+ config.addinivalue_line("markers", "uses_network: tests may try to download files")
+
+
+collect_ignore = [
+ 'tests/manual_test.py',
+ 'setuptools/tests/mod_with_constant.py',
+ 'setuptools/_distutils',
+ '_distutils_hack',
+ 'setuptools/extern',
+ 'pkg_resources/extern',
+ 'pkg_resources/tests/data',
+ 'setuptools/_vendor',
+ 'pkg_resources/_vendor',
+]
+
+
+if sys.version_info < (3, 6):
+ collect_ignore.append('docs/conf.py') # uses f-strings
+ collect_ignore.append('pavement.py')
+
+
+@pytest.fixture(autouse=True)
+def _skip_integration(request):
+ running_integration_tests = request.config.getoption("--integration")
+ is_integration_test = request.node.get_closest_marker("integration")
+ if running_integration_tests and not is_integration_test:
+ pytest.skip("running integration tests only")
+ if not running_integration_tests and is_integration_test:
+ pytest.skip("skipping integration tests")
diff --git a/docs/Makefile b/docs/Makefile
deleted file mode 100644
index 30bf10a..0000000
--- a/docs/Makefile
+++ /dev/null
@@ -1,75 +0,0 @@
-# Makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line.
-SPHINXOPTS =
-SPHINXBUILD = sphinx-build
-PAPER =
-
-# Internal variables.
-PAPEROPT_a4 = -D latex_paper_size=a4
-PAPEROPT_letter = -D latex_paper_size=letter
-ALLSPHINXOPTS = -d build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-
-.PHONY: help clean html web pickle htmlhelp latex changes linkcheck
-
-help:
- @echo "Please use \`make <target>' where <target> is one of"
- @echo " html to make standalone HTML files"
- @echo " pickle to make pickle files"
- @echo " json to make JSON files"
- @echo " htmlhelp to make HTML files and a HTML help project"
- @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
- @echo " changes to make an overview over all changed/added/deprecated items"
- @echo " linkcheck to check all external links for integrity"
-
-clean:
- -rm -rf build/*
-
-html:
- mkdir -p build/html build/doctrees
- $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) build/html
- @echo
- @echo "Build finished. The HTML pages are in build/html."
-
-pickle:
- mkdir -p build/pickle build/doctrees
- $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) build/pickle
- @echo
- @echo "Build finished; now you can process the pickle files."
-
-web: pickle
-
-json:
- mkdir -p build/json build/doctrees
- $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) build/json
- @echo
- @echo "Build finished; now you can process the JSON files."
-
-htmlhelp:
- mkdir -p build/htmlhelp build/doctrees
- $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) build/htmlhelp
- @echo
- @echo "Build finished; now you can run HTML Help Workshop with the" \
- ".hhp project file in build/htmlhelp."
-
-latex:
- mkdir -p build/latex build/doctrees
- $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) build/latex
- @echo
- @echo "Build finished; the LaTeX files are in build/latex."
- @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
- "run these through (pdf)latex."
-
-changes:
- mkdir -p build/changes build/doctrees
- $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) build/changes
- @echo
- @echo "The overview file is in build/changes."
-
-linkcheck:
- mkdir -p build/linkcheck build/doctrees
- $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) build/linkcheck
- @echo
- @echo "Link check complete; look for any errors in the above output " \
- "or in build/linkcheck/output.txt."
diff --git a/docs/_templates/indexsidebar.html b/docs/_templates/indexsidebar.html
deleted file mode 100644
index 80002d0..0000000
--- a/docs/_templates/indexsidebar.html
+++ /dev/null
@@ -1,8 +0,0 @@
-<h3>Download</h3>
-
-<p>Current version: <b>{{ version }}</b></p>
-<p>Get Setuptools from the <a href="https://pypi.org/project/setuptools/"> Python Package Index</a>
-
-<h3>Questions? Suggestions? Contributions?</h3>
-
-<p>Visit the <a href="https://github.com/pypa/setuptools">Setuptools project page</a> </p>
diff --git a/docs/_theme/nature/static/nature.css_t b/docs/_theme/nature/static/nature.css_t
deleted file mode 100644
index 1a65426..0000000
--- a/docs/_theme/nature/static/nature.css_t
+++ /dev/null
@@ -1,237 +0,0 @@
-/**
- * Sphinx stylesheet -- default theme
- * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- */
-
-@import url("basic.css");
-
-/* -- page layout ----------------------------------------------------------- */
-
-body {
- font-family: Arial, sans-serif;
- font-size: 100%;
- background-color: #111111;
- color: #555555;
- margin: 0;
- padding: 0;
-}
-
-div.documentwrapper {
- float: left;
- width: 100%;
-}
-
-div.bodywrapper {
- margin: 0 0 0 300px;
-}
-
-hr{
- border: 1px solid #B1B4B6;
-}
-
-div.document {
- background-color: #fafafa;
-}
-
-div.body {
- background-color: #ffffff;
- color: #3E4349;
- padding: 1em 30px 30px 30px;
- font-size: 0.9em;
-}
-
-div.footer {
- color: #555;
- width: 100%;
- padding: 13px 0;
- text-align: center;
- font-size: 75%;
-}
-
-div.footer a {
- color: #444444;
-}
-
-div.related {
- background-color: #6BA81E;
- line-height: 36px;
- color: #ffffff;
- text-shadow: 0px 1px 0 #444444;
- font-size: 1.1em;
-}
-
-div.related a {
- color: #E2F3CC;
-}
-
-div.related .right {
- font-size: 0.9em;
-}
-
-div.sphinxsidebar {
- font-size: 0.9em;
- line-height: 1.5em;
- width: 300px;
-}
-
-div.sphinxsidebarwrapper{
- padding: 20px 0;
-}
-
-div.sphinxsidebar h3,
-div.sphinxsidebar h4 {
- font-family: Arial, sans-serif;
- color: #222222;
- font-size: 1.2em;
- font-weight: bold;
- margin: 0;
- padding: 5px 10px;
- text-shadow: 1px 1px 0 white
-}
-
-div.sphinxsidebar h3 a {
- color: #444444;
-}
-
-div.sphinxsidebar p {
- color: #888888;
- padding: 5px 20px;
- margin: 0.5em 0px;
-}
-
-div.sphinxsidebar p.topless {
-}
-
-div.sphinxsidebar ul {
- margin: 10px 10px 10px 20px;
- padding: 0;
- color: #000000;
-}
-
-div.sphinxsidebar a {
- color: #444444;
-}
-
-div.sphinxsidebar a:hover {
- color: #E32E00;
-}
-
-div.sphinxsidebar input {
- border: 1px solid #cccccc;
- font-family: sans-serif;
- font-size: 1.1em;
- padding: 0.15em 0.3em;
-}
-
-div.sphinxsidebar input[type=text]{
- margin-left: 20px;
-}
-
-/* -- body styles ----------------------------------------------------------- */
-
-a {
- color: #005B81;
- text-decoration: none;
-}
-
-a:hover {
- color: #E32E00;
-}
-
-div.body h1,
-div.body h2,
-div.body h3,
-div.body h4,
-div.body h5,
-div.body h6 {
- font-family: Arial, sans-serif;
- font-weight: normal;
- color: #212224;
- margin: 30px 0px 10px 0px;
- padding: 5px 0 5px 0px;
- text-shadow: 0px 1px 0 white;
- border-bottom: 1px solid #C8D5E3;
-}
-
-div.body h1 { margin-top: 0; font-size: 200%; }
-div.body h2 { font-size: 150%; }
-div.body h3 { font-size: 120%; }
-div.body h4 { font-size: 110%; }
-div.body h5 { font-size: 100%; }
-div.body h6 { font-size: 100%; }
-
-a.headerlink {
- color: #c60f0f;
- font-size: 0.8em;
- padding: 0 4px 0 4px;
- text-decoration: none;
-}
-
-a.headerlink:hover {
- background-color: #c60f0f;
- color: white;
-}
-
-div.body p, div.body dd, div.body li {
- line-height: 1.8em;
-}
-
-div.admonition p.admonition-title + p {
- display: inline;
-}
-
-div.highlight{
- background-color: white;
-}
-
-div.note {
- background-color: #eeeeee;
- border: 1px solid #cccccc;
-}
-
-div.seealso {
- background-color: #ffffcc;
- border: 1px solid #ffff66;
-}
-
-div.topic {
- background-color: #fafafa;
- border-width: 0;
-}
-
-div.warning {
- background-color: #ffe4e4;
- border: 1px solid #ff6666;
-}
-
-p.admonition-title {
- display: inline;
-}
-
-p.admonition-title:after {
- content: ":";
-}
-
-pre {
- padding: 10px;
- background-color: #fafafa;
- color: #222222;
- line-height: 1.5em;
- font-size: 1.1em;
- margin: 1.5em 0 1.5em 0;
- -webkit-box-shadow: 0px 0px 4px #d8d8d8;
- -moz-box-shadow: 0px 0px 4px #d8d8d8;
- box-shadow: 0px 0px 4px #d8d8d8;
-}
-
-tt {
- color: #222222;
- padding: 1px 2px;
- font-size: 1.2em;
- font-family: monospace;
-}
-
-#table-of-contents ul {
- padding-left: 2em;
-}
-
diff --git a/docs/_theme/nature/static/pygments.css b/docs/_theme/nature/static/pygments.css
deleted file mode 100644
index 652b761..0000000
--- a/docs/_theme/nature/static/pygments.css
+++ /dev/null
@@ -1,54 +0,0 @@
-.c { color: #999988; font-style: italic } /* Comment */
-.k { font-weight: bold } /* Keyword */
-.o { font-weight: bold } /* Operator */
-.cm { color: #999988; font-style: italic } /* Comment.Multiline */
-.cp { color: #999999; font-weight: bold } /* Comment.preproc */
-.c1 { color: #999988; font-style: italic } /* Comment.Single */
-.gd { color: #000000; background-color: #ffdddd } /* Generic.Deleted */
-.ge { font-style: italic } /* Generic.Emph */
-.gr { color: #aa0000 } /* Generic.Error */
-.gh { color: #999999 } /* Generic.Heading */
-.gi { color: #000000; background-color: #ddffdd } /* Generic.Inserted */
-.go { color: #111 } /* Generic.Output */
-.gp { color: #555555 } /* Generic.Prompt */
-.gs { font-weight: bold } /* Generic.Strong */
-.gu { color: #aaaaaa } /* Generic.Subheading */
-.gt { color: #aa0000 } /* Generic.Traceback */
-.kc { font-weight: bold } /* Keyword.Constant */
-.kd { font-weight: bold } /* Keyword.Declaration */
-.kp { font-weight: bold } /* Keyword.Pseudo */
-.kr { font-weight: bold } /* Keyword.Reserved */
-.kt { color: #445588; font-weight: bold } /* Keyword.Type */
-.m { color: #009999 } /* Literal.Number */
-.s { color: #bb8844 } /* Literal.String */
-.na { color: #008080 } /* Name.Attribute */
-.nb { color: #999999 } /* Name.Builtin */
-.nc { color: #445588; font-weight: bold } /* Name.Class */
-.no { color: #ff99ff } /* Name.Constant */
-.ni { color: #800080 } /* Name.Entity */
-.ne { color: #990000; font-weight: bold } /* Name.Exception */
-.nf { color: #990000; font-weight: bold } /* Name.Function */
-.nn { color: #555555 } /* Name.Namespace */
-.nt { color: #000080 } /* Name.Tag */
-.nv { color: purple } /* Name.Variable */
-.ow { font-weight: bold } /* Operator.Word */
-.mf { color: #009999 } /* Literal.Number.Float */
-.mh { color: #009999 } /* Literal.Number.Hex */
-.mi { color: #009999 } /* Literal.Number.Integer */
-.mo { color: #009999 } /* Literal.Number.Oct */
-.sb { color: #bb8844 } /* Literal.String.Backtick */
-.sc { color: #bb8844 } /* Literal.String.Char */
-.sd { color: #bb8844 } /* Literal.String.Doc */
-.s2 { color: #bb8844 } /* Literal.String.Double */
-.se { color: #bb8844 } /* Literal.String.Escape */
-.sh { color: #bb8844 } /* Literal.String.Heredoc */
-.si { color: #bb8844 } /* Literal.String.Interpol */
-.sx { color: #bb8844 } /* Literal.String.Other */
-.sr { color: #808000 } /* Literal.String.Regex */
-.s1 { color: #bb8844 } /* Literal.String.Single */
-.ss { color: #bb8844 } /* Literal.String.Symbol */
-.bp { color: #999999 } /* Name.Builtin.Pseudo */
-.vc { color: #ff99ff } /* Name.Variable.Class */
-.vg { color: #ff99ff } /* Name.Variable.Global */
-.vi { color: #ff99ff } /* Name.Variable.Instance */
-.il { color: #009999 } /* Literal.Number.Integer.Long */ \ No newline at end of file
diff --git a/docs/_theme/nature/theme.conf b/docs/_theme/nature/theme.conf
deleted file mode 100644
index 1cc4004..0000000
--- a/docs/_theme/nature/theme.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[theme]
-inherit = basic
-stylesheet = nature.css
-pygments_style = tango
diff --git a/docs/artwork.rst b/docs/artwork.rst
new file mode 100644
index 0000000..907e62a
--- /dev/null
+++ b/docs/artwork.rst
@@ -0,0 +1,119 @@
+=======
+Artwork
+=======
+
+.. figure:: images/logo-over-white.svg
+ :align: center
+
+ Setuptools logo, designed in 2021 by `Anderson Bravalheri`_
+
+Elements of Design
+==================
+
+The main colours of the design are a dark pastel azure (``#336790``) and a pale
+orange (``#E5B62F``), referred to in this document simply as "blue" and "yellow"
+respectively. The text uses the *Monoid* typeface, an open source webfont
+developed by Andreas Larsen and contributors in 2015 and distributed
+under the MIT or SIL licenses (more information at
+https://github.com/larsenwork/monoid).
+
+
+Usage
+=====
+
+The preferred way of using the setuptools logo is over a white (or light)
+background. Alternatively, the following options can be considered, depending
+on the circumstances:
+
+- *"negative"* design - for dark backgrounds (e.g. website displayed in "dark
+ mode"): the white colour (``#FFFFFF``) of the background and the "blue"
+ (``#336790``) colour of the design can be swapped.
+- *"monochrome"* - when colours are not available (e.g. black and white printed
+ media): a completely black or white version of the logo can also be used.
+- *"banner"* mode: the symbol and text can be used alongside depending on the
+ available space.
+
+The following image illustrates these alternatives:
+
+.. image:: images/logo-demo.svg
+ :align: center
+
+Please refer to the SVG files in the `setuptools repository`_ for the specific
+shapes and proportions between the elements of the design.
+
+
+Working with the Design
+=======================
+
+The `setuptools repository`_ contains a series of vector representations of the
+design under the ``docs/images`` directory. These representations can be
+manipulated via any graphic editor that support SVG files,
+however the free and open-source software Inkscape_ is recommended for maximum
+compatibility.
+
+When selecting the right file to work with, file names including
+``editable-inkscape`` indicate "more editable" elements (e.g. editable text),
+while the others prioritise SVG paths for maximum reproducibility.
+
+Also note that you might have to `install the correct fonts`_ to be able to
+visualise or edit some of the designs.
+
+
+Inspiration
+===========
+
+This design was inspired by :user:`cajhne`'s `original proposal`_ and the
+ancient symbol of the ouroboros_.
+It features a snake moving in a circular trajectory not only as a reference to
+the Python programming language but also to the `wheel package format`_ as one
+of the distribution formats supported by setuptools.
+The shape of the snake also resembles a cog, which together with the hammer is
+a nod to the two words that compose the name of the project.
+
+
+License
+=======
+
+
+This logo, its design variations, or a modified version may be used by anyone to
+refer to setuptools, but such usage does not indicate endorsement by the project.
+
+Redistribution, usage and derivative works are permitted under the same license
+used by the setuptools software (MIT):
+
+.. code-block:: text
+
+ Copyright (c) Anderson Bravalheri
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to
+ deal in the Software without restriction, including without limitation the
+ rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ sell copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+ IN THE SOFTWARE.
+
+ THE USAGE OF THIS LOGO AND ARTWORK DOES NOT INDICATE ENDORSEMENT BY THE
+ SETUPTOOLS PROJECT.
+
+Whenever possible, please make the image a link to
+https://github.com/pypa/setuptools or https://setuptools.pypa.io.
+
+
+.. _Anderson Bravalheri: https://github.com/abravalheri
+.. _Inkscape: https://inkscape.org
+.. _setuptools repository: https://github.com/pypa/setuptools
+.. _install the correct fonts: https://wiki.inkscape.org/wiki/Installing_fonts
+.. _original proposal: https://github.com/pypa/setuptools/issues/2227#issuecomment-653628344
+.. _wheel package format: https://www.python.org/dev/peps/pep-0427/
+.. _ouroboros: https://en.wikipedia.org/wiki/Ouroboros
diff --git a/docs/build_meta.rst b/docs/build_meta.rst
new file mode 100644
index 0000000..cb37272
--- /dev/null
+++ b/docs/build_meta.rst
@@ -0,0 +1,176 @@
+=======================================
+Build System Support
+=======================================
+
+What is it?
+-------------
+
+Python packaging has come `a long way <https://bernat.tech/posts/pep-517-518/>`_.
+
+The traditional ``setuptools`` way of packaging Python modules
+uses a ``setup()`` function within the ``setup.py`` script. Commands such as
+``python setup.py bdist`` or ``python setup.py bdist_wheel`` generate a
+distribution bundle and ``python setup.py install`` installs the distribution.
+This interface makes it difficult to choose other packaging tools without an
+overhaul. Because ``setup.py`` scripts allowed for arbitrary execution, it
+proved difficult to provide a reliable user experience across environments
+and history.
+
+`PEP 517 <https://www.python.org/dev/peps/pep-0517/>`_ therefore came to the
+rescue and specified a new standard for
+packaging and distributing Python modules. Under PEP 517:
+
+ a ``pyproject.toml`` file is used to specify what program to use
+ for generating the distribution.
+
+ Then, two functions provided by the program, ``build_wheel(directory: str)``
+ and ``build_sdist(directory: str)`` create the distribution bundle at the
+ specified ``directory``. The program is free to use its own configuration
+ script or extend the ``.toml`` file.
+
+ Lastly, ``pip install *.whl`` or ``pip install *.tar.gz`` does the actual
+ installation. If a ``*.whl`` is available, ``pip`` will go ahead and copy
+ its files into the ``site-packages`` directory. If not, ``pip`` will look at
+ ``pyproject.toml`` and decide what program to use to 'build from source'
+ (the default is ``setuptools``).
+
+With this standard, switching between packaging tools becomes a lot easier. ``build_meta``
+implements ``setuptools``' build system support.
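+
+To make these hooks concrete, the following is a minimal sketch (not part of
+the official interface description) of how a build frontend *could* call the
+backend named in ``pyproject.toml``; real frontends such as ``pip`` and
+``build`` add build isolation and dependency handling on top of this:
+
+.. code-block:: python
+
+    # Illustrative only: call the PEP 517 hooks of setuptools directly.
+    # Assumes it is run from the project root and that setuptools and wheel
+    # are importable in the current environment.
+    import importlib
+
+    backend = importlib.import_module("setuptools.build_meta")
+
+    # Each hook receives the directory where the artefact should be written
+    # and returns the basename of the file it created.
+    sdist_name = backend.build_sdist("dist")
+    wheel_name = backend.build_wheel("dist")
+    print(sdist_name, wheel_name)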
+
+How to use it?
+--------------
+
+Start with a package that you want to distribute. You will need your source
+scripts, a ``pyproject.toml`` file, and a ``setup.cfg`` file::
+
+ ~/meowpkg/
+ pyproject.toml
+ setup.cfg
+ meowpkg/__init__.py
+
+The ``pyproject.toml`` file is required to specify the build system (i.e. what is
+being used to package your scripts and install from source). To use it with
+setuptools, the content would be::
+
+ [build-system]
+ requires = ["setuptools"]
+ build-backend = "setuptools.build_meta"
+
+The ``setuptools`` package implements the ``build_sdist``
+command and the ``wheel`` package implements the ``build_wheel``
+command; the latter is a dependency of the former
+exposed via :pep:`517` hooks.
+
+Use ``setuptools``' :ref:`declarative config <declarative config>` to
+specify the package information::
+
+ [metadata]
+ name = meowpkg
+ version = 0.0.1
+ description = a package that meows
+
+ [options]
+ packages = find:
+
+.. _building:
+
+Now generate the distribution. To build the package, use
+`PyPA build <https://pypa-build.readthedocs.io/en/latest/>`_::
+
+ $ pip install -q build
+ $ python -m build
+
+And now it's done! The ``.whl`` file and ``.tar.gz`` can then be distributed
+and installed::
+
+ dist/
+ meowpkg-0.0.1.whl
+ meowpkg-0.0.1.tar.gz
+
+ $ pip install dist/meowpkg-0.0.1.whl
+
+or::
+
+ $ pip install dist/meowpkg-0.0.1.tar.gz
+
+Dynamic build dependencies and other ``build_meta`` tweaks
+----------------------------------------------------------
+
+With the changes introduced by :pep:`517` and :pep:`518`, the
+``setup_requires`` configuration field was deprecated in ``setup.cfg`` and
+``setup.py``, in favour of directly listing build dependencies in the
+``requires`` field of the ``build-system`` table of ``pyproject.toml``.
+This approach has a series of advantages and gives package managers and
+installers the ability to inspect in advance the build requirements and
+perform a series of optimisations.
+
+However, some package authors might still need to dynamically inspect the final
+user's machine before deciding these requirements. One way of doing that, as
+specified by :pep:`517`, is to "tweak" ``setuptools.build_meta`` by using a
+:pep:`in-tree backend <517#in-tree-build-backends>`.
+
+.. tip:: Before implementing an *in-tree* backend, have a look at
+ :pep:`PEP 508 <508#environment-markers>`. Most of the time, dependencies
+ with **environment markers** are enough to differentiate operating systems
+ and platforms.
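+
+ For example, a Windows-only build dependency can be declared with a marker
+ directly in ``pyproject.toml`` (a sketch; the ``pywin32`` requirement below
+ is purely illustrative):
+
+ .. code-block:: toml
+
+    [build-system]
+    requires = [
+        "setuptools",
+        "wheel",
+        "pywin32; sys_platform == 'win32'",
+    ]
+    build-backend = "setuptools.build_meta"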
+
+If you add the following configuration to your ``pyproject.toml``:
+
+
+.. code-block:: toml
+
+ [build-system]
+ requires = ["setuptools", "wheel"]
+ build-backend = "backend"
+ backend-path = ["_custom_build"]
+
+
+then you should be able to implement a thin wrapper around ``build_meta`` in
+the ``_custom_build/backend.py`` file, as shown in the following example:
+
+.. code-block:: python
+
+ from setuptools import build_meta as _orig
+
+ prepare_metadata_for_build_wheel = _orig.prepare_metadata_for_build_wheel
+ build_wheel = _orig.build_wheel
+ build_sdist = _orig.build_sdist
+
+
+ def get_requires_for_build_wheel(config_settings=None):
+ return _orig.get_requires_for_build_wheel(config_settings) + [...]
+
+
+ def get_requires_for_build_sdist(config_settings=None):
+ return _orig.get_requires_for_build_sdist(config_settings) + [...]
+
+
+Note that you can override any of the functions specified in :pep:`PEP 517
+<517#build-backend-interface>`, not only the ones responsible for gathering
+requirements.
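+
+With the configuration above, a hypothetical project layout would therefore
+look like this (the ``_custom_build`` directory sits next to
+``pyproject.toml``)::
+
+    ~/meowpkg/
+        pyproject.toml
+        setup.cfg
+        _custom_build/backend.py
+        meowpkg/__init__.py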
+
+.. important:: Make sure your backend script is included in the :doc:`source
+ distribution </userguide/distribution>`, otherwise the build will fail.
+ This can be done by using an SCM_/VCS_ plugin (like :pypi:`setuptools-scm`
+ and :pypi:`setuptools-svn`), or by correctly setting up :ref:`MANIFEST.in
+ <manifest>`.
+
+ If this is the first time you are using a customised backend, please have a
+ look at the generated ``.tar.gz`` and ``.whl``.
+ On POSIX systems that can be done with ``tar -tf dist/*.tar.gz``
+ and ``unzip -l dist/*.whl``.
+ On Windows systems you can rename the ``.whl`` to ``.zip`` to be able to
+ inspect it in the file explorer, and use the same ``tar`` command in a
+ command prompt (alternatively there are GUI programs like `7-zip`_ that
+ handle ``.tar.gz``).
+
+ In general the backend script should be present in the ``.tar.gz`` (so the
+ project can be built from source) but not in the ``.whl`` (otherwise the
+ backend script would end up being distributed alongside your package).
+ See ":doc:`/userguide/package_discovery`" for more details about package
+ files.
+
+
+.. _SCM: https://en.wikipedia.org/wiki/Software_configuration_management
+.. _VCS: https://en.wikipedia.org/wiki/Version_control
+.. _7-zip: https://www.7-zip.org
diff --git a/docs/conf.py b/docs/conf.py
index f7d0230..4ebb521 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,87 +1,6 @@
-# -*- coding: utf-8 -*-
-#
-# Setuptools documentation build configuration file, created by
-# sphinx-quickstart on Fri Jul 17 14:22:37 2009.
-#
-# This file is execfile()d with the current directory set to its containing dir.
-#
-# The contents of this file are pickled, so don't put values in the namespace
-# that aren't pickleable (module imports are okay, they're removed automatically).
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-
-import subprocess
-import sys
-import os
-
-
-# hack to run the bootstrap script so that jaraco.packaging.sphinx
-# can invoke setup.py
-'READTHEDOCS' in os.environ and subprocess.check_call(
- [sys.executable, 'bootstrap.py'],
- cwd=os.path.join(os.path.dirname(__file__), os.path.pardir),
-)
-
-# -- General configuration -----------------------------------------------------
-
-# Add any Sphinx extension module names here, as strings. They can be extensions
-# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = ['jaraco.packaging.sphinx', 'rst.linker', 'sphinx.ext.autosectionlabel']
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
-
-# The suffix of source filenames.
-source_suffix = '.txt'
-
-# The master toctree document.
-master_doc = 'index'
-
-# List of directories, relative to source directory, that shouldn't be searched
-# for source files.
-exclude_trees = []
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
-
-# -- Options for HTML output ---------------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. Major themes that come with
-# Sphinx are currently 'default' and 'sphinxdoc'.
-html_theme = 'nature'
+extensions = ['sphinx.ext.autodoc', 'jaraco.packaging.sphinx', 'rst.linker']
-# Add any paths that contain custom themes here, relative to this directory.
-html_theme_path = ['_theme']
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-html_sidebars = {'index': 'indexsidebar.html'}
-
-# If false, no module index is generated.
-html_use_modindex = False
-
-# If false, no index is generated.
-html_use_index = False
-
-# -- Options for LaTeX output --------------------------------------------------
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title, author, documentclass [howto/manual]).
-latex_documents = [
- ('index', 'Setuptools.tex', 'Setuptools Documentation',
- 'The fellowship of the packaging', 'manual'),
-]
+master_doc = "index"
link_files = {
'../CHANGES.rst': dict(
@@ -91,7 +10,11 @@ link_files = {
),
replace=[
dict(
- pattern=r'(Issue )?#(?P<issue>\d+)',
+ pattern=r'(?<!\w)PR #(?P<pull>\d+)',
+ url='{package_url}/pull/{pull}',
+ ),
+ dict(
+ pattern=r'(?<!\w)(Issue )?#(?P<issue>\d+)',
url='{package_url}/issues/{issue}',
),
dict(
@@ -115,7 +38,7 @@ link_files = {
url='http://bugs.jython.org/issue{jython}',
),
dict(
- pattern=r'Python #(?P<python>\d+)',
+ pattern=r'(Python #|bpo-)(?P<python>\d+)',
url='http://bugs.python.org/issue{python}',
),
dict(
@@ -135,7 +58,7 @@ link_files = {
url='{GH}/pypa/packaging/blob/{packaging_ver}/CHANGELOG.rst',
),
dict(
- pattern=r'PEP[- ](?P<pep_number>\d+)',
+ pattern=r'(?<![`/\w])PEP[- ](?P<pep_number>\d+)',
url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/',
),
dict(
@@ -143,9 +66,150 @@ link_files = {
url='{GH}/jaraco/setuptools_svn/issues/{setuptools_svn}',
),
dict(
+ pattern=r'pypa/(?P<issue_repo>[\-\.\w]+)#(?P<issue_number>\d+)',
+ url='{GH}/pypa/{issue_repo}/issues/{issue_number}',
+ ),
+ dict(
+ pattern=r'pypa/(?P<commit_repo>[\-\.\w]+)@(?P<commit_number>[\da-f]+)',
+ url='{GH}/pypa/{commit_repo}/commit/{commit_number}',
+ ),
+ dict(
pattern=r'^(?m)((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n',
with_scm='{text}\n{rev[timestamp]:%d %b %Y}\n',
),
],
),
}
+
+# Be strict about any broken references:
+nitpicky = True
+
+# Include Python intersphinx mapping to prevent failures
+# jaraco/skeleton#51
+extensions += ['sphinx.ext.intersphinx']
+intersphinx_mapping = {
+ 'python': ('https://docs.python.org/3', None),
+}
+
+intersphinx_mapping.update({
+ 'pypa-build': ('https://pypa-build.readthedocs.io/en/latest/', None)
+})
+
+# Add support for linking usernames
+github_url = 'https://github.com'
+github_repo_org = 'pypa'
+github_repo_name = 'setuptools'
+github_repo_slug = f'{github_repo_org}/{github_repo_name}'
+github_repo_url = f'{github_url}/{github_repo_slug}'
+github_sponsors_url = f'{github_url}/sponsors'
+extlinks = {
+ 'user': (f'{github_sponsors_url}/%s', '@'), # noqa: WPS323
+ 'pypi': ('https://pypi.org/project/%s', '%s'), # noqa: WPS323
+ 'wiki': ('https://wikipedia.org/wiki/%s', '%s'), # noqa: WPS323
+}
+extensions += ['sphinx.ext.extlinks']
+
+# Ref: https://github.com/python-attrs/attrs/pull/571/files\
+# #diff-85987f48f1258d9ee486e3191495582dR82
+default_role = 'any'
+
+# HTML theme
+html_theme = 'furo'
+html_logo = "images/logo.svg"
+
+html_theme_options = {
+ "sidebar_hide_name": True,
+ "light_css_variables": {
+ "color-brand-primary": "#336790", # "blue"
+ "color-brand-content": "#336790",
+ },
+ "dark_css_variables": {
+ "color-brand-primary": "#E5B62F", # "yellow"
+ "color-brand-content": "#E5B62F",
+ },
+}
+
+# Add support for inline tabs
+extensions += ['sphinx_inline_tabs']
+
+# Support for distutils
+
+# Ref: https://stackoverflow.com/a/30624034/595220
+nitpick_ignore = [
+ ('c:func', 'SHGetSpecialFolderPath'), # ref to MS docs
+ ('envvar', 'DISTUTILS_DEBUG'), # undocumented
+ ('envvar', 'HOME'), # undocumented
+ ('envvar', 'PLAT'), # undocumented
+ ('py:attr', 'CCompiler.language_map'), # undocumented
+ ('py:attr', 'CCompiler.language_order'), # undocumented
+ ('py:class', 'distutils.dist.Distribution'), # undocumented
+ ('py:class', 'distutils.extension.Extension'), # undocumented
+ ('py:class', 'BorlandCCompiler'), # undocumented
+ ('py:class', 'CCompiler'), # undocumented
+ ('py:class', 'CygwinCCompiler'), # undocumented
+ ('py:class', 'distutils.dist.DistributionMetadata'), # undocumented
+ ('py:class', 'FileList'), # undocumented
+ ('py:class', 'IShellLink'), # ref to MS docs
+ ('py:class', 'MSVCCompiler'), # undocumented
+ ('py:class', 'OptionDummy'), # undocumented
+ ('py:class', 'UnixCCompiler'), # undocumented
+ ('py:exc', 'CompileError'), # undocumented
+ ('py:exc', 'DistutilsExecError'), # undocumented
+ ('py:exc', 'DistutilsFileError'), # undocumented
+ ('py:exc', 'LibError'), # undocumented
+ ('py:exc', 'LinkError'), # undocumented
+ ('py:exc', 'PreprocessError'), # undocumented
+ ('py:func', 'distutils.CCompiler.new_compiler'), # undocumented
+ # undocumented:
+ ('py:func', 'distutils.dist.DistributionMetadata.read_pkg_file'),
+ ('py:func', 'distutils.file_util._copy_file_contents'), # undocumented
+ ('py:func', 'distutils.log.debug'), # undocumented
+ ('py:func', 'distutils.spawn.find_executable'), # undocumented
+ ('py:func', 'distutils.spawn.spawn'), # undocumented
+ # TODO: check https://docutils.rtfd.io in the future
+ ('py:mod', 'docutils'), # there's no Sphinx site documenting this
+]
+
+# Allow linking objects on other Sphinx sites seamlessly:
+intersphinx_mapping.update(
+ python2=('https://docs.python.org/2', None),
+ python=('https://docs.python.org/3', None),
+)
+
+# Add support for the unreleased "next-version" change notes
+extensions += ['sphinxcontrib.towncrier']
+# Extension needs a path from here to the towncrier config.
+towncrier_draft_working_directory = '..'
+# Avoid an empty section for unpublished changes.
+towncrier_draft_include_empty = False
+
+extensions += ['jaraco.tidelift']
+
+# Add icons (aka "favicons") to documentation
+extensions += ['sphinx-favicon']
+html_static_path = ['images'] # should contain the folder with icons
+
+# List of dicts with <link> HTML attributes
+# static-file points to files in the html_static_path (href is computed)
+favicons = [
+ { # "Catch-all" goes first, otherwise some browsers will overwrite
+ "rel": "icon",
+ "type": "image/svg+xml",
+ "static-file": "logo-symbol-only.svg",
+ "sizes": "any"
+ },
+ { # Version with thicker strokes for better visibility at smaller sizes
+ "rel": "icon",
+ "type": "image/svg+xml",
+ "static-file": "favicon.svg",
+ "sizes": "16x16 24x24 32x32 48x48"
+ },
+ # rel="apple-touch-icon" does not support SVG yet
+]
+
+intersphinx_mapping['pip'] = 'https://pip.pypa.io/en/latest', None
+intersphinx_mapping['PyPUG'] = ('https://packaging.python.org/en/latest/', None)
+intersphinx_mapping['packaging'] = ('https://packaging.pypa.io/en/latest/', None)
+intersphinx_mapping['importlib-resources'] = (
+ 'https://importlib-resources.readthedocs.io/en/latest', None
+)
diff --git a/docs/deprecated/distutils-legacy.rst b/docs/deprecated/distutils-legacy.rst
new file mode 100644
index 0000000..e73cdff
--- /dev/null
+++ b/docs/deprecated/distutils-legacy.rst
@@ -0,0 +1,36 @@
+Porting from Distutils
+======================
+
+Setuptools and the PyPA have a `stated goal <https://github.com/pypa/packaging-problems/issues/127>`_ to make Setuptools the reference API for distutils.
+
+Since the 60.0.0 release, Setuptools includes a local, vendored copy of distutils (from late copies of CPython) that is enabled by default. To disable the use of this copy of distutils when invoking setuptools, set the environment variable::
+
+ SETUPTOOLS_USE_DISTUTILS=stdlib
+
+
+Prefer Setuptools
+-----------------
+
+As Distutils is deprecated, any usage of functions or objects from distutils is similarly discouraged, and Setuptools aims to replace or deprecate all such uses. This section describes the recommended replacements.
+
+``distutils.core.setup`` → ``setuptools.setup``
+
+``distutils.cmd.Command`` → ``setuptools.Command``
+
+``distutils.command.{build_clib,build_ext,build_py,sdist}`` → ``setuptools.command.*``
+
+``distutils.log`` → :mod:`logging` (standard library)
+
+``distutils.version.*`` → :doc:`packaging.version.* <packaging:version>`
+
+``distutils.errors.*`` → ``setuptools.errors.*`` [#errors]_
+
+
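+For instance, the first two substitutions above usually amount to no more than
+changing an import; a minimal sketch (the custom command below is purely
+illustrative and not part of setuptools):
+
+.. code-block:: python
+
+    # Previously: from distutils.core import setup
+    # Previously: from distutils.cmd import Command
+    from setuptools import setup, Command
+
+    class hello(Command):
+        """Toy command, used only to illustrate the substitution."""
+        user_options = []
+
+        def initialize_options(self):
+            pass
+
+        def finalize_options(self):
+            pass
+
+        def run(self):
+            print("hello from a setuptools Command")
+
+    setup(name="example", version="0.0.0", cmdclass={"hello": hello})
+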
+Migration advice is also provided by :pep:`PEP 632 <632#migration-advice>`.
+
+If a project relies on uses of ``distutils`` that do not have a suitable replacement above, please search the `Setuptools issue tracker <https://github.com/pypa/setuptools/issues/>`_ and file a request, describing the use-case so that Setuptools' maintainers can investigate. Please provide enough detail to help the maintainers understand how distutils is used, what value it provides, and why that behavior should be supported.
+
+
+.. [#errors] Please note that errors related to the command-line usage of
+ ``setup.py``, such as ``DistutilsArgError``, are intentionally not exposed
+ by setuptools, since this is considered a deprecated practice.
diff --git a/docs/deprecated/distutils/_setuptools_disclaimer.rst b/docs/deprecated/distutils/_setuptools_disclaimer.rst
new file mode 100644
index 0000000..628c2e4
--- /dev/null
+++ b/docs/deprecated/distutils/_setuptools_disclaimer.rst
@@ -0,0 +1,5 @@
+.. note::
+
+ This document is being retained solely until the ``setuptools`` documentation
+ at https://setuptools.pypa.io/en/latest/setuptools.html
+ independently covers all of the relevant information currently included here.
diff --git a/docs/deprecated/distutils/apiref.rst b/docs/deprecated/distutils/apiref.rst
new file mode 100644
index 0000000..f00ed74
--- /dev/null
+++ b/docs/deprecated/distutils/apiref.rst
@@ -0,0 +1,2053 @@
+.. _api-reference:
+
+*************
+API Reference
+*************
+
+.. seealso::
+
+ `New and changed setup.py arguments in setuptools`_
+ The ``setuptools`` project adds new capabilities to the ``setup`` function
+ and other APIs, makes the API consistent across different Python versions,
+ and is hence recommended over using ``distutils`` directly.
+
+.. _New and changed setup.py arguments in setuptools: https://setuptools.pypa.io/en/latest/setuptools.html#new-and-changed-setup-keywords
+
+.. include:: ./_setuptools_disclaimer.rst
+
+:mod:`distutils.core` --- Core Distutils functionality
+======================================================
+
+.. module:: distutils.core
+ :synopsis: The core Distutils functionality
+
+
+The :mod:`distutils.core` module is the only module that needs to be installed
+to use the Distutils. It provides the :func:`setup` function (which is called from the
+setup script). Indirectly it provides the :class:`distutils.dist.Distribution` and
+:class:`distutils.cmd.Command` classes.
+
+
+.. function:: setup(arguments)
+
+ The basic do-everything function that does most everything you could ever ask
+ for from a Distutils method.
+
+ The setup function takes a large number of arguments. These are laid out in the
+ following table.
+
+ .. tabularcolumns:: |l|L|L|
+
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | argument name | value | type |
+ +====================+================================+=============================================================+
+ | *name* | The name of the package | a string |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *version* | The version number of the | a string |
+ | | package; see | |
+ | | :mod:`distutils.version` | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *description* | A single line describing the | a string |
+ | | package | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *long_description* | Longer description of the | a string |
+ | | package | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *author* | The name of the package author | a string |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *author_email* | The email address of the | a string |
+ | | package author | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *maintainer* | The name of the current | a string |
+ | | maintainer, if different from | |
+ | | the author. Note that if | |
+ | | the maintainer is provided, | |
+ | | distutils will use it as the | |
+ | | author in :file:`PKG-INFO` | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *maintainer_email* | The email address of the | a string |
+ | | current maintainer, if | |
+ | | different from the author | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *url* | A URL for the package | a string |
+ | | (homepage) | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *download_url* | A URL to download the package | a string |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *packages* | A list of Python packages that | a list of strings |
+ | | distutils will manipulate | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *py_modules* | A list of Python modules that | a list of strings |
+ | | distutils will manipulate | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *scripts* | A list of standalone script | a list of strings |
+ | | files to be built and | |
+ | | installed | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *ext_modules* | A list of Python extensions to | a list of instances of |
+ | | be built | :class:`distutils.core.Extension` |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *classifiers* | A list of categories for the | a list of strings; valid classifiers are listed on `PyPI |
+ | | package | <https://pypi.org/classifiers>`_. |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *distclass* | the :class:`Distribution` | a subclass of |
+ | | class to use | :class:`distutils.core.Distribution` |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *script_name* | The name of the setup.py | a string |
+ | | script - defaults to | |
+ | | ``sys.argv[0]`` | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *script_args* | Arguments to supply to the | a list of strings |
+ | | setup script | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *options* | default options for the setup | a dictionary |
+ | | script | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *license* | The license for the package | a string |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *keywords* | Descriptive meta-data, see | a list of strings or a comma-separated string |
+ | | :pep:`314` | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *platforms* | | a list of strings or a comma-separated string |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *cmdclass* | A mapping of command names to | a dictionary |
+ | | :class:`Command` subclasses | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *data_files* | A list of data files to | a list |
+ | | install | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+ | *package_dir* | A mapping of package to | a dictionary |
+ | | directory names | |
+ +--------------------+--------------------------------+-------------------------------------------------------------+
+
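+ For orientation, a minimal setup script exercising a few of these arguments
+ could look like the following (all names and values are purely
+ illustrative; importing ``setup`` from ``setuptools`` is preferred today)::
+
+    from distutils.core import setup
+
+    setup(
+        name="example-pkg",
+        version="0.1.0",
+        description="An illustrative package",
+        author="Jane Doe",
+        packages=["example_pkg"],
+    )
+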
+
+
+.. function:: run_setup(script_name[, script_args=None, stop_after='run'])
+
+ Run a setup script in a somewhat controlled environment, and return the
+ :class:`distutils.dist.Distribution` instance that drives things. This is
+ useful if you need to find out the distribution meta-data (passed as keyword
+ args from *script* to :func:`setup`), or the contents of the config files or
+ command-line.
+
+ *script_name* is a file that will be read and run with :func:`exec`. ``sys.argv[0]``
+ will be replaced with *script* for the duration of the call. *script_args* is a
+ list of strings; if supplied, ``sys.argv[1:]`` will be replaced by *script_args*
+ for the duration of the call.
+
+ *stop_after* tells :func:`setup` when to stop processing; possible values:
+
+ .. tabularcolumns:: |l|L|
+
+ +---------------+---------------------------------------------+
+ | value | description |
+ +===============+=============================================+
+ | *init* | Stop after the :class:`Distribution` |
+ | | instance has been created and populated |
+ | | with the keyword arguments to :func:`setup` |
+ +---------------+---------------------------------------------+
+ | *config* | Stop after config files have been parsed |
+ | | (and their data stored in the |
+ | | :class:`Distribution` instance) |
+ +---------------+---------------------------------------------+
+ | *commandline* | Stop after the command-line |
+ | | (``sys.argv[1:]`` or *script_args*) have |
+ | | been parsed (and the data stored in the |
+ | | :class:`Distribution` instance.) |
+ +---------------+---------------------------------------------+
+ | *run* | Stop after all commands have been run (the |
+ | | same as if :func:`setup` had been called |
+ | | in the usual way). This is the default |
+ | | value. |
+ +---------------+---------------------------------------------+
+
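+ For illustration, the metadata of a project can be inspected without running
+ any of its commands by stopping early (a sketch; the path to the setup
+ script is hypothetical)::
+
+    from distutils.core import run_setup
+
+    dist = run_setup("setup.py", stop_after="init")
+    print(dist.metadata.name, dist.metadata.version)
+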
+In addition, the :mod:`distutils.core` module exposes a number of classes that
+live elsewhere.
+
+* :class:`~distutils.extension.Extension` from :mod:`distutils.extension`
+
+* :class:`~distutils.cmd.Command` from :mod:`distutils.cmd`
+
+* :class:`~distutils.dist.Distribution` from :mod:`distutils.dist`
+
+A short description of each of these follows, but see the relevant module for
+the full reference.
+
+
+.. class:: Extension
+
+ The Extension class describes a single C or C++ extension module in a setup
+ script. It accepts the following keyword arguments in its constructor:
+
+ .. tabularcolumns:: |l|L|l|
+
+ +------------------------+--------------------------------+---------------------------+
+ | argument name | value | type |
+ +========================+================================+===========================+
+ | *name* | the full name of the | a string |
+ | | extension, including any | |
+ | | packages --- ie. *not* a | |
+ | | filename or pathname, but | |
+ | | Python dotted name | |
+ +------------------------+--------------------------------+---------------------------+
+ | *sources* | list of source filenames, | a list of strings |
+ | | relative to the distribution | |
+ | | root (where the setup script | |
+ | | lives), in Unix form | |
+ | | (slash-separated) for | |
+ | | portability. | |
+ | | Source files may be C, C++, | |
+ | | SWIG (.i), platform-specific | |
+ | | resource files, or whatever | |
+ | | else is recognized by the | |
+ | | :command:`build_ext` command | |
+ | | as source for a Python | |
+ | | extension. | |
+ +------------------------+--------------------------------+---------------------------+
+ | *include_dirs* | list of directories to search | a list of strings |
+ | | for C/C++ header files (in | |
+ | | Unix form for portability) | |
+ +------------------------+--------------------------------+---------------------------+
+ | *define_macros* | list of macros to define; each | a list of tuples |
+ | | macro is defined using a | |
+ | | 2-tuple ``(name, value)``, | |
+ | | where *value* is | |
+ | | either the string to define it | |
+ | | to or ``None`` to define it | |
+ | | without a particular value | |
+ | | (equivalent of ``#define FOO`` | |
+ | | in source or :option:`!-DFOO` | |
+ | | on Unix C compiler command | |
+ | | line) | |
+ +------------------------+--------------------------------+---------------------------+
+ | *undef_macros* | list of macros to undefine | a list of strings |
+ | | explicitly | |
+ +------------------------+--------------------------------+---------------------------+
+ | *library_dirs* | list of directories to search | a list of strings |
+ | | for C/C++ libraries at link | |
+ | | time | |
+ +------------------------+--------------------------------+---------------------------+
+ | *libraries* | list of library names (not | a list of strings |
+ | | filenames or paths) to link | |
+ | | against | |
+ +------------------------+--------------------------------+---------------------------+
+ | *runtime_library_dirs* | list of directories to search | a list of strings |
+ | | for C/C++ libraries at run | |
+ | | time (for shared extensions, | |
+ | | this is when the extension is | |
+ | | loaded) | |
+ +------------------------+--------------------------------+---------------------------+
+ | *extra_objects* | list of extra files to link | a list of strings |
+ | | with (eg. object files not | |
+ | | implied by 'sources', static | |
+ | | library that must be | |
+ | | explicitly specified, binary | |
+ | | resource files, etc.) | |
+ +------------------------+--------------------------------+---------------------------+
+ | *extra_compile_args* | any extra platform- and | a list of strings |
+ | | compiler-specific information | |
+ | | to use when compiling the | |
+ | | source files in 'sources'. For | |
+ | | platforms and compilers where | |
+ | | a command line makes sense, | |
+ | | this is typically a list of | |
+ | | command-line arguments, but | |
+ | | for other platforms it could | |
+ | | be anything. | |
+ +------------------------+--------------------------------+---------------------------+
+ | *extra_link_args* | any extra platform- and | a list of strings |
+ | | compiler-specific information | |
+ | | to use when linking object | |
+ | | files together to create the | |
+ | | extension (or to create a new | |
+ | | static Python interpreter). | |
+ | | Similar interpretation as for | |
+ | | 'extra_compile_args'. | |
+ +------------------------+--------------------------------+---------------------------+
+ | *export_symbols* | list of symbols to be exported | a list of strings |
+ | | from a shared extension. Not | |
+ | | used on all platforms, and not | |
+ | | generally necessary for Python | |
+ | | extensions, which typically | |
+ | | export exactly one symbol: | |
+ | | ``init`` + extension_name. | |
+ +------------------------+--------------------------------+---------------------------+
+ | *depends* | list of files that the | a list of strings |
+ | | extension depends on | |
+ +------------------------+--------------------------------+---------------------------+
+ | *language* | extension language (i.e. | a string |
+ | | ``'c'``, ``'c++'``, | |
+ | | ``'objc'``). Will be detected | |
+ | | from the source extensions if | |
+ | | not provided. | |
+ +------------------------+--------------------------------+---------------------------+
+ | *optional* | specifies that a build failure | a boolean |
+ | | in the extension should not | |
+ | | abort the build process, but | |
+ | | simply skip the extension. | |
+ +------------------------+--------------------------------+---------------------------+
+
+ .. versionchanged:: 3.8
+
+ On Unix, C extensions are no longer linked to libpython except on
+ Android and Cygwin.
+
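+ A minimal, illustrative instance (the module and file names below are
+ hypothetical)::
+
+    from distutils.core import Extension
+
+    ext = Extension(
+        "example._speedups",                      # dotted module name, not a path
+        sources=["src/speedups.c"],
+        include_dirs=["include"],
+        define_macros=[("EXAMPLE_DEBUG", None)],  # equivalent to -DEXAMPLE_DEBUG
+    )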
+
+.. class:: Distribution
+
+ A :class:`Distribution` describes how to build, install and package up a Python
+ software package.
+
+ See the :func:`setup` function for a list of keyword arguments accepted by the
+ Distribution constructor. :func:`setup` creates a Distribution instance.
+
+ .. versionchanged:: 3.7
+ :class:`~distutils.core.Distribution` now warns if ``classifiers``,
+ ``keywords`` and ``platforms`` fields are not specified as a list or
+ a string.
+
+.. class:: Command
+
+ A :class:`Command` class (or rather, an instance of one of its subclasses)
+ implements a single distutils command.
+
+
+:mod:`distutils.ccompiler` --- CCompiler base class
+===================================================
+
+.. module:: distutils.ccompiler
+ :synopsis: Abstract CCompiler class
+
+
+This module provides the abstract base class for the :class:`CCompiler`
+classes. A :class:`CCompiler` instance can be used for all the compile and
+link steps needed to build a single project. Methods are provided to set
+options for the compiler --- macro definitions, include directories, link path,
+libraries and the like.
+
+This module provides the following functions.
+
+
+.. function:: gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries)
+
+ Generate linker options for searching library directories and linking with
+ specific libraries. *libraries* and *library_dirs* are, respectively, lists of
+ library names (not filenames!) and search directories. Returns a list of
+ command-line options suitable for use with some compiler (depending on the two
+ format strings passed in).
+
+
+.. function:: gen_preprocess_options(macros, include_dirs)
+
+ Generate C pre-processor options (:option:`!-D`, :option:`!-U`, :option:`!-I`) as
+ used by at least two types of compilers: the typical Unix compiler and Visual
+ C++. *macros* is the usual thing, a list of 1- or 2-tuples, where ``(name,)``
+ means undefine (:option:`!-U`) macro *name*, and ``(name, value)`` means define
+ (:option:`!-D`) macro *name* to *value*. *include_dirs* is just a list of
+ directory names to be added to the header file search path (:option:`!-I`).
+ Returns a list of command-line options suitable for either Unix compilers or
+ Visual C++.
+
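+ For example (a quick sketch; the macro and directory names are illustrative)::
+
+    >>> from distutils.ccompiler import gen_preprocess_options
+    >>> gen_preprocess_options([("NDEBUG", None), ("FOO",)], ["include"])
+    ['-DNDEBUG', '-UFOO', '-Iinclude']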
+
+.. function:: get_default_compiler(osname, platform)
+
+ Determine the default compiler to use for the given platform.
+
+ *osname* should be one of the standard Python OS names (i.e. the ones returned
+ by ``os.name``) and *platform* the common value returned by ``sys.platform`` for
+ the platform in question.
+
+ The default values are ``os.name`` and ``sys.platform`` in case the parameters
+ are not given.
+
+
+.. function:: new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0)
+
+ Factory function to generate an instance of some CCompiler subclass for the
+ supplied platform/compiler combination. *plat* defaults to ``os.name`` (eg.
+ ``'posix'``, ``'nt'``), and *compiler* defaults to the default compiler for
+ that platform. Currently only ``'posix'`` and ``'nt'`` are supported, and the
+ default compilers are "traditional Unix interface" (:class:`UnixCCompiler`
+ class) and Visual C++ (:class:`MSVCCompiler` class). Note that it's perfectly
+ possible to ask for a Unix compiler object under Windows, and a Microsoft
+ compiler object under Unix---if you supply a value for *compiler*, *plat* is
+ ignored.
+
+ .. % Is the posix/nt only thing still true? Mac OS X seems to work, and
+ .. % returns a UnixCCompiler instance. How to document this... hmm.
+
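+ As a rough illustration of how the factory is typically combined with the
+ :class:`CCompiler` methods described below (the source file name is
+ hypothetical, and a working C compiler must be available)::
+
+    from distutils.ccompiler import new_compiler
+
+    cc = new_compiler()                  # default compiler for this platform
+    cc.add_include_dir("include")
+    objects = cc.compile(["src/foo.c"])
+    cc.link_shared_object(objects, "foo.so")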
+
+.. function:: show_compilers()
+
+ Print list of available compilers (used by the :option:`!--help-compiler` options
+ to :command:`build`, :command:`build_ext`, :command:`build_clib`).
+
+
+.. class:: CCompiler([verbose=0, dry_run=0, force=0])
+
+ The abstract base class :class:`CCompiler` defines the interface that must be
+ implemented by real compiler classes. The class also has some utility methods
+ used by several compiler classes.
+
+ The basic idea behind a compiler abstraction class is that each instance can be
+ used for all the compile/link steps in building a single project. Thus,
+ attributes common to all of those compile and link steps --- include
+ directories, macros to define, libraries to link against, etc. --- are
+ attributes of the compiler instance. To allow for variability in how individual
+ files are treated, most of those attributes may be varied on a per-compilation
+ or per-link basis.
+
+ The constructor for each subclass creates an instance of the Compiler object.
+ Flags are *verbose* (show verbose output), *dry_run* (don't actually execute the
+ steps) and *force* (rebuild everything, regardless of dependencies). All of
+ these flags default to ``0`` (off). Note that you probably don't want to
+ instantiate :class:`CCompiler` or one of its subclasses directly - use the
+ :func:`distutils.CCompiler.new_compiler` factory function instead.
+
+ The following methods allow you to manually alter compiler options for the
+ instance of the Compiler class.
+
+
+ .. method:: CCompiler.add_include_dir(dir)
+
+ Add *dir* to the list of directories that will be searched for header files.
+ The compiler is instructed to search directories in the order in which they are
+ supplied by successive calls to :meth:`add_include_dir`.
+
+
+ .. method:: CCompiler.set_include_dirs(dirs)
+
+ Set the list of directories that will be searched to *dirs* (a list of strings).
+ Overrides any preceding calls to :meth:`add_include_dir`; subsequent calls to
+ :meth:`add_include_dir` add to the list passed to :meth:`set_include_dirs`.
+ This does not affect any list of standard include directories that the compiler
+ may search by default.
+
+
+ .. method:: CCompiler.add_library(libname)
+
+ Add *libname* to the list of libraries that will be included in all links driven
+ by this compiler object. Note that *libname* should \*not\* be the name of a
+ file containing a library, but the name of the library itself: the actual
+ filename will be inferred by the linker, the compiler, or the compiler class
+ (depending on the platform).
+
+ The linker will be instructed to link against libraries in the order they were
+ supplied to :meth:`add_library` and/or :meth:`set_libraries`. It is perfectly
+ valid to duplicate library names; the linker will be instructed to link against
+ libraries as many times as they are mentioned.
+
+
+ .. method:: CCompiler.set_libraries(libnames)
+
+ Set the list of libraries to be included in all links driven by this compiler
+ object to *libnames* (a list of strings). This does not affect any standard
+ system libraries that the linker may include by default.
+
+
+ .. method:: CCompiler.add_library_dir(dir)
+
+ Add *dir* to the list of directories that will be searched for libraries
+ specified to :meth:`add_library` and :meth:`set_libraries`. The linker will be
+ instructed to search for libraries in the order they are supplied to
+ :meth:`add_library_dir` and/or :meth:`set_library_dirs`.
+
+
+ .. method:: CCompiler.set_library_dirs(dirs)
+
+ Set the list of library search directories to *dirs* (a list of strings). This
+ does not affect any standard library search path that the linker may search by
+ default.
+
+
+ .. method:: CCompiler.add_runtime_library_dir(dir)
+
+ Add *dir* to the list of directories that will be searched for shared libraries
+ at runtime.
+
+
+ .. method:: CCompiler.set_runtime_library_dirs(dirs)
+
+ Set the list of directories to search for shared libraries at runtime to *dirs*
+ (a list of strings). This does not affect any standard search path that the
+ runtime linker may search by default.
+
+
+ .. method:: CCompiler.define_macro(name[, value=None])
+
+ Define a preprocessor macro for all compilations driven by this compiler object.
+ The optional parameter *value* should be a string; if it is not supplied, then
+ the macro will be defined without an explicit value and the exact outcome
+ depends on the compiler used.
+
+ .. XXX true? does ANSI say anything about this?
+
+
+ .. method:: CCompiler.undefine_macro(name)
+
+ Undefine a preprocessor macro for all compilations driven by this compiler
+ object. If the same macro is defined by :meth:`define_macro` and
+ undefined by :meth:`undefine_macro` the last call takes precedence
+ (including multiple redefinitions or undefinitions). If the macro is
+ redefined/undefined on a per-compilation basis (ie. in the call to
+ :meth:`compile`), then that takes precedence.
+
+
+ .. method:: CCompiler.add_link_object(object)
+
+ Add *object* to the list of object files (or analogues, such as explicitly named
+ library files or the output of "resource compilers") to be included in every
+ link driven by this compiler object.
+
+
+ .. method:: CCompiler.set_link_objects(objects)
+
+ Set the list of object files (or analogues) to be included in every link to
+ *objects*. This does not affect any standard object files that the linker may
+ include by default (such as system libraries).
+
+ The following methods provide autodetection of compiler options,
+ offering some functionality similar to GNU :program:`autoconf`.
+
+
+ .. method:: CCompiler.detect_language(sources)
+
+ Detect the language of a given file, or list of files. Uses the instance
+ attributes :attr:`~CCompiler.language_map` (a dictionary), and :attr:`~CCompiler.language_order` (a
+ list) to do the job.
+
+
+ .. method:: CCompiler.find_library_file(dirs, lib[, debug=0])
+
+ Search the specified list of directories for a static or shared library file
+ *lib* and return the full path to that file. If *debug* is true, look for a
+ debugging version (if that makes sense on the current platform). Return
+ ``None`` if *lib* wasn't found in any of the specified directories.
+
+
+ .. method:: CCompiler.has_function(funcname [, includes=None, include_dirs=None, libraries=None, library_dirs=None])
+
+ Return a boolean indicating whether *funcname* is supported on the current
+ platform. The optional arguments can be used to augment the compilation
+ environment by providing additional include files and paths and libraries and
+ paths.
+
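+ For instance, one might probe for a POSIX-only function before enabling an
+ optional feature (a sketch; error handling is omitted)::
+
+    from distutils.ccompiler import new_compiler
+
+    cc = new_compiler()
+    if cc.has_function("mkfifo"):
+        print("mkfifo() is available on this platform")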
+
+ .. method:: CCompiler.library_dir_option(dir)
+
+ Return the compiler option to add *dir* to the list of directories searched for
+ libraries.
+
+
+ .. method:: CCompiler.library_option(lib)
+
+ Return the compiler option to add *lib* to the list of libraries linked into the
+ shared library or executable.
+
+
+ .. method:: CCompiler.runtime_library_dir_option(dir)
+
+ Return the compiler option to add *dir* to the list of directories searched for
+ runtime libraries.
+
+
+ .. method:: CCompiler.set_executables(**args)
+
+ Define the executables (and options for them) that will be run to perform the
+ various stages of compilation. The exact set of executables that may be
+ specified here depends on the compiler class (via the 'executables' class
+ attribute), but most will have:
+
+ +--------------+------------------------------------------+
+ | attribute | description |
+ +==============+==========================================+
+ | *compiler* | the C/C++ compiler |
+ +--------------+------------------------------------------+
+ | *linker_so* | linker used to create shared objects and |
+ | | libraries |
+ +--------------+------------------------------------------+
+ | *linker_exe* | linker used to create binary executables |
+ +--------------+------------------------------------------+
+ | *archiver* | static library creator |
+ +--------------+------------------------------------------+
+
+ On platforms with a command-line (Unix, DOS/Windows), each of these is a string
+ that will be split into executable name and (optional) list of arguments.
+ (Splitting the string is done similarly to how Unix shells operate: words are
+ delimited by spaces, but quotes and backslashes can override this. See
+ :func:`distutils.util.split_quoted`.)
+
+ The following methods invoke stages in the build process.
+
+
+ .. method:: CCompiler.compile(sources[, output_dir=None, macros=None, include_dirs=None, debug=0, extra_preargs=None, extra_postargs=None, depends=None])
+
+ Compile one or more source files. Generates object files (e.g. transforms a
+ :file:`.c` file to a :file:`.o` file.)
+
+ *sources* must be a list of filenames, most likely C/C++ files, but in reality
+ anything that can be handled by a particular compiler and compiler class (eg.
+ :class:`MSVCCompiler` can handle resource files in *sources*). Return a list of
+ object filenames, one per source filename in *sources*. Depending on the
+ implementation, not all source files will necessarily be compiled, but all
+ corresponding object filenames will be returned.
+
+ If *output_dir* is given, object files will be put under it, while retaining
+ their original path component. That is, :file:`foo/bar.c` normally compiles to
+ :file:`foo/bar.o` (for a Unix implementation); if *output_dir* is *build*, then
+ it would compile to :file:`build/foo/bar.o`.
+
+ *macros*, if given, must be a list of macro definitions. A macro definition is
+ either a ``(name, value)`` 2-tuple or a ``(name,)`` 1-tuple. The former defines
+ a macro; if the value is ``None``, the macro is defined without an explicit
+ value. The 1-tuple case undefines a macro. Later
+ definitions/redefinitions/undefinitions take precedence.
+
+ *include_dirs*, if given, must be a list of strings, the directories to add to
+ the default include file search path for this compilation only.
+
+ *debug* is a boolean; if true, the compiler will be instructed to output debug
+ symbols in (or alongside) the object file(s).
+
+ *extra_preargs* and *extra_postargs* are implementation-dependent. On platforms
+ that have the notion of a command-line (e.g. Unix, DOS/Windows), they are most
+ likely lists of strings: extra command-line arguments to prepend/append to the
+ compiler command line. On other platforms, consult the implementation class
+ documentation. In any event, they are intended as an escape hatch for those
+ occasions when the abstract compiler framework doesn't cut the mustard.
+
+ *depends*, if given, is a list of filenames that all targets depend on. If a
+ source file is older than any file in depends, then the source file will be
+ recompiled. This supports dependency tracking, but only at a coarse
+ granularity.
+
+ Raises :exc:`CompileError` on failure.
+
+
+ .. method:: CCompiler.create_static_lib(objects, output_libname[, output_dir=None, debug=0, target_lang=None])
+
+ Link a bunch of stuff together to create a static library file. The "bunch of
+ stuff" consists of the list of object files supplied as *objects*, the extra
+ object files supplied to :meth:`add_link_object` and/or
+ :meth:`set_link_objects`, the libraries supplied to :meth:`add_library` and/or
+ :meth:`set_libraries`, and the libraries supplied as *libraries* (if any).
+
+ *output_libname* should be a library name, not a filename; the filename will be
+ inferred from the library name. *output_dir* is the directory where the library
+ file will be put.
+
+ .. XXX defaults to what?
+
+ *debug* is a boolean; if true, debugging information will be included in the
+ library (note that on most platforms, it is the compile step where this matters:
+ the *debug* flag is included here just for consistency).
+
+ *target_lang* is the target language for which the given objects are being
+ compiled. This allows specific linkage time treatment of certain languages.
+
+ Raises :exc:`LibError` on failure.
+
+
+ .. method:: CCompiler.link(target_desc, objects, output_filename[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None])
+
+ Link a bunch of stuff together to create an executable or shared library file.
+
+ The "bunch of stuff" consists of the list of object files supplied as *objects*.
+ *output_filename* should be a filename. If *output_dir* is supplied,
+ *output_filename* is relative to it (i.e. *output_filename* can provide
+ directory components if needed).
+
+ *libraries* is a list of libraries to link against. These are library names,
+ not filenames, since they're translated into filenames in a platform-specific
+ way (eg. *foo* becomes :file:`libfoo.a` on Unix and :file:`foo.lib` on
+ DOS/Windows). However, they can include a directory component, which means the
+ linker will look in that specific directory rather than searching all the normal
+ locations.
+
+ *library_dirs*, if supplied, should be a list of directories to search for
+ libraries that were specified as bare library names (ie. no directory
+ component). These are on top of the system default and those supplied to
+ :meth:`add_library_dir` and/or :meth:`set_library_dirs`. *runtime_library_dirs*
+ is a list of directories that will be embedded into the shared library and used
+ to search for other shared libraries that \*it\* depends on at run-time. (This
+ may only be relevant on Unix.)
+
+ *export_symbols* is a list of symbols that the shared library will export.
+ (This appears to be relevant only on Windows.)
+
+ *debug* is as for :meth:`compile` and :meth:`create_static_lib`, with the
+ slight distinction that it actually matters on most platforms (as opposed to
+ :meth:`create_static_lib`, which includes a *debug* flag mostly for form's
+ sake).
+
+ *extra_preargs* and *extra_postargs* are as for :meth:`compile` (except of
+ course that they supply command-line arguments for the particular linker being
+ used).
+
+ *target_lang* is the target language for which the given objects are being
+ compiled. This allows specific linkage time treatment of certain languages.
+
+ Raises :exc:`LinkError` on failure.
+
+
+ .. method:: CCompiler.link_executable(objects, output_progname[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, debug=0, extra_preargs=None, extra_postargs=None, target_lang=None])
+
+ Link an executable. *output_progname* is the name of the executable file, while
+ *objects* are a list of object filenames to link in. Other arguments are as for
+ the :meth:`link` method.
+
+
+ .. method:: CCompiler.link_shared_lib(objects, output_libname[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None])
+
+ Link a shared library. *output_libname* is the name of the output library,
+ while *objects* is a list of object filenames to link in. Other arguments are
+ as for the :meth:`link` method.
+
+
+ .. method:: CCompiler.link_shared_object(objects, output_filename[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None])
+
+ Link a shared object. *output_filename* is the name of the shared object that
+ will be created, while *objects* is a list of object filenames to link in.
+ Other arguments are as for the :meth:`link` method.
+
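+ A self-contained sketch (file and library names hypothetical) of compiling and
+ then linking a shared object::
+
+    from distutils.ccompiler import new_compiler
+
+    compiler = new_compiler()
+    objects = compiler.compile(['src/spam.c'], output_dir='build/temp')
+    compiler.link_shared_object(
+        objects, 'spamext.so',             # output file name
+        output_dir='build/lib',
+        libraries=['m'],                   # linked as -lm on Unix
+        debug=0,
+    )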
+
+ .. method:: CCompiler.preprocess(source[, output_file=None, macros=None, include_dirs=None, extra_preargs=None, extra_postargs=None])
+
+ Preprocess a single C/C++ source file, named in *source*. Output will be written
+ to the file named *output_file*, or to *stdout* if *output_file* is not supplied.
+ *macros* is a list of macro definitions as for :meth:`compile`, which will
+ augment the macros set with :meth:`define_macro` and :meth:`undefine_macro`.
+ *include_dirs* is a list of directory names that will be added to the default
+ list, in the same way as :meth:`add_include_dir`.
+
+ Raises :exc:`PreprocessError` on failure.
+
+ The following utility methods are defined by the :class:`CCompiler` class, for
+ use by the various concrete subclasses.
+
+
+ .. method:: CCompiler.executable_filename(basename[, strip_dir=0, output_dir=''])
+
+ Returns the filename of the executable for the given *basename*. Typically this
+ is the same as *basename* on non-Windows platforms, while on Windows the
+ :file:`.exe` extension is appended.
+
+
+ .. method:: CCompiler.library_filename(libname[, lib_type='static', strip_dir=0, output_dir=''])
+
+ Returns the filename for the given library name on the current platform. On Unix
+ a library with *lib_type* of ``'static'`` will typically be of the form
+ :file:`lib{libname}.a`, while a *lib_type* of ``'dynamic'`` will be of the form
+ :file:`lib{libname}.so`.
+
+
+ .. method:: CCompiler.object_filenames(source_filenames[, strip_dir=0, output_dir=''])
+
+ Returns the name of the object files for the given source files.
+ *source_filenames* should be a list of filenames.
+
+
+ .. method:: CCompiler.shared_object_filename(basename[, strip_dir=0, output_dir=''])
+
+ Returns the name of a shared object file for the given file name *basename*.
+
+
+ .. method:: CCompiler.execute(func, args[, msg=None, level=1])
+
+ Invokes :func:`distutils.util.execute`. This method invokes a Python function
+ *func* with the given arguments *args*, after logging and taking into account
+ the *dry_run* flag.
+
+
+ .. method:: CCompiler.spawn(cmd)
+
+ Invokes :func:`distutils.spawn.spawn`. This invokes an external process to run
+ the given command.
+
+
+ .. method:: CCompiler.mkpath(name[, mode=0o777])
+
+ Invokes :func:`distutils.dir_util.mkpath`. This creates a directory and any
+ missing ancestor directories.
+
+
+ .. method:: CCompiler.move_file(src, dst)
+
+ Invokes :func:`distutils.file_util.move_file`. Renames *src* to *dst*.
+
+
+ .. method:: CCompiler.announce(msg[, level=1])
+
+ Write a message using :func:`distutils.log.debug`.
+
+
+ .. method:: CCompiler.warn(msg)
+
+ Write a warning message *msg* to standard error.
+
+
+ .. method:: CCompiler.debug_print(msg)
+
+ If the *debug* flag is set on this :class:`CCompiler` instance, print *msg* to
+ standard output, otherwise do nothing.
+
+.. % \subsection{Compiler-specific modules}
+.. %
+.. % The following modules implement concrete subclasses of the abstract
+.. % \class{CCompiler} class. They should not be instantiated directly, but should
+.. % be created using \function{distutils.ccompiler.new_compiler()} factory
+.. % function.
+
+
+:mod:`distutils.unixccompiler` --- Unix C Compiler
+==================================================
+
+.. module:: distutils.unixccompiler
+ :synopsis: UNIX C Compiler
+
+
+This module provides the :class:`UnixCCompiler` class, a subclass of
+:class:`CCompiler` that handles the typical Unix-style command-line C compiler:
+
+* macros defined with :option:`!-Dname[=value]`
+
+* macros undefined with :option:`!-Uname`
+
+* include search directories specified with :option:`!-Idir`
+
+* libraries specified with :option:`!-llib`
+
+* library search directories specified with :option:`!-Ldir`
+
+* compile handled by :program:`cc` (or similar) executable with :option:`!-c`
+ option: compiles :file:`.c` to :file:`.o`
+
+* link static library handled by :program:`ar` command (possibly with
+ :program:`ranlib`)
+
+* link shared library handled by :program:`cc` :option:`!-shared`
+
+
+:mod:`distutils.msvccompiler` --- Microsoft Compiler
+====================================================
+
+.. module:: distutils.msvccompiler
+ :synopsis: Microsoft Compiler
+
+.. XXX: This is *waaaaay* out of date!
+
+This module provides :class:`MSVCCompiler`, an implementation of the abstract
+:class:`CCompiler` class for Microsoft Visual Studio. Typically, extension
+modules need to be compiled with the same compiler that was used to compile
+Python. For Python 2.3 and earlier, the compiler was Visual Studio 6. For Python
+2.4 and 2.5, the compiler was Visual Studio .NET 2003.
+
+:class:`MSVCCompiler` will normally choose the right compiler, linker etc. on
+its own. To override this choice, the environment variables *DISTUTILS_USE_SDK*
+and *MSSdk* must both be set. *MSSdk* indicates that the current environment has
+been set up by the SDK's ``SetEnv.Cmd`` script, or that the environment variables
+were registered when the SDK was installed; *DISTUTILS_USE_SDK* indicates
+that the distutils user has made an explicit choice to override the compiler
+selection by :class:`MSVCCompiler`.
+
+
+:mod:`distutils.bcppcompiler` --- Borland Compiler
+==================================================
+
+.. module:: distutils.bcppcompiler
+
+
+This module provides :class:`BorlandCCompiler`, a subclass of the abstract
+:class:`CCompiler` class for the Borland C++ compiler.
+
+
+:mod:`distutils.cygwinccompiler` --- Cygwin Compiler
+====================================================
+
+.. module:: distutils.cygwinccompiler
+
+
+This module provides the :class:`CygwinCCompiler` class, a subclass of
+:class:`UnixCCompiler` that handles the Cygwin port of the GNU C compiler to
+Windows. It also contains the :class:`Mingw32CCompiler` class, which handles the
+mingw32 port of GCC (the same as Cygwin in no-cygwin mode).
+
+
+:mod:`distutils.archive_util` --- Archiving utilities
+======================================================
+
+.. module:: distutils.archive_util
+ :synopsis: Utility functions for creating archive files (tarballs, zip files, ...)
+
+
+This module provides a few functions for creating archive files, such as
+tarballs or zipfiles.
+
+
+.. function:: make_archive(base_name, format[, root_dir=None, base_dir=None, verbose=0, dry_run=0])
+
+ Create an archive file (eg. ``zip`` or ``tar``). *base_name* is the name of
+ the file to create, minus any format-specific extension; *format* is the
+ archive format: one of ``zip``, ``tar``, ``gztar``, ``bztar``, ``xztar``, or
+ ``ztar``. *root_dir* is a directory that will be the root directory of the
+ archive; ie. we typically ``chdir`` into *root_dir* before creating the
+ archive. *base_dir* is the directory where we start archiving from; ie.
+ *base_dir* will be the common prefix of all files and directories in the
+ archive. *root_dir* and *base_dir* both default to the current directory.
+ Returns the name of the archive file.
+
+ .. versionchanged:: 3.5
+ Added support for the ``xztar`` format.
+
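+ For example, a minimal sketch (directory and project names hypothetical)::
+
+    from distutils.archive_util import make_archive
+
+    # Archives build/myproject-1.0 and returns the archive's file name,
+    # e.g. 'myproject-1.0.tar.gz'.
+    name = make_archive('myproject-1.0', 'gztar',
+                        root_dir='build', base_dir='myproject-1.0')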
+
+.. function:: make_tarball(base_name, base_dir[, compress='gzip', verbose=0, dry_run=0])
+
+ Create an (optionally compressed) archive as a tar file from all files in and
+ under *base_dir*. *compress* must be ``'gzip'`` (the default),
+ ``'bzip2'``, ``'xz'``, ``'compress'``, or ``None``. For the ``'compress'``
+ method the external :program:`compress` utility must be on the default program
+ search path, so this is probably Unix-specific. The output tar file will be
+ named *base_name* + :file:`.tar`, possibly plus the appropriate compression
+ extension (``.gz``, ``.bz2``, ``.xz`` or ``.Z``). Return the output filename.
+
+ .. versionchanged:: 3.5
+ Added support for the ``xz`` compression.
+
+
+.. function:: make_zipfile(base_name, base_dir[, verbose=0, dry_run=0])
+
+ Create a zip file from all files in and under *base_dir*. The output zip file
+ will be named *base_name* + :file:`.zip`. Uses either the :mod:`zipfile` Python
+ module (if available) or the InfoZIP :file:`zip` utility (if installed and
+ found on the default search path). If neither tool is available, raises
+ :exc:`DistutilsExecError`. Returns the name of the output zip file.
+
+
+:mod:`distutils.dep_util` --- Dependency checking
+=================================================
+
+.. module:: distutils.dep_util
+ :synopsis: Utility functions for simple dependency checking
+
+
+This module provides functions for performing simple, timestamp-based
+dependency checking on files and groups of files, as well as functions based
+entirely on such timestamp dependency analysis.
+
+
+.. function:: newer(source, target)
+
+ Return true if *source* exists and is more recently modified than *target*, or
+ if *source* exists and *target* doesn't. Return false if both exist and *target*
+ is the same age or newer than *source*. Raise :exc:`DistutilsFileError` if
+ *source* does not exist.
+
+
+.. function:: newer_pairwise(sources, targets)
+
+ Walk two filename lists in parallel, testing if each source is newer than its
+ corresponding target. Return a pair of lists (*sources*, *targets*) containing
+ only the pairs where the source is newer than the target, according to the
+ semantics of :func:`newer`.
+
+ .. % % equivalent to a listcomp...
+
+
+.. function:: newer_group(sources, target[, missing='error'])
+
+ Return true if *target* is out-of-date with respect to any file listed in
+ *sources*. In other words, if *target* exists and is newer than every file in
+ *sources*, return false; otherwise return true. *missing* controls what we do
+ when a source file is missing; the default (``'error'``) is to blow up with an
+ :exc:`OSError` from inside :func:`os.stat`; if it is ``'ignore'``, we silently
+ drop any missing source files; if it is ``'newer'``, any missing source files
+ make us assume that *target* is out-of-date (this is handy in "dry-run" mode:
+ it'll make you pretend to carry out commands that wouldn't work because inputs
+ are missing, but that doesn't matter because you're not actually going to run
+ the commands).
+
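+ A short sketch of both checks (file names hypothetical)::
+
+    from distutils.dep_util import newer, newer_group
+
+    if newer('spam.c', 'spam.o'):                  # source changed since last build?
+        print('spam.o needs rebuilding')
+
+    deps = ['spam.c', 'spam.h', 'eggs.h']          # everything spam.o depends on
+    if newer_group(deps, 'spam.o', missing='newer'):
+        print('spam.o is out of date')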
+
+:mod:`distutils.dir_util` --- Directory tree operations
+=======================================================
+
+.. module:: distutils.dir_util
+ :synopsis: Utility functions for operating on directories and directory trees
+
+
+This module provides functions for operating on directories and trees of
+directories.
+
+
+.. function:: mkpath(name[, mode=0o777, verbose=0, dry_run=0])
+
+ Create a directory and any missing ancestor directories. If the directory
+ already exists (or if *name* is the empty string, which means the current
+ directory, which of course exists), then do nothing. Raise
+ :exc:`DistutilsFileError` if unable to create some directory along the way (eg.
+ some sub-path exists, but is a file rather than a directory). If *verbose* is
+ true, print a one-line summary of each mkdir to stdout. Return the list of
+ directories actually created.
+
+
+.. function:: create_tree(base_dir, files[, mode=0o777, verbose=0, dry_run=0])
+
+ Create all the empty directories under *base_dir* needed to put *files* there.
+ *base_dir* is just the name of a directory which doesn't necessarily exist
+ yet; *files* is a list of filenames to be interpreted relative to *base_dir*.
+ *base_dir* + the directory portion of every file in *files* will be created if
+ it doesn't already exist. *mode*, *verbose* and *dry_run* flags are as for
+ :func:`mkpath`.
+
+
+.. function:: copy_tree(src, dst[, preserve_mode=1, preserve_times=1, preserve_symlinks=0, update=0, verbose=0, dry_run=0])
+
+ Copy an entire directory tree *src* to a new location *dst*. Both *src* and
+ *dst* must be directory names. If *src* is not a directory, raise
+ :exc:`DistutilsFileError`. If *dst* does not exist, it is created with
+ :func:`mkpath`. The end result of the copy is that every file in *src* is
+ copied to *dst*, and directories under *src* are recursively copied to *dst*.
+ Return the list of files that were copied or might have been copied, using their
+ output name. The return value is unaffected by *update* or *dry_run*: it is
+ simply the list of all files under *src*, with the names changed to be under
+ *dst*.
+
+ *preserve_mode* and *preserve_times* are the same as for
+ :func:`distutils.file_util.copy_file`; note that they only apply to
+ regular files, not to
+ directories. If *preserve_symlinks* is true, symlinks will be copied as
+ symlinks (on platforms that support them!); otherwise (the default), the
+ destination of the symlink will be copied. *update* and *verbose* are the same
+ as for :func:`~distutils.file_util.copy_file`.
+
+ Files in *src* that begin with :file:`.nfs` are skipped (more information on
+ these files is available in answer D2 of the `NFS FAQ page
+ <http://nfs.sourceforge.net/#section_d>`_).
+
+ .. versionchanged:: 3.3.1
+ NFS files are ignored.
+
+.. function:: remove_tree(directory[, verbose=0, dry_run=0])
+
+ Recursively remove *directory* and all files and directories underneath it. Any
+ errors are ignored (apart from being reported to ``sys.stdout`` if *verbose* is
+ true).
+
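+ A brief sketch tying these functions together (directory names hypothetical)::
+
+    from distutils.dir_util import mkpath, copy_tree, remove_tree
+
+    mkpath('build/staging/data')                       # creates missing ancestors too
+    copied = copy_tree('data', 'build/staging/data')   # list of copied file names
+    remove_tree('build/staging', verbose=1)            # clean up again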
+
+:mod:`distutils.file_util` --- Single file operations
+=====================================================
+
+.. module:: distutils.file_util
+ :synopsis: Utility functions for operating on single files
+
+
+This module contains some utility functions for operating on individual files.
+
+
+.. function:: copy_file(src, dst[, preserve_mode=1, preserve_times=1, update=0, link=None, verbose=0, dry_run=0])
+
+ Copy file *src* to *dst*. If *dst* is a directory, then *src* is copied there
+ with the same name; otherwise, it must be a filename. (If the file exists, it
+ will be ruthlessly clobbered.) If *preserve_mode* is true (the default), the
+ file's mode (type and permission bits, or whatever is analogous on the
+ current platform) is copied. If *preserve_times* is true (the default), the
+ last-modified and last-access times are copied as well. If *update* is true,
+ *src* will only be copied if *dst* does not exist, or if *dst* does exist but
+ is older than *src*.
+
+ *link* allows you to make hard links (using :func:`os.link`) or symbolic links
+ (using :func:`os.symlink`) instead of copying: set it to ``'hard'`` or
+ ``'sym'``; if it is ``None`` (the default), files are copied. Don't set *link*
+ on systems that don't support it: :func:`copy_file` doesn't check if hard or
+ symbolic linking is available. It uses :func:`~distutils.file_util._copy_file_contents` to copy file
+ contents.
+
+ Return a tuple ``(dest_name, copied)``: *dest_name* is the actual name of the
+ output file, and *copied* is true if the file was copied (or would have been
+ copied, if *dry_run* true).
+
+ .. % XXX if the destination file already exists, we clobber it if
+ .. % copying, but blow up if linking. Hmmm. And I don't know what
+ .. % macostools.copyfile() does. Should definitely be consistent, and
+ .. % should probably blow up if destination exists and we would be
+ .. % changing it (ie. it's not already a hard/soft link to src OR
+ .. % (not update) and (src newer than dst)).
+
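+ A minimal sketch (file names hypothetical; the destination directory is assumed
+ to exist)::
+
+    from distutils.file_util import copy_file
+
+    dest, copied = copy_file('README.txt', 'build/README.txt',
+                             update=1, verbose=1)
+    if copied:
+        print('refreshed', dest)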
+
+.. function:: move_file(src, dst[, verbose=0, dry_run=0])
+
+ Move file *src* to *dst*. If *dst* is a directory, the file will be moved into
+ it with the same name; otherwise, *src* is just renamed to *dst*. Returns the
+ new full name of the file.
+
+ .. warning::
+
+ Handles cross-device moves on Unix using :func:`copy_file`. What about
+ other systems?
+
+
+.. function:: write_file(filename, contents)
+
+ Create a file called *filename* and write *contents* (a sequence of strings
+ without line terminators) to it.
+
+
+:mod:`distutils.util` --- Miscellaneous other utility functions
+===============================================================
+
+.. module:: distutils.util
+ :synopsis: Miscellaneous other utility functions
+
+
+This module contains other assorted bits and pieces that don't fit into any
+other utility module.
+
+
+.. function:: get_platform()
+
+ Return a string that identifies the current platform. This is used mainly to
+ distinguish platform-specific build directories and platform-specific built
+ distributions. Typically includes the OS name and version and the
+ architecture (as supplied by ``os.uname()``), although the exact information
+ included depends on the OS; e.g., on Linux, the kernel version isn't
+ particularly important.
+
+ Examples of returned values:
+
+ * ``linux-i586``
+ * ``linux-alpha``
+ * ``solaris-2.6-sun4u``
+
+ For non-POSIX platforms, currently just returns ``sys.platform``.
+
+ For Mac OS X systems the OS version reflects the minimal version on which
+ binaries will run (that is, the value of ``MACOSX_DEPLOYMENT_TARGET``
+ during the build of Python), not the OS version of the current system.
+
+ For universal binary builds on Mac OS X the architecture value reflects
+ the universal binary status instead of the architecture of the current
+ processor. For 32-bit universal binaries the architecture is ``fat``,
+ for 64-bit universal binaries the architecture is ``fat64``, and
+ for 4-way universal binaries the architecture is ``universal``. Starting
+ from Python 2.7 and Python 3.2 the architecture ``fat3`` is used for
+ a 3-way universal build (ppc, i386, x86_64) and ``intel`` is used for
+ a universal build with the i386 and x86_64 architectures.
+
+ Examples of returned values on Mac OS X:
+
+ * ``macosx-10.3-ppc``
+
+ * ``macosx-10.3-fat``
+
+ * ``macosx-10.5-universal``
+
+ * ``macosx-10.6-intel``
+
+ For AIX, Python 3.9 and later return a string starting with "aix", followed
+ by additional fields (separated by ``'-'``) that represent the combined
+ values of AIX Version, Release and Technology Level (first field), Build Date
+ (second field), and bit-size (third field). Python 3.8 and earlier returned
+ only a single additional field with the AIX Version and Release.
+
+ Examples of returned values on AIX:
+
+ * ``aix-5307-0747-32`` # 32-bit build on AIX ``oslevel -s``: 5300-07-00-0000
+
+ * ``aix-7105-1731-64`` # 64-bit build on AIX ``oslevel -s``: 7100-05-01-1731
+
+ * ``aix-7.2`` # Legacy form reported in Python 3.8 and earlier
+
+ .. versionchanged:: 3.9
+ The AIX platform string format now also includes the technology level,
+ build date, and ABI bit-size.
+
+
+.. function:: convert_path(pathname)
+
+ Return *pathname* as a name that will work on the native filesystem, i.e. split
+ it on ``'/'`` and put it back together again using the current directory
+ separator. Needed because filenames in the setup script are always supplied in
+ Unix style, and have to be converted to the local convention before we can
+ actually use them in the filesystem. Raises :exc:`ValueError` on non-Unix-ish
+ systems if *pathname* either starts or ends with a slash.
+
+
+.. function:: change_root(new_root, pathname)
+
+ Return *pathname* with *new_root* prepended. If *pathname* is relative, this is
+ equivalent to ``os.path.join(new_root, pathname)``. Otherwise, it requires making
+ *pathname* relative and then joining the two, which is tricky on DOS/Windows.
+
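+ A short sketch of both path helpers (the paths are illustrative; the result
+ shown assumes Unix)::
+
+    from distutils.util import convert_path, change_root
+
+    native = convert_path('docs/api/index.txt')    # backslashes on Windows
+    staged = change_root('/tmp/stage', '/usr/lib/python3/site-packages')
+    # staged == '/tmp/stage/usr/lib/python3/site-packages'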
+
+.. function:: check_environ()
+
+ Ensure that ``os.environ`` has all the environment variables we guarantee that
+ users can use in config files, command-line options, etc. Currently this
+ includes:
+
+ * :envvar:`HOME` - user's home directory (Unix only)
+ * :envvar:`PLAT` - description of the current platform, including hardware and
+ OS (see :func:`get_platform`)
+
+
+.. function:: subst_vars(s, local_vars)
+
+ Perform shell/Perl-style variable substitution on *s*. Every occurrence of
+ ``$`` followed by a name is considered a variable, and the variable is
+ substituted with the value found in the *local_vars* dictionary, or in
+ ``os.environ`` if it's not in *local_vars*. ``os.environ`` is first
+ checked/augmented to guarantee that it contains certain values: see
+ :func:`check_environ`. Raise :exc:`ValueError` for any variables not found in
+ either *local_vars* or ``os.environ``.
+
+ Note that this is not a fully-fledged string interpolation function. A valid
+ ``$variable`` can consist only of upper and lower case letters, numbers and an
+ underscore. No { } or ( ) style quoting is available.
+
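+ For example::
+
+    from distutils.util import subst_vars
+
+    local_vars = {'prefix': '/usr/local', 'name': 'spam'}
+    subst_vars('$prefix/share/$name', local_vars)
+    # -> '/usr/local/share/spam'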
+
+.. function:: split_quoted(s)
+
+ Split a string up according to Unix shell-like rules for quotes and backslashes.
+ In short: words are delimited by spaces, as long as those spaces are not escaped
+ by a backslash, or inside a quoted string. Single and double quotes are
+ equivalent, and the quote characters can be backslash-escaped. The backslash is
+ stripped from any two-character escape sequence, leaving only the escaped
+ character. The quote characters are stripped from any quoted string. Returns a
+ list of words.
+
+ .. % Should probably be moved into the standard library.
+
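+ For example::
+
+    from distutils.util import split_quoted
+
+    split_quoted('gcc -DMSG="hello world" -O2 file.c')
+    # -> ['gcc', '-DMSG=hello world', '-O2', 'file.c']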
+
+.. function:: execute(func, args[, msg=None, verbose=0, dry_run=0])
+
+ Perform some action that affects the outside world (for instance, writing to the
+ filesystem). Such actions are special because they are disabled by the
+ *dry_run* flag. This function takes care of all that bureaucracy for you; all
+ you have to do is supply the function to call and an argument tuple for it (to
+ embody the "external action" being performed), and an optional message to print.
+
+
+.. function:: strtobool(val)
+
+ Convert a string representation of truth to true (1) or false (0).
+
+ True values are ``y``, ``yes``, ``t``, ``true``, ``on`` and ``1``; false values
+ are ``n``, ``no``, ``f``, ``false``, ``off`` and ``0``. Raises
+ :exc:`ValueError` if *val* is anything else.
+
+
+.. function:: byte_compile(py_files[, optimize=0, force=0, prefix=None, base_dir=None, verbose=1, dry_run=0, direct=None])
+
+ Byte-compile a collection of Python source files to :file:`.pyc` files in a
+ :file:`__pycache__` subdirectory (see :pep:`3147` and :pep:`488`).
+ *py_files* is a list of files to compile; any files that don't end in
+ :file:`.py` are silently skipped. *optimize* must be one of the following:
+
+ * ``0`` - don't optimize
+ * ``1`` - normal optimization (like ``python -O``)
+ * ``2`` - extra optimization (like ``python -OO``)
+
+ If *force* is true, all files are recompiled regardless of timestamps.
+
+ The source filename encoded in each :term:`bytecode` file defaults to the filenames
+ listed in *py_files*; you can modify these with *prefix* and *base_dir*.
+ *prefix* is a string that will be stripped off of each source filename, and
+ *base_dir* is a directory name that will be prepended (after *prefix* is
+ stripped). You can supply either or both (or neither) of *prefix* and
+ *base_dir*, as you wish.
+
+ If *dry_run* is true, doesn't actually do anything that would affect the
+ filesystem.
+
+ Byte-compilation is either done directly in this interpreter process with the
+ standard :mod:`py_compile` module, or indirectly by writing a temporary script
+ and executing it. Normally, you should let :func:`byte_compile` figure out
+ whether to use direct compilation or not (see the source for details). The
+ *direct* flag
+ is used by the script generated in indirect mode; unless you know what you're
+ doing, leave it set to ``None``.
+
+ .. versionchanged:: 3.2.3
+ Create ``.pyc`` files with an :func:`import magic tag
+ <imp.get_tag>` in their name, in a :file:`__pycache__` subdirectory
+ instead of files without tag in the current directory.
+
+ .. versionchanged:: 3.5
+ Create ``.pyc`` files according to :pep:`488`.
+
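+ A minimal sketch (file names hypothetical)::
+
+    from distutils.util import byte_compile
+
+    byte_compile(['build/lib/spam.py', 'build/lib/eggs.py'],
+                 optimize=0,
+                 prefix='build/lib/',   # stripped from the recorded source names
+                 verbose=1)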
+
+.. function:: rfc822_escape(header)
+
+ Return a version of *header* escaped for inclusion in an :rfc:`822` header, by
+ ensuring there are eight spaces after each newline. Note that it does no other
+ modification of the string.
+
+ .. % this _can_ be replaced
+
+.. % \subsection{Distutils objects}
+
+
+:mod:`distutils.dist` --- The Distribution class
+================================================
+
+.. module:: distutils.dist
+ :synopsis: Provides the Distribution class, which represents the module distribution being
+ built/installed/distributed
+
+
+This module provides the :class:`~distutils.core.Distribution` class, which
+represents the module distribution being built/installed/distributed.
+
+
+:mod:`distutils.extension` --- The Extension class
+==================================================
+
+.. module:: distutils.extension
+ :synopsis: Provides the Extension class, used to describe C/C++ extension modules in setup
+ scripts
+
+
+This module provides the :class:`~distutils.extension.Extension` class,
+used to describe C/C++ extension modules in setup scripts.
+
+.. % \subsection{Ungrouped modules}
+.. % The following haven't been moved into a more appropriate section yet.
+
+
+:mod:`distutils.debug` --- Distutils debug mode
+===============================================
+
+.. module:: distutils.debug
+ :synopsis: Provides the debug flag for distutils
+
+
+This module provides the ``DEBUG`` flag.
+
+
+:mod:`distutils.errors` --- Distutils exceptions
+================================================
+
+.. module:: distutils.errors
+ :synopsis: Provides standard distutils exceptions
+
+
+Provides exceptions used by the Distutils modules. Note that Distutils modules
+may raise standard exceptions; in particular, :exc:`SystemExit` is usually raised for
+errors that are obviously the end-user's fault (eg. bad command-line arguments).
+
+This module is safe to use in ``from ... import *`` mode; it only exports
+symbols whose names start with ``Distutils`` and end with ``Error``.
+
+
+:mod:`distutils.fancy_getopt` --- Wrapper around the standard getopt module
+===========================================================================
+
+.. module:: distutils.fancy_getopt
+ :synopsis: Additional getopt functionality
+
+
+This module provides a wrapper around the standard :mod:`getopt` module that
+provides the following additional features:
+
+* short and long options are tied together
+
+* options have help strings, so :func:`fancy_getopt` could potentially create a
+ complete usage summary
+
+* options set attributes of a passed-in object
+
+* boolean options can have "negative aliases" --- eg. if :option:`!--quiet` is
+ the "negative alias" of :option:`!--verbose`, then :option:`!--quiet` on the
+ command line sets *verbose* to false.
+
+.. function:: fancy_getopt(options, negative_opt, object, args)
+
+ Wrapper function. *options* is a list of ``(long_option, short_option,
+ help_string)`` 3-tuples as described in the constructor for
+ :class:`FancyGetopt`. *negative_opt* should be a dictionary mapping option names
+ to option names; both the key and the value should be in the *options* list.
+ *object* is an object which will be used to store values (see the :meth:`~FancyGetopt.getopt`
+ method of the :class:`FancyGetopt` class). *args* is the argument list;
+ ``sys.argv[1:]`` is used if you pass ``None`` as *args*.
+
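+ A small sketch of the wrapper in action (the option table is hypothetical)::
+
+    from distutils.fancy_getopt import fancy_getopt
+
+    options = [('verbose', 'v', 'run verbosely'),
+               ('quiet', 'q', 'run quietly'),
+               ('output=', 'o', 'write results to FILE')]
+    negative_opt = {'quiet': 'verbose'}
+
+    # With object=None a fresh OptionDummy is created and returned.
+    args, values = fancy_getopt(options, negative_opt, None,
+                                ['-v', '--output=out.txt', 'leftover'])
+    print(values.verbose, values.output, args)     # 1 out.txt ['leftover']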
+
+.. function:: wrap_text(text, width)
+
+ Wraps *text* so that no line is more than *width* characters wide.
+
+
+.. class:: FancyGetopt([option_table=None])
+
+ The *option_table* is a list of 3-tuples: ``(long_option, short_option,
+ help_string)``.
+
+ If an option takes an argument, its *long_option* should have ``'='`` appended;
+ *short_option* should just be a single character, with no ``':'`` in any case.
+ *short_option* should be ``None`` if a *long_option* doesn't have a
+ corresponding *short_option*. All option tuples must have long options.
+
+The :class:`FancyGetopt` class provides the following methods:
+
+
+.. method:: FancyGetopt.getopt([args=None, object=None])
+
+ Parse command-line options in args. Store as attributes on *object*.
+
+ If *args* is ``None`` or not supplied, uses ``sys.argv[1:]``. If *object* is
+ ``None`` or not supplied, creates a new :class:`OptionDummy` instance, stores
+ option values there, and returns a tuple ``(args, object)``. If *object* is
+ supplied, it is modified in place and :meth:`getopt` just returns *args*; in
+ both cases, the returned *args* is a modified copy of the passed-in *args* list,
+ which is left untouched.
+
+ .. % and args returned are?
+
+
+.. method:: FancyGetopt.get_option_order()
+
+ Returns the list of ``(option, value)`` tuples processed by the previous run of
+ :meth:`getopt`. Raises :exc:`RuntimeError` if :meth:`getopt` hasn't been called
+ yet.
+
+
+.. method:: FancyGetopt.generate_help([header=None])
+
+ Generate help text (a list of strings, one per suggested line of output) from
+ the option table for this :class:`FancyGetopt` object.
+
+ If supplied, prints the supplied *header* at the top of the help.
+
+
+:mod:`distutils.filelist` --- The FileList class
+================================================
+
+.. module:: distutils.filelist
+ :synopsis: The FileList class, used for poking about the file system and
+ building lists of files.
+
+
+This module provides the :class:`FileList` class, used for poking about the
+filesystem and building lists of files.
+
+
+:mod:`distutils.log` --- Simple :pep:`282`-style logging
+========================================================
+
+.. module:: distutils.log
+ :synopsis: A simple logging mechanism, :pep:`282`-style
+
+
+:mod:`distutils.spawn` --- Spawn a sub-process
+==============================================
+
+.. module:: distutils.spawn
+ :synopsis: Provides the spawn() function
+
+
+This module provides the :func:`~distutils.spawn.spawn` function, a
+front-end to various platform-specific functions for launching another
+program in a sub-process.
+Also provides :func:`~distutils.spawn.find_executable` to search the path for a given executable
+name.
+
+
+:mod:`distutils.sysconfig` --- System configuration information
+===============================================================
+
+.. module:: distutils.sysconfig
+ :synopsis: Low-level access to configuration information of the Python interpreter.
+.. moduleauthor:: Fred L. Drake, Jr. <fdrake@acm.org>
+.. moduleauthor:: Greg Ward <gward@python.net>
+.. sectionauthor:: Fred L. Drake, Jr. <fdrake@acm.org>
+
+
+The :mod:`distutils.sysconfig` module provides access to Python's low-level
+configuration information. The specific configuration variables available
+depend heavily on the platform and configuration. The specific variables depend
+on the build process for the specific version of Python being run; the variables
+are those found in the :file:`Makefile` and configuration header that are
+installed with Python on Unix systems. The configuration header is called
+:file:`pyconfig.h` for Python versions starting with 2.2, and :file:`config.h`
+for earlier versions of Python.
+
+Some additional functions are provided which perform some useful manipulations
+for other parts of the :mod:`distutils` package.
+
+
+.. data:: PREFIX
+
+ The result of ``os.path.normpath(sys.prefix)``.
+
+
+.. data:: EXEC_PREFIX
+
+ The result of ``os.path.normpath(sys.exec_prefix)``.
+
+
+.. function:: get_config_var(name)
+
+ Return the value of a single variable. This is equivalent to
+ ``get_config_vars().get(name)``.
+
+
+.. function:: get_config_vars(...)
+
+ Return a set of variable definitions. If there are no arguments, this returns a
+ dictionary mapping names of configuration variables to values. If arguments are
+ provided, they should be strings, and the return value will be a sequence giving
+ the associated values. If a given name does not have a corresponding value,
+ ``None`` will be included for that variable.
+
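+ For example::
+
+    from distutils import sysconfig
+
+    sysconfig.get_config_var('CC')               # e.g. 'gcc' on many Unix systems
+    cflags, ext_suffix = sysconfig.get_config_vars('CFLAGS', 'EXT_SUFFIX')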
+
+.. function:: get_config_h_filename()
+
+ Return the full path name of the configuration header. For Unix, this will be
+ the header generated by the :program:`configure` script; for other platforms the
+ header will have been supplied directly by the Python source distribution. The
+ file is a platform-specific text file.
+
+
+.. function:: get_makefile_filename()
+
+ Return the full path name of the :file:`Makefile` used to build Python. For
+ Unix, this will be a file generated by the :program:`configure` script; the
+ meaning for other platforms will vary. The file is a platform-specific text
+ file, if it exists. This function is only useful on POSIX platforms.
+
+
+.. function:: get_python_inc([plat_specific[, prefix]])
+
+ Return the directory for either the general or platform-dependent C include
+ files. If *plat_specific* is true, the platform-dependent include directory is
+ returned; if false or omitted, the platform-independent directory is returned.
+ If *prefix* is given, it is used as either the prefix instead of
+ :const:`PREFIX`, or as the exec-prefix instead of :const:`EXEC_PREFIX` if
+ *plat_specific* is true.
+
+
+.. function:: get_python_lib([plat_specific[, standard_lib[, prefix]]])
+
+ Return the directory for either the general or platform-dependent library
+ installation. If *plat_specific* is true, the platform-dependent include
+ directory is returned; if false or omitted, the platform-independent directory
+ is returned. If *prefix* is given, it is used as either the prefix instead of
+ :const:`PREFIX`, or as the exec-prefix instead of :const:`EXEC_PREFIX` if
+ *plat_specific* is true. If *standard_lib* is true, the directory for the
+ standard library is returned rather than the directory for the installation of
+ third-party extensions.
+
+The following function is only intended for use within the :mod:`distutils`
+package.
+
+
+.. function:: customize_compiler(compiler)
+
+ Do any platform-specific customization of a
+ :class:`distutils.ccompiler.CCompiler` instance.
+
+ This function is only needed on Unix at this time, but should be called
+ consistently to support forward-compatibility. It inserts the information that
+ varies across Unix flavors and is stored in Python's :file:`Makefile`. This
+ information includes the selected compiler, compiler and linker options, and the
+ extension used by the linker for shared objects.
+
+This function is even more special-purpose, and should only be used from
+Python's own build procedures.
+
+
+.. function:: set_python_build()
+
+ Inform the :mod:`distutils.sysconfig` module that it is being used as part of
+ the build process for Python. This changes a lot of relative locations for
+ files, allowing them to be located in the build area rather than in an installed
+ Python.
+
+
+:mod:`distutils.text_file` --- The TextFile class
+=================================================
+
+.. module:: distutils.text_file
+ :synopsis: Provides the TextFile class, a simple interface to text files
+
+
+This module provides the :class:`TextFile` class, which gives an interface to
+text files that (optionally) takes care of stripping comments, ignoring blank
+lines, and joining lines with backslashes.
+
+
+.. class:: TextFile([filename=None, file=None, **options])
+
+ This class provides a file-like object that takes care of all the things you
+ commonly want to do when processing a text file that has some line-by-line
+ syntax: strip comments (as long as ``#`` is your comment character), skip blank
+ lines, join adjacent lines by escaping the newline (ie. backslash at end of
+ line), strip leading and/or trailing whitespace. All of these are optional and
+ independently controllable.
+
+ The class provides a :meth:`warn` method so you can generate warning messages
+ that report physical line number, even if the logical line in question spans
+ multiple physical lines. Also provides :meth:`unreadline` for implementing
+ line-at-a-time lookahead.
+
+ :class:`TextFile` instances are created with either *filename*, *file*, or both.
+ :exc:`RuntimeError` is raised if both are ``None``. *filename* should be a
+ string, and *file* a file object (or something that provides :meth:`readline`
+ and :meth:`close` methods). It is recommended that you supply at least
+ *filename*, so that :class:`TextFile` can include it in warning messages. If
+ *file* is not supplied, :class:`TextFile` creates its own using the
+ :func:`open` built-in function.
+
+ The options are all boolean, and affect the values returned by :meth:`readline`.
+
+ .. tabularcolumns:: |l|L|l|
+
+ +------------------+--------------------------------+---------+
+ | option name | description | default |
+ +==================+================================+=========+
+ | *strip_comments* | strip from ``'#'`` to | true |
+ | | end-of-line, as well as any | |
+ | | whitespace leading up to the | |
+ | | ``'#'``\ ---unless it is | |
+ | | escaped by a backslash | |
+ +------------------+--------------------------------+---------+
+ | *lstrip_ws* | strip leading whitespace from | false |
+ | | each line before returning it | |
+ +------------------+--------------------------------+---------+
+ | *rstrip_ws* | strip trailing whitespace | true |
+ | | (including line terminator!) | |
+ | | from each line before | |
+ | | returning it. | |
+ +------------------+--------------------------------+---------+
+ | *skip_blanks* | skip lines that are empty | true |
+ | | \*after\* stripping comments | |
+ | | and whitespace. (If both | |
+ | | lstrip_ws and rstrip_ws are | |
+ | | false, then some lines may | |
+ | | consist of solely whitespace: | |
+ | | these will \*not\* be skipped, | |
+ | | even if *skip_blanks* is | |
+ | | true.) | |
+ +------------------+--------------------------------+---------+
+ | *join_lines* | if a backslash is the last | false |
+ | | non-newline character on a | |
+ | | line after stripping comments | |
+ | | and whitespace, join the | |
+ | | following line to it to form | |
+ | | one logical line; if N | |
+ | | consecutive lines end with a | |
+ | | backslash, then N+1 physical | |
+ | | lines will be joined to form | |
+ | | one logical line. | |
+ +------------------+--------------------------------+---------+
+ | *collapse_join* | strip leading whitespace from | false |
+ | | lines that are joined to their | |
+ | | predecessor; only matters if | |
+ | | ``(join_lines and not | |
+ | | lstrip_ws)`` | |
+ +------------------+--------------------------------+---------+
+
+ Note that since *rstrip_ws* can strip the trailing newline, the semantics of
+ :meth:`readline` must differ from those of the built-in file object's
+ :meth:`readline` method! In particular, :meth:`readline` returns ``None`` for
+ end-of-file: an empty string might just be a blank line (or an all-whitespace
+ line), if *rstrip_ws* is true but *skip_blanks* is not.
+
+
+ .. method:: TextFile.open(filename)
+
+ Open a new file *filename*. This overrides any *file* or *filename*
+ constructor arguments.
+
+
+ .. method:: TextFile.close()
+
+ Close the current file and forget everything we know about it (including the
+ filename and the current line number).
+
+
+ .. method:: TextFile.warn(msg[, line=None])
+
+ Print (to stderr) a warning message tied to the current logical line in the
+ current file. If the current logical line in the file spans multiple physical
+ lines, the warning refers to the whole range, such as ``"lines 3-5"``. If
+ *line* is supplied, it overrides the current line number; it may be a list or
+ tuple to indicate a range of physical lines, or an integer for a single
+ physical line.
+
+
+ .. method:: TextFile.readline()
+
+ Read and return a single logical line from the current file (or from an internal
+ buffer if lines have previously been "unread" with :meth:`unreadline`). If the
+ *join_lines* option is true, this may involve reading multiple physical lines
+ concatenated into a single string. Updates the current line number, so calling
+ :meth:`warn` after :meth:`readline` emits a warning about the physical line(s)
+ just read. Returns ``None`` on end-of-file, since the empty string can occur
+ if *rstrip_ws* is true but *skip_blanks* is not.
+
+
+ .. method:: TextFile.readlines()
+
+ Read and return the list of all logical lines remaining in the current file.
+ This updates the current line number to the last line of the file.
+
+
+ .. method:: TextFile.unreadline(line)
+
+ Push *line* (a string) onto an internal buffer that will be checked by future
+ :meth:`readline` calls. Handy for implementing a parser with line-at-a-time
+ lookahead. Note that lines that are "unread" with :meth:`unreadline` are not
+ subsequently re-cleansed (whitespace stripped, or whatever) when read with
+ :meth:`readline`. If multiple calls are made to :meth:`unreadline` before a call
+ to :meth:`readline`, the lines will be returned in most-recent-first order.
+
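+A brief sketch of typical use (the input file name is hypothetical)::
+
+ from distutils.text_file import TextFile
+
+ tf = TextFile('MANIFEST.in',
+               strip_comments=1, skip_blanks=1,
+               join_lines=1, lstrip_ws=1, rstrip_ws=1)
+ while True:
+     line = tf.readline()
+     if line is None:          # None, not '', signals end-of-file
+         break
+     print(line)
+ tf.close()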
+
+:mod:`distutils.version` --- Version number classes
+===================================================
+
+.. module:: distutils.version
+ :synopsis: Implements classes that represent module version numbers.
+
+
+.. % todo
+.. % \section{Distutils Commands}
+.. %
+.. % This part of Distutils implements the various Distutils commands, such
+.. % as \code{build}, \code{install} \&c. Each command is implemented as a
+.. % separate module, with the command name as the name of the module.
+
+
+:mod:`distutils.cmd` --- Abstract base class for Distutils commands
+===================================================================
+
+.. module:: distutils.cmd
+ :synopsis: Provides the abstract base class :class:`~distutils.cmd.Command`. This class
+ is subclassed by the modules in the distutils.command subpackage.
+
+
+This module supplies the abstract base class :class:`Command`.
+
+
+.. class:: Command(dist)
+
+ Abstract base class for defining command classes, the "worker bees" of the
+ Distutils. A useful analogy for command classes is to think of them as
+ subroutines with local variables called *options*. The options are declared
+ in :meth:`initialize_options` and defined (given their final values) in
+ :meth:`finalize_options`, both of which must be defined by every command
+ class. The distinction between the two is necessary because option values
+ might come from the outside world (command line, config file, ...), and any
+ options dependent on other options must be computed after these outside
+ influences have been processed --- hence :meth:`finalize_options`. The body
+ of the subroutine, where it does all its work based on the values of its
+ options, is the :meth:`run` method, which must also be implemented by every
+ command class.
+
+ The class constructor takes a single argument *dist*, a
+ :class:`~distutils.core.Distribution` instance.
+
+
+Creating a new Distutils command
+================================
+
+This section outlines the steps to create a new Distutils command.
+
+A new command lives in a module in the :mod:`distutils.command` package. There
+is a sample template in that directory called :file:`command_template`. Copy
+this file to a new module with the same name as the new command you're
+implementing. This module should implement a class with the same name as the
+module (and the command). So, for instance, to create the command
+``peel_banana`` (so that users can run ``setup.py peel_banana``), you'd copy
+:file:`command_template` to :file:`distutils/command/peel_banana.py`, then edit
+it so that it's implementing the class ``peel_banana``, a subclass of
+:class:`distutils.cmd.Command`.
+
+Subclasses of :class:`Command` must define the following methods.
+
+.. method:: Command.initialize_options()
+
+ Set default values for all the options that this command supports. Note that
+ these defaults may be overridden by other commands, by the setup script, by
+ config files, or by the command-line. Thus, this is not the place to code
+ dependencies between options; generally, :meth:`initialize_options`
+ implementations are just a bunch of ``self.foo = None`` assignments.
+
+
+.. method:: Command.finalize_options()
+
+ Set final values for all the options that this command supports. This is
+ always called as late as possible, ie. after any option assignments from the
+ command-line or from other commands have been done. Thus, this is the place
+ to code option dependencies: if *foo* depends on *bar*, then it is safe to
+ set *foo* from *bar* as long as *foo* still has the same value it was
+ assigned in :meth:`initialize_options`.
+
+
+.. method:: Command.run()
+
+ A command's raison d'etre: carry out the action it exists to perform, controlled
+ by the options initialized in :meth:`initialize_options`, customized by other
+ commands, the setup script, the command-line, and config files, and finalized in
+ :meth:`finalize_options`. All terminal output and filesystem interaction should
+ be done by :meth:`run`.
+
+
+.. attribute:: Command.sub_commands
+
+ *sub_commands* formalizes the notion of a "family" of commands,
+ e.g. ``install`` as the parent with sub-commands ``install_lib``,
+ ``install_headers``, etc. The parent of a family of commands defines
+ *sub_commands* as a class attribute; it's a list of 2-tuples ``(command_name,
+ predicate)``, with *command_name* a string and *predicate* a function, a
+ string or ``None``. *predicate* is a method of the parent command that
+ determines whether the corresponding command is applicable in the current
+ situation. (E.g. ``install_headers`` is only applicable if we have any C
+ header files to install.) If *predicate* is ``None``, that command is always
+ applicable.
+
+ *sub_commands* is usually defined at the *end* of a class, because
+ predicates can be methods of the class, so they must already have been
+ defined. The canonical example is the :command:`install` command.
+
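+Putting these pieces together, a minimal sketch of the hypothetical
+``peel_banana`` command mentioned above might look like this (the option and its
+behaviour are purely illustrative)::
+
+ from distutils.cmd import Command
+
+ class peel_banana(Command):
+
+     description = "peel a banana (illustrative example)"
+     user_options = [('ripeness=', None, 'how ripe the banana should be')]
+
+     def initialize_options(self):
+         self.ripeness = None               # defaults only; no dependencies here
+
+     def finalize_options(self):
+         if self.ripeness is None:
+             self.ripeness = 'ripe'         # option dependencies are resolved here
+
+     def run(self):
+         self.announce('peeling a %s banana' % self.ripeness)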
+
+:mod:`distutils.command` --- Individual Distutils commands
+==========================================================
+
+.. module:: distutils.command
+ :synopsis: Contains one module for each standard Distutils command.
+
+
+.. % \subsubsection{Individual Distutils commands}
+.. % todo
+
+
+:mod:`distutils.command.bdist` --- Build a binary installer
+===========================================================
+
+.. module:: distutils.command.bdist
+ :synopsis: Build a binary installer for a package
+
+
+.. % todo
+
+
+:mod:`distutils.command.bdist_packager` --- Abstract base class for packagers
+=============================================================================
+
+.. module:: distutils.command.bdist_packager
+ :synopsis: Abstract base class for packagers
+
+
+.. % todo
+
+
+:mod:`distutils.command.bdist_dumb` --- Build a "dumb" installer
+================================================================
+
+.. module:: distutils.command.bdist_dumb
+ :synopsis: Build a "dumb" installer - a simple archive of files
+
+
+.. % todo
+
+
+:mod:`distutils.command.bdist_msi` --- Build a Microsoft Installer binary package
+=================================================================================
+
+.. module:: distutils.command.bdist_msi
+ :synopsis: Build a binary distribution as a Windows MSI file
+
+.. class:: bdist_msi
+
+.. deprecated:: 3.9
+ Use bdist_wheel (wheel packages) instead.
+
+ Builds a `Windows Installer`_ (.msi) binary package.
+
+ .. _Windows Installer: https://msdn.microsoft.com/en-us/library/cc185688(VS.85).aspx
+
+ In most cases, the ``bdist_msi`` installer is a better choice than the
+ ``bdist_wininst`` installer, because it provides better support for
+ Win64 platforms, allows administrators to perform non-interactive
+ installations, and allows installation through group policies.
+
+
+:mod:`distutils.command.bdist_rpm` --- Build a binary distribution as a Redhat RPM and SRPM
+===========================================================================================
+
+.. module:: distutils.command.bdist_rpm
+ :synopsis: Build a binary distribution as a Redhat RPM and SRPM
+
+
+.. % todo
+
+
+:mod:`distutils.command.bdist_wininst` --- Build a Windows installer
+====================================================================
+
+.. module:: distutils.command.bdist_wininst
+ :synopsis: Build a Windows installer
+
+.. deprecated:: 3.8
+ Use bdist_wheel (wheel packages) instead.
+
+
+.. % todo
+
+
+:mod:`distutils.command.sdist` --- Build a source distribution
+==============================================================
+
+.. module:: distutils.command.sdist
+ :synopsis: Build a source distribution
+
+
+.. % todo
+
+
+:mod:`distutils.command.build` --- Build all files of a package
+===============================================================
+
+.. module:: distutils.command.build
+ :synopsis: Build all files of a package
+
+
+.. % todo
+
+
+:mod:`distutils.command.build_clib` --- Build any C libraries in a package
+==========================================================================
+
+.. module:: distutils.command.build_clib
+ :synopsis: Build any C libraries in a package
+
+
+.. % todo
+
+
+:mod:`distutils.command.build_ext` --- Build any extensions in a package
+========================================================================
+
+.. module:: distutils.command.build_ext
+ :synopsis: Build any extensions in a package
+
+
+.. % todo
+
+
+:mod:`distutils.command.build_py` --- Build the .py/.pyc files of a package
+===========================================================================
+
+.. module:: distutils.command.build_py
+ :synopsis: Build the .py/.pyc files of a package
+
+
+.. class:: build_py
+
+
+:mod:`distutils.command.build_scripts` --- Build the scripts of a package
+=========================================================================
+
+.. module:: distutils.command.build_scripts
+ :synopsis: Build the scripts of a package
+
+
+.. % todo
+
+
+:mod:`distutils.command.clean` --- Clean a package build area
+=============================================================
+
+.. module:: distutils.command.clean
+ :synopsis: Clean a package build area
+
+This command removes the temporary files created by :command:`build`
+and its subcommands, like intermediary compiled object files. With
+the ``--all`` option, the complete build directory will be removed.
+
+Extension modules built :ref:`in place <distutils-build-ext-inplace>`
+will not be cleaned, as they are not in the build directory.
+
+
+:mod:`distutils.command.config` --- Perform package configuration
+=================================================================
+
+.. module:: distutils.command.config
+ :synopsis: Perform package configuration
+
+
+.. % todo
+
+
+:mod:`distutils.command.install` --- Install a package
+======================================================
+
+.. module:: distutils.command.install
+ :synopsis: Install a package
+
+
+.. % todo
+
+
+:mod:`distutils.command.install_data` --- Install data files from a package
+===========================================================================
+
+.. module:: distutils.command.install_data
+ :synopsis: Install data files from a package
+
+
+.. % todo
+
+
+:mod:`distutils.command.install_headers` --- Install C/C++ header files from a package
+======================================================================================
+
+.. module:: distutils.command.install_headers
+ :synopsis: Install C/C++ header files from a package
+
+
+.. % todo
+
+
+:mod:`distutils.command.install_lib` --- Install library files from a package
+=============================================================================
+
+.. module:: distutils.command.install_lib
+ :synopsis: Install library files from a package
+
+
+.. % todo
+
+
+:mod:`distutils.command.install_scripts` --- Install script files from a package
+================================================================================
+
+.. module:: distutils.command.install_scripts
+ :synopsis: Install script files from a package
+
+
+.. % todo
+
+
+:mod:`distutils.command.register` --- Register a module with the Python Package Index
+=====================================================================================
+
+.. module:: distutils.command.register
+ :synopsis: Register a module with the Python Package Index
+
+
+The ``register`` command registers the package with the Python Package Index.
+This is described in more detail in :pep:`301`.
+
+.. % todo
+
+
+:mod:`distutils.command.check` --- Check the meta-data of a package
+===================================================================
+
+.. module:: distutils.command.check
+ :synopsis: Check the meta-data of a package
+
+
+The ``check`` command performs some tests on the meta-data of a package.
+For example, it verifies that all required meta-data are provided as
+the arguments passed to the :func:`~distutils.core.setup` function.
+
+.. % todo
diff --git a/docs/deprecated/distutils/builtdist.rst b/docs/deprecated/distutils/builtdist.rst
new file mode 100644
index 0000000..e032c03
--- /dev/null
+++ b/docs/deprecated/distutils/builtdist.rst
@@ -0,0 +1,479 @@
+.. _built-dist:
+
+****************************
+Creating Built Distributions
+****************************
+
+.. include:: ./_setuptools_disclaimer.rst
+
+A "built distribution" is what you're probably used to thinking of either as a
+"binary package" or an "installer" (depending on your background). It's not
+necessarily binary, though, because it might contain only Python source code
+and/or byte-code; and we don't call it a package, because that word is already
+spoken for in Python. (And "installer" is a term specific to the world of
+mainstream desktop systems.)
+
+A built distribution is how you make life as easy as possible for installers of
+your module distribution: for users of RPM-based Linux systems, it's a binary
+RPM; for Windows users, it's an executable installer; for Debian-based Linux
+users, it's a Debian package; and so forth. Obviously, no one person will be
+able to create built distributions for every platform under the sun, so the
+Distutils are designed to enable module developers to concentrate on their
+specialty---writing code and creating source distributions---while an
+intermediary species called *packagers* springs up to turn source distributions
+into built distributions for as many platforms as there are packagers.
+
+Of course, the module developer could be their own packager; or the packager could
+be a volunteer "out there" somewhere who has access to a platform which the
+original developer does not; or it could be software periodically grabbing new
+source distributions and turning them into built distributions for as many
+platforms as the software has access to. Regardless of who they are, a packager
+uses the setup script and the :command:`bdist` command family to generate built
+distributions.
+
+As a simple example, if I run the following command in the Distutils source
+tree::
+
+ python setup.py bdist
+
+then the Distutils builds my module distribution (the Distutils itself in this
+case), does a "fake" installation (also in the :file:`build` directory), and
+creates the default type of built distribution for my platform. The default
+format for built distributions is a "dumb" tar file on Unix, and a simple
+executable installer on Windows. (That tar file is considered "dumb" because it
+has to be unpacked in a specific location to work.)
+
+Thus, the above command on a Unix system creates
+:file:`Distutils-1.0.{plat}.tar.gz`; unpacking this tarball from the right place
+installs the Distutils just as though you had downloaded the source distribution
+and run ``python setup.py install``. (The "right place" is either the root of
+the filesystem or Python's :file:`{prefix}` directory, depending on the options
+given to the :command:`bdist_dumb` command; the default is to make dumb
+distributions relative to :file:`{prefix}`.)
+
+Obviously, for pure Python distributions, this isn't any simpler than just
+running ``python setup.py install``\ ---but for non-pure distributions, which
+include extensions that would need to be compiled, it can mean the difference
+between someone being able to use your extensions or not. And creating "smart"
+built distributions, such as an RPM package or an executable installer for
+Windows, is far more convenient for users even if your distribution doesn't
+include any extensions.
+
+The :command:`bdist` command has a :option:`!--formats` option, similar to the
+:command:`sdist` command, which you can use to select the types of built
+distribution to generate: for example, ::
+
+ python setup.py bdist --format=zip
+
+would, when run on a Unix system, create
+:file:`Distutils-1.0.{plat}.zip`\ ---again, this archive would be unpacked
+from the root directory to install the Distutils.
+
+The available formats for built distributions are:
+
++-------------+------------------------------+---------+
+| Format | Description | Notes |
++=============+==============================+=========+
+| ``gztar`` | gzipped tar file | \(1) |
+| | (:file:`.tar.gz`) | |
++-------------+------------------------------+---------+
+| ``bztar`` | bzipped tar file | |
+| | (:file:`.tar.bz2`) | |
++-------------+------------------------------+---------+
+| ``xztar`` | xzipped tar file | |
+| | (:file:`.tar.xz`) | |
++-------------+------------------------------+---------+
+| ``ztar`` | compressed tar file | \(3) |
+| | (:file:`.tar.Z`) | |
++-------------+------------------------------+---------+
+| ``tar`` | tar file (:file:`.tar`) | |
++-------------+------------------------------+---------+
+| ``zip`` | zip file (:file:`.zip`) | (2),(4) |
++-------------+------------------------------+---------+
+| ``rpm`` | RPM | \(5) |
++-------------+------------------------------+---------+
+| ``pkgtool`` | Solaris :program:`pkgtool` | |
++-------------+------------------------------+---------+
+| ``sdux`` | HP-UX :program:`swinstall` | |
++-------------+------------------------------+---------+
+| ``wininst`` | self-extracting ZIP file for | \(4) |
+| | Windows | |
++-------------+------------------------------+---------+
+| ``msi`` | Microsoft Installer. | |
++-------------+------------------------------+---------+
+
+.. versionchanged:: 3.5
+ Added support for the ``xztar`` format.
+
+
+Notes:
+
+(1)
+ default on Unix
+
+(2)
+ default on Windows
+
+(3)
+ requires external :program:`compress` utility.
+
+(4)
+ requires either external :program:`zip` utility or :mod:`zipfile` module (part
+ of the standard Python library since Python 1.6)
+
+(5)
+ requires external :program:`rpm` utility, version 3.0.4 or better (use ``rpm
+ --version`` to find out which version you have)
+
+You don't have to use the :command:`bdist` command with the :option:`!--formats`
+option; you can also use the command that directly implements the format you're
+interested in. Some of these :command:`bdist` "sub-commands" actually generate
+several similar formats; for instance, the :command:`bdist_dumb` command
+generates all the "dumb" archive formats (``tar``, ``gztar``, ``bztar``,
+``xztar``, ``ztar``, and ``zip``), and :command:`bdist_rpm` generates both
+binary and source RPMs. The :command:`bdist` sub-commands, and the formats
+generated by each, are:
+
++--------------------------+-------------------------------------+
+| Command | Formats |
++==========================+=====================================+
+| :command:`bdist_dumb` | tar, gztar, bztar, xztar, ztar, zip |
++--------------------------+-------------------------------------+
+| :command:`bdist_rpm` | rpm, srpm |
++--------------------------+-------------------------------------+
+| :command:`bdist_wininst` | wininst |
++--------------------------+-------------------------------------+
+| :command:`bdist_msi` | msi |
++--------------------------+-------------------------------------+
+
+.. note::
+ bdist_wininst is deprecated since Python 3.8.
+
+.. note::
+ bdist_msi is deprecated since Python 3.9.
+
+The following sections give details on the individual :command:`bdist_\*`
+commands.
+
+
+.. .. _creating-dumb:
+
+.. Creating dumb built distributions
+.. =================================
+
+.. XXX Need to document absolute vs. prefix-relative packages here, but first
+ I have to implement it!
+
+
+.. _creating-rpms:
+
+Creating RPM packages
+=====================
+
+The RPM format is used by many popular Linux distributions, including Red Hat,
+SuSE, and Mandrake. If one of these (or any of the other RPM-based Linux
+distributions) is your usual environment, creating RPM packages for other users
+of that same distribution is trivial. Depending on the complexity of your module
+distribution and differences between Linux distributions, you may also be able
+to create RPMs that work on different RPM-based distributions.
+
+The usual way to create an RPM of your module distribution is to run the
+:command:`bdist_rpm` command::
+
+ python setup.py bdist_rpm
+
+or the :command:`bdist` command with the :option:`!--format` option::
+
+ python setup.py bdist --formats=rpm
+
+The former allows you to specify RPM-specific options; the latter allows you to
+easily specify multiple formats in one run. If you need to do both, you can
+explicitly specify multiple :command:`bdist_\*` commands and their options::
+
+ python setup.py bdist_rpm --packager="John Doe <jdoe@example.org>" \
+ bdist_wininst --target-version="2.0"
+
+Creating RPM packages is driven by a :file:`.spec` file, much as using the
+Distutils is driven by the setup script. To make your life easier, the
+:command:`bdist_rpm` command normally creates a :file:`.spec` file based on the
+information you supply in the setup script, on the command line, and in any
+Distutils configuration files. Various options and sections in the
+:file:`.spec` file are derived from options in the setup script as follows:
+
++------------------------------------------+----------------------------------------------+
+| RPM :file:`.spec` file option or section | Distutils setup script option |
++==========================================+==============================================+
+| Name | ``name`` |
++------------------------------------------+----------------------------------------------+
+| Summary (in preamble) | ``description`` |
++------------------------------------------+----------------------------------------------+
+| Version | ``version`` |
++------------------------------------------+----------------------------------------------+
+| Vendor | ``author`` and ``author_email``, |
+|                                          | or ``maintainer`` and                        |
+| | ``maintainer_email`` |
++------------------------------------------+----------------------------------------------+
+| Copyright | ``license`` |
++------------------------------------------+----------------------------------------------+
+| Url | ``url`` |
++------------------------------------------+----------------------------------------------+
+| %description (section) | ``long_description`` |
++------------------------------------------+----------------------------------------------+
+
+Additionally, there are many options in :file:`.spec` files that don't have
+corresponding options in the setup script. Most of these are handled through
+options to the :command:`bdist_rpm` command as follows:
+
++-------------------------------+-----------------------------+-------------------------+
+| RPM :file:`.spec` file option | :command:`bdist_rpm` option | default value |
+| or section | | |
++===============================+=============================+=========================+
+| Release | ``release`` | "1" |
++-------------------------------+-----------------------------+-------------------------+
+| Group | ``group`` | "Development/Libraries" |
++-------------------------------+-----------------------------+-------------------------+
+| Vendor | ``vendor`` | (see above) |
++-------------------------------+-----------------------------+-------------------------+
+| Packager | ``packager`` | (none) |
++-------------------------------+-----------------------------+-------------------------+
+| Provides | ``provides`` | (none) |
++-------------------------------+-----------------------------+-------------------------+
+| Requires | ``requires`` | (none) |
++-------------------------------+-----------------------------+-------------------------+
+| Conflicts | ``conflicts`` | (none) |
++-------------------------------+-----------------------------+-------------------------+
+| Obsoletes | ``obsoletes`` | (none) |
++-------------------------------+-----------------------------+-------------------------+
+| Distribution | ``distribution_name`` | (none) |
++-------------------------------+-----------------------------+-------------------------+
+| BuildRequires | ``build_requires`` | (none) |
++-------------------------------+-----------------------------+-------------------------+
+| Icon | ``icon`` | (none) |
++-------------------------------+-----------------------------+-------------------------+
+
+Obviously, supplying even a few of these options on the command-line would be
+tedious and error-prone, so it's usually best to put them in the setup
+configuration file, :file:`setup.cfg`\ ---see section :ref:`setup-config`. If
+you distribute or package many Python module distributions, you might want to
+put options that apply to all of them in your personal Distutils configuration
+file (:file:`~/.pydistutils.cfg`). If you want to temporarily disable
+this file, you can pass the :option:`!--no-user-cfg` option to :file:`setup.py`.
+
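+If you prefer to keep such defaults in the setup script itself rather than in a
+configuration file, the ``options`` keyword of :func:`~distutils.core.setup`
+accepts a dictionary mapping command names to option values.  A minimal sketch
+(the option names come from the table above; the values are only examples)::
+
+   from distutils.core import setup
+
+   setup(name='foo',
+         version='1.0',
+         py_modules=['foo'],
+         # Default values for bdist_rpm options; configuration files and the
+         # command line can still override them.
+         options={
+             'bdist_rpm': {
+                 'release': '1',
+                 'packager': 'Jane Doe <jdoe@example.org>',
+                 'doc_files': ['CHANGES.txt', 'README.txt'],
+             },
+         })
+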
+There are three steps to building a binary RPM package, all of which are
+handled automatically by the Distutils:
+
+#. create a :file:`.spec` file, which describes the package (analogous to the
+ Distutils setup script; in fact, much of the information in the setup script
+ winds up in the :file:`.spec` file)
+
+#. create the source RPM
+
+#. create the "binary" RPM (which may or may not contain binary code, depending
+ on whether your module distribution contains Python extensions)
+
+Normally, RPM bundles the last two steps together; when you use the Distutils,
+all three steps are typically bundled together.
+
+If you wish, you can separate these three steps. You can use the
+:option:`!--spec-only` option to make :command:`bdist_rpm` just create the
+:file:`.spec` file and exit; in this case, the :file:`.spec` file will be
+written to the "distribution directory"---normally :file:`dist/`, but
+customizable with the :option:`!--dist-dir` option. (Normally, the :file:`.spec`
+file winds up deep in the "build tree," in a temporary directory created by
+:command:`bdist_rpm`.)
+
+.. % \XXX{this isn't implemented yet---is it needed?!}
+.. % You can also specify a custom \file{.spec} file with the
+.. % \longprogramopt{spec-file} option; used in conjunction with
+.. % \longprogramopt{spec-only}, this gives you an opportunity to customize
+.. % the \file{.spec} file manually:
+.. %
+.. % \ begin{verbatim}
+.. % > python setup.py bdist_rpm --spec-only
+.. % # ...edit dist/FooBar-1.0.spec
+.. % > python setup.py bdist_rpm --spec-file=dist/FooBar-1.0.spec
+.. % \ end{verbatim}
+.. %
+.. % (Although a better way to do this is probably to override the standard
+.. % \command{bdist\_rpm} command with one that writes whatever else you want
+.. % to the \file{.spec} file.)
+
+
+.. _creating-wininst:
+
+Creating Windows Installers
+===========================
+
+.. warning::
+ bdist_wininst is deprecated since Python 3.8.
+
+.. warning::
+ bdist_msi is deprecated since Python 3.9.
+
+Executable installers are the natural format for binary distributions on
+Windows. They display a nice graphical user interface, display some information
+about the module distribution to be installed taken from the metadata in the
+setup script, let the user select a few options, and start or cancel the
+installation.
+
+Since the metadata is taken from the setup script, creating Windows installers
+is usually as easy as running::
+
+ python setup.py bdist_wininst
+
+or the :command:`bdist` command with the :option:`!--formats` option::
+
+ python setup.py bdist --formats=wininst
+
+If you have a pure module distribution (only containing pure Python modules and
+packages), the resulting installer will be version independent and have a name
+like :file:`foo-1.0.win32.exe`. Note that creating ``wininst`` binary
+distributions is only supported on Windows systems.
+
+If you have a non-pure distribution, the extensions can only be created on a
+Windows platform, and will be Python version dependent. The installer filename
+will reflect this and now has the form :file:`foo-1.0.win32-py2.0.exe`. You
+have to create a separate installer for every Python version you want to
+support.
+
+The installer will try to compile pure modules into :term:`bytecode` after installation
+on the target system in normal and optimizing mode. If you don't want this to
+happen for some reason, you can run the :command:`bdist_wininst` command with
+the :option:`!--no-target-compile` and/or the :option:`!--no-target-optimize`
+option.
+
+By default the installer will display the cool "Python Powered" logo when it is
+run, but you can also supply your own 152x261 bitmap which must be a Windows
+:file:`.bmp` file with the :option:`!--bitmap` option.
+
+The installer will also display a large title on the desktop background window
+when it is run, which is constructed from the name of your distribution and the
+version number. This can be changed to another text by using the
+:option:`!--title` option.
+
+The installer file will be written to the "distribution directory" --- normally
+:file:`dist/`, but customizable with the :option:`!--dist-dir` option.
+
+.. _cross-compile-windows:
+
+Cross-compiling on Windows
+==========================
+
+Starting with Python 2.6, distutils is capable of cross-compiling between
+Windows platforms. In practice, this means that with the correct tools
+installed, you can use a 32bit version of Windows to create 64bit extensions
+and vice-versa.
+
+To build for an alternate platform, specify the :option:`!--plat-name` option
+to the build command. Valid values are currently 'win32', and 'win-amd64'.
+For example, on a 32bit version of Windows, you could execute::
+
+ python setup.py build --plat-name=win-amd64
+
+to build a 64bit version of your extension. The Windows Installers also
+support this option, so the command::
+
+ python setup.py build --plat-name=win-amd64 bdist_wininst
+
+would create a 64bit installation executable on your 32bit version of Windows.
+
+To cross-compile, you must download the Python source code and cross-compile
+Python itself for the platform you are targeting - it is not possible from a
+binary installation of Python (as the .lib and other files for other platforms
+are not included). In practice, this means the user of a 32 bit operating
+system will need to use Visual Studio 2008 to open the
+:file:`PCbuild/PCbuild.sln` solution in the Python source tree and build the
+"x64" configuration of the 'pythoncore' project before cross-compiling
+extensions is possible.
+
+Note that by default, Visual Studio 2008 does not install 64bit compilers or
+tools. You may need to reexecute the Visual Studio setup process and select
+these tools (using Control Panel->[Add/Remove] Programs is a convenient way to
+check or modify your existing install.)
+
+.. _postinstallation-script:
+
+The Postinstallation script
+---------------------------
+
+Starting with Python 2.3, a postinstallation script can be specified with the
+:option:`!--install-script` option. The basename of the script must be
+specified, and the script filename must also be listed in the scripts argument
+to the setup function.
+
+This script will be run at installation time on the target system after all the
+files have been copied, with ``argv[1]`` set to :option:`!-install`, and again at
+uninstallation time before the files are removed with ``argv[1]`` set to
+:option:`!-remove`.
+
+The installation script runs embedded in the Windows installer; all output
+(``sys.stdout``, ``sys.stderr``) is redirected into a buffer and will be
+displayed in the GUI after the script has finished.
+
+Some functions especially useful in this context are available as additional
+built-in functions in the installation script.
+
+
+.. function:: directory_created(path)
+ file_created(path)
+
+ These functions should be called when a directory or file is created by the
+   postinstall script at installation time. They register *path* with the
+ uninstaller, so that it will be removed when the distribution is uninstalled.
+ To be safe, directories are only removed if they are empty.
+
+
+.. function:: get_special_folder_path(csidl_string)
+
+ This function can be used to retrieve special folder locations on Windows like
+ the Start Menu or the Desktop. It returns the full path to the folder.
+ *csidl_string* must be one of the following strings::
+
+ "CSIDL_APPDATA"
+
+ "CSIDL_COMMON_STARTMENU"
+ "CSIDL_STARTMENU"
+
+ "CSIDL_COMMON_DESKTOPDIRECTORY"
+ "CSIDL_DESKTOPDIRECTORY"
+
+ "CSIDL_COMMON_STARTUP"
+ "CSIDL_STARTUP"
+
+ "CSIDL_COMMON_PROGRAMS"
+ "CSIDL_PROGRAMS"
+
+ "CSIDL_FONTS"
+
+ If the folder cannot be retrieved, :exc:`OSError` is raised.
+
+ Which folders are available depends on the exact Windows version, and probably
+ also the configuration. For details refer to Microsoft's documentation of the
+ :c:func:`SHGetSpecialFolderPath` function.
+
+
+.. function:: create_shortcut(target, description, filename[, arguments[, workdir[, iconpath[, iconindex]]]])
+
+ This function creates a shortcut. *target* is the path to the program to be
+ started by the shortcut. *description* is the description of the shortcut.
+ *filename* is the title of the shortcut that the user will see. *arguments*
+ specifies the command line arguments, if any. *workdir* is the working directory
+ for the program. *iconpath* is the file containing the icon for the shortcut,
+ and *iconindex* is the index of the icon in the file *iconpath*. Again, for
+ details consult the Microsoft documentation for the :class:`IShellLink`
+ interface.
+
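+For illustration, a hypothetical post-installation script (listed in the
+``scripts`` argument and passed via :option:`!--install-script`) might use
+these helpers roughly as follows; note that the functions are injected by the
+installer as built-ins, so they are not imported::
+
+   # postinstall.py (hypothetical example)
+   import os
+   import sys
+
+   if sys.argv[1] == '-install':
+       # Create a Start Menu shortcut and register it for removal on uninstall.
+       start_menu = get_special_folder_path("CSIDL_PROGRAMS")
+       shortcut = os.path.join(start_menu, "Foo.lnk")
+       create_shortcut(os.path.join(sys.prefix, "python.exe"),
+                       "Run the foo package", shortcut)
+       file_created(shortcut)
+   elif sys.argv[1] == '-remove':
+       # Files registered with file_created() are removed automatically,
+       # so there is nothing extra to clean up here.
+       pass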
+
+Vista User Access Control (UAC)
+===============================
+
+Starting with Python 2.6, bdist_wininst supports a :option:`!--user-access-control`
+option. The default is 'none' (meaning no UAC handling is done), and other
+valid values are 'auto' (meaning prompt for UAC elevation if Python was
+installed for all users) and 'force' (meaning always prompt for elevation).
+
+.. note::
+ bdist_wininst is deprecated since Python 3.8.
+
+.. note::
+ bdist_msi is deprecated since Python 3.9.
diff --git a/docs/deprecated/distutils/commandref.rst b/docs/deprecated/distutils/commandref.rst
new file mode 100644
index 0000000..0f6fe2a
--- /dev/null
+++ b/docs/deprecated/distutils/commandref.rst
@@ -0,0 +1,106 @@
+.. _reference:
+
+*****************
+Command Reference
+*****************
+
+.. include:: ./_setuptools_disclaimer.rst
+
+.. % \section{Building modules: the \protect\command{build} command family}
+.. % \label{build-cmds}
+.. % \subsubsection{\protect\command{build}}
+.. % \label{build-cmd}
+.. % \subsubsection{\protect\command{build\_py}}
+.. % \label{build-py-cmd}
+.. % \subsubsection{\protect\command{build\_ext}}
+.. % \label{build-ext-cmd}
+.. % \subsubsection{\protect\command{build\_clib}}
+.. % \label{build-clib-cmd}
+
+
+.. _install-cmd:
+
+Installing modules: the :command:`install` command family
+=========================================================
+
+The install command ensures that the build commands have been run and then runs
+the subcommands :command:`install_lib`, :command:`install_data` and
+:command:`install_scripts`.
+
+.. % \subsubsection{\protect\command{install\_lib}}
+.. % \label{install-lib-cmd}
+
+
+.. _install-data-cmd:
+
+:command:`install_data`
+-----------------------
+
+This command installs all data files provided with the distribution.
+
+
+.. _install-scripts-cmd:
+
+:command:`install_scripts`
+--------------------------
+
+This command installs all (Python) scripts in the distribution.
+
+.. % \subsection{Cleaning up: the \protect\command{clean} command}
+.. % \label{clean-cmd}
+
+
+.. _sdist-cmd:
+
+Creating a source distribution: the :command:`sdist` command
+============================================================
+
+.. XXX fragment moved down from above: needs context!
+
+The manifest template commands are:
+
++-------------------------------------------+-----------------------------------------------+
+| Command | Description |
++===========================================+===============================================+
+| :command:`include pat1 pat2 ...` | include all files matching any of the listed |
+| | patterns |
++-------------------------------------------+-----------------------------------------------+
+| :command:`exclude pat1 pat2 ...` | exclude all files matching any of the listed |
+| | patterns |
++-------------------------------------------+-----------------------------------------------+
+| :command:`recursive-include dir pat1 pat2 | include all files under *dir* matching any of |
+| ...` | the listed patterns |
++-------------------------------------------+-----------------------------------------------+
+| :command:`recursive-exclude dir pat1 pat2 | exclude all files under *dir* matching any of |
+| ...` | the listed patterns |
++-------------------------------------------+-----------------------------------------------+
+| :command:`global-include pat1 pat2 ...` | include all files anywhere in the source tree |
+|                                           | matching any of the listed patterns           |
++-------------------------------------------+-----------------------------------------------+
+| :command:`global-exclude pat1 pat2 ...` | exclude all files anywhere in the source tree |
+|                                           | matching any of the listed patterns           |
++-------------------------------------------+-----------------------------------------------+
+| :command:`prune dir` | exclude all files under *dir* |
++-------------------------------------------+-----------------------------------------------+
+| :command:`graft dir` | include all files under *dir* |
++-------------------------------------------+-----------------------------------------------+
+
+The patterns here are Unix-style "glob" patterns: ``*`` matches any sequence of
+regular filename characters, ``?`` matches any single regular filename
+character, and ``[range]`` matches any of the characters in *range* (e.g.,
+``a-z``, ``a-zA-Z``, ``a-f0-9_.``). The definition of "regular filename
+character" is platform-specific: on Unix it is anything except slash; on Windows
+anything except backslash or colon.
+
+.. XXX Windows support not there yet
+
+.. % \section{Creating a built distribution: the
+.. % \protect\command{bdist} command family}
+.. % \label{bdist-cmds}
+
+.. % \subsection{\protect\command{bdist}}
+.. % \subsection{\protect\command{bdist\_dumb}}
+.. % \subsection{\protect\command{bdist\_rpm}}
+.. % \subsection{\protect\command{bdist\_wininst}}
+
+
diff --git a/docs/deprecated/distutils/configfile.rst b/docs/deprecated/distutils/configfile.rst
new file mode 100644
index 0000000..328936f
--- /dev/null
+++ b/docs/deprecated/distutils/configfile.rst
@@ -0,0 +1,144 @@
+.. _setup-config:
+
+************************************
+Writing the Setup Configuration File
+************************************
+
+.. include:: ./_setuptools_disclaimer.rst
+
+Often, it's not possible to write down everything needed to build a distribution
+*a priori*: you may need to get some information from the user, or from the
+user's system, in order to proceed. As long as that information is fairly
+simple---a list of directories to search for C header files or libraries, for
+example---then providing a configuration file, :file:`setup.cfg`, for users to
+edit is a cheap and easy way to solicit it. Configuration files also let you
+provide default values for any command option, which the installer can then
+override either on the command-line or by editing the config file.
+
+The setup configuration file is a useful middle-ground between the setup
+script---which, ideally, would be opaque to installers [#]_---and the command-line to
+the setup script, which is outside of your control and entirely up to the
+installer. In fact, :file:`setup.cfg` (and any other Distutils configuration
+files present on the target system) are processed after the contents of the
+setup script, but before the command-line. This has several useful
+consequences:
+
+.. % (If you have more advanced needs, such as determining which extensions
+.. % to build based on what capabilities are present on the target system,
+.. % then you need the Distutils ``auto-configuration'' facility. This
+.. % started to appear in Distutils 0.9 but, as of this writing, isn't mature
+.. % or stable enough yet for real-world use.)
+
+* installers can override some of what you put in :file:`setup.py` by editing
+ :file:`setup.cfg`
+
+* you can provide non-standard defaults for options that are not easily set in
+ :file:`setup.py`
+
+* installers can override anything in :file:`setup.cfg` using the command-line
+ options to :file:`setup.py`
+
+The basic syntax of the configuration file is simple:
+
+.. code-block:: ini
+
+ [command]
+ option=value
+ ...
+
+where *command* is one of the Distutils commands (e.g. :command:`build_py`,
+:command:`install`), and *option* is one of the options that command supports.
+Any number of options can be supplied for each command, and any number of
+command sections can be included in the file. Blank lines are ignored, as are
+comments, which run from a ``'#'`` character until the end of the line. Long
+option values can be split across multiple lines simply by indenting the
+continuation lines.
+
+You can find out the list of options supported by a particular command with the
+universal :option:`!--help` option, e.g.
+
+.. code-block:: shell-session
+
+ $ python setup.py --help build_ext
+ [...]
+ Options for 'build_ext' command:
+ --build-lib (-b) directory for compiled extension modules
+ --build-temp (-t) directory for temporary files (build by-products)
+ --inplace (-i) ignore build-lib and put compiled extensions into the
+ source directory alongside your pure Python modules
+ --include-dirs (-I) list of directories to search for header files
+ --define (-D) C preprocessor macros to define
+ --undef (-U) C preprocessor macros to undefine
+ --swig-opts list of SWIG command line options
+ [...]
+
+Note that an option spelled :option:`!--foo-bar` on the command-line is spelled
+``foo_bar`` in configuration files.
+
+.. _distutils-build-ext-inplace:
+
+For example, say you want your extensions to be built "in-place"---that is, you
+have an extension ``pkg.ext``, and you want the compiled extension file
+(:file:`ext.so` on Unix, say) to be put in the same source directory as your
+pure Python modules ``pkg.mod1`` and ``pkg.mod2``. You can always use the
+:option:`!--inplace` option on the command-line to ensure this:
+
+.. code-block:: sh
+
+ python setup.py build_ext --inplace
+
+But this requires that you always specify the :command:`build_ext` command
+explicitly, and remember to provide :option:`!--inplace`. An easier way is to
+"set and forget" this option, by encoding it in :file:`setup.cfg`, the
+configuration file for this distribution:
+
+.. code-block:: ini
+
+ [build_ext]
+ inplace=1
+
+This will affect all builds of this module distribution, whether or not you
+explicitly specify :command:`build_ext`. If you include :file:`setup.cfg` in
+your source distribution, it will also affect end-user builds---which is
+probably a bad idea for this option, since always building extensions in-place
+would break installation of the module distribution. In certain peculiar cases,
+though, modules are built right in their installation directory, so this is
+conceivably a useful ability. (Distributing extensions that expect to be built
+in their installation directory is almost always a bad idea, though.)
+
+Another example: certain commands take a lot of options that don't change from
+run to run; for example, :command:`bdist_rpm` needs to know everything required
+to generate a "spec" file for creating an RPM distribution. Some of this
+information comes from the setup script, and some is automatically generated by
+the Distutils (such as the list of files installed). But some of it has to be
+supplied as options to :command:`bdist_rpm`, which would be very tedious to do
+on the command-line for every run. Hence, here is a snippet from the Distutils'
+own :file:`setup.cfg`:
+
+.. code-block:: ini
+
+ [bdist_rpm]
+ release = 1
+ packager = Greg Ward <gward@python.net>
+ doc_files = CHANGES.txt
+ README.txt
+ USAGE.txt
+ doc/
+ examples/
+
+Note that the ``doc_files`` option is simply a whitespace-separated string
+split across multiple lines for readability.
+
+
+.. seealso::
+
+ :ref:`inst-config-syntax` in "Installing Python Modules"
+ More information on the configuration files is available in the manual for
+ system administrators.
+
+
+.. rubric:: Footnotes
+
+.. [#] This ideal probably won't be achieved until auto-configuration is fully
+ supported by the Distutils.
+
diff --git a/docs/deprecated/distutils/examples.rst b/docs/deprecated/distutils/examples.rst
new file mode 100644
index 0000000..d098465
--- /dev/null
+++ b/docs/deprecated/distutils/examples.rst
@@ -0,0 +1,340 @@
+.. _distutils_examples:
+
+******************
+Distutils Examples
+******************
+
+.. include:: ./_setuptools_disclaimer.rst
+
+This chapter provides a number of basic examples to help get started with
+distutils. Additional information about using distutils can be found in the
+Distutils Cookbook.
+
+
+.. seealso::
+
+ `Distutils Cookbook <https://wiki.python.org/moin/Distutils/Cookbook>`_
+ Collection of recipes showing how to achieve more control over distutils.
+
+
+.. _pure-mod:
+
+Pure Python distribution (by module)
+====================================
+
+If you're just distributing a couple of modules, especially if they don't live
+in a particular package, you can specify them individually using the
+``py_modules`` option in the setup script.
+
+In the simplest case, you'll have two files to worry about: a setup script and
+the single module you're distributing, :file:`foo.py` in this example::
+
+ <root>/
+ setup.py
+ foo.py
+
+(In all diagrams in this section, *<root>* will refer to the distribution root
+directory.) A minimal setup script to describe this situation would be::
+
+ from distutils.core import setup
+ setup(name='foo',
+ version='1.0',
+ py_modules=['foo'],
+ )
+
+Note that the name of the distribution is specified independently with the
+``name`` option, and there's no rule that says it has to be the same as
+the name of the sole module in the distribution (although that's probably a good
+convention to follow). However, the distribution name is used to generate
+filenames, so you should stick to letters, digits, underscores, and hyphens.
+
+Since ``py_modules`` is a list, you can of course specify multiple
+modules, e.g. if you're distributing modules ``foo`` and ``bar``, your
+setup might look like this::
+
+ <root>/
+ setup.py
+ foo.py
+ bar.py
+
+and the setup script might be ::
+
+ from distutils.core import setup
+ setup(name='foobar',
+ version='1.0',
+ py_modules=['foo', 'bar'],
+ )
+
+You can put module source files into another directory, but if you have enough
+modules to do that, it's probably easier to specify modules by package rather
+than listing them individually.
+
+
+.. _pure-pkg:
+
+Pure Python distribution (by package)
+=====================================
+
+If you have more than a couple of modules to distribute, especially if they are
+in multiple packages, it's probably easier to specify whole packages rather than
+individual modules. This works even if your modules are not in a package; you
+can just tell the Distutils to process modules from the root package, and that
+works the same as any other package (except that you don't have to have an
+:file:`__init__.py` file).
+
+The setup script from the last example could also be written as ::
+
+ from distutils.core import setup
+ setup(name='foobar',
+ version='1.0',
+ packages=[''],
+ )
+
+(The empty string stands for the root package.)
+
+If those two files are moved into a subdirectory, but remain in the root
+package, e.g.::
+
+ <root>/
+ setup.py
+           src/
+                   foo.py
+                   bar.py
+
+then you would still specify the root package, but you have to tell the
+Distutils where source files in the root package live::
+
+ from distutils.core import setup
+ setup(name='foobar',
+ version='1.0',
+ package_dir={'': 'src'},
+ packages=[''],
+ )
+
+More typically, though, you will want to distribute multiple modules in the same
+package (or in sub-packages). For example, if the ``foo`` and ``bar``
+modules belong in package ``foobar``, one way to lay out your source tree is
+::
+
+ <root>/
+ setup.py
+ foobar/
+ __init__.py
+ foo.py
+ bar.py
+
+This is in fact the default layout expected by the Distutils, and the one that
+requires the least work to describe in your setup script::
+
+ from distutils.core import setup
+ setup(name='foobar',
+ version='1.0',
+ packages=['foobar'],
+ )
+
+If you want to put modules in directories not named for their package, then you
+need to use the ``package_dir`` option again. For example, if the
+:file:`src` directory holds modules in the ``foobar`` package::
+
+ <root>/
+ setup.py
+ src/
+ __init__.py
+ foo.py
+ bar.py
+
+an appropriate setup script would be ::
+
+ from distutils.core import setup
+ setup(name='foobar',
+ version='1.0',
+ package_dir={'foobar': 'src'},
+ packages=['foobar'],
+ )
+
+Or, you might put modules from your main package right in the distribution
+root::
+
+ <root>/
+ setup.py
+ __init__.py
+ foo.py
+ bar.py
+
+in which case your setup script would be ::
+
+ from distutils.core import setup
+ setup(name='foobar',
+ version='1.0',
+ package_dir={'foobar': ''},
+ packages=['foobar'],
+ )
+
+(The empty string also stands for the current directory.)
+
+If you have sub-packages, they must be explicitly listed in ``packages``,
+but any entries in ``package_dir`` automatically extend to sub-packages.
+(In other words, the Distutils does *not* scan your source tree, trying to
+figure out which directories correspond to Python packages by looking for
+:file:`__init__.py` files.) Thus, if the default layout grows a sub-package::
+
+ <root>/
+ setup.py
+ foobar/
+ __init__.py
+ foo.py
+ bar.py
+ subfoo/
+ __init__.py
+ blah.py
+
+then the corresponding setup script would be ::
+
+ from distutils.core import setup
+ setup(name='foobar',
+ version='1.0',
+ packages=['foobar', 'foobar.subfoo'],
+ )
+
+
+.. _single-ext:
+
+Single extension module
+=======================
+
+Extension modules are specified using the ``ext_modules`` option.
+``package_dir`` has no effect on where extension source files are found;
+it only affects the source for pure Python modules. The simplest case, a
+single extension module in a single C source file, is::
+
+ <root>/
+ setup.py
+ foo.c
+
+If the ``foo`` extension belongs in the root package, the setup script for
+this could be ::
+
+ from distutils.core import setup
+ from distutils.extension import Extension
+ setup(name='foobar',
+ version='1.0',
+ ext_modules=[Extension('foo', ['foo.c'])],
+ )
+
+If the extension actually belongs in a package, say ``foopkg``, then with
+exactly the same source tree layout, this extension can be put in the
+``foopkg`` package simply by changing the name of the extension::
+
+ from distutils.core import setup
+ from distutils.extension import Extension
+ setup(name='foobar',
+ version='1.0',
+ ext_modules=[Extension('foopkg.foo', ['foo.c'])],
+ )
+
+Checking a package
+==================
+
+The ``check`` command allows you to verify whether your package meta-data
+meets the minimum requirements for building a distribution.
+
+To run it, just call it using your :file:`setup.py` script. If something is
+missing, ``check`` will display a warning.
+
+Let's take an example with a simple script::
+
+ from distutils.core import setup
+
+ setup(name='foobar')
+
+Running the ``check`` command will display some warnings:
+
+.. code-block:: shell-session
+
+ $ python setup.py check
+ running check
+ warning: check: missing required meta-data: version, url
+ warning: check: missing meta-data: either (author and author_email) or
+ (maintainer and maintainer_email) should be supplied
+
+
+If you use the reStructuredText syntax in the ``long_description`` field and
+`docutils`_ is installed, you can check whether the syntax is fine with the
+``check`` command, using the ``restructuredtext`` option.
+
+For example, if the :file:`setup.py` script is changed like this::
+
+ from distutils.core import setup
+
+ desc = """\
+ My description
+ ==============
+
+ This is the description of the ``foobar`` package.
+ """
+
+ setup(name='foobar', version='1', author='tarek',
+ author_email='tarek@ziade.org',
+ url='http://example.com', long_description=desc)
+
+Where the long description is broken, ``check`` will be able to detect it
+by using the :mod:`docutils` parser:
+
+.. code-block:: shell-session
+
+ $ python setup.py check --restructuredtext
+ running check
+ warning: check: Title underline too short. (line 2)
+ warning: check: Could not finish the parsing.
+
+Reading the metadata
+=====================
+
+The :func:`distutils.core.setup` function provides a command-line interface
+that allows you to query the metadata fields of a project through its
+``setup.py`` script:
+
+.. code-block:: shell-session
+
+ $ python setup.py --name
+ distribute
+
+This call reads the ``name`` metadata by running the
+:func:`distutils.core.setup` function. However, when a source or binary
+distribution is created with Distutils, the metadata fields are written
+in a static file called :file:`PKG-INFO`. When a Distutils-based project is
+installed in Python, the :file:`PKG-INFO` file is copied alongside the modules
+and packages of the distribution under :file:`NAME-VERSION-pyX.X.egg-info`,
+where ``NAME`` is the name of the project, ``VERSION`` its version as defined
+in the Metadata, and ``pyX.X`` the major and minor version of Python like
+``2.7`` or ``3.2``.
+
+You can read back this static file, by using the
+:class:`distutils.dist.DistributionMetadata` class and its
+:func:`~distutils.dist.DistributionMetadata.read_pkg_file` method::
+
+ >>> from distutils.dist import DistributionMetadata
+ >>> metadata = DistributionMetadata()
+ >>> metadata.read_pkg_file(open('distribute-0.6.8-py2.7.egg-info'))
+ >>> metadata.name
+ 'distribute'
+ >>> metadata.version
+ '0.6.8'
+ >>> metadata.description
+ 'Easily download, build, install, upgrade, and uninstall Python packages'
+
+Notice that the class can also be instantiated with a metadata file path to
+load its values::
+
+ >>> pkg_info_path = 'distribute-0.6.8-py2.7.egg-info'
+ >>> DistributionMetadata(pkg_info_path).name
+ 'distribute'
+
+
+.. % \section{Multiple extension modules}
+.. % \label{multiple-ext}
+
+.. % \section{Putting it all together}
+
+
+.. _docutils: http://docutils.sourceforge.net
diff --git a/docs/deprecated/distutils/extending.rst b/docs/deprecated/distutils/extending.rst
new file mode 100644
index 0000000..c99d3c7
--- /dev/null
+++ b/docs/deprecated/distutils/extending.rst
@@ -0,0 +1,98 @@
+.. _extending-distutils:
+
+*******************
+Extending Distutils
+*******************
+
+.. include:: ./_setuptools_disclaimer.rst
+
+Distutils can be extended in various ways. Most extensions take the form of new
+commands or replacements for existing commands. New commands may be written to
+support new types of platform-specific packaging, for example, while
+replacements for existing commands may be made to modify details of how the
+command operates on a package.
+
+Most extensions of the distutils are made within :file:`setup.py` scripts that
+want to modify existing commands; many simply add a few file extensions that
+should be copied into packages in addition to :file:`.py` files as a
+convenience.
+
+Most distutils command implementations are subclasses of the
+:class:`distutils.cmd.Command` class. New commands may directly inherit from
+:class:`~distutils.cmd.Command`, while replacements often derive from :class:`~distutils.cmd.Command`
+indirectly, directly subclassing the command they are replacing. Commands are
+required to derive from :class:`~distutils.cmd.Command`.
+
+.. % \section{Extending existing commands}
+.. % \label{extend-existing}
+
+.. % \section{Writing new commands}
+.. % \label{new-commands}
+.. % \XXX{Would an uninstall command be a good example here?}
+
+
+Integrating new commands
+========================
+
+There are different ways to integrate new command implementations into
+distutils. The most difficult is to lobby for the inclusion of the new features
+in distutils itself, and wait for (and require) a version of Python that
+provides that support. This is really hard for many reasons.
+
+The most common, and possibly the most reasonable for most needs, is to include
+the new implementations with your :file:`setup.py` script, and cause the
+:func:`distutils.core.setup` function to use them::
+
+ from distutils.command.build_py import build_py as _build_py
+ from distutils.core import setup
+
+ class build_py(_build_py):
+ """Specialized Python source builder."""
+
+ # implement whatever needs to be different...
+
+ setup(cmdclass={'build_py': build_py},
+ ...)
+
+This approach is most valuable if the new implementations must be used in order
+to use a particular package, since everyone interested in the package will need
+to have the new command implementation.
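+
+A common case mentioned earlier is copying a few extra file extensions into
+packages along with the ``.py`` files.  A minimal sketch of that idea, assuming
+your packages keep ``.cfg`` files next to their sources, could look like this::
+
+   import os
+
+   from distutils.command.build_py import build_py as _build_py
+   from distutils.core import setup
+
+   class build_py(_build_py):
+       """build_py that also copies ``*.cfg`` files found next to the sources."""
+
+       def run(self):
+           _build_py.run(self)
+           # Copy any .cfg file that sits next to a package's sources into
+           # the build directory, so that it is installed with the modules.
+           for package in self.packages or []:
+               src_dir = self.get_package_dir(package) or os.curdir
+               build_dir = os.path.join(self.build_lib, *package.split('.'))
+               self.mkpath(build_dir)
+               for name in os.listdir(src_dir):
+                   if name.endswith('.cfg'):
+                       self.copy_file(os.path.join(src_dir, name),
+                                      os.path.join(build_dir, name))
+
+   setup(name='foobar',
+         version='1.0',
+         packages=['foobar'],
+         cmdclass={'build_py': build_py})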
+
+Beginning with Python 2.4, a third option is available, intended to allow new
+commands to be added which can support existing :file:`setup.py` scripts without
+requiring modifications to the Python installation. This is expected to allow
+third-party extensions to provide support for additional packaging systems, but
+the commands can be used for anything distutils commands can be used for. A new
+configuration option, ``command_packages`` (command-line option
+:option:`!--command-packages`), can be used to specify additional packages to be
+searched for modules implementing commands. Like all distutils options, this
+can be specified on the command line or in a configuration file. This option
+can only be set in the ``[global]`` section of a configuration file, or before
+any commands on the command line. If set in a configuration file, it can be
+overridden from the command line; setting it to an empty string on the command
+line causes the default to be used. This should never be set in a configuration
+file provided with a package.
+
+This new option can be used to add any number of packages to the list of
+packages searched for command implementations; multiple package names should be
+separated by commas. When not specified, the search is only performed in the
+:mod:`distutils.command` package. When :file:`setup.py` is run with the option
+``--command-packages distcmds,buildcmds``, however, the packages
+:mod:`distutils.command`, ``distcmds``, and ``buildcmds`` will be searched
+in that order. New commands are expected to be implemented in modules of the
+same name as the command by classes sharing the same name. Given the example
+command line option above, the command :command:`bdist_openpkg` could be
+implemented by the class ``distcmds.bdist_openpkg.bdist_openpkg`` or
+``buildcmds.bdist_openpkg.bdist_openpkg``.
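+
+As a sketch of what such a module might contain, a hypothetical
+:file:`distcmds/bdist_openpkg.py` (next to an empty
+:file:`distcmds/__init__.py`) could define::
+
+   from distutils.cmd import Command
+
+   class bdist_openpkg(Command):
+       """Hypothetical command that would build an OpenPKG package."""
+
+       description = "create an OpenPKG distribution (sketch)"
+       user_options = [('dist-dir=', 'd', "directory to put the package in")]
+
+       def initialize_options(self):
+           self.dist_dir = None
+
+       def finalize_options(self):
+           if self.dist_dir is None:
+               self.dist_dir = 'dist'
+
+       def run(self):
+           # A real implementation would build the package here.
+           self.announce("would build an OpenPKG package into " + self.dist_dir)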
+
+
+Adding new distribution types
+=============================
+
+Commands that create distributions (files in the :file:`dist/` directory) need
+to add ``(command, filename)`` pairs to ``self.distribution.dist_files`` so that
+:command:`upload` can upload them to PyPI. The *filename* in the pair contains no
+path information, only the name of the file itself. In dry-run mode, pairs
+should still be added to represent what would have been created.
+
+
diff --git a/docs/deprecated/distutils/index.rst b/docs/deprecated/distutils/index.rst
new file mode 100644
index 0000000..1f72a25
--- /dev/null
+++ b/docs/deprecated/distutils/index.rst
@@ -0,0 +1,42 @@
+.. _distutils-index:
+
+##############################################
+ Distributing Python Modules (Legacy version)
+##############################################
+
+:Authors: Greg Ward, Anthony Baxter
+:Email: distutils-sig@python.org
+
+.. seealso::
+
+ :ref:`distributing-index`
+      The up-to-date module distribution documentation
+
+.. include:: ./_setuptools_disclaimer.rst
+
+.. note::
+
+ This guide only covers the basic tools for building and distributing
+ extensions that are provided as part of this version of Python. Third party
+ tools offer easier to use and more secure alternatives. Refer to the `quick
+ recommendations section <https://packaging.python.org/guides/tool-recommendations/>`__
+ in the Python Packaging User Guide for more information.
+
+This document describes the Python Distribution Utilities ("Distutils") from
+the module developer's point of view, describing the underlying capabilities
+that ``setuptools`` builds on to allow Python developers to make Python modules
+and extensions readily available to a wider audience.
+
+.. toctree::
+ :maxdepth: 2
+ :numbered:
+
+ introduction.rst
+ setupscript.rst
+ configfile.rst
+ sourcedist.rst
+ builtdist.rst
+ examples.rst
+ extending.rst
+ commandref.rst
+ apiref.rst
diff --git a/docs/deprecated/distutils/introduction.rst b/docs/deprecated/distutils/introduction.rst
new file mode 100644
index 0000000..7491b96
--- /dev/null
+++ b/docs/deprecated/distutils/introduction.rst
@@ -0,0 +1,214 @@
+.. _distutils-intro:
+
+****************************
+An Introduction to Distutils
+****************************
+
+.. include:: ./_setuptools_disclaimer.rst
+
+This document covers using the Distutils to distribute your Python modules,
+concentrating on the role of developer/distributor: if you're looking for
+information on installing Python modules, you should refer to the
+:ref:`install-index` chapter.
+
+
+.. _distutils-concepts:
+
+Concepts & Terminology
+======================
+
+Using the Distutils is quite simple, both for module developers and for
+users/administrators installing third-party modules. As a developer, your
+responsibilities (apart from writing solid, well-documented and well-tested
+code, of course!) are:
+
+* write a setup script (:file:`setup.py` by convention)
+
+* (optional) write a setup configuration file
+
+* create a source distribution
+
+* (optional) create one or more built (binary) distributions
+
+Each of these tasks is covered in this document.
+
+Not all module developers have access to a multitude of platforms, so it's not
+always feasible to expect them to create a multitude of built distributions. It
+is hoped that a class of intermediaries, called *packagers*, will arise to
+address this need. Packagers will take source distributions released by module
+developers, build them on one or more platforms, and release the resulting built
+distributions. Thus, users on the most popular platforms will be able to
+install most popular Python module distributions in the most natural way for
+their platform, without having to run a single setup script or compile a line of
+code.
+
+
+.. _distutils-simple-example:
+
+A Simple Example
+================
+
+The setup script is usually quite simple, although since it's written in Python,
+there are no arbitrary limits to what you can do with it. You should, however,
+be careful about putting arbitrarily expensive operations in your setup script:
+unlike, say, Autoconf-style configure scripts, the setup script may be run
+multiple times in the course of building and installing your module
+distribution.
+
+If all you want to do is distribute a module called ``foo``, contained in a
+file :file:`foo.py`, then your setup script can be as simple as this::
+
+ from distutils.core import setup
+ setup(name='foo',
+ version='1.0',
+ py_modules=['foo'],
+ )
+
+Some observations:
+
+* most information that you supply to the Distutils is supplied as keyword
+ arguments to the :func:`~distutils.core.setup` function
+
+* those keyword arguments fall into two categories: package metadata (name,
+ version number) and information about what's in the package (a list of pure
+ Python modules, in this case)
+
+* modules are specified by module name, not filename (the same will hold true
+ for packages and extensions)
+
+* it's recommended that you supply a little more metadata, in particular your
+ name, email address and a URL for the project (see section :ref:`setup-script`
+ for an example)
+
+To create a source distribution for this module, you would create a setup
+script, :file:`setup.py`, containing the above code, and run this command from a
+terminal::
+
+ python setup.py sdist
+
+For Windows, open a command prompt window (:menuselection:`Start -->
+Accessories`) and change the command to::
+
+ setup.py sdist
+
+:command:`sdist` will create an archive file (e.g., tarball on Unix, ZIP file on Windows)
+containing your setup script :file:`setup.py`, and your module :file:`foo.py`.
+The archive file will be named :file:`foo-1.0.tar.gz` (or :file:`.zip`), and
+will unpack into a directory :file:`foo-1.0`.
+
+If an end-user wishes to install your ``foo`` module, all they have to do is
+download :file:`foo-1.0.tar.gz` (or :file:`.zip`), unpack it, and---from the
+:file:`foo-1.0` directory---run ::
+
+ python setup.py install
+
+which will ultimately copy :file:`foo.py` to the appropriate directory for
+third-party modules in their Python installation.
+
+This simple example demonstrates some fundamental concepts of the Distutils.
+First, both developers and installers have the same basic user interface, i.e.
+the setup script. The difference is which Distutils *commands* they use: the
+:command:`sdist` command is almost exclusively for module developers, while
+:command:`install` is more often for installers (although most developers will
+want to install their own code occasionally).
+
+If you want to make things really easy for your users, you can create one or
+more built distributions for them. For instance, if you are running on a
+Windows machine, and want to make things easy for other Windows users, you can
+create an executable installer (the most appropriate type of built distribution
+for this platform) with the :command:`bdist_wininst` command. For example::
+
+ python setup.py bdist_wininst
+
+will create an executable installer, :file:`foo-1.0.win32.exe`, in the current
+directory.
+
+Other useful built distribution formats are RPM, implemented by the
+:command:`bdist_rpm` command, Solaris :program:`pkgtool`
+(:command:`bdist_pkgtool`), and HP-UX :program:`swinstall`
+(:command:`bdist_sdux`). For example, the following command will create an RPM
+file called :file:`foo-1.0.noarch.rpm`::
+
+ python setup.py bdist_rpm
+
+(The :command:`bdist_rpm` command uses the :command:`rpm` executable, so it has
+to be run on an RPM-based system such as Red Hat Linux, SuSE Linux, or
+Mandrake Linux.)
+
+You can find out what distribution formats are available at any time by running
+::
+
+ python setup.py bdist --help-formats
+
+
+.. _python-terms:
+
+General Python terminology
+==========================
+
+If you're reading this document, you probably have a good idea of what modules,
+extensions, and so forth are. Nevertheless, just to be sure that everyone is
+operating from a common starting point, we offer the following glossary of
+common Python terms:
+
+module
+ the basic unit of code reusability in Python: a block of code imported by some
+ other code. Three types of modules concern us here: pure Python modules,
+ extension modules, and packages.
+
+pure Python module
+ a module written in Python and contained in a single :file:`.py` file (and
+ possibly associated :file:`.pyc` files). Sometimes referred to as a
+ "pure module."
+
+extension module
+ a module written in the low-level language of the Python implementation: C/C++
+ for Python, Java for Jython. Typically contained in a single dynamically
+ loadable pre-compiled file, e.g. a shared object (:file:`.so`) file for Python
+ extensions on Unix, a DLL (given the :file:`.pyd` extension) for Python
+ extensions on Windows, or a Java class file for Jython extensions. (Note that
+ currently, the Distutils only handles C/C++ extensions for Python.)
+
+package
+ a module that contains other modules; typically contained in a directory in the
+ filesystem and distinguished from other directories by the presence of a file
+ :file:`__init__.py`.
+
+root package
+ the root of the hierarchy of packages. (This isn't really a package, since it
+ doesn't have an :file:`__init__.py` file. But we have to call it something.)
+ The vast majority of the standard library is in the root package, as are many
+ small, standalone third-party modules that don't belong to a larger module
+ collection. Unlike regular packages, modules in the root package can be found in
+ many directories: in fact, every directory listed in ``sys.path`` contributes
+ modules to the root package.
+
+
+.. _distutils-term:
+
+Distutils-specific terminology
+==============================
+
+The following terms apply more specifically to the domain of distributing Python
+modules using the Distutils:
+
+module distribution
+ a collection of Python modules distributed together as a single downloadable
+ resource and meant to be installed *en masse*. Examples of some well-known
+ module distributions are NumPy, SciPy, Pillow,
+   and mxBase. (This would be called a *package*, except that term is
+ already taken in the Python context: a single module distribution may contain
+ zero, one, or many Python packages.)
+
+pure module distribution
+ a module distribution that contains only pure Python modules and packages.
+ Sometimes referred to as a "pure distribution."
+
+non-pure module distribution
+ a module distribution that contains at least one extension module. Sometimes
+ referred to as a "non-pure distribution."
+
+distribution root
+ the top-level directory of your source tree (or source distribution); the
+ directory where :file:`setup.py` exists. Generally :file:`setup.py` will be
+ run from this directory.
diff --git a/docs/deprecated/distutils/packageindex.rst b/docs/deprecated/distutils/packageindex.rst
new file mode 100644
index 0000000..ccb9a59
--- /dev/null
+++ b/docs/deprecated/distutils/packageindex.rst
@@ -0,0 +1,16 @@
+:orphan:
+
+.. _package-index:
+
+*******************************
+The Python Package Index (PyPI)
+*******************************
+
+The `Python Package Index (PyPI)`_ stores metadata describing distributions
+packaged with distutils and other publishing tools, as well the distribution
+archives themselves.
+
+References to up to date PyPI documentation can be found at
+:ref:`publishing-python-packages`.
+
+.. _Python Package Index (PyPI): https://pypi.org
diff --git a/docs/deprecated/distutils/setupscript.rst b/docs/deprecated/distutils/setupscript.rst
new file mode 100644
index 0000000..f49c4f8
--- /dev/null
+++ b/docs/deprecated/distutils/setupscript.rst
@@ -0,0 +1,715 @@
+.. _setup-script:
+
+************************
+Writing the Setup Script
+************************
+
+.. include:: ./_setuptools_disclaimer.rst
+
+The setup script is the centre of all activity in building, distributing, and
+installing modules using the Distutils. The main purpose of the setup script is
+to describe your module distribution to the Distutils, so that the various
+commands that operate on your modules do the right thing. As we saw in section
+:ref:`distutils-simple-example` above, the setup script consists mainly of a call to :func:`~distutils.core.setup`, and most information
+supplied to the Distutils by the module developer is supplied as keyword
+arguments to :func:`~distutils.core.setup`.
+
+Here's a slightly more involved example, which we'll follow for the next couple
+of sections: the Distutils' own setup script. (Keep in mind that although the
+Distutils are included with Python 1.6 and later, they also have an independent
+existence so that Python 1.5.2 users can use them to install other module
+distributions. The Distutils' own setup script, shown here, is used to install
+the package into Python 1.5.2.) ::
+
+ #!/usr/bin/env python
+
+ from distutils.core import setup
+
+ setup(name='Distutils',
+ version='1.0',
+ description='Python Distribution Utilities',
+ author='Greg Ward',
+ author_email='gward@python.net',
+ url='https://www.python.org/sigs/distutils-sig/',
+ packages=['distutils', 'distutils.command'],
+ )
+
+There are only two differences between this and the trivial one-file
+distribution presented in section :ref:`distutils-simple-example`: more metadata, and the
+specification of pure Python modules by package, rather than by module. This is
+important since the Distutils consist of a couple of dozen modules split into
+(so far) two packages; an explicit list of every module would be tedious to
+generate and difficult to maintain. For more information on the additional
+meta-data, see section :ref:`meta-data`.
+
+Note that any pathnames (files or directories) supplied in the setup script
+should be written using the Unix convention, i.e. slash-separated. The
+Distutils will take care of converting this platform-neutral representation into
+whatever is appropriate on your current platform before actually using the
+pathname. This makes your setup script portable across operating systems, which
+of course is one of the major goals of the Distutils. In this spirit, all
+pathnames in this document are slash-separated.
+
+This, of course, only applies to pathnames given to Distutils functions. If
+you, for example, use standard Python functions such as :func:`glob.glob` or
+:func:`os.listdir` to specify files, you should be careful to write portable
+code instead of hardcoding path separators::
+
+ glob.glob(os.path.join('mydir', 'subdir', '*.html'))
+ os.listdir(os.path.join('mydir', 'subdir'))
+
+
+.. _listing-packages:
+
+Listing whole packages
+======================
+
+The ``packages`` option tells the Distutils to process (build, distribute,
+install, etc.) all pure Python modules found in each package mentioned in the
+``packages`` list. In order to do this, of course, there has to be a
+correspondence between package names and directories in the filesystem. The
+default correspondence is the most obvious one, i.e. package :mod:`distutils` is
+found in the directory :file:`distutils` relative to the distribution root.
+Thus, when you say ``packages = ['foo']`` in your setup script, you are
+promising that the Distutils will find a file :file:`foo/__init__.py` (which
+might be spelled differently on your system, but you get the idea) relative to
+the directory where your setup script lives. If you break this promise, the
+Distutils will issue a warning but still process the broken package anyway.
+
+If you use a different convention to lay out your source directory, that's no
+problem: you just have to supply the ``package_dir`` option to tell the
+Distutils about your convention. For example, say you keep all Python source
+under :file:`lib`, so that modules in the "root package" (i.e., not in any
+package at all) are in :file:`lib`, modules in the ``foo`` package are in
+:file:`lib/foo`, and so forth. Then you would put ::
+
+ package_dir = {'': 'lib'}
+
+in your setup script. The keys to this dictionary are package names, and an
+empty package name stands for the root package. The values are directory names
+relative to your distribution root. In this case, when you say ``packages =
+['foo']``, you are promising that the file :file:`lib/foo/__init__.py` exists.
+
+Another possible convention is to put the ``foo`` package right in
+:file:`lib`, the ``foo.bar`` package in :file:`lib/bar`, etc. This would be
+written in the setup script as ::
+
+ package_dir = {'foo': 'lib'}
+
+A ``package: dir`` entry in the ``package_dir`` dictionary implicitly
+applies to all packages below *package*, so the ``foo.bar`` case is
+automatically handled here. In this example, having ``packages = ['foo',
+'foo.bar']`` tells the Distutils to look for :file:`lib/__init__.py` and
+:file:`lib/bar/__init__.py`. (Keep in mind that although ``package_dir``
+applies recursively, you must explicitly list all packages in
+``packages``: the Distutils will *not* recursively scan your source tree
+looking for any directory with an :file:`__init__.py` file.)
+
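+Putting these pieces together, here is a sketch of a setup script for the
+``foo``/``foo.bar`` layout just described (the distribution name and version
+are placeholder values)::
+
+    from distutils.core import setup
+
+    setup(name='foo-dist',                    # placeholder name
+          version='1.0',                      # placeholder version
+          package_dir={'foo': 'lib'},
+          packages=['foo', 'foo.bar'],        # every package listed explicitly
+          )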
+
+.. _listing-modules:
+
+Listing individual modules
+==========================
+
+For a small module distribution, you might prefer to list all modules rather
+than listing packages---especially the case of a single module that goes in the
+"root package" (i.e., no package at all). This simplest case was shown in
+section :ref:`distutils-simple-example`; here is a slightly more involved example::
+
+ py_modules = ['mod1', 'pkg.mod2']
+
+This describes two modules, one of them in the "root" package, the other in the
+``pkg`` package. Again, the default package/directory layout implies that
+these two modules can be found in :file:`mod1.py` and :file:`pkg/mod2.py`, and
+that :file:`pkg/__init__.py` exists as well. And again, you can override the
+package/directory correspondence using the ``package_dir`` option.
+
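+For instance, if those same two modules lived under a hypothetical :file:`src`
+directory instead of the distribution root, the correspondence could be
+overridden like this::
+
+    setup(...,
+          package_dir={'': 'src'},            # mod1.py and pkg/ live under src/
+          py_modules=['mod1', 'pkg.mod2'],
+          )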
+
+.. _describing-extensions:
+
+Describing extension modules
+============================
+
+Just as writing Python extension modules is a bit more complicated than writing
+pure Python modules, describing them to the Distutils is a bit more complicated.
+Unlike pure modules, it's not enough just to list modules or packages and expect
+the Distutils to go out and find the right files; you have to specify the
+extension name, source file(s), and any compile/link requirements (include
+directories, libraries to link with, etc.).
+
+.. XXX read over this section
+
+All of this is done through another keyword argument to
+:func:`~distutils.core.setup`, the
+``ext_modules`` option. ``ext_modules`` is just a list of
+:class:`~distutils.core.Extension` instances, each of which describes a
+single extension module.
+Suppose your distribution includes a single extension, called ``foo`` and
+implemented by :file:`foo.c`. If no additional instructions to the
+compiler/linker are needed, describing this extension is quite simple::
+
+ Extension('foo', ['foo.c'])
+
+The :class:`~distutils.extension.Extension` class can be imported from :mod:`distutils.core` along
+with :func:`~distutils.core.setup`. Thus, the setup script for a module distribution that
+contains only this one extension and nothing else might be::
+
+ from distutils.core import setup, Extension
+ setup(name='foo',
+ version='1.0',
+ ext_modules=[Extension('foo', ['foo.c'])],
+ )
+
+The :class:`~distutils.extension.Extension` class (actually, the underlying extension-building
+machinery implemented by the :command:`build_ext` command) supports a great deal
+of flexibility in describing Python extensions, which is explained in the
+following sections.
+
+
+Extension names and packages
+----------------------------
+
+The first argument to the :class:`~distutils.core.Extension` constructor is
+always the name of the extension, including any package names. For example, ::
+
+ Extension('foo', ['src/foo1.c', 'src/foo2.c'])
+
+describes an extension that lives in the root package, while ::
+
+ Extension('pkg.foo', ['src/foo1.c', 'src/foo2.c'])
+
+describes the same extension in the ``pkg`` package. The source files and
+resulting object code are identical in both cases; the only difference is where
+in the filesystem (and therefore where in Python's namespace hierarchy) the
+resulting extension lives.
+
+If you have a number of extensions all in the same package (or all under the
+same base package), use the ``ext_package`` keyword argument to
+:func:`~distutils.core.setup`. For example, ::
+
+ setup(...,
+ ext_package='pkg',
+ ext_modules=[Extension('foo', ['foo.c']),
+ Extension('subpkg.bar', ['bar.c'])],
+ )
+
+will compile :file:`foo.c` to the extension ``pkg.foo``, and
+:file:`bar.c` to ``pkg.subpkg.bar``.
+
+
+Extension source files
+----------------------
+
+The second argument to the :class:`~distutils.core.Extension` constructor is
+a list of source
+files. Since the Distutils currently only support C, C++, and Objective-C
+extensions, these are normally C/C++/Objective-C source files. (Be sure to use
+appropriate extensions to distinguish C++ source files: :file:`.cc` and
+:file:`.cpp` seem to be recognized by both Unix and Windows compilers.)
+
+However, you can also include SWIG interface (:file:`.i`) files in the list; the
+:command:`build_ext` command knows how to deal with SWIG extensions: it will run
+SWIG on the interface file and compile the resulting C/C++ file into your
+extension.
+
+.. XXX SWIG support is rough around the edges and largely untested!
+
+This warning notwithstanding, options to SWIG can currently be passed like
+this::
+
+ setup(...,
+ ext_modules=[Extension('_foo', ['foo.i'],
+ swig_opts=['-modern', '-I../include'])],
+ py_modules=['foo'],
+ )
+
+Or on the commandline like this::
+
+ > python setup.py build_ext --swig-opts="-modern -I../include"
+
+On some platforms, you can include non-source files that are processed by the
+compiler and included in your extension. Currently, this just means Windows
+message text (:file:`.mc`) files and resource definition (:file:`.rc`) files for
+Visual C++. These will be compiled to binary resource (:file:`.res`) files and
+linked into the executable.
+
+
+Preprocessor options
+--------------------
+
+Three optional arguments to :class:`~distutils.core.Extension` will help if
+you need to specify include directories to search or preprocessor macros to
+define/undefine: ``include_dirs``, ``define_macros``, and ``undef_macros``.
+
+For example, if your extension requires header files in the :file:`include`
+directory under your distribution root, use the ``include_dirs`` option::
+
+ Extension('foo', ['foo.c'], include_dirs=['include'])
+
+You can specify absolute directories there; if you know that your extension will
+only be built on Unix systems with X11R6 installed to :file:`/usr`, you can get
+away with ::
+
+ Extension('foo', ['foo.c'], include_dirs=['/usr/include/X11'])
+
+You should avoid this sort of non-portable usage if you plan to distribute your
+code: it's probably better to write C code like ::
+
+ #include <X11/Xlib.h>
+
+If you need to include header files from some other Python extension, you can
+take advantage of the fact that header files are installed in a consistent way
+by the Distutils :command:`install_headers` command. For example, the Numerical
+Python header files are installed (on a standard Unix installation) to
+:file:`/usr/local/include/python1.5/Numerical`. (The exact location will differ
+according to your platform and Python installation.) Since the Python include
+directory---\ :file:`/usr/local/include/python1.5` in this case---is always
+included in the search path when building Python extensions, the best approach
+is to write C code like ::
+
+ #include <Numerical/arrayobject.h>
+
+If you must put the :file:`Numerical` include directory right into your header
+search path, though, you can find that directory using the Distutils
+:mod:`distutils.sysconfig` module::
+
+    import os
+    from distutils.sysconfig import get_python_inc
+
+    incdir = os.path.join(get_python_inc(plat_specific=1), 'Numerical')
+    setup(...,
+          Extension(..., include_dirs=[incdir]),
+          )
+
+Even though this is quite portable---it will work on any Python installation,
+regardless of platform---it's probably easier to just write your C code in the
+sensible way.
+
+You can define and undefine pre-processor macros with the ``define_macros`` and
+``undef_macros`` options. ``define_macros`` takes a list of ``(name, value)``
+tuples, where ``name`` is the name of the macro to define (a string) and
+``value`` is its value: either a string or ``None``. (Defining a macro ``FOO``
+to ``None`` is the equivalent of a bare ``#define FOO`` in your C source: with
+most compilers, this sets ``FOO`` to the string ``1``.) ``undef_macros`` is
+just a list of macros to undefine.
+
+For example::
+
+ Extension(...,
+ define_macros=[('NDEBUG', '1'),
+ ('HAVE_STRFTIME', None)],
+ undef_macros=['HAVE_FOO', 'HAVE_BAR'])
+
+is the equivalent of having this at the top of every C source file::
+
+ #define NDEBUG 1
+ #define HAVE_STRFTIME
+ #undef HAVE_FOO
+ #undef HAVE_BAR
+
+
+Library options
+---------------
+
+You can also specify the libraries to link against when building your extension,
+and the directories to search for those libraries. The ``libraries`` option is
+a list of libraries to link against, ``library_dirs`` is a list of directories
+to search for libraries at link-time, and ``runtime_library_dirs`` is a list of
+directories to search for shared (dynamically loaded) libraries at run-time.
+
+For example, if you need to link against libraries known to be in the standard
+library search path on target systems ::
+
+ Extension(...,
+ libraries=['gdbm', 'readline'])
+
+If you need to link with libraries in a non-standard location, you'll have to
+include the location in ``library_dirs``::
+
+ Extension(...,
+ library_dirs=['/usr/X11R6/lib'],
+ libraries=['X11', 'Xt'])
+
+(Again, this sort of non-portable construct should be avoided if you intend to
+distribute your code.)
+
+.. XXX Should mention clib libraries here or somewhere else!
+
+
+Other options
+-------------
+
+There are still some other options which can be used to handle special cases.
+
+The ``optional`` option is a boolean; if it is true,
+a build failure in the extension will not abort the build process, but
+instead simply not install the failing extension.
+
+The ``extra_objects`` option is a list of object files to be passed to the
+linker. These files must not have extensions, as the default extension for the
+compiler is used.
+
+``extra_compile_args`` and ``extra_link_args`` can be used to
+specify additional command line options for the respective compiler and linker
+command lines.
+
+``export_symbols`` is only useful on Windows. It can contain a list of
+symbols (functions or variables) to be exported. This option is not needed when
+building compiled extensions: Distutils will automatically add ``initmodule``
+to the list of exported symbols.
+
+The ``depends`` option is a list of files that the extension depends on
+(for example header files). The build command will call the compiler on the
+sources to rebuild the extension if any of these files has been modified since
+the previous build.
+
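+For illustration, a hypothetical extension combining several of these options
+(the file names and flags are placeholders, not recommendations) might be
+written as::
+
+    Extension('foo', ['foo.c'],
+              optional=True,                  # a failed build won't abort installation
+              extra_objects=['helpers'],      # extension-less object file name
+              extra_compile_args=['-O0'],
+              extra_link_args=['-Wl,--as-needed'],
+              depends=['foo.h'])              # recompile when foo.h changes
+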
+Relationships between Distributions and Packages
+================================================
+
+A distribution may relate to packages in three specific ways:
+
+#. It can require packages or modules.
+
+#. It can provide packages or modules.
+
+#. It can obsolete packages or modules.
+
+These relationships can be specified using keyword arguments to the
+:func:`distutils.core.setup` function.
+
+Dependencies on other Python modules and packages can be specified by supplying
+the *requires* keyword argument to :func:`~distutils.core.setup`. The
+value must be a list of
+strings. Each string specifies a package that is required, and optionally what
+versions are sufficient.
+
+To specify that any version of a module or package is required, the string
+should consist entirely of the module or package name. Examples include
+``'mymodule'`` and ``'xml.parsers.expat'``.
+
+If specific versions are required, a sequence of qualifiers can be supplied in
+parentheses. Each qualifier may consist of a comparison operator and a version
+number. The accepted comparison operators are::
+
+ < > ==
+ <= >= !=
+
+These can be combined by using multiple qualifiers separated by commas (and
+optional whitespace). In this case, all of the qualifiers must be matched; a
+logical AND is used to combine the evaluations.
+
+Let's look at a bunch of examples:
+
++-------------------------+----------------------------------------------+
+| Requires Expression | Explanation |
++=========================+==============================================+
+| ``==1.0`` | Only version ``1.0`` is compatible |
++-------------------------+----------------------------------------------+
+| ``>1.0, !=1.5.1, <2.0`` | Any version after ``1.0`` and before ``2.0`` |
+| | is compatible, except ``1.5.1`` |
++-------------------------+----------------------------------------------+
+
+Now that we can specify dependencies, we also need to be able to specify what we
+provide that other distributions can require. This is done using the *provides*
+keyword argument to :func:`~distutils.core.setup`. The value for this keyword is a list of
+strings, each of which names a Python module or package, and optionally
+identifies the version. If the version is not specified, it is assumed to match
+that of the distribution.
+
+Some examples:
+
++---------------------+----------------------------------------------+
+| Provides Expression | Explanation |
++=====================+==============================================+
+| ``mypkg`` | Provide ``mypkg``, using the distribution |
+| | version |
++---------------------+----------------------------------------------+
+| ``mypkg (1.1)`` | Provide ``mypkg`` version 1.1, regardless of |
+| | the distribution version |
++---------------------+----------------------------------------------+
+
+A package can declare that it obsoletes other packages using the *obsoletes*
+keyword argument. The value for this is similar to that of the *requires*
+keyword: a list of strings giving module or package specifiers. Each specifier
+consists of a module or package name optionally followed by one or more version
+qualifiers. Version qualifiers are given in parentheses after the module or
+package name.
+
+The versions identified by the qualifiers are those that are obsoleted by the
+distribution being described. If no qualifiers are given, all versions of the
+named module or package are understood to be obsoleted.
+
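+Putting the three keywords together, a hypothetical setup script (the names
+and version qualifiers are invented for illustration) might declare::
+
+    setup(...,
+          requires=['mymodule', 'otherpkg (>1.0, !=1.5.1, <2.0)'],
+          provides=['mypkg (1.1)'],
+          obsoletes=['oldpkg'],               # all versions of oldpkg are obsoleted
+          )
+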
+.. _distutils-installing-scripts:
+
+Installing Scripts
+==================
+
+So far we have been dealing with pure and non-pure Python modules, which are
+usually not run by themselves but imported by scripts.
+
+Scripts are files containing Python source code, intended to be started from the
+command line. Scripts don't require Distutils to do anything very complicated.
+The only clever feature is that if the first line of the script starts with
+``#!`` and contains the word "python", the Distutils will adjust the first line
+to refer to the current interpreter location. The :option:`!--executable` (or
+:option:`!-e`) option allows the interpreter path to be explicitly overridden.
+
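+For example, the interpreter location can be forced at build time like this (a
+sketch; the path shown is only a placeholder)::
+
+    python setup.py build --executable=/usr/local/bin/python3
+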
+The ``scripts`` option is simply a list of files to be handled in this
+way. From the PyXML setup script::
+
+ setup(...,
+ scripts=['scripts/xmlproc_parse', 'scripts/xmlproc_val']
+ )
+
+.. versionchanged:: 3.1
+ All the scripts will also be added to the ``MANIFEST`` file if no template is
+ provided. See :ref:`manifest`.
+
+
+.. _distutils-installing-package-data:
+
+Installing Package Data
+=======================
+
+Often, additional files need to be installed into a package. These files are
+often data that's closely related to the package's implementation, or text files
+containing documentation that might be of interest to programmers using the
+package. These files are called :dfn:`package data`.
+
+Package data can be added to packages using the ``package_data`` keyword
+argument to the :func:`~distutils.core.setup` function. The value must be a mapping from
+package name to a list of relative path names that should be copied into the
+package. The paths are interpreted as relative to the directory containing the
+package (information from the ``package_dir`` mapping is used if appropriate);
+that is, the files are expected to be part of the package in the source
+directories. They may contain glob patterns as well.
+
+The path names may contain directory portions; any necessary directories will be
+created in the installation.
+
+For example, if a package should contain a subdirectory with several data files,
+the files can be arranged like this in the source tree::
+
+ setup.py
+ src/
+ mypkg/
+ __init__.py
+ module.py
+ data/
+ tables.dat
+ spoons.dat
+ forks.dat
+
+The corresponding call to :func:`~distutils.core.setup` might be::
+
+ setup(...,
+ packages=['mypkg'],
+ package_dir={'mypkg': 'src/mypkg'},
+ package_data={'mypkg': ['data/*.dat']},
+ )
+
+
+.. versionchanged:: 3.1
+ All the files that match ``package_data`` will be added to the ``MANIFEST``
+ file if no template is provided. See :ref:`manifest`.
+
+
+.. _distutils-additional-files:
+
+Installing Additional Files
+===========================
+
+The ``data_files`` option can be used to specify additional files needed
+by the module distribution: configuration files, message catalogs, data files,
+anything which doesn't fit in the previous categories.
+
+``data_files`` specifies a sequence of (*directory*, *files*) pairs in the
+following way::
+
+ setup(...,
+ data_files=[('bitmaps', ['bm/b1.gif', 'bm/b2.gif']),
+ ('config', ['cfg/data.cfg'])],
+ )
+
+Each (*directory*, *files*) pair in the sequence specifies the installation
+directory and the files to install there.
+
+Each file name in *files* is interpreted relative to the :file:`setup.py`
+script at the top of the package source distribution. Note that you can
+specify the directory where the data files will be installed, but you cannot
+rename the data files themselves.
+
+The *directory* should be a relative path. It is interpreted relative to the
+installation prefix (Python's ``sys.prefix`` for system installations;
+``site.USER_BASE`` for user installations). Distutils allows *directory* to be
+an absolute installation path, but this is discouraged since it is
+incompatible with the wheel packaging format. No directory information from
+*files* is used to determine the final location of the installed file; only
+the name of the file is used.
+
+You can specify the ``data_files`` option as a simple sequence of files
+without specifying a target directory, but this is not recommended, and the
+:command:`install` command will print a warning in this case. To install data
+files directly in the target directory, an empty string should be given as the
+directory.
+
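+For example, a hypothetical configuration file could be installed directly
+into the target directory like this::
+
+    setup(...,
+          data_files=[('', ['cfg/app.cfg'])],   # 'app.cfg' is a placeholder name
+          )
+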
+.. versionchanged:: 3.1
+ All the files that match ``data_files`` will be added to the ``MANIFEST``
+ file if no template is provided. See :ref:`manifest`.
+
+
+.. _meta-data:
+
+Additional meta-data
+====================
+
+The setup script may include additional meta-data beyond the name and version.
+This information includes:
+
++----------------------+---------------------------+-----------------+--------+
+| Meta-Data | Description | Value | Notes |
++======================+===========================+=================+========+
+| ``name`` | name of the package | short string | \(1) |
++----------------------+---------------------------+-----------------+--------+
+| ``version`` | version of this release | short string | (1)(2) |
++----------------------+---------------------------+-----------------+--------+
+| ``author`` | package author's name | short string | \(3) |
++----------------------+---------------------------+-----------------+--------+
+| ``author_email`` | email address of the | email address | \(3) |
+| | package author | | |
++----------------------+---------------------------+-----------------+--------+
+| ``maintainer`` | package maintainer's name | short string | \(3) |
++----------------------+---------------------------+-----------------+--------+
+| ``maintainer_email`` | email address of the | email address | \(3) |
+| | package maintainer | | |
++----------------------+---------------------------+-----------------+--------+
+| ``url`` | home page for the package | URL | \(1) |
++----------------------+---------------------------+-----------------+--------+
+| ``description`` | short, summary | short string | |
+| | description of the | | |
+| | package | | |
++----------------------+---------------------------+-----------------+--------+
+| ``long_description`` | longer description of the | long string | \(4) |
+| | package | | |
++----------------------+---------------------------+-----------------+--------+
+| ``download_url`` | location where the | URL | |
+| | package may be downloaded | | |
++----------------------+---------------------------+-----------------+--------+
+| ``classifiers`` | a list of classifiers | list of strings | (6)(7) |
++----------------------+---------------------------+-----------------+--------+
+| ``platforms`` | a list of platforms | list of strings | (6)(8) |
++----------------------+---------------------------+-----------------+--------+
+| ``keywords`` | a list of keywords | list of strings | (6)(8) |
++----------------------+---------------------------+-----------------+--------+
+| ``license`` | license for the package | short string | \(5) |
++----------------------+---------------------------+-----------------+--------+
+
+Notes:
+
+(1)
+ These fields are required.
+
+(2)
+ It is recommended that versions take the form *major.minor[.patch[.sub]]*.
+
+(3)
+ Either the author or the maintainer must be identified. If maintainer is
+ provided, distutils lists it as the author in :file:`PKG-INFO`.
+
+(4)
+ The ``long_description`` field is used by PyPI when you publish a package,
+ to build its project page.
+
+(5)
+ The ``license`` field is a text indicating the license covering the
+ package where the license is not a selection from the "License" Trove
+ classifiers. See the ``Classifier`` field. Notice that
+ there's a ``licence`` distribution option which is deprecated but still
+ acts as an alias for ``license``.
+
+(6)
+ This field must be a list.
+
+(7)
+ The valid classifiers are listed on
+ `PyPI <https://pypi.org/classifiers>`_.
+
+(8)
+    To preserve backward compatibility, this field also accepts a string. If
+    you pass a comma-separated string ``'foo, bar'``, it will be converted to
+    ``['foo', 'bar']``. Otherwise, it will be converted to a list of one
+    string.
+
+'short string'
+ A single line of text, not more than 200 characters.
+
+'long string'
+ Multiple lines of plain text in reStructuredText format (see
+ http://docutils.sourceforge.net/).
+
+'list of strings'
+ See below.
+
+Encoding the version information is an art in itself. Python packages generally
+adhere to the version format *major.minor[.patch][sub]*. The major number is 0
+for initial, experimental releases of software. It is incremented for releases
+that represent major milestones in a package. The minor number is incremented
+when important new features are added to the package. The patch number
+increments when bug-fix releases are made. Additional trailing version
+information is sometimes used to indicate sub-releases. These are
+"a1,a2,...,aN" (for alpha releases, where functionality and API may change),
+"b1,b2,...,bN" (for beta releases, which only fix bugs) and "pr1,pr2,...,prN"
+(for final pre-release release testing). Some examples:
+
+0.1.0
+ the first, experimental release of a package
+
+1.0.1a2
+ the second alpha release of the first patch version of 1.0
+
+``classifiers`` must be specified in a list::
+
+ setup(...,
+ classifiers=[
+ 'Development Status :: 4 - Beta',
+ 'Environment :: Console',
+ 'Environment :: Web Environment',
+ 'Intended Audience :: End Users/Desktop',
+ 'Intended Audience :: Developers',
+ 'Intended Audience :: System Administrators',
+ 'License :: OSI Approved :: Python Software Foundation License',
+ 'Operating System :: MacOS :: MacOS X',
+ 'Operating System :: Microsoft :: Windows',
+ 'Operating System :: POSIX',
+ 'Programming Language :: Python',
+ 'Topic :: Communications :: Email',
+ 'Topic :: Office/Business',
+ 'Topic :: Software Development :: Bug Tracking',
+ ],
+ )
+
+.. versionchanged:: 3.7
+ :class:`~distutils.core.setup` now warns when ``classifiers``, ``keywords``
+ or ``platforms`` fields are not specified as a list or a string.
+
+.. _debug-setup-script:
+
+Debugging the setup script
+==========================
+
+Sometimes things go wrong, and the setup script doesn't do what the developer
+wants.
+
+Distutils catches any exceptions when running the setup script, and prints a
+simple error message before the script is terminated. The motivation for this
+behaviour is to not confuse administrators who don't know much about Python and
+are trying to install a package. If they get a big long traceback from deep
+inside the guts of Distutils, they may think the package or the Python
+installation is broken because they don't read all the way down to the bottom
+and see that it's a permission problem.
+
+On the other hand, this doesn't help the developer to find the cause of the
+failure. For this purpose, the :envvar:`DISTUTILS_DEBUG` environment variable can be set
+to anything except an empty string, and distutils will now print detailed
+information about what it is doing, dump the full traceback when an exception
+occurs, and print the whole command line when an external program (like a C
+compiler) fails.
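+
+For example, on a Unix-like shell the variable can be set for a single run
+(this command is only illustrative)::
+
+    DISTUTILS_DEBUG=1 python setup.py build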
diff --git a/docs/deprecated/distutils/sourcedist.rst b/docs/deprecated/distutils/sourcedist.rst
new file mode 100644
index 0000000..0600663
--- /dev/null
+++ b/docs/deprecated/distutils/sourcedist.rst
@@ -0,0 +1,242 @@
+.. _source-dist:
+
+******************************
+Creating a Source Distribution
+******************************
+
+.. include:: ./_setuptools_disclaimer.rst
+
+As shown in section :ref:`distutils-simple-example`, you use the :command:`sdist` command
+to create a source distribution. In the simplest case, ::
+
+ python setup.py sdist
+
+(assuming you haven't specified any :command:`sdist` options in the setup script
+or config file), :command:`sdist` creates the archive of the default format for
+the current platform. The default format is a gzip'ed tar file
+(:file:`.tar.gz`) on Unix, and a ZIP file on Windows.
+
+You can specify as many formats as you like using the :option:`!--formats`
+option, for example::
+
+ python setup.py sdist --formats=gztar,zip
+
+to create a gzipped tarball and a zip file. The available formats are:
+
++-----------+-------------------------+---------+
+| Format | Description | Notes |
++===========+=========================+=========+
+| ``zip`` | zip file (:file:`.zip`) | (1),(3) |
++-----------+-------------------------+---------+
+| ``gztar`` | gzip'ed tar file | \(2) |
+| | (:file:`.tar.gz`) | |
++-----------+-------------------------+---------+
+| ``bztar`` | bzip2'ed tar file | |
+| | (:file:`.tar.bz2`) | |
++-----------+-------------------------+---------+
+| ``xztar`` | xz'ed tar file | |
+| | (:file:`.tar.xz`) | |
++-----------+-------------------------+---------+
+| ``ztar`` | compressed tar file | \(4) |
+| | (:file:`.tar.Z`) | |
++-----------+-------------------------+---------+
+| ``tar`` | tar file (:file:`.tar`) | |
++-----------+-------------------------+---------+
+
+.. versionchanged:: 3.5
+ Added support for the ``xztar`` format.
+
+Notes:
+
+(1)
+ default on Windows
+
+(2)
+ default on Unix
+
+(3)
+ requires either external :program:`zip` utility or :mod:`zipfile` module (part
+ of the standard Python library since Python 1.6)
+
+(4)
+    requires the :program:`compress` program. Notice that this format is now
+    pending deprecation and will be removed in future versions of Python.
+
+When using any ``tar`` format (``gztar``, ``bztar``, ``xztar``, ``ztar`` or
+``tar``), under Unix you can specify the ``owner`` and ``group`` names
+that will be set for each member of the archive.
+
+For example, if you want all files of the archive to be owned by root::
+
+ python setup.py sdist --owner=root --group=root
+
+
+.. _manifest:
+
+Specifying the files to distribute
+==================================
+
+If you don't supply an explicit list of files (or instructions on how to
+generate one), the :command:`sdist` command puts a minimal default set into the
+source distribution:
+
+* all Python source files implied by the ``py_modules`` and
+ ``packages`` options
+
+* all C source files mentioned in the ``ext_modules`` or
+ ``libraries`` options
+
+ .. XXX getting C library sources currently broken---no
+ :meth:`get_source_files` method in :file:`build_clib.py`!
+
+* scripts identified by the ``scripts`` option.
+ See :ref:`distutils-installing-scripts`.
+
+* anything that looks like a test script: :file:`test/test\*.py` (currently, the
+ Distutils don't do anything with test scripts except include them in source
+ distributions, but in the future there will be a standard for testing Python
+ module distributions)
+
+* Any of the standard README files (:file:`README`, :file:`README.txt`,
+ or :file:`README.rst`), :file:`setup.py` (or whatever you called your setup
+ script), and :file:`setup.cfg`.
+
+* all files that match the ``package_data`` metadata.
+ See :ref:`distutils-installing-package-data`.
+
+* all files that match the ``data_files`` metadata.
+ See :ref:`distutils-additional-files`.
+
+Sometimes this is enough, but usually you will want to specify additional files
+to distribute. The typical way to do this is to write a *manifest template*,
+called :file:`MANIFEST.in` by default. The manifest template is just a list of
+instructions for how to generate your manifest file, :file:`MANIFEST`, which is
+the exact list of files to include in your source distribution. The
+:command:`sdist` command processes this template and generates a manifest based
+on its instructions and what it finds in the filesystem.
+
+If you prefer to roll your own manifest file, the format is simple: one filename
+per line, regular files (or symlinks to them) only. If you do supply your own
+:file:`MANIFEST`, you must specify everything: the default set of files
+described above does not apply in this case.
+
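+A hand-written :file:`MANIFEST` might therefore look like this (the file names
+are invented for illustration):
+
+.. code-block:: none
+
+    README.txt
+    setup.py
+    mypkg/__init__.py
+    mypkg/data/table.dat
+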
+.. versionchanged:: 3.1
+ An existing generated :file:`MANIFEST` will be regenerated without
+ :command:`sdist` comparing its modification time to the one of
+ :file:`MANIFEST.in` or :file:`setup.py`.
+
+.. versionchanged:: 3.1.3
+ :file:`MANIFEST` files start with a comment indicating they are generated.
+ Files without this comment are not overwritten or removed.
+
+.. versionchanged:: 3.2.2
+ :command:`sdist` will read a :file:`MANIFEST` file if no :file:`MANIFEST.in`
+ exists, like it used to do.
+
+.. versionchanged:: 3.7
+ :file:`README.rst` is now included in the list of distutils standard READMEs.
+
+
+The manifest template has one command per line, where each command specifies a
+set of files to include or exclude from the source distribution. For an
+example, again we turn to the Distutils' own manifest template:
+
+.. code-block:: none
+
+ include *.txt
+ recursive-include examples *.txt *.py
+ prune examples/sample?/build
+
+The meanings should be fairly clear: include all files in the distribution root
+matching :file:`\*.txt`, all files anywhere under the :file:`examples` directory
+matching :file:`\*.txt` or :file:`\*.py`, and exclude all directories matching
+:file:`examples/sample?/build`. All of this is done *after* the standard
+include set, so you can exclude files from the standard set with explicit
+instructions in the manifest template. (Or, you can use the
+:option:`!--no-defaults` option to disable the standard set entirely.) There are
+several other commands available in the manifest template mini-language; see
+section :ref:`sdist-cmd`.
+
+The order of commands in the manifest template matters: initially, we have the
+list of default files as described above, and each command in the template adds
+to or removes from that list of files. Once we have fully processed the
+manifest template, we remove files that should not be included in the source
+distribution:
+
+* all files in the Distutils "build" tree (default :file:`build/`)
+
+* all files in directories named :file:`RCS`, :file:`CVS`, :file:`.svn`,
+ :file:`.hg`, :file:`.git`, :file:`.bzr` or :file:`_darcs`
+
+Now we have our complete list of files, which is written to the manifest for
+future reference, and then used to build the source distribution archive(s).
+
+You can disable the default set of included files with the
+:option:`!--no-defaults` option, and you can disable the standard exclude set
+with :option:`!--no-prune`.
+
+Following the Distutils' own manifest template, let's trace how the
+:command:`sdist` command builds the list of files to include in the Distutils
+source distribution:
+
+#. include all Python source files in the :file:`distutils` and
+ :file:`distutils/command` subdirectories (because packages corresponding to
+ those two directories were mentioned in the ``packages`` option in the
+ setup script---see section :ref:`setup-script`)
+
+#. include :file:`README.txt`, :file:`setup.py`, and :file:`setup.cfg` (standard
+ files)
+
+#. include :file:`test/test\*.py` (standard files)
+
+#. include :file:`\*.txt` in the distribution root (this will find
+ :file:`README.txt` a second time, but such redundancies are weeded out later)
+
+#. include anything matching :file:`\*.txt` or :file:`\*.py` in the sub-tree
+ under :file:`examples`,
+
+#. exclude all files in the sub-trees starting at directories matching
+ :file:`examples/sample?/build`\ ---this may exclude files included by the
+ previous two steps, so it's important that the ``prune`` command in the manifest
+ template comes after the ``recursive-include`` command
+
+#. exclude the entire :file:`build` tree, and any :file:`RCS`, :file:`CVS`,
+ :file:`.svn`, :file:`.hg`, :file:`.git`, :file:`.bzr` and :file:`_darcs`
+ directories
+
+Just like in the setup script, file and directory names in the manifest template
+should always be slash-separated; the Distutils will take care of converting
+them to the standard representation on your platform. That way, the manifest
+template is portable across operating systems.
+
+
+.. _manifest-options:
+
+Manifest-related options
+========================
+
+The normal course of operations for the :command:`sdist` command is as follows:
+
+* if the manifest file (:file:`MANIFEST` by default) exists and the first line
+ does not have a comment indicating it is generated from :file:`MANIFEST.in`,
+ then it is used as is, unaltered
+
+* if the manifest file doesn't exist or has been previously automatically
+ generated, read :file:`MANIFEST.in` and create the manifest
+
+* if neither :file:`MANIFEST` nor :file:`MANIFEST.in` exist, create a manifest
+ with just the default file set
+
+* use the list of files now in :file:`MANIFEST` (either just generated or read
+ in) to create the source distribution archive(s)
+
+There are a couple of options that modify this behaviour. First, use the
+:option:`!--no-defaults` and :option:`!--no-prune` options to disable the
+standard "include" and "exclude" sets.
+
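+For example, to build the file list entirely from your manifest template (this
+invocation is only illustrative)::
+
+    python setup.py sdist --no-defaults --no-prune
+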
+Second, you might just want to (re)generate the manifest, but not create a source
+distribution::
+
+ python setup.py sdist --manifest-only
+
+:option:`!-o` is a shortcut for :option:`!--manifest-only`.
diff --git a/docs/deprecated/distutils/uploading.rst b/docs/deprecated/distutils/uploading.rst
new file mode 100644
index 0000000..4c391ca
--- /dev/null
+++ b/docs/deprecated/distutils/uploading.rst
@@ -0,0 +1,8 @@
+:orphan:
+
+***************************************
+Uploading Packages to the Package Index
+***************************************
+
+References to up-to-date PyPI documentation can be found at
+:ref:`publishing-python-packages`.
diff --git a/docs/easy_install.txt b/docs/deprecated/easy_install.rst
index 5c99234..3cf3bea 100644
--- a/docs/easy_install.txt
+++ b/docs/deprecated/easy_install.rst
@@ -2,6 +2,12 @@
Easy Install
============
+.. warning::
+ Easy Install is deprecated. Do not use it. Instead use pip. If
+ you think you need Easy Install, please reach out to the PyPA
+ team (a ticket to pip or setuptools is fine), describing your
+ use-case.
+
Easy Install is a python module (``easy_install``) bundled with ``setuptools``
that lets you automatically download, build, install, and manage Python
packages.
@@ -17,10 +23,7 @@ bug -- and then do so via list discussion first.)
(Also, if you'd like to learn about how you can use ``setuptools`` to make your
own packages work better with EasyInstall, or provide EasyInstall-like features
without requiring your users to use EasyInstall directly, you'll probably want
-to check out the full `setuptools`_ documentation as well.)
-
-.. contents:: **Table of Contents**
-
+to check out the full documentation as well.)
Using "Easy Install"
====================
@@ -31,11 +34,11 @@ Using "Easy Install"
Installing "Easy Install"
-------------------------
-Please see the `setuptools PyPI page <https://pypi.org/project/setuptools/>`_
+Please see the :pypi:`setuptools` project page on the package index
for download links and basic installation instructions for each of the
supported platforms.
-You will need at least Python 3.3 or 2.7. An ``easy_install`` script will be
+You will need at least Python 3.5 or 2.7. An ``easy_install`` script will be
installed in the normal location for Python scripts on your platform.
Note that the instructions on the setuptools PyPI page assume that you are
@@ -311,8 +314,8 @@ Note that instead of changing your ``PATH`` to include the Python scripts
directory, you can also retarget the installation location for scripts so they
go on a directory that's already on the ``PATH``. For more information see
`Command-Line Options`_ and `Configuration Files`_. During installation,
-pass command line options (such as ``--script-dir``) to
-``ez_setup.py`` to control where ``easy_install.exe`` will be installed.
+pass command line options (such as ``--script-dir``) to control where
+scripts will be installed.
Windows Executable Launcher
@@ -1017,10 +1020,7 @@ of the User installation scheme. "virtualenv" provides a version of ``easy_inst
scoped to the cloned python install and is used in the normal way. "virtualenv" does offer various features
that the User installation scheme alone does not provide, e.g. the ability to hide the main python site-packages.
-Please refer to the `virtualenv`_ documentation for more details.
-
-.. _virtualenv: https://pypi.org/project/virtualenv/
-
+Please refer to the :pypi:`virtualenv` documentation for more details.
Package Index "API"
@@ -1077,546 +1077,3 @@ EasyInstall to be able to look up and download packages:
8. If a package index is accessed via a ``file://`` URL, then EasyInstall will
automatically use ``index.html`` files, if present, when trying to read a
directory with a trailing ``/`` on the URL.
-
-
-Backward Compatibility
-~~~~~~~~~~~~~~~~~~~~~~
-
-Package indexes that wish to support setuptools versions prior to 0.6b4 should
-also follow these rules:
-
-* Homepage and download links must be preceded with ``"<th>Home Page"`` or
- ``"<th>Download URL"``, in addition to (or instead of) the ``rel=""``
- attributes on the actual links. These marker strings do not need to be
- visible, or uncommented, however! For example, the following is a valid
- homepage link that will work with any version of setuptools::
-
- <li>
- <strong>Home Page:</strong>
- <!-- <th>Home Page -->
- <a rel="homepage" href="http://sqlobject.org">http://sqlobject.org</a>
- </li>
-
- Even though the marker string is in an HTML comment, older versions of
- EasyInstall will still "see" it and know that the link that follows is the
- project's home page URL.
-
-* The pages described by paragraph 3(b) of the preceding section *must*
- contain the string ``"Index of Packages</title>"`` somewhere in their text.
- This can be inside of an HTML comment, if desired, and it can be anywhere
- in the page. (Note: this string MUST NOT appear on normal project pages, as
- described in paragraphs 2 and 3(a)!)
-
-In addition, for compatibility with PyPI versions that do not use ``#md5=``
-fragment IDs, EasyInstall uses the following regular expression to match PyPI's
-displayed MD5 info (broken onto two lines for readability)::
-
- <a href="([^"#]+)">([^<]+)</a>\n\s+\(<a href="[^?]+\?:action=show_md5
- &amp;digest=([0-9a-f]{32})">md5</a>\)
-
-History
-=======
-
-0.6c9
- * Fixed ``win32.exe`` support for .pth files, so unnecessary directory nesting
- is flattened out in the resulting egg. (There was a case-sensitivity
- problem that affected some distributions, notably ``pywin32``.)
-
- * Prevent ``--help-commands`` and other junk from showing under Python 2.5
- when running ``easy_install --help``.
-
- * Fixed GUI scripts sometimes not executing on Windows
-
- * Fixed not picking up dependency links from recursive dependencies.
-
- * Only make ``.py``, ``.dll`` and ``.so`` files executable when unpacking eggs
-
- * Changes for Jython compatibility
-
- * Improved error message when a requirement is also a directory name, but the
- specified directory is not a source package.
-
- * Fixed ``--allow-hosts`` option blocking ``file:`` URLs
-
- * Fixed HTTP SVN detection failing when the page title included a project
- name (e.g. on SourceForge-hosted SVN)
-
- * Fix Jython script installation to handle ``#!`` lines better when
- ``sys.executable`` is a script.
-
- * Removed use of deprecated ``md5`` module if ``hashlib`` is available
-
- * Keep site directories (e.g. ``site-packages``) from being included in
- ``.pth`` files.
-
-0.6c7
- * ``ftp:`` download URLs now work correctly.
-
- * The default ``--index-url`` is now ``https://pypi.python.org/simple``, to use
- the Python Package Index's new simpler (and faster!) REST API.
-
-0.6c6
- * EasyInstall no longer aborts the installation process if a URL it wants to
- retrieve can't be downloaded, unless the URL is an actual package download.
- Instead, it issues a warning and tries to keep going.
-
- * Fixed distutils-style scripts originally built on Windows having their line
- endings doubled when installed on any platform.
-
- * Added ``--local-snapshots-ok`` flag, to allow building eggs from projects
- installed using ``setup.py develop``.
-
- * Fixed not HTML-decoding URLs scraped from web pages
-
-0.6c5
- * Fixed ``.dll`` files on Cygwin not having executable permissions when an egg
- is installed unzipped.
-
-0.6c4
- * Added support for HTTP "Basic" authentication using ``http://user:pass@host``
- URLs. If a password-protected page contains links to the same host (and
- protocol), those links will inherit the credentials used to access the
- original page.
-
- * Removed all special support for Sourceforge mirrors, as Sourceforge's
- mirror system now works well for non-browser downloads.
-
- * Fixed not recognizing ``win32.exe`` installers that included a custom
- bitmap.
-
- * Fixed not allowing ``os.open()`` of paths outside the sandbox, even if they
- are opened read-only (e.g. reading ``/dev/urandom`` for random numbers, as
- is done by ``os.urandom()`` on some platforms).
-
- * Fixed a problem with ``.pth`` testing on Windows when ``sys.executable``
- has a space in it (e.g., the user installed Python to a ``Program Files``
- directory).
-
-0.6c3
- * You can once again use "python -m easy_install" with Python 2.4 and above.
-
- * Python 2.5 compatibility fixes added.
-
-0.6c2
- * Windows script wrappers now support quoted arguments and arguments
- containing spaces. (Patch contributed by Jim Fulton.)
-
- * The ``ez_setup.py`` script now actually works when you put a setuptools
- ``.egg`` alongside it for bootstrapping an offline machine.
-
- * A writable installation directory on ``sys.path`` is no longer required to
- download and extract a source distribution using ``--editable``.
-
- * Generated scripts now use ``-x`` on the ``#!`` line when ``sys.executable``
- contains non-ASCII characters, to prevent deprecation warnings about an
- unspecified encoding when the script is run.
-
-0.6c1
- * EasyInstall now includes setuptools version information in the
- ``User-Agent`` string sent to websites it visits.
-
-0.6b4
- * Fix creating Python wrappers for non-Python scripts
-
- * Fix ``ftp://`` directory listing URLs from causing a crash when used in the
- "Home page" or "Download URL" slots on PyPI.
-
- * Fix ``sys.path_importer_cache`` not being updated when an existing zipfile
- or directory is deleted/overwritten.
-
- * Fix not recognizing HTML 404 pages from package indexes.
-
- * Allow ``file://`` URLs to be used as a package index. URLs that refer to
- directories will use an internally-generated directory listing if there is
- no ``index.html`` file in the directory.
-
- * Allow external links in a package index to be specified using
- ``rel="homepage"`` or ``rel="download"``, without needing the old
- PyPI-specific visible markup.
-
- * Suppressed warning message about possibly-misspelled project name, if an egg
- or link for that project name has already been seen.
-
-0.6b3
- * Fix local ``--find-links`` eggs not being copied except with
- ``--always-copy``.
-
- * Fix sometimes not detecting local packages installed outside of "site"
- directories.
-
- * Fix mysterious errors during initial ``setuptools`` install, caused by
- ``ez_setup`` trying to run ``easy_install`` twice, due to a code fallthru
- after deleting the egg from which it's running.
-
-0.6b2
- * Don't install or update a ``site.py`` patch when installing to a
- ``PYTHONPATH`` directory with ``--multi-version``, unless an
- ``easy-install.pth`` file is already in use there.
-
- * Construct ``.pth`` file paths in such a way that installing an egg whose
- name begins with ``import`` doesn't cause a syntax error.
-
- * Fixed a bogus warning message that wasn't updated since the 0.5 versions.
-
-0.6b1
- * Better ambiguity management: accept ``#egg`` name/version even if processing
- what appears to be a correctly-named distutils file, and ignore ``.egg``
- files with no ``-``, since valid Python ``.egg`` files always have a version
- number (but Scheme eggs often don't).
-
- * Support ``file://`` links to directories in ``--find-links``, so that
- easy_install can build packages from local source checkouts.
-
- * Added automatic retry for Sourceforge mirrors. The new download process is
- to first just try dl.sourceforge.net, then randomly select mirror IPs and
- remove ones that fail, until something works. The removed IPs stay removed
- for the remainder of the run.
-
- * Ignore bdist_dumb distributions when looking at download URLs.
-
-0.6a11
- * Process ``dependency_links.txt`` if found in a distribution, by adding the
- URLs to the list for scanning.
-
- * Use relative paths in ``.pth`` files when eggs are being installed to the
- same directory as the ``.pth`` file. This maximizes portability of the
- target directory when building applications that contain eggs.
-
- * Added ``easy_install-N.N`` script(s) for convenience when using multiple
- Python versions.
-
- * Added automatic handling of installation conflicts. Eggs are now shifted to
- the front of sys.path, in an order consistent with where they came from,
- making EasyInstall seamlessly co-operate with system package managers.
-
- The ``--delete-conflicting`` and ``--ignore-conflicts-at-my-risk`` options
- are now no longer necessary, and will generate warnings at the end of a
- run if you use them.
-
- * Don't recursively traverse subdirectories given to ``--find-links``.
-
-0.6a10
- * Added exhaustive testing of the install directory, including a spawn test
- for ``.pth`` file support, and directory writability/existence checks. This
- should virtually eliminate the need to set or configure ``--site-dirs``.
-
- * Added ``--prefix`` option for more do-what-I-mean-ishness in the absence of
- RTFM-ing. :)
-
- * Enhanced ``PYTHONPATH`` support so that you don't have to put any eggs on it
- manually to make it work. ``--multi-version`` is no longer a silent
- default; you must explicitly use it if installing to a non-PYTHONPATH,
- non-"site" directory.
-
- * Expand ``$variables`` used in the ``--site-dirs``, ``--build-directory``,
- ``--install-dir``, and ``--script-dir`` options, whether on the command line
- or in configuration files.
-
- * Improved SourceForge mirror processing to work faster and be less affected
- by transient HTML changes made by SourceForge.
-
- * PyPI searches now use the exact spelling of requirements specified on the
- command line or in a project's ``install_requires``. Previously, a
- normalized form of the name was used, which could lead to unnecessary
- full-index searches when a project's name had an underscore (``_``) in it.
-
- * EasyInstall can now download bare ``.py`` files and wrap them in an egg,
- as long as you include an ``#egg=name-version`` suffix on the URL, or if
- the ``.py`` file is listed as the "Download URL" on the project's PyPI page.
- This allows third parties to "package" trivial Python modules just by
- linking to them (e.g. from within their own PyPI page or download links
- page).
-
- * The ``--always-copy`` option now skips "system" and "development" eggs since
- they can't be reliably copied. Note that this may cause EasyInstall to
- choose an older version of a package than what you expected, or it may cause
- downloading and installation of a fresh version of what's already installed.
-
- * The ``--find-links`` option previously scanned all supplied URLs and
- directories as early as possible, but now only directories and direct
- archive links are scanned immediately. URLs are not retrieved unless a
- package search was already going to go online due to a package not being
- available locally, or due to the use of the ``--update`` or ``-U`` option.
-
- * Fixed the annoying ``--help-commands`` wart.
-
-0.6a9
- * Fixed ``.pth`` file processing picking up nested eggs (i.e. ones inside
- "baskets") when they weren't explicitly listed in the ``.pth`` file.
-
- * If more than one URL appears to describe the exact same distribution, prefer
- the shortest one. This helps to avoid "table of contents" CGI URLs like the
- ones on effbot.org.
-
- * Quote arguments to python.exe (including python's path) to avoid problems
- when Python (or a script) is installed in a directory whose name contains
- spaces on Windows.
-
- * Support full roundtrip translation of eggs to and from ``bdist_wininst``
- format. Running ``bdist_wininst`` on a setuptools-based package wraps the
- egg in an .exe that will safely install it as an egg (i.e., with metadata
- and entry-point wrapper scripts), and ``easy_install`` can turn the .exe
- back into an ``.egg`` file or directory and install it as such.
-
-0.6a8
- * Update for changed SourceForge mirror format
-
- * Fixed not installing dependencies for some packages fetched via Subversion
-
- * Fixed dependency installation with ``--always-copy`` not using the same
- dependency resolution procedure as other operations.
-
- * Fixed not fully removing temporary directories on Windows, if a Subversion
- checkout left read-only files behind
-
- * Fixed some problems building extensions when Pyrex was installed, especially
- with Python 2.4 and/or packages using SWIG.
-
-0.6a7
- * Fixed not being able to install Windows script wrappers using Python 2.3
-
-0.6a6
- * Added support for "traditional" PYTHONPATH-based non-root installation, and
- also the convenient ``virtual-python.py`` script, based on a contribution
- by Ian Bicking. The setuptools egg now contains a hacked ``site`` module
- that makes the PYTHONPATH-based approach work with .pth files, so that you
- can get the full EasyInstall feature set on such installations.
-
- * Added ``--no-deps`` and ``--allow-hosts`` options.
-
- * Improved Windows ``.exe`` script wrappers so that the script can have the
- same name as a module without confusing Python.
-
- * Changed dependency processing so that it's breadth-first, allowing a
- depender's preferences to override those of a dependee, to prevent conflicts
- when a lower version is acceptable to the dependee, but not the depender.
- Also, ensure that currently installed/selected packages aren't given
- precedence over ones desired by a package being installed, which could
- cause conflict errors.
-
-0.6a3
- * Improved error message when trying to use old ways of running
- ``easy_install``. Removed the ability to run via ``python -m`` or by
- running ``easy_install.py``; ``easy_install`` is the command to run on all
- supported platforms.
-
- * Improved wrapper script generation and runtime initialization so that a
- VersionConflict doesn't occur if you later install a competing version of a
- needed package as the default version of that package.
-
- * Fixed a problem parsing version numbers in ``#egg=`` links.
-
-0.6a2
- * EasyInstall can now install "console_scripts" defined by packages that use
- ``setuptools`` and define appropriate entry points. On Windows, console
- scripts get an ``.exe`` wrapper so you can just type their name. On other
- platforms, the scripts are installed without a file extension.
-
- * Using ``python -m easy_install`` or running ``easy_install.py`` is now
- DEPRECATED, since an ``easy_install`` wrapper is now available on all
- platforms.
-
-0.6a1
- * EasyInstall now does MD5 validation of downloads from PyPI, or from any link
- that has an "#md5=..." trailer with a 32-digit lowercase hex md5 digest.
-
- * EasyInstall now handles symlinks in target directories by removing the link,
- rather than attempting to overwrite the link's destination. This makes it
- easier to set up an alternate Python "home" directory (as described above in
- the `Non-Root Installation`_ section).
-
- * Added support for handling MacOS platform information in ``.egg`` filenames,
- based on a contribution by Kevin Dangoor. You may wish to delete and
- reinstall any eggs whose filename includes "darwin" and "Power_Macintosh",
- because the format for this platform information has changed so that minor
- OS X upgrades (such as 10.4.1 to 10.4.2) do not cause eggs built with a
- previous OS version to become obsolete.
-
- * easy_install's dependency processing algorithms have changed. When using
- ``--always-copy``, it now ensures that dependencies are copied too. When
- not using ``--always-copy``, it tries to use a single resolution loop,
- rather than recursing.
-
- * Fixed installing extra ``.pyc`` or ``.pyo`` files for scripts with ``.py``
- extensions.
-
- * Added ``--site-dirs`` option to allow adding custom "site" directories.
- Made ``easy-install.pth`` work in platform-specific alternate site
- directories (e.g. ``~/Library/Python/2.x/site-packages`` on Mac OS X).
-
- * If you manually delete the current version of a package, the next run of
- EasyInstall against the target directory will now remove the stray entry
- from the ``easy-install.pth`` file.
-
- * EasyInstall now recognizes URLs with a ``#egg=project_name`` fragment ID
- as pointing to the named project's source checkout. Such URLs have a lower
- match precedence than any other kind of distribution, so they'll only be
- used if they have a higher version number than any other available
- distribution, or if you use the ``--editable`` option. The ``#egg``
- fragment can contain a version if it's formatted as ``#egg=proj-ver``,
- where ``proj`` is the project name, and ``ver`` is the version number. You
- *must* use the format for these values that the ``bdist_egg`` command uses;
- i.e., all non-alphanumeric runs must be condensed to single underscore
- characters.
-
- * Added the ``--editable`` option; see `Editing and Viewing Source Packages`_
- above for more info. Also, slightly changed the behavior of the
- ``--build-directory`` option.
-
- * Fixed the setup script sandbox facility not recognizing certain paths as
- valid on case-insensitive platforms.
-
-0.5a12
- * Fix ``python -m easy_install`` not working due to setuptools being installed
- as a zipfile. Update safety scanner to check for modules that might be used
- as ``python -m`` scripts.
-
- * Misc. fixes for win32.exe support, including changes to support Python 2.4's
- changed ``bdist_wininst`` format.
-
-0.5a10
- * Put the ``easy_install`` module back in as a module, as it's needed for
- ``python -m`` to run it!
-
- * Allow ``--find-links/-f`` to accept local directories or filenames as well
- as URLs.
-
-0.5a9
- * EasyInstall now automatically detects when an "unmanaged" package or
- module is going to be on ``sys.path`` ahead of a package you're installing,
- thereby preventing the newer version from being imported. By default, it
- will abort installation to alert you of the problem, but there are also
- new options (``--delete-conflicting`` and ``--ignore-conflicts-at-my-risk``)
- available to change the default behavior. (Note: this new feature doesn't
- take effect for egg files that were built with older ``setuptools``
- versions, because they lack the new metadata file required to implement it.)
-
- * The ``easy_install`` distutils command now uses ``DistutilsError`` as its
- base error type for errors that should just issue a message to stderr and
- exit the program without a traceback.
-
- * EasyInstall can now be given a path to a directory containing a setup
- script, and it will attempt to build and install the package there.
-
- * EasyInstall now performs a safety analysis on module contents to determine
- whether a package is likely to run in zipped form, and displays
- information about what modules may be doing introspection that would break
- when running as a zipfile.
-
- * Added the ``--always-unzip/-Z`` option, to force unzipping of packages that
- would ordinarily be considered safe to unzip, and changed the meaning of
- ``--zip-ok/-z`` to "always leave everything zipped".
-
-0.5a8
- * There is now a separate documentation page for `setuptools`_; revision
- history that's not specific to EasyInstall has been moved to that page.
-
- .. _setuptools: http://peak.telecommunity.com/DevCenter/setuptools
-
-0.5a5
- * Made ``easy_install`` a standard ``setuptools`` command, moving it from
- the ``easy_install`` module to ``setuptools.command.easy_install``. Note
- that if you were importing or extending it, you must now change your imports
- accordingly. ``easy_install.py`` is still installed as a script, but not as
- a module.
-
-0.5a4
- * Added ``--always-copy/-a`` option to always copy needed packages to the
- installation directory, even if they're already present elsewhere on
- sys.path. (In previous versions, this was the default behavior, but now
- you must request it.)
-
- * Added ``--upgrade/-U`` option to force checking PyPI for latest available
- version(s) of all packages requested by name and version, even if a matching
- version is available locally.
-
- * Added automatic installation of dependencies declared by a distribution
- being installed. These dependencies must be listed in the distribution's
- ``EGG-INFO`` directory, so the distribution has to have declared its
- dependencies by using setuptools. If a package has requirements it didn't
- declare, you'll still have to deal with them yourself. (E.g., by asking
- EasyInstall to find and install them.)
-
- * Added the ``--record`` option to ``easy_install`` for the benefit of tools
- that run ``setup.py install --record=filename`` on behalf of another
- packaging system.)
-
-0.5a3
- * Fixed not setting script permissions to allow execution.
-
- * Improved sandboxing so that setup scripts that want a temporary directory
- (e.g. pychecker) can still run in the sandbox.
-
-0.5a2
- * Fix stupid stupid refactoring-at-the-last-minute typos. :(
-
-0.5a1
- * Added support for converting ``.win32.exe`` installers to eggs on the fly.
- EasyInstall will now recognize such files by name and install them.
-
- * Fixed a problem with picking the "best" version to install (versions were
- being sorted as strings, rather than as parsed values)
-
-0.4a4
- * Added support for the distutils "verbose/quiet" and "dry-run" options, as
- well as the "optimize" flag.
-
- * Support downloading packages that were uploaded to PyPI (by scanning all
- links on package pages, not just the homepage/download links).
-
-0.4a3
- * Add progress messages to the search/download process so that you can tell
- what URLs it's reading to find download links. (Hopefully, this will help
- people report out-of-date and broken links to package authors, and to tell
- when they've asked for a package that doesn't exist.)
-
-0.4a2
- * Added support for installing scripts
-
- * Added support for setting options via distutils configuration files, and
- using distutils' default options as a basis for EasyInstall's defaults.
-
- * Renamed ``--scan-url/-s`` to ``--find-links/-f`` to free up ``-s`` for the
- script installation directory option.
-
- * Use ``urllib2`` instead of ``urllib``, to allow use of ``https:`` URLs if
- Python includes SSL support.
-
-0.4a1
- * Added ``--scan-url`` and ``--index-url`` options, to scan download pages
- and search PyPI for needed packages.
-
-0.3a4
- * Restrict ``--build-directory=DIR/-b DIR`` option to only be used with single
- URL installs, to avoid running the wrong setup.py.
-
-0.3a3
- * Added ``--build-directory=DIR/-b DIR`` option.
-
- * Added "installation report" that explains how to use 'require()' when doing
- a multiversion install or alternate installation directory.
-
- * Added SourceForge mirror auto-select (Contributed by Ian Bicking)
-
- * Added "sandboxing" that stops a setup script from running if it attempts to
- write to the filesystem outside of the build area
-
- * Added more workarounds for packages with quirky ``install_data`` hacks
-
-0.3a2
- * Added subversion download support for ``svn:`` and ``svn+`` URLs, as well as
- automatic recognition of HTTP subversion URLs (Contributed by Ian Bicking)
-
- * Misc. bug fixes
-
-0.3a1
- * Initial release.
-
-
-Future Plans
-============
-
-* Additional utilities to list/remove/verify packages
-* Signature checking? SSL? Ability to suppress PyPI search?
-* Display byte progress meter when downloading distributions and long pages?
-* Redirect stdout/stderr to log during run_setup?
diff --git a/docs/deprecated/functionalities.rst b/docs/deprecated/functionalities.rst
new file mode 100644
index 0000000..7213c5d
--- /dev/null
+++ b/docs/deprecated/functionalities.rst
@@ -0,0 +1,33 @@
+"Eggsecutable" Scripts
+----------------------
+
+.. deprecated:: 45.3.0
+
+Occasionally, there are situations where it's desirable to make an ``.egg``
+file directly executable. You can do this by including an entry point such
+as the following::
+
+ setup(
+ # other arguments here...
+ entry_points={
+ "setuptools.installation": [
+ "eggsecutable = my_package.some_module:main_func",
+ ]
+ }
+ )
+
+Any eggs built from the above setup script will include a short executable
+prelude that imports and calls ``main_func()`` from ``my_package.some_module``.
+The prelude can be run on Unix-like platforms (including Mac and Linux) by
+invoking the egg with ``/bin/sh``, or by enabling execute permissions on the
+``.egg`` file. For the executable prelude to run, the appropriate version of
+Python must be available via the ``PATH`` environment variable, under its
+"long" name. That is, if the egg is built for Python 2.3, there must be a
+``python2.3`` executable present in a directory on ``PATH``.
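+
+For illustration, a minimal sketch of the module that such an entry point
+could reference (``my_package``, ``some_module`` and ``main_func`` are just
+the placeholder names from the example above, not a real project)::
+
+    # my_package/some_module.py
+    import sys
+
+    def main_func():
+        # Whatever the project should do when the egg is executed directly.
+        print("Hello from an eggsecutable egg!")
+        return 0
+
+    if __name__ == "__main__":
+        sys.exit(main_func())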
+
+IMPORTANT NOTE: Eggs with an "eggsecutable" header cannot be renamed, or
+invoked via symlinks. They *must* be invoked using their original filename, in
+order to ensure that, once running, ``pkg_resources`` will know what project
+and version is in use. The header script will check this and exit with an
+error if the ``.egg`` file has been renamed or is invoked via a symlink that
+changes its base name.
diff --git a/docs/deprecated/index.rst b/docs/deprecated/index.rst
new file mode 100644
index 0000000..59fc7be
--- /dev/null
+++ b/docs/deprecated/index.rst
@@ -0,0 +1,20 @@
+======================================================
+Guides on backward compatibility & deprecated practice
+======================================================
+
+``Setuptools`` has undergone tremendous changes since its debut. As its
+development continues to roll forward, many of the practices and mechanisms it
+established are now considered deprecated. They remain relevant, however,
+because a plethora of libraries continue to depend on them, and maintainers
+often need to understand them in order to support backward compatibility.
+This guide aims to provide the essential information for those objectives.
+
+.. toctree::
+ :maxdepth: 1
+
+ python_eggs
+ easy_install
+ distutils/index
+ distutils-legacy
+ functionalities
diff --git a/docs/formats.txt b/docs/deprecated/python_eggs.rst
index a182eb9..59d1adc 100644
--- a/docs/formats.txt
+++ b/docs/deprecated/python_eggs.rst
@@ -6,9 +6,6 @@ STOP! This is not the first document you should read!
-.. contents:: **Table of Contents**
-
-
----------------------
Eggs and their Formats
----------------------
@@ -299,11 +296,8 @@ specified by the ``setup_requires`` parameter to the Distribution.
A list of dependency URLs, one per line, as specified using the
``dependency_links`` keyword to ``setup()``. These may be direct
download URLs, or the URLs of web pages containing direct download
-links, and will be used by EasyInstall to find dependencies, as though
-the user had manually provided them via the ``--find-links`` command
-line option. Please see the setuptools manual and EasyInstall manual
-for more information on specifying this option, and for information on
-how EasyInstall processes ``--find-links`` URLs.
+links. Please see the setuptools manual for more information on
+specifying this option.
``depends.txt`` -- Obsolete, do not create!
@@ -679,4 +673,3 @@ to facilitate introspection of installed scripts, and their relationship
to installed eggs. For example, an uninstallation tool could use this
data to identify what scripts can safely be removed, and/or identify
what scripts would stop working if a particular egg is uninstalled.
-
diff --git a/docs/developer-guide.txt b/docs/development/developer-guide.rst
index b2c1a0c..d2cf159 100644
--- a/docs/developer-guide.txt
+++ b/docs/development/developer-guide.rst
@@ -5,9 +5,6 @@ Developer's Guide for Setuptools
If you want to know more about contributing on Setuptools, this is the place.
-.. contents:: **Table of Contents**
-
-
-------------------
Recommended Reading
-------------------
@@ -23,17 +20,17 @@ contribution.
Project Management
------------------
-Setuptools is maintained primarily in Github at `this home
+Setuptools is maintained primarily in GitHub at `this home
<https://github.com/pypa/setuptools>`_. Setuptools is maintained under the
Python Packaging Authority (PyPA) with several core contributors. All bugs
-for Setuptools are filed and the canonical source is maintained in Github.
+for Setuptools are filed and the canonical source is maintained in GitHub.
-User support and discussions are done through the issue tracker (for specific)
-issues, through the distutils-sig mailing list, or on IRC (Freenode) at
-#pypa.
+User support and discussions are done through
+`GitHub Discussions <https://github.com/pypa/setuptools/discussions>`_,
+or the issue tracker (for specific issues).
-Discussions about development happen on the pypa-dev mailing list or on
-`Gitter <https://gitter.im/pypa/setuptools>`_.
+Discussions about development happen on GitHub Discussions or
+the ``setuptools`` channel on `PyPA Discord <https://discord.com/invite/pypa>`_.
-----------------
Authoring Tickets
@@ -44,7 +41,7 @@ describing the motivation behind making changes. First search to see if a
ticket already exists for your issue. If not, create one. Try to think from
the perspective of the reader. Explain what behavior you expected, what you
got instead, and what factors might have contributed to the unexpected
-behavior. In Github, surround a block of code or traceback with the triple
+behavior. In GitHub, surround a block of code or traceback with the triple
backtick "\`\`\`" so that it is formatted nicely.
Filing a ticket provides a forum for justification, discussion, and
@@ -57,37 +54,36 @@ Setuptools makes extensive use of hyperlinks to tickets in the changelog so
that system integrators and other users can get a quick summary, but then
jump to the in-depth discussion about any subject referenced.
------------
-Source Code
------------
-
-Grab the code at Github::
+---------------------
+Making a pull request
+---------------------
- $ git checkout https://github.com/pypa/setuptools
+When making a pull request, please
+:ref:`include a short summary of the changes <Adding change notes
+with your PRs>` and a reference to any issue tickets that the PR is
+intended to solve.
+All PRs with code changes should include tests. All changes should
+include a changelog entry.
-If you want to contribute changes, we recommend you fork the repository on
-Github, commit the changes to your repository, and then make a pull request
-on Github. If you make some changes, don't forget to:
+.. include:: ../../changelog.d/README.rst
-- add a note in CHANGES.rst
-
-Please commit all changes in the 'master' branch against the latest available
-commit or for bug-fixes, against an earlier commit or release in which the
-bug occurred.
+-------------------
+Auto-Merge Requests
+-------------------
-If you find yourself working on more than one issue at a time, Setuptools
-generally prefers Git-style branches, so use Mercurial bookmarks or Git
-branches or multiple forks to maintain separate efforts.
+To support running all code through CI, even lightweight contributions,
+the project employs Mergify to auto-merge pull requests tagged as
+auto-merge.
-The Continuous Integration tests that validate every release are run
-from this repository.
+Use ``hub pull-request -l auto-merge`` to create such a pull request
+from the command line after pushing a new branch.
-------
Testing
-------
-The primary tests are run using tox. To run the tests, first make
-sure you have tox installed, then invoke it::
+The primary tests are run using tox. Make sure you have tox installed,
+and invoke it::
$ tox
@@ -106,10 +102,32 @@ Setuptools follows ``semver``.
Building Documentation
----------------------
-Setuptools relies on the Sphinx system for building documentation.
-To accommodate RTD, docs must be built from the docs/ directory.
+Setuptools relies on the `Sphinx`_ system for building documentation.
+The `published documentation`_ is hosted on Read the Docs.
+
+To build the docs locally, use tox::
+
+ $ tox -e docs
+
+.. _Sphinx: http://www.sphinx-doc.org/en/master/
+.. _published documentation: https://setuptools.pypa.io/en/latest/
+
+---------------------
+Vendored Dependencies
+---------------------
+
+Setuptools has some dependencies, but due to `bootstrapping issues
+<https://github.com/pypa/setuptools/issues/980>`_, those dependencies
+cannot be declared as they won't be resolved soon enough to build
+setuptools from source. Eventually, this limitation may be lifted as
+PEP 517/518 reach ubiquitous adoption, but for now, Setuptools
+cannot declare dependencies other than through
+``setuptools/_vendor/vendored.txt`` and
+``pkg_resources/_vendor/vendored.txt``.
+
+All the dependencies specified in these files are "vendorized" using a
+simple Python script ``tools/vendor.py``.
-To build them, you need to have installed the requirements specified
-in docs/requirements.txt. One way to do this is to use rwt:
+To refresh the dependencies, run the following command::
- setuptools/docs$ python -m rwt -r requirements.txt -- -m sphinx . html
+ $ tox -e vendor
diff --git a/docs/development.txt b/docs/development/index.rst
index 455f038..7ee5236 100644
--- a/docs/development.txt
+++ b/docs/development/index.rst
@@ -7,7 +7,7 @@ Authority (PyPA) and led by Jason R. Coombs.
This document describes the process by which Setuptools is developed.
This document assumes the reader has some passing familiarity with
-*using* setuptools, the ``pkg_resources`` module, and EasyInstall. It
+*using* setuptools, the ``pkg_resources`` module, and pip. It
does not attempt to explain basic concepts like inter-project
dependencies, nor does it contain detailed lexical syntax for most
file formats. Neither does it explain concepts like "namespace
@@ -31,5 +31,4 @@ setuptools changes. You have been warned.
:maxdepth: 1
developer-guide
- formats
releases
diff --git a/docs/releases.txt b/docs/development/releases.rst
index 30ea084..35b415c 100644
--- a/docs/releases.txt
+++ b/docs/development/releases.rst
@@ -3,24 +3,13 @@ Release Process
===============
In order to allow for rapid, predictable releases, Setuptools uses a
-mechanical technique for releases, enacted by Travis following a
-successful build of a tagged release per
-`PyPI deployment <https://docs.travis-ci.com/user/deployment/pypi>`_.
-
-Prior to cutting a release, please check that the CHANGES.rst reflects
-the summary of changes since the last release.
-Ideally, these changelog entries would have been added
-along with the changes, but it's always good to check.
-Think about it from the
-perspective of a user not involved with the development--what would
-that person want to know about what has changed--or from the
-perspective of your future self wanting to know when a particular
-change landed.
-
-To cut a release, install and run ``bump2version {part}`` where ``part``
-is major, minor, or patch based on the scope of the changes in the
-release. Then, push the commits to the master branch. If tests pass,
-the release will be uploaded to PyPI (from the Python 3.6 tests).
+mechanical technique for releases, enacted on tagged commits by
+continuous integration.
+
+To finalize a release, run ``tox -e finalize``, review, then push
+the changes.
+
+If tests pass, the release will be uploaded to PyPI.
Release Frequency
-----------------
diff --git a/docs/history.txt b/docs/history.rst
index 8fd1dc6..ce7e77a 100644
--- a/docs/history.txt
+++ b/docs/history.rst
@@ -5,6 +5,8 @@
History
*******
+.. towncrier-draft-entries:: DRAFT, unreleased as on |today|
+
.. include:: ../CHANGES (links).rst
Credits
@@ -12,7 +14,7 @@ Credits
* The original design for the ``.egg`` format and the ``pkg_resources`` API was
co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first
- version of ``pkg_resources``, and supplied the OS X operating system version
+ version of ``pkg_resources``, and supplied the macOS operating system version
compatibility algorithm.
* Ian Bicking implemented many early "creature comfort" features of
@@ -40,7 +42,6 @@ Credits
re-invigorated the community on the project, encouraged renewed innovation,
and addressed many defects.
-* Since the merge with Distribute, Jason R. Coombs is the
- maintainer of setuptools. The project is maintained in coordination with
- the Python Packaging Authority (PyPA) and the larger Python community.
-
+* Jason R. Coombs performed the merge with Distribute, maintaining the
+ project for several years in coordination with the Python Packaging
+ Authority (PyPA).
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 0000000..0f52c36
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,31 @@
+.. image:: images/banner-640x320.svg
+ :align: center
+
+Documentation
+=============
+
+Setuptools is a fully-featured, actively-maintained, and stable library
+designed to facilitate packaging Python projects.
+
+.. toctree::
+ :maxdepth: 1
+ :hidden:
+
+ User guide <userguide/index>
+ build_meta
+ pkg_resources
+ references/keywords
+ setuptools
+
+.. toctree::
+ :caption: Project
+ :maxdepth: 1
+ :hidden:
+
+ roadmap
+ Development guide <development/index>
+ Backward compatibility & deprecated practice <deprecated/index>
+ Changelog <history>
+ artwork
+
+.. tidelift-referral-banner::
diff --git a/docs/index.txt b/docs/index.txt
deleted file mode 100644
index 74aabb5..0000000
--- a/docs/index.txt
+++ /dev/null
@@ -1,25 +0,0 @@
-Welcome to Setuptools' documentation!
-=====================================
-
-Setuptools is a fully-featured, actively-maintained, and stable library
-designed to facilitate packaging Python projects, where packaging includes:
-
- - Python package and module definitions
- - Distribution package metadata
- - Test hooks
- - Project installation
- - Platform-specific details
- - Python 3 support
-
-Documentation content:
-
-.. toctree::
- :maxdepth: 2
-
- setuptools
- easy_install
- pkg_resources
- python3
- development
- roadmap
- history
diff --git a/docs/pkg_resources.txt b/docs/pkg_resources.rst
index b40a209..21ff6dc 100644
--- a/docs/pkg_resources.txt
+++ b/docs/pkg_resources.rst
@@ -10,8 +10,12 @@ eggs, support for merging packages that have separately-distributed modules or
subpackages, and APIs for managing Python's current "working set" of active
packages.
-
-.. contents:: **Table of Contents**
+Use of ``pkg_resources`` is discouraged in favor of
+`importlib.resources <https://docs.python.org/3/library/importlib.html#module-importlib.resources>`_,
+`importlib.metadata <https://docs.python.org/3/library/importlib.metadata.html>`_,
+and their backports (:pypi:`importlib_resources`,
+:pypi:`importlib_metadata`).
+Please consider using those libraries instead of pkg_resources.
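+
+As a rough sketch of what that migration can look like (``my_package`` and
+``data.txt`` are hypothetical names; ``importlib.resources.files()`` requires
+Python 3.9+ or the ``importlib_resources`` backport)::
+
+    import importlib.metadata
+    import importlib.resources
+
+    # Query installed distribution metadata (e.g. a project's version).
+    version = importlib.metadata.version("setuptools")
+
+    # Read a data file shipped inside a package.
+    text = importlib.resources.files("my_package").joinpath("data.txt").read_text()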
--------
@@ -149,7 +153,7 @@ more information on this.) Also, you must add a ``declare_namespace()`` call
in the package's ``__init__.py`` file(s):
``declare_namespace(name)``
- Declare that the dotted package name `name` is a "namespace package" whose
+ Declare that the dotted package name ``name`` is a "namespace package" whose
contained packages and modules may be spread across multiple distributions.
The named package's ``__path__`` will be extended to include the
corresponding package in all distributions on ``sys.path`` that contain a
@@ -163,7 +167,7 @@ Applications that manipulate namespace packages or directly alter ``sys.path``
at runtime may also need to use this API function:
``fixup_namespace_packages(path_item)``
- Declare that `path_item` is a newly added item on ``sys.path`` that may
+ Declare that ``path_item`` is a newly added item on ``sys.path`` that may
need to be used to update existing namespace packages. Ordinarily, this is
called for you when an egg is automatically added to ``sys.path``, but if
your application modifies ``sys.path`` to include locations that may
@@ -197,7 +201,7 @@ not provide any way to detect arbitrary changes to a list object like
``working_set`` based on changes to ``sys.path``.
``WorkingSet(entries=None)``
- Create a ``WorkingSet`` from an iterable of path entries. If `entries`
+ Create a ``WorkingSet`` from an iterable of path entries. If ``entries``
is not supplied, it defaults to the value of ``sys.path`` at the time
the constructor is called.
@@ -229,9 +233,9 @@ abbreviation for ``pkg_resources.working_set.require()``:
``require(*requirements)``
- Ensure that distributions matching `requirements` are activated
+ Ensure that distributions matching ``requirements`` are activated
- `requirements` must be a string or a (possibly-nested) sequence
+ ``requirements`` must be a string or a (possibly-nested) sequence
thereof, specifying the distributions and versions required. The
return value is a sequence of the distributions that needed to be
activated to fulfill the requirements; all relevant distributions are
@@ -245,8 +249,8 @@ abbreviation for ``pkg_resources.working_set.require()``:
interactive interpreter hacking than for production use. If you're creating
an actual library or application, it's strongly recommended that you create
a "setup.py" script using ``setuptools``, and declare all your requirements
- there. That way, tools like EasyInstall can automatically detect what
- requirements your package has, and deal with them accordingly.
+ there. That way, tools like pip can automatically detect what requirements
+ your package has, and deal with them accordingly.
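+
+    A minimal sketch of interactive use (``SomePackage`` is a stand-in for a
+    project that is already installed)::
+
+        import pkg_resources
+
+        # Activate a suitable installed version; raises DistributionNotFound
+        # or VersionConflict if the requirement cannot be satisfied.
+        pkg_resources.require("SomePackage>=1.0")
+
+        import somepackage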
Note that calling ``require('SomePackage')`` will not install
``SomePackage`` if it isn't already present. If you need to do this, you
@@ -259,8 +263,8 @@ abbreviation for ``pkg_resources.working_set.require()``:
``obtain()`` method of ``Environment`` objects.
``run_script(requires, script_name)``
- Locate distribution specified by `requires` and run its `script_name`
- script. `requires` must be a string containing a requirement specifier.
+ Locate distribution specified by ``requires`` and run its ``script_name``
+ script. ``requires`` must be a string containing a requirement specifier.
(See `Requirements Parsing`_ below for the syntax.)
The script, if found, will be executed in *the caller's globals*. That's
@@ -274,11 +278,11 @@ abbreviation for ``pkg_resources.working_set.require()``:
object's `Metadata API`_ instead.
``iter_entry_points(group, name=None)``
- Yield entry point objects from `group` matching `name`
+ Yield entry point objects from ``group`` matching ``name``
- If `name` is None, yields all entry points in `group` from all
+ If ``name`` is None, yields all entry points in ``group`` from all
distributions in the working set, otherwise only ones matching both
- `group` and `name` are yielded. Entry points are yielded from the active
+ ``group`` and ``name`` are yielded. Entry points are yielded from the active
distributions in the order that the distributions appear in the working
set. (For the global ``working_set``, this should be the same as the order
that they are listed in ``sys.path``.) Note that within the entry points
@@ -301,14 +305,14 @@ instance:
called by the ``WorkingSet()`` constructor during initialization.
This method uses ``find_distributions(entry,True)`` to find distributions
- corresponding to the path entry, and then ``add()`` them. `entry` is
+ corresponding to the path entry, and then ``add()`` them. ``entry`` is
always appended to the ``entries`` attribute, even if it is already
present, however. (This is because ``sys.path`` can contain the same value
more than once, and the ``entries`` attribute should be able to reflect
this.)
``__contains__(dist)``
- True if `dist` is active in this ``WorkingSet``. Note that only one
+ True if ``dist`` is active in this ``WorkingSet``. Note that only one
distribution for a given project can be active in a given ``WorkingSet``.
``__iter__()``
@@ -317,34 +321,34 @@ instance:
added to the working set.
``find(req)``
- Find a distribution matching `req` (a ``Requirement`` instance).
+ Find a distribution matching ``req`` (a ``Requirement`` instance).
If there is an active distribution for the requested project, this
returns it, as long as it meets the version requirement specified by
- `req`. But, if there is an active distribution for the project and it
- does *not* meet the `req` requirement, ``VersionConflict`` is raised.
+ ``req``. But, if there is an active distribution for the project and it
+ does *not* meet the ``req`` requirement, ``VersionConflict`` is raised.
If there is no active distribution for the requested project, ``None``
is returned.
``resolve(requirements, env=None, installer=None)``
- List all distributions needed to (recursively) meet `requirements`
+ List all distributions needed to (recursively) meet ``requirements``
- `requirements` must be a sequence of ``Requirement`` objects. `env`,
+ ``requirements`` must be a sequence of ``Requirement`` objects. ``env``,
if supplied, should be an ``Environment`` instance. If
not supplied, an ``Environment`` is created from the working set's
- ``entries``. `installer`, if supplied, will be invoked with each
+ ``entries``. ``installer``, if supplied, will be invoked with each
requirement that cannot be met by an already-installed distribution; it
should return a ``Distribution`` or ``None``. (See the ``obtain()`` method
- of `Environment Objects`_, below, for more information on the `installer`
+ of `Environment Objects`_, below, for more information on the ``installer``
argument.)
``add(dist, entry=None)``
- Add `dist` to working set, associated with `entry`
+ Add ``dist`` to working set, associated with ``entry``
- If `entry` is unspecified, it defaults to ``dist.location``. On exit from
- this routine, `entry` is added to the end of the working set's ``.entries``
+ If ``entry`` is unspecified, it defaults to ``dist.location``. On exit from
+ this routine, ``entry`` is added to the end of the working set's ``.entries``
(if it wasn't already present).
- `dist` is only added to the working set if it's for a project that
+ ``dist`` is only added to the working set if it's for a project that
doesn't already have a distribution active in the set. If it's
successfully added, any callbacks registered with the ``subscribe()``
method will be called. (See `Receiving Change Notifications`_, below.)
@@ -401,7 +405,7 @@ environment for the newest version of each project that can be safely loaded
without conflicts or missing requirements.
``find_plugins(plugin_env, full_env=None, fallback=True)``
- Scan `plugin_env` and identify which distributions could be added to this
+ Scan ``plugin_env`` and identify which distributions could be added to this
working set without version conflicts or missing requirements.
Example usage::
@@ -412,19 +416,19 @@ without conflicts or missing requirements.
map(working_set.add, distributions) # add plugins+libs to sys.path
print "Couldn't load", errors # display errors
- The `plugin_env` should be an ``Environment`` instance that contains only
+ The ``plugin_env`` should be an ``Environment`` instance that contains only
distributions that are in the project's "plugin directory" or directories.
- The `full_env`, if supplied, should be an ``Environment`` instance that
+ The ``full_env``, if supplied, should be an ``Environment`` instance that
contains all currently-available distributions.
- If `full_env` is not supplied, one is created automatically from the
+ If ``full_env`` is not supplied, one is created automatically from the
``WorkingSet`` this method is called on, which will typically mean that
every directory on ``sys.path`` will be scanned for distributions.
- This method returns a 2-tuple: (`distributions`, `error_info`), where
- `distributions` is a list of the distributions found in `plugin_env` that
+ This method returns a 2-tuple: (``distributions``, ``error_info``), where
+ ``distributions`` is a list of the distributions found in ``plugin_env`` that
were loadable, along with any other distributions that are needed to resolve
- their dependencies. `error_info` is a dictionary mapping unloadable plugin
+ their dependencies. ``error_info`` is a dictionary mapping unloadable plugin
distributions to an exception instance describing the error that occurred.
Usually this will be a ``DistributionNotFound`` or ``VersionConflict``
instance.
@@ -436,7 +440,7 @@ without conflicts or missing requirements.
metadata tracking and hooks to be activated.
The resolution algorithm used by ``find_plugins()`` is as follows. First,
- the project names of the distributions present in `plugin_env` are sorted.
+ the project names of the distributions present in ``plugin_env`` are sorted.
Then, each project's eggs are tried in descending version order (i.e.,
newest version first).
@@ -446,7 +450,7 @@ without conflicts or missing requirements.
the next project name, and no older eggs for that project are tried.
If the resolution attempt fails, however, the error is added to the error
- dictionary. If the `fallback` flag is true, the next older version of the
+ dictionary. If the ``fallback`` flag is true, the next older version of the
plugin is tried, until a working version is found. If false, the resolution
process continues with the next plugin project name.
@@ -455,7 +459,7 @@ without conflicts or missing requirements.
may not be able to safely downgrade a version of a package. Others may want
to ensure that a new plugin configuration is either 100% good or else
revert to a known-good configuration. (That is, they may wish to revert to
- a known configuration if the `error_info` return value is non-empty.)
+ a known configuration if the ``error_info`` return value is non-empty.)
Note that this algorithm gives precedence to satisfying the dependencies of
alphabetically prior project names in case of version conflicts. If two
@@ -473,22 +477,22 @@ that are present and potentially importable on the current platform.
distributions during dependency resolution.
``Environment(search_path=None, platform=get_supported_platform(), python=PY_MAJOR)``
- Create an environment snapshot by scanning `search_path` for distributions
- compatible with `platform` and `python`. `search_path` should be a
+ Create an environment snapshot by scanning ``search_path`` for distributions
+ compatible with ``platform`` and ``python``. ``search_path`` should be a
sequence of strings such as might be used on ``sys.path``. If a
- `search_path` isn't supplied, ``sys.path`` is used.
+ ``search_path`` isn't supplied, ``sys.path`` is used.
- `platform` is an optional string specifying the name of the platform
+ ``platform`` is an optional string specifying the name of the platform
that platform-specific distributions must be compatible with. If
- unspecified, it defaults to the current platform. `python` is an
+ unspecified, it defaults to the current platform. ``python`` is an
optional string naming the desired version of Python (e.g. ``'2.4'``);
it defaults to the currently-running version.
- You may explicitly set `platform` (and/or `python`) to ``None`` if you
+ You may explicitly set ``platform`` (and/or ``python``) to ``None`` if you
wish to include *all* distributions, not just those compatible with the
running platform or Python version.
- Note that `search_path` is scanned immediately for distributions, and the
+ Note that ``search_path`` is scanned immediately for distributions, and the
resulting ``Environment`` is a snapshot of the found distributions. It
is not automatically updated if the system's state changes due to e.g.
installation or removal of distributions.
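+
+    A quick sketch of taking and inspecting such a snapshot (the project
+    names printed depend entirely on what is installed)::
+
+        from pkg_resources import Environment
+
+        env = Environment()          # scans sys.path by default
+        for project_name in env:     # yields known project names, lower-cased
+            print(project_name, [dist.version for dist in env[project_name]])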
@@ -504,15 +508,15 @@ distributions during dependency resolution.
The yielded names are always in lower case.
``add(dist)``
- Add `dist` to the environment if it matches the platform and python version
+ Add ``dist`` to the environment if it matches the platform and python version
specified at creation time, and only if the distribution hasn't already
been added. (i.e., adding the same distribution more than once is a no-op.)
``remove(dist)``
- Remove `dist` from the environment.
+ Remove ``dist`` from the environment.
``can_add(dist)``
- Is distribution `dist` acceptable for this environment? If it's not
+ Is distribution ``dist`` acceptable for this environment? If it's not
compatible with the ``platform`` and ``python`` version values specified
when the environment was created, a false value is returned.
@@ -534,34 +538,34 @@ distributions during dependency resolution.
are silently ignored.
``best_match(req, working_set, installer=None)``
- Find distribution best matching `req` and usable on `working_set`
+ Find distribution best matching ``req`` and usable on ``working_set``
- This calls the ``find(req)`` method of the `working_set` to see if a
+ This calls the ``find(req)`` method of the ``working_set`` to see if a
suitable distribution is already active. (This may raise
``VersionConflict`` if an unsuitable version of the project is already
- active in the specified `working_set`.) If a suitable distribution isn't
+ active in the specified ``working_set``.) If a suitable distribution isn't
active, this method returns the newest distribution in the environment
- that meets the ``Requirement`` in `req`. If no suitable distribution is
- found, and `installer` is supplied, then the result of calling
+ that meets the ``Requirement`` in ``req``. If no suitable distribution is
+ found, and ``installer`` is supplied, then the result of calling
the environment's ``obtain(req, installer)`` method will be returned.
``obtain(requirement, installer=None)``
Obtain a distro that matches requirement (e.g. via download). In the
base ``Environment`` class, this routine just returns
- ``installer(requirement)``, unless `installer` is None, in which case
+ ``installer(requirement)``, unless ``installer`` is None, in which case
None is returned instead. This method is a hook that allows subclasses
to attempt other ways of obtaining a distribution before falling back
- to the `installer` argument.
+ to the ``installer`` argument.
``scan(search_path=None)``
- Scan `search_path` for distributions usable on `platform`
+ Scan ``search_path`` for distributions usable on ``platform``
- Any distributions found are added to the environment. `search_path` should
+ Any distributions found are added to the environment. ``search_path`` should
be a sequence of strings such as might be used on ``sys.path``. If not
supplied, ``sys.path`` is used. Only distributions conforming to
the platform/python version defined at initialization are added. This
method is a shortcut for using the ``find_distributions()`` function to
- find the distributions from each item in `search_path`, and then calling
+ find the distributions from each item in ``search_path``, and then calling
``add()`` to add each one to the environment.
@@ -594,7 +598,7 @@ Requirements Parsing
FooProject >= 1.2
Fizzy [foo, bar]
- PickyThing<1.6,>1.9,!=1.9.6,<2.0a0,==2.4c1
+ PickyThing>1.6,<=1.9,!=1.8.6
SomethingWhoseVersionIDontCareAbout
SomethingWithMarker[foo]>1.0;python_version<"2.7"
@@ -611,9 +615,9 @@ Requirements Parsing
or activation of both Report-O-Rama and any libraries it needs in order to
provide PDF support. For example, you could use::
- easy_install.py Report-O-Rama[PDF]
+ pip install Report-O-Rama[PDF]
- To install the necessary packages using the EasyInstall program, or call
+ To install the necessary packages using pip, or call
``pkg_resources.require('Report-O-Rama[PDF]')`` to add the necessary
distributions to sys.path at runtime.
@@ -627,10 +631,10 @@ Requirements Parsing
--------------------------------------
``__contains__(dist_or_version)``
- Return true if `dist_or_version` fits the criteria for this requirement.
- If `dist_or_version` is a ``Distribution`` object, its project name must
+ Return true if ``dist_or_version`` fits the criteria for this requirement.
+ If ``dist_or_version`` is a ``Distribution`` object, its project name must
match the requirement's project name, and its version must meet the
- requirement's version criteria. If `dist_or_version` is a string, it is
+ requirement's version criteria. If ``dist_or_version`` is a string, it is
parsed using the ``parse_version()`` utility function. Otherwise, it is
assumed to be an already-parsed version.
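+
+    For instance, using the requirement syntax shown above (the results are
+    what the check is expected to yield)::
+
+        from pkg_resources import Requirement
+
+        req = Requirement.parse("FooProject >= 1.2")
+        print("1.3" in req)   # expected: True  (version meets the specifier)
+        print("1.0" in req)   # expected: False (below the minimum version)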
@@ -668,8 +672,8 @@ Requirements Parsing
``specs``
A list of ``(op,version)`` tuples, sorted in ascending parsed-version
- order. The `op` in each tuple is a comparison operator, represented as
- a string. The `version` is the (unparsed) version number.
+ order. The ``op`` in each tuple is a comparison operator, represented as
+ a string. The ``version`` is the (unparsed) version number.
``marker``
An instance of ``packaging.markers.Marker`` that allows evaluation
@@ -703,7 +707,7 @@ entry point group and look for entry points named "pre_process" and
To advertise an entry point, a project needs to use ``setuptools`` and provide
an ``entry_points`` argument to ``setup()`` in its setup script, so that the
entry points will be included in the distribution's metadata. For more
-details, see the ``setuptools`` documentation. (XXX link here to setuptools)
+details, see :ref:`Advertising Behavior<dynamic discovery of services and plugins>`.
Each project distribution can advertise at most one entry point of a given
name within the same entry point group. For example, a distutils extension
@@ -721,14 +725,14 @@ in sys.path order, etc.
Convenience API
---------------
-In the following functions, the `dist` argument can be a ``Distribution``
+In the following functions, the ``dist`` argument can be a ``Distribution``
instance, a ``Requirement`` instance, or a string specifying a requirement
(i.e. project name, version, etc.). If the argument is a string or
``Requirement``, the specified distribution is located (and added to sys.path
if not already present). An error will be raised if a matching distribution is
not available.
-The `group` argument should be a string containing a dotted identifier,
+The ``group`` argument should be a string containing a dotted identifier,
identifying an entry point group. If you are defining an entry point group,
you should include some portion of your package's name in the group name so as
to avoid collision with other packages' entry point groups.
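+
+For example, a short sketch that enumerates the command-line scripts
+advertised by installed distributions (``console_scripts`` is the standard
+entry point group used for such scripts)::
+
+    import pkg_resources
+
+    for ep in pkg_resources.iter_entry_points(group="console_scripts"):
+        print(ep.name, "->", ep.module_name)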
@@ -738,25 +742,25 @@ to avoid collision with other packages' entry point groups.
``ImportError``.
``get_entry_info(dist, group, name)``
- Return an ``EntryPoint`` object for the given `group` and `name` from
+ Return an ``EntryPoint`` object for the given ``group`` and ``name`` from
the specified distribution. Returns ``None`` if the distribution has not
advertised a matching entry point.
``get_entry_map(dist, group=None)``
- Return the distribution's entry point map for `group`, or the full entry
+ Return the distribution's entry point map for ``group``, or the full entry
map for the distribution. This function always returns a dictionary,
- even if the distribution advertises no entry points. If `group` is given,
+ even if the distribution advertises no entry points. If ``group`` is given,
the dictionary maps entry point names to the corresponding ``EntryPoint``
- object. If `group` is None, the dictionary maps group names to
+ object. If ``group`` is None, the dictionary maps group names to
dictionaries that then map entry point names to the corresponding
``EntryPoint`` instance in that group.
``iter_entry_points(group, name=None)``
- Yield entry point objects from `group` matching `name`.
+ Yield entry point objects from ``group`` matching ``name``.
- If `name` is None, yields all entry points in `group` from all
+ If ``name`` is None, yields all entry points in ``group`` from all
distributions in the working set on sys.path, otherwise only ones matching
- both `group` and `name` are yielded. Entry points are yielded from
+ both ``group`` and ``name`` are yielded. Entry points are yielded from
the active distributions in the order that the distributions appear on
sys.path. (Within entry points for a particular distribution, however,
there is no particular ordering.)
@@ -769,26 +773,26 @@ Creating and Parsing
--------------------
``EntryPoint(name, module_name, attrs=(), extras=(), dist=None)``
- Create an ``EntryPoint`` instance. `name` is the entry point name. The
- `module_name` is the (dotted) name of the module containing the advertised
- object. `attrs` is an optional tuple of names to look up from the
- module to obtain the advertised object. For example, an `attrs` of
- ``("foo","bar")`` and a `module_name` of ``"baz"`` would mean that the
+ Create an ``EntryPoint`` instance. ``name`` is the entry point name. The
+ ``module_name`` is the (dotted) name of the module containing the advertised
+ object. ``attrs`` is an optional tuple of names to look up from the
+ module to obtain the advertised object. For example, an ``attrs`` of
+ ``("foo","bar")`` and a ``module_name`` of ``"baz"`` would mean that the
advertised object could be obtained by the following code::
import baz
advertised_object = baz.foo.bar
- The `extras` are an optional tuple of "extra feature" names that the
+ The ``extras`` are an optional tuple of "extra feature" names that the
distribution needs in order to provide this entry point. When the
- entry point is loaded, these extra features are looked up in the `dist`
+ entry point is loaded, these extra features are looked up in the ``dist``
argument to find out what other distributions may need to be activated
- on sys.path; see the ``load()`` method for more details. The `extras`
- argument is only meaningful if `dist` is specified. `dist` must be
+ on sys.path; see the ``load()`` method for more details. The ``extras``
+ argument is only meaningful if ``dist`` is specified. ``dist`` must be
a ``Distribution`` instance.
``EntryPoint.parse(src, dist=None)`` (classmethod)
- Parse a single entry point from string `src`
+ Parse a single entry point from string ``src``
Entry point syntax follows the form::
@@ -796,27 +800,27 @@ Creating and Parsing
The entry name and module name are required, but the ``:attrs`` and
``[extras]`` parts are optional, as is the whitespace shown between
- some of the items. The `dist` argument is passed through to the
+ some of the items. The ``dist`` argument is passed through to the
``EntryPoint()`` constructor, along with the other values parsed from
- `src`.
+ ``src``.
``EntryPoint.parse_group(group, lines, dist=None)`` (classmethod)
- Parse `lines` (a string or sequence of lines) to create a dictionary
+ Parse ``lines`` (a string or sequence of lines) to create a dictionary
mapping entry point names to ``EntryPoint`` objects. ``ValueError`` is
- raised if entry point names are duplicated, if `group` is not a valid
+ raised if entry point names are duplicated, if ``group`` is not a valid
entry point group name, or if there are any syntax errors. (Note: the
- `group` parameter is used only for validation and to create more
- informative error messages.) If `dist` is provided, it will be used to
+ ``group`` parameter is used only for validation and to create more
+ informative error messages.) If ``dist`` is provided, it will be used to
set the ``dist`` attribute of the created ``EntryPoint`` objects.
``EntryPoint.parse_map(data, dist=None)`` (classmethod)
- Parse `data` into a dictionary mapping group names to dictionaries mapping
- entry point names to ``EntryPoint`` objects. If `data` is a dictionary,
+ Parse ``data`` into a dictionary mapping group names to dictionaries mapping
+ entry point names to ``EntryPoint`` objects. If ``data`` is a dictionary,
then the keys are used as group names and the values are passed to
- ``parse_group()`` as the `lines` argument. If `data` is a string or
+ ``parse_group()`` as the ``lines`` argument. If ``data`` is a string or
sequence of lines, it is first split into .ini-style sections (using
the ``split_sections()`` utility function) and the section names are used
- as group names. In either case, the `dist` argument is passed through to
+ as group names. In either case, the ``dist`` argument is passed through to
``parse_group()`` so that the entry points will be linked to the specified
distribution.
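+
+A short sketch of parsing a single entry point specification (the names used
+here are purely illustrative)::
+
+    from pkg_resources import EntryPoint
+
+    ep = EntryPoint.parse("hello = my_package.cli:main [extra1]")
+    print(ep.name, ep.module_name, ep.attrs, ep.extras)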
@@ -837,9 +841,9 @@ addition, the following methods are provided:
Ensure that any "extras" needed by the entry point are available on
sys.path. ``UnknownExtra`` is raised if the ``EntryPoint`` has ``extras``,
but no ``dist``, or if the named extras are not defined by the
- distribution. If `env` is supplied, it must be an ``Environment``, and it
+ distribution. If ``env`` is supplied, it must be an ``Environment``, and it
will be used to search for needed distributions if they are not already
- present on sys.path. If `installer` is supplied, it must be a callable
+ present on sys.path. If ``installer`` is supplied, it must be a callable
taking a ``Requirement`` instance and returning a matching importable
``Distribution`` instance or None.
@@ -872,16 +876,16 @@ available distributions, respectively.) You can also obtain ``Distribution``
objects from one of these high-level APIs:
``find_distributions(path_item, only=False)``
- Yield distributions accessible via `path_item`. If `only` is true, yield
- only distributions whose ``location`` is equal to `path_item`. In other
- words, if `only` is true, this yields any distributions that would be
- importable if `path_item` were on ``sys.path``. If `only` is false, this
- also yields distributions that are "in" or "under" `path_item`, but would
+ Yield distributions accessible via ``path_item``. If ``only`` is true, yield
+ only distributions whose ``location`` is equal to ``path_item``. In other
+ words, if ``only`` is true, this yields any distributions that would be
+ importable if ``path_item`` were on ``sys.path``. If ``only`` is false, this
+ also yields distributions that are "in" or "under" ``path_item``, but would
not be importable unless their locations were also added to ``sys.path``.
``get_distribution(dist_spec)``
Return a ``Distribution`` object for a given ``Requirement`` or string.
- If `dist_spec` is already a ``Distribution`` instance, it is returned.
+ If ``dist_spec`` is already a ``Distribution`` instance, it is returned.
If it is a ``Requirement`` object or a string that can be parsed into one,
it is used to locate and activate a matching distribution, which is then
returned.
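+
+A brief sketch of both calls (the directory passed to ``find_distributions()``
+is just an illustrative location)::
+
+    import pkg_resources
+
+    dist = pkg_resources.get_distribution("setuptools")
+    print(dist.project_name, dist.version)
+
+    for d in pkg_resources.find_distributions("/usr/lib/python3/dist-packages"):
+        print(d.project_name, d.location)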
@@ -890,18 +894,18 @@ However, if you're creating specialized tools for working with distributions,
or creating a new distribution format, you may also need to create
``Distribution`` objects directly, using one of the three constructors below.
-These constructors all take an optional `metadata` argument, which is used to
-access any resources or metadata associated with the distribution. `metadata`
+These constructors all take an optional ``metadata`` argument, which is used to
+access any resources or metadata associated with the distribution. ``metadata``
must be an object that implements the ``IResourceProvider`` interface, or None.
If it is None, an ``EmptyProvider`` is used instead. ``Distribution`` objects
implement both the `IResourceProvider`_ and `IMetadataProvider Methods`_ by
-delegating them to the `metadata` object.
+delegating them to the ``metadata`` object.
``Distribution.from_location(location, basename, metadata=None, **kw)`` (classmethod)
- Create a distribution for `location`, which must be a string such as a
+ Create a distribution for ``location``, which must be a string such as a
URL, filename, or other string that might be used on ``sys.path``.
- `basename` is a string naming the distribution, like ``Foo-1.2-py2.4.egg``.
- If `basename` ends with ``.egg``, then the project's name, version, python
+ ``basename`` is a string naming the distribution, like ``Foo-1.2-py2.4.egg``.
+ If ``basename`` ends with ``.egg``, then the project's name, version, python
version and platform are extracted from the filename and used to set those
properties of the created distribution. Any additional keyword arguments
are forwarded to the ``Distribution()`` constructor.
@@ -917,8 +921,8 @@ delegating them to the `metadata` object.
``Distribution(location,metadata,project_name,version,py_version,platform,precedence)``
Create a distribution by setting its properties. All arguments are
- optional and default to None, except for `py_version` (which defaults to
- the current Python version) and `precedence` (which defaults to
+ optional and default to None, except for ``py_version`` (which defaults to
+ the current Python version) and ``precedence`` (which defaults to
``EGG_DIST``; for more details see ``precedence`` under `Distribution
Attributes`_ below). Note that it's usually easier to use the
``from_filename()`` or ``from_location()`` constructors than to specify
@@ -938,7 +942,7 @@ project_name
A string, naming the project that this distribution is for. Project names
are defined by a project's setup script, and they are used to identify
projects on PyPI. When a ``Distribution`` is constructed, the
- `project_name` argument is passed through the ``safe_name()`` utility
+ ``project_name`` argument is passed through the ``safe_name()`` utility
function to filter out any unacceptable characters.
key
@@ -952,9 +956,9 @@ extras
version
A string denoting what release of the project this distribution contains.
- When a ``Distribution`` is constructed, the `version` argument is passed
+ When a ``Distribution`` is constructed, the ``version`` argument is passed
through the ``safe_version()`` utility function to filter out any
- unacceptable characters. If no `version` is specified at construction
+ unacceptable characters. If no ``version`` is specified at construction
time, then attempting to access this attribute later will cause the
``Distribution`` to try to discover its version by reading its ``PKG-INFO``
metadata file. If ``PKG-INFO`` is unavailable or can't be parsed,
@@ -967,7 +971,7 @@ parsed_version
distributions by version. (See the `Parsing Utilities`_ section below for
more information on the ``parse_version()`` function.) Note that accessing
``parsed_version`` may result in a ``ValueError`` if the ``Distribution``
- was constructed without a `version` and without `metadata` capable of
+ was constructed without a ``version`` and without ``metadata`` capable of
supplying the missing version info.
py_version
@@ -998,9 +1002,9 @@ precedence
------------------------
``activate(path=None)``
- Ensure distribution is importable on `path`. If `path` is None,
+ Ensure distribution is importable on ``path``. If ``path`` is None,
``sys.path`` is used instead. This ensures that the distribution's
- ``location`` is in the `path` list, and it also performs any necessary
+ ``location`` is in the ``path`` list, and it also performs any necessary
namespace package fixups or declarations. (That is, if the distribution
contains namespace packages, this method ensures that they are declared,
and that the distribution's contents for those namespace packages are
@@ -1020,7 +1024,7 @@ precedence
``requires(extras=())``
List the ``Requirement`` objects that specify this distribution's
- dependencies. If `extras` is specified, it should be a sequence of names
+ dependencies. If ``extras`` is specified, it should be a sequence of names
of "extras" defined by the distribution, and the list returned will then
include any dependencies needed to support the named "extras".
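A hedged usage sketch (``SomeProject`` and the ``docs`` extra are hypothetical
names used purely for illustration)::

    import pkg_resources

    dist = pkg_resources.get_distribution("SomeProject")  # hypothetical project
    print(dist.requires())                    # base dependencies only
    print(dist.requires(extras=("docs",)))    # base plus the "docs" extra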
@@ -1047,11 +1051,11 @@ by the distribution. See the section above on `Entry Points`_ for more
detailed information about these operations:
``get_entry_info(group, name)``
- Return the ``EntryPoint`` object for `group` and `name`, or None if no
+ Return the ``EntryPoint`` object for ``group`` and ``name``, or None if no
such point is advertised by this distribution.
``get_entry_map(group=None)``
- Return the entry point map for `group`. If `group` is None, return
+ Return the entry point map for ``group``. If ``group`` is None, return
a dictionary mapping group names to entry point maps for all groups.
(An entry point map is a dictionary of entry point names to ``EntryPoint``
objects.)
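An illustrative sketch (it assumes ``setuptools`` itself is installed, which is
the case whenever ``pkg_resources`` is importable)::

    import pkg_resources

    dist = pkg_resources.get_distribution("setuptools")
    # Map of entry points advertised in the "distutils.commands" group:
    print(dist.get_entry_map("distutils.commands"))
    # A single entry point, or None if it is not advertised:
    print(dist.get_entry_info("distutils.commands", "bdist_egg"))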
@@ -1079,14 +1083,15 @@ documented in later sections):
* ``resource_isdir(resource_name)``
* ``resource_listdir(resource_name)``
-If the distribution was created with a `metadata` argument, these resource and
-metadata access methods are all delegated to that `metadata` provider.
+If the distribution was created with a ``metadata`` argument, these resource and
+metadata access methods are all delegated to that ``metadata`` provider.
Otherwise, they are delegated to an ``EmptyProvider``, so that the distribution
will appear to have no resources or metadata. This delegation approach is used
so that supporting custom importers or new distribution formats can be done
simply by creating an appropriate `IResourceProvider`_ implementation; see the
section below on `Supporting Custom Importers`_ for more details.
+.. _ResourceManager API:
``ResourceManager`` API
=======================
@@ -1111,11 +1116,11 @@ Thus, you can use the APIs below without needing an explicit
Basic Resource Access
---------------------
-In the following methods, the `package_or_requirement` argument may be either
+In the following methods, the ``package_or_requirement`` argument may be either
a Python package/module name (e.g. ``foo.bar``) or a ``Requirement`` instance.
If it is a package or module name, the named module or package must be
importable (i.e., be in a distribution or directory on ``sys.path``), and the
-`resource_name` argument is interpreted relative to the named package. (Note
+``resource_name`` argument is interpreted relative to the named package. (Note
that if a module name is used, then the resource name is relative to the
package immediately containing the named module. Also, you should not use
a namespace package name, because a namespace package can be spread across
@@ -1126,13 +1131,13 @@ If it is a ``Requirement``, then the requirement is automatically resolved
(searching the current ``Environment`` if necessary) and a matching
distribution is added to the ``WorkingSet`` and ``sys.path`` if one was not
already present. (Unless the ``Requirement`` can't be satisfied, in which
-case an exception is raised.) The `resource_name` argument is then interpreted
+case an exception is raised.) The ``resource_name`` argument is then interpreted
relative to the root of the identified distribution; i.e. its first path
segment will be treated as a peer of the top-level modules or packages in the
distribution.
-Note that resource names must be ``/``-separated paths and cannot be absolute
-(i.e. no leading ``/``) or contain relative names like ``".."``. Do *not* use
+Note that resource names must be ``/``-separated paths rooted at the package,
+cannot contain relative names like ``".."``, and cannot be absolute. Do *not* use
``os.path`` routines to manipulate resource paths, as they are *not* filesystem
paths.
@@ -1146,7 +1151,7 @@ paths.
will be read as-is.
``resource_string(package_or_requirement, resource_name)``
- Return the specified resource as a string. The resource is read in
+ Return the specified resource as ``bytes``. The resource is read in
binary fashion, such that the returned string contains exactly the bytes
that are stored in the resource.
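For example (a minimal sketch; ``mypkg`` and ``data/defaults.cfg`` are
hypothetical names, substitute a package and resource that actually exist)::

    import pkg_resources

    raw = pkg_resources.resource_string("mypkg", "data/defaults.cfg")
    text = raw.decode("utf-8")   # resource_string() returns bytes, decode as needed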
@@ -1221,19 +1226,19 @@ Resource Extraction
If you are implementing an ``IResourceProvider`` and/or ``IMetadataProvider``
for a new distribution archive format, you may need to use the following
-``IResourceManager`` methods to co-ordinate extraction of resources to the
+``IResourceManager`` methods to coordinate extraction of resources to the
filesystem. If you're not implementing an archive format, however, you have
no need to use these methods. Unlike the other methods listed above, they are
*not* available as top-level functions tied to the global ``ResourceManager``;
you must therefore have an explicit ``ResourceManager`` instance to use them.
``get_cache_path(archive_name, names=())``
- Return absolute location in cache for `archive_name` and `names`
+ Return absolute location in cache for ``archive_name`` and ``names``
The parent directory of the resulting path will be created if it does
- not already exist. `archive_name` should be the base filename of the
+ not already exist. ``archive_name`` should be the base filename of the
enclosing egg (which may not be the name of the enclosing zipfile!),
- including its ".egg" extension. `names`, if provided, should be a
+ including its ".egg" extension. ``names``, if provided, should be a
sequence of path name parts "under" the egg's extraction location.
This method should only be called by resource providers that need to
@@ -1249,12 +1254,12 @@ you must therefore have an explicit ``ResourceManager`` instance to use them.
wrap or handle extraction errors themselves.
``postprocess(tempname, filename)``
- Perform any platform-specific postprocessing of `tempname`.
+ Perform any platform-specific postprocessing of ``tempname``.
Resource providers should call this method ONLY after successfully
extracting a compressed resource. They must NOT call it on resources
that are already in the filesystem.
- `tempname` is the current (temporary) name of the file, and `filename`
+ ``tempname`` is the current (temporary) name of the file, and ``filename``
is the name it will be renamed to by the caller after this routine
returns.
@@ -1322,7 +1327,7 @@ implement the ``IMetadataProvider`` or ``IResourceProvider`` interfaces are:
``run_script(script_name, namespace)``
Execute the named script in the supplied namespace dictionary. Raises
``ResolutionError`` if there is no script by that name in the ``scripts``
- metadata directory. `namespace` should be a Python dictionary, usually
+ metadata directory. ``namespace`` should be a Python dictionary, usually
a module dictionary if the script is being run as a module.
@@ -1379,11 +1384,11 @@ with other (PEP 302-compatible) importers or module loaders, you may need to
register various handlers and support functions using these APIs:
``register_finder(importer_type, distribution_finder)``
- Register `distribution_finder` to find distributions in ``sys.path`` items.
- `importer_type` is the type or class of a PEP 302 "Importer" (``sys.path``
- item handler), and `distribution_finder` is a callable that, when passed a
- path item, the importer instance, and an `only` flag, yields
- ``Distribution`` instances found under that path item. (The `only` flag,
+ Register ``distribution_finder`` to find distributions in ``sys.path`` items.
+ ``importer_type`` is the type or class of a PEP 302 "Importer" (``sys.path``
+ item handler), and ``distribution_finder`` is a callable that, when passed a
+ path item, the importer instance, and an ``only`` flag, yields
+ ``Distribution`` instances found under that path item. (The ``only`` flag,
if true, means the finder should yield only ``Distribution`` objects whose
``location`` is equal to the path item provided.)
@@ -1391,16 +1396,16 @@ register various handlers and support functions using these APIs:
example finder function.
``register_loader_type(loader_type, provider_factory)``
- Register `provider_factory` to make ``IResourceProvider`` objects for
- `loader_type`. `loader_type` is the type or class of a PEP 302
- ``module.__loader__``, and `provider_factory` is a function that, when
+ Register ``provider_factory`` to make ``IResourceProvider`` objects for
+ ``loader_type``. ``loader_type`` is the type or class of a PEP 302
+ ``module.__loader__``, and ``provider_factory`` is a function that, when
passed a module object, returns an `IResourceProvider`_ for that module,
allowing it to be used with the `ResourceManager API`_.
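A rough sketch of wiring a custom loader type to a provider factory
(``MyLoader`` and ``MyProvider`` are placeholders invented for this example,
not part of ``pkg_resources``)::

    import pkg_resources

    class MyLoader:
        """Placeholder for a custom PEP 302 module loader type."""

    class MyProvider(pkg_resources.NullProvider):
        """Placeholder provider; a real one would implement resource access."""

    # Modules whose __loader__ is a MyLoader instance will now have their
    # resources and metadata served through MyProvider instances.
    pkg_resources.register_loader_type(MyLoader, MyProvider)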
``register_namespace_handler(importer_type, namespace_handler)``
- Register `namespace_handler` to declare namespace packages for the given
- `importer_type`. `importer_type` is the type or class of a PEP 302
- "importer" (sys.path item handler), and `namespace_handler` is a callable
+ Register ``namespace_handler`` to declare namespace packages for the given
+ ``importer_type``. ``importer_type`` is the type or class of a PEP 302
+ "importer" (sys.path item handler), and ``namespace_handler`` is a callable
with a signature like this::
def namespace_handler(importer, path_entry, moduleName, module):
@@ -1420,23 +1425,23 @@ IResourceProvider
-----------------
``IResourceProvider`` is an abstract class that documents what methods are
-required of objects returned by a `provider_factory` registered with
+required of objects returned by a ``provider_factory`` registered with
``register_loader_type()``. ``IResourceProvider`` is a subclass of
``IMetadataProvider``, so objects that implement this interface must also
implement all of the `IMetadataProvider Methods`_ as well as the methods
-shown here. The `manager` argument to the methods below must be an object
+shown here. The ``manager`` argument to the methods below must be an object
that supports the full `ResourceManager API`_ documented above.
``get_resource_filename(manager, resource_name)``
- Return a true filesystem path for `resource_name`, coordinating the
- extraction with `manager`, if the resource must be unpacked to the
+ Return a true filesystem path for ``resource_name``, coordinating the
+ extraction with ``manager``, if the resource must be unpacked to the
filesystem.
``get_resource_stream(manager, resource_name)``
- Return a readable file-like object for `resource_name`.
+ Return a readable file-like object for ``resource_name``.
``get_resource_string(manager, resource_name)``
- Return a string containing the contents of `resource_name`.
+ Return a string containing the contents of ``resource_name``.
``has_resource(resource_name)``
Does the package contain the named resource?
@@ -1500,15 +1505,15 @@ where appropriate. Their inheritance tree looks like this::
``PathMetadata(path, egg_info)``
Create an ``IResourceProvider`` for a filesystem-based distribution, where
- `path` is the filesystem location of the importable modules, and `egg_info`
+ ``path`` is the filesystem location of the importable modules, and ``egg_info``
is the filesystem location of the distribution's metadata directory.
- `egg_info` should usually be the ``EGG-INFO`` subdirectory of `path` for an
- "unpacked egg", and a ``ProjectName.egg-info`` subdirectory of `path` for
+ ``egg_info`` should usually be the ``EGG-INFO`` subdirectory of ``path`` for an
+ "unpacked egg", and a ``ProjectName.egg-info`` subdirectory of ``path`` for
a "development egg". However, other uses are possible for custom purposes.
``EggMetadata(zipimporter)``
Create an ``IResourceProvider`` for a zipfile-based distribution. The
- `zipimporter` should be a ``zipimport.zipimporter`` instance, and may
+ ``zipimporter`` should be a ``zipimport.zipimporter`` instance, and may
represent a "basket" (a zipfile containing multiple ".egg" subdirectories)
a specific egg *within* a basket, or a zipfile egg (where the zipfile
itself is a ".egg"). It can also be a combination, such as a zipfile egg
@@ -1546,12 +1551,12 @@ Parsing Utilities
``yield_lines(strs)``
Yield non-empty/non-comment lines from a string/unicode or a possibly-
- nested sequence thereof. If `strs` is an instance of ``basestring``, it
+ nested sequence thereof. If ``strs`` is an instance of ``basestring``, it
is split into lines, and each non-blank, non-comment line is yielded after
stripping leading and trailing whitespace. (Lines whose first non-blank
character is ``#`` are considered comment lines.)
- If `strs` is not an instance of ``basestring``, it is iterated over, and
+ If ``strs`` is not an instance of ``basestring``, it is iterated over, and
each item is passed recursively to ``yield_lines()``, so that an arbitrarily
nested sequence of strings, or sequences of sequences of strings can be
flattened out to the lines contained therein. So for example, passing
@@ -1595,12 +1600,12 @@ Parsing Utilities
See ``to_filename()``.
``safe_version(version)``
- This will return the normalized form of any PEP 440 version, if the version
- string is not PEP 440 compatible than it is similar to ``safe_name()``
- except that spaces in the input become dots, and dots are allowed to exist
- in the output. As with ``safe_name()``, if you are generating a filename
- from this you should replace any "-" characters in the output with
- underscores.
+ This will return the normalized form of any PEP 440 version. If the version
+ string is not PEP 440 compatible, this function behaves similarly to
+ ``safe_name()`` except that spaces in the input become dots, and dots are
+ allowed to exist in the output. As with ``safe_name()``, if you are
+ generating a filename from this you should replace any "-" characters in
+ the output with underscores.
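+
+ For instance (an illustrative sketch of these filename-oriented helpers)::
+
+     import pkg_resources
+
+     name = pkg_resources.safe_name("hello world")   # "hello-world"
+     print(pkg_resources.to_filename(name))          # "hello_world"
+     print(pkg_resources.safe_version("1.0"))        # "1.0" (already normalized)
+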
``safe_extra(extra)``
Return a "safe" form of an extra's name, suitable for use in a requirement
@@ -1620,7 +1625,7 @@ Platform Utilities
``get_build_platform()``
Return this platform's identifier string. For Windows, the return value
- is ``"win32"``, and for Mac OS X it is a string of the form
+ is ``"win32"``, and for macOS it is a string of the form
``"macosx-10.4-ppc"``. All other platforms return the same uname-based
string that the ``distutils.util.get_platform()`` function returns.
This string is the minimum platform version required by distributions built
@@ -1635,15 +1640,15 @@ Platform Utilities
``compatible_platforms()`` function.
``compatible_platforms(provided, required)``
- Return true if a distribution built on the `provided` platform may be used
- on the `required` platform. If either platform value is ``None``, it is
+ Return true if a distribution built on the ``provided`` platform may be used
+ on the ``required`` platform. If either platform value is ``None``, it is
considered a wildcard, and the platforms are therefore compatible.
Likewise, if the platform strings are equal, they're also considered
compatible, and ``True`` is returned. Currently, the only non-equal
- platform strings that are considered compatible are Mac OS X platform
+ platform strings that are considered compatible are macOS platform
strings with the same hardware type (e.g. ``ppc``) and major version
- (e.g. ``10``) with the `provided` platform's minor version being less than
- or equal to the `required` platform's minor version.
+ (e.g. ``10``) with the ``provided`` platform's minor version being less than
+ or equal to the ``required`` platform's minor version.
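+
+ For example (illustrative values; real platform strings vary by build)::
+
+     from pkg_resources import compatible_platforms
+
+     compatible_platforms("macosx-10.4-ppc", "macosx-10.5-ppc")  # True
+     compatible_platforms("macosx-10.6-ppc", "macosx-10.5-ppc")  # False
+     compatible_platforms(None, "win32")                         # True (wildcard)
+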
``get_default_cache()``
Determine the default cache location for extracting resources from zipped
@@ -1665,15 +1670,15 @@ File/Path Utilities
-------------------
``ensure_directory(path)``
- Ensure that the parent directory (``os.path.dirname``) of `path` actually
+ Ensure that the parent directory (``os.path.dirname``) of ``path`` actually
exists, using ``os.makedirs()`` if necessary.
``normalize_path(path)``
- Return a "normalized" version of `path`, such that two paths represent
+ Return a "normalized" version of ``path``, such that two paths represent
the same filesystem location if they have equal ``normalized_path()``
values. Specifically, this is a shortcut for calling ``os.path.realpath``
- and ``os.path.normcase`` on `path`. Unfortunately, on certain platforms
- (notably Cygwin and Mac OS X) the ``normcase`` function does not accurately
+ and ``os.path.normcase`` on ``path``. Unfortunately, on certain platforms
+ (notably Cygwin and macOS) the ``normcase`` function does not accurately
reflect the platform's case-sensitivity, so there is always the possibility
of two apparently-different paths being equal on such platforms.
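For example (a small sketch; the paths are illustrative and need not exist)::

    import pkg_resources

    a = pkg_resources.normalize_path("src/../setup.py")
    b = pkg_resources.normalize_path("./setup.py")
    print(a == b)   # typically True: both resolve to the same absolute path

    # Create the parent directory of a target path before writing to it:
    pkg_resources.ensure_directory("build/output/data.txt")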
@@ -1842,9 +1847,9 @@ History
because it isn't necessarily a filesystem path (and hasn't been for some
time now). The ``location`` of ``Distribution`` objects in the filesystem
should always be normalized using ``pkg_resources.normalize_path()``; all
- of the setuptools and EasyInstall code that generates distributions from
- the filesystem (including ``Distribution.from_filename()``) ensure this
- invariant, but if you use a more generic API like ``Distribution()`` or
+ of setuptools' code that generates distributions from the filesystem
+ (including ``Distribution.from_filename()``) ensures this invariant, but if
+ you use a more generic API like ``Distribution()`` or
``Distribution.from_location()`` you should take care that you don't
create a distribution with an un-normalized filesystem path.
@@ -1938,4 +1943,3 @@ History
0.3a1
* Initial release.
-
diff --git a/docs/python 2 sunset.rst b/docs/python 2 sunset.rst
new file mode 100644
index 0000000..225d655
--- /dev/null
+++ b/docs/python 2 sunset.rst
@@ -0,0 +1,69 @@
+:orphan:
+
+Python 2 Sunset
+===============
+
+Since January 2020 and the release of Setuptools 45, Python 2 is no longer
+supported by the most current release (`discussion
+<https://github.com/pypa/setuptools/issues/1458>`_). Setuptools as a project
+continues to support Python 2 with bugfixes and important features on
+Setuptools 44.x.
+
+By design, most users will be unaffected by this change. That's because
+Setuptools 45 declares its supported Python versions to exclude Python 2.7,
+and installers such as pip 9 or later will honor this declaration and prevent
+installation of Setuptools 45 or later in Python 2 environments.
+
+Users that do import any portion of Setuptools 45 or later on Python 2 are
+directed to this documentation, which provides guidance on how to work around
+the issues.
+
+Workarounds
+-----------
+
+The best recommendation is to avoid Python 2 and move to Python 3 where
+possible. This project acknowledges that not all environments can drop Python
+2 support, so provides other options.
+
+In less common scenarios, later versions of Setuptools can be installed on
+unsupported Python versions. In these environments, the installer is advised
+to first install ``setuptools<45`` to "pin Setuptools" to a compatible
+version.
+
+- When using older versions of pip (before 9.0), the ``Requires-Python``
+ directive is not honored and invalid versions can be installed. Users are
+ advised first to upgrade pip and retry or to pin Setuptools. Use ``pip
+ --version`` to determine the version of pip.
+- When using ``easy_install``, ``Requires-Python`` is not honored and later
+ versions can be installed. In this case, users are advised to pin
+ Setuptools. This applies to ``setup.py install`` invocations as well, as
+ they use Setuptools under the hood.
+
+It's still not working
+----------------------
+
+If after trying the above steps, the Python environment still has incompatible
+versions of Setuptools installed, here are some things to try.
+
+1. Uninstall and reinstall Setuptools. Run ``pip uninstall -y setuptools`` for
+ the relevant environment. Repeat until there is no Setuptools installed.
+ Then ``pip install setuptools``.
+2. If possible, attempt to replicate the problem in a second environment
+ (virtual machine, friend's computer, etc). If the issue is isolated to just
+ one unique environment, first determine what is different about those
+ environments (or reinstall/reset the failing one to defaults).
+3. End users who are not themselves the maintainers for the package they are
+ trying to install should contact the support channels for the relevant
+ application. Please be considerate of those projects by searching for
+ existing issues and following the latest guidance before reaching out for
+ support. When filing an issue, be sure to give as much detail as possible
+ to help the maintainers understand what factors led to the issue after
+ following their recommended guidance.
+4. Reach out to your local support groups. There's a good chance someone
+ nearby has the expertise and willingness to help.
+5. If all else fails, `file this template
+ <https://github.com/pypa/setuptools/issues/new?assignees=&labels=Python+2&template=setuptools-warns-about-python-2-incompatibility.md&title=Incompatible+install+in+(summarize+your+environment)>`_
+ with Setuptools. Please complete the whole template, providing as much
+ detail as possible about what factors led to the issue. Setuptools maintainers will
+ summarily close tickets filed without any meaningful detail or engagement
+ with the issue.
diff --git a/docs/python3.txt b/docs/python3.txt
deleted file mode 100644
index c528fc3..0000000
--- a/docs/python3.txt
+++ /dev/null
@@ -1,94 +0,0 @@
-=====================================================
-Supporting both Python 2 and Python 3 with Setuptools
-=====================================================
-
-Starting with Distribute version 0.6.2 and Setuptools 0.7, the Setuptools
-project supported Python 3. Installing and
-using setuptools for Python 3 code works exactly the same as for Python 2
-code.
-
-Setuptools provides a facility to invoke 2to3 on the code as a part of the
-build process, by setting the keyword parameter ``use_2to3`` to True, but
-the Setuptools strongly recommends instead developing a unified codebase
-using `six <https://pypi.org/project/six/>`_,
-`future <https://pypi.org/project/future/>`_, or another compatibility
-library.
-
-
-Using 2to3
-==========
-
-Setuptools attempts to make the porting process easier by automatically
-running
-2to3 as a part of running tests. To do so, you need to configure the
-setup.py so that you can run the unit tests with ``python setup.py test``.
-
-See :ref:`test` for more information on this.
-
-Once you have the tests running under Python 2, you can add the use_2to3
-keyword parameters to setup(), and start running the tests under Python 3.
-The test command will now first run the build command during which the code
-will be converted with 2to3, and the tests will then be run from the build
-directory, as opposed from the source directory as is normally done.
-
-Setuptools will convert all Python files, and also all doctests in Python
-files. However, if you have doctests located in separate text files, these
-will not automatically be converted. By adding them to the
-``convert_2to3_doctests`` keyword parameter Setuptools will convert them as
-well.
-
-By default, the conversion uses all fixers in the ``lib2to3.fixers`` package.
-To use additional fixers, the parameter ``use_2to3_fixers`` can be set
-to a list of names of packages containing fixers. To exclude fixers, the
-parameter ``use_2to3_exclude_fixers`` can be set to fixer names to be
-skipped.
-
-An example setup.py might look something like this::
-
- from setuptools import setup
-
- setup(
- name='your.module',
- version='1.0',
- description='This is your awesome module',
- author='You',
- author_email='your@email',
- package_dir={'': 'src'},
- packages=['your', 'you.module'],
- test_suite='your.module.tests',
- use_2to3=True,
- convert_2to3_doctests=['src/your/module/README.txt'],
- use_2to3_fixers=['your.fixers'],
- use_2to3_exclude_fixers=['lib2to3.fixes.fix_import'],
- )
-
-Differential conversion
------------------------
-
-Note that a file will only be copied and converted during the build process
-if the source file has been changed. If you add a file to the doctests
-that should be converted, it will not be converted the next time you run
-the tests, since it hasn't been modified. You need to remove it from the
-build directory. Also if you run the build, install or test commands before
-adding the use_2to3 parameter, you will have to remove the build directory
-before you run the test command, as the files otherwise will seem updated,
-and no conversion will happen.
-
-In general, if code doesn't seem to be converted, deleting the build directory
-and trying again is a good safeguard against the build directory getting
-"out of sync" with the source directory.
-
-Distributing Python 3 modules
-=============================
-
-You can distribute your modules with Python 3 support in different ways. A
-normal source distribution will work, but can be slow in installing, as the
-2to3 process will be run during the install. But you can also distribute
-the module in binary format, such as a binary egg. That egg will contain the
-already converted code, and hence no 2to3 conversion is needed during install.
-
-Advanced features
-=================
-
-If you don't want to run the 2to3 conversion on the doctests in Python files,
-you can turn that off by setting ``setuptools.use_2to3_on_doctests = False``.
diff --git a/docs/references/keywords.rst b/docs/references/keywords.rst
new file mode 100644
index 0000000..c26b9d4
--- /dev/null
+++ b/docs/references/keywords.rst
@@ -0,0 +1,336 @@
+========
+Keywords
+========
+
+``name``
+ A string specifying the name of the package.
+
+``version``
+ A string specifying the version number of the package.
+
+``description``
+ A string describing the package in a single line.
+
+``long_description``
+ A string providing a longer description of the package.
+
+``long_description_content_type``
+ A string specifying the content type used for the ``long_description``
+ (e.g. ``text/markdown``).
+
+``author``
+ A string specifying the author of the package.
+
+``author_email``
+ A string specifying the email address of the package author.
+
+``maintainer``
+ A string specifying the name of the current maintainer, if different from
+ the author. Note that if the maintainer is provided, setuptools will use it
+ as the author in ``PKG-INFO``.
+
+``maintainer_email``
+ A string specifying the email address of the current maintainer, if
+ different from the author.
+
+``url``
+ A string specifying the URL for the package homepage.
+
+``download_url``
+ A string specifying the URL to download the package.
+
+``packages``
+ A list of strings specifying the packages that setuptools will manipulate.
+
+``py_modules``
+ A list of strings specifying the modules that setuptools will manipulate.
+
+``scripts``
+ A list of strings specifying the standalone script files to be built and
+ installed.
+
+``ext_package``
+ A string specifying the base package name for the extensions provided by
+ this package.
+
+``ext_modules``
+ A list of instances of ``setuptools.Extension`` providing the list of
+ Python extensions to be built.
+
+``classifiers``
+ A list of strings describing the categories for the package.
+
+``distclass``
+ A subclass of ``Distribution`` to use.
+
+``script_name``
+ A string specifying the name of the setup.py script -- defaults to
+ ``sys.argv[0]``
+
+``script_args``
+ A list of strings defining the arguments to supply to the setup script.
+
+``options``
+ A dictionary providing the default options for the setup script.
+
+``license``
+ A string specifying the license of the package.
+
+``license_file``
+
+ .. warning::
+ ``license_file`` is deprecated. Use ``license_files`` instead.
+
+``license_files``
+
+ A list of glob patterns for license related files that should be included.
+ If neither ``license_file`` nor ``license_files`` is specified, this option
+ defaults to ``LICEN[CS]E*``, ``COPYING*``, ``NOTICE*``, and ``AUTHORS*``.
+
+``keywords``
+ A list of strings or a comma-separated string providing descriptive
+ meta-data. See: `PEP 0314`_.
+
+.. _PEP 0314: https://www.python.org/dev/peps/pep-0314/
+
+``platforms``
+ A list of strings or a comma-separated string.
+
+``cmdclass``
+ A dictionary providing a mapping of command names to ``Command``
+ subclasses.
+
+``data_files``
+
+ .. warning::
+ ``data_files`` is deprecated. It does not work with wheels, so it
+ should be avoided.
+
+ A list of ``(directory, files)`` pairs specifying the data files to install.
+
+``package_dir``
+ A dictionary providing a mapping of package to directory names.
+
+``requires``
+
+ .. warning::
+ ``requires`` is superseded by ``install_requires`` and should not be used
+ anymore.
+
+``obsoletes``
+
+ .. warning::
+ ``obsoletes`` is currently ignored by ``pip``.
+
+ List of strings describing packages which this package renders obsolete,
+ meaning that the two projects should not be installed at the same time.
+
+ Version declarations can be supplied. Version numbers must be in the format
+ specified in Version specifiers (e.g. ``foo (<3.0)``).
+
+ This field may be followed by an environment marker after a semicolon (e.g.
+ ``foo; os_name == "posix"``)
+
+ The most common use of this field will be in case a project name changes,
+ e.g. Gorgon 2.3 gets subsumed into Torqued Python 1.0. When you install
+ Torqued Python, the Gorgon distribution should be removed.
+
+``provides``
+
+ .. warning::
+ ``provides`` is currently ignored by ``pip``.
+
+ List of strings describing package- and virtual package names contained
+ within this package.
+
+ A package may provide additional names, e.g. to indicate that multiple
+ projects have been bundled together. For instance, source distributions of
+ the ZODB project have historically included the transaction project, which
+ is now available as a separate distribution. Installing such a source
+ distribution satisfies requirements for both ZODB and transaction.
+
+ A package may also provide a “virtual” project name, which does not
+ correspond to any separately-distributed project: such a name might be used
+ to indicate an abstract capability which could be supplied by one of
+ multiple projects. E.g., multiple projects might supply RDBMS bindings for
+ use by a given ORM: each project might declare that it provides
+ ORM-bindings, allowing other projects to depend only on having at most one
+ of them installed.
+
+ A version declaration may be supplied and must follow the rules described in
+ Version specifiers. The distribution’s version number will be implied if
+ none is specified (e.g. ``foo (<3.0)``).
+
+ Each package may be followed by an environment marker after a semicolon
+ (e.g. ``foo; os_name == "posix"``).
+
+.. Below are setuptools keywords, above are distutils
+
+``include_package_data``
+ If set to ``True``, this tells ``setuptools`` to automatically include any
+ data files it finds inside your package directories that are specified by
+ your ``MANIFEST.in`` file. For more information, see the section on
+ :ref:`Including Data Files`.
+
+``exclude_package_data``
+ A dictionary mapping package names to lists of glob patterns that should
+ be *excluded* from your package directories. You can use this to trim back
+ any excess files included by ``include_package_data``. For a complete
+ description and examples, see the section on :ref:`Including Data Files`.
+
+``package_data``
+ A dictionary mapping package names to lists of glob patterns. For a
+ complete description and examples, see the section on :ref:`Including Data
+ Files`. You do not need to use this option if you are using
+ ``include_package_data``, unless you need to add e.g. files that are
+ generated by your setup script and build process. (And are therefore not
+ in source control or are files that you don't want to include in your
+ source distribution.)
+
+``zip_safe``
+ A boolean (True or False) flag specifying whether the project can be
+ safely installed and run from a zip file. If this argument is not
+ supplied, the ``bdist_egg`` command will have to analyze all of your
+ project's contents for possible problems each time it builds an egg.
+
+``install_requires``
+ A string or list of strings specifying what other distributions need to
+ be installed when this one is. See the section on :ref:`Declaring
+ Dependencies` for details and examples of the format of this argument.
+
+``entry_points``
+ A dictionary mapping entry point group names to strings or lists of strings
+ defining the entry points. Entry points are used to support dynamic
+ discovery of services or plugins provided by a project. See :ref:`Dynamic
+ Discovery of Services and Plugins` for details and examples of the format
+ of this argument. In addition, this keyword is used to support
+ :ref:`Automatic Script Creation <entry_points>`.
+
+``extras_require``
+ A dictionary mapping names of "extras" (optional features of your project)
+ to strings or lists of strings specifying what other distributions must be
+ installed to support those features. See the section on :ref:`Declaring
+ Dependencies` for details and examples of the format of this argument.
+
+``python_requires``
+ A string corresponding to a version specifier (as defined in PEP 440) for
+ the Python version, used to specify the Requires-Python defined in PEP 345.
+
+``setup_requires``
+
+ .. warning::
+ Using ``setup_requires`` is discouraged in favor of `PEP-518`_
+
+ A string or list of strings specifying what other distributions need to
+ be present in order for the *setup script* to run. ``setuptools`` will
+ attempt to obtain these (even going so far as to download them using
+ ``EasyInstall``) before processing the rest of the setup script or commands.
+ This argument is needed if you are using distutils extensions as part of
+ your build process; for example, extensions that process setup() arguments
+ and turn them into EGG-INFO metadata files.
+
+ (Note: projects listed in ``setup_requires`` will NOT be automatically
+ installed on the system where the setup script is being run. They are
+ simply downloaded to the ./.eggs directory if they're not locally available
+ already. If you want them to be installed, as well as being available
+ when the setup script is run, you should add them to ``install_requires``
+ **and** ``setup_requires``.)
+
+.. _PEP-518: http://www.python.org/dev/peps/pep-0518/
+
+``dependency_links``
+
+ .. warning::
+ ``dependency_links`` is deprecated. It is not supported anymore by pip.
+
+ A list of strings naming URLs to be searched when satisfying dependencies.
+ These links will be used if needed to install packages specified by
+ ``setup_requires`` or ``tests_require``. They will also be written into
+ the egg's metadata for use by tools like EasyInstall when installing
+ an ``.egg`` file.
+
+``namespace_packages``
+ A list of strings naming the project's "namespace packages". A namespace
+ package is a package that may be split across multiple project
+ distributions. For example, Zope 3's ``zope`` package is a namespace
+ package, because subpackages like ``zope.interface`` and ``zope.publisher``
+ may be distributed separately. The egg runtime system can automatically
+ merge such subpackages into a single parent package at runtime, as long
+ as you declare them in each project that contains any subpackages of the
+ namespace package, and as long as the namespace package's ``__init__.py``
+ does not contain any code other than a namespace declaration. See the
+ section on :ref:`Namespace Packages` for more information.
+
+``test_suite``
+ A string naming a ``unittest.TestCase`` subclass (or a package or module
+ containing one or more of them, or a method of such a subclass), or naming
+ a function that can be called with no arguments and returns a
+ ``unittest.TestSuite``. If the named suite is a module, and the module
+ has an ``additional_tests()`` function, it is called and the results are
+ added to the tests to be run. If the named suite is a package, any
+ submodules and subpackages are recursively added to the overall test suite.
+
+ Specifying this argument enables use of the :ref:`test` command to run the
+ specified test suite, e.g. via ``setup.py test``. See the section on the
+ :ref:`test` command below for more details.
+
+ New in 41.5.0: Deprecated the test command.
+
+``tests_require``
+ If your project's tests need one or more additional packages besides those
+ needed to install it, you can use this option to specify them. It should
+ be a string or list of strings specifying what other distributions need to
+ be present for the package's tests to run. When you run the ``test``
+ command, ``setuptools`` will attempt to obtain these (even going
+ so far as to download them using ``EasyInstall``). Note that these
+ required projects will *not* be installed on the system where the tests
+ are run, but only downloaded to the project's setup directory if they're
+ not already installed locally.
+
+ New in 41.5.0: Deprecated the test command.
+
+.. _test_loader:
+
+``test_loader``
+ If you would like to use a different way of finding tests to run than what
+ setuptools normally uses, you can specify a module name and class name in
+ this argument. The named class must be instantiable with no arguments, and
+ its instances must support the ``loadTestsFromNames()`` method as defined
+ in the Python ``unittest`` module's ``TestLoader`` class. Setuptools will
+ pass only one test "name" in the ``names`` argument: the value supplied for
+ the ``test_suite`` argument. The loader you specify may interpret this
+ string in any way it likes, as there are no restrictions on what may be
+ contained in a ``test_suite`` string.
+
+ The module name and class name must be separated by a ``:``. The default
+ value of this argument is ``"setuptools.command.test:ScanningLoader"``. If
+ you want to use the default ``unittest`` behavior, you can specify
+ ``"unittest:TestLoader"`` as your ``test_loader`` argument instead. This
+ will prevent automatic scanning of submodules and subpackages.
+
+ The module and class you specify here may be contained in another package,
+ as long as you use the ``tests_require`` option to ensure that the package
+ containing the loader class is available when the ``test`` command is run.
+
+ New in 41.5.0: Deprecated the test command.
+
+``eager_resources``
+ A list of strings naming resources that should be extracted together, if
+ any of them is needed, or if any C extensions included in the project are
+ imported. This argument is only useful if the project will be installed as
+ a zipfile, and there is a need to have all of the listed resources be
+ extracted to the filesystem *as a unit*. Resources listed here
+ should be '/'-separated paths, relative to the source root, so to list a
+ resource ``foo.png`` in package ``bar.baz``, you would include the string
+ ``bar/baz/foo.png`` in this argument.
+
+ If you only need to obtain resources one at a time, or you don't have any C
+ extensions that access other files in the project (such as data files or
+ shared libraries), you probably do NOT need this argument and shouldn't
+ mess with it. For more details on how this argument works, see the section
+ below on :ref:`Automatic Resource Extraction`.
+
+``project_urls``
+ An arbitrary map of URL names to hyperlinks, allowing more extensible
+ documentation of where various resources can be found than the simple
+ ``url`` and ``download_url`` options provide.
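+
+A minimal, illustrative ``setup()`` call combining several of the keywords
+above (every name, version and URL here is a hypothetical placeholder):
+
+.. code-block:: python
+
+    from setuptools import setup, find_packages
+
+    setup(
+        name="example-package",
+        version="0.1.0",
+        description="An example package",
+        packages=find_packages(where="src"),
+        package_dir={"": "src"},
+        python_requires=">=3.6",
+        install_requires=["requests>=2.0"],
+        extras_require={"docs": ["sphinx"]},
+        entry_points={
+            "console_scripts": ["example-cli = example_package.cli:main"],
+        },
+        project_urls={"Source": "https://github.com/example/example-package"},
+    )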
diff --git a/docs/requirements.txt b/docs/requirements.txt
deleted file mode 100644
index 2138c88..0000000
--- a/docs/requirements.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-sphinx
-rst.linker>=1.9
-jaraco.packaging>=3.2
-
-setuptools>=34
diff --git a/docs/roadmap.rst b/docs/roadmap.rst
new file mode 100644
index 0000000..147288f
--- /dev/null
+++ b/docs/roadmap.rst
@@ -0,0 +1,7 @@
+=======
+Roadmap
+=======
+
+Setuptools maintains a series of `milestones
+<https://github.com/pypa/setuptools/milestones>`_ to track
+a roadmap of large-scale goals.
diff --git a/docs/roadmap.txt b/docs/roadmap.txt
deleted file mode 100644
index 8f175b9..0000000
--- a/docs/roadmap.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-=======
-Roadmap
-=======
-
-Setuptools is primarily in maintenance mode. The project attempts to address
-user issues, concerns, and feature requests in a timely fashion.
diff --git a/docs/setuptools.rst b/docs/setuptools.rst
new file mode 100644
index 0000000..aa63830
--- /dev/null
+++ b/docs/setuptools.rst
@@ -0,0 +1,228 @@
+==================================================
+Building and Distributing Packages with Setuptools
+==================================================
+
+``Setuptools`` is a collection of enhancements to the Python ``distutils``
+that allow developers to more easily build and
+distribute Python packages, especially ones that have dependencies on other
+packages.
+
+Packages built and distributed using ``setuptools`` look to the user like
+ordinary Python packages based on the ``distutils``.
+
+Feature Highlights:
+
+* Create `Python Eggs <http://peak.telecommunity.com/DevCenter/PythonEggs>`_ -
+ a single-file importable distribution format
+
+* Enhanced support for accessing data files hosted in zipped packages.
+
+* Automatically include all packages in your source tree, without listing them
+ individually in setup.py
+
+* Automatically include all relevant files in your source distributions,
+ without needing to create a |MANIFEST.in|_ file, and without having to force
+ regeneration of the ``MANIFEST`` file when your source tree changes
+ [#manifest]_.
+
+* Automatically generate wrapper scripts or Windows (console and GUI) .exe
+ files for any number of "main" functions in your project. (Note: this is not
+ a py2exe replacement; the .exe files rely on the local Python installation.)
+
+* Transparent Cython support, so that your setup.py can list ``.pyx`` files and
+ still work even when the end-user doesn't have Cython installed (as long as
+ you include the Cython-generated C in your source distribution)
+
+* Command aliases - create project-specific, per-user, or site-wide shortcut
+ names for commonly used commands and options
+
+* Deploy your project in "development mode", such that it's available on
+ ``sys.path``, yet can still be edited directly from its source checkout.
+
+* Easily extend the distutils with new commands or ``setup()`` arguments, and
+ distribute/reuse your extensions for multiple projects, without copying code.
+
+* Create extensible applications and frameworks that automatically discover
+ extensions, using simple "entry points" declared in a project's setup script.
+
+* Full support for PEP 420 via ``find_namespace_packages()``, which is also backwards
+ compatible with the existing ``find_packages()`` for Python >= 3.3.
+
+-----------------
+Developer's Guide
+-----------------
+
+The developer's guide has been updated. See the :doc:`most recent version <userguide/index>`.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+TRANSITIONAL NOTE
+~~~~~~~~~~~~~~~~~
+
+Setuptools automatically calls ``declare_namespace()`` for you at runtime,
+but future versions may *not*. This is because the automatic declaration
+feature has some negative side effects, such as needing to import all namespace
+packages during the initialization of the ``pkg_resources`` runtime, and also
+the need for ``pkg_resources`` to be explicitly imported before any namespace
+packages work at all. In some future releases, you'll be responsible
+for including your own declaration lines, and the automatic declaration feature
+will be dropped to get rid of the negative side effects.
+
+During the remainder of the current development cycle, therefore, setuptools
+will warn you about missing ``declare_namespace()`` calls in your
+``__init__.py`` files, and you should correct these as soon as possible
+before the compatibility support is removed.
+Namespace packages without declaration lines will not work
+correctly once a user has upgraded to a later version, so it's important that
+you make this change now in order to avoid having your code break in the field.
+Our apologies for the inconvenience, and thank you for your patience.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+setup.cfg-only projects
+=======================
+
+.. versionadded:: 40.9.0
+
+If ``setup.py`` is missing from the project directory when a :pep:`517`
+build is invoked, ``setuptools`` emulates a dummy ``setup.py`` file containing
+only a ``setuptools.setup()`` call.
+
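+The emulated file is roughly equivalent to this sketch (shown only for
+illustration; you do not need to create it yourself):
+
+.. code-block:: python
+
+    from setuptools import setup
+
+    setup()
+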
+.. note::
+
+ :pep:`517` doesn't support editable installs, so this is currently
+ incompatible with ``pip install -e .``.
+
+This means that you can have a Python project with all build configuration
+specified in ``setup.cfg``, without a ``setup.py`` file, if you **can rely
+on** your project always being built by a :pep:`517`/:pep:`518` compatible
+frontend.
+
+To use this feature:
+
+* Specify build requirements and :pep:`517` build backend in
+ ``pyproject.toml``.
+ For example:
+
+ .. code-block:: toml
+
+ [build-system]
+ requires = [
+ "setuptools >= 40.9.0",
+ "wheel",
+ ]
+ build-backend = "setuptools.build_meta"
+
+* Use a :pep:`517` compatible build frontend, such as ``pip >= 19`` or ``build``.
+
+ .. warning::
+
+ As :pep:`517` is new, support is not universal, and frontends that
+ do support it may still have bugs. For compatibility, you may want to
+ put a ``setup.py`` file containing only a ``setuptools.setup()``
+ invocation.
+
+
+Configuration API
+=================
+
+Some automation tools may wish to access data from a configuration file.
+
+``Setuptools`` exposes a ``read_configuration()`` function for
+parsing ``metadata`` and ``options`` sections into a dictionary.
+
+
+.. code-block:: python
+
+ from setuptools.config import read_configuration
+
+ conf_dict = read_configuration("/home/user/dev/package/setup.cfg")
+
+
+By default, ``read_configuration()`` will read only the file provided
+in the first argument. To include values from other configuration files
+which could be in various places, set the ``find_others`` keyword argument
+to ``True``.
+
+If you have only a configuration file but not the whole package, you can still
+try to get data out of it with the help of the ``ignore_option_errors`` keyword
+argument. When it is set to ``True``, all options with errors possibly produced
+by directives, such as ``attr:`` and others, will be silently ignored.
+As a consequence, the resulting dictionary will include no such options.
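+
+For example, both keyword arguments can be combined (the path below is an
+illustrative placeholder):
+
+.. code-block:: python
+
+    from setuptools.config import read_configuration
+
+    conf_dict = read_configuration(
+        "/home/user/dev/package/setup.cfg",
+        find_others=True,            # also merge values from related config files
+        ignore_option_errors=True,   # skip options whose directives cannot be resolved
+    )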
+
+
+
+
+
+
+
+
+
+
+
+Forum and Bug Tracker
+=====================
+
+Please use `GitHub Discussions`_ for questions and discussion about
+setuptools, and the `setuptools bug tracker`_ ONLY for issues you have
+confirmed via the forum are actual bugs, and which you have reduced to a minimal
+set of steps to reproduce.
+
+.. _GitHub Discussions: https://github.com/pypa/setuptools/discussions
+.. _setuptools bug tracker: https://github.com/pypa/setuptools/
+
+
+----
+
+
+.. [#manifest] The default behaviour for ``setuptools`` will work well for pure
+ Python packages, or packages with simple C extensions (that don't require
+ any special C header). See :ref:`Controlling files in the distribution` and
+ :doc:`userguide/datafiles` for more information about complex scenarios, if
+ you want to include other types of files.
+
+
+.. |MANIFEST.in| replace:: ``MANIFEST.in``
+.. _MANIFEST.in: https://packaging.python.org/en/latest/guides/using-manifest-in/
diff --git a/docs/setuptools.txt b/docs/setuptools.txt
deleted file mode 100644
index e14d208..0000000
--- a/docs/setuptools.txt
+++ /dev/null
@@ -1,2775 +0,0 @@
-==================================================
-Building and Distributing Packages with Setuptools
-==================================================
-
-``Setuptools`` is a collection of enhancements to the Python ``distutils``
-that allow developers to more easily build and
-distribute Python packages, especially ones that have dependencies on other
-packages.
-
-Packages built and distributed using ``setuptools`` look to the user like
-ordinary Python packages based on the ``distutils``. Your users don't need to
-install or even know about setuptools in order to use them, and you don't
-have to include the entire setuptools package in your distributions. By
-including just a single `bootstrap module`_ (a 12K .py file), your package will
-automatically download and install ``setuptools`` if the user is building your
-package from source and doesn't have a suitable version already installed.
-
-.. _bootstrap module: https://bootstrap.pypa.io/ez_setup.py
-
-Feature Highlights:
-
-* Automatically find/download/install/upgrade dependencies at build time using
- the `EasyInstall tool <easy_install.html>`_,
- which supports downloading via HTTP, FTP, Subversion, and SourceForge, and
- automatically scans web pages linked from PyPI to find download links. (It's
- the closest thing to CPAN currently available for Python.)
-
-* Create `Python Eggs <http://peak.telecommunity.com/DevCenter/PythonEggs>`_ -
- a single-file importable distribution format
-
-* Enhanced support for accessing data files hosted in zipped packages.
-
-* Automatically include all packages in your source tree, without listing them
- individually in setup.py
-
-* Automatically include all relevant files in your source distributions,
- without needing to create a ``MANIFEST.in`` file, and without having to force
- regeneration of the ``MANIFEST`` file when your source tree changes.
-
-* Automatically generate wrapper scripts or Windows (console and GUI) .exe
- files for any number of "main" functions in your project. (Note: this is not
- a py2exe replacement; the .exe files rely on the local Python installation.)
-
-* Transparent Pyrex support, so that your setup.py can list ``.pyx`` files and
- still work even when the end-user doesn't have Pyrex installed (as long as
- you include the Pyrex-generated C in your source distribution)
-
-* Command aliases - create project-specific, per-user, or site-wide shortcut
- names for commonly used commands and options
-
-* PyPI upload support - upload your source distributions and eggs to PyPI
-
-* Deploy your project in "development mode", such that it's available on
- ``sys.path``, yet can still be edited directly from its source checkout.
-
-* Easily extend the distutils with new commands or ``setup()`` arguments, and
- distribute/reuse your extensions for multiple projects, without copying code.
-
-* Create extensible applications and frameworks that automatically discover
- extensions, using simple "entry points" declared in a project's setup script.
-
-.. contents:: **Table of Contents**
-
-.. _ez_setup.py: `bootstrap module`_
-
-
------------------
-Developer's Guide
------------------
-
-
-Installing ``setuptools``
-=========================
-
-Please follow the `EasyInstall Installation Instructions`_ to install the
-current stable version of setuptools. In particular, be sure to read the
-section on `Custom Installation Locations`_ if you are installing anywhere
-other than Python's ``site-packages`` directory.
-
-.. _EasyInstall Installation Instructions: easy_install.html#installation-instructions
-
-.. _Custom Installation Locations: easy_install.html#custom-installation-locations
-
-If you want the current in-development version of setuptools, you should first
-install a stable version, and then run::
-
- ez_setup.py setuptools==dev
-
-This will download and install the latest development (i.e. unstable) version
-of setuptools from the Python Subversion sandbox.
-
-
-Basic Use
-=========
-
-For basic use of setuptools, just import things from setuptools instead of
-the distutils. Here's a minimal setup script using setuptools::
-
- from setuptools import setup, find_packages
- setup(
- name="HelloWorld",
- version="0.1",
- packages=find_packages(),
- )
-
-As you can see, it doesn't take much to use setuptools in a project.
-Run that script in your project folder, alongside the Python packages
-you have developed.
-
-Invoke that script to produce eggs, upload to
-PyPI, and automatically include all packages in the directory where the
-setup.py lives. See the `Command Reference`_ section below to see what
-commands you can give to this setup script. For example,
-to produce a source distribution, simply invoke::
-
- python setup.py sdist
-
-Of course, before you release your project to PyPI, you'll want to add a bit
-more information to your setup script to help people find or learn about your
-project. And maybe your project will have grown by then to include a few
-dependencies, and perhaps some data files and scripts::
-
- from setuptools import setup, find_packages
- setup(
- name="HelloWorld",
- version="0.1",
- packages=find_packages(),
- scripts=['say_hello.py'],
-
- # Project uses reStructuredText, so ensure that the docutils get
- # installed or upgraded on the target machine
- install_requires=['docutils>=0.3'],
-
- package_data={
- # If any package contains *.txt or *.rst files, include them:
- '': ['*.txt', '*.rst'],
- # And include any *.msg files found in the 'hello' package, too:
- 'hello': ['*.msg'],
- },
-
- # metadata for upload to PyPI
- author="Me",
- author_email="me@example.com",
- description="This is an Example Package",
- license="PSF",
- keywords="hello world example examples",
- url="http://example.com/HelloWorld/", # project home page, if any
- project_urls={
- "Bug Tracker": "https://bugs.example.com/HelloWorld/",
- "Documentation": "https://docs.example.com/HelloWorld/",
- "Source Code": "https://code.example.com/HelloWorld/",
- }
-
- # could also include long_description, download_url, classifiers, etc.
- )
-
-In the sections that follow, we'll explain what most of these ``setup()``
-arguments do (except for the metadata ones), and the various ways you might use
-them in your own project(s).
-
-
-Specifying Your Project's Version
----------------------------------
-
-Setuptools can work well with most versioning schemes; there are, however, a
-few special things to watch out for, in order to ensure that setuptools and
-EasyInstall can always tell what version of your package is newer than another
-version. Knowing these things will also help you correctly specify what
-versions of other projects your project depends on.
-
-A version consists of an alternating series of release numbers and pre-release
-or post-release tags. A release number is a series of digits punctuated by
-dots, such as ``2.4`` or ``0.5``. Each series of digits is treated
-numerically, so releases ``2.1`` and ``2.1.0`` are different ways to spell the
-same release number, denoting the first subrelease of release 2. But ``2.10``
-is the *tenth* subrelease of release 2, and so is a different and newer release
-from ``2.1`` or ``2.1.0``. Leading zeros within a series of digits are also
-ignored, so ``2.01`` is the same as ``2.1``, and different from ``2.0.1``.
-
-Following a release number, you can have either a pre-release or post-release
-tag. Pre-release tags make a version be considered *older* than the version
-they are appended to. So, revision ``2.4`` is *newer* than revision ``2.4c1``,
-which in turn is newer than ``2.4b1`` or ``2.4a1``. Postrelease tags make
-a version be considered *newer* than the version they are appended to. So,
-revisions like ``2.4-1`` and ``2.4pl3`` are newer than ``2.4``, but are *older*
-than ``2.4.1`` (which has a higher release number).
-
-A pre-release tag is a series of letters that are alphabetically before
-"final". Some examples of prerelease tags would include ``alpha``, ``beta``,
-``a``, ``c``, ``dev``, and so on. You do not have to place a dot or dash
-before the prerelease tag if it's immediately after a number, but it's okay to
-do so if you prefer. Thus, ``2.4c1`` and ``2.4.c1`` and ``2.4-c1`` all
-represent release candidate 1 of version ``2.4``, and are treated as identical
-by setuptools.
-
-In addition, there are three special prerelease tags that are treated as if
-they were the letter ``c``: ``pre``, ``preview``, and ``rc``. So, version
-``2.4rc1``, ``2.4pre1`` and ``2.4preview1`` are all the exact same version as
-``2.4c1``, and are treated as identical by setuptools.
-
-A post-release tag is either a series of letters that are alphabetically
-greater than or equal to "final", or a dash (``-``). Post-release tags are
-generally used to separate patch numbers, port numbers, build numbers, revision
-numbers, or date stamps from the release number. For example, the version
-``2.4-r1263`` might denote Subversion revision 1263 of a post-release patch of
-version ``2.4``. Or you might use ``2.4-20051127`` to denote a date-stamped
-post-release.
-
-Notice that after each pre or post-release tag, you are free to place another
-release number, followed again by more pre- or post-release tags. For example,
-``0.6a9.dev-r41475`` could denote Subversion revision 41475 of the in-
-development version of the ninth alpha of release 0.6. Notice that ``dev`` is
-a pre-release tag, so this version is a *lower* version number than ``0.6a9``,
-which would be the actual ninth alpha of release 0.6. But the ``-r41475`` is
-a post-release tag, so this version is *newer* than ``0.6a9.dev``.
-
-For the most part, setuptools' interpretation of version numbers is intuitive,
-but here are a few tips that will keep you out of trouble in the corner cases:
-
-* Don't stick adjoining pre-release tags together without a dot or number
- between them. Version ``1.9adev`` is the ``adev`` prerelease of ``1.9``,
- *not* a development pre-release of ``1.9a``. Use ``.dev`` instead, as in
- ``1.9a.dev``, or separate the prerelease tags with a number, as in
- ``1.9a0dev``. ``1.9a.dev``, ``1.9a0dev``, and even ``1.9.a.dev`` are
- identical versions from setuptools' point of view, so you can use whatever
- scheme you prefer.
-
-* If you want to be certain that your chosen numbering scheme works the way
- you think it will, you can use the ``pkg_resources.parse_version()`` function
- to compare different version numbers::
-
-    >>> from pkg_resources import parse_version
-    >>> parse_version('1.9.a.dev') == parse_version('1.9a0dev')
-    True
-    >>> parse_version('2.1-rc2') < parse_version('2.1')
-    True
-    >>> parse_version('0.6a9dev-r41475') < parse_version('0.6a9')
-    True
-
-Once you've decided on a version numbering scheme for your project, you can
-have setuptools automatically tag your in-development releases with various
-pre- or post-release tags. See the following sections for more details:
-
-* `Tagging and "Daily Build" or "Snapshot" Releases`_
-* `Managing "Continuous Releases" Using Subversion`_
-* The `egg_info`_ command
-
-
-New and Changed ``setup()`` Keywords
-====================================
-
-The following keyword arguments to ``setup()`` are added or changed by
-``setuptools``. All of them are optional; you do not have to supply them
-unless you need the associated ``setuptools`` feature.
-
-``include_package_data``
- If set to ``True``, this tells ``setuptools`` to automatically include any
- data files it finds inside your package directories that are specified by
- your ``MANIFEST.in`` file. For more information, see the section below on
- `Including Data Files`_.
-
-``exclude_package_data``
- A dictionary mapping package names to lists of glob patterns that should
- be *excluded* from your package directories. You can use this to trim back
- any excess files included by ``include_package_data``. For a complete
- description and examples, see the section below on `Including Data Files`_.
-
-``package_data``
- A dictionary mapping package names to lists of glob patterns. For a
- complete description and examples, see the section below on `Including
- Data Files`_. You do not need to use this option if you are using
- ``include_package_data``, unless you need to add e.g. files that are
- generated by your setup script and build process. (And are therefore not
- in source control or are files that you don't want to include in your
- source distribution.)
-
-``zip_safe``
- A boolean (True or False) flag specifying whether the project can be
- safely installed and run from a zip file. If this argument is not
- supplied, the ``bdist_egg`` command will have to analyze all of your
- project's contents for possible problems each time it builds an egg.
-
-``install_requires``
- A string or list of strings specifying what other distributions need to
- be installed when this one is. See the section below on `Declaring
- Dependencies`_ for details and examples of the format of this argument.
-
-``entry_points``
- A dictionary mapping entry point group names to strings or lists of strings
- defining the entry points. Entry points are used to support dynamic
- discovery of services or plugins provided by a project. See `Dynamic
- Discovery of Services and Plugins`_ for details and examples of the format
- of this argument. In addition, this keyword is used to support `Automatic
- Script Creation`_.
-
-``extras_require``
- A dictionary mapping names of "extras" (optional features of your project)
- to strings or lists of strings specifying what other distributions must be
- installed to support those features. See the section below on `Declaring
- Dependencies`_ for details and examples of the format of this argument.
-
-``python_requires``
- A string corresponding to a version specifier (as defined in PEP 440) for
- the Python version, used to specify the Requires-Python defined in PEP 345.
-
-``setup_requires``
- A string or list of strings specifying what other distributions need to
- be present in order for the *setup script* to run. ``setuptools`` will
- attempt to obtain these (even going so far as to download them using
- ``EasyInstall``) before processing the rest of the setup script or commands.
- This argument is needed if you are using distutils extensions as part of
- your build process; for example, extensions that process setup() arguments
- and turn them into EGG-INFO metadata files.
-
- (Note: projects listed in ``setup_requires`` will NOT be automatically
- installed on the system where the setup script is being run. They are
- simply downloaded to the ./.eggs directory if they're not locally available
- already. If you want them to be installed, as well as being available
- when the setup script is run, you should add them to ``install_requires``
- **and** ``setup_requires``.)
-
-``dependency_links``
- A list of strings naming URLs to be searched when satisfying dependencies.
- These links will be used if needed to install packages specified by
- ``setup_requires`` or ``tests_require``. They will also be written into
- the egg's metadata for use by tools like EasyInstall to use when installing
- an ``.egg`` file.
-
-``namespace_packages``
- A list of strings naming the project's "namespace packages". A namespace
- package is a package that may be split across multiple project
- distributions. For example, Zope 3's ``zope`` package is a namespace
- package, because subpackages like ``zope.interface`` and ``zope.publisher``
- may be distributed separately. The egg runtime system can automatically
- merge such subpackages into a single parent package at runtime, as long
- as you declare them in each project that contains any subpackages of the
- namespace package, and as long as the namespace package's ``__init__.py``
- does not contain any code other than a namespace declaration. See the
- section below on `Namespace Packages`_ for more information.
-
-``test_suite``
- A string naming a ``unittest.TestCase`` subclass (or a package or module
- containing one or more of them, or a method of such a subclass), or naming
- a function that can be called with no arguments and returns a
- ``unittest.TestSuite``. If the named suite is a module, and the module
- has an ``additional_tests()`` function, it is called and the results are
- added to the tests to be run. If the named suite is a package, any
- submodules and subpackages are recursively added to the overall test suite.
-
- Specifying this argument enables use of the `test`_ command to run the
- specified test suite, e.g. via ``setup.py test``. See the section on the
- `test`_ command below for more details.
-
-``tests_require``
- If your project's tests need one or more additional packages besides those
- needed to install it, you can use this option to specify them. It should
- be a string or list of strings specifying what other distributions need to
- be present for the package's tests to run. When you run the ``test``
- command, ``setuptools`` will attempt to obtain these (even going
- so far as to download them using ``EasyInstall``). Note that these
- required projects will *not* be installed on the system where the tests
- are run, but only downloaded to the project's setup directory if they're
- not already installed locally.
-
-.. _test_loader:
-
-``test_loader``
- If you would like to use a different way of finding tests to run than what
- setuptools normally uses, you can specify a module name and class name in
- this argument. The named class must be instantiable with no arguments, and
- its instances must support the ``loadTestsFromNames()`` method as defined
- in the Python ``unittest`` module's ``TestLoader`` class. Setuptools will
- pass only one test "name" in the `names` argument: the value supplied for
- the ``test_suite`` argument. The loader you specify may interpret this
- string in any way it likes, as there are no restrictions on what may be
- contained in a ``test_suite`` string.
-
- The module name and class name must be separated by a ``:``. The default
- value of this argument is ``"setuptools.command.test:ScanningLoader"``. If
- you want to use the default ``unittest`` behavior, you can specify
- ``"unittest:TestLoader"`` as your ``test_loader`` argument instead. This
- will prevent automatic scanning of submodules and subpackages.
-
- The module and class you specify here may be contained in another package,
- as long as you use the ``tests_require`` option to ensure that the package
- containing the loader class is available when the ``test`` command is run.
-
-``eager_resources``
- A list of strings naming resources that should be extracted together, if
- any of them is needed, or if any C extensions included in the project are
- imported. This argument is only useful if the project will be installed as
- a zipfile, and there is a need to have all of the listed resources be
- extracted to the filesystem *as a unit*. Resources listed here
- should be '/'-separated paths, relative to the source root, so to list a
- resource ``foo.png`` in package ``bar.baz``, you would include the string
- ``bar/baz/foo.png`` in this argument.
-
- If you only need to obtain resources one at a time, or you don't have any C
- extensions that access other files in the project (such as data files or
- shared libraries), you probably do NOT need this argument and shouldn't
- mess with it. For more details on how this argument works, see the section
- below on `Automatic Resource Extraction`_.
-
-``use_2to3``
- Convert the source code from Python 2 to Python 3 with 2to3 during the
- build process. See :doc:`python3` for more details.
-
-``convert_2to3_doctests``
- List of doctest source files that need to be converted with 2to3.
- See :doc:`python3` for more details.
-
-``use_2to3_fixers``
- A list of modules to search for additional fixers to be used during
- the 2to3 conversion. See :doc:`python3` for more details.
-
-``project_urls``
- An arbitrary map of URL names to hyperlinks, allowing more extensible
- documentation of where various resources can be found than the simple
- ``url`` and ``download_url`` options provide.
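-
-Taken together, a minimal ``setup()`` call using several of the keywords above
-might look like the following sketch (the project name, version, and
-requirements are purely illustrative)::
-
-    from setuptools import setup, find_packages
-
-    setup(
-        name="Project",
-        version="0.1",
-        packages=find_packages(),
-        include_package_data=True,
-        zip_safe=False,
-        python_requires=">=2.7",
-        install_requires=["docutils>=0.3"],
-        project_urls={
-            "Documentation": "https://docs.example.com/Project/",
-        },
-    )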
-
-
-Using ``find_packages()``
--------------------------
-
-For simple projects, it's usually easy enough to manually add packages to
-the ``packages`` argument of ``setup()``. However, for very large projects
-(Twisted, PEAK, Zope, Chandler, etc.), it can be a big burden to keep the
-package list updated. That's what ``setuptools.find_packages()`` is for.
-
-``find_packages()`` takes a source directory and two lists of package name
-patterns to exclude and include. If omitted, the source directory defaults to
-the same
-directory as the setup script. Some projects use a ``src`` or ``lib``
-directory as the root of their source tree, and those projects would of course
-use ``"src"`` or ``"lib"`` as the first argument to ``find_packages()``. (And
-such projects also need something like ``package_dir={'':'src'}`` in their
-``setup()`` arguments, but that's just a normal distutils thing.)
-
-Anyway, ``find_packages()`` walks the target directory, filtering by inclusion
-patterns, and finds Python packages (any directory). Packages are only
-recognized if they include an ``__init__.py`` file. Finally, exclusion
-patterns are applied to remove matching packages.
-
-Inclusion and exclusion patterns are package names, optionally including
-wildcards. For
-example, ``find_packages(exclude=["*.tests"])`` will exclude all packages whose
-last name part is ``tests``. Or, ``find_packages(exclude=["*.tests",
-"*.tests.*"])`` will also exclude any subpackages of packages named ``tests``,
-but it still won't exclude a top-level ``tests`` package or the children
-thereof. In fact, if you really want no ``tests`` packages at all, you'll need
-something like this::
-
-    find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"])
-
-in order to cover all the bases. Really, the exclusion patterns are intended
-to cover simpler use cases than this, like excluding a single, specified
-package and its subpackages.
-
-Regardless of the parameters, the ``find_packages()``
-function returns a list of package names suitable for use as the ``packages``
-argument to ``setup()``, and so is usually the easiest way to set that
-argument in your setup script. Especially since it frees you from having to
-remember to modify your setup script whenever your project grows additional
-top-level packages or subpackages.
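-
-For instance, a project keeping its code under ``src`` might (hypothetically)
-wire ``find_packages()`` into its setup script like this, excluding any
-``tests`` packages along the way::
-
-    from setuptools import setup, find_packages
-
-    setup(
-        name="Project",
-        packages=find_packages("src", exclude=["*.tests", "*.tests.*",
-                                               "tests.*", "tests"]),
-        package_dir={"": "src"},
-    )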
-
-
-Automatic Script Creation
-=========================
-
-Packaging and installing scripts can be a bit awkward with the distutils. For
-one thing, there's no easy way to have a script's filename match local
-conventions on both Windows and POSIX platforms. For another, you often have
-to create a separate file just for the "main" script, when your actual "main"
-is a function in a module somewhere. And even in Python 2.4, using the ``-m``
-option only works for actual ``.py`` files that aren't installed in a package.
-
-``setuptools`` fixes all of these problems by automatically generating scripts
-for you with the correct extension, and on Windows it will even create an
-``.exe`` file so that users don't have to change their ``PATHEXT`` settings.
-The way to use this feature is to define "entry points" in your setup script
-that indicate what function the generated script should import and run. For
-example, to create two console scripts called ``foo`` and ``bar``, and a GUI
-script called ``baz``, you might do something like this::
-
-    setup(
-        # other arguments here...
-        entry_points={
-            'console_scripts': [
-                'foo = my_package.some_module:main_func',
-                'bar = other_module:some_func',
-            ],
-            'gui_scripts': [
-                'baz = my_package_gui:start_func',
-            ]
-        }
-    )
-
-When this project is installed on non-Windows platforms (using "setup.py
-install", "setup.py develop", or by using EasyInstall), a set of ``foo``,
-``bar``, and ``baz`` scripts will be installed that import ``main_func`` and
-``some_func`` from the specified modules. The functions you specify are called
-with no arguments, and their return value is passed to ``sys.exit()``, so you
-can return an errorlevel or message to print to stderr.
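-
-As a sketch (not part of the original example), the ``main_func`` referenced
-by the hypothetical ``foo`` script above might look like this::
-
-    # my_package/some_module.py  (hypothetical module)
-
-    def main_func():
-        """Entry point for the ``foo`` console script."""
-        print("Hello from foo")
-        # The return value is handed to sys.exit(): None or 0 means success,
-        # while a nonzero integer or a string signals an error.
-        return 0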
-
-On Windows, a set of ``foo.exe``, ``bar.exe``, and ``baz.exe`` launchers are
-created, alongside a set of ``foo.py``, ``bar.py``, and ``baz.pyw`` files. The
-``.exe`` wrappers find and execute the right version of Python to run the
-``.py`` or ``.pyw`` file.
-
-You may define as many "console script" and "gui script" entry points as you
-like, and each one can optionally specify "extras" that it depends on, that
-will be added to ``sys.path`` when the script is run. For more information on
-"extras", see the section below on `Declaring Extras`_. For more information
-on "entry points" in general, see the section below on `Dynamic Discovery of
-Services and Plugins`_.
-
-
-"Eggsecutable" Scripts
-----------------------
-
-Occasionally, there are situations where it's desirable to make an ``.egg``
-file directly executable. You can do this by including an entry point such
-as the following::
-
-    setup(
-        # other arguments here...
-        entry_points={
-            'setuptools.installation': [
-                'eggsecutable = my_package.some_module:main_func',
-            ]
-        }
-    )
-
-Any eggs built from the above setup script will include a short executable
-prelude that imports and calls ``main_func()`` from ``my_package.some_module``.
-The prelude can be run on Unix-like platforms (including Mac and Linux) by
-invoking the egg with ``/bin/sh``, or by enabling execute permissions on the
-``.egg`` file. For the executable prelude to run, the appropriate version of
-Python must be available via the ``PATH`` environment variable, under its
-"long" name. That is, if the egg is built for Python 2.3, there must be a
-``python2.3`` executable present in a directory on ``PATH``.
-
-This feature is primarily intended to support ``ez_setup`` bootstrapping the
-installation of setuptools itself on non-Windows platforms, but may also be
-useful for other projects.
-
-IMPORTANT NOTE: Eggs with an "eggsecutable" header cannot be renamed, or
-invoked via symlinks. They *must* be invoked using their original filename, in
-order to ensure that, once running, ``pkg_resources`` will know what project
-and version is in use. The header script will check this and exit with an
-error if the ``.egg`` file has been renamed or is invoked via a symlink that
-changes its base name.
-
-
-Declaring Dependencies
-======================
-
-``setuptools`` supports automatically installing dependencies when a package is
-installed, and including information about dependencies in Python Eggs (so that
-package management tools like EasyInstall can use the information).
-
-``setuptools`` and ``pkg_resources`` use a common syntax for specifying a
-project's required dependencies. This syntax consists of a project's PyPI
-name, optionally followed by a comma-separated list of "extras" in square
-brackets, optionally followed by a comma-separated list of version
-specifiers. A version specifier is one of the operators ``<``, ``>``, ``<=``,
-``>=``, ``==`` or ``!=``, followed by a version identifier. Tokens may be
-separated by whitespace, but any whitespace or nonstandard characters within a
-project name or version identifier must be replaced with ``-``.
-
-Version specifiers for a given project are internally sorted into ascending
-version order, and used to establish what ranges of versions are acceptable.
-Adjacent redundant conditions are also consolidated (e.g. ``">1, >2"`` becomes
-``">2"``, and ``"<2,<3"`` becomes ``"<2"``). ``"!="`` versions are excised from
-the ranges they fall within. A project's version is then checked for
-membership in the resulting ranges. (Note that providing conflicting conditions
-for the same version (e.g. "<2,>=2" or "==2,!=2") is meaningless and may
-therefore produce bizarre results.)
-
-Here are some example requirement specifiers::
-
-    docutils >= 0.3
-
-    # comment lines and \ continuations are allowed in requirement strings
-    BazSpam ==1.1, ==1.2, ==1.3, ==1.4, ==1.5, \
-        ==1.6, ==1.7     # and so are line-end comments
-
-    PEAK[FastCGI, reST]>=0.5a4
-
-    setuptools==0.5a7
-
-The simplest way to include requirement specifiers is to use the
-``install_requires`` argument to ``setup()``. It takes a string or list of
-strings containing requirement specifiers. If you include more than one
-requirement in a string, each requirement must begin on a new line.
-
-This has three effects:
-
-1. When your project is installed, either by using EasyInstall, ``setup.py
- install``, or ``setup.py develop``, all of the dependencies not already
- installed will be located (via PyPI), downloaded, built (if necessary),
- and installed.
-
-2. Any scripts in your project will be installed with wrappers that verify
- the availability of the specified dependencies at runtime, and ensure that
- the correct versions are added to ``sys.path`` (e.g. if multiple versions
- have been installed).
-
-3. Python Egg distributions will include a metadata file listing the
- dependencies.
-
-Note, by the way, that if you declare your dependencies in ``setup.py``, you do
-*not* need to use the ``require()`` function in your scripts or modules, as
-long as you either install the project or use ``setup.py develop`` to do
-development work on it. (See `"Development Mode"`_ below for more details on
-using ``setup.py develop``.)
-
-
-Dependencies that aren't in PyPI
---------------------------------
-
-If your project depends on packages that aren't registered in PyPI, you may
-still be able to depend on them, as long as they are available for download
-as:
-
-- an egg or an archive in the standard distutils ``sdist`` format,
-- a single ``.py`` file, or
-- a VCS repository (Subversion, Mercurial, or Git).
-
-You just need to add some URLs to the ``dependency_links`` argument to
-``setup()``.
-
-The URLs must be either:
-
-1. direct download URLs,
-2. the URLs of web pages that contain direct download links, or
-3. the repository's URL
-
-In general, it's better to link to web pages, because it is usually less
-complex to update a web page than to release a new version of your project.
-You can also use a SourceForge ``showfiles.php`` link in the case where a
-package you depend on is distributed via SourceForge.
-
-If you depend on a package that's distributed as a single ``.py`` file, you
-must include an ``"#egg=project-version"`` suffix to the URL, to give a project
-name and version number. (Be sure to escape any dashes in the name or version
-by replacing them with underscores.) EasyInstall will recognize this suffix
-and automatically create a trivial ``setup.py`` to wrap the single ``.py`` file
-as an egg.
-
-In the case of a VCS checkout, you should also append ``#egg=project-version``
-in order to identify for what package that checkout should be used. You can
-append ``@REV`` to the URL's path (before the fragment) to specify a revision.
-Additionally, you can also force the VCS being used by prepending the URL with
-a certain prefix. Currently available are:
-
-- ``svn+URL`` for Subversion,
-- ``git+URL`` for Git, and
-- ``hg+URL`` for Mercurial
-
-A more complete example would be:
-
-    ``vcs+proto://host/path@revision#egg=project-version``
-
-Be careful with the version. It should match the one inside the project files.
-If you want to disregard the version, you have to omit it both in the
-requirement specifier and in the URL's fragment.
-
-This will do a checkout (or a clone, in Git and Mercurial parlance) to a
-temporary folder and run ``setup.py bdist_egg``.
-
-The ``dependency_links`` option takes the form of a list of URL strings. For
-example, the below will cause EasyInstall to search the specified page for
-eggs or source distributions, if the package's dependencies aren't already
-installed::
-
-    setup(
-        ...
-        dependency_links=[
-            "http://peak.telecommunity.com/snapshots/"
-        ],
-    )
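-
-A VCS checkout can be referenced the same way; for example, a hypothetical
-Git repository hosting ``Project-A`` might be listed as::
-
-    setup(
-        ...
-        dependency_links=[
-            "git+https://example.com/repos/project-a.git@v1.0#egg=Project_A-1.0"
-        ],
-    )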
-
-
-.. _Declaring Extras:
-
-
-Declaring "Extras" (optional features with their own dependencies)
-------------------------------------------------------------------
-
-Sometimes a project has "recommended" dependencies, that are not required for
-all uses of the project. For example, a project might offer optional PDF
-output if ReportLab is installed, and reStructuredText support if docutils is
-installed. These optional features are called "extras", and setuptools allows
-you to define their requirements as well. In this way, other projects that
-require these optional features can force the additional requirements to be
-installed, by naming the desired extras in their ``install_requires``.
-
-For example, let's say that Project A offers optional PDF and reST support::
-
-    setup(
-        name="Project-A",
-        ...
-        extras_require={
-            'PDF': ["ReportLab>=1.2", "RXP"],
-            'reST': ["docutils>=0.3"],
-        }
-    )
-
-As you can see, the ``extras_require`` argument takes a dictionary mapping
-names of "extra" features, to strings or lists of strings describing those
-features' requirements. These requirements will *not* be automatically
-installed unless another package depends on them (directly or indirectly) by
-including the desired "extras" in square brackets after the associated project
-name. (Or if the extras were listed in a requirement spec on the EasyInstall
-command line.)
-
-Extras can be used by a project's `entry points`_ to specify dynamic
-dependencies. For example, if Project A includes a "rst2pdf" script, it might
-declare it like this, so that the "PDF" requirements are only resolved if the
-"rst2pdf" script is run::
-
-    setup(
-        name="Project-A",
-        ...
-        entry_points={
-            'console_scripts': [
-                'rst2pdf = project_a.tools.pdfgen [PDF]',
-                'rst2html = project_a.tools.htmlgen',
-                # more script entry points ...
-            ],
-        }
-    )
-
-Projects can also use another project's extras when specifying dependencies.
-For example, if project B needs "project A" with PDF support installed, it
-might declare the dependency like this::
-
-    setup(
-        name="Project-B",
-        install_requires=["Project-A[PDF]"],
-        ...
-    )
-
-This will cause ReportLab to be installed along with project A, if project B is
-installed -- even if project A was already installed. In this way, a project
-can encapsulate groups of optional "downstream dependencies" under a feature
-name, so that packages that depend on it don't have to know what the downstream
-dependencies are. If a later version of Project A builds in PDF support and
-no longer needs ReportLab, or if it ends up needing other dependencies besides
-ReportLab in order to provide PDF support, Project B's setup information does
-not need to change, but the right packages will still be installed if needed.
-
-Note, by the way, that if a project ends up not needing any other packages to
-support a feature, it should keep an empty requirements list for that feature
-in its ``extras_require`` argument, so that packages depending on that feature
-don't break (due to an invalid feature name). For example, if Project A above
-builds in PDF support and no longer needs ReportLab, it could change its
-setup to this::
-
-    setup(
-        name="Project-A",
-        ...
-        extras_require={
-            'PDF': [],
-            'reST': ["docutils>=0.3"],
-        }
-    )
-
-so that Project B doesn't have to remove the ``[PDF]`` from its requirement
-specifier.
-
-
-.. _Platform Specific Dependencies:
-
-
-Declaring platform specific dependencies
-----------------------------------------
-
-Sometimes a project might require a dependency to run on a specific platform.
-This could be a package that backports a module so that it can be used in
-older Python versions, or it could be a package that is required to run on a
-specific operating system. Declaring such dependencies conditionally allows a
-project to work on multiple platforms without installing dependencies that
-are not required for the platform that is installing the project.
-
-For example, here is a project that uses the ``enum`` module and ``pywin32``::
-
-    setup(
-        name="Project",
-        ...
-        install_requires=[
-            'enum34;python_version<"3.4"',
-            'pywin32 >= 1.0;platform_system=="Windows"'
-        ]
-    )
-
-Since the ``enum`` module was added in Python 3.4, it should only be installed
-if the Python version is earlier. Since ``pywin32`` will only be used on
-Windows, it should only be installed when the operating system is Windows.
-Specifying version requirements for the dependencies is supported as normal.
-
-The environmental markers that may be used for testing platform types are
-detailed in `PEP 508`_.
-
-.. _PEP 508: https://www.python.org/dev/peps/pep-0508/
-
-Including Data Files
-====================
-
-The distutils have traditionally allowed installation of "data files", which
-are placed in a platform-specific location. However, the most common use case
-for data files distributed with a package is for use *by* the package, usually
-by including the data files in the package directory.
-
-Setuptools offers three ways to specify data files to be included in your
-packages. First, you can simply use the ``include_package_data`` keyword,
-e.g.::
-
-    from setuptools import setup, find_packages
-    setup(
-        ...
-        include_package_data=True
-    )
-
-This tells setuptools to install any data files it finds in your packages.
-The data files must be specified via the distutils' ``MANIFEST.in`` file.
-(They can also be tracked by a revision control system, using an appropriate
-plugin. See the section below on `Adding Support for Revision Control
-Systems`_ for information on how to write such plugins.)
-
-If you want finer-grained control over what files are included (for example,
-if you have documentation files in your package directories and want to exclude
-them from installation), then you can also use the ``package_data`` keyword,
-e.g.::
-
-    from setuptools import setup, find_packages
-    setup(
-        ...
-        package_data={
-            # If any package contains *.txt or *.rst files, include them:
-            '': ['*.txt', '*.rst'],
-            # And include any *.msg files found in the 'hello' package, too:
-            'hello': ['*.msg'],
-        }
-    )
-
-The ``package_data`` argument is a dictionary that maps from package names to
-lists of glob patterns. The globs may include subdirectory names, if the data
-files are contained in a subdirectory of the package. For example, if the
-package tree looks like this::
-
-    setup.py
-    src/
-        mypkg/
-            __init__.py
-            mypkg.txt
-            data/
-                somefile.dat
-                otherdata.dat
-
-The setuptools setup file might look like this::
-
-    from setuptools import setup, find_packages
-    setup(
-        ...
-        packages=find_packages('src'),  # include all packages under src
-        package_dir={'': 'src'},        # tell distutils packages are under src
-
-        package_data={
-            # If any package contains *.txt files, include them:
-            '': ['*.txt'],
-            # And include any *.dat files found in the 'data' subdirectory
-            # of the 'mypkg' package, also:
-            'mypkg': ['data/*.dat'],
-        }
-    )
-
-Notice that if you list patterns in ``package_data`` under the empty string,
-these patterns are used to find files in every package, even ones that also
-have their own patterns listed. Thus, in the above example, the ``mypkg.txt``
-file gets included even though it's not listed in the patterns for ``mypkg``.
-
-Also notice that if you use paths, you *must* use a forward slash (``/``) as
-the path separator, even if you are on Windows. Setuptools automatically
-converts slashes to appropriate platform-specific separators at build time.
-
-If datafiles are contained in a subdirectory of a package that isn't a package
-itself (no ``__init__.py``), then the subdirectory names (or ``*``) are required
-in the ``package_data`` argument (as shown above with ``'data/*.dat'``).
-
-When building an ``sdist``, the data files are also drawn from the
-``package_name.egg-info/SOURCES.txt`` file, so make sure that this file is
-removed if the ``package_data`` list in ``setup.py`` is updated before
-calling ``setup.py``.
-
-(Note: although the ``package_data`` argument was previously only available in
-``setuptools``, it was also added to the Python ``distutils`` package as of
-Python 2.4; there is `some documentation for the feature`__ available on the
-python.org website. If using the setuptools-specific ``include_package_data``
-argument, files specified by ``package_data`` will *not* be automatically
-added to the manifest unless they are listed in the MANIFEST.in file.)
-
-__ http://docs.python.org/dist/node11.html
-
-Sometimes, the ``include_package_data`` or ``package_data`` options alone
-aren't sufficient to precisely define what files you want included. For
-example, you may want to include package README files in your revision control
-system and source distributions, but exclude them from being installed. So,
-setuptools offers an ``exclude_package_data`` option as well, that allows you
-to do things like this::
-
-    from setuptools import setup, find_packages
-    setup(
-        ...
-        packages=find_packages('src'),  # include all packages under src
-        package_dir={'': 'src'},        # tell distutils packages are under src
-
-        include_package_data=True,      # include everything in source control
-
-        # ...but exclude README.txt from all packages
-        exclude_package_data={'': ['README.txt']},
-    )
-
-The ``exclude_package_data`` option is a dictionary mapping package names to
-lists of wildcard patterns, just like the ``package_data`` option. And, just
-as with that option, a key of ``''`` will apply the given pattern(s) to all
-packages. However, any files that match these patterns will be *excluded*
-from installation, even if they were listed in ``package_data`` or were
-included as a result of using ``include_package_data``.
-
-In summary, the three options allow you to:
-
-``include_package_data``
- Accept all data files and directories matched by ``MANIFEST.in``.
-
-``package_data``
- Specify additional patterns to match files that may or may
- not be matched by ``MANIFEST.in`` or found in source control.
-
-``exclude_package_data``
- Specify patterns for data files and directories that should *not* be
- included when a package is installed, even if they would otherwise have
- been included due to the use of the preceding options.
-
-NOTE: Due to the way the distutils build process works, a data file that you
-include in your project and then stop including may be "orphaned" in your
-project's build directories, requiring you to run ``setup.py clean --all`` to
-fully remove them. This may also be important for your users and contributors
-if they track intermediate revisions of your project using Subversion; be sure
-to let them know when you make changes that remove files from inclusion so they
-can run ``setup.py clean --all``.
-
-
-Accessing Data Files at Runtime
--------------------------------
-
-Typically, existing programs manipulate a package's ``__file__`` attribute in
-order to find the location of data files. However, this manipulation isn't
-compatible with PEP 302-based import hooks, including importing from zip files
-and Python Eggs. It is strongly recommended that, if you are using data files,
-you should use the :ref:`ResourceManager API` of ``pkg_resources`` to access
-them. The ``pkg_resources`` module is distributed as part of setuptools, so if
-you're using setuptools to distribute your package, there is no reason not to
-use its resource management API. See also `Accessing Package Resources`_ for
-a quick example of converting code that uses ``__file__`` to use
-``pkg_resources`` instead.
-
-.. _Accessing Package Resources: http://peak.telecommunity.com/DevCenter/PythonEggs#accessing-package-resources
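-
-For example, a package that ships a template and a default configuration file
-might read them like this (the package and file names are hypothetical)::
-
-    from pkg_resources import resource_string, resource_filename
-
-    # Read the raw bytes of a data file bundled inside the package, even if
-    # the package is installed as a zipped egg:
-    template = resource_string("mypkg", "templates/base.html")
-
-    # Obtain a real filesystem path, extracting the file from the zip first
-    # if necessary:
-    config_path = resource_filename("mypkg", "data/defaults.cfg")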
-
-
-Non-Package Data Files
-----------------------
-
-The ``distutils`` normally install general "data files" to a platform-specific
-location (e.g. ``/usr/share``). This feature is intended to be used for things
-like documentation, example configuration files, and the like. ``setuptools``
-does not install these data files in a separate location, however. They are
-bundled inside the egg file or directory, alongside the Python modules and
-packages. The data files can also be accessed using the :ref:`ResourceManager
-API`, by specifying a ``Requirement`` instead of a package name::
-
-    from pkg_resources import Requirement, resource_filename
-    filename = resource_filename(Requirement.parse("MyProject"), "sample.conf")
-
-The above code will obtain the filename of the "sample.conf" file in the data
-root of the "MyProject" distribution.
-
-Note, by the way, that this encapsulation of data files means that you can't
-actually install data files to some arbitrary location on a user's machine;
-this is a feature, not a bug. You can always include a script in your
-distribution that extracts and copies your documentation or data files to
-a user-specified location, at their discretion. If you put related data files
-in a single directory, you can use ``resource_filename()`` with the directory
-name to get a filesystem directory that then can be copied with the ``shutil``
-module. (Even if your package is installed as a zipfile, calling
-``resource_filename()`` on a directory will return an actual filesystem
-directory, whose contents will be that entire subtree of your distribution.)
-
-(Of course, if you're writing a new package, you can just as easily place your
-data files or directories inside one of your packages, rather than using the
-distutils' approach. However, if you're updating an existing application, it
-may be simpler not to change the way it currently specifies these data files.)
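-
-As a sketch of the approach described above (the directory and destination
-names are hypothetical), copying a packaged data directory to a user-chosen
-location might look like this::
-
-    import shutil
-    from pkg_resources import Requirement, resource_filename
-
-    # Locate the packaged "examples" directory of MyProject; this works even
-    # for zipped installs, because the directory is extracted on demand.
-    source = resource_filename(Requirement.parse("MyProject"), "examples")
-
-    # Copy it to wherever the user asked for it to go.
-    shutil.copytree(source, "/home/user/myproject-examples")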
-
-
-Automatic Resource Extraction
------------------------------
-
-If you are using tools that expect your resources to be "real" files, or your
-project includes non-extension native libraries or other files that your C
-extensions expect to be able to access, you may need to list those files in
-the ``eager_resources`` argument to ``setup()``, so that the files will be
-extracted together, whenever a C extension in the project is imported.
-
-This is especially important if your project includes shared libraries *other*
-than distutils-built C extensions, and those shared libraries use file
-extensions other than ``.dll``, ``.so``, or ``.dylib``, which are the
-extensions that setuptools 0.6a8 and higher automatically detects as shared
-libraries and adds to the ``native_libs.txt`` file for you. Any shared
-libraries whose names do not end with one of those extensions should be listed
-as ``eager_resources``, because they need to be present in the filesystem when
-the C extensions that link to them are used.
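-
-For instance, a project bundling a shared library with a nonstandard
-extension might (hypothetically) declare it like this::
-
-    from setuptools import setup
-
-    setup(
-        name="Project",
-        packages=["mypkg"],
-        # Extract these files together whenever any C extension (or any of
-        # the listed resources) is needed at runtime:
-        eager_resources=[
-            "mypkg/lib/helper.sl",
-            "mypkg/data/lookup.bin",
-        ],
-    )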
-
-The ``pkg_resources`` runtime for compressed packages will automatically
-extract *all* C extensions and ``eager_resources`` at the same time, whenever
-*any* C extension or eager resource is requested via the ``resource_filename()``
-API. (C extensions are imported using ``resource_filename()`` internally.)
-This ensures that C extensions will see all of the "real" files that they
-expect to see.
-
-Note also that you can list directory resource names in ``eager_resources`` as
-well, in which case the directory's contents (including subdirectories) will be
-extracted whenever any C extension or eager resource is requested.
-
-Please note that if you're not sure whether you need to use this argument, you
-don't! It's really intended to support projects with lots of non-Python
-dependencies and as a last resort for crufty projects that can't otherwise
-handle being compressed. If your package is pure Python, Python plus data
-files, or Python plus C, you really don't need this. You've got to be using
-either C or an external program that needs "real" files in your project before
-there's any possibility of ``eager_resources`` being relevant to your project.
-
-
-Extensible Applications and Frameworks
-======================================
-
-
-.. _Entry Points:
-
-Dynamic Discovery of Services and Plugins
------------------------------------------
-
-``setuptools`` supports creating libraries that "plug in" to extensible
-applications and frameworks, by letting you register "entry points" in your
-project that can be imported by the application or framework.
-
-For example, suppose that a blogging tool wants to support plugins
-that provide translation for various file types to the blog's output format.
-The framework might define an "entry point group" called ``blogtool.parsers``,
-and then allow plugins to register entry points for the file extensions they
-support.
-
-This would allow people to create distributions that contain one or more
-parsers for different file types, and then the blogging tool would be able to
-find the parsers at runtime by looking up an entry point for the file
-extension (or mime type, or however it wants to).
-
-Note that if the blogging tool includes parsers for certain file formats, it
-can register these as entry points in its own setup script, which means it
-doesn't have to special-case its built-in formats. They can just be treated
-the same as any other plugin's entry points would be.
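-
-On the consuming side, the hypothetical blogging tool could locate a parser
-for a given file extension with the ``pkg_resources`` entry point API, along
-these lines::
-
-    import pkg_resources
-
-    def get_parser(extension):
-        # Look up entry points registered under the "blogtool.parsers" group
-        # whose name matches the file extension (e.g. ".rst").
-        for entry_point in pkg_resources.iter_entry_points(
-                "blogtool.parsers", extension):
-            return entry_point.load()   # import and return the parser object
-        raise LookupError("no parser registered for %r" % extension)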
-
-If you're creating a project that plugs in to an existing application or
-framework, you'll need to know what entry points or entry point groups are
-defined by that application or framework. Then, you can register entry points
-in your setup script. Here are a few examples of ways you might register an
-``.rst`` file parser entry point in the ``blogtool.parsers`` entry point group,
-for our hypothetical blogging tool::
-
-    setup(
-        # ...
-        entry_points={'blogtool.parsers': '.rst = some_module:SomeClass'}
-    )
-
-    setup(
-        # ...
-        entry_points={'blogtool.parsers': ['.rst = some_module:a_func']}
-    )
-
-    setup(
-        # ...
-        entry_points="""
-            [blogtool.parsers]
-            .rst = some.nested.module:SomeClass.some_classmethod [reST]
-            """,
-        extras_require=dict(reST="Docutils>=0.3.5")
-    )
-
-The ``entry_points`` argument to ``setup()`` accepts either a string with
-``.ini``-style sections, or a dictionary mapping entry point group names to
-either strings or lists of strings containing entry point specifiers. An
-entry point specifier consists of a name and value, separated by an ``=``
-sign. The value consists of a dotted module name, optionally followed by a
-``:`` and a dotted identifier naming an object within the module. It can
-also include a bracketed list of "extras" that are required for the entry
-point to be used. When the invoking application or framework requests loading
-of an entry point, any requirements implied by the associated extras will be
-passed to ``pkg_resources.require()``, so that an appropriate error message
-can be displayed if the needed package(s) are missing. (Of course, the
-invoking app or framework can ignore such errors if it wants to make an entry
-point optional if a requirement isn't installed.)
-
-
-Defining Additional Metadata
-----------------------------
-
-Some extensible applications and frameworks may need to define their own kinds
-of metadata to include in eggs, which they can then access using the
-``pkg_resources`` metadata APIs. Ordinarily, this is done by having plugin
-developers include additional files in their ``ProjectName.egg-info``
-directory. However, since it can be tedious to create such files by hand, you
-may want to create a distutils extension that will create the necessary files
-from arguments to ``setup()``, in much the same way that ``setuptools`` does
-for many of the ``setup()`` arguments it adds. See the section below on
-`Creating distutils Extensions`_ for more details, especially the subsection on
-`Adding new EGG-INFO Files`_.
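-
-At runtime, such metadata can be read back with the ``pkg_resources``
-metadata APIs; a sketch (the project and file names are hypothetical)::
-
-    from pkg_resources import get_distribution
-
-    dist = get_distribution("SomePlugin")
-    # Read a custom metadata file that the plugin placed in its EGG-INFO:
-    if dist.has_metadata("plugin_registry.txt"):
-        for line in dist.get_metadata_lines("plugin_registry.txt"):
-            print(line)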
-
-
-"Development Mode"
-==================
-
-Under normal circumstances, the ``distutils`` assume that you are going to
-build a distribution of your project, not use it in its "raw" or "unbuilt"
-form. If you were to use the ``distutils`` that way, you would have to rebuild
-and reinstall your project every time you made a change to it during
-development.
-
-Another problem that sometimes comes up with the ``distutils`` is that you may
-need to do development on two related projects at the same time. You may need
-to put both projects' packages in the same directory to run them, but need to
-keep them separate for revision control purposes. How can you do this?
-
-Setuptools allows you to deploy your projects for use in a common directory or
-staging area, but without copying any files. Thus, you can edit each project's
-code in its checkout directory, and only need to run build commands when you
-change a project's C extensions or similarly compiled files. You can even
-deploy a project into another project's checkout directory, if that's your
-preferred way of working (as opposed to using a common independent staging area
-or the site-packages directory).
-
-To do this, use the ``setup.py develop`` command. It works very similarly to
-``setup.py install`` or the EasyInstall tool, except that it doesn't actually
-install anything. Instead, it creates a special ``.egg-link`` file in the
-deployment directory, that links to your project's source code. And, if your
-deployment directory is Python's ``site-packages`` directory, it will also
-update the ``easy-install.pth`` file to include your project's source code,
-thereby making it available on ``sys.path`` for all programs using that Python
-installation.
-
-If you have enabled the ``use_2to3`` flag, then of course the ``.egg-link``
-will not link directly to your source code when run under Python 3, since
-that source code would be made for Python 2 and not work under Python 3.
-Instead the ``setup.py develop`` will build Python 3 code under the ``build``
-directory, and link there. This means that after doing code changes you will
-have to run ``setup.py build`` before these changes are picked up by your
-Python 3 installation.
-
-In addition, the ``develop`` command creates wrapper scripts in the target
-script directory that will run your in-development scripts after ensuring that
-all your ``install_requires`` packages are available on ``sys.path``.
-
-You can deploy the same project to multiple staging areas, e.g. if you have
-multiple projects on the same machine that are sharing the same project you're
-doing development work on.
-
-When you're done with a given development task, you can remove the project
-source from a staging area using ``setup.py develop --uninstall``, specifying
-the desired staging area if it's not the default.
-
-There are several options to control the precise behavior of the ``develop``
-command; see the section on the `develop`_ command below for more details.
-
-Note that you can also apply setuptools commands to non-setuptools projects,
-using commands like this::
-
- python -c "import setuptools; execfile('setup.py')" develop
-
-That is, you can simply list the normal setup commands and options following
-the quoted part.
-
-
-Distributing a ``setuptools``-based project
-===========================================
-
-Using ``setuptools``... Without bundling it!
----------------------------------------------
-
-.. warning:: **ez_setup** is deprecated in favor of PIP with **PEP-518** support.
-
-Your users might not have ``setuptools`` installed on their machines, or even
-if they do, it might not be the right version. Fixing this is easy; just
-download `ez_setup.py`_, and put it in the same directory as your ``setup.py``
-script. (Be sure to add it to your revision control system, too.) Then add
-these two lines to the very top of your setup script, before the script imports
-anything from setuptools:
-
-.. code-block:: python
-
-    import ez_setup
-    ez_setup.use_setuptools()
-
-That's it. The ``ez_setup`` module will automatically download a matching
-version of ``setuptools`` from PyPI, if it isn't present on the target system.
-Whenever you install an updated version of setuptools, you should also update
-your projects' ``ez_setup.py`` files, so that a matching version gets installed
-on the target machine(s).
-
-By the way, setuptools supports the new PyPI "upload" command, so you can use
-``setup.py sdist upload`` or ``setup.py bdist_egg upload`` to upload your
-source or egg distributions respectively. Your project's current version must
-be registered with PyPI first, of course; you can use ``setup.py register`` to
-do that. Or you can do it all in one step, e.g. ``setup.py register sdist
-bdist_egg upload`` will register the package, build source and egg
-distributions, and then upload them both to PyPI, where they'll be easily
-found by other projects that depend on them.
-
-(By the way, if you need to distribute a specific version of ``setuptools``,
-you can specify the exact version and base download URL as parameters to the
-``use_setuptools()`` function. See the function's docstring for details.)
-
-
-What Your Users Should Know
----------------------------
-
-In general, a setuptools-based project looks just like any distutils-based
-project -- as long as your users have an internet connection and are installing
-to ``site-packages``, that is. But for some users, these conditions don't
-apply, and they may become frustrated if this is their first encounter with
-a setuptools-based project. To keep these users happy, you should review the
-following topics in your project's installation instructions, if they are
-relevant to your project and your target audience isn't already familiar with
-setuptools and ``easy_install``.
-
-Network Access
- If your project is using ``ez_setup``, you should inform users of the
- need to either have network access, or to preinstall the correct version of
- setuptools using the `EasyInstall installation instructions`_. Those
- instructions also have tips for dealing with firewalls as well as how to
- manually download and install setuptools.
-
-Custom Installation Locations
- You should inform your users that if they are installing your project to
- somewhere other than the main ``site-packages`` directory, they should
- first install setuptools using the instructions for `Custom Installation
- Locations`_, before installing your project.
-
-Your Project's Dependencies
- If your project depends on other projects that may need to be downloaded
- from PyPI or elsewhere, you should list them in your installation
- instructions, or tell users how to find out what they are. While most
- users will not need this information, any users who don't have unrestricted
- internet access may have to find, download, and install the other projects
- manually. (Note, however, that they must still install those projects
- using ``easy_install``, or your project will not know they are installed,
- and your setup script will try to download them again.)
-
- If you want to be especially friendly to users with limited network access,
- you may wish to build eggs for your project and its dependencies, making
- them all available for download from your site, or at least create a page
- with links to all of the needed eggs. In this way, users with limited
- network access can manually download all the eggs to a single directory,
- then use the ``-f`` option of ``easy_install`` to specify the directory
- to find eggs in. Users who have full network access can just use ``-f``
- with the URL of your download page, and ``easy_install`` will find all the
- needed eggs using your links directly. This is also useful when your
- target audience isn't able to compile packages (e.g. most Windows users)
- and your package or some of its dependencies include C code.
-
-Revision Control System Users and Co-Developers
- Users and co-developers who are tracking your in-development code using
- a revision control system should probably read this manual's sections
- regarding such development. Alternately, you may wish to create a
- quick-reference guide containing the tips from this manual that apply to
- your particular situation. For example, if you recommend that people use
- ``setup.py develop`` when tracking your in-development code, you should let
- them know that this needs to be run after every update or commit.
-
- Similarly, if you remove modules or data files from your project, you
- should remind them to run ``setup.py clean --all`` and delete any obsolete
- ``.pyc`` or ``.pyo``. (This tip applies to the distutils in general, not
- just setuptools, but not everybody knows about them; be kind to your users
- by spelling out your project's best practices rather than leaving them
- guessing.)
-
-Creating System Packages
- Some users want to manage all Python packages using a single package
- manager, and sometimes that package manager isn't ``easy_install``!
- Setuptools currently supports ``bdist_rpm``, ``bdist_wininst``, and
- ``bdist_dumb`` formats for system packaging. If a user has a locally-
- installed "bdist" packaging tool that internally uses the distutils
- ``install`` command, it should be able to work with ``setuptools``. Some
- examples of "bdist" formats that this should work with include the
- ``bdist_nsi`` and ``bdist_msi`` formats for Windows.
-
- However, packaging tools that build binary distributions by running
- ``setup.py install`` on the command line or as a subprocess will require
- modification to work with setuptools. They should use the
- ``--single-version-externally-managed`` option to the ``install`` command,
- combined with the standard ``--root`` or ``--record`` options.
- See the `install command`_ documentation below for more details. The
- ``bdist_deb`` command is an example of a command that currently requires
- this kind of patching to work with setuptools.
-
- If you or your users have a problem building a usable system package for
- your project, please report the problem via the mailing list so that
- either the "bdist" tool in question or setuptools can be modified to
- resolve the issue.
-
-
-Setting the ``zip_safe`` flag
------------------------------
-
-For some use cases (such as bundling as part of a larger application), Python
-packages may be run directly from a zip file.
-Not all packages, however, are capable of running in compressed form, because
-they may expect to be able to access either source code or data files as
-normal operating system files. So, ``setuptools`` can install your project
-as a zipfile or a directory, and its default choice is determined by the
-project's ``zip_safe`` flag.
-
-You can pass a True or False value for the ``zip_safe`` argument to the
-``setup()`` function, or you can omit it. If you omit it, the ``bdist_egg``
-command will analyze your project's contents to see if it can detect any
-conditions that would prevent it from working in a zipfile. It will output
-notices to the console about any such conditions that it finds.
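-
-For example, a project that has verified it runs correctly from a zip file
-can declare so explicitly (the names below are illustrative)::
-
-    from setuptools import setup, find_packages
-
-    setup(
-        name="Project",
-        packages=find_packages(),
-        zip_safe=True,
-    )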
-
-Currently, this analysis is extremely conservative: it will consider the
-project unsafe if it contains any C extensions or datafiles whatsoever. This
-does *not* mean that the project can't or won't work as a zipfile! It just
-means that the ``bdist_egg`` authors aren't yet comfortable asserting that
-the project *will* work. If the project contains no C or data files, and does
-no ``__file__`` or ``__path__`` introspection or source code manipulation, then
-there is an extremely solid chance the project will work when installed as a
-zipfile. (And if the project uses ``pkg_resources`` for all its data file
-access, then C extensions and other data files shouldn't be a problem at all.
-See the `Accessing Data Files at Runtime`_ section above for more information.)
-
-However, if ``bdist_egg`` can't be *sure* that your package will work, but
-you've checked over all the warnings it issued, and you are either satisfied it
-*will* work (or if you want to try it for yourself), then you should set
-``zip_safe`` to ``True`` in your ``setup()`` call. If it turns out that it
-doesn't work, you can always change it to ``False``, which will force
-``setuptools`` to install your project as a directory rather than as a zipfile.
-
-Of course, the end-user can still override either decision, if they are using
-EasyInstall to install your package. And, if you want to override for testing
-purposes, you can just run ``setup.py easy_install --zip-ok .`` or ``setup.py
-easy_install --always-unzip .`` in your project directory, to install the
-package as a zipfile or directory, respectively.
-
-In the future, as we gain more experience with different packages and become
-more satisfied with the robustness of the ``pkg_resources`` runtime, the
-"zip safety" analysis may become less conservative. However, we strongly
-recommend that you determine for yourself whether your project functions
-correctly when installed as a zipfile, correct any problems if you can, and
-then make an explicit declaration of ``True`` or ``False`` for the ``zip_safe``
-flag, so that it will not be necessary for ``bdist_egg`` or ``EasyInstall`` to
-try to guess whether your project can work as a zipfile.
-
-
-Namespace Packages
-------------------
-
-Sometimes, a large package is more useful if distributed as a collection of
-smaller eggs. However, Python does not normally allow the contents of a
-package to be retrieved from more than one location. "Namespace packages"
-are a solution for this problem. When you declare a package to be a namespace
-package, it means that the package has no meaningful contents in its
-``__init__.py``, and that it is merely a container for modules and subpackages.
-
-The ``pkg_resources`` runtime will then automatically ensure that the contents
-of namespace packages that are spread over multiple eggs or directories are
-combined into a single "virtual" package.
-
-The ``namespace_packages`` argument to ``setup()`` lets you declare your
-project's namespace packages, so that they will be included in your project's
-metadata. The argument should list the namespace packages that the egg
-participates in. For example, the ZopeInterface project might do this::
-
-    setup(
-        # ...
-        namespace_packages=['zope']
-    )
-
-because it contains a ``zope.interface`` package that lives in the ``zope``
-namespace package. Similarly, a project for a standalone ``zope.publisher``
-would also declare the ``zope`` namespace package. When these projects are
-installed and used, Python will see them both as part of a "virtual" ``zope``
-package, even though they will be installed in different locations.
-
-Namespace packages don't have to be top-level packages. For example, Zope 3's
-``zope.app`` package is a namespace package, and in the future PEAK's
-``peak.util`` package will be too.
-
-Note, by the way, that your project's source tree must include the namespace
-packages' ``__init__.py`` files (and the ``__init__.py`` of any parent
-packages), in a normal Python package layout. These ``__init__.py`` files
-*must* contain the line::
-
- __import__('pkg_resources').declare_namespace(__name__)
-
-This code ensures that the namespace package machinery is operating and that
-the current package is registered as a namespace package.
-
-You must NOT include any other code and data in a namespace package's
-``__init__.py``. Even though it may appear to work during development, or when
-projects are installed as ``.egg`` files, it will not work when the projects
-are installed using "system" packaging tools -- in such cases the
-``__init__.py`` files will not be installed, let alone executed.
-
-You must include the ``declare_namespace()`` line in the ``__init__.py`` of
-*every* project that has contents for the namespace package in question, in
-order to ensure that the namespace will be declared regardless of which
-project's copy of ``__init__.py`` is loaded first. If the first loaded
-``__init__.py`` doesn't declare it, it will never *be* declared, because no
-other copies will ever be loaded!
-
-
-TRANSITIONAL NOTE
-~~~~~~~~~~~~~~~~~
-
-Setuptools automatically calls ``declare_namespace()`` for you at runtime,
-but future versions may *not*. This is because the automatic declaration
-feature has some negative side effects, such as needing to import all namespace
-packages during the initialization of the ``pkg_resources`` runtime, and also
-the need for ``pkg_resources`` to be explicitly imported before any namespace
-packages work at all. In some future releases, you'll be responsible
-for including your own declaration lines, and the automatic declaration feature
-will be dropped to get rid of the negative side effects.
-
-During the remainder of the current development cycle, therefore, setuptools
-will warn you about missing ``declare_namespace()`` calls in your
-``__init__.py`` files, and you should correct these as soon as possible
-before the compatibility support is removed.
-Namespace packages without declaration lines will not work
-correctly once a user has upgraded to a later version, so it's important that
-you make this change now in order to avoid having your code break in the field.
-Our apologies for the inconvenience, and thank you for your patience.
-
-
-
-Tagging and "Daily Build" or "Snapshot" Releases
-------------------------------------------------
-
-When a set of related projects are under development, it may be important to
-track finer-grained version increments than you would normally use for e.g.
-"stable" releases. While stable releases might be measured in dotted numbers
-with alpha/beta/etc. status codes, development versions of a project often
-need to be tracked by revision or build number or even build date. This is
-especially true when projects in development need to refer to one another, and
-therefore may literally need an up-to-the-minute version of something!
-
-To support these scenarios, ``setuptools`` allows you to "tag" your source and
-egg distributions by adding one or more of the following to the project's
-"official" version identifier:
-
-* A manually-specified pre-release tag, such as "build" or "dev", or a
- manually-specified post-release tag, such as a build or revision number
- (``--tag-build=STRING, -bSTRING``)
-
-* An 8-character representation of the build date (``--tag-date, -d``), as
- a postrelease tag
-
-You can add these tags by adding ``egg_info`` and the desired options to
-the command line ahead of the ``sdist`` or ``bdist`` commands that you want
-to generate a daily build or snapshot for. See the section below on the
-`egg_info`_ command for more details.
-
-(Also, before you release your project, be sure to see the section above on
-`Specifying Your Project's Version`_ for more information about how pre- and
-post-release tags affect how setuptools and EasyInstall interpret version
-numbers. This is important in order to make sure that dependency processing
-tools will know which versions of your project are newer than others.)
-
-Finally, if you are creating builds frequently, and either building them in a
-downloadable location or are copying them to a distribution server, you should
-probably also check out the `rotate`_ command, which lets you automatically
-delete all but the N most-recently-modified distributions matching a glob
-pattern. So, you can use a command line like::
-
- setup.py egg_info -rbDEV bdist_egg rotate -m.egg -k3
-
-to build an egg whose version info includes 'DEV-rNNNN' (where NNNN is the
-most recent Subversion revision that affected the source tree), and then
-delete any egg files from the distribution directory except for the three
-that were built most recently.
-
-If you have to manage automated builds for multiple packages, each with
-different tagging and rotation policies, you may also want to check out the
-`alias`_ command, which would let each package define an alias like ``daily``
-that would perform the necessary tag, build, and rotate commands. Then, a
-simpler script or cron job could just run ``setup.py daily`` in each project
-directory. (And, you could also define sitewide or per-user default versions
-of the ``daily`` alias, so that projects that didn't define their own would
-use the appropriate defaults.)
-
-
-Generating Source Distributions
--------------------------------
-
-``setuptools`` enhances the distutils' default algorithm for source file
-selection with pluggable endpoints for looking up files to include. If you are
-using a revision control system, and your source distributions only need to
-include files that you're tracking in revision control, use a corresponding
-plugin instead of writing a ``MANIFEST.in`` file. See the section below on
-`Adding Support for Revision Control Systems`_ for information on plugins.
-
-If you need to include automatically generated files, or files that are kept in
-an unsupported revision control system, you'll need to create a ``MANIFEST.in``
-file to specify any files that the default file location algorithm doesn't
-catch. See the distutils documentation for more information on the format of
-the ``MANIFEST.in`` file.
-
-But, be sure to ignore any part of the distutils documentation that deals with
-``MANIFEST`` or how it's generated from ``MANIFEST.in``; setuptools shields you
-from these issues and doesn't work the same way in any case. Unlike the
-distutils, setuptools regenerates the source distribution manifest file
-every time you build a source distribution, and it builds it inside the
-project's ``.egg-info`` directory, out of the way of your main project
-directory. You therefore need not worry about whether it is up-to-date or not.
-
-Indeed, because setuptools' approach to determining the contents of a source
-distribution is so much simpler, its ``sdist`` command omits nearly all of
-the options that the distutils' more complex ``sdist`` process requires. For
-all practical purposes, you'll probably use only the ``--formats`` option, if
-you use any option at all.
-
-
-Making your package available for EasyInstall
----------------------------------------------
-
-If you use the ``register`` command (``setup.py register``) to register your
-package with PyPI, that's most of the battle right there. (See the
-`docs for the register command`_ for more details.)
-
-.. _docs for the register command: http://docs.python.org/dist/package-index.html
-
-If you also use the `upload`_ command to upload actual distributions of your
-package, that's even better, because EasyInstall will be able to find and
-download them directly from your project's PyPI page.
-
-However, there may be reasons why you don't want to upload distributions to
-PyPI, and just want your existing distributions (or perhaps a Subversion
-checkout) to be used instead.
-
-So here's what you need to do before running the ``register`` command. There
-are three ``setup()`` arguments that affect EasyInstall:
-
-``url`` and ``download_url``
- These become links on your project's PyPI page. EasyInstall will examine
- them to see if they link to a package ("primary links"), or whether they are
-    HTML pages. If they're HTML pages, EasyInstall scans all HREFs on the
-    page for primary links.
-
-``long_description``
- EasyInstall will check any URLs contained in this argument to see if they
- are primary links.
-
-A URL is considered a "primary link" if it is a link to a .tar.gz, .tgz, .zip,
-.egg, .egg.zip, .tar.bz2, or .exe file, or if it has an ``#egg=project`` or
-``#egg=project-version`` fragment identifier attached to it. EasyInstall
-attempts to determine a project name and optional version number from the text
-of a primary link *without* downloading it. When it has found all the primary
-links, EasyInstall will select the best match based on requested version,
-platform compatibility, and other criteria.
-
-So, if your ``url`` or ``download_url`` point either directly to a downloadable
-source distribution, or to HTML page(s) that have direct links to such, then
-EasyInstall will be able to locate downloads automatically. If you want to
-make Subversion checkouts available, then you should create links with either
-``#egg=project`` or ``#egg=project-version`` added to the URL. You should
-replace ``project`` and ``version`` with the values they would have in an egg
-filename. (Be sure to actually generate an egg and then use the initial part
-of the filename, rather than trying to guess what the escaped form of the
-project name and version number will be.)
-
-Note that Subversion checkout links are of lower precedence than other kinds
-of distributions, so EasyInstall will not select a Subversion checkout for
-downloading unless it has a version included in the ``#egg=`` suffix, and
-it's a higher version than EasyInstall has seen in any other links for your
-project.
-
-As a result, it's a common practice to mark checkout URLs with a version of
-"dev" (i.e., ``#egg=projectname-dev``), so that users can do something like
-this::
-
- easy_install --editable projectname==dev
-
-in order to check out the in-development version of ``projectname``.
-
-
-Making "Official" (Non-Snapshot) Releases
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-When you make an official release, creating source or binary distributions,
-you will need to override the tag settings from ``setup.cfg``, so that you
-don't end up registering versions like ``foobar-0.7a1.dev-r34832``. This is
-easy to do if you are developing on the trunk and using tags or branches for
-your releases - just make the change to ``setup.cfg`` after branching or
-tagging the release, so the trunk will still produce development snapshots.
-
-Alternately, if you are not branching for releases, you can override the
-default version options on the command line, using something like::
-
- python setup.py egg_info -Db "" sdist bdist_egg register upload
-
-The first part of this command (``egg_info -Db ""``) will override the
-configured tag information, before creating source and binary eggs, registering
-the project with PyPI, and uploading the files. Thus, these commands will use
-the plain version from your ``setup.py``, without adding the build designation
-string.
-
-Of course, if you will be doing this a lot, you may wish to create a personal
-alias for this operation, e.g.::
-
- python setup.py alias -u release egg_info -Db ""
-
-You can then use it like this::
-
- python setup.py release sdist bdist_egg register upload
-
-Or of course you can create more elaborate aliases that do all of the above.
-See the sections below on the `egg_info`_ and `alias`_ commands for more ideas.
-
-
-
-Distributing Extensions compiled with Pyrex
--------------------------------------------
-
-``setuptools`` includes transparent support for building Pyrex extensions, as
-long as you define your extensions using ``setuptools.Extension``, *not*
-``distutils.Extension``. You must also not import anything from Pyrex in
-your setup script.
-
-If you follow these rules, you can safely list ``.pyx`` files as the source
-of your ``Extension`` objects in the setup script. ``setuptools`` will detect
-at build time whether Pyrex is installed or not. If it is, then ``setuptools``
-will use it. If not, then ``setuptools`` will silently change the
-``Extension`` objects to refer to the ``.c`` counterparts of the ``.pyx``
-files, so that the normal distutils C compilation process will occur.
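-
-For example, a setup script for a hypothetical extension module might look
-like this (a sketch; the package and file names are made up)::
-
-    from setuptools import setup, Extension
-
-    setup(
-        name="my_project",
-        ext_modules=[
-            Extension("my_project.fast", ["my_project/fast.pyx"]),
-        ],
-    )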
-
-Of course, for this to work, your source distributions must include the C
-code generated by Pyrex, as well as your original ``.pyx`` files. This means
-that you will probably want to include current ``.c`` files in your revision
-control system, rebuilding them whenever you check changes in for the ``.pyx``
-source files. This will ensure that people tracking your project in a revision
-control system will be able to build it even if they don't have Pyrex
-installed, and that your source releases will be similarly usable with or
-without Pyrex.
-
-
------------------
-Command Reference
------------------
-
-.. _alias:
-
-``alias`` - Define shortcuts for commonly used commands
-=======================================================
-
-Sometimes, you need to use the same commands over and over, but you can't
-necessarily set them as defaults. For example, if you produce both development
-snapshot releases and "stable" releases of a project, you may want to put
-the distributions in different places, or use different ``egg_info`` tagging
-options, etc. In these cases, it doesn't make sense to set the options in
-a distutils configuration file, because the values of the options change based
-on what you're trying to do.
-
-Setuptools therefore allows you to define "aliases" - shortcut names for
-an arbitrary string of commands and options, using ``setup.py alias aliasname
-expansion``, where aliasname is the name of the new alias, and the remainder of
-the command line supplies its expansion. For example, this command defines
-a sitewide alias called "daily", that sets various ``egg_info`` tagging
-options::
-
- setup.py alias --global-config daily egg_info --tag-build=development
-
-Once the alias is defined, it can then be used with other setup commands,
-e.g.::
-
- setup.py daily bdist_egg # generate a daily-build .egg file
- setup.py daily sdist # generate a daily-build source distro
- setup.py daily sdist bdist_egg # generate both
-
-The above commands are interpreted as if the word ``daily`` were replaced with
-``egg_info --tag-build=development``.
-
-Note that setuptools will expand each alias *at most once* in a given command
-line. This serves two purposes. First, if you accidentally create an alias
-loop, it will have no effect; you'll instead get an error message about an
-unknown command. Second, it allows you to define an alias for a command, that
-uses that command. For example, this (project-local) alias::
-
- setup.py alias bdist_egg bdist_egg rotate -k1 -m.egg
-
-redefines the ``bdist_egg`` command so that it always runs the ``rotate``
-command afterwards to delete all but the newest egg file. It doesn't loop
-indefinitely on ``bdist_egg`` because the alias is only expanded once when
-used.
-
-You can remove a defined alias with the ``--remove`` (or ``-r``) option, e.g.::
-
- setup.py alias --global-config --remove daily
-
-would delete the "daily" alias we defined above.
-
-Aliases can be defined on a project-specific, per-user, or sitewide basis. The
-default is to define or remove a project-specific alias, but you can use any of
-the `configuration file options`_ (listed under the `saveopts`_ command, below)
-to determine which distutils configuration file the alias will be added to
-(or removed from).
-
-Note that if you omit the "expansion" argument to the ``alias`` command,
-you'll get output showing that alias' current definition (and what
-configuration file it's defined in). If you omit the alias name as well,
-you'll get a listing of all current aliases along with their configuration
-file locations.
-
-
-``bdist_egg`` - Create a Python Egg for the project
-===================================================
-
-This command generates a Python Egg (``.egg`` file) for the project. Python
-Eggs are the preferred binary distribution format for EasyInstall, because they
-are cross-platform (for "pure" packages), directly importable, and contain
-project metadata including scripts and information about the project's
-dependencies. They can be simply downloaded and added to ``sys.path``
-directly, or they can be placed in a directory on ``sys.path`` and then
-automatically discovered by the egg runtime system.
-
-This command runs the `egg_info`_ command (if it hasn't already run) to update
-the project's metadata (``.egg-info``) directory. If you have added any extra
-metadata files to the ``.egg-info`` directory, those files will be included in
-the new egg file's metadata directory, for use by the egg runtime system or by
-any applications or frameworks that use that metadata.
-
-You won't usually need to specify any special options for this command; just
-use ``bdist_egg`` and you're done. But there are a few options that may
-be occasionally useful:
-
-``--dist-dir=DIR, -d DIR``
- Set the directory where the ``.egg`` file will be placed. If you don't
- supply this, then the ``--dist-dir`` setting of the ``bdist`` command
- will be used, which is usually a directory named ``dist`` in the project
- directory.
-
-``--plat-name=PLATFORM, -p PLATFORM``
- Set the platform name string that will be embedded in the egg's filename
- (assuming the egg contains C extensions). This can be used to override
- the distutils default platform name with something more meaningful. Keep
- in mind, however, that the egg runtime system expects to see eggs with
- distutils platform names, so it may ignore or reject eggs with non-standard
- platform names. Similarly, the EasyInstall program may ignore them when
- searching web pages for download links. However, if you are
- cross-compiling or doing some other unusual things, you might find a use
- for this option.
-
-``--exclude-source-files``
- Don't include any modules' ``.py`` files in the egg, just compiled Python,
- C, and data files. (Note that this doesn't affect any ``.py`` files in the
- EGG-INFO directory or its subdirectories, since for example there may be
- scripts with a ``.py`` extension which must still be retained.) We don't
- recommend that you use this option except for packages that are being
- bundled for proprietary end-user applications, or for "embedded" scenarios
- where space is at an absolute premium. On the other hand, if your package
- is going to be installed and used in compressed form, you might as well
- exclude the source because Python's ``traceback`` module doesn't currently
- understand how to display zipped source code anyway, or how to deal with
- files that are in a different place from where their code was compiled.
-
-There are also some options you will probably never need, but which are there
-because they were copied from similar ``bdist`` commands used as an example for
-creating this one. They may be useful for testing and debugging, however,
-which is why we kept them:
-
-``--keep-temp, -k``
- Keep the contents of the ``--bdist-dir`` tree around after creating the
- ``.egg`` file.
-
-``--bdist-dir=DIR, -b DIR``
- Set the temporary directory for creating the distribution. The entire
- contents of this directory are zipped to create the ``.egg`` file, after
- running various installation commands to copy the package's modules, data,
- and extensions here.
-
-``--skip-build``
- Skip doing any "build" commands; just go straight to the
- install-and-compress phases.
-
-
-.. _develop:
-
-``develop`` - Deploy the project source in "Development Mode"
-=============================================================
-
-This command allows you to deploy your project's source for use in one or more
-"staging areas" where it will be available for importing. This deployment is
-done in such a way that changes to the project source are immediately available
-in the staging area(s), without needing to run a build or install step after
-each change.
-
-The ``develop`` command works by creating an ``.egg-link`` file (named for the
-project) in the given staging area. If the staging area is Python's
-``site-packages`` directory, it also updates an ``easy-install.pth`` file so
-that the project is on ``sys.path`` by default for all programs run using that
-Python installation.
-
-The ``develop`` command also installs wrapper scripts in the staging area (or
-a separate directory, as specified) that will ensure the project's dependencies
-are available on ``sys.path`` before running the project's source scripts.
-And, it ensures that any missing project dependencies are available in the
-staging area, by downloading and installing them if necessary.
-
-Last, but not least, the ``develop`` command invokes the ``build_ext -i``
-command to ensure any C extensions in the project have been built and are
-up-to-date, and the ``egg_info`` command to ensure the project's metadata is
-updated (so that the runtime and wrappers know what the project's dependencies
-are). If you make any changes to the project's setup script or C extensions,
-you should rerun the ``develop`` command against all relevant staging areas to
-keep the project's scripts, metadata and extensions up-to-date. Most other
-kinds of changes to your project should not require any build operations or
-rerunning ``develop``, but keep in mind that even minor changes to the setup
-script (e.g. changing an entry point definition) require you to re-run the
-``develop`` or ``test`` commands to keep the distribution updated.
-
-Here are some of the options that the ``develop`` command accepts. Note that
-they affect the project's dependencies as well as the project itself, so if you
-have dependencies that need to be installed and you use ``--exclude-scripts``
-(for example), the dependencies' scripts will not be installed either! For
-this reason, you may want to use EasyInstall to install the project's
-dependencies before using the ``develop`` command, if you need finer control
-over the installation options for dependencies.
-
-``--uninstall, -u``
- Un-deploy the current project. You may use the ``--install-dir`` or ``-d``
- option to designate the staging area. The created ``.egg-link`` file will
- be removed, if present and it is still pointing to the project directory.
- The project directory will be removed from ``easy-install.pth`` if the
- staging area is Python's ``site-packages`` directory.
-
- Note that this option currently does *not* uninstall script wrappers! You
- must uninstall them yourself, or overwrite them by using EasyInstall to
- activate a different version of the package. You can also avoid installing
- script wrappers in the first place, if you use the ``--exclude-scripts``
- (aka ``-x``) option when you run ``develop`` to deploy the project.
-
-``--multi-version, -m``
- "Multi-version" mode. Specifying this option prevents ``develop`` from
- adding an ``easy-install.pth`` entry for the project(s) being deployed, and
- if an entry for any version of a project already exists, the entry will be
- removed upon successful deployment. In multi-version mode, no specific
- version of the package is available for importing, unless you use
- ``pkg_resources.require()`` to put it on ``sys.path``, or you are running
- a wrapper script generated by ``setuptools`` or EasyInstall. (In which
- case the wrapper script calls ``require()`` for you.)
-
- Note that if you install to a directory other than ``site-packages``,
- this option is automatically in effect, because ``.pth`` files can only be
- used in ``site-packages`` (at least in Python 2.3 and 2.4). So, if you use
- the ``--install-dir`` or ``-d`` option (or they are set via configuration
- file(s)) your project and its dependencies will be deployed in multi-
- version mode.
-
-``--install-dir=DIR, -d DIR``
- Set the installation directory (staging area). If this option is not
- directly specified on the command line or in a distutils configuration
- file, the distutils default installation location is used. Normally, this
- will be the ``site-packages`` directory, but if you are using distutils
- configuration files, setting things like ``prefix`` or ``install_lib``,
- then those settings are taken into account when computing the default
- staging area.
-
-``--script-dir=DIR, -s DIR``
- Set the script installation directory. If you don't supply this option
- (via the command line or a configuration file), but you *have* supplied
- an ``--install-dir`` (via command line or config file), then this option
- defaults to the same directory, so that the scripts will be able to find
- their associated package installation. Otherwise, this setting defaults
- to the location where the distutils would normally install scripts, taking
- any distutils configuration file settings into account.
-
-``--exclude-scripts, -x``
- Don't deploy script wrappers. This is useful if you don't want to disturb
- existing versions of the scripts in the staging area.
-
-``--always-copy, -a``
- Copy all needed distributions to the staging area, even if they
- are already present in another directory on ``sys.path``. By default, if
- a requirement can be met using a distribution that is already available in
- a directory on ``sys.path``, it will not be copied to the staging area.
-
-``--egg-path=DIR``
- Force the generated ``.egg-link`` file to use a specified relative path
- to the source directory. This can be useful in circumstances where your
- installation directory is being shared by code running under multiple
- platforms (e.g. Mac and Windows) which have different absolute locations
- for the code under development, but the same *relative* locations with
- respect to the installation directory. If you use this option when
- installing, you must supply the same relative path when uninstalling.
-
-In addition to the above options, the ``develop`` command also accepts all of
-the same options accepted by ``easy_install``. If you've configured any
-``easy_install`` settings in your ``setup.cfg`` (or other distutils config
-files), the ``develop`` command will use them as defaults, unless you override
-them in a ``[develop]`` section or on the command line.
-
-
-``easy_install`` - Find and install packages
-============================================
-
-This command runs the `EasyInstall tool
-<easy_install.html>`_ for you. It is exactly
-equivalent to running the ``easy_install`` command. All command line arguments
-following this command are consumed and not processed further by the distutils,
-so this must be the last command listed on the command line. Please see
-the EasyInstall documentation for the options reference and usage examples.
-Normally, there is no reason to use this command via the command line, as you
-can just use ``easy_install`` directly. It's only listed here so that you know
-it's a distutils command, which means that you can:
-
-* create command aliases that use it,
-* create distutils extensions that invoke it as a subcommand, and
-* configure options for it in your ``setup.cfg`` or other distutils config
- files.
-
-
-.. _egg_info:
-
-``egg_info`` - Create egg metadata and set build tags
-=====================================================
-
-This command performs two operations: it updates a project's ``.egg-info``
-metadata directory (used by the ``bdist_egg``, ``develop``, and ``test``
-commands), and it allows you to temporarily change a project's version string,
-to support "daily builds" or "snapshot" releases. It is run automatically by
-the ``sdist``, ``bdist_egg``, ``develop``, ``register``, and ``test`` commands
-in order to update the project's metadata, but you can also specify it
-explicitly in order to temporarily change the project's version string while
-executing other commands. (It also generates the ``.egg-info/SOURCES.txt``
-manifest file, which is used when you are building source distributions.)
-
-In addition to writing the core egg metadata defined by ``setuptools`` and
-required by ``pkg_resources``, this command can be extended to write other
-metadata files as well, by defining entry points in the ``egg_info.writers``
-group. See the section on `Adding new EGG-INFO Files`_ below for more details.
-Note that using additional metadata writers may require you to include a
-``setup_requires`` argument to ``setup()`` in order to ensure that the desired
-writers are available on ``sys.path``.
-
-
-Release Tagging Options
------------------------
-
-The following options can be used to modify the project's version string for
-all remaining commands on the setup command line. The options are processed
-in the order shown, so if you use more than one, the requested tags will be
-added in the following order:
-
-``--tag-build=NAME, -b NAME``
- Append NAME to the project's version string. Due to the way setuptools
- processes "pre-release" version suffixes beginning with the letters "a"
- through "e" (like "alpha", "beta", and "candidate"), you will usually want
- to use a tag like ".build" or ".dev", as this will cause the version number
- to be considered *lower* than the project's default version. (If you
- want to make the version number *higher* than the default version, you can
- always leave off --tag-build and then use one or both of the following
- options.)
-
- If you have a default build tag set in your ``setup.cfg``, you can suppress
- it on the command line using ``-b ""`` or ``--tag-build=""`` as an argument
- to the ``egg_info`` command.
-
-``--tag-date, -d``
- Add a date stamp of the form "-YYYYMMDD" (e.g. "-20050528") to the
- project's version number.
-
-``--no-date, -D``
- Don't include a date stamp in the version number. This option is included
- so you can override a default setting in ``setup.cfg``.
-
-
-(Note: Because these options modify the version number used for source and
-binary distributions of your project, you should first make sure that you know
-how the resulting version numbers will be interpreted by automated tools
-like EasyInstall. See the section above on `Specifying Your Project's
-Version`_ for an explanation of pre- and post-release tags, as well as tips on
-how to choose and verify a versioning scheme for your project.)
-
-For advanced uses, there is one other option that can be set, to change the
-location of the project's ``.egg-info`` directory. Commands that need to find
-the project's source directory or metadata should get it from this setting:
-
-
-Other ``egg_info`` Options
---------------------------
-
-``--egg-base=SOURCEDIR, -e SOURCEDIR``
- Specify the directory that should contain the .egg-info directory. This
- should normally be the root of your project's source tree (which is not
- necessarily the same as your project directory; some projects use a ``src``
- or ``lib`` subdirectory as the source root). You should not normally need
- to specify this directory, as it is normally determined from the
- ``package_dir`` argument to the ``setup()`` function, if any. If there is
- no ``package_dir`` set, this option defaults to the current directory.
-
-
-``egg_info`` Examples
----------------------
-
-Creating a dated "nightly build" snapshot egg::
-
- python setup.py egg_info --tag-date --tag-build=DEV bdist_egg
-
-Creating and uploading a release with no version tags, even if some default
-tags are specified in ``setup.cfg``::
-
-    python setup.py egg_info -Db "" sdist bdist_egg register upload
-
-(Notice that ``egg_info`` must always appear on the command line *before* any
-commands that you want the version changes to apply to.)
-
-
-.. _install command:
-
-``install`` - Run ``easy_install`` or old-style installation
-============================================================
-
-The setuptools ``install`` command is basically a shortcut to run the
-``easy_install`` command on the current project. However, for convenience
-in creating "system packages" of setuptools-based projects, you can also
-use this option:
-
-``--single-version-externally-managed``
- This boolean option tells the ``install`` command to perform an "old style"
- installation, with the addition of an ``.egg-info`` directory so that the
- installed project will still have its metadata available and operate
- normally. If you use this option, you *must* also specify the ``--root``
- or ``--record`` options (or both), because otherwise you will have no way
- to identify and remove the installed files.
-
-This option is automatically in effect when ``install`` is invoked by another
-distutils command, so that commands like ``bdist_wininst`` and ``bdist_rpm``
-will create system packages of eggs. It is also automatically in effect if
-you specify the ``--root`` option.
-
-
-``install_egg_info`` - Install an ``.egg-info`` directory in ``site-packages``
-==============================================================================
-
-Setuptools runs this command as part of ``install`` operations that use the
-``--single-version-externally-managed`` options. You should not invoke it
-directly; it is documented here for completeness and so that distutils
-extensions such as system package builders can make use of it. This command
-has only one option:
-
-``--install-dir=DIR, -d DIR``
- The parent directory where the ``.egg-info`` directory will be placed.
- Defaults to the same as the ``--install-dir`` option specified for the
- ``install_lib`` command, which is usually the system ``site-packages``
- directory.
-
-This command assumes that the ``egg_info`` command has been given valid options
-via the command line or ``setup.cfg``, as it will invoke the ``egg_info``
-command and use its options to locate the project's source ``.egg-info``
-directory.
-
-
-.. _rotate:
-
-``rotate`` - Delete outdated distribution files
-===============================================
-
-As you develop new versions of your project, your distribution (``dist``)
-directory will gradually fill up with older source and/or binary distribution
-files. The ``rotate`` command lets you automatically clean these up, keeping
-only the N most-recently modified files matching a given pattern.
-
-``--match=PATTERNLIST, -m PATTERNLIST``
- Comma-separated list of glob patterns to match. This option is *required*.
- The project name and ``-*`` is prepended to the supplied patterns, in order
- to match only distributions belonging to the current project (in case you
- have a shared distribution directory for multiple projects). Typically,
- you will use a glob pattern like ``.zip`` or ``.egg`` to match files of
- the specified type. Note that each supplied pattern is treated as a
- distinct group of files for purposes of selecting files to delete.
-
-``--keep=COUNT, -k COUNT``
- Number of matching distributions to keep. For each group of files
- identified by a pattern specified with the ``--match`` option, delete all
- but the COUNT most-recently-modified files in that group. This option is
- *required*.
-
-``--dist-dir=DIR, -d DIR``
- Directory where the distributions are. This defaults to the value of the
- ``bdist`` command's ``--dist-dir`` option, which will usually be the
- project's ``dist`` subdirectory.
-
-**Example 1**: Delete all .tar.gz files from the distribution directory, except
-for the 3 most recently modified ones::
-
- setup.py rotate --match=.tar.gz --keep=3
-
-**Example 2**: Delete all Python 2.3 or Python 2.4 eggs from the distribution
-directory, except the most recently modified one for each Python version::
-
- setup.py rotate --match=-py2.3*.egg,-py2.4*.egg --keep=1
-
-
-.. _saveopts:
-
-``saveopts`` - Save used options to a configuration file
-========================================================
-
-Finding and editing ``distutils`` configuration files can be a pain, especially
-since you also have to translate the configuration options from command-line
-form to the proper configuration file format. You can avoid these hassles by
-using the ``saveopts`` command. Just add it to the command line to save the
-options you used. For example, this command builds the project using
-the ``mingw32`` C compiler, then saves the --compiler setting as the default
-for future builds (even those run implicitly by the ``install`` command)::
-
- setup.py build --compiler=mingw32 saveopts
-
-The ``saveopts`` command saves all options for every command specified on the
-command line to the project's local ``setup.cfg`` file, unless you use one of
-the `configuration file options`_ to change where the options are saved. For
-example, this command does the same as above, but saves the compiler setting
-to the site-wide (global) distutils configuration::
-
- setup.py build --compiler=mingw32 saveopts -g
-
-Note that it doesn't matter where you place the ``saveopts`` command on the
-command line; it will still save all the options specified for all commands.
-For example, this is another valid way to spell the last example::
-
- setup.py saveopts -g build --compiler=mingw32
-
-Note, however, that all of the commands specified are always run, regardless of
-where ``saveopts`` is placed on the command line.
-
-
-Configuration File Options
---------------------------
-
-Normally, settings such as options and aliases are saved to the project's
-local ``setup.cfg`` file. But you can override this and save them to the
-global or per-user configuration files, or to a manually-specified filename.
-
-``--global-config, -g``
- Save settings to the global ``distutils.cfg`` file inside the ``distutils``
- package directory. You must have write access to that directory to use
- this option. You also can't combine this option with ``-u`` or ``-f``.
-
-``--user-config, -u``
- Save settings to the current user's ``~/.pydistutils.cfg`` (POSIX) or
- ``$HOME/pydistutils.cfg`` (Windows) file. You can't combine this option
- with ``-g`` or ``-f``.
-
-``--filename=FILENAME, -f FILENAME``
- Save settings to the specified configuration file to use. You can't
- combine this option with ``-g`` or ``-u``. Note that if you specify a
- non-standard filename, the ``distutils`` and ``setuptools`` will not
- use the file's contents. This option is mainly included for use in
- testing.
-
-These options are used by other ``setuptools`` commands that modify
-configuration files, such as the `alias`_ and `setopt`_ commands.
-
-
-.. _setopt:
-
-``setopt`` - Set a distutils or setuptools option in a config file
-==================================================================
-
-This command is mainly for use by scripts, but it can also be used as a quick
-and dirty way to change a distutils configuration option without having to
-remember what file the options are in and then open an editor.
-
-**Example 1**. Set the default C compiler to ``mingw32`` (using long option
-names)::
-
- setup.py setopt --command=build --option=compiler --set-value=mingw32
-
-**Example 2**. Remove any setting for the distutils default package
-installation directory (short option names)::
-
- setup.py setopt -c install -o install_lib -r
-
-
-Options for the ``setopt`` command:
-
-``--command=COMMAND, -c COMMAND``
- Command to set the option for. This option is required.
-
-``--option=OPTION, -o OPTION``
- The name of the option to set. This option is required.
-
-``--set-value=VALUE, -s VALUE``
- The value to set the option to. Not needed if ``-r`` or ``--remove`` is
- set.
-
-``--remove, -r``
- Remove (unset) the option, instead of setting it.
-
-In addition to the above options, you may use any of the `configuration file
-options`_ (listed under the `saveopts`_ command, above) to determine which
-distutils configuration file the option will be added to (or removed from).
-
-
-.. _test:
-
-``test`` - Build package and run a unittest suite
-=================================================
-
-When doing test-driven development, or running automated builds that need
-testing before they are deployed for downloading or use, it's often useful
-to be able to run a project's unit tests without actually deploying the project
-anywhere, even using the ``develop`` command. The ``test`` command runs a
-project's unit tests without actually deploying it, by temporarily putting the
-project's source on ``sys.path``, after first running ``build_ext -i`` and
-``egg_info`` to ensure that any C extensions and project metadata are
-up-to-date.
-
-To use this command, your project's tests must be wrapped in a ``unittest``
-test suite by either a function, a ``TestCase`` class or method, or a module
-or package containing ``TestCase`` classes. If the named suite is a module,
-and the module has an ``additional_tests()`` function, it is called and the
-result (which must be a ``unittest.TestSuite``) is added to the tests to be
-run. If the named suite is a package, any submodules and subpackages are
-recursively added to the overall test suite. (Note: if your project specifies
-a ``test_loader``, the rules for processing the chosen ``test_suite`` may
-differ; see the `test_loader`_ documentation for more details.)
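-
-As an illustration, a test module named by ``test_suite`` could look something
-like this (a sketch; the module names are hypothetical)::
-
-    import doctest
-    import unittest
-
-    class TestBasics(unittest.TestCase):
-        def test_truth(self):
-            self.assertTrue(True)
-
-    def additional_tests():
-        # must return a unittest.TestSuite; here we add the project's doctests
-        return doctest.DocTestSuite("my_package.some_module")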
-
-Note that many test systems including ``doctest`` support wrapping their
-non-``unittest`` tests in ``TestSuite`` objects. So, if you are using a test
-package that does not support this, we suggest you encourage its developers to
-implement test suite support, as this is a convenient and standard way to
-aggregate a collection of tests to be run under a common test harness.
-
-By default, tests will be run in the "verbose" mode of the ``unittest``
-package's text test runner, but you can get the "quiet" mode (just dots) if
-you supply the ``-q`` or ``--quiet`` option, either as a global option to
-the setup script (e.g. ``setup.py -q test``) or as an option for the ``test``
-command itself (e.g. ``setup.py test -q``). There is one other option
-available:
-
-``--test-suite=NAME, -s NAME``
- Specify the test suite (or module, class, or method) to be run
- (e.g. ``some_module.test_suite``). The default for this option can be
- set by giving a ``test_suite`` argument to the ``setup()`` function, e.g.::
-
- setup(
- # ...
- test_suite="my_package.tests.test_all"
- )
-
- If you did not set a ``test_suite`` in your ``setup()`` call, and do not
- provide a ``--test-suite`` option, an error will occur.
-
-
-.. _upload:
-
-``upload`` - Upload source and/or egg distributions to PyPI
-===========================================================
-
-The ``upload`` command is implemented and `documented
-<https://docs.python.org/3.1/distutils/uploading.html>`_
-in distutils.
-
-Setuptools augments the ``upload`` command with support
-for `keyring <https://pypi.org/project/keyring/>`_,
-allowing the password to be stored in a secure
-location and not in plaintext in the .pypirc file. To use
-keyring, first install keyring and set the password for
-the relevant repository, e.g.::
-
- python -m keyring set <repository> <username>
- Password for '<username>' in '<repository>': ********
-
-Then, in .pypirc, set the repository configuration as normal,
-but omit the password. Thereafter, uploads will use the
-password from the keyring.
-
-New in 20.1: Added keyring support.
-
-
------------------------------------------
-Configuring setup() using setup.cfg files
------------------------------------------
-
-.. note:: New in 30.3.0 (8 Dec 2016).
-
-.. important::
- A ``setup.py`` file containing a ``setup()`` function call is still
- required even if your configuration resides in ``setup.cfg``.
-
-``Setuptools`` allows using configuration files (usually :file:`setup.cfg`)
-to define a package’s metadata and other options that are normally supplied
-to the ``setup()`` function.
-
-This approach not only allows automation scenarios but also reduces
-boilerplate code in some cases.
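-
-With the metadata and options moved into ``setup.cfg``, the required
-``setup.py`` can often shrink to a minimal stub (a sketch; adjust to your
-project):
-
-.. code-block:: python
-
-    from setuptools import setup
-
-    setup()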
-
-.. note::
-
- This implementation has limited compatibility with the distutils2-like
- ``setup.cfg`` sections used by the ``pbr`` and ``d2to1`` packages.
-
- Namely: only metadata-related keys from ``metadata`` section are supported
- (except for ``description-file``); keys from ``files``, ``entry_points``
- and ``backwards_compat`` are not supported.
-
-
-.. code-block:: ini
-
- [metadata]
- name = my_package
- version = attr: src.VERSION
- description = My package description
- long_description = file: README.rst, CHANGELOG.rst, LICENSE.rst
- keywords = one, two
- license = BSD 3-Clause License
- classifiers =
- Framework :: Django
- Programming Language :: Python :: 3
- Programming Language :: Python :: 3.5
-
- [options]
- zip_safe = False
- include_package_data = True
- packages = find:
- scripts =
- bin/first.py
- bin/second.py
-
- [options.package_data]
- * = *.txt, *.rst
- hello = *.msg
-
- [options.extras_require]
- pdf = ReportLab>=1.2; RXP
- rest = docutils>=0.3; pack ==1.1, ==1.3
-
- [options.packages.find]
- exclude =
- src.subpackage1
- src.subpackage2
-
-
-Metadata and options are set in the config sections of the same name.
-
-* Keys are the same as the keyword arguments one provides to the ``setup()``
- function.
-
-* Complex values can be written comma-separated or placed one per line
- in *dangling* config values. The following are equivalent:
-
- .. code-block:: ini
-
- [metadata]
- keywords = one, two
-
- [metadata]
- keywords =
- one
- two
-
-* In some cases, complex values can be provided in dedicated subsections for
- clarity.
-
-* Some keys allow ``file:``, ``attr:``, and ``find:`` directives in order to
- cover common usecases.
-
-* Unknown keys are ignored.
-
-
-Specifying values
-=================
-
-Some values are treated as simple strings, some allow more logic.
-
-Type names used below:
-
-* ``str`` - simple string
-* ``list-comma`` - dangling list or string of comma-separated values
-* ``list-semi`` - dangling list or string of semicolon-separated values
-* ``bool`` - ``True`` is 1, yes, true
-* ``dict`` - list-comma where keys are separated from values by ``=``
-* ``section`` - values are read from a dedicated (sub)section
-
-
-Special directives:
-
-* ``attr:`` - Value is read from a module attribute. ``attr:`` supports
- callables and iterables; unsupported types are cast using ``str()``.
-* ``file:`` - Value is read from a list of files and then concatenated
-
-
-.. note::
- The ``file:`` directive is sandboxed and won't reach anything outside
- the directory containing ``setup.py``.
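-
-To illustrate ``attr:``, the ``version = attr: src.VERSION`` line in the
-example configuration above expects a plain module-level attribute, e.g. in a
-hypothetical ``src/__init__.py``:
-
-.. code-block:: python
-
-    # src/__init__.py (hypothetical); read by ``version = attr: src.VERSION``
-    VERSION = "1.2.3"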
-
-
-Metadata
---------
-
-.. note::
- The aliases given below are supported for compatibility reasons,
- but their use is not advised.
-
-============================== ================= =====
-Key Aliases Type
-============================== ================= =====
-name str
-version attr:, str
-url home-page str
-download_url download-url str
-project_urls dict
-author str
-author_email author-email str
-maintainer str
-maintainer_email maintainer-email str
-classifiers classifier file:, list-comma
-license file:, str
-description summary file:, str
-long_description long-description file:, str
-long_description_content_type str
-keywords list-comma
-platforms platform list-comma
-provides list-comma
-requires list-comma
-obsoletes list-comma
-============================== ================= =====
-
-
-Options
--------
-
-======================= =====
-Key Type
-======================= =====
-zip_safe bool
-setup_requires list-semi
-install_requires list-semi
-extras_require section
-python_requires str
-entry_points file:, section
-use_2to3 bool
-use_2to3_fixers list-comma
-use_2to3_exclude_fixers list-comma
-convert_2to3_doctests list-comma
-scripts list-comma
-eager_resources list-comma
-dependency_links list-comma
-tests_require list-semi
-include_package_data bool
-packages find:, list-comma
-package_dir dict
-package_data section
-exclude_package_data section
-namespace_packages list-comma
-py_modules list-comma
-======================= =====
-
-.. note::
-
- **packages** - The ``find:`` directive can be further configured
- in a dedicated subsection ``options.packages.find``. This subsection
-    accepts the same keys as the ``setuptools.find_packages()`` function:
- ``where``, ``include``, and ``exclude``.
-
-
-Configuration API
-=================
-
-Some automation tools may wish to access data from a configuration file.
-
-``Setuptools`` exposes a ``read_configuration()`` function for
-parsing ``metadata`` and ``options`` sections into a dictionary.
-
-
-.. code-block:: python
-
- from setuptools.config import read_configuration
-
- conf_dict = read_configuration('/home/user/dev/package/setup.cfg')
-
-
-By default, ``read_configuration()`` will read only the file provided
-in the first argument. To include values from other configuration files
-which could be in various places, set the ``find_others`` keyword argument
-to ``True``.
-
-If you have only a configuration file but not the whole package, you can still
-try to get data out of it with the help of the ``ignore_option_errors`` keyword
-argument. When it is set to ``True``, all options with errors possibly produced
-by directives, such as ``attr:`` and others, will be silently ignored.
-As a consequence, the resulting dictionary will include no such options.
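-
-For example (a sketch; both keyword arguments are optional):
-
-.. code-block:: python
-
-    from setuptools.config import read_configuration
-
-    conf_dict = read_configuration(
-        'setup.cfg', find_others=True, ignore_option_errors=True)
-    name = conf_dict['metadata']['name']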
-
-
---------------------------------
-Extending and Reusing Setuptools
---------------------------------
-
-Creating ``distutils`` Extensions
-=================================
-
-It can be hard to add new commands or setup arguments to the distutils. But
-the ``setuptools`` package makes it a bit easier, by allowing you to distribute
-a distutils extension as a separate project, and then have projects that need
-the extension just refer to it in their ``setup_requires`` argument.
-
-With ``setuptools``, your distutils extension projects can hook in new
-commands and ``setup()`` arguments just by defining "entry points". These
-are mappings from command or argument names to a specification of where to
-import a handler from. (See the section on `Dynamic Discovery of Services and
-Plugins`_ above for some more background on entry points.)
-
-
-Adding Commands
----------------
-
-You can add new ``setup`` commands by defining entry points in the
-``distutils.commands`` group. For example, if you wanted to add a ``foo``
-command, you might add something like this to your distutils extension
-project's setup script::
-
- setup(
- # ...
- entry_points={
- "distutils.commands": [
- "foo = mypackage.some_module:foo",
- ],
- },
- )
-
-(Assuming, of course, that the ``foo`` class in ``mypackage.some_module`` is
-a ``setuptools.Command`` subclass.)
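-
-Such a command class might look roughly like this (a sketch; the command's
-behavior is hypothetical)::
-
-    from setuptools import Command
-
-    class foo(Command):
-        """Print a message (purely illustrative)."""
-
-        description = "run the hypothetical foo step"
-        user_options = [("message=", "m", "message to print")]
-
-        def initialize_options(self):
-            self.message = None
-
-        def finalize_options(self):
-            if self.message is None:
-                self.message = "hello from foo"
-
-        def run(self):
-            print(self.message)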
-
-Once a project containing such entry points has been activated on ``sys.path``
-(e.g. by running "install" or "develop" with a site-packages installation
-directory), the command(s) will be available to any ``setuptools``-based setup
-scripts. It is not necessary to use the ``--command-packages`` option or
-to monkeypatch the ``distutils.command`` package to install your commands;
-``setuptools`` automatically adds a wrapper to the distutils to search for
-entry points in the active distributions on ``sys.path``. In fact, this is
-how setuptools' own commands are installed: the setuptools project's setup
-script defines entry points for them!
-
-
-Adding ``setup()`` Arguments
-----------------------------
-
-Sometimes, your commands may need additional arguments to the ``setup()``
-call. You can enable this by defining entry points in the
-``distutils.setup_keywords`` group. For example, if you wanted a ``setup()``
-argument called ``bar_baz``, you might add something like this to your
-distutils extension project's setup script::
-
- setup(
- # ...
- entry_points={
- "distutils.commands": [
- "foo = mypackage.some_module:foo",
- ],
- "distutils.setup_keywords": [
- "bar_baz = mypackage.some_module:validate_bar_baz",
- ],
- },
- )
-
-The idea here is that the entry point defines a function that will be called
-to validate the ``setup()`` argument, if it's supplied. The ``Distribution``
-object will have the initial value of the attribute set to ``None``, and the
-validation function will only be called if the ``setup()`` call sets it to
-a non-None value. Here's an example validation function::
-
- def assert_bool(dist, attr, value):
- """Verify that value is True, False, 0, or 1"""
- if bool(value) != value:
- raise DistutilsSetupError(
- "%r must be a boolean value (got %r)" % (attr,value)
- )
-
-Your function should accept three arguments: the ``Distribution`` object,
-the attribute name, and the attribute value. It should raise a
-``DistutilsSetupError`` (from the ``distutils.errors`` module) if the argument
-is invalid. Remember, your function will only be called with non-None values,
-and the default value of arguments defined this way is always None. So, your
-commands should always be prepared for the possibility that the attribute will
-be ``None`` when they access it later.
-
-If more than one active distribution defines an entry point for the same
-``setup()`` argument, *all* of them will be called. This allows multiple
-distutils extensions to define a common argument, as long as they agree on
-what values of that argument are valid.
-
-Also note that as with commands, it is not necessary to subclass or monkeypatch
-the distutils ``Distribution`` class in order to add your arguments; it is
-sufficient to define the entry points in your extension, as long as any setup
-script using your extension lists your project in its ``setup_requires``
-argument.
-
-
-Adding new EGG-INFO Files
--------------------------
-
-Some extensible applications or frameworks may want to allow third parties to
-develop plugins with application or framework-specific metadata included in
-the plugins' EGG-INFO directory, for easy access via the ``pkg_resources``
-metadata API. The easiest way to allow this is to create a distutils extension
-to be used from the plugin projects' setup scripts (via ``setup_requires``)
-that defines a new setup keyword, and then uses that data to write an EGG-INFO
-file when the ``egg_info`` command is run.
-
-The ``egg_info`` command looks for extension points in an ``egg_info.writers``
-group, and calls them to write the files. Here's a simple example of a
-distutils extension defining a setup argument ``foo_bar``, which is a list of
-lines that will be written to ``foo_bar.txt`` in the EGG-INFO directory of any
-project that uses the argument::
-
- setup(
- # ...
- entry_points={
- "distutils.setup_keywords": [
- "foo_bar = setuptools.dist:assert_string_list",
- ],
- "egg_info.writers": [
- "foo_bar.txt = setuptools.command.egg_info:write_arg",
- ],
- },
- )
-
-This simple example makes use of two utility functions defined by setuptools
-for its own use: a routine to validate that a setup keyword is a sequence of
-strings, and another one that looks up a setup argument and writes it to
-a file. Here's what the writer utility looks like::
-
- def write_arg(cmd, basename, filename):
- argname = os.path.splitext(basename)[0]
- value = getattr(cmd.distribution, argname, None)
- if value is not None:
- value = '\n'.join(value) + '\n'
- cmd.write_or_delete_file(argname, filename, value)
-
-As you can see, an ``egg_info.writers`` entry point must be a function taking
-three arguments: an ``egg_info`` command instance, the basename of the file to
-write (e.g. ``foo_bar.txt``), and the actual full filename that should be
-written to.
-
-In general, writer functions should honor the command object's ``dry_run``
-setting when writing files, and use the ``distutils.log`` object to do any
-console output. The easiest way to conform to this requirement is to use
-the ``cmd`` object's ``write_file()``, ``delete_file()``, and
-``write_or_delete_file()`` methods exclusively for your file operations. See
-those methods' docstrings for more details.
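-
-Once such a file has been written into a plugin's EGG-INFO, the application or
-framework can read it back through the ``pkg_resources`` metadata API; for
-example (a sketch; the plugin's project name is hypothetical)::
-
-    import pkg_resources
-
-    dist = pkg_resources.get_distribution("SomePlugin")
-    if dist.has_metadata("foo_bar.txt"):
-        for line in dist.get_metadata_lines("foo_bar.txt"):
-            print(line)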
-
-
-Adding Support for Revision Control Systems
--------------------------------------------------
-
-If the files you want to include in the source distribution are tracked using
-Git, Mercurial or SVN, you can use the following packages to achieve that:
-
-- Git and Mercurial: `setuptools_scm <https://pypi.org/project/setuptools_scm/>`_
-- SVN: `setuptools_svn <https://pypi.org/project/setuptools_svn/>`_
-
-If you would like to create a plugin for ``setuptools`` to find files tracked
-by another revision control system, you can do so by adding an entry point to
-the ``setuptools.file_finders`` group. The entry point should be a function
-accepting a single directory name, and should yield all the filenames within
-that directory (and any subdirectories thereof) that are under revision
-control.
-
-For example, if you were going to create a plugin for a revision control system
-called "foobar", you would write a function something like this:
-
-.. code-block:: python
-
- def find_files_for_foobar(dirname):
- # loop to yield paths that start with `dirname`
-
-And you would register it in a setup script using something like this::
-
- entry_points={
- "setuptools.file_finders": [
- "foobar = my_foobar_module:find_files_for_foobar",
- ]
- }
-
-Then, anyone who wants to use your plugin can simply install it, and their
-local setuptools installation will be able to find the necessary files.
-
-It is not necessary to distribute source control plugins with projects that
-simply use the other source control system, or to specify the plugins in
-``setup_requires``. When you create a source distribution with the ``sdist``
-command, setuptools automatically records what files were found in the
-``SOURCES.txt`` file. That way, recipients of source distributions don't need
-to have revision control at all. However, if someone is working on a package
-by checking out with that system, they will need the same plugin(s) that the
-original author is using.
-
-A few important points for writing revision control file finders:
-
-* Your finder function MUST return relative paths, created by appending to the
- passed-in directory name. Absolute paths are NOT allowed, nor are relative
- paths that reference a parent directory of the passed-in directory.
-
-* Your finder function MUST accept an empty string as the directory name,
- meaning the current directory. You MUST NOT convert this to a dot; just
- yield relative paths. So, yielding a subdirectory named ``some/dir`` under
- the current directory should NOT be rendered as ``./some/dir`` or
- ``/somewhere/some/dir``, but *always* as simply ``some/dir``
-
-* Your finder function SHOULD NOT raise any errors, and SHOULD deal gracefully
- with the absence of needed programs (i.e., ones belonging to the revision
- control system itself. It *may*, however, use ``distutils.log.warn()`` to
- inform the user of the missing program(s).
-
-
-Subclassing ``Command``
------------------------
-
-Sorry, this section isn't written yet, and neither is a lot of what's below
-this point.
-
-XXX
-
-
-Reusing ``setuptools`` Code
-===========================
-
-``ez_setup``
-------------
-
-XXX
-
-
-``setuptools.archive_util``
----------------------------
-
-XXX
-
-
-``setuptools.sandbox``
-----------------------
-
-XXX
-
-
-``setuptools.package_index``
-----------------------------
-
-XXX
-
-
-Mailing List and Bug Tracker
-============================
-
-Please use the `distutils-sig mailing list`_ for questions and discussion about
-setuptools, and the `setuptools bug tracker`_ ONLY for issues you have
-confirmed via the list are actual bugs, and which you have reduced to a minimal
-set of steps to reproduce.
-
-.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/
-.. _setuptools bug tracker: https://github.com/pypa/setuptools/
diff --git a/docs/userguide/commands.rst b/docs/userguide/commands.rst
new file mode 100644
index 0000000..e632e55
--- /dev/null
+++ b/docs/userguide/commands.rst
@@ -0,0 +1,566 @@
+-----------------
+Command Reference
+-----------------
+
+.. _alias:
+
+``alias`` - Define shortcuts for commonly used commands
+=======================================================
+
+Sometimes, you need to use the same commands over and over, but you can't
+necessarily set them as defaults. For example, if you produce both development
+snapshot releases and "stable" releases of a project, you may want to put
+the distributions in different places, or use different ``egg_info`` tagging
+options, etc. In these cases, it doesn't make sense to set the options in
+a distutils configuration file, because the values of the options change based
+on what you're trying to do.
+
+Setuptools therefore allows you to define "aliases" - shortcut names for
+an arbitrary string of commands and options, using ``setup.py alias aliasname
+expansion``, where aliasname is the name of the new alias, and the remainder of
+the command line supplies its expansion. For example, this command defines
+a sitewide alias called "daily" that sets various ``egg_info`` tagging
+options::
+
+ setup.py alias --global-config daily egg_info --tag-build=development
+
+Once the alias is defined, it can then be used with other setup commands,
+e.g.::
+
+ setup.py daily bdist_egg # generate a daily-build .egg file
+ setup.py daily sdist # generate a daily-build source distro
+ setup.py daily sdist bdist_egg # generate both
+
+The above commands are interpreted as if the word ``daily`` were replaced with
+``egg_info --tag-build=development``.
+
+Note that setuptools will expand each alias *at most once* in a given command
+line. This serves two purposes. First, if you accidentally create an alias
+loop, it will have no effect; you'll instead get an error message about an
+unknown command. Second, it allows you to define an alias for a command that
+uses that same command. For example, this (project-local) alias::
+
+ setup.py alias bdist_egg bdist_egg rotate -k1 -m.egg
+
+redefines the ``bdist_egg`` command so that it always runs the ``rotate``
+command afterwards to delete all but the newest egg file. It doesn't loop
+indefinitely on ``bdist_egg`` because the alias is only expanded once when
+used.
+
+You can remove a defined alias with the ``--remove`` (or ``-r``) option, e.g.::
+
+ setup.py alias --global-config --remove daily
+
+would delete the "daily" alias we defined above.
+
+Aliases can be defined on a project-specific, per-user, or sitewide basis. The
+default is to define or remove a project-specific alias, but you can use any of
+the `configuration file options`_ (listed under the `saveopts`_ command, below)
+to determine which distutils configuration file the alias will be added to
+(or removed from).
+
+Note that if you omit the "expansion" argument to the ``alias`` command,
+you'll get output showing that alias' current definition (and what
+configuration file it's defined in). If you omit the alias name as well,
+you'll get a listing of all current aliases along with their configuration
+file locations.
+
+
+``bdist_egg`` - Create a Python Egg for the project
+===================================================
+
+.. warning::
+ **eggs** are deprecated in favor of wheels, and not supported by pip.
+
+This command generates a Python Egg (``.egg`` file) for the project. Python
+Eggs are the preferred binary distribution format for EasyInstall, because they
+are cross-platform (for "pure" packages), directly importable, and contain
+project metadata including scripts and information about the project's
+dependencies. They can be simply downloaded and added to ``sys.path``
+directly, or they can be placed in a directory on ``sys.path`` and then
+automatically discovered by the egg runtime system.
+
+This command runs the `egg_info`_ command (if it hasn't already run) to update
+the project's metadata (``.egg-info``) directory. If you have added any extra
+metadata files to the ``.egg-info`` directory, those files will be included in
+the new egg file's metadata directory, for use by the egg runtime system or by
+any applications or frameworks that use that metadata.
+
+You won't usually need to specify any special options for this command; just
+use ``bdist_egg`` and you're done. But there are a few options that may
+be occasionally useful:
+
+``--dist-dir=DIR, -d DIR``
+ Set the directory where the ``.egg`` file will be placed. If you don't
+ supply this, then the ``--dist-dir`` setting of the ``bdist`` command
+ will be used, which is usually a directory named ``dist`` in the project
+ directory.
+
+``--plat-name=PLATFORM, -p PLATFORM``
+ Set the platform name string that will be embedded in the egg's filename
+ (assuming the egg contains C extensions). This can be used to override
+ the distutils default platform name with something more meaningful. Keep
+ in mind, however, that the egg runtime system expects to see eggs with
+ distutils platform names, so it may ignore or reject eggs with non-standard
+ platform names. Similarly, the EasyInstall program may ignore them when
+ searching web pages for download links. However, if you are
+ cross-compiling or doing some other unusual things, you might find a use
+ for this option.
+
+``--exclude-source-files``
+ Don't include any modules' ``.py`` files in the egg, just compiled Python,
+ C, and data files. (Note that this doesn't affect any ``.py`` files in the
+ EGG-INFO directory or its subdirectories, since for example there may be
+ scripts with a ``.py`` extension which must still be retained.) We don't
+ recommend that you use this option except for packages that are being
+ bundled for proprietary end-user applications, or for "embedded" scenarios
+ where space is at an absolute premium. On the other hand, if your package
+ is going to be installed and used in compressed form, you might as well
+ exclude the source because Python's ``traceback`` module doesn't currently
+ understand how to display zipped source code anyway, or how to deal with
+ files that are in a different place from where their code was compiled.
+
+There are also some options you will probably never need, but which are there
+because they were copied from similar ``bdist`` commands used as an example for
+creating this one. They may be useful for testing and debugging, however,
+which is why we kept them:
+
+``--keep-temp, -k``
+ Keep the contents of the ``--bdist-dir`` tree around after creating the
+ ``.egg`` file.
+
+``--bdist-dir=DIR, -b DIR``
+ Set the temporary directory for creating the distribution. The entire
+ contents of this directory are zipped to create the ``.egg`` file, after
+ running various installation commands to copy the package's modules, data,
+ and extensions here.
+
+``--skip-build``
+ Skip doing any "build" commands; just go straight to the
+ install-and-compress phases.
+
+
+.. _develop:
+
+``develop`` - Deploy the project source in "Development Mode"
+=============================================================
+
+This command allows you to deploy your project's source for use in one or more
+"staging areas" where it will be available for importing. This deployment is
+done in such a way that changes to the project source are immediately available
+in the staging area(s), without needing to run a build or install step after
+each change.
+
+The ``develop`` command works by creating an ``.egg-link`` file (named for the
+project) in the given staging area. If the staging area is Python's
+``site-packages`` directory, it also updates an ``easy-install.pth`` file so
+that the project is on ``sys.path`` by default for all programs run using that
+Python installation.
+
+The ``develop`` command also installs wrapper scripts in the staging area (or
+a separate directory, as specified) that will ensure the project's dependencies
+are available on ``sys.path`` before running the project's source scripts.
+And, it ensures that any missing project dependencies are available in the
+staging area, by downloading and installing them if necessary.
+
+Last, but not least, the ``develop`` command invokes the ``build_ext -i``
+command to ensure any C extensions in the project have been built and are
+up-to-date, and the ``egg_info`` command to ensure the project's metadata is
+updated (so that the runtime and wrappers know what the project's dependencies
+are). If you make any changes to the project's setup script or C extensions,
+you should rerun the ``develop`` command against all relevant staging areas to
+keep the project's scripts, metadata and extensions up-to-date. Most other
+kinds of changes to your project should not require any build operations or
+rerunning ``develop``, but keep in mind that even minor changes to the setup
+script (e.g. changing an entry point definition) require you to re-run the
+``develop`` or ``test`` commands to keep the distribution updated.
+
+Here are some of the options that the ``develop`` command accepts. Note that
+they affect the project's dependencies as well as the project itself, so if you
+have dependencies that need to be installed and you use ``--exclude-scripts``
+(for example), the dependencies' scripts will not be installed either! For
+this reason, you may want to use pip to install the project's dependencies
+before using the ``develop`` command, if you need finer control over the
+installation options for dependencies.
+
+``--uninstall, -u``
+ Un-deploy the current project. You may use the ``--install-dir`` or ``-d``
+ option to designate the staging area. The created ``.egg-link`` file will
+ be removed, if present and it is still pointing to the project directory.
+ The project directory will be removed from ``easy-install.pth`` if the
+ staging area is Python's ``site-packages`` directory.
+
+ Note that this option currently does *not* uninstall script wrappers! You
+ must uninstall them yourself, or overwrite them by using pip to install a
+ different version of the package. You can also avoid installing script
+ wrappers in the first place, if you use the ``--exclude-scripts`` (aka
+ ``-x``) option when you run ``develop`` to deploy the project.
+
+``--multi-version, -m``
+ "Multi-version" mode. Specifying this option prevents ``develop`` from
+ adding an ``easy-install.pth`` entry for the project(s) being deployed, and
+ if an entry for any version of a project already exists, the entry will be
+ removed upon successful deployment. In multi-version mode, no specific
+ version of the package is available for importing, unless you use
+ ``pkg_resources.require()`` to put it on ``sys.path``, or you are running
+ a wrapper script generated by ``setuptools``. (In which case the wrapper
+ script calls ``require()`` for you.)
+
+ Note that if you install to a directory other than ``site-packages``,
+ this option is automatically in effect, because ``.pth`` files can only be
+ used in ``site-packages`` (at least in Python 2.3 and 2.4). So, if you use
+    the ``--install-dir`` or ``-d`` option (or they are set via configuration
+    file(s)), your project and its dependencies will be deployed in
+    multi-version mode.
+
+``--install-dir=DIR, -d DIR``
+ Set the installation directory (staging area). If this option is not
+ directly specified on the command line or in a distutils configuration
+ file, the distutils default installation location is used. Normally, this
+ will be the ``site-packages`` directory, but if you are using distutils
+ configuration files, setting things like ``prefix`` or ``install_lib``,
+ then those settings are taken into account when computing the default
+ staging area.
+
+``--script-dir=DIR, -s DIR``
+ Set the script installation directory. If you don't supply this option
+ (via the command line or a configuration file), but you *have* supplied
+ an ``--install-dir`` (via command line or config file), then this option
+ defaults to the same directory, so that the scripts will be able to find
+ their associated package installation. Otherwise, this setting defaults
+ to the location where the distutils would normally install scripts, taking
+ any distutils configuration file settings into account.
+
+``--exclude-scripts, -x``
+ Don't deploy script wrappers. This is useful if you don't want to disturb
+ existing versions of the scripts in the staging area.
+
+``--always-copy, -a``
+ Copy all needed distributions to the staging area, even if they
+ are already present in another directory on ``sys.path``. By default, if
+ a requirement can be met using a distribution that is already available in
+ a directory on ``sys.path``, it will not be copied to the staging area.
+
+``--egg-path=DIR``
+ Force the generated ``.egg-link`` file to use a specified relative path
+ to the source directory. This can be useful in circumstances where your
+ installation directory is being shared by code running under multiple
+ platforms (e.g. Mac and Windows) which have different absolute locations
+ for the code under development, but the same *relative* locations with
+ respect to the installation directory. If you use this option when
+ installing, you must supply the same relative path when uninstalling.
+
+In addition to the above options, the ``develop`` command also accepts all of
+the same options accepted by ``easy_install``. If you've configured any
+``easy_install`` settings in your ``setup.cfg`` (or other distutils config
+files), the ``develop`` command will use them as defaults, unless you override
+them in a ``[develop]`` section or on the command line.
+
+
+.. _egg_info:
+
+``egg_info`` - Create egg metadata and set build tags
+=====================================================
+
+This command performs two operations: it updates a project's ``.egg-info``
+metadata directory (used by the ``bdist_egg``, ``develop``, and ``test``
+commands), and it allows you to temporarily change a project's version string,
+to support "daily builds" or "snapshot" releases. It is run automatically by
+the ``sdist``, ``bdist_egg``, ``develop``, and ``test`` commands in order to
+update the project's metadata, but you can also specify it explicitly in order
+to temporarily change the project's version string while executing other
+commands. (It also generates the ``.egg-info/SOURCES.txt`` manifest file, which
+is used when you are building source distributions.)
+
+In addition to writing the core egg metadata defined by ``setuptools`` and
+required by ``pkg_resources``, this command can be extended to write other
+metadata files as well, by defining entry points in the ``egg_info.writers``
+group. See the section on :ref:`Adding new EGG-INFO Files` below for more details.
+Note that using additional metadata writers may require you to include a
+``setup_requires`` argument to ``setup()`` in order to ensure that the desired
+writers are available on ``sys.path``.
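+
+For illustration only (the ``foo_bar`` keyword and the ``my_extension`` module
+below are hypothetical, not shipped with setuptools), such an extension would
+register a validator for the new keyword and a writer entry point::
+
+    setup(
+        # ...
+        entry_points={
+            # validate that `foo_bar` is a list of strings
+            "distutils.setup_keywords": [
+                "foo_bar = setuptools.dist:assert_string_list",
+            ],
+            # write foo_bar.txt into the EGG-INFO directory
+            "egg_info.writers": [
+                "foo_bar.txt = my_extension:write_foo_bar",
+            ],
+        },
+    )
+
+The writer itself is a function taking the ``egg_info`` command instance, the
+basename of the file to write, and the full target filename; it should use the
+command's ``write_or_delete_file()`` helper so that ``--dry-run`` is honored::
+
+    def write_foo_bar(cmd, basename, filename):
+        # the `foo_bar` keyword is expected to be a list of strings
+        value = getattr(cmd.distribution, "foo_bar", None)
+        if value is not None:
+            value = "\n".join(value) + "\n"
+        # writes the file, or deletes a stale one when no value is set
+        cmd.write_or_delete_file("foo_bar", filename, value)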
+
+
+Release Tagging Options
+-----------------------
+
+The following options can be used to modify the project's version string for
+all remaining commands on the setup command line. The options are processed
+in the order shown, so if you use more than one, the requested tags will be
+added in the following order:
+
+``--tag-build=NAME, -b NAME``
+ Append NAME to the project's version string. Due to the way setuptools
+ processes "pre-release" version suffixes beginning with the letters "a"
+ through "e" (like "alpha", "beta", and "candidate"), you will usually want
+ to use a tag like ".build" or ".dev", as this will cause the version number
+ to be considered *lower* than the project's default version. (If you
+ want to make the version number *higher* than the default version, you can
+ always leave off --tag-build and then use one or both of the following
+ options.)
+
+ If you have a default build tag set in your ``setup.cfg``, you can suppress
+ it on the command line using ``-b ""`` or ``--tag-build=""`` as an argument
+ to the ``egg_info`` command.
+
+``--tag-date, -d``
+ Add a date stamp of the form "-YYYYMMDD" (e.g. "-20050528") to the
+ project's version number.
+
+``--no-date, -D``
+ Don't include a date stamp in the version number. This option is included
+ so you can override a default setting in ``setup.cfg``.
+
+
+(Note: Because these options modify the version number used for source and
+binary distributions of your project, you should first make sure that you know
+how the resulting version numbers will be interpreted by automated tools
+like pip. See the section above on :ref:`Specifying Your Project's Version` for an
+explanation of pre- and post-release tags, as well as tips on how to choose and
+verify a versioning scheme for your project.)
+
+For advanced uses, there is one other option that can be set, to change the
+location of the project's ``.egg-info`` directory. Commands that need to find
+the project's source directory or metadata should get it from this setting:
+
+
+Other ``egg_info`` Options
+--------------------------
+
+``--egg-base=SOURCEDIR, -e SOURCEDIR``
+ Specify the directory that should contain the .egg-info directory. This
+ should normally be the root of your project's source tree (which is not
+ necessarily the same as your project directory; some projects use a ``src``
+ or ``lib`` subdirectory as the source root). You should not normally need
+ to specify this directory, as it is normally determined from the
+ ``package_dir`` argument to the ``setup()`` function, if any. If there is
+ no ``package_dir`` set, this option defaults to the current directory.
+
+
+``egg_info`` Examples
+---------------------
+
+Creating a dated "nightly build" snapshot egg::
+
+ setup.py egg_info --tag-date --tag-build=DEV bdist_egg
+
+Creating a release with no version tags, even if some default tags are
+specified in ``setup.cfg``::
+
+    setup.py egg_info -Db "" sdist bdist_egg
+
+(Notice that ``egg_info`` must always appear on the command line *before* any
+commands that you want the version changes to apply to.)
+
+.. _rotate:
+
+``rotate`` - Delete outdated distribution files
+===============================================
+
+As you develop new versions of your project, your distribution (``dist``)
+directory will gradually fill up with older source and/or binary distribution
+files. The ``rotate`` command lets you automatically clean these up, keeping
+only the N most-recently modified files matching a given pattern.
+
+``--match=PATTERNLIST, -m PATTERNLIST``
+ Comma-separated list of glob patterns to match. This option is *required*.
+ The project name and ``-*`` is prepended to the supplied patterns, in order
+ to match only distributions belonging to the current project (in case you
+ have a shared distribution directory for multiple projects). Typically,
+ you will use a glob pattern like ``.zip`` or ``.egg`` to match files of
+ the specified type. Note that each supplied pattern is treated as a
+ distinct group of files for purposes of selecting files to delete.
+
+``--keep=COUNT, -k COUNT``
+ Number of matching distributions to keep. For each group of files
+ identified by a pattern specified with the ``--match`` option, delete all
+ but the COUNT most-recently-modified files in that group. This option is
+ *required*.
+
+``--dist-dir=DIR, -d DIR``
+ Directory where the distributions are. This defaults to the value of the
+ ``bdist`` command's ``--dist-dir`` option, which will usually be the
+ project's ``dist`` subdirectory.
+
+**Example 1**: Delete all .tar.gz files from the distribution directory, except
+for the 3 most recently modified ones::
+
+ setup.py rotate --match=.tar.gz --keep=3
+
+**Example 2**: Delete all Python 2.3 or Python 2.4 eggs from the distribution
+directory, except the most recently modified one for each Python version::
+
+ setup.py rotate --match=-py2.3*.egg,-py2.4*.egg --keep=1
+
+
+.. _saveopts:
+
+``saveopts`` - Save used options to a configuration file
+========================================================
+
+Finding and editing ``distutils`` configuration files can be a pain, especially
+since you also have to translate the configuration options from command-line
+form to the proper configuration file format. You can avoid these hassles by
+using the ``saveopts`` command. Just add it to the command line to save the
+options you used. For example, this command builds the project using
+the ``mingw32`` C compiler, then saves the --compiler setting as the default
+for future builds (even those run implicitly by the ``install`` command)::
+
+ setup.py build --compiler=mingw32 saveopts
+
+The ``saveopts`` command saves all options for every command specified on the
+command line to the project's local ``setup.cfg`` file, unless you use one of
+the `configuration file options`_ to change where the options are saved. For
+example, this command does the same as above, but saves the compiler setting
+to the site-wide (global) distutils configuration::
+
+ setup.py build --compiler=mingw32 saveopts -g
+
+Note that it doesn't matter where you place the ``saveopts`` command on the
+command line; it will still save all the options specified for all commands.
+For example, this is another valid way to spell the last example::
+
+ setup.py saveopts -g build --compiler=mingw32
+
+Note, however, that all of the commands specified are always run, regardless of
+where ``saveopts`` is placed on the command line.
+
+
+Configuration File Options
+--------------------------
+
+Normally, settings such as options and aliases are saved to the project's
+local ``setup.cfg`` file. But you can override this and save them to the
+global or per-user configuration files, or to a manually-specified filename.
+
+``--global-config, -g``
+ Save settings to the global ``distutils.cfg`` file inside the ``distutils``
+ package directory. You must have write access to that directory to use
+ this option. You also can't combine this option with ``-u`` or ``-f``.
+
+``--user-config, -u``
+ Save settings to the current user's ``~/.pydistutils.cfg`` (POSIX) or
+ ``$HOME/pydistutils.cfg`` (Windows) file. You can't combine this option
+ with ``-g`` or ``-f``.
+
+``--filename=FILENAME, -f FILENAME``
+    Save settings to the specified configuration file. You can't combine
+    this option with ``-g`` or ``-u``. Note that if you specify a
+    non-standard filename, ``distutils`` and ``setuptools`` will not
+    use the file's contents. This option is mainly included for use in
+ testing.
+
+These options are used by other ``setuptools`` commands that modify
+configuration files, such as the `alias`_ and `setopt`_ commands.
+
+
+.. _setopt:
+
+``setopt`` - Set a distutils or setuptools option in a config file
+==================================================================
+
+This command is mainly for use by scripts, but it can also be used as a quick
+and dirty way to change a distutils configuration option without having to
+remember what file the options are in and then open an editor.
+
+**Example 1**. Set the default C compiler to ``mingw32`` (using long option
+names)::
+
+ setup.py setopt --command=build --option=compiler --set-value=mingw32
+
+**Example 2**. Remove any setting for the distutils default package
+installation directory (short option names)::
+
+ setup.py setopt -c install -o install_lib -r
+
+
+Options for the ``setopt`` command:
+
+``--command=COMMAND, -c COMMAND``
+ Command to set the option for. This option is required.
+
+``--option=OPTION, -o OPTION``
+ The name of the option to set. This option is required.
+
+``--set-value=VALUE, -s VALUE``
+ The value to set the option to. Not needed if ``-r`` or ``--remove`` is
+ set.
+
+``--remove, -r``
+ Remove (unset) the option, instead of setting it.
+
+In addition to the above options, you may use any of the `configuration file
+options`_ (listed under the `saveopts`_ command, above) to determine which
+distutils configuration file the option will be added to (or removed from).
+
+
+.. _test:
+
+``test`` - Build package and run a unittest suite
+=================================================
+
+.. warning::
+ ``test`` is deprecated and will be removed in a future version. Users
+ looking for a generic test entry point independent of test runner are
+ encouraged to use `tox <https://tox.readthedocs.io>`_.
+
+When doing test-driven development, or running automated builds that need
+testing before they are deployed for downloading or use, it's often useful
+to be able to run a project's unit tests without actually deploying the project
+anywhere, even using the ``develop`` command. The ``test`` command runs a
+project's unit tests without actually deploying it, by temporarily putting the
+project's source on ``sys.path``, after first running ``build_ext -i`` and
+``egg_info`` to ensure that any C extensions and project metadata are
+up-to-date.
+
+To use this command, your project's tests must be wrapped in a ``unittest``
+test suite by either a function, a ``TestCase`` class or method, or a module
+or package containing ``TestCase`` classes. If the named suite is a module,
+and the module has an ``additional_tests()`` function, it is called and the
+result (which must be a ``unittest.TestSuite``) is added to the tests to be
+run. If the named suite is a package, any submodules and subpackages are
+recursively added to the overall test suite. (Note: if your project specifies
+a ``test_loader``, the rules for processing the chosen ``test_suite`` may
+differ; see the :ref:`test_loader <test_loader>` documentation for more details.)
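+
+For illustration, a minimal test module satisfying these rules might look like
+the following (the ``my_package`` name and module path are made up for this
+example)::
+
+    # my_package/tests/test_all.py
+    import doctest
+    import unittest
+
+    import my_package
+
+
+    class TestBasics(unittest.TestCase):
+        def test_import(self):
+            # trivial check that the package imports correctly
+            self.assertTrue(hasattr(my_package, "__name__"))
+
+
+    def additional_tests():
+        # Called automatically when this module is named as the test suite;
+        # it must return a unittest.TestSuite.  Assumes my_package's docstrings
+        # contain doctest examples worth collecting.
+        return doctest.DocTestSuite(my_package)
+
+Such a module would then be selected with ``test_suite="my_package.tests.test_all"``
+in ``setup()`` or via the ``--test-suite`` option described below.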
+
+Note that many test systems including ``doctest`` support wrapping their
+non-``unittest`` tests in ``TestSuite`` objects. So, if you are using a test
+package that does not support this, we suggest you encourage its developers to
+implement test suite support, as this is a convenient and standard way to
+aggregate a collection of tests to be run under a common test harness.
+
+By default, tests will be run in the "verbose" mode of the ``unittest``
+package's text test runner, but you can get the "quiet" mode (just dots) if
+you supply the ``-q`` or ``--quiet`` option, either as a global option to
+the setup script (e.g. ``setup.py -q test``) or as an option for the ``test``
+command itself (e.g. ``setup.py test -q``). There is one other option
+available:
+
+``--test-suite=NAME, -s NAME``
+ Specify the test suite (or module, class, or method) to be run
+ (e.g. ``some_module.test_suite``). The default for this option can be
+ set by giving a ``test_suite`` argument to the ``setup()`` function, e.g.::
+
+ setup(
+ # ...
+ test_suite="my_package.tests.test_all"
+ )
+
+ If you did not set a ``test_suite`` in your ``setup()`` call, and do not
+ provide a ``--test-suite`` option, an error will occur.
+
+New in 41.5.0: Deprecated the test command.
+
+
+.. _upload:
+
+``upload`` - Upload source and/or egg distributions to PyPI
+===========================================================
+
+The ``upload`` command was deprecated in version 40.0 and removed in version
+42.0. Use `twine <https://pypi.org/p/twine>`_ instead.
+
+For more information on the current best practices in uploading your packages
+to PyPI, see the Python Packaging User Guide's "Packaging Python Projects"
+tutorial, specifically the section on `uploading the distribution archives
+<https://packaging.python.org/tutorials/packaging-projects/#uploading-the-distribution-archives>`_.
diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
new file mode 100644
index 0000000..9817e63
--- /dev/null
+++ b/docs/userguide/datafiles.rst
@@ -0,0 +1,206 @@
+====================
+Data Files Support
+====================
+
+The distutils have traditionally allowed installation of "data files", which
+are placed in a platform-specific location. However, the most common use case
+for data files distributed with a package is for use *by* the package, usually
+by including the data files **inside the package directory**.
+
+Setuptools offers three ways to specify this most common type of data files to
+be included in your packages [#datafiles]_.
+First, you can simply use the ``include_package_data`` keyword, e.g.::
+
+ from setuptools import setup, find_packages
+ setup(
+ ...
+ include_package_data=True
+ )
+
+This tells setuptools to install any data files it finds in your packages.
+The data files must be specified via the |MANIFEST.in|_ file.
+(They can also be tracked by a revision control system, using an appropriate
+plugin such as :pypi:`setuptools-scm` or :pypi:`setuptools-svn`.
+See the section below on :ref:`Adding Support for Revision
+Control Systems` for information on how to write such plugins.)
+
+If you want finer-grained control over what files are included (for example,
+if you have documentation files in your package directories and want to exclude
+them from installation), then you can also use the ``package_data`` keyword,
+e.g.::
+
+ from setuptools import setup, find_packages
+ setup(
+ ...
+ package_data={
+ # If any package contains *.txt or *.rst files, include them:
+ "": ["*.txt", "*.rst"],
+ # And include any *.msg files found in the "hello" package, too:
+ "hello": ["*.msg"],
+ }
+ )
+
+The ``package_data`` argument is a dictionary that maps from package names to
+lists of glob patterns. The globs may include subdirectory names, if the data
+files are contained in a subdirectory of the package. For example, if the
+package tree looks like this::
+
+ setup.py
+ src/
+ mypkg/
+ __init__.py
+ mypkg.txt
+ data/
+ somefile.dat
+ otherdata.dat
+
+The setuptools setup file might look like this::
+
+ from setuptools import setup, find_packages
+ setup(
+ ...
+ packages=find_packages("src"), # include all packages under src
+ package_dir={"": "src"}, # tell distutils packages are under src
+
+ package_data={
+ # If any package contains *.txt files, include them:
+ "": ["*.txt"],
+ # And include any *.dat files found in the "data" subdirectory
+ # of the "mypkg" package, also:
+ "mypkg": ["data/*.dat"],
+ }
+ )
+
+Notice that if you list patterns in ``package_data`` under the empty string,
+these patterns are used to find files in every package, even ones that also
+have their own patterns listed. Thus, in the above example, the ``mypkg.txt``
+file gets included even though it's not listed in the patterns for ``mypkg``.
+
+Also notice that if you use paths, you *must* use a forward slash (``/``) as
+the path separator, even if you are on Windows. Setuptools automatically
+converts slashes to appropriate platform-specific separators at build time.
+
+If datafiles are contained in a subdirectory of a package that isn't a package
+itself (no ``__init__.py``), then the subdirectory names (or ``*``) are required
+in the ``package_data`` argument (as shown above with ``"data/*.dat"``).
+
+When building an ``sdist``, the datafiles are also drawn from the
+``package_name.egg-info/SOURCES.txt`` file, so make sure that this file is
+removed if the ``package_data`` list in ``setup.py`` is updated before calling
+``setup.py``.
+
+.. note::
+ If using the ``include_package_data`` argument, files specified by
+ ``package_data`` will *not* be automatically added to the manifest unless
+ they are listed in the |MANIFEST.in|_ file or by a plugin like
+ :pypi:`setuptools-scm` or :pypi:`setuptools-svn`.
+
+.. https://docs.python.org/3/distutils/setupscript.html#installing-package-data
+
+Sometimes, the ``include_package_data`` or ``package_data`` options alone
+aren't sufficient to precisely define what files you want included. For
+example, you may want to include package README files in your revision control
+system and source distributions, but exclude them from being installed. So,
+setuptools offers an ``exclude_package_data`` option as well, that allows you
+to do things like this::
+
+ from setuptools import setup, find_packages
+ setup(
+ ...
+ packages=find_packages("src"), # include all packages under src
+ package_dir={"": "src"}, # tell distutils packages are under src
+
+ include_package_data=True, # include everything in source control
+
+ # ...but exclude README.txt from all packages
+ exclude_package_data={"": ["README.txt"]},
+ )
+
+The ``exclude_package_data`` option is a dictionary mapping package names to
+lists of wildcard patterns, just like the ``package_data`` option. And, just
+as with that option, a key of ``""`` will apply the given pattern(s) to all
+packages. However, any files that match these patterns will be *excluded*
+from installation, even if they were listed in ``package_data`` or were
+included as a result of using ``include_package_data``.
+
+In summary, the three options allow you to:
+
+``include_package_data``
+ Accept all data files and directories matched by |MANIFEST.in|_ or added by
+ a :ref:`plugin <Adding Support for Revision Control Systems>`.
+
+``package_data``
+ Specify additional patterns to match files that may or may
+ not be matched by |MANIFEST.in|_ or added by
+ a :ref:`plugin <Adding Support for Revision Control Systems>`.
+
+``exclude_package_data``
+ Specify patterns for data files and directories that should *not* be
+ included when a package is installed, even if they would otherwise have
+ been included due to the use of the preceding options.
+
+NOTE: Due to the way the distutils build process works, a data file that you
+include in your project and then stop including may be "orphaned" in your
+project's build directories, requiring you to run ``setup.py clean --all`` to
+fully remove it. This may also be important for your users and contributors
+if they track intermediate revisions of your project using Subversion; be sure
+to let them know when you make changes that remove files from inclusion so they
+can run ``setup.py clean --all``.
+
+
+.. _Accessing Data Files at Runtime:
+
+Accessing Data Files at Runtime
+-------------------------------
+
+Typically, existing programs manipulate a package's ``__file__`` attribute in
+order to find the location of data files. However, this manipulation isn't
+compatible with PEP 302-based import hooks, including importing from zip files
+and Python Eggs. If you are using data files, it is strongly recommended that
+you use :mod:`importlib.resources` to access them.
+:mod:`importlib.resources` was added to Python 3.7 and the latest version of
+the library is also available via the :pypi:`importlib-resources` backport.
+See :doc:`importlib-resources:using` for detailed instructions [#importlib]_.
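+
+As a small sketch (reusing the ``mypkg`` layout shown earlier on this page,
+where the package ships a ``data/somefile.dat`` file), the recommended access
+pattern looks roughly like this::
+
+    # requires Python >= 3.9, or the importlib-resources backport
+    from importlib.resources import files
+
+    # returns a Traversable object pointing inside the installed package,
+    # regardless of whether it lives on disk or inside a zip file
+    resource = files("mypkg") / "data" / "somefile.dat"
+    contents = resource.read_bytes()
+
+On older Python versions the same code works after installing the backport and
+importing ``files`` from ``importlib_resources`` instead.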
+
+.. tip:: Files inside the package directory should be *read-only* to avoid a
+ series of common problems (e.g. when multiple users share a common Python
+ installation, when the package is loaded from a zip file, or when multiple
+ instances of a Python application run in parallel).
+
+ If your Python package needs to write to a file for shared data or configuration,
+ you can use standard platform/OS-specific system directories, such as
+ ``~/.local/config/$appname`` or ``/usr/share/$appname/$version`` (Linux specific) [#system-dirs]_.
+ A common approach is to add a read-only template file to the package
+ directory that is then copied to the correct system directory if no
+   pre-existing file is found (see the sketch below).
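+
+A very rough sketch of that copy-a-template approach, using the third-party
+:pypi:`platformdirs` package mentioned in the footnotes (the ``myapp`` name and
+the ``config_template.toml`` file are invented for this example), could look
+like::
+
+    from pathlib import Path
+    from importlib.resources import files
+
+    import platformdirs
+
+    def ensure_user_config():
+        """Copy the packaged template to the user config dir on first use."""
+        config_dir = Path(platformdirs.user_config_dir("myapp"))
+        config_file = config_dir / "config.toml"
+        if not config_file.exists():
+            config_dir.mkdir(parents=True, exist_ok=True)
+            template = files("myapp") / "config_template.toml"
+            config_file.write_text(template.read_text())
+        return config_file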
+
+
+Non-Package Data Files
+----------------------
+
+Historically, ``setuptools`` by way of ``easy_install`` would encapsulate data
+files from the distribution into the egg (see `the old docs
+<https://github.com/pypa/setuptools/blob/52aacd5b276fedd6849c3a648a0014f5da563e93/docs/setuptools.txt#L970-L1001>`_). As eggs are deprecated and pip-based installs
+fall back to the platform-specific location for installing data files, there is
+no supported facility to reliably retrieve these resources.
+
+Instead, the PyPA recommends that any data files you wish to be accessible at
+run time be included **inside the package**.
+
+
+----
+
+.. [#datafiles] ``setuptools`` considers a *package data file* any non-Python
+ file **inside the package directory** (i.e., that co-exists in the same
+ location as the regular ``.py`` files being distributed).
+
+.. [#system-dirs] These locations can be discovered with the help of
+ third-party libraries such as :pypi:`platformdirs`.
+
+.. [#importlib] Recent versions of :mod:`importlib.resources` available in
+   Python's standard library should be API compatible with the
+   :pypi:`importlib-resources` backport. However, this might vary depending on
+   which version of Python is installed.
+
+
+.. |MANIFEST.in| replace:: ``MANIFEST.in``
+.. _MANIFEST.in: https://packaging.python.org/en/latest/guides/using-manifest-in/
diff --git a/docs/userguide/declarative_config.rst b/docs/userguide/declarative_config.rst
new file mode 100644
index 0000000..52379db
--- /dev/null
+++ b/docs/userguide/declarative_config.rst
@@ -0,0 +1,282 @@
+.. _declarative config:
+
+------------------------------------------------
+Configuring setuptools using ``setup.cfg`` files
+------------------------------------------------
+
+.. note:: New in 30.3.0 (8 Dec 2016).
+
+.. important::
+ If compatibility with legacy builds (i.e. those not using the :pep:`517`
+ build API) is desired, a ``setup.py`` file containing a ``setup()`` function
+ call is still required even if your configuration resides in ``setup.cfg``.
+
+``Setuptools`` allows using configuration files (usually :file:`setup.cfg`)
+to define a package’s metadata and other options that are normally supplied
+to the ``setup()`` function (declarative config).
+
+This approach not only allows automation scenarios but also reduces
+boilerplate code in some cases.
+
+.. _example-setup-config:
+
+.. code-block:: ini
+
+ [metadata]
+ name = my_package
+ version = attr: my_package.VERSION
+ description = My package description
+ long_description = file: README.rst, CHANGELOG.rst, LICENSE.rst
+ keywords = one, two
+ license = BSD 3-Clause License
+ classifiers =
+ Framework :: Django
+ Programming Language :: Python :: 3
+
+ [options]
+ zip_safe = False
+ include_package_data = True
+ packages = find:
+ install_requires =
+ requests
+ importlib-metadata; python_version<"3.8"
+
+ [options.package_data]
+ * = *.txt, *.rst
+ hello = *.msg
+
+ [options.entry_points]
+ console_scripts =
+ executable-name = my_package.module:function
+
+ [options.extras_require]
+ pdf = ReportLab>=1.2; RXP
+ rest = docutils>=0.3; pack ==1.1, ==1.3
+
+ [options.packages.find]
+ exclude =
+ examples*
+ tools*
+ docs*
+ my_package.tests*
+
+Metadata and options are set in the config sections of the same name.
+
+* Keys are the same as the keyword arguments one provides to the ``setup()``
+ function.
+
+* Complex values can be written comma-separated or placed one per line
+ in *dangling* config values. The following are equivalent:
+
+ .. code-block:: ini
+
+ [metadata]
+ keywords = one, two
+
+ [metadata]
+ keywords =
+ one
+ two
+
+* In some cases, complex values can be provided in dedicated subsections for
+ clarity.
+
+* Some keys allow ``file:``, ``attr:``, ``find:``, and ``find_namespace:`` directives in
+  order to cover common use cases.
+
+* Unknown keys are ignored.
+
+
+Using a ``src/`` layout
+=======================
+
+One commonly used package configuration has all the module source code in a
+subdirectory (often called the ``src/`` layout), like this::
+
+ ├── src
+ │   └── mypackage
+ │   ├── __init__.py
+ │   └── mod1.py
+ ├── setup.py
+ └── setup.cfg
+
+You can set up your ``setup.cfg`` to automatically find all your packages in
+the subdirectory like this:
+
+.. code-block:: ini
+
+ # This example contains just the necessary options for a src-layout, set up
+ # the rest of the file as described above.
+
+ [options]
+ package_dir=
+ =src
+ packages=find:
+
+ [options.packages.find]
+ where=src
+
+Specifying values
+=================
+
+Some values are treated as simple strings, some allow more logic.
+
+Type names used below:
+
+* ``str`` - simple string
+* ``list-comma`` - dangling list or string of comma-separated values
+* ``list-semi`` - dangling list or string of semicolon-separated values
+* ``bool`` - ``True`` is 1, yes, true
+* ``dict`` - list-comma where keys are separated from values by ``=``
+* ``section`` - values are read from a dedicated (sub)section
+
+
+Special directives:
+
+* ``attr:`` - Value is read from a module attribute. ``attr:`` supports
+ callables and iterables; unsupported types are cast using ``str()``.
+
+ In order to support the common case of a literal value assigned to a variable
+ in a module containing (directly or indirectly) third-party imports,
+ ``attr:`` first tries to read the value from the module by examining the
+  module's AST. If that fails, ``attr:`` falls back to importing the module
+  (see the sketch after this list).
+
+* ``file:`` - Value is read from a list of files and then concatenated
+
+ .. note::
+ The ``file:`` directive is sandboxed and won't reach anything outside
+ the directory containing ``setup.py``.
+
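+For example, the ``version = attr: my_package.VERSION`` line from the example
+at the top of this page would typically point at a plain literal assignment
+(the package name and value here are purely illustrative), which the AST-based
+reader can pick up without importing the package:
+
+.. code-block:: python
+
+    # my_package/__init__.py
+    VERSION = "1.2.3"  # a simple literal; readable without importing the package
+
+If ``VERSION`` were computed at import time instead, ``attr:`` would fall back
+to importing ``my_package`` to evaluate it.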
+
+Metadata
+--------
+
+.. note::
+ The aliases given below are supported for compatibility reasons,
+ but their use is not advised.
+
+============================== ================= ================= =============== ==========
+Key Aliases Type Minimum Version Notes
+============================== ================= ================= =============== ==========
+name str
+version attr:, file:, str 39.2.0 [#meta-1]_
+url home-page str
+download_url download-url str
+project_urls dict 38.3.0
+author str
+author_email author-email str
+maintainer str
+maintainer_email maintainer-email str
+classifiers classifier file:, list-comma
+license str
+license_files license_file list-comma 42.0.0
+description summary file:, str
+long_description long-description file:, str
+long_description_content_type str 38.6.0
+keywords list-comma
+platforms platform list-comma
+provides list-comma
+requires list-comma
+obsoletes list-comma
+============================== ================= ================= =============== ==========
+
+**Notes**:
+
+.. [#meta-1] The ``version`` file attribute has only been supported since 39.2.0.
+
+ A version loaded using the ``file:`` directive must comply with PEP 440.
+ It is easy to accidentally put something other than a valid version
+ string in such a file, so validation is stricter in this case.
+
+
+Options
+-------
+
+======================= =================================== =============== =========
+Key Type Minimum Version Notes
+======================= =================================== =============== =========
+zip_safe bool
+setup_requires list-semi 36.7.0
+install_requires list-semi
+extras_require section [#opt-2]_
+python_requires str 34.4.0
+entry_points file:, section 51.0.0
+scripts list-comma
+eager_resources list-comma
+dependency_links list-comma
+tests_require list-semi
+include_package_data bool
+packages find:, find_namespace:, list-comma [#opt-3]_
+package_dir dict
+package_data section [#opt-1]_
+exclude_package_data section
+namespace_packages list-comma
+py_modules list-comma 34.4.0
+data_files section 40.6.0 [#opt-4]_
+======================= =================================== =============== =========
+
+**Notes**:
+
+.. [#opt-1] In the ``package_data`` section, a key named with a single asterisk
+ (``*``) refers to all packages, in lieu of the empty string used in ``setup.py``.
+
+.. [#opt-2] In the ``extras_require`` section, values are parsed as ``list-semi``.
+ This implies that in order to include markers, they **must** be *dangling*:
+
+ .. code-block:: ini
+
+ [options.extras_require]
+ rest = docutils>=0.3; pack ==1.1, ==1.3
+ pdf =
+ ReportLab>=1.2
+ RXP
+ importlib-metadata; python_version < "3.8"
+
+.. [#opt-3] The ``find:`` and ``find_namespace:`` directives can be further configured
+   in a dedicated subsection ``options.packages.find``. This subsection accepts the
+   same keys as the ``setuptools.find_packages`` and
+   ``setuptools.find_namespace_packages`` functions:
+ ``where``, ``include``, and ``exclude``.
+
+ The ``find_namespace:`` directive is supported since Python >=3.3.
+
+.. [#opt-4] ``data_files`` is deprecated and should be avoided.
+ Please check :doc:`/userguide/datafiles` for more information.
+
+
+Compatibility with other tools
+==============================
+
+Historically, several tools explored declarative package configuration
+in parallel, and several of them chose to place the packaging
+configuration within the project's :file:`setup.cfg` file.
+One of the first was ``distutils2``, whose development stopped in
+2013. Others include ``pbr``, which is still under active development, and
+``d2to1``, a plug-in that backports declarative configuration
+to ``distutils`` but has had no release since Oct. 2015.
+As a way to harmonize packaging tools, ``setuptools``, having held the
+position of *de facto* standard, has gradually integrated those
+features into its core.
+
+Still, this has led to some confusion and feature incompatibilities:
+
+- some tools support features others don't;
+- some have similar features but the declarative syntax differs;
+
+The table below tries to summarize the differences. But please refer
+to each tool's documentation for up-to-date information.
+
+=========================== ========== ========== ===== ===
+feature setuptools distutils2 d2to1 pbr
+=========================== ========== ========== ===== ===
+[metadata] description-file S Y Y Y
+[files] S Y Y Y
+entry_points Y Y Y S
+[backwards_compat] N Y Y Y
+=========================== ========== ========== ===== ===
+
+Y: supported, N: unsupported, S: syntax differs (see
+:ref:`above example<example-setup-config>`).
+
+Also note that some features were only recently added to ``setuptools``.
+Please refer to the previous sections to find out when.
diff --git a/docs/userguide/dependency_management.rst b/docs/userguide/dependency_management.rst
new file mode 100644
index 0000000..279f794
--- /dev/null
+++ b/docs/userguide/dependency_management.rst
@@ -0,0 +1,443 @@
+=====================================
+Dependencies Management in Setuptools
+=====================================
+
+There are three types of dependency styles offered by setuptools:
+1) build system requirement, 2) required dependency and 3) optional
+dependency.
+
+.. Note::
+   Packages that are added as dependencies can optionally have a specific
+ version by following `PEP 440 <https://www.python.org/dev/peps/pep-0440/>`_
+
+
+Build system requirement
+========================
+
+Package requirement
+-------------------
+After organizing all the scripts and files and getting ready for packaging,
+there needs to be a way to specify which programs are needed to actually
+do the packaging (in our case, ``setuptools`` of course). Usually,
+you also need the ``wheel`` package, since it is recommended that you
+upload a ``.whl`` file to PyPI alongside your ``.tar.gz`` file. Unlike the
+other two types of dependency keyword, this one is specified in your
+``pyproject.toml`` file (if you have forgotten what this is, go to
+:doc:`quickstart` or (WIP)):
+
+.. code-block:: ini
+
+ [build-system]
+ requires = ["setuptools"]
+ #...
+
+.. note::
+ This used to be accomplished with the ``setup_requires`` keyword but is
+ now considered deprecated in favor of the PEP 517 style described above.
+ To peek into how this legacy keyword is used, consult our :doc:`guide on
+ deprecated practice (WIP) <../deprecated/index>`
+
+
+.. _Declaring Dependencies:
+
+Declaring required dependency
+=============================
+This is where a package declares its core dependencies, without which it won't
+be able to run. ``setuptools`` supports automatically downloading and installing
+these dependencies when the package is installed. Although there is more
+finesse to it, let's start with a simple example.
+
+.. tab:: setup.cfg
+
+ .. code-block:: ini
+
+ [options]
+ #...
+ install_requires =
+ docutils
+ BazSpam ==1.1
+
+.. tab:: setup.py
+
+ .. code-block:: python
+
+ setup(
+ ...,
+ install_requires=[
+ 'docutils',
+ 'BazSpam ==1.1',
+ ],
+ )
+
+.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_
+
+ .. code-block:: toml
+
+ [project]
+ # ...
+ dependencies = [
+ "docutils",
+ "BazSpam == 1.1",
+ ]
+ # ...
+
+
+When your project is installed (e.g. using pip): 1) all of the dependencies not
+already installed will be located (via PyPI), downloaded, built (if necessary),
+and installed; and 2) any scripts in your project will be installed with wrappers
+that verify the availability of the specified dependencies at runtime.
+
+
+Platform specific dependencies
+------------------------------
+Setuptools offers the capability to evaluate certain conditions before blindly
+installing everything listed in ``install_requires``. This is great for
+platform-specific dependencies. For example, the ``enum`` package was added in
+Python 3.4; therefore, a package that depends on it can elect to install it
+only when the Python version is older than 3.4. To accomplish this:
+
+.. tab:: setup.cfg
+
+ .. code-block:: ini
+
+ [options]
+ #...
+ install_requires =
+ enum34;python_version<'3.4'
+
+.. tab:: setup.py
+
+ .. code-block:: python
+
+ setup(
+ ...,
+ install_requires=[
+ "enum34;python_version<'3.4'",
+ ],
+ )
+
+.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_
+
+ .. code-block:: toml
+
+ [project]
+ # ...
+ dependencies = [
+ "enum34; python_version<'3.4'",
+ ]
+ # ...
+
+Similarly, if you also wish to declare ``pywin32`` with a minimum version of 1.0
+and only install it if the user is using a Windows operating system:
+
+.. tab:: setup.cfg
+
+ .. code-block:: ini
+
+ [options]
+ #...
+ install_requires =
+ enum34;python_version<'3.4'
+ pywin32 >= 1.0;platform_system=='Windows'
+
+.. tab:: setup.py
+
+ .. code-block:: python
+
+ setup(
+ ...,
+ install_requires=[
+ "enum34;python_version<'3.4'",
+ "pywin32 >= 1.0;platform_system=='Windows'",
+ ],
+ )
+
+.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_
+
+ .. code-block:: toml
+
+ [project]
+ # ...
+ dependencies = [
+ "enum34; python_version<'3.4'",
+ "pywin32 >= 1.0; platform_system=='Windows'",
+ ]
+ # ...
+
+The environmental markers that may be used for testing platform types are
+detailed in `PEP 508 <https://www.python.org/dev/peps/pep-0508/>`_.
+
+
+Dependencies that aren't in PyPI
+--------------------------------
+.. warning::
+ Dependency links support has been dropped by pip starting with version
+ 19.0 (released 2019-01-22).
+
+If your project depends on packages that don't exist on PyPI, you may still be
+able to depend on them, as long as they are available for download as:
+
+- an egg, in the standard distutils ``sdist`` format,
+- a single ``.py`` file, or
+- a VCS repository (Subversion, Mercurial, or Git).
+
+You just need to add some URLs to the ``dependency_links`` argument to
+``setup()``.
+
+The URLs must be either:
+
+1. direct download URLs,
+2. the URLs of web pages that contain direct download links, or
+3. the repository's URL
+
+In general, it's better to link to web pages, because it is usually less
+complex to update a web page than to release a new version of your project.
+You can also use a SourceForge ``showfiles.php`` link in the case where a
+package you depend on is distributed via SourceForge.
+
+If you depend on a package that's distributed as a single ``.py`` file, you
+must include an ``"#egg=project-version"`` suffix to the URL, to give a project
+name and version number. (Be sure to escape any dashes in the name or version
+by replacing them with underscores.) EasyInstall will recognize this suffix
+and automatically create a trivial ``setup.py`` to wrap the single ``.py`` file
+as an egg.
+
+In the case of a VCS checkout, you should also append ``#egg=project-version``
+in order to identify for what package that checkout should be used. You can
+append ``@REV`` to the URL's path (before the fragment) to specify a revision.
+Additionally, you can also force the VCS being used by prepending the URL with
+a certain prefix. Currently available are:
+
+- ``svn+URL`` for Subversion,
+- ``git+URL`` for Git, and
+- ``hg+URL`` for Mercurial
+
+A more complete example would be:
+
+ ``vcs+proto://host/path@revision#egg=project-version``
+
+Be careful with the version. It should match the one inside the project files.
+If you want to disregard the version, you have to omit it both in the
+``requires`` and in the URL's fragment.
+
+This will do a checkout (or a clone, in Git and Mercurial parlance) to a
+temporary folder and run ``setup.py bdist_egg``.
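+
+Putting these pieces together (and keeping in mind the warning above that pip
+no longer processes dependency links), a purely illustrative Git-based link for
+the ``BazSpam`` example used earlier might look like:
+
+.. code-block:: python
+
+    setup(
+        ...,
+        install_requires=["BazSpam ==1.1"],
+        dependency_links=[
+            # clone the repository at tag 1.1 and build it as BazSpam 1.1
+            "git+https://example.com/bazspam/bazspam.git@1.1#egg=BazSpam-1.1",
+        ],
+    )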
+
+The ``dependency_links`` option takes the form of a list of URL strings. For
+example, this will cause a search of the specified page for eggs or source
+distributions, if the package's dependencies aren't already installed:
+
+.. tab:: setup.cfg
+
+ .. code-block:: ini
+
+ [options]
+ #...
+ dependency_links = http://peak.telecommunity.com/snapshots/
+
+.. tab:: setup.py
+
+ .. code-block:: python
+
+ setup(
+ ...,
+ dependency_links=[
+ "http://peak.telecommunity.com/snapshots/",
+ ],
+ )
+
+
+Optional dependencies
+=====================
+Setuptools allows you to declare dependencies that only get installed under
+specific circumstances. These dependencies are specified with the ``extras_require``
+keyword and are only installed if another package depends on them (either
+directly or indirectly). This makes it convenient to declare dependencies for
+ancillary functions such as "tests" and "docs".
+
+.. note::
+ ``tests_require`` is now deprecated
+
+For example, Project-A offers optional PDF support and requires two other
+dependencies for it to work:
+
+.. tab:: setup.cfg
+
+ .. code-block:: ini
+
+ [metadata]
+      name = Project-A
+
+ [options.extras_require]
+ PDF = ReportLab>=1.2; RXP
+
+
+.. tab:: setup.py
+
+ .. code-block:: python
+
+ setup(
+ name="Project-A",
+ ...,
+ extras_require={
+ "PDF": ["ReportLab>=1.2", "RXP"],
+ },
+ )
+
+.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_
+
+ .. code-block:: toml
+
+ # ...
+ [project.optional-dependencies]
+ PDF = ["ReportLab>=1.2", "RXP"]
+
+The name ``PDF`` is an arbitrary identifier of such a list of dependencies, to
+which other components can refer and have them installed.
+
+A use case for this approach is that other packages can use this "extra" for
+their own dependencies. For example, if "Project-B" needs "Project-A" with PDF
+support installed, it might declare the dependency like this:
+
+.. tab:: setup.cfg
+
+ .. code-block:: ini
+
+ [metadata]
+ name = Project-B
+ #...
+
+ [options]
+ #...
+ install_requires =
+ Project-A[PDF]
+
+.. tab:: setup.py
+
+ .. code-block:: python
+
+ setup(
+ name="Project-B",
+ install_requires=["Project-A[PDF]"],
+ ...,
+ )
+
+.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_
+
+ .. code-block:: toml
+
+ [project]
+ name = "Project-B"
+ # ...
+ dependencies = [
+ "Project-A[PDF]"
+ ]
+
+This will cause ReportLab to be installed along with project A, if project B is
+installed -- even if project A was already installed. In this way, a project
+can encapsulate groups of optional "downstream dependencies" under a feature
+name, so that packages that depend on it don't have to know what the downstream
+dependencies are. If a later version of Project A builds in PDF support and
+no longer needs ReportLab, or if it ends up needing other dependencies besides
+ReportLab in order to provide PDF support, Project B's setup information does
+not need to change, but the right packages will still be installed if needed.
+
+.. note::
+ Best practice: if a project ends up no longer needing any other packages to
+ support a feature, it should keep an empty requirements list for that feature
+ in its ``extras_require`` argument, so that packages depending on that feature
+ don't break (due to an invalid feature name).
+
+Historically, ``setuptools`` also supported extra dependencies in console
+scripts, for example:
+
+.. tab:: setup.cfg
+
+ .. code-block:: ini
+
+ [metadata]
+        name = Project-A
+ #...
+
+ [options]
+ #...
+ entry_points=
+ [console_scripts]
+ rst2pdf = project_a.tools.pdfgen [PDF]
+ rst2html = project_a.tools.htmlgen
+
+.. tab:: setup.py
+
+ .. code-block:: python
+
+ setup(
+ name="Project-A",
+ ...,
+ entry_points={
+ "console_scripts": [
+ "rst2pdf = project_a.tools.pdfgen [PDF]",
+ "rst2html = project_a.tools.htmlgen",
+ ],
+ },
+ )
+
+This syntax indicates that the entry point (in this case a console script)
+is only valid when the PDF extra is installed. It is up to the installer
+to determine how to handle the situation where PDF was not indicated
+(e.g. omit the console script, provide a warning when attempting to load
+the entry point, assume the extras are present and let the implementation
+fail later).
+
+.. warning::
+ ``pip`` and other tools might not support this use case for extra
+ dependencies, therefore this practice is considered **deprecated**.
+ See :doc:`PyPUG:specifications/entry-points`.
+
+
+Python requirement
+==================
+In some cases, you might need to specify the minimum required Python version.
+This can be configured as shown in the example below.
+
+.. tab:: setup.cfg
+
+ .. code-block:: ini
+
+ [metadata]
+ name = Project-B
+ #...
+
+ [options]
+ #...
+ python_requires = >=3.6
+
+.. tab:: setup.py
+
+ .. code-block:: python
+
+ setup(
+ name="Project-B",
+ python_requires=">=3.6",
+ ...,
+ )
+
+
+.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_
+
+ .. code-block:: toml
+
+ [project]
+ name = "Project-B"
+ requires-python = ">=3.6"
+ # ...
+
+----
+
+.. rubric:: Notes
+
+.. [#experimental]
+ While the ``[build-system]`` table should always be specified in the
+ ``pyproject.toml`` file, support for adding package metadata and build configuration
+ options via the ``[project]`` and ``[tool.setuptools]`` tables is still
+ experimental and might change (or be completely removed) in future releases.
+ See :doc:`/userguide/pyproject_config`.
diff --git a/docs/userguide/development_mode.rst b/docs/userguide/development_mode.rst
new file mode 100644
index 0000000..90bc567
--- /dev/null
+++ b/docs/userguide/development_mode.rst
@@ -0,0 +1,52 @@
+"Development Mode"
+==================
+
+Under normal circumstances, the ``distutils`` assume that you are going to
+build a distribution of your project, not use it in its "raw" or "unbuilt"
+form. However, if you were to use the ``distutils`` to build a distribution,
+you would have to rebuild and reinstall your project every time you made a
+change to it during development.
+
+Another problem that sometimes comes up with the ``distutils`` is that you may
+need to do development on two related projects at the same time. You may need
+to put both projects' packages in the same directory to run them, but need to
+keep them separate for revision control purposes. How can you do this?
+
+Setuptools allows you to deploy your projects for use in a common directory or
+staging area, but without copying any files. Thus, you can edit each project's
+code in its checkout directory, and only need to run build commands when you
+change a project's C extensions or similarly compiled files. You can even
+deploy a project into another project's checkout directory, if that's your
+preferred way of working (as opposed to using a common independent staging area
+or the site-packages directory).
+
+To do this, use the ``setup.py develop`` command. It works very similarly to
+``setup.py install``, except that it doesn't actually install anything.
+Instead, it creates a special ``.egg-link`` file in the deployment directory,
+that links to your project's source code. And, if your deployment directory is
+Python's ``site-packages`` directory, it will also update the
+``easy-install.pth`` file to include your project's source code, thereby making
+it available on ``sys.path`` for all programs using that Python installation.
+
+In addition, the ``develop`` command creates wrapper scripts in the target
+script directory that will run your in-development scripts after ensuring that
+all your ``install_requires`` packages are available on ``sys.path``.
+
+You can deploy the same project to multiple staging areas, e.g. if you have
+multiple projects on the same machine that are sharing the same project you're
+doing development work on.
+
+When you're done with a given development task, you can remove the project
+source from a staging area using ``setup.py develop --uninstall``, specifying
+the desired staging area if it's not the default.
+
+There are several options to control the precise behavior of the ``develop``
+command; see the section on the :ref:`develop <develop>` command below for more details.
+
+Note that you can also apply setuptools commands to non-setuptools projects,
+using commands like this::
+
+    python -c "import setuptools; exec(compile(open('setup.py').read(), 'setup.py', 'exec'))" develop
+
+That is, you can simply list the normal setup commands and options following
+the quoted part.
diff --git a/docs/userguide/distribution.rst b/docs/userguide/distribution.rst
new file mode 100644
index 0000000..db0f1a5
--- /dev/null
+++ b/docs/userguide/distribution.rst
@@ -0,0 +1,251 @@
+Tagging and "Daily Build" or "Snapshot" Releases
+------------------------------------------------
+
+When a set of related projects are under development, it may be important to
+track finer-grained version increments than you would normally use for e.g.
+"stable" releases. While stable releases might be measured in dotted numbers
+with alpha/beta/etc. status codes, development versions of a project often
+need to be tracked by revision or build number or even build date. This is
+especially true when projects in development need to refer to one another, and
+therefore may literally need an up-to-the-minute version of something!
+
+To support these scenarios, ``setuptools`` allows you to "tag" your source and
+egg distributions by adding one or more of the following to the project's
+"official" version identifier:
+
+* A manually-specified pre-release tag, such as "build" or "dev", or a
+ manually-specified post-release tag, such as a build or revision number
+ (``--tag-build=STRING, -bSTRING``)
+
+* An 8-character representation of the build date (``--tag-date, -d``), as
+ a postrelease tag
+
+You can add these tags by adding ``egg_info`` and the desired options to
+the command line ahead of the ``sdist`` or ``bdist`` commands that you want
+to generate a daily build or snapshot for. See the section below on the
+:ref:`egg_info <egg_info>` command for more details.
+
+(Also, before you release your project, be sure to see the section on
+:ref:`Specifying Your Project's Version` for more information about how pre- and
+post-release tags affect how version numbers are interpreted. This is
+important in order to make sure that dependency processing tools will know
+which versions of your project are newer than others.)
+
+Finally, if you are creating builds frequently, and either building them in a
+downloadable location or are copying them to a distribution server, you should
+probably also check out the :ref:`rotate <rotate>` command, which lets you automatically
+delete all but the N most-recently-modified distributions matching a glob
+pattern. So, you can use a command line like::
+
+ setup.py egg_info -rbDEV bdist_egg rotate -m.egg -k3
+
+to build an egg whose version info includes "DEV-rNNNN" (where NNNN is the
+most recent Subversion revision that affected the source tree), and then
+delete any egg files from the distribution directory except for the three
+that were built most recently.
+
+If you have to manage automated builds for multiple packages, each with
+different tagging and rotation policies, you may also want to check out the
+:ref:`alias <alias>` command, which would let each package define an alias like ``daily``
+that would perform the necessary tag, build, and rotate commands. Then, a
+simpler script or cron job could just run ``setup.py daily`` in each project
+directory. (And, you could also define sitewide or per-user default versions
+of the ``daily`` alias, so that projects that didn't define their own would
+use the appropriate defaults.)
+
+Generating Source Distributions
+-------------------------------
+
+``setuptools`` enhances the distutils' default algorithm for source file
+selection with pluggable endpoints for looking up files to include. If you are
+using a revision control system, and your source distributions only need to
+include files that you're tracking in revision control, use a corresponding
+plugin instead of writing a ``MANIFEST.in`` file. See the section below on
+:ref:`Adding Support for Revision Control Systems` for information on plugins.
+
+If you need to include automatically generated files, or files that are kept in
+an unsupported revision control system, you'll need to create a ``MANIFEST.in``
+file to specify any files that the default file location algorithm doesn't
+catch. See the distutils documentation for more information on the format of
+the ``MANIFEST.in`` file.
+
+But, be sure to ignore any part of the distutils documentation that deals with
+``MANIFEST`` or how it's generated from ``MANIFEST.in``; setuptools shields you
+from these issues and doesn't work the same way in any case. Unlike the
+distutils, setuptools regenerates the source distribution manifest file
+every time you build a source distribution, and it builds it inside the
+project's ``.egg-info`` directory, out of the way of your main project
+directory. You therefore need not worry about whether it is up-to-date or not.
+
+Indeed, because setuptools' approach to determining the contents of a source
+distribution is so much simpler, its ``sdist`` command omits nearly all of
+the options that the distutils' more complex ``sdist`` process requires. For
+all practical purposes, you'll probably use only the ``--formats`` option, if
+you use any option at all.
+
+
+Making "Official" (Non-Snapshot) Releases
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+When you make an official release, creating source or binary distributions,
+you will need to override the tag settings from ``setup.cfg``, so that you
+don't end up registering versions like ``foobar-0.7a1.dev-r34832``. This is
+easy to do if you are developing on the trunk and using tags or branches for
+your releases - just make the change to ``setup.cfg`` after branching or
+tagging the release, so the trunk will still produce development snapshots.
+
+Alternately, if you are not branching for releases, you can override the
+default version options on the command line, using something like::
+
+ setup.py egg_info -Db "" sdist bdist_egg
+
+The first part of this command (``egg_info -Db ""``) will override the
+configured tag information, before creating source and binary eggs. Thus, these
+commands will use the plain version from your ``setup.py``, without adding the
+build designation string.
+
+Of course, if you will be doing this a lot, you may wish to create a personal
+alias for this operation, e.g.::
+
+ setup.py alias -u release egg_info -Db ""
+
+You can then use it like this::
+
+ setup.py release sdist bdist_egg
+
+Or of course you can create more elaborate aliases that do all of the above.
+See the sections below on the :ref:`egg_info <egg_info>` and
+:ref:`alias <alias>` commands for more ideas.
+
+Distributing Extensions compiled with Cython
+--------------------------------------------
+
+``setuptools`` will detect at build time whether Cython is installed or not.
+If Cython is not found, ``setuptools`` will ignore ``.pyx`` files.
+
+To ensure Cython is available, include it in the ``requires`` list of the
+``[build-system]`` table in your ``pyproject.toml``::
+
+ [build-system]
+ requires=[..., "cython"]
+
+When building with pip 10 or later, that declaration is sufficient to make
+Cython available during the build. For broader compatibility, you can also
+declare the dependency in the ``setup_requires`` option of ``setup.cfg``::
+
+ [options]
+ setup_requires =
+ ...
+ cython
+
+As long as Cython is present in the build environment, ``setuptools`` includes
+transparent support for building Cython extensions, provided the extensions
+are defined using ``setuptools.Extension``.
+
+If you follow these rules, you can safely list ``.pyx`` files as the sources
+of your ``Extension`` objects in the setup script; when Cython is available at
+build time, ``setuptools`` will use it to build them.
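+
+For example, a minimal sketch (the package and module names here are
+hypothetical)::
+
+    from setuptools import Extension, setup
+
+    setup(
+        # ...
+        ext_modules=[
+            # the .pyx source is listed directly; setuptools compiles it with
+            # Cython whenever Cython is available in the build environment
+            Extension("mypkg.fast", sources=["mypkg/fast.pyx"]),
+        ],
+    )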
+
+Of course, for this to work, your source distributions must include the C
+code generated by Cython, as well as your original ``.pyx`` files. This means
+that you will probably want to include current ``.c`` files in your revision
+control system, rebuilding them whenever you check changes in for the ``.pyx``
+source files. This will ensure that people tracking your project in a revision
+control system will be able to build it even if they don't have Cython
+installed, and that your source releases will be similarly usable with or
+without Cython.
+
+
+.. _Specifying Your Project's Version:
+
+Specifying Your Project's Version
+---------------------------------
+
+Setuptools can work well with most versioning schemes. Over the years,
+setuptools has tried to closely follow the
+`PEP 440 <https://www.python.org/dev/peps/pep-0440/>`_ scheme, but it
+also supports legacy versions. There are, however, a
+few special things to watch out for, in order to ensure that setuptools and
+other tools can always tell what version of your package is newer than another
+version. Knowing these things will also help you correctly specify what
+versions of other projects your project depends on.
+
+A version consists of an alternating series of release numbers and pre-release
+or post-release tags. A release number is a series of digits punctuated by
+dots, such as ``2.4`` or ``0.5``. Each series of digits is treated
+numerically, so releases ``2.1`` and ``2.1.0`` are different ways to spell the
+same release number, denoting the first subrelease of release 2. But ``2.10``
+is the *tenth* subrelease of release 2, and so is a different and newer release
+from ``2.1`` or ``2.1.0``. Leading zeros within a series of digits are also
+ignored, so ``2.01`` is the same as ``2.1``, and different from ``2.0.1``.
+
+Following a release number, you can have either a pre-release or post-release
+tag. Pre-release tags make a version be considered *older* than the version
+they are appended to. So, revision ``2.4`` is *newer* than revision ``2.4c1``,
+which in turn is newer than ``2.4b1`` or ``2.4a1``. Postrelease tags make
+a version be considered *newer* than the version they are appended to. So,
+revisions like ``2.4-1`` are newer than ``2.4``, but *older*
+than ``2.4.1`` (which has a higher release number).
+
+Legacy versions (for example, ``2.4pl1``) are considered older than
+non-legacy versions. Taking that into account, the revision ``2.4pl1``
+is *older* than ``2.4``.
+
+A pre-release tag is a series of letters that are alphabetically before
+"final". Some examples of prerelease tags would include ``alpha``, ``beta``,
+``a``, ``c``, ``dev``, and so on. You do not have to place a dot or dash
+before the prerelease tag if it's immediately after a number, but it's okay to
+do so if you prefer. Thus, ``2.4c1`` and ``2.4.c1`` and ``2.4-c1`` all
+represent release candidate 1 of version ``2.4``, and are treated as identical
+by setuptools.
+
+In addition, there are three special prerelease tags that are treated as if
+they were the letter ``c``: ``pre``, ``preview``, and ``rc``. So, version
+``2.4rc1``, ``2.4pre1`` and ``2.4preview1`` are all the exact same version as
+``2.4c1``, and are treated as identical by setuptools.
+
+A post-release tag is either a series of letters that are alphabetically
+greater than or equal to "final", or a dash (``-``). Post-release tags are
+generally used to separate patch numbers, port numbers, build numbers, revision
+numbers, or date stamps from the release number. For example, the version
+``2.4-r1263`` might denote Subversion revision 1263 of a post-release patch of
+version ``2.4``. Or you might use ``2.4-20051127`` to denote a date-stamped
+post-release.
+
+Notice that after each pre or post-release tag, you are free to place another
+release number, followed again by more pre- or post-release tags. For example,
+``0.6a9.dev-r41475`` could denote Subversion revision 41475 of the
+in-development version of the ninth alpha of release 0.6. Notice that ``dev`` is
+a pre-release tag, so this version is a *lower* version number than ``0.6a9``,
+which would be the actual ninth alpha of release 0.6. But the ``-r41475`` is
+a post-release tag, so this version is *newer* than ``0.6a9.dev``.
+
+For the most part, setuptools' interpretation of version numbers is intuitive,
+but here are a few tips that will keep you out of trouble in the corner cases:
+
+* Don't stick adjoining pre-release tags together without a dot or number
+ between them. Version ``1.9adev`` is the ``adev`` prerelease of ``1.9``,
+ *not* a development pre-release of ``1.9a``. Use ``.dev`` instead, as in
+ ``1.9a.dev``, or separate the prerelease tags with a number, as in
+ ``1.9a0dev``. ``1.9a.dev``, ``1.9a0dev``, and even ``1.9.a.dev`` are
+ identical versions from setuptools' point of view, so you can use whatever
+ scheme you prefer.
+
+* If you want to be certain that your chosen numbering scheme works the way
+ you think it will, you can use the ``pkg_resources.parse_version()`` function
+ to compare different version numbers::
+
+ >>> from pkg_resources import parse_version
+ >>> parse_version("1.9.a.dev") == parse_version("1.9a0dev")
+ True
+ >>> parse_version("2.1-rc2") < parse_version("2.1")
+ True
+ >>> parse_version("0.6a9dev-r41475") < parse_version("0.6a9")
+ True
+
+Once you've decided on a version numbering scheme for your project, you can
+have setuptools automatically tag your in-development releases with various
+pre- or post-release tags. See the following sections for more details:
+
+* `Tagging and "Daily Build" or "Snapshot" Releases`_
+* The :ref:`egg_info <egg_info>` command
diff --git a/docs/userguide/entry_point.rst b/docs/userguide/entry_point.rst
new file mode 100644
index 0000000..b97419c
--- /dev/null
+++ b/docs/userguide/entry_point.rst
@@ -0,0 +1,181 @@
+.. _`entry_points`:
+
+============
+Entry Points
+============
+
+Packages may provide commands to be run at the console (console scripts),
+such as the ``pip`` command. These commands are defined for a package
+as a specific kind of entry point in the ``setup.cfg`` or
+``setup.py``.
+
+
+Console Scripts
+===============
+
+First consider an example without entry points. Imagine a package
+defined thus:
+
+.. code-block:: bash
+
+ timmins/
+ timmins/__init__.py
+ timmins/__main__.py
+ setup.cfg # or setup.py
+ #other necessary files
+
+with ``__init__.py`` as:
+
+.. code-block:: python
+
+ def hello_world():
+ print("Hello world")
+
+and ``__main__.py`` providing a hook:
+
+.. code-block:: python
+
+ from . import hello_world
+
+ if __name__ == '__main__':
+ hello_world()
+
+After installing the package, the function may be invoked through the
+`runpy <https://docs.python.org/3/library/runpy.html>`_ module:
+
+.. code-block:: bash
+
+ python -m timmins
+
+Adding a console script entry point allows the package to define a
+user-friendly name for installers of the package to execute. Installers
+like pip will create wrapper scripts to execute a function. In the
+above example, to create a command ``hello-world`` that invokes
+``timmins.hello_world``, add a console script entry point to
+``setup.cfg``:
+
+.. tab:: setup.cfg
+
+ .. code-block:: ini
+
+ [options.entry_points]
+ console_scripts =
+ hello-world = timmins:hello_world
+
+.. tab:: setup.py
+
+ .. code-block:: python
+
+ from setuptools import setup
+
+ setup(
+ name='timmins',
+ version='0.0.1',
+ packages=['timmins'],
+ # ...
+ entry_points={
+ 'console_scripts': [
+ 'hello-world=timmins:hello_world',
+ ]
+ }
+ )
+
+
+After installing the package, a user may invoke that function by simply calling
+``hello-world`` on the command line.
+
+The syntax for entry points is specified as follows:
+
+.. code-block:: ini
+
+ <name> = [<package>.[<subpackage>.]]<module>[:<object>.<object>]
+
+where ``name`` is the name for the script you want to create, the left hand
+side of ``:`` is the module that contains your function and the right hand
+side is the object you want to invoke (e.g. a function).
+
+In addition to ``console_scripts``, Setuptools supports ``gui_scripts``, which
+will launch a GUI application without running in a terminal window.
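+
+For example, a hedged sketch of such a declaration (``timmins.gui:run`` is a
+hypothetical function that would start the GUI):
+
+.. code-block:: python
+
+    setup(
+        # ...
+        entry_points={
+            "gui_scripts": [
+                "hello-world-gui = timmins.gui:run",
+            ]
+        }
+    )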
+
+
+.. _dynamic discovery of services and plugins:
+
+Advertising Behavior
+====================
+
+Console scripts are one use of the more general concept of entry points. Entry
+points more generally allow a packager to advertise behavior for discovery by
+other libraries and applications. This feature enables "plug-in"-like
+functionality, where one library solicits entry points and any number of other
+libraries provide those entry points.
+
+A good example of this plug-in behavior can be seen in
+`pytest plugins <https://docs.pytest.org/en/latest/writing_plugins.html>`_,
+where pytest is a test framework that allows other libraries to extend
+or modify its functionality through the ``pytest11`` entry point.
+
+The console scripts work similarly, where libraries advertise their commands
+and tools like ``pip`` create wrapper scripts that invoke those commands.
+
+For a project wishing to solicit entry points, Setuptools recommends the
+`importlib.metadata <https://docs.python.org/3/library/importlib.metadata.html>`_
+module (part of stdlib since Python 3.8) or its backport,
+:pypi:`importlib_metadata`.
+
+For example, to find the console script entry points from the example above:
+
+.. code-block:: pycon
+
+ >>> from importlib import metadata
+ >>> eps = metadata.entry_points()['console_scripts']
+
+``eps`` is now a list of ``EntryPoint`` objects, one of which corresponds
+to the ``hello-world = timmins:hello_world`` defined above. Each ``EntryPoint``
+contains the ``name``, ``group``, and ``value``. It also supplies a ``.load()``
+method to import and load that entry point (module or object).
+
+Entry points are not limited to the ``console_scripts`` group: a package may
+advertise the same function under a custom group of its own, for example
+``my.plugins``:
+
+.. code-block:: ini
+
+ [options.entry_points]
+ my.plugins =
+ hello-world = timmins:hello_world
+
+Then, a different project wishing to load 'my.plugins' plugins could run
+the following routine to load (and invoke) such plugins:
+
+.. code-block:: pycon
+
+ >>> from importlib import metadata
+ >>> eps = metadata.entry_points()['my.plugins']
+ >>> for ep in eps:
+ ... plugin = ep.load()
+ ... plugin()
+ ...
+
+The project soliciting the entry points need not have any dependency
+on, or prior knowledge of, the libraries implementing the entry points, and
+downstream users are able to compose functionality by pulling together
+libraries implementing the entry points.
+
+
+Dependency Management
+=====================
+
+Some entry points may require additional dependencies to properly function.
+For such an entry point, declare in square brackets any number of dependency
+``extras`` following the entry point definition. Such entry points will only
+be viable if their extras were declared and installed. See the
+:doc:`guide on dependencies management <dependency_management>` for
+more information on defining extra requirements. Consider from the
+above example:
+
+.. code-block:: ini
+
+ [options.entry_points]
+ console_scripts =
+ hello-world = timmins:hello_world [pretty-printer]
+
+In this case, the ``hello-world`` script is only viable if the ``pretty-printer``
+extra is indicated, and so a plugin host might exclude that entry point
+(i.e. not install a console script) if the relevant extra dependencies are not
+installed.
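+
+For completeness, here is a hedged sketch of the equivalent ``setup.py``
+declaration (the ``pretty-printer`` extra and its ``rich`` dependency are
+hypothetical):
+
+.. code-block:: python
+
+    setup(
+        name="timmins",
+        # ...
+        extras_require={"pretty-printer": ["rich"]},
+        entry_points={
+            "console_scripts": [
+                "hello-world = timmins:hello_world [pretty-printer]",
+            ],
+        },
+    )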
diff --git a/docs/userguide/extension.rst b/docs/userguide/extension.rst
new file mode 100644
index 0000000..21fb05b
--- /dev/null
+++ b/docs/userguide/extension.rst
@@ -0,0 +1,250 @@
+.. _Creating ``distutils`` Extensions:
+
+Creating ``distutils`` Extensions
+=================================
+
+It can be hard to add new commands or setup arguments to the distutils. But
+the ``setuptools`` package makes it a bit easier, by allowing you to distribute
+a distutils extension as a separate project, and then have projects that need
+the extension just refer to it in their ``setup_requires`` argument.
+
+With ``setuptools``, your distutils extension projects can hook in new
+commands and ``setup()`` arguments just by defining "entry points". These
+are mappings from command or argument names to a specification of where to
+import a handler from. (See the section on :ref:`Dynamic Discovery of
+Services and Plugins` above for some more background on entry points.)
+
+
+Adding Commands
+---------------
+
+You can add new ``setup`` commands by defining entry points in the
+``distutils.commands`` group. For example, if you wanted to add a ``foo``
+command, you might add something like this to your distutils extension
+project's setup script::
+
+ setup(
+ # ...
+ entry_points={
+ "distutils.commands": [
+ "foo = mypackage.some_module:foo",
+ ],
+ },
+ )
+
+(Assuming, of course, that the ``foo`` class in ``mypackage.some_module`` is
+a ``setuptools.Command`` subclass.)
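+
+A minimal sketch of such a command class might look like this (the option and
+its default message are purely illustrative)::
+
+    from setuptools import Command
+
+    class foo(Command):
+        """Print a configurable message (illustrative example only)."""
+
+        description = "demonstrate a custom command"
+        user_options = [("message=", "m", "message to print")]
+
+        def initialize_options(self):
+            self.message = "hello from the foo command"
+
+        def finalize_options(self):
+            pass
+
+        def run(self):
+            print(self.message)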
+
+Once a project containing such entry points has been activated on ``sys.path``,
+(e.g. by running "install" or "develop" with a site-packages installation
+directory) the command(s) will be available to any ``setuptools``-based setup
+scripts. It is not necessary to use the ``--command-packages`` option or
+to monkeypatch the ``distutils.command`` package to install your commands;
+``setuptools`` automatically adds a wrapper to the distutils to search for
+entry points in the active distributions on ``sys.path``. In fact, this is
+how setuptools' own commands are installed: the setuptools project's setup
+script defines entry points for them!
+
+.. note::
+    When creating commands, and especially when defining custom ways of building
+    compiled extensions (for example via ``build_ext``), consider
+    handling exceptions such as ``CompileError``, ``LinkError``, ``LibError``,
+    among others. These exceptions are available in the ``setuptools.errors``
+    module.
+
+
+Adding ``setup()`` Arguments
+----------------------------
+
+.. warning:: Adding arguments to setup is discouraged as such arguments
+ are only supported through imperative execution and not supported through
+ declarative config.
+
+Sometimes, your commands may need additional arguments to the ``setup()``
+call. You can enable this by defining entry points in the
+``distutils.setup_keywords`` group. For example, if you wanted a ``setup()``
+argument called ``bar_baz``, you might add something like this to your
+distutils extension project's setup script::
+
+ setup(
+ # ...
+ entry_points={
+ "distutils.commands": [
+ "foo = mypackage.some_module:foo",
+ ],
+ "distutils.setup_keywords": [
+ "bar_baz = mypackage.some_module:validate_bar_baz",
+ ],
+ },
+ )
+
+The idea here is that the entry point defines a function that will be called
+to validate the ``setup()`` argument, if it's supplied. The ``Distribution``
+object will have the initial value of the attribute set to ``None``, and the
+validation function will only be called if the ``setup()`` call sets it to
+a non-None value. Here's an example validation function::
+
+ def assert_bool(dist, attr, value):
+ """Verify that value is True, False, 0, or 1"""
+ if bool(value) != value:
+ raise DistutilsSetupError(
+ "%r must be a boolean value (got %r)" % (attr,value)
+ )
+
+Your function should accept three arguments: the ``Distribution`` object,
+the attribute name, and the attribute value. It should raise a
+``DistutilsSetupError`` (from the ``distutils.errors`` module) if the argument
+is invalid. Remember, your function will only be called with non-None values,
+and the default value of arguments defined this way is always None. So, your
+commands should always be prepared for the possibility that the attribute will
+be ``None`` when they access it later.
+
+If more than one active distribution defines an entry point for the same
+``setup()`` argument, *all* of them will be called. This allows multiple
+distutils extensions to define a common argument, as long as they agree on
+what values of that argument are valid.
+
+Also note that as with commands, it is not necessary to subclass or monkeypatch
+the distutils ``Distribution`` class in order to add your arguments; it is
+sufficient to define the entry points in your extension, as long as any setup
+script using your extension lists your project in its ``setup_requires``
+argument.
+
+
+Customizing Distribution Options
+--------------------------------
+
+Plugins may wish to extend or alter the options on a Distribution object to
+suit the purposes of that project. For example, a tool that infers the
+``Distribution.version`` from SCM-metadata may need to hook into the
+option finalization. To enable this feature, Setuptools offers an entry
+point "setuptools.finalize_distribution_options". That entry point must
+be a callable taking one argument (the Distribution instance).
+
+If the callable has an ``.order`` property, that value will be used to
+determine the order in which the hook is called. Lower numbers are called
+first and the default is zero (0).
+
+Plugins may read, alter, and set properties on the distribution, but each
+plugin is encouraged to load the configuration/settings for their behavior
+independently.
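+
+As a hedged sketch, a plugin might provide a hook like the following (the
+module path and the placeholder version it injects are hypothetical)::
+
+    # mypackage/hooks.py
+    def finalize_distribution_options(dist):
+        # give projects that did not set a version a development placeholder
+        if not dist.metadata.version:
+            dist.metadata.version = "0.0.0.dev0"
+
+    # optional: lower numbers run earlier; the default order is 0
+    finalize_distribution_options.order = 10
+
+The hook would then be advertised through the corresponding entry point group::
+
+    setup(
+        # ...
+        entry_points={
+            "setuptools.finalize_distribution_options": [
+                "my_hook = mypackage.hooks:finalize_distribution_options",
+            ],
+        },
+    )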
+
+
+.. _Adding new EGG-INFO Files:
+
+Adding new EGG-INFO Files
+-------------------------
+
+Some extensible applications or frameworks may want to allow third parties to
+develop plugins with application or framework-specific metadata included in
+the plugins' EGG-INFO directory, for easy access via the ``pkg_resources``
+metadata API. The easiest way to allow this is to create a distutils extension
+to be used from the plugin projects' setup scripts (via ``setup_requires``)
+that defines a new setup keyword, and then uses that data to write an EGG-INFO
+file when the ``egg_info`` command is run.
+
+The ``egg_info`` command looks for extension points in an ``egg_info.writers``
+group, and calls them to write the files. Here's a simple example of a
+distutils extension defining a setup argument ``foo_bar``, which is a list of
+lines that will be written to ``foo_bar.txt`` in the EGG-INFO directory of any
+project that uses the argument::
+
+ setup(
+ # ...
+ entry_points={
+ "distutils.setup_keywords": [
+ "foo_bar = setuptools.dist:assert_string_list",
+ ],
+ "egg_info.writers": [
+ "foo_bar.txt = setuptools.command.egg_info:write_arg",
+ ],
+ },
+ )
+
+This simple example makes use of two utility functions defined by setuptools
+for its own use: a routine to validate that a setup keyword is a sequence of
+strings, and another one that looks up a setup argument and writes it to
+a file. Here's what the writer utility looks like::
+
+ def write_arg(cmd, basename, filename):
+ argname = os.path.splitext(basename)[0]
+ value = getattr(cmd.distribution, argname, None)
+ if value is not None:
+ value = "\n".join(value) + "\n"
+ cmd.write_or_delete_file(argname, filename, value)
+
+As you can see, ``egg_info.writers`` entry points must be a function taking
+three arguments: a ``egg_info`` command instance, the basename of the file to
+write (e.g. ``foo_bar.txt``), and the actual full filename that should be
+written to.
+
+In general, writer functions should honor the command object's ``dry_run``
+setting when writing files, and use the ``distutils.log`` object to do any
+console output. The easiest way to conform to this requirement is to use
+the ``cmd`` object's ``write_file()``, ``delete_file()``, and
+``write_or_delete_file()`` methods exclusively for your file operations. See
+those methods' docstrings for more details.
+
+
+.. _Adding Support for Revision Control Systems:
+
+Adding Support for Revision Control Systems
+-------------------------------------------------
+
+If the files you want to include in the source distribution are tracked using
+Git, Mercurial or SVN, you can use the following packages to achieve that:
+
+- Git and Mercurial: :pypi:`setuptools_scm`
+- SVN: :pypi:`setuptools_svn`
+
+If you would like to create a plugin for ``setuptools`` to find files tracked
+by another revision control system, you can do so by adding an entry point to
+the ``setuptools.file_finders`` group. The entry point should be a function
+accepting a single directory name, and should yield all the filenames within
+that directory (and any subdirectories thereof) that are under revision
+control.
+
+For example, if you were going to create a plugin for a revision control system
+called "foobar", you would write a function something like this:
+
+.. code-block:: python
+
+ def find_files_for_foobar(dirname):
+ ... # loop to yield paths that start with `dirname`
+
+And you would register it in a setup script using something like this::
+
+ entry_points={
+ "setuptools.file_finders": [
+ "foobar = my_foobar_module:find_files_for_foobar",
+ ]
+ }
+
+Then, anyone who wants to use your plugin can simply install it, and their
+local setuptools installation will be able to find the necessary files.
+
+It is not necessary to distribute source control plugins with projects that
+simply use the other source control system, or to specify the plugins in
+``setup_requires``. When you create a source distribution with the ``sdist``
+command, setuptools automatically records what files were found in the
+``SOURCES.txt`` file. That way, recipients of source distributions don't need
+to have revision control at all. However, if someone is working on a package
+by checking out with that system, they will need the same plugin(s) that the
+original author is using.
+
+A few important points for writing revision control file finders:
+
+* Your finder function MUST return relative paths, created by appending to the
+ passed-in directory name. Absolute paths are NOT allowed, nor are relative
+ paths that reference a parent directory of the passed-in directory.
+
+* Your finder function MUST accept an empty string as the directory name,
+ meaning the current directory. You MUST NOT convert this to a dot; just
+ yield relative paths. So, yielding a subdirectory named ``some/dir`` under
+ the current directory should NOT be rendered as ``./some/dir`` or
+ ``/somewhere/some/dir``, but *always* as simply ``some/dir``
+
+* Your finder function SHOULD NOT raise any errors, and SHOULD deal gracefully
+  with the absence of needed programs (i.e., ones belonging to the revision
+  control system itself). It *may*, however, use ``distutils.log.warn()`` to
+  inform the user of the missing program(s).
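+
+Putting these points together, here is a hedged sketch of a finder for the
+hypothetical "foobar" system (the ``foobar ls-files`` command is made up for
+illustration):
+
+.. code-block:: python
+
+    import os
+    import subprocess
+
+    def find_files_for_foobar(dirname):
+        """Yield paths tracked by the hypothetical "foobar" tool under dirname."""
+        try:
+            # an empty dirname means the current directory, so pass cwd=None
+            output = subprocess.check_output(
+                ["foobar", "ls-files"], cwd=dirname or None, text=True
+            )
+        except (OSError, subprocess.CalledProcessError):
+            # the tool is missing or failed: report no files instead of raising
+            return
+        for line in output.splitlines():
+            path = line.strip()
+            if path:
+                # keep paths relative, rooted at the passed-in directory
+                yield os.path.join(dirname, path) if dirname else path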
diff --git a/docs/userguide/functionalities_rewrite.rst b/docs/userguide/functionalities_rewrite.rst
new file mode 100644
index 0000000..d0997ca
--- /dev/null
+++ b/docs/userguide/functionalities_rewrite.rst
@@ -0,0 +1,9 @@
+========================================================
+Using setuptools to package and distribute your project
+========================================================
+
+``setuptools`` offers a variety of functionalities that make it easy to
+build and distribute your Python package. Here we provide an overview of
+the commonly used ones.
+
+
diff --git a/docs/userguide/index.rst b/docs/userguide/index.rst
new file mode 100644
index 0000000..49655ac
--- /dev/null
+++ b/docs/userguide/index.rst
@@ -0,0 +1,38 @@
+==================================================
+Building and Distributing Packages with Setuptools
+==================================================
+
+``Setuptools`` is a collection of enhancements to the Python ``distutils``
+that allow developers to more easily build and
+distribute Python packages, especially ones that have dependencies on other
+packages.
+
+Packages built and distributed using ``setuptools`` look to the user like
+ordinary Python packages based on the ``distutils``.
+
+Transition to PEP 517
+=====================
+
+Since setuptools no longer serves as the default build tool, one must explicitly
+opt in (by providing a :file:`pyproject.toml` file) to use this library. The
+user-facing part is provided by tools such as pip, while the
+backend interface is described :doc:`in this document <../build_meta>`. The
+quickstart provides an overview of the new workflow.
+
+.. toctree::
+ :maxdepth: 1
+
+ quickstart
+ package_discovery
+ entry_point
+ dependency_management
+ datafiles
+ development_mode
+ distribution
+ extension
+ declarative_config
+ pyproject_config
+ keywords
+ commands
+ functionalities_rewrite
+ miscellaneous
diff --git a/docs/userguide/keywords.rst b/docs/userguide/keywords.rst
new file mode 100644
index 0000000..5388ffe
--- /dev/null
+++ b/docs/userguide/keywords.rst
@@ -0,0 +1,163 @@
+New and Changed ``setup()`` Keywords
+====================================
+
+The following keyword arguments to ``setup()`` are added or changed by
+``setuptools``. All of them are optional; you do not have to supply them
+unless you need the associated ``setuptools`` feature.
+
+``include_package_data``
+ If set to ``True``, this tells ``setuptools`` to automatically include any
+ data files it finds inside your package directories that are specified by
+ your ``MANIFEST.in`` file. For more information, see the section on
+ :ref:`Including Data Files`.
+
+``exclude_package_data``
+ A dictionary mapping package names to lists of glob patterns that should
+ be *excluded* from your package directories. You can use this to trim back
+ any excess files included by ``include_package_data``. For a complete
+ description and examples, see the section on :ref:`Including Data Files`.
+
+``package_data``
+ A dictionary mapping package names to lists of glob patterns. For a
+ complete description and examples, see the section on :ref:`Including
+ Data Files`. You do not need to use this option if you are using
+ ``include_package_data``, unless you need to add e.g. files that are
+ generated by your setup script and build process. (And are therefore not
+ in source control or are files that you don't want to include in your
+ source distribution.)
+
+``zip_safe``
+ A boolean (True or False) flag specifying whether the project can be
+ safely installed and run from a zip file. If this argument is not
+ supplied, the ``bdist_egg`` command will have to analyze all of your
+ project's contents for possible problems each time it builds an egg.
+
+``install_requires``
+ A string or list of strings specifying what other distributions need to
+ be installed when this one is. See the section on :ref:`Declaring
+ Dependencies` for details and examples of the format of this argument.
+
+``entry_points``
+ A dictionary mapping entry point group names to strings or lists of strings
+ defining the entry points. Entry points are used to support dynamic
+ discovery of services or plugins provided by a project. See :ref:`Dynamic
+ Discovery of Services and Plugins` for details and examples of the format
+ of this argument. In addition, this keyword is used to support
+ :ref:`Automatic Script Creation <entry_points>`.
+
+``extras_require``
+ A dictionary mapping names of "extras" (optional features of your project)
+ to strings or lists of strings specifying what other distributions must be
+ installed to support those features. See the section on :ref:`Declaring
+ Dependencies` for details and examples of the format of this argument.
+
+``python_requires``
+ A string corresponding to a version specifier (as defined in PEP 440) for
+ the Python version, used to specify the Requires-Python defined in PEP 345.
+
+``setup_requires``
+ A string or list of strings specifying what other distributions need to
+ be present in order for the *setup script* to run. ``setuptools`` will
+ attempt to obtain these (using pip if available) before processing the
+ rest of the setup script or commands. This argument is needed if you
+ are using distutils extensions as part of your build process; for
+ example, extensions that process setup() arguments and turn them into
+ EGG-INFO metadata files.
+
+ (Note: projects listed in ``setup_requires`` will NOT be automatically
+ installed on the system where the setup script is being run. They are
+ simply downloaded to the ./.eggs directory if they're not locally available
+ already. If you want them to be installed, as well as being available
+ when the setup script is run, you should add them to ``install_requires``
+ **and** ``setup_requires``.)
+
+``dependency_links``
+ A list of strings naming URLs to be searched when satisfying dependencies.
+ These links will be used if needed to install packages specified by
+ ``setup_requires`` or ``tests_require``. They will also be written into
+ the egg's metadata for use during install by tools that support them.
+
+``namespace_packages``
+ A list of strings naming the project's "namespace packages". A namespace
+ package is a package that may be split across multiple project
+ distributions. For example, Zope 3's ``zope`` package is a namespace
+ package, because subpackages like ``zope.interface`` and ``zope.publisher``
+ may be distributed separately. The egg runtime system can automatically
+ merge such subpackages into a single parent package at runtime, as long
+ as you declare them in each project that contains any subpackages of the
+ namespace package, and as long as the namespace package's ``__init__.py``
+ does not contain any code other than a namespace declaration. See the
+ section below on :ref:`Namespace Packages` for more information.
+
+``test_suite``
+ A string naming a ``unittest.TestCase`` subclass (or a package or module
+ containing one or more of them, or a method of such a subclass), or naming
+ a function that can be called with no arguments and returns a
+ ``unittest.TestSuite``. If the named suite is a module, and the module
+ has an ``additional_tests()`` function, it is called and the results are
+ added to the tests to be run. If the named suite is a package, any
+ submodules and subpackages are recursively added to the overall test suite.
+
+ Specifying this argument enables use of the :ref:`test <test>` command to run the
+ specified test suite, e.g. via ``setup.py test``. See the section on the
+ :ref:`test <test>` command below for more details.
+
+ New in 41.5.0: Deprecated the test command.
+
+``tests_require``
+ If your project's tests need one or more additional packages besides those
+ needed to install it, you can use this option to specify them. It should
+ be a string or list of strings specifying what other distributions need to
+ be present for the package's tests to run. When you run the ``test``
+ command, ``setuptools`` will attempt to obtain these (using pip if
+ available). Note that these required projects will *not* be installed on
+ the system where the tests are run, but only downloaded to the project's setup
+ directory if they're not already installed locally.
+
+ New in 41.5.0: Deprecated the test command.
+
+.. _test_loader:
+
+``test_loader``
+ If you would like to use a different way of finding tests to run than what
+ setuptools normally uses, you can specify a module name and class name in
+ this argument. The named class must be instantiable with no arguments, and
+ its instances must support the ``loadTestsFromNames()`` method as defined
+ in the Python ``unittest`` module's ``TestLoader`` class. Setuptools will
+ pass only one test "name" in the ``names`` argument: the value supplied for
+ the ``test_suite`` argument. The loader you specify may interpret this
+ string in any way it likes, as there are no restrictions on what may be
+ contained in a ``test_suite`` string.
+
+ The module name and class name must be separated by a ``:``. The default
+ value of this argument is ``"setuptools.command.test:ScanningLoader"``. If
+ you want to use the default ``unittest`` behavior, you can specify
+ ``"unittest:TestLoader"`` as your ``test_loader`` argument instead. This
+ will prevent automatic scanning of submodules and subpackages.
+
+ The module and class you specify here may be contained in another package,
+ as long as you use the ``tests_require`` option to ensure that the package
+ containing the loader class is available when the ``test`` command is run.
+
+ New in 41.5.0: Deprecated the test command.
+
+``eager_resources``
+ A list of strings naming resources that should be extracted together, if
+ any of them is needed, or if any C extensions included in the project are
+ imported. This argument is only useful if the project will be installed as
+ a zipfile, and there is a need to have all of the listed resources be
+ extracted to the filesystem *as a unit*. Resources listed here
+ should be "/"-separated paths, relative to the source root, so to list a
+ resource ``foo.png`` in package ``bar.baz``, you would include the string
+ ``bar/baz/foo.png`` in this argument.
+
+ If you only need to obtain resources one at a time, or you don't have any C
+ extensions that access other files in the project (such as data files or
+ shared libraries), you probably do NOT need this argument and shouldn't
+ mess with it. For more details on how this argument works, see the section
+ below on :ref:`Automatic Resource Extraction`.
+
+``project_urls``
+ An arbitrary map of URL names to hyperlinks, allowing more extensible
+ documentation of where various resources can be found than the simple
+ ``url`` and ``download_url`` options provide.
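+
+    For example (the URLs below are placeholders, not real project resources)::
+
+        setup(
+            # ...
+            project_urls={
+                "Documentation": "https://example.com/docs",
+                "Source": "https://example.com/source",
+            },
+        )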
diff --git a/docs/userguide/miscellaneous.rst b/docs/userguide/miscellaneous.rst
new file mode 100644
index 0000000..5fd2f0a
--- /dev/null
+++ b/docs/userguide/miscellaneous.rst
@@ -0,0 +1,159 @@
+.. _Automatic Resource Extraction:
+
+Automatic Resource Extraction
+-----------------------------
+
+If you are using tools that expect your resources to be "real" files, or your
+project includes non-extension native libraries or other files that your C
+extensions expect to be able to access, you may need to list those files in
+the ``eager_resources`` argument to ``setup()``, so that the files will be
+extracted together, whenever a C extension in the project is imported.
+
+This is especially important if your project includes shared libraries *other*
+than distutils-built C extensions, and those shared libraries use file
+extensions other than ``.dll``, ``.so``, or ``.dylib``, which are the
+extensions that setuptools 0.6a8 and higher automatically detects as shared
+libraries and adds to the ``native_libs.txt`` file for you. Any shared
+libraries whose names do not end with one of those extensions should be listed
+as ``eager_resources``, because they need to be present in the filesystem when
+the C extensions that link to them are used.
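+
+For example, a hedged sketch (the shared library and data file names are
+hypothetical):
+
+.. code-block:: python
+
+    setup(
+        # ...
+        eager_resources=[
+            # extracted together whenever any C extension in the project is used
+            "mypkg/lib/libfoo.so.1",
+            "mypkg/data/lookup.dat",
+        ],
+    )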
+
+The ``pkg_resources`` runtime for compressed packages will automatically
+extract *all* C extensions and ``eager_resources`` at the same time, whenever
+*any* C extension or eager resource is requested via the ``resource_filename()``
+API. (C extensions are imported using ``resource_filename()`` internally.)
+This ensures that C extensions will see all of the "real" files that they
+expect to see.
+
+Note also that you can list directory resource names in ``eager_resources`` as
+well, in which case the directory's contents (including subdirectories) will be
+extracted whenever any C extension or eager resource is requested.
+
+Please note that if you're not sure whether you need to use this argument, you
+don't! It's really intended to support projects with lots of non-Python
+dependencies and as a last resort for crufty projects that can't otherwise
+handle being compressed. If your package is pure Python, Python plus data
+files, or Python plus C, you really don't need this. You've got to be using
+either C or an external program that needs "real" files in your project before
+there's any possibility of ``eager_resources`` being relevant to your project.
+
+Defining Additional Metadata
+----------------------------
+
+Some extensible applications and frameworks may need to define their own kinds
+of metadata to include in eggs, which they can then access using the
+``pkg_resources`` metadata APIs. Ordinarily, this is done by having plugin
+developers include additional files in their ``ProjectName.egg-info``
+directory. However, since it can be tedious to create such files by hand, you
+may want to create a distutils extension that will create the necessary files
+from arguments to ``setup()``, in much the same way that ``setuptools`` does
+for many of the ``setup()`` arguments it adds. See the section below on
+:ref:`Creating ``distutils\`\` Extensions` for more details, especially the
+subsection on :ref:`Adding new EGG-INFO Files`.
+
+Setting the ``zip_safe`` flag
+-----------------------------
+
+For some use cases (such as bundling as part of a larger application), Python
+packages may be run directly from a zip file.
+Not all packages, however, are capable of running in compressed form, because
+they may expect to be able to access either source code or data files as
+normal operating system files. So, ``setuptools`` can install your project
+as a zipfile or a directory, and its default choice is determined by the
+project's ``zip_safe`` flag.
+
+You can pass a True or False value for the ``zip_safe`` argument to the
+``setup()`` function, or you can omit it. If you omit it, the ``bdist_egg``
+command will analyze your project's contents to see if it can detect any
+conditions that would prevent it from working in a zipfile. It will output
+notices to the console about any such conditions that it finds.
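+
+For example, to state explicitly that the project must be installed unzipped
+(the project name is a placeholder):
+
+.. code-block:: python
+
+    setup(
+        name="Project-A",
+        # ...
+        zip_safe=False,
+    )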
+
+Currently, this analysis is extremely conservative: it will consider the
+project unsafe if it contains any C extensions or datafiles whatsoever. This
+does *not* mean that the project can't or won't work as a zipfile! It just
+means that the ``bdist_egg`` authors aren't yet comfortable asserting that
+the project *will* work. If the project contains no C or data files, and does
+no ``__file__`` or ``__path__`` introspection or source code manipulation, then
+there is an extremely solid chance the project will work when installed as a
+zipfile. (And if the project uses ``pkg_resources`` for all its data file
+access, then C extensions and other data files shouldn't be a problem at all.
+See the :ref:`Accessing Data Files at Runtime` section above for more information.)
+
+However, if ``bdist_egg`` can't be *sure* that your package will work, but
+you've checked over all the warnings it issued, and you are either satisfied it
+*will* work (or if you want to try it for yourself), then you should set
+``zip_safe`` to ``True`` in your ``setup()`` call. If it turns out that it
+doesn't work, you can always change it to ``False``, which will force
+``setuptools`` to install your project as a directory rather than as a zipfile.
+
+In the future, as we gain more experience with different packages and become
+more satisfied with the robustness of the ``pkg_resources`` runtime, the
+"zip safety" analysis may become less conservative. However, we strongly
+recommend that you determine for yourself whether your project functions
+correctly when installed as a zipfile, correct any problems if you can, and
+then make an explicit declaration of ``True`` or ``False`` for the ``zip_safe``
+flag, so that it will not be necessary for ``bdist_egg`` to try to guess
+whether your project can work as a zipfile.
+
+
+.. _Controlling files in the distribution:
+
+Controlling files in the distribution
+-------------------------------------
+
+For the most common use cases, ``setuptools`` will automatically find out which
+files are necessary for distributing the package.
+This includes all :term:`pure Python modules <Pure Module>` in the
+``py_modules`` or ``packages`` configuration, and the C sources (but not C
+headers) listed as part of extensions when creating a :term:`Source
+Distribution (or "sdist")`.
+
+However, when building more complex packages (e.g. packages that include
+non-Python files, or that need to use custom C headers), you might find that
+not all files present in your project folder are included in package
+:term:`distribution archive <Distribution Package>`.
+
+In these situations you can use a ``setuptools``
+:ref:`plugin <Adding Support for Revision Control Systems>`,
+such as :pypi:`setuptools-scm` or :pypi:`setuptools-svn` to automatically
+include all files tracked by your Revision Control System into the ``sdist``.
+
+.. _Using MANIFEST.in:
+
+Alternatively, if you need finer control, you can add a ``MANIFEST.in`` file at
+the root of your project.
+This file contains instructions that tell ``setuptools`` which files exactly
+should be part of the ``sdist`` (or not).
+A comprehensive guide to ``MANIFEST.in`` syntax is available at the
+:doc:`PyPA's Packaging User Guide <PyPUG:guides/using-manifest-in>`.
+
+Once the correct files are present in the ``sdist``, they can then be used by
+binary extensions during the build process, or included in the final
+:term:`wheel <Wheel>` [#build-process]_ if you configure ``setuptools`` with
+``include_package_data=True``.
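+
+A minimal sketch of that last step:
+
+.. code-block:: python
+
+    setup(
+        # ...
+        # also copy the data files selected for the sdist into the wheel,
+        # as long as they live inside a package directory
+        include_package_data=True,
+    )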
+
+.. important::
+ Please note that, when using ``include_package_data=True``, only files **inside
+ the package directory** are included in the final ``wheel``, by default.
+
+ So for example, if you create a :term:`Python project <Project>` that uses
+ :pypi:`setuptools-scm` and have a ``tests`` directory outside of the package
+ folder, the ``tests`` directory will be present in the ``sdist`` but not in the
+ ``wheel`` [#wheel-vs-sdist]_.
+
+ See :doc:`/userguide/datafiles` for more information.
+
+----
+
+.. [#build-process]
+ You can think about the build process as two stages: first the ``sdist``
+ will be created and then the ``wheel`` will be produced from that ``sdist``.
+
+.. [#wheel-vs-sdist]
+ This happens because the ``sdist`` can contain files that are useful during
+ development or the build process itself, but not in runtime (e.g. tests,
+ docs, examples, etc...).
+ The ``wheel``, on the other hand, is a file format that has been optimized
+ and is ready to be unpacked into a running installation of Python or
+ :term:`Virtual Environment`.
+ Therefore it only contains items that are required during runtime.
diff --git a/docs/userguide/package_discovery.rst b/docs/userguide/package_discovery.rst
new file mode 100644
index 0000000..ee8e983
--- /dev/null
+++ b/docs/userguide/package_discovery.rst
@@ -0,0 +1,576 @@
+.. _`package_discovery`:
+
+========================================
+Package Discovery and Namespace Packages
+========================================
+
+.. note::
+ A full specification of the keywords supplied to ``setup.cfg`` or
+ ``setup.py`` can be found at the :doc:`keywords reference <keywords>`.
+
+.. note::
+ The examples provided here only demonstrate the functionality
+ introduced. More metadata and options need to be supplied
+ if you want to replicate them on your system. If you are completely
+ new to setuptools, the :doc:`quickstart section <quickstart>` is a good
+ place to start.
+
+``Setuptools`` provides powerful tools to handle package discovery, including
+support for namespace packages.
+
+Normally, you would specify the packages to be included manually in the following manner:
+
+.. tab:: setup.cfg
+
+ .. code-block:: ini
+
+ [options]
+ #...
+ packages =
+ mypkg1
+ mypkg2
+
+.. tab:: setup.py
+
+ .. code-block:: python
+
+ setup(
+ # ...
+ packages=['mypkg1', 'mypkg2']
+ )
+
+.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_
+
+ .. code-block:: toml
+
+ # ...
+ [tool.setuptools]
+ packages = ["mypkg1", "mypkg2"]
+ # ...
+
+
+If your packages are not in the root of the repository, you also need to
+configure ``package_dir``:
+
+.. tab:: setup.cfg
+
+ .. code-block:: ini
+
+ [options]
+ # ...
+ package_dir =
+ = src
+ # directory containing all the packages (e.g. src/mypkg1, src/mypkg2)
+ # OR
+ package_dir =
+ mypkg1 = lib1
+ # mypkg1.mod corresponds to lib1/mod.py
+ # mypkg1.subpkg.mod corresponds to lib1/subpkg/mod.py
+ mypkg2 = lib2
+ # mypkg2.mod corresponds to lib2/mod.py
+ mypkg2.subpkg = lib3
+ # mypkg2.subpkg.mod corresponds to lib3/mod.py
+
+.. tab:: setup.py
+
+ .. code-block:: python
+
+ setup(
+ # ...
+ package_dir = {"": "src"}
+ # directory containing all the packages (e.g. src/mypkg1, src/mypkg2)
+ )
+
+ # OR
+
+ setup(
+ # ...
+ package_dir = {
+ "mypkg1": "lib1", # mypkg1.mod corresponds to lib1/mod.py
+ # mypkg1.subpkg.mod corresponds to lib1/subpkg/mod.py
+ "mypkg2": "lib2", # mypkg2.mod corresponds to lib2/mod.py
+ "mypkg2.subpkg": "lib3" # mypkg2.subpkg.mod corresponds to lib3/mod.py
+ # ...
+ )
+
+.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_
+
+ .. code-block:: toml
+
+ [tool.setuptools]
+ # ...
+ package-dir = {"" = "src"}
+ # directory containing all the packages (e.g. src/mypkg1, src/mypkg2)
+
+ # OR
+
+ [tool.setuptools.package-dir]
+ mypkg1 = "lib1"
+ # mypkg1.mod corresponds to lib1/mod.py
+ # mypkg1.subpkg.mod corresponds to lib1/subpkg/mod.py
+ mypkg2 = "lib2"
+ # mypkg2.mod corresponds to lib2/mod.py
+ "mypkg2.subpkg" = "lib3"
+ # mypkg2.subpkg.mod corresponds to lib3/mod.py
+ # ...
+
+This can get tiresome really quickly. To speed things up, you can rely on
+setuptools automatic discovery, or use the provided tools, as explained in
+the following sections.
+
+
+.. _auto-discovery:
+
+Automatic discovery
+===================
+
+.. warning:: Automatic discovery is an **experimental** feature and might change
+ (or be completely removed) in the future.
+ See :ref:`custom-discovery` for a stable way of configuring ``setuptools``.
+
+By default, ``setuptools`` will consider two popular project layouts, each one
+with its own set of advantages and disadvantages [#layout1]_ [#layout2]_, as
+discussed in the following sections.
+
+Setuptools will automatically scan your project directory looking for these
+layouts and try to guess the correct values for the :ref:`packages <declarative
+config>` and :doc:`py_modules </references/keywords>` configuration.
+
+.. important::
+ Automatic discovery will **only** be enabled if you **don't** provide any
+ configuration for ``packages`` and ``py_modules``.
+ If at least one of them is explicitly set, automatic discovery will not take place.
+
+ **Note**: specifying ``ext_modules`` might also prevent auto-discovery from
+ taking place, unless you opt into :doc:`pyproject_config` (which will
+ disable the backward-compatible behaviour).
+
+.. _src-layout:
+
+src-layout
+----------
+The project should contain a ``src`` directory under the project root; all
+modules and packages meant for distribution are placed inside this
+directory::
+
+ project_root_directory
+ ├── pyproject.toml
+ ├── setup.cfg # or setup.py
+ ├── ...
+ └── src/
+ └── mypkg/
+ ├── __init__.py
+ ├── ...
+ └── mymodule.py
+
+This layout is very handy when you wish to use automatic discovery,
+since you don't have to worry about other Python files or folders in your
+project root being distributed by mistake. In some circumstances it can
+also be less error-prone for testing or when using :pep:`420`-style packages.
+On the other hand you cannot rely on the implicit ``PYTHONPATH=.`` to fire
+up the Python REPL and play with your package (you will need an
+`editable install`_ to be able to do that).
+
+.. _flat-layout:
+
+flat-layout
+-----------
+*(also known as "adhoc")*
+
+The package folder(s) are placed directly under the project root::
+
+ project_root_directory
+ ├── pyproject.toml
+ ├── setup.cfg # or setup.py
+ ├── ...
+ └── mypkg/
+ ├── __init__.py
+ ├── ...
+ └── mymodule.py
+
+This layout is very practical for using the REPL, but in some situations
+it can be more error-prone (e.g. during tests or if you have a bunch
+of folders or Python files hanging around your project root).
+
+To avoid confusion, file and folder names that are used by popular tools (or
+that correspond to well-known conventions, such as distributing documentation
+alongside the project code) are automatically filtered out in the case of
+*flat-layout*:
+
+.. autoattribute:: setuptools.discovery.FlatLayoutPackageFinder.DEFAULT_EXCLUDE
+
+.. autoattribute:: setuptools.discovery.FlatLayoutModuleFinder.DEFAULT_EXCLUDE
+
+.. warning::
+ If you are using auto-discovery with *flat-layout*, ``setuptools`` will
+ refuse to create :term:`distribution archives <Distribution Package>` with
+ multiple top-level packages or modules.
+
+ This is done to prevent common errors such as accidentally publishing code
+ not meant for distribution (e.g. maintenance-related scripts).
+
+ Users that purposefully want to create multi-package distributions are
+ advised to use :ref:`custom-discovery` or the ``src-layout``.
+
+There is also a handy variation of the *flat-layout* for utilities/libraries
+that can be implemented with a single Python file:
+
+single-module distribution
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+A standalone module is placed directly under the project root, instead of
+inside a package folder::
+
+ project_root_directory
+ ├── pyproject.toml
+ ├── setup.cfg # or setup.py
+ ├── ...
+ └── single_file_lib.py
+
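+Such a module can also be declared explicitly; a minimal ``setup.py`` sketch
+using the ``py_modules`` keyword (the module name mirrors the hypothetical
+file above):
+
+.. code-block:: python
+
+    from setuptools import setup
+
+    setup(
+        # ...
+        # refers to single_file_lib.py at the project root
+        py_modules=["single_file_lib"],
+    )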
+
+.. _custom-discovery:
+
+Custom discovery
+================
+
+If the automatic discovery does not work for you
+(e.g., you want to *include* in the distribution top-level packages with
+reserved names such as ``tasks``, ``example`` or ``docs``, or you want to
+*exclude* nested packages that would otherwise be included), you can use
+the provided tools for package discovery:
+
+.. tab:: setup.cfg
+
+ .. code-block:: ini
+
+ [options]
+ packages = find:
+ #or
+ packages = find_namespace:
+
+.. tab:: setup.py
+
+ .. code-block:: python
+
+ from setuptools import find_packages
+ # or
+ from setuptools import find_namespace_packages
+
+.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_
+
+ .. code-block:: toml
+
+ # ...
+ [tool.setuptools.packages]
+ find = {} # Scanning implicit namespaces is active by default
+ # OR
+ find = {namespace = false} # Disable implicit namespaces
+
+
+Finding simple packages
+-----------------------
+Let's start with the first tool. ``find:`` (``find_packages()``) takes a source
+directory and two lists of package name patterns to exclude and include, and
+then returns a list of ``str`` representing the packages it could find. To use
+it, consider the following directory::
+
+ mypkg
+ ├── setup.cfg # and/or setup.py, pyproject.toml
+ └── src
+ ├── pkg1
+ │   └── __init__.py
+ ├── pkg2
+ │   └── __init__.py
+ ├── additional
+ │   └── __init__.py
+ └── pkg
+ └── namespace
+ └── __init__.py
+
+To have setuptools automatically include packages found
+in ``src`` that start with the name ``pkg`` and exclude ``additional``:
+
+.. tab:: setup.cfg
+
+ .. code-block:: ini
+
+ [options]
+ packages = find:
+ package_dir =
+ =src
+
+ [options.packages.find]
+ where = src
+ include = pkg*
+ exclude = additional
+
+ .. note::
+ ``pkg`` does not contain an ``__init__.py`` file, therefore
+ ``pkg.namespace`` is ignored by ``find:`` (see ``find_namespace:`` below).
+
+.. tab:: setup.py
+
+ .. code-block:: python
+
+ setup(
+ # ...
+ packages=find_packages(
+ where='src',
+ include=['pkg*'],
+ exclude=['additional'],
+ ),
+ package_dir={"": "src"}
+ # ...
+ )
+
+
+ .. note::
+ ``pkg`` does not contain an ``__init__.py`` file, therefore
+ ``pkg.namespace`` is ignored by ``find_packages()``
+ (see ``find_namespace_packages()`` below).
+
+.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_
+
+ .. code-block:: toml
+
+ [tool.setuptools.packages.find]
+ where = ["src"]
+ include = ["pkg*"]
+ exclude = ["additional"]
+ namespaces = false
+
+ .. note::
+ When using ``tool.setuptools.packages.find`` in ``pyproject.toml``,
+ setuptools will consider :pep:`implicit namespaces <420>` by default when
+ scanning your project directory.
+ To prevent ``pkg.namespace`` from being added to your package list,
+ you can set ``namespaces = false``. This will prevent any folder
+ without an ``__init__.py`` file from being scanned.
+
+.. important::
+ ``include`` and ``exclude`` accept strings representing :mod:`glob` patterns.
+ These patterns should match the **full** name of the Python module (as if it
+ was written in an ``import`` statement).
+
+ For example, if you have the pattern ``util``, it will match
+ ``util/__init__.py`` but not ``util/files/__init__.py``.
+
+ The fact that the parent package is matched by the pattern will not dictate
+ whether the submodule will be included in or excluded from the distribution.
+ You will need to explicitly add a wildcard (e.g. ``util*``)
+ if you want the pattern to also match submodules.
+
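+For illustration, a minimal sketch with ``find_packages()`` (the ``util`` and
+``util.files`` packages are hypothetical):
+
+.. code-block:: python
+
+    from setuptools import find_packages
+
+    # Hypothetical layout: util/__init__.py and util/files/__init__.py
+    find_packages(include=["util"])   # -> ["util"] (submodules are not matched)
+    find_packages(include=["util*"])  # -> ["util", "util.files"]
+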
+.. _Namespace Packages:
+
+Finding namespace packages
+--------------------------
+``setuptools`` provides ``find_namespace:`` (``find_namespace_packages()``),
+which behaves similarly to ``find:`` but works with namespace packages.
+
+Before diving in, it is important to have a good understanding of what
+:pep:`namespace packages <420>` are. Here is a quick recap.
+
+When you have two packages organized as follows:
+
+.. code-block:: bash
+
+ /Users/Desktop/timmins/foo/__init__.py
+ /Library/timmins/bar/__init__.py
+
+If both ``Desktop`` and ``Library`` are on your ``PYTHONPATH``, then a
+namespace package called ``timmins`` will be created automatically for you when
+you invoke the import mechanism, allowing you to accomplish the following:
+
+.. code-block:: pycon
+
+ >>> import timmins.foo
+ >>> import timmins.bar
+
+as if there were only one ``timmins`` on your system. The two packages can then
+be distributed separately and installed individually without affecting the
+other one.
+
+Now, suppose you decide to package the ``foo`` part for distribution and start
+by creating a project directory organized as follows::
+
+ foo
+ ├── setup.cfg # and/or setup.py, pyproject.toml
+ └── src
+ └── timmins
+ └── foo
+ └── __init__.py
+
+If you want ``timmins.foo`` to be automatically included in the
+distribution, then you will need to specify:
+
+.. tab:: setup.cfg
+
+ .. code-block:: ini
+
+ [options]
+ package_dir =
+ =src
+ packages = find_namespace:
+
+ [options.packages.find]
+ where = src
+
+ ``find:`` won't work because ``timmins`` doesn't contain an ``__init__.py``
+ file directly; instead, you have to use ``find_namespace:``.
+
+ You can think of ``find_namespace:`` as identical to ``find:`` except that it
+ counts a directory as a package even if it doesn't contain an ``__init__.py``
+ file directly.
+
+.. tab:: setup.py
+
+ .. code-block:: python
+
+ setup(
+ # ...
+ packages=find_namespace_packages(where='src'),
+ package_dir={"": "src"}
+ # ...
+ )
+
+ When you use ``find_packages()``, all directories without an
+ ``__init__.py`` file will be disregarded.
+ On the other hand, ``find_namespace_packages()`` will scan all
+ directories.
+
+.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_
+
+ .. code-block:: toml
+
+ [tool.setuptools.packages.find]
+ where = ["src"]
+
+ When using ``tool.setuptools.packages.find`` in ``pyproject.toml``,
+ setuptools will consider :pep:`implicit namespaces <420>` by default when
+ scanning your project directory.
+
+After installing the package distribution, ``timmins.foo`` would become
+available to your interpreter.
+
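+For example, a quick check in the interpreter (assuming the project above has
+been installed, e.g. with ``pip install .``):
+
+.. code-block:: pycon
+
+    >>> import timmins.foo
+    >>> timmins.foo.__name__
+    'timmins.foo'
+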
+.. warning::
+ Please keep in mind that ``find_namespace:`` (setup.cfg),
+ ``find_namespace_packages()`` (setup.py) and ``find`` (pyproject.toml) will
+ scan **all** folders that you have in your project directory if you use a
+ :ref:`flat-layout`.
+
+ If used naïvely, this might result in unwanted files being added to your
+ final wheel. For example, with a project directory organized as follows::
+
+ foo
+ ├── docs
+ │ └── conf.py
+ ├── timmins
+ │ └── foo
+ │ └── __init__.py
+ └── tests
+ └── tests_foo
+ └── __init__.py
+
+ final users will end up installing not only ``timmins.foo``, but also
+ ``docs`` and ``tests.tests_foo``.
+
+ A simple way to fix this is to adopt the aforementioned :ref:`src-layout`,
+ or make sure to properly configure the ``include`` and/or ``exclude``
+ accordingly.
+
+.. tip::
+ After :ref:`building your package <building>`, you can check whether all
+ the files are correct (nothing missing or extra) by running the following
+ commands:
+
+ .. code-block:: bash
+
+ tar tf dist/*.tar.gz
+ unzip -l dist/*.whl
+
+ This requires ``tar`` and ``unzip`` to be installed in your OS.
+ On Windows you can also use a GUI program such as 7zip_.
+
+
+Legacy Namespace Packages
+=========================
+The fact that you can create namespace packages so effortlessly above is
+credited to `PEP 420 <https://www.python.org/dev/peps/pep-0420/>`_. It used to
+be more cumbersome to accomplish the same result. Historically, there were two
+methods to create namespace packages. One is the ``pkg_resources`` style
+supported by ``setuptools``; the other is the ``pkgutil`` style offered by the
+``pkgutil`` module in Python. Both are now considered deprecated despite the
+fact that they still linger in many existing packages. These two differ in many
+subtle yet significant aspects and you can find out more in the `Python packaging
+user guide <https://packaging.python.org/guides/packaging-namespace-packages/>`_.
+
+
+``pkg_resources`` style namespace package
+------------------------------------------
+This is the method ``setuptools`` directly supports. Starting with the same
+layout, there are two pieces you need to add to it. First, an ``__init__.py``
+file directly under your namespace package directory that contains the
+following:
+
+.. code-block:: python
+
+ __import__("pkg_resources").declare_namespace(__name__)
+
+And the ``namespace_packages`` keyword in your ``setup.cfg`` or ``setup.py``:
+
+.. tab:: setup.cfg
+
+ .. code-block:: ini
+
+ [options]
+ namespace_packages = timmins
+
+.. tab:: setup.py
+
+ .. code-block:: python
+
+ setup(
+ # ...
+ namespace_packages=['timmins']
+ )
+
+And your directory should look like this:
+
+.. code-block:: bash
+
+ foo
+ ├── setup.cfg # and/or setup.py, pyproject.toml
+ └── src
+ └── timmins
+ ├── __init__.py
+ └── foo
+ └── __init__.py
+
+Repeat the same for other packages and you can achieve the same result as
+the previous section.
+
+``pkgutil`` style namespace package
+-----------------------------------
+This method is almost identical to the ``pkg_resources`` style except that the
+``namespace_packages`` declaration is omitted and the ``__init__.py``
+file contains the following:
+
+.. code-block:: python
+
+ __path__ = __import__('pkgutil').extend_path(__path__, __name__)
+
+The project layout remains the same and ``setup.cfg`` remains the same.
+
+
+----
+
+
+.. [#experimental]
+ Support for specifying package metadata and build configuration options via
+ ``pyproject.toml`` is experimental and might change (or be completely
+ removed) in the future. See :doc:`/userguide/pyproject_config`.
+.. [#layout1] https://blog.ionelmc.ro/2014/05/25/python-packaging/#the-structure
+.. [#layout2] https://blog.ionelmc.ro/2017/09/25/rehashing-the-src-layout/
+
+.. _editable install: https://pip.pypa.io/en/stable/cli/pip_install/#editable-installs
+.. _7zip: https://www.7-zip.org
diff --git a/docs/userguide/pyproject_config.rst b/docs/userguide/pyproject_config.rst
new file mode 100644
index 0000000..47c4511
--- /dev/null
+++ b/docs/userguide/pyproject_config.rst
@@ -0,0 +1,218 @@
+.. _pyproject.toml config:
+
+-----------------------------------------------------
+Configuring setuptools using ``pyproject.toml`` files
+-----------------------------------------------------
+
+.. note:: New in 61.0.0 (**experimental**)
+
+.. warning::
+ Support for declaring :doc:`project metadata
+ <PyPUG:specifications/declaring-project-metadata>` or configuring
+ ``setuptools`` via ``pyproject.toml`` files is still experimental and might
+ change (or be removed) in future releases.
+
+.. important::
+ For the time being, ``pip`` still might require a ``setup.py`` file
+ to support :doc:`editable installs <pip:cli/pip_install>`.
+
+ A simple script will suffice, for example:
+
+ .. code-block:: python
+
+ from setuptools import setup
+
+ setup()
+
+Starting with :pep:`621`, the Python community selected ``pyproject.toml`` as
+a standard way of specifying *project metadata*.
+``Setuptools`` has adopted this standard and will use the information contained
+in this file as an input in the build process.
+
+The example below illustrates how to write a ``pyproject.toml`` file that can
+be used with ``setuptools``. It contains two TOML tables (identified by the
+``[table-header]`` syntax): ``build-system`` and ``project``.
+The ``build-system`` table is used to tell the build frontend (e.g.
+:pypi:`build` or :pypi:`pip`) to use ``setuptools`` and any other plugins (e.g.
+``setuptools-scm``) to build the package.
+The ``project`` table contains metadata fields as described by the
+:doc:`PyPUG:specifications/declaring-project-metadata` guide.
+
+.. _example-pyproject-config:
+
+.. code-block:: toml
+
+ [build-system]
+ requires = ["setuptools", "setuptools-scm"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "my_package"
+ description = "My package description"
+ readme = "README.rst"
+ keywords = ["one", "two"]
+ license = {text = "BSD 3-Clause License"}
+ classifiers = [
+ "Framework :: Django",
+ "Programming Language :: Python :: 3",
+ ]
+ dependencies = [
+ "requests",
+ 'importlib-metadata; python_version<"3.8"',
+ ]
+ dynamic = ["version"]
+
+ [project.optional-dependencies]
+ pdf = ["ReportLab>=1.2", "RXP"]
+ rest = ["docutils>=0.3", "pack ==1.1, ==1.3"]
+
+ [project.scripts]
+ my-script = "my_package.module:function"
+
+
+.. _setuptools-table:
+
+Setuptools-specific configuration
+=================================
+
+While the standard ``project`` table in the ``pyproject.toml`` file covers most
+of the metadata used during the packaging process, there are still some
+``setuptools``-specific configurations that can be set by users who require
+customization.
+These configurations are completely optional and probably can be skipped when
+creating simple packages.
+They are equivalent to the :doc:`/references/keywords` used by the ``setup.py``
+file, and can be set via the ``tool.setuptools`` table:
+
+========================= =========================== =========================
+Key Value Type (TOML) Notes
+========================= =========================== =========================
+``platforms`` array
+``zip-safe`` boolean If not specified, ``setuptools`` will try to guess
+ a reasonable default for the package
+``eager-resources`` array
+``py-modules`` array See tip below
+``packages`` array or ``find`` directive See tip below
+``package-dir`` table/inline-table Used when explicitly listing ``packages``
+``namespace-packages`` array Not necessary if you use :pep:`420`
+``package-data`` table/inline-table See :doc:`/userguide/datafiles`
+``include-package-data`` boolean ``True`` by default
+``exclude-package-data`` table/inline-table
+``license-files`` array of glob patterns **Provisional** - likely to change with :pep:`639`
+ (by default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``)
+``data-files`` table/inline-table **Deprecated** - check :doc:`/userguide/datafiles`
+``script-files`` array **Deprecated** - equivalent to the ``script`` keyword in ``setup.py``
+ (should be avoided in favour of ``project.scripts``)
+``provides`` array **Ignored by pip**
+``obsoletes`` array **Ignored by pip**
+========================= =========================== =========================
+
+.. note::
+ The `TOML value types`_ ``array`` and ``table/inline-table`` are roughly
+ equivalent to Python's :obj:`list` and :obj:`dict` data types, respectively.
+
+Please note that some of these configurations are deprecated or at least
+discouraged, but they are made available to ensure portability.
+New packages should avoid relying on deprecated/discouraged fields, and
+existing packages should consider alternatives.
+
+.. tip::
+ When both ``py-modules`` and ``packages`` are left unspecified,
+ ``setuptools`` will attempt to perform :ref:`auto-discovery`, which should
+ cover most popular project directory organization techniques, such as the
+ :ref:`src-layout` and the :ref:`flat-layout`.
+
+ However, if your project does not follow these conventional layouts
+ (e.g. you want to use a ``flat-layout`` but at the same time have custom
+ directories at the root of your project), you might need to use the ``find``
+ directive [#directives]_ as shown below:
+
+ .. code-block:: toml
+
+ [tool.setuptools.packages.find]
+ where = ["src"] # list of folders that contain the packages (["."] by default)
+ include = ["my_package*"] # package names should match these glob patterns (["*"] by default)
+ exclude = ["my_package.tests*"] # exclude packages matching these glob patterns (empty by default)
+ namespaces = false # to disable scanning PEP 420 namespaces (true by default)
+
+ Note that the glob patterns in the example above need to be matched
+ by the **entire** package name. This means that if you specify ``exclude = ["tests"]``,
+ modules like ``tests.my_package.test1`` will still be included in the distribution
+ (to remove them, add a wildcard to the end of the pattern: ``"tests*"``).
+
+ Alternatively, you can explicitly list the packages:
+
+ .. code-block:: toml
+
+ [tool.setuptools]
+ packages = ["my_package"]
+
+
+.. _dynamic-pyproject-config:
+
+Dynamic Metadata
+================
+
+Note that in the first example of this page we use ``dynamic`` to identify
+which metadata fields are dynamically computed during the build by either
+``setuptools`` itself or the plugins installed via ``build-system.requires``
+(e.g. ``setuptools-scm`` is capable of deriving the current project version
+directly from the ``git`` :wiki:`version control` system).
+
+Currently the following fields can be listed as dynamic: ``version``,
+``classifiers``, ``description``, ``entry-points``, ``scripts``,
+``gui-scripts`` and ``readme``.
+When these fields are expected to be provided by ``setuptools``, a
+corresponding entry is required in the ``tool.setuptools.dynamic`` table
+[#entry-points]_. For example:
+
+.. code-block:: toml
+
+ # ...
+ [project]
+ name = "my_package"
+ dynamic = ["version", "readme"]
+ # ...
+ [tool.setuptools.dynamic]
+ version = {attr = "my_package.VERSION"}
+ readme = {file = ["README.rst", "USAGE.rst"]}
+
+In the ``dynamic`` table, the ``attr`` directive [#directives]_ will read an
+attribute from the given module [#attr]_, while ``file`` will read the contents
+of all given files and concatenate them in a single string.
+
+================= =================== =========================
+Key Directive Notes
+================= =================== =========================
+``version`` ``attr``, ``file``
+``readme`` ``file``
+``description`` ``file`` One-line text
+``classifiers`` ``file`` Multi-line text with one classifier per line
+``entry-points`` ``file`` INI format following :doc:`PyPUG:specifications/entry-points`
+ (``console_scripts`` and ``gui_scripts`` can be included)
+================= =================== =========================
+
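+For instance, for the ``attr`` directive in the example above
+(``attr = "my_package.VERSION"``), a minimal sketch of what
+``my_package/__init__.py`` might contain (the version string is illustrative):
+
+.. code-block:: python
+
+    # my_package/__init__.py
+    # A statically defined attribute, so it can be read without importing
+    # the rest of the package (see the note on ``attr`` below).
+    VERSION = "1.2.3"
+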
+----
+
+.. rubric:: Notes
+
+.. [#entry-points] Dynamic ``scripts`` and ``gui-scripts`` are a special case.
+ When resolving these metadata keys, ``setuptools`` will look for
+ ``tool.setuptools.dynamic.entry-points``, and use the values of the
+ ``console_scripts`` and ``gui_scripts`` :doc:`entry-point groups
+ <PyPUG:specifications/entry-points>`.
+
+.. [#directives] In the context of this document, *directives* are special TOML
+ values that are interpreted differently by ``setuptools`` (usually triggering an
+ associated function). Most of the time they correspond to a special TOML table
+ (or inline-table) with a single top-level key.
+ For example, you can have the ``{find = {where = ["src"], exclude=["tests*"]}}``
+ directive for ``tool.setuptools.packages``, or ``{attr = "mymodule.attr"}``
+ directive for ``tool.setuptools.dynamic.version``.
+
+.. [#attr] ``attr`` is meant to be used when the module attribute is statically
+ specified (e.g. as a string, list or tuple). As a rule of thumb, the
+ attribute should be able to be parsed with :func:`ast.literal_eval`, and
+ should not be modified or re-assigned.
+
+.. _TOML value types: https://toml.io/en/v1.0.0
diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst
new file mode 100644
index 0000000..5be1078
--- /dev/null
+++ b/docs/userguide/quickstart.rst
@@ -0,0 +1,414 @@
+==========================
+``setuptools`` Quickstart
+==========================
+
+Installation
+============
+
+To install the latest version of setuptools, use::
+
+ pip install --upgrade setuptools
+
+
+Python packaging at a glance
+============================
+The landscape of Python packaging is shifting and ``Setuptools`` has evolved to
+only provide backend support, no longer being the de-facto packaging tool in
+the market. Every Python package must provide a ``pyproject.toml`` and specify
+the backend (build system) it wants to use. The distribution can then
+be generated with any tool that provides ``build sdist``-like
+functionality. While this may appear cumbersome, given the added pieces,
+it in fact tremendously enhances the portability of your package. The
+change is driven by :pep:`PEP 517 <517#build-requirements>`. To learn more about Python packaging in general,
+navigate to the :ref:`bottom <packaging-resources>` of this page.
+
+
+Basic Use
+=========
+For basic use of setuptools, you will need a ``pyproject.toml`` with
+exactly the following info, which declares that you want to use ``setuptools`` to
+package your project:
+
+.. code-block:: toml
+
+ [build-system]
+ requires = ["setuptools"]
+ build-backend = "setuptools.build_meta"
+
+Then, you will need to specify your package information such as metadata,
+contents, dependencies, etc.
+
+Setuptools currently supports configurations from either ``setup.cfg``,
+``setup.py`` or ``pyproject.toml`` [#experimental]_ files; however, configuring new
+projects via ``setup.py`` is discouraged [#setup.py]_.
+
+The following example demonstrates a minimum configuration:
+
+.. tab:: setup.cfg
+
+ .. code-block:: ini
+
+ [metadata]
+ name = mypackage
+ version = 0.0.1
+
+ [options]
+ packages = mypackage
+ install_requires =
+ requests
+ importlib-metadata; python_version < "3.8"
+
+ See :doc:`/userguide/declarative_config` for more information.
+
+.. tab:: setup.py [#setup.py]_
+
+ .. code-block:: python
+
+ from setuptools import setup
+
+ setup(
+ name='mypackage',
+ version='0.0.1',
+ packages=['mypackage'],
+ install_requires=[
+ 'requests',
+ 'importlib-metadata; python_version < "3.8"',
+ ],
+ )
+
+ See :doc:`/references/keywords` for more information.
+
+.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_
+
+ .. code-block:: toml
+
+ [project]
+ name = "mypackage"
+ version = "0.0.1"
+ dependencies = [
+ "requests",
+ 'importlib-metadata; python_version<"3.8"',
+ ]
+
+ See :doc:`/userguide/pyproject_config` for more information.
+
+This is what your project would look like::
+
+ ~/mypackage/
+ pyproject.toml
+ setup.cfg # or setup.py
+ mypackage/__init__.py
+
+Then, you need a builder, such as :std:doc:`PyPA build <pypa-build:index>`
+which you can obtain via ``pip install build``. After installing it, invoke
+the builder::
+
+ python -m build
+
+You now have your distribution ready (e.g. a ``tar.gz`` file and a ``.whl``
+file in the ``dist`` directory), which you can upload to PyPI!
+
+Of course, before you release your project to PyPI, you'll want to add a bit
+more information to your setup script to help people find or learn about your
+project. And maybe your project will have grown by then to include a few
+dependencies, and perhaps some data files and scripts. In the next few sections,
+we will walk through the additional but essential information you need
+to specify to properly package your project.
+
+
+Automatic package discovery
+===========================
+For simple projects, it's usually easy enough to manually add packages to
+the ``packages`` keyword in ``setup.cfg``. However, for very large projects,
+it can be a big burden to keep the package list updated.
+Therefore, ``setuptools`` provides a convenient way to automatically list all
+the packages in your project directory:
+
+.. tab:: setup.cfg
+
+ .. code-block:: ini
+
+ [options]
+ packages = find: # OR `find_namespace:` if you want to use namespaces
+
+ [options.packages.find] (always `find` even if `find_namespace:` was used before)
+ # This section is optional
+ # Each entry in this section is optional, and if not specified, the default values are:
+ # `where=.`, `include=*` and `exclude=` (empty).
+ include=mypackage*
+ exclude=mypackage.tests*
+
+.. tab:: setup.py [#setup.py]_
+
+ .. code-block:: python
+
+ from setuptools import find_packages # or find_namespace_packages
+
+ setup(
+ # ...
+ packages=find_packages(
+ where='.',
+ include=['mypackage*'], # ["*"] by default
+ exclude=['mypackage.tests*'], # empty by default
+ ),
+ # ...
+ )
+
+.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_
+
+ .. code-block:: toml
+
+ # ...
+ [tool.setuptools.packages]
+ find = {} # Scan the project directory with the default parameters
+
+ # OR
+ [tool.setuptools.packages.find]
+ where = ["src"] # ["."] by default
+ include = ["mypackage*"] # ["*"] by default
+ exclude = ["mypackage.tests*"] # empty by default
+ namespaces = false # true by default
+
+When you pass the above information, alongside other necessary information,
+``setuptools`` walks through the directory specified in ``where`` (omitted
+here as the package resides in the current directory), filters the packages
+it can find following the ``include`` patterns (which default to ``*``, i.e.
+all packages), then removes those that match the ``exclude`` patterns and
+returns a list of Python packages. The above setup also allows you to adopt a
+``src/`` layout. For more details and advanced use, go to :ref:`package_discovery`.
+
+.. tip::
+ Starting with version 61.0.0, setuptools' automatic discovery capabilities
+ have been improved to detect popular project layouts (such as the
+ :ref:`flat-layout` and :ref:`src-layout`) without requiring any
+ special configuration. Check out our :ref:`reference docs <package_discovery>`
+ for more information, but please keep in mind that this functionality is
+ still considered **experimental** and might change (or even be removed) in
+ future releases.
+
+
+Entry points and automatic script creation
+===========================================
+Setuptools supports automatic creation of scripts upon installation that run
+code within your package, if you specify them as :doc:`entry points
+<PyPUG:specifications/entry-points>`.
+This is what allows you to run commands like ``pip install`` instead of having
+to type ``python -m pip install``.
+The following configuration examples show how to accomplish this:
+
+.. tab:: setup.cfg
+
+ .. code-block:: ini
+
+ [options.entry_points]
+ console_scripts =
+ cli-name = mypkg.mymodule:some_func
+
+.. tab:: setup.py [#setup.py]_
+
+ .. code-block:: python
+
+ setup(
+ # ...
+ entry_points={
+ 'console_scripts': [
+ 'cli-name = mypkg.mymodule:some_func',
+ ]
+ }
+ )
+
+.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_
+
+ .. code-block:: toml
+
+ [project.scripts]
+ cli-name = mypkg.mymodule:some_func
+
+When this project is installed, a ``cli-name`` executable will be created.
+``cli-name`` will invoke the function ``some_func`` in the
+``mypkg/mymodule.py`` file when called by the user.
+Note that you can also use the ``entry-points`` mechanism to advertise
+components between installed packages and implement plugin systems.
+For detailed usage, go to :doc:`entry_point`.
+
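+For illustration, a minimal sketch of what ``mypkg/mymodule.py`` could look
+like (the function body is purely hypothetical):
+
+.. code-block:: python
+
+    # mypkg/mymodule.py
+    def some_func():
+        """Body of the ``cli-name`` command created from the entry point above."""
+        print("Hello from mypkg!")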
+
+Dependency management
+=====================
+Packages built with ``setuptools`` can specify dependencies to be automatically
+installed when the package itself is installed.
+The example below shows how to configure this kind of dependency:
+
+.. tab:: setup.cfg
+
+ .. code-block:: ini
+
+ [options]
+ install_requires =
+ docutils
+ requests <= 0.4
+
+.. tab:: setup.py [#setup.py]_
+
+ .. code-block:: python
+
+ setup(
+ # ...
+ install_requires=["docutils", "requests <= 0.4"],
+ # ...
+ )
+
+.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_
+
+ .. code-block:: toml
+
+ [project]
+ # ...
+ dependencies = [
+ "docutils",
+ "requires <= 0.4",
+ ]
+ # ...
+
+Each dependency is represented by a string that can optionally contain version requirements
+(e.g. one of the operators <, >, <=, >=, == or !=, followed by a version identifier),
+and/or conditional environment markers, e.g. ``sys_platform == "win32"``
+(see :doc:`PyPUG:specifications/version-specifiers` for more information).
+
+When your project is installed, all of the dependencies not already installed
+will be located (via PyPI), downloaded, built (if necessary), and installed.
+This, of course, is a simplified scenario. You can also specify groups of
+extra dependencies that are not strictly required by your package to work, but
+that will provide additional functionalities.
+For more advanced use, see :doc:`dependency_management`.
+
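+For example, a minimal sketch of declaring such an optional group in
+``setup.py`` (the extra name ``docs`` and its requirement are illustrative):
+
+.. code-block:: python
+
+    from setuptools import setup
+
+    setup(
+        # ...
+        extras_require={
+            # installed only on demand, e.g. `pip install mypackage[docs]`
+            "docs": ["sphinx"],
+        },
+    )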
+
+.. _Including Data Files:
+
+Including Data Files
+====================
+The distutils have traditionally allowed installation of "data files", which
+are placed in a platform-specific location. Setuptools offers three ways to
+specify data files to be included in your packages. For the simplest use, you
+can use the ``include_package_data`` keyword:
+
+.. tab:: setup.cfg
+
+ .. code-block:: ini
+
+ [options]
+ include_package_data = True
+
+.. tab:: setup.py [#setup.py]_
+
+ .. code-block:: python
+
+ setup(
+ # ...
+ include_package_data=True,
+ # ...
+ )
+
+.. tab:: pyproject.toml (**EXPERIMENTAL**) [#experimental]_
+
+ .. code-block:: toml
+
+ [tool.setuptools]
+ include-package-data = true
+ # This is already the default behaviour if you are using
+ # pyproject.toml to configure your build.
+ # You can deactivate that with `include-package-data = false`
+
+This tells setuptools to install any data files it finds in your packages.
+The data files must be specified via the distutils' |MANIFEST.in|_ file
+or automatically added by a :ref:`Revision Control System plugin
+<Adding Support for Revision Control Systems>`.
+For more details, see :doc:`datafiles`.
+
+
+Development mode
+================
+
+``setuptools`` allows you to install a package without copying any files
+to your interpreter directory (e.g. the ``site-packages`` directory).
+This allows you to modify your source code and have the changes take
+effect without you having to rebuild and reinstall.
+Here's how to do it::
+
+ pip install --editable .
+
+This creates a link file in your interpreter's site-packages directory which
+points to your source code. For more information, see :doc:`development_mode`.
+
+.. tip::
+
+ Prior to :ref:`pip v21.1 <pip:v21-1>`, a ``setup.py`` script was
+ required to be compatible with development mode. With later
+ versions of pip, ``setup.cfg``-only projects may be installed in this mode.
+
+ If you are experimenting with :doc:`configuration using pyproject.toml <pyproject_config>`,
+ or have a version of ``pip`` older than v21.1, you might need to keep a
+ ``setup.py`` file in your repository if you want to use editable
+ installs (for the time being).
+
+ A simple script will suffice, for example:
+
+ .. code-block:: python
+
+ from setuptools import setup
+
+ setup()
+
+ You can still keep all the configuration in :doc:`setup.cfg </userguide/declarative_config>`
+ (or :doc:`pyproject.toml </userguide/pyproject_config>`).
+
+
+Uploading your package to PyPI
+==============================
+After generating the distribution files, the next step would be to upload your
+distribution so others can use it. This functionality is provided by
+:pypi:`twine` and is documented in the :doc:`Python packaging tutorial
+<PyPUG:tutorials/packaging-projects>`.
+
+
+Transitioning from ``setup.py`` to ``setup.cfg``
+================================================
+To avoid executing arbitrary scripts and boilerplate code, we are transitioning
+to a full-fledged ``setup.cfg`` to declare your package information instead
+of running ``setup()``. This inevitably brings challenges due to a different
+syntax. :doc:`Here </userguide/declarative_config>` we provide a quick guide to
+understanding how ``setup.cfg`` is parsed by ``setuptools`` to ease the pain of
+transition.
+
+.. _packaging-resources:
+
+Resources on Python packaging
+=============================
+Packaging in Python can be hard and is constantly evolving.
+`Python Packaging User Guide <https://packaging.python.org>`_ has tutorials and
+up-to-date references that can help you when it is time to distribute your work.
+
+
+.. |MANIFEST.in| replace:: ``MANIFEST.in``
+.. _MANIFEST.in: https://packaging.python.org/en/latest/guides/using-manifest-in/
+
+
+----
+
+.. rubric:: Notes
+
+.. [#setup.py]
+ The ``setup.py`` file should be used only when custom scripting during the
+ build is necessary.
+ Examples are kept in this document to help people interested in maintaining or
+ contributing to existing packages that use ``setup.py``.
+ Note that you can still keep most of the configuration declarative in
+ :doc:`setup.cfg <declarative_config>` or :doc:`pyproject.toml
+ <pyproject_config>` and use ``setup.py`` only for the parts not
+ supported in those files (e.g. C extensions).
+
+.. [#experimental]
+ While the ``[build-system]`` table should always be specified in the
+ ``pyproject.toml`` file, support for adding package metadata and build configuration
+ options via the ``[project]`` and ``[tool.setuptools]`` tables is still
+ experimental and might change (or be completely removed) in future releases.
+ See :doc:`/userguide/pyproject_config`.
diff --git a/easy_install.py b/easy_install.py
deleted file mode 100755
index d87e984..0000000
--- a/easy_install.py
+++ /dev/null
@@ -1,5 +0,0 @@
-"""Run the EasyInstall command"""
-
-if __name__ == '__main__':
- from setuptools.command.easy_install import main
- main()
diff --git a/exercises.py b/exercises.py
new file mode 100644
index 0000000..76176be
--- /dev/null
+++ b/exercises.py
@@ -0,0 +1,6 @@
+def measure_startup_perf():
+ # run by pytest_perf
+ import subprocess
+ import sys # end warmup
+
+ subprocess.check_call([sys.executable, '-c', 'pass'])
diff --git a/launcher.c b/launcher.c
index be69f0c..23ef3ac 100755..100644
--- a/launcher.c
+++ b/launcher.c
@@ -37,6 +37,7 @@
#include <windows.h>
#include <tchar.h>
#include <fcntl.h>
+#include <process.h>
int child_pid=0;
diff --git a/pavement.py b/pavement.py
deleted file mode 100644
index 84e5825..0000000
--- a/pavement.py
+++ /dev/null
@@ -1,62 +0,0 @@
-import re
-
-from paver.easy import task, path as Path
-import pip
-
-
-def remove_all(paths):
- for path in paths:
- path.rmtree() if path.isdir() else path.remove()
-
-
-@task
-def update_vendored():
- update_pkg_resources()
- update_setuptools()
-
-
-def rewrite_packaging(pkg_files, new_root):
- """
- Rewrite imports in packaging to redirect to vendored copies.
- """
- for file in pkg_files.glob('*.py'):
- text = file.text()
- text = re.sub(r' (pyparsing|six)', rf' {new_root}.\1', text)
- file.write_text(text)
-
-
-def clean(vendor):
- """
- Remove all files out of the vendor directory except the meta
- data (as pip uninstall doesn't support -t).
- """
- remove_all(
- path
- for path in vendor.glob('*')
- if path.basename() != 'vendored.txt'
- )
-
-
-def install(vendor):
- clean(vendor)
- install_args = [
- 'install',
- '-r', str(vendor / 'vendored.txt'),
- '-t', str(vendor),
- ]
- pip.main(install_args)
- remove_all(vendor.glob('*.dist-info'))
- remove_all(vendor.glob('*.egg-info'))
- (vendor / '__init__.py').write_text('')
-
-
-def update_pkg_resources():
- vendor = Path('pkg_resources/_vendor')
- install(vendor)
- rewrite_packaging(vendor / 'packaging', 'pkg_resources.extern')
-
-
-def update_setuptools():
- vendor = Path('setuptools/_vendor')
- install(vendor)
- rewrite_packaging(vendor / 'packaging', 'setuptools.extern')
diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index d5b0fe9..852476e 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -1,4 +1,3 @@
-# coding: utf-8
"""
Package resource API
--------------------
@@ -15,8 +14,6 @@ The package resource API is designed to work with normal filesystem packages,
method.
"""
-from __future__ import absolute_import
-
import sys
import os
import io
@@ -39,6 +36,9 @@ import tempfile
import textwrap
import itertools
import inspect
+import ntpath
+import posixpath
+import importlib
from pkgutil import get_importer
try:
@@ -47,8 +47,10 @@ except ImportError:
# Python 3.2 compatibility
import imp as _imp
-from pkg_resources.extern import six
-from pkg_resources.extern.six.moves import urllib, map, filter
+try:
+ FileExistsError
+except NameError:
+ FileExistsError = OSError
# capture these to bypass sandboxing
from os import utime
@@ -69,7 +71,12 @@ try:
except ImportError:
importlib_machinery = None
-from . import py31compat
+from pkg_resources.extern.jaraco.text import (
+ yield_lines,
+ drop_comment,
+ join_continuation,
+)
+
from pkg_resources.extern import appdirs
from pkg_resources.extern import packaging
__import__('pkg_resources.extern.packaging.version')
@@ -77,14 +84,8 @@ __import__('pkg_resources.extern.packaging.specifiers')
__import__('pkg_resources.extern.packaging.requirements')
__import__('pkg_resources.extern.packaging.markers')
-
-if (3, 0) < sys.version_info < (3, 3):
- raise RuntimeError("Python 3.3 or later is required")
-
-if six.PY2:
- # Those builtin exceptions are only defined in Python 3
- PermissionError = None
- NotADirectoryError = None
+if sys.version_info < (3, 5):
+ raise RuntimeError("Python 3.5 or later is required")
# declare some globals that will be defined later to
# satisfy the linters.
@@ -118,6 +119,11 @@ def parse_version(v):
try:
return packaging.version.Version(v)
except packaging.version.InvalidVersion:
+ warnings.warn(
+ f"{v} is an invalid version and will not be supported in "
+ "a future release",
+ PkgResourcesDeprecationWarning,
+ )
return packaging.version.LegacyVersion(v)
@@ -168,10 +174,10 @@ def get_supported_platform():
"""Return this platform's maximum compatible version.
distutils.util.get_platform() normally reports the minimum version
- of Mac OS X that would be required to *use* extensions produced by
+ of macOS that would be required to *use* extensions produced by
distutils. But what we want when checking compatibility is to know the
- version of Mac OS X that we are *running*. To allow usage of packages that
- explicitly require a newer version of Mac OS X, we must also know the
+ version of macOS that we are *running*. To allow usage of packages that
+ explicitly require a newer version of macOS, we must also know the
current version of the OS.
If this condition occurs for any other platform with a version in its
@@ -181,9 +187,9 @@ def get_supported_platform():
m = macosVersionString.match(plat)
if m is not None and sys.platform == "darwin":
try:
- plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
+ plat = 'macosx-%s-%s' % ('.'.join(_macos_vers()[:2]), m.group(3))
except ValueError:
- # not Mac OS X
+ # not macOS
pass
return plat
@@ -230,6 +236,9 @@ __all__ = [
'register_finder', 'register_namespace_handler', 'register_loader_type',
'fixup_namespace_packages', 'get_importer',
+ # Warnings
+ 'PkgResourcesDeprecationWarning',
+
# Deprecated/backward compatibility only
'run_main', 'AvailableDistributions',
]
@@ -320,7 +329,7 @@ class UnknownExtra(ResolutionError):
_provider_factories = {}
-PY_MAJOR = sys.version[:3]
+PY_MAJOR = '{}.{}'.format(*sys.version_info)
EGG_DIST = 3
BINARY_DIST = 2
SOURCE_DIST = 1
@@ -351,7 +360,7 @@ def get_provider(moduleOrReq):
return _find_adapter(_provider_factories, loader)(module)
-def _macosx_vers(_cache=[]):
+def _macos_vers(_cache=[]):
if not _cache:
version = platform.mac_ver()[0]
# fallback for MacPorts
@@ -367,7 +376,7 @@ def _macosx_vers(_cache=[]):
return _cache[0]
-def _macosx_arch(machine):
+def _macos_arch(machine):
return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)
@@ -375,22 +384,18 @@ def get_build_platform():
"""Return this platform's string for platform-specific distributions
XXX Currently this is the same as ``distutils.util.get_platform()``, but it
- needs some hacks for Linux and Mac OS X.
+ needs some hacks for Linux and macOS.
"""
- try:
- # Python 2.7 or >=3.2
- from sysconfig import get_platform
- except ImportError:
- from distutils.util import get_platform
+ from sysconfig import get_platform
plat = get_platform()
if sys.platform == "darwin" and not plat.startswith('macosx-'):
try:
- version = _macosx_vers()
+ version = _macos_vers()
machine = os.uname()[4].replace(" ", "_")
return "macosx-%d.%d-%s" % (
int(version[0]), int(version[1]),
- _macosx_arch(machine),
+ _macos_arch(machine),
)
except ValueError:
# if someone is running a non-Mac darwin system, this will fall
@@ -416,7 +421,7 @@ def compatible_platforms(provided, required):
# easy case
return True
- # Mac OS X special cases
+ # macOS special cases
reqMac = macosVersionString.match(required)
if reqMac:
provMac = macosVersionString.match(provided)
@@ -425,7 +430,7 @@ def compatible_platforms(provided, required):
if not provMac:
# this is backwards compatibility for packages built before
# setuptools 0.6. All packages built after this point will
- # use the new macosx designation.
+ # use the new macOS designation.
provDarwin = darwinVersionString.match(provided)
if provDarwin:
dversion = int(provDarwin.group(1))
@@ -433,7 +438,7 @@ def compatible_platforms(provided, required):
if dversion == 7 and macosversion >= "10.3" or \
dversion == 8 and macosversion >= "10.4":
return True
- # egg isn't macosx or legacy darwin
+ # egg isn't macOS or legacy darwin
return False
# are they the same major version and machine type?
@@ -466,7 +471,7 @@ run_main = run_script
def get_distribution(dist):
"""Return a current distribution object for a Requirement or string"""
- if isinstance(dist, six.string_types):
+ if isinstance(dist, str):
dist = Requirement.parse(dist)
if isinstance(dist, Requirement):
dist = get_provider(dist)
@@ -541,7 +546,7 @@ class IResourceProvider(IMetadataProvider):
"""List of resource names in the directory (like ``os.listdir()``)"""
-class WorkingSet(object):
+class WorkingSet:
"""A collection of active distributions on sys.path (or a similar list)"""
def __init__(self, entries=None):
@@ -641,13 +646,12 @@ class WorkingSet(object):
distributions in the working set, otherwise only ones matching
both `group` and `name` are yielded (in distribution order).
"""
- for dist in self:
- entries = dist.get_entry_map(group)
- if name is None:
- for ep in entries.values():
- yield ep
- elif name in entries:
- yield entries[name]
+ return (
+ entry
+ for dist in self
+ for entry in dist.get_entry_map(group).values()
+ if name is None or name == entry.name
+ )
def run_script(self, requires, script_name):
"""Locate distribution for `requires` and run `script_name` script"""
@@ -704,7 +708,8 @@ class WorkingSet(object):
keys2.append(dist.key)
self._added_new(dist)
- def resolve(self, requirements, env=None, installer=None,
+ # FIXME: 'WorkingSet.resolve' is too complex (11)
+ def resolve(self, requirements, env=None, installer=None, # noqa: C901
replace_conflicting=False, extras=None):
"""List all distributions needed to (recursively) meet `requirements`
@@ -948,7 +953,7 @@ class _ReqExtras(dict):
return not req.marker or any(extra_evals)
-class Environment(object):
+class Environment:
"""Searchable snapshot of distributions on a search path"""
def __init__(
@@ -963,7 +968,7 @@ class Environment(object):
`platform` is an optional string specifying the name of the platform
that platform-specific distributions must be compatible with. If
unspecified, it defaults to the current platform. `python` is an
- optional string naming the desired version of Python (e.g. ``'3.3'``);
+ optional string naming the desired version of Python (e.g. ``'3.6'``);
it defaults to the current version.
You may explicitly set `platform` (and/or `python`) to ``None`` if you
@@ -1226,12 +1231,13 @@ class ResourceManager:
mode = os.stat(path).st_mode
if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
msg = (
- "%s is writable by group/others and vulnerable to attack "
- "when "
- "used with get_resource_filename. Consider a more secure "
+ "Extraction path is writable by group/others "
+ "and vulnerable to attack when "
+ "used with get_resource_filename ({path}). "
+ "Consider a more secure "
"location (set with .set_extraction_path or the "
- "PYTHON_EGG_CACHE environment variable)." % path
- )
+ "PYTHON_EGG_CACHE environment variable)."
+ ).format(**locals())
warnings.warn(msg, UserWarning)
def postprocess(self, tempname, filename):
@@ -1369,7 +1375,7 @@ def evaluate_marker(text, extra=None):
marker = packaging.markers.Marker(text)
return marker.evaluate()
except packaging.markers.InvalidMarker as e:
- raise SyntaxError(e)
+ raise SyntaxError(e) from e
class NullProvider:
@@ -1395,14 +1401,28 @@ class NullProvider:
def has_resource(self, resource_name):
return self._has(self._fn(self.module_path, resource_name))
+ def _get_metadata_path(self, name):
+ return self._fn(self.egg_info, name)
+
def has_metadata(self, name):
- return self.egg_info and self._has(self._fn(self.egg_info, name))
+ if not self.egg_info:
+ return self.egg_info
+
+ path = self._get_metadata_path(name)
+ return self._has(path)
def get_metadata(self, name):
if not self.egg_info:
return ""
- value = self._get(self._fn(self.egg_info, name))
- return value.decode('utf-8') if six.PY3 else value
+ path = self._get_metadata_path(name)
+ value = self._get(path)
+ try:
+ return value.decode('utf-8')
+ except UnicodeDecodeError as exc:
+ # Include the path in the error message to simplify
+ # troubleshooting, and without changing the exception type.
+ exc.reason += ' in {} file at path: {}'.format(name, path)
+ raise
def get_metadata_lines(self, name):
return yield_lines(self.get_metadata(name))
@@ -1433,7 +1453,8 @@ class NullProvider:
script_filename = self._fn(self.egg_info, script)
namespace['__file__'] = script_filename
if os.path.exists(script_filename):
- source = open(script_filename).read()
+ with open(script_filename) as fid:
+ source = fid.read()
code = compile(source, script_filename, 'exec')
exec(code, namespace, namespace)
else:
@@ -1460,10 +1481,86 @@ class NullProvider:
)
def _fn(self, base, resource_name):
+ self._validate_resource_path(resource_name)
if resource_name:
return os.path.join(base, *resource_name.split('/'))
return base
+ @staticmethod
+ def _validate_resource_path(path):
+ """
+ Validate the resource paths according to the docs.
+ https://setuptools.pypa.io/en/latest/pkg_resources.html#basic-resource-access
+
+ >>> warned = getfixture('recwarn')
+ >>> warnings.simplefilter('always')
+ >>> vrp = NullProvider._validate_resource_path
+ >>> vrp('foo/bar.txt')
+ >>> bool(warned)
+ False
+ >>> vrp('../foo/bar.txt')
+ >>> bool(warned)
+ True
+ >>> warned.clear()
+ >>> vrp('/foo/bar.txt')
+ >>> bool(warned)
+ True
+ >>> vrp('foo/../../bar.txt')
+ >>> bool(warned)
+ True
+ >>> warned.clear()
+ >>> vrp('foo/f../bar.txt')
+ >>> bool(warned)
+ False
+
+ Windows path separators are straight-up disallowed.
+ >>> vrp(r'\\foo/bar.txt')
+ Traceback (most recent call last):
+ ...
+ ValueError: Use of .. or absolute path in a resource path \
+is not allowed.
+
+ >>> vrp(r'C:\\foo/bar.txt')
+ Traceback (most recent call last):
+ ...
+ ValueError: Use of .. or absolute path in a resource path \
+is not allowed.
+
+ Blank values are allowed
+
+ >>> vrp('')
+ >>> bool(warned)
+ False
+
+ Non-string values are not.
+
+ >>> vrp(None)
+ Traceback (most recent call last):
+ ...
+ AttributeError: ...
+ """
+ invalid = (
+ os.path.pardir in path.split(posixpath.sep) or
+ posixpath.isabs(path) or
+ ntpath.isabs(path)
+ )
+ if not invalid:
+ return
+
+ msg = "Use of .. or absolute path in a resource path is not allowed."
+
+ # Aggressively disallow Windows absolute paths
+ if ntpath.isabs(path) and not posixpath.isabs(path):
+ raise ValueError(msg)
+
+ # for compatibility, warn; in future
+ # raise ValueError(msg)
+ warnings.warn(
+ msg[:-1] + " and will raise exceptions in a future release.",
+ DeprecationWarning,
+ stacklevel=4,
+ )
+
def _get(self, path):
if hasattr(self.loader, 'get_data'):
return self.loader.get_data(path)
@@ -1475,26 +1572,35 @@ class NullProvider:
register_loader_type(object, NullProvider)
+def _parents(path):
+ """
+ yield all parents of path including path
+ """
+ last = None
+ while path != last:
+ yield path
+ last = path
+ path, _ = os.path.split(path)
+
+
class EggProvider(NullProvider):
"""Provider based on a virtual filesystem"""
def __init__(self, module):
- NullProvider.__init__(self, module)
+ super().__init__(module)
self._setup_prefix()
def _setup_prefix(self):
- # we assume here that our metadata may be nested inside a "basket"
- # of multiple eggs; that's why we use module_path instead of .archive
- path = self.module_path
- old = None
- while path != old:
- if _is_egg_path(path):
- self.egg_name = os.path.basename(path)
- self.egg_info = os.path.join(path, 'EGG-INFO')
- self.egg_root = path
- break
- old = path
- path, base = os.path.split(path)
+ # Assume that metadata may be nested inside a "basket"
+ # of multiple eggs and use module_path instead of .archive.
+ eggs = filter(_is_egg_path, _parents(self.module_path))
+ egg = next(eggs, None)
+ egg and self._set_egg(egg)
+
+ def _set_egg(self, path):
+ self.egg_name = os.path.basename(path)
+ self.egg_info = os.path.join(path, 'EGG-INFO')
+ self.egg_root = path
class DefaultProvider(EggProvider):
@@ -1601,7 +1707,7 @@ class ZipProvider(EggProvider):
_zip_manifests = MemoizedZipManifests()
def __init__(self, module):
- EggProvider.__init__(self, module)
+ super().__init__(module)
self.zip_pre = self.loader.archive + os.sep
def _zipinfo_name(self, fspath):
@@ -1652,7 +1758,8 @@ class ZipProvider(EggProvider):
timestamp = time.mktime(date_time)
return timestamp, size
- def _extract_resource(self, manager, zip_path):
+ # FIXME: 'ZipProvider._extract_resource' is too complex (12)
+ def _extract_resource(self, manager, zip_path): # noqa: C901
if zip_path in self._index():
for name in self._index()[zip_path]:
@@ -1784,6 +1891,9 @@ class FileMetadata(EmptyProvider):
def __init__(self, path):
self.path = path
+ def _get_metadata_path(self, name):
+ return self.path
+
def has_metadata(self, name):
return name == 'PKG-INFO' and os.path.isfile(self.path)
@@ -1797,8 +1907,7 @@ class FileMetadata(EmptyProvider):
return metadata
def _warn_on_replacement(self, metadata):
- # Python 2.7 compat for: replacement_char = '�'
- replacement_char = b'\xef\xbf\xbd'.decode('utf-8')
+ replacement_char = '�'
if replacement_char in metadata:
tmpl = "{self.path} could not be properly decoded in UTF-8"
msg = tmpl.format(**locals())
@@ -1882,13 +1991,13 @@ def find_eggs_in_zip(importer, path_item, only=False):
if only:
# don't yield nested distros
return
- for subitem in metadata.resource_listdir('/'):
+ for subitem in metadata.resource_listdir(''):
if _is_egg_path(subitem):
subpath = os.path.join(path_item, subitem)
dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath)
for dist in dists:
yield dist
- elif subitem.lower().endswith('.dist-info'):
+ elif subitem.lower().endswith(('.dist-info', '.egg-info')):
subpath = os.path.join(path_item, subitem)
submeta = EggMetadata(zipimport.zipimporter(subpath))
submeta.egg_info = subpath
@@ -1912,7 +2021,7 @@ def _by_version_descending(names):
>>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg'
>>> _by_version_descending(names)
- ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar']
+ ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'bar', 'foo']
>>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg'
>>> _by_version_descending(names)
['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg']
@@ -1920,13 +2029,22 @@ def _by_version_descending(names):
>>> _by_version_descending(names)
['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']
"""
+ def try_parse(name):
+ """
+ Attempt to parse as a version or return a null version.
+ """
+ try:
+ return packaging.version.Version(name)
+ except Exception:
+ return packaging.version.Version('0')
+
def _by_version(name):
"""
Parse each component of the filename
"""
name, ext = os.path.splitext(name)
parts = itertools.chain(name.split('-'), [ext])
- return [packaging.version.parse(part) for part in parts]
+ return [try_parse(part) for part in parts]
return sorted(names, key=_by_version, reverse=True)
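The doctest output changes because try_parse maps every non-PEP 440 component to Version('0'): names with no real version component now tie and keep their input order under Python's stable sort ('bar' before 'foo'), whereas packaging.version.parse previously produced LegacyVersion objects that ordered 'foo' ahead of 'bar'. Roughly, using the standalone packaging distribution for illustration:

    from packaging import version

    key_egg = [version.Version('0'), version.Version('2.7.10'), version.Version('0')]  # 'Python-2.7.10.egg'
    key_bar = [version.Version('0'), version.Version('0')]                             # 'bar'
    key_egg > key_bar   # True, so eggs with a real version still sort first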
@@ -1943,7 +2061,10 @@ def find_on_path(importer, path_item, only=False):
)
return
- entries = safe_listdir(path_item)
+ entries = (
+ os.path.join(path_item, child)
+ for child in safe_listdir(path_item)
+ )
# for performance, before sorting by version,
# screen entries for only those that will yield
@@ -1964,11 +2085,14 @@ def find_on_path(importer, path_item, only=False):
def dist_factory(path_item, entry, only):
- """
- Return a dist_factory for a path_item and entry
- """
+ """Return a dist_factory for the given entry."""
lower = entry.lower()
- is_meta = any(map(lower.endswith, ('.egg-info', '.dist-info')))
+ is_egg_info = lower.endswith('.egg-info')
+ is_dist_info = (
+ lower.endswith('.dist-info') and
+ os.path.isdir(os.path.join(path_item, entry))
+ )
+ is_meta = is_egg_info or is_dist_info
return (
distributions_from_metadata
if is_meta else
@@ -1990,8 +2114,6 @@ class NoDists:
"""
def __bool__(self):
return False
- if six.PY2:
- __nonzero__ = __bool__
def __call__(self, fullpath):
return iter(())
@@ -2008,12 +2130,7 @@ def safe_listdir(path):
except OSError as e:
# Ignore the directory if does not exist, not a directory or
# permission denied
- ignorable = (
- e.errno in (errno.ENOTDIR, errno.EACCES, errno.ENOENT)
- # Python 2 on Windows needs to be handled this way :(
- or getattr(e, "winerror", None) == 267
- )
- if not ignorable:
+ if e.errno not in (errno.ENOTDIR, errno.EACCES, errno.ENOENT):
raise
return ()
@@ -2091,7 +2208,18 @@ def _handle_ns(packageName, path_item):
importer = get_importer(path_item)
if importer is None:
return None
- loader = importer.find_module(packageName)
+
+ # use find_spec (PEP 451) and fall-back to find_module (PEP 302)
+ try:
+ spec = importer.find_spec(packageName)
+ except AttributeError:
+ # capture warnings due to #1111
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore")
+ loader = importer.find_module(packageName)
+ else:
+ loader = spec.loader if spec else None
+
if loader is None:
return None
module = sys.modules.get(packageName)
@@ -2106,7 +2234,7 @@ def _handle_ns(packageName, path_item):
if subpath is not None:
path = module.__path__
path.append(subpath)
- loader.load_module(packageName)
+ importlib.import_module(packageName)
_rebuild_mod_path(path, packageName, module)
return subpath
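The lookup above prefers the PEP 451 find_spec() API and falls back to the legacy PEP 302 find_module() only for importers that predate it. The same pattern in isolation (the helper name is made up):

    import warnings

    def _loader_for(importer, name):
        """Prefer PEP 451 find_spec(); fall back to legacy find_module()."""
        try:
            spec = importer.find_spec(name)
        except AttributeError:
            with warnings.catch_warnings():   # find_module() is deprecated and may warn
                warnings.simplefilter('ignore')
                return importer.find_module(name)
        return spec.loader if spec else None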
@@ -2136,12 +2264,13 @@ def _rebuild_mod_path(orig_path, package_name, module):
parts = path_parts[:-module_parts]
return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))
- if not isinstance(orig_path, list):
- # Is this behavior useful when module.__path__ is not a list?
- return
+ new_path = sorted(orig_path, key=position_in_sys_path)
+ new_path = [_normalize_cached(p) for p in new_path]
- orig_path.sort(key=position_in_sys_path)
- module.__path__[:] = [_normalize_cached(p) for p in orig_path]
+ if isinstance(module.__path__, list):
+ module.__path__[:] = new_path
+ else:
+ module.__path__ = new_path
def declare_namespace(packageName):
@@ -2152,20 +2281,21 @@ def declare_namespace(packageName):
if packageName in _namespace_packages:
return
- path, parent = sys.path, None
- if '.' in packageName:
- parent = '.'.join(packageName.split('.')[:-1])
+ path = sys.path
+ parent, _, _ = packageName.rpartition('.')
+
+ if parent:
declare_namespace(parent)
if parent not in _namespace_packages:
__import__(parent)
try:
path = sys.modules[parent].__path__
- except AttributeError:
- raise TypeError("Not a package:", parent)
+ except AttributeError as e:
+ raise TypeError("Not a package:", parent) from e
# Track what packages are namespaces, so when new path items are added,
# they can be updated
- _namespace_packages.setdefault(parent, []).append(packageName)
+ _namespace_packages.setdefault(parent or None, []).append(packageName)
_namespace_packages.setdefault(packageName, [])
for path_item in path:
@@ -2218,7 +2348,19 @@ register_namespace_handler(object, null_ns_handler)
def normalize_path(filename):
"""Normalize a file/dir name for comparison purposes"""
- return os.path.normcase(os.path.realpath(filename))
+ return os.path.normcase(os.path.realpath(os.path.normpath(
+ _cygwin_patch(filename))))
+
+
+def _cygwin_patch(filename): # pragma: nocover
+ """
+ Contrary to POSIX 2008, on Cygwin, getcwd (3) contains
+ symlink components. Using
+ os.path.abspath() works around this limitation. A fix in os.getcwd()
+ would probably be better, in Cygwin even more so, except
+ that this seems to be by design...
+ """
+ return os.path.abspath(filename) if sys.platform == 'cygwin' else filename
def _normalize_cached(filename, _cache={}):
@@ -2233,7 +2375,15 @@ def _is_egg_path(path):
"""
Determine if given path appears to be an egg.
"""
- return path.lower().endswith('.egg')
+ return _is_zip_egg(path) or _is_unpacked_egg(path)
+
+
+def _is_zip_egg(path):
+ return (
+ path.lower().endswith('.egg') and
+ os.path.isfile(path) and
+ zipfile.is_zipfile(path)
+ )
def _is_unpacked_egg(path):
@@ -2241,7 +2391,7 @@ def _is_unpacked_egg(path):
Determine if given path appears to be an unpacked egg.
"""
return (
- _is_egg_path(path) and
+ path.lower().endswith('.egg') and
os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO'))
)
@@ -2254,20 +2404,6 @@ def _set_parent_ns(packageName):
setattr(sys.modules[parent], name, sys.modules[packageName])
-def yield_lines(strs):
- """Yield non-empty/non-comment lines of a string or sequence"""
- if isinstance(strs, six.string_types):
- for s in strs.splitlines():
- s = s.strip()
- # skip blank lines/comments
- if s and not s.startswith('#'):
- yield s
- else:
- for ss in strs:
- for s in yield_lines(ss):
- yield s
-
-
MODULE = re.compile(r"\w+(\.\w+)*$").match
EGG_NAME = re.compile(
r"""
@@ -2283,7 +2419,7 @@ EGG_NAME = re.compile(
).match
-class EntryPoint(object):
+class EntryPoint:
"""Object representing an advertised importable object"""
def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
@@ -2314,7 +2450,7 @@ class EntryPoint(object):
warnings.warn(
"Parameters to load are deprecated. Call .resolve and "
".require separately.",
- DeprecationWarning,
+ PkgResourcesDeprecationWarning,
stacklevel=2,
)
if require:
@@ -2329,7 +2465,7 @@ class EntryPoint(object):
try:
return functools.reduce(getattr, self.attrs, module)
except AttributeError as exc:
- raise ImportError(str(exc))
+ raise ImportError(str(exc)) from exc
def require(self, env=None, installer=None):
if self.extras and not self.dist:
@@ -2415,15 +2551,6 @@ class EntryPoint(object):
return maps
-def _remove_md5_fragment(location):
- if not location:
- return ''
- parsed = urllib.parse.urlparse(location)
- if parsed[-1].startswith('md5='):
- return urllib.parse.urlunparse(parsed[:-1] + ('',))
- return location
-
-
def _version_from_file(lines):
"""
Given an iterable of lines from a Metadata file, return
@@ -2437,7 +2564,7 @@ def _version_from_file(lines):
return safe_version(value.strip()) or None
-class Distribution(object):
+class Distribution:
"""Wrap an actual or potential sys.path entry w/metadata"""
PKG_INFO = 'PKG-INFO'
@@ -2480,7 +2607,7 @@ class Distribution(object):
self.parsed_version,
self.precedence,
self.key,
- _remove_md5_fragment(self.location),
+ self.location,
self.py_version or '',
self.platform or '',
)
@@ -2558,11 +2685,15 @@ class Distribution(object):
def version(self):
try:
return self._version
- except AttributeError:
- version = _version_from_file(self._get_metadata(self.PKG_INFO))
+ except AttributeError as e:
+ version = self._get_version()
if version is None:
- tmpl = "Missing 'Version:' header and/or %s file"
- raise ValueError(tmpl % self.PKG_INFO, self)
+ path = self._get_metadata_path_for_display(self.PKG_INFO)
+ msg = (
+ "Missing 'Version:' header and/or {} file at path: {}"
+ ).format(self.PKG_INFO, path)
+ raise ValueError(msg, self) from e
+
return version
@property
@@ -2614,17 +2745,40 @@ class Distribution(object):
for ext in extras:
try:
deps.extend(dm[safe_extra(ext)])
- except KeyError:
+ except KeyError as e:
raise UnknownExtra(
"%s has no such extra feature %r" % (self, ext)
- )
+ ) from e
return deps
+ def _get_metadata_path_for_display(self, name):
+ """
+ Return the path to the given metadata file, if available.
+ """
+ try:
+ # We need to access _get_metadata_path() on the provider object
+ # directly rather than through this class's __getattr__()
+ # since _get_metadata_path() is marked private.
+ path = self._provider._get_metadata_path(name)
+
+ # Handle exceptions e.g. in case the distribution's metadata
+ # provider doesn't support _get_metadata_path().
+ except Exception:
+ return '[could not detect]'
+
+ return path
+
def _get_metadata(self, name):
if self.has_metadata(name):
for line in self.get_metadata_lines(name):
yield line
+ def _get_version(self):
+ lines = self._get_metadata(self.PKG_INFO)
+ version = _version_from_file(lines)
+
+ return version
+
def activate(self, path=None, replace=False):
"""Ensure distribution is importable on `path` (default=sys.path)"""
if path is None:
@@ -2667,6 +2821,15 @@ class Distribution(object):
raise AttributeError(attr)
return getattr(self._provider, attr)
+ def __dir__(self):
+ return list(
+ set(super(Distribution, self).__dir__())
+ | set(
+ attr for attr in self._provider.__dir__()
+ if not attr.startswith('_')
+ )
+ )
+
@classmethod
def from_filename(cls, filename, metadata=None, **kw):
return cls.from_location(
@@ -2706,7 +2869,8 @@ class Distribution(object):
"""Return the EntryPoint object for `group`+`name`, or ``None``"""
return self.get_entry_map(group).get(name)
- def insert_on(self, path, loc=None, replace=False):
+ # FIXME: 'Distribution.insert_on' is too complex (13)
+ def insert_on(self, path, loc=None, replace=False): # noqa: C901
"""Ensure self.location is on path
If replace=False (default):
@@ -2830,7 +2994,7 @@ class EggInfoDistribution(Distribution):
take an extra step and try to get the version number from
the metadata file itself instead of the filename.
"""
- md_version = _version_from_file(self._get_metadata(self.PKG_INFO))
+ md_version = self._get_version()
if md_version:
self._version = md_version
return self
@@ -2876,12 +3040,12 @@ class DistInfoDistribution(Distribution):
if not req.marker or req.marker.evaluate({'extra': extra}):
yield req
- common = frozenset(reqs_for_extra(None))
+ common = types.MappingProxyType(dict.fromkeys(reqs_for_extra(None)))
dm[None].extend(common)
for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
s_extra = safe_extra(extra.strip())
- dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common)
+ dm[s_extra] = [r for r in reqs_for_extra(extra) if r not in common]
return dm
@@ -2906,40 +3070,23 @@ def issue_warning(*args, **kw):
warnings.warn(stacklevel=level + 1, *args, **kw)
-class RequirementParseError(ValueError):
- def __str__(self):
- return ' '.join(self.args)
-
-
def parse_requirements(strs):
- """Yield ``Requirement`` objects for each specification in `strs`
+ """
+ Yield ``Requirement`` objects for each specification in `strs`.
`strs` must be a string, or a (possibly-nested) iterable thereof.
"""
- # create a steppable iterator, so we can handle \-continuations
- lines = iter(yield_lines(strs))
+ return map(Requirement, join_continuation(map(drop_comment, yield_lines(strs))))
- for line in lines:
- # Drop comments -- a hash without a space may be in a URL.
- if ' #' in line:
- line = line[:line.find(' #')]
- # If there is a line continuation, drop it, and append the next line.
- if line.endswith('\\'):
- line = line[:-2].strip()
- try:
- line += next(lines)
- except StopIteration:
- return
- yield Requirement(line)
+
+class RequirementParseError(packaging.requirements.InvalidRequirement):
+ "Compatibility wrapper for InvalidRequirement"
class Requirement(packaging.requirements.Requirement):
def __init__(self, requirement_string):
"""DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
- try:
- super(Requirement, self).__init__(requirement_string)
- except packaging.requirements.InvalidRequirement as e:
- raise RequirementParseError(str(e))
+ super(Requirement, self).__init__(requirement_string)
self.unsafe_name = self.name
project_name = safe_name(self.name)
self.project_name, self.key = project_name, project_name.lower()
@@ -2948,6 +3095,7 @@ class Requirement(packaging.requirements.Requirement):
self.extras = tuple(map(safe_extra, self.extras))
self.hashCmp = (
self.key,
+ self.url,
self.specifier,
frozenset(self.extras),
str(self.marker) if self.marker else None,
@@ -3008,7 +3156,7 @@ def _find_adapter(registry, ob):
def ensure_directory(path):
"""Ensure that the parent directory of `path` exists"""
dirname = os.path.dirname(path)
- py31compat.makedirs(dirname, exist_ok=True)
+ os.makedirs(dirname, exist_ok=True)
def _bypass_ensure_directory(path):
@@ -3018,7 +3166,10 @@ def _bypass_ensure_directory(path):
dirname, filename = split(path)
if dirname and filename and not isdir(dirname):
_bypass_ensure_directory(dirname)
- mkdir(dirname, 0o755)
+ try:
+ mkdir(dirname, 0o755)
+ except FileExistsError:
+ pass
def split_sections(s):
@@ -3083,6 +3234,15 @@ def _initialize(g=globals()):
)
+class PkgResourcesDeprecationWarning(Warning):
+ """
+ Base class for warning about deprecations in ``pkg_resources``
+
+ This class is not derived from ``DeprecationWarning``, and as such is
+ visible by default.
+ """
+
+
@_call_aside
def _initialize_master_working_set():
"""
diff --git a/pkg_resources/_vendor/appdirs-1.4.3.dist-info/top_level.txt b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/top_level.txt
new file mode 100644
index 0000000..d64bc32
--- /dev/null
+++ b/pkg_resources/_vendor/appdirs-1.4.3.dist-info/top_level.txt
@@ -0,0 +1 @@
+appdirs
diff --git a/pkg_resources/_vendor/appdirs.py b/pkg_resources/_vendor/appdirs.py
index f4dba09..ae67001 100644
--- a/pkg_resources/_vendor/appdirs.py
+++ b/pkg_resources/_vendor/appdirs.py
@@ -13,7 +13,7 @@ See <http://github.com/ActiveState/appdirs> for details and usage.
# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
-__version_info__ = (1, 4, 0)
+__version_info__ = (1, 4, 3)
__version__ = '.'.join(map(str, __version_info__))
@@ -98,7 +98,7 @@ def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
- """Return full path to the user-shared data dir for this application.
+ r"""Return full path to the user-shared data dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
@@ -117,7 +117,7 @@ def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
returned, or '/usr/local/share/<AppName>',
if XDG_DATA_DIRS is not set
- Typical user data directories are:
+ Typical site data directories are:
Mac OS X: /Library/Application Support/<AppName>
Unix: /usr/local/share/<AppName> or /usr/share/<AppName>
Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
@@ -184,13 +184,13 @@ def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
for a discussion of issues.
- Typical user data directories are:
+ Typical user config directories are:
Mac OS X: same as user_data_dir
Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined
Win *: same as user_data_dir
For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
- That means, by deafult "~/.config/<AppName>".
+ That means, by default "~/.config/<AppName>".
"""
if system in ["win32", "darwin"]:
path = user_data_dir(appname, appauthor, None, roaming)
@@ -204,7 +204,7 @@ def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
- """Return full path to the user-shared data dir for this application.
+ r"""Return full path to the user-shared data dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
@@ -222,7 +222,7 @@ def site_config_dir(appname=None, appauthor=None, version=None, multipath=False)
returned. By default, the first item from XDG_CONFIG_DIRS is
returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set
- Typical user data directories are:
+ Typical site config directories are:
Mac OS X: same as site_data_dir
Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
$XDG_CONFIG_DIRS
@@ -311,6 +311,48 @@ def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
return path
+def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
+ r"""Return full path to the user-specific state dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "roaming" (boolean, default False) can be set True to use the Windows
+ roaming appdata directory. That means that for users on a Windows
+ network setup for roaming profiles, this user data will be
+ sync'd on login. See
+ <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
+ for a discussion of issues.
+
+ Typical user state directories are:
+ Mac OS X: same as user_data_dir
+ Unix: ~/.local/state/<AppName> # or in $XDG_STATE_HOME, if defined
+ Win *: same as user_data_dir
+
+ For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state>
+ to extend the XDG spec and support $XDG_STATE_HOME.
+
+ That means, by default "~/.local/state/<AppName>".
+ """
+ if system in ["win32", "darwin"]:
+ path = user_data_dir(appname, appauthor, None, roaming)
+ else:
+ path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state"))
+ if appname:
+ path = os.path.join(path, appname)
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
r"""Return full path to the user-specific log dir for this application.
@@ -329,7 +371,7 @@ def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
"Logs" to the base app data dir for Windows, and "log" to the
base cache dir for Unix. See discussion below.
- Typical user cache directories are:
+ Typical user log directories are:
Mac OS X: ~/Library/Logs/<AppName>
Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if defined
Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
@@ -364,8 +406,8 @@ def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
class AppDirs(object):
"""Convenience wrapper for getting application dirs."""
- def __init__(self, appname, appauthor=None, version=None, roaming=False,
- multipath=False):
+ def __init__(self, appname=None, appauthor=None, version=None,
+ roaming=False, multipath=False):
self.appname = appname
self.appauthor = appauthor
self.version = version
@@ -398,6 +440,11 @@ class AppDirs(object):
version=self.version)
@property
+ def user_state_dir(self):
+ return user_state_dir(self.appname, self.appauthor,
+ version=self.version)
+
+ @property
def user_log_dir(self):
return user_log_dir(self.appname, self.appauthor,
version=self.version)
@@ -410,7 +457,10 @@ def _get_win_folder_from_registry(csidl_name):
registry for this guarantees us the correct answer for all CSIDL_*
names.
"""
- import _winreg
+ if PY3:
+ import winreg as _winreg
+ else:
+ import _winreg
shell_folder_name = {
"CSIDL_APPDATA": "AppData",
@@ -500,7 +550,7 @@ def _get_win_folder_with_jna(csidl_name):
if has_high_char:
buf = array.zeros('c', buf_size)
kernel = win32.Kernel32.INSTANCE
- if kernal.GetShortPathName(dir, buf, buf_size):
+ if kernel.GetShortPathName(dir, buf, buf_size):
dir = jna.Native.toString(buf.tostring()).rstrip("\0")
return dir
@@ -527,9 +577,15 @@ if __name__ == "__main__":
appname = "MyApp"
appauthor = "MyCompany"
- props = ("user_data_dir", "site_data_dir",
- "user_config_dir", "site_config_dir",
- "user_cache_dir", "user_log_dir")
+ props = ("user_data_dir",
+ "user_config_dir",
+ "user_cache_dir",
+ "user_state_dir",
+ "user_log_dir",
+ "site_data_dir",
+ "site_config_dir")
+
+ print("-- app dirs %s --" % __version__)
print("-- app dirs (with optional 'version')")
dirs = AppDirs(appname, appauthor, version="1.0")
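For reference, typical use of the newly vendored user_state_dir (outputs shown are examples for a Linux user):

    import appdirs   # vendored here as pkg_resources._vendor.appdirs

    appdirs.user_state_dir("MyApp")
    # e.g. '/home/alice/.local/state/MyApp' (or under $XDG_STATE_HOME if set)

    dirs = appdirs.AppDirs("MyApp", "MyCompany", version="1.0")
    dirs.user_state_dir
    # e.g. '/home/alice/.local/state/MyApp/1.0'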
diff --git a/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/top_level.txt b/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/top_level.txt
new file mode 100644
index 0000000..58ad1bd
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+importlib_resources
diff --git a/pkg_resources/_vendor/importlib_resources/__init__.py b/pkg_resources/_vendor/importlib_resources/__init__.py
new file mode 100644
index 0000000..34e3a99
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/__init__.py
@@ -0,0 +1,36 @@
+"""Read resources contained within a package."""
+
+from ._common import (
+ as_file,
+ files,
+ Package,
+)
+
+from ._legacy import (
+ contents,
+ open_binary,
+ read_binary,
+ open_text,
+ read_text,
+ is_resource,
+ path,
+ Resource,
+)
+
+from .abc import ResourceReader
+
+
+__all__ = [
+ 'Package',
+ 'Resource',
+ 'ResourceReader',
+ 'as_file',
+ 'contents',
+ 'files',
+ 'is_resource',
+ 'open_binary',
+ 'open_text',
+ 'path',
+ 'read_binary',
+ 'read_text',
+]
diff --git a/pkg_resources/_vendor/importlib_resources/_adapters.py b/pkg_resources/_vendor/importlib_resources/_adapters.py
new file mode 100644
index 0000000..ea363d8
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/_adapters.py
@@ -0,0 +1,170 @@
+from contextlib import suppress
+from io import TextIOWrapper
+
+from . import abc
+
+
+class SpecLoaderAdapter:
+ """
+ Adapt a package spec to adapt the underlying loader.
+ """
+
+ def __init__(self, spec, adapter=lambda spec: spec.loader):
+ self.spec = spec
+ self.loader = adapter(spec)
+
+ def __getattr__(self, name):
+ return getattr(self.spec, name)
+
+
+class TraversableResourcesLoader:
+ """
+ Adapt a loader to provide TraversableResources.
+ """
+
+ def __init__(self, spec):
+ self.spec = spec
+
+ def get_resource_reader(self, name):
+ return CompatibilityFiles(self.spec)._native()
+
+
+def _io_wrapper(file, mode='r', *args, **kwargs):
+ if mode == 'r':
+ return TextIOWrapper(file, *args, **kwargs)
+ elif mode == 'rb':
+ return file
+ raise ValueError(
+ "Invalid mode value '{}', only 'r' and 'rb' are supported".format(mode)
+ )
+
+
+class CompatibilityFiles:
+ """
+ Adapter for an existing or non-existent resource reader
+ to provide a compatibility .files().
+ """
+
+ class SpecPath(abc.Traversable):
+ """
+ Path tied to a module spec.
+ Can be read and exposes the resource reader children.
+ """
+
+ def __init__(self, spec, reader):
+ self._spec = spec
+ self._reader = reader
+
+ def iterdir(self):
+ if not self._reader:
+ return iter(())
+ return iter(
+ CompatibilityFiles.ChildPath(self._reader, path)
+ for path in self._reader.contents()
+ )
+
+ def is_file(self):
+ return False
+
+ is_dir = is_file
+
+ def joinpath(self, other):
+ if not self._reader:
+ return CompatibilityFiles.OrphanPath(other)
+ return CompatibilityFiles.ChildPath(self._reader, other)
+
+ @property
+ def name(self):
+ return self._spec.name
+
+ def open(self, mode='r', *args, **kwargs):
+ return _io_wrapper(self._reader.open_resource(None), mode, *args, **kwargs)
+
+ class ChildPath(abc.Traversable):
+ """
+ Path tied to a resource reader child.
+ Can be read but doesn't expose any meaningful children.
+ """
+
+ def __init__(self, reader, name):
+ self._reader = reader
+ self._name = name
+
+ def iterdir(self):
+ return iter(())
+
+ def is_file(self):
+ return self._reader.is_resource(self.name)
+
+ def is_dir(self):
+ return not self.is_file()
+
+ def joinpath(self, other):
+ return CompatibilityFiles.OrphanPath(self.name, other)
+
+ @property
+ def name(self):
+ return self._name
+
+ def open(self, mode='r', *args, **kwargs):
+ return _io_wrapper(
+ self._reader.open_resource(self.name), mode, *args, **kwargs
+ )
+
+ class OrphanPath(abc.Traversable):
+ """
+ Orphan path, not tied to a module spec or resource reader.
+ Can't be read and doesn't expose any meaningful children.
+ """
+
+ def __init__(self, *path_parts):
+ if len(path_parts) < 1:
+ raise ValueError('Need at least one path part to construct a path')
+ self._path = path_parts
+
+ def iterdir(self):
+ return iter(())
+
+ def is_file(self):
+ return False
+
+ is_dir = is_file
+
+ def joinpath(self, other):
+ return CompatibilityFiles.OrphanPath(*self._path, other)
+
+ @property
+ def name(self):
+ return self._path[-1]
+
+ def open(self, mode='r', *args, **kwargs):
+ raise FileNotFoundError("Can't open orphan path")
+
+ def __init__(self, spec):
+ self.spec = spec
+
+ @property
+ def _reader(self):
+ with suppress(AttributeError):
+ return self.spec.loader.get_resource_reader(self.spec.name)
+
+ def _native(self):
+ """
+ Return the native reader if it supports files().
+ """
+ reader = self._reader
+ return reader if hasattr(reader, 'files') else self
+
+ def __getattr__(self, attr):
+ return getattr(self._reader, attr)
+
+ def files(self):
+ return CompatibilityFiles.SpecPath(self.spec, self._reader)
+
+
+def wrap_spec(package):
+ """
+ Construct a package spec with traversable compatibility
+ on the spec/loader/reader.
+ """
+ return SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
diff --git a/pkg_resources/_vendor/importlib_resources/_common.py b/pkg_resources/_vendor/importlib_resources/_common.py
new file mode 100644
index 0000000..a12e2c7
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/_common.py
@@ -0,0 +1,104 @@
+import os
+import pathlib
+import tempfile
+import functools
+import contextlib
+import types
+import importlib
+
+from typing import Union, Optional
+from .abc import ResourceReader, Traversable
+
+from ._compat import wrap_spec
+
+Package = Union[types.ModuleType, str]
+
+
+def files(package):
+ # type: (Package) -> Traversable
+ """
+ Get a Traversable resource from a package
+ """
+ return from_package(get_package(package))
+
+
+def get_resource_reader(package):
+ # type: (types.ModuleType) -> Optional[ResourceReader]
+ """
+ Return the package's loader if it's a ResourceReader.
+ """
+ # We can't use
+ # an issubclass() check here because apparently abc.'s __subclasscheck__()
+ # hook wants to create a weak reference to the object, but
+ # zipimport.zipimporter does not support weak references, resulting in a
+ # TypeError. That seems terrible.
+ spec = package.__spec__
+ reader = getattr(spec.loader, 'get_resource_reader', None) # type: ignore
+ if reader is None:
+ return None
+ return reader(spec.name) # type: ignore
+
+
+def resolve(cand):
+ # type: (Package) -> types.ModuleType
+ return cand if isinstance(cand, types.ModuleType) else importlib.import_module(cand)
+
+
+def get_package(package):
+ # type: (Package) -> types.ModuleType
+ """Take a package name or module object and return the module.
+
+ Raise an exception if the resolved module is not a package.
+ """
+ resolved = resolve(package)
+ if wrap_spec(resolved).submodule_search_locations is None:
+ raise TypeError(f'{package!r} is not a package')
+ return resolved
+
+
+def from_package(package):
+ """
+ Return a Traversable object for the given package.
+
+ """
+ spec = wrap_spec(package)
+ reader = spec.loader.get_resource_reader(spec.name)
+ return reader.files()
+
+
+@contextlib.contextmanager
+def _tempfile(reader, suffix=''):
+ # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
+ # blocks due to the need to close the temporary file to work on Windows
+ # properly.
+ fd, raw_path = tempfile.mkstemp(suffix=suffix)
+ try:
+ try:
+ os.write(fd, reader())
+ finally:
+ os.close(fd)
+ del reader
+ yield pathlib.Path(raw_path)
+ finally:
+ try:
+ os.remove(raw_path)
+ except FileNotFoundError:
+ pass
+
+
+@functools.singledispatch
+def as_file(path):
+ """
+ Given a Traversable object, return that object as a
+ path on the local file system in a context manager.
+ """
+ return _tempfile(path.read_bytes, suffix=path.name)
+
+
+@as_file.register(pathlib.Path)
+@contextlib.contextmanager
+def _(path):
+ """
+ Degenerate behavior for pathlib.Path objects.
+ """
+ yield path
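A short sketch of the files()/as_file() API this vendored copy provides ('mypkg' and 'config.json' are hypothetical):

    import importlib_resources as resources   # vendored under pkg_resources._vendor

    cfg = resources.files('mypkg') / 'config.json'
    text = cfg.read_text(encoding='utf-8')

    # as_file() yields a real filesystem path, extracting to a temporary
    # file when the package lives inside a zip archive.
    with resources.as_file(cfg) as path:
        print(path)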
diff --git a/pkg_resources/_vendor/importlib_resources/_compat.py b/pkg_resources/_vendor/importlib_resources/_compat.py
new file mode 100644
index 0000000..cb9fc82
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/_compat.py
@@ -0,0 +1,98 @@
+# flake8: noqa
+
+import abc
+import sys
+import pathlib
+from contextlib import suppress
+
+if sys.version_info >= (3, 10):
+ from zipfile import Path as ZipPath # type: ignore
+else:
+ from ..zipp import Path as ZipPath # type: ignore
+
+
+try:
+ from typing import runtime_checkable # type: ignore
+except ImportError:
+
+ def runtime_checkable(cls): # type: ignore
+ return cls
+
+
+try:
+ from typing import Protocol # type: ignore
+except ImportError:
+ Protocol = abc.ABC # type: ignore
+
+
+class TraversableResourcesLoader:
+ """
+ Adapt loaders to provide TraversableResources and other
+ compatibility.
+
+ Used primarily for Python 3.9 and earlier where the native
+ loaders do not yet implement TraversableResources.
+ """
+
+ def __init__(self, spec):
+ self.spec = spec
+
+ @property
+ def path(self):
+ return self.spec.origin
+
+ def get_resource_reader(self, name):
+ from . import readers, _adapters
+
+ def _zip_reader(spec):
+ with suppress(AttributeError):
+ return readers.ZipReader(spec.loader, spec.name)
+
+ def _namespace_reader(spec):
+ with suppress(AttributeError, ValueError):
+ return readers.NamespaceReader(spec.submodule_search_locations)
+
+ def _available_reader(spec):
+ with suppress(AttributeError):
+ return spec.loader.get_resource_reader(spec.name)
+
+ def _native_reader(spec):
+ reader = _available_reader(spec)
+ return reader if hasattr(reader, 'files') else None
+
+ def _file_reader(spec):
+ try:
+ path = pathlib.Path(self.path)
+ except TypeError:
+ return None
+ if path.exists():
+ return readers.FileReader(self)
+
+ return (
+ # native reader if it supplies 'files'
+ _native_reader(self.spec)
+ or
+ # local ZipReader if a zip module
+ _zip_reader(self.spec)
+ or
+ # local NamespaceReader if a namespace module
+ _namespace_reader(self.spec)
+ or
+ # local FileReader
+ _file_reader(self.spec)
+ # fallback - adapt the spec ResourceReader to TraversableReader
+ or _adapters.CompatibilityFiles(self.spec)
+ )
+
+
+def wrap_spec(package):
+ """
+ Construct a package spec with traversable compatibility
+ on the spec/loader/reader.
+
+ Supersedes _adapters.wrap_spec to use TraversableResourcesLoader
+ from above for older Python compatibility (<3.10).
+ """
+ from . import _adapters
+
+ return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
diff --git a/pkg_resources/_vendor/importlib_resources/_itertools.py b/pkg_resources/_vendor/importlib_resources/_itertools.py
new file mode 100644
index 0000000..cce0558
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/_itertools.py
@@ -0,0 +1,35 @@
+from itertools import filterfalse
+
+from typing import (
+ Callable,
+ Iterable,
+ Iterator,
+ Optional,
+ Set,
+ TypeVar,
+ Union,
+)
+
+# Type and type variable definitions
+_T = TypeVar('_T')
+_U = TypeVar('_U')
+
+
+def unique_everseen(
+ iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = None
+) -> Iterator[_T]:
+ "List unique elements, preserving order. Remember all elements ever seen."
+ # unique_everseen('AAAABBBCCDAABBB') --> A B C D
+ # unique_everseen('ABBCcAD', str.lower) --> A B C D
+ seen: Set[Union[_T, _U]] = set()
+ seen_add = seen.add
+ if key is None:
+ for element in filterfalse(seen.__contains__, iterable):
+ seen_add(element)
+ yield element
+ else:
+ for element in iterable:
+ k = key(element)
+ if k not in seen:
+ seen_add(k)
+ yield element
diff --git a/pkg_resources/_vendor/importlib_resources/_legacy.py b/pkg_resources/_vendor/importlib_resources/_legacy.py
new file mode 100644
index 0000000..1d5d3f1
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/_legacy.py
@@ -0,0 +1,121 @@
+import functools
+import os
+import pathlib
+import types
+import warnings
+
+from typing import Union, Iterable, ContextManager, BinaryIO, TextIO, Any
+
+from . import _common
+
+Package = Union[types.ModuleType, str]
+Resource = str
+
+
+def deprecated(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ warnings.warn(
+ f"{func.__name__} is deprecated. Use files() instead. "
+ "Refer to https://importlib-resources.readthedocs.io"
+ "/en/latest/using.html#migrating-from-legacy for migration advice.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return func(*args, **kwargs)
+
+ return wrapper
+
+
+def normalize_path(path):
+ # type: (Any) -> str
+ """Normalize a path by ensuring it is a string.
+
+ If the resulting string contains path separators, an exception is raised.
+ """
+ str_path = str(path)
+ parent, file_name = os.path.split(str_path)
+ if parent:
+ raise ValueError(f'{path!r} must be only a file name')
+ return file_name
+
+
+@deprecated
+def open_binary(package: Package, resource: Resource) -> BinaryIO:
+ """Return a file-like object opened for binary reading of the resource."""
+ return (_common.files(package) / normalize_path(resource)).open('rb')
+
+
+@deprecated
+def read_binary(package: Package, resource: Resource) -> bytes:
+ """Return the binary contents of the resource."""
+ return (_common.files(package) / normalize_path(resource)).read_bytes()
+
+
+@deprecated
+def open_text(
+ package: Package,
+ resource: Resource,
+ encoding: str = 'utf-8',
+ errors: str = 'strict',
+) -> TextIO:
+ """Return a file-like object opened for text reading of the resource."""
+ return (_common.files(package) / normalize_path(resource)).open(
+ 'r', encoding=encoding, errors=errors
+ )
+
+
+@deprecated
+def read_text(
+ package: Package,
+ resource: Resource,
+ encoding: str = 'utf-8',
+ errors: str = 'strict',
+) -> str:
+ """Return the decoded string of the resource.
+
+ The decoding-related arguments have the same semantics as those of
+ bytes.decode().
+ """
+ with open_text(package, resource, encoding, errors) as fp:
+ return fp.read()
+
+
+@deprecated
+def contents(package: Package) -> Iterable[str]:
+ """Return an iterable of entries in `package`.
+
+ Note that not all entries are resources. Specifically, directories are
+ not considered resources. Use `is_resource()` on each entry returned here
+ to check if it is a resource or not.
+ """
+ return [path.name for path in _common.files(package).iterdir()]
+
+
+@deprecated
+def is_resource(package: Package, name: str) -> bool:
+ """True if `name` is a resource inside `package`.
+
+ Directories are *not* resources.
+ """
+ resource = normalize_path(name)
+ return any(
+ traversable.name == resource and traversable.is_file()
+ for traversable in _common.files(package).iterdir()
+ )
+
+
+@deprecated
+def path(
+ package: Package,
+ resource: Resource,
+) -> ContextManager[pathlib.Path]:
+ """A context manager providing a file path object to the resource.
+
+ If the resource does not already exist on its own on the file system,
+ a temporary file will be created. If the file was created, the file
+ will be deleted upon exiting the context manager (no exception is
+ raised if the file was deleted prior to the context manager
+ exiting).
+ """
+ return _common.as_file(_common.files(package) / normalize_path(resource))
diff --git a/pkg_resources/_vendor/importlib_resources/abc.py b/pkg_resources/_vendor/importlib_resources/abc.py
new file mode 100644
index 0000000..d39dc1a
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/abc.py
@@ -0,0 +1,137 @@
+import abc
+from typing import BinaryIO, Iterable, Text
+
+from ._compat import runtime_checkable, Protocol
+
+
+class ResourceReader(metaclass=abc.ABCMeta):
+ """Abstract base class for loaders to provide resource reading support."""
+
+ @abc.abstractmethod
+ def open_resource(self, resource: Text) -> BinaryIO:
+ """Return an opened, file-like object for binary reading.
+
+ The 'resource' argument is expected to represent only a file name.
+ If the resource cannot be found, FileNotFoundError is raised.
+ """
+ # This deliberately raises FileNotFoundError instead of
+ # NotImplementedError so that if this method is accidentally called,
+ # it'll still do the right thing.
+ raise FileNotFoundError
+
+ @abc.abstractmethod
+ def resource_path(self, resource: Text) -> Text:
+ """Return the file system path to the specified resource.
+
+ The 'resource' argument is expected to represent only a file name.
+ If the resource does not exist on the file system, raise
+ FileNotFoundError.
+ """
+ # This deliberately raises FileNotFoundError instead of
+ # NotImplementedError so that if this method is accidentally called,
+ # it'll still do the right thing.
+ raise FileNotFoundError
+
+ @abc.abstractmethod
+ def is_resource(self, path: Text) -> bool:
+ """Return True if the named 'path' is a resource.
+
+ Files are resources, directories are not.
+ """
+ raise FileNotFoundError
+
+ @abc.abstractmethod
+ def contents(self) -> Iterable[str]:
+ """Return an iterable of entries in `package`."""
+ raise FileNotFoundError
+
+
+@runtime_checkable
+class Traversable(Protocol):
+ """
+ An object with a subset of pathlib.Path methods suitable for
+ traversing directories and opening files.
+ """
+
+ @abc.abstractmethod
+ def iterdir(self):
+ """
+ Yield Traversable objects in self
+ """
+
+ def read_bytes(self):
+ """
+ Read contents of self as bytes
+ """
+ with self.open('rb') as strm:
+ return strm.read()
+
+ def read_text(self, encoding=None):
+ """
+ Read contents of self as text
+ """
+ with self.open(encoding=encoding) as strm:
+ return strm.read()
+
+ @abc.abstractmethod
+ def is_dir(self) -> bool:
+ """
+ Return True if self is a directory
+ """
+
+ @abc.abstractmethod
+ def is_file(self) -> bool:
+ """
+ Return True if self is a file
+ """
+
+ @abc.abstractmethod
+ def joinpath(self, child):
+ """
+ Return Traversable child in self
+ """
+
+ def __truediv__(self, child):
+ """
+ Return Traversable child in self
+ """
+ return self.joinpath(child)
+
+ @abc.abstractmethod
+ def open(self, mode='r', *args, **kwargs):
+ """
+ mode may be 'r' or 'rb' to open as text or binary. Return a handle
+ suitable for reading (same as pathlib.Path.open).
+
+ When opening as text, accepts encoding parameters such as those
+ accepted by io.TextIOWrapper.
+ """
+
+ @abc.abstractproperty
+ def name(self) -> str:
+ """
+ The base name of this object without any parent references.
+ """
+
+
+class TraversableResources(ResourceReader):
+ """
+ The required interface for providing traversable
+ resources.
+ """
+
+ @abc.abstractmethod
+ def files(self):
+ """Return a Traversable object for the loaded package."""
+
+ def open_resource(self, resource):
+ return self.files().joinpath(resource).open('rb')
+
+ def resource_path(self, resource):
+ raise FileNotFoundError(resource)
+
+ def is_resource(self, path):
+ return self.files().joinpath(path).is_file()
+
+ def contents(self):
+ return (item.name for item in self.files().iterdir())
diff --git a/pkg_resources/_vendor/importlib_resources/readers.py b/pkg_resources/_vendor/importlib_resources/readers.py
new file mode 100644
index 0000000..f1190ca
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/readers.py
@@ -0,0 +1,122 @@
+import collections
+import pathlib
+import operator
+
+from . import abc
+
+from ._itertools import unique_everseen
+from ._compat import ZipPath
+
+
+def remove_duplicates(items):
+ return iter(collections.OrderedDict.fromkeys(items))
+
+
+class FileReader(abc.TraversableResources):
+ def __init__(self, loader):
+ self.path = pathlib.Path(loader.path).parent
+
+ def resource_path(self, resource):
+ """
+ Return the file system path to prevent
+ `resources.path()` from creating a temporary
+ copy.
+ """
+ return str(self.path.joinpath(resource))
+
+ def files(self):
+ return self.path
+
+
+class ZipReader(abc.TraversableResources):
+ def __init__(self, loader, module):
+ _, _, name = module.rpartition('.')
+ self.prefix = loader.prefix.replace('\\', '/') + name + '/'
+ self.archive = loader.archive
+
+ def open_resource(self, resource):
+ try:
+ return super().open_resource(resource)
+ except KeyError as exc:
+ raise FileNotFoundError(exc.args[0])
+
+ def is_resource(self, path):
+ # workaround for `zipfile.Path.is_file` returning true
+ # for non-existent paths.
+ target = self.files().joinpath(path)
+ return target.is_file() and target.exists()
+
+ def files(self):
+ return ZipPath(self.archive, self.prefix)
+
+
+class MultiplexedPath(abc.Traversable):
+ """
+ Given a series of Traversable objects, implement a merged
+ version of the interface across all objects. Useful for
+ namespace packages which may be multihomed at a single
+ name.
+ """
+
+ def __init__(self, *paths):
+ self._paths = list(map(pathlib.Path, remove_duplicates(paths)))
+ if not self._paths:
+ message = 'MultiplexedPath must contain at least one path'
+ raise FileNotFoundError(message)
+ if not all(path.is_dir() for path in self._paths):
+ raise NotADirectoryError('MultiplexedPath only supports directories')
+
+ def iterdir(self):
+ files = (file for path in self._paths for file in path.iterdir())
+ return unique_everseen(files, key=operator.attrgetter('name'))
+
+ def read_bytes(self):
+ raise FileNotFoundError(f'{self} is not a file')
+
+ def read_text(self, *args, **kwargs):
+ raise FileNotFoundError(f'{self} is not a file')
+
+ def is_dir(self):
+ return True
+
+ def is_file(self):
+ return False
+
+ def joinpath(self, child):
+ # first try to find child in current paths
+ for file in self.iterdir():
+ if file.name == child:
+ return file
+ # if it does not exist, construct it with the first path
+ return self._paths[0] / child
+
+ __truediv__ = joinpath
+
+ def open(self, *args, **kwargs):
+ raise FileNotFoundError(f'{self} is not a file')
+
+ @property
+ def name(self):
+ return self._paths[0].name
+
+ def __repr__(self):
+ paths = ', '.join(f"'{path}'" for path in self._paths)
+ return f'MultiplexedPath({paths})'
+
+
+class NamespaceReader(abc.TraversableResources):
+ def __init__(self, namespace_path):
+ if 'NamespacePath' not in str(namespace_path):
+ raise ValueError('Invalid path')
+ self.path = MultiplexedPath(*list(namespace_path))
+
+ def resource_path(self, resource):
+ """
+ Return the file system path to prevent
+ `resources.path()` from creating a temporary
+ copy.
+ """
+ return str(self.path.joinpath(resource))
+
+ def files(self):
+ return self.path
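A rough sketch of MultiplexedPath, which NamespaceReader uses to present a namespace package split across several directories as a single Traversable (the directories are made up and assumed to exist):

    from importlib_resources.readers import MultiplexedPath

    merged = MultiplexedPath('/venv/site-packages/ns_pkg', '/src/ns_pkg')
    sorted(p.name for p in merged.iterdir())   # union of both directories, de-duplicated by name
    merged / 'data.txt'                        # first directory that contains it wins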
diff --git a/pkg_resources/_vendor/importlib_resources/simple.py b/pkg_resources/_vendor/importlib_resources/simple.py
new file mode 100644
index 0000000..da073cb
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/simple.py
@@ -0,0 +1,116 @@
+"""
+Interface adapters for low-level readers.
+"""
+
+import abc
+import io
+import itertools
+from typing import BinaryIO, List
+
+from .abc import Traversable, TraversableResources
+
+
+class SimpleReader(abc.ABC):
+ """
+ The minimum, low-level interface required from a resource
+ provider.
+ """
+
+ @abc.abstractproperty
+ def package(self):
+ # type: () -> str
+ """
+ The name of the package for which this reader loads resources.
+ """
+
+ @abc.abstractmethod
+ def children(self):
+ # type: () -> List['SimpleReader']
+ """
+ Obtain an iterable of SimpleReader for available
+ child containers (e.g. directories).
+ """
+
+ @abc.abstractmethod
+ def resources(self):
+ # type: () -> List[str]
+ """
+ Obtain available named resources for this virtual package.
+ """
+
+ @abc.abstractmethod
+ def open_binary(self, resource):
+ # type: (str) -> BinaryIO
+ """
+ Obtain a File-like for a named resource.
+ """
+
+ @property
+ def name(self):
+ return self.package.split('.')[-1]
+
+
+class ResourceHandle(Traversable):
+ """
+ Handle to a named resource in a ResourceReader.
+ """
+
+ def __init__(self, parent, name):
+ # type: (ResourceContainer, str) -> None
+ self.parent = parent
+ self.name = name # type: ignore
+
+ def is_file(self):
+ return True
+
+ def is_dir(self):
+ return False
+
+ def open(self, mode='r', *args, **kwargs):
+ stream = self.parent.reader.open_binary(self.name)
+ if 'b' not in mode:
+ stream = io.TextIOWrapper(stream, *args, **kwargs)
+ return stream
+
+ def joinpath(self, name):
+ raise RuntimeError("Cannot traverse into a resource")
+
+
+class ResourceContainer(Traversable):
+ """
+ Traversable container for a package's resources via its reader.
+ """
+
+ def __init__(self, reader):
+ # type: (SimpleReader) -> None
+ self.reader = reader
+
+ def is_dir(self):
+ return True
+
+ def is_file(self):
+ return False
+
+ def iterdir(self):
+ files = (ResourceHandle(self, name) for name in self.reader.resources)
+ dirs = map(ResourceContainer, self.reader.children())
+ return itertools.chain(files, dirs)
+
+ def open(self, *args, **kwargs):
+ raise IsADirectoryError()
+
+ def joinpath(self, name):
+ return next(
+ traversable for traversable in self.iterdir() if traversable.name == name
+ )
+
+
+class TraversableReader(TraversableResources, SimpleReader):
+ """
+ A TraversableResources based on SimpleReader. Resource providers
+ may derive from this class to provide the TraversableResources
+ interface by supplying the SimpleReader interface.
+ """
+
+ def files(self):
+ return ResourceContainer(self)
diff --git a/pkg_resources/_vendor/importlib_resources/tests/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/__init__.py
diff --git a/pkg_resources/_vendor/importlib_resources/tests/_compat.py b/pkg_resources/_vendor/importlib_resources/tests/_compat.py
new file mode 100644
index 0000000..4c99cff
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/_compat.py
@@ -0,0 +1,19 @@
+import os
+
+
+try:
+ from test.support import import_helper # type: ignore
+except ImportError:
+ # Python 3.9 and earlier
+ class import_helper: # type: ignore
+ from test.support import modules_setup, modules_cleanup
+
+
+try:
+ # Python 3.10
+ from test.support.os_helper import unlink
+except ImportError:
+ from test.support import unlink as _unlink
+
+ def unlink(target):
+ return _unlink(os.fspath(target))
diff --git a/pkg_resources/_vendor/importlib_resources/tests/data01/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/data01/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/data01/__init__.py
diff --git a/pkg_resources/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py
diff --git a/pkg_resources/_vendor/importlib_resources/tests/data02/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/data02/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/data02/__init__.py
diff --git a/pkg_resources/_vendor/importlib_resources/tests/data02/one/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/data02/one/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/data02/one/__init__.py
diff --git a/pkg_resources/_vendor/importlib_resources/tests/data02/one/resource1.txt b/pkg_resources/_vendor/importlib_resources/tests/data02/one/resource1.txt
new file mode 100644
index 0000000..61a813e
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/data02/one/resource1.txt
@@ -0,0 +1 @@
+one resource
diff --git a/pkg_resources/_vendor/importlib_resources/tests/data02/two/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/data02/two/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/data02/two/__init__.py
diff --git a/pkg_resources/_vendor/importlib_resources/tests/data02/two/resource2.txt b/pkg_resources/_vendor/importlib_resources/tests/data02/two/resource2.txt
new file mode 100644
index 0000000..a80ce46
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/data02/two/resource2.txt
@@ -0,0 +1 @@
+two resource
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_compatibilty_files.py b/pkg_resources/_vendor/importlib_resources/tests/test_compatibilty_files.py
new file mode 100644
index 0000000..d92c7c5
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/test_compatibilty_files.py
@@ -0,0 +1,102 @@
+import io
+import unittest
+
+import importlib_resources as resources
+
+from importlib_resources._adapters import (
+ CompatibilityFiles,
+ wrap_spec,
+)
+
+from . import util
+
+
+class CompatibilityFilesTests(unittest.TestCase):
+ @property
+ def package(self):
+ bytes_data = io.BytesIO(b'Hello, world!')
+ return util.create_package(
+ file=bytes_data,
+ path='some_path',
+ contents=('a', 'b', 'c'),
+ )
+
+ @property
+ def files(self):
+ return resources.files(self.package)
+
+ def test_spec_path_iter(self):
+ self.assertEqual(
+ sorted(path.name for path in self.files.iterdir()),
+ ['a', 'b', 'c'],
+ )
+
+ def test_child_path_iter(self):
+ self.assertEqual(list((self.files / 'a').iterdir()), [])
+
+ def test_orphan_path_iter(self):
+ self.assertEqual(list((self.files / 'a' / 'a').iterdir()), [])
+ self.assertEqual(list((self.files / 'a' / 'a' / 'a').iterdir()), [])
+
+ def test_spec_path_is(self):
+ self.assertFalse(self.files.is_file())
+ self.assertFalse(self.files.is_dir())
+
+ def test_child_path_is(self):
+ self.assertTrue((self.files / 'a').is_file())
+ self.assertFalse((self.files / 'a').is_dir())
+
+ def test_orphan_path_is(self):
+ self.assertFalse((self.files / 'a' / 'a').is_file())
+ self.assertFalse((self.files / 'a' / 'a').is_dir())
+ self.assertFalse((self.files / 'a' / 'a' / 'a').is_file())
+ self.assertFalse((self.files / 'a' / 'a' / 'a').is_dir())
+
+ def test_spec_path_name(self):
+ self.assertEqual(self.files.name, 'testingpackage')
+
+ def test_child_path_name(self):
+ self.assertEqual((self.files / 'a').name, 'a')
+
+ def test_orphan_path_name(self):
+ self.assertEqual((self.files / 'a' / 'b').name, 'b')
+ self.assertEqual((self.files / 'a' / 'b' / 'c').name, 'c')
+
+ def test_spec_path_open(self):
+ self.assertEqual(self.files.read_bytes(), b'Hello, world!')
+ self.assertEqual(self.files.read_text(), 'Hello, world!')
+
+ def test_child_path_open(self):
+ self.assertEqual((self.files / 'a').read_bytes(), b'Hello, world!')
+ self.assertEqual((self.files / 'a').read_text(), 'Hello, world!')
+
+ def test_orphan_path_open(self):
+ with self.assertRaises(FileNotFoundError):
+ (self.files / 'a' / 'b').read_bytes()
+ with self.assertRaises(FileNotFoundError):
+ (self.files / 'a' / 'b' / 'c').read_bytes()
+
+ def test_open_invalid_mode(self):
+ with self.assertRaises(ValueError):
+ self.files.open('0')
+
+ def test_orphan_path_invalid(self):
+ with self.assertRaises(ValueError):
+ CompatibilityFiles.OrphanPath()
+
+ def test_wrap_spec(self):
+ spec = wrap_spec(self.package)
+ self.assertIsInstance(spec.loader.get_resource_reader(None), CompatibilityFiles)
+
+
+class CompatibilityFilesNoReaderTests(unittest.TestCase):
+ @property
+ def package(self):
+ return util.create_package_from_loader(None)
+
+ @property
+ def files(self):
+ return resources.files(self.package)
+
+ def test_spec_path_joinpath(self):
+ self.assertIsInstance(self.files / 'a', CompatibilityFiles.OrphanPath)
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_contents.py b/pkg_resources/_vendor/importlib_resources/tests/test_contents.py
new file mode 100644
index 0000000..525568e
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/test_contents.py
@@ -0,0 +1,43 @@
+import unittest
+import importlib_resources as resources
+
+from . import data01
+from . import util
+
+
+class ContentsTests:
+ expected = {
+ '__init__.py',
+ 'binary.file',
+ 'subdirectory',
+ 'utf-16.file',
+ 'utf-8.file',
+ }
+
+ def test_contents(self):
+ contents = {path.name for path in resources.files(self.data).iterdir()}
+ assert self.expected <= contents
+
+
+class ContentsDiskTests(ContentsTests, unittest.TestCase):
+ def setUp(self):
+ self.data = data01
+
+
+class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase):
+ pass
+
+
+class ContentsNamespaceTests(ContentsTests, unittest.TestCase):
+ expected = {
+ # no __init__ because of namespace design
+ # no subdirectory as incidental difference in fixture
+ 'binary.file',
+ 'utf-16.file',
+ 'utf-8.file',
+ }
+
+ def setUp(self):
+ from . import namespacedata01
+
+ self.data = namespacedata01
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_files.py b/pkg_resources/_vendor/importlib_resources/tests/test_files.py
new file mode 100644
index 0000000..2676b49
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/test_files.py
@@ -0,0 +1,46 @@
+import typing
+import unittest
+
+import importlib_resources as resources
+from importlib_resources.abc import Traversable
+from . import data01
+from . import util
+
+
+class FilesTests:
+ def test_read_bytes(self):
+ files = resources.files(self.data)
+ actual = files.joinpath('utf-8.file').read_bytes()
+ assert actual == b'Hello, UTF-8 world!\n'
+
+ def test_read_text(self):
+ files = resources.files(self.data)
+ actual = files.joinpath('utf-8.file').read_text(encoding='utf-8')
+ assert actual == 'Hello, UTF-8 world!\n'
+
+ @unittest.skipUnless(
+ hasattr(typing, 'runtime_checkable'),
+ "Only suitable when typing supports runtime_checkable",
+ )
+ def test_traversable(self):
+ assert isinstance(resources.files(self.data), Traversable)
+
+
+class OpenDiskTests(FilesTests, unittest.TestCase):
+ def setUp(self):
+ self.data = data01
+
+
+class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase):
+ pass
+
+
+class OpenNamespaceTests(FilesTests, unittest.TestCase):
+ def setUp(self):
+ from . import namespacedata01
+
+ self.data = namespacedata01
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_open.py b/pkg_resources/_vendor/importlib_resources/tests/test_open.py
new file mode 100644
index 0000000..87b42c3
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/test_open.py
@@ -0,0 +1,81 @@
+import unittest
+
+import importlib_resources as resources
+from . import data01
+from . import util
+
+
+class CommonBinaryTests(util.CommonTests, unittest.TestCase):
+ def execute(self, package, path):
+ target = resources.files(package).joinpath(path)
+ with target.open('rb'):
+ pass
+
+
+class CommonTextTests(util.CommonTests, unittest.TestCase):
+ def execute(self, package, path):
+ target = resources.files(package).joinpath(path)
+ with target.open():
+ pass
+
+
+class OpenTests:
+ def test_open_binary(self):
+ target = resources.files(self.data) / 'binary.file'
+ with target.open('rb') as fp:
+ result = fp.read()
+ self.assertEqual(result, b'\x00\x01\x02\x03')
+
+ def test_open_text_default_encoding(self):
+ target = resources.files(self.data) / 'utf-8.file'
+ with target.open() as fp:
+ result = fp.read()
+ self.assertEqual(result, 'Hello, UTF-8 world!\n')
+
+ def test_open_text_given_encoding(self):
+ target = resources.files(self.data) / 'utf-16.file'
+ with target.open(encoding='utf-16', errors='strict') as fp:
+ result = fp.read()
+ self.assertEqual(result, 'Hello, UTF-16 world!\n')
+
+ def test_open_text_with_errors(self):
+ # Raises UnicodeError without the 'errors' argument.
+ target = resources.files(self.data) / 'utf-16.file'
+ with target.open(encoding='utf-8', errors='strict') as fp:
+ self.assertRaises(UnicodeError, fp.read)
+ with target.open(encoding='utf-8', errors='ignore') as fp:
+ result = fp.read()
+ self.assertEqual(
+ result,
+ 'H\x00e\x00l\x00l\x00o\x00,\x00 '
+ '\x00U\x00T\x00F\x00-\x001\x006\x00 '
+ '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00',
+ )
+
+ def test_open_binary_FileNotFoundError(self):
+ target = resources.files(self.data) / 'does-not-exist'
+ self.assertRaises(FileNotFoundError, target.open, 'rb')
+
+ def test_open_text_FileNotFoundError(self):
+ target = resources.files(self.data) / 'does-not-exist'
+ self.assertRaises(FileNotFoundError, target.open)
+
+
+class OpenDiskTests(OpenTests, unittest.TestCase):
+ def setUp(self):
+ self.data = data01
+
+
+class OpenDiskNamespaceTests(OpenTests, unittest.TestCase):
+ def setUp(self):
+ from . import namespacedata01
+
+ self.data = namespacedata01
+
+
+class OpenZipTests(OpenTests, util.ZipSetup, unittest.TestCase):
+ pass
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_path.py b/pkg_resources/_vendor/importlib_resources/tests/test_path.py
new file mode 100644
index 0000000..4f4d394
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/test_path.py
@@ -0,0 +1,64 @@
+import io
+import unittest
+
+import importlib_resources as resources
+from . import data01
+from . import util
+
+
+class CommonTests(util.CommonTests, unittest.TestCase):
+ def execute(self, package, path):
+ with resources.as_file(resources.files(package).joinpath(path)):
+ pass
+
+
+class PathTests:
+ def test_reading(self):
+ # Path should be readable.
+ # Test also implicitly verifies the returned object is a pathlib.Path
+ # instance.
+ target = resources.files(self.data) / 'utf-8.file'
+ with resources.as_file(target) as path:
+ self.assertTrue(path.name.endswith("utf-8.file"), repr(path))
+ # pathlib.Path.read_text() was introduced in Python 3.5.
+ with path.open('r', encoding='utf-8') as file:
+ text = file.read()
+ self.assertEqual('Hello, UTF-8 world!\n', text)
+
+
+class PathDiskTests(PathTests, unittest.TestCase):
+ data = data01
+
+ def test_natural_path(self):
+ """
+ Guarantee the internal implementation detail that
+ file-system-backed resources do not get the tempdir
+ treatment.
+ """
+ target = resources.files(self.data) / 'utf-8.file'
+ with resources.as_file(target) as path:
+ assert 'data' in str(path)
+
+
+class PathMemoryTests(PathTests, unittest.TestCase):
+ def setUp(self):
+ file = io.BytesIO(b'Hello, UTF-8 world!\n')
+ self.addCleanup(file.close)
+ self.data = util.create_package(
+ file=file, path=FileNotFoundError("package exists only in memory")
+ )
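+ # Make the spec look location-less so the resource is served from the
+ # in-memory reader rather than resolved to a real file on disk.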
+ self.data.__spec__.origin = None
+ self.data.__spec__.has_location = False
+
+
+class PathZipTests(PathTests, util.ZipSetup, unittest.TestCase):
+ def test_remove_in_context_manager(self):
+ # It is not an error if the file that was temporarily stashed on the
+ # file system is removed inside the `with` stanza.
+ target = resources.files(self.data) / 'utf-8.file'
+ with resources.as_file(target) as path:
+ path.unlink()
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_read.py b/pkg_resources/_vendor/importlib_resources/tests/test_read.py
new file mode 100644
index 0000000..41dd6db
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/test_read.py
@@ -0,0 +1,76 @@
+import unittest
+import importlib_resources as resources
+
+from . import data01
+from . import util
+from importlib import import_module
+
+
+class CommonBinaryTests(util.CommonTests, unittest.TestCase):
+ def execute(self, package, path):
+ resources.files(package).joinpath(path).read_bytes()
+
+
+class CommonTextTests(util.CommonTests, unittest.TestCase):
+ def execute(self, package, path):
+ resources.files(package).joinpath(path).read_text()
+
+
+class ReadTests:
+ def test_read_bytes(self):
+ result = resources.files(self.data).joinpath('binary.file').read_bytes()
+ self.assertEqual(result, b'\0\1\2\3')
+
+ def test_read_text_default_encoding(self):
+ result = resources.files(self.data).joinpath('utf-8.file').read_text()
+ self.assertEqual(result, 'Hello, UTF-8 world!\n')
+
+ def test_read_text_given_encoding(self):
+ result = (
+ resources.files(self.data)
+ .joinpath('utf-16.file')
+ .read_text(encoding='utf-16')
+ )
+ self.assertEqual(result, 'Hello, UTF-16 world!\n')
+
+ def test_read_text_with_errors(self):
+ # Raises UnicodeError without the 'errors' argument.
+ target = resources.files(self.data) / 'utf-16.file'
+ self.assertRaises(UnicodeError, target.read_text, encoding='utf-8')
+ result = target.read_text(encoding='utf-8', errors='ignore')
+ self.assertEqual(
+ result,
+ 'H\x00e\x00l\x00l\x00o\x00,\x00 '
+ '\x00U\x00T\x00F\x00-\x001\x006\x00 '
+ '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00',
+ )
+
+
+class ReadDiskTests(ReadTests, unittest.TestCase):
+ data = data01
+
+
+class ReadZipTests(ReadTests, util.ZipSetup, unittest.TestCase):
+ def test_read_submodule_resource(self):
+ submodule = import_module('ziptestdata.subdirectory')
+ result = resources.files(submodule).joinpath('binary.file').read_bytes()
+ self.assertEqual(result, b'\0\1\2\3')
+
+ def test_read_submodule_resource_by_name(self):
+ result = (
+ resources.files('ziptestdata.subdirectory')
+ .joinpath('binary.file')
+ .read_bytes()
+ )
+ self.assertEqual(result, b'\0\1\2\3')
+
+
+class ReadNamespaceTests(ReadTests, unittest.TestCase):
+ def setUp(self):
+ from . import namespacedata01
+
+ self.data = namespacedata01
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_reader.py b/pkg_resources/_vendor/importlib_resources/tests/test_reader.py
new file mode 100644
index 0000000..16841a5
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/test_reader.py
@@ -0,0 +1,128 @@
+import os.path
+import sys
+import pathlib
+import unittest
+
+from importlib import import_module
+from importlib_resources.readers import MultiplexedPath, NamespaceReader
+
+
+class MultiplexedPathTest(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ path = pathlib.Path(__file__).parent / 'namespacedata01'
+ cls.folder = str(path)
+
+ def test_init_no_paths(self):
+ with self.assertRaises(FileNotFoundError):
+ MultiplexedPath()
+
+ def test_init_file(self):
+ with self.assertRaises(NotADirectoryError):
+ MultiplexedPath(os.path.join(self.folder, 'binary.file'))
+
+ def test_iterdir(self):
+ contents = {path.name for path in MultiplexedPath(self.folder).iterdir()}
+ try:
+ contents.remove('__pycache__')
+ except (KeyError, ValueError):
+ pass
+ self.assertEqual(contents, {'binary.file', 'utf-16.file', 'utf-8.file'})
+
+ def test_iterdir_duplicate(self):
+ data01 = os.path.abspath(os.path.join(__file__, '..', 'data01'))
+ contents = {
+ path.name for path in MultiplexedPath(self.folder, data01).iterdir()
+ }
+ for remove in ('__pycache__', '__init__.pyc'):
+ try:
+ contents.remove(remove)
+ except (KeyError, ValueError):
+ pass
+ self.assertEqual(
+ contents,
+ {'__init__.py', 'binary.file', 'subdirectory', 'utf-16.file', 'utf-8.file'},
+ )
+
+ def test_is_dir(self):
+ self.assertEqual(MultiplexedPath(self.folder).is_dir(), True)
+
+ def test_is_file(self):
+ self.assertEqual(MultiplexedPath(self.folder).is_file(), False)
+
+ def test_open_file(self):
+ path = MultiplexedPath(self.folder)
+ with self.assertRaises(FileNotFoundError):
+ path.read_bytes()
+ with self.assertRaises(FileNotFoundError):
+ path.read_text()
+ with self.assertRaises(FileNotFoundError):
+ path.open()
+
+ def test_join_path(self):
+ prefix = os.path.abspath(os.path.join(__file__, '..'))
+ data01 = os.path.join(prefix, 'data01')
+ path = MultiplexedPath(self.folder, data01)
+ self.assertEqual(
+ str(path.joinpath('binary.file'))[len(prefix) + 1 :],
+ os.path.join('namespacedata01', 'binary.file'),
+ )
+ self.assertEqual(
+ str(path.joinpath('subdirectory'))[len(prefix) + 1 :],
+ os.path.join('data01', 'subdirectory'),
+ )
+ self.assertEqual(
+ str(path.joinpath('imaginary'))[len(prefix) + 1 :],
+ os.path.join('namespacedata01', 'imaginary'),
+ )
+
+ def test_repr(self):
+ self.assertEqual(
+ repr(MultiplexedPath(self.folder)),
+ f"MultiplexedPath('{self.folder}')",
+ )
+
+ def test_name(self):
+ self.assertEqual(
+ MultiplexedPath(self.folder).name,
+ os.path.basename(self.folder),
+ )
+
+
+class NamespaceReaderTest(unittest.TestCase):
+ site_dir = str(pathlib.Path(__file__).parent)
+
+ @classmethod
+ def setUpClass(cls):
+ sys.path.append(cls.site_dir)
+
+ @classmethod
+ def tearDownClass(cls):
+ sys.path.remove(cls.site_dir)
+
+ def test_init_error(self):
+ with self.assertRaises(ValueError):
+ NamespaceReader(['path1', 'path2'])
+
+ def test_resource_path(self):
+ namespacedata01 = import_module('namespacedata01')
+ reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations)
+
+ root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01'))
+ self.assertEqual(
+ reader.resource_path('binary.file'), os.path.join(root, 'binary.file')
+ )
+ self.assertEqual(
+ reader.resource_path('imaginary'), os.path.join(root, 'imaginary')
+ )
+
+ def test_files(self):
+ namespacedata01 = import_module('namespacedata01')
+ reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations)
+ root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01'))
+ self.assertIsInstance(reader.files(), MultiplexedPath)
+ self.assertEqual(repr(reader.files()), f"MultiplexedPath('{root}')")
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_resource.py b/pkg_resources/_vendor/importlib_resources/tests/test_resource.py
new file mode 100644
index 0000000..5affd8b
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/test_resource.py
@@ -0,0 +1,252 @@
+import sys
+import unittest
+import importlib_resources as resources
+import uuid
+import pathlib
+
+from . import data01
+from . import zipdata01, zipdata02
+from . import util
+from importlib import import_module
+from ._compat import import_helper, unlink
+
+
+class ResourceTests:
+ # Subclasses are expected to set the `data` attribute.
+
+ def test_is_file_exists(self):
+ target = resources.files(self.data) / 'binary.file'
+ self.assertTrue(target.is_file())
+
+ def test_is_file_missing(self):
+ target = resources.files(self.data) / 'not-a-file'
+ self.assertFalse(target.is_file())
+
+ def test_is_dir(self):
+ target = resources.files(self.data) / 'subdirectory'
+ self.assertFalse(target.is_file())
+ self.assertTrue(target.is_dir())
+
+
+class ResourceDiskTests(ResourceTests, unittest.TestCase):
+ def setUp(self):
+ self.data = data01
+
+
+class ResourceZipTests(ResourceTests, util.ZipSetup, unittest.TestCase):
+ pass
+
+
+def names(traversable):
+ return {item.name for item in traversable.iterdir()}
+
+
+class ResourceLoaderTests(unittest.TestCase):
+ def test_resource_contents(self):
+ package = util.create_package(
+ file=data01, path=data01.__file__, contents=['A', 'B', 'C']
+ )
+ self.assertEqual(names(resources.files(package)), {'A', 'B', 'C'})
+
+ def test_is_file(self):
+ package = util.create_package(
+ file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F']
+ )
+ self.assertTrue(resources.files(package).joinpath('B').is_file())
+
+ def test_is_dir(self):
+ package = util.create_package(
+ file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F']
+ )
+ self.assertTrue(resources.files(package).joinpath('D').is_dir())
+
+ def test_resource_missing(self):
+ package = util.create_package(
+ file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F']
+ )
+ self.assertFalse(resources.files(package).joinpath('Z').is_file())
+
+
+class ResourceCornerCaseTests(unittest.TestCase):
+ def test_package_has_no_reader_fallback(self):
+ # Test odd ball packages which:
+ # 1. Do not have a ResourceReader as a loader
+ # 2. Are not on the file system
+ # 3. Are not in a zip file
+ module = util.create_package(
+ file=data01, path=data01.__file__, contents=['A', 'B', 'C']
+ )
+ # Give the module a dummy loader.
+ module.__loader__ = object()
+ # Give the module a dummy origin.
+ module.__file__ = '/path/which/shall/not/be/named'
+ module.__spec__.loader = module.__loader__
+ module.__spec__.origin = module.__file__
+ self.assertFalse(resources.files(module).joinpath('A').is_file())
+
+
+class ResourceFromZipsTest01(util.ZipSetupBase, unittest.TestCase):
+ ZIP_MODULE = zipdata01 # type: ignore
+
+ def test_is_submodule_resource(self):
+ submodule = import_module('ziptestdata.subdirectory')
+ self.assertTrue(resources.files(submodule).joinpath('binary.file').is_file())
+
+ def test_read_submodule_resource_by_name(self):
+ self.assertTrue(
+ resources.files('ziptestdata.subdirectory')
+ .joinpath('binary.file')
+ .is_file()
+ )
+
+ def test_submodule_contents(self):
+ submodule = import_module('ziptestdata.subdirectory')
+ self.assertEqual(
+ names(resources.files(submodule)), {'__init__.py', 'binary.file'}
+ )
+
+ def test_submodule_contents_by_name(self):
+ self.assertEqual(
+ names(resources.files('ziptestdata.subdirectory')),
+ {'__init__.py', 'binary.file'},
+ )
+
+
+class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase):
+ ZIP_MODULE = zipdata02 # type: ignore
+
+ def test_unrelated_contents(self):
+ """
+ Test that a zip with two unrelated subpackages returns
+ distinct resources. Ref python/importlib_resources#44.
+ """
+ self.assertEqual(
+ names(resources.files('ziptestdata.one')),
+ {'__init__.py', 'resource1.txt'},
+ )
+ self.assertEqual(
+ names(resources.files('ziptestdata.two')),
+ {'__init__.py', 'resource2.txt'},
+ )
+
+
+class DeletingZipsTest(unittest.TestCase):
+ """Having accessed resources in a zip file should not keep an open
+ reference to the zip.
+ """
+
+ ZIP_MODULE = zipdata01
+
+ def setUp(self):
+ modules = import_helper.modules_setup()
+ self.addCleanup(import_helper.modules_cleanup, *modules)
+
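+ # Work on a uniquely named copy of the zip so unlinking it mid-test
+ # cannot interfere with the shared fixture used by other tests.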
+ data_path = pathlib.Path(self.ZIP_MODULE.__file__)
+ data_dir = data_path.parent
+ self.source_zip_path = data_dir / 'ziptestdata.zip'
+ self.zip_path = pathlib.Path(f'{uuid.uuid4()}.zip').absolute()
+ self.zip_path.write_bytes(self.source_zip_path.read_bytes())
+ sys.path.append(str(self.zip_path))
+ self.data = import_module('ziptestdata')
+
+ def tearDown(self):
+ try:
+ sys.path.remove(str(self.zip_path))
+ except ValueError:
+ pass
+
+ try:
+ del sys.path_importer_cache[str(self.zip_path)]
+ del sys.modules[self.data.__name__]
+ except KeyError:
+ pass
+
+ try:
+ unlink(self.zip_path)
+ except OSError:
+ # If the test fails, this will probably fail too
+ pass
+
+ def test_iterdir_does_not_keep_open(self):
+ c = [item.name for item in resources.files('ziptestdata').iterdir()]
+ self.zip_path.unlink()
+ del c
+
+ def test_is_file_does_not_keep_open(self):
+ c = resources.files('ziptestdata').joinpath('binary.file').is_file()
+ self.zip_path.unlink()
+ del c
+
+ def test_is_file_failure_does_not_keep_open(self):
+ c = resources.files('ziptestdata').joinpath('not-present').is_file()
+ self.zip_path.unlink()
+ del c
+
+ @unittest.skip("Desired but not supported.")
+ def test_as_file_does_not_keep_open(self): # pragma: no cover
+ c = resources.as_file(resources.files('ziptestdata') / 'binary.file')
+ self.zip_path.unlink()
+ del c
+
+ def test_entered_path_does_not_keep_open(self):
+ # This is what certifi does on import to make its bundle
+ # available for the process duration.
+ c = resources.as_file(
+ resources.files('ziptestdata') / 'binary.file'
+ ).__enter__()
+ self.zip_path.unlink()
+ del c
+
+ def test_read_binary_does_not_keep_open(self):
+ c = resources.files('ziptestdata').joinpath('binary.file').read_bytes()
+ self.zip_path.unlink()
+ del c
+
+ def test_read_text_does_not_keep_open(self):
+ c = resources.files('ziptestdata').joinpath('utf-8.file').read_text()
+ self.zip_path.unlink()
+ del c
+
+
+class ResourceFromNamespaceTest01(unittest.TestCase):
+ site_dir = str(pathlib.Path(__file__).parent)
+
+ @classmethod
+ def setUpClass(cls):
+ sys.path.append(cls.site_dir)
+
+ @classmethod
+ def tearDownClass(cls):
+ sys.path.remove(cls.site_dir)
+
+ def test_is_submodule_resource(self):
+ self.assertTrue(
+ resources.files(import_module('namespacedata01'))
+ .joinpath('binary.file')
+ .is_file()
+ )
+
+ def test_read_submodule_resource_by_name(self):
+ self.assertTrue(
+ resources.files('namespacedata01').joinpath('binary.file').is_file()
+ )
+
+ def test_submodule_contents(self):
+ contents = names(resources.files(import_module('namespacedata01')))
+ try:
+ contents.remove('__pycache__')
+ except KeyError:
+ pass
+ self.assertEqual(contents, {'binary.file', 'utf-8.file', 'utf-16.file'})
+
+ def test_submodule_contents_by_name(self):
+ contents = names(resources.files('namespacedata01'))
+ try:
+ contents.remove('__pycache__')
+ except KeyError:
+ pass
+ self.assertEqual(contents, {'binary.file', 'utf-8.file', 'utf-16.file'})
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/pkg_resources/_vendor/importlib_resources/tests/update-zips.py b/pkg_resources/_vendor/importlib_resources/tests/update-zips.py
new file mode 100644
index 0000000..9ef0224
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/update-zips.py
@@ -0,0 +1,53 @@
+"""
+Generate the zip test data files.
+
+Run to build the tests/zipdataNN/ziptestdata.zip files from
+files in tests/dataNN.
+
+Replaces the file with the working copy, but does not commit anything
+to the source repo.
+"""
+
+import contextlib
+import os
+import pathlib
+import zipfile
+
+
+def main():
+ """
+ >>> from unittest import mock
+ >>> monkeypatch = getfixture('monkeypatch')
+ >>> monkeypatch.setattr(zipfile, 'ZipFile', mock.MagicMock())
+ >>> print(); main() # print workaround for bpo-32509
+ <BLANKLINE>
+ ...data01... -> ziptestdata/...
+ ...
+ ...data02... -> ziptestdata/...
+ ...
+ """
+ suffixes = '01', '02'
+ tuple(map(generate, suffixes))
+
+
+def generate(suffix):
+ root = pathlib.Path(__file__).parent.relative_to(os.getcwd())
+ zfpath = root / f'zipdata{suffix}/ziptestdata.zip'
+ with zipfile.ZipFile(zfpath, 'w') as zf:
+ for src, rel in walk(root / f'data{suffix}'):
+ dst = 'ziptestdata' / pathlib.PurePosixPath(rel.as_posix())
+ print(src, '->', dst)
+ zf.write(src, dst)
+
+
+def walk(datapath):
+ for dirpath, dirnames, filenames in os.walk(datapath):
+ with contextlib.suppress(KeyError):
+ dirnames.remove('__pycache__')
+ for filename in filenames:
+ res = pathlib.Path(dirpath) / filename
+ rel = res.relative_to(datapath)
+ yield res, rel
+
+
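+# One-line equivalent of the usual `if __name__ == '__main__':` guard.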
+__name__ == '__main__' and main()
diff --git a/pkg_resources/_vendor/importlib_resources/tests/util.py b/pkg_resources/_vendor/importlib_resources/tests/util.py
new file mode 100644
index 0000000..c6d83e4
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/util.py
@@ -0,0 +1,178 @@
+import abc
+import importlib
+import io
+import sys
+import types
+from pathlib import Path, PurePath
+
+from . import data01
+from . import zipdata01
+from ..abc import ResourceReader
+from ._compat import import_helper
+
+
+from importlib.machinery import ModuleSpec
+
+
+class Reader(ResourceReader):
+ def __init__(self, **kwargs):
+ vars(self).update(kwargs)
+
+ def get_resource_reader(self, package):
+ return self
+
+ def open_resource(self, path):
+ self._path = path
+ if isinstance(self.file, Exception):
+ raise self.file
+ return self.file
+
+ def resource_path(self, path_):
+ self._path = path_
+ if isinstance(self.path, Exception):
+ raise self.path
+ return self.path
+
+ def is_resource(self, path_):
+ self._path = path_
+ if isinstance(self.path, Exception):
+ raise self.path
+
+ def part(entry):
+ return entry.split('/')
+
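+ # Only top-level entries (a single path component) count as resources here.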
+ return any(
+ len(parts) == 1 and parts[0] == path_ for parts in map(part, self._contents)
+ )
+
+ def contents(self):
+ if isinstance(self.path, Exception):
+ raise self.path
+ yield from self._contents
+
+
+def create_package_from_loader(loader, is_package=True):
+ name = 'testingpackage'
+ module = types.ModuleType(name)
+ spec = ModuleSpec(name, loader, origin='does-not-exist', is_package=is_package)
+ module.__spec__ = spec
+ module.__loader__ = loader
+ return module
+
+
+def create_package(file=None, path=None, is_package=True, contents=()):
+ return create_package_from_loader(
+ Reader(file=file, path=path, _contents=contents),
+ is_package,
+ )
+
+
+class CommonTests(metaclass=abc.ABCMeta):
+ """
+ Tests shared by test_open, test_path, and test_read.
+ """
+
+ @abc.abstractmethod
+ def execute(self, package, path):
+ """
+ Call the pertinent legacy API function (e.g. open_text, path)
+ on package and path.
+ """
+
+ def test_package_name(self):
+ # Passing in the package name should succeed.
+ self.execute(data01.__name__, 'utf-8.file')
+
+ def test_package_object(self):
+ # Passing in the package itself should succeed.
+ self.execute(data01, 'utf-8.file')
+
+ def test_string_path(self):
+ # Passing in a string for the path should succeed.
+ path = 'utf-8.file'
+ self.execute(data01, path)
+
+ def test_pathlib_path(self):
+ # Passing in a pathlib.PurePath object for the path should succeed.
+ path = PurePath('utf-8.file')
+ self.execute(data01, path)
+
+ def test_importing_module_as_side_effect(self):
+ # The anchor package can already be imported.
+ del sys.modules[data01.__name__]
+ self.execute(data01.__name__, 'utf-8.file')
+
+ def test_non_package_by_name(self):
+ # The anchor package cannot be a module.
+ with self.assertRaises(TypeError):
+ self.execute(__name__, 'utf-8.file')
+
+ def test_non_package_by_package(self):
+ # The anchor package cannot be a module.
+ with self.assertRaises(TypeError):
+ module = sys.modules['importlib_resources.tests.util']
+ self.execute(module, 'utf-8.file')
+
+ def test_missing_path(self):
+ # Attempting to open or read or request the path for a
+ # non-existent path should succeed if open_resource
+ # can return a viable data stream.
+ bytes_data = io.BytesIO(b'Hello, world!')
+ package = create_package(file=bytes_data, path=FileNotFoundError())
+ self.execute(package, 'utf-8.file')
+ self.assertEqual(package.__loader__._path, 'utf-8.file')
+
+ def test_extant_path(self):
+ # Attempting to open or read or request the path when the
+ # path does exist should still succeed. Does not assert
+ # anything about the result.
+ bytes_data = io.BytesIO(b'Hello, world!')
+ # any path that exists
+ path = __file__
+ package = create_package(file=bytes_data, path=path)
+ self.execute(package, 'utf-8.file')
+ self.assertEqual(package.__loader__._path, 'utf-8.file')
+
+ def test_useless_loader(self):
+ package = create_package(file=FileNotFoundError(), path=FileNotFoundError())
+ with self.assertRaises(FileNotFoundError):
+ self.execute(package, 'utf-8.file')
+
+
+class ZipSetupBase:
+ ZIP_MODULE = None
+
+ @classmethod
+ def setUpClass(cls):
+ data_path = Path(cls.ZIP_MODULE.__file__)
+ data_dir = data_path.parent
+ cls._zip_path = str(data_dir / 'ziptestdata.zip')
+ sys.path.append(cls._zip_path)
+ cls.data = importlib.import_module('ziptestdata')
+
+ @classmethod
+ def tearDownClass(cls):
+ try:
+ sys.path.remove(cls._zip_path)
+ except ValueError:
+ pass
+
+ try:
+ del sys.path_importer_cache[cls._zip_path]
+ del sys.modules[cls.data.__name__]
+ except KeyError:
+ pass
+
+ try:
+ del cls.data
+ del cls._zip_path
+ except AttributeError:
+ pass
+
+ def setUp(self):
+ modules = import_helper.modules_setup()
+ self.addCleanup(import_helper.modules_cleanup, *modules)
+
+
+class ZipSetup(ZipSetupBase):
+ ZIP_MODULE = zipdata01 # type: ignore
diff --git a/pkg_resources/_vendor/importlib_resources/tests/zipdata01/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/zipdata01/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/zipdata01/__init__.py
diff --git a/pkg_resources/_vendor/importlib_resources/tests/zipdata02/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/zipdata02/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/zipdata02/__init__.py
diff --git a/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/top_level.txt b/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/top_level.txt
new file mode 100644
index 0000000..f6205a5
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+jaraco
diff --git a/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/top_level.txt b/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/top_level.txt
new file mode 100644
index 0000000..f6205a5
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+jaraco
diff --git a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/top_level.txt b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/top_level.txt
new file mode 100644
index 0000000..f6205a5
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+jaraco
diff --git a/pkg_resources/_vendor/jaraco/__init__.py b/pkg_resources/_vendor/jaraco/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco/__init__.py
diff --git a/pkg_resources/_vendor/jaraco/context.py b/pkg_resources/_vendor/jaraco/context.py
new file mode 100644
index 0000000..87a4e3d
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco/context.py
@@ -0,0 +1,213 @@
+import os
+import subprocess
+import contextlib
+import functools
+import tempfile
+import shutil
+import operator
+
+
+@contextlib.contextmanager
+def pushd(dir):
+ orig = os.getcwd()
+ os.chdir(dir)
+ try:
+ yield dir
+ finally:
+ os.chdir(orig)
+
+
+@contextlib.contextmanager
+def tarball_context(url, target_dir=None, runner=None, pushd=pushd):
+ """
+ Get a tarball, extract it, change to that directory, yield, then
+ clean up.
+ `runner` is the function to invoke commands.
+ `pushd` is a context manager for changing the directory.
+ """
+ if target_dir is None:
+ target_dir = os.path.basename(url).replace('.tar.gz', '').replace('.tgz', '')
+ if runner is None:
+ runner = functools.partial(subprocess.check_call, shell=True)
+ # In the tar command, use --strip-components=1 to strip the first path
+ # and then use -C to cause the files to be extracted to {target_dir}.
+ # This ensures that we always know where the files were extracted.
+ runner('mkdir {target_dir}'.format(**vars()))
+ try:
+ getter = 'wget {url} -O -'
+ extract = 'tar x{compression} --strip-components=1 -C {target_dir}'
+ cmd = ' | '.join((getter, extract))
+ runner(cmd.format(compression=infer_compression(url), **vars()))
+ with pushd(target_dir):
+ yield target_dir
+ finally:
+ runner('rm -Rf {target_dir}'.format(**vars()))
+
+
+def infer_compression(url):
+ """
+ Given a URL or filename, infer the compression code for tar.
+ """
+ # cheat and just assume it's the last two characters
+ compression_indicator = url[-2:]
+ mapping = dict(gz='z', bz='j', xz='J')
+ # Assume 'z' (gzip) if no match
+ return mapping.get(compression_indicator, 'z')
+
+
+@contextlib.contextmanager
+def temp_dir(remover=shutil.rmtree):
+ """
+ Create a temporary directory context. Pass a custom remover
+ to override the removal behavior.
+ """
+ temp_dir = tempfile.mkdtemp()
+ try:
+ yield temp_dir
+ finally:
+ remover(temp_dir)
+
+
+@contextlib.contextmanager
+def repo_context(url, branch=None, quiet=True, dest_ctx=temp_dir):
+ """
+ Check out the repo indicated by url.
+
+ If dest_ctx is supplied, it should be a context manager
+ to yield the target directory for the check out.
+ """
+ exe = 'git' if 'git' in url else 'hg'
+ with dest_ctx() as repo_dir:
+ cmd = [exe, 'clone', url, repo_dir]
+ if branch:
+ cmd.extend(['--branch', branch])
+ devnull = open(os.path.devnull, 'w')
+ stdout = devnull if quiet else None
+ subprocess.check_call(cmd, stdout=stdout)
+ yield repo_dir
+
+
+@contextlib.contextmanager
+def null():
+ yield
+
+
+class ExceptionTrap:
+ """
+ A context manager that will catch certain exceptions and provide an
+ indication they occurred.
+
+ >>> with ExceptionTrap() as trap:
+ ... raise Exception()
+ >>> bool(trap)
+ True
+
+ >>> with ExceptionTrap() as trap:
+ ... pass
+ >>> bool(trap)
+ False
+
+ >>> with ExceptionTrap(ValueError) as trap:
+ ... raise ValueError("1 + 1 is not 3")
+ >>> bool(trap)
+ True
+
+ >>> with ExceptionTrap(ValueError) as trap:
+ ... raise Exception()
+ Traceback (most recent call last):
+ ...
+ Exception
+
+ >>> bool(trap)
+ False
+ """
+
+ exc_info = None, None, None
+
+ def __init__(self, exceptions=(Exception,)):
+ self.exceptions = exceptions
+
+ def __enter__(self):
+ return self
+
+ @property
+ def type(self):
+ return self.exc_info[0]
+
+ @property
+ def value(self):
+ return self.exc_info[1]
+
+ @property
+ def tb(self):
+ return self.exc_info[2]
+
+ def __exit__(self, *exc_info):
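+ # A truthy return value from __exit__ tells the interpreter to suppress
+ # the exception, so trapped exceptions do not propagate.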
+ type = exc_info[0]
+ matches = type and issubclass(type, self.exceptions)
+ if matches:
+ self.exc_info = exc_info
+ return matches
+
+ def __bool__(self):
+ return bool(self.type)
+
+ def raises(self, func, *, _test=bool):
+ """
+ Wrap func and replace the result with the truth
+ value of the trap (True if an exception occurred).
+
+ First, give the decorator an alias to support Python 3.8
+ syntax.
+
+ >>> raises = ExceptionTrap(ValueError).raises
+
+ Now decorate a function that always fails.
+
+ >>> @raises
+ ... def fail():
+ ... raise ValueError('failed')
+ >>> fail()
+ True
+ """
+
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ with ExceptionTrap(self.exceptions) as trap:
+ func(*args, **kwargs)
+ return _test(trap)
+
+ return wrapper
+
+ def passes(self, func):
+ """
+ Wrap func and replace the result with the truth
+ value of the trap (True if no exception).
+
+ First, give the decorator an alias to support Python 3.8
+ syntax.
+
+ >>> passes = ExceptionTrap(ValueError).passes
+
+ Now decorate a function that always fails.
+
+ >>> @passes
+ ... def fail():
+ ... raise ValueError('failed')
+
+ >>> fail()
+ False
+ """
+ return self.raises(func, _test=operator.not_)
+
+
+class suppress(contextlib.suppress, contextlib.ContextDecorator):
+ """
+ A version of contextlib.suppress with decorator support.
+
+ >>> @suppress(KeyError)
+ ... def key_error():
+ ... {}['']
+ >>> key_error()
+ """
diff --git a/pkg_resources/_vendor/jaraco/functools.py b/pkg_resources/_vendor/jaraco/functools.py
new file mode 100644
index 0000000..a3fea3a
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco/functools.py
@@ -0,0 +1,525 @@
+import functools
+import time
+import inspect
+import collections
+import types
+import itertools
+
+# Bind the vendored name so `more_itertools.consume` in print_yielded resolves.
+from pkg_resources.extern import more_itertools
+
+from typing import Callable, TypeVar
+
+
+CallableT = TypeVar("CallableT", bound=Callable[..., object])
+
+
+def compose(*funcs):
+ """
+ Compose any number of unary functions into a single unary function.
+
+ >>> import textwrap
+ >>> expected = str.strip(textwrap.dedent(compose.__doc__))
+ >>> strip_and_dedent = compose(str.strip, textwrap.dedent)
+ >>> strip_and_dedent(compose.__doc__) == expected
+ True
+
+ Compose also allows the innermost function to take arbitrary arguments.
+
+ >>> round_three = lambda x: round(x, ndigits=3)
+ >>> f = compose(round_three, int.__truediv__)
+ >>> [f(3*x, x+1) for x in range(1,10)]
+ [1.5, 2.0, 2.25, 2.4, 2.5, 2.571, 2.625, 2.667, 2.7]
+ """
+
+ def compose_two(f1, f2):
+ return lambda *args, **kwargs: f1(f2(*args, **kwargs))
+
+ return functools.reduce(compose_two, funcs)
+
+
+def method_caller(method_name, *args, **kwargs):
+ """
+ Return a function that will call a named method on the
+ target object with optional positional and keyword
+ arguments.
+
+ >>> lower = method_caller('lower')
+ >>> lower('MyString')
+ 'mystring'
+ """
+
+ def call_method(target):
+ func = getattr(target, method_name)
+ return func(*args, **kwargs)
+
+ return call_method
+
+
+def once(func):
+ """
+ Decorate func so it's only ever called the first time.
+
+ This decorator can ensure that an expensive or non-idempotent function
+ will not be expensive on subsequent calls and is idempotent.
+
+ >>> add_three = once(lambda a: a+3)
+ >>> add_three(3)
+ 6
+ >>> add_three(9)
+ 6
+ >>> add_three('12')
+ 6
+
+ To reset the stored value, simply clear the property ``saved_result``.
+
+ >>> del add_three.saved_result
+ >>> add_three(9)
+ 12
+ >>> add_three(8)
+ 12
+
+ Or invoke 'reset()' on it.
+
+ >>> add_three.reset()
+ >>> add_three(-3)
+ 0
+ >>> add_three(0)
+ 0
+ """
+
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ if not hasattr(wrapper, 'saved_result'):
+ wrapper.saved_result = func(*args, **kwargs)
+ return wrapper.saved_result
+
+ wrapper.reset = lambda: vars(wrapper).__delitem__('saved_result')
+ return wrapper
+
+
+def method_cache(
+ method: CallableT,
+ cache_wrapper: Callable[
+ [CallableT], CallableT
+ ] = functools.lru_cache(), # type: ignore[assignment]
+) -> CallableT:
+ """
+ Wrap lru_cache to support storing the cache data in the object instances.
+
+ Abstracts the common paradigm where the method explicitly saves an
+ underscore-prefixed protected property on first call and returns that
+ subsequently.
+
+ >>> class MyClass:
+ ... calls = 0
+ ...
+ ... @method_cache
+ ... def method(self, value):
+ ... self.calls += 1
+ ... return value
+
+ >>> a = MyClass()
+ >>> a.method(3)
+ 3
+ >>> for x in range(75):
+ ... res = a.method(x)
+ >>> a.calls
+ 75
+
+ Note that the apparent behavior will be exactly like that of lru_cache
+ except that the cache is stored on each instance, so values in one
+ instance will not flush values from another, and when an instance is
+ deleted, so are the cached values for that instance.
+
+ >>> b = MyClass()
+ >>> for x in range(35):
+ ... res = b.method(x)
+ >>> b.calls
+ 35
+ >>> a.method(0)
+ 0
+ >>> a.calls
+ 75
+
+ Note that if method had been decorated with ``functools.lru_cache()``,
+ a.calls would have been 76 (due to the cached value of 0 having been
+ flushed by the 'b' instance).
+
+ Clear the cache with ``.cache_clear()``
+
+ >>> a.method.cache_clear()
+
+ Same for a method that hasn't yet been called.
+
+ >>> c = MyClass()
+ >>> c.method.cache_clear()
+
+ Another cache wrapper may be supplied:
+
+ >>> cache = functools.lru_cache(maxsize=2)
+ >>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache)
+ >>> a = MyClass()
+ >>> a.method2()
+ 3
+
+ Caution - do not subsequently wrap the method with another decorator, such
+ as ``@property``, which changes the semantics of the function.
+
+ See also
+ http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/
+ for another implementation and additional justification.
+ """
+
+ def wrapper(self: object, *args: object, **kwargs: object) -> object:
+ # it's the first call, replace the method with a cached, bound method
+ bound_method: CallableT = types.MethodType( # type: ignore[assignment]
+ method, self
+ )
+ cached_method = cache_wrapper(bound_method)
+ setattr(self, method.__name__, cached_method)
+ return cached_method(*args, **kwargs)
+
+ # Support cache clear even before cache has been created.
+ wrapper.cache_clear = lambda: None # type: ignore[attr-defined]
+
+ return ( # type: ignore[return-value]
+ _special_method_cache(method, cache_wrapper) or wrapper
+ )
+
+
+def _special_method_cache(method, cache_wrapper):
+ """
+ Because Python treats special methods differently, it's not
+ possible to use instance attributes to implement the cached
+ methods.
+
+ Instead, install the wrapper method under a different name
+ and return a simple proxy to that wrapper.
+
+ https://github.com/jaraco/jaraco.functools/issues/5
+ """
+ name = method.__name__
+ special_names = '__getattr__', '__getitem__'
+ if name not in special_names:
+ return
+
+ wrapper_name = '__cached' + name
+
+ def proxy(self, *args, **kwargs):
+ if wrapper_name not in vars(self):
+ bound = types.MethodType(method, self)
+ cache = cache_wrapper(bound)
+ setattr(self, wrapper_name, cache)
+ else:
+ cache = getattr(self, wrapper_name)
+ return cache(*args, **kwargs)
+
+ return proxy
+
+
+def apply(transform):
+ """
+ Decorate a function with a transform function that is
+ invoked on results returned from the decorated function.
+
+ >>> @apply(reversed)
+ ... def get_numbers(start):
+ ... "doc for get_numbers"
+ ... return range(start, start+3)
+ >>> list(get_numbers(4))
+ [6, 5, 4]
+ >>> get_numbers.__doc__
+ 'doc for get_numbers'
+ """
+
+ def wrap(func):
+ return functools.wraps(func)(compose(transform, func))
+
+ return wrap
+
+
+def result_invoke(action):
+ r"""
+ Decorate a function with an action function that is
+ invoked on the results returned from the decorated
+ function (for its side-effect), then return the original
+ result.
+
+ >>> @result_invoke(print)
+ ... def add_two(a, b):
+ ... return a + b
+ >>> x = add_two(2, 3)
+ 5
+ >>> x
+ 5
+ """
+
+ def wrap(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ result = func(*args, **kwargs)
+ action(result)
+ return result
+
+ return wrapper
+
+ return wrap
+
+
+def call_aside(f, *args, **kwargs):
+ """
+ Call a function for its side effect after initialization.
+
+ >>> @call_aside
+ ... def func(): print("called")
+ called
+ >>> func()
+ called
+
+ Use functools.partial to pass parameters to the initial call
+
+ >>> @functools.partial(call_aside, name='bingo')
+ ... def func(name): print("called with", name)
+ called with bingo
+ """
+ f(*args, **kwargs)
+ return f
+
+
+class Throttler:
+ """
+ Rate-limit a function (or other callable)
+ """
+
+ def __init__(self, func, max_rate=float('Inf')):
+ if isinstance(func, Throttler):
+ func = func.func
+ self.func = func
+ self.max_rate = max_rate
+ self.reset()
+
+ def reset(self):
+ self.last_called = 0
+
+ def __call__(self, *args, **kwargs):
+ self._wait()
+ return self.func(*args, **kwargs)
+
+ def _wait(self):
+ "ensure at least 1/max_rate seconds from last call"
+ elapsed = time.time() - self.last_called
+ must_wait = 1 / self.max_rate - elapsed
+ time.sleep(max(0, must_wait))
+ self.last_called = time.time()
+
+ def __get__(self, obj, type=None):
+ return first_invoke(self._wait, functools.partial(self.func, obj))
+
+
+def first_invoke(func1, func2):
+ """
+ Return a function that when invoked will invoke func1 without
+ any parameters (for its side-effect) and then invoke func2
+ with whatever parameters were passed, returning its result.
+ """
+
+ def wrapper(*args, **kwargs):
+ func1()
+ return func2(*args, **kwargs)
+
+ return wrapper
+
+
+def retry_call(func, cleanup=lambda: None, retries=0, trap=()):
+ """
+ Given a callable func, trap the indicated exceptions
+ for up to 'retries' times, invoking cleanup on the
+ exception. On the final attempt, allow any exceptions
+ to propagate.
+ """
+ attempts = itertools.count() if retries == float('inf') else range(retries)
+ for attempt in attempts:
+ try:
+ return func()
+ except trap:
+ cleanup()
+
+ return func()
+
+
+def retry(*r_args, **r_kwargs):
+ """
+ Decorator wrapper for retry_call. Accepts arguments to retry_call
+ except func and then returns a decorator for the decorated function.
+
+ Ex:
+
+ >>> @retry(retries=3)
+ ... def my_func(a, b):
+ ... "this is my funk"
+ ... print(a, b)
+ >>> my_func.__doc__
+ 'this is my funk'
+ """
+
+ def decorate(func):
+ @functools.wraps(func)
+ def wrapper(*f_args, **f_kwargs):
+ bound = functools.partial(func, *f_args, **f_kwargs)
+ return retry_call(bound, *r_args, **r_kwargs)
+
+ return wrapper
+
+ return decorate
+
+
+def print_yielded(func):
+ """
+ Convert a generator into a function that prints all yielded elements
+
+ >>> @print_yielded
+ ... def x():
+ ... yield 3; yield None
+ >>> x()
+ 3
+ None
+ """
+ print_all = functools.partial(map, print)
+ print_results = compose(more_itertools.consume, print_all, func)
+ return functools.wraps(func)(print_results)
+
+
+def pass_none(func):
+ """
+ Wrap func so it's not called if its first param is None
+
+ >>> print_text = pass_none(print)
+ >>> print_text('text')
+ text
+ >>> print_text(None)
+ """
+
+ @functools.wraps(func)
+ def wrapper(param, *args, **kwargs):
+ if param is not None:
+ return func(param, *args, **kwargs)
+
+ return wrapper
+
+
+def assign_params(func, namespace):
+ """
+ Assign parameters from namespace where func solicits.
+
+ >>> def func(x, y=3):
+ ... print(x, y)
+ >>> assigned = assign_params(func, dict(x=2, z=4))
+ >>> assigned()
+ 2 3
+
+ The usual errors are raised if a function doesn't receive
+ its required parameters:
+
+ >>> assigned = assign_params(func, dict(y=3, z=4))
+ >>> assigned()
+ Traceback (most recent call last):
+ TypeError: func() ...argument...
+
+ It even works on methods:
+
+ >>> class Handler:
+ ... def meth(self, arg):
+ ... print(arg)
+ >>> assign_params(Handler().meth, dict(arg='crystal', foo='clear'))()
+ crystal
+ """
+ sig = inspect.signature(func)
+ params = sig.parameters.keys()
+ call_ns = {k: namespace[k] for k in params if k in namespace}
+ return functools.partial(func, **call_ns)
+
+
+def save_method_args(method):
+ """
+ Wrap a method such that when it is called, the args and kwargs are
+ saved on the method.
+
+ >>> class MyClass:
+ ... @save_method_args
+ ... def method(self, a, b):
+ ... print(a, b)
+ >>> my_ob = MyClass()
+ >>> my_ob.method(1, 2)
+ 1 2
+ >>> my_ob._saved_method.args
+ (1, 2)
+ >>> my_ob._saved_method.kwargs
+ {}
+ >>> my_ob.method(a=3, b='foo')
+ 3 foo
+ >>> my_ob._saved_method.args
+ ()
+ >>> my_ob._saved_method.kwargs == dict(a=3, b='foo')
+ True
+
+ The arguments are stored on the instance, allowing for
+ different instances to save different args.
+
+ >>> your_ob = MyClass()
+ >>> your_ob.method({str('x'): 3}, b=[4])
+ {'x': 3} [4]
+ >>> your_ob._saved_method.args
+ ({'x': 3},)
+ >>> my_ob._saved_method.args
+ ()
+ """
+ args_and_kwargs = collections.namedtuple('args_and_kwargs', 'args kwargs')
+
+ @functools.wraps(method)
+ def wrapper(self, *args, **kwargs):
+ attr_name = '_saved_' + method.__name__
+ attr = args_and_kwargs(args, kwargs)
+ setattr(self, attr_name, attr)
+ return method(self, *args, **kwargs)
+
+ return wrapper
+
+
+def except_(*exceptions, replace=None, use=None):
+ """
+ Replace the indicated exceptions, if raised, with the indicated
+ literal replacement or evaluated expression (if present).
+
+ >>> safe_int = except_(ValueError)(int)
+ >>> safe_int('five')
+ >>> safe_int('5')
+ 5
+
+ Specify a literal replacement with ``replace``.
+
+ >>> safe_int_r = except_(ValueError, replace=0)(int)
+ >>> safe_int_r('five')
+ 0
+
+ Provide an expression to ``use`` to pass through particular parameters.
+
+ >>> safe_int_pt = except_(ValueError, use='args[0]')(int)
+ >>> safe_int_pt('five')
+ 'five'
+
+ """
+
+ def decorate(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ try:
+ return func(*args, **kwargs)
+ except exceptions:
+ try:
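+ # eval() sees this wrapper's locals, so `use` may reference args/kwargs
+ # (e.g. 'args[0]'); when `use` is None, the TypeError falls back to `replace`.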
+ return eval(use)
+ except TypeError:
+ return replace
+
+ return wrapper
+
+ return decorate
diff --git a/pkg_resources/_vendor/jaraco/text/Lorem ipsum.txt b/pkg_resources/_vendor/jaraco/text/Lorem ipsum.txt
new file mode 100644
index 0000000..986f944
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco/text/Lorem ipsum.txt
@@ -0,0 +1,2 @@
+Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
+Curabitur pretium tincidunt lacus. Nulla gravida orci a odio. Nullam varius, turpis et commodo pharetra, est eros bibendum elit, nec luctus magna felis sollicitudin mauris. Integer in mauris eu nibh euismod gravida. Duis ac tellus et risus vulputate vehicula. Donec lobortis risus a elit. Etiam tempor. Ut ullamcorper, ligula eu tempor congue, eros est euismod turpis, id tincidunt sapien risus a quam. Maecenas fermentum consequat mi. Donec fermentum. Pellentesque malesuada nulla a mi. Duis sapien sem, aliquet nec, commodo eget, consequat quis, neque. Aliquam faucibus, elit ut dictum aliquet, felis nisl adipiscing sapien, sed malesuada diam lacus eget erat. Cras mollis scelerisque nunc. Nullam arcu. Aliquam consequat. Curabitur augue lorem, dapibus quis, laoreet et, pretium ac, nisi. Aenean magna nisl, mollis quis, molestie eu, feugiat in, orci. In hac habitasse platea dictumst.
diff --git a/pkg_resources/_vendor/jaraco/text/__init__.py b/pkg_resources/_vendor/jaraco/text/__init__.py
new file mode 100644
index 0000000..c466378
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco/text/__init__.py
@@ -0,0 +1,599 @@
+import re
+import itertools
+import textwrap
+import functools
+
+try:
+ from importlib.resources import files # type: ignore
+except ImportError: # pragma: nocover
+ from pkg_resources.extern.importlib_resources import files # type: ignore
+
+from pkg_resources.extern.jaraco.functools import compose, method_cache
+from pkg_resources.extern.jaraco.context import ExceptionTrap
+
+
+def substitution(old, new):
+ """
+ Return a function that will perform a substitution on a string
+ """
+ return lambda s: s.replace(old, new)
+
+
+def multi_substitution(*substitutions):
+ """
+ Take a sequence of pairs specifying substitutions, and create
+ a function that performs those substitutions.
+
+ >>> multi_substitution(('foo', 'bar'), ('bar', 'baz'))('foo')
+ 'baz'
+ """
+ substitutions = itertools.starmap(substitution, substitutions)
+ # compose function applies last function first, so reverse the
+ # substitutions to get the expected order.
+ substitutions = reversed(tuple(substitutions))
+ return compose(*substitutions)
+
+
+class FoldedCase(str):
+ """
+ A case insensitive string class; behaves just like str
+ except compares equal when the only variation is case.
+
+ >>> s = FoldedCase('hello world')
+
+ >>> s == 'Hello World'
+ True
+
+ >>> 'Hello World' == s
+ True
+
+ >>> s != 'Hello World'
+ False
+
+ >>> s.index('O')
+ 4
+
+ >>> s.split('O')
+ ['hell', ' w', 'rld']
+
+ >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta']))
+ ['alpha', 'Beta', 'GAMMA']
+
+ Sequence membership is straightforward.
+
+ >>> "Hello World" in [s]
+ True
+ >>> s in ["Hello World"]
+ True
+
+ You may test for set inclusion, but candidate and elements
+ must both be folded.
+
+ >>> FoldedCase("Hello World") in {s}
+ True
+ >>> s in {FoldedCase("Hello World")}
+ True
+
+ String inclusion works as long as the FoldedCase object
+ is on the right.
+
+ >>> "hello" in FoldedCase("Hello World")
+ True
+
+ But not if the FoldedCase object is on the left:
+
+ >>> FoldedCase('hello') in 'Hello World'
+ False
+
+ In that case, use ``in_``:
+
+ >>> FoldedCase('hello').in_('Hello World')
+ True
+
+ >>> FoldedCase('hello') > FoldedCase('Hello')
+ False
+ """
+
+ def __lt__(self, other):
+ return self.lower() < other.lower()
+
+ def __gt__(self, other):
+ return self.lower() > other.lower()
+
+ def __eq__(self, other):
+ return self.lower() == other.lower()
+
+ def __ne__(self, other):
+ return self.lower() != other.lower()
+
+ def __hash__(self):
+ return hash(self.lower())
+
+ def __contains__(self, other):
+ return super().lower().__contains__(other.lower())
+
+ def in_(self, other):
+ "Does self appear in other?"
+ return self in FoldedCase(other)
+
+ # cache lower since it's likely to be called frequently.
+ @method_cache
+ def lower(self):
+ return super().lower()
+
+ def index(self, sub):
+ return self.lower().index(sub.lower())
+
+ def split(self, splitter=' ', maxsplit=0):
+ pattern = re.compile(re.escape(splitter), re.I)
+ return pattern.split(self, maxsplit)
+
+
+# Python 3.8 compatibility
+_unicode_trap = ExceptionTrap(UnicodeDecodeError)
+
+
+@_unicode_trap.passes
+def is_decodable(value):
+ r"""
+ Return True if the supplied value is decodable (using the default
+ encoding).
+
+ >>> is_decodable(b'\xff')
+ False
+ >>> is_decodable(b'\x32')
+ True
+ """
+ value.decode()
+
+
+def is_binary(value):
+ r"""
+ Return True if the value appears to be binary (that is, it's a byte
+ string and isn't decodable).
+
+ >>> is_binary(b'\xff')
+ True
+ >>> is_binary('\xff')
+ False
+ """
+ return isinstance(value, bytes) and not is_decodable(value)
+
+
+def trim(s):
+ r"""
+ Trim something like a docstring to remove the whitespace that
+ is common due to indentation and formatting.
+
+ >>> trim("\n\tfoo = bar\n\t\tbar = baz\n")
+ 'foo = bar\n\tbar = baz'
+ """
+ return textwrap.dedent(s).strip()
+
+
+def wrap(s):
+ """
+ Wrap lines of text, retaining existing newlines as
+ paragraph markers.
+
+ >>> print(wrap(lorem_ipsum))
+ Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do
+ eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad
+ minim veniam, quis nostrud exercitation ullamco laboris nisi ut
+ aliquip ex ea commodo consequat. Duis aute irure dolor in
+ reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla
+ pariatur. Excepteur sint occaecat cupidatat non proident, sunt in
+ culpa qui officia deserunt mollit anim id est laborum.
+ <BLANKLINE>
+ Curabitur pretium tincidunt lacus. Nulla gravida orci a odio. Nullam
+ varius, turpis et commodo pharetra, est eros bibendum elit, nec luctus
+ magna felis sollicitudin mauris. Integer in mauris eu nibh euismod
+ gravida. Duis ac tellus et risus vulputate vehicula. Donec lobortis
+ risus a elit. Etiam tempor. Ut ullamcorper, ligula eu tempor congue,
+ eros est euismod turpis, id tincidunt sapien risus a quam. Maecenas
+ fermentum consequat mi. Donec fermentum. Pellentesque malesuada nulla
+ a mi. Duis sapien sem, aliquet nec, commodo eget, consequat quis,
+ neque. Aliquam faucibus, elit ut dictum aliquet, felis nisl adipiscing
+ sapien, sed malesuada diam lacus eget erat. Cras mollis scelerisque
+ nunc. Nullam arcu. Aliquam consequat. Curabitur augue lorem, dapibus
+ quis, laoreet et, pretium ac, nisi. Aenean magna nisl, mollis quis,
+ molestie eu, feugiat in, orci. In hac habitasse platea dictumst.
+ """
+ paragraphs = s.splitlines()
+ wrapped = ('\n'.join(textwrap.wrap(para)) for para in paragraphs)
+ return '\n\n'.join(wrapped)
+
+
+def unwrap(s):
+ r"""
+ Given a multi-line string, return an unwrapped version.
+
+ >>> wrapped = wrap(lorem_ipsum)
+ >>> wrapped.count('\n')
+ 20
+ >>> unwrapped = unwrap(wrapped)
+ >>> unwrapped.count('\n')
+ 1
+ >>> print(unwrapped)
+ Lorem ipsum dolor sit amet, consectetur adipiscing ...
+ Curabitur pretium tincidunt lacus. Nulla gravida orci ...
+
+ """
+ paragraphs = re.split(r'\n\n+', s)
+ cleaned = (para.replace('\n', ' ') for para in paragraphs)
+ return '\n'.join(cleaned)
+
+
+lorem_ipsum: str = files(__name__).joinpath('Lorem ipsum.txt').read_text()
+
+
+class Splitter(object):
+ """object that will split a string with the given arguments for each call
+
+ >>> s = Splitter(',')
+ >>> s('hello, world, this is your, master calling')
+ ['hello', ' world', ' this is your', ' master calling']
+ """
+
+ def __init__(self, *args):
+ self.args = args
+
+ def __call__(self, s):
+ return s.split(*self.args)
+
+
+def indent(string, prefix=' ' * 4):
+ """
+ >>> indent('foo')
+ ' foo'
+ """
+ return prefix + string
+
+
+class WordSet(tuple):
+ """
+ Given an identifier, return the words that identifier represents,
+ whether in camel case, underscore-separated, etc.
+
+ >>> WordSet.parse("camelCase")
+ ('camel', 'Case')
+
+ >>> WordSet.parse("under_sep")
+ ('under', 'sep')
+
+ Acronyms should be retained
+
+ >>> WordSet.parse("firstSNL")
+ ('first', 'SNL')
+
+ >>> WordSet.parse("you_and_I")
+ ('you', 'and', 'I')
+
+ >>> WordSet.parse("A simple test")
+ ('A', 'simple', 'test')
+
+ Multiple caps should not interfere with the first cap of another word.
+
+ >>> WordSet.parse("myABCClass")
+ ('my', 'ABC', 'Class')
+
+ The result is a WordSet, so you can get the form you need.
+
+ >>> WordSet.parse("myABCClass").underscore_separated()
+ 'my_ABC_Class'
+
+ >>> WordSet.parse('a-command').camel_case()
+ 'ACommand'
+
+ >>> WordSet.parse('someIdentifier').lowered().space_separated()
+ 'some identifier'
+
+ Slices of the result should return another WordSet.
+
+ >>> WordSet.parse('taken-out-of-context')[1:].underscore_separated()
+ 'out_of_context'
+
+ >>> WordSet.from_class_name(WordSet()).lowered().space_separated()
+ 'word set'
+
+ >>> example = WordSet.parse('figured it out')
+ >>> example.headless_camel_case()
+ 'figuredItOut'
+ >>> example.dash_separated()
+ 'figured-it-out'
+
+ """
+
+ _pattern = re.compile('([A-Z]?[a-z]+)|([A-Z]+(?![a-z]))')
+
+ def capitalized(self):
+ return WordSet(word.capitalize() for word in self)
+
+ def lowered(self):
+ return WordSet(word.lower() for word in self)
+
+ def camel_case(self):
+ return ''.join(self.capitalized())
+
+ def headless_camel_case(self):
+ words = iter(self)
+ first = next(words).lower()
+ new_words = itertools.chain((first,), WordSet(words).camel_case())
+ return ''.join(new_words)
+
+ def underscore_separated(self):
+ return '_'.join(self)
+
+ def dash_separated(self):
+ return '-'.join(self)
+
+ def space_separated(self):
+ return ' '.join(self)
+
+ def trim_right(self, item):
+ """
+ Remove the item from the end of the set.
+
+ >>> WordSet.parse('foo bar').trim_right('foo')
+ ('foo', 'bar')
+ >>> WordSet.parse('foo bar').trim_right('bar')
+ ('foo',)
+ >>> WordSet.parse('').trim_right('bar')
+ ()
+ """
+ return self[:-1] if self and self[-1] == item else self
+
+ def trim_left(self, item):
+ """
+ Remove the item from the beginning of the set.
+
+ >>> WordSet.parse('foo bar').trim_left('foo')
+ ('bar',)
+ >>> WordSet.parse('foo bar').trim_left('bar')
+ ('foo', 'bar')
+ >>> WordSet.parse('').trim_left('bar')
+ ()
+ """
+ return self[1:] if self and self[0] == item else self
+
+ def trim(self, item):
+ """
+ >>> WordSet.parse('foo bar').trim('foo')
+ ('bar',)
+ """
+ return self.trim_left(item).trim_right(item)
+
+ def __getitem__(self, item):
+ result = super(WordSet, self).__getitem__(item)
+ if isinstance(item, slice):
+ result = WordSet(result)
+ return result
+
+ @classmethod
+ def parse(cls, identifier):
+ matches = cls._pattern.finditer(identifier)
+ return WordSet(match.group(0) for match in matches)
+
+ @classmethod
+ def from_class_name(cls, subject):
+ return cls.parse(subject.__class__.__name__)
+
+
+# for backward compatibility
+words = WordSet.parse
+
+
+def simple_html_strip(s):
+ r"""
+ Remove HTML from the string `s`.
+
+ >>> str(simple_html_strip(''))
+ ''
+
+ >>> print(simple_html_strip('A <bold>stormy</bold> day in paradise'))
+ A stormy day in paradise
+
+ >>> print(simple_html_strip('Somebody <!-- do not --> tell the truth.'))
+ Somebody tell the truth.
+
+ >>> print(simple_html_strip('What about<br/>\nmultiple lines?'))
+ What about
+ multiple lines?
+ """
+ html_stripper = re.compile('(<!--.*?-->)|(<[^>]*>)|([^<]+)', re.DOTALL)
+ texts = (match.group(3) or '' for match in html_stripper.finditer(s))
+ return ''.join(texts)
+
+
+class SeparatedValues(str):
+ """
+ A string separated by a separator. Overrides __iter__ for getting
+ the values.
+
+ >>> list(SeparatedValues('a,b,c'))
+ ['a', 'b', 'c']
+
+ Whitespace is stripped and empty values are discarded.
+
+ >>> list(SeparatedValues(' a, b , c, '))
+ ['a', 'b', 'c']
+ """
+
+ separator = ','
+
+ def __iter__(self):
+ parts = self.split(self.separator)
+ return filter(None, (part.strip() for part in parts))
+
+
+class Stripper:
+ r"""
+ Given a series of lines, find the common prefix and strip it from them.
+
+ >>> lines = [
+ ... 'abcdefg\n',
+ ... 'abc\n',
+ ... 'abcde\n',
+ ... ]
+ >>> res = Stripper.strip_prefix(lines)
+ >>> res.prefix
+ 'abc'
+ >>> list(res.lines)
+ ['defg\n', '\n', 'de\n']
+
+ If no prefix is common, nothing should be stripped.
+
+ >>> lines = [
+ ... 'abcd\n',
+ ... '1234\n',
+ ... ]
+ >>> res = Stripper.strip_prefix(lines)
+    >>> res.prefix
+    ''
+ >>> list(res.lines)
+ ['abcd\n', '1234\n']
+ """
+
+ def __init__(self, prefix, lines):
+ self.prefix = prefix
+ self.lines = map(self, lines)
+
+ @classmethod
+ def strip_prefix(cls, lines):
+ prefix_lines, lines = itertools.tee(lines)
+ prefix = functools.reduce(cls.common_prefix, prefix_lines)
+ return cls(prefix, lines)
+
+ def __call__(self, line):
+ if not self.prefix:
+ return line
+ null, prefix, rest = line.partition(self.prefix)
+ return rest
+
+ @staticmethod
+ def common_prefix(s1, s2):
+ """
+ Return the common prefix of two lines.
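+
+        For example:
+
+        >>> Stripper.common_prefix('abcdefg', 'abc123')
+        'abc'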
+ """
+ index = min(len(s1), len(s2))
+ while s1[:index] != s2[:index]:
+ index -= 1
+ return s1[:index]
+
+
+def remove_prefix(text, prefix):
+ """
+ Remove the prefix from the text if it exists.
+
+ >>> remove_prefix('underwhelming performance', 'underwhelming ')
+ 'performance'
+
+ >>> remove_prefix('something special', 'sample')
+ 'something special'
+ """
+ null, prefix, rest = text.rpartition(prefix)
+ return rest
+
+
+def remove_suffix(text, suffix):
+ """
+ Remove the suffix from the text if it exists.
+
+ >>> remove_suffix('name.git', '.git')
+ 'name'
+
+ >>> remove_suffix('something special', 'sample')
+ 'something special'
+ """
+ rest, suffix, null = text.partition(suffix)
+ return rest
+
+
+def normalize_newlines(text):
+ r"""
+ Replace alternate newlines with the canonical newline.
+
+ >>> normalize_newlines('Lorem Ipsum\u2029')
+ 'Lorem Ipsum\n'
+ >>> normalize_newlines('Lorem Ipsum\r\n')
+ 'Lorem Ipsum\n'
+ >>> normalize_newlines('Lorem Ipsum\x85')
+ 'Lorem Ipsum\n'
+ """
+ newlines = ['\r\n', '\r', '\n', '\u0085', '\u2028', '\u2029']
+ pattern = '|'.join(newlines)
+ return re.sub(pattern, '\n', text)
+
+
+def _nonblank(str):
+ return str and not str.startswith('#')
+
+
+@functools.singledispatch
+def yield_lines(iterable):
+ r"""
+ Yield valid lines of a string or iterable.
+
+ >>> list(yield_lines(''))
+ []
+ >>> list(yield_lines(['foo', 'bar']))
+ ['foo', 'bar']
+ >>> list(yield_lines('foo\nbar'))
+ ['foo', 'bar']
+ >>> list(yield_lines('\nfoo\n#bar\nbaz #comment'))
+ ['foo', 'baz #comment']
+ >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n']))
+ ['foo', 'bar', 'baz', 'bing']
+ """
+ return itertools.chain.from_iterable(map(yield_lines, iterable))
+
+
+@yield_lines.register(str)
+def _(text):
+ return filter(_nonblank, map(str.strip, text.splitlines()))
+
+
+def drop_comment(line):
+ """
+ Drop comments.
+
+ >>> drop_comment('foo # bar')
+ 'foo'
+
+ A hash without a space may be in a URL.
+
+ >>> drop_comment('http://example.com/foo#bar')
+ 'http://example.com/foo#bar'
+ """
+ return line.partition(' #')[0]
+
+
+def join_continuation(lines):
+ r"""
+ Join lines continued by a trailing backslash.
+
+ >>> list(join_continuation(['foo \\', 'bar', 'baz']))
+ ['foobar', 'baz']
+ >>> list(join_continuation(['foo \\', 'bar', 'baz']))
+ ['foobar', 'baz']
+ >>> list(join_continuation(['foo \\', 'bar \\', 'baz']))
+ ['foobarbaz']
+
+ Not sure why, but...
+    The character preceding the backslash is also elided.
+
+ >>> list(join_continuation(['goo\\', 'dly']))
+ ['godly']
+
+ A terrible idea, but...
+ If no line is available to continue, suppress the lines.
+
+ >>> list(join_continuation(['foo', 'bar\\', 'baz\\']))
+ ['foo']
+ """
+ lines = iter(lines)
+ for item in lines:
+ while item.endswith('\\'):
+ try:
+ item = item[:-2].strip() + next(lines)
+ except StopIteration:
+ return
+ yield item
diff --git a/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/top_level.txt b/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/top_level.txt
new file mode 100644
index 0000000..a5035be
--- /dev/null
+++ b/pkg_resources/_vendor/more_itertools-8.12.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+more_itertools
diff --git a/pkg_resources/_vendor/more_itertools/__init__.py b/pkg_resources/_vendor/more_itertools/__init__.py
new file mode 100644
index 0000000..ea38bef
--- /dev/null
+++ b/pkg_resources/_vendor/more_itertools/__init__.py
@@ -0,0 +1,4 @@
+from .more import * # noqa
+from .recipes import * # noqa
+
+__version__ = '8.12.0'
diff --git a/pkg_resources/_vendor/more_itertools/more.py b/pkg_resources/_vendor/more_itertools/more.py
new file mode 100644
index 0000000..6b6a5ca
--- /dev/null
+++ b/pkg_resources/_vendor/more_itertools/more.py
@@ -0,0 +1,4316 @@
+import warnings
+
+from collections import Counter, defaultdict, deque, abc
+from collections.abc import Sequence
+from functools import partial, reduce, wraps
+from heapq import merge, heapify, heapreplace, heappop
+from itertools import (
+ chain,
+ compress,
+ count,
+ cycle,
+ dropwhile,
+ groupby,
+ islice,
+ repeat,
+ starmap,
+ takewhile,
+ tee,
+ zip_longest,
+)
+from math import exp, factorial, floor, log
+from queue import Empty, Queue
+from random import random, randrange, uniform
+from operator import itemgetter, mul, sub, gt, lt, ge, le
+from sys import hexversion, maxsize
+from time import monotonic
+
+from .recipes import (
+ consume,
+ flatten,
+ pairwise,
+ powerset,
+ take,
+ unique_everseen,
+)
+
+__all__ = [
+ 'AbortThread',
+ 'SequenceView',
+ 'UnequalIterablesError',
+ 'adjacent',
+ 'all_unique',
+ 'always_iterable',
+ 'always_reversible',
+ 'bucket',
+ 'callback_iter',
+ 'chunked',
+ 'chunked_even',
+ 'circular_shifts',
+ 'collapse',
+ 'collate',
+ 'combination_index',
+ 'consecutive_groups',
+ 'consumer',
+ 'count_cycle',
+ 'countable',
+ 'difference',
+ 'distinct_combinations',
+ 'distinct_permutations',
+ 'distribute',
+ 'divide',
+ 'duplicates_everseen',
+ 'duplicates_justseen',
+ 'exactly_n',
+ 'filter_except',
+ 'first',
+ 'groupby_transform',
+ 'ichunked',
+ 'ilen',
+ 'interleave',
+ 'interleave_evenly',
+ 'interleave_longest',
+ 'intersperse',
+ 'is_sorted',
+ 'islice_extended',
+ 'iterate',
+ 'last',
+ 'locate',
+ 'lstrip',
+ 'make_decorator',
+ 'map_except',
+ 'map_if',
+ 'map_reduce',
+ 'mark_ends',
+ 'minmax',
+ 'nth_or_last',
+ 'nth_permutation',
+ 'nth_product',
+ 'numeric_range',
+ 'one',
+ 'only',
+ 'padded',
+ 'partitions',
+ 'peekable',
+ 'permutation_index',
+ 'product_index',
+ 'raise_',
+ 'repeat_each',
+ 'repeat_last',
+ 'replace',
+ 'rlocate',
+ 'rstrip',
+ 'run_length',
+ 'sample',
+ 'seekable',
+ 'set_partitions',
+ 'side_effect',
+ 'sliced',
+ 'sort_together',
+ 'split_after',
+ 'split_at',
+ 'split_before',
+ 'split_into',
+ 'split_when',
+ 'spy',
+ 'stagger',
+ 'strip',
+ 'strictly_n',
+ 'substrings',
+ 'substrings_indexes',
+ 'time_limited',
+ 'unique_in_window',
+ 'unique_to_each',
+ 'unzip',
+ 'value_chain',
+ 'windowed',
+ 'windowed_complete',
+ 'with_iter',
+ 'zip_broadcast',
+ 'zip_equal',
+ 'zip_offset',
+]
+
+
+_marker = object()
+
+
+def chunked(iterable, n, strict=False):
+ """Break *iterable* into lists of length *n*:
+
+ >>> list(chunked([1, 2, 3, 4, 5, 6], 3))
+ [[1, 2, 3], [4, 5, 6]]
+
+    By default, the last yielded list will have fewer than *n* elements
+ if the length of *iterable* is not divisible by *n*:
+
+ >>> list(chunked([1, 2, 3, 4, 5, 6, 7, 8], 3))
+ [[1, 2, 3], [4, 5, 6], [7, 8]]
+
+ To use a fill-in value instead, see the :func:`grouper` recipe.
+
+ If the length of *iterable* is not divisible by *n* and *strict* is
+ ``True``, then ``ValueError`` will be raised before the last
+ list is yielded.
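+
+    For example:
+
+    >>> it = chunked([1, 2, 3, 4], 3, strict=True)
+    >>> list(it)  # doctest: +IGNORE_EXCEPTION_DETAIL
+    Traceback (most recent call last):
+    ...
+    ValueError: iterable is not divisible by n.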
+
+ """
+ iterator = iter(partial(take, n, iter(iterable)), [])
+ if strict:
+ if n is None:
+ raise ValueError('n must not be None when using strict mode.')
+
+ def ret():
+ for chunk in iterator:
+ if len(chunk) != n:
+ raise ValueError('iterable is not divisible by n.')
+ yield chunk
+
+ return iter(ret())
+ else:
+ return iterator
+
+
+def first(iterable, default=_marker):
+ """Return the first item of *iterable*, or *default* if *iterable* is
+ empty.
+
+ >>> first([0, 1, 2, 3])
+ 0
+ >>> first([], 'some default')
+ 'some default'
+
+ If *default* is not provided and there are no items in the iterable,
+ raise ``ValueError``.
+
+ :func:`first` is useful when you have a generator of expensive-to-retrieve
+ values and want any arbitrary one. It is marginally shorter than
+ ``next(iter(iterable), default)``.
+
+ """
+ try:
+ return next(iter(iterable))
+ except StopIteration as e:
+ if default is _marker:
+ raise ValueError(
+ 'first() was called on an empty iterable, and no '
+ 'default value was provided.'
+ ) from e
+ return default
+
+
+def last(iterable, default=_marker):
+ """Return the last item of *iterable*, or *default* if *iterable* is
+ empty.
+
+ >>> last([0, 1, 2, 3])
+ 3
+ >>> last([], 'some default')
+ 'some default'
+
+ If *default* is not provided and there are no items in the iterable,
+ raise ``ValueError``.
+ """
+ try:
+ if isinstance(iterable, Sequence):
+ return iterable[-1]
+ # Work around https://bugs.python.org/issue38525
+ elif hasattr(iterable, '__reversed__') and (hexversion != 0x030800F0):
+ return next(reversed(iterable))
+ else:
+ return deque(iterable, maxlen=1)[-1]
+ except (IndexError, TypeError, StopIteration):
+ if default is _marker:
+ raise ValueError(
+ 'last() was called on an empty iterable, and no default was '
+ 'provided.'
+ )
+ return default
+
+
+def nth_or_last(iterable, n, default=_marker):
+ """Return the nth or the last item of *iterable*,
+ or *default* if *iterable* is empty.
+
+ >>> nth_or_last([0, 1, 2, 3], 2)
+ 2
+ >>> nth_or_last([0, 1], 2)
+ 1
+ >>> nth_or_last([], 0, 'some default')
+ 'some default'
+
+ If *default* is not provided and there are no items in the iterable,
+ raise ``ValueError``.
+ """
+ return last(islice(iterable, n + 1), default=default)
+
+
+class peekable:
+ """Wrap an iterator to allow lookahead and prepending elements.
+
+ Call :meth:`peek` on the result to get the value that will be returned
+ by :func:`next`. This won't advance the iterator:
+
+ >>> p = peekable(['a', 'b'])
+ >>> p.peek()
+ 'a'
+ >>> next(p)
+ 'a'
+
+ Pass :meth:`peek` a default value to return that instead of raising
+ ``StopIteration`` when the iterator is exhausted.
+
+ >>> p = peekable([])
+ >>> p.peek('hi')
+ 'hi'
+
+ peekables also offer a :meth:`prepend` method, which "inserts" items
+ at the head of the iterable:
+
+ >>> p = peekable([1, 2, 3])
+ >>> p.prepend(10, 11, 12)
+ >>> next(p)
+ 10
+ >>> p.peek()
+ 11
+ >>> list(p)
+ [11, 12, 1, 2, 3]
+
+ peekables can be indexed. Index 0 is the item that will be returned by
+    :func:`next`, index 1 is the item after that, and so on. The values up to
+    the given index will be cached:
+
+ >>> p = peekable(['a', 'b', 'c', 'd'])
+ >>> p[0]
+ 'a'
+ >>> p[1]
+ 'b'
+ >>> next(p)
+ 'a'
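+
+    Slices are supported as well; the items needed to satisfy the slice are
+    cached:
+
+    >>> p = peekable(['a', 'b', 'c', 'd'])
+    >>> p[1:3]
+    ['b', 'c']
+    >>> next(p)
+    'a'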
+
+ Negative indexes are supported, but be aware that they will cache the
+ remaining items in the source iterator, which may require significant
+ storage.
+
+ To check whether a peekable is exhausted, check its truth value:
+
+ >>> p = peekable(['a', 'b'])
+ >>> if p: # peekable has items
+ ... list(p)
+ ['a', 'b']
+ >>> if not p: # peekable is exhausted
+ ... list(p)
+ []
+
+ """
+
+ def __init__(self, iterable):
+ self._it = iter(iterable)
+ self._cache = deque()
+
+ def __iter__(self):
+ return self
+
+ def __bool__(self):
+ try:
+ self.peek()
+ except StopIteration:
+ return False
+ return True
+
+ def peek(self, default=_marker):
+ """Return the item that will be next returned from ``next()``.
+
+ Return ``default`` if there are no items left. If ``default`` is not
+ provided, raise ``StopIteration``.
+
+ """
+ if not self._cache:
+ try:
+ self._cache.append(next(self._it))
+ except StopIteration:
+ if default is _marker:
+ raise
+ return default
+ return self._cache[0]
+
+ def prepend(self, *items):
+ """Stack up items to be the next ones returned from ``next()`` or
+ ``self.peek()``. The items will be returned in
+ first in, first out order::
+
+ >>> p = peekable([1, 2, 3])
+ >>> p.prepend(10, 11, 12)
+ >>> next(p)
+ 10
+ >>> list(p)
+ [11, 12, 1, 2, 3]
+
+ It is possible, by prepending items, to "resurrect" a peekable that
+ previously raised ``StopIteration``.
+
+ >>> p = peekable([])
+ >>> next(p)
+ Traceback (most recent call last):
+ ...
+ StopIteration
+ >>> p.prepend(1)
+ >>> next(p)
+ 1
+ >>> next(p)
+ Traceback (most recent call last):
+ ...
+ StopIteration
+
+ """
+ self._cache.extendleft(reversed(items))
+
+ def __next__(self):
+ if self._cache:
+ return self._cache.popleft()
+
+ return next(self._it)
+
+ def _get_slice(self, index):
+ # Normalize the slice's arguments
+ step = 1 if (index.step is None) else index.step
+ if step > 0:
+ start = 0 if (index.start is None) else index.start
+ stop = maxsize if (index.stop is None) else index.stop
+ elif step < 0:
+ start = -1 if (index.start is None) else index.start
+ stop = (-maxsize - 1) if (index.stop is None) else index.stop
+ else:
+ raise ValueError('slice step cannot be zero')
+
+ # If either the start or stop index is negative, we'll need to cache
+ # the rest of the iterable in order to slice from the right side.
+ if (start < 0) or (stop < 0):
+ self._cache.extend(self._it)
+ # Otherwise we'll need to find the rightmost index and cache to that
+ # point.
+ else:
+ n = min(max(start, stop) + 1, maxsize)
+ cache_len = len(self._cache)
+ if n >= cache_len:
+ self._cache.extend(islice(self._it, n - cache_len))
+
+ return list(self._cache)[index]
+
+ def __getitem__(self, index):
+ if isinstance(index, slice):
+ return self._get_slice(index)
+
+ cache_len = len(self._cache)
+ if index < 0:
+ self._cache.extend(self._it)
+ elif index >= cache_len:
+ self._cache.extend(islice(self._it, index + 1 - cache_len))
+
+ return self._cache[index]
+
+
+def collate(*iterables, **kwargs):
+ """Return a sorted merge of the items from each of several already-sorted
+ *iterables*.
+
+ >>> list(collate('ACDZ', 'AZ', 'JKL'))
+ ['A', 'A', 'C', 'D', 'J', 'K', 'L', 'Z', 'Z']
+
+ Works lazily, keeping only the next value from each iterable in memory. Use
+    :func:`collate` to, for example, perform an n-way mergesort of items that
+ don't fit in memory.
+
+ If a *key* function is specified, the iterables will be sorted according
+ to its result:
+
+ >>> key = lambda s: int(s) # Sort by numeric value, not by string
+ >>> list(collate(['1', '10'], ['2', '11'], key=key))
+ ['1', '2', '10', '11']
+
+
+ If the *iterables* are sorted in descending order, set *reverse* to
+ ``True``:
+
+ >>> list(collate([5, 3, 1], [4, 2, 0], reverse=True))
+ [5, 4, 3, 2, 1, 0]
+
+ If the elements of the passed-in iterables are out of order, you might get
+ unexpected results.
+
+ On Python 3.5+, this function is an alias for :func:`heapq.merge`.
+
+ """
+ warnings.warn(
+ "collate is no longer part of more_itertools, use heapq.merge",
+ DeprecationWarning,
+ )
+ return merge(*iterables, **kwargs)
+
+
+def consumer(func):
+ """Decorator that automatically advances a PEP-342-style "reverse iterator"
+ to its first yield point so you don't have to call ``next()`` on it
+ manually.
+
+ >>> @consumer
+ ... def tally():
+ ... i = 0
+ ... while True:
+ ... print('Thing number %s is %s.' % (i, (yield)))
+ ... i += 1
+ ...
+ >>> t = tally()
+ >>> t.send('red')
+ Thing number 0 is red.
+ >>> t.send('fish')
+ Thing number 1 is fish.
+
+ Without the decorator, you would have to call ``next(t)`` before
+ ``t.send()`` could be used.
+
+ """
+
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ gen = func(*args, **kwargs)
+ next(gen)
+ return gen
+
+ return wrapper
+
+
+def ilen(iterable):
+ """Return the number of items in *iterable*.
+
+ >>> ilen(x for x in range(1000000) if x % 3 == 0)
+ 333334
+
+ This consumes the iterable, so handle with care.
+
+ """
+ # This approach was selected because benchmarks showed it's likely the
+ # fastest of the known implementations at the time of writing.
+ # See GitHub tracker: #236, #230.
+ counter = count()
+ deque(zip(iterable, counter), maxlen=0)
+ return next(counter)
+
+
+def iterate(func, start):
+ """Return ``start``, ``func(start)``, ``func(func(start))``, ...
+
+ >>> from itertools import islice
+ >>> list(islice(iterate(lambda x: 2*x, 1), 10))
+ [1, 2, 4, 8, 16, 32, 64, 128, 256, 512]
+
+ """
+ while True:
+ yield start
+ start = func(start)
+
+
+def with_iter(context_manager):
+ """Wrap an iterable in a ``with`` statement, so it closes once exhausted.
+
+ For example, this will close the file when the iterator is exhausted::
+
+ upper_lines = (line.upper() for line in with_iter(open('foo')))
+
+ Any context manager which returns an iterable is a candidate for
+ ``with_iter``.
+
+ """
+ with context_manager as iterable:
+ yield from iterable
+
+
+def one(iterable, too_short=None, too_long=None):
+ """Return the first item from *iterable*, which is expected to contain only
+ that item. Raise an exception if *iterable* is empty or has more than one
+ item.
+
+ :func:`one` is useful for ensuring that an iterable contains only one item.
+ For example, it can be used to retrieve the result of a database query
+ that is expected to return a single row.
+
+ If *iterable* is empty, ``ValueError`` will be raised. You may specify a
+ different exception with the *too_short* keyword:
+
+ >>> it = []
+ >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+    ValueError: too few items in iterable (expected 1)
+ >>> too_short = IndexError('too few items')
+ >>> one(it, too_short=too_short) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ IndexError: too few items
+
+ Similarly, if *iterable* contains more than one item, ``ValueError`` will
+ be raised. You may specify a different exception with the *too_long*
+ keyword:
+
+ >>> it = ['too', 'many']
+ >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ ValueError: Expected exactly one item in iterable, but got 'too',
+ 'many', and perhaps more.
+ >>> too_long = RuntimeError
+ >>> one(it, too_long=too_long) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ RuntimeError
+
+ Note that :func:`one` attempts to advance *iterable* twice to ensure there
+ is only one item. See :func:`spy` or :func:`peekable` to check iterable
+ contents less destructively.
+
+ """
+ it = iter(iterable)
+
+ try:
+ first_value = next(it)
+ except StopIteration as e:
+ raise (
+ too_short or ValueError('too few items in iterable (expected 1)')
+ ) from e
+
+ try:
+ second_value = next(it)
+ except StopIteration:
+ pass
+ else:
+ msg = (
+ 'Expected exactly one item in iterable, but got {!r}, {!r}, '
+ 'and perhaps more.'.format(first_value, second_value)
+ )
+ raise too_long or ValueError(msg)
+
+ return first_value
+
+
+def raise_(exception, *args):
+ raise exception(*args)
+
+
+def strictly_n(iterable, n, too_short=None, too_long=None):
+ """Validate that *iterable* has exactly *n* items and return them if
+ it does. If it has fewer than *n* items, call function *too_short*
+ with those items. If it has more than *n* items, call function
+ *too_long* with the first ``n + 1`` items.
+
+ >>> iterable = ['a', 'b', 'c', 'd']
+ >>> n = 4
+ >>> list(strictly_n(iterable, n))
+ ['a', 'b', 'c', 'd']
+
+ By default, *too_short* and *too_long* are functions that raise
+ ``ValueError``.
+
+ >>> list(strictly_n('ab', 3)) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ ValueError: too few items in iterable (got 2)
+
+ >>> list(strictly_n('abc', 2)) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ ValueError: too many items in iterable (got at least 3)
+
+ You can instead supply functions that do something else.
+ *too_short* will be called with the number of items in *iterable*.
+ *too_long* will be called with `n + 1`.
+
+ >>> def too_short(item_count):
+ ... raise RuntimeError
+ >>> it = strictly_n('abcd', 6, too_short=too_short)
+ >>> list(it) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ RuntimeError
+
+ >>> def too_long(item_count):
+ ... print('The boss is going to hear about this')
+ >>> it = strictly_n('abcdef', 4, too_long=too_long)
+ >>> list(it)
+ The boss is going to hear about this
+ ['a', 'b', 'c', 'd']
+
+ """
+ if too_short is None:
+ too_short = lambda item_count: raise_(
+ ValueError,
+ 'Too few items in iterable (got {})'.format(item_count),
+ )
+
+ if too_long is None:
+ too_long = lambda item_count: raise_(
+ ValueError,
+ 'Too many items in iterable (got at least {})'.format(item_count),
+ )
+
+ it = iter(iterable)
+ for i in range(n):
+ try:
+ item = next(it)
+ except StopIteration:
+ too_short(i)
+ return
+ else:
+ yield item
+
+ try:
+ next(it)
+ except StopIteration:
+ pass
+ else:
+ too_long(n + 1)
+
+
+def distinct_permutations(iterable, r=None):
+ """Yield successive distinct permutations of the elements in *iterable*.
+
+ >>> sorted(distinct_permutations([1, 0, 1]))
+ [(0, 1, 1), (1, 0, 1), (1, 1, 0)]
+
+ Equivalent to ``set(permutations(iterable))``, except duplicates are not
+ generated and thrown away. For larger input sequences this is much more
+ efficient.
+
+ Duplicate permutations arise when there are duplicated elements in the
+ input iterable. The number of items returned is
+ `n! / (x_1! * x_2! * ... * x_n!)`, where `n` is the total number of
+ items input, and each `x_i` is the count of a distinct item in the input
+ sequence.
+
+ If *r* is given, only the *r*-length permutations are yielded.
+
+ >>> sorted(distinct_permutations([1, 0, 1], r=2))
+ [(0, 1), (1, 0), (1, 1)]
+ >>> sorted(distinct_permutations(range(3), r=2))
+ [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
+
+ """
+ # Algorithm: https://w.wiki/Qai
+ def _full(A):
+ while True:
+ # Yield the permutation we have
+ yield tuple(A)
+
+ # Find the largest index i such that A[i] < A[i + 1]
+ for i in range(size - 2, -1, -1):
+ if A[i] < A[i + 1]:
+ break
+ # If no such index exists, this permutation is the last one
+ else:
+ return
+
+            # Find the largest index j greater than i such that A[i] < A[j]
+ for j in range(size - 1, i, -1):
+ if A[i] < A[j]:
+ break
+
+ # Swap the value of A[i] with that of A[j], then reverse the
+ # sequence from A[i + 1] to form the new permutation
+ A[i], A[j] = A[j], A[i]
+ A[i + 1 :] = A[: i - size : -1] # A[i + 1:][::-1]
+
+ # Algorithm: modified from the above
+ def _partial(A, r):
+ # Split A into the first r items and the last r items
+ head, tail = A[:r], A[r:]
+ right_head_indexes = range(r - 1, -1, -1)
+ left_tail_indexes = range(len(tail))
+
+ while True:
+ # Yield the permutation we have
+ yield tuple(head)
+
+ # Starting from the right, find the first index of the head with
+ # value smaller than the maximum value of the tail - call it i.
+ pivot = tail[-1]
+ for i in right_head_indexes:
+ if head[i] < pivot:
+ break
+ pivot = head[i]
+ else:
+ return
+
+ # Starting from the left, find the first value of the tail
+ # with a value greater than head[i] and swap.
+ for j in left_tail_indexes:
+ if tail[j] > head[i]:
+ head[i], tail[j] = tail[j], head[i]
+ break
+ # If we didn't find one, start from the right and find the first
+ # index of the head with a value greater than head[i] and swap.
+ else:
+ for j in right_head_indexes:
+ if head[j] > head[i]:
+ head[i], head[j] = head[j], head[i]
+ break
+
+ # Reverse head[i + 1:] and swap it with tail[:r - (i + 1)]
+ tail += head[: i - r : -1] # head[i + 1:][::-1]
+ i += 1
+ head[i:], tail[:] = tail[: r - i], tail[r - i :]
+
+ items = sorted(iterable)
+
+ size = len(items)
+ if r is None:
+ r = size
+
+ if 0 < r <= size:
+ return _full(items) if (r == size) else _partial(items, r)
+
+ return iter(() if r else ((),))
+
+
+def intersperse(e, iterable, n=1):
+ """Intersperse filler element *e* among the items in *iterable*, leaving
+ *n* items between each filler element.
+
+ >>> list(intersperse('!', [1, 2, 3, 4, 5]))
+ [1, '!', 2, '!', 3, '!', 4, '!', 5]
+
+ >>> list(intersperse(None, [1, 2, 3, 4, 5], n=2))
+ [1, 2, None, 3, 4, None, 5]
+
+ """
+ if n == 0:
+ raise ValueError('n must be > 0')
+ elif n == 1:
+ # interleave(repeat(e), iterable) -> e, x_0, e, x_1, e, x_2...
+ # islice(..., 1, None) -> x_0, e, x_1, e, x_2...
+ return islice(interleave(repeat(e), iterable), 1, None)
+ else:
+ # interleave(filler, chunks) -> [e], [x_0, x_1], [e], [x_2, x_3]...
+ # islice(..., 1, None) -> [x_0, x_1], [e], [x_2, x_3]...
+ # flatten(...) -> x_0, x_1, e, x_2, x_3...
+ filler = repeat([e])
+ chunks = chunked(iterable, n)
+ return flatten(islice(interleave(filler, chunks), 1, None))
+
+
+def unique_to_each(*iterables):
+ """Return the elements from each of the input iterables that aren't in the
+ other input iterables.
+
+ For example, suppose you have a set of packages, each with a set of
+ dependencies::
+
+ {'pkg_1': {'A', 'B'}, 'pkg_2': {'B', 'C'}, 'pkg_3': {'B', 'D'}}
+
+ If you remove one package, which dependencies can also be removed?
+
+ If ``pkg_1`` is removed, then ``A`` is no longer necessary - it is not
+ associated with ``pkg_2`` or ``pkg_3``. Similarly, ``C`` is only needed for
+ ``pkg_2``, and ``D`` is only needed for ``pkg_3``::
+
+ >>> unique_to_each({'A', 'B'}, {'B', 'C'}, {'B', 'D'})
+ [['A'], ['C'], ['D']]
+
+ If there are duplicates in one input iterable that aren't in the others
+ they will be duplicated in the output. Input order is preserved::
+
+ >>> unique_to_each("mississippi", "missouri")
+ [['p', 'p'], ['o', 'u', 'r']]
+
+ It is assumed that the elements of each iterable are hashable.
+
+ """
+ pool = [list(it) for it in iterables]
+ counts = Counter(chain.from_iterable(map(set, pool)))
+ uniques = {element for element in counts if counts[element] == 1}
+ return [list(filter(uniques.__contains__, it)) for it in pool]
+
+
+def windowed(seq, n, fillvalue=None, step=1):
+ """Return a sliding window of width *n* over the given iterable.
+
+ >>> all_windows = windowed([1, 2, 3, 4, 5], 3)
+ >>> list(all_windows)
+ [(1, 2, 3), (2, 3, 4), (3, 4, 5)]
+
+ When the window is larger than the iterable, *fillvalue* is used in place
+ of missing values:
+
+ >>> list(windowed([1, 2, 3], 4))
+ [(1, 2, 3, None)]
+
+ Each window will advance in increments of *step*:
+
+ >>> list(windowed([1, 2, 3, 4, 5, 6], 3, fillvalue='!', step=2))
+ [(1, 2, 3), (3, 4, 5), (5, 6, '!')]
+
+ To slide into the iterable's items, use :func:`chain` to add filler items
+ to the left:
+
+ >>> iterable = [1, 2, 3, 4]
+ >>> n = 3
+ >>> padding = [None] * (n - 1)
+ >>> list(windowed(chain(padding, iterable), 3))
+ [(None, None, 1), (None, 1, 2), (1, 2, 3), (2, 3, 4)]
+ """
+ if n < 0:
+ raise ValueError('n must be >= 0')
+ if n == 0:
+ yield tuple()
+ return
+ if step < 1:
+ raise ValueError('step must be >= 1')
+
+ window = deque(maxlen=n)
+ i = n
+ for _ in map(window.append, seq):
+ i -= 1
+ if not i:
+ i = step
+ yield tuple(window)
+
+ size = len(window)
+ if size < n:
+ yield tuple(chain(window, repeat(fillvalue, n - size)))
+ elif 0 < i < min(step, n):
+ window += (fillvalue,) * i
+ yield tuple(window)
+
+
+def substrings(iterable):
+ """Yield all of the substrings of *iterable*.
+
+ >>> [''.join(s) for s in substrings('more')]
+ ['m', 'o', 'r', 'e', 'mo', 'or', 're', 'mor', 'ore', 'more']
+
+ Note that non-string iterables can also be subdivided.
+
+ >>> list(substrings([0, 1, 2]))
+ [(0,), (1,), (2,), (0, 1), (1, 2), (0, 1, 2)]
+
+ """
+ # The length-1 substrings
+ seq = []
+ for item in iter(iterable):
+ seq.append(item)
+ yield (item,)
+ seq = tuple(seq)
+ item_count = len(seq)
+
+ # And the rest
+ for n in range(2, item_count + 1):
+ for i in range(item_count - n + 1):
+ yield seq[i : i + n]
+
+
+def substrings_indexes(seq, reverse=False):
+ """Yield all substrings and their positions in *seq*
+
+ The items yielded will be a tuple of the form ``(substr, i, j)``, where
+ ``substr == seq[i:j]``.
+
+ This function only works for iterables that support slicing, such as
+ ``str`` objects.
+
+ >>> for item in substrings_indexes('more'):
+ ... print(item)
+ ('m', 0, 1)
+ ('o', 1, 2)
+ ('r', 2, 3)
+ ('e', 3, 4)
+ ('mo', 0, 2)
+ ('or', 1, 3)
+ ('re', 2, 4)
+ ('mor', 0, 3)
+ ('ore', 1, 4)
+ ('more', 0, 4)
+
+ Set *reverse* to ``True`` to yield the same items in the opposite order.
+
+
+ """
+ r = range(1, len(seq) + 1)
+ if reverse:
+ r = reversed(r)
+ return (
+ (seq[i : i + L], i, i + L) for L in r for i in range(len(seq) - L + 1)
+ )
+
+
+class bucket:
+    """Wrap *iterable* and return an object that buckets it into
+ child iterables based on a *key* function.
+
+ >>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3']
+ >>> s = bucket(iterable, key=lambda x: x[0]) # Bucket by 1st character
+ >>> sorted(list(s)) # Get the keys
+ ['a', 'b', 'c']
+ >>> a_iterable = s['a']
+ >>> next(a_iterable)
+ 'a1'
+ >>> next(a_iterable)
+ 'a2'
+ >>> list(s['b'])
+ ['b1', 'b2', 'b3']
+
+ The original iterable will be advanced and its items will be cached until
+ they are used by the child iterables. This may require significant storage.
+
+ By default, attempting to select a bucket to which no items belong will
+ exhaust the iterable and cache all values.
+ If you specify a *validator* function, selected buckets will instead be
+ checked against it.
+
+ >>> from itertools import count
+ >>> it = count(1, 2) # Infinite sequence of odd numbers
+ >>> key = lambda x: x % 10 # Bucket by last digit
+ >>> validator = lambda x: x in {1, 3, 5, 7, 9} # Odd digits only
+ >>> s = bucket(it, key=key, validator=validator)
+ >>> 2 in s
+ False
+ >>> list(s[2])
+ []
+
+ """
+
+ def __init__(self, iterable, key, validator=None):
+ self._it = iter(iterable)
+ self._key = key
+ self._cache = defaultdict(deque)
+ self._validator = validator or (lambda x: True)
+
+ def __contains__(self, value):
+ if not self._validator(value):
+ return False
+
+ try:
+ item = next(self[value])
+ except StopIteration:
+ return False
+ else:
+ self._cache[value].appendleft(item)
+
+ return True
+
+ def _get_values(self, value):
+ """
+ Helper to yield items from the parent iterator that match *value*.
+ Items that don't match are stored in the local cache as they
+ are encountered.
+ """
+ while True:
+ # If we've cached some items that match the target value, emit
+ # the first one and evict it from the cache.
+ if self._cache[value]:
+ yield self._cache[value].popleft()
+ # Otherwise we need to advance the parent iterator to search for
+ # a matching item, caching the rest.
+ else:
+ while True:
+ try:
+ item = next(self._it)
+ except StopIteration:
+ return
+ item_value = self._key(item)
+ if item_value == value:
+ yield item
+ break
+ elif self._validator(item_value):
+ self._cache[item_value].append(item)
+
+ def __iter__(self):
+ for item in self._it:
+ item_value = self._key(item)
+ if self._validator(item_value):
+ self._cache[item_value].append(item)
+
+ yield from self._cache.keys()
+
+ def __getitem__(self, value):
+ if not self._validator(value):
+ return iter(())
+
+ return self._get_values(value)
+
+
+def spy(iterable, n=1):
+ """Return a 2-tuple with a list containing the first *n* elements of
+ *iterable*, and an iterator with the same items as *iterable*.
+ This allows you to "look ahead" at the items in the iterable without
+ advancing it.
+
+ There is one item in the list by default:
+
+ >>> iterable = 'abcdefg'
+ >>> head, iterable = spy(iterable)
+ >>> head
+ ['a']
+ >>> list(iterable)
+ ['a', 'b', 'c', 'd', 'e', 'f', 'g']
+
+ You may use unpacking to retrieve items instead of lists:
+
+ >>> (head,), iterable = spy('abcdefg')
+ >>> head
+ 'a'
+ >>> (first, second), iterable = spy('abcdefg', 2)
+ >>> first
+ 'a'
+ >>> second
+ 'b'
+
+ The number of items requested can be larger than the number of items in
+ the iterable:
+
+ >>> iterable = [1, 2, 3, 4, 5]
+ >>> head, iterable = spy(iterable, 10)
+ >>> head
+ [1, 2, 3, 4, 5]
+ >>> list(iterable)
+ [1, 2, 3, 4, 5]
+
+ """
+ it = iter(iterable)
+ head = take(n, it)
+
+ return head.copy(), chain(head, it)
+
+
+def interleave(*iterables):
+ """Return a new iterable yielding from each iterable in turn,
+ until the shortest is exhausted.
+
+ >>> list(interleave([1, 2, 3], [4, 5], [6, 7, 8]))
+ [1, 4, 6, 2, 5, 7]
+
+ For a version that doesn't terminate after the shortest iterable is
+ exhausted, see :func:`interleave_longest`.
+
+ """
+ return chain.from_iterable(zip(*iterables))
+
+
+def interleave_longest(*iterables):
+ """Return a new iterable yielding from each iterable in turn,
+ skipping any that are exhausted.
+
+ >>> list(interleave_longest([1, 2, 3], [4, 5], [6, 7, 8]))
+ [1, 4, 6, 2, 5, 7, 3, 8]
+
+ This function produces the same output as :func:`roundrobin`, but may
+ perform better for some inputs (in particular when the number of iterables
+ is large).
+
+ """
+ i = chain.from_iterable(zip_longest(*iterables, fillvalue=_marker))
+ return (x for x in i if x is not _marker)
+
+
+def interleave_evenly(iterables, lengths=None):
+ """
+ Interleave multiple iterables so that their elements are evenly distributed
+ throughout the output sequence.
+
+ >>> iterables = [1, 2, 3, 4, 5], ['a', 'b']
+ >>> list(interleave_evenly(iterables))
+ [1, 2, 'a', 3, 4, 'b', 5]
+
+ >>> iterables = [[1, 2, 3], [4, 5], [6, 7, 8]]
+ >>> list(interleave_evenly(iterables))
+ [1, 6, 4, 2, 7, 3, 8, 5]
+
+ This function requires iterables of known length. Iterables without
+ ``__len__()`` can be used by manually specifying lengths with *lengths*:
+
+ >>> from itertools import combinations, repeat
+ >>> iterables = [combinations(range(4), 2), ['a', 'b', 'c']]
+ >>> lengths = [4 * (4 - 1) // 2, 3]
+ >>> list(interleave_evenly(iterables, lengths=lengths))
+ [(0, 1), (0, 2), 'a', (0, 3), (1, 2), 'b', (1, 3), (2, 3), 'c']
+
+ Based on Bresenham's algorithm.
+ """
+ if lengths is None:
+ try:
+ lengths = [len(it) for it in iterables]
+ except TypeError:
+ raise ValueError(
+ 'Iterable lengths could not be determined automatically. '
+ 'Specify them with the lengths keyword.'
+ )
+ elif len(iterables) != len(lengths):
+ raise ValueError('Mismatching number of iterables and lengths.')
+
+ dims = len(lengths)
+
+ # sort iterables by length, descending
+ lengths_permute = sorted(
+ range(dims), key=lambda i: lengths[i], reverse=True
+ )
+ lengths_desc = [lengths[i] for i in lengths_permute]
+ iters_desc = [iter(iterables[i]) for i in lengths_permute]
+
+ # the longest iterable is the primary one (Bresenham: the longest
+ # distance along an axis)
+ delta_primary, deltas_secondary = lengths_desc[0], lengths_desc[1:]
+ iter_primary, iters_secondary = iters_desc[0], iters_desc[1:]
+ errors = [delta_primary // dims] * len(deltas_secondary)
+
+ to_yield = sum(lengths)
+ while to_yield:
+ yield next(iter_primary)
+ to_yield -= 1
+ # update errors for each secondary iterable
+ errors = [e - delta for e, delta in zip(errors, deltas_secondary)]
+
+ # those iterables for which the error is negative are yielded
+ # ("diagonal step" in Bresenham)
+ for i, e in enumerate(errors):
+ if e < 0:
+ yield next(iters_secondary[i])
+ to_yield -= 1
+ errors[i] += delta_primary
+
+
+def collapse(iterable, base_type=None, levels=None):
+ """Flatten an iterable with multiple levels of nesting (e.g., a list of
+ lists of tuples) into non-iterable types.
+
+ >>> iterable = [(1, 2), ([3, 4], [[5], [6]])]
+ >>> list(collapse(iterable))
+ [1, 2, 3, 4, 5, 6]
+
+ Binary and text strings are not considered iterable and
+ will not be collapsed.
+
+ To avoid collapsing other types, specify *base_type*:
+
+ >>> iterable = ['ab', ('cd', 'ef'), ['gh', 'ij']]
+ >>> list(collapse(iterable, base_type=tuple))
+ ['ab', ('cd', 'ef'), 'gh', 'ij']
+
+ Specify *levels* to stop flattening after a certain level:
+
+ >>> iterable = [('a', ['b']), ('c', ['d'])]
+ >>> list(collapse(iterable)) # Fully flattened
+ ['a', 'b', 'c', 'd']
+ >>> list(collapse(iterable, levels=1)) # Only one level flattened
+ ['a', ['b'], 'c', ['d']]
+
+ """
+
+ def walk(node, level):
+ if (
+ ((levels is not None) and (level > levels))
+ or isinstance(node, (str, bytes))
+ or ((base_type is not None) and isinstance(node, base_type))
+ ):
+ yield node
+ return
+
+ try:
+ tree = iter(node)
+ except TypeError:
+ yield node
+ return
+ else:
+ for child in tree:
+ yield from walk(child, level + 1)
+
+ yield from walk(iterable, 0)
+
+
+def side_effect(func, iterable, chunk_size=None, before=None, after=None):
+ """Invoke *func* on each item in *iterable* (or on each *chunk_size* group
+ of items) before yielding the item.
+
+ `func` must be a function that takes a single argument. Its return value
+ will be discarded.
+
+ *before* and *after* are optional functions that take no arguments. They
+ will be executed before iteration starts and after it ends, respectively.
+
+ `side_effect` can be used for logging, updating progress bars, or anything
+ that is not functionally "pure."
+
+ Emitting a status message:
+
+ >>> from more_itertools import consume
+ >>> func = lambda item: print('Received {}'.format(item))
+ >>> consume(side_effect(func, range(2)))
+ Received 0
+ Received 1
+
+ Operating on chunks of items:
+
+ >>> pair_sums = []
+ >>> func = lambda chunk: pair_sums.append(sum(chunk))
+ >>> list(side_effect(func, [0, 1, 2, 3, 4, 5], 2))
+ [0, 1, 2, 3, 4, 5]
+ >>> list(pair_sums)
+ [1, 5, 9]
+
+ Writing to a file-like object:
+
+ >>> from io import StringIO
+ >>> from more_itertools import consume
+ >>> f = StringIO()
+ >>> func = lambda x: print(x, file=f)
+ >>> before = lambda: print(u'HEADER', file=f)
+ >>> after = f.close
+ >>> it = [u'a', u'b', u'c']
+ >>> consume(side_effect(func, it, before=before, after=after))
+ >>> f.closed
+ True
+
+ """
+ try:
+ if before is not None:
+ before()
+
+ if chunk_size is None:
+ for item in iterable:
+ func(item)
+ yield item
+ else:
+ for chunk in chunked(iterable, chunk_size):
+ func(chunk)
+ yield from chunk
+ finally:
+ if after is not None:
+ after()
+
+
+def sliced(seq, n, strict=False):
+ """Yield slices of length *n* from the sequence *seq*.
+
+ >>> list(sliced((1, 2, 3, 4, 5, 6), 3))
+ [(1, 2, 3), (4, 5, 6)]
+
+    By default, the last yielded slice will have fewer than *n* elements
+ if the length of *seq* is not divisible by *n*:
+
+ >>> list(sliced((1, 2, 3, 4, 5, 6, 7, 8), 3))
+ [(1, 2, 3), (4, 5, 6), (7, 8)]
+
+ If the length of *seq* is not divisible by *n* and *strict* is
+ ``True``, then ``ValueError`` will be raised before the last
+ slice is yielded.
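+
+    For example:
+
+    >>> list(sliced((1, 2, 3, 4), 3, strict=True))  # doctest: +IGNORE_EXCEPTION_DETAIL
+    Traceback (most recent call last):
+    ...
+    ValueError: seq is not divisible by n.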
+
+ This function will only work for iterables that support slicing.
+ For non-sliceable iterables, see :func:`chunked`.
+
+ """
+ iterator = takewhile(len, (seq[i : i + n] for i in count(0, n)))
+ if strict:
+
+ def ret():
+ for _slice in iterator:
+ if len(_slice) != n:
+ raise ValueError("seq is not divisible by n.")
+ yield _slice
+
+ return iter(ret())
+ else:
+ return iterator
+
+
+def split_at(iterable, pred, maxsplit=-1, keep_separator=False):
+ """Yield lists of items from *iterable*, where each list is delimited by
+ an item where callable *pred* returns ``True``.
+
+ >>> list(split_at('abcdcba', lambda x: x == 'b'))
+ [['a'], ['c', 'd', 'c'], ['a']]
+
+ >>> list(split_at(range(10), lambda n: n % 2 == 1))
+ [[0], [2], [4], [6], [8], []]
+
+ At most *maxsplit* splits are done. If *maxsplit* is not specified or -1,
+ then there is no limit on the number of splits:
+
+ >>> list(split_at(range(10), lambda n: n % 2 == 1, maxsplit=2))
+ [[0], [2], [4, 5, 6, 7, 8, 9]]
+
+ By default, the delimiting items are not included in the output.
+    To include them, set *keep_separator* to ``True``.
+
+ >>> list(split_at('abcdcba', lambda x: x == 'b', keep_separator=True))
+ [['a'], ['b'], ['c', 'd', 'c'], ['b'], ['a']]
+
+ """
+ if maxsplit == 0:
+ yield list(iterable)
+ return
+
+ buf = []
+ it = iter(iterable)
+ for item in it:
+ if pred(item):
+ yield buf
+ if keep_separator:
+ yield [item]
+ if maxsplit == 1:
+ yield list(it)
+ return
+ buf = []
+ maxsplit -= 1
+ else:
+ buf.append(item)
+ yield buf
+
+
+def split_before(iterable, pred, maxsplit=-1):
+ """Yield lists of items from *iterable*, where each list ends just before
+ an item for which callable *pred* returns ``True``:
+
+ >>> list(split_before('OneTwo', lambda s: s.isupper()))
+ [['O', 'n', 'e'], ['T', 'w', 'o']]
+
+ >>> list(split_before(range(10), lambda n: n % 3 == 0))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]
+
+ At most *maxsplit* splits are done. If *maxsplit* is not specified or -1,
+ then there is no limit on the number of splits:
+
+ >>> list(split_before(range(10), lambda n: n % 3 == 0, maxsplit=2))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8, 9]]
+ """
+ if maxsplit == 0:
+ yield list(iterable)
+ return
+
+ buf = []
+ it = iter(iterable)
+ for item in it:
+ if pred(item) and buf:
+ yield buf
+ if maxsplit == 1:
+ yield [item] + list(it)
+ return
+ buf = []
+ maxsplit -= 1
+ buf.append(item)
+ if buf:
+ yield buf
+
+
+def split_after(iterable, pred, maxsplit=-1):
+ """Yield lists of items from *iterable*, where each list ends with an
+ item where callable *pred* returns ``True``:
+
+ >>> list(split_after('one1two2', lambda s: s.isdigit()))
+ [['o', 'n', 'e', '1'], ['t', 'w', 'o', '2']]
+
+ >>> list(split_after(range(10), lambda n: n % 3 == 0))
+ [[0], [1, 2, 3], [4, 5, 6], [7, 8, 9]]
+
+ At most *maxsplit* splits are done. If *maxsplit* is not specified or -1,
+ then there is no limit on the number of splits:
+
+ >>> list(split_after(range(10), lambda n: n % 3 == 0, maxsplit=2))
+ [[0], [1, 2, 3], [4, 5, 6, 7, 8, 9]]
+
+ """
+ if maxsplit == 0:
+ yield list(iterable)
+ return
+
+ buf = []
+ it = iter(iterable)
+ for item in it:
+ buf.append(item)
+ if pred(item) and buf:
+ yield buf
+ if maxsplit == 1:
+ yield list(it)
+ return
+ buf = []
+ maxsplit -= 1
+ if buf:
+ yield buf
+
+
+def split_when(iterable, pred, maxsplit=-1):
+ """Split *iterable* into pieces based on the output of *pred*.
+ *pred* should be a function that takes successive pairs of items and
+ returns ``True`` if the iterable should be split in between them.
+
+ For example, to find runs of increasing numbers, split the iterable when
+ element ``i`` is larger than element ``i + 1``:
+
+ >>> list(split_when([1, 2, 3, 3, 2, 5, 2, 4, 2], lambda x, y: x > y))
+ [[1, 2, 3, 3], [2, 5], [2, 4], [2]]
+
+ At most *maxsplit* splits are done. If *maxsplit* is not specified or -1,
+ then there is no limit on the number of splits:
+
+ >>> list(split_when([1, 2, 3, 3, 2, 5, 2, 4, 2],
+ ... lambda x, y: x > y, maxsplit=2))
+ [[1, 2, 3, 3], [2, 5], [2, 4, 2]]
+
+ """
+ if maxsplit == 0:
+ yield list(iterable)
+ return
+
+ it = iter(iterable)
+ try:
+ cur_item = next(it)
+ except StopIteration:
+ return
+
+ buf = [cur_item]
+ for next_item in it:
+ if pred(cur_item, next_item):
+ yield buf
+ if maxsplit == 1:
+ yield [next_item] + list(it)
+ return
+ buf = []
+ maxsplit -= 1
+
+ buf.append(next_item)
+ cur_item = next_item
+
+ yield buf
+
+
+def split_into(iterable, sizes):
+ """Yield a list of sequential items from *iterable* of length 'n' for each
+ integer 'n' in *sizes*.
+
+ >>> list(split_into([1,2,3,4,5,6], [1,2,3]))
+ [[1], [2, 3], [4, 5, 6]]
+
+ If the sum of *sizes* is smaller than the length of *iterable*, then the
+ remaining items of *iterable* will not be returned.
+
+ >>> list(split_into([1,2,3,4,5,6], [2,3]))
+ [[1, 2], [3, 4, 5]]
+
+ If the sum of *sizes* is larger than the length of *iterable*, fewer items
+ will be returned in the iteration that overruns *iterable* and further
+ lists will be empty:
+
+ >>> list(split_into([1,2,3,4], [1,2,3,4]))
+ [[1], [2, 3], [4], []]
+
+ When a ``None`` object is encountered in *sizes*, the returned list will
+    contain items up to the end of *iterable*, the same way that :func:`itertools.islice`
+ does:
+
+ >>> list(split_into([1,2,3,4,5,6,7,8,9,0], [2,3,None]))
+ [[1, 2], [3, 4, 5], [6, 7, 8, 9, 0]]
+
+ :func:`split_into` can be useful for grouping a series of items where the
+    sizes of the groups are not uniform. An example would be a table row in
+    which multiple columns represent elements of the same feature
+    (e.g. a point represented by x, y, z), but the format is not the same for
+    all columns.
+ """
+ # convert the iterable argument into an iterator so its contents can
+ # be consumed by islice in case it is a generator
+ it = iter(iterable)
+
+ for size in sizes:
+ if size is None:
+ yield list(it)
+ return
+ else:
+ yield list(islice(it, size))
+
+
+def padded(iterable, fillvalue=None, n=None, next_multiple=False):
+ """Yield the elements from *iterable*, followed by *fillvalue*, such that
+ at least *n* items are emitted.
+
+ >>> list(padded([1, 2, 3], '?', 5))
+ [1, 2, 3, '?', '?']
+
+ If *next_multiple* is ``True``, *fillvalue* will be emitted until the
+ number of items emitted is a multiple of *n*::
+
+ >>> list(padded([1, 2, 3, 4], n=3, next_multiple=True))
+ [1, 2, 3, 4, None, None]
+
+ If *n* is ``None``, *fillvalue* will be emitted indefinitely.
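+
+    >>> from itertools import islice
+    >>> list(islice(padded([1, 2, 3], '?'), 5))
+    [1, 2, 3, '?', '?']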
+
+ """
+ it = iter(iterable)
+ if n is None:
+ yield from chain(it, repeat(fillvalue))
+ elif n < 1:
+ raise ValueError('n must be at least 1')
+ else:
+ item_count = 0
+ for item in it:
+ yield item
+ item_count += 1
+
+ remaining = (n - item_count) % n if next_multiple else n - item_count
+ for _ in range(remaining):
+ yield fillvalue
+
+
+def repeat_each(iterable, n=2):
+ """Repeat each element in *iterable* *n* times.
+
+ >>> list(repeat_each('ABC', 3))
+ ['A', 'A', 'A', 'B', 'B', 'B', 'C', 'C', 'C']
+ """
+ return chain.from_iterable(map(repeat, iterable, repeat(n)))
+
+
+def repeat_last(iterable, default=None):
+ """After the *iterable* is exhausted, keep yielding its last element.
+
+ >>> list(islice(repeat_last(range(3)), 5))
+ [0, 1, 2, 2, 2]
+
+ If the iterable is empty, yield *default* forever::
+
+ >>> list(islice(repeat_last(range(0), 42), 5))
+ [42, 42, 42, 42, 42]
+
+ """
+ item = _marker
+ for item in iterable:
+ yield item
+ final = default if item is _marker else item
+ yield from repeat(final)
+
+
+def distribute(n, iterable):
+ """Distribute the items from *iterable* among *n* smaller iterables.
+
+ >>> group_1, group_2 = distribute(2, [1, 2, 3, 4, 5, 6])
+ >>> list(group_1)
+ [1, 3, 5]
+ >>> list(group_2)
+ [2, 4, 6]
+
+ If the length of *iterable* is not evenly divisible by *n*, then the
+ length of the returned iterables will not be identical:
+
+ >>> children = distribute(3, [1, 2, 3, 4, 5, 6, 7])
+ >>> [list(c) for c in children]
+ [[1, 4, 7], [2, 5], [3, 6]]
+
+ If the length of *iterable* is smaller than *n*, then the last returned
+ iterables will be empty:
+
+ >>> children = distribute(5, [1, 2, 3])
+ >>> [list(c) for c in children]
+ [[1], [2], [3], [], []]
+
+ This function uses :func:`itertools.tee` and may require significant
+    storage. If you need the order of items in the smaller iterables to match the
+ original iterable, see :func:`divide`.
+
+ """
+ if n < 1:
+ raise ValueError('n must be at least 1')
+
+ children = tee(iterable, n)
+ return [islice(it, index, None, n) for index, it in enumerate(children)]
+
+
+def stagger(iterable, offsets=(-1, 0, 1), longest=False, fillvalue=None):
+ """Yield tuples whose elements are offset from *iterable*.
+ The amount by which the `i`-th item in each tuple is offset is given by
+ the `i`-th item in *offsets*.
+
+ >>> list(stagger([0, 1, 2, 3]))
+ [(None, 0, 1), (0, 1, 2), (1, 2, 3)]
+ >>> list(stagger(range(8), offsets=(0, 2, 4)))
+ [(0, 2, 4), (1, 3, 5), (2, 4, 6), (3, 5, 7)]
+
+ By default, the sequence will end when the final element of a tuple is the
+ last item in the iterable. To continue until the first element of a tuple
+ is the last item in the iterable, set *longest* to ``True``::
+
+ >>> list(stagger([0, 1, 2, 3], longest=True))
+ [(None, 0, 1), (0, 1, 2), (1, 2, 3), (2, 3, None), (3, None, None)]
+
+ By default, ``None`` will be used to replace offsets beyond the end of the
+ sequence. Specify *fillvalue* to use some other value.
+
+ """
+ children = tee(iterable, len(offsets))
+
+ return zip_offset(
+ *children, offsets=offsets, longest=longest, fillvalue=fillvalue
+ )
+
+
+class UnequalIterablesError(ValueError):
+ def __init__(self, details=None):
+ msg = 'Iterables have different lengths'
+ if details is not None:
+ msg += (': index 0 has length {}; index {} has length {}').format(
+ *details
+ )
+
+ super().__init__(msg)
+
+
+def _zip_equal_generator(iterables):
+ for combo in zip_longest(*iterables, fillvalue=_marker):
+ for val in combo:
+ if val is _marker:
+ raise UnequalIterablesError()
+ yield combo
+
+
+def _zip_equal(*iterables):
+ # Check whether the iterables are all the same size.
+ try:
+ first_size = len(iterables[0])
+ for i, it in enumerate(iterables[1:], 1):
+ size = len(it)
+ if size != first_size:
+ break
+ else:
+ # If we didn't break out, we can use the built-in zip.
+ return zip(*iterables)
+
+ # If we did break out, there was a mismatch.
+ raise UnequalIterablesError(details=(first_size, i, size))
+ # If any one of the iterables didn't have a length, start reading
+ # them until one runs out.
+ except TypeError:
+ return _zip_equal_generator(iterables)
+
+
+def zip_equal(*iterables):
+ """``zip`` the input *iterables* together, but raise
+ ``UnequalIterablesError`` if they aren't all the same length.
+
+ >>> it_1 = range(3)
+ >>> it_2 = iter('abc')
+ >>> list(zip_equal(it_1, it_2))
+ [(0, 'a'), (1, 'b'), (2, 'c')]
+
+ >>> it_1 = range(3)
+ >>> it_2 = iter('abcd')
+ >>> list(zip_equal(it_1, it_2)) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ more_itertools.more.UnequalIterablesError: Iterables have different
+ lengths
+
+ """
+ if hexversion >= 0x30A00A6:
+ warnings.warn(
+ (
+ 'zip_equal will be removed in a future version of '
+ 'more-itertools. Use the builtin zip function with '
+ 'strict=True instead.'
+ ),
+ DeprecationWarning,
+ )
+
+ return _zip_equal(*iterables)
+
+
+def zip_offset(*iterables, offsets, longest=False, fillvalue=None):
+ """``zip`` the input *iterables* together, but offset the `i`-th iterable
+ by the `i`-th item in *offsets*.
+
+ >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1)))
+ [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e')]
+
+ This can be used as a lightweight alternative to SciPy or pandas to analyze
+ data sets in which some series have a lead or lag relationship.
+
+ By default, the sequence will end when the shortest iterable is exhausted.
+ To continue until the longest iterable is exhausted, set *longest* to
+ ``True``.
+
+ >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1), longest=True))
+ [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e'), (None, 'f')]
+
+ By default, ``None`` will be used to replace offsets beyond the end of the
+ sequence. Specify *fillvalue* to use some other value.
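+
+    Negative offsets pad the corresponding iterable with *fillvalue* on the
+    left instead:
+
+    >>> list(zip_offset('0123', 'abcdef', offsets=(-1, 0)))
+    [(None, 'a'), ('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e')]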
+
+ """
+ if len(iterables) != len(offsets):
+ raise ValueError("Number of iterables and offsets didn't match")
+
+ staggered = []
+ for it, n in zip(iterables, offsets):
+ if n < 0:
+ staggered.append(chain(repeat(fillvalue, -n), it))
+ elif n > 0:
+ staggered.append(islice(it, n, None))
+ else:
+ staggered.append(it)
+
+ if longest:
+ return zip_longest(*staggered, fillvalue=fillvalue)
+
+ return zip(*staggered)
+
+
+def sort_together(iterables, key_list=(0,), key=None, reverse=False):
+ """Return the input iterables sorted together, with *key_list* as the
+ priority for sorting. All iterables are trimmed to the length of the
+ shortest one.
+
+ This can be used like the sorting function in a spreadsheet. If each
+ iterable represents a column of data, the key list determines which
+ columns are used for sorting.
+
+ By default, all iterables are sorted using the ``0``-th iterable::
+
+ >>> iterables = [(4, 3, 2, 1), ('a', 'b', 'c', 'd')]
+ >>> sort_together(iterables)
+ [(1, 2, 3, 4), ('d', 'c', 'b', 'a')]
+
+ Set a different key list to sort according to another iterable.
+ Specifying multiple keys dictates how ties are broken::
+
+ >>> iterables = [(3, 1, 2), (0, 1, 0), ('c', 'b', 'a')]
+ >>> sort_together(iterables, key_list=(1, 2))
+ [(2, 3, 1), (0, 0, 1), ('a', 'c', 'b')]
+
+ To sort by a function of the elements of the iterable, pass a *key*
+ function. Its arguments are the elements of the iterables corresponding to
+ the key list::
+
+ >>> names = ('a', 'b', 'c')
+ >>> lengths = (1, 2, 3)
+ >>> widths = (5, 2, 1)
+ >>> def area(length, width):
+ ... return length * width
+ >>> sort_together([names, lengths, widths], key_list=(1, 2), key=area)
+ [('c', 'b', 'a'), (3, 2, 1), (1, 2, 5)]
+
+ Set *reverse* to ``True`` to sort in descending order.
+
+ >>> sort_together([(1, 2, 3), ('c', 'b', 'a')], reverse=True)
+ [(3, 2, 1), ('a', 'b', 'c')]
+
+ """
+ if key is None:
+ # if there is no key function, the key argument to sorted is an
+ # itemgetter
+ key_argument = itemgetter(*key_list)
+ else:
+ # if there is a key function, call it with the items at the offsets
+ # specified by the key function as arguments
+ key_list = list(key_list)
+ if len(key_list) == 1:
+ # if key_list contains a single item, pass the item at that offset
+ # as the only argument to the key function
+ key_offset = key_list[0]
+ key_argument = lambda zipped_items: key(zipped_items[key_offset])
+ else:
+ # if key_list contains multiple items, use itemgetter to return a
+ # tuple of items, which we pass as *args to the key function
+ get_key_items = itemgetter(*key_list)
+ key_argument = lambda zipped_items: key(
+ *get_key_items(zipped_items)
+ )
+
+ return list(
+ zip(*sorted(zip(*iterables), key=key_argument, reverse=reverse))
+ )
+
+
+def unzip(iterable):
+ """The inverse of :func:`zip`, this function disaggregates the elements
+ of the zipped *iterable*.
+
+ The ``i``-th iterable contains the ``i``-th element from each element
+    of the zipped iterable. The first element is used to determine the
+ length of the remaining elements.
+
+ >>> iterable = [('a', 1), ('b', 2), ('c', 3), ('d', 4)]
+ >>> letters, numbers = unzip(iterable)
+ >>> list(letters)
+ ['a', 'b', 'c', 'd']
+ >>> list(numbers)
+ [1, 2, 3, 4]
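+
+    Unevenly "zipped" input is tolerated: each unzipped iterable simply
+    stops at the first element that is missing the corresponding position.
+
+    >>> letters, numbers = unzip([('a', 1), ('b', 2), ('c',)])
+    >>> list(letters)
+    ['a', 'b', 'c']
+    >>> list(numbers)
+    [1, 2]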
+
+ This is similar to using ``zip(*iterable)``, but it avoids reading
+ *iterable* into memory. Note, however, that this function uses
+ :func:`itertools.tee` and thus may require significant storage.
+
+ """
+ head, iterable = spy(iter(iterable))
+ if not head:
+ # empty iterable, e.g. zip([], [], [])
+ return ()
+ # spy returns a one-length iterable as head
+ head = head[0]
+ iterables = tee(iterable, len(head))
+
+ def itemgetter(i):
+ def getter(obj):
+ try:
+ return obj[i]
+ except IndexError:
+ # basically if we have an iterable like
+ # iter([(1, 2, 3), (4, 5), (6,)])
+ # the second unzipped iterable would fail at the third tuple
+ # since it would try to access tup[1]
+ # same with the third unzipped iterable and the second tuple
+ # to support these "improperly zipped" iterables,
+ # we create a custom itemgetter
+ # which just stops the unzipped iterables
+ # at first length mismatch
+ raise StopIteration
+
+ return getter
+
+ return tuple(map(itemgetter(i), it) for i, it in enumerate(iterables))
+
+
+def divide(n, iterable):
+ """Divide the elements from *iterable* into *n* parts, maintaining
+ order.
+
+ >>> group_1, group_2 = divide(2, [1, 2, 3, 4, 5, 6])
+ >>> list(group_1)
+ [1, 2, 3]
+ >>> list(group_2)
+ [4, 5, 6]
+
+ If the length of *iterable* is not evenly divisible by *n*, then the
+ length of the returned iterables will not be identical:
+
+ >>> children = divide(3, [1, 2, 3, 4, 5, 6, 7])
+ >>> [list(c) for c in children]
+ [[1, 2, 3], [4, 5], [6, 7]]
+
+ If the length of the iterable is smaller than n, then the last returned
+ iterables will be empty:
+
+ >>> children = divide(5, [1, 2, 3])
+ >>> [list(c) for c in children]
+ [[1], [2], [3], [], []]
+
+ This function will exhaust the iterable before returning and may require
+ significant storage. If order is not important, see :func:`distribute`,
+ which does not first pull the iterable into memory.
+
+ """
+ if n < 1:
+ raise ValueError('n must be at least 1')
+
+ try:
+ iterable[:0]
+ except TypeError:
+ seq = tuple(iterable)
+ else:
+ seq = iterable
+
+ q, r = divmod(len(seq), n)
+
+ ret = []
+ stop = 0
+ for i in range(1, n + 1):
+ start = stop
+ stop += q + 1 if i <= r else q
+ ret.append(iter(seq[start:stop]))
+
+ return ret
+
+
+def always_iterable(obj, base_type=(str, bytes)):
+ """If *obj* is iterable, return an iterator over its items::
+
+ >>> obj = (1, 2, 3)
+ >>> list(always_iterable(obj))
+ [1, 2, 3]
+
+ If *obj* is not iterable, return a one-item iterable containing *obj*::
+
+ >>> obj = 1
+ >>> list(always_iterable(obj))
+ [1]
+
+ If *obj* is ``None``, return an empty iterable:
+
+ >>> obj = None
+ >>> list(always_iterable(None))
+ []
+
+ By default, binary and text strings are not considered iterable::
+
+ >>> obj = 'foo'
+ >>> list(always_iterable(obj))
+ ['foo']
+
+ If *base_type* is set, objects for which ``isinstance(obj, base_type)``
+ returns ``True`` won't be considered iterable.
+
+ >>> obj = {'a': 1}
+ >>> list(always_iterable(obj)) # Iterate over the dict's keys
+ ['a']
+ >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit
+ [{'a': 1}]
+
+ Set *base_type* to ``None`` to avoid any special handling and treat objects
+ Python considers iterable as iterable:
+
+ >>> obj = 'foo'
+ >>> list(always_iterable(obj, base_type=None))
+ ['f', 'o', 'o']
+ """
+ if obj is None:
+ return iter(())
+
+ if (base_type is not None) and isinstance(obj, base_type):
+ return iter((obj,))
+
+ try:
+ return iter(obj)
+ except TypeError:
+ return iter((obj,))
+
+
+def adjacent(predicate, iterable, distance=1):
+ """Return an iterable over `(bool, item)` tuples where the `item` is
+ drawn from *iterable* and the `bool` indicates whether
+ that item satisfies the *predicate* or is adjacent to an item that does.
+
+ For example, to find whether items are adjacent to a ``3``::
+
+ >>> list(adjacent(lambda x: x == 3, range(6)))
+ [(False, 0), (False, 1), (True, 2), (True, 3), (True, 4), (False, 5)]
+
+ Set *distance* to change what counts as adjacent. For example, to find
+ whether items are two places away from a ``3``:
+
+ >>> list(adjacent(lambda x: x == 3, range(6), distance=2))
+ [(False, 0), (True, 1), (True, 2), (True, 3), (True, 4), (True, 5)]
+
+ This is useful for contextualizing the results of a search function.
+ For example, a code comparison tool might want to identify lines that
+ have changed, but also surrounding lines to give the viewer of the diff
+ context.
+
+ The predicate function will only be called once for each item in the
+ iterable.
+
+ See also :func:`groupby_transform`, which can be used with this function
+ to group ranges of items with the same `bool` value.
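+
+ For instance, a sketch of that combination, grouping runs of adjacent
+ items by their ``bool`` flag:
+
+ >>> from operator import itemgetter
+ >>> marked = adjacent(lambda x: x == 3, range(6))
+ >>> grouper = groupby_transform(marked, itemgetter(0), itemgetter(1))
+ >>> [(k, list(g)) for k, g in grouper]
+ [(False, [0, 1]), (True, [2, 3, 4]), (False, [5])]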
+
+ """
+ # Allow distance=0 mainly for testing that it reproduces results with map()
+ if distance < 0:
+ raise ValueError('distance must be at least 0')
+
+ i1, i2 = tee(iterable)
+ padding = [False] * distance
+ selected = chain(padding, map(predicate, i1), padding)
+ adjacent_to_selected = map(any, windowed(selected, 2 * distance + 1))
+ return zip(adjacent_to_selected, i2)
+
+
+def groupby_transform(iterable, keyfunc=None, valuefunc=None, reducefunc=None):
+ """An extension of :func:`itertools.groupby` that can apply transformations
+ to the grouped data.
+
+ * *keyfunc* is a function computing a key value for each item in *iterable*
+ * *valuefunc* is a function that transforms the individual items from
+ *iterable* after grouping
+ * *reducefunc* is a function that transforms each group of items
+
+ >>> iterable = 'aAAbBBcCC'
+ >>> keyfunc = lambda k: k.upper()
+ >>> valuefunc = lambda v: v.lower()
+ >>> reducefunc = lambda g: ''.join(g)
+ >>> list(groupby_transform(iterable, keyfunc, valuefunc, reducefunc))
+ [('A', 'aaa'), ('B', 'bbb'), ('C', 'ccc')]
+
+ Each optional argument defaults to an identity function if not specified.
+
+ :func:`groupby_transform` is useful when grouping elements of an iterable
+ using a separate iterable as the key. To do this, :func:`zip` the iterables
+ and pass a *keyfunc* that extracts the first element and a *valuefunc*
+ that extracts the second element::
+
+ >>> from operator import itemgetter
+ >>> keys = [0, 0, 1, 1, 1, 2, 2, 2, 3]
+ >>> values = 'abcdefghi'
+ >>> iterable = zip(keys, values)
+ >>> grouper = groupby_transform(iterable, itemgetter(0), itemgetter(1))
+ >>> [(k, ''.join(g)) for k, g in grouper]
+ [(0, 'ab'), (1, 'cde'), (2, 'fgh'), (3, 'i')]
+
+ Note that the order of items in the iterable is significant.
+ Only adjacent items are grouped together, so if you don't want any
+ duplicate groups, you should sort the iterable by the key function.
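+
+ For example (an illustrative sketch of this caveat), grouping without
+ sorting first yields a repeated key:
+
+ >>> keyfunc = lambda k: k.upper()
+ >>> list(groupby_transform('aabbaa', keyfunc, reducefunc=''.join))
+ [('A', 'aa'), ('B', 'bb'), ('A', 'aa')]
+ >>> list(groupby_transform(sorted('aabbaa'), keyfunc, reducefunc=''.join))
+ [('A', 'aaaa'), ('B', 'bb')]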
+
+ """
+ ret = groupby(iterable, keyfunc)
+ if valuefunc:
+ ret = ((k, map(valuefunc, g)) for k, g in ret)
+ if reducefunc:
+ ret = ((k, reducefunc(g)) for k, g in ret)
+
+ return ret
+
+
+class numeric_range(abc.Sequence, abc.Hashable):
+ """An extension of the built-in ``range()`` function whose arguments can
+ be any orderable numeric type.
+
+ With only *stop* specified, *start* defaults to ``0`` and *step*
+ defaults to ``1``. The output items will match the type of *stop*:
+
+ >>> list(numeric_range(3.5))
+ [0.0, 1.0, 2.0, 3.0]
+
+ With only *start* and *stop* specified, *step* defaults to ``1``. The
+ output items will match the type of *start*:
+
+ >>> from decimal import Decimal
+ >>> start = Decimal('2.1')
+ >>> stop = Decimal('5.1')
+ >>> list(numeric_range(start, stop))
+ [Decimal('2.1'), Decimal('3.1'), Decimal('4.1')]
+
+ With *start*, *stop*, and *step* specified the output items will match
+ the type of ``start + step``:
+
+ >>> from fractions import Fraction
+ >>> start = Fraction(1, 2) # Start at 1/2
+ >>> stop = Fraction(5, 2) # End at 5/2
+ >>> step = Fraction(1, 2) # Count by 1/2
+ >>> list(numeric_range(start, stop, step))
+ [Fraction(1, 2), Fraction(1, 1), Fraction(3, 2), Fraction(2, 1)]
+
+ If *step* is zero, ``ValueError`` is raised. Negative steps are supported:
+
+ >>> list(numeric_range(3, -1, -1.0))
+ [3.0, 2.0, 1.0, 0.0]
+
+ Be aware of the limitations of floating point numbers; the representation
+ of the yielded numbers may be surprising.
+
+ ``datetime.datetime`` objects can be used for *start* and *stop*, if *step*
+ is a ``datetime.timedelta`` object:
+
+ >>> import datetime
+ >>> start = datetime.datetime(2019, 1, 1)
+ >>> stop = datetime.datetime(2019, 1, 3)
+ >>> step = datetime.timedelta(days=1)
+ >>> items = iter(numeric_range(start, stop, step))
+ >>> next(items)
+ datetime.datetime(2019, 1, 1, 0, 0)
+ >>> next(items)
+ datetime.datetime(2019, 1, 2, 0, 0)
+
+ """
+
+ _EMPTY_HASH = hash(range(0, 0))
+
+ def __init__(self, *args):
+ argc = len(args)
+ if argc == 1:
+ (self._stop,) = args
+ self._start = type(self._stop)(0)
+ self._step = type(self._stop - self._start)(1)
+ elif argc == 2:
+ self._start, self._stop = args
+ self._step = type(self._stop - self._start)(1)
+ elif argc == 3:
+ self._start, self._stop, self._step = args
+ elif argc == 0:
+ raise TypeError(
+ 'numeric_range expected at least '
+ '1 argument, got {}'.format(argc)
+ )
+ else:
+ raise TypeError(
+ 'numeric_range expected at most '
+ '3 arguments, got {}'.format(argc)
+ )
+
+ self._zero = type(self._step)(0)
+ if self._step == self._zero:
+ raise ValueError('numeric_range() arg 3 must not be zero')
+ self._growing = self._step > self._zero
+ self._init_len()
+
+ def __bool__(self):
+ if self._growing:
+ return self._start < self._stop
+ else:
+ return self._start > self._stop
+
+ def __contains__(self, elem):
+ if self._growing:
+ if self._start <= elem < self._stop:
+ return (elem - self._start) % self._step == self._zero
+ else:
+ if self._start >= elem > self._stop:
+ return (self._start - elem) % (-self._step) == self._zero
+
+ return False
+
+ def __eq__(self, other):
+ if isinstance(other, numeric_range):
+ empty_self = not bool(self)
+ empty_other = not bool(other)
+ if empty_self or empty_other:
+ return empty_self and empty_other # True if both empty
+ else:
+ return (
+ self._start == other._start
+ and self._step == other._step
+ and self._get_by_index(-1) == other._get_by_index(-1)
+ )
+ else:
+ return False
+
+ def __getitem__(self, key):
+ if isinstance(key, int):
+ return self._get_by_index(key)
+ elif isinstance(key, slice):
+ step = self._step if key.step is None else key.step * self._step
+
+ if key.start is None or key.start <= -self._len:
+ start = self._start
+ elif key.start >= self._len:
+ start = self._stop
+ else: # -self._len < key.start < self._len
+ start = self._get_by_index(key.start)
+
+ if key.stop is None or key.stop >= self._len:
+ stop = self._stop
+ elif key.stop <= -self._len:
+ stop = self._start
+ else: # -self._len < key.stop < self._len
+ stop = self._get_by_index(key.stop)
+
+ return numeric_range(start, stop, step)
+ else:
+ raise TypeError(
+ 'numeric range indices must be '
+ 'integers or slices, not {}'.format(type(key).__name__)
+ )
+
+ def __hash__(self):
+ if self:
+ return hash((self._start, self._get_by_index(-1), self._step))
+ else:
+ return self._EMPTY_HASH
+
+ def __iter__(self):
+ values = (self._start + (n * self._step) for n in count())
+ if self._growing:
+ return takewhile(partial(gt, self._stop), values)
+ else:
+ return takewhile(partial(lt, self._stop), values)
+
+ def __len__(self):
+ return self._len
+
+ def _init_len(self):
+ if self._growing:
+ start = self._start
+ stop = self._stop
+ step = self._step
+ else:
+ start = self._stop
+ stop = self._start
+ step = -self._step
+ distance = stop - start
+ if distance <= self._zero:
+ self._len = 0
+ else: # distance > 0 and step > 0: regular euclidean division
+ q, r = divmod(distance, step)
+ self._len = int(q) + int(r != self._zero)
+
+ def __reduce__(self):
+ return numeric_range, (self._start, self._stop, self._step)
+
+ def __repr__(self):
+ if self._step == 1:
+ return "numeric_range({}, {})".format(
+ repr(self._start), repr(self._stop)
+ )
+ else:
+ return "numeric_range({}, {}, {})".format(
+ repr(self._start), repr(self._stop), repr(self._step)
+ )
+
+ def __reversed__(self):
+ return iter(
+ numeric_range(
+ self._get_by_index(-1), self._start - self._step, -self._step
+ )
+ )
+
+ def count(self, value):
+ return int(value in self)
+
+ def index(self, value):
+ if self._growing:
+ if self._start <= value < self._stop:
+ q, r = divmod(value - self._start, self._step)
+ if r == self._zero:
+ return int(q)
+ else:
+ if self._start >= value > self._stop:
+ q, r = divmod(self._start - value, -self._step)
+ if r == self._zero:
+ return int(q)
+
+ raise ValueError("{} is not in numeric range".format(value))
+
+ def _get_by_index(self, i):
+ if i < 0:
+ i += self._len
+ if i < 0 or i >= self._len:
+ raise IndexError("numeric range object index out of range")
+ return self._start + i * self._step
+
+
+def count_cycle(iterable, n=None):
+ """Cycle through the items from *iterable* up to *n* times, yielding
+ the number of completed cycles along with each item. If *n* is omitted the
+ process repeats indefinitely.
+
+ >>> list(count_cycle('AB', 3))
+ [(0, 'A'), (0, 'B'), (1, 'A'), (1, 'B'), (2, 'A'), (2, 'B')]
+
+ """
+ iterable = tuple(iterable)
+ if not iterable:
+ return iter(())
+ counter = count() if n is None else range(n)
+ return ((i, item) for i in counter for item in iterable)
+
+
+def mark_ends(iterable):
+ """Yield 3-tuples of the form ``(is_first, is_last, item)``.
+
+ >>> list(mark_ends('ABC'))
+ [(True, False, 'A'), (False, False, 'B'), (False, True, 'C')]
+
+ Use this when looping over an iterable to take special action on its first
+ and/or last items:
+
+ >>> iterable = ['Header', 100, 200, 'Footer']
+ >>> total = 0
+ >>> for is_first, is_last, item in mark_ends(iterable):
+ ... if is_first:
+ ... continue # Skip the header
+ ... if is_last:
+ ... continue # Skip the footer
+ ... total += item
+ >>> print(total)
+ 300
+ """
+ it = iter(iterable)
+
+ try:
+ b = next(it)
+ except StopIteration:
+ return
+
+ try:
+ for i in count():
+ a = b
+ b = next(it)
+ yield i == 0, False, a
+
+ except StopIteration:
+ yield i == 0, True, a
+
+
+def locate(iterable, pred=bool, window_size=None):
+ """Yield the index of each item in *iterable* for which *pred* returns
+ ``True``.
+
+ *pred* defaults to :func:`bool`, which will select truthy items:
+
+ >>> list(locate([0, 1, 1, 0, 1, 0, 0]))
+ [1, 2, 4]
+
+ Set *pred* to a custom function to, e.g., find the indexes for a particular
+ item.
+
+ >>> list(locate(['a', 'b', 'c', 'b'], lambda x: x == 'b'))
+ [1, 3]
+
+ If *window_size* is given, then the *pred* function will be called with
+ that many items. This enables searching for sub-sequences:
+
+ >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3]
+ >>> pred = lambda *args: args == (1, 2, 3)
+ >>> list(locate(iterable, pred=pred, window_size=3))
+ [1, 5, 9]
+
+ Use with :func:`seekable` to find indexes and then retrieve the associated
+ items:
+
+ >>> from itertools import count
+ >>> from more_itertools import seekable
+ >>> source = (3 * n + 1 if (n % 2) else n // 2 for n in count())
+ >>> it = seekable(source)
+ >>> pred = lambda x: x > 100
+ >>> indexes = locate(it, pred=pred)
+ >>> i = next(indexes)
+ >>> it.seek(i)
+ >>> next(it)
+ 106
+
+ """
+ if window_size is None:
+ return compress(count(), map(pred, iterable))
+
+ if window_size < 1:
+ raise ValueError('window size must be at least 1')
+
+ it = windowed(iterable, window_size, fillvalue=_marker)
+ return compress(count(), starmap(pred, it))
+
+
+def lstrip(iterable, pred):
+ """Yield the items from *iterable*, but strip any from the beginning
+ for which *pred* returns ``True``.
+
+ For example, to remove a set of items from the start of an iterable:
+
+ >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
+ >>> pred = lambda x: x in {None, False, ''}
+ >>> list(lstrip(iterable, pred))
+ [1, 2, None, 3, False, None]
+
+ This function is analogous to :func:`str.lstrip`, and is essentially
+ a wrapper for :func:`itertools.dropwhile`.
+
+ """
+ return dropwhile(pred, iterable)
+
+
+def rstrip(iterable, pred):
+ """Yield the items from *iterable*, but strip any from the end
+ for which *pred* returns ``True``.
+
+ For example, to remove a set of items from the end of an iterable:
+
+ >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
+ >>> pred = lambda x: x in {None, False, ''}
+ >>> list(rstrip(iterable, pred))
+ [None, False, None, 1, 2, None, 3]
+
+ This function is analogous to :func:`str.rstrip`.
+
+ """
+ cache = []
+ cache_append = cache.append
+ cache_clear = cache.clear
+ for x in iterable:
+ if pred(x):
+ cache_append(x)
+ else:
+ yield from cache
+ cache_clear()
+ yield x
+
+
+def strip(iterable, pred):
+ """Yield the items from *iterable*, but strip any from the
+ beginning and end for which *pred* returns ``True``.
+
+ For example, to remove a set of items from both ends of an iterable:
+
+ >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
+ >>> pred = lambda x: x in {None, False, ''}
+ >>> list(strip(iterable, pred))
+ [1, 2, None, 3]
+
+ This function is analogous to :func:`str.strip`.
+
+ """
+ return rstrip(lstrip(iterable, pred), pred)
+
+
+class islice_extended:
+ """An extension of :func:`itertools.islice` that supports negative values
+ for *stop*, *start*, and *step*.
+
+ >>> iterable = iter('abcdefgh')
+ >>> list(islice_extended(iterable, -4, -1))
+ ['e', 'f', 'g']
+
+ Slices with negative values require some caching of *iterable*, but this
+ function takes care to minimize the amount of memory required.
+
+ For example, you can use a negative step with an infinite iterator:
+
+ >>> from itertools import count
+ >>> list(islice_extended(count(), 110, 99, -2))
+ [110, 108, 106, 104, 102, 100]
+
+ You can also use slice notation directly:
+
+ >>> iterable = map(str, count())
+ >>> it = islice_extended(iterable)[10:20:2]
+ >>> list(it)
+ ['10', '12', '14', '16', '18']
+
+ """
+
+ def __init__(self, iterable, *args):
+ it = iter(iterable)
+ if args:
+ self._iterable = _islice_helper(it, slice(*args))
+ else:
+ self._iterable = it
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ return next(self._iterable)
+
+ def __getitem__(self, key):
+ if isinstance(key, slice):
+ return islice_extended(_islice_helper(self._iterable, key))
+
+ raise TypeError('islice_extended.__getitem__ argument must be a slice')
+
+
+def _islice_helper(it, s):
+ start = s.start
+ stop = s.stop
+ if s.step == 0:
+ raise ValueError('step argument must be a non-zero integer or None.')
+ step = s.step or 1
+
+ if step > 0:
+ start = 0 if (start is None) else start
+
+ if start < 0:
+ # Consume all but the last -start items
+ cache = deque(enumerate(it, 1), maxlen=-start)
+ len_iter = cache[-1][0] if cache else 0
+
+ # Adjust start to be positive
+ i = max(len_iter + start, 0)
+
+ # Adjust stop to be positive
+ if stop is None:
+ j = len_iter
+ elif stop >= 0:
+ j = min(stop, len_iter)
+ else:
+ j = max(len_iter + stop, 0)
+
+ # Slice the cache
+ n = j - i
+ if n <= 0:
+ return
+
+ for index, item in islice(cache, 0, n, step):
+ yield item
+ elif (stop is not None) and (stop < 0):
+ # Advance to the start position
+ next(islice(it, start, start), None)
+
+ # When stop is negative, we have to carry -stop items while
+ # iterating
+ cache = deque(islice(it, -stop), maxlen=-stop)
+
+ for index, item in enumerate(it):
+ cached_item = cache.popleft()
+ if index % step == 0:
+ yield cached_item
+ cache.append(item)
+ else:
+ # When both start and stop are positive we have the normal case
+ yield from islice(it, start, stop, step)
+ else:
+ start = -1 if (start is None) else start
+
+ if (stop is not None) and (stop < 0):
+ # Consume all but the last items
+ n = -stop - 1
+ cache = deque(enumerate(it, 1), maxlen=n)
+ len_iter = cache[-1][0] if cache else 0
+
+ # If start and stop are both negative they are comparable and
+ # we can just slice. Otherwise we can adjust start to be negative
+ # and then slice.
+ if start < 0:
+ i, j = start, stop
+ else:
+ i, j = min(start - len_iter, -1), None
+
+ for index, item in list(cache)[i:j:step]:
+ yield item
+ else:
+ # Advance to the stop position
+ if stop is not None:
+ m = stop + 1
+ next(islice(it, m, m), None)
+
+ # stop is positive, so if start is negative they are not comparable
+ # and we need the rest of the items.
+ if start < 0:
+ i = start
+ n = None
+ # stop is None and start is positive, so we just need items up to
+ # the start index.
+ elif stop is None:
+ i = None
+ n = start + 1
+ # Both stop and start are positive, so they are comparable.
+ else:
+ i = None
+ n = start - stop
+ if n <= 0:
+ return
+
+ cache = list(islice(it, n))
+
+ yield from cache[i::step]
+
+
+def always_reversible(iterable):
+ """An extension of :func:`reversed` that supports all iterables, not
+ just those which implement the ``Reversible`` or ``Sequence`` protocols.
+
+ >>> print(*always_reversible(x for x in range(3)))
+ 2 1 0
+
+ If the iterable is already reversible, this function returns the
+ result of :func:`reversed()`. If the iterable is not reversible,
+ this function will cache the remaining items in the iterable and
+ yield them in reverse order, which may require significant storage.
+ """
+ try:
+ return reversed(iterable)
+ except TypeError:
+ return reversed(list(iterable))
+
+
+def consecutive_groups(iterable, ordering=lambda x: x):
+ """Yield groups of consecutive items using :func:`itertools.groupby`.
+ The *ordering* function determines whether two items are adjacent by
+ returning their position; consecutive items whose positions differ by
+ exactly 1 are placed in the same group.
+
+ By default, the ordering function is the identity function. This is
+ suitable for finding runs of numbers:
+
+ >>> iterable = [1, 10, 11, 12, 20, 30, 31, 32, 33, 40]
+ >>> for group in consecutive_groups(iterable):
+ ... print(list(group))
+ [1]
+ [10, 11, 12]
+ [20]
+ [30, 31, 32, 33]
+ [40]
+
+ For finding runs of adjacent letters, try using the :meth:`index` method
+ of a string of letters:
+
+ >>> from string import ascii_lowercase
+ >>> iterable = 'abcdfgilmnop'
+ >>> ordering = ascii_lowercase.index
+ >>> for group in consecutive_groups(iterable, ordering):
+ ... print(list(group))
+ ['a', 'b', 'c', 'd']
+ ['f', 'g']
+ ['i']
+ ['l', 'm', 'n', 'o', 'p']
+
+ Each group of consecutive items is an iterator that shares its source with
+ *iterable*. When an output group is advanced, the previous group is
+ no longer available unless its elements are copied (e.g., into a ``list``).
+
+ >>> iterable = [1, 2, 11, 12, 21, 22]
+ >>> saved_groups = []
+ >>> for group in consecutive_groups(iterable):
+ ... saved_groups.append(list(group)) # Copy group elements
+ >>> saved_groups
+ [[1, 2], [11, 12], [21, 22]]
+
+ """
+ for k, g in groupby(
+ enumerate(iterable), key=lambda x: x[0] - ordering(x[1])
+ ):
+ yield map(itemgetter(1), g)
+
+
+def difference(iterable, func=sub, *, initial=None):
+ """This function is the inverse of :func:`itertools.accumulate`. By default
+ it will compute the first difference of *iterable* using
+ :func:`operator.sub`:
+
+ >>> from itertools import accumulate
+ >>> iterable = accumulate([0, 1, 2, 3, 4]) # produces 0, 1, 3, 6, 10
+ >>> list(difference(iterable))
+ [0, 1, 2, 3, 4]
+
+ *func* defaults to :func:`operator.sub`, but other functions can be
+ specified. They will be applied as follows::
+
+ A, B, C, D, ... --> A, func(B, A), func(C, B), func(D, C), ...
+
+ For example, to do progressive division:
+
+ >>> iterable = [1, 2, 6, 24, 120]
+ >>> func = lambda x, y: x // y
+ >>> list(difference(iterable, func))
+ [1, 2, 3, 4, 5]
+
+ If the *initial* keyword is set, the first element will be skipped when
+ computing successive differences.
+
+ >>> it = [10, 11, 13, 16] # from accumulate([1, 2, 3], initial=10)
+ >>> list(difference(it, initial=10))
+ [1, 2, 3]
+
+ """
+ a, b = tee(iterable)
+ try:
+ first = [next(b)]
+ except StopIteration:
+ return iter([])
+
+ if initial is not None:
+ first = []
+
+ return chain(first, starmap(func, zip(b, a)))
+
+
+class SequenceView(Sequence):
+ """Return a read-only view of the sequence object *target*.
+
+ :class:`SequenceView` objects are analogous to Python's built-in
+ "dictionary view" types. They provide a dynamic view of a sequence's items,
+ meaning that when the sequence updates, so does the view.
+
+ >>> seq = ['0', '1', '2']
+ >>> view = SequenceView(seq)
+ >>> view
+ SequenceView(['0', '1', '2'])
+ >>> seq.append('3')
+ >>> view
+ SequenceView(['0', '1', '2', '3'])
+
+ Sequence views support indexing, slicing, and length queries. They act
+ like the underlying sequence, except they don't allow assignment:
+
+ >>> view[1]
+ '1'
+ >>> view[1:-1]
+ ['1', '2']
+ >>> len(view)
+ 4
+
+ Sequence views are useful as an alternative to copying, as they don't
+ require (much) extra storage.
+
+ """
+
+ def __init__(self, target):
+ if not isinstance(target, Sequence):
+ raise TypeError
+ self._target = target
+
+ def __getitem__(self, index):
+ return self._target[index]
+
+ def __len__(self):
+ return len(self._target)
+
+ def __repr__(self):
+ return '{}({})'.format(self.__class__.__name__, repr(self._target))
+
+
+class seekable:
+ """Wrap an iterator to allow for seeking backward and forward. This
+ progressively caches the items in the source iterable so they can be
+ re-visited.
+
+ Call :meth:`seek` with an index to seek to that position in the source
+ iterable.
+
+ To "reset" an iterator, seek to ``0``:
+
+ >>> from itertools import count
+ >>> it = seekable((str(n) for n in count()))
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+ >>> it.seek(0)
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+ >>> next(it)
+ '3'
+
+ You can also seek forward:
+
+ >>> it = seekable((str(n) for n in range(20)))
+ >>> it.seek(10)
+ >>> next(it)
+ '10'
+ >>> it.seek(20) # Seeking past the end of the source isn't a problem
+ >>> list(it)
+ []
+ >>> it.seek(0) # Resetting works even after hitting the end
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+
+ Call :meth:`peek` to look ahead one item without advancing the iterator:
+
+ >>> it = seekable('1234')
+ >>> it.peek()
+ '1'
+ >>> list(it)
+ ['1', '2', '3', '4']
+ >>> it.peek(default='empty')
+ 'empty'
+
+ Before the iterator is at its end, calling :func:`bool` on it will return
+ ``True``. After it will return ``False``:
+
+ >>> it = seekable('5678')
+ >>> bool(it)
+ True
+ >>> list(it)
+ ['5', '6', '7', '8']
+ >>> bool(it)
+ False
+
+ You may view the contents of the cache with the :meth:`elements` method.
+ That returns a :class:`SequenceView`, a view that updates automatically:
+
+ >>> it = seekable((str(n) for n in range(10)))
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+ >>> elements = it.elements()
+ >>> elements
+ SequenceView(['0', '1', '2'])
+ >>> next(it)
+ '3'
+ >>> elements
+ SequenceView(['0', '1', '2', '3'])
+
+ By default, the cache grows as the source iterable progresses, so beware of
+ wrapping very large or infinite iterables. Supply *maxlen* to limit the
+ size of the cache (this of course limits how far back you can seek).
+
+ >>> from itertools import count
+ >>> it = seekable((str(n) for n in count()), maxlen=2)
+ >>> next(it), next(it), next(it), next(it)
+ ('0', '1', '2', '3')
+ >>> list(it.elements())
+ ['2', '3']
+ >>> it.seek(0)
+ >>> next(it), next(it), next(it), next(it)
+ ('2', '3', '4', '5')
+ >>> next(it)
+ '6'
+
+ """
+
+ def __init__(self, iterable, maxlen=None):
+ self._source = iter(iterable)
+ if maxlen is None:
+ self._cache = []
+ else:
+ self._cache = deque([], maxlen)
+ self._index = None
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ if self._index is not None:
+ try:
+ item = self._cache[self._index]
+ except IndexError:
+ self._index = None
+ else:
+ self._index += 1
+ return item
+
+ item = next(self._source)
+ self._cache.append(item)
+ return item
+
+ def __bool__(self):
+ try:
+ self.peek()
+ except StopIteration:
+ return False
+ return True
+
+ def peek(self, default=_marker):
+ try:
+ peeked = next(self)
+ except StopIteration:
+ if default is _marker:
+ raise
+ return default
+ if self._index is None:
+ self._index = len(self._cache)
+ self._index -= 1
+ return peeked
+
+ def elements(self):
+ return SequenceView(self._cache)
+
+ def seek(self, index):
+ self._index = index
+ remainder = index - len(self._cache)
+ if remainder > 0:
+ consume(self, remainder)
+
+
+class run_length:
+ """
+ :func:`run_length.encode` compresses an iterable with run-length encoding.
+ It yields groups of repeated items with the count of how many times they
+ were repeated:
+
+ >>> uncompressed = 'abbcccdddd'
+ >>> list(run_length.encode(uncompressed))
+ [('a', 1), ('b', 2), ('c', 3), ('d', 4)]
+
+ :func:`run_length.decode` decompresses an iterable that was previously
+ compressed with run-length encoding. It yields the items of the
+ decompressed iterable:
+
+ >>> compressed = [('a', 1), ('b', 2), ('c', 3), ('d', 4)]
+ >>> list(run_length.decode(compressed))
+ ['a', 'b', 'b', 'c', 'c', 'c', 'd', 'd', 'd', 'd']
+
+ """
+
+ @staticmethod
+ def encode(iterable):
+ return ((k, ilen(g)) for k, g in groupby(iterable))
+
+ @staticmethod
+ def decode(iterable):
+ return chain.from_iterable(repeat(k, n) for k, n in iterable)
+
+
+def exactly_n(iterable, n, predicate=bool):
+ """Return ``True`` if exactly ``n`` items in the iterable are ``True``
+ according to the *predicate* function.
+
+ >>> exactly_n([True, True, False], 2)
+ True
+ >>> exactly_n([True, True, False], 1)
+ False
+ >>> exactly_n([0, 1, 2, 3, 4, 5], 3, lambda x: x < 3)
+ True
+
+ The iterable will be advanced until ``n + 1`` truthy items are encountered,
+ so avoid calling it on infinite iterables.
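+
+ For instance, a sketch of that early exit (the call below terminates only
+ because ``n + 1`` truthy items are found; it would hang if they were not):
+
+ >>> from itertools import count
+ >>> exactly_n(count(), 3)
+ False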
+
+ """
+ return len(take(n + 1, filter(predicate, iterable))) == n
+
+
+def circular_shifts(iterable):
+ """Return a list of circular shifts of *iterable*.
+
+ >>> circular_shifts(range(4))
+ [(0, 1, 2, 3), (1, 2, 3, 0), (2, 3, 0, 1), (3, 0, 1, 2)]
+ """
+ lst = list(iterable)
+ return take(len(lst), windowed(cycle(lst), len(lst)))
+
+
+def make_decorator(wrapping_func, result_index=0):
+ """Return a decorator version of *wrapping_func*, which is a function that
+ modifies an iterable. *result_index* is the position in that function's
+ signature where the iterable goes.
+
+ This lets you use itertools on the "production end," i.e. at function
+ definition. This can augment what the function returns without changing the
+ function's code.
+
+ For example, to produce a decorator version of :func:`chunked`:
+
+ >>> from more_itertools import chunked
+ >>> chunker = make_decorator(chunked, result_index=0)
+ >>> @chunker(3)
+ ... def iter_range(n):
+ ... return iter(range(n))
+ ...
+ >>> list(iter_range(9))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
+
+ To only allow truthy items to be returned:
+
+ >>> truth_serum = make_decorator(filter, result_index=1)
+ >>> @truth_serum(bool)
+ ... def boolean_test():
+ ... return [0, 1, '', ' ', False, True]
+ ...
+ >>> list(boolean_test())
+ [1, ' ', True]
+
+ The :func:`peekable` and :func:`seekable` wrappers make for practical
+ decorators:
+
+ >>> from more_itertools import peekable
+ >>> peekable_function = make_decorator(peekable)
+ >>> @peekable_function()
+ ... def str_range(*args):
+ ... return (str(x) for x in range(*args))
+ ...
+ >>> it = str_range(1, 20, 2)
+ >>> next(it), next(it), next(it)
+ ('1', '3', '5')
+ >>> it.peek()
+ '7'
+ >>> next(it)
+ '7'
+
+ """
+ # See https://sites.google.com/site/bbayles/index/decorator_factory for
+ # notes on how this works.
+ def decorator(*wrapping_args, **wrapping_kwargs):
+ def outer_wrapper(f):
+ def inner_wrapper(*args, **kwargs):
+ result = f(*args, **kwargs)
+ wrapping_args_ = list(wrapping_args)
+ wrapping_args_.insert(result_index, result)
+ return wrapping_func(*wrapping_args_, **wrapping_kwargs)
+
+ return inner_wrapper
+
+ return outer_wrapper
+
+ return decorator
+
+
+def map_reduce(iterable, keyfunc, valuefunc=None, reducefunc=None):
+ """Return a dictionary that maps the items in *iterable* to categories
+ defined by *keyfunc*, transforms them with *valuefunc*, and
+ then summarizes them by category with *reducefunc*.
+
+ *valuefunc* defaults to the identity function if it is unspecified.
+ If *reducefunc* is unspecified, no summarization takes place:
+
+ >>> keyfunc = lambda x: x.upper()
+ >>> result = map_reduce('abbccc', keyfunc)
+ >>> sorted(result.items())
+ [('A', ['a']), ('B', ['b', 'b']), ('C', ['c', 'c', 'c'])]
+
+ Specifying *valuefunc* transforms the categorized items:
+
+ >>> keyfunc = lambda x: x.upper()
+ >>> valuefunc = lambda x: 1
+ >>> result = map_reduce('abbccc', keyfunc, valuefunc)
+ >>> sorted(result.items())
+ [('A', [1]), ('B', [1, 1]), ('C', [1, 1, 1])]
+
+ Specifying *reducefunc* summarizes the categorized items:
+
+ >>> keyfunc = lambda x: x.upper()
+ >>> valuefunc = lambda x: 1
+ >>> reducefunc = sum
+ >>> result = map_reduce('abbccc', keyfunc, valuefunc, reducefunc)
+ >>> sorted(result.items())
+ [('A', 1), ('B', 2), ('C', 3)]
+
+ You may want to filter the input iterable before applying the map/reduce
+ procedure:
+
+ >>> all_items = range(30)
+ >>> items = [x for x in all_items if 10 <= x <= 20] # Filter
+ >>> keyfunc = lambda x: x % 2 # Evens map to 0; odds to 1
+ >>> categories = map_reduce(items, keyfunc=keyfunc)
+ >>> sorted(categories.items())
+ [(0, [10, 12, 14, 16, 18, 20]), (1, [11, 13, 15, 17, 19])]
+ >>> summaries = map_reduce(items, keyfunc=keyfunc, reducefunc=sum)
+ >>> sorted(summaries.items())
+ [(0, 90), (1, 75)]
+
+ Note that all items in the iterable are gathered into a list before the
+ summarization step, which may require significant storage.
+
+ The returned object is a :obj:`collections.defaultdict` with the
+ ``default_factory`` set to ``None``, such that it behaves like a normal
+ dictionary.
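+
+ As a sketch of that behavior, missing keys raise ``KeyError`` instead of
+ being inserted automatically:
+
+ >>> result = map_reduce('abbccc', lambda x: x.upper())
+ >>> result['Z']  # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ KeyError: 'Z'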
+
+ """
+ valuefunc = (lambda x: x) if (valuefunc is None) else valuefunc
+
+ ret = defaultdict(list)
+ for item in iterable:
+ key = keyfunc(item)
+ value = valuefunc(item)
+ ret[key].append(value)
+
+ if reducefunc is not None:
+ for key, value_list in ret.items():
+ ret[key] = reducefunc(value_list)
+
+ ret.default_factory = None
+ return ret
+
+
+def rlocate(iterable, pred=bool, window_size=None):
+ """Yield the index of each item in *iterable* for which *pred* returns
+ ``True``, starting from the right and moving left.
+
+ *pred* defaults to :func:`bool`, which will select truthy items:
+
+ >>> list(rlocate([0, 1, 1, 0, 1, 0, 0])) # Truthy at 1, 2, and 4
+ [4, 2, 1]
+
+ Set *pred* to a custom function to, e.g., find the indexes for a particular
+ item:
+
+ >>> iterable = iter('abcb')
+ >>> pred = lambda x: x == 'b'
+ >>> list(rlocate(iterable, pred))
+ [3, 1]
+
+ If *window_size* is given, then the *pred* function will be called with
+ that many items. This enables searching for sub-sequences:
+
+ >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3]
+ >>> pred = lambda *args: args == (1, 2, 3)
+ >>> list(rlocate(iterable, pred=pred, window_size=3))
+ [9, 5, 1]
+
+ Beware, this function won't return anything for infinite iterables.
+ If *iterable* is reversible, ``rlocate`` will reverse it and search from
+ the right. Otherwise, it will search from the left and return the results
+ in reverse order.
+
+ See :func:`locate` for other example applications.
+
+ """
+ if window_size is None:
+ try:
+ len_iter = len(iterable)
+ return (len_iter - i - 1 for i in locate(reversed(iterable), pred))
+ except TypeError:
+ pass
+
+ return reversed(list(locate(iterable, pred, window_size)))
+
+
+def replace(iterable, pred, substitutes, count=None, window_size=1):
+ """Yield the items from *iterable*, replacing the items for which *pred*
+ returns ``True`` with the items from the iterable *substitutes*.
+
+ >>> iterable = [1, 1, 0, 1, 1, 0, 1, 1]
+ >>> pred = lambda x: x == 0
+ >>> substitutes = (2, 3)
+ >>> list(replace(iterable, pred, substitutes))
+ [1, 1, 2, 3, 1, 1, 2, 3, 1, 1]
+
+ If *count* is given, the number of replacements will be limited:
+
+ >>> iterable = [1, 1, 0, 1, 1, 0, 1, 1, 0]
+ >>> pred = lambda x: x == 0
+ >>> substitutes = [None]
+ >>> list(replace(iterable, pred, substitutes, count=2))
+ [1, 1, None, 1, 1, None, 1, 1, 0]
+
+ Use *window_size* to control the number of items passed as arguments to
+ *pred*. This allows for locating and replacing subsequences.
+
+ >>> iterable = [0, 1, 2, 5, 0, 1, 2, 5]
+ >>> window_size = 3
+ >>> pred = lambda *args: args == (0, 1, 2) # 3 items passed to pred
+ >>> substitutes = [3, 4] # Splice in these items
+ >>> list(replace(iterable, pred, substitutes, window_size=window_size))
+ [3, 4, 5, 3, 4, 5]
+
+ """
+ if window_size < 1:
+ raise ValueError('window_size must be at least 1')
+
+ # Save the substitutes iterable, since it's used more than once
+ substitutes = tuple(substitutes)
+
+ # Add padding such that the number of windows matches the length of the
+ # iterable
+ it = chain(iterable, [_marker] * (window_size - 1))
+ windows = windowed(it, window_size)
+
+ n = 0
+ for w in windows:
+ # If the current window matches our predicate (and we haven't hit
+ # our maximum number of replacements), splice in the substitutes
+ # and then consume the following windows that overlap with this one.
+ # For example, if the iterable is (0, 1, 2, 3, 4...)
+ # and the window size is 2, we have (0, 1), (1, 2), (2, 3)...
+ # If the predicate matches on (0, 1), we need to zap (0, 1) and (1, 2)
+ if pred(*w):
+ if (count is None) or (n < count):
+ n += 1
+ yield from substitutes
+ consume(windows, window_size - 1)
+ continue
+
+ # If there was no match (or we've reached the replacement limit),
+ # yield the first item from the window.
+ if w and (w[0] is not _marker):
+ yield w[0]
+
+
+def partitions(iterable):
+ """Yield all possible order-preserving partitions of *iterable*.
+
+ >>> iterable = 'abc'
+ >>> for part in partitions(iterable):
+ ... print([''.join(p) for p in part])
+ ['abc']
+ ['a', 'bc']
+ ['ab', 'c']
+ ['a', 'b', 'c']
+
+ This is unrelated to :func:`partition`.
+
+ """
+ sequence = list(iterable)
+ n = len(sequence)
+ for i in powerset(range(1, n)):
+ yield [sequence[i:j] for i, j in zip((0,) + i, i + (n,))]
+
+
+def set_partitions(iterable, k=None):
+ """
+ Yield the set partitions of *iterable* into *k* parts. Set partitions are
+ not order-preserving.
+
+ >>> iterable = 'abc'
+ >>> for part in set_partitions(iterable, 2):
+ ... print([''.join(p) for p in part])
+ ['a', 'bc']
+ ['ab', 'c']
+ ['b', 'ac']
+
+
+ If *k* is not given, every set partition is generated.
+
+ >>> iterable = 'abc'
+ >>> for part in set_partitions(iterable):
+ ... print([''.join(p) for p in part])
+ ['abc']
+ ['a', 'bc']
+ ['ab', 'c']
+ ['b', 'ac']
+ ['a', 'b', 'c']
+
+ """
+ L = list(iterable)
+ n = len(L)
+ if k is not None:
+ if k < 1:
+ raise ValueError(
+ "Can't partition in a negative or zero number of groups"
+ )
+ elif k > n:
+ return
+
+ def set_partitions_helper(L, k):
+ n = len(L)
+ if k == 1:
+ yield [L]
+ elif n == k:
+ yield [[s] for s in L]
+ else:
+ e, *M = L
+ for p in set_partitions_helper(M, k - 1):
+ yield [[e], *p]
+ for p in set_partitions_helper(M, k):
+ for i in range(len(p)):
+ yield p[:i] + [[e] + p[i]] + p[i + 1 :]
+
+ if k is None:
+ for k in range(1, n + 1):
+ yield from set_partitions_helper(L, k)
+ else:
+ yield from set_partitions_helper(L, k)
+
+
+class time_limited:
+ """
+ Yield items from *iterable* until *limit_seconds* have passed.
+ If the time limit expires before all items have been yielded, the
+ ``timed_out`` parameter will be set to ``True``.
+
+ >>> from time import sleep
+ >>> def generator():
+ ... yield 1
+ ... yield 2
+ ... sleep(0.2)
+ ... yield 3
+ >>> iterable = time_limited(0.1, generator())
+ >>> list(iterable)
+ [1, 2]
+ >>> iterable.timed_out
+ True
+
+ Note that the time is checked before each item is yielded, and iteration
+ stops if the time elapsed is greater than *limit_seconds*. If your time
+ limit is 1 second, but it takes 2 seconds to generate the first item from
+ the iterable, the function will run for 2 seconds and not yield anything.
+
+ """
+
+ def __init__(self, limit_seconds, iterable):
+ if limit_seconds < 0:
+ raise ValueError('limit_seconds must be non-negative')
+ self.limit_seconds = limit_seconds
+ self._iterable = iter(iterable)
+ self._start_time = monotonic()
+ self.timed_out = False
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ item = next(self._iterable)
+ if monotonic() - self._start_time > self.limit_seconds:
+ self.timed_out = True
+ raise StopIteration
+
+ return item
+
+
+def only(iterable, default=None, too_long=None):
+ """If *iterable* has only one item, return it.
+ If it has zero items, return *default*.
+ If it has more than one item, raise the exception given by *too_long*,
+ which is ``ValueError`` by default.
+
+ >>> only([], default='missing')
+ 'missing'
+ >>> only([1])
+ 1
+ >>> only([1, 2]) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ ValueError: Expected exactly one item in iterable, but got 1, 2,
+ and perhaps more.
+ >>> only([1, 2], too_long=TypeError) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ TypeError
+
+ Note that :func:`only` attempts to advance *iterable* twice to ensure there
+ is only one item. See :func:`spy` or :func:`peekable` to check
+ iterable contents less destructively.
+ """
+ it = iter(iterable)
+ first_value = next(it, default)
+
+ try:
+ second_value = next(it)
+ except StopIteration:
+ pass
+ else:
+ msg = (
+ 'Expected exactly one item in iterable, but got {!r}, {!r}, '
+ 'and perhaps more.'.format(first_value, second_value)
+ )
+ raise too_long or ValueError(msg)
+
+ return first_value
+
+
+def ichunked(iterable, n):
+ """Break *iterable* into sub-iterables with *n* elements each.
+ :func:`ichunked` is like :func:`chunked`, but it yields iterables
+ instead of lists.
+
+ If the sub-iterables are read in order, the elements of *iterable*
+ won't be stored in memory.
+ If they are read out of order, :func:`itertools.tee` is used to cache
+ elements as necessary.
+
+ >>> from itertools import count
+ >>> all_chunks = ichunked(count(), 4)
+ >>> c_1, c_2, c_3 = next(all_chunks), next(all_chunks), next(all_chunks)
+ >>> list(c_2) # c_1's elements have been cached; c_3's haven't been
+ [4, 5, 6, 7]
+ >>> list(c_1)
+ [0, 1, 2, 3]
+ >>> list(c_3)
+ [8, 9, 10, 11]
+
+ """
+ source = iter(iterable)
+
+ while True:
+ # Check to see whether we're at the end of the source iterable
+ item = next(source, _marker)
+ if item is _marker:
+ return
+
+ # Clone the source and yield an n-length slice
+ source, it = tee(chain([item], source))
+ yield islice(it, n)
+
+ # Advance the source iterable
+ consume(source, n)
+
+
+def distinct_combinations(iterable, r):
+ """Yield the distinct combinations of *r* items taken from *iterable*.
+
+ >>> list(distinct_combinations([0, 0, 1], 2))
+ [(0, 0), (0, 1)]
+
+ Equivalent to ``set(combinations(iterable, r))``, except duplicates are not
+ generated and thrown away. For larger input sequences this is much more
+ efficient.
+
+ """
+ if r < 0:
+ raise ValueError('r must be non-negative')
+ elif r == 0:
+ yield ()
+ return
+ pool = tuple(iterable)
+ # Stack of candidate generators, one per level of the combination being
+ # built; each yields distinct (index, value) pairs from the remaining pool.
+ generators = [unique_everseen(enumerate(pool), key=itemgetter(1))]
+ current_combo = [None] * r
+ level = 0
+ while generators:
+ try:
+ cur_idx, p = next(generators[-1])
+ except StopIteration:
+ generators.pop()
+ level -= 1
+ continue
+ current_combo[level] = p
+ if level + 1 == r:
+ yield tuple(current_combo)
+ else:
+ generators.append(
+ unique_everseen(
+ enumerate(pool[cur_idx + 1 :], cur_idx + 1),
+ key=itemgetter(1),
+ )
+ )
+ level += 1
+
+
+def filter_except(validator, iterable, *exceptions):
+ """Yield the items from *iterable* for which the *validator* function does
+ not raise one of the specified *exceptions*.
+
+ *validator* is called for each item in *iterable*.
+ It should be a function that accepts one argument and raises an exception
+ if that item is not valid.
+
+ >>> iterable = ['1', '2', 'three', '4', None]
+ >>> list(filter_except(int, iterable, ValueError, TypeError))
+ ['1', '2', '4']
+
+ If an exception other than one given by *exceptions* is raised by
+ *validator*, it is raised like normal.
+ """
+ for item in iterable:
+ try:
+ validator(item)
+ except exceptions:
+ pass
+ else:
+ yield item
+
+
+def map_except(function, iterable, *exceptions):
+ """Transform each item from *iterable* with *function* and yield the
+ result, unless *function* raises one of the specified *exceptions*.
+
+ *function* is called to transform each item in *iterable*.
+ It should accept one argument.
+
+ >>> iterable = ['1', '2', 'three', '4', None]
+ >>> list(map_except(int, iterable, ValueError, TypeError))
+ [1, 2, 4]
+
+ If an exception other than one given by *exceptions* is raised by
+ *function*, it is raised like normal.
+ """
+ for item in iterable:
+ try:
+ yield function(item)
+ except exceptions:
+ pass
+
+
+def map_if(iterable, pred, func, func_else=lambda x: x):
+ """Evaluate each item from *iterable* using *pred*. If the result is
+ equivalent to ``True``, transform the item with *func* and yield it.
+ Otherwise, transform the item with *func_else* and yield it.
+
+ *pred*, *func*, and *func_else* should each be functions that accept
+ one argument. By default, *func_else* is the identity function.
+
+ >>> from math import sqrt
+ >>> iterable = list(range(-5, 5))
+ >>> iterable
+ [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4]
+ >>> list(map_if(iterable, lambda x: x > 3, lambda x: 'toobig'))
+ [-5, -4, -3, -2, -1, 0, 1, 2, 3, 'toobig']
+ >>> list(map_if(iterable, lambda x: x >= 0,
+ ... lambda x: f'{sqrt(x):.2f}', lambda x: None))
+ [None, None, None, None, None, '0.00', '1.00', '1.41', '1.73', '2.00']
+ """
+ for item in iterable:
+ yield func(item) if pred(item) else func_else(item)
+
+
+def _sample_unweighted(iterable, k):
+ # Implementation of "Algorithm L" from the 1994 paper by Kim-Hung Li:
+ # "Reservoir-Sampling Algorithms of Time Complexity O(n(1+log(N/n)))".
+
+ # Fill up the reservoir (collection of samples) with the first `k` samples
+ reservoir = take(k, iterable)
+
+ # Generate random number that's the largest in a sample of k U(0,1) numbers
+ # Largest order statistic: https://en.wikipedia.org/wiki/Order_statistic
+ W = exp(log(random()) / k)
+
+ # The number of elements to skip before changing the reservoir is a random
+ # number with a geometric distribution. Sample it using random() and logs.
+ next_index = k + floor(log(random()) / log(1 - W))
+
+ for index, element in enumerate(iterable, k):
+
+ if index == next_index:
+ reservoir[randrange(k)] = element
+ # The new W is the largest in a sample of k U(0, `old_W`) numbers
+ W *= exp(log(random()) / k)
+ next_index += floor(log(random()) / log(1 - W)) + 1
+
+ return reservoir
+
+
+def _sample_weighted(iterable, k, weights):
+ # Implementation of "A-ExpJ" from the 2006 paper by Efraimidis et al. :
+ # "Weighted random sampling with a reservoir".
+
+ # Log-transform for numerical stability for weights that are small/large
+ weight_keys = (log(random()) / weight for weight in weights)
+
+ # Fill up the reservoir (collection of samples) with the first `k`
+ # weight-keys and elements, then heapify the list.
+ reservoir = take(k, zip(weight_keys, iterable))
+ heapify(reservoir)
+
+ # The number of jumps before changing the reservoir is a random variable
+ # with an exponential distribution. Sample it using random() and logs.
+ smallest_weight_key, _ = reservoir[0]
+ weights_to_skip = log(random()) / smallest_weight_key
+
+ for weight, element in zip(weights, iterable):
+ if weight >= weights_to_skip:
+ # The notation here is consistent with the paper, but we store
+ # the weight-keys in log-space for better numerical stability.
+ smallest_weight_key, _ = reservoir[0]
+ t_w = exp(weight * smallest_weight_key)
+ r_2 = uniform(t_w, 1) # generate U(t_w, 1)
+ weight_key = log(r_2) / weight
+ heapreplace(reservoir, (weight_key, element))
+ smallest_weight_key, _ = reservoir[0]
+ weights_to_skip = log(random()) / smallest_weight_key
+ else:
+ weights_to_skip -= weight
+
+ # Equivalent to [element for weight_key, element in sorted(reservoir)]
+ return [heappop(reservoir)[1] for _ in range(k)]
+
+
+def sample(iterable, k, weights=None):
+ """Return a *k*-length list of elements chosen (without replacement)
+ from the *iterable*. Like :func:`random.sample`, but works on iterables
+ of unknown length.
+
+ >>> iterable = range(100)
+ >>> sample(iterable, 5) # doctest: +SKIP
+ [81, 60, 96, 16, 4]
+
+ An iterable with *weights* may also be given:
+
+ >>> iterable = range(100)
+ >>> weights = (i * i + 1 for i in range(100))
+ >>> sampled = sample(iterable, 5, weights=weights) # doctest: +SKIP
+ [79, 67, 74, 66, 78]
+
+ The algorithm can also be used to generate weighted random permutations.
+ The relative weight of each item determines the probability that it
+ appears late in the permutation.
+
+ >>> data = "abcdefgh"
+ >>> weights = range(1, len(data) + 1)
+ >>> sample(data, k=len(data), weights=weights) # doctest: +SKIP
+ ['c', 'a', 'b', 'e', 'g', 'd', 'h', 'f']
+ """
+ if k == 0:
+ return []
+
+ iterable = iter(iterable)
+ if weights is None:
+ return _sample_unweighted(iterable, k)
+ else:
+ weights = iter(weights)
+ return _sample_weighted(iterable, k, weights)
+
+
+def is_sorted(iterable, key=None, reverse=False, strict=False):
+ """Returns ``True`` if the items of iterable are in sorted order, and
+ ``False`` otherwise. *key* and *reverse* have the same meaning that they do
+ in the built-in :func:`sorted` function.
+
+ >>> is_sorted(['1', '2', '3', '4', '5'], key=int)
+ True
+ >>> is_sorted([5, 4, 3, 1, 2], reverse=True)
+ False
+
+ If *strict*, tests for strict sorting, that is, returns ``False`` if equal
+ elements are found:
+
+ >>> is_sorted([1, 2, 2])
+ True
+ >>> is_sorted([1, 2, 2], strict=True)
+ False
+
+ The function returns ``False`` after encountering the first out-of-order
+ item. If there are no out-of-order items, the iterable is exhausted.
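+
+ For example, a rough sketch of that short-circuit on an infinite input:
+
+ >>> from itertools import chain, count
+ >>> is_sorted(chain([5], count()))
+ False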
+ """
+
+ compare = (le if reverse else ge) if strict else (lt if reverse else gt)
+ it = iterable if key is None else map(key, iterable)
+ return not any(starmap(compare, pairwise(it)))
+
+
+class AbortThread(BaseException):
+ pass
+
+
+class callback_iter:
+ """Convert a function that uses callbacks to an iterator.
+
+ Let *func* be a function that takes a `callback` keyword argument.
+ For example:
+
+ >>> def func(callback=None):
+ ... for i, c in [(1, 'a'), (2, 'b'), (3, 'c')]:
+ ... if callback:
+ ... callback(i, c)
+ ... return 4
+
+
+ Use ``with callback_iter(func)`` to get an iterator over the parameters
+ that are delivered to the callback.
+
+ >>> with callback_iter(func) as it:
+ ... for args, kwargs in it:
+ ... print(args)
+ (1, 'a')
+ (2, 'b')
+ (3, 'c')
+
+ The function will be called in a background thread. The ``done`` property
+ indicates whether it has completed execution.
+
+ >>> it.done
+ True
+
+ If it completes successfully, its return value will be available
+ in the ``result`` property.
+
+ >>> it.result
+ 4
+
+ Notes:
+
+ * If the function uses some keyword argument besides ``callback``, supply
+ *callback_kwd* (see the sketch after these notes).
+ * If it finished executing, but raised an exception, accessing the
+ ``result`` property will raise the same exception.
+ * If it hasn't finished executing, accessing the ``result``
+ property from within the ``with`` block will raise ``RuntimeError``.
+ * If it hasn't finished executing, accessing the ``result`` property from
+ outside the ``with`` block will raise a
+ ``more_itertools.AbortThread`` exception.
+ * Provide *wait_seconds* to adjust how frequently it is polled for
+ output.
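+
+ For instance, a sketch of the *callback_kwd* option with a hypothetical
+ function whose callback argument is named ``cb``:
+
+ >>> def func2(cb=None):
+ ... for i in range(3):
+ ... if cb:
+ ... cb(i)
+ ... return i
+ >>> with callback_iter(func2, callback_kwd='cb') as it:
+ ... for args, kwargs in it:
+ ... print(args)
+ (0,)
+ (1,)
+ (2,)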
+
+ """
+
+ def __init__(self, func, callback_kwd='callback', wait_seconds=0.1):
+ self._func = func
+ self._callback_kwd = callback_kwd
+ self._aborted = False
+ self._future = None
+ self._wait_seconds = wait_seconds
+ # Import concurrent.futures lazily, at construction time.
+ self._executor = __import__("concurrent.futures").futures.ThreadPoolExecutor(max_workers=1)
+ self._iterator = self._reader()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self._aborted = True
+ self._executor.shutdown()
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ return next(self._iterator)
+
+ @property
+ def done(self):
+ if self._future is None:
+ return False
+ return self._future.done()
+
+ @property
+ def result(self):
+ if not self.done:
+ raise RuntimeError('Function has not yet completed')
+
+ return self._future.result()
+
+ def _reader(self):
+ q = Queue()
+
+ def callback(*args, **kwargs):
+ if self._aborted:
+ raise AbortThread('canceled by user')
+
+ q.put((args, kwargs))
+
+ self._future = self._executor.submit(
+ self._func, **{self._callback_kwd: callback}
+ )
+
+ while True:
+ try:
+ item = q.get(timeout=self._wait_seconds)
+ except Empty:
+ pass
+ else:
+ q.task_done()
+ yield item
+
+ if self._future.done():
+ break
+
+ remaining = []
+ while True:
+ try:
+ item = q.get_nowait()
+ except Empty:
+ break
+ else:
+ q.task_done()
+ remaining.append(item)
+ q.join()
+ yield from remaining
+
+
+def windowed_complete(iterable, n):
+ """
+ Yield ``(beginning, middle, end)`` tuples, where:
+
+ * Each ``middle`` has *n* items from *iterable*
+ * Each ``beginning`` has the items before the ones in ``middle``
+ * Each ``end`` has the items after the ones in ``middle``
+
+ >>> iterable = range(7)
+ >>> n = 3
+ >>> for beginning, middle, end in windowed_complete(iterable, n):
+ ... print(beginning, middle, end)
+ () (0, 1, 2) (3, 4, 5, 6)
+ (0,) (1, 2, 3) (4, 5, 6)
+ (0, 1) (2, 3, 4) (5, 6)
+ (0, 1, 2) (3, 4, 5) (6,)
+ (0, 1, 2, 3) (4, 5, 6) ()
+
+ Note that *n* must be at least 0 and at most equal to the length of
+ *iterable*.
+
+ This function will exhaust the iterable and may require significant
+ storage.
+ """
+ if n < 0:
+ raise ValueError('n must be >= 0')
+
+ seq = tuple(iterable)
+ size = len(seq)
+
+ if n > size:
+ raise ValueError('n must be <= len(seq)')
+
+ for i in range(size - n + 1):
+ beginning = seq[:i]
+ middle = seq[i : i + n]
+ end = seq[i + n :]
+ yield beginning, middle, end
+
+
+def all_unique(iterable, key=None):
+ """
+ Returns ``True`` if all the elements of *iterable* are unique (no two
+ elements are equal).
+
+ >>> all_unique('ABCB')
+ False
+
+ If a *key* function is specified, it will be used to make comparisons.
+
+ >>> all_unique('ABCb')
+ True
+ >>> all_unique('ABCb', str.lower)
+ False
+
+ The function returns as soon as the first non-unique element is
+ encountered. Iterables with a mix of hashable and unhashable items can
+ be used, but the function will be slower for unhashable items.
+ """
+ seenset = set()
+ seenset_add = seenset.add
+ seenlist = []
+ seenlist_add = seenlist.append
+ for element in map(key, iterable) if key else iterable:
+ try:
+ if element in seenset:
+ return False
+ seenset_add(element)
+ except TypeError:
+ if element in seenlist:
+ return False
+ seenlist_add(element)
+ return True
+
+
+def nth_product(index, *args):
+ """Equivalent to ``list(product(*args))[index]``.
+
+ The products of *args* can be ordered lexicographically.
+ :func:`nth_product` computes the product at sort position *index* without
+ computing the previous products.
+
+ >>> nth_product(8, range(2), range(2), range(2), range(2))
+ (1, 0, 0, 0)
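+
+ As a verification sketch, this matches indexing into the full product:
+
+ >>> from itertools import product
+ >>> list(product(range(2), range(2), range(2), range(2)))[8]
+ (1, 0, 0, 0)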
+
+ ``IndexError`` will be raised if the given *index* is invalid.
+ """
+ pools = list(map(tuple, reversed(args)))
+ ns = list(map(len, pools))
+
+ c = reduce(mul, ns)
+
+ if index < 0:
+ index += c
+
+ if not 0 <= index < c:
+ raise IndexError
+
+ result = []
+ for pool, n in zip(pools, ns):
+ result.append(pool[index % n])
+ index //= n
+
+ return tuple(reversed(result))
+
+
+def nth_permutation(iterable, r, index):
+ """Equivalent to ``list(permutations(iterable, r))[index]```
+
+ The subsequences of *iterable* that are of length *r* where order is
+ important can be ordered lexicographically. :func:`nth_permutation`
+ computes the subsequence at sort position *index* directly, without
+ computing the previous subsequences.
+
+ >>> nth_permutation('ghijk', 2, 5)
+ ('h', 'i')
+
+ ``ValueError`` will be raised if *r* is negative or greater than the length
+ of *iterable*.
+ ``IndexError`` will be raised if the given *index* is invalid.
+ """
+ pool = list(iterable)
+ n = len(pool)
+
+ if r is None or r == n:
+ r, c = n, factorial(n)
+ elif not 0 <= r < n:
+ raise ValueError
+ else:
+ c = factorial(n) // factorial(n - r)
+
+ if index < 0:
+ index += c
+
+ if not 0 <= index < c:
+ raise IndexError
+
+ if c == 0:
+ return tuple()
+
+ result = [0] * r
+ q = index * factorial(n) // c if r < n else index
+ for d in range(1, n + 1):
+ q, i = divmod(q, d)
+ if 0 <= n - d < r:
+ result[n - d] = i
+ if q == 0:
+ break
+
+ return tuple(map(pool.pop, result))
+
+
+def value_chain(*args):
+ """Yield all arguments passed to the function in the same order in which
+ they were passed. If an argument itself is iterable then iterate over its
+ values.
+
+ >>> list(value_chain(1, 2, 3, [4, 5, 6]))
+ [1, 2, 3, 4, 5, 6]
+
+ Binary and text strings are not considered iterable and are emitted
+ as-is:
+
+ >>> list(value_chain('12', '34', ['56', '78']))
+ ['12', '34', '56', '78']
+
+
+ Multiple levels of nesting are not flattened.
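+
+ For instance, a brief sketch of that caveat:
+
+ >>> list(value_chain(1, [2, [3, 4]], 5))
+ [1, 2, [3, 4], 5]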
+
+ """
+ for value in args:
+ if isinstance(value, (str, bytes)):
+ yield value
+ continue
+ try:
+ yield from value
+ except TypeError:
+ yield value
+
+
+def product_index(element, *args):
+ """Equivalent to ``list(product(*args)).index(element)``
+
+ The products of *args* can be ordered lexicographically.
+ :func:`product_index` computes the first index of *element* without
+ computing the previous products.
+
+ >>> product_index([8, 2], range(10), range(5))
+ 42
+
+ ``ValueError`` will be raised if the given *element* isn't in the product
+ of *args*.
+ """
+ index = 0
+
+ for x, pool in zip_longest(element, args, fillvalue=_marker):
+ if x is _marker or pool is _marker:
+ raise ValueError('element is not a product of args')
+
+ pool = tuple(pool)
+ index = index * len(pool) + pool.index(x)
+
+ return index
+
+
+def combination_index(element, iterable):
+ """Equivalent to ``list(combinations(iterable, r)).index(element)``
+
+ The subsequences of *iterable* that are of length *r* can be ordered
+ lexicographically. :func:`combination_index` computes the index of the
+ first *element*, without computing the previous combinations.
+
+ >>> combination_index('adf', 'abcdefg')
+ 10
+
+ ``ValueError`` will be raised if the given *element* isn't one of the
+ combinations of *iterable*.
+ """
+ element = enumerate(element)
+ k, y = next(element, (None, None))
+ if k is None:
+ return 0
+
+ indexes = []
+ pool = enumerate(iterable)
+ for n, x in pool:
+ if x == y:
+ indexes.append(n)
+ tmp, y = next(element, (None, None))
+ if tmp is None:
+ break
+ else:
+ k = tmp
+ else:
+ raise ValueError('element is not a combination of iterable')
+
+ n, _ = last(pool, default=(n, None))
+
+ # Python versions below 3.8 don't have math.comb, so compute the
+ # binomial coefficients from factorials instead
+ index = 1
+ for i, j in enumerate(reversed(indexes), start=1):
+ j = n - j
+ if i <= j:
+ index += factorial(j) // (factorial(i) * factorial(j - i))
+
+ return factorial(n + 1) // (factorial(k + 1) * factorial(n - k)) - index
+
+
+def permutation_index(element, iterable):
+ """Equivalent to ``list(permutations(iterable, r)).index(element)```
+
+ The subsequences of *iterable* that are of length *r* where order is
+ important can be ordered lexicographically. :func:`permutation_index`
+ computes the index of the first *element* directly, without computing
+ the previous permutations.
+
+ >>> permutation_index([1, 3, 2], range(5))
+ 19
+
+ ``ValueError`` will be raised if the given *element* isn't one of the
+ permutations of *iterable*.
+ """
+ index = 0
+ pool = list(iterable)
+ for i, x in zip(range(len(pool), -1, -1), element):
+ r = pool.index(x)
+ index = index * i + r
+ del pool[r]
+
+ return index
+
+
+class countable:
+ """Wrap *iterable* and keep a count of how many items have been consumed.
+
+ The ``items_seen`` attribute starts at ``0`` and increments as the iterable
+ is consumed:
+
+ >>> iterable = map(str, range(10))
+ >>> it = countable(iterable)
+ >>> it.items_seen
+ 0
+ >>> next(it), next(it)
+ ('0', '1')
+ >>> list(it)
+ ['2', '3', '4', '5', '6', '7', '8', '9']
+ >>> it.items_seen
+ 10
+ """
+
+ def __init__(self, iterable):
+ self._it = iter(iterable)
+ self.items_seen = 0
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ item = next(self._it)
+ self.items_seen += 1
+
+ return item
+
+
+def chunked_even(iterable, n):
+ """Break *iterable* into lists of approximately length *n*.
+ Items are distributed such that the lengths of the lists differ by at
+ most 1 item.
+
+ >>> iterable = [1, 2, 3, 4, 5, 6, 7]
+ >>> n = 3
+ >>> list(chunked_even(iterable, n)) # List lengths: 3, 2, 2
+ [[1, 2, 3], [4, 5], [6, 7]]
+ >>> list(chunked(iterable, n)) # List lengths: 3, 3, 1
+ [[1, 2, 3], [4, 5, 6], [7]]
+
+ """
+
+ len_method = getattr(iterable, '__len__', None)
+
+ if len_method is None:
+ return _chunked_even_online(iterable, n)
+ else:
+ return _chunked_even_finite(iterable, len_method(), n)
+
+
+def _chunked_even_online(iterable, n):
+ buffer = []
+ maxbuf = n + (n - 2) * (n - 1)
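+ # maxbuf == (n - 1) ** 2 + 1: once this many items are buffered, an even
+ # split is guaranteed to start with a chunk of exactly n items, so one can
+ # be emitted right away.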
+ for x in iterable:
+ buffer.append(x)
+ if len(buffer) == maxbuf:
+ yield buffer[:n]
+ buffer = buffer[n:]
+ yield from _chunked_even_finite(buffer, len(buffer), n)
+
+
+def _chunked_even_finite(iterable, N, n):
+ if N < 1:
+ return
+
+ # Lists are either size `full_size <= n` or `partial_size = full_size - 1`
+ q, r = divmod(N, n)
+ num_lists = q + (1 if r > 0 else 0)
+ q, r = divmod(N, num_lists)
+ full_size = q + (1 if r > 0 else 0)
+ partial_size = full_size - 1
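+ # Solve N == full_size * num_full + partial_size * (num_lists - num_full)
+ # for num_full.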
+ num_full = N - partial_size * num_lists
+ num_partial = num_lists - num_full
+
+ buffer = []
+ iterator = iter(iterable)
+
+ # Yield num_full lists of full_size
+ for x in iterator:
+ buffer.append(x)
+ if len(buffer) == full_size:
+ yield buffer
+ buffer = []
+ num_full -= 1
+ if num_full <= 0:
+ break
+
+ # Yield num_partial lists of partial_size
+ for x in iterator:
+ buffer.append(x)
+ if len(buffer) == partial_size:
+ yield buffer
+ buffer = []
+ num_partial -= 1
+
+
+def zip_broadcast(*objects, scalar_types=(str, bytes), strict=False):
+ """A version of :func:`zip` that "broadcasts" any scalar
+ (i.e., non-iterable) items into output tuples.
+
+ >>> iterable_1 = [1, 2, 3]
+ >>> iterable_2 = ['a', 'b', 'c']
+ >>> scalar = '_'
+ >>> list(zip_broadcast(iterable_1, iterable_2, scalar))
+ [(1, 'a', '_'), (2, 'b', '_'), (3, 'c', '_')]
+
+ The *scalar_types* keyword argument determines what types are considered
+ scalar. It is set to ``(str, bytes)`` by default. Set it to ``None`` to
+ treat strings and byte strings as iterable:
+
+ >>> list(zip_broadcast('abc', 0, 'xyz', scalar_types=None))
+ [('a', 0, 'x'), ('b', 0, 'y'), ('c', 0, 'z')]
+
+ If the *strict* keyword argument is ``True``, then
+ ``UnequalIterablesError`` will be raised if any of the iterables have
+ different lengths.
+ """
+
+ def is_scalar(obj):
+ if scalar_types and isinstance(obj, scalar_types):
+ return True
+ try:
+ iter(obj)
+ except TypeError:
+ return True
+ else:
+ return False
+
+ size = len(objects)
+ if not size:
+ return
+
+ iterables, iterable_positions = [], []
+ scalars, scalar_positions = [], []
+ for i, obj in enumerate(objects):
+ if is_scalar(obj):
+ scalars.append(obj)
+ scalar_positions.append(i)
+ else:
+ iterables.append(iter(obj))
+ iterable_positions.append(i)
+
+ if len(scalars) == size:
+ yield tuple(objects)
+ return
+
+ zipper = _zip_equal if strict else zip
+ for item in zipper(*iterables):
+ new_item = [None] * size
+
+ for i, elem in zip(iterable_positions, item):
+ new_item[i] = elem
+
+ for i, elem in zip(scalar_positions, scalars):
+ new_item[i] = elem
+
+ yield tuple(new_item)
+
+
+def unique_in_window(iterable, n, key=None):
+ """Yield the items from *iterable* that haven't been seen recently.
+ *n* is the size of the lookback window.
+
+ >>> iterable = [0, 1, 0, 2, 3, 0]
+ >>> n = 3
+ >>> list(unique_in_window(iterable, n))
+ [0, 1, 2, 3, 0]
+
+ The *key* function, if provided, will be used to determine uniqueness:
+
+ >>> list(unique_in_window('abAcda', 3, key=lambda x: x.lower()))
+ ['a', 'b', 'c', 'd', 'a']
+
+ The items in *iterable* must be hashable.
+
+ """
+ if n <= 0:
+ raise ValueError('n must be greater than 0')
+
+ window = deque(maxlen=n)
+ uniques = set()
+ use_key = key is not None
+
+ for item in iterable:
+ k = key(item) if use_key else item
+ if k in uniques:
+ continue
+
+ if len(uniques) == n:
+ uniques.discard(window[0])
+
+ uniques.add(k)
+ window.append(k)
+
+ yield item
+
+
+def duplicates_everseen(iterable, key=None):
+ """Yield duplicate elements after their first appearance.
+
+ >>> list(duplicates_everseen('mississippi'))
+ ['s', 'i', 's', 's', 'i', 'p', 'i']
+ >>> list(duplicates_everseen('AaaBbbCccAaa', str.lower))
+ ['a', 'a', 'b', 'b', 'c', 'c', 'A', 'a', 'a']
+
+ This function is analogous to :func:`unique_everseen` and is subject to
+ the same performance considerations.
+
+ """
+ seen_set = set()
+ seen_list = []
+ use_key = key is not None
+
+ for element in iterable:
+ k = key(element) if use_key else element
+ try:
+ if k not in seen_set:
+ seen_set.add(k)
+ else:
+ yield element
+ except TypeError:
+ if k not in seen_list:
+ seen_list.append(k)
+ else:
+ yield element
+
+
+def duplicates_justseen(iterable, key=None):
+ """Yields serially-duplicate elements after their first appearance.
+
+ >>> list(duplicates_justseen('mississippi'))
+ ['s', 's', 'p']
+ >>> list(duplicates_justseen('AaaBbbCccAaa', str.lower))
+ ['a', 'a', 'b', 'b', 'c', 'c', 'a', 'a']
+
+ This function is analogous to :func:`unique_justseen`.
+
+ """
+ return flatten(
+ map(
+ lambda group_tuple: islice_extended(group_tuple[1])[1:],
+ groupby(iterable, key),
+ )
+ )
+
+
+def minmax(iterable_or_value, *others, key=None, default=_marker):
+ """Returns both the smallest and largest items in an iterable
+ or the largest of two or more arguments.
+
+ >>> minmax([3, 1, 5])
+ (1, 5)
+
+ >>> minmax(4, 2, 6)
+ (2, 6)
+
+ If a *key* function is provided, it will be used to transform the input
+ items for comparison.
+
+ >>> minmax([5, 30], key=str) # '30' sorts before '5'
+ (30, 5)
+
+ If a *default* value is provided, it will be returned if there are no
+ input items.
+
+ >>> minmax([], default=(0, 0))
+ (0, 0)
+
+ Otherwise ``ValueError`` is raised.
+
+ This function is based on the
+ `recipe <http://code.activestate.com/recipes/577916/>`__ by
+ Raymond Hettinger and takes care to minimize the number of comparisons
+ performed.
+ """
+ iterable = (iterable_or_value, *others) if others else iterable_or_value
+
+ it = iter(iterable)
+
+ try:
+ lo = hi = next(it)
+ except StopIteration as e:
+ if default is _marker:
+ raise ValueError(
+ '`minmax()` argument is an empty iterable. '
+ 'Provide a `default` value to suppress this error.'
+ ) from e
+ return default
+
+ # Use different branches depending on the presence of key. This saves a lot
+ # of unnecessary copies which would significantly slow down the
+ # "key=None" branch.
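+ # Process the iterator two items at a time: order each pair first, then
+ # compare the smaller with lo and the larger with hi (about 3 comparisons
+ # per 2 items instead of 4).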
+ if key is None:
+ for x, y in zip_longest(it, it, fillvalue=lo):
+ if y < x:
+ x, y = y, x
+ if x < lo:
+ lo = x
+ if hi < y:
+ hi = y
+
+ else:
+ lo_key = hi_key = key(lo)
+
+ for x, y in zip_longest(it, it, fillvalue=lo):
+
+ x_key, y_key = key(x), key(y)
+
+ if y_key < x_key:
+ x, y, x_key, y_key = y, x, y_key, x_key
+ if x_key < lo_key:
+ lo, lo_key = x, x_key
+ if hi_key < y_key:
+ hi, hi_key = y, y_key
+
+ return lo, hi
diff --git a/pkg_resources/_vendor/more_itertools/recipes.py b/pkg_resources/_vendor/more_itertools/recipes.py
new file mode 100644
index 0000000..a259642
--- /dev/null
+++ b/pkg_resources/_vendor/more_itertools/recipes.py
@@ -0,0 +1,698 @@
+"""Imported from the recipes section of the itertools documentation.
+
+All functions taken from the recipes section of the itertools library docs
+[1]_.
+Some backward-compatible usability improvements have been made.
+
+.. [1] http://docs.python.org/library/itertools.html#recipes
+
+"""
+import warnings
+from collections import deque
+from itertools import (
+ chain,
+ combinations,
+ count,
+ cycle,
+ groupby,
+ islice,
+ repeat,
+ starmap,
+ tee,
+ zip_longest,
+)
+import operator
+from random import randrange, sample, choice
+
+__all__ = [
+ 'all_equal',
+ 'before_and_after',
+ 'consume',
+ 'convolve',
+ 'dotproduct',
+ 'first_true',
+ 'flatten',
+ 'grouper',
+ 'iter_except',
+ 'ncycles',
+ 'nth',
+ 'nth_combination',
+ 'padnone',
+ 'pad_none',
+ 'pairwise',
+ 'partition',
+ 'powerset',
+ 'prepend',
+ 'quantify',
+ 'random_combination_with_replacement',
+ 'random_combination',
+ 'random_permutation',
+ 'random_product',
+ 'repeatfunc',
+ 'roundrobin',
+ 'sliding_window',
+ 'tabulate',
+ 'tail',
+ 'take',
+ 'triplewise',
+ 'unique_everseen',
+ 'unique_justseen',
+]
+
+
+def take(n, iterable):
+ """Return first *n* items of the iterable as a list.
+
+ >>> take(3, range(10))
+ [0, 1, 2]
+
+ If there are fewer than *n* items in the iterable, all of them are
+ returned.
+
+ >>> take(10, range(3))
+ [0, 1, 2]
+
+ """
+ return list(islice(iterable, n))
+
+
+def tabulate(function, start=0):
+ """Return an iterator over the results of ``func(start)``,
+ ``func(start + 1)``, ``func(start + 2)``...
+
+ *func* should be a function that accepts one integer argument.
+
+ If *start* is not specified it defaults to 0. It will be incremented each
+ time the iterator is advanced.
+
+ >>> square = lambda x: x ** 2
+ >>> iterator = tabulate(square, -3)
+ >>> take(4, iterator)
+ [9, 4, 1, 0]
+
+ """
+ return map(function, count(start))
+
+
+def tail(n, iterable):
+ """Return an iterator over the last *n* items of *iterable*.
+
+ >>> t = tail(3, 'ABCDEFG')
+ >>> list(t)
+ ['E', 'F', 'G']
+
+ """
+ return iter(deque(iterable, maxlen=n))
+
+
+def consume(iterator, n=None):
+ """Advance *iterable* by *n* steps. If *n* is ``None``, consume it
+ entirely.
+
+ Efficiently exhausts an iterator without returning values. Defaults to
+ consuming the whole iterator, but an optional second argument may be
+ provided to limit consumption.
+
+ >>> i = (x for x in range(10))
+ >>> next(i)
+ 0
+ >>> consume(i, 3)
+ >>> next(i)
+ 4
+ >>> consume(i)
+ >>> next(i)
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ StopIteration
+
+ If the iterator has fewer items remaining than the provided limit, the
+ whole iterator will be consumed.
+
+ >>> i = (x for x in range(3))
+ >>> consume(i, 5)
+ >>> next(i)
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ StopIteration
+
+ """
+ # Use functions that consume iterators at C speed.
+ if n is None:
+ # feed the entire iterator into a zero-length deque
+ deque(iterator, maxlen=0)
+ else:
+ # advance to the empty slice starting at position n
+ next(islice(iterator, n, n), None)
+
+
+def nth(iterable, n, default=None):
+ """Returns the nth item or a default value.
+
+ >>> l = range(10)
+ >>> nth(l, 3)
+ 3
+ >>> nth(l, 20, "zebra")
+ 'zebra'
+
+ """
+ return next(islice(iterable, n, None), default)
+
+
+def all_equal(iterable):
+ """
+ Returns ``True`` if all the elements are equal to each other.
+
+ >>> all_equal('aaaa')
+ True
+ >>> all_equal('aaab')
+ False
+
+ """
+ g = groupby(iterable)
+ return next(g, True) and not next(g, False)
+
+
+def quantify(iterable, pred=bool):
+ """Return the how many times the predicate is true.
+
+ >>> quantify([True, False, True])
+ 2
+
+ """
+ return sum(map(pred, iterable))
+
+
+def pad_none(iterable):
+ """Returns the sequence of elements and then returns ``None`` indefinitely.
+
+ >>> take(5, pad_none(range(3)))
+ [0, 1, 2, None, None]
+
+ Useful for emulating the behavior of the built-in :func:`map` function.
+
+ See also :func:`padded`.
+
+ """
+ return chain(iterable, repeat(None))
+
+
+padnone = pad_none
+
+
+def ncycles(iterable, n):
+ """Returns the sequence elements *n* times
+
+ >>> list(ncycles(["a", "b"], 3))
+ ['a', 'b', 'a', 'b', 'a', 'b']
+
+ """
+ return chain.from_iterable(repeat(tuple(iterable), n))
+
+
+def dotproduct(vec1, vec2):
+ """Returns the dot product of the two iterables.
+
+ >>> dotproduct([10, 10], [20, 20])
+ 400
+
+ """
+ return sum(map(operator.mul, vec1, vec2))
+
+
+def flatten(listOfLists):
+ """Return an iterator flattening one level of nesting in a list of lists.
+
+ >>> list(flatten([[0, 1], [2, 3]]))
+ [0, 1, 2, 3]
+
+ See also :func:`collapse`, which can flatten multiple levels of nesting.
+
+ """
+ return chain.from_iterable(listOfLists)
+
+
+def repeatfunc(func, times=None, *args):
+ """Call *func* with *args* repeatedly, returning an iterable over the
+ results.
+
+ If *times* is specified, the iterable will terminate after that many
+ repetitions:
+
+ >>> from operator import add
+ >>> times = 4
+ >>> args = 3, 5
+ >>> list(repeatfunc(add, times, *args))
+ [8, 8, 8, 8]
+
+ If *times* is ``None`` the iterable will not terminate:
+
+ >>> from random import randrange
+ >>> times = None
+ >>> args = 1, 11
+ >>> take(6, repeatfunc(randrange, times, *args)) # doctest:+SKIP
+ [2, 4, 8, 1, 8, 4]
+
+ """
+ if times is None:
+ return starmap(func, repeat(args))
+ return starmap(func, repeat(args, times))
+
+
+def _pairwise(iterable):
+ """Returns an iterator of paired items, overlapping, from the original
+
+ >>> take(4, pairwise(count()))
+ [(0, 1), (1, 2), (2, 3), (3, 4)]
+
+ On Python 3.10 and above, this is an alias for :func:`itertools.pairwise`.
+
+ """
+ a, b = tee(iterable)
+ next(b, None)
+ yield from zip(a, b)
+
+
+try:
+ from itertools import pairwise as itertools_pairwise
+except ImportError:
+ pairwise = _pairwise
+else:
+
+ def pairwise(iterable):
+ yield from itertools_pairwise(iterable)
+
+ pairwise.__doc__ = _pairwise.__doc__
+
+
+def grouper(iterable, n, fillvalue=None):
+ """Collect data into fixed-length chunks or blocks.
+
+ >>> list(grouper('ABCDEFG', 3, 'x'))
+ [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]
+
+ """
+ if isinstance(iterable, int):
+ warnings.warn(
+ "grouper expects iterable as first parameter", DeprecationWarning
+ )
+ n, iterable = iterable, n
+ args = [iter(iterable)] * n
+ return zip_longest(fillvalue=fillvalue, *args)
+
+
+def roundrobin(*iterables):
+ """Yields an item from each iterable, alternating between them.
+
+ >>> list(roundrobin('ABC', 'D', 'EF'))
+ ['A', 'D', 'E', 'B', 'F', 'C']
+
+ This function produces the same output as :func:`interleave_longest`, but
+ may perform better for some inputs (in particular when the number of
+ iterables is small).
+
+ """
+ # Recipe credited to George Sakkis
+ pending = len(iterables)
+ nexts = cycle(iter(it).__next__ for it in iterables)
+ while pending:
+ try:
+ for next in nexts:
+ yield next()
+ except StopIteration:
+ pending -= 1
+ nexts = cycle(islice(nexts, pending))
+
+
+def partition(pred, iterable):
+ """
+ Returns a 2-tuple of iterables derived from the input iterable.
+ The first yields the items that have ``pred(item) == False``.
+ The second yields the items that have ``pred(item) == True``.
+
+ >>> is_odd = lambda x: x % 2 != 0
+ >>> iterable = range(10)
+ >>> even_items, odd_items = partition(is_odd, iterable)
+ >>> list(even_items), list(odd_items)
+ ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])
+
+ If *pred* is None, :func:`bool` is used.
+
+ >>> iterable = [0, 1, False, True, '', ' ']
+ >>> false_items, true_items = partition(None, iterable)
+ >>> list(false_items), list(true_items)
+ ([0, False, ''], [1, True, ' '])
+
+ """
+ if pred is None:
+ pred = bool
+
+ evaluations = ((pred(x), x) for x in iterable)
+ t1, t2 = tee(evaluations)
+ return (
+ (x for (cond, x) in t1 if not cond),
+ (x for (cond, x) in t2 if cond),
+ )
+
+
+def powerset(iterable):
+ """Yields all possible subsets of the iterable.
+
+ >>> list(powerset([1, 2, 3]))
+ [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]
+
+ :func:`powerset` will operate on iterables that aren't :class:`set`
+ instances, so repeated elements in the input will produce repeated elements
+ in the output. Use :func:`unique_everseen` on the input to avoid generating
+ duplicates:
+
+ >>> seq = [1, 1, 0]
+ >>> list(powerset(seq))
+ [(), (1,), (1,), (0,), (1, 1), (1, 0), (1, 0), (1, 1, 0)]
+ >>> from more_itertools import unique_everseen
+ >>> list(powerset(unique_everseen(seq)))
+ [(), (1,), (0,), (1, 0)]
+
+ """
+ s = list(iterable)
+ return chain.from_iterable(combinations(s, r) for r in range(len(s) + 1))
+
+
+def unique_everseen(iterable, key=None):
+ """
+ Yield unique elements, preserving order.
+
+ >>> list(unique_everseen('AAAABBBCCDAABBB'))
+ ['A', 'B', 'C', 'D']
+ >>> list(unique_everseen('ABBCcAD', str.lower))
+ ['A', 'B', 'C', 'D']
+
+ Sequences with a mix of hashable and unhashable items can be used.
+ The function will be slower (i.e., `O(n^2)`) for unhashable items.
+
+ Remember that ``list`` objects are unhashable - you can use the *key*
+ parameter to transform the list to a tuple (which is hashable) to
+ avoid a slowdown.
+
+ >>> iterable = ([1, 2], [2, 3], [1, 2])
+ >>> list(unique_everseen(iterable)) # Slow
+ [[1, 2], [2, 3]]
+ >>> list(unique_everseen(iterable, key=tuple)) # Faster
+ [[1, 2], [2, 3]]
+
+ Similarly, you may want to convert unhashable ``set`` objects with
+ ``key=frozenset``. For ``dict`` objects,
+ ``key=lambda x: frozenset(x.items())`` can be used.
+
+ """
+ seenset = set()
+ seenset_add = seenset.add
+ seenlist = []
+ seenlist_add = seenlist.append
+ use_key = key is not None
+
+ for element in iterable:
+ k = key(element) if use_key else element
+ try:
+ if k not in seenset:
+ seenset_add(k)
+ yield element
+ except TypeError:
+ if k not in seenlist:
+ seenlist_add(k)
+ yield element
+
+
+def unique_justseen(iterable, key=None):
+ """Yields elements in order, ignoring serial duplicates
+
+ >>> list(unique_justseen('AAAABBBCCDAABBB'))
+ ['A', 'B', 'C', 'D', 'A', 'B']
+ >>> list(unique_justseen('ABBCcAD', str.lower))
+ ['A', 'B', 'C', 'A', 'D']
+
+ """
+ return map(next, map(operator.itemgetter(1), groupby(iterable, key)))
+
+
+def iter_except(func, exception, first=None):
+ """Yields results from a function repeatedly until an exception is raised.
+
+ Converts a call-until-exception interface to an iterator interface.
+ Like ``iter(func, sentinel)``, but uses an exception instead of a sentinel
+ to end the loop.
+
+ >>> l = [0, 1, 2]
+ >>> list(iter_except(l.pop, IndexError))
+ [2, 1, 0]
+
+ Multiple exceptions can be specified as a stopping condition:
+
+ >>> l = [1, 2, 3, '...', 4, 5, 6]
+ >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError)))
+ [7, 6, 5]
+ >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError)))
+ [4, 3, 2]
+ >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError)))
+ []
+
+ """
+ try:
+ if first is not None:
+ yield first()
+ while 1:
+ yield func()
+ except exception:
+ pass
+
+
+def first_true(iterable, default=None, pred=None):
+ """
+ Returns the first true value in the iterable.
+
+ If no true value is found, returns *default*.
+
+ If *pred* is not None, returns the first item for which
+ ``pred(item) == True``.
+
+ >>> first_true(range(10))
+ 1
+ >>> first_true(range(10), pred=lambda x: x > 5)
+ 6
+ >>> first_true(range(10), default='missing', pred=lambda x: x > 9)
+ 'missing'
+
+ """
+ return next(filter(pred, iterable), default)
+
+
+def random_product(*args, repeat=1):
+ """Draw an item at random from each of the input iterables.
+
+ >>> random_product('abc', range(4), 'XYZ') # doctest:+SKIP
+ ('c', 3, 'Z')
+
+ If *repeat* is provided as a keyword argument, that many items will be
+ drawn from each iterable.
+
+ >>> random_product('abcd', range(4), repeat=2) # doctest:+SKIP
+ ('a', 2, 'd', 3)
+
+ This is equivalent to taking a random selection from
+ ``itertools.product(*args, repeat=repeat)``.
+
+ """
+ pools = [tuple(pool) for pool in args] * repeat
+ return tuple(choice(pool) for pool in pools)
+
+
+def random_permutation(iterable, r=None):
+ """Return a random *r* length permutation of the elements in *iterable*.
+
+ If *r* is not specified or is ``None``, then *r* defaults to the length of
+ *iterable*.
+
+ >>> random_permutation(range(5)) # doctest:+SKIP
+ (3, 4, 0, 1, 2)
+
+ This is equivalent to taking a random selection from
+ ``itertools.permutations(iterable, r)``.
+
+ """
+ pool = tuple(iterable)
+ r = len(pool) if r is None else r
+ return tuple(sample(pool, r))
+
+
+def random_combination(iterable, r):
+ """Return a random *r* length subsequence of the elements in *iterable*.
+
+ >>> random_combination(range(5), 3) # doctest:+SKIP
+ (2, 3, 4)
+
+ This is equivalent to taking a random selection from
+ ``itertools.combinations(iterable, r)``.
+
+ """
+ pool = tuple(iterable)
+ n = len(pool)
+ indices = sorted(sample(range(n), r))
+ return tuple(pool[i] for i in indices)
+
+
+def random_combination_with_replacement(iterable, r):
+ """Return a random *r* length subsequence of elements in *iterable*,
+ allowing individual elements to be repeated.
+
+ >>> random_combination_with_replacement(range(3), 5) # doctest:+SKIP
+ (0, 0, 1, 2, 2)
+
+ This is equivalent to taking a random selection from
+ ``itertools.combinations_with_replacement(iterable, r)``.
+
+ """
+ pool = tuple(iterable)
+ n = len(pool)
+ indices = sorted(randrange(n) for i in range(r))
+ return tuple(pool[i] for i in indices)
+
+
+def nth_combination(iterable, r, index):
+ """Equivalent to ``list(combinations(iterable, r))[index]``.
+
+ The subsequences of *iterable* that are of length *r* can be ordered
+ lexicographically. :func:`nth_combination` computes the subsequence at
+ sort position *index* directly, without computing the previous
+ subsequences.
+
+ >>> nth_combination(range(5), 3, 5)
+ (0, 3, 4)
+
+ ``ValueError`` will be raised if *r* is negative or greater than the length
+ of *iterable*.
+ ``IndexError`` will be raised if the given *index* is invalid.
+ """
+ pool = tuple(iterable)
+ n = len(pool)
+ if (r < 0) or (r > n):
+ raise ValueError
+
+ c = 1
+ k = min(r, n - r)
+ for i in range(1, k + 1):
+ c = c * (n - k + i) // i
+
+ if index < 0:
+ index += c
+
+ if (index < 0) or (index >= c):
+ raise IndexError
+
+ result = []
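+ # Decode *index* in the combinatorial number system: at each step, c counts
+ # the combinations that keep the current leading item; skip past blocks of c
+ # until the right leading item is found.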
+ while r:
+ c, n, r = c * r // n, n - 1, r - 1
+ while index >= c:
+ index -= c
+ c, n = c * (n - r) // n, n - 1
+ result.append(pool[-1 - n])
+
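+ # Treat *element* as the digits of a mixed-radix number whose place values
+ # are the sizes of the pools in *args*.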
+ return tuple(result)
+
+
+def prepend(value, iterator):
+ """Yield *value*, followed by the elements in *iterator*.
+
+ >>> value = '0'
+ >>> iterator = ['1', '2', '3']
+ >>> list(prepend(value, iterator))
+ ['0', '1', '2', '3']
+
+ To prepend multiple values, see :func:`itertools.chain`
+ or :func:`value_chain`.
+
+ """
+ return chain([value], iterator)
+
+
+def convolve(signal, kernel):
+ """Convolve the iterable *signal* with the iterable *kernel*.
+
+ >>> signal = (1, 2, 3, 4, 5)
+ >>> kernel = [3, 2, 1]
+ >>> list(convolve(signal, kernel))
+ [3, 8, 14, 20, 26, 14, 5]
+
+ Note: the input arguments are not interchangeable, as the *kernel*
+ is immediately consumed and stored.
+
+ """
+ kernel = tuple(kernel)[::-1]
+ n = len(kernel)
+ window = deque([0], maxlen=n) * n
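+ # Start with a window of zeros and pad the tail with n - 1 zeros so the
+ # kernel slides completely on and off the signal (a 'full' convolution).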
+ for x in chain(signal, repeat(0, n - 1)):
+ window.append(x)
+ yield sum(map(operator.mul, kernel, window))
+
+
+def before_and_after(predicate, it):
+ """A variant of :func:`takewhile` that allows complete access to the
+ remainder of the iterator.
+
+ >>> it = iter('ABCdEfGhI')
+ >>> all_upper, remainder = before_and_after(str.isupper, it)
+ >>> ''.join(all_upper)
+ 'ABC'
+ >>> ''.join(remainder) # takewhile() would lose the 'd'
+ 'dEfGhI'
+
+ Note that the first iterator must be fully consumed before the second
+ iterator can generate valid results.
+ """
+ it = iter(it)
+ transition = []
+
+ def true_iterator():
+ for elem in it:
+ if predicate(elem):
+ yield elem
+ else:
+ transition.append(elem)
+ return
+
+ def remainder_iterator():
+ yield from transition
+ yield from it
+
+ return true_iterator(), remainder_iterator()
+
+
+def triplewise(iterable):
+ """Return overlapping triplets from *iterable*.
+
+ >>> list(triplewise('ABCDE'))
+ [('A', 'B', 'C'), ('B', 'C', 'D'), ('C', 'D', 'E')]
+
+ """
+ for (a, _), (b, c) in pairwise(pairwise(iterable)):
+ yield a, b, c
+
+
+def sliding_window(iterable, n):
+ """Return a sliding window of width *n* over *iterable*.
+
+ >>> list(sliding_window(range(6), 4))
+ [(0, 1, 2, 3), (1, 2, 3, 4), (2, 3, 4, 5)]
+
+ If *iterable* has fewer than *n* items, then nothing is yielded:
+
+ >>> list(sliding_window(range(3), 4))
+ []
+
+ For a variant with more features, see :func:`windowed`.
+ """
+ it = iter(iterable)
+ window = deque(islice(it, n), maxlen=n)
+ if len(window) == n:
+ yield tuple(window)
+ for x in it:
+ window.append(x)
+ yield tuple(window)
diff --git a/pkg_resources/_vendor/packaging-21.3.dist-info/top_level.txt b/pkg_resources/_vendor/packaging-21.3.dist-info/top_level.txt
new file mode 100644
index 0000000..748809f
--- /dev/null
+++ b/pkg_resources/_vendor/packaging-21.3.dist-info/top_level.txt
@@ -0,0 +1 @@
+packaging
diff --git a/pkg_resources/_vendor/packaging/__about__.py b/pkg_resources/_vendor/packaging/__about__.py
index 95d330e..3551bc2 100644
--- a/pkg_resources/_vendor/packaging/__about__.py
+++ b/pkg_resources/_vendor/packaging/__about__.py
@@ -1,21 +1,26 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
-from __future__ import absolute_import, division, print_function
__all__ = [
- "__title__", "__summary__", "__uri__", "__version__", "__author__",
- "__email__", "__license__", "__copyright__",
+ "__title__",
+ "__summary__",
+ "__uri__",
+ "__version__",
+ "__author__",
+ "__email__",
+ "__license__",
+ "__copyright__",
]
__title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"
-__version__ = "16.8"
+__version__ = "21.3"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
-__license__ = "BSD or Apache License, Version 2.0"
-__copyright__ = "Copyright 2014-2016 %s" % __author__
+__license__ = "BSD-2-Clause or Apache-2.0"
+__copyright__ = "2014-2019 %s" % __author__
diff --git a/pkg_resources/_vendor/packaging/__init__.py b/pkg_resources/_vendor/packaging/__init__.py
index 5ee6220..3c50c5d 100644
--- a/pkg_resources/_vendor/packaging/__init__.py
+++ b/pkg_resources/_vendor/packaging/__init__.py
@@ -1,14 +1,25 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
-from __future__ import absolute_import, division, print_function
from .__about__ import (
- __author__, __copyright__, __email__, __license__, __summary__, __title__,
- __uri__, __version__
+ __author__,
+ __copyright__,
+ __email__,
+ __license__,
+ __summary__,
+ __title__,
+ __uri__,
+ __version__,
)
__all__ = [
- "__title__", "__summary__", "__uri__", "__version__", "__author__",
- "__email__", "__license__", "__copyright__",
+ "__title__",
+ "__summary__",
+ "__uri__",
+ "__version__",
+ "__author__",
+ "__email__",
+ "__license__",
+ "__copyright__",
]
diff --git a/pkg_resources/_vendor/packaging/_compat.py b/pkg_resources/_vendor/packaging/_compat.py
deleted file mode 100644
index 210bb80..0000000
--- a/pkg_resources/_vendor/packaging/_compat.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-import sys
-
-
-PY2 = sys.version_info[0] == 2
-PY3 = sys.version_info[0] == 3
-
-# flake8: noqa
-
-if PY3:
- string_types = str,
-else:
- string_types = basestring,
-
-
-def with_metaclass(meta, *bases):
- """
- Create a base class with a metaclass.
- """
- # This requires a bit of explanation: the basic idea is to make a dummy
- # metaclass for one level of class instantiation that replaces itself with
- # the actual metaclass.
- class metaclass(meta):
- def __new__(cls, name, this_bases, d):
- return meta(name, bases, d)
- return type.__new__(metaclass, 'temporary_class', (), {})
diff --git a/pkg_resources/_vendor/packaging/_manylinux.py b/pkg_resources/_vendor/packaging/_manylinux.py
new file mode 100644
index 0000000..4c379aa
--- /dev/null
+++ b/pkg_resources/_vendor/packaging/_manylinux.py
@@ -0,0 +1,301 @@
+import collections
+import functools
+import os
+import re
+import struct
+import sys
+import warnings
+from typing import IO, Dict, Iterator, NamedTuple, Optional, Tuple
+
+
+# Python does not provide platform information at sufficient granularity to
+# identify the architecture of the running executable in some cases, so we
+# determine it dynamically by reading the information from the running
+# process. This only applies on Linux, which uses the ELF format.
+class _ELFFileHeader:
+ # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
+ class _InvalidELFFileHeader(ValueError):
+ """
+ An invalid ELF file header was found.
+ """
+
+ ELF_MAGIC_NUMBER = 0x7F454C46
+ ELFCLASS32 = 1
+ ELFCLASS64 = 2
+ ELFDATA2LSB = 1
+ ELFDATA2MSB = 2
+ EM_386 = 3
+ EM_S390 = 22
+ EM_ARM = 40
+ EM_X86_64 = 62
+ EF_ARM_ABIMASK = 0xFF000000
+ EF_ARM_ABI_VER5 = 0x05000000
+ EF_ARM_ABI_FLOAT_HARD = 0x00000400
+
+ def __init__(self, file: IO[bytes]) -> None:
+ def unpack(fmt: str) -> int:
+ try:
+ data = file.read(struct.calcsize(fmt))
+ result: Tuple[int, ...] = struct.unpack(fmt, data)
+ except struct.error:
+ raise _ELFFileHeader._InvalidELFFileHeader()
+ return result[0]
+
+ self.e_ident_magic = unpack(">I")
+ if self.e_ident_magic != self.ELF_MAGIC_NUMBER:
+ raise _ELFFileHeader._InvalidELFFileHeader()
+ self.e_ident_class = unpack("B")
+ if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}:
+ raise _ELFFileHeader._InvalidELFFileHeader()
+ self.e_ident_data = unpack("B")
+ if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}:
+ raise _ELFFileHeader._InvalidELFFileHeader()
+ self.e_ident_version = unpack("B")
+ self.e_ident_osabi = unpack("B")
+ self.e_ident_abiversion = unpack("B")
+ self.e_ident_pad = file.read(7)
+ format_h = "<H" if self.e_ident_data == self.ELFDATA2LSB else ">H"
+ format_i = "<I" if self.e_ident_data == self.ELFDATA2LSB else ">I"
+ format_q = "<Q" if self.e_ident_data == self.ELFDATA2LSB else ">Q"
+ format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q
+ self.e_type = unpack(format_h)
+ self.e_machine = unpack(format_h)
+ self.e_version = unpack(format_i)
+ self.e_entry = unpack(format_p)
+ self.e_phoff = unpack(format_p)
+ self.e_shoff = unpack(format_p)
+ self.e_flags = unpack(format_i)
+ self.e_ehsize = unpack(format_h)
+ self.e_phentsize = unpack(format_h)
+ self.e_phnum = unpack(format_h)
+ self.e_shentsize = unpack(format_h)
+ self.e_shnum = unpack(format_h)
+ self.e_shstrndx = unpack(format_h)
+
+
+def _get_elf_header() -> Optional[_ELFFileHeader]:
+ try:
+ with open(sys.executable, "rb") as f:
+ elf_header = _ELFFileHeader(f)
+ except (OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader):
+ return None
+ return elf_header
+
+
+def _is_linux_armhf() -> bool:
+ # hard-float ABI can be detected from the ELF header of the running
+ # process
+ # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
+ elf_header = _get_elf_header()
+ if elf_header is None:
+ return False
+ result = elf_header.e_ident_class == elf_header.ELFCLASS32
+ result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
+ result &= elf_header.e_machine == elf_header.EM_ARM
+ result &= (
+ elf_header.e_flags & elf_header.EF_ARM_ABIMASK
+ ) == elf_header.EF_ARM_ABI_VER5
+ result &= (
+ elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD
+ ) == elf_header.EF_ARM_ABI_FLOAT_HARD
+ return result
+
+
+def _is_linux_i686() -> bool:
+ elf_header = _get_elf_header()
+ if elf_header is None:
+ return False
+ result = elf_header.e_ident_class == elf_header.ELFCLASS32
+ result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
+ result &= elf_header.e_machine == elf_header.EM_386
+ return result
+
+
+def _have_compatible_abi(arch: str) -> bool:
+ if arch == "armv7l":
+ return _is_linux_armhf()
+ if arch == "i686":
+ return _is_linux_i686()
+ return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
+
+
+# If glibc ever changes its major version, we need to know what the last
+# minor version was, so we can build the complete list of all versions.
+# For now, guess what the highest minor version might be, assume it will
+# be 50 for testing. Once this actually happens, update the dictionary
+# with the actual value.
+_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)
+
+
+class _GLibCVersion(NamedTuple):
+ major: int
+ minor: int
+
+
+def _glibc_version_string_confstr() -> Optional[str]:
+ """
+ Primary implementation of glibc_version_string using os.confstr.
+ """
+ # os.confstr is quite a bit faster than ctypes.CDLL. It's also less likely
+ # to be broken or missing. This strategy is used in the standard library
+ # platform module.
+ # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
+ try:
+ # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17".
+ version_string = os.confstr("CS_GNU_LIBC_VERSION")
+ assert version_string is not None
+ _, version = version_string.split()
+ except (AssertionError, AttributeError, OSError, ValueError):
+ # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
+ return None
+ return version
+
+
+def _glibc_version_string_ctypes() -> Optional[str]:
+ """
+ Fallback implementation of glibc_version_string using ctypes.
+ """
+ try:
+ import ctypes
+ except ImportError:
+ return None
+
+ # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
+ # manpage says, "If filename is NULL, then the returned handle is for the
+ # main program". This way we can let the linker do the work to figure out
+ # which libc our process is actually using.
+ #
+ # We must also handle the special case where the executable is not a
+ # dynamically linked executable. This can occur when using musl libc,
+ # for example. In this situation, dlopen() will error, leading to an
+ # OSError. Interestingly, at least in the case of musl, there is no
+ # errno set on the OSError. The single string argument used to construct
+ # OSError comes from libc itself and is therefore not portable to
+ # hard code here. In any case, failure to call dlopen() means we
+ # can't proceed, so we bail on our attempt.
+ try:
+ process_namespace = ctypes.CDLL(None)
+ except OSError:
+ return None
+
+ try:
+ gnu_get_libc_version = process_namespace.gnu_get_libc_version
+ except AttributeError:
+ # Symbol doesn't exist -> therefore, we are not linked to
+ # glibc.
+ return None
+
+ # Call gnu_get_libc_version, which returns a string like "2.5"
+ gnu_get_libc_version.restype = ctypes.c_char_p
+ version_str: str = gnu_get_libc_version()
+ # py2 / py3 compatibility:
+ if not isinstance(version_str, str):
+ version_str = version_str.decode("ascii")
+
+ return version_str
+
+
+def _glibc_version_string() -> Optional[str]:
+ """Returns glibc version string, or None if not using glibc."""
+ return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
+
+
+def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
+ """Parse glibc version.
+
+ We use a regexp instead of str.split because we want to discard any
+ random junk that might come after the minor version -- this might happen
+ in patched/forked versions of glibc (e.g. Linaro's version of glibc
+ uses version strings like "2.20-2014.11"). See gh-3588.
+ """
+ m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
+ if not m:
+ warnings.warn(
+ "Expected glibc version with 2 components major.minor,"
+ " got: %s" % version_str,
+ RuntimeWarning,
+ )
+ return -1, -1
+ return int(m.group("major")), int(m.group("minor"))
+
+
+@functools.lru_cache()
+def _get_glibc_version() -> Tuple[int, int]:
+ version_str = _glibc_version_string()
+ if version_str is None:
+ return (-1, -1)
+ return _parse_glibc_version(version_str)
+
+
+# From PEP 513, PEP 600
+def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
+ sys_glibc = _get_glibc_version()
+ if sys_glibc < version:
+ return False
+ # Check for presence of _manylinux module.
+ try:
+ import _manylinux # noqa
+ except ImportError:
+ return True
+ if hasattr(_manylinux, "manylinux_compatible"):
+ result = _manylinux.manylinux_compatible(version[0], version[1], arch)
+ if result is not None:
+ return bool(result)
+ return True
+ if version == _GLibCVersion(2, 5):
+ if hasattr(_manylinux, "manylinux1_compatible"):
+ return bool(_manylinux.manylinux1_compatible)
+ if version == _GLibCVersion(2, 12):
+ if hasattr(_manylinux, "manylinux2010_compatible"):
+ return bool(_manylinux.manylinux2010_compatible)
+ if version == _GLibCVersion(2, 17):
+ if hasattr(_manylinux, "manylinux2014_compatible"):
+ return bool(_manylinux.manylinux2014_compatible)
+ return True
+
+
+_LEGACY_MANYLINUX_MAP = {
+ # CentOS 7 w/ glibc 2.17 (PEP 599)
+ (2, 17): "manylinux2014",
+ # CentOS 6 w/ glibc 2.12 (PEP 571)
+ (2, 12): "manylinux2010",
+ # CentOS 5 w/ glibc 2.5 (PEP 513)
+ (2, 5): "manylinux1",
+}
+
+
+def platform_tags(linux: str, arch: str) -> Iterator[str]:
+ if not _have_compatible_abi(arch):
+ return
+ # Oldest glibc to be supported regardless of architecture is (2, 17).
+ too_old_glibc2 = _GLibCVersion(2, 16)
+ if arch in {"x86_64", "i686"}:
+ # On x86/i686 also oldest glibc to be supported is (2, 5).
+ too_old_glibc2 = _GLibCVersion(2, 4)
+ current_glibc = _GLibCVersion(*_get_glibc_version())
+ glibc_max_list = [current_glibc]
+ # We can assume compatibility across glibc major versions.
+ # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
+ #
+ # Build a list of maximum glibc versions so that we can
+ # output the canonical list of all glibc from current_glibc
+ # down to too_old_glibc2, including all intermediary versions.
+ for glibc_major in range(current_glibc.major - 1, 1, -1):
+ glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
+ glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
+ for glibc_max in glibc_max_list:
+ if glibc_max.major == too_old_glibc2.major:
+ min_minor = too_old_glibc2.minor
+ else:
+ # For other glibc major versions oldest supported is (x, 0).
+ min_minor = -1
+ for glibc_minor in range(glibc_max.minor, min_minor, -1):
+ glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
+ tag = "manylinux_{}_{}".format(*glibc_version)
+ if _is_compatible(tag, arch, glibc_version):
+ yield linux.replace("linux", tag)
+ # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
+ if glibc_version in _LEGACY_MANYLINUX_MAP:
+ legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
+ if _is_compatible(legacy_tag, arch, glibc_version):
+ yield linux.replace("linux", legacy_tag)
diff --git a/pkg_resources/_vendor/packaging/_musllinux.py b/pkg_resources/_vendor/packaging/_musllinux.py
new file mode 100644
index 0000000..8ac3059
--- /dev/null
+++ b/pkg_resources/_vendor/packaging/_musllinux.py
@@ -0,0 +1,136 @@
+"""PEP 656 support.
+
+This module implements logic to detect if the currently running Python is
+linked against musl, and what musl version is used.
+"""
+
+import contextlib
+import functools
+import operator
+import os
+import re
+import struct
+import subprocess
+import sys
+from typing import IO, Iterator, NamedTuple, Optional, Tuple
+
+
+def _read_unpacked(f: IO[bytes], fmt: str) -> Tuple[int, ...]:
+ return struct.unpack(fmt, f.read(struct.calcsize(fmt)))
+
+
+def _parse_ld_musl_from_elf(f: IO[bytes]) -> Optional[str]:
+ """Detect musl libc location by parsing the Python executable.
+
+ Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
+ ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
+ """
+ f.seek(0)
+ try:
+ ident = _read_unpacked(f, "16B")
+ except struct.error:
+ return None
+ if ident[:4] != tuple(b"\x7fELF"): # Invalid magic, not ELF.
+ return None
+ f.seek(struct.calcsize("HHI"), 1) # Skip file type, machine, and version.
+
+ try:
+ # e_fmt: Format for program header.
+ # p_fmt: Format for section header.
+ # p_idx: Indexes to find p_type, p_offset, and p_filesz.
+ e_fmt, p_fmt, p_idx = {
+ 1: ("IIIIHHH", "IIIIIIII", (0, 1, 4)), # 32-bit.
+ 2: ("QQQIHHH", "IIQQQQQQ", (0, 2, 5)), # 64-bit.
+ }[ident[4]]
+ except KeyError:
+ return None
+ else:
+ p_get = operator.itemgetter(*p_idx)
+
+ # Find the interpreter section and return its content.
+ try:
+ _, e_phoff, _, _, _, e_phentsize, e_phnum = _read_unpacked(f, e_fmt)
+ except struct.error:
+ return None
+ for i in range(e_phnum + 1):
+ f.seek(e_phoff + e_phentsize * i)
+ try:
+ p_type, p_offset, p_filesz = p_get(_read_unpacked(f, p_fmt))
+ except struct.error:
+ return None
+ if p_type != 3: # Not PT_INTERP.
+ continue
+ f.seek(p_offset)
+ interpreter = os.fsdecode(f.read(p_filesz)).strip("\0")
+ if "musl" not in interpreter:
+ return None
+ return interpreter
+ return None
+
+
+class _MuslVersion(NamedTuple):
+ major: int
+ minor: int
+
+
+def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
+ lines = [n for n in (n.strip() for n in output.splitlines()) if n]
+ if len(lines) < 2 or lines[0][:4] != "musl":
+ return None
+ m = re.match(r"Version (\d+)\.(\d+)", lines[1])
+ if not m:
+ return None
+ return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
+
+
+@functools.lru_cache()
+def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
+ """Detect currently-running musl runtime version.
+
+ This is done by checking the specified executable's dynamic linking
+ information, and invoking the loader to parse its output for a version
+ string. If the loader is musl, the output would be something like::
+
+ musl libc (x86_64)
+ Version 1.2.2
+ Dynamic Program Loader
+ """
+ with contextlib.ExitStack() as stack:
+ try:
+ f = stack.enter_context(open(executable, "rb"))
+ except OSError:
+ return None
+ ld = _parse_ld_musl_from_elf(f)
+ if not ld:
+ return None
+ proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True)
+ return _parse_musl_version(proc.stderr)
+
+
+def platform_tags(arch: str) -> Iterator[str]:
+ """Generate musllinux tags compatible to the current platform.
+
+ :param arch: Should be the part of platform tag after the ``linux_``
+ prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a
+ prerequisite for the current platform to be musllinux-compatible.
+
+ :returns: An iterator of compatible musllinux tags.
+ """
+ sys_musl = _get_musl_version(sys.executable)
+ if sys_musl is None: # Python not dynamically linked against musl.
+ return
+ for minor in range(sys_musl.minor, -1, -1):
+ yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
+
+
+if __name__ == "__main__": # pragma: no cover
+ import sysconfig
+
+ plat = sysconfig.get_platform()
+ assert plat.startswith("linux-"), "not linux"
+
+ print("plat:", plat)
+ print("musl:", _get_musl_version(sys.executable))
+ print("tags:", end=" ")
+ for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])):
+ print(t, end="\n ")
diff --git a/pkg_resources/_vendor/packaging/_structures.py b/pkg_resources/_vendor/packaging/_structures.py
index ccc2786..90a6465 100644
--- a/pkg_resources/_vendor/packaging/_structures.py
+++ b/pkg_resources/_vendor/packaging/_structures.py
@@ -1,68 +1,61 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
-from __future__ import absolute_import, division, print_function
-class Infinity(object):
-
- def __repr__(self):
+class InfinityType:
+ def __repr__(self) -> str:
return "Infinity"
- def __hash__(self):
+ def __hash__(self) -> int:
return hash(repr(self))
- def __lt__(self, other):
+ def __lt__(self, other: object) -> bool:
return False
- def __le__(self, other):
+ def __le__(self, other: object) -> bool:
return False
- def __eq__(self, other):
+ def __eq__(self, other: object) -> bool:
return isinstance(other, self.__class__)
- def __ne__(self, other):
- return not isinstance(other, self.__class__)
-
- def __gt__(self, other):
+ def __gt__(self, other: object) -> bool:
return True
- def __ge__(self, other):
+ def __ge__(self, other: object) -> bool:
return True
- def __neg__(self):
+ def __neg__(self: object) -> "NegativeInfinityType":
return NegativeInfinity
-Infinity = Infinity()
+Infinity = InfinityType()
-class NegativeInfinity(object):
- def __repr__(self):
+class NegativeInfinityType:
+ def __repr__(self) -> str:
return "-Infinity"
- def __hash__(self):
+ def __hash__(self) -> int:
return hash(repr(self))
- def __lt__(self, other):
+ def __lt__(self, other: object) -> bool:
return True
- def __le__(self, other):
+ def __le__(self, other: object) -> bool:
return True
- def __eq__(self, other):
+ def __eq__(self, other: object) -> bool:
return isinstance(other, self.__class__)
- def __ne__(self, other):
- return not isinstance(other, self.__class__)
-
- def __gt__(self, other):
+ def __gt__(self, other: object) -> bool:
return False
- def __ge__(self, other):
+ def __ge__(self, other: object) -> bool:
return False
- def __neg__(self):
+ def __neg__(self: object) -> InfinityType:
return Infinity
-NegativeInfinity = NegativeInfinity()
+
+NegativeInfinity = NegativeInfinityType()
diff --git a/pkg_resources/_vendor/packaging/markers.py b/pkg_resources/_vendor/packaging/markers.py
index 892e578..18769b0 100644
--- a/pkg_resources/_vendor/packaging/markers.py
+++ b/pkg_resources/_vendor/packaging/markers.py
@@ -1,26 +1,37 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
-from __future__ import absolute_import, division, print_function
import operator
import os
import platform
import sys
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union
+
+from pkg_resources.extern.pyparsing import ( # noqa: N817
+ Forward,
+ Group,
+ Literal as L,
+ ParseException,
+ ParseResults,
+ QuotedString,
+ ZeroOrMore,
+ stringEnd,
+ stringStart,
+)
-from pkg_resources.extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd
-from pkg_resources.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString
-from pkg_resources.extern.pyparsing import Literal as L # noqa
-
-from ._compat import string_types
-from .specifiers import Specifier, InvalidSpecifier
-
+from .specifiers import InvalidSpecifier, Specifier
__all__ = [
- "InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName",
- "Marker", "default_environment",
+ "InvalidMarker",
+ "UndefinedComparison",
+ "UndefinedEnvironmentName",
+ "Marker",
+ "default_environment",
]
+Operator = Callable[[str, str], bool]
+
class InvalidMarker(ValueError):
"""
@@ -41,78 +52,67 @@ class UndefinedEnvironmentName(ValueError):
"""
-class Node(object):
-
- def __init__(self, value):
+class Node:
+ def __init__(self, value: Any) -> None:
self.value = value
- def __str__(self):
+ def __str__(self) -> str:
return str(self.value)
- def __repr__(self):
- return "<{0}({1!r})>".format(self.__class__.__name__, str(self))
+ def __repr__(self) -> str:
+ return f"<{self.__class__.__name__}('{self}')>"
- def serialize(self):
+ def serialize(self) -> str:
raise NotImplementedError
class Variable(Node):
-
- def serialize(self):
+ def serialize(self) -> str:
return str(self)
class Value(Node):
-
- def serialize(self):
- return '"{0}"'.format(self)
+ def serialize(self) -> str:
+ return f'"{self}"'
class Op(Node):
-
- def serialize(self):
+ def serialize(self) -> str:
return str(self)
VARIABLE = (
- L("implementation_version") |
- L("platform_python_implementation") |
- L("implementation_name") |
- L("python_full_version") |
- L("platform_release") |
- L("platform_version") |
- L("platform_machine") |
- L("platform_system") |
- L("python_version") |
- L("sys_platform") |
- L("os_name") |
- L("os.name") | # PEP-345
- L("sys.platform") | # PEP-345
- L("platform.version") | # PEP-345
- L("platform.machine") | # PEP-345
- L("platform.python_implementation") | # PEP-345
- L("python_implementation") | # undocumented setuptools legacy
- L("extra")
+ L("implementation_version")
+ | L("platform_python_implementation")
+ | L("implementation_name")
+ | L("python_full_version")
+ | L("platform_release")
+ | L("platform_version")
+ | L("platform_machine")
+ | L("platform_system")
+ | L("python_version")
+ | L("sys_platform")
+ | L("os_name")
+ | L("os.name") # PEP-345
+ | L("sys.platform") # PEP-345
+ | L("platform.version") # PEP-345
+ | L("platform.machine") # PEP-345
+ | L("platform.python_implementation") # PEP-345
+ | L("python_implementation") # undocumented setuptools legacy
+ | L("extra") # PEP-508
)
ALIASES = {
- 'os.name': 'os_name',
- 'sys.platform': 'sys_platform',
- 'platform.version': 'platform_version',
- 'platform.machine': 'platform_machine',
- 'platform.python_implementation': 'platform_python_implementation',
- 'python_implementation': 'platform_python_implementation'
+ "os.name": "os_name",
+ "sys.platform": "sys_platform",
+ "platform.version": "platform_version",
+ "platform.machine": "platform_machine",
+ "platform.python_implementation": "platform_python_implementation",
+ "python_implementation": "platform_python_implementation",
}
VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
VERSION_CMP = (
- L("===") |
- L("==") |
- L(">=") |
- L("<=") |
- L("!=") |
- L("~=") |
- L(">") |
- L("<")
+ L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<")
)
MARKER_OP = VERSION_CMP | L("not in") | L("in")
@@ -138,22 +138,28 @@ MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)
MARKER = stringStart + MARKER_EXPR + stringEnd
-def _coerce_parse_result(results):
+def _coerce_parse_result(results: Union[ParseResults, List[Any]]) -> List[Any]:
if isinstance(results, ParseResults):
return [_coerce_parse_result(i) for i in results]
else:
return results
-def _format_marker(marker, first=True):
- assert isinstance(marker, (list, tuple, string_types))
+def _format_marker(
+ marker: Union[List[str], Tuple[Node, ...], str], first: Optional[bool] = True
+) -> str:
+
+ assert isinstance(marker, (list, tuple, str))
# Sometimes we have a structure like [[...]] which is a single item list
# where the single item is itself it's own list. In that case we want skip
# the rest of this function so that we don't get extraneous () on the
# outside.
- if (isinstance(marker, list) and len(marker) == 1 and
- isinstance(marker[0], (list, tuple))):
+ if (
+ isinstance(marker, list)
+ and len(marker) == 1
+ and isinstance(marker[0], (list, tuple))
+ ):
return _format_marker(marker[0])
if isinstance(marker, list):
@@ -168,7 +174,7 @@ def _format_marker(marker, first=True):
return marker
-_operators = {
+_operators: Dict[str, Operator] = {
"in": lambda lhs, rhs: lhs in rhs,
"not in": lambda lhs, rhs: lhs not in rhs,
"<": operator.lt,
@@ -180,7 +186,7 @@ _operators = {
}
-def _eval_op(lhs, op, rhs):
+def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
try:
spec = Specifier("".join([op.serialize(), rhs]))
except InvalidSpecifier:
@@ -188,34 +194,36 @@ def _eval_op(lhs, op, rhs):
else:
return spec.contains(lhs)
- oper = _operators.get(op.serialize())
+ oper: Optional[Operator] = _operators.get(op.serialize())
if oper is None:
- raise UndefinedComparison(
- "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
- )
+ raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
return oper(lhs, rhs)
-_undefined = object()
+class Undefined:
+ pass
+
+
+_undefined = Undefined()
-def _get_env(environment, name):
- value = environment.get(name, _undefined)
+def _get_env(environment: Dict[str, str], name: str) -> str:
+ value: Union[str, Undefined] = environment.get(name, _undefined)
- if value is _undefined:
+ if isinstance(value, Undefined):
raise UndefinedEnvironmentName(
- "{0!r} does not exist in evaluation environment.".format(name)
+ f"{name!r} does not exist in evaluation environment."
)
return value
-def _evaluate_markers(markers, environment):
- groups = [[]]
+def _evaluate_markers(markers: List[Any], environment: Dict[str, str]) -> bool:
+ groups: List[List[bool]] = [[]]
for marker in markers:
- assert isinstance(marker, (list, tuple, string_types))
+ assert isinstance(marker, (list, tuple, str))
if isinstance(marker, list):
groups[-1].append(_evaluate_markers(marker, environment))
@@ -238,22 +246,17 @@ def _evaluate_markers(markers, environment):
return any(all(item) for item in groups)
-def format_full_version(info):
- version = '{0.major}.{0.minor}.{0.micro}'.format(info)
+def format_full_version(info: "sys._version_info") -> str:
+ version = "{0.major}.{0.minor}.{0.micro}".format(info)
kind = info.releaselevel
- if kind != 'final':
+ if kind != "final":
version += kind[0] + str(info.serial)
return version
-def default_environment():
- if hasattr(sys, 'implementation'):
- iver = format_full_version(sys.implementation.version)
- implementation_name = sys.implementation.name
- else:
- iver = '0'
- implementation_name = ''
-
+def default_environment() -> Dict[str, str]:
+ iver = format_full_version(sys.implementation.version)
+ implementation_name = sys.implementation.name
return {
"implementation_name": implementation_name,
"implementation_version": iver,
@@ -264,28 +267,28 @@ def default_environment():
"platform_version": platform.version(),
"python_full_version": platform.python_version(),
"platform_python_implementation": platform.python_implementation(),
- "python_version": platform.python_version()[:3],
+ "python_version": ".".join(platform.python_version_tuple()[:2]),
"sys_platform": sys.platform,
}
-class Marker(object):
-
- def __init__(self, marker):
+class Marker:
+ def __init__(self, marker: str) -> None:
try:
self._markers = _coerce_parse_result(MARKER.parseString(marker))
except ParseException as e:
- err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
- marker, marker[e.loc:e.loc + 8])
- raise InvalidMarker(err_str)
+ raise InvalidMarker(
+ f"Invalid marker: {marker!r}, parse error at "
+ f"{marker[e.loc : e.loc + 8]!r}"
+ )
- def __str__(self):
+ def __str__(self) -> str:
return _format_marker(self._markers)
- def __repr__(self):
- return "<Marker({0!r})>".format(str(self))
+ def __repr__(self) -> str:
+ return f"<Marker('{self}')>"
- def evaluate(self, environment=None):
+ def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
"""Evaluate a marker.
Return the boolean from evaluating the given marker against the
diff --git a/pkg_resources/_vendor/packaging/requirements.py b/pkg_resources/_vendor/packaging/requirements.py
index 0c8c4a3..6af14ec 100644
--- a/pkg_resources/_vendor/packaging/requirements.py
+++ b/pkg_resources/_vendor/packaging/requirements.py
@@ -1,15 +1,24 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
-from __future__ import absolute_import, division, print_function
-import string
import re
-
-from pkg_resources.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException
-from pkg_resources.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine
-from pkg_resources.extern.pyparsing import Literal as L # noqa
-from pkg_resources.extern.six.moves.urllib import parse as urlparse
+import string
+import urllib.parse
+from typing import List, Optional as TOptional, Set
+
+from pkg_resources.extern.pyparsing import ( # noqa
+ Combine,
+ Literal as L,
+ Optional,
+ ParseException,
+ Regex,
+ Word,
+ ZeroOrMore,
+ originalTextFor,
+ stringEnd,
+ stringStart,
+)
from .markers import MARKER_EXPR, Marker
from .specifiers import LegacySpecifier, Specifier, SpecifierSet
@@ -38,8 +47,8 @@ IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
NAME = IDENTIFIER("name")
EXTRA = IDENTIFIER
-URI = Regex(r'[^ ]+')("url")
-URL = (AT + URI)
+URI = Regex(r"[^ ]+")("url")
+URL = AT + URI
EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
@@ -48,31 +57,34 @@ VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
-VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE),
- joinString=",", adjacent=False)("_raw_spec")
-_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
-_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '')
+VERSION_MANY = Combine(
+ VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False
+)("_raw_spec")
+_VERSION_SPEC = Optional((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)
+_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "")
VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
MARKER_EXPR.setParseAction(
- lambda s, l, t: Marker(s[t._original_start:t._original_end])
+ lambda s, l, t: Marker(s[t._original_start : t._original_end])
)
-MARKER_SEPERATOR = SEMICOLON
-MARKER = MARKER_SEPERATOR + MARKER_EXPR
+MARKER_SEPARATOR = SEMICOLON
+MARKER = MARKER_SEPARATOR + MARKER_EXPR
VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
URL_AND_MARKER = URL + Optional(MARKER)
-NAMED_REQUIREMENT = \
- NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
+NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
+# pkg_resources.extern.pyparsing isn't thread safe during initialization, so we do it eagerly, see
+# issue #104
+REQUIREMENT.parseString("x[]")
-class Requirement(object):
+class Requirement:
"""Parse a requirement.
Parse a given requirement string into its parts, such as name, specifier,
@@ -85,43 +97,50 @@ class Requirement(object):
# the thing as well as the version? What about the markers?
# TODO: Can we normalize the name and extra name?
- def __init__(self, requirement_string):
+ def __init__(self, requirement_string: str) -> None:
try:
req = REQUIREMENT.parseString(requirement_string)
except ParseException as e:
raise InvalidRequirement(
- "Invalid requirement, parse error at \"{0!r}\"".format(
- requirement_string[e.loc:e.loc + 8]))
+ f'Parse error at "{ requirement_string[e.loc : e.loc + 8]!r}": {e.msg}'
+ )
- self.name = req.name
+ self.name: str = req.name
if req.url:
- parsed_url = urlparse.urlparse(req.url)
- if not (parsed_url.scheme and parsed_url.netloc) or (
- not parsed_url.scheme and not parsed_url.netloc):
- raise InvalidRequirement("Invalid URL given")
- self.url = req.url
+ parsed_url = urllib.parse.urlparse(req.url)
+ if parsed_url.scheme == "file":
+ if urllib.parse.urlunparse(parsed_url) != req.url:
+ raise InvalidRequirement("Invalid URL given")
+ elif not (parsed_url.scheme and parsed_url.netloc) or (
+ not parsed_url.scheme and not parsed_url.netloc
+ ):
+ raise InvalidRequirement(f"Invalid URL: {req.url}")
+ self.url: TOptional[str] = req.url
else:
self.url = None
- self.extras = set(req.extras.asList() if req.extras else [])
- self.specifier = SpecifierSet(req.specifier)
- self.marker = req.marker if req.marker else None
+ self.extras: Set[str] = set(req.extras.asList() if req.extras else [])
+ self.specifier: SpecifierSet = SpecifierSet(req.specifier)
+ self.marker: TOptional[Marker] = req.marker if req.marker else None
- def __str__(self):
- parts = [self.name]
+ def __str__(self) -> str:
+ parts: List[str] = [self.name]
if self.extras:
- parts.append("[{0}]".format(",".join(sorted(self.extras))))
+ formatted_extras = ",".join(sorted(self.extras))
+ parts.append(f"[{formatted_extras}]")
if self.specifier:
parts.append(str(self.specifier))
if self.url:
- parts.append("@ {0}".format(self.url))
+ parts.append(f"@ {self.url}")
+ if self.marker:
+ parts.append(" ")
if self.marker:
- parts.append("; {0}".format(self.marker))
+ parts.append(f"; {self.marker}")
return "".join(parts)
- def __repr__(self):
- return "<Requirement({0!r})>".format(str(self))
+ def __repr__(self) -> str:
+ return f"<Requirement('{self}')>"
diff --git a/pkg_resources/_vendor/packaging/specifiers.py b/pkg_resources/_vendor/packaging/specifiers.py
index 7f5a76c..0e218a6 100644
--- a/pkg_resources/_vendor/packaging/specifiers.py
+++ b/pkg_resources/_vendor/packaging/specifiers.py
@@ -1,15 +1,33 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
-from __future__ import absolute_import, division, print_function
import abc
import functools
import itertools
import re
-
-from ._compat import string_types, with_metaclass
-from .version import Version, LegacyVersion, parse
+import warnings
+from typing import (
+ Callable,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Pattern,
+ Set,
+ Tuple,
+ TypeVar,
+ Union,
+)
+
+from .utils import canonicalize_version
+from .version import LegacyVersion, Version, parse
+
+ParsedVersion = Union[Version, LegacyVersion]
+UnparsedVersion = Union[Version, LegacyVersion, str]
+VersionTypeVar = TypeVar("VersionTypeVar", bound=UnparsedVersion)
+CallableOperator = Callable[[ParsedVersion, str], bool]
class InvalidSpecifier(ValueError):
@@ -18,57 +36,51 @@ class InvalidSpecifier(ValueError):
"""
-class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
-
+class BaseSpecifier(metaclass=abc.ABCMeta):
@abc.abstractmethod
- def __str__(self):
+ def __str__(self) -> str:
"""
Returns the str representation of this Specifier like object. This
should be representative of the Specifier itself.
"""
@abc.abstractmethod
- def __hash__(self):
+ def __hash__(self) -> int:
"""
Returns a hash value for this Specifier like object.
"""
@abc.abstractmethod
- def __eq__(self, other):
+ def __eq__(self, other: object) -> bool:
"""
Returns a boolean representing whether or not the two Specifier like
objects are equal.
"""
- @abc.abstractmethod
- def __ne__(self, other):
- """
- Returns a boolean representing whether or not the two Specifier like
- objects are not equal.
- """
-
@abc.abstractproperty
- def prereleases(self):
+ def prereleases(self) -> Optional[bool]:
"""
Returns whether or not pre-releases as a whole are allowed by this
specifier.
"""
@prereleases.setter
- def prereleases(self, value):
+ def prereleases(self, value: bool) -> None:
"""
Sets whether or not pre-releases as a whole are allowed by this
specifier.
"""
@abc.abstractmethod
- def contains(self, item, prereleases=None):
+ def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
"""
Determines if the given item is contained within this specifier.
"""
@abc.abstractmethod
- def filter(self, iterable, prereleases=None):
+ def filter(
+ self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
+ ) -> Iterable[VersionTypeVar]:
"""
Takes an iterable of items and filters them so that only items which
are contained within this specifier are allowed in it.
@@ -77,14 +89,15 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
class _IndividualSpecifier(BaseSpecifier):
- _operators = {}
+ _operators: Dict[str, str] = {}
+ _regex: Pattern[str]
- def __init__(self, spec="", prereleases=None):
+ def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
match = self._regex.search(spec)
if not match:
- raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))
+ raise InvalidSpecifier(f"Invalid specifier: '{spec}'")
- self._spec = (
+ self._spec: Tuple[str, str] = (
match.group("operator").strip(),
match.group("version").strip(),
)
@@ -92,94 +105,93 @@ class _IndividualSpecifier(BaseSpecifier):
# Store whether or not this Specifier should accept prereleases
self._prereleases = prereleases
- def __repr__(self):
+ def __repr__(self) -> str:
pre = (
- ", prereleases={0!r}".format(self.prereleases)
+ f", prereleases={self.prereleases!r}"
if self._prereleases is not None
else ""
)
- return "<{0}({1!r}{2})>".format(
- self.__class__.__name__,
- str(self),
- pre,
- )
-
- def __str__(self):
- return "{0}{1}".format(*self._spec)
+ return f"<{self.__class__.__name__}({str(self)!r}{pre})>"
- def __hash__(self):
- return hash(self._spec)
+ def __str__(self) -> str:
+ return "{}{}".format(*self._spec)
- def __eq__(self, other):
- if isinstance(other, string_types):
- try:
- other = self.__class__(other)
- except InvalidSpecifier:
- return NotImplemented
- elif not isinstance(other, self.__class__):
- return NotImplemented
+ @property
+ def _canonical_spec(self) -> Tuple[str, str]:
+ return self._spec[0], canonicalize_version(self._spec[1])
- return self._spec == other._spec
+ def __hash__(self) -> int:
+ return hash(self._canonical_spec)
- def __ne__(self, other):
- if isinstance(other, string_types):
+ def __eq__(self, other: object) -> bool:
+ if isinstance(other, str):
try:
- other = self.__class__(other)
+ other = self.__class__(str(other))
except InvalidSpecifier:
return NotImplemented
elif not isinstance(other, self.__class__):
return NotImplemented
- return self._spec != other._spec
+ return self._canonical_spec == other._canonical_spec
- def _get_operator(self, op):
- return getattr(self, "_compare_{0}".format(self._operators[op]))
+ def _get_operator(self, op: str) -> CallableOperator:
+ operator_callable: CallableOperator = getattr(
+ self, f"_compare_{self._operators[op]}"
+ )
+ return operator_callable
- def _coerce_version(self, version):
+ def _coerce_version(self, version: UnparsedVersion) -> ParsedVersion:
if not isinstance(version, (LegacyVersion, Version)):
version = parse(version)
return version
@property
- def operator(self):
+ def operator(self) -> str:
return self._spec[0]
@property
- def version(self):
+ def version(self) -> str:
return self._spec[1]
@property
- def prereleases(self):
+ def prereleases(self) -> Optional[bool]:
return self._prereleases
@prereleases.setter
- def prereleases(self, value):
+ def prereleases(self, value: bool) -> None:
self._prereleases = value
- def __contains__(self, item):
+ def __contains__(self, item: str) -> bool:
return self.contains(item)
- def contains(self, item, prereleases=None):
+ def contains(
+ self, item: UnparsedVersion, prereleases: Optional[bool] = None
+ ) -> bool:
+
# Determine if prereleases are to be allowed or not.
if prereleases is None:
prereleases = self.prereleases
# Normalize item to a Version or LegacyVersion, this allows us to have
# a shortcut for ``"2.0" in Specifier(">=2")
- item = self._coerce_version(item)
+ normalized_item = self._coerce_version(item)
# Determine if we should be supporting prereleases in this specifier
# or not, if we do not support prereleases than we can short circuit
# logic if this version is a prereleases.
- if item.is_prerelease and not prereleases:
+ if normalized_item.is_prerelease and not prereleases:
return False
# Actually do the comparison to determine if this item is contained
# within this Specifier or not.
- return self._get_operator(self.operator)(item, self.version)
+ operator_callable: CallableOperator = self._get_operator(self.operator)
+ return operator_callable(normalized_item, self.version)
+
+ def filter(
+ self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
+ ) -> Iterable[VersionTypeVar]:
- def filter(self, iterable, prereleases=None):
yielded = False
found_prereleases = []
@@ -192,13 +204,14 @@ class _IndividualSpecifier(BaseSpecifier):
if self.contains(parsed_version, **kw):
# If our version is a prerelease, and we were not set to allow
- # prereleases, then we'll store it for later incase nothing
+ # prereleases, then we'll store it for later in case nothing
# else matches this specifier.
- if (parsed_version.is_prerelease and not
- (prereleases or self.prereleases)):
+ if parsed_version.is_prerelease and not (
+ prereleases or self.prereleases
+ ):
found_prereleases.append(version)
# Either this is not a prerelease, or we should have been
- # accepting prereleases from the begining.
+ # accepting prereleases from the beginning.
else:
yielded = True
yield version
@@ -213,8 +226,7 @@ class _IndividualSpecifier(BaseSpecifier):
class LegacySpecifier(_IndividualSpecifier):
- _regex_str = (
- r"""
+ _regex_str = r"""
(?P<operator>(==|!=|<=|>=|<|>))
\s*
(?P<version>
@@ -225,10 +237,8 @@ class LegacySpecifier(_IndividualSpecifier):
# them, and a comma since it's a version separator.
)
"""
- )
- _regex = re.compile(
- r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+ _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
_operators = {
"==": "equal",
@@ -239,43 +249,56 @@ class LegacySpecifier(_IndividualSpecifier):
">": "greater_than",
}
- def _coerce_version(self, version):
+ def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
+ super().__init__(spec, prereleases)
+
+ warnings.warn(
+ "Creating a LegacyVersion has been deprecated and will be "
+ "removed in the next major release",
+ DeprecationWarning,
+ )
+
+ def _coerce_version(self, version: UnparsedVersion) -> LegacyVersion:
if not isinstance(version, LegacyVersion):
version = LegacyVersion(str(version))
return version
- def _compare_equal(self, prospective, spec):
+ def _compare_equal(self, prospective: LegacyVersion, spec: str) -> bool:
return prospective == self._coerce_version(spec)
- def _compare_not_equal(self, prospective, spec):
+ def _compare_not_equal(self, prospective: LegacyVersion, spec: str) -> bool:
return prospective != self._coerce_version(spec)
- def _compare_less_than_equal(self, prospective, spec):
+ def _compare_less_than_equal(self, prospective: LegacyVersion, spec: str) -> bool:
return prospective <= self._coerce_version(spec)
- def _compare_greater_than_equal(self, prospective, spec):
+ def _compare_greater_than_equal(
+ self, prospective: LegacyVersion, spec: str
+ ) -> bool:
return prospective >= self._coerce_version(spec)
- def _compare_less_than(self, prospective, spec):
+ def _compare_less_than(self, prospective: LegacyVersion, spec: str) -> bool:
return prospective < self._coerce_version(spec)
- def _compare_greater_than(self, prospective, spec):
+ def _compare_greater_than(self, prospective: LegacyVersion, spec: str) -> bool:
return prospective > self._coerce_version(spec)
-def _require_version_compare(fn):
+def _require_version_compare(
+ fn: Callable[["Specifier", ParsedVersion, str], bool]
+) -> Callable[["Specifier", ParsedVersion, str], bool]:
@functools.wraps(fn)
- def wrapped(self, prospective, spec):
+ def wrapped(self: "Specifier", prospective: ParsedVersion, spec: str) -> bool:
if not isinstance(prospective, Version):
return False
return fn(self, prospective, spec)
+
return wrapped
class Specifier(_IndividualSpecifier):
- _regex_str = (
- r"""
+ _regex_str = r"""
(?P<operator>(~=|==|!=|<=|>=|<|>|===))
(?P<version>
(?:
@@ -367,10 +390,8 @@ class Specifier(_IndividualSpecifier):
)
)
"""
- )
- _regex = re.compile(
- r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+ _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
_operators = {
"~=": "compatible",
@@ -384,7 +405,8 @@ class Specifier(_IndividualSpecifier):
}
@_require_version_compare
- def _compare_compatible(self, prospective, spec):
+ def _compare_compatible(self, prospective: ParsedVersion, spec: str) -> bool:
+
# Compatible releases have an equivalent combination of >= and ==. That
# is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
# implement this in terms of the other specifiers instead of
@@ -392,76 +414,86 @@ class Specifier(_IndividualSpecifier):
# the other specifiers.
# We want everything but the last item in the version, but we want to
- # ignore post and dev releases and we want to treat the pre-release as
- # it's own separate segment.
+ # ignore suffix segments.
prefix = ".".join(
- list(
- itertools.takewhile(
- lambda x: (not x.startswith("post") and not
- x.startswith("dev")),
- _version_split(spec),
- )
- )[:-1]
+ list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
)
# Add the prefix notation to the end of our string
prefix += ".*"
- return (self._get_operator(">=")(prospective, spec) and
- self._get_operator("==")(prospective, prefix))
+ return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
+ prospective, prefix
+ )
@_require_version_compare
- def _compare_equal(self, prospective, spec):
+ def _compare_equal(self, prospective: ParsedVersion, spec: str) -> bool:
+
# We need special logic to handle prefix matching
if spec.endswith(".*"):
# In the case of prefix matching we want to ignore local segment.
prospective = Version(prospective.public)
# Split the spec out by dots, and pretend that there is an implicit
# dot in between a release segment and a pre-release segment.
- spec = _version_split(spec[:-2]) # Remove the trailing .*
+ split_spec = _version_split(spec[:-2]) # Remove the trailing .*
# Split the prospective version out by dots, and pretend that there
# is an implicit dot in between a release segment and a pre-release
# segment.
- prospective = _version_split(str(prospective))
+ split_prospective = _version_split(str(prospective))
# Shorten the prospective version to be the same length as the spec
# so that we can determine if the specifier is a prefix of the
# prospective version or not.
- prospective = prospective[:len(spec)]
+ shortened_prospective = split_prospective[: len(split_spec)]
# Pad out our two sides with zeros so that they both equal the same
# length.
- spec, prospective = _pad_version(spec, prospective)
+ padded_spec, padded_prospective = _pad_version(
+ split_spec, shortened_prospective
+ )
+
+ return padded_prospective == padded_spec
else:
# Convert our spec string into a Version
- spec = Version(spec)
+ spec_version = Version(spec)
# If the specifier does not have a local segment, then we want to
# act as if the prospective version also does not have a local
# segment.
- if not spec.local:
+ if not spec_version.local:
prospective = Version(prospective.public)
- return prospective == spec
+ return prospective == spec_version
@_require_version_compare
- def _compare_not_equal(self, prospective, spec):
+ def _compare_not_equal(self, prospective: ParsedVersion, spec: str) -> bool:
return not self._compare_equal(prospective, spec)
@_require_version_compare
- def _compare_less_than_equal(self, prospective, spec):
- return prospective <= Version(spec)
+ def _compare_less_than_equal(self, prospective: ParsedVersion, spec: str) -> bool:
+
+ # NB: Local version identifiers are NOT permitted in the version
+ # specifier, so local version labels can be universally removed from
+ # the prospective version.
+ return Version(prospective.public) <= Version(spec)
@_require_version_compare
- def _compare_greater_than_equal(self, prospective, spec):
- return prospective >= Version(spec)
+ def _compare_greater_than_equal(
+ self, prospective: ParsedVersion, spec: str
+ ) -> bool:
+
+ # NB: Local version identifiers are NOT permitted in the version
+ # specifier, so local version labels can be universally removed from
+ # the prospective version.
+ return Version(prospective.public) >= Version(spec)
@_require_version_compare
- def _compare_less_than(self, prospective, spec):
+ def _compare_less_than(self, prospective: ParsedVersion, spec_str: str) -> bool:
+
# Convert our spec to a Version instance, since we'll want to work with
# it as a version.
- spec = Version(spec)
+ spec = Version(spec_str)
# Check to see if the prospective version is less than the spec
# version. If it's not we can short circuit and just return False now
@@ -483,10 +515,11 @@ class Specifier(_IndividualSpecifier):
return True
@_require_version_compare
- def _compare_greater_than(self, prospective, spec):
+ def _compare_greater_than(self, prospective: ParsedVersion, spec_str: str) -> bool:
+
# Convert our spec to a Version instance, since we'll want to work with
# it as a version.
- spec = Version(spec)
+ spec = Version(spec_str)
# Check to see if the prospective version is greater than the spec
# version. If it's not we can short circuit and just return False now
@@ -503,7 +536,7 @@ class Specifier(_IndividualSpecifier):
return False
# Ensure that we do not allow a local version of the version mentioned
- # in the specifier, which is techincally greater than, to match.
+ # in the specifier, which is technically greater than, to match.
if prospective.local is not None:
if Version(prospective.base_version) == Version(spec.base_version):
return False
@@ -513,11 +546,12 @@ class Specifier(_IndividualSpecifier):
# same version in the spec.
return True
- def _compare_arbitrary(self, prospective, spec):
+ def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
return str(prospective).lower() == str(spec).lower()
@property
- def prereleases(self):
+ def prereleases(self) -> bool:
+
# If there is an explicit prereleases set for this, then we'll just
# blindly use that.
if self._prereleases is not None:
@@ -541,15 +575,15 @@ class Specifier(_IndividualSpecifier):
return False
@prereleases.setter
- def prereleases(self, value):
+ def prereleases(self, value: bool) -> None:
self._prereleases = value
_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
-def _version_split(version):
- result = []
+def _version_split(version: str) -> List[str]:
+ result: List[str] = []
for item in version.split("."):
match = _prefix_regex.search(item)
if match:
@@ -559,7 +593,13 @@ def _version_split(version):
return result
-def _pad_version(left, right):
+def _is_not_suffix(segment: str) -> bool:
+ return not any(
+ segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
+ )
+
+
+def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
left_split, right_split = [], []
# Get the release segment of our versions
@@ -567,36 +607,29 @@ def _pad_version(left, right):
right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
# Get the rest of our versions
- left_split.append(left[len(left_split[0]):])
- right_split.append(right[len(right_split[0]):])
+ left_split.append(left[len(left_split[0]) :])
+ right_split.append(right[len(right_split[0]) :])
# Insert our padding
- left_split.insert(
- 1,
- ["0"] * max(0, len(right_split[0]) - len(left_split[0])),
- )
- right_split.insert(
- 1,
- ["0"] * max(0, len(left_split[0]) - len(right_split[0])),
- )
+ left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
+ right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
- return (
- list(itertools.chain(*left_split)),
- list(itertools.chain(*right_split)),
- )
+ return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
class SpecifierSet(BaseSpecifier):
+ def __init__(
+ self, specifiers: str = "", prereleases: Optional[bool] = None
+ ) -> None:
- def __init__(self, specifiers="", prereleases=None):
- # Split on , to break each indidivual specifier into it's own item, and
+ # Split on , to break each individual specifier into it's own item, and
# strip each item to remove leading/trailing whitespace.
- specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
+ split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
# Parsed each individual specifier, attempting first to make it a
# Specifier and falling back to a LegacySpecifier.
- parsed = set()
- for specifier in specifiers:
+ parsed: Set[_IndividualSpecifier] = set()
+ for specifier in split_specifiers:
try:
parsed.add(Specifier(specifier))
except InvalidSpecifier:
@@ -609,23 +642,23 @@ class SpecifierSet(BaseSpecifier):
# we accept prereleases or not.
self._prereleases = prereleases
- def __repr__(self):
+ def __repr__(self) -> str:
pre = (
- ", prereleases={0!r}".format(self.prereleases)
+ f", prereleases={self.prereleases!r}"
if self._prereleases is not None
else ""
)
- return "<SpecifierSet({0!r}{1})>".format(str(self), pre)
+ return f"<SpecifierSet({str(self)!r}{pre})>"
- def __str__(self):
+ def __str__(self) -> str:
return ",".join(sorted(str(s) for s in self._specs))
- def __hash__(self):
+ def __hash__(self) -> int:
return hash(self._specs)
- def __and__(self, other):
- if isinstance(other, string_types):
+ def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
+ if isinstance(other, str):
other = SpecifierSet(other)
elif not isinstance(other, SpecifierSet):
return NotImplemented
@@ -647,34 +680,23 @@ class SpecifierSet(BaseSpecifier):
return specifier
- def __eq__(self, other):
- if isinstance(other, string_types):
- other = SpecifierSet(other)
- elif isinstance(other, _IndividualSpecifier):
+ def __eq__(self, other: object) -> bool:
+ if isinstance(other, (str, _IndividualSpecifier)):
other = SpecifierSet(str(other))
elif not isinstance(other, SpecifierSet):
return NotImplemented
return self._specs == other._specs
- def __ne__(self, other):
- if isinstance(other, string_types):
- other = SpecifierSet(other)
- elif isinstance(other, _IndividualSpecifier):
- other = SpecifierSet(str(other))
- elif not isinstance(other, SpecifierSet):
- return NotImplemented
-
- return self._specs != other._specs
-
- def __len__(self):
+ def __len__(self) -> int:
return len(self._specs)
- def __iter__(self):
+ def __iter__(self) -> Iterator[_IndividualSpecifier]:
return iter(self._specs)
@property
- def prereleases(self):
+ def prereleases(self) -> Optional[bool]:
+
# If we have been given an explicit prerelease modifier, then we'll
# pass that through here.
if self._prereleases is not None:
@@ -691,13 +713,16 @@ class SpecifierSet(BaseSpecifier):
return any(s.prereleases for s in self._specs)
@prereleases.setter
- def prereleases(self, value):
+ def prereleases(self, value: bool) -> None:
self._prereleases = value
- def __contains__(self, item):
+ def __contains__(self, item: UnparsedVersion) -> bool:
return self.contains(item)
- def contains(self, item, prereleases=None):
+ def contains(
+ self, item: UnparsedVersion, prereleases: Optional[bool] = None
+ ) -> bool:
+
# Ensure that our item is a Version or LegacyVersion instance.
if not isinstance(item, (LegacyVersion, Version)):
item = parse(item)
@@ -721,12 +746,12 @@ class SpecifierSet(BaseSpecifier):
# given version is contained within all of them.
# Note: This use of all() here means that an empty set of specifiers
# will always return True, this is an explicit design decision.
- return all(
- s.contains(item, prereleases=prereleases)
- for s in self._specs
- )
+ return all(s.contains(item, prereleases=prereleases) for s in self._specs)
+
+ def filter(
+ self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
+ ) -> Iterable[VersionTypeVar]:
- def filter(self, iterable, prereleases=None):
# Determine if we're forcing a prerelease or not, if we're not forcing
# one for this particular filter call, then we'll use whatever the
# SpecifierSet thinks for whether or not we should support prereleases.
@@ -744,8 +769,11 @@ class SpecifierSet(BaseSpecifier):
# which will filter out any pre-releases, unless there are no final
# releases, and which will filter out LegacyVersion in general.
else:
- filtered = []
- found_prereleases = []
+ filtered: List[VersionTypeVar] = []
+ found_prereleases: List[VersionTypeVar] = []
+
+ item: UnparsedVersion
+ parsed_version: Union[Version, LegacyVersion]
for item in iterable:
# Ensure that we some kind of Version class for this item.
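A small sketch of the SpecifierSet behaviour that the annotated contains()/filter() methods above expose, including the prerelease handling (standalone packaging import path; the version strings are only examples):

# Illustrative only: membership and filtering against a specifier set.
from packaging.specifiers import SpecifierSet
from packaging.version import Version

spec = SpecifierSet(">=1.0,<2.0")
print(Version("1.4") in spec)                            # True
print(spec.contains("2.0"))                              # False
print(spec.contains("1.5rc1"))                           # False, prereleases excluded by default
print(spec.contains("1.5rc1", prereleases=True))         # True
print(list(spec.filter(["0.9", "1.2", "1.9", "2.1"])))   # ['1.2', '1.9']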
diff --git a/pkg_resources/_vendor/packaging/tags.py b/pkg_resources/_vendor/packaging/tags.py
new file mode 100644
index 0000000..9a3d25a
--- /dev/null
+++ b/pkg_resources/_vendor/packaging/tags.py
@@ -0,0 +1,487 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import logging
+import platform
+import sys
+import sysconfig
+from importlib.machinery import EXTENSION_SUFFIXES
+from typing import (
+ Dict,
+ FrozenSet,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Sequence,
+ Tuple,
+ Union,
+ cast,
+)
+
+from . import _manylinux, _musllinux
+
+logger = logging.getLogger(__name__)
+
+PythonVersion = Sequence[int]
+MacVersion = Tuple[int, int]
+
+INTERPRETER_SHORT_NAMES: Dict[str, str] = {
+ "python": "py", # Generic.
+ "cpython": "cp",
+ "pypy": "pp",
+ "ironpython": "ip",
+ "jython": "jy",
+}
+
+
+_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32
+
+
+class Tag:
+ """
+ A representation of the tag triple for a wheel.
+
+ Instances are considered immutable and thus are hashable. Equality checking
+ is also supported.
+ """
+
+ __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]
+
+ def __init__(self, interpreter: str, abi: str, platform: str) -> None:
+ self._interpreter = interpreter.lower()
+ self._abi = abi.lower()
+ self._platform = platform.lower()
+ # The __hash__ of every single element in a Set[Tag] will be evaluated each time
+ # that a set calls its `.disjoint()` method, which may be called hundreds of
+ # times when scanning a page of links for packages with tags matching that
+ # Set[Tag]. Pre-computing the value here produces significant speedups for
+ # downstream consumers.
+ self._hash = hash((self._interpreter, self._abi, self._platform))
+
+ @property
+ def interpreter(self) -> str:
+ return self._interpreter
+
+ @property
+ def abi(self) -> str:
+ return self._abi
+
+ @property
+ def platform(self) -> str:
+ return self._platform
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, Tag):
+ return NotImplemented
+
+ return (
+ (self._hash == other._hash) # Short-circuit ASAP for perf reasons.
+ and (self._platform == other._platform)
+ and (self._abi == other._abi)
+ and (self._interpreter == other._interpreter)
+ )
+
+ def __hash__(self) -> int:
+ return self._hash
+
+ def __str__(self) -> str:
+ return f"{self._interpreter}-{self._abi}-{self._platform}"
+
+ def __repr__(self) -> str:
+ return f"<{self} @ {id(self)}>"
+
+
+def parse_tag(tag: str) -> FrozenSet[Tag]:
+ """
+ Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.
+
+ Returning a set is required due to the possibility that the tag is a
+ compressed tag set.
+ """
+ tags = set()
+ interpreters, abis, platforms = tag.split("-")
+ for interpreter in interpreters.split("."):
+ for abi in abis.split("."):
+ for platform_ in platforms.split("."):
+ tags.add(Tag(interpreter, abi, platform_))
+ return frozenset(tags)
+
+
+def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
+ value = sysconfig.get_config_var(name)
+ if value is None and warn:
+ logger.debug(
+ "Config variable '%s' is unset, Python ABI tag may be incorrect", name
+ )
+ return value
+
+
+def _normalize_string(string: str) -> str:
+ return string.replace(".", "_").replace("-", "_")
+
+
+def _abi3_applies(python_version: PythonVersion) -> bool:
+ """
+ Determine if the Python version supports abi3.
+
+ PEP 384 was first implemented in Python 3.2.
+ """
+ return len(python_version) > 1 and tuple(python_version) >= (3, 2)
+
+
+def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
+ py_version = tuple(py_version) # To allow for version comparison.
+ abis = []
+ version = _version_nodot(py_version[:2])
+ debug = pymalloc = ucs4 = ""
+ with_debug = _get_config_var("Py_DEBUG", warn)
+ has_refcount = hasattr(sys, "gettotalrefcount")
+ # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
+ # extension modules is the best option.
+ # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
+ has_ext = "_d.pyd" in EXTENSION_SUFFIXES
+ if with_debug or (with_debug is None and (has_refcount or has_ext)):
+ debug = "d"
+ if py_version < (3, 8):
+ with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
+ if with_pymalloc or with_pymalloc is None:
+ pymalloc = "m"
+ if py_version < (3, 3):
+ unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
+ if unicode_size == 4 or (
+ unicode_size is None and sys.maxunicode == 0x10FFFF
+ ):
+ ucs4 = "u"
+ elif debug:
+ # Debug builds can also load "normal" extension modules.
+ # We can also assume no UCS-4 or pymalloc requirement.
+ abis.append(f"cp{version}")
+ abis.insert(
+ 0,
+ "cp{version}{debug}{pymalloc}{ucs4}".format(
+ version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
+ ),
+ )
+ return abis
+
+
+def cpython_tags(
+ python_version: Optional[PythonVersion] = None,
+ abis: Optional[Iterable[str]] = None,
+ platforms: Optional[Iterable[str]] = None,
+ *,
+ warn: bool = False,
+) -> Iterator[Tag]:
+ """
+ Yields the tags for a CPython interpreter.
+
+ The tags consist of:
+ - cp<python_version>-<abi>-<platform>
+ - cp<python_version>-abi3-<platform>
+ - cp<python_version>-none-<platform>
+ - cp<less than python_version>-abi3-<platform> # Older Python versions down to 3.2.
+
+ If python_version only specifies a major version then user-provided ABIs and
+    the 'none' ABI tag will be used.
+
+ If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
+ their normal position and not at the beginning.
+ """
+ if not python_version:
+ python_version = sys.version_info[:2]
+
+ interpreter = f"cp{_version_nodot(python_version[:2])}"
+
+ if abis is None:
+ if len(python_version) > 1:
+ abis = _cpython_abis(python_version, warn)
+ else:
+ abis = []
+ abis = list(abis)
+ # 'abi3' and 'none' are explicitly handled later.
+ for explicit_abi in ("abi3", "none"):
+ try:
+ abis.remove(explicit_abi)
+ except ValueError:
+ pass
+
+ platforms = list(platforms or platform_tags())
+ for abi in abis:
+ for platform_ in platforms:
+ yield Tag(interpreter, abi, platform_)
+ if _abi3_applies(python_version):
+ yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
+ yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)
+
+ if _abi3_applies(python_version):
+ for minor_version in range(python_version[1] - 1, 1, -1):
+ for platform_ in platforms:
+ interpreter = "cp{version}".format(
+ version=_version_nodot((python_version[0], minor_version))
+ )
+ yield Tag(interpreter, "abi3", platform_)
+
+
+def _generic_abi() -> Iterator[str]:
+ abi = sysconfig.get_config_var("SOABI")
+ if abi:
+ yield _normalize_string(abi)
+
+
+def generic_tags(
+ interpreter: Optional[str] = None,
+ abis: Optional[Iterable[str]] = None,
+ platforms: Optional[Iterable[str]] = None,
+ *,
+ warn: bool = False,
+) -> Iterator[Tag]:
+ """
+ Yields the tags for a generic interpreter.
+
+ The tags consist of:
+ - <interpreter>-<abi>-<platform>
+
+ The "none" ABI will be added if it was not explicitly provided.
+ """
+ if not interpreter:
+ interp_name = interpreter_name()
+ interp_version = interpreter_version(warn=warn)
+ interpreter = "".join([interp_name, interp_version])
+ if abis is None:
+ abis = _generic_abi()
+ platforms = list(platforms or platform_tags())
+ abis = list(abis)
+ if "none" not in abis:
+ abis.append("none")
+ for abi in abis:
+ for platform_ in platforms:
+ yield Tag(interpreter, abi, platform_)
+
+
+def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
+ """
+ Yields Python versions in descending order.
+
+ After the latest version, the major-only version will be yielded, and then
+ all previous versions of that major version.
+ """
+ if len(py_version) > 1:
+ yield f"py{_version_nodot(py_version[:2])}"
+ yield f"py{py_version[0]}"
+ if len(py_version) > 1:
+ for minor in range(py_version[1] - 1, -1, -1):
+ yield f"py{_version_nodot((py_version[0], minor))}"
+
+
+def compatible_tags(
+ python_version: Optional[PythonVersion] = None,
+ interpreter: Optional[str] = None,
+ platforms: Optional[Iterable[str]] = None,
+) -> Iterator[Tag]:
+ """
+ Yields the sequence of tags that are compatible with a specific version of Python.
+
+ The tags consist of:
+ - py*-none-<platform>
+ - <interpreter>-none-any # ... if `interpreter` is provided.
+ - py*-none-any
+ """
+ if not python_version:
+ python_version = sys.version_info[:2]
+ platforms = list(platforms or platform_tags())
+ for version in _py_interpreter_range(python_version):
+ for platform_ in platforms:
+ yield Tag(version, "none", platform_)
+ if interpreter:
+ yield Tag(interpreter, "none", "any")
+ for version in _py_interpreter_range(python_version):
+ yield Tag(version, "none", "any")
+
+
+def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
+ if not is_32bit:
+ return arch
+
+ if arch.startswith("ppc"):
+ return "ppc"
+
+ return "i386"
+
+
+def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
+ formats = [cpu_arch]
+ if cpu_arch == "x86_64":
+ if version < (10, 4):
+ return []
+ formats.extend(["intel", "fat64", "fat32"])
+
+ elif cpu_arch == "i386":
+ if version < (10, 4):
+ return []
+ formats.extend(["intel", "fat32", "fat"])
+
+ elif cpu_arch == "ppc64":
+ # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
+ if version > (10, 5) or version < (10, 4):
+ return []
+ formats.append("fat64")
+
+ elif cpu_arch == "ppc":
+ if version > (10, 6):
+ return []
+ formats.extend(["fat32", "fat"])
+
+ if cpu_arch in {"arm64", "x86_64"}:
+ formats.append("universal2")
+
+ if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
+ formats.append("universal")
+
+ return formats
+
+
+def mac_platforms(
+ version: Optional[MacVersion] = None, arch: Optional[str] = None
+) -> Iterator[str]:
+ """
+ Yields the platform tags for a macOS system.
+
+ The `version` parameter is a two-item tuple specifying the macOS version to
+ generate platform tags for. The `arch` parameter is the CPU architecture to
+ generate platform tags for. Both parameters default to the appropriate value
+ for the current system.
+ """
+ version_str, _, cpu_arch = platform.mac_ver()
+ if version is None:
+ version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
+ else:
+ version = version
+ if arch is None:
+ arch = _mac_arch(cpu_arch)
+ else:
+ arch = arch
+
+ if (10, 0) <= version and version < (11, 0):
+ # Prior to Mac OS 11, each yearly release of Mac OS bumped the
+ # "minor" version number. The major version was always 10.
+ for minor_version in range(version[1], -1, -1):
+ compat_version = 10, minor_version
+ binary_formats = _mac_binary_formats(compat_version, arch)
+ for binary_format in binary_formats:
+ yield "macosx_{major}_{minor}_{binary_format}".format(
+ major=10, minor=minor_version, binary_format=binary_format
+ )
+
+ if version >= (11, 0):
+ # Starting with Mac OS 11, each yearly release bumps the major version
+ # number. The minor versions are now the midyear updates.
+ for major_version in range(version[0], 10, -1):
+ compat_version = major_version, 0
+ binary_formats = _mac_binary_formats(compat_version, arch)
+ for binary_format in binary_formats:
+ yield "macosx_{major}_{minor}_{binary_format}".format(
+ major=major_version, minor=0, binary_format=binary_format
+ )
+
+ if version >= (11, 0):
+ # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
+ # Arm64 support was introduced in 11.0, so no Arm binaries from previous
+ # releases exist.
+ #
+ # However, the "universal2" binary format can have a
+ # macOS version earlier than 11.0 when the x86_64 part of the binary supports
+ # that version of macOS.
+ if arch == "x86_64":
+ for minor_version in range(16, 3, -1):
+ compat_version = 10, minor_version
+ binary_formats = _mac_binary_formats(compat_version, arch)
+ for binary_format in binary_formats:
+ yield "macosx_{major}_{minor}_{binary_format}".format(
+ major=compat_version[0],
+ minor=compat_version[1],
+ binary_format=binary_format,
+ )
+ else:
+ for minor_version in range(16, 3, -1):
+ compat_version = 10, minor_version
+ binary_format = "universal2"
+ yield "macosx_{major}_{minor}_{binary_format}".format(
+ major=compat_version[0],
+ minor=compat_version[1],
+ binary_format=binary_format,
+ )
+
+
+def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
+ linux = _normalize_string(sysconfig.get_platform())
+ if is_32bit:
+ if linux == "linux_x86_64":
+ linux = "linux_i686"
+ elif linux == "linux_aarch64":
+ linux = "linux_armv7l"
+ _, arch = linux.split("_", 1)
+ yield from _manylinux.platform_tags(linux, arch)
+ yield from _musllinux.platform_tags(arch)
+ yield linux
+
+
+def _generic_platforms() -> Iterator[str]:
+ yield _normalize_string(sysconfig.get_platform())
+
+
+def platform_tags() -> Iterator[str]:
+ """
+ Provides the platform tags for this installation.
+ """
+ if platform.system() == "Darwin":
+ return mac_platforms()
+ elif platform.system() == "Linux":
+ return _linux_platforms()
+ else:
+ return _generic_platforms()
+
+
+def interpreter_name() -> str:
+ """
+ Returns the name of the running interpreter.
+ """
+ name = sys.implementation.name
+ return INTERPRETER_SHORT_NAMES.get(name) or name
+
+
+def interpreter_version(*, warn: bool = False) -> str:
+ """
+ Returns the version of the running interpreter.
+ """
+ version = _get_config_var("py_version_nodot", warn=warn)
+ if version:
+ version = str(version)
+ else:
+ version = _version_nodot(sys.version_info[:2])
+ return version
+
+
+def _version_nodot(version: PythonVersion) -> str:
+ return "".join(map(str, version))
+
+
+def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
+ """
+ Returns the sequence of tag triples for the running interpreter.
+
+ The order of the sequence corresponds to priority order for the
+ interpreter, from most to least important.
+ """
+
+ interp_name = interpreter_name()
+ if interp_name == "cp":
+ yield from cpython_tags(warn=warn)
+ else:
+ yield from generic_tags()
+
+ if interp_name == "pp":
+ yield from compatible_tags(interpreter="pp3")
+ else:
+ yield from compatible_tags()
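The newly vendored tags module computes wheel compatibility tags for the running interpreter. A minimal sketch; the exact output depends on the interpreter and platform:

# Illustrative only: inspect the tag triples for this interpreter.
from packaging.tags import Tag, parse_tag, sys_tags

print(parse_tag("cp39-abi3-manylinux_2_17_x86_64"))  # frozenset of one Tag

best = next(iter(sys_tags()))  # highest-priority tag for this interpreter
print(best.interpreter, best.abi, best.platform)

# py3-none-any is yielded by compatible_tags(), so this is normally True.
print(Tag("py3", "none", "any") in set(sys_tags()))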
diff --git a/pkg_resources/_vendor/packaging/utils.py b/pkg_resources/_vendor/packaging/utils.py
index 942387c..bab11b8 100644
--- a/pkg_resources/_vendor/packaging/utils.py
+++ b/pkg_resources/_vendor/packaging/utils.py
@@ -1,14 +1,136 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
-from __future__ import absolute_import, division, print_function
import re
+from typing import FrozenSet, NewType, Tuple, Union, cast
+
+from .tags import Tag, parse_tag
+from .version import InvalidVersion, Version
+
+BuildTag = Union[Tuple[()], Tuple[int, str]]
+NormalizedName = NewType("NormalizedName", str)
+
+
+class InvalidWheelFilename(ValueError):
+ """
+ An invalid wheel filename was found, users should refer to PEP 427.
+ """
+
+
+class InvalidSdistFilename(ValueError):
+ """
+ An invalid sdist filename was found, users should refer to the packaging user guide.
+ """
_canonicalize_regex = re.compile(r"[-_.]+")
+# PEP 427: The build number must start with a digit.
+_build_tag_regex = re.compile(r"(\d+)(.*)")
-def canonicalize_name(name):
+def canonicalize_name(name: str) -> NormalizedName:
# This is taken from PEP 503.
- return _canonicalize_regex.sub("-", name).lower()
+ value = _canonicalize_regex.sub("-", name).lower()
+ return cast(NormalizedName, value)
+
+
+def canonicalize_version(version: Union[Version, str]) -> str:
+ """
+ This is very similar to Version.__str__, but has one subtle difference
+ with the way it handles the release segment.
+ """
+ if isinstance(version, str):
+ try:
+ parsed = Version(version)
+ except InvalidVersion:
+ # Legacy versions cannot be normalized
+ return version
+ else:
+ parsed = version
+
+ parts = []
+
+ # Epoch
+ if parsed.epoch != 0:
+ parts.append(f"{parsed.epoch}!")
+
+ # Release segment
+ # NB: This strips trailing '.0's to normalize
+ parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in parsed.release)))
+
+ # Pre-release
+ if parsed.pre is not None:
+ parts.append("".join(str(x) for x in parsed.pre))
+
+ # Post-release
+ if parsed.post is not None:
+ parts.append(f".post{parsed.post}")
+
+ # Development release
+ if parsed.dev is not None:
+ parts.append(f".dev{parsed.dev}")
+
+ # Local version segment
+ if parsed.local is not None:
+ parts.append(f"+{parsed.local}")
+
+ return "".join(parts)
+
+
+def parse_wheel_filename(
+ filename: str,
+) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
+ if not filename.endswith(".whl"):
+ raise InvalidWheelFilename(
+ f"Invalid wheel filename (extension must be '.whl'): {filename}"
+ )
+
+ filename = filename[:-4]
+ dashes = filename.count("-")
+ if dashes not in (4, 5):
+ raise InvalidWheelFilename(
+ f"Invalid wheel filename (wrong number of parts): {filename}"
+ )
+
+ parts = filename.split("-", dashes - 2)
+ name_part = parts[0]
+ # See PEP 427 for the rules on escaping the project name
+ if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
+ raise InvalidWheelFilename(f"Invalid project name: {filename}")
+ name = canonicalize_name(name_part)
+ version = Version(parts[1])
+ if dashes == 5:
+ build_part = parts[2]
+ build_match = _build_tag_regex.match(build_part)
+ if build_match is None:
+ raise InvalidWheelFilename(
+ f"Invalid build number: {build_part} in '{filename}'"
+ )
+ build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
+ else:
+ build = ()
+ tags = parse_tag(parts[-1])
+ return (name, version, build, tags)
+
+
+def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
+ if filename.endswith(".tar.gz"):
+ file_stem = filename[: -len(".tar.gz")]
+ elif filename.endswith(".zip"):
+ file_stem = filename[: -len(".zip")]
+ else:
+ raise InvalidSdistFilename(
+ f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
+ f" {filename}"
+ )
+
+ # We are requiring a PEP 440 version, which cannot contain dashes,
+ # so we split on the last dash.
+ name_part, sep, version_part = file_stem.rpartition("-")
+ if not sep:
+ raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")
+
+ name = canonicalize_name(name_part)
+ version = Version(version_part)
+ return (name, version)
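A short sketch of the helpers added to utils.py above; the filenames are only examples:

# Illustrative only: name/version normalisation and filename parsing.
from packaging.utils import (
    canonicalize_name,
    canonicalize_version,
    parse_sdist_filename,
    parse_wheel_filename,
)

print(canonicalize_name("Setup_Tools"))    # setup-tools
print(canonicalize_version("1.0.0"))       # 1  (trailing .0 segments stripped)

name, version, build, tags = parse_wheel_filename("pip-21.3-py3-none-any.whl")
print(name, version, build, len(tags))     # pip 21.3 () 1

print(parse_sdist_filename("setuptools-61.1.0.tar.gz"))  # ('setuptools', <Version('61.1.0')>)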
diff --git a/pkg_resources/_vendor/packaging/version.py b/pkg_resources/_vendor/packaging/version.py
index 83b5ee8..de9a09a 100644
--- a/pkg_resources/_vendor/packaging/version.py
+++ b/pkg_resources/_vendor/packaging/version.py
@@ -1,27 +1,45 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
-from __future__ import absolute_import, division, print_function
import collections
import itertools
import re
-
-from ._structures import Infinity
-
-
-__all__ = [
- "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
+import warnings
+from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union
+
+from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
+
+__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]
+
+InfiniteTypes = Union[InfinityType, NegativeInfinityType]
+PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
+SubLocalType = Union[InfiniteTypes, int, str]
+LocalType = Union[
+ NegativeInfinityType,
+ Tuple[
+ Union[
+ SubLocalType,
+ Tuple[SubLocalType, str],
+ Tuple[NegativeInfinityType, SubLocalType],
+ ],
+ ...,
+ ],
+]
+CmpKey = Tuple[
+ int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
+]
+LegacyCmpKey = Tuple[int, Tuple[str, ...]]
+VersionComparisonMethod = Callable[
+ [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool
]
-
_Version = collections.namedtuple(
- "_Version",
- ["epoch", "release", "dev", "pre", "post", "local"],
+ "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
)
-def parse(version):
+def parse(version: str) -> Union["LegacyVersion", "Version"]:
"""
Parse the given version string and return either a :class:`Version` object
or a :class:`LegacyVersion` object depending on if the given version is
@@ -39,79 +57,126 @@ class InvalidVersion(ValueError):
"""
-class _BaseVersion(object):
+class _BaseVersion:
+ _key: Union[CmpKey, LegacyCmpKey]
- def __hash__(self):
+ def __hash__(self) -> int:
return hash(self._key)
- def __lt__(self, other):
- return self._compare(other, lambda s, o: s < o)
+ # Please keep the duplicated `isinstance` check
+ # in the six comparisons hereunder
+ # unless you find a way to avoid adding overhead function calls.
+ def __lt__(self, other: "_BaseVersion") -> bool:
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
- def __le__(self, other):
- return self._compare(other, lambda s, o: s <= o)
+ return self._key < other._key
- def __eq__(self, other):
- return self._compare(other, lambda s, o: s == o)
+ def __le__(self, other: "_BaseVersion") -> bool:
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
- def __ge__(self, other):
- return self._compare(other, lambda s, o: s >= o)
+ return self._key <= other._key
- def __gt__(self, other):
- return self._compare(other, lambda s, o: s > o)
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
- def __ne__(self, other):
- return self._compare(other, lambda s, o: s != o)
+ return self._key == other._key
- def _compare(self, other, method):
+ def __ge__(self, other: "_BaseVersion") -> bool:
if not isinstance(other, _BaseVersion):
return NotImplemented
- return method(self._key, other._key)
+ return self._key >= other._key
+ def __gt__(self, other: "_BaseVersion") -> bool:
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
-class LegacyVersion(_BaseVersion):
+        return self._key > other._key
+
+ def __ne__(self, other: object) -> bool:
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
+
+ return self._key != other._key
- def __init__(self, version):
+
+class LegacyVersion(_BaseVersion):
+ def __init__(self, version: str) -> None:
self._version = str(version)
self._key = _legacy_cmpkey(self._version)
- def __str__(self):
+ warnings.warn(
+ "Creating a LegacyVersion has been deprecated and will be "
+ "removed in the next major release",
+ DeprecationWarning,
+ )
+
+ def __str__(self) -> str:
return self._version
- def __repr__(self):
- return "<LegacyVersion({0})>".format(repr(str(self)))
+ def __repr__(self) -> str:
+ return f"<LegacyVersion('{self}')>"
@property
- def public(self):
+ def public(self) -> str:
return self._version
@property
- def base_version(self):
+ def base_version(self) -> str:
return self._version
@property
- def local(self):
+ def epoch(self) -> int:
+ return -1
+
+ @property
+ def release(self) -> None:
return None
@property
- def is_prerelease(self):
+ def pre(self) -> None:
+ return None
+
+ @property
+ def post(self) -> None:
+ return None
+
+ @property
+ def dev(self) -> None:
+ return None
+
+ @property
+ def local(self) -> None:
+ return None
+
+ @property
+ def is_prerelease(self) -> bool:
+ return False
+
+ @property
+ def is_postrelease(self) -> bool:
return False
@property
- def is_postrelease(self):
+ def is_devrelease(self) -> bool:
return False
-_legacy_version_component_re = re.compile(
- r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
-)
+_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
_legacy_version_replacement_map = {
- "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
+ "pre": "c",
+ "preview": "c",
+ "-": "final-",
+ "rc": "c",
+ "dev": "@",
}
-def _parse_version_parts(s):
+def _parse_version_parts(s: str) -> Iterator[str]:
for part in _legacy_version_component_re.split(s):
part = _legacy_version_replacement_map.get(part, part)
@@ -128,7 +193,8 @@ def _parse_version_parts(s):
yield "*final"
-def _legacy_cmpkey(version):
+def _legacy_cmpkey(version: str) -> LegacyCmpKey:
+
# We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch
# greater than or equal to 0. This will effectively put the LegacyVersion,
# which uses the defacto standard originally implemented by setuptools,
@@ -137,7 +203,7 @@ def _legacy_cmpkey(version):
# This scheme is taken from pkg_resources.parse_version setuptools prior to
# it's adoption of the packaging library.
- parts = []
+ parts: List[str] = []
for part in _parse_version_parts(version.lower()):
if part.startswith("*"):
# remove "-" before a prerelease tag
@@ -150,9 +216,9 @@ def _legacy_cmpkey(version):
parts.pop()
parts.append(part)
- parts = tuple(parts)
- return epoch, parts
+ return epoch, tuple(parts)
+
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
@@ -190,33 +256,24 @@ VERSION_PATTERN = r"""
class Version(_BaseVersion):
- _regex = re.compile(
- r"^\s*" + VERSION_PATTERN + r"\s*$",
- re.VERBOSE | re.IGNORECASE,
- )
+ _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+ def __init__(self, version: str) -> None:
- def __init__(self, version):
# Validate the version and parse it into pieces
match = self._regex.search(version)
if not match:
- raise InvalidVersion("Invalid version: '{0}'".format(version))
+ raise InvalidVersion(f"Invalid version: '{version}'")
# Store the parsed out pieces of the version
self._version = _Version(
epoch=int(match.group("epoch")) if match.group("epoch") else 0,
release=tuple(int(i) for i in match.group("release").split(".")),
- pre=_parse_letter_version(
- match.group("pre_l"),
- match.group("pre_n"),
- ),
+ pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
post=_parse_letter_version(
- match.group("post_l"),
- match.group("post_n1") or match.group("post_n2"),
- ),
- dev=_parse_letter_version(
- match.group("dev_l"),
- match.group("dev_n"),
+ match.group("post_l"), match.group("post_n1") or match.group("post_n2")
),
+ dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
local=_parse_local_version(match.group("local")),
)
@@ -230,72 +287,113 @@ class Version(_BaseVersion):
self._version.local,
)
- def __repr__(self):
- return "<Version({0})>".format(repr(str(self)))
+ def __repr__(self) -> str:
+ return f"<Version('{self}')>"
- def __str__(self):
+ def __str__(self) -> str:
parts = []
# Epoch
- if self._version.epoch != 0:
- parts.append("{0}!".format(self._version.epoch))
+ if self.epoch != 0:
+ parts.append(f"{self.epoch}!")
# Release segment
- parts.append(".".join(str(x) for x in self._version.release))
+ parts.append(".".join(str(x) for x in self.release))
# Pre-release
- if self._version.pre is not None:
- parts.append("".join(str(x) for x in self._version.pre))
+ if self.pre is not None:
+ parts.append("".join(str(x) for x in self.pre))
# Post-release
- if self._version.post is not None:
- parts.append(".post{0}".format(self._version.post[1]))
+ if self.post is not None:
+ parts.append(f".post{self.post}")
# Development release
- if self._version.dev is not None:
- parts.append(".dev{0}".format(self._version.dev[1]))
+ if self.dev is not None:
+ parts.append(f".dev{self.dev}")
# Local version segment
- if self._version.local is not None:
- parts.append(
- "+{0}".format(".".join(str(x) for x in self._version.local))
- )
+ if self.local is not None:
+ parts.append(f"+{self.local}")
return "".join(parts)
@property
- def public(self):
+ def epoch(self) -> int:
+ _epoch: int = self._version.epoch
+ return _epoch
+
+ @property
+ def release(self) -> Tuple[int, ...]:
+ _release: Tuple[int, ...] = self._version.release
+ return _release
+
+ @property
+ def pre(self) -> Optional[Tuple[str, int]]:
+ _pre: Optional[Tuple[str, int]] = self._version.pre
+ return _pre
+
+ @property
+ def post(self) -> Optional[int]:
+ return self._version.post[1] if self._version.post else None
+
+ @property
+ def dev(self) -> Optional[int]:
+ return self._version.dev[1] if self._version.dev else None
+
+ @property
+ def local(self) -> Optional[str]:
+ if self._version.local:
+ return ".".join(str(x) for x in self._version.local)
+ else:
+ return None
+
+ @property
+ def public(self) -> str:
return str(self).split("+", 1)[0]
@property
- def base_version(self):
+ def base_version(self) -> str:
parts = []
# Epoch
- if self._version.epoch != 0:
- parts.append("{0}!".format(self._version.epoch))
+ if self.epoch != 0:
+ parts.append(f"{self.epoch}!")
# Release segment
- parts.append(".".join(str(x) for x in self._version.release))
+ parts.append(".".join(str(x) for x in self.release))
return "".join(parts)
@property
- def local(self):
- version_string = str(self)
- if "+" in version_string:
- return version_string.split("+", 1)[1]
+ def is_prerelease(self) -> bool:
+ return self.dev is not None or self.pre is not None
+
+ @property
+ def is_postrelease(self) -> bool:
+ return self.post is not None
+
+ @property
+ def is_devrelease(self) -> bool:
+ return self.dev is not None
+
+ @property
+ def major(self) -> int:
+ return self.release[0] if len(self.release) >= 1 else 0
@property
- def is_prerelease(self):
- return bool(self._version.dev or self._version.pre)
+ def minor(self) -> int:
+ return self.release[1] if len(self.release) >= 2 else 0
@property
- def is_postrelease(self):
- return bool(self._version.post)
+ def micro(self) -> int:
+ return self.release[2] if len(self.release) >= 3 else 0
-def _parse_letter_version(letter, number):
+def _parse_letter_version(
+ letter: str, number: Union[str, bytes, SupportsInt]
+) -> Optional[Tuple[str, int]]:
+
if letter:
# We consider there to be an implicit 0 in a pre-release if there is
# not a numeral associated with it.
@@ -325,34 +423,40 @@ def _parse_letter_version(letter, number):
return letter, int(number)
+ return None
+
-_local_version_seperators = re.compile(r"[\._-]")
+_local_version_separators = re.compile(r"[\._-]")
-def _parse_local_version(local):
+def _parse_local_version(local: str) -> Optional[LocalType]:
"""
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
"""
if local is not None:
return tuple(
part.lower() if not part.isdigit() else int(part)
- for part in _local_version_seperators.split(local)
+ for part in _local_version_separators.split(local)
)
+ return None
+
+def _cmpkey(
+ epoch: int,
+ release: Tuple[int, ...],
+ pre: Optional[Tuple[str, int]],
+ post: Optional[Tuple[str, int]],
+ dev: Optional[Tuple[str, int]],
+ local: Optional[Tuple[SubLocalType]],
+) -> CmpKey:
-def _cmpkey(epoch, release, pre, post, dev, local):
# When we compare a release version, we want to compare it with all of the
# trailing zeros removed. So we reverse the list, drop the now-leading zeros
# until we reach something non-zero, re-reverse it back into the correct
# order, and use the resulting tuple as our sorting key.
- release = tuple(
- reversed(list(
- itertools.dropwhile(
- lambda x: x == 0,
- reversed(release),
- )
- ))
+ _release = tuple(
+ reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
)
# We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
@@ -360,23 +464,31 @@ def _cmpkey(epoch, release, pre, post, dev, local):
# if there is not a pre or a post segment. If we have one of those then
# the normal sorting rules will handle this case correctly.
if pre is None and post is None and dev is not None:
- pre = -Infinity
+ _pre: PrePostDevType = NegativeInfinity
# Versions without a pre-release (except as noted above) should sort after
# those with one.
elif pre is None:
- pre = Infinity
+ _pre = Infinity
+ else:
+ _pre = pre
# Versions without a post segment should sort before those with one.
if post is None:
- post = -Infinity
+ _post: PrePostDevType = NegativeInfinity
+
+ else:
+ _post = post
# Versions without a development segment should sort after those with one.
if dev is None:
- dev = Infinity
+ _dev: PrePostDevType = Infinity
+
+ else:
+ _dev = dev
if local is None:
# Versions without a local segment should sort before those with one.
- local = -Infinity
+ _local: LocalType = NegativeInfinity
else:
# Versions with a local segment need that segment parsed to implement
# the sorting rules in PEP440.
@@ -385,9 +497,8 @@ def _cmpkey(epoch, release, pre, post, dev, local):
# - Numeric segments sort numerically
# - Shorter versions sort before longer versions when the prefixes
# match exactly
- local = tuple(
- (i, "") if isinstance(i, int) else (-Infinity, i)
- for i in local
+ _local = tuple(
+ (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
)
- return epoch, release, pre, post, dev, local
+ return epoch, _release, _pre, _post, _dev, _local
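To illustrate the ordering that the ``_cmpkey`` sentinels above produce (again a sketch against the upstream ``packaging`` package, not part of this change)::

    # Illustrative only; dev releases sort before pre-releases of the same
    # version, locals sort after the plain release, post releases sort last.
    from packaging.version import Version

    versions = ["1.0.post1", "1.0", "1.0rc1", "1.0.dev0", "1.0+local", "0.9"]
    print(sorted(versions, key=Version))
    # ['0.9', '1.0.dev0', '1.0rc1', '1.0', '1.0+local', '1.0.post1']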
diff --git a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/LICENSE.txt b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/LICENSE.txt
new file mode 100644
index 0000000..bbc959e
--- /dev/null
+++ b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/LICENSE.txt
@@ -0,0 +1,18 @@
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/top_level.txt b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/top_level.txt
new file mode 100644
index 0000000..210dfec
--- /dev/null
+++ b/pkg_resources/_vendor/pyparsing-2.2.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+pyparsing
diff --git a/pkg_resources/_vendor/pyparsing.py b/pkg_resources/_vendor/pyparsing.py
index a212243..cf75e1e 100644
--- a/pkg_resources/_vendor/pyparsing.py
+++ b/pkg_resources/_vendor/pyparsing.py
@@ -1,6 +1,6 @@
# module pyparsing.py
#
-# Copyright (c) 2003-2016 Paul T. McGuire
+# Copyright (c) 2003-2018 Paul T. McGuire
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@@ -25,6 +25,7 @@
__doc__ = \
"""
pyparsing module - Classes and methods to define and execute parsing grammars
+=============================================================================
The pyparsing module is an alternative approach to creating and executing simple grammars,
vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you
@@ -58,10 +59,23 @@ The pyparsing module handles some of the problems that are typically vexing when
- extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.)
- quoted strings
- embedded comments
+
+
+Getting Started -
+-----------------
+Visit the classes L{ParserElement} and L{ParseResults} to see the base classes that most other pyparsing
+classes inherit from. Use the docstrings for examples of how to:
+ - construct literal match expressions from L{Literal} and L{CaselessLiteral} classes
+ - construct character word-group expressions using the L{Word} class
+ - see how to create repetitive expressions using L{ZeroOrMore} and L{OneOrMore} classes
+ - use L{'+'<And>}, L{'|'<MatchFirst>}, L{'^'<Or>}, and L{'&'<Each>} operators to combine simple expressions into more complex ones
+ - associate names with your parsed results using L{ParserElement.setResultsName}
+ - find some helpful expression short-cuts like L{delimitedList} and L{oneOf}
+ - find more useful common expressions in the L{pyparsing_common} namespace class
"""
-__version__ = "2.1.10"
-__versionTime__ = "07 Oct 2016 01:31 UTC"
+__version__ = "2.2.1"
+__versionTime__ = "18 Sep 2018 00:49 UTC"
__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>"
import string
@@ -83,6 +97,15 @@ except ImportError:
from threading import RLock
try:
+ # Python 3
+ from collections.abc import Iterable
+ from collections.abc import MutableMapping
+except ImportError:
+ # Python 2.7
+ from collections import Iterable
+ from collections import MutableMapping
+
+try:
from collections import OrderedDict as _OrderedDict
except ImportError:
try:
@@ -144,7 +167,7 @@ else:
except UnicodeEncodeError:
# Else encode it
ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace')
- xmlcharref = Regex('&#\d+;')
+ xmlcharref = Regex(r'&#\d+;')
xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:])
return xmlcharref.transformString(ret)
@@ -809,7 +832,7 @@ class ParseResults(object):
return None
def getName(self):
- """
+ r"""
Returns the results name for this token expression. Useful when several
different expressions might match at a particular location.
@@ -940,7 +963,7 @@ class ParseResults(object):
def __dir__(self):
return (dir(type(self)) + list(self.keys()))
-collections.MutableMapping.register(ParseResults)
+MutableMapping.register(ParseResults)
def col (loc,strg):
"""Returns current column within a string, counting newlines as line separators.
@@ -1025,11 +1048,11 @@ def _trim_arity(func, maxargs=2):
# special handling for Python 3.5.0 - extra deep call stack by 1
offset = -3 if system_version == (3,5,0) else -2
frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset]
- return [(frame_summary.filename, frame_summary.lineno)]
+ return [frame_summary[:2]]
def extract_tb(tb, limit=0):
frames = traceback.extract_tb(tb, limit=limit)
frame_summary = frames[-1]
- return [(frame_summary.filename, frame_summary.lineno)]
+ return [frame_summary[:2]]
else:
extract_stack = traceback.extract_stack
extract_tb = traceback.extract_tb
@@ -1226,7 +1249,7 @@ class ParserElement(object):
def setParseAction( self, *fns, **kwargs ):
"""
- Define action to perform when successfully matching parse element definition.
+ Define one or more actions to perform when successfully matching parse element definition.
Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)},
C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where:
- s = the original string being parsed (see note below)
@@ -1264,7 +1287,7 @@ class ParserElement(object):
def addParseAction( self, *fns, **kwargs ):
"""
- Add parse action to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}.
+ Add one or more parse actions to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}.
See examples in L{I{copy}<copy>}.
"""
@@ -1374,7 +1397,7 @@ class ParserElement(object):
else:
preloc = loc
tokensStart = preloc
- if self.mayIndexError or loc >= len(instring):
+ if self.mayIndexError or preloc >= len(instring):
try:
loc,tokens = self.parseImpl( instring, preloc, doActions )
except IndexError:
@@ -1408,7 +1431,6 @@ class ParserElement(object):
self.resultsName,
asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
modal=self.modalResults )
-
if debugging:
#~ print ("Matched",self,"->",retTokens.asList())
if (self.debugActions[1] ):
@@ -1443,10 +1465,14 @@ class ParserElement(object):
def clear(self):
cache.clear()
+
+ def cache_len(self):
+ return len(cache)
self.get = types.MethodType(get, self)
self.set = types.MethodType(set, self)
self.clear = types.MethodType(clear, self)
+ self.__len__ = types.MethodType(cache_len, self)
if _OrderedDict is not None:
class _FifoCache(object):
@@ -1460,15 +1486,22 @@ class ParserElement(object):
def set(self, key, value):
cache[key] = value
- if len(cache) > size:
- cache.popitem(False)
+ while len(cache) > size:
+ try:
+ cache.popitem(False)
+ except KeyError:
+ pass
def clear(self):
cache.clear()
+ def cache_len(self):
+ return len(cache)
+
self.get = types.MethodType(get, self)
self.set = types.MethodType(set, self)
self.clear = types.MethodType(clear, self)
+ self.__len__ = types.MethodType(cache_len, self)
else:
class _FifoCache(object):
@@ -1483,7 +1516,7 @@ class ParserElement(object):
def set(self, key, value):
cache[key] = value
- if len(cache) > size:
+ while len(key_fifo) > size:
cache.pop(key_fifo.popleft(), None)
key_fifo.append(key)
@@ -1491,9 +1524,13 @@ class ParserElement(object):
cache.clear()
key_fifo.clear()
+ def cache_len(self):
+ return len(cache)
+
self.get = types.MethodType(get, self)
self.set = types.MethodType(set, self)
self.clear = types.MethodType(clear, self)
+ self.__len__ = types.MethodType(cache_len, self)
# argument cache for optimizing repeated calls when backtracking through recursive expressions
packrat_cache = {} # this is set later by enabledPackrat(); this is here so that resetCache() doesn't fail
@@ -1743,8 +1780,12 @@ class ParserElement(object):
cap_word = Word(alphas.upper(), alphas.lower())
print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))
+
+ # the sum() builtin can be used to merge results into a single ParseResults object
+ print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")))
prints::
- ['More', 'Iron', 'Lead', 'Gold', 'I']
+ [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']]
+ ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity']
"""
try:
return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ])
@@ -1819,7 +1860,7 @@ class ParserElement(object):
warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
SyntaxWarning, stacklevel=2)
return None
- return And( [ self, And._ErrorStop(), other ] )
+ return self + And._ErrorStop() + other
def __rsub__(self, other ):
"""
@@ -2722,7 +2763,7 @@ class Word(Token):
class Regex(Token):
- """
+ r"""
Token for matching strings that match a given regular expression.
Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module.
If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as
@@ -2911,7 +2952,7 @@ class QuotedString(Token):
# replace escaped characters
if self.escChar:
- ret = re.sub(self.escCharReplacePattern,"\g<1>",ret)
+ ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret)
# replace escaped quotes
if self.escQuote:
@@ -3223,7 +3264,7 @@ class ParseExpression(ParserElement):
if isinstance( exprs, basestring ):
self.exprs = [ ParserElement._literalStringClass( exprs ) ]
- elif isinstance( exprs, collections.Iterable ):
+ elif isinstance( exprs, Iterable ):
exprs = list(exprs)
# if sequence of strings provided, wrap with Literal
if all(isinstance(expr, basestring) for expr in exprs):
@@ -4374,7 +4415,7 @@ def traceParseAction(f):
@traceParseAction
def remove_duplicate_chars(tokens):
- return ''.join(sorted(set(''.join(tokens)))
+ return ''.join(sorted(set(''.join(tokens))))
wds = OneOrMore(wd).setParseAction(remove_duplicate_chars)
print(wds.parseString("slkdjs sld sldd sdlf sdljf"))
@@ -4564,7 +4605,7 @@ def oneOf( strs, caseless=False, useRegex=True ):
symbols = []
if isinstance(strs,basestring):
symbols = strs.split()
- elif isinstance(strs, collections.Iterable):
+ elif isinstance(strs, Iterable):
symbols = list(strs)
else:
warnings.warn("Invalid argument to oneOf, expected string or iterable",
@@ -4715,7 +4756,7 @@ stringEnd = StringEnd().setName("stringEnd")
_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1])
_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16)))
_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8)))
-_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | Word(printables, excludeChars=r'\]', exact=1) | Regex(r"\w", re.UNICODE)
+_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1)
_charRange = Group(_singleChar + Suppress("-") + _singleChar)
_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]"
@@ -5020,7 +5061,9 @@ def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ):
constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}.
- parseAction is the parse action to be associated with
expressions matching this operator expression (the
- parse action tuple member may be omitted)
+ parse action tuple member may be omitted); if the parse action
+ is passed a tuple or list of functions, this is equivalent to
+ calling C{setParseAction(*fn)} (L{ParserElement.setParseAction})
- lpar - expression for matching left-parentheses (default=C{Suppress('(')})
- rpar - expression for matching right-parentheses (default=C{Suppress(')')})
@@ -5093,7 +5136,10 @@ def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ):
else:
raise ValueError("operator must indicate right or left associativity")
if pa:
- matchExpr.setParseAction( pa )
+ if isinstance(pa, (tuple, list)):
+ matchExpr.setParseAction(*pa)
+ else:
+ matchExpr.setParseAction(pa)
thisExpr <<= ( matchExpr.setName(termName) | lastExpr )
lastExpr = thisExpr
ret <<= lastExpr
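The ``isinstance(pa, (tuple, list))`` branch above lets callers attach several parse actions to one operator level; a small sketch of that usage (hypothetical action names, assuming the pyparsing 2.2 API)::

    # Illustrative only; `tag_expr` and `fold_add` are hypothetical actions.
    import pyparsing as pp

    integer = pp.pyparsing_common.integer

    def tag_expr(tokens):
        # first action: just observe the matched group
        print("matched:", tokens[0].asList())

    def fold_add(tokens):
        # second action: replace [a, '+', b, ...] with its sum
        return sum(tokens[0][0::2])

    expr = pp.infixNotation(
        integer,
        [("+", 2, pp.opAssoc.LEFT, (tag_expr, fold_add))],  # tuple of actions
    )
    print(expr.parseString("1 + 2 + 3"))  # [6]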
diff --git a/pkg_resources/_vendor/six.py b/pkg_resources/_vendor/six.py
deleted file mode 100644
index 190c023..0000000
--- a/pkg_resources/_vendor/six.py
+++ /dev/null
@@ -1,868 +0,0 @@
-"""Utilities for writing code that runs on Python 2 and 3"""
-
-# Copyright (c) 2010-2015 Benjamin Peterson
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in all
-# copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-
-from __future__ import absolute_import
-
-import functools
-import itertools
-import operator
-import sys
-import types
-
-__author__ = "Benjamin Peterson <benjamin@python.org>"
-__version__ = "1.10.0"
-
-
-# Useful for very coarse version differentiation.
-PY2 = sys.version_info[0] == 2
-PY3 = sys.version_info[0] == 3
-PY34 = sys.version_info[0:2] >= (3, 4)
-
-if PY3:
- string_types = str,
- integer_types = int,
- class_types = type,
- text_type = str
- binary_type = bytes
-
- MAXSIZE = sys.maxsize
-else:
- string_types = basestring,
- integer_types = (int, long)
- class_types = (type, types.ClassType)
- text_type = unicode
- binary_type = str
-
- if sys.platform.startswith("java"):
- # Jython always uses 32 bits.
- MAXSIZE = int((1 << 31) - 1)
- else:
- # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
- class X(object):
-
- def __len__(self):
- return 1 << 31
- try:
- len(X())
- except OverflowError:
- # 32-bit
- MAXSIZE = int((1 << 31) - 1)
- else:
- # 64-bit
- MAXSIZE = int((1 << 63) - 1)
- del X
-
-
-def _add_doc(func, doc):
- """Add documentation to a function."""
- func.__doc__ = doc
-
-
-def _import_module(name):
- """Import module, returning the module after the last dot."""
- __import__(name)
- return sys.modules[name]
-
-
-class _LazyDescr(object):
-
- def __init__(self, name):
- self.name = name
-
- def __get__(self, obj, tp):
- result = self._resolve()
- setattr(obj, self.name, result) # Invokes __set__.
- try:
- # This is a bit ugly, but it avoids running this again by
- # removing this descriptor.
- delattr(obj.__class__, self.name)
- except AttributeError:
- pass
- return result
-
-
-class MovedModule(_LazyDescr):
-
- def __init__(self, name, old, new=None):
- super(MovedModule, self).__init__(name)
- if PY3:
- if new is None:
- new = name
- self.mod = new
- else:
- self.mod = old
-
- def _resolve(self):
- return _import_module(self.mod)
-
- def __getattr__(self, attr):
- _module = self._resolve()
- value = getattr(_module, attr)
- setattr(self, attr, value)
- return value
-
-
-class _LazyModule(types.ModuleType):
-
- def __init__(self, name):
- super(_LazyModule, self).__init__(name)
- self.__doc__ = self.__class__.__doc__
-
- def __dir__(self):
- attrs = ["__doc__", "__name__"]
- attrs += [attr.name for attr in self._moved_attributes]
- return attrs
-
- # Subclasses should override this
- _moved_attributes = []
-
-
-class MovedAttribute(_LazyDescr):
-
- def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
- super(MovedAttribute, self).__init__(name)
- if PY3:
- if new_mod is None:
- new_mod = name
- self.mod = new_mod
- if new_attr is None:
- if old_attr is None:
- new_attr = name
- else:
- new_attr = old_attr
- self.attr = new_attr
- else:
- self.mod = old_mod
- if old_attr is None:
- old_attr = name
- self.attr = old_attr
-
- def _resolve(self):
- module = _import_module(self.mod)
- return getattr(module, self.attr)
-
-
-class _SixMetaPathImporter(object):
-
- """
- A meta path importer to import six.moves and its submodules.
-
- This class implements a PEP302 finder and loader. It should be compatible
- with Python 2.5 and all existing versions of Python3
- """
-
- def __init__(self, six_module_name):
- self.name = six_module_name
- self.known_modules = {}
-
- def _add_module(self, mod, *fullnames):
- for fullname in fullnames:
- self.known_modules[self.name + "." + fullname] = mod
-
- def _get_module(self, fullname):
- return self.known_modules[self.name + "." + fullname]
-
- def find_module(self, fullname, path=None):
- if fullname in self.known_modules:
- return self
- return None
-
- def __get_module(self, fullname):
- try:
- return self.known_modules[fullname]
- except KeyError:
- raise ImportError("This loader does not know module " + fullname)
-
- def load_module(self, fullname):
- try:
- # in case of a reload
- return sys.modules[fullname]
- except KeyError:
- pass
- mod = self.__get_module(fullname)
- if isinstance(mod, MovedModule):
- mod = mod._resolve()
- else:
- mod.__loader__ = self
- sys.modules[fullname] = mod
- return mod
-
- def is_package(self, fullname):
- """
- Return true, if the named module is a package.
-
- We need this method to get correct spec objects with
- Python 3.4 (see PEP451)
- """
- return hasattr(self.__get_module(fullname), "__path__")
-
- def get_code(self, fullname):
- """Return None
-
- Required, if is_package is implemented"""
- self.__get_module(fullname) # eventually raises ImportError
- return None
- get_source = get_code # same as get_code
-
-_importer = _SixMetaPathImporter(__name__)
-
-
-class _MovedItems(_LazyModule):
-
- """Lazy loading of moved objects"""
- __path__ = [] # mark as package
-
-
-_moved_attributes = [
- MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
- MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
- MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
- MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
- MovedAttribute("intern", "__builtin__", "sys"),
- MovedAttribute("map", "itertools", "builtins", "imap", "map"),
- MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
- MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
- MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
- MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
- MovedAttribute("reduce", "__builtin__", "functools"),
- MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
- MovedAttribute("StringIO", "StringIO", "io"),
- MovedAttribute("UserDict", "UserDict", "collections"),
- MovedAttribute("UserList", "UserList", "collections"),
- MovedAttribute("UserString", "UserString", "collections"),
- MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
- MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
- MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
- MovedModule("builtins", "__builtin__"),
- MovedModule("configparser", "ConfigParser"),
- MovedModule("copyreg", "copy_reg"),
- MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
- MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
- MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
- MovedModule("http_cookies", "Cookie", "http.cookies"),
- MovedModule("html_entities", "htmlentitydefs", "html.entities"),
- MovedModule("html_parser", "HTMLParser", "html.parser"),
- MovedModule("http_client", "httplib", "http.client"),
- MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
- MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
- MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
- MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
- MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
- MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
- MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
- MovedModule("cPickle", "cPickle", "pickle"),
- MovedModule("queue", "Queue"),
- MovedModule("reprlib", "repr"),
- MovedModule("socketserver", "SocketServer"),
- MovedModule("_thread", "thread", "_thread"),
- MovedModule("tkinter", "Tkinter"),
- MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
- MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
- MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
- MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
- MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
- MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
- MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
- MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
- MovedModule("tkinter_colorchooser", "tkColorChooser",
- "tkinter.colorchooser"),
- MovedModule("tkinter_commondialog", "tkCommonDialog",
- "tkinter.commondialog"),
- MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
- MovedModule("tkinter_font", "tkFont", "tkinter.font"),
- MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
- MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
- "tkinter.simpledialog"),
- MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
- MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
- MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
- MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
- MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
- MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
-]
-# Add windows specific modules.
-if sys.platform == "win32":
- _moved_attributes += [
- MovedModule("winreg", "_winreg"),
- ]
-
-for attr in _moved_attributes:
- setattr(_MovedItems, attr.name, attr)
- if isinstance(attr, MovedModule):
- _importer._add_module(attr, "moves." + attr.name)
-del attr
-
-_MovedItems._moved_attributes = _moved_attributes
-
-moves = _MovedItems(__name__ + ".moves")
-_importer._add_module(moves, "moves")
-
-
-class Module_six_moves_urllib_parse(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_parse"""
-
-
-_urllib_parse_moved_attributes = [
- MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
- MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
- MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
- MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
- MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
- MovedAttribute("urljoin", "urlparse", "urllib.parse"),
- MovedAttribute("urlparse", "urlparse", "urllib.parse"),
- MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
- MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
- MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
- MovedAttribute("quote", "urllib", "urllib.parse"),
- MovedAttribute("quote_plus", "urllib", "urllib.parse"),
- MovedAttribute("unquote", "urllib", "urllib.parse"),
- MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
- MovedAttribute("urlencode", "urllib", "urllib.parse"),
- MovedAttribute("splitquery", "urllib", "urllib.parse"),
- MovedAttribute("splittag", "urllib", "urllib.parse"),
- MovedAttribute("splituser", "urllib", "urllib.parse"),
- MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
- MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
- MovedAttribute("uses_params", "urlparse", "urllib.parse"),
- MovedAttribute("uses_query", "urlparse", "urllib.parse"),
- MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
-]
-for attr in _urllib_parse_moved_attributes:
- setattr(Module_six_moves_urllib_parse, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
- "moves.urllib_parse", "moves.urllib.parse")
-
-
-class Module_six_moves_urllib_error(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_error"""
-
-
-_urllib_error_moved_attributes = [
- MovedAttribute("URLError", "urllib2", "urllib.error"),
- MovedAttribute("HTTPError", "urllib2", "urllib.error"),
- MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
-]
-for attr in _urllib_error_moved_attributes:
- setattr(Module_six_moves_urllib_error, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
- "moves.urllib_error", "moves.urllib.error")
-
-
-class Module_six_moves_urllib_request(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_request"""
-
-
-_urllib_request_moved_attributes = [
- MovedAttribute("urlopen", "urllib2", "urllib.request"),
- MovedAttribute("install_opener", "urllib2", "urllib.request"),
- MovedAttribute("build_opener", "urllib2", "urllib.request"),
- MovedAttribute("pathname2url", "urllib", "urllib.request"),
- MovedAttribute("url2pathname", "urllib", "urllib.request"),
- MovedAttribute("getproxies", "urllib", "urllib.request"),
- MovedAttribute("Request", "urllib2", "urllib.request"),
- MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
- MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
- MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
- MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
- MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
- MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
- MovedAttribute("FileHandler", "urllib2", "urllib.request"),
- MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
- MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
- MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
- MovedAttribute("urlretrieve", "urllib", "urllib.request"),
- MovedAttribute("urlcleanup", "urllib", "urllib.request"),
- MovedAttribute("URLopener", "urllib", "urllib.request"),
- MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
- MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
-]
-for attr in _urllib_request_moved_attributes:
- setattr(Module_six_moves_urllib_request, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
- "moves.urllib_request", "moves.urllib.request")
-
-
-class Module_six_moves_urllib_response(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_response"""
-
-
-_urllib_response_moved_attributes = [
- MovedAttribute("addbase", "urllib", "urllib.response"),
- MovedAttribute("addclosehook", "urllib", "urllib.response"),
- MovedAttribute("addinfo", "urllib", "urllib.response"),
- MovedAttribute("addinfourl", "urllib", "urllib.response"),
-]
-for attr in _urllib_response_moved_attributes:
- setattr(Module_six_moves_urllib_response, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
- "moves.urllib_response", "moves.urllib.response")
-
-
-class Module_six_moves_urllib_robotparser(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_robotparser"""
-
-
-_urllib_robotparser_moved_attributes = [
- MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
-]
-for attr in _urllib_robotparser_moved_attributes:
- setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
- "moves.urllib_robotparser", "moves.urllib.robotparser")
-
-
-class Module_six_moves_urllib(types.ModuleType):
-
- """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
- __path__ = [] # mark as package
- parse = _importer._get_module("moves.urllib_parse")
- error = _importer._get_module("moves.urllib_error")
- request = _importer._get_module("moves.urllib_request")
- response = _importer._get_module("moves.urllib_response")
- robotparser = _importer._get_module("moves.urllib_robotparser")
-
- def __dir__(self):
- return ['parse', 'error', 'request', 'response', 'robotparser']
-
-_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
- "moves.urllib")
-
-
-def add_move(move):
- """Add an item to six.moves."""
- setattr(_MovedItems, move.name, move)
-
-
-def remove_move(name):
- """Remove item from six.moves."""
- try:
- delattr(_MovedItems, name)
- except AttributeError:
- try:
- del moves.__dict__[name]
- except KeyError:
- raise AttributeError("no such move, %r" % (name,))
-
-
-if PY3:
- _meth_func = "__func__"
- _meth_self = "__self__"
-
- _func_closure = "__closure__"
- _func_code = "__code__"
- _func_defaults = "__defaults__"
- _func_globals = "__globals__"
-else:
- _meth_func = "im_func"
- _meth_self = "im_self"
-
- _func_closure = "func_closure"
- _func_code = "func_code"
- _func_defaults = "func_defaults"
- _func_globals = "func_globals"
-
-
-try:
- advance_iterator = next
-except NameError:
- def advance_iterator(it):
- return it.next()
-next = advance_iterator
-
-
-try:
- callable = callable
-except NameError:
- def callable(obj):
- return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
-
-
-if PY3:
- def get_unbound_function(unbound):
- return unbound
-
- create_bound_method = types.MethodType
-
- def create_unbound_method(func, cls):
- return func
-
- Iterator = object
-else:
- def get_unbound_function(unbound):
- return unbound.im_func
-
- def create_bound_method(func, obj):
- return types.MethodType(func, obj, obj.__class__)
-
- def create_unbound_method(func, cls):
- return types.MethodType(func, None, cls)
-
- class Iterator(object):
-
- def next(self):
- return type(self).__next__(self)
-
- callable = callable
-_add_doc(get_unbound_function,
- """Get the function out of a possibly unbound function""")
-
-
-get_method_function = operator.attrgetter(_meth_func)
-get_method_self = operator.attrgetter(_meth_self)
-get_function_closure = operator.attrgetter(_func_closure)
-get_function_code = operator.attrgetter(_func_code)
-get_function_defaults = operator.attrgetter(_func_defaults)
-get_function_globals = operator.attrgetter(_func_globals)
-
-
-if PY3:
- def iterkeys(d, **kw):
- return iter(d.keys(**kw))
-
- def itervalues(d, **kw):
- return iter(d.values(**kw))
-
- def iteritems(d, **kw):
- return iter(d.items(**kw))
-
- def iterlists(d, **kw):
- return iter(d.lists(**kw))
-
- viewkeys = operator.methodcaller("keys")
-
- viewvalues = operator.methodcaller("values")
-
- viewitems = operator.methodcaller("items")
-else:
- def iterkeys(d, **kw):
- return d.iterkeys(**kw)
-
- def itervalues(d, **kw):
- return d.itervalues(**kw)
-
- def iteritems(d, **kw):
- return d.iteritems(**kw)
-
- def iterlists(d, **kw):
- return d.iterlists(**kw)
-
- viewkeys = operator.methodcaller("viewkeys")
-
- viewvalues = operator.methodcaller("viewvalues")
-
- viewitems = operator.methodcaller("viewitems")
-
-_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
-_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
-_add_doc(iteritems,
- "Return an iterator over the (key, value) pairs of a dictionary.")
-_add_doc(iterlists,
- "Return an iterator over the (key, [values]) pairs of a dictionary.")
-
-
-if PY3:
- def b(s):
- return s.encode("latin-1")
-
- def u(s):
- return s
- unichr = chr
- import struct
- int2byte = struct.Struct(">B").pack
- del struct
- byte2int = operator.itemgetter(0)
- indexbytes = operator.getitem
- iterbytes = iter
- import io
- StringIO = io.StringIO
- BytesIO = io.BytesIO
- _assertCountEqual = "assertCountEqual"
- if sys.version_info[1] <= 1:
- _assertRaisesRegex = "assertRaisesRegexp"
- _assertRegex = "assertRegexpMatches"
- else:
- _assertRaisesRegex = "assertRaisesRegex"
- _assertRegex = "assertRegex"
-else:
- def b(s):
- return s
- # Workaround for standalone backslash
-
- def u(s):
- return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
- unichr = unichr
- int2byte = chr
-
- def byte2int(bs):
- return ord(bs[0])
-
- def indexbytes(buf, i):
- return ord(buf[i])
- iterbytes = functools.partial(itertools.imap, ord)
- import StringIO
- StringIO = BytesIO = StringIO.StringIO
- _assertCountEqual = "assertItemsEqual"
- _assertRaisesRegex = "assertRaisesRegexp"
- _assertRegex = "assertRegexpMatches"
-_add_doc(b, """Byte literal""")
-_add_doc(u, """Text literal""")
-
-
-def assertCountEqual(self, *args, **kwargs):
- return getattr(self, _assertCountEqual)(*args, **kwargs)
-
-
-def assertRaisesRegex(self, *args, **kwargs):
- return getattr(self, _assertRaisesRegex)(*args, **kwargs)
-
-
-def assertRegex(self, *args, **kwargs):
- return getattr(self, _assertRegex)(*args, **kwargs)
-
-
-if PY3:
- exec_ = getattr(moves.builtins, "exec")
-
- def reraise(tp, value, tb=None):
- if value is None:
- value = tp()
- if value.__traceback__ is not tb:
- raise value.with_traceback(tb)
- raise value
-
-else:
- def exec_(_code_, _globs_=None, _locs_=None):
- """Execute code in a namespace."""
- if _globs_ is None:
- frame = sys._getframe(1)
- _globs_ = frame.f_globals
- if _locs_ is None:
- _locs_ = frame.f_locals
- del frame
- elif _locs_ is None:
- _locs_ = _globs_
- exec("""exec _code_ in _globs_, _locs_""")
-
- exec_("""def reraise(tp, value, tb=None):
- raise tp, value, tb
-""")
-
-
-if sys.version_info[:2] == (3, 2):
- exec_("""def raise_from(value, from_value):
- if from_value is None:
- raise value
- raise value from from_value
-""")
-elif sys.version_info[:2] > (3, 2):
- exec_("""def raise_from(value, from_value):
- raise value from from_value
-""")
-else:
- def raise_from(value, from_value):
- raise value
-
-
-print_ = getattr(moves.builtins, "print", None)
-if print_ is None:
- def print_(*args, **kwargs):
- """The new-style print function for Python 2.4 and 2.5."""
- fp = kwargs.pop("file", sys.stdout)
- if fp is None:
- return
-
- def write(data):
- if not isinstance(data, basestring):
- data = str(data)
- # If the file has an encoding, encode unicode with it.
- if (isinstance(fp, file) and
- isinstance(data, unicode) and
- fp.encoding is not None):
- errors = getattr(fp, "errors", None)
- if errors is None:
- errors = "strict"
- data = data.encode(fp.encoding, errors)
- fp.write(data)
- want_unicode = False
- sep = kwargs.pop("sep", None)
- if sep is not None:
- if isinstance(sep, unicode):
- want_unicode = True
- elif not isinstance(sep, str):
- raise TypeError("sep must be None or a string")
- end = kwargs.pop("end", None)
- if end is not None:
- if isinstance(end, unicode):
- want_unicode = True
- elif not isinstance(end, str):
- raise TypeError("end must be None or a string")
- if kwargs:
- raise TypeError("invalid keyword arguments to print()")
- if not want_unicode:
- for arg in args:
- if isinstance(arg, unicode):
- want_unicode = True
- break
- if want_unicode:
- newline = unicode("\n")
- space = unicode(" ")
- else:
- newline = "\n"
- space = " "
- if sep is None:
- sep = space
- if end is None:
- end = newline
- for i, arg in enumerate(args):
- if i:
- write(sep)
- write(arg)
- write(end)
-if sys.version_info[:2] < (3, 3):
- _print = print_
-
- def print_(*args, **kwargs):
- fp = kwargs.get("file", sys.stdout)
- flush = kwargs.pop("flush", False)
- _print(*args, **kwargs)
- if flush and fp is not None:
- fp.flush()
-
-_add_doc(reraise, """Reraise an exception.""")
-
-if sys.version_info[0:2] < (3, 4):
- def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
- updated=functools.WRAPPER_UPDATES):
- def wrapper(f):
- f = functools.wraps(wrapped, assigned, updated)(f)
- f.__wrapped__ = wrapped
- return f
- return wrapper
-else:
- wraps = functools.wraps
-
-
-def with_metaclass(meta, *bases):
- """Create a base class with a metaclass."""
- # This requires a bit of explanation: the basic idea is to make a dummy
- # metaclass for one level of class instantiation that replaces itself with
- # the actual metaclass.
- class metaclass(meta):
-
- def __new__(cls, name, this_bases, d):
- return meta(name, bases, d)
- return type.__new__(metaclass, 'temporary_class', (), {})
-
-
-def add_metaclass(metaclass):
- """Class decorator for creating a class with a metaclass."""
- def wrapper(cls):
- orig_vars = cls.__dict__.copy()
- slots = orig_vars.get('__slots__')
- if slots is not None:
- if isinstance(slots, str):
- slots = [slots]
- for slots_var in slots:
- orig_vars.pop(slots_var)
- orig_vars.pop('__dict__', None)
- orig_vars.pop('__weakref__', None)
- return metaclass(cls.__name__, cls.__bases__, orig_vars)
- return wrapper
-
-
-def python_2_unicode_compatible(klass):
- """
- A decorator that defines __unicode__ and __str__ methods under Python 2.
- Under Python 3 it does nothing.
-
- To support Python 2 and 3 with a single code base, define a __str__ method
- returning text and apply this decorator to the class.
- """
- if PY2:
- if '__str__' not in klass.__dict__:
- raise ValueError("@python_2_unicode_compatible cannot be applied "
- "to %s because it doesn't define __str__()." %
- klass.__name__)
- klass.__unicode__ = klass.__str__
- klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
- return klass
-
-
-# Complete the moves implementation.
-# This code is at the end of this module to speed up module loading.
-# Turn this module into a package.
-__path__ = [] # required for PEP 302 and PEP 451
-__package__ = __name__ # see PEP 366 @ReservedAssignment
-if globals().get("__spec__") is not None:
- __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
-# Remove other six meta path importers, since they cause problems. This can
-# happen if six is removed from sys.modules and then reloaded. (Setuptools does
-# this for some reason.)
-if sys.meta_path:
- for i, importer in enumerate(sys.meta_path):
- # Here's some real nastiness: Another "instance" of the six module might
- # be floating around. Therefore, we can't use isinstance() to check for
- # the six meta path importer, since the other six instance will have
- # inserted an importer with different class.
- if (type(importer).__name__ == "_SixMetaPathImporter" and
- importer.name == __name__):
- del sys.meta_path[i]
- break
- del i, importer
-# Finally, add the importer to the meta path import hook.
-sys.meta_path.append(_importer)
diff --git a/pkg_resources/_vendor/vendored.txt b/pkg_resources/_vendor/vendored.txt
index 9a94c5b..d5dbe73 100644
--- a/pkg_resources/_vendor/vendored.txt
+++ b/pkg_resources/_vendor/vendored.txt
@@ -1,4 +1,8 @@
-packaging==16.8
-pyparsing==2.1.10
-six==1.10.0
-appdirs==1.4.0
+packaging==21.3
+pyparsing==2.2.1
+appdirs==1.4.3
+jaraco.text==3.7.0
+# required for jaraco.text on older Pythons
+importlib_resources==5.4.0
+# required for importlib_resources on older Pythons
+zipp==3.7.0
diff --git a/pkg_resources/_vendor/zipp-3.7.0.dist-info/top_level.txt b/pkg_resources/_vendor/zipp-3.7.0.dist-info/top_level.txt
new file mode 100644
index 0000000..e82f676
--- /dev/null
+++ b/pkg_resources/_vendor/zipp-3.7.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+zipp
diff --git a/pkg_resources/_vendor/zipp.py b/pkg_resources/_vendor/zipp.py
new file mode 100644
index 0000000..26b723c
--- /dev/null
+++ b/pkg_resources/_vendor/zipp.py
@@ -0,0 +1,329 @@
+import io
+import posixpath
+import zipfile
+import itertools
+import contextlib
+import sys
+import pathlib
+
+if sys.version_info < (3, 7):
+ from collections import OrderedDict
+else:
+ OrderedDict = dict
+
+
+__all__ = ['Path']
+
+
+def _parents(path):
+ """
+ Given a path with elements separated by
+ posixpath.sep, generate all parents of that path.
+
+ >>> list(_parents('b/d'))
+ ['b']
+ >>> list(_parents('/b/d/'))
+ ['/b']
+ >>> list(_parents('b/d/f/'))
+ ['b/d', 'b']
+ >>> list(_parents('b'))
+ []
+ >>> list(_parents(''))
+ []
+ """
+ return itertools.islice(_ancestry(path), 1, None)
+
+
+def _ancestry(path):
+ """
+ Given a path with elements separated by
+ posixpath.sep, generate all elements of that path
+
+ >>> list(_ancestry('b/d'))
+ ['b/d', 'b']
+ >>> list(_ancestry('/b/d/'))
+ ['/b/d', '/b']
+ >>> list(_ancestry('b/d/f/'))
+ ['b/d/f', 'b/d', 'b']
+ >>> list(_ancestry('b'))
+ ['b']
+ >>> list(_ancestry(''))
+ []
+ """
+ path = path.rstrip(posixpath.sep)
+ while path and path != posixpath.sep:
+ yield path
+ path, tail = posixpath.split(path)
+
+
+_dedupe = OrderedDict.fromkeys
+"""Deduplicate an iterable in original order"""
+
+
+def _difference(minuend, subtrahend):
+ """
+ Return items in minuend not in subtrahend, retaining order
+ with O(1) lookup.
+ """
+ return itertools.filterfalse(set(subtrahend).__contains__, minuend)
+
+
+class CompleteDirs(zipfile.ZipFile):
+ """
+ A ZipFile subclass that ensures that implied directories
+ are always included in the namelist.
+ """
+
+ @staticmethod
+ def _implied_dirs(names):
+ parents = itertools.chain.from_iterable(map(_parents, names))
+ as_dirs = (p + posixpath.sep for p in parents)
+ return _dedupe(_difference(as_dirs, names))
+
+ def namelist(self):
+ names = super(CompleteDirs, self).namelist()
+ return names + list(self._implied_dirs(names))
+
+ def _name_set(self):
+ return set(self.namelist())
+
+ def resolve_dir(self, name):
+ """
+ If the name represents a directory, return that name
+ as a directory (with the trailing slash).
+ """
+ names = self._name_set()
+ dirname = name + '/'
+ dir_match = name not in names and dirname in names
+ return dirname if dir_match else name
+
+ @classmethod
+ def make(cls, source):
+ """
+ Given a source (filename or zipfile), return an
+ appropriate CompleteDirs subclass.
+ """
+ if isinstance(source, CompleteDirs):
+ return source
+
+ if not isinstance(source, zipfile.ZipFile):
+ return cls(_pathlib_compat(source))
+
+ # Only allow for FastLookup when supplied zipfile is read-only
+ if 'r' not in source.mode:
+ cls = CompleteDirs
+
+ source.__class__ = cls
+ return source
+
+
+class FastLookup(CompleteDirs):
+ """
+ ZipFile subclass to ensure implicit
+ dirs exist and are resolved rapidly.
+ """
+
+ def namelist(self):
+ with contextlib.suppress(AttributeError):
+ return self.__names
+ self.__names = super(FastLookup, self).namelist()
+ return self.__names
+
+ def _name_set(self):
+ with contextlib.suppress(AttributeError):
+ return self.__lookup
+ self.__lookup = super(FastLookup, self)._name_set()
+ return self.__lookup
+
+
+def _pathlib_compat(path):
+ """
+ For path-like objects, convert to a filename for compatibility
+ on Python 3.6.1 and earlier.
+ """
+ try:
+ return path.__fspath__()
+ except AttributeError:
+ return str(path)
+
+
+class Path:
+ """
+ A pathlib-compatible interface for zip files.
+
+ Consider a zip file with this structure::
+
+ .
+ ├── a.txt
+ └── b
+ ├── c.txt
+ └── d
+ └── e.txt
+
+ >>> data = io.BytesIO()
+ >>> zf = zipfile.ZipFile(data, 'w')
+ >>> zf.writestr('a.txt', 'content of a')
+ >>> zf.writestr('b/c.txt', 'content of c')
+ >>> zf.writestr('b/d/e.txt', 'content of e')
+ >>> zf.filename = 'mem/abcde.zip'
+
+ Path accepts the zipfile object itself or a filename
+
+ >>> root = Path(zf)
+
+ From there, several path operations are available.
+
+ Directory iteration (including the zip file itself):
+
+ >>> a, b = root.iterdir()
+ >>> a
+ Path('mem/abcde.zip', 'a.txt')
+ >>> b
+ Path('mem/abcde.zip', 'b/')
+
+ name property:
+
+ >>> b.name
+ 'b'
+
+ join with divide operator:
+
+ >>> c = b / 'c.txt'
+ >>> c
+ Path('mem/abcde.zip', 'b/c.txt')
+ >>> c.name
+ 'c.txt'
+
+ Read text:
+
+ >>> c.read_text()
+ 'content of c'
+
+ existence:
+
+ >>> c.exists()
+ True
+ >>> (b / 'missing.txt').exists()
+ False
+
+ Coercion to string:
+
+ >>> import os
+ >>> str(c).replace(os.sep, posixpath.sep)
+ 'mem/abcde.zip/b/c.txt'
+
+ At the root, ``name``, ``filename``, and ``parent``
+ resolve to the zipfile. Note these attributes are not
+ valid and will raise a ``ValueError`` if the zipfile
+ has no filename.
+
+ >>> root.name
+ 'abcde.zip'
+ >>> str(root.filename).replace(os.sep, posixpath.sep)
+ 'mem/abcde.zip'
+ >>> str(root.parent)
+ 'mem'
+ """
+
+ __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"
+
+ def __init__(self, root, at=""):
+ """
+ Construct a Path from a ZipFile or filename.
+
+ Note: When the source is an existing ZipFile object,
+ its type (__class__) will be mutated to a
+ specialized type. If the caller wishes to retain the
+ original type, the caller should either create a
+ separate ZipFile object or pass a filename.
+ """
+ self.root = FastLookup.make(root)
+ self.at = at
+
+ def open(self, mode='r', *args, pwd=None, **kwargs):
+ """
+ Open this entry as text or binary following the semantics
+ of ``pathlib.Path.open()`` by passing arguments through
+ to io.TextIOWrapper().
+ """
+ if self.is_dir():
+ raise IsADirectoryError(self)
+ zip_mode = mode[0]
+ if not self.exists() and zip_mode == 'r':
+ raise FileNotFoundError(self)
+ stream = self.root.open(self.at, zip_mode, pwd=pwd)
+ if 'b' in mode:
+ if args or kwargs:
+ raise ValueError("encoding args invalid for binary operation")
+ return stream
+ return io.TextIOWrapper(stream, *args, **kwargs)
+
+ @property
+ def name(self):
+ return pathlib.Path(self.at).name or self.filename.name
+
+ @property
+ def suffix(self):
+ return pathlib.Path(self.at).suffix or self.filename.suffix
+
+ @property
+ def suffixes(self):
+ return pathlib.Path(self.at).suffixes or self.filename.suffixes
+
+ @property
+ def stem(self):
+ return pathlib.Path(self.at).stem or self.filename.stem
+
+ @property
+ def filename(self):
+ return pathlib.Path(self.root.filename).joinpath(self.at)
+
+ def read_text(self, *args, **kwargs):
+ with self.open('r', *args, **kwargs) as strm:
+ return strm.read()
+
+ def read_bytes(self):
+ with self.open('rb') as strm:
+ return strm.read()
+
+ def _is_child(self, path):
+ return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/")
+
+ def _next(self, at):
+ return self.__class__(self.root, at)
+
+ def is_dir(self):
+ return not self.at or self.at.endswith("/")
+
+ def is_file(self):
+ return self.exists() and not self.is_dir()
+
+ def exists(self):
+ return self.at in self.root._name_set()
+
+ def iterdir(self):
+ if not self.is_dir():
+ raise ValueError("Can't listdir a file")
+ subs = map(self._next, self.root.namelist())
+ return filter(self._is_child, subs)
+
+ def __str__(self):
+ return posixpath.join(self.root.filename, self.at)
+
+ def __repr__(self):
+ return self.__repr.format(self=self)
+
+ def joinpath(self, *other):
+ next = posixpath.join(self.at, *map(_pathlib_compat, other))
+ return self._next(self.root.resolve_dir(next))
+
+ __truediv__ = joinpath
+
+ @property
+ def parent(self):
+ if not self.at:
+ return self.filename.parent
+ parent_at = posixpath.dirname(self.at.rstrip('/'))
+ if parent_at:
+ parent_at += '/'
+ return self._next(parent_at)
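A short sketch (not part of this change) of what ``CompleteDirs``/``Path`` add over a plain ``ZipFile`` — implied directory entries and pathlib-style traversal — assuming the upstream ``zipp`` 3.x distribution::

    # Illustrative only; requires the standalone `zipp` package.
    import io
    import zipfile
    import zipp

    buf = io.BytesIO()
    with zipfile.ZipFile(buf, "w") as zf:
        zf.writestr("pkg/data/a.txt", "hello")    # no explicit directory entries

    print(zipfile.ZipFile(buf).namelist())        # ['pkg/data/a.txt']

    root = zipp.Path(zipfile.ZipFile(buf))
    print([p.at for p in root.iterdir()])         # ['pkg/'] -- implied directory
    print((root / "pkg" / "data" / "a.txt").read_text())  # 'hello'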
diff --git a/pkg_resources/api_tests.txt b/pkg_resources/api_tests.txt
index 0a75170..ded1880 100644
--- a/pkg_resources/api_tests.txt
+++ b/pkg_resources/api_tests.txt
@@ -36,7 +36,7 @@ Distributions have various introspectable attributes::
>>> dist.version
'0.9'
- >>> dist.py_version == sys.version[:3]
+ >>> dist.py_version == '{}.{}'.format(*sys.version_info)
True
>>> print(dist.platform)
@@ -290,8 +290,8 @@ Platform Compatibility Rules
----------------------------
On the Mac, there are potential compatibility issues for modules compiled
-on newer versions of Mac OS X than what the user is running. Additionally,
-Mac OS X will soon have two platforms to contend with: Intel and PowerPC.
+on newer versions of macOS than what the user is running. Additionally,
+macOS will soon have two platforms to contend with: Intel and PowerPC.
Basic equality works as on other platforms::
diff --git a/pkg_resources/extern/__init__.py b/pkg_resources/extern/__init__.py
index b4156fe..70897ee 100644
--- a/pkg_resources/extern/__init__.py
+++ b/pkg_resources/extern/__init__.py
@@ -1,3 +1,4 @@
+import importlib.util
import sys
@@ -20,17 +21,10 @@ class VendorImporter:
yield self.vendor_pkg + '.'
yield ''
- def find_module(self, fullname, path=None):
- """
- Return self when fullname starts with root_name and the
- target module is one vendored through this importer.
- """
+ def _module_matches_namespace(self, fullname):
+ """Figure out if the target module is vendored."""
root, base, target = fullname.partition(self.root_name + '.')
- if root:
- return
- if not any(map(target.startswith, self.vendored_names)):
- return
- return self
+ return not root and any(map(target.startswith, self.vendored_names))
def load_module(self, fullname):
"""
@@ -43,13 +37,6 @@ class VendorImporter:
__import__(extant)
mod = sys.modules[extant]
sys.modules[fullname] = mod
- # mysterious hack:
- # Remove the reference to the extant package/module
- # on later Python versions to cause relative imports
- # in the vendor package to resolve the same modules
- # as those going through this importer.
- if sys.version_info > (3, 3):
- del sys.modules[extant]
return mod
except ImportError:
pass
@@ -61,6 +48,19 @@ class VendorImporter:
"distribution.".format(**locals())
)
+ def create_module(self, spec):
+ return self.load_module(spec.name)
+
+ def exec_module(self, module):
+ pass
+
+ def find_spec(self, fullname, path=None, target=None):
+ """Return a module spec for vendored names."""
+ return (
+ importlib.util.spec_from_loader(fullname, self)
+ if self._module_matches_namespace(fullname) else None
+ )
+
def install(self):
"""
Install this importer into sys.meta_path if not already present.
@@ -69,5 +69,8 @@ class VendorImporter:
sys.meta_path.append(self)
-names = 'packaging', 'pyparsing', 'six', 'appdirs'
+names = (
+ 'packaging', 'pyparsing', 'appdirs', 'jaraco', 'importlib_resources',
+ 'more_itertools',
+)
VendorImporter(__name__, names).install()
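The ``find_spec``/``create_module``/``exec_module`` trio above is the PEP 451 replacement for the removed ``find_module`` hook; a self-contained sketch of the same pattern (hypothetical ``AliasImporter``, not part of setuptools)::

    # Illustrative only; aliases an importable module under a new name using
    # the same finder/loader protocol VendorImporter now implements.
    import importlib
    import importlib.util
    import sys

    class AliasImporter:
        def __init__(self, alias, real_name):
            self.alias = alias
            self.real_name = real_name

        def find_spec(self, fullname, path=None, target=None):
            # Claim only the aliased name; returning None defers to other finders.
            if fullname != self.alias:
                return None
            return importlib.util.spec_from_loader(fullname, self)

        def create_module(self, spec):
            # Reuse the already-importable real module as the alias.
            return importlib.import_module(self.real_name)

        def exec_module(self, module):
            pass  # the reused module is already fully initialized

    sys.meta_path.append(AliasImporter("json_alias", "json"))
    import json_alias
    print(json_alias.dumps({"ok": True}))  # {"ok": true}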
diff --git a/pkg_resources/py31compat.py b/pkg_resources/py31compat.py
deleted file mode 100644
index 331a51b..0000000
--- a/pkg_resources/py31compat.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import os
-import errno
-import sys
-
-
-def _makedirs_31(path, exist_ok=False):
- try:
- os.makedirs(path)
- except OSError as exc:
- if not exist_ok or exc.errno != errno.EEXIST:
- raise
-
-
-# rely on compatibility behavior until mode considerations
-# and exists_ok considerations are disentangled.
-# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
-needs_makedirs = (
- sys.version_info < (3, 2, 5) or
- (3, 3) <= sys.version_info < (3, 3, 6) or
- (3, 4) <= sys.version_info < (3, 4, 1)
-)
-makedirs = _makedirs_31 if needs_makedirs else os.makedirs
diff --git a/pkg_resources/tests/data/my-test-package-source/setup.cfg b/pkg_resources/tests/data/my-test-package-source/setup.cfg
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/pkg_resources/tests/data/my-test-package-source/setup.cfg
diff --git a/pkg_resources/tests/data/my-test-package-source/setup.py b/pkg_resources/tests/data/my-test-package-source/setup.py
new file mode 100644
index 0000000..fe80d28
--- /dev/null
+++ b/pkg_resources/tests/data/my-test-package-source/setup.py
@@ -0,0 +1,6 @@
+import setuptools
+setuptools.setup(
+ name="my-test-package",
+ version="1.0",
+ zip_safe=True,
+)
diff --git a/pkg_resources/tests/data/my-test-package-zip/my-test-package.zip b/pkg_resources/tests/data/my-test-package-zip/my-test-package.zip
new file mode 100644
index 0000000..81f9a01
--- /dev/null
+++ b/pkg_resources/tests/data/my-test-package-zip/my-test-package.zip
Binary files differ
diff --git a/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/PKG-INFO b/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/PKG-INFO
new file mode 100644
index 0000000..7328e3f
--- /dev/null
+++ b/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/PKG-INFO
@@ -0,0 +1,10 @@
+Metadata-Version: 1.0
+Name: my-test-package
+Version: 1.0
+Summary: UNKNOWN
+Home-page: UNKNOWN
+Author: UNKNOWN
+Author-email: UNKNOWN
+License: UNKNOWN
+Description: UNKNOWN
+Platform: UNKNOWN
diff --git a/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/SOURCES.txt b/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/SOURCES.txt
new file mode 100644
index 0000000..3c4ee16
--- /dev/null
+++ b/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/SOURCES.txt
@@ -0,0 +1,7 @@
+setup.cfg
+setup.py
+my_test_package.egg-info/PKG-INFO
+my_test_package.egg-info/SOURCES.txt
+my_test_package.egg-info/dependency_links.txt
+my_test_package.egg-info/top_level.txt
+my_test_package.egg-info/zip-safe \ No newline at end of file
diff --git a/setuptools.egg-info/zip-safe b/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/dependency_links.txt
index 8b13789..8b13789 100644
--- a/setuptools.egg-info/zip-safe
+++ b/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/dependency_links.txt
diff --git a/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/top_level.txt b/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/top_level.txt
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/top_level.txt
@@ -0,0 +1 @@
+
diff --git a/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/zip-safe b/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/zip-safe
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/zip-safe
@@ -0,0 +1 @@
+
diff --git a/pkg_resources/tests/data/my-test-package_zipped-egg/my_test_package-1.0-py3.7.egg b/pkg_resources/tests/data/my-test-package_zipped-egg/my_test_package-1.0-py3.7.egg
new file mode 100644
index 0000000..5115b89
--- /dev/null
+++ b/pkg_resources/tests/data/my-test-package_zipped-egg/my_test_package-1.0-py3.7.egg
Binary files differ
diff --git a/pkg_resources/tests/test_find_distributions.py b/pkg_resources/tests/test_find_distributions.py
index d735c59..b01b482 100644
--- a/pkg_resources/tests/test_find_distributions.py
+++ b/pkg_resources/tests/test_find_distributions.py
@@ -1,17 +1,9 @@
-import subprocess
-import sys
-
+import py
import pytest
import pkg_resources
-SETUP_TEMPLATE = """
-import setuptools
-setuptools.setup(
- name="my-test-package",
- version="1.0",
- zip_safe=True,
-)
-""".lstrip()
+
+TESTS_DATA_DIR = py.path.local(__file__).dirpath('data')
class TestFindDistributions:
@@ -21,46 +13,31 @@ class TestFindDistributions:
target_dir = tmpdir.mkdir('target')
# place a .egg named directory in the target that is not an egg:
target_dir.mkdir('not.an.egg')
- return str(target_dir)
-
- @pytest.fixture
- def project_dir(self, tmpdir):
- project_dir = tmpdir.mkdir('my-test-package')
- (project_dir / "setup.py").write(SETUP_TEMPLATE)
- return str(project_dir)
+ return target_dir
def test_non_egg_dir_named_egg(self, target_dir):
- dists = pkg_resources.find_distributions(target_dir)
+ dists = pkg_resources.find_distributions(str(target_dir))
+ assert not list(dists)
+
+ def test_standalone_egg_directory(self, target_dir):
+ (TESTS_DATA_DIR / 'my-test-package_unpacked-egg').copy(target_dir)
+ dists = pkg_resources.find_distributions(str(target_dir))
+ assert [dist.project_name for dist in dists] == ['my-test-package']
+ dists = pkg_resources.find_distributions(str(target_dir), only=True)
assert not list(dists)
- def test_standalone_egg_directory(self, project_dir, target_dir):
- # install this distro as an unpacked egg:
- args = [
- sys.executable,
- '-c', 'from setuptools.command.easy_install import main; main()',
- '-mNx',
- '-d', target_dir,
- '--always-unzip',
- project_dir,
- ]
- subprocess.check_call(args)
- dists = pkg_resources.find_distributions(target_dir)
+ def test_zipped_egg(self, target_dir):
+ (TESTS_DATA_DIR / 'my-test-package_zipped-egg').copy(target_dir)
+ dists = pkg_resources.find_distributions(str(target_dir))
assert [dist.project_name for dist in dists] == ['my-test-package']
- dists = pkg_resources.find_distributions(target_dir, only=True)
+ dists = pkg_resources.find_distributions(str(target_dir), only=True)
assert not list(dists)
- def test_zipped_egg(self, project_dir, target_dir):
- # install this distro as an unpacked egg:
- args = [
- sys.executable,
- '-c', 'from setuptools.command.easy_install import main; main()',
- '-mNx',
- '-d', target_dir,
- '--zip-ok',
- project_dir,
- ]
- subprocess.check_call(args)
- dists = pkg_resources.find_distributions(target_dir)
+ def test_zipped_sdist_one_level_removed(self, target_dir):
+ (TESTS_DATA_DIR / 'my-test-package-zip').copy(target_dir)
+ dists = pkg_resources.find_distributions(
+ str(target_dir / "my-test-package.zip"))
assert [dist.project_name for dist in dists] == ['my-test-package']
- dists = pkg_resources.find_distributions(target_dir, only=True)
+ dists = pkg_resources.find_distributions(
+ str(target_dir / "my-test-package.zip"), only=True)
assert not list(dists)
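The rewritten tests above scan pre-built fixture eggs from pkg_resources/tests/data instead of building them with easy_install in a subprocess. Outside the test suite the API is used the same way; a hedged usage sketch ('/tmp/plugins' is an illustrative path, not something this patch creates):

    import pkg_resources

    # Yields a Distribution for each egg/dist-info found under the path.
    for dist in pkg_resources.find_distributions('/tmp/plugins'):
        print(dist.project_name)

    # only=True requires the path itself to be a distribution; children such
    # as an unpacked .egg directory inside it are ignored, which is what the
    # only=True assertions in the tests above rely on.
    list(pkg_resources.find_distributions('/tmp/plugins', only=True))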
diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py
index 7442b79..6518820 100644
--- a/pkg_resources/tests/test_pkg_resources.py
+++ b/pkg_resources/tests/test_pkg_resources.py
@@ -1,6 +1,3 @@
-# coding: utf-8
-from __future__ import unicode_literals
-
import sys
import tempfile
import os
@@ -12,17 +9,19 @@ import stat
import distutils.dist
import distutils.command.install_egg_info
-from pkg_resources.extern.six.moves import map
+try:
+ from unittest import mock
+except ImportError:
+ import mock
+
+from pkg_resources import (
+ DistInfoDistribution, Distribution, EggInfoDistribution,
+)
import pytest
import pkg_resources
-try:
- unicode
-except NameError:
- unicode = str
-
def timestamp(dt):
"""
@@ -35,7 +34,7 @@ def timestamp(dt):
return time.mktime(dt.timetuple())
-class EggRemover(unicode):
+class EggRemover(str):
def __call__(self):
if self in sys.path:
sys.path.remove(self)
@@ -43,7 +42,7 @@ class EggRemover(unicode):
os.remove(self)
-class TestZipProvider(object):
+class TestZipProvider:
finalizers = []
ref_time = datetime.datetime(2013, 5, 12, 13, 25, 0)
@@ -90,7 +89,6 @@ class TestZipProvider(object):
expected_root = ['data.dat', 'mod.py', 'subdir']
assert sorted(zp.resource_listdir('')) == expected_root
- assert sorted(zp.resource_listdir('/')) == expected_root
expected_subdir = ['data2.dat', 'mod2.py']
assert sorted(zp.resource_listdir('subdir')) == expected_subdir
@@ -103,7 +101,6 @@ class TestZipProvider(object):
zp2 = pkg_resources.ZipProvider(mod2)
assert sorted(zp2.resource_listdir('')) == expected_subdir
- assert sorted(zp2.resource_listdir('/')) == expected_subdir
assert zp2.resource_listdir('subdir') == []
assert zp2.resource_listdir('subdir/') == []
@@ -132,16 +129,42 @@ class TestZipProvider(object):
manager.cleanup_resources()
-class TestResourceManager(object):
+class TestResourceManager:
def test_get_cache_path(self):
mgr = pkg_resources.ResourceManager()
path = mgr.get_cache_path('foo')
type_ = str(type(path))
message = "Unexpected type from get_cache_path: " + type_
- assert isinstance(path, (unicode, str)), message
+ assert isinstance(path, str), message
+
+ def test_get_cache_path_race(self, tmpdir):
+ # Patch to os.path.isdir to create a race condition
+ def patched_isdir(dirname, unpatched_isdir=pkg_resources.isdir):
+ patched_isdir.dirnames.append(dirname)
+
+ was_dir = unpatched_isdir(dirname)
+ if not was_dir:
+ os.makedirs(dirname)
+ return was_dir
+
+ patched_isdir.dirnames = []
+
+ # Get a cache path with a "race condition"
+ mgr = pkg_resources.ResourceManager()
+ mgr.set_extraction_path(str(tmpdir))
+ archive_name = os.sep.join(('foo', 'bar', 'baz'))
+ with mock.patch.object(pkg_resources, 'isdir', new=patched_isdir):
+ mgr.get_cache_path(archive_name)
+
+ # Because this test relies on the implementation details of this
+ # function, these assertions are a sentinel to ensure that the
+ # test suite will not fail silently if the implementation changes.
+ called_dirnames = patched_isdir.dirnames
+ assert len(called_dirnames) == 2
+ assert called_dirnames[0].split(os.sep)[-2:] == ['foo', 'bar']
+ assert called_dirnames[1].split(os.sep)[-1:] == ['foo']
-class TestIndependence:
"""
Tests to ensure that pkg_resources runs independently from setuptools.
"""
@@ -163,7 +186,137 @@ class TestIndependence:
subprocess.check_call(cmd)
-class TestDeepVersionLookupDistutils(object):
+def make_test_distribution(metadata_path, metadata):
+ """
+ Make a test Distribution object, and return it.
+
+ :param metadata_path: the path to the metadata file that should be
+ created. This should be inside a distribution directory that should
+ also be created. For example, an argument value might end with
+ "<project>.dist-info/METADATA".
+ :param metadata: the desired contents of the metadata file, as bytes.
+ """
+ dist_dir = os.path.dirname(metadata_path)
+ os.mkdir(dist_dir)
+ with open(metadata_path, 'wb') as f:
+ f.write(metadata)
+ dists = list(pkg_resources.distributions_from_metadata(dist_dir))
+ dist, = dists
+
+ return dist
+
+
+def test_get_metadata__bad_utf8(tmpdir):
+ """
+ Test a metadata file with bytes that can't be decoded as utf-8.
+ """
+ filename = 'METADATA'
+ # Convert the tmpdir LocalPath object to a string before joining.
+ metadata_path = os.path.join(str(tmpdir), 'foo.dist-info', filename)
+ # Encode a non-ascii string with the wrong encoding (not utf-8).
+ metadata = 'née'.encode('iso-8859-1')
+ dist = make_test_distribution(metadata_path, metadata=metadata)
+
+ with pytest.raises(UnicodeDecodeError) as excinfo:
+ dist.get_metadata(filename)
+
+ exc = excinfo.value
+ actual = str(exc)
+ expected = (
+ # The error message starts with "'utf-8' codec ..." However, the
+ # spelling of "utf-8" can vary (e.g. "utf8") so we don't include it
+ "codec can't decode byte 0xe9 in position 1: "
+ 'invalid continuation byte in METADATA file at path: '
+ )
+ assert expected in actual, 'actual: {}'.format(actual)
+ assert actual.endswith(metadata_path), 'actual: {}'.format(actual)
+
+
+def make_distribution_no_version(tmpdir, basename):
+ """
+ Create a distribution directory with no file containing the version.
+ """
+ dist_dir = tmpdir / basename
+ dist_dir.ensure_dir()
+ # Make the directory non-empty so distributions_from_metadata()
+ # will detect it and yield it.
+ dist_dir.join('temp.txt').ensure()
+
+ if sys.version_info < (3, 6):
+ dist_dir = str(dist_dir)
+
+ dists = list(pkg_resources.distributions_from_metadata(dist_dir))
+ assert len(dists) == 1
+ dist, = dists
+
+ return dist, dist_dir
+
+
+@pytest.mark.parametrize(
+ 'suffix, expected_filename, expected_dist_type',
+ [
+ ('egg-info', 'PKG-INFO', EggInfoDistribution),
+ ('dist-info', 'METADATA', DistInfoDistribution),
+ ],
+)
+def test_distribution_version_missing(
+ tmpdir, suffix, expected_filename, expected_dist_type):
+ """
+ Test Distribution.version when the "Version" header is missing.
+ """
+ basename = 'foo.{}'.format(suffix)
+ dist, dist_dir = make_distribution_no_version(tmpdir, basename)
+
+ expected_text = (
+ "Missing 'Version:' header and/or {} file at path: "
+ ).format(expected_filename)
+ metadata_path = os.path.join(dist_dir, expected_filename)
+
+ # Now check the exception raised when the "version" attribute is accessed.
+ with pytest.raises(ValueError) as excinfo:
+ dist.version
+
+ err = str(excinfo.value)
+ # Include a string expression after the assert so the full strings
+ # will be visible for inspection on failure.
+ assert expected_text in err, str((expected_text, err))
+
+ # Also check the args passed to the ValueError.
+ msg, dist = excinfo.value.args
+ assert expected_text in msg
+ # Check that the message portion contains the path.
+ assert metadata_path in msg, str((metadata_path, msg))
+ assert type(dist) == expected_dist_type
+
+
+def test_distribution_version_missing_undetected_path():
+ """
+ Test Distribution.version when the "Version" header is missing and
+ the path can't be detected.
+ """
+ # Create a Distribution object with no metadata argument, which results
+ # in an empty metadata provider.
+ dist = Distribution('/foo')
+ with pytest.raises(ValueError) as excinfo:
+ dist.version
+
+ msg, dist = excinfo.value.args
+ expected = (
+ "Missing 'Version:' header and/or PKG-INFO file at path: "
+ '[could not detect]'
+ )
+ assert msg == expected
+
+
+@pytest.mark.parametrize('only', [False, True])
+def test_dist_info_is_not_dir(tmp_path, only):
+ """Test path containing a file with dist-info extension."""
+ dist_info = tmp_path / 'foobar.dist-info'
+ dist_info.touch()
+ assert not pkg_resources.dist_factory(str(tmp_path), str(dist_info), only)
+
+
+class TestDeepVersionLookupDistutils:
@pytest.fixture
def env(self, tmpdir):
"""
@@ -207,3 +360,56 @@ class TestDeepVersionLookupDistutils(object):
req = pkg_resources.Requirement.parse('foo>=1.9')
dist = pkg_resources.WorkingSet([env.paths['lib']]).find(req)
assert dist.version == version
+
+ @pytest.mark.parametrize(
+ 'unnormalized, normalized',
+ [
+ ('foo', 'foo'),
+ ('foo/', 'foo'),
+ ('foo/bar', 'foo/bar'),
+ ('foo/bar/', 'foo/bar'),
+ ],
+ )
+ def test_normalize_path_trailing_sep(self, unnormalized, normalized):
+ """Ensure the trailing slash is cleaned for path comparison.
+
+ See pypa/setuptools#1519.
+ """
+ result_from_unnormalized = pkg_resources.normalize_path(unnormalized)
+ result_from_normalized = pkg_resources.normalize_path(normalized)
+ assert result_from_unnormalized == result_from_normalized
+
+ @pytest.mark.skipif(
+ os.path.normcase('A') != os.path.normcase('a'),
+ reason='Testing case-insensitive filesystems.',
+ )
+ @pytest.mark.parametrize(
+ 'unnormalized, normalized',
+ [
+ ('MiXeD/CasE', 'mixed/case'),
+ ],
+ )
+ def test_normalize_path_normcase(self, unnormalized, normalized):
+ """Ensure mixed case is normalized on case-insensitive filesystems.
+ """
+ result_from_unnormalized = pkg_resources.normalize_path(unnormalized)
+ result_from_normalized = pkg_resources.normalize_path(normalized)
+ assert result_from_unnormalized == result_from_normalized
+
+ @pytest.mark.skipif(
+ os.path.sep != '\\',
+ reason='Testing systems using backslashes as path separators.',
+ )
+ @pytest.mark.parametrize(
+ 'unnormalized, expected',
+ [
+ ('forward/slash', 'forward\\slash'),
+ ('forward/slash/', 'forward\\slash'),
+ ('backward\\slash\\', 'backward\\slash'),
+ ],
+ )
+ def test_normalize_path_backslash_sep(self, unnormalized, expected):
+ """Ensure path seps are cleaned on backslash path sep systems.
+ """
+ result = pkg_resources.normalize_path(unnormalized)
+ assert result.endswith(expected)
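The new parametrized tests above pin down pkg_resources.normalize_path(): trailing separators are stripped, case is folded on case-insensitive filesystems, and forward slashes become the native separator on systems that use backslashes. A small sketch of the first property (the paths are illustrative):

    import pkg_resources

    # Two spellings of the same relative location normalise identically.
    assert (
        pkg_resources.normalize_path('foo/bar/')
        == pkg_resources.normalize_path('foo/bar')
    )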
diff --git a/pkg_resources/tests/test_resources.py b/pkg_resources/tests/test_resources.py
index 05f35ad..107dda7 100644
--- a/pkg_resources/tests/test_resources.py
+++ b/pkg_resources/tests/test_resources.py
@@ -1,13 +1,9 @@
-from __future__ import unicode_literals
-
import os
import sys
import string
import platform
import itertools
-from pkg_resources.extern.six.moves import map
-
import pytest
from pkg_resources.extern import packaging
@@ -116,7 +112,7 @@ class TestDistro:
self.checkFooPkg(d)
d = Distribution("/some/path")
- assert d.py_version == sys.version[:3]
+ assert d.py_version == '{}.{}'.format(*sys.version_info)
assert d.platform is None
def testDistroParse(self):
@@ -145,6 +141,35 @@ class TestDistro:
for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0":
self.checkRequires(self.distRequires(v), v)
+ needs_object_dir = pytest.mark.skipif(
+ not hasattr(object, '__dir__'),
+ reason='object.__dir__ necessary for self.__dir__ implementation',
+ )
+
+ def test_distribution_dir(self):
+ d = pkg_resources.Distribution()
+ dir(d)
+
+ @needs_object_dir
+ def test_distribution_dir_includes_provider_dir(self):
+ d = pkg_resources.Distribution()
+ before = d.__dir__()
+ assert 'test_attr' not in before
+ d._provider.test_attr = None
+ after = d.__dir__()
+ assert len(after) == len(before) + 1
+ assert 'test_attr' in after
+
+ @needs_object_dir
+ def test_distribution_dir_ignores_provider_dir_leading_underscore(self):
+ d = pkg_resources.Distribution()
+ before = d.__dir__()
+ assert '_test_attr' not in before
+ d._provider._test_attr = None
+ after = d.__dir__()
+ assert len(after) == len(before)
+ assert '_test_attr' not in after
+
def testResolve(self):
ad = pkg_resources.Environment([])
ws = WorkingSet([])
@@ -463,6 +488,14 @@ class TestEntryPoints:
with pytest.raises(ValueError):
EntryPoint.parse_map(self.submap_str)
+ def testDeprecationWarnings(self):
+ ep = EntryPoint(
+ "foo", "pkg_resources.tests.test_resources", ["TestEntryPoints"],
+ ["x"]
+ )
+ with pytest.warns(pkg_resources.PkgResourcesDeprecationWarning):
+ ep.load(require=False)
+
class TestRequirements:
def testBasics(self):
@@ -482,6 +515,11 @@ class TestRequirements:
assert r1 == r2
assert str(r1) == str(r2)
assert str(r2) == "Twisted==1.2c1,>=1.2"
+ assert (
+ Requirement("Twisted")
+ !=
+ Requirement("Twisted @ https://localhost/twisted.zip")
+ )
def testBasicContains(self):
r = Requirement("Twisted>=1.2")
@@ -508,11 +546,23 @@ class TestRequirements:
==
hash((
"twisted",
+ None,
packaging.specifiers.SpecifierSet(">=1.2"),
frozenset(["foo", "bar"]),
None
))
)
+ assert (
+ hash(Requirement.parse("Twisted @ https://localhost/twisted.zip"))
+ ==
+ hash((
+ "twisted",
+ "https://localhost/twisted.zip",
+ packaging.specifiers.SpecifierSet(),
+ frozenset(),
+ None
+ ))
+ )
def testVersionEquality(self):
r1 = Requirement.parse("foo==0.3a2")
@@ -639,7 +689,7 @@ class TestParsing:
assert (
Requirement.parse("name==1.1;python_version=='2.7'")
!=
- Requirement.parse("name==1.1;python_version=='3.3'")
+ Requirement.parse("name==1.1;python_version=='3.6'")
)
assert (
Requirement.parse("name==1.0;python_version=='2.7'")
@@ -647,13 +697,13 @@ class TestParsing:
Requirement.parse("name==1.2;python_version=='2.7'")
)
assert (
- Requirement.parse("name[foo]==1.0;python_version=='3.3'")
+ Requirement.parse("name[foo]==1.0;python_version=='3.6'")
!=
- Requirement.parse("name[foo,bar]==1.0;python_version=='3.3'")
+ Requirement.parse("name[foo,bar]==1.0;python_version=='3.6'")
)
def test_local_version(self):
- req, = parse_requirements('foo==1.0.org1')
+ req, = parse_requirements('foo==1.0+org1')
def test_spaces_between_multiple_versions(self):
req, = parse_requirements('foo>=1.0, <3')
@@ -723,7 +773,7 @@ class TestNamespaces:
ns_str = "__import__('pkg_resources').declare_namespace(__name__)\n"
- @pytest.yield_fixture
+ @pytest.fixture
def symlinked_tmpdir(self, tmpdir):
"""
Where available, return the tempdir as a symlink,
@@ -741,7 +791,7 @@ class TestNamespaces:
finally:
os.unlink(link_name)
- @pytest.yield_fixture(autouse=True)
+ @pytest.fixture(autouse=True)
def patched_path(self, tmpdir):
"""
Patch sys.path to include the 'site-pkgs' dir. Also
diff --git a/pkg_resources/tests/test_working_set.py b/pkg_resources/tests/test_working_set.py
index 42ddcc8..db13c71 100644
--- a/pkg_resources/tests/test_working_set.py
+++ b/pkg_resources/tests/test_working_set.py
@@ -12,8 +12,8 @@ from .test_resources import Metadata
def strip_comments(s):
return '\n'.join(
- l for l in s.split('\n')
- if l.strip() and not l.strip().startswith('#')
+ line for line in s.split('\n')
+ if line.strip() and not line.strip().startswith('#')
)
@@ -37,7 +37,7 @@ def parse_distributions(s):
requires=['foo>=3.0', 'baz; extra=="feature"']
'''
s = s.strip()
- for spec in re.split('\n(?=[^\s])', s):
+ for spec in re.split(r'\n(?=[^\s])', s):
if not spec:
continue
fields = spec.split('\n', 1)
@@ -54,7 +54,7 @@ def parse_distributions(s):
yield dist
-class FakeInstaller(object):
+class FakeInstaller:
def __init__(self, installable_dists):
self._installable_dists = installable_dists
@@ -87,7 +87,7 @@ def parametrize_test_working_set_resolve(*test_list):
):
idlist.append(id_)
expected = strip_comments(expected.strip())
- if re.match('\w+$', expected):
+ if re.match(r'\w+$', expected):
expected = getattr(pkg_resources, expected)
assert issubclass(expected, Exception)
else:
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..6b426a3
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,59 @@
+[build-system]
+requires = []
+build-backend = "setuptools.build_meta"
+backend-path = ["."]
+
+[tool.black]
+skip-string-normalization = true
+
+[tool.setuptools_scm]
+
+[pytest.enabler.black]
+#addopts = "--black"
+
+[pytest.enabler.mypy]
+#addopts = "--mypy"
+
+[pytest.enabler.flake8]
+addopts = "--flake8"
+
+[pytest.enabler.cov]
+addopts = "--cov"
+
+[pytest.enabler.xdist]
+addopts = "-n auto"
+
+[tool.towncrier]
+ package = "setuptools"
+ package_dir = "setuptools"
+ filename = "CHANGES.rst"
+ directory = "changelog.d"
+ title_format = "v{version}"
+ issue_format = "#{issue}"
+ template = "tools/towncrier_template.rst"
+ underlines = ["-", "^"]
+
+ [[tool.towncrier.type]]
+ directory = "deprecation"
+ name = "Deprecations"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "breaking"
+ name = "Breaking Changes"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "change"
+ name = "Changes"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "doc"
+ name = "Documentation changes"
+ showcontent = true
+
+ [[tool.towncrier.type]]
+ directory = "misc"
+ name = "Misc"
+ showcontent = true
diff --git a/pytest.ini b/pytest.ini
index 16fdc5a..14c7e94 100755..100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -1,6 +1,62 @@
[pytest]
-addopts=--doctest-modules --ignore release.py --ignore setuptools/lib2to3_ex.py --ignore tests/manual_test.py --ignore tests/test_pypi.py --ignore tests/shlib_test --doctest-glob=pkg_resources/api_tests.txt --ignore scripts/upload-old-releases-as-zip.py --ignore pavement.py --ignore setuptools/tests/mod_with_constant.py -rsxX
-norecursedirs=dist build *.egg setuptools/extern pkg_resources/extern .*
-flake8-ignore =
- setuptools/site-patch.py F821
- setuptools/py*compat.py F811
+norecursedirs=dist build .tox .eggs
+addopts=
+ --doctest-modules
+ --doctest-glob=pkg_resources/api_tests.txt
+ -r sxX
+doctest_optionflags=ALLOW_UNICODE ELLIPSIS
+filterwarnings=
+ # Fail on warnings
+ error
+
+ ## upstream
+ # Suppress deprecation warning in flake8
+ ignore:SelectableGroups dict interface is deprecated::flake8
+
+ # shopkeep/pytest-black#55
+ ignore:<class 'pytest_black.BlackItem'> is not using a cooperative constructor:pytest.PytestDeprecationWarning
+ ignore:The \(fspath. py.path.local\) argument to BlackItem is deprecated.:pytest.PytestDeprecationWarning
+ ignore:BlackItem is an Item subclass and should not be a collector:pytest.PytestWarning
+
+ # tholo/pytest-flake8#83
+ ignore:<class 'pytest_flake8.Flake8Item'> is not using a cooperative constructor:pytest.PytestDeprecationWarning
+ ignore:The \(fspath. py.path.local\) argument to Flake8Item is deprecated.:pytest.PytestDeprecationWarning
+ ignore:Flake8Item is an Item subclass and should not be a collector:pytest.PytestWarning
+
+ # dbader/pytest-mypy#131
+ ignore:The \(fspath. py.path.local\) argument to MypyFile is deprecated.:pytest.PytestDeprecationWarning
+ ## end upstream
+
+ # https://github.com/pypa/setuptools/issues/1823
+ ignore:bdist_wininst command is deprecated
+ # Suppress this error; unimportant for CI tests
+ ignore:Extraction path is writable by group/others:UserWarning
+ # Suppress weird RuntimeWarning.
+ ignore:Parent module 'setuptools' not found while handling absolute import:RuntimeWarning
+ # Suppress use of bytes for filenames on Windows until fixed #2016
+ ignore:The Windows bytes API has been deprecated:DeprecationWarning
+
+ # https://github.com/pypa/setuptools/issues/2823
+ ignore:setuptools.installer is deprecated.
+
+ # https://github.com/pypa/setuptools/issues/917
+ ignore:setup.py install is deprecated.
+ ignore:easy_install command is deprecated.
+
+ # https://github.com/pypa/setuptools/issues/2497
+ ignore:.* is an invalid version and will not be supported::pkg_resources
+
+ # https://github.com/pypa/setuptools/pull/2865#issuecomment-965700112
+ # ideally would apply to Python 3.10+ when
+ # SETUPTOOLS_USE_DISTUTILS=stdlib but for
+ # https://github.com/pytest-dev/pytest/discussions/9296
+ ignore:The distutils.sysconfig module is deprecated, use sysconfig instead
+ ignore:The distutils package is deprecated.*
+
+ # Workaround for pypa/setuptools#2868
+ # ideally would apply to PyPy only but for
+ # https://github.com/pytest-dev/pytest/discussions/9296
+ ignore:Distutils was imported before setuptools
+ ignore:Setuptools is replacing distutils
+
+ ignore:Support for project metadata in .pyproject.toml. is still experimental
diff --git a/setup.cfg b/setup.cfg
index 1184020..da74d0a 100755..100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,29 +1,150 @@
-[bumpversion]
-current_version = 39.1.0
-commit = True
-tag = True
+[metadata]
+name = setuptools
+version = 61.1.0
+author = Python Packaging Authority
+author_email = distutils-sig@python.org
+description = Easily download, build, install, upgrade, and uninstall Python packages
+long_description = file:README.rst
+url = https://github.com/pypa/setuptools
+classifiers =
+ Development Status :: 5 - Production/Stable
+ Intended Audience :: Developers
+ License :: OSI Approved :: MIT License
+ Programming Language :: Python :: 3
+ Programming Language :: Python :: 3 :: Only
+ Topic :: Software Development :: Libraries :: Python Modules
+ Topic :: System :: Archiving :: Packaging
+ Topic :: System :: Systems Administration
+ Topic :: Utilities
+keywords = CPAN PyPI distutils eggs package management
+project_urls =
+ Documentation = https://setuptools.pypa.io/
+
+[options]
+packages = find_namespace:
+python_requires = >=3.7
+install_requires =
+
+[options.packages.find]
+exclude =
+ build*
+ dist*
+ docs*
+ tests*
+ *.tests
+ *.tests.*
+ tools*
+
+[options.extras_require]
+testing =
+ pytest >= 6
+ pytest-checkdocs >= 2.4
+ pytest-flake8
+ pytest-black >= 0.3.7; \
+ python_implementation != "PyPy"
+ pytest-cov; \
+ python_implementation != "PyPy"
+ pytest-mypy >= 0.9.1; \
+ python_implementation != "PyPy"
+ pytest-enabler >= 1.0.1
+ pytest-perf
+
+ mock
+ flake8-2020
+ virtualenv>=13.0.0
+ wheel
+ pip>=19.1 # For proper file:// URLs support.
+ jaraco.envs>=2.2
+ pytest-xdist
+ jaraco.path>=3.2.0
+ build[virtualenv]
+ filelock>=3.4.0
+ pip_run>=8.8
+ ini2toml[lite]>=0.9
+ tomli-w>=1.0.0
+testing-integration =
+ pytest
+ pytest-xdist
+ pytest-enabler
+ virtualenv>=13.0.0
+ tomli
+ wheel
+ jaraco.path>=3.2.0
+ jaraco.envs>=2.2
+ build[virtualenv]
+ filelock>=3.4.0
+docs =
+ sphinx
+ jaraco.packaging >= 9
+ rst.linker >= 1.9
+ jaraco.tidelift >= 1.4
+
+ pygments-github-lexers==0.0.5
+ sphinx-favicon
+ sphinx-inline-tabs
+ sphinxcontrib-towncrier
+ furo
+ssl =
+certs =
+
+[options.entry_points]
+distutils.commands =
+ alias = setuptools.command.alias:alias
+ bdist_egg = setuptools.command.bdist_egg:bdist_egg
+ bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm
+ build_clib = setuptools.command.build_clib:build_clib
+ build_ext = setuptools.command.build_ext:build_ext
+ build_py = setuptools.command.build_py:build_py
+ develop = setuptools.command.develop:develop
+ dist_info = setuptools.command.dist_info:dist_info
+ easy_install = setuptools.command.easy_install:easy_install
+ egg_info = setuptools.command.egg_info:egg_info
+ install = setuptools.command.install:install
+ install_egg_info = setuptools.command.install_egg_info:install_egg_info
+ install_lib = setuptools.command.install_lib:install_lib
+ install_scripts = setuptools.command.install_scripts:install_scripts
+ rotate = setuptools.command.rotate:rotate
+ saveopts = setuptools.command.saveopts:saveopts
+ sdist = setuptools.command.sdist:sdist
+ setopt = setuptools.command.setopt:setopt
+ test = setuptools.command.test:test
+ upload_docs = setuptools.command.upload_docs:upload_docs
+setuptools.finalize_distribution_options =
+ parent_finalize = setuptools.dist:_Distribution.finalize_options
+ keywords = setuptools.dist:Distribution._finalize_setup_keywords
+distutils.setup_keywords =
+ eager_resources = setuptools.dist:assert_string_list
+ namespace_packages = setuptools.dist:check_nsp
+ extras_require = setuptools.dist:check_extras
+ install_requires = setuptools.dist:check_requirements
+ tests_require = setuptools.dist:check_requirements
+ setup_requires = setuptools.dist:check_requirements
+ python_requires = setuptools.dist:check_specifier
+ entry_points = setuptools.dist:check_entry_points
+ test_suite = setuptools.dist:check_test_suite
+ zip_safe = setuptools.dist:assert_bool
+ package_data = setuptools.dist:check_package_data
+ exclude_package_data = setuptools.dist:check_package_data
+ include_package_data = setuptools.dist:assert_bool
+ packages = setuptools.dist:check_packages
+ dependency_links = setuptools.dist:assert_string_list
+ test_loader = setuptools.dist:check_importable
+ test_runner = setuptools.dist:check_importable
+ use_2to3 = setuptools.dist:invalid_unless_false
+egg_info.writers =
+ PKG-INFO = setuptools.command.egg_info:write_pkg_info
+ requires.txt = setuptools.command.egg_info:write_requirements
+ entry_points.txt = setuptools.command.egg_info:write_entries
+ eager_resources.txt = setuptools.command.egg_info:overwrite_arg
+ namespace_packages.txt = setuptools.command.egg_info:overwrite_arg
+ top_level.txt = setuptools.command.egg_info:write_toplevel_names
+ depends.txt = setuptools.command.egg_info:warn_depends_obsolete
+ dependency_links.txt = setuptools.command.egg_info:overwrite_arg
[egg_info]
tag_build =
tag_date = 0
-[aliases]
-clean_egg_info = egg_info -Db ''
-release = clean_egg_info sdist bdist_wheel
-source = register sdist binary
-binary = bdist_egg upload --show-response
-
-[upload]
-repository = https://upload.pypi.org/legacy/
-
[sdist]
formats = zip
-[bdist_wheel]
-universal = 1
-
-[metadata]
-license_file = LICENSE
-
-[bumpversion:file:setup.py]
-
diff --git a/setup.py b/setup.py
index b08552d..4cda3d3 100755
--- a/setup.py
+++ b/setup.py
@@ -1,58 +1,15 @@
#!/usr/bin/env python
-"""
-Distutils setup file, used to install or test 'setuptools'
-"""
-import io
import os
import sys
import textwrap
import setuptools
+from setuptools.command.install import install
here = os.path.dirname(__file__)
-def require_metadata():
- "Prevent improper installs without necessary metadata. See #659"
- egg_info_dir = os.path.join(here, 'setuptools.egg-info')
- if not os.path.exists(egg_info_dir):
- msg = (
- "Cannot build setuptools without metadata. "
- "Run `bootstrap.py`."
- )
- raise RuntimeError(msg)
-
-
-def read_commands():
- command_ns = {}
- cmd_module_path = 'setuptools/command/__init__.py'
- init_path = os.path.join(here, cmd_module_path)
- with open(init_path) as init_file:
- exec(init_file.read(), command_ns)
- return command_ns['__all__']
-
-
-def _gen_console_scripts():
- yield "easy_install = setuptools.command.easy_install:main"
-
- # Gentoo distributions manage the python-version-specific scripts
- # themselves, so those platforms define an environment variable to
- # suppress the creation of the version-specific scripts.
- var_names = (
- 'SETUPTOOLS_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT',
- 'DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT',
- )
- if any(os.environ.get(var) not in (None, "", "0") for var in var_names):
- return
- tmpl = "easy_install-{shortver} = setuptools.command.easy_install:main"
- yield tmpl.format(shortver=sys.version[:3])
-
-
-readme_path = os.path.join(here, 'README.rst')
-with io.open(readme_path, encoding='utf-8') as readme_file:
- long_description = readme_file.read()
-
package_data = dict(
setuptools=['script (dev).tmpl', 'script.tmpl', 'site-patch.py'],
)
@@ -62,19 +19,12 @@ force_windows_specific_files = (
not in ("", "0", "false", "no")
)
-include_windows_files = (
- sys.platform == 'win32' or
- os.name == 'java' and os._name == 'nt' or
- force_windows_specific_files
-)
+include_windows_files = sys.platform == 'win32' or force_windows_specific_files
if include_windows_files:
package_data.setdefault('setuptools', []).extend(['*.exe'])
package_data.setdefault('setuptools.command', []).extend(['*.xml'])
-needs_wheel = set(['release', 'bdist_wheel']).intersection(sys.argv)
-wheel = ['wheel'] if needs_wheel else []
-
def pypi_link(pkg_filename):
"""
@@ -87,109 +37,51 @@ def pypi_link(pkg_filename):
return '/'.join(parts)
+class install_with_pth(install):
+ """
+ Custom install command to install a .pth file for distutils patching.
+
+ This hack is necessary because there's no standard way to install behavior
+ on startup (and it's debatable if there should be one). This hack (ab)uses
+ the `extra_path` behavior in Setuptools to install a `.pth` file with
+ implicit behavior on startup to give higher precedence to the local version
+ of `distutils` over the version from the standard library.
+
+ Please do not replicate this behavior.
+ """
+
+ _pth_name = 'distutils-precedence'
+ _pth_contents = textwrap.dedent("""
+ import os
+ var = 'SETUPTOOLS_USE_DISTUTILS'
+ enabled = os.environ.get(var, 'local') == 'local'
+ enabled and __import__('_distutils_hack').add_shim()
+ """).lstrip().replace('\n', '; ')
+
+ def initialize_options(self):
+ install.initialize_options(self)
+ self.extra_path = self._pth_name, self._pth_contents
+
+ def finalize_options(self):
+ install.finalize_options(self)
+ self._restore_install_lib()
+
+ def _restore_install_lib(self):
+ """
+ Undo secondary effect of `extra_path` adding to `install_lib`
+ """
+ suffix = os.path.relpath(self.install_lib, self.install_libbase)
+
+ if suffix.strip() == self._pth_contents.strip():
+ self.install_lib = self.install_libbase
+
+
setup_params = dict(
- name="setuptools",
- version="39.1.0",
- description=(
- "Easily download, build, install, upgrade, and uninstall "
- "Python packages"
- ),
- author="Python Packaging Authority",
- author_email="distutils-sig@python.org",
- long_description=long_description,
- long_description_content_type='text/x-rst; charset=UTF-8',
- keywords="CPAN PyPI distutils eggs package management",
- url="https://github.com/pypa/setuptools",
- project_urls={
- "Documentation": "https://setuptools.readthedocs.io/",
- },
- src_root=None,
- packages=setuptools.find_packages(exclude=['*.tests']),
+ cmdclass={'install': install_with_pth},
package_data=package_data,
- py_modules=['easy_install'],
- zip_safe=True,
- entry_points={
- "distutils.commands": [
- "%(cmd)s = setuptools.command.%(cmd)s:%(cmd)s" % locals()
- for cmd in read_commands()
- ],
- "distutils.setup_keywords": [
- "eager_resources = setuptools.dist:assert_string_list",
- "namespace_packages = setuptools.dist:check_nsp",
- "extras_require = setuptools.dist:check_extras",
- "install_requires = setuptools.dist:check_requirements",
- "tests_require = setuptools.dist:check_requirements",
- "setup_requires = setuptools.dist:check_requirements",
- "python_requires = setuptools.dist:check_specifier",
- "entry_points = setuptools.dist:check_entry_points",
- "test_suite = setuptools.dist:check_test_suite",
- "zip_safe = setuptools.dist:assert_bool",
- "package_data = setuptools.dist:check_package_data",
- "exclude_package_data = setuptools.dist:check_package_data",
- "include_package_data = setuptools.dist:assert_bool",
- "packages = setuptools.dist:check_packages",
- "dependency_links = setuptools.dist:assert_string_list",
- "test_loader = setuptools.dist:check_importable",
- "test_runner = setuptools.dist:check_importable",
- "use_2to3 = setuptools.dist:assert_bool",
- "convert_2to3_doctests = setuptools.dist:assert_string_list",
- "use_2to3_fixers = setuptools.dist:assert_string_list",
- "use_2to3_exclude_fixers = setuptools.dist:assert_string_list",
- ],
- "egg_info.writers": [
- "PKG-INFO = setuptools.command.egg_info:write_pkg_info",
- "requires.txt = setuptools.command.egg_info:write_requirements",
- "entry_points.txt = setuptools.command.egg_info:write_entries",
- "eager_resources.txt = setuptools.command.egg_info:overwrite_arg",
- (
- "namespace_packages.txt = "
- "setuptools.command.egg_info:overwrite_arg"
- ),
- "top_level.txt = setuptools.command.egg_info:write_toplevel_names",
- "depends.txt = setuptools.command.egg_info:warn_depends_obsolete",
- "dependency_links.txt = setuptools.command.egg_info:overwrite_arg",
- ],
- "console_scripts": list(_gen_console_scripts()),
- "setuptools.installation":
- ['eggsecutable = setuptools.command.easy_install:bootstrap'],
- },
- classifiers=textwrap.dedent("""
- Development Status :: 5 - Production/Stable
- Intended Audience :: Developers
- License :: OSI Approved :: MIT License
- Operating System :: OS Independent
- Programming Language :: Python :: 2
- Programming Language :: Python :: 2.7
- Programming Language :: Python :: 3
- Programming Language :: Python :: 3.3
- Programming Language :: Python :: 3.4
- Programming Language :: Python :: 3.5
- Programming Language :: Python :: 3.6
- Topic :: Software Development :: Libraries :: Python Modules
- Topic :: System :: Archiving :: Packaging
- Topic :: System :: Systems Administration
- Topic :: Utilities
- """).strip().splitlines(),
- python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*',
- extras_require={
- "ssl:sys_platform=='win32'": "wincertstore==0.2",
- "certs": "certifi==2016.9.26",
- },
- dependency_links=[
- pypi_link(
- 'certifi-2016.9.26.tar.gz#md5=baa81e951a29958563689d868ef1064d',
- ),
- pypi_link(
- 'wincertstore-0.2.zip#md5=ae728f2f007185648d0c7a8679b361e2',
- ),
- ],
- scripts=[],
- setup_requires=[
- ] + wheel,
)
if __name__ == '__main__':
# allow setup.py to run from another directory
here and os.chdir(here)
- require_metadata()
dist = setuptools.setup(**setup_params)
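The install_with_pth command above (ab)uses extra_path so that installing setuptools drops a distutils-precedence.pth file into site-packages; site.py executes any .pth line that starts with "import" at interpreter startup, which is how _distutils_hack gets a chance to run before anything imports distutils. A sketch of how the triple-quoted _pth_contents collapses into that single executable line (output shown abridged):

    import textwrap

    pth_contents = textwrap.dedent("""
        import os
        var = 'SETUPTOOLS_USE_DISTUTILS'
        enabled = os.environ.get(var, 'local') == 'local'
        enabled and __import__('_distutils_hack').add_shim()
        """).lstrip().replace('\n', '; ')

    print(pth_contents)
    # -> import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = ...; ... add_shim();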
diff --git a/setuptools.egg-info/PKG-INFO b/setuptools.egg-info/PKG-INFO
index 51e6da6..542cb26 100644
--- a/setuptools.egg-info/PKG-INFO
+++ b/setuptools.egg-info/PKG-INFO
@@ -1,65 +1,103 @@
Metadata-Version: 2.1
Name: setuptools
-Version: 39.1.0
+Version: 61.1.0
Summary: Easily download, build, install, upgrade, and uninstall Python packages
Home-page: https://github.com/pypa/setuptools
Author: Python Packaging Authority
Author-email: distutils-sig@python.org
License: UNKNOWN
-Project-URL: Documentation, https://setuptools.readthedocs.io/
-Description: .. image:: https://img.shields.io/pypi/v/setuptools.svg
- :target: https://pypi.org/project/setuptools
-
- .. image:: https://readthedocs.org/projects/setuptools/badge/?version=latest
- :target: https://setuptools.readthedocs.io
-
- .. image:: https://img.shields.io/travis/pypa/setuptools/master.svg?label=Linux%20build%20%40%20Travis%20CI
- :target: https://travis-ci.org/pypa/setuptools
-
- .. image:: https://img.shields.io/appveyor/ci/jaraco/setuptools/master.svg?label=Windows%20build%20%40%20Appveyor
- :target: https://ci.appveyor.com/project/jaraco/setuptools/branch/master
-
- .. image:: https://img.shields.io/pypi/pyversions/setuptools.svg
-
- See the `Installation Instructions
- <https://packaging.python.org/installing/>`_ in the Python Packaging
- User's Guide for instructions on installing, upgrading, and uninstalling
- Setuptools.
-
- The project is `maintained at GitHub <https://github.com/pypa/setuptools>`_.
-
- Questions and comments should be directed to the `distutils-sig
- mailing list <http://mail.python.org/pipermail/distutils-sig/>`_.
- Bug reports and especially tested patches may be
- submitted directly to the `bug tracker
- <https://github.com/pypa/setuptools/issues>`_.
-
-
- Code of Conduct
- ---------------
-
- Everyone interacting in the setuptools project's codebases, issue trackers,
- chat rooms, and mailing lists is expected to follow the
- `PyPA Code of Conduct <https://www.pypa.io/en/latest/code-of-conduct/>`_.
-
+Project-URL: Documentation, https://setuptools.pypa.io/
Keywords: CPAN PyPI distutils eggs package management
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
-Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python :: 2
-Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.3
-Classifier: Programming Language :: Python :: 3.4
-Classifier: Programming Language :: Python :: 3.5
-Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: System :: Archiving :: Packaging
Classifier: Topic :: System :: Systems Administration
Classifier: Topic :: Utilities
-Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*
-Description-Content-Type: text/x-rst; charset=UTF-8
+Requires-Python: >=3.7
+Provides-Extra: testing
+Provides-Extra: testing-integration
+Provides-Extra: docs
Provides-Extra: ssl
Provides-Extra: certs
+License-File: LICENSE
+
+.. image:: https://raw.githubusercontent.com/pypa/setuptools/main/docs/images/banner-640x320.svg
+ :align: center
+
+|
+
+.. image:: https://img.shields.io/pypi/v/setuptools.svg
+ :target: `PyPI link`_
+
+.. image:: https://img.shields.io/pypi/pyversions/setuptools.svg
+ :target: `PyPI link`_
+
+.. _PyPI link: https://pypi.org/project/setuptools
+
+.. image:: https://github.com/pypa/setuptools/workflows/tests/badge.svg
+ :target: https://github.com/pypa/setuptools/actions?query=workflow%3A%22tests%22
+ :alt: tests
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Code style: Black
+
+.. image:: https://img.shields.io/readthedocs/setuptools/latest.svg
+ :target: https://setuptools.pypa.io
+
+.. image:: https://img.shields.io/badge/skeleton-2022-informational
+ :target: https://blog.jaraco.com/skeleton
+
+.. image:: https://img.shields.io/codecov/c/github/pypa/setuptools/master.svg?logo=codecov&logoColor=white
+ :target: https://codecov.io/gh/pypa/setuptools
+
+.. image:: https://tidelift.com/badges/github/pypa/setuptools?style=flat
+ :target: https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=readme
+
+.. image:: https://img.shields.io/discord/803025117553754132
+ :target: https://discord.com/channels/803025117553754132/815945031150993468
+ :alt: Discord
+
+See the `Installation Instructions
+<https://packaging.python.org/installing/>`_ in the Python Packaging
+User's Guide for instructions on installing, upgrading, and uninstalling
+Setuptools.
+
+Questions and comments should be directed to `GitHub Discussions
+<https://github.com/pypa/setuptools/discussions>`_.
+Bug reports and especially tested patches may be
+submitted directly to the `bug tracker
+<https://github.com/pypa/setuptools/issues>`_.
+
+
+Code of Conduct
+===============
+
+Everyone interacting in the setuptools project's codebases, issue trackers,
+chat rooms, and fora is expected to follow the
+`PSF Code of Conduct <https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md>`_.
+
+
+For Enterprise
+==============
+
+Available as part of the Tidelift Subscription.
+
+Setuptools and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
+
+`Learn more <https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=referral&utm_campaign=github>`_.
+
+
+Security Contact
+================
+
+To report a security vulnerability, please use the
+`Tidelift security contact <https://tidelift.com/security>`_.
+Tidelift will coordinate the fix and disclosure.
+
+
diff --git a/setuptools.egg-info/SOURCES.txt b/setuptools.egg-info/SOURCES.txt
index 93f588d..be8d77b 100644
--- a/setuptools.egg-info/SOURCES.txt
+++ b/setuptools.egg-info/SOURCES.txt
@@ -2,51 +2,130 @@ CHANGES.rst
LICENSE
MANIFEST.in
README.rst
-bootstrap.py
conftest.py
-easy_install.py
+exercises.py
launcher.c
-msvc-build-launcher.cmd
-pavement.py
+pyproject.toml
pytest.ini
setup.cfg
setup.py
tox.ini
-docs/Makefile
+_distutils_hack/__init__.py
+_distutils_hack/override.py
+changelog.d/.gitignore
+changelog.d/README.rst
+docs/artwork.rst
+docs/build_meta.rst
docs/conf.py
-docs/developer-guide.txt
-docs/development.txt
-docs/easy_install.txt
-docs/formats.txt
-docs/history.txt
-docs/index.txt
-docs/pkg_resources.txt
-docs/python3.txt
-docs/releases.txt
-docs/requirements.txt
-docs/roadmap.txt
-docs/setuptools.txt
-docs/_templates/indexsidebar.html
-docs/_theme/nature/theme.conf
-docs/_theme/nature/static/nature.css_t
-docs/_theme/nature/static/pygments.css
+docs/history.rst
+docs/index.rst
+docs/pkg_resources.rst
+docs/python 2 sunset.rst
+docs/roadmap.rst
+docs/setuptools.rst
+docs/deprecated/distutils-legacy.rst
+docs/deprecated/easy_install.rst
+docs/deprecated/functionalities.rst
+docs/deprecated/index.rst
+docs/deprecated/python_eggs.rst
+docs/deprecated/distutils/_setuptools_disclaimer.rst
+docs/deprecated/distutils/apiref.rst
+docs/deprecated/distutils/builtdist.rst
+docs/deprecated/distutils/commandref.rst
+docs/deprecated/distutils/configfile.rst
+docs/deprecated/distutils/examples.rst
+docs/deprecated/distutils/extending.rst
+docs/deprecated/distutils/index.rst
+docs/deprecated/distutils/introduction.rst
+docs/deprecated/distutils/packageindex.rst
+docs/deprecated/distutils/setupscript.rst
+docs/deprecated/distutils/sourcedist.rst
+docs/deprecated/distutils/uploading.rst
+docs/development/developer-guide.rst
+docs/development/index.rst
+docs/development/releases.rst
+docs/references/keywords.rst
+docs/userguide/commands.rst
+docs/userguide/datafiles.rst
+docs/userguide/declarative_config.rst
+docs/userguide/dependency_management.rst
+docs/userguide/development_mode.rst
+docs/userguide/distribution.rst
+docs/userguide/entry_point.rst
+docs/userguide/extension.rst
+docs/userguide/functionalities_rewrite.rst
+docs/userguide/index.rst
+docs/userguide/keywords.rst
+docs/userguide/miscellaneous.rst
+docs/userguide/package_discovery.rst
+docs/userguide/pyproject_config.rst
+docs/userguide/quickstart.rst
pkg_resources/__init__.py
pkg_resources/api_tests.txt
-pkg_resources/py31compat.py
pkg_resources/_vendor/__init__.py
pkg_resources/_vendor/appdirs.py
pkg_resources/_vendor/pyparsing.py
-pkg_resources/_vendor/six.py
pkg_resources/_vendor/vendored.txt
+pkg_resources/_vendor/zipp.py
+pkg_resources/_vendor/appdirs-1.4.3.dist-info/top_level.txt
+pkg_resources/_vendor/importlib_resources/__init__.py
+pkg_resources/_vendor/importlib_resources/_adapters.py
+pkg_resources/_vendor/importlib_resources/_common.py
+pkg_resources/_vendor/importlib_resources/_compat.py
+pkg_resources/_vendor/importlib_resources/_itertools.py
+pkg_resources/_vendor/importlib_resources/_legacy.py
+pkg_resources/_vendor/importlib_resources/abc.py
+pkg_resources/_vendor/importlib_resources/readers.py
+pkg_resources/_vendor/importlib_resources/simple.py
+pkg_resources/_vendor/importlib_resources-5.4.0.dist-info/top_level.txt
+pkg_resources/_vendor/importlib_resources/tests/__init__.py
+pkg_resources/_vendor/importlib_resources/tests/_compat.py
+pkg_resources/_vendor/importlib_resources/tests/test_compatibilty_files.py
+pkg_resources/_vendor/importlib_resources/tests/test_contents.py
+pkg_resources/_vendor/importlib_resources/tests/test_files.py
+pkg_resources/_vendor/importlib_resources/tests/test_open.py
+pkg_resources/_vendor/importlib_resources/tests/test_path.py
+pkg_resources/_vendor/importlib_resources/tests/test_read.py
+pkg_resources/_vendor/importlib_resources/tests/test_reader.py
+pkg_resources/_vendor/importlib_resources/tests/test_resource.py
+pkg_resources/_vendor/importlib_resources/tests/update-zips.py
+pkg_resources/_vendor/importlib_resources/tests/util.py
+pkg_resources/_vendor/importlib_resources/tests/data01/__init__.py
+pkg_resources/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py
+pkg_resources/_vendor/importlib_resources/tests/data02/__init__.py
+pkg_resources/_vendor/importlib_resources/tests/data02/one/__init__.py
+pkg_resources/_vendor/importlib_resources/tests/data02/one/resource1.txt
+pkg_resources/_vendor/importlib_resources/tests/data02/two/__init__.py
+pkg_resources/_vendor/importlib_resources/tests/data02/two/resource2.txt
+pkg_resources/_vendor/importlib_resources/tests/zipdata01/__init__.py
+pkg_resources/_vendor/importlib_resources/tests/zipdata02/__init__.py
+pkg_resources/_vendor/jaraco/__init__.py
+pkg_resources/_vendor/jaraco/context.py
+pkg_resources/_vendor/jaraco/functools.py
+pkg_resources/_vendor/jaraco.context-4.1.1.dist-info/top_level.txt
+pkg_resources/_vendor/jaraco.functools-3.5.0.dist-info/top_level.txt
+pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/top_level.txt
+pkg_resources/_vendor/jaraco/text/Lorem ipsum.txt
+pkg_resources/_vendor/jaraco/text/__init__.py
+pkg_resources/_vendor/more_itertools/__init__.py
+pkg_resources/_vendor/more_itertools/more.py
+pkg_resources/_vendor/more_itertools/recipes.py
+pkg_resources/_vendor/more_itertools-8.12.0.dist-info/top_level.txt
pkg_resources/_vendor/packaging/__about__.py
pkg_resources/_vendor/packaging/__init__.py
-pkg_resources/_vendor/packaging/_compat.py
+pkg_resources/_vendor/packaging/_manylinux.py
+pkg_resources/_vendor/packaging/_musllinux.py
pkg_resources/_vendor/packaging/_structures.py
pkg_resources/_vendor/packaging/markers.py
pkg_resources/_vendor/packaging/requirements.py
pkg_resources/_vendor/packaging/specifiers.py
+pkg_resources/_vendor/packaging/tags.py
pkg_resources/_vendor/packaging/utils.py
pkg_resources/_vendor/packaging/version.py
+pkg_resources/_vendor/packaging-21.3.dist-info/top_level.txt
+pkg_resources/_vendor/pyparsing-2.2.1.dist-info/LICENSE.txt
+pkg_resources/_vendor/pyparsing-2.2.1.dist-info/top_level.txt
+pkg_resources/_vendor/zipp-3.7.0.dist-info/top_level.txt
pkg_resources/extern/__init__.py
pkg_resources/tests/__init__.py
pkg_resources/tests/test_find_distributions.py
@@ -54,38 +133,51 @@ pkg_resources/tests/test_markers.py
pkg_resources/tests/test_pkg_resources.py
pkg_resources/tests/test_resources.py
pkg_resources/tests/test_working_set.py
+pkg_resources/tests/data/my-test-package-source/setup.cfg
+pkg_resources/tests/data/my-test-package-source/setup.py
+pkg_resources/tests/data/my-test-package-zip/my-test-package.zip
+pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/PKG-INFO
+pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/SOURCES.txt
+pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/dependency_links.txt
+pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/top_level.txt
+pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/zip-safe
+pkg_resources/tests/data/my-test-package_zipped-egg/my_test_package-1.0-py3.7.egg
setuptools/__init__.py
+setuptools/_deprecation_warning.py
+setuptools/_entry_points.py
+setuptools/_imp.py
+setuptools/_importlib.py
+setuptools/_itertools.py
+setuptools/_path.py
+setuptools/_reqs.py
setuptools/archive_util.py
setuptools/build_meta.py
setuptools/cli-32.exe
setuptools/cli-64.exe
+setuptools/cli-arm64.exe
setuptools/cli.exe
-setuptools/config.py
setuptools/dep_util.py
setuptools/depends.py
+setuptools/discovery.py
setuptools/dist.py
+setuptools/errors.py
setuptools/extension.py
-setuptools/glibc.py
setuptools/glob.py
setuptools/gui-32.exe
setuptools/gui-64.exe
+setuptools/gui-arm64.exe
setuptools/gui.exe
+setuptools/installer.py
setuptools/launch.py
-setuptools/lib2to3_ex.py
+setuptools/logging.py
setuptools/monkey.py
setuptools/msvc.py
setuptools/namespaces.py
setuptools/package_index.py
-setuptools/pep425tags.py
-setuptools/py27compat.py
-setuptools/py31compat.py
-setuptools/py33compat.py
-setuptools/py36compat.py
+setuptools/py34compat.py
setuptools/sandbox.py
setuptools/script (dev).tmpl
setuptools/script.tmpl
-setuptools/site-patch.py
-setuptools/ssl_support.py
setuptools/unicode_utils.py
setuptools/version.py
setuptools/wheel.py
@@ -96,33 +188,209 @@ setuptools.egg-info/dependency_links.txt
setuptools.egg-info/entry_points.txt
setuptools.egg-info/requires.txt
setuptools.egg-info/top_level.txt
-setuptools.egg-info/zip-safe
+setuptools/_distutils/__init__.py
+setuptools/_distutils/_collections.py
+setuptools/_distutils/_macos_compat.py
+setuptools/_distutils/_msvccompiler.py
+setuptools/_distutils/archive_util.py
+setuptools/_distutils/bcppcompiler.py
+setuptools/_distutils/ccompiler.py
+setuptools/_distutils/cmd.py
+setuptools/_distutils/config.py
+setuptools/_distutils/core.py
+setuptools/_distutils/cygwinccompiler.py
+setuptools/_distutils/debug.py
+setuptools/_distutils/dep_util.py
+setuptools/_distutils/dir_util.py
+setuptools/_distutils/dist.py
+setuptools/_distutils/errors.py
+setuptools/_distutils/extension.py
+setuptools/_distutils/fancy_getopt.py
+setuptools/_distutils/file_util.py
+setuptools/_distutils/filelist.py
+setuptools/_distutils/log.py
+setuptools/_distutils/msvc9compiler.py
+setuptools/_distutils/msvccompiler.py
+setuptools/_distutils/py35compat.py
+setuptools/_distutils/py38compat.py
+setuptools/_distutils/spawn.py
+setuptools/_distutils/sysconfig.py
+setuptools/_distutils/text_file.py
+setuptools/_distutils/unixccompiler.py
+setuptools/_distutils/util.py
+setuptools/_distutils/version.py
+setuptools/_distutils/versionpredicate.py
+setuptools/_distutils/command/__init__.py
+setuptools/_distutils/command/bdist.py
+setuptools/_distutils/command/bdist_dumb.py
+setuptools/_distutils/command/bdist_msi.py
+setuptools/_distutils/command/bdist_rpm.py
+setuptools/_distutils/command/bdist_wininst.py
+setuptools/_distutils/command/build.py
+setuptools/_distutils/command/build_clib.py
+setuptools/_distutils/command/build_ext.py
+setuptools/_distutils/command/build_py.py
+setuptools/_distutils/command/build_scripts.py
+setuptools/_distutils/command/check.py
+setuptools/_distutils/command/clean.py
+setuptools/_distutils/command/config.py
+setuptools/_distutils/command/install.py
+setuptools/_distutils/command/install_data.py
+setuptools/_distutils/command/install_egg_info.py
+setuptools/_distutils/command/install_headers.py
+setuptools/_distutils/command/install_lib.py
+setuptools/_distutils/command/install_scripts.py
+setuptools/_distutils/command/py37compat.py
+setuptools/_distutils/command/register.py
+setuptools/_distutils/command/sdist.py
+setuptools/_distutils/command/upload.py
+setuptools/_distutils/command/wininst-10.0-amd64.exe
+setuptools/_distutils/command/wininst-10.0.exe
+setuptools/_distutils/command/wininst-14.0-amd64.exe
+setuptools/_distutils/command/wininst-14.0.exe
+setuptools/_distutils/command/wininst-6.0.exe
+setuptools/_distutils/command/wininst-7.1.exe
+setuptools/_distutils/command/wininst-8.0.exe
+setuptools/_distutils/command/wininst-9.0-amd64.exe
+setuptools/_distutils/command/wininst-9.0.exe
+setuptools/_distutils/tests/__init__.py
+setuptools/_distutils/tests/py35compat.py
+setuptools/_distutils/tests/py38compat.py
+setuptools/_distutils/tests/support.py
+setuptools/_distutils/tests/test_archive_util.py
+setuptools/_distutils/tests/test_bdist.py
+setuptools/_distutils/tests/test_bdist_dumb.py
+setuptools/_distutils/tests/test_bdist_msi.py
+setuptools/_distutils/tests/test_bdist_rpm.py
+setuptools/_distutils/tests/test_bdist_wininst.py
+setuptools/_distutils/tests/test_build.py
+setuptools/_distutils/tests/test_build_clib.py
+setuptools/_distutils/tests/test_build_ext.py
+setuptools/_distutils/tests/test_build_py.py
+setuptools/_distutils/tests/test_build_scripts.py
+setuptools/_distutils/tests/test_check.py
+setuptools/_distutils/tests/test_clean.py
+setuptools/_distutils/tests/test_cmd.py
+setuptools/_distutils/tests/test_config.py
+setuptools/_distutils/tests/test_config_cmd.py
+setuptools/_distutils/tests/test_core.py
+setuptools/_distutils/tests/test_cygwinccompiler.py
+setuptools/_distutils/tests/test_dep_util.py
+setuptools/_distutils/tests/test_dir_util.py
+setuptools/_distutils/tests/test_dist.py
+setuptools/_distutils/tests/test_extension.py
+setuptools/_distutils/tests/test_file_util.py
+setuptools/_distutils/tests/test_filelist.py
+setuptools/_distutils/tests/test_install.py
+setuptools/_distutils/tests/test_install_data.py
+setuptools/_distutils/tests/test_install_headers.py
+setuptools/_distutils/tests/test_install_lib.py
+setuptools/_distutils/tests/test_install_scripts.py
+setuptools/_distutils/tests/test_log.py
+setuptools/_distutils/tests/test_msvc9compiler.py
+setuptools/_distutils/tests/test_msvccompiler.py
+setuptools/_distutils/tests/test_register.py
+setuptools/_distutils/tests/test_sdist.py
+setuptools/_distutils/tests/test_spawn.py
+setuptools/_distutils/tests/test_sysconfig.py
+setuptools/_distutils/tests/test_text_file.py
+setuptools/_distutils/tests/test_unixccompiler.py
+setuptools/_distutils/tests/test_upload.py
+setuptools/_distutils/tests/test_util.py
+setuptools/_distutils/tests/test_version.py
+setuptools/_distutils/tests/test_versionpredicate.py
+setuptools/_distutils/tests/unix_compat.py
setuptools/_vendor/__init__.py
+setuptools/_vendor/ordered_set.py
setuptools/_vendor/pyparsing.py
-setuptools/_vendor/six.py
+setuptools/_vendor/typing_extensions.py
setuptools/_vendor/vendored.txt
-setuptools/_vendor/__pycache__/__init__.cpython-36.pyc
-setuptools/_vendor/__pycache__/six.cpython-36.pyc
+setuptools/_vendor/zipp.py
+setuptools/_vendor/_validate_pyproject/__init__.py
+setuptools/_vendor/_validate_pyproject/error_reporting.py
+setuptools/_vendor/_validate_pyproject/extra_validations.py
+setuptools/_vendor/_validate_pyproject/fastjsonschema_exceptions.py
+setuptools/_vendor/_validate_pyproject/fastjsonschema_validations.py
+setuptools/_vendor/_validate_pyproject/formats.py
+setuptools/_vendor/importlib_metadata/__init__.py
+setuptools/_vendor/importlib_metadata/_adapters.py
+setuptools/_vendor/importlib_metadata/_collections.py
+setuptools/_vendor/importlib_metadata/_compat.py
+setuptools/_vendor/importlib_metadata/_functools.py
+setuptools/_vendor/importlib_metadata/_itertools.py
+setuptools/_vendor/importlib_metadata/_meta.py
+setuptools/_vendor/importlib_metadata/_text.py
+setuptools/_vendor/importlib_metadata-4.11.1.dist-info/top_level.txt
+setuptools/_vendor/importlib_resources/__init__.py
+setuptools/_vendor/importlib_resources/_adapters.py
+setuptools/_vendor/importlib_resources/_common.py
+setuptools/_vendor/importlib_resources/_compat.py
+setuptools/_vendor/importlib_resources/_itertools.py
+setuptools/_vendor/importlib_resources/_legacy.py
+setuptools/_vendor/importlib_resources/abc.py
+setuptools/_vendor/importlib_resources/readers.py
+setuptools/_vendor/importlib_resources/simple.py
+setuptools/_vendor/importlib_resources-5.4.0.dist-info/top_level.txt
+setuptools/_vendor/importlib_resources/tests/__init__.py
+setuptools/_vendor/importlib_resources/tests/_compat.py
+setuptools/_vendor/importlib_resources/tests/test_compatibilty_files.py
+setuptools/_vendor/importlib_resources/tests/test_contents.py
+setuptools/_vendor/importlib_resources/tests/test_files.py
+setuptools/_vendor/importlib_resources/tests/test_open.py
+setuptools/_vendor/importlib_resources/tests/test_path.py
+setuptools/_vendor/importlib_resources/tests/test_read.py
+setuptools/_vendor/importlib_resources/tests/test_reader.py
+setuptools/_vendor/importlib_resources/tests/test_resource.py
+setuptools/_vendor/importlib_resources/tests/update-zips.py
+setuptools/_vendor/importlib_resources/tests/util.py
+setuptools/_vendor/importlib_resources/tests/data01/__init__.py
+setuptools/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py
+setuptools/_vendor/importlib_resources/tests/data02/__init__.py
+setuptools/_vendor/importlib_resources/tests/data02/one/__init__.py
+setuptools/_vendor/importlib_resources/tests/data02/one/resource1.txt
+setuptools/_vendor/importlib_resources/tests/data02/two/__init__.py
+setuptools/_vendor/importlib_resources/tests/data02/two/resource2.txt
+setuptools/_vendor/importlib_resources/tests/zipdata01/__init__.py
+setuptools/_vendor/importlib_resources/tests/zipdata02/__init__.py
+setuptools/_vendor/jaraco/__init__.py
+setuptools/_vendor/jaraco/context.py
+setuptools/_vendor/jaraco/functools.py
+setuptools/_vendor/jaraco.context-4.1.1.dist-info/top_level.txt
+setuptools/_vendor/jaraco.functools-3.5.0.dist-info/top_level.txt
+setuptools/_vendor/jaraco.text-3.7.0.dist-info/top_level.txt
+setuptools/_vendor/jaraco/text/Lorem ipsum.txt
+setuptools/_vendor/jaraco/text/__init__.py
+setuptools/_vendor/more_itertools/__init__.py
+setuptools/_vendor/more_itertools/more.py
+setuptools/_vendor/more_itertools/recipes.py
+setuptools/_vendor/more_itertools-8.8.0.dist-info/top_level.txt
+setuptools/_vendor/nspektr/__init__.py
+setuptools/_vendor/nspektr/_compat.py
+setuptools/_vendor/nspektr-0.3.0.dist-info/top_level.txt
+setuptools/_vendor/ordered_set-3.1.1.dist-info/top_level.txt
setuptools/_vendor/packaging/__about__.py
setuptools/_vendor/packaging/__init__.py
-setuptools/_vendor/packaging/_compat.py
+setuptools/_vendor/packaging/_manylinux.py
+setuptools/_vendor/packaging/_musllinux.py
setuptools/_vendor/packaging/_structures.py
setuptools/_vendor/packaging/markers.py
setuptools/_vendor/packaging/requirements.py
setuptools/_vendor/packaging/specifiers.py
+setuptools/_vendor/packaging/tags.py
setuptools/_vendor/packaging/utils.py
setuptools/_vendor/packaging/version.py
-setuptools/_vendor/packaging/__pycache__/__about__.cpython-36.pyc
-setuptools/_vendor/packaging/__pycache__/__init__.cpython-36.pyc
-setuptools/_vendor/packaging/__pycache__/_compat.cpython-36.pyc
-setuptools/_vendor/packaging/__pycache__/_structures.cpython-36.pyc
-setuptools/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc
-setuptools/_vendor/packaging/__pycache__/version.cpython-36.pyc
+setuptools/_vendor/packaging-21.3.dist-info/top_level.txt
+setuptools/_vendor/pyparsing-2.2.1.dist-info/LICENSE.txt
+setuptools/_vendor/pyparsing-2.2.1.dist-info/top_level.txt
+setuptools/_vendor/tomli/__init__.py
+setuptools/_vendor/tomli/_parser.py
+setuptools/_vendor/tomli/_re.py
+setuptools/_vendor/tomli/_types.py
+setuptools/_vendor/zipp-3.7.0.dist-info/top_level.txt
setuptools/command/__init__.py
setuptools/command/alias.py
setuptools/command/bdist_egg.py
setuptools/command/bdist_rpm.py
-setuptools/command/bdist_wininst.py
setuptools/command/build_clib.py
setuptools/command/build_ext.py
setuptools/command/build_py.py
@@ -144,49 +412,75 @@ setuptools/command/setopt.py
setuptools/command/test.py
setuptools/command/upload.py
setuptools/command/upload_docs.py
+setuptools/config/__init__.py
+setuptools/config/_apply_pyprojecttoml.py
+setuptools/config/expand.py
+setuptools/config/pyprojecttoml.py
+setuptools/config/setupcfg.py
setuptools/extern/__init__.py
setuptools/tests/__init__.py
setuptools/tests/contexts.py
setuptools/tests/environment.py
-setuptools/tests/files.py
setuptools/tests/fixtures.py
setuptools/tests/mod_with_constant.py
setuptools/tests/namespaces.py
setuptools/tests/script-with-bom.py
setuptools/tests/server.py
setuptools/tests/test_archive_util.py
+setuptools/tests/test_bdist_deprecations.py
setuptools/tests/test_bdist_egg.py
setuptools/tests/test_build_clib.py
setuptools/tests/test_build_ext.py
setuptools/tests/test_build_meta.py
setuptools/tests/test_build_py.py
-setuptools/tests/test_config.py
+setuptools/tests/test_config_discovery.py
setuptools/tests/test_dep_util.py
setuptools/tests/test_depends.py
setuptools/tests/test_develop.py
setuptools/tests/test_dist.py
setuptools/tests/test_dist_info.py
+setuptools/tests/test_distutils_adoption.py
setuptools/tests/test_easy_install.py
+setuptools/tests/test_editable_install.py
setuptools/tests/test_egg_info.py
+setuptools/tests/test_extern.py
setuptools/tests/test_find_packages.py
+setuptools/tests/test_find_py_modules.py
+setuptools/tests/test_glob.py
setuptools/tests/test_install_scripts.py
setuptools/tests/test_integration.py
+setuptools/tests/test_logging.py
setuptools/tests/test_manifest.py
setuptools/tests/test_msvc.py
+setuptools/tests/test_msvc14.py
setuptools/tests/test_namespaces.py
setuptools/tests/test_packageindex.py
+setuptools/tests/test_register.py
setuptools/tests/test_sandbox.py
setuptools/tests/test_sdist.py
+setuptools/tests/test_setopt.py
setuptools/tests/test_setuptools.py
setuptools/tests/test_test.py
setuptools/tests/test_unicode_utils.py
-setuptools/tests/test_upload_docs.py
+setuptools/tests/test_upload.py
setuptools/tests/test_virtualenv.py
setuptools/tests/test_wheel.py
setuptools/tests/test_windows_wrappers.py
setuptools/tests/text.py
setuptools/tests/textwrap.py
+setuptools/tests/config/__init__.py
+setuptools/tests/config/test_apply_pyprojecttoml.py
+setuptools/tests/config/test_expand.py
+setuptools/tests/config/test_pyprojecttoml.py
+setuptools/tests/config/test_setupcfg.py
setuptools/tests/indexes/test_links_priority/external.html
setuptools/tests/indexes/test_links_priority/simple/foobar/index.html
-tests/manual_test.py
-tests/test_pypi.py \ No newline at end of file
+setuptools/tests/integration/__init__.py
+setuptools/tests/integration/helpers.py
+setuptools/tests/integration/test_pip_install_sdist.py
+tools/finalize.py
+tools/msvc-build-launcher-arm64.cmd
+tools/msvc-build-launcher.cmd
+tools/ppc64le-patch.py
+tools/towncrier_template.rst
+tools/vendored.py \ No newline at end of file
diff --git a/setuptools.egg-info/dependency_links.txt b/setuptools.egg-info/dependency_links.txt
index e87d021..8b13789 100644
--- a/setuptools.egg-info/dependency_links.txt
+++ b/setuptools.egg-info/dependency_links.txt
@@ -1,2 +1 @@
-https://files.pythonhosted.org/packages/source/c/certifi/certifi-2016.9.26.tar.gz#md5=baa81e951a29958563689d868ef1064d
-https://files.pythonhosted.org/packages/source/w/wincertstore/wincertstore-0.2.zip#md5=ae728f2f007185648d0c7a8679b361e2
+
diff --git a/setuptools.egg-info/entry_points.txt b/setuptools.egg-info/entry_points.txt
index 4159fd0..18041f6 100644
--- a/setuptools.egg-info/entry_points.txt
+++ b/setuptools.egg-info/entry_points.txt
@@ -1,12 +1,7 @@
-[console_scripts]
-easy_install = setuptools.command.easy_install:main
-easy_install-3.6 = setuptools.command.easy_install:main
-
[distutils.commands]
alias = setuptools.command.alias:alias
bdist_egg = setuptools.command.bdist_egg:bdist_egg
bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm
-bdist_wininst = setuptools.command.bdist_wininst:bdist_wininst
build_clib = setuptools.command.build_clib:build_clib
build_ext = setuptools.command.build_ext:build_ext
build_py = setuptools.command.build_py:build_py
@@ -18,17 +13,14 @@ install = setuptools.command.install:install
install_egg_info = setuptools.command.install_egg_info:install_egg_info
install_lib = setuptools.command.install_lib:install_lib
install_scripts = setuptools.command.install_scripts:install_scripts
-register = setuptools.command.register:register
rotate = setuptools.command.rotate:rotate
saveopts = setuptools.command.saveopts:saveopts
sdist = setuptools.command.sdist:sdist
setopt = setuptools.command.setopt:setopt
test = setuptools.command.test:test
-upload = setuptools.command.upload:upload
upload_docs = setuptools.command.upload_docs:upload_docs
[distutils.setup_keywords]
-convert_2to3_doctests = setuptools.dist:assert_string_list
dependency_links = setuptools.dist:assert_string_list
eager_resources = setuptools.dist:assert_string_list
entry_points = setuptools.dist:check_entry_points
@@ -45,9 +37,7 @@ test_loader = setuptools.dist:check_importable
test_runner = setuptools.dist:check_importable
test_suite = setuptools.dist:check_test_suite
tests_require = setuptools.dist:check_requirements
-use_2to3 = setuptools.dist:assert_bool
-use_2to3_exclude_fixers = setuptools.dist:assert_string_list
-use_2to3_fixers = setuptools.dist:assert_string_list
+use_2to3 = setuptools.dist:invalid_unless_false
zip_safe = setuptools.dist:assert_bool
[egg_info.writers]
@@ -60,6 +50,6 @@ namespace_packages.txt = setuptools.command.egg_info:overwrite_arg
requires.txt = setuptools.command.egg_info:write_requirements
top_level.txt = setuptools.command.egg_info:write_toplevel_names
-[setuptools.installation]
-eggsecutable = setuptools.command.easy_install:bootstrap
-
+[setuptools.finalize_distribution_options]
+keywords = setuptools.dist:Distribution._finalize_setup_keywords
+parent_finalize = setuptools.dist:_Distribution.finalize_options
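The entry points declared above are ordinary package metadata and can be enumerated at runtime. A minimal sketch using pkg_resources (which ships with setuptools itself); the group names come straight from the file above, the printing loop is illustrative only:

    import pkg_resources

    # Commands that setuptools contributes to distutils (alias, bdist_egg, sdist, ...)
    for ep in pkg_resources.iter_entry_points('distutils.commands'):
        print(ep.name, '->', ep.module_name)

    # The finalize_distribution_options hooks kept in this release
    for ep in pkg_resources.iter_entry_points('setuptools.finalize_distribution_options'):
        print(ep.name, '->', ep.module_name)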
diff --git a/setuptools.egg-info/requires.txt b/setuptools.egg-info/requires.txt
index c1529e4..061c646 100644
--- a/setuptools.egg-info/requires.txt
+++ b/setuptools.egg-info/requires.txt
@@ -1,6 +1,52 @@
[certs]
-certifi==2016.9.26
-[ssl:sys_platform=='win32']
-wincertstore==0.2
+[docs]
+sphinx
+jaraco.packaging>=9
+rst.linker>=1.9
+jaraco.tidelift>=1.4
+pygments-github-lexers==0.0.5
+sphinx-favicon
+sphinx-inline-tabs
+sphinxcontrib-towncrier
+furo
+
+[ssl]
+
+[testing]
+pytest>=6
+pytest-checkdocs>=2.4
+pytest-flake8
+pytest-enabler>=1.0.1
+pytest-perf
+mock
+flake8-2020
+virtualenv>=13.0.0
+wheel
+pip>=19.1
+jaraco.envs>=2.2
+pytest-xdist
+jaraco.path>=3.2.0
+build[virtualenv]
+filelock>=3.4.0
+pip_run>=8.8
+ini2toml[lite]>=0.9
+tomli-w>=1.0.0
+
+[testing-integration]
+pytest
+pytest-xdist
+pytest-enabler
+virtualenv>=13.0.0
+tomli
+wheel
+jaraco.path>=3.2.0
+jaraco.envs>=2.2
+build[virtualenv]
+filelock>=3.4.0
+
+[testing:platform_python_implementation != "PyPy"]
+pytest-black>=0.3.7
+pytest-cov
+pytest-mypy>=0.9.1
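The bracketed sections above are extras: optional dependency groups that are only installed on request (for example via "setuptools[testing]"). A minimal sketch of reading them back from installed metadata with the standard library, assuming this setuptools release is the one installed in the running interpreter:

    import importlib.metadata

    meta = importlib.metadata.metadata('setuptools')
    print(meta.get_all('Provides-Extra'))             # names of the optional groups above
    print(importlib.metadata.requires('setuptools'))  # full requirement strings with markers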
diff --git a/setuptools.egg-info/top_level.txt b/setuptools.egg-info/top_level.txt
index 4577c6a..b5ac107 100644
--- a/setuptools.egg-info/top_level.txt
+++ b/setuptools.egg-info/top_level.txt
@@ -1,3 +1,3 @@
-easy_install
+_distutils_hack
pkg_resources
setuptools
diff --git a/setuptools/__init__.py b/setuptools/__init__.py
index 7da47fb..502d2a2 100644
--- a/setuptools/__init__.py
+++ b/setuptools/__init__.py
@@ -1,122 +1,71 @@
"""Extensions to the 'distutils' for large or complex distributions"""
-import os
import functools
+import os
+import re
+import warnings
+
+import _distutils_hack.override # noqa: F401
+
import distutils.core
-import distutils.filelist
-from distutils.util import convert_path
-from fnmatch import fnmatchcase
+from distutils.errors import DistutilsOptionError
+from distutils.util import convert_path as _convert_path
-from setuptools.extern.six.moves import filter, map
+from ._deprecation_warning import SetuptoolsDeprecationWarning
import setuptools.version
from setuptools.extension import Extension
-from setuptools.dist import Distribution, Feature
+from setuptools.dist import Distribution
from setuptools.depends import Require
+from setuptools.discovery import PackageFinder, PEP420PackageFinder
from . import monkey
+from . import logging
+
__all__ = [
- 'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require',
+ 'setup',
+ 'Distribution',
+ 'Command',
+ 'Extension',
+ 'Require',
+ 'SetuptoolsDeprecationWarning',
'find_packages',
+ 'find_namespace_packages',
]
__version__ = setuptools.version.__version__
bootstrap_install_from = None
-# If we run 2to3 on .py files, should we also convert docstrings?
-# Default: yes; assume that we can detect doctests reliably
-run_2to3_on_doctests = True
-# Standard package names for fixer packages
-lib2to3_fixer_packages = ['lib2to3.fixes']
-
-
-class PackageFinder(object):
- """
- Generate a list of all Python packages found within a directory
- """
- @classmethod
- def find(cls, where='.', exclude=(), include=('*',)):
- """Return a list all Python packages found within directory 'where'
-
- 'where' is the root directory which will be searched for packages. It
- should be supplied as a "cross-platform" (i.e. URL-style) path; it will
- be converted to the appropriate local path syntax.
-
- 'exclude' is a sequence of package names to exclude; '*' can be used
- as a wildcard in the names, such that 'foo.*' will exclude all
- subpackages of 'foo' (but not 'foo' itself).
-
- 'include' is a sequence of package names to include. If it's
- specified, only the named packages will be included. If it's not
- specified, all found packages will be included. 'include' can contain
- shell style wildcard patterns just like 'exclude'.
- """
+find_packages = PackageFinder.find
+find_namespace_packages = PEP420PackageFinder.find
- return list(cls._find_packages_iter(
- convert_path(where),
- cls._build_filter('ez_setup', '*__pycache__', *exclude),
- cls._build_filter(*include)))
- @classmethod
- def _find_packages_iter(cls, where, exclude, include):
- """
- All the packages found in 'where' that pass the 'include' filter, but
- not the 'exclude' filter.
- """
- for root, dirs, files in os.walk(where, followlinks=True):
- # Copy dirs to iterate over it, then empty dirs.
- all_dirs = dirs[:]
- dirs[:] = []
-
- for dir in all_dirs:
- full_path = os.path.join(root, dir)
- rel_path = os.path.relpath(full_path, where)
- package = rel_path.replace(os.path.sep, '.')
-
- # Skip directory trees that are not valid packages
- if ('.' in dir or not cls._looks_like_package(full_path)):
- continue
-
- # Should this package be included?
- if include(package) and not exclude(package):
- yield package
-
- # Keep searching subdirectories, as there may be more packages
- # down there, even if the parent was excluded.
- dirs.append(dir)
-
- @staticmethod
- def _looks_like_package(path):
- """Does a directory look like a package?"""
- return os.path.isfile(os.path.join(path, '__init__.py'))
-
- @staticmethod
- def _build_filter(*patterns):
+def _install_setup_requires(attrs):
+ # Note: do not use `setuptools.Distribution` directly, as
+ # our PEP 517 backend patches `distutils.core.Distribution`.
+ class MinimalDistribution(distutils.core.Distribution):
"""
- Given a list of patterns, return a callable that will be true only if
- the input matches at least one of the patterns.
+ A minimal version of a distribution for supporting the
+ fetch_build_eggs interface.
"""
- return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns)
-
-class PEP420PackageFinder(PackageFinder):
- @staticmethod
- def _looks_like_package(path):
- return True
+ def __init__(self, attrs):
+ _incl = 'dependency_links', 'setup_requires'
+ filtered = {k: attrs[k] for k in set(_incl) & set(attrs)}
+ super().__init__(filtered)
+ # Prevent accidentally triggering discovery with incomplete set of attrs
+ self.set_defaults._disable()
+ def finalize_options(self):
+ """
+ Disable finalize_options to avoid building the working set.
+ Ref #2158.
+ """
-find_packages = PackageFinder.find
-
+ dist = MinimalDistribution(attrs)
-def _install_setup_requires(attrs):
- # Note: do not use `setuptools.Distribution` directly, as
- # our PEP 517 backend patch `distutils.core.Distribution`.
- dist = distutils.core.Distribution(dict(
- (k, v) for k, v in attrs.items()
- if k in ('dependency_links', 'setup_requires')
- ))
# Honor setup.cfg's options.
dist.parse_config_files(ignore_option_errors=True)
if dist.setup_requires:
@@ -125,9 +74,11 @@ def _install_setup_requires(attrs):
def setup(**attrs):
# Make sure we have any requirements needed to interpret 'attrs'.
+ logging.configure()
_install_setup_requires(attrs)
return distutils.core.setup(**attrs)
+
setup.__doc__ = distutils.core.setup.__doc__
@@ -144,9 +95,41 @@ class Command(_Command):
Construct the command for dist, updating
vars(self) with any keyword parameters.
"""
- _Command.__init__(self, dist)
+ super().__init__(dist)
vars(self).update(kw)
+ def _ensure_stringlike(self, option, what, default=None):
+ val = getattr(self, option)
+ if val is None:
+ setattr(self, option, default)
+ return default
+ elif not isinstance(val, str):
+ raise DistutilsOptionError(
+ "'%s' must be a %s (got `%s`)" % (option, what, val)
+ )
+ return val
+
+ def ensure_string_list(self, option):
+ r"""Ensure that 'option' is a list of strings. If 'option' is
+ currently a string, we split it either on /,\s*/ or /\s+/, so
+ "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become
+ ["foo", "bar", "baz"].
+ """
+ val = getattr(self, option)
+ if val is None:
+ return
+ elif isinstance(val, str):
+ setattr(self, option, re.split(r',\s*|\s+', val))
+ else:
+ if isinstance(val, list):
+ ok = all(isinstance(v, str) for v in val)
+ else:
+ ok = False
+ if not ok:
+ raise DistutilsOptionError(
+ "'%s' must be a list of strings (got %r)" % (option, val)
+ )
+
def reinitialize_command(self, command, reinit_subcommands=0, **kw):
cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
vars(cmd).update(kw)
@@ -177,4 +160,22 @@ def findall(dir=os.curdir):
return list(files)
+@functools.wraps(_convert_path)
+def convert_path(pathname):
+ from inspect import cleandoc
+
+ msg = """
+ The function `convert_path` is considered internal and not part of the public API.
+ Its direct usage by 3rd-party packages is considered deprecated and the function
+ may be removed in the future.
+ """
+ warnings.warn(cleandoc(msg), SetuptoolsDeprecationWarning)
+ return _convert_path(pathname)
+
+
+class sic(str):
+ """Treat this string as-is (https://en.wikipedia.org/wiki/Sic)"""
+
+
+# Apply monkey patches
monkey.patch_all()
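After this rewrite, find_packages() and find_namespace_packages() are thin aliases for PackageFinder.find and PEP420PackageFinder.find from setuptools.discovery, and the Feature class is gone from the public API. A minimal setup.py sketch using the finders; the project name and src/ layout are hypothetical:

    from setuptools import setup, find_packages

    setup(
        name='example',
        version='0.0.1',
        package_dir={'': 'src'},
        # use find_namespace_packages() instead for PEP 420 packages without __init__.py
        packages=find_packages(where='src', exclude=['tests*']),
    )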
diff --git a/setuptools/_deprecation_warning.py b/setuptools/_deprecation_warning.py
new file mode 100644
index 0000000..086b64d
--- /dev/null
+++ b/setuptools/_deprecation_warning.py
@@ -0,0 +1,7 @@
+class SetuptoolsDeprecationWarning(Warning):
+ """
+ Base class for warning deprecations in ``setuptools``
+
+ This class is not derived from ``DeprecationWarning``, and as such is
+ visible by default.
+ """
diff --git a/setuptools/_distutils/__init__.py b/setuptools/_distutils/__init__.py
new file mode 100644
index 0000000..8fd493b
--- /dev/null
+++ b/setuptools/_distutils/__init__.py
@@ -0,0 +1,24 @@
+"""distutils
+
+The main package for the Python Module Distribution Utilities. Normally
+used from a setup script as
+
+ from distutils.core import setup
+
+ setup (...)
+"""
+
+import sys
+import importlib
+
+__version__ = sys.version[:sys.version.index(' ')]
+
+
+try:
+ # Allow Debian and pkgsrc (only) to customize system
+ # behavior. Ref pypa/distutils#2 and pypa/distutils#16.
+ # This hook is deprecated and no other environments
+ # should use it.
+ importlib.import_module('_distutils_system_mod')
+except ImportError:
+ pass
diff --git a/setuptools/_distutils/_collections.py b/setuptools/_distutils/_collections.py
new file mode 100644
index 0000000..98fce80
--- /dev/null
+++ b/setuptools/_distutils/_collections.py
@@ -0,0 +1,56 @@
+import collections
+import itertools
+
+
+# from jaraco.collections 3.5.1
+class DictStack(list, collections.abc.Mapping):
+ """
+ A stack of dictionaries that behaves as a view on those dictionaries,
+ giving preference to the last.
+
+ >>> stack = DictStack([dict(a=1, c=2), dict(b=2, a=2)])
+ >>> stack['a']
+ 2
+ >>> stack['b']
+ 2
+ >>> stack['c']
+ 2
+ >>> len(stack)
+ 3
+ >>> stack.push(dict(a=3))
+ >>> stack['a']
+ 3
+ >>> set(stack.keys()) == set(['a', 'b', 'c'])
+ True
+ >>> set(stack.items()) == set([('a', 3), ('b', 2), ('c', 2)])
+ True
+ >>> dict(**stack) == dict(stack) == dict(a=3, c=2, b=2)
+ True
+ >>> d = stack.pop()
+ >>> stack['a']
+ 2
+ >>> d = stack.pop()
+ >>> stack['a']
+ 1
+ >>> stack.get('b', None)
+ >>> 'c' in stack
+ True
+ """
+
+ def __iter__(self):
+ dicts = list.__iter__(self)
+ return iter(set(itertools.chain.from_iterable(c.keys() for c in dicts)))
+
+ def __getitem__(self, key):
+ for scope in reversed(tuple(list.__iter__(self))):
+ if key in scope:
+ return scope[key]
+ raise KeyError(key)
+
+ push = list.append
+
+ def __contains__(self, other):
+ return collections.abc.Mapping.__contains__(self, other)
+
+ def __len__(self):
+ return len(list(iter(self)))
diff --git a/setuptools/_distutils/_macos_compat.py b/setuptools/_distutils/_macos_compat.py
new file mode 100644
index 0000000..17769e9
--- /dev/null
+++ b/setuptools/_distutils/_macos_compat.py
@@ -0,0 +1,12 @@
+import sys
+import importlib
+
+
+def bypass_compiler_fixup(cmd, args):
+ return cmd
+
+
+if sys.platform == 'darwin':
+ compiler_fixup = importlib.import_module('_osx_support').compiler_fixup
+else:
+ compiler_fixup = bypass_compiler_fixup
diff --git a/setuptools/_distutils/_msvccompiler.py b/setuptools/_distutils/_msvccompiler.py
new file mode 100644
index 0000000..f2f801c
--- /dev/null
+++ b/setuptools/_distutils/_msvccompiler.py
@@ -0,0 +1,561 @@
+"""distutils._msvccompiler
+
+Contains MSVCCompiler, an implementation of the abstract CCompiler class
+for Microsoft Visual Studio 2015.
+
+The module is compatible with VS 2015 and later. You can find legacy support
+for older versions in distutils.msvc9compiler and distutils.msvccompiler.
+"""
+
+# Written by Perry Stoll
+# hacked by Robin Becker and Thomas Heller to do a better job of
+# finding DevStudio (through the registry)
+# ported to VS 2005 and VS 2008 by Christian Heimes
+# ported to VS 2015 by Steve Dower
+
+import os
+import subprocess
+import contextlib
+import warnings
+import unittest.mock
+with contextlib.suppress(ImportError):
+ import winreg
+
+from distutils.errors import DistutilsExecError, DistutilsPlatformError, \
+ CompileError, LibError, LinkError
+from distutils.ccompiler import CCompiler, gen_lib_options
+from distutils import log
+from distutils.util import get_platform
+
+from itertools import count
+
+def _find_vc2015():
+ try:
+ key = winreg.OpenKeyEx(
+ winreg.HKEY_LOCAL_MACHINE,
+ r"Software\Microsoft\VisualStudio\SxS\VC7",
+ access=winreg.KEY_READ | winreg.KEY_WOW64_32KEY
+ )
+ except OSError:
+ log.debug("Visual C++ is not registered")
+ return None, None
+
+ best_version = 0
+ best_dir = None
+ with key:
+ for i in count():
+ try:
+ v, vc_dir, vt = winreg.EnumValue(key, i)
+ except OSError:
+ break
+ if v and vt == winreg.REG_SZ and os.path.isdir(vc_dir):
+ try:
+ version = int(float(v))
+ except (ValueError, TypeError):
+ continue
+ if version >= 14 and version > best_version:
+ best_version, best_dir = version, vc_dir
+ return best_version, best_dir
+
+def _find_vc2017():
+ """Returns "15, path" based on the result of invoking vswhere.exe
+ If no install is found, returns "None, None"
+
+ The version is returned to avoid unnecessarily changing the function
+ result. It may be ignored when the path is not None.
+
+ If vswhere.exe is not available, by definition, VS 2017 is not
+ installed.
+ """
+ root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
+ if not root:
+ return None, None
+
+ try:
+ path = subprocess.check_output([
+ os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"),
+ "-latest",
+ "-prerelease",
+ "-requires", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64",
+ "-property", "installationPath",
+ "-products", "*",
+ ], encoding="mbcs", errors="strict").strip()
+ except (subprocess.CalledProcessError, OSError, UnicodeDecodeError):
+ return None, None
+
+ path = os.path.join(path, "VC", "Auxiliary", "Build")
+ if os.path.isdir(path):
+ return 15, path
+
+ return None, None
+
+PLAT_SPEC_TO_RUNTIME = {
+ 'x86' : 'x86',
+ 'x86_amd64' : 'x64',
+ 'x86_arm' : 'arm',
+ 'x86_arm64' : 'arm64'
+}
+
+def _find_vcvarsall(plat_spec):
+ # bpo-38597: Removed vcruntime return value
+ _, best_dir = _find_vc2017()
+
+ if not best_dir:
+ best_version, best_dir = _find_vc2015()
+
+ if not best_dir:
+ log.debug("No suitable Visual C++ version found")
+ return None, None
+
+ vcvarsall = os.path.join(best_dir, "vcvarsall.bat")
+ if not os.path.isfile(vcvarsall):
+ log.debug("%s cannot be found", vcvarsall)
+ return None, None
+
+ return vcvarsall, None
+
+def _get_vc_env(plat_spec):
+ if os.getenv("DISTUTILS_USE_SDK"):
+ return {
+ key.lower(): value
+ for key, value in os.environ.items()
+ }
+
+ vcvarsall, _ = _find_vcvarsall(plat_spec)
+ if not vcvarsall:
+ raise DistutilsPlatformError("Unable to find vcvarsall.bat")
+
+ try:
+ out = subprocess.check_output(
+ 'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec),
+ stderr=subprocess.STDOUT,
+ ).decode('utf-16le', errors='replace')
+ except subprocess.CalledProcessError as exc:
+ log.error(exc.output)
+ raise DistutilsPlatformError("Error executing {}"
+ .format(exc.cmd))
+
+ env = {
+ key.lower(): value
+ for key, _, value in
+ (line.partition('=') for line in out.splitlines())
+ if key and value
+ }
+
+ return env
+
+def _find_exe(exe, paths=None):
+ """Return path to an MSVC executable program.
+
+ Tries to find the program in several places: first, one of the
+ MSVC program search paths from the registry; next, the directories
+ in the PATH environment variable. If any of those work, return an
+ absolute path that is known to exist. If none of them work, just
+ return the original program name, 'exe'.
+ """
+ if not paths:
+ paths = os.getenv('path').split(os.pathsep)
+ for p in paths:
+ fn = os.path.join(os.path.abspath(p), exe)
+ if os.path.isfile(fn):
+ return fn
+ return exe
+
+# A map keyed by get_platform() return values to values accepted by
+# 'vcvarsall.bat'. Always cross-compile from x86 to work with the
+# lighter-weight MSVC installs that do not include native 64-bit tools.
+PLAT_TO_VCVARS = {
+ 'win32' : 'x86',
+ 'win-amd64' : 'x86_amd64',
+ 'win-arm32' : 'x86_arm',
+ 'win-arm64' : 'x86_arm64'
+}
+
+class MSVCCompiler(CCompiler) :
+ """Concrete class that implements an interface to Microsoft Visual C++,
+ as defined by the CCompiler abstract class."""
+
+ compiler_type = 'msvc'
+
+ # Just set this so CCompiler's constructor doesn't barf. We currently
+ # don't use the 'set_executables()' bureaucracy provided by CCompiler,
+ # as it really isn't necessary for this sort of single-compiler class.
+ # Would be nice to have a consistent interface with UnixCCompiler,
+ # though, so it's worth thinking about.
+ executables = {}
+
+ # Private class data (need to distinguish C from C++ source for compiler)
+ _c_extensions = ['.c']
+ _cpp_extensions = ['.cc', '.cpp', '.cxx']
+ _rc_extensions = ['.rc']
+ _mc_extensions = ['.mc']
+
+ # Needed for the filename generation methods provided by the
+ # base class, CCompiler.
+ src_extensions = (_c_extensions + _cpp_extensions +
+ _rc_extensions + _mc_extensions)
+ res_extension = '.res'
+ obj_extension = '.obj'
+ static_lib_extension = '.lib'
+ shared_lib_extension = '.dll'
+ static_lib_format = shared_lib_format = '%s%s'
+ exe_extension = '.exe'
+
+
+ def __init__(self, verbose=0, dry_run=0, force=0):
+ super().__init__(verbose, dry_run, force)
+ # target platform (.plat_name is consistent with 'bdist')
+ self.plat_name = None
+ self.initialized = False
+
+ def initialize(self, plat_name=None):
+ # multi-init means we would need to check platform same each time...
+ assert not self.initialized, "don't init multiple times"
+ if plat_name is None:
+ plat_name = get_platform()
+ # sanity check for platforms to prevent obscure errors later.
+ if plat_name not in PLAT_TO_VCVARS:
+ raise DistutilsPlatformError("--plat-name must be one of {}"
+ .format(tuple(PLAT_TO_VCVARS)))
+
+ # Get the vcvarsall.bat spec for the requested platform.
+ plat_spec = PLAT_TO_VCVARS[plat_name]
+
+ vc_env = _get_vc_env(plat_spec)
+ if not vc_env:
+ raise DistutilsPlatformError("Unable to find a compatible "
+ "Visual Studio installation.")
+
+ self._paths = vc_env.get('path', '')
+ paths = self._paths.split(os.pathsep)
+ self.cc = _find_exe("cl.exe", paths)
+ self.linker = _find_exe("link.exe", paths)
+ self.lib = _find_exe("lib.exe", paths)
+ self.rc = _find_exe("rc.exe", paths) # resource compiler
+ self.mc = _find_exe("mc.exe", paths) # message compiler
+ self.mt = _find_exe("mt.exe", paths) # manifest tool
+
+ for dir in vc_env.get('include', '').split(os.pathsep):
+ if dir:
+ self.add_include_dir(dir.rstrip(os.sep))
+
+ for dir in vc_env.get('lib', '').split(os.pathsep):
+ if dir:
+ self.add_library_dir(dir.rstrip(os.sep))
+
+ self.preprocess_options = None
+ # bpo-38597: Always compile with dynamic linking
+ # Future releases of Python 3.x will include all past
+ # versions of vcruntime*.dll for compatibility.
+ self.compile_options = [
+ '/nologo', '/O2', '/W3', '/GL', '/DNDEBUG', '/MD'
+ ]
+
+ self.compile_options_debug = [
+ '/nologo', '/Od', '/MDd', '/Zi', '/W3', '/D_DEBUG'
+ ]
+
+ ldflags = [
+ '/nologo', '/INCREMENTAL:NO', '/LTCG'
+ ]
+
+ ldflags_debug = [
+ '/nologo', '/INCREMENTAL:NO', '/LTCG', '/DEBUG:FULL'
+ ]
+
+ self.ldflags_exe = [*ldflags, '/MANIFEST:EMBED,ID=1']
+ self.ldflags_exe_debug = [*ldflags_debug, '/MANIFEST:EMBED,ID=1']
+ self.ldflags_shared = [*ldflags, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO']
+ self.ldflags_shared_debug = [*ldflags_debug, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO']
+ self.ldflags_static = [*ldflags]
+ self.ldflags_static_debug = [*ldflags_debug]
+
+ self._ldflags = {
+ (CCompiler.EXECUTABLE, None): self.ldflags_exe,
+ (CCompiler.EXECUTABLE, False): self.ldflags_exe,
+ (CCompiler.EXECUTABLE, True): self.ldflags_exe_debug,
+ (CCompiler.SHARED_OBJECT, None): self.ldflags_shared,
+ (CCompiler.SHARED_OBJECT, False): self.ldflags_shared,
+ (CCompiler.SHARED_OBJECT, True): self.ldflags_shared_debug,
+ (CCompiler.SHARED_LIBRARY, None): self.ldflags_static,
+ (CCompiler.SHARED_LIBRARY, False): self.ldflags_static,
+ (CCompiler.SHARED_LIBRARY, True): self.ldflags_static_debug,
+ }
+
+ self.initialized = True
+
+ # -- Worker methods ------------------------------------------------
+
+ def object_filenames(self,
+ source_filenames,
+ strip_dir=0,
+ output_dir=''):
+ ext_map = {
+ **{ext: self.obj_extension for ext in self.src_extensions},
+ **{ext: self.res_extension for ext in self._rc_extensions + self._mc_extensions},
+ }
+
+ output_dir = output_dir or ''
+
+ def make_out_path(p):
+ base, ext = os.path.splitext(p)
+ if strip_dir:
+ base = os.path.basename(base)
+ else:
+ _, base = os.path.splitdrive(base)
+ if base.startswith((os.path.sep, os.path.altsep)):
+ base = base[1:]
+ try:
+ # XXX: This may produce absurdly long paths. We should check
+ # the length of the result and trim base until we fit within
+ # 260 characters.
+ return os.path.join(output_dir, base + ext_map[ext])
+ except LookupError:
+ # Better to raise an exception instead of silently continuing
+ # and later complain about sources and targets having
+ # different lengths
+ raise CompileError("Don't know how to compile {}".format(p))
+
+ return list(map(make_out_path, source_filenames))
+
+
+ def compile(self, sources,
+ output_dir=None, macros=None, include_dirs=None, debug=0,
+ extra_preargs=None, extra_postargs=None, depends=None):
+
+ if not self.initialized:
+ self.initialize()
+ compile_info = self._setup_compile(output_dir, macros, include_dirs,
+ sources, depends, extra_postargs)
+ macros, objects, extra_postargs, pp_opts, build = compile_info
+
+ compile_opts = extra_preargs or []
+ compile_opts.append('/c')
+ if debug:
+ compile_opts.extend(self.compile_options_debug)
+ else:
+ compile_opts.extend(self.compile_options)
+
+
+ add_cpp_opts = False
+
+ for obj in objects:
+ try:
+ src, ext = build[obj]
+ except KeyError:
+ continue
+ if debug:
+ # pass the full pathname to MSVC in debug mode,
+ # this allows the debugger to find the source file
+ # without asking the user to browse for it
+ src = os.path.abspath(src)
+
+ if ext in self._c_extensions:
+ input_opt = "/Tc" + src
+ elif ext in self._cpp_extensions:
+ input_opt = "/Tp" + src
+ add_cpp_opts = True
+ elif ext in self._rc_extensions:
+ # compile .RC to .RES file
+ input_opt = src
+ output_opt = "/fo" + obj
+ try:
+ self.spawn([self.rc] + pp_opts + [output_opt, input_opt])
+ except DistutilsExecError as msg:
+ raise CompileError(msg)
+ continue
+ elif ext in self._mc_extensions:
+ # Compile .MC to .RC file to .RES file.
+ # * '-h dir' specifies the directory for the
+ # generated include file
+ # * '-r dir' specifies the target directory of the
+ # generated RC file and the binary message resource
+ # it includes
+ #
+ # For now (since there are no options to change this),
+ # we use the source-directory for the include file and
+ # the build directory for the RC file and message
+ # resources. This works at least for win32all.
+ h_dir = os.path.dirname(src)
+ rc_dir = os.path.dirname(obj)
+ try:
+ # first compile .MC to .RC and .H file
+ self.spawn([self.mc, '-h', h_dir, '-r', rc_dir, src])
+ base, _ = os.path.splitext(os.path.basename (src))
+ rc_file = os.path.join(rc_dir, base + '.rc')
+ # then compile .RC to .RES file
+ self.spawn([self.rc, "/fo" + obj, rc_file])
+
+ except DistutilsExecError as msg:
+ raise CompileError(msg)
+ continue
+ else:
+ # how to handle this file?
+ raise CompileError("Don't know how to compile {} to {}"
+ .format(src, obj))
+
+ args = [self.cc] + compile_opts + pp_opts
+ if add_cpp_opts:
+ args.append('/EHsc')
+ args.append(input_opt)
+ args.append("/Fo" + obj)
+ args.extend(extra_postargs)
+
+ try:
+ self.spawn(args)
+ except DistutilsExecError as msg:
+ raise CompileError(msg)
+
+ return objects
+
+
+ def create_static_lib(self,
+ objects,
+ output_libname,
+ output_dir=None,
+ debug=0,
+ target_lang=None):
+
+ if not self.initialized:
+ self.initialize()
+ objects, output_dir = self._fix_object_args(objects, output_dir)
+ output_filename = self.library_filename(output_libname,
+ output_dir=output_dir)
+
+ if self._need_link(objects, output_filename):
+ lib_args = objects + ['/OUT:' + output_filename]
+ if debug:
+ pass # XXX what goes here?
+ try:
+ log.debug('Executing "%s" %s', self.lib, ' '.join(lib_args))
+ self.spawn([self.lib] + lib_args)
+ except DistutilsExecError as msg:
+ raise LibError(msg)
+ else:
+ log.debug("skipping %s (up-to-date)", output_filename)
+
+
+ def link(self,
+ target_desc,
+ objects,
+ output_filename,
+ output_dir=None,
+ libraries=None,
+ library_dirs=None,
+ runtime_library_dirs=None,
+ export_symbols=None,
+ debug=0,
+ extra_preargs=None,
+ extra_postargs=None,
+ build_temp=None,
+ target_lang=None):
+
+ if not self.initialized:
+ self.initialize()
+ objects, output_dir = self._fix_object_args(objects, output_dir)
+ fixed_args = self._fix_lib_args(libraries, library_dirs,
+ runtime_library_dirs)
+ libraries, library_dirs, runtime_library_dirs = fixed_args
+
+ if runtime_library_dirs:
+ self.warn("I don't know what to do with 'runtime_library_dirs': "
+ + str(runtime_library_dirs))
+
+ lib_opts = gen_lib_options(self,
+ library_dirs, runtime_library_dirs,
+ libraries)
+ if output_dir is not None:
+ output_filename = os.path.join(output_dir, output_filename)
+
+ if self._need_link(objects, output_filename):
+ ldflags = self._ldflags[target_desc, debug]
+
+ export_opts = ["/EXPORT:" + sym for sym in (export_symbols or [])]
+
+ ld_args = (ldflags + lib_opts + export_opts +
+ objects + ['/OUT:' + output_filename])
+
+ # The MSVC linker generates .lib and .exp files, which cannot be
+ # suppressed by any linker switches. The .lib files may even be
+ # needed! Make sure they are generated in the temporary build
+ # directory. Since they have different names for debug and release
+ # builds, they can go into the same directory.
+ build_temp = os.path.dirname(objects[0])
+ if export_symbols is not None:
+ (dll_name, dll_ext) = os.path.splitext(
+ os.path.basename(output_filename))
+ implib_file = os.path.join(
+ build_temp,
+ self.library_filename(dll_name))
+ ld_args.append ('/IMPLIB:' + implib_file)
+
+ if extra_preargs:
+ ld_args[:0] = extra_preargs
+ if extra_postargs:
+ ld_args.extend(extra_postargs)
+
+ output_dir = os.path.dirname(os.path.abspath(output_filename))
+ self.mkpath(output_dir)
+ try:
+ log.debug('Executing "%s" %s', self.linker, ' '.join(ld_args))
+ self.spawn([self.linker] + ld_args)
+ except DistutilsExecError as msg:
+ raise LinkError(msg)
+ else:
+ log.debug("skipping %s (up-to-date)", output_filename)
+
+ def spawn(self, cmd):
+ env = dict(os.environ, PATH=self._paths)
+ with self._fallback_spawn(cmd, env) as fallback:
+ return super().spawn(cmd, env=env)
+ return fallback.value
+
+ @contextlib.contextmanager
+ def _fallback_spawn(self, cmd, env):
+ """
+ Discovered in pypa/distutils#15, some tools monkeypatch the compiler,
+ so the 'env' kwarg causes a TypeError. Detect this condition and
+ restore the legacy, unsafe behavior.
+ """
+ bag = type('Bag', (), {})()
+ try:
+ yield bag
+ except TypeError as exc:
+ if "unexpected keyword argument 'env'" not in str(exc):
+ raise
+ else:
+ return
+ warnings.warn(
+ "Fallback spawn triggered. Please update distutils monkeypatch.")
+ with unittest.mock.patch.dict('os.environ', env):
+ bag.value = super().spawn(cmd)
+
+ # -- Miscellaneous methods -----------------------------------------
+ # These are all used by the 'gen_lib_options()' function, in
+ # ccompiler.py.
+
+ def library_dir_option(self, dir):
+ return "/LIBPATH:" + dir
+
+ def runtime_library_dir_option(self, dir):
+ raise DistutilsPlatformError(
+ "don't know how to set runtime library search path for MSVC")
+
+ def library_option(self, lib):
+ return self.library_filename(lib)
+
+ def find_library_file(self, dirs, lib, debug=0):
+ # Prefer a debugging library if found (and requested), but deal
+ # with it if we don't have one.
+ if debug:
+ try_names = [lib + "_d", lib]
+ else:
+ try_names = [lib]
+ for dir in dirs:
+ for name in try_names:
+ libfile = os.path.join(dir, self.library_filename(name))
+ if os.path.isfile(libfile):
+ return libfile
+ else:
+ # Oops, didn't find it in *any* of 'dirs'
+ return None
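MSVCCompiler is rarely instantiated directly; distutils normally selects it through its compiler factory. A rough sketch of driving it by hand, assuming Windows with Visual Studio 2015 or later installed and a hello.c in the current directory (both are assumptions, not part of this change):

    from distutils.ccompiler import new_compiler

    cc = new_compiler(compiler='msvc')
    cc.initialize()                       # locates vcvarsall.bat, cl.exe and link.exe
    objects = cc.compile(['hello.c'])     # hypothetical source file -> ['hello.obj']
    cc.link_shared_lib(objects, 'hello')  # produces hello.dll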
diff --git a/setuptools/_distutils/archive_util.py b/setuptools/_distutils/archive_util.py
new file mode 100644
index 0000000..565a311
--- /dev/null
+++ b/setuptools/_distutils/archive_util.py
@@ -0,0 +1,256 @@
+"""distutils.archive_util
+
+Utility functions for creating archive files (tarballs, zip files,
+that sort of thing)."""
+
+import os
+from warnings import warn
+import sys
+
+try:
+ import zipfile
+except ImportError:
+ zipfile = None
+
+
+from distutils.errors import DistutilsExecError
+from distutils.spawn import spawn
+from distutils.dir_util import mkpath
+from distutils import log
+
+try:
+ from pwd import getpwnam
+except ImportError:
+ getpwnam = None
+
+try:
+ from grp import getgrnam
+except ImportError:
+ getgrnam = None
+
+def _get_gid(name):
+ """Returns a gid, given a group name."""
+ if getgrnam is None or name is None:
+ return None
+ try:
+ result = getgrnam(name)
+ except KeyError:
+ result = None
+ if result is not None:
+ return result[2]
+ return None
+
+def _get_uid(name):
+ """Returns an uid, given a user name."""
+ if getpwnam is None or name is None:
+ return None
+ try:
+ result = getpwnam(name)
+ except KeyError:
+ result = None
+ if result is not None:
+ return result[2]
+ return None
+
+def make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
+ owner=None, group=None):
+ """Create a (possibly compressed) tar file from all the files under
+ 'base_dir'.
+
+ 'compress' must be "gzip" (the default), "bzip2", "xz", "compress", or
+ None. ("compress" will be deprecated in Python 3.2)
+
+ 'owner' and 'group' can be used to define an owner and a group for the
+ archive that is being built. If not provided, the current owner and group
+ will be used.
+
+ The output tar file will be named 'base_dir' + ".tar", possibly plus
+ the appropriate compression extension (".gz", ".bz2", ".xz" or ".Z").
+
+ Returns the output filename.
+ """
+ tar_compression = {'gzip': 'gz', 'bzip2': 'bz2', 'xz': 'xz', None: '',
+ 'compress': ''}
+ compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'xz': '.xz',
+ 'compress': '.Z'}
+
+ # flags for compression program, each element of list will be an argument
+ if compress is not None and compress not in compress_ext.keys():
+ raise ValueError(
+ "bad value for 'compress': must be None, 'gzip', 'bzip2', "
+ "'xz' or 'compress'")
+
+ archive_name = base_name + '.tar'
+ if compress != 'compress':
+ archive_name += compress_ext.get(compress, '')
+
+ mkpath(os.path.dirname(archive_name), dry_run=dry_run)
+
+ # creating the tarball
+ import tarfile # late import so Python build itself doesn't break
+
+ log.info('Creating tar archive')
+
+ uid = _get_uid(owner)
+ gid = _get_gid(group)
+
+ def _set_uid_gid(tarinfo):
+ if gid is not None:
+ tarinfo.gid = gid
+ tarinfo.gname = group
+ if uid is not None:
+ tarinfo.uid = uid
+ tarinfo.uname = owner
+ return tarinfo
+
+ if not dry_run:
+ tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
+ try:
+ tar.add(base_dir, filter=_set_uid_gid)
+ finally:
+ tar.close()
+
+ # compression using `compress`
+ if compress == 'compress':
+ warn("'compress' will be deprecated.", PendingDeprecationWarning)
+ # the option varies depending on the platform
+ compressed_name = archive_name + compress_ext[compress]
+ if sys.platform == 'win32':
+ cmd = [compress, archive_name, compressed_name]
+ else:
+ cmd = [compress, '-f', archive_name]
+ spawn(cmd, dry_run=dry_run)
+ return compressed_name
+
+ return archive_name
+
+def make_zipfile(base_name, base_dir, verbose=0, dry_run=0):
+ """Create a zip file from all the files under 'base_dir'.
+
+ The output zip file will be named 'base_name' + ".zip". Uses either the
+ "zipfile" Python module (if available) or the InfoZIP "zip" utility
+ (if installed and found on the default search path). If neither tool is
+ available, raises DistutilsExecError. Returns the name of the output zip
+ file.
+ """
+ zip_filename = base_name + ".zip"
+ mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
+
+ # If zipfile module is not available, try spawning an external
+ # 'zip' command.
+ if zipfile is None:
+ if verbose:
+ zipoptions = "-r"
+ else:
+ zipoptions = "-rq"
+
+ try:
+ spawn(["zip", zipoptions, zip_filename, base_dir],
+ dry_run=dry_run)
+ except DistutilsExecError:
+ # XXX really should distinguish between "couldn't find
+ # external 'zip' command" and "zip failed".
+ raise DistutilsExecError(("unable to create zip file '%s': "
+ "could neither import the 'zipfile' module nor "
+ "find a standalone zip utility") % zip_filename)
+
+ else:
+ log.info("creating '%s' and adding '%s' to it",
+ zip_filename, base_dir)
+
+ if not dry_run:
+ try:
+ zip = zipfile.ZipFile(zip_filename, "w",
+ compression=zipfile.ZIP_DEFLATED)
+ except RuntimeError:
+ zip = zipfile.ZipFile(zip_filename, "w",
+ compression=zipfile.ZIP_STORED)
+
+ with zip:
+ if base_dir != os.curdir:
+ path = os.path.normpath(os.path.join(base_dir, ''))
+ zip.write(path, path)
+ log.info("adding '%s'", path)
+ for dirpath, dirnames, filenames in os.walk(base_dir):
+ for name in dirnames:
+ path = os.path.normpath(os.path.join(dirpath, name, ''))
+ zip.write(path, path)
+ log.info("adding '%s'", path)
+ for name in filenames:
+ path = os.path.normpath(os.path.join(dirpath, name))
+ if os.path.isfile(path):
+ zip.write(path, path)
+ log.info("adding '%s'", path)
+
+ return zip_filename
+
+ARCHIVE_FORMATS = {
+ 'gztar': (make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
+ 'bztar': (make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"),
+ 'xztar': (make_tarball, [('compress', 'xz')], "xz'ed tar-file"),
+ 'ztar': (make_tarball, [('compress', 'compress')], "compressed tar file"),
+ 'tar': (make_tarball, [('compress', None)], "uncompressed tar file"),
+ 'zip': (make_zipfile, [],"ZIP file")
+ }
+
+def check_archive_formats(formats):
+ """Returns the first format from the 'format' list that is unknown.
+
+ If all formats are known, returns None
+ """
+ for format in formats:
+ if format not in ARCHIVE_FORMATS:
+ return format
+ return None
+
+def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
+ dry_run=0, owner=None, group=None):
+ """Create an archive file (eg. zip or tar).
+
+ 'base_name' is the name of the file to create, minus any format-specific
+ extension; 'format' is the archive format: one of "zip", "tar", "gztar",
+ "bztar", "xztar", or "ztar".
+
+ 'root_dir' is a directory that will be the root directory of the
+ archive; ie. we typically chdir into 'root_dir' before creating the
+ archive. 'base_dir' is the directory where we start archiving from;
+ ie. 'base_dir' will be the common prefix of all files and
+ directories in the archive. 'root_dir' and 'base_dir' both default
+ to the current directory. Returns the name of the archive file.
+
+ 'owner' and 'group' are used when creating a tar archive. By default,
+ uses the current owner and group.
+ """
+ save_cwd = os.getcwd()
+ if root_dir is not None:
+ log.debug("changing into '%s'", root_dir)
+ base_name = os.path.abspath(base_name)
+ if not dry_run:
+ os.chdir(root_dir)
+
+ if base_dir is None:
+ base_dir = os.curdir
+
+ kwargs = {'dry_run': dry_run}
+
+ try:
+ format_info = ARCHIVE_FORMATS[format]
+ except KeyError:
+ raise ValueError("unknown archive format '%s'" % format)
+
+ func = format_info[0]
+ for arg, val in format_info[1]:
+ kwargs[arg] = val
+
+ if format != 'zip':
+ kwargs['owner'] = owner
+ kwargs['group'] = group
+
+ try:
+ filename = func(base_name, base_dir, **kwargs)
+ finally:
+ if root_dir is not None:
+ log.debug("changing back to '%s'", save_cwd)
+ os.chdir(save_cwd)
+
+ return filename
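make_archive() is the high-level helper the sdist machinery goes through. A minimal sketch of calling it directly; the directory and archive names are hypothetical:

    from distutils.archive_util import make_archive

    # Pack the contents of ./build into project-1.0.tar.gz and return the path
    archive = make_archive('project-1.0', 'gztar', root_dir='build')
    print(archive)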
diff --git a/setuptools/_distutils/bcppcompiler.py b/setuptools/_distutils/bcppcompiler.py
new file mode 100644
index 0000000..2eb6d2e
--- /dev/null
+++ b/setuptools/_distutils/bcppcompiler.py
@@ -0,0 +1,393 @@
+"""distutils.bcppcompiler
+
+Contains BorlandCCompiler, an implementation of the abstract CCompiler class
+for the Borland C++ compiler.
+"""
+
+# This implementation by Lyle Johnson, based on the original msvccompiler.py
+# module and using the directions originally published by Gordon Williams.
+
+# XXX looks like there's a LOT of overlap between these two classes:
+# someone should sit down and factor out the common code as
+# WindowsCCompiler! --GPW
+
+
+import os
+from distutils.errors import \
+ DistutilsExecError, \
+ CompileError, LibError, LinkError, UnknownFileError
+from distutils.ccompiler import \
+ CCompiler, gen_preprocess_options
+from distutils.file_util import write_file
+from distutils.dep_util import newer
+from distutils import log
+
+class BCPPCompiler(CCompiler) :
+ """Concrete class that implements an interface to the Borland C/C++
+ compiler, as defined by the CCompiler abstract class.
+ """
+
+ compiler_type = 'bcpp'
+
+ # Just set this so CCompiler's constructor doesn't barf. We currently
+ # don't use the 'set_executables()' bureaucracy provided by CCompiler,
+ # as it really isn't necessary for this sort of single-compiler class.
+ # Would be nice to have a consistent interface with UnixCCompiler,
+ # though, so it's worth thinking about.
+ executables = {}
+
+ # Private class data (need to distinguish C from C++ source for compiler)
+ _c_extensions = ['.c']
+ _cpp_extensions = ['.cc', '.cpp', '.cxx']
+
+ # Needed for the filename generation methods provided by the
+ # base class, CCompiler.
+ src_extensions = _c_extensions + _cpp_extensions
+ obj_extension = '.obj'
+ static_lib_extension = '.lib'
+ shared_lib_extension = '.dll'
+ static_lib_format = shared_lib_format = '%s%s'
+ exe_extension = '.exe'
+
+
+ def __init__ (self,
+ verbose=0,
+ dry_run=0,
+ force=0):
+
+ super().__init__(verbose, dry_run, force)
+
+ # These executables are assumed to all be in the path.
+ # Borland doesn't seem to use any special registry settings to
+ # indicate their installation locations.
+
+ self.cc = "bcc32.exe"
+ self.linker = "ilink32.exe"
+ self.lib = "tlib.exe"
+
+ self.preprocess_options = None
+ self.compile_options = ['/tWM', '/O2', '/q', '/g0']
+ self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0']
+
+ self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x']
+ self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x']
+ self.ldflags_static = []
+ self.ldflags_exe = ['/Gn', '/q', '/x']
+ self.ldflags_exe_debug = ['/Gn', '/q', '/x','/r']
+
+
+ # -- Worker methods ------------------------------------------------
+
+ def compile(self, sources,
+ output_dir=None, macros=None, include_dirs=None, debug=0,
+ extra_preargs=None, extra_postargs=None, depends=None):
+
+ macros, objects, extra_postargs, pp_opts, build = \
+ self._setup_compile(output_dir, macros, include_dirs, sources,
+ depends, extra_postargs)
+ compile_opts = extra_preargs or []
+ compile_opts.append ('-c')
+ if debug:
+ compile_opts.extend (self.compile_options_debug)
+ else:
+ compile_opts.extend (self.compile_options)
+
+ for obj in objects:
+ try:
+ src, ext = build[obj]
+ except KeyError:
+ continue
+ # XXX why do the normpath here?
+ src = os.path.normpath(src)
+ obj = os.path.normpath(obj)
+ # XXX _setup_compile() did a mkpath() too but before the normpath.
+ # Is it possible to skip the normpath?
+ self.mkpath(os.path.dirname(obj))
+
+ if ext == '.res':
+ # This is already a binary file -- skip it.
+ continue # the 'for' loop
+ if ext == '.rc':
+ # This needs to be compiled to a .res file -- do it now.
+ try:
+ self.spawn (["brcc32", "-fo", obj, src])
+ except DistutilsExecError as msg:
+ raise CompileError(msg)
+ continue # the 'for' loop
+
+ # The next two are both for the real compiler.
+ if ext in self._c_extensions:
+ input_opt = ""
+ elif ext in self._cpp_extensions:
+ input_opt = "-P"
+ else:
+ # Unknown file type -- no extra options. The compiler
+ # will probably fail, but let it just in case this is a
+ # file the compiler recognizes even if we don't.
+ input_opt = ""
+
+ output_opt = "-o" + obj
+
+ # Compiler command line syntax is: "bcc32 [options] file(s)".
+ # Note that the source file names must appear at the end of
+ # the command line.
+ try:
+ self.spawn ([self.cc] + compile_opts + pp_opts +
+ [input_opt, output_opt] +
+ extra_postargs + [src])
+ except DistutilsExecError as msg:
+ raise CompileError(msg)
+
+ return objects
+
+ # compile ()
+
+
+ def create_static_lib (self,
+ objects,
+ output_libname,
+ output_dir=None,
+ debug=0,
+ target_lang=None):
+
+ (objects, output_dir) = self._fix_object_args (objects, output_dir)
+ output_filename = \
+ self.library_filename (output_libname, output_dir=output_dir)
+
+ if self._need_link (objects, output_filename):
+ lib_args = [output_filename, '/u'] + objects
+ if debug:
+ pass # XXX what goes here?
+ try:
+ self.spawn ([self.lib] + lib_args)
+ except DistutilsExecError as msg:
+ raise LibError(msg)
+ else:
+ log.debug("skipping %s (up-to-date)", output_filename)
+
+ # create_static_lib ()
+
+
+ def link (self,
+ target_desc,
+ objects,
+ output_filename,
+ output_dir=None,
+ libraries=None,
+ library_dirs=None,
+ runtime_library_dirs=None,
+ export_symbols=None,
+ debug=0,
+ extra_preargs=None,
+ extra_postargs=None,
+ build_temp=None,
+ target_lang=None):
+
+ # XXX this ignores 'build_temp'! should follow the lead of
+ # msvccompiler.py
+
+ (objects, output_dir) = self._fix_object_args (objects, output_dir)
+ (libraries, library_dirs, runtime_library_dirs) = \
+ self._fix_lib_args (libraries, library_dirs, runtime_library_dirs)
+
+ if runtime_library_dirs:
+ log.warn("I don't know what to do with 'runtime_library_dirs': %s",
+ str(runtime_library_dirs))
+
+ if output_dir is not None:
+ output_filename = os.path.join (output_dir, output_filename)
+
+ if self._need_link (objects, output_filename):
+
+ # Figure out linker args based on type of target.
+ if target_desc == CCompiler.EXECUTABLE:
+ startup_obj = 'c0w32'
+ if debug:
+ ld_args = self.ldflags_exe_debug[:]
+ else:
+ ld_args = self.ldflags_exe[:]
+ else:
+ startup_obj = 'c0d32'
+ if debug:
+ ld_args = self.ldflags_shared_debug[:]
+ else:
+ ld_args = self.ldflags_shared[:]
+
+
+ # Create a temporary exports file for use by the linker
+ if export_symbols is None:
+ def_file = ''
+ else:
+ head, tail = os.path.split (output_filename)
+ modname, ext = os.path.splitext (tail)
+ temp_dir = os.path.dirname(objects[0]) # preserve tree structure
+ def_file = os.path.join (temp_dir, '%s.def' % modname)
+ contents = ['EXPORTS']
+ for sym in (export_symbols or []):
+ contents.append(' %s=_%s' % (sym, sym))
+ self.execute(write_file, (def_file, contents),
+ "writing %s" % def_file)
+
+ # Borland C++ has problems with '/' in paths
+ objects2 = map(os.path.normpath, objects)
+ # split objects in .obj and .res files
+ # Borland C++ needs them at different positions in the command line
+ objects = [startup_obj]
+ resources = []
+ for file in objects2:
+ (base, ext) = os.path.splitext(os.path.normcase(file))
+ if ext == '.res':
+ resources.append(file)
+ else:
+ objects.append(file)
+
+
+ for l in library_dirs:
+ ld_args.append("/L%s" % os.path.normpath(l))
+ ld_args.append("/L.") # we sometimes use relative paths
+
+ # list of object files
+ ld_args.extend(objects)
+
+ # XXX the command-line syntax for Borland C++ is a bit wonky;
+ # certain filenames are jammed together in one big string, but
+ # comma-delimited. This doesn't mesh too well with the
+ # Unix-centric attitude (with a DOS/Windows quoting hack) of
+ # 'spawn()', so constructing the argument list is a bit
+ # awkward. Note that doing the obvious thing and jamming all
+ # the filenames and commas into one argument would be wrong,
+ # because 'spawn()' would quote any filenames with spaces in
+ # them. Arghghh! Apparently it works fine as coded...
+
+ # name of dll/exe file
+ ld_args.extend([',',output_filename])
+ # no map file and start libraries
+ ld_args.append(',,')
+
+ for lib in libraries:
+ # see if we find it and if there is a bcpp specific lib
+ # (xxx_bcpp.lib)
+ libfile = self.find_library_file(library_dirs, lib, debug)
+ if libfile is None:
+ ld_args.append(lib)
+ # probably a BCPP internal library -- don't warn
+ else:
+ # full name which prefers bcpp_xxx.lib over xxx.lib
+ ld_args.append(libfile)
+
+ # some default libraries
+ ld_args.append ('import32')
+ ld_args.append ('cw32mt')
+
+ # def file for export symbols
+ ld_args.extend([',',def_file])
+ # add resource files
+ ld_args.append(',')
+ ld_args.extend(resources)
+
+
+ if extra_preargs:
+ ld_args[:0] = extra_preargs
+ if extra_postargs:
+ ld_args.extend(extra_postargs)
+
+ self.mkpath (os.path.dirname (output_filename))
+ try:
+ self.spawn ([self.linker] + ld_args)
+ except DistutilsExecError as msg:
+ raise LinkError(msg)
+
+ else:
+ log.debug("skipping %s (up-to-date)", output_filename)
+
+ # link ()
+
+ # -- Miscellaneous methods -----------------------------------------
+
+
+ def find_library_file (self, dirs, lib, debug=0):
+ # List of effective library names to try, in order of preference:
+ # xxx_bcpp.lib is better than xxx.lib
+ # and xxx_d.lib is better than xxx.lib if debug is set
+ #
+ # The "_bcpp" suffix is to handle a Python installation for people
+ # with multiple compilers (primarily Distutils hackers, I suspect
+ # ;-). The idea is they'd have one static library for each
+ # compiler they care about, since (almost?) every Windows compiler
+ # seems to have a different format for static libraries.
+ if debug:
+ dlib = (lib + "_d")
+ try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib)
+ else:
+ try_names = (lib + "_bcpp", lib)
+
+ for dir in dirs:
+ for name in try_names:
+ libfile = os.path.join(dir, self.library_filename(name))
+ if os.path.exists(libfile):
+ return libfile
+ else:
+ # Oops, didn't find it in *any* of 'dirs'
+ return None
+
+ # overwrite the one from CCompiler to support rc and res-files
+ def object_filenames (self,
+ source_filenames,
+ strip_dir=0,
+ output_dir=''):
+ if output_dir is None: output_dir = ''
+ obj_names = []
+ for src_name in source_filenames:
+ # use normcase to make sure '.rc' is really '.rc' and not '.RC'
+ (base, ext) = os.path.splitext (os.path.normcase(src_name))
+ if ext not in (self.src_extensions + ['.rc','.res']):
+ raise UnknownFileError("unknown file type '%s' (from '%s')" % \
+ (ext, src_name))
+ if strip_dir:
+ base = os.path.basename (base)
+ if ext == '.res':
+ # these can go unchanged
+ obj_names.append (os.path.join (output_dir, base + ext))
+ elif ext == '.rc':
+ # these need to be compiled to .res-files
+ obj_names.append (os.path.join (output_dir, base + '.res'))
+ else:
+ obj_names.append (os.path.join (output_dir,
+ base + self.obj_extension))
+ return obj_names
+
+ # object_filenames ()
+
+ def preprocess (self,
+ source,
+ output_file=None,
+ macros=None,
+ include_dirs=None,
+ extra_preargs=None,
+ extra_postargs=None):
+
+ (_, macros, include_dirs) = \
+ self._fix_compile_args(None, macros, include_dirs)
+ pp_opts = gen_preprocess_options(macros, include_dirs)
+ pp_args = ['cpp32.exe'] + pp_opts
+ if output_file is not None:
+ pp_args.append('-o' + output_file)
+ if extra_preargs:
+ pp_args[:0] = extra_preargs
+ if extra_postargs:
+ pp_args.extend(extra_postargs)
+ pp_args.append(source)
+
+ # We need to preprocess: either we're being forced to, or the
+ # source file is newer than the target (or the target doesn't
+ # exist).
+ if self.force or output_file is None or newer(source, output_file):
+ if output_file:
+ self.mkpath(os.path.dirname(output_file))
+ try:
+ self.spawn(pp_args)
+ except DistutilsExecError as msg:
+ print(msg)
+ raise CompileError(msg)
+
+ # preprocess()
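An illustrative sketch, not part of the patch itself: the object_filenames() override above is what routes Borland resource scripts through the resource compiler, mapping '.rc' sources to '.res' targets while ordinary C/C++ sources get the '.obj' extension. Assuming a Python where distutils.bcppcompiler is importable, the mapping looks roughly like this (paths join with the host's separator):

from distutils.bcppcompiler import BCPPCompiler

bcc = BCPPCompiler()
names = bcc.object_filenames(['foo.c', 'app.rc', 'icons.res'],
                             output_dir='build')
# expected: ['build/foo.obj', 'build/app.res', 'build/icons.res']
print(names)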
diff --git a/setuptools/_distutils/ccompiler.py b/setuptools/_distutils/ccompiler.py
new file mode 100644
index 0000000..777fc66
--- /dev/null
+++ b/setuptools/_distutils/ccompiler.py
@@ -0,0 +1,1123 @@
+"""distutils.ccompiler
+
+Contains CCompiler, an abstract base class that defines the interface
+for the Distutils compiler abstraction model."""
+
+import sys, os, re
+from distutils.errors import *
+from distutils.spawn import spawn
+from distutils.file_util import move_file
+from distutils.dir_util import mkpath
+from distutils.dep_util import newer_group
+from distutils.util import split_quoted, execute
+from distutils import log
+
+class CCompiler:
+ """Abstract base class to define the interface that must be implemented
+ by real compiler classes. Also has some utility methods used by
+ several compiler classes.
+
+ The basic idea behind a compiler abstraction class is that each
+ instance can be used for all the compile/link steps in building a
+ single project. Thus, attributes common to all of those compile and
+ link steps -- include directories, macros to define, libraries to link
+ against, etc. -- are attributes of the compiler instance. To allow for
+ variability in how individual files are treated, most of those
+ attributes may be varied on a per-compilation or per-link basis.
+ """
+
+ # 'compiler_type' is a class attribute that identifies this class. It
+ # keeps code that wants to know what kind of compiler it's dealing with
+ # from having to import all possible compiler classes just to do an
+ # 'isinstance'. In concrete CCompiler subclasses, 'compiler_type'
+ # should really, really be one of the keys of the 'compiler_class'
+ # dictionary (see below -- used by the 'new_compiler()' factory
+ # function) -- authors of new compiler interface classes are
+ # responsible for updating 'compiler_class'!
+ compiler_type = None
+
+ # XXX things not handled by this compiler abstraction model:
+ # * client can't provide additional options for a compiler,
+ # e.g. warning, optimization, debugging flags. Perhaps this
+ # should be the domain of concrete compiler abstraction classes
+ # (UnixCCompiler, MSVCCompiler, etc.) -- or perhaps the base
+ # class should have methods for the common ones.
+    #   * can't completely override the include or library search
+ # path, ie. no "cc -I -Idir1 -Idir2" or "cc -L -Ldir1 -Ldir2".
+ # I'm not sure how widely supported this is even by Unix
+ # compilers, much less on other platforms. And I'm even less
+ # sure how useful it is; maybe for cross-compiling, but
+ # support for that is a ways off. (And anyways, cross
+ # compilers probably have a dedicated binary with the
+ # right paths compiled in. I hope.)
+ # * can't do really freaky things with the library list/library
+ # dirs, e.g. "-Ldir1 -lfoo -Ldir2 -lfoo" to link against
+ # different versions of libfoo.a in different locations. I
+ # think this is useless without the ability to null out the
+ # library search path anyways.
+
+
+ # Subclasses that rely on the standard filename generation methods
+ # implemented below should override these; see the comment near
+ # those methods ('object_filenames()' et. al.) for details:
+ src_extensions = None # list of strings
+ obj_extension = None # string
+ static_lib_extension = None
+ shared_lib_extension = None # string
+ static_lib_format = None # format string
+ shared_lib_format = None # prob. same as static_lib_format
+ exe_extension = None # string
+
+ # Default language settings. language_map is used to detect a source
+ # file or Extension target language, checking source filenames.
+ # language_order is used to detect the language precedence, when deciding
+ # what language to use when mixing source types. For example, if some
+ # extension has two files with ".c" extension, and one with ".cpp", it
+ # is still linked as c++.
+ language_map = {".c" : "c",
+ ".cc" : "c++",
+ ".cpp" : "c++",
+ ".cxx" : "c++",
+ ".m" : "objc",
+ }
+ language_order = ["c++", "objc", "c"]
+
+ def __init__(self, verbose=0, dry_run=0, force=0):
+ self.dry_run = dry_run
+ self.force = force
+ self.verbose = verbose
+
+ # 'output_dir': a common output directory for object, library,
+ # shared object, and shared library files
+ self.output_dir = None
+
+ # 'macros': a list of macro definitions (or undefinitions). A
+ # macro definition is a 2-tuple (name, value), where the value is
+ # either a string or None (no explicit value). A macro
+ # undefinition is a 1-tuple (name,).
+ self.macros = []
+
+ # 'include_dirs': a list of directories to search for include files
+ self.include_dirs = []
+
+ # 'libraries': a list of libraries to include in any link
+ # (library names, not filenames: eg. "foo" not "libfoo.a")
+ self.libraries = []
+
+ # 'library_dirs': a list of directories to search for libraries
+ self.library_dirs = []
+
+ # 'runtime_library_dirs': a list of directories to search for
+ # shared libraries/objects at runtime
+ self.runtime_library_dirs = []
+
+ # 'objects': a list of object files (or similar, such as explicitly
+ # named library files) to include on any link
+ self.objects = []
+
+ for key in self.executables.keys():
+ self.set_executable(key, self.executables[key])
+
+ def set_executables(self, **kwargs):
+ """Define the executables (and options for them) that will be run
+ to perform the various stages of compilation. The exact set of
+ executables that may be specified here depends on the compiler
+ class (via the 'executables' class attribute), but most will have:
+ compiler the C/C++ compiler
+ linker_so linker used to create shared objects and libraries
+ linker_exe linker used to create binary executables
+ archiver static library creator
+
+ On platforms with a command-line (Unix, DOS/Windows), each of these
+ is a string that will be split into executable name and (optional)
+ list of arguments. (Splitting the string is done similarly to how
+ Unix shells operate: words are delimited by spaces, but quotes and
+ backslashes can override this. See
+ 'distutils.util.split_quoted()'.)
+ """
+
+ # Note that some CCompiler implementation classes will define class
+ # attributes 'cpp', 'cc', etc. with hard-coded executable names;
+ # this is appropriate when a compiler class is for exactly one
+ # compiler/OS combination (eg. MSVCCompiler). Other compiler
+ # classes (UnixCCompiler, in particular) are driven by information
+ # discovered at run-time, since there are many different ways to do
+ # basically the same things with Unix C compilers.
+
+ for key in kwargs:
+ if key not in self.executables:
+ raise ValueError("unknown executable '%s' for class %s" %
+ (key, self.__class__.__name__))
+ self.set_executable(key, kwargs[key])
+
+ def set_executable(self, key, value):
+ if isinstance(value, str):
+ setattr(self, key, split_quoted(value))
+ else:
+ setattr(self, key, value)
+
+ def _find_macro(self, name):
+ i = 0
+ for defn in self.macros:
+ if defn[0] == name:
+ return i
+ i += 1
+ return None
+
+ def _check_macro_definitions(self, definitions):
+ """Ensures that every element of 'definitions' is a valid macro
+ definition, ie. either (name,value) 2-tuple or a (name,) tuple. Do
+ nothing if all definitions are OK, raise TypeError otherwise.
+ """
+ for defn in definitions:
+            if not (isinstance(defn, tuple) and
+                    len(defn) in (1, 2) and
+                    isinstance(defn[0], str) and
+                    (len(defn) == 1 or
+                     isinstance(defn[1], str) or defn[1] is None)):
+ raise TypeError(("invalid macro definition '%s': " % defn) + \
+ "must be tuple (string,), (string, string), or " + \
+ "(string, None)")
+
+
+ # -- Bookkeeping methods -------------------------------------------
+
+ def define_macro(self, name, value=None):
+ """Define a preprocessor macro for all compilations driven by this
+ compiler object. The optional parameter 'value' should be a
+ string; if it is not supplied, then the macro will be defined
+ without an explicit value and the exact outcome depends on the
+ compiler used (XXX true? does ANSI say anything about this?)
+ """
+ # Delete from the list of macro definitions/undefinitions if
+ # already there (so that this one will take precedence).
+ i = self._find_macro (name)
+ if i is not None:
+ del self.macros[i]
+
+ self.macros.append((name, value))
+
+ def undefine_macro(self, name):
+ """Undefine a preprocessor macro for all compilations driven by
+ this compiler object. If the same macro is defined by
+ 'define_macro()' and undefined by 'undefine_macro()' the last call
+ takes precedence (including multiple redefinitions or
+ undefinitions). If the macro is redefined/undefined on a
+ per-compilation basis (ie. in the call to 'compile()'), then that
+ takes precedence.
+ """
+ # Delete from the list of macro definitions/undefinitions if
+ # already there (so that this one will take precedence).
+ i = self._find_macro (name)
+ if i is not None:
+ del self.macros[i]
+
+ undefn = (name,)
+ self.macros.append(undefn)
+
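A small sketch, not part of the patch, of how the bookkeeping above maintains self.macros; a later call replaces any earlier entry for the same name (new_compiler() is defined further down in this module):

from distutils.ccompiler import new_compiler

cc = new_compiler()
cc.define_macro('NDEBUG')              # stored as ('NDEBUG', None)
cc.define_macro('VERSION', '"1.0"')    # stored as ('VERSION', '"1.0"')
cc.undefine_macro('NDEBUG')            # drops the definition, stores ('NDEBUG',)
print(cc.macros)                       # [('VERSION', '"1.0"'), ('NDEBUG',)]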
+ def add_include_dir(self, dir):
+ """Add 'dir' to the list of directories that will be searched for
+ header files. The compiler is instructed to search directories in
+ the order in which they are supplied by successive calls to
+ 'add_include_dir()'.
+ """
+ self.include_dirs.append(dir)
+
+ def set_include_dirs(self, dirs):
+ """Set the list of directories that will be searched to 'dirs' (a
+ list of strings). Overrides any preceding calls to
+        'add_include_dir()'; subsequent calls to 'add_include_dir()' add
+ to the list passed to 'set_include_dirs()'. This does not affect
+ any list of standard include directories that the compiler may
+ search by default.
+ """
+ self.include_dirs = dirs[:]
+
+ def add_library(self, libname):
+ """Add 'libname' to the list of libraries that will be included in
+ all links driven by this compiler object. Note that 'libname'
+ should *not* be the name of a file containing a library, but the
+ name of the library itself: the actual filename will be inferred by
+ the linker, the compiler, or the compiler class (depending on the
+ platform).
+
+ The linker will be instructed to link against libraries in the
+ order they were supplied to 'add_library()' and/or
+ 'set_libraries()'. It is perfectly valid to duplicate library
+ names; the linker will be instructed to link against libraries as
+ many times as they are mentioned.
+ """
+ self.libraries.append(libname)
+
+ def set_libraries(self, libnames):
+ """Set the list of libraries to be included in all links driven by
+ this compiler object to 'libnames' (a list of strings). This does
+ not affect any standard system libraries that the linker may
+ include by default.
+ """
+ self.libraries = libnames[:]
+
+ def add_library_dir(self, dir):
+ """Add 'dir' to the list of directories that will be searched for
+ libraries specified to 'add_library()' and 'set_libraries()'. The
+ linker will be instructed to search for libraries in the order they
+ are supplied to 'add_library_dir()' and/or 'set_library_dirs()'.
+ """
+ self.library_dirs.append(dir)
+
+ def set_library_dirs(self, dirs):
+ """Set the list of library search directories to 'dirs' (a list of
+ strings). This does not affect any standard library search path
+ that the linker may search by default.
+ """
+ self.library_dirs = dirs[:]
+
+ def add_runtime_library_dir(self, dir):
+ """Add 'dir' to the list of directories that will be searched for
+ shared libraries at runtime.
+ """
+ self.runtime_library_dirs.append(dir)
+
+ def set_runtime_library_dirs(self, dirs):
+ """Set the list of directories to search for shared libraries at
+ runtime to 'dirs' (a list of strings). This does not affect any
+ standard search path that the runtime linker may search by
+ default.
+ """
+ self.runtime_library_dirs = dirs[:]
+
+ def add_link_object(self, object):
+ """Add 'object' to the list of object files (or analogues, such as
+ explicitly named library files or the output of "resource
+ compilers") to be included in every link driven by this compiler
+ object.
+ """
+ self.objects.append(object)
+
+ def set_link_objects(self, objects):
+ """Set the list of object files (or analogues) to be included in
+ every link to 'objects'. This does not affect any standard object
+ files that the linker may include by default (such as system
+ libraries).
+ """
+ self.objects = objects[:]
+
+
+ # -- Private utility methods --------------------------------------
+ # (here for the convenience of subclasses)
+
+ # Helper method to prep compiler in subclass compile() methods
+
+ def _setup_compile(self, outdir, macros, incdirs, sources, depends,
+ extra):
+ """Process arguments and decide which source files to compile."""
+ if outdir is None:
+ outdir = self.output_dir
+ elif not isinstance(outdir, str):
+ raise TypeError("'output_dir' must be a string or None")
+
+ if macros is None:
+ macros = self.macros
+ elif isinstance(macros, list):
+ macros = macros + (self.macros or [])
+ else:
+ raise TypeError("'macros' (if supplied) must be a list of tuples")
+
+ if incdirs is None:
+ incdirs = self.include_dirs
+ elif isinstance(incdirs, (list, tuple)):
+ incdirs = list(incdirs) + (self.include_dirs or [])
+ else:
+ raise TypeError(
+ "'include_dirs' (if supplied) must be a list of strings")
+
+ if extra is None:
+ extra = []
+
+ # Get the list of expected output (object) files
+ objects = self.object_filenames(sources, strip_dir=0,
+ output_dir=outdir)
+ assert len(objects) == len(sources)
+
+ pp_opts = gen_preprocess_options(macros, incdirs)
+
+ build = {}
+ for i in range(len(sources)):
+ src = sources[i]
+ obj = objects[i]
+ ext = os.path.splitext(src)[1]
+ self.mkpath(os.path.dirname(obj))
+ build[obj] = (src, ext)
+
+ return macros, objects, extra, pp_opts, build
+
+ def _get_cc_args(self, pp_opts, debug, before):
+ # works for unixccompiler, cygwinccompiler
+ cc_args = pp_opts + ['-c']
+ if debug:
+ cc_args[:0] = ['-g']
+ if before:
+ cc_args[:0] = before
+ return cc_args
+
+ def _fix_compile_args(self, output_dir, macros, include_dirs):
+ """Typecheck and fix-up some of the arguments to the 'compile()'
+ method, and return fixed-up values. Specifically: if 'output_dir'
+ is None, replaces it with 'self.output_dir'; ensures that 'macros'
+ is a list, and augments it with 'self.macros'; ensures that
+ 'include_dirs' is a list, and augments it with 'self.include_dirs'.
+ Guarantees that the returned values are of the correct type,
+ i.e. for 'output_dir' either string or None, and for 'macros' and
+ 'include_dirs' either list or None.
+ """
+ if output_dir is None:
+ output_dir = self.output_dir
+ elif not isinstance(output_dir, str):
+ raise TypeError("'output_dir' must be a string or None")
+
+ if macros is None:
+ macros = self.macros
+ elif isinstance(macros, list):
+ macros = macros + (self.macros or [])
+ else:
+ raise TypeError("'macros' (if supplied) must be a list of tuples")
+
+ if include_dirs is None:
+ include_dirs = self.include_dirs
+ elif isinstance(include_dirs, (list, tuple)):
+ include_dirs = list(include_dirs) + (self.include_dirs or [])
+ else:
+ raise TypeError(
+ "'include_dirs' (if supplied) must be a list of strings")
+
+ return output_dir, macros, include_dirs
+
+ def _prep_compile(self, sources, output_dir, depends=None):
+ """Decide which source files must be recompiled.
+
+ Determine the list of object files corresponding to 'sources',
+ and figure out which ones really need to be recompiled.
+ Return a list of all object files and a dictionary telling
+ which source files can be skipped.
+ """
+ # Get the list of expected output (object) files
+ objects = self.object_filenames(sources, output_dir=output_dir)
+ assert len(objects) == len(sources)
+
+ # Return an empty dict for the "which source files can be skipped"
+ # return value to preserve API compatibility.
+ return objects, {}
+
+ def _fix_object_args(self, objects, output_dir):
+ """Typecheck and fix up some arguments supplied to various methods.
+ Specifically: ensure that 'objects' is a list; if output_dir is
+ None, replace with self.output_dir. Return fixed versions of
+ 'objects' and 'output_dir'.
+ """
+ if not isinstance(objects, (list, tuple)):
+ raise TypeError("'objects' must be a list or tuple of strings")
+ objects = list(objects)
+
+ if output_dir is None:
+ output_dir = self.output_dir
+ elif not isinstance(output_dir, str):
+ raise TypeError("'output_dir' must be a string or None")
+
+ return (objects, output_dir)
+
+ def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
+ """Typecheck and fix up some of the arguments supplied to the
+ 'link_*' methods. Specifically: ensure that all arguments are
+ lists, and augment them with their permanent versions
+ (eg. 'self.libraries' augments 'libraries'). Return a tuple with
+ fixed versions of all arguments.
+ """
+ if libraries is None:
+ libraries = self.libraries
+ elif isinstance(libraries, (list, tuple)):
+ libraries = list (libraries) + (self.libraries or [])
+ else:
+ raise TypeError(
+ "'libraries' (if supplied) must be a list of strings")
+
+ if library_dirs is None:
+ library_dirs = self.library_dirs
+ elif isinstance(library_dirs, (list, tuple)):
+ library_dirs = list (library_dirs) + (self.library_dirs or [])
+ else:
+ raise TypeError(
+ "'library_dirs' (if supplied) must be a list of strings")
+
+ if runtime_library_dirs is None:
+ runtime_library_dirs = self.runtime_library_dirs
+ elif isinstance(runtime_library_dirs, (list, tuple)):
+ runtime_library_dirs = (list(runtime_library_dirs) +
+ (self.runtime_library_dirs or []))
+ else:
+ raise TypeError("'runtime_library_dirs' (if supplied) "
+ "must be a list of strings")
+
+ return (libraries, library_dirs, runtime_library_dirs)
+
+ def _need_link(self, objects, output_file):
+ """Return true if we need to relink the files listed in 'objects'
+ to recreate 'output_file'.
+ """
+ if self.force:
+ return True
+ else:
+ if self.dry_run:
+ newer = newer_group (objects, output_file, missing='newer')
+ else:
+ newer = newer_group (objects, output_file)
+ return newer
+
+ def detect_language(self, sources):
+ """Detect the language of a given file, or list of files. Uses
+        language_map and language_order to do the job.
+ """
+ if not isinstance(sources, list):
+ sources = [sources]
+ lang = None
+ index = len(self.language_order)
+ for source in sources:
+ base, ext = os.path.splitext(source)
+ extlang = self.language_map.get(ext)
+ try:
+ extindex = self.language_order.index(extlang)
+ if extindex < index:
+ lang = extlang
+ index = extindex
+ except ValueError:
+ pass
+ return lang
+
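For illustration only (not part of the patch): with the default language_map and language_order above, a source list that mixes C and C++ files resolves to the higher-precedence language:

from distutils.ccompiler import new_compiler

cc = new_compiler()
print(cc.detect_language(['a.c', 'b.cpp', 'c.c']))   # 'c++'
print(cc.detect_language('module.m'))                # 'objc'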
+
+ # -- Worker methods ------------------------------------------------
+ # (must be implemented by subclasses)
+
+ def preprocess(self, source, output_file=None, macros=None,
+ include_dirs=None, extra_preargs=None, extra_postargs=None):
+ """Preprocess a single C/C++ source file, named in 'source'.
+ Output will be written to file named 'output_file', or stdout if
+ 'output_file' not supplied. 'macros' is a list of macro
+ definitions as for 'compile()', which will augment the macros set
+ with 'define_macro()' and 'undefine_macro()'. 'include_dirs' is a
+ list of directory names that will be added to the default list.
+
+ Raises PreprocessError on failure.
+ """
+ pass
+
+ def compile(self, sources, output_dir=None, macros=None,
+ include_dirs=None, debug=0, extra_preargs=None,
+ extra_postargs=None, depends=None):
+ """Compile one or more source files.
+
+ 'sources' must be a list of filenames, most likely C/C++
+ files, but in reality anything that can be handled by a
+ particular compiler and compiler class (eg. MSVCCompiler can
+ handle resource files in 'sources'). Return a list of object
+ filenames, one per source filename in 'sources'. Depending on
+ the implementation, not all source files will necessarily be
+ compiled, but all corresponding object filenames will be
+ returned.
+
+ If 'output_dir' is given, object files will be put under it, while
+ retaining their original path component. That is, "foo/bar.c"
+ normally compiles to "foo/bar.o" (for a Unix implementation); if
+ 'output_dir' is "build", then it would compile to
+ "build/foo/bar.o".
+
+ 'macros', if given, must be a list of macro definitions. A macro
+ definition is either a (name, value) 2-tuple or a (name,) 1-tuple.
+ The former defines a macro; if the value is None, the macro is
+ defined without an explicit value. The 1-tuple case undefines a
+ macro. Later definitions/redefinitions/ undefinitions take
+ precedence.
+
+ 'include_dirs', if given, must be a list of strings, the
+ directories to add to the default include file search path for this
+ compilation only.
+
+ 'debug' is a boolean; if true, the compiler will be instructed to
+ output debug symbols in (or alongside) the object file(s).
+
+ 'extra_preargs' and 'extra_postargs' are implementation- dependent.
+ On platforms that have the notion of a command-line (e.g. Unix,
+ DOS/Windows), they are most likely lists of strings: extra
+ command-line arguments to prepend/append to the compiler command
+ line. On other platforms, consult the implementation class
+ documentation. In any event, they are intended as an escape hatch
+ for those occasions when the abstract compiler framework doesn't
+ cut the mustard.
+
+ 'depends', if given, is a list of filenames that all targets
+ depend on. If a source file is older than any file in
+ depends, then the source file will be recompiled. This
+ supports dependency tracking, but only at a coarse
+ granularity.
+
+ Raises CompileError on failure.
+ """
+ # A concrete compiler class can either override this method
+ # entirely or implement _compile().
+ macros, objects, extra_postargs, pp_opts, build = \
+ self._setup_compile(output_dir, macros, include_dirs, sources,
+ depends, extra_postargs)
+ cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)
+
+ for obj in objects:
+ try:
+ src, ext = build[obj]
+ except KeyError:
+ continue
+ self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
+
+ # Return *all* object filenames, not just the ones we just built.
+ return objects
+
+ def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+ """Compile 'src' to product 'obj'."""
+ # A concrete compiler class that does not override compile()
+ # should implement _compile().
+ pass
+
+ def create_static_lib(self, objects, output_libname, output_dir=None,
+ debug=0, target_lang=None):
+ """Link a bunch of stuff together to create a static library file.
+ The "bunch of stuff" consists of the list of object files supplied
+ as 'objects', the extra object files supplied to
+ 'add_link_object()' and/or 'set_link_objects()', the libraries
+ supplied to 'add_library()' and/or 'set_libraries()', and the
+ libraries supplied as 'libraries' (if any).
+
+ 'output_libname' should be a library name, not a filename; the
+ filename will be inferred from the library name. 'output_dir' is
+ the directory where the library file will be put.
+
+ 'debug' is a boolean; if true, debugging information will be
+ included in the library (note that on most platforms, it is the
+ compile step where this matters: the 'debug' flag is included here
+ just for consistency).
+
+ 'target_lang' is the target language for which the given objects
+ are being compiled. This allows specific linkage time treatment of
+ certain languages.
+
+ Raises LibError on failure.
+ """
+ pass
+
+
+ # values for target_desc parameter in link()
+ SHARED_OBJECT = "shared_object"
+ SHARED_LIBRARY = "shared_library"
+ EXECUTABLE = "executable"
+
+ def link(self,
+ target_desc,
+ objects,
+ output_filename,
+ output_dir=None,
+ libraries=None,
+ library_dirs=None,
+ runtime_library_dirs=None,
+ export_symbols=None,
+ debug=0,
+ extra_preargs=None,
+ extra_postargs=None,
+ build_temp=None,
+ target_lang=None):
+ """Link a bunch of stuff together to create an executable or
+ shared library file.
+
+ The "bunch of stuff" consists of the list of object files supplied
+ as 'objects'. 'output_filename' should be a filename. If
+ 'output_dir' is supplied, 'output_filename' is relative to it
+ (i.e. 'output_filename' can provide directory components if
+ needed).
+
+ 'libraries' is a list of libraries to link against. These are
+ library names, not filenames, since they're translated into
+ filenames in a platform-specific way (eg. "foo" becomes "libfoo.a"
+ on Unix and "foo.lib" on DOS/Windows). However, they can include a
+ directory component, which means the linker will look in that
+ specific directory rather than searching all the normal locations.
+
+ 'library_dirs', if supplied, should be a list of directories to
+ search for libraries that were specified as bare library names
+ (ie. no directory component). These are on top of the system
+ default and those supplied to 'add_library_dir()' and/or
+ 'set_library_dirs()'. 'runtime_library_dirs' is a list of
+ directories that will be embedded into the shared library and used
+ to search for other shared libraries that *it* depends on at
+ run-time. (This may only be relevant on Unix.)
+
+ 'export_symbols' is a list of symbols that the shared library will
+ export. (This appears to be relevant only on Windows.)
+
+ 'debug' is as for 'compile()' and 'create_static_lib()', with the
+ slight distinction that it actually matters on most platforms (as
+ opposed to 'create_static_lib()', which includes a 'debug' flag
+ mostly for form's sake).
+
+ 'extra_preargs' and 'extra_postargs' are as for 'compile()' (except
+ of course that they supply command-line arguments for the
+ particular linker being used).
+
+ 'target_lang' is the target language for which the given objects
+ are being compiled. This allows specific linkage time treatment of
+ certain languages.
+
+ Raises LinkError on failure.
+ """
+ raise NotImplementedError
+
+
+ # Old 'link_*()' methods, rewritten to use the new 'link()' method.
+
+ def link_shared_lib(self,
+ objects,
+ output_libname,
+ output_dir=None,
+ libraries=None,
+ library_dirs=None,
+ runtime_library_dirs=None,
+ export_symbols=None,
+ debug=0,
+ extra_preargs=None,
+ extra_postargs=None,
+ build_temp=None,
+ target_lang=None):
+ self.link(CCompiler.SHARED_LIBRARY, objects,
+ self.library_filename(output_libname, lib_type='shared'),
+ output_dir,
+ libraries, library_dirs, runtime_library_dirs,
+ export_symbols, debug,
+ extra_preargs, extra_postargs, build_temp, target_lang)
+
+
+ def link_shared_object(self,
+ objects,
+ output_filename,
+ output_dir=None,
+ libraries=None,
+ library_dirs=None,
+ runtime_library_dirs=None,
+ export_symbols=None,
+ debug=0,
+ extra_preargs=None,
+ extra_postargs=None,
+ build_temp=None,
+ target_lang=None):
+ self.link(CCompiler.SHARED_OBJECT, objects,
+ output_filename, output_dir,
+ libraries, library_dirs, runtime_library_dirs,
+ export_symbols, debug,
+ extra_preargs, extra_postargs, build_temp, target_lang)
+
+
+ def link_executable(self,
+ objects,
+ output_progname,
+ output_dir=None,
+ libraries=None,
+ library_dirs=None,
+ runtime_library_dirs=None,
+ debug=0,
+ extra_preargs=None,
+ extra_postargs=None,
+ target_lang=None):
+ self.link(CCompiler.EXECUTABLE, objects,
+ self.executable_filename(output_progname), output_dir,
+ libraries, library_dirs, runtime_library_dirs, None,
+ debug, extra_preargs, extra_postargs, None, target_lang)
+
+
+ # -- Miscellaneous methods -----------------------------------------
+    # These are all used by the 'gen_lib_options()' function; there is
+ # no appropriate default implementation so subclasses should
+ # implement all of these.
+
+ def library_dir_option(self, dir):
+ """Return the compiler option to add 'dir' to the list of
+ directories searched for libraries.
+ """
+ raise NotImplementedError
+
+ def runtime_library_dir_option(self, dir):
+ """Return the compiler option to add 'dir' to the list of
+ directories searched for runtime libraries.
+ """
+ raise NotImplementedError
+
+ def library_option(self, lib):
+ """Return the compiler option to add 'lib' to the list of libraries
+ linked into the shared library or executable.
+ """
+ raise NotImplementedError
+
+ def has_function(self, funcname, includes=None, include_dirs=None,
+ libraries=None, library_dirs=None):
+ """Return a boolean indicating whether funcname is supported on
+ the current platform. The optional arguments can be used to
+ augment the compilation environment.
+ """
+ # this can't be included at module scope because it tries to
+        # import tempfile which might not be available at that point - maybe
+ # the necessary logic should just be inlined?
+ import tempfile
+ if includes is None:
+ includes = []
+ if include_dirs is None:
+ include_dirs = []
+ if libraries is None:
+ libraries = []
+ if library_dirs is None:
+ library_dirs = []
+ fd, fname = tempfile.mkstemp(".c", funcname, text=True)
+ f = os.fdopen(fd, "w")
+ try:
+ for incl in includes:
+ f.write("""#include "%s"\n""" % incl)
+ f.write("""\
+int main (int argc, char **argv) {
+ %s();
+ return 0;
+}
+""" % funcname)
+ finally:
+ f.close()
+ try:
+ objects = self.compile([fname], include_dirs=include_dirs)
+ except CompileError:
+ return False
+ finally:
+ os.remove(fname)
+
+ try:
+ self.link_executable(objects, "a.out",
+ libraries=libraries,
+ library_dirs=library_dirs)
+ except (LinkError, TypeError):
+ return False
+ else:
+ os.remove(os.path.join(self.output_dir or '', "a.out"))
+ finally:
+ for fn in objects:
+ os.remove(fn)
+ return True
+
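A hedged usage sketch (not part of the patch): has_function() compiles and links a throwaway probe program, so it only gives a meaningful answer when a working C toolchain is available; the 'rt' library below is a Linux-oriented example:

from distutils.ccompiler import new_compiler

cc = new_compiler()
if cc.has_function('clock_gettime', libraries=['rt']):
    cc.define_macro('HAVE_CLOCK_GETTIME', '1')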
+ def find_library_file (self, dirs, lib, debug=0):
+ """Search the specified list of directories for a static or shared
+ library file 'lib' and return the full path to that file. If
+ 'debug' true, look for a debugging version (if that makes sense on
+ the current platform). Return None if 'lib' wasn't found in any of
+ the specified directories.
+ """
+ raise NotImplementedError
+
+ # -- Filename generation methods -----------------------------------
+
+ # The default implementation of the filename generating methods are
+ # prejudiced towards the Unix/DOS/Windows view of the world:
+ # * object files are named by replacing the source file extension
+ # (eg. .c/.cpp -> .o/.obj)
+ # * library files (shared or static) are named by plugging the
+ # library name and extension into a format string, eg.
+ # "lib%s.%s" % (lib_name, ".a") for Unix static libraries
+ # * executables are named by appending an extension (possibly
+ # empty) to the program name: eg. progname + ".exe" for
+ # Windows
+ #
+ # To reduce redundant code, these methods expect to find
+ # several attributes in the current object (presumably defined
+ # as class attributes):
+ # * src_extensions -
+ # list of C/C++ source file extensions, eg. ['.c', '.cpp']
+ # * obj_extension -
+ # object file extension, eg. '.o' or '.obj'
+ # * static_lib_extension -
+ # extension for static library files, eg. '.a' or '.lib'
+ # * shared_lib_extension -
+ # extension for shared library/object files, eg. '.so', '.dll'
+ # * static_lib_format -
+ # format string for generating static library filenames,
+ # eg. 'lib%s.%s' or '%s.%s'
+ # * shared_lib_format
+ # format string for generating shared library filenames
+ # (probably same as static_lib_format, since the extension
+ # is one of the intended parameters to the format string)
+ # * exe_extension -
+ # extension for executable files, eg. '' or '.exe'
+
+ def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
+ if output_dir is None:
+ output_dir = ''
+ obj_names = []
+ for src_name in source_filenames:
+ base, ext = os.path.splitext(src_name)
+ base = os.path.splitdrive(base)[1] # Chop off the drive
+ base = base[os.path.isabs(base):] # If abs, chop off leading /
+ if ext not in self.src_extensions:
+ raise UnknownFileError(
+ "unknown file type '%s' (from '%s')" % (ext, src_name))
+ if strip_dir:
+ base = os.path.basename(base)
+ obj_names.append(os.path.join(output_dir,
+ base + self.obj_extension))
+ return obj_names
+
+ def shared_object_filename(self, basename, strip_dir=0, output_dir=''):
+ assert output_dir is not None
+ if strip_dir:
+ basename = os.path.basename(basename)
+ return os.path.join(output_dir, basename + self.shared_lib_extension)
+
+ def executable_filename(self, basename, strip_dir=0, output_dir=''):
+ assert output_dir is not None
+ if strip_dir:
+ basename = os.path.basename(basename)
+ return os.path.join(output_dir, basename + (self.exe_extension or ''))
+
+ def library_filename(self, libname, lib_type='static', # or 'shared'
+ strip_dir=0, output_dir=''):
+ assert output_dir is not None
+ if lib_type not in ("static", "shared", "dylib", "xcode_stub"):
+ raise ValueError(
+ "'lib_type' must be \"static\", \"shared\", \"dylib\", or \"xcode_stub\"")
+ fmt = getattr(self, lib_type + "_lib_format")
+ ext = getattr(self, lib_type + "_lib_extension")
+
+ dir, base = os.path.split(libname)
+ filename = fmt % (base, ext)
+ if strip_dir:
+ dir = ''
+
+ return os.path.join(output_dir, dir, filename)
+
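As a quick illustration (not part of the patch), the filename helpers above plug the library name into the per-class format strings; with the Unix-style compiler, for example:

from distutils.ccompiler import new_compiler

cc = new_compiler('posix')                            # UnixCCompiler
print(cc.library_filename('ssl'))                     # 'libssl.a'
print(cc.library_filename('ssl', lib_type='shared'))  # 'libssl.so'
print(cc.executable_filename('tool'))                 # 'tool' (no exe_extension on Unix)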
+
+ # -- Utility methods -----------------------------------------------
+
+ def announce(self, msg, level=1):
+ log.debug(msg)
+
+ def debug_print(self, msg):
+ from distutils.debug import DEBUG
+ if DEBUG:
+ print(msg)
+
+ def warn(self, msg):
+ sys.stderr.write("warning: %s\n" % msg)
+
+ def execute(self, func, args, msg=None, level=1):
+ execute(func, args, msg, self.dry_run)
+
+ def spawn(self, cmd, **kwargs):
+ spawn(cmd, dry_run=self.dry_run, **kwargs)
+
+ def move_file(self, src, dst):
+ return move_file(src, dst, dry_run=self.dry_run)
+
+ def mkpath (self, name, mode=0o777):
+ mkpath(name, mode, dry_run=self.dry_run)
+
+
+# Map a sys.platform/os.name ('posix', 'nt') to the default compiler
+# type for that platform. Keys are interpreted as re match
+# patterns. Order is important; platform mappings are preferred over
+# OS names.
+_default_compilers = (
+
+ # Platform string mappings
+
+ # on a cygwin built python we can use gcc like an ordinary UNIXish
+ # compiler
+ ('cygwin.*', 'unix'),
+
+ # OS name mappings
+ ('posix', 'unix'),
+ ('nt', 'msvc'),
+
+ )
+
+def get_default_compiler(osname=None, platform=None):
+ """Determine the default compiler to use for the given platform.
+
+ osname should be one of the standard Python OS names (i.e. the
+ ones returned by os.name) and platform the common value
+ returned by sys.platform for the platform in question.
+
+ The default values are os.name and sys.platform in case the
+ parameters are not given.
+ """
+ if osname is None:
+ osname = os.name
+ if platform is None:
+ platform = sys.platform
+ for pattern, compiler in _default_compilers:
+ if re.match(pattern, platform) is not None or \
+ re.match(pattern, osname) is not None:
+ return compiler
+ # Default to Unix compiler
+ return 'unix'
+
+# Map compiler types to (module_name, class_name) pairs -- ie. where to
+# find the code that implements an interface to this compiler. (The module
+# is assumed to be in the 'distutils' package.)
+compiler_class = { 'unix': ('unixccompiler', 'UnixCCompiler',
+ "standard UNIX-style compiler"),
+ 'msvc': ('_msvccompiler', 'MSVCCompiler',
+ "Microsoft Visual C++"),
+ 'cygwin': ('cygwinccompiler', 'CygwinCCompiler',
+ "Cygwin port of GNU C Compiler for Win32"),
+ 'mingw32': ('cygwinccompiler', 'Mingw32CCompiler',
+ "Mingw32 port of GNU C Compiler for Win32"),
+ 'bcpp': ('bcppcompiler', 'BCPPCompiler',
+ "Borland C++ Compiler"),
+ }
+
+def show_compilers():
+ """Print list of available compilers (used by the "--help-compiler"
+ options to "build", "build_ext", "build_clib").
+ """
+ # XXX this "knows" that the compiler option it's describing is
+ # "--compiler", which just happens to be the case for the three
+ # commands that use it.
+ from distutils.fancy_getopt import FancyGetopt
+ compilers = []
+ for compiler in compiler_class.keys():
+ compilers.append(("compiler="+compiler, None,
+ compiler_class[compiler][2]))
+ compilers.sort()
+ pretty_printer = FancyGetopt(compilers)
+ pretty_printer.print_help("List of available compilers:")
+
+
+def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0):
+ """Generate an instance of some CCompiler subclass for the supplied
+ platform/compiler combination. 'plat' defaults to 'os.name'
+ (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler
+ for that platform. Currently only 'posix' and 'nt' are supported, and
+ the default compilers are "traditional Unix interface" (UnixCCompiler
+ class) and Visual C++ (MSVCCompiler class). Note that it's perfectly
+ possible to ask for a Unix compiler object under Windows, and a
+ Microsoft compiler object under Unix -- if you supply a value for
+ 'compiler', 'plat' is ignored.
+ """
+ if plat is None:
+ plat = os.name
+
+ try:
+ if compiler is None:
+ compiler = get_default_compiler(plat)
+
+ (module_name, class_name, long_description) = compiler_class[compiler]
+ except KeyError:
+ msg = "don't know how to compile C/C++ code on platform '%s'" % plat
+ if compiler is not None:
+ msg = msg + " with '%s' compiler" % compiler
+ raise DistutilsPlatformError(msg)
+
+ try:
+ module_name = "distutils." + module_name
+ __import__ (module_name)
+ module = sys.modules[module_name]
+ klass = vars(module)[class_name]
+ except ImportError:
+ raise DistutilsModuleError(
+ "can't compile C/C++ code: unable to load module '%s'" % \
+ module_name)
+ except KeyError:
+ raise DistutilsModuleError(
+ "can't compile C/C++ code: unable to find class '%s' "
+ "in module '%s'" % (class_name, module_name))
+
+ # XXX The None is necessary to preserve backwards compatibility
+ # with classes that expect verbose to be the first positional
+ # argument.
+ return klass(None, dry_run, force)
+
+
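A minimal end-to-end sketch (not part of the patch) of how build_ext-style code drives these classes; the source and output paths are invented for illustration, and a working C toolchain is assumed:

from distutils.ccompiler import new_compiler

cc = new_compiler()                      # 'unix', 'msvc', ... per platform
cc.define_macro('PY_SSIZE_T_CLEAN')
objs = cc.compile(['src/fast.c'], output_dir='build', include_dirs=['include'])
cc.link_shared_object(objs, 'build/fast.so', libraries=['m'])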
+def gen_preprocess_options(macros, include_dirs):
+ """Generate C pre-processor options (-D, -U, -I) as used by at least
+ two types of compilers: the typical Unix compiler and Visual C++.
+ 'macros' is the usual thing, a list of 1- or 2-tuples, where (name,)
+ means undefine (-U) macro 'name', and (name,value) means define (-D)
+ macro 'name' to 'value'. 'include_dirs' is just a list of directory
+ names to be added to the header file search path (-I). Returns a list
+ of command-line options suitable for either Unix compilers or Visual
+ C++.
+ """
+ # XXX it would be nice (mainly aesthetic, and so we don't generate
+ # stupid-looking command lines) to go over 'macros' and eliminate
+ # redundant definitions/undefinitions (ie. ensure that only the
+ # latest mention of a particular macro winds up on the command
+ # line). I don't think it's essential, though, since most (all?)
+ # Unix C compilers only pay attention to the latest -D or -U
+ # mention of a macro on their command line. Similar situation for
+ # 'include_dirs'. I'm punting on both for now. Anyways, weeding out
+ # redundancies like this should probably be the province of
+ # CCompiler, since the data structures used are inherited from it
+ # and therefore common to all CCompiler classes.
+ pp_opts = []
+ for macro in macros:
+ if not (isinstance(macro, tuple) and 1 <= len(macro) <= 2):
+ raise TypeError(
+ "bad macro definition '%s': "
+ "each element of 'macros' list must be a 1- or 2-tuple"
+ % macro)
+
+ if len(macro) == 1: # undefine this macro
+ pp_opts.append("-U%s" % macro[0])
+ elif len(macro) == 2:
+ if macro[1] is None: # define with no explicit value
+ pp_opts.append("-D%s" % macro[0])
+ else:
+ # XXX *don't* need to be clever about quoting the
+ # macro value here, because we're going to avoid the
+ # shell at all costs when we spawn the command!
+ pp_opts.append("-D%s=%s" % macro)
+
+ for dir in include_dirs:
+ pp_opts.append("-I%s" % dir)
+ return pp_opts
+
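gen_preprocess_options() is a pure function, so its behaviour is easy to show directly (illustration, not part of the patch):

from distutils.ccompiler import gen_preprocess_options

opts = gen_preprocess_options(
    [('DEBUG', None), ('VERSION', '1.0'), ('NDEBUG',)],
    ['include', '/usr/local/include'])
print(opts)
# ['-DDEBUG', '-DVERSION=1.0', '-UNDEBUG', '-Iinclude', '-I/usr/local/include']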
+
+def gen_lib_options (compiler, library_dirs, runtime_library_dirs, libraries):
+ """Generate linker options for searching library directories and
+ linking with specific libraries. 'libraries' and 'library_dirs' are,
+ respectively, lists of library names (not filenames!) and search
+ directories. Returns a list of command-line options suitable for use
+ with some compiler (depending on the two format strings passed in).
+ """
+ lib_opts = []
+
+ for dir in library_dirs:
+ lib_opts.append(compiler.library_dir_option(dir))
+
+ for dir in runtime_library_dirs:
+ opt = compiler.runtime_library_dir_option(dir)
+ if isinstance(opt, list):
+ lib_opts = lib_opts + opt
+ else:
+ lib_opts.append(opt)
+
+ # XXX it's important that we *not* remove redundant library mentions!
+ # sometimes you really do have to say "-lfoo -lbar -lfoo" in order to
+ # resolve all symbols. I just hope we never have to say "-lfoo obj.o
+ # -lbar" to get things to work -- that's certainly a possibility, but a
+ # pretty nasty way to arrange your C code.
+
+ for lib in libraries:
+ (lib_dir, lib_name) = os.path.split(lib)
+ if lib_dir:
+ lib_file = compiler.find_library_file([lib_dir], lib_name)
+ if lib_file:
+ lib_opts.append(lib_file)
+ else:
+ compiler.warn("no library file corresponding to "
+ "'%s' found (skipping)" % lib)
+ else:
+ lib_opts.append(compiler.library_option (lib))
+ return lib_opts
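Similarly, a short illustration (not part of the patch) of gen_lib_options() with the Unix-style compiler, whose library_dir_option() and library_option() produce -L and -l flags:

from distutils.ccompiler import new_compiler, gen_lib_options

cc = new_compiler('posix')
print(gen_lib_options(cc, ['/opt/foo/lib'], [], ['ssl', 'crypto']))
# ['-L/opt/foo/lib', '-lssl', '-lcrypto']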
diff --git a/setuptools/_distutils/cmd.py b/setuptools/_distutils/cmd.py
new file mode 100644
index 0000000..dba3191
--- /dev/null
+++ b/setuptools/_distutils/cmd.py
@@ -0,0 +1,403 @@
+"""distutils.cmd
+
+Provides the Command class, the base class for the command classes
+in the distutils.command package.
+"""
+
+import sys, os, re
+from distutils.errors import DistutilsOptionError
+from distutils import util, dir_util, file_util, archive_util, dep_util
+from distutils import log
+
+class Command:
+ """Abstract base class for defining command classes, the "worker bees"
+ of the Distutils. A useful analogy for command classes is to think of
+ them as subroutines with local variables called "options". The options
+ are "declared" in 'initialize_options()' and "defined" (given their
+ final values, aka "finalized") in 'finalize_options()', both of which
+ must be defined by every command class. The distinction between the
+ two is necessary because option values might come from the outside
+ world (command line, config file, ...), and any options dependent on
+ other options must be computed *after* these outside influences have
+ been processed -- hence 'finalize_options()'. The "body" of the
+ subroutine, where it does all its work based on the values of its
+ options, is the 'run()' method, which must also be implemented by every
+ command class.
+ """
+
+ # 'sub_commands' formalizes the notion of a "family" of commands,
+ # eg. "install" as the parent with sub-commands "install_lib",
+ # "install_headers", etc. The parent of a family of commands
+ # defines 'sub_commands' as a class attribute; it's a list of
+ # (command_name : string, predicate : unbound_method | string | None)
+ # tuples, where 'predicate' is a method of the parent command that
+ # determines whether the corresponding command is applicable in the
+    # current situation.  (Eg. "install_headers" is only applicable if
+ # we have any C header files to install.) If 'predicate' is None,
+ # that command is always applicable.
+ #
+ # 'sub_commands' is usually defined at the *end* of a class, because
+ # predicates can be unbound methods, so they must already have been
+ # defined. The canonical example is the "install" command.
+ sub_commands = []
+
+
+ # -- Creation/initialization methods -------------------------------
+
+ def __init__(self, dist):
+ """Create and initialize a new Command object. Most importantly,
+ invokes the 'initialize_options()' method, which is the real
+ initializer and depends on the actual command being
+ instantiated.
+ """
+ # late import because of mutual dependence between these classes
+ from distutils.dist import Distribution
+
+ if not isinstance(dist, Distribution):
+ raise TypeError("dist must be a Distribution instance")
+ if self.__class__ is Command:
+ raise RuntimeError("Command is an abstract class")
+
+ self.distribution = dist
+ self.initialize_options()
+
+ # Per-command versions of the global flags, so that the user can
+ # customize Distutils' behaviour command-by-command and let some
+ # commands fall back on the Distribution's behaviour. None means
+ # "not defined, check self.distribution's copy", while 0 or 1 mean
+ # false and true (duh). Note that this means figuring out the real
+ # value of each flag is a touch complicated -- hence "self._dry_run"
+ # will be handled by __getattr__, below.
+ # XXX This needs to be fixed.
+ self._dry_run = None
+
+ # verbose is largely ignored, but needs to be set for
+ # backwards compatibility (I think)?
+ self.verbose = dist.verbose
+
+ # Some commands define a 'self.force' option to ignore file
+ # timestamps, but methods defined *here* assume that
+ # 'self.force' exists for all commands. So define it here
+ # just to be safe.
+ self.force = None
+
+ # The 'help' flag is just used for command-line parsing, so
+ # none of that complicated bureaucracy is needed.
+ self.help = 0
+
+ # 'finalized' records whether or not 'finalize_options()' has been
+ # called. 'finalize_options()' itself should not pay attention to
+ # this flag: it is the business of 'ensure_finalized()', which
+ # always calls 'finalize_options()', to respect/update it.
+ self.finalized = 0
+
+ # XXX A more explicit way to customize dry_run would be better.
+ def __getattr__(self, attr):
+ if attr == 'dry_run':
+ myval = getattr(self, "_" + attr)
+ if myval is None:
+ return getattr(self.distribution, attr)
+ else:
+ return myval
+ else:
+ raise AttributeError(attr)
+
+ def ensure_finalized(self):
+ if not self.finalized:
+ self.finalize_options()
+ self.finalized = 1
+
+ # Subclasses must define:
+ # initialize_options()
+ # provide default values for all options; may be customized by
+ # setup script, by options from config file(s), or by command-line
+ # options
+ # finalize_options()
+ # decide on the final values for all options; this is called
+ # after all possible intervention from the outside world
+ # (command-line, option file, etc.) has been processed
+ # run()
+ # run the command: do whatever it is we're here to do,
+ # controlled by the command's various option values
+
+ def initialize_options(self):
+ """Set default values for all the options that this command
+ supports. Note that these defaults may be overridden by other
+ commands, by the setup script, by config files, or by the
+ command-line. Thus, this is not the place to code dependencies
+ between options; generally, 'initialize_options()' implementations
+ are just a bunch of "self.foo = None" assignments.
+
+ This method must be implemented by all command classes.
+ """
+ raise RuntimeError("abstract method -- subclass %s must override"
+ % self.__class__)
+
+ def finalize_options(self):
+ """Set final values for all the options that this command supports.
+ This is always called as late as possible, ie. after any option
+ assignments from the command-line or from other commands have been
+ done. Thus, this is the place to code option dependencies: if
+ 'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as
+ long as 'foo' still has the same value it was assigned in
+ 'initialize_options()'.
+
+ This method must be implemented by all command classes.
+ """
+ raise RuntimeError("abstract method -- subclass %s must override"
+ % self.__class__)
+
+
+ def dump_options(self, header=None, indent=""):
+ from distutils.fancy_getopt import longopt_xlate
+ if header is None:
+ header = "command options for '%s':" % self.get_command_name()
+ self.announce(indent + header, level=log.INFO)
+ indent = indent + " "
+ for (option, _, _) in self.user_options:
+ option = option.translate(longopt_xlate)
+ if option[-1] == "=":
+ option = option[:-1]
+ value = getattr(self, option)
+ self.announce(indent + "%s = %s" % (option, value),
+ level=log.INFO)
+
+ def run(self):
+ """A command's raison d'etre: carry out the action it exists to
+ perform, controlled by the options initialized in
+ 'initialize_options()', customized by other commands, the setup
+ script, the command-line, and config files, and finalized in
+ 'finalize_options()'. All terminal output and filesystem
+ interaction should be done by 'run()'.
+
+ This method must be implemented by all command classes.
+ """
+ raise RuntimeError("abstract method -- subclass %s must override"
+ % self.__class__)
+
+ def announce(self, msg, level=1):
+ """If the current verbosity level is of greater than or equal to
+ 'level' print 'msg' to stdout.
+ """
+ log.log(level, msg)
+
+ def debug_print(self, msg):
+ """Print 'msg' to stdout if the global DEBUG (taken from the
+ DISTUTILS_DEBUG environment variable) flag is true.
+ """
+ from distutils.debug import DEBUG
+ if DEBUG:
+ print(msg)
+ sys.stdout.flush()
+
+
+ # -- Option validation methods -------------------------------------
+ # (these are very handy in writing the 'finalize_options()' method)
+ #
+ # NB. the general philosophy here is to ensure that a particular option
+ # value meets certain type and value constraints. If not, we try to
+ # force it into conformance (eg. if we expect a list but have a string,
+ # split the string on comma and/or whitespace). If we can't force the
+ # option into conformance, raise DistutilsOptionError. Thus, command
+ # classes need do nothing more than (eg.)
+ # self.ensure_string_list('foo')
+ # and they can be guaranteed that thereafter, self.foo will be
+ # a list of strings.
+
+ def _ensure_stringlike(self, option, what, default=None):
+ val = getattr(self, option)
+ if val is None:
+ setattr(self, option, default)
+ return default
+ elif not isinstance(val, str):
+ raise DistutilsOptionError("'%s' must be a %s (got `%s`)"
+ % (option, what, val))
+ return val
+
+ def ensure_string(self, option, default=None):
+ """Ensure that 'option' is a string; if not defined, set it to
+ 'default'.
+ """
+ self._ensure_stringlike(option, "string", default)
+
+ def ensure_string_list(self, option):
+ r"""Ensure that 'option' is a list of strings. If 'option' is
+ currently a string, we split it either on /,\s*/ or /\s+/, so
+ "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become
+ ["foo", "bar", "baz"].
+ """
+ val = getattr(self, option)
+ if val is None:
+ return
+ elif isinstance(val, str):
+ setattr(self, option, re.split(r',\s*|\s+', val))
+ else:
+ if isinstance(val, list):
+ ok = all(isinstance(v, str) for v in val)
+ else:
+ ok = False
+ if not ok:
+ raise DistutilsOptionError(
+ "'%s' must be a list of strings (got %r)"
+ % (option, val))
+
+ def _ensure_tested_string(self, option, tester, what, error_fmt,
+ default=None):
+ val = self._ensure_stringlike(option, what, default)
+ if val is not None and not tester(val):
+ raise DistutilsOptionError(("error in '%s' option: " + error_fmt)
+ % (option, val))
+
+ def ensure_filename(self, option):
+ """Ensure that 'option' is the name of an existing file."""
+ self._ensure_tested_string(option, os.path.isfile,
+ "filename",
+ "'%s' does not exist or is not a file")
+
+ def ensure_dirname(self, option):
+ self._ensure_tested_string(option, os.path.isdir,
+ "directory name",
+ "'%s' does not exist or is not a directory")
+
+
+ # -- Convenience methods for commands ------------------------------
+
+ def get_command_name(self):
+ if hasattr(self, 'command_name'):
+ return self.command_name
+ else:
+ return self.__class__.__name__
+
+ def set_undefined_options(self, src_cmd, *option_pairs):
+ """Set the values of any "undefined" options from corresponding
+ option values in some other command object. "Undefined" here means
+ "is None", which is the convention used to indicate that an option
+ has not been changed between 'initialize_options()' and
+ 'finalize_options()'. Usually called from 'finalize_options()' for
+ options that depend on some other command rather than another
+ option of the same command. 'src_cmd' is the other command from
+ which option values will be taken (a command object will be created
+ for it if necessary); the remaining arguments are
+ '(src_option,dst_option)' tuples which mean "take the value of
+ 'src_option' in the 'src_cmd' command object, and copy it to
+ 'dst_option' in the current command object".
+ """
+ # Option_pairs: list of (src_option, dst_option) tuples
+ src_cmd_obj = self.distribution.get_command_obj(src_cmd)
+ src_cmd_obj.ensure_finalized()
+ for (src_option, dst_option) in option_pairs:
+ if getattr(self, dst_option) is None:
+ setattr(self, dst_option, getattr(src_cmd_obj, src_option))
+
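To tie the helpers above together, here is a hypothetical command (illustration only, not part of the patch; the name check_layout and its options are invented) that inherits build_lib from the 'build' command and normalises a string option into a list:

from distutils.cmd import Command

class check_layout(Command):
    description = "verify the layout of the built tree (illustrative)"
    user_options = [
        ('build-lib=', None, "directory holding the built modules"),
        ('packages=', None, "comma/space separated package list"),
    ]

    def initialize_options(self):
        self.build_lib = None      # stays None ("undefined") until finalized
        self.packages = None

    def finalize_options(self):
        # copy build.build_lib unless --build-lib was given explicitly
        self.set_undefined_options('build', ('build_lib', 'build_lib'))
        # "foo, bar baz" -> ['foo', 'bar', 'baz']
        self.ensure_string_list('packages')

    def run(self):
        for pkg in self.packages or []:
            self.announce("checking %s under %s" % (pkg, self.build_lib))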
+ def get_finalized_command(self, command, create=1):
+ """Wrapper around Distribution's 'get_command_obj()' method: find
+ (create if necessary and 'create' is true) the command object for
+ 'command', call its 'ensure_finalized()' method, and return the
+ finalized command object.
+ """
+ cmd_obj = self.distribution.get_command_obj(command, create)
+ cmd_obj.ensure_finalized()
+ return cmd_obj
+
+ # XXX rename to 'get_reinitialized_command()'? (should do the
+ # same in dist.py, if so)
+ def reinitialize_command(self, command, reinit_subcommands=0):
+ return self.distribution.reinitialize_command(command,
+ reinit_subcommands)
+
+ def run_command(self, command):
+ """Run some other command: uses the 'run_command()' method of
+ Distribution, which creates and finalizes the command object if
+ necessary and then invokes its 'run()' method.
+ """
+ self.distribution.run_command(command)
+
+ def get_sub_commands(self):
+ """Determine the sub-commands that are relevant in the current
+ distribution (ie., that need to be run). This is based on the
+ 'sub_commands' class attribute: each tuple in that list may include
+ a method that we call to determine if the subcommand needs to be
+ run for the current distribution. Return a list of command names.
+ """
+ commands = []
+ for (cmd_name, method) in self.sub_commands:
+ if method is None or method(self):
+ commands.append(cmd_name)
+ return commands
+
+
+ # -- External world manipulation -----------------------------------
+
+ def warn(self, msg):
+ log.warn("warning: %s: %s\n", self.get_command_name(), msg)
+
+ def execute(self, func, args, msg=None, level=1):
+ util.execute(func, args, msg, dry_run=self.dry_run)
+
+ def mkpath(self, name, mode=0o777):
+ dir_util.mkpath(name, mode, dry_run=self.dry_run)
+
+ def copy_file(self, infile, outfile, preserve_mode=1, preserve_times=1,
+ link=None, level=1):
+ """Copy a file respecting verbose, dry-run and force flags. (The
+ former two default to whatever is in the Distribution object, and
+ the latter defaults to false for commands that don't define it.)"""
+ return file_util.copy_file(infile, outfile, preserve_mode,
+ preserve_times, not self.force, link,
+ dry_run=self.dry_run)
+
+ def copy_tree(self, infile, outfile, preserve_mode=1, preserve_times=1,
+ preserve_symlinks=0, level=1):
+ """Copy an entire directory tree respecting verbose, dry-run,
+ and force flags.
+ """
+ return dir_util.copy_tree(infile, outfile, preserve_mode,
+ preserve_times, preserve_symlinks,
+ not self.force, dry_run=self.dry_run)
+
+ def move_file(self, src, dst, level=1):
+ """Move a file respecting dry-run flag."""
+ return file_util.move_file(src, dst, dry_run=self.dry_run)
+
+ def spawn(self, cmd, search_path=1, level=1):
+ """Spawn an external command respecting dry-run flag."""
+ from distutils.spawn import spawn
+ spawn(cmd, search_path, dry_run=self.dry_run)
+
+ def make_archive(self, base_name, format, root_dir=None, base_dir=None,
+ owner=None, group=None):
+ return archive_util.make_archive(base_name, format, root_dir, base_dir,
+ dry_run=self.dry_run,
+ owner=owner, group=group)
+
+ def make_file(self, infiles, outfile, func, args,
+ exec_msg=None, skip_msg=None, level=1):
+ """Special case of 'execute()' for operations that process one or
+ more input files and generate one output file. Works just like
+ 'execute()', except the operation is skipped and a different
+ message printed if 'outfile' already exists and is newer than all
+ files listed in 'infiles'. If the command defines 'self.force' and
+ it is true, the operation is run unconditionally, with no timestamp
+ checks.
+ """
+ if skip_msg is None:
+ skip_msg = "skipping %s (inputs unchanged)" % outfile
+
+ # Allow 'infiles' to be a single string
+ if isinstance(infiles, str):
+ infiles = (infiles,)
+ elif not isinstance(infiles, (list, tuple)):
+ raise TypeError(
+ "'infiles' must be a string, or a list or tuple of strings")
+
+ if exec_msg is None:
+ exec_msg = "generating %s from %s" % (outfile, ', '.join(infiles))
+
+ # If 'outfile' must be regenerated (either because it doesn't
+ # exist, is out-of-date, or the 'force' flag is true) then
+ # perform the action that presumably regenerates it
+ if self.force or dep_util.newer_group(infiles, outfile):
+ self.execute(func, args, exec_msg, level)
+ # Otherwise, print the "skip" message
+ else:
+ log.debug(skip_msg)
diff --git a/setuptools/_distutils/command/__init__.py b/setuptools/_distutils/command/__init__.py
new file mode 100644
index 0000000..481eea9
--- /dev/null
+++ b/setuptools/_distutils/command/__init__.py
@@ -0,0 +1,31 @@
+"""distutils.command
+
+Package containing implementation of all the standard Distutils
+commands."""
+
+__all__ = ['build',
+ 'build_py',
+ 'build_ext',
+ 'build_clib',
+ 'build_scripts',
+ 'clean',
+ 'install',
+ 'install_lib',
+ 'install_headers',
+ 'install_scripts',
+ 'install_data',
+ 'sdist',
+ 'register',
+ 'bdist',
+ 'bdist_dumb',
+ 'bdist_rpm',
+ 'bdist_wininst',
+ 'check',
+ 'upload',
+ # These two are reserved for future use:
+ #'bdist_sdux',
+ #'bdist_pkgtool',
+ # Note:
+ # bdist_packager is not included because it only provides
+ # an abstract base class
+ ]
diff --git a/setuptools/_distutils/command/bdist.py b/setuptools/_distutils/command/bdist.py
new file mode 100644
index 0000000..014871d
--- /dev/null
+++ b/setuptools/_distutils/command/bdist.py
@@ -0,0 +1,143 @@
+"""distutils.command.bdist
+
+Implements the Distutils 'bdist' command (create a built [binary]
+distribution)."""
+
+import os
+from distutils.core import Command
+from distutils.errors import *
+from distutils.util import get_platform
+
+
+def show_formats():
+ """Print list of available formats (arguments to "--format" option).
+ """
+ from distutils.fancy_getopt import FancyGetopt
+ formats = []
+ for format in bdist.format_commands:
+ formats.append(("formats=" + format, None,
+ bdist.format_command[format][1]))
+ pretty_printer = FancyGetopt(formats)
+ pretty_printer.print_help("List of available distribution formats:")
+
+
+class bdist(Command):
+
+ description = "create a built (binary) distribution"
+
+ user_options = [('bdist-base=', 'b',
+ "temporary directory for creating built distributions"),
+ ('plat-name=', 'p',
+ "platform name to embed in generated filenames "
+ "(default: %s)" % get_platform()),
+ ('formats=', None,
+ "formats for distribution (comma-separated list)"),
+ ('dist-dir=', 'd',
+ "directory to put final built distributions in "
+ "[default: dist]"),
+ ('skip-build', None,
+ "skip rebuilding everything (for testing/debugging)"),
+ ('owner=', 'u',
+ "Owner name used when creating a tar file"
+ " [default: current user]"),
+ ('group=', 'g',
+ "Group name used when creating a tar file"
+ " [default: current group]"),
+ ]
+
+ boolean_options = ['skip-build']
+
+ help_options = [
+ ('help-formats', None,
+ "lists available distribution formats", show_formats),
+ ]
+
+ # The following commands do not take a format option from bdist
+ no_format_option = ('bdist_rpm',)
+
+ # This won't do in reality: will need to distinguish RPM-ish Linux,
+ # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS.
+ default_format = {'posix': 'gztar',
+ 'nt': 'zip'}
+
+ # Establish the preferred order (for the --help-formats option).
+ format_commands = ['rpm', 'gztar', 'bztar', 'xztar', 'ztar', 'tar',
+ 'wininst', 'zip', 'msi']
+
+ # And the real information.
+ format_command = {'rpm': ('bdist_rpm', "RPM distribution"),
+ 'gztar': ('bdist_dumb', "gzip'ed tar file"),
+ 'bztar': ('bdist_dumb', "bzip2'ed tar file"),
+ 'xztar': ('bdist_dumb', "xz'ed tar file"),
+ 'ztar': ('bdist_dumb', "compressed tar file"),
+ 'tar': ('bdist_dumb', "tar file"),
+ 'wininst': ('bdist_wininst',
+ "Windows executable installer"),
+ 'zip': ('bdist_dumb', "ZIP file"),
+ 'msi': ('bdist_msi', "Microsoft Installer")
+ }
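+ # Example: "--formats=gztar,zip" maps (via the table above) to running
+ # 'bdist_dumb' twice in run(), once with format 'gztar' and once with
+ # format 'zip'.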
+
+
+ def initialize_options(self):
+ self.bdist_base = None
+ self.plat_name = None
+ self.formats = None
+ self.dist_dir = None
+ self.skip_build = 0
+ self.group = None
+ self.owner = None
+
+ def finalize_options(self):
+ # have to finalize 'plat_name' before 'bdist_base'
+ if self.plat_name is None:
+ if self.skip_build:
+ self.plat_name = get_platform()
+ else:
+ self.plat_name = self.get_finalized_command('build').plat_name
+
+ # 'bdist_base' -- parent of per-built-distribution-format
+ # temporary directories (eg. we'll probably have
+ # "build/bdist.<plat>/dumb", "build/bdist.<plat>/rpm", etc.)
+ if self.bdist_base is None:
+ build_base = self.get_finalized_command('build').build_base
+ self.bdist_base = os.path.join(build_base,
+ 'bdist.' + self.plat_name)
+
+ self.ensure_string_list('formats')
+ if self.formats is None:
+ try:
+ self.formats = [self.default_format[os.name]]
+ except KeyError:
+ raise DistutilsPlatformError(
+ "don't know how to create built distributions "
+ "on platform %s" % os.name)
+
+ if self.dist_dir is None:
+ self.dist_dir = "dist"
+
+ def run(self):
+ # Figure out which sub-commands we need to run.
+ commands = []
+ for format in self.formats:
+ try:
+ commands.append(self.format_command[format][0])
+ except KeyError:
+ raise DistutilsOptionError("invalid format '%s'" % format)
+
+ # Reinitialize and run each command.
+ for i in range(len(self.formats)):
+ cmd_name = commands[i]
+ sub_cmd = self.reinitialize_command(cmd_name)
+ if cmd_name not in self.no_format_option:
+ sub_cmd.format = self.formats[i]
+
+ # passing the owner and group names for tar archiving
+ if cmd_name == 'bdist_dumb':
+ sub_cmd.owner = self.owner
+ sub_cmd.group = self.group
+
+ # If we're going to need to run this command again, tell it to
+ # keep its temporary files around so subsequent runs go faster.
+ if cmd_name in commands[i+1:]:
+ sub_cmd.keep_temp = 1
+ self.run_command(cmd_name)
diff --git a/setuptools/_distutils/command/bdist_dumb.py b/setuptools/_distutils/command/bdist_dumb.py
new file mode 100644
index 0000000..f0d6b5b
--- /dev/null
+++ b/setuptools/_distutils/command/bdist_dumb.py
@@ -0,0 +1,123 @@
+"""distutils.command.bdist_dumb
+
+Implements the Distutils 'bdist_dumb' command (create a "dumb" built
+distribution -- i.e., just an archive to be unpacked under $prefix or
+$exec_prefix)."""
+
+import os
+from distutils.core import Command
+from distutils.util import get_platform
+from distutils.dir_util import remove_tree, ensure_relative
+from distutils.errors import *
+from distutils.sysconfig import get_python_version
+from distutils import log
+
+class bdist_dumb(Command):
+
+ description = "create a \"dumb\" built distribution"
+
+ user_options = [('bdist-dir=', 'd',
+ "temporary directory for creating the distribution"),
+ ('plat-name=', 'p',
+ "platform name to embed in generated filenames "
+ "(default: %s)" % get_platform()),
+ ('format=', 'f',
+ "archive format to create (tar, gztar, bztar, xztar, "
+ "ztar, zip)"),
+ ('keep-temp', 'k',
+ "keep the pseudo-installation tree around after " +
+ "creating the distribution archive"),
+ ('dist-dir=', 'd',
+ "directory to put final built distributions in"),
+ ('skip-build', None,
+ "skip rebuilding everything (for testing/debugging)"),
+ ('relative', None,
+ "build the archive using relative paths "
+ "(default: false)"),
+ ('owner=', 'u',
+ "Owner name used when creating a tar file"
+ " [default: current user]"),
+ ('group=', 'g',
+ "Group name used when creating a tar file"
+ " [default: current group]"),
+ ]
+
+ boolean_options = ['keep-temp', 'skip-build', 'relative']
+
+ default_format = { 'posix': 'gztar',
+ 'nt': 'zip' }
+
+ def initialize_options(self):
+ self.bdist_dir = None
+ self.plat_name = None
+ self.format = None
+ self.keep_temp = 0
+ self.dist_dir = None
+ self.skip_build = None
+ self.relative = 0
+ self.owner = None
+ self.group = None
+
+ def finalize_options(self):
+ if self.bdist_dir is None:
+ bdist_base = self.get_finalized_command('bdist').bdist_base
+ self.bdist_dir = os.path.join(bdist_base, 'dumb')
+
+ if self.format is None:
+ try:
+ self.format = self.default_format[os.name]
+ except KeyError:
+ raise DistutilsPlatformError(
+ "don't know how to create dumb built distributions "
+ "on platform %s" % os.name)
+
+ self.set_undefined_options('bdist',
+ ('dist_dir', 'dist_dir'),
+ ('plat_name', 'plat_name'),
+ ('skip_build', 'skip_build'))
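+ # Any of dist_dir/plat_name/skip_build still set to None at this point
+ # are copied from the already-finalized 'bdist' command object.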
+
+ def run(self):
+ if not self.skip_build:
+ self.run_command('build')
+
+ install = self.reinitialize_command('install', reinit_subcommands=1)
+ install.root = self.bdist_dir
+ install.skip_build = self.skip_build
+ install.warn_dir = 0
+
+ log.info("installing to %s", self.bdist_dir)
+ self.run_command('install')
+
+ # And make an archive relative to the root of the
+ # pseudo-installation tree.
+ archive_basename = "%s.%s" % (self.distribution.get_fullname(),
+ self.plat_name)
+
+ pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
+ if not self.relative:
+ archive_root = self.bdist_dir
+ else:
+ if (self.distribution.has_ext_modules() and
+ (install.install_base != install.install_platbase)):
+ raise DistutilsPlatformError(
+ "can't make a dumb built distribution where "
+ "base and platbase are different (%s, %s)"
+ % (repr(install.install_base),
+ repr(install.install_platbase)))
+ else:
+ archive_root = os.path.join(self.bdist_dir,
+ ensure_relative(install.install_base))
+
+ # Make the archive
+ filename = self.make_archive(pseudoinstall_root,
+ self.format, root_dir=archive_root,
+ owner=self.owner, group=self.group)
+ if self.distribution.has_ext_modules():
+ pyversion = get_python_version()
+ else:
+ pyversion = 'any'
+ self.distribution.dist_files.append(('bdist_dumb', pyversion,
+ filename))
+
+ if not self.keep_temp:
+ remove_tree(self.bdist_dir, dry_run=self.dry_run)
diff --git a/setuptools/_distutils/command/bdist_msi.py b/setuptools/_distutils/command/bdist_msi.py
new file mode 100644
index 0000000..1525953
--- /dev/null
+++ b/setuptools/_distutils/command/bdist_msi.py
@@ -0,0 +1,749 @@
+# Copyright (C) 2005, 2006 Martin von Löwis
+# Licensed to PSF under a Contributor Agreement.
+# The bdist_wininst command proper
+# based on bdist_wininst
+"""
+Implements the bdist_msi command.
+"""
+
+import os
+import sys
+import warnings
+from distutils.core import Command
+from distutils.dir_util import remove_tree
+from distutils.sysconfig import get_python_version
+from distutils.version import StrictVersion
+from distutils.errors import DistutilsOptionError
+from distutils.util import get_platform
+from distutils import log
+import msilib
+from msilib import schema, sequence, text
+from msilib import Directory, Feature, Dialog, add_data
+
+class PyDialog(Dialog):
+ """Dialog class with a fixed layout: controls at the top, then a ruler,
+ then a list of buttons: back, next, cancel. Optionally a bitmap at the
+ left."""
+ def __init__(self, *args, **kw):
+ """Dialog(database, name, x, y, w, h, attributes, title, first,
+ default, cancel, bitmap=true)"""
+ super().__init__(*args)
+ ruler = self.h - 36
+ bmwidth = 152*ruler/328
+ #if kw.get("bitmap", True):
+ # self.bitmap("Bitmap", 0, 0, bmwidth, ruler, "PythonWin")
+ self.line("BottomLine", 0, ruler, self.w, 0)
+
+ def title(self, title):
+ "Set the title text of the dialog at the top."
+ # name, x, y, w, h, flags=Visible|Enabled|Transparent|NoPrefix,
+ # text, in VerdanaBold10
+ self.text("Title", 15, 10, 320, 60, 0x30003,
+ r"{\VerdanaBold10}%s" % title)
+
+ def back(self, title, next, name = "Back", active = 1):
+ """Add a back button with a given title, the tab-next button,
+ its name in the Control table, possibly initially disabled.
+
+ Return the button, so that events can be associated"""
+ if active:
+ flags = 3 # Visible|Enabled
+ else:
+ flags = 1 # Visible
+ return self.pushbutton(name, 180, self.h-27 , 56, 17, flags, title, next)
+
+ def cancel(self, title, next, name = "Cancel", active = 1):
+ """Add a cancel button with a given title, the tab-next button,
+ its name in the Control table, possibly initially disabled.
+
+ Return the button, so that events can be associated"""
+ if active:
+ flags = 3 # Visible|Enabled
+ else:
+ flags = 1 # Visible
+ return self.pushbutton(name, 304, self.h-27, 56, 17, flags, title, next)
+
+ def next(self, title, next, name = "Next", active = 1):
+ """Add a Next button with a given title, the tab-next button,
+ its name in the Control table, possibly initially disabled.
+
+ Return the button, so that events can be associated"""
+ if active:
+ flags = 3 # Visible|Enabled
+ else:
+ flags = 1 # Visible
+ return self.pushbutton(name, 236, self.h-27, 56, 17, flags, title, next)
+
+ def xbutton(self, name, title, next, xpos):
+ """Add a button with a given title, the tab-next button,
+ its name in the Control table, giving its x position; the
+ y-position is aligned with the other buttons.
+
+ Return the button, so that events can be associated"""
+ return self.pushbutton(name, int(self.w*xpos - 28), self.h-27, 56, 17, 3, title, next)
+
+class bdist_msi(Command):
+
+ description = "create a Microsoft Installer (.msi) binary distribution"
+
+ user_options = [('bdist-dir=', None,
+ "temporary directory for creating the distribution"),
+ ('plat-name=', 'p',
+ "platform name to embed in generated filenames "
+ "(default: %s)" % get_platform()),
+ ('keep-temp', 'k',
+ "keep the pseudo-installation tree around after " +
+ "creating the distribution archive"),
+ ('target-version=', None,
+ "require a specific python version" +
+ " on the target system"),
+ ('no-target-compile', 'c',
+ "do not compile .py to .pyc on the target system"),
+ ('no-target-optimize', 'o',
+ "do not compile .py to .pyo (optimized) "
+ "on the target system"),
+ ('dist-dir=', 'd',
+ "directory to put final built distributions in"),
+ ('skip-build', None,
+ "skip rebuilding everything (for testing/debugging)"),
+ ('install-script=', None,
+ "basename of installation script to be run after "
+ "installation or before deinstallation"),
+ ('pre-install-script=', None,
+ "Fully qualified filename of a script to be run before "
+ "any files are installed. This script need not be in the "
+ "distribution"),
+ ]
+
+ boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
+ 'skip-build']
+
+ all_versions = ['2.0', '2.1', '2.2', '2.3', '2.4',
+ '2.5', '2.6', '2.7', '2.8', '2.9',
+ '3.0', '3.1', '3.2', '3.3', '3.4',
+ '3.5', '3.6', '3.7', '3.8', '3.9']
+ other_version = 'X'
+
+ def __init__(self, *args, **kw):
+ super().__init__(*args, **kw)
+ warnings.warn("bdist_msi command is deprecated since Python 3.9, "
+ "use bdist_wheel (wheel packages) instead",
+ DeprecationWarning, 2)
+
+ def initialize_options(self):
+ self.bdist_dir = None
+ self.plat_name = None
+ self.keep_temp = 0
+ self.no_target_compile = 0
+ self.no_target_optimize = 0
+ self.target_version = None
+ self.dist_dir = None
+ self.skip_build = None
+ self.install_script = None
+ self.pre_install_script = None
+ self.versions = None
+
+ def finalize_options(self):
+ self.set_undefined_options('bdist', ('skip_build', 'skip_build'))
+
+ if self.bdist_dir is None:
+ bdist_base = self.get_finalized_command('bdist').bdist_base
+ self.bdist_dir = os.path.join(bdist_base, 'msi')
+
+ short_version = get_python_version()
+ if (not self.target_version) and self.distribution.has_ext_modules():
+ self.target_version = short_version
+
+ if self.target_version:
+ self.versions = [self.target_version]
+ if not self.skip_build and self.distribution.has_ext_modules()\
+ and self.target_version != short_version:
+ raise DistutilsOptionError(
+ "target version can only be %s, or the '--skip-build'"
+ " option must be specified" % (short_version,))
+ else:
+ self.versions = list(self.all_versions)
+
+ self.set_undefined_options('bdist',
+ ('dist_dir', 'dist_dir'),
+ ('plat_name', 'plat_name'),
+ )
+
+ if self.pre_install_script:
+ raise DistutilsOptionError(
+ "the pre-install-script feature is not yet implemented")
+
+ if self.install_script:
+ for script in self.distribution.scripts:
+ if self.install_script == os.path.basename(script):
+ break
+ else:
+ raise DistutilsOptionError(
+ "install_script '%s' not found in scripts"
+ % self.install_script)
+ self.install_script_key = None
+
+ def run(self):
+ if not self.skip_build:
+ self.run_command('build')
+
+ install = self.reinitialize_command('install', reinit_subcommands=1)
+ install.prefix = self.bdist_dir
+ install.skip_build = self.skip_build
+ install.warn_dir = 0
+
+ install_lib = self.reinitialize_command('install_lib')
+ # we do not want to include pyc or pyo files
+ install_lib.compile = 0
+ install_lib.optimize = 0
+
+ if self.distribution.has_ext_modules():
+ # If we are building an installer for a Python version other
+ # than the one we are currently running, then we need to ensure
+ # our build_lib reflects the other Python version rather than ours.
+ # Note that for target_version!=sys.version, we must have skipped the
+ # build step, so there is no issue with enforcing the build of this
+ # version.
+ target_version = self.target_version
+ if not target_version:
+ assert self.skip_build, "Should have already checked this"
+ target_version = '%d.%d' % sys.version_info[:2]
+ plat_specifier = ".%s-%s" % (self.plat_name, target_version)
+ build = self.get_finalized_command('build')
+ build.build_lib = os.path.join(build.build_base,
+ 'lib' + plat_specifier)
+
+ log.info("installing to %s", self.bdist_dir)
+ install.ensure_finalized()
+
+ # avoid warning of 'install_lib' about installing
+ # into a directory not in sys.path
+ sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
+
+ install.run()
+
+ del sys.path[0]
+
+ self.mkpath(self.dist_dir)
+ fullname = self.distribution.get_fullname()
+ installer_name = self.get_installer_filename(fullname)
+ installer_name = os.path.abspath(installer_name)
+ if os.path.exists(installer_name): os.unlink(installer_name)
+
+ metadata = self.distribution.metadata
+ author = metadata.author
+ if not author:
+ author = metadata.maintainer
+ if not author:
+ author = "UNKNOWN"
+ version = metadata.get_version()
+ # ProductVersion must be strictly numeric
+ # XXX need to deal with prerelease versions
+ sversion = "%d.%d.%d" % StrictVersion(version).version
+ # Prefix ProductName with Python x.y, so that
+ # it sorts together with the other Python packages
+ # in Add/Remove Programs (ARP)
+ fullname = self.distribution.get_fullname()
+ if self.target_version:
+ product_name = "Python %s %s" % (self.target_version, fullname)
+ else:
+ product_name = "Python %s" % (fullname)
+ self.db = msilib.init_database(installer_name, schema,
+ product_name, msilib.gen_uuid(),
+ sversion, author)
+ msilib.add_tables(self.db, sequence)
+ props = [('DistVersion', version)]
+ email = metadata.author_email or metadata.maintainer_email
+ if email:
+ props.append(("ARPCONTACT", email))
+ if metadata.url:
+ props.append(("ARPURLINFOABOUT", metadata.url))
+ if props:
+ add_data(self.db, 'Property', props)
+
+ self.add_find_python()
+ self.add_files()
+ self.add_scripts()
+ self.add_ui()
+ self.db.Commit()
+
+ if hasattr(self.distribution, 'dist_files'):
+ tup = 'bdist_msi', self.target_version or 'any', fullname
+ self.distribution.dist_files.append(tup)
+
+ if not self.keep_temp:
+ remove_tree(self.bdist_dir, dry_run=self.dry_run)
+
+ def add_files(self):
+ db = self.db
+ cab = msilib.CAB("distfiles")
+ rootdir = os.path.abspath(self.bdist_dir)
+
+ root = Directory(db, cab, None, rootdir, "TARGETDIR", "SourceDir")
+ f = Feature(db, "Python", "Python", "Everything",
+ 0, 1, directory="TARGETDIR")
+
+ items = [(f, root, '')]
+ for version in self.versions + [self.other_version]:
+ target = "TARGETDIR" + version
+ name = default = "Python" + version
+ desc = "Everything"
+ if version is self.other_version:
+ title = "Python from another location"
+ level = 2
+ else:
+ title = "Python %s from registry" % version
+ level = 1
+ f = Feature(db, name, title, desc, 1, level, directory=target)
+ dir = Directory(db, cab, root, rootdir, target, default)
+ items.append((f, dir, version))
+ db.Commit()
+
+ seen = {}
+ for feature, dir, version in items:
+ todo = [dir]
+ while todo:
+ dir = todo.pop()
+ for file in os.listdir(dir.absolute):
+ afile = os.path.join(dir.absolute, file)
+ if os.path.isdir(afile):
+ short = "%s|%s" % (dir.make_short(file), file)
+ default = file + version
+ newdir = Directory(db, cab, dir, file, default, short)
+ todo.append(newdir)
+ else:
+ if not dir.component:
+ dir.start_component(dir.logical, feature, 0)
+ if afile not in seen:
+ key = seen[afile] = dir.add_file(file)
+ if file==self.install_script:
+ if self.install_script_key:
+ raise DistutilsOptionError(
+ "Multiple files with name %s" % file)
+ self.install_script_key = '[#%s]' % key
+ else:
+ key = seen[afile]
+ add_data(self.db, "DuplicateFile",
+ [(key + version, dir.component, key, None, dir.logical)])
+ db.Commit()
+ cab.commit(db)
+
+ def add_find_python(self):
+ """Adds code to the installer to compute the location of Python.
+
+ Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from the
+ registry for each version of Python.
+
+ Properties TARGETDIRX.Y will be set from PYTHON.USER.X.Y if defined,
+ else from PYTHON.MACHINE.X.Y.
+
+ Properties PYTHONX.Y will be set to TARGETDIRX.Y\\python.exe"""
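+ # For example (illustrative), ver == "3.8" yields the properties
+ # PYTHON.MACHINE.3.8, PYTHON.USER.3.8, TARGETDIR3.8 and PYTHON3.8.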
+
+ start = 402
+ for ver in self.versions:
+ install_path = r"SOFTWARE\Python\PythonCore\%s\InstallPath" % ver
+ machine_reg = "python.machine." + ver
+ user_reg = "python.user." + ver
+ machine_prop = "PYTHON.MACHINE." + ver
+ user_prop = "PYTHON.USER." + ver
+ machine_action = "PythonFromMachine" + ver
+ user_action = "PythonFromUser" + ver
+ exe_action = "PythonExe" + ver
+ target_dir_prop = "TARGETDIR" + ver
+ exe_prop = "PYTHON" + ver
+ if msilib.Win64:
+ # type: msidbLocatorTypeRawValue + msidbLocatorType64bit
+ Type = 2+16
+ else:
+ Type = 2
+ add_data(self.db, "RegLocator",
+ [(machine_reg, 2, install_path, None, Type),
+ (user_reg, 1, install_path, None, Type)])
+ add_data(self.db, "AppSearch",
+ [(machine_prop, machine_reg),
+ (user_prop, user_reg)])
+ add_data(self.db, "CustomAction",
+ [(machine_action, 51+256, target_dir_prop, "[" + machine_prop + "]"),
+ (user_action, 51+256, target_dir_prop, "[" + user_prop + "]"),
+ (exe_action, 51+256, exe_prop, "[" + target_dir_prop + "]\\python.exe"),
+ ])
+ add_data(self.db, "InstallExecuteSequence",
+ [(machine_action, machine_prop, start),
+ (user_action, user_prop, start + 1),
+ (exe_action, None, start + 2),
+ ])
+ add_data(self.db, "InstallUISequence",
+ [(machine_action, machine_prop, start),
+ (user_action, user_prop, start + 1),
+ (exe_action, None, start + 2),
+ ])
+ add_data(self.db, "Condition",
+ [("Python" + ver, 0, "NOT TARGETDIR" + ver)])
+ start += 4
+ assert start < 500
+
+ def add_scripts(self):
+ if self.install_script:
+ start = 6800
+ for ver in self.versions + [self.other_version]:
+ install_action = "install_script." + ver
+ exe_prop = "PYTHON" + ver
+ add_data(self.db, "CustomAction",
+ [(install_action, 50, exe_prop, self.install_script_key)])
+ add_data(self.db, "InstallExecuteSequence",
+ [(install_action, "&Python%s=3" % ver, start)])
+ start += 1
+ # XXX pre-install scripts are currently refused in finalize_options()
+ # but if this feature is completed, it will also need to add
+ # entries for each version as the above code does
+ if self.pre_install_script:
+ scriptfn = os.path.join(self.bdist_dir, "preinstall.bat")
+ with open(scriptfn, "w") as f:
+ # The batch file will be executed with [PYTHON], so that %1
+ # is the path to the Python interpreter; %0 will be the path
+ # of the batch file.
+ # rem ="""
+ # %1 %0
+ # exit
+ # """
+ # <actual script>
+ f.write('rem ="""\n%1 %0\nexit\n"""\n')
+ with open(self.pre_install_script) as fin:
+ f.write(fin.read())
+ add_data(self.db, "Binary",
+ [("PreInstall", msilib.Binary(scriptfn))
+ ])
+ add_data(self.db, "CustomAction",
+ [("PreInstall", 2, "PreInstall", None)
+ ])
+ add_data(self.db, "InstallExecuteSequence",
+ [("PreInstall", "NOT Installed", 450)])
+
+
+ def add_ui(self):
+ db = self.db
+ x = y = 50
+ w = 370
+ h = 300
+ title = "[ProductName] Setup"
+
+ # see "Dialog Style Bits"
+ modal = 3 # visible | modal
+ modeless = 1 # visible
+ track_disk_space = 32
+
+ # UI customization properties
+ add_data(db, "Property",
+ # See "DefaultUIFont Property"
+ [("DefaultUIFont", "DlgFont8"),
+ # See "ErrorDialog Style Bit"
+ ("ErrorDialog", "ErrorDlg"),
+ ("Progress1", "Install"), # modified in maintenance type dlg
+ ("Progress2", "installs"),
+ ("MaintenanceForm_Action", "Repair"),
+ # possible values: ALL, JUSTME
+ ("WhichUsers", "ALL")
+ ])
+
+ # Fonts, see "TextStyle Table"
+ add_data(db, "TextStyle",
+ [("DlgFont8", "Tahoma", 9, None, 0),
+ ("DlgFontBold8", "Tahoma", 8, None, 1), #bold
+ ("VerdanaBold10", "Verdana", 10, None, 1),
+ ("VerdanaRed9", "Verdana", 9, 255, 0),
+ ])
+
+ # UI Sequences, see "InstallUISequence Table", "Using a Sequence Table"
+ # Numbers indicate sequence; see sequence.py for how these actions integrate
+ add_data(db, "InstallUISequence",
+ [("PrepareDlg", "Not Privileged or Windows9x or Installed", 140),
+ ("WhichUsersDlg", "Privileged and not Windows9x and not Installed", 141),
+ # In the user interface, assume all-users installation if privileged.
+ ("SelectFeaturesDlg", "Not Installed", 1230),
+ # XXX no support for resume installations yet
+ #("ResumeDlg", "Installed AND (RESUME OR Preselected)", 1240),
+ ("MaintenanceTypeDlg", "Installed AND NOT RESUME AND NOT Preselected", 1250),
+ ("ProgressDlg", None, 1280)])
+
+ add_data(db, 'ActionText', text.ActionText)
+ add_data(db, 'UIText', text.UIText)
+ #####################################################################
+ # Standard dialogs: FatalError, UserExit, ExitDialog
+ fatal=PyDialog(db, "FatalError", x, y, w, h, modal, title,
+ "Finish", "Finish", "Finish")
+ fatal.title("[ProductName] Installer ended prematurely")
+ fatal.back("< Back", "Finish", active = 0)
+ fatal.cancel("Cancel", "Back", active = 0)
+ fatal.text("Description1", 15, 70, 320, 80, 0x30003,
+ "[ProductName] setup ended prematurely because of an error. Your system has not been modified. To install this program at a later time, please run the installation again.")
+ fatal.text("Description2", 15, 155, 320, 20, 0x30003,
+ "Click the Finish button to exit the Installer.")
+ c=fatal.next("Finish", "Cancel", name="Finish")
+ c.event("EndDialog", "Exit")
+
+ user_exit=PyDialog(db, "UserExit", x, y, w, h, modal, title,
+ "Finish", "Finish", "Finish")
+ user_exit.title("[ProductName] Installer was interrupted")
+ user_exit.back("< Back", "Finish", active = 0)
+ user_exit.cancel("Cancel", "Back", active = 0)
+ user_exit.text("Description1", 15, 70, 320, 80, 0x30003,
+ "[ProductName] setup was interrupted. Your system has not been modified. "
+ "To install this program at a later time, please run the installation again.")
+ user_exit.text("Description2", 15, 155, 320, 20, 0x30003,
+ "Click the Finish button to exit the Installer.")
+ c = user_exit.next("Finish", "Cancel", name="Finish")
+ c.event("EndDialog", "Exit")
+
+ exit_dialog = PyDialog(db, "ExitDialog", x, y, w, h, modal, title,
+ "Finish", "Finish", "Finish")
+ exit_dialog.title("Completing the [ProductName] Installer")
+ exit_dialog.back("< Back", "Finish", active = 0)
+ exit_dialog.cancel("Cancel", "Back", active = 0)
+ exit_dialog.text("Description", 15, 235, 320, 20, 0x30003,
+ "Click the Finish button to exit the Installer.")
+ c = exit_dialog.next("Finish", "Cancel", name="Finish")
+ c.event("EndDialog", "Return")
+
+ #####################################################################
+ # Required dialog: FilesInUse, ErrorDlg
+ inuse = PyDialog(db, "FilesInUse",
+ x, y, w, h,
+ 19, # KeepModeless|Modal|Visible
+ title,
+ "Retry", "Retry", "Retry", bitmap=False)
+ inuse.text("Title", 15, 6, 200, 15, 0x30003,
+ r"{\DlgFontBold8}Files in Use")
+ inuse.text("Description", 20, 23, 280, 20, 0x30003,
+ "Some files that need to be updated are currently in use.")
+ inuse.text("Text", 20, 55, 330, 50, 3,
+ "The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.")
+ inuse.control("List", "ListBox", 20, 107, 330, 130, 7, "FileInUseProcess",
+ None, None, None)
+ c=inuse.back("Exit", "Ignore", name="Exit")
+ c.event("EndDialog", "Exit")
+ c=inuse.next("Ignore", "Retry", name="Ignore")
+ c.event("EndDialog", "Ignore")
+ c=inuse.cancel("Retry", "Exit", name="Retry")
+ c.event("EndDialog","Retry")
+
+ # See "Error Dialog". See "ICE20" for the required names of the controls.
+ error = Dialog(db, "ErrorDlg",
+ 50, 10, 330, 101,
+ 65543, # Error|Minimize|Modal|Visible
+ title,
+ "ErrorText", None, None)
+ error.text("ErrorText", 50,9,280,48,3, "")
+ #error.control("ErrorIcon", "Icon", 15, 9, 24, 24, 5242881, None, "py.ico", None, None)
+ error.pushbutton("N",120,72,81,21,3,"No",None).event("EndDialog","ErrorNo")
+ error.pushbutton("Y",240,72,81,21,3,"Yes",None).event("EndDialog","ErrorYes")
+ error.pushbutton("A",0,72,81,21,3,"Abort",None).event("EndDialog","ErrorAbort")
+ error.pushbutton("C",42,72,81,21,3,"Cancel",None).event("EndDialog","ErrorCancel")
+ error.pushbutton("I",81,72,81,21,3,"Ignore",None).event("EndDialog","ErrorIgnore")
+ error.pushbutton("O",159,72,81,21,3,"Ok",None).event("EndDialog","ErrorOk")
+ error.pushbutton("R",198,72,81,21,3,"Retry",None).event("EndDialog","ErrorRetry")
+
+ #####################################################################
+ # Global "Query Cancel" dialog
+ cancel = Dialog(db, "CancelDlg", 50, 10, 260, 85, 3, title,
+ "No", "No", "No")
+ cancel.text("Text", 48, 15, 194, 30, 3,
+ "Are you sure you want to cancel [ProductName] installation?")
+ #cancel.control("Icon", "Icon", 15, 15, 24, 24, 5242881, None,
+ # "py.ico", None, None)
+ c=cancel.pushbutton("Yes", 72, 57, 56, 17, 3, "Yes", "No")
+ c.event("EndDialog", "Exit")
+
+ c=cancel.pushbutton("No", 132, 57, 56, 17, 3, "No", "Yes")
+ c.event("EndDialog", "Return")
+
+ #####################################################################
+ # Global "Wait for costing" dialog
+ costing = Dialog(db, "WaitForCostingDlg", 50, 10, 260, 85, modal, title,
+ "Return", "Return", "Return")
+ costing.text("Text", 48, 15, 194, 30, 3,
+ "Please wait while the installer finishes determining your disk space requirements.")
+ c = costing.pushbutton("Return", 102, 57, 56, 17, 3, "Return", None)
+ c.event("EndDialog", "Exit")
+
+ #####################################################################
+ # Preparation dialog: no user input except cancellation
+ prep = PyDialog(db, "PrepareDlg", x, y, w, h, modeless, title,
+ "Cancel", "Cancel", "Cancel")
+ prep.text("Description", 15, 70, 320, 40, 0x30003,
+ "Please wait while the Installer prepares to guide you through the installation.")
+ prep.title("Welcome to the [ProductName] Installer")
+ c=prep.text("ActionText", 15, 110, 320, 20, 0x30003, "Pondering...")
+ c.mapping("ActionText", "Text")
+ c=prep.text("ActionData", 15, 135, 320, 30, 0x30003, None)
+ c.mapping("ActionData", "Text")
+ prep.back("Back", None, active=0)
+ prep.next("Next", None, active=0)
+ c=prep.cancel("Cancel", None)
+ c.event("SpawnDialog", "CancelDlg")
+
+ #####################################################################
+ # Feature (Python directory) selection
+ seldlg = PyDialog(db, "SelectFeaturesDlg", x, y, w, h, modal, title,
+ "Next", "Next", "Cancel")
+ seldlg.title("Select Python Installations")
+
+ seldlg.text("Hint", 15, 30, 300, 20, 3,
+ "Select the Python locations where %s should be installed."
+ % self.distribution.get_fullname())
+
+ seldlg.back("< Back", None, active=0)
+ c = seldlg.next("Next >", "Cancel")
+ order = 1
+ c.event("[TARGETDIR]", "[SourceDir]", ordering=order)
+ for version in self.versions + [self.other_version]:
+ order += 1
+ c.event("[TARGETDIR]", "[TARGETDIR%s]" % version,
+ "FEATURE_SELECTED AND &Python%s=3" % version,
+ ordering=order)
+ c.event("SpawnWaitDialog", "WaitForCostingDlg", ordering=order + 1)
+ c.event("EndDialog", "Return", ordering=order + 2)
+ c = seldlg.cancel("Cancel", "Features")
+ c.event("SpawnDialog", "CancelDlg")
+
+ c = seldlg.control("Features", "SelectionTree", 15, 60, 300, 120, 3,
+ "FEATURE", None, "PathEdit", None)
+ c.event("[FEATURE_SELECTED]", "1")
+ ver = self.other_version
+ install_other_cond = "FEATURE_SELECTED AND &Python%s=3" % ver
+ dont_install_other_cond = "FEATURE_SELECTED AND &Python%s<>3" % ver
+
+ c = seldlg.text("Other", 15, 200, 300, 15, 3,
+ "Provide an alternate Python location")
+ c.condition("Enable", install_other_cond)
+ c.condition("Show", install_other_cond)
+ c.condition("Disable", dont_install_other_cond)
+ c.condition("Hide", dont_install_other_cond)
+
+ c = seldlg.control("PathEdit", "PathEdit", 15, 215, 300, 16, 1,
+ "TARGETDIR" + ver, None, "Next", None)
+ c.condition("Enable", install_other_cond)
+ c.condition("Show", install_other_cond)
+ c.condition("Disable", dont_install_other_cond)
+ c.condition("Hide", dont_install_other_cond)
+
+ #####################################################################
+ # Disk cost
+ cost = PyDialog(db, "DiskCostDlg", x, y, w, h, modal, title,
+ "OK", "OK", "OK", bitmap=False)
+ cost.text("Title", 15, 6, 200, 15, 0x30003,
+ r"{\DlgFontBold8}Disk Space Requirements")
+ cost.text("Description", 20, 20, 280, 20, 0x30003,
+ "The disk space required for the installation of the selected features.")
+ cost.text("Text", 20, 53, 330, 60, 3,
+ "The highlighted volumes (if any) do not have enough disk space "
+ "available for the currently selected features. You can either "
+ "remove some files from the highlighted volumes, or choose to "
+ "install less features onto local drive(s), or select different "
+ "destination drive(s).")
+ cost.control("VolumeList", "VolumeCostList", 20, 100, 330, 150, 393223,
+ None, "{120}{70}{70}{70}{70}", None, None)
+ cost.xbutton("OK", "Ok", None, 0.5).event("EndDialog", "Return")
+
+ #####################################################################
+ # WhichUsers Dialog. Only available on NT, and for privileged users.
+ # This must be run before FindRelatedProducts, because that will
+ # take into account whether the previous installation was per-user
+ # or per-machine. We currently don't support going back to this
+ # dialog after "Next" was selected; to support this, we would need to
+ # find how to reset the ALLUSERS property, and how to re-run
+ # FindRelatedProducts.
+ # On Windows9x, the ALLUSERS property is ignored on the command line
+ # and in the Property table, but, according to the documentation, the
+ # installer fails if a dialog attempts to set ALLUSERS.
+ whichusers = PyDialog(db, "WhichUsersDlg", x, y, w, h, modal, title,
+ "AdminInstall", "Next", "Cancel")
+ whichusers.title("Select whether to install [ProductName] for all users of this computer.")
+ # A radio group with two options: allusers, justme
+ g = whichusers.radiogroup("AdminInstall", 15, 60, 260, 50, 3,
+ "WhichUsers", "", "Next")
+ g.add("ALL", 0, 5, 150, 20, "Install for all users")
+ g.add("JUSTME", 0, 25, 150, 20, "Install just for me")
+
+ whichusers.back("Back", None, active=0)
+
+ c = whichusers.next("Next >", "Cancel")
+ c.event("[ALLUSERS]", "1", 'WhichUsers="ALL"', 1)
+ c.event("EndDialog", "Return", ordering = 2)
+
+ c = whichusers.cancel("Cancel", "AdminInstall")
+ c.event("SpawnDialog", "CancelDlg")
+
+ #####################################################################
+ # Installation Progress dialog (modeless)
+ progress = PyDialog(db, "ProgressDlg", x, y, w, h, modeless, title,
+ "Cancel", "Cancel", "Cancel", bitmap=False)
+ progress.text("Title", 20, 15, 200, 15, 0x30003,
+ r"{\DlgFontBold8}[Progress1] [ProductName]")
+ progress.text("Text", 35, 65, 300, 30, 3,
+ "Please wait while the Installer [Progress2] [ProductName]. "
+ "This may take several minutes.")
+ progress.text("StatusLabel", 35, 100, 35, 20, 3, "Status:")
+
+ c=progress.text("ActionText", 70, 100, w-70, 20, 3, "Pondering...")
+ c.mapping("ActionText", "Text")
+
+ #c=progress.text("ActionData", 35, 140, 300, 20, 3, None)
+ #c.mapping("ActionData", "Text")
+
+ c=progress.control("ProgressBar", "ProgressBar", 35, 120, 300, 10, 65537,
+ None, "Progress done", None, None)
+ c.mapping("SetProgress", "Progress")
+
+ progress.back("< Back", "Next", active=False)
+ progress.next("Next >", "Cancel", active=False)
+ progress.cancel("Cancel", "Back").event("SpawnDialog", "CancelDlg")
+
+ ###################################################################
+ # Maintenance type: repair/uninstall
+ maint = PyDialog(db, "MaintenanceTypeDlg", x, y, w, h, modal, title,
+ "Next", "Next", "Cancel")
+ maint.title("Welcome to the [ProductName] Setup Wizard")
+ maint.text("BodyText", 15, 63, 330, 42, 3,
+ "Select whether you want to repair or remove [ProductName].")
+ g=maint.radiogroup("RepairRadioGroup", 15, 108, 330, 60, 3,
+ "MaintenanceForm_Action", "", "Next")
+ #g.add("Change", 0, 0, 200, 17, "&Change [ProductName]")
+ g.add("Repair", 0, 18, 200, 17, "&Repair [ProductName]")
+ g.add("Remove", 0, 36, 200, 17, "Re&move [ProductName]")
+
+ maint.back("< Back", None, active=False)
+ c=maint.next("Finish", "Cancel")
+ # Change installation: Change progress dialog to "Change", then ask
+ # for feature selection
+ #c.event("[Progress1]", "Change", 'MaintenanceForm_Action="Change"', 1)
+ #c.event("[Progress2]", "changes", 'MaintenanceForm_Action="Change"', 2)
+
+ # Reinstall: Change progress dialog to "Repair", then invoke reinstall
+ # Also set list of reinstalled features to "ALL"
+ c.event("[REINSTALL]", "ALL", 'MaintenanceForm_Action="Repair"', 5)
+ c.event("[Progress1]", "Repairing", 'MaintenanceForm_Action="Repair"', 6)
+ c.event("[Progress2]", "repairs", 'MaintenanceForm_Action="Repair"', 7)
+ c.event("Reinstall", "ALL", 'MaintenanceForm_Action="Repair"', 8)
+
+ # Uninstall: Change progress to "Remove", then invoke uninstall
+ # Also set list of removed features to "ALL"
+ c.event("[REMOVE]", "ALL", 'MaintenanceForm_Action="Remove"', 11)
+ c.event("[Progress1]", "Removing", 'MaintenanceForm_Action="Remove"', 12)
+ c.event("[Progress2]", "removes", 'MaintenanceForm_Action="Remove"', 13)
+ c.event("Remove", "ALL", 'MaintenanceForm_Action="Remove"', 14)
+
+ # Close dialog when maintenance action scheduled
+ c.event("EndDialog", "Return", 'MaintenanceForm_Action<>"Change"', 20)
+ #c.event("NewDialog", "SelectFeaturesDlg", 'MaintenanceForm_Action="Change"', 21)
+
+ maint.cancel("Cancel", "RepairRadioGroup").event("SpawnDialog", "CancelDlg")
+
+ def get_installer_filename(self, fullname):
+ # Factored out to allow overriding in subclasses
+ if self.target_version:
+ base_name = "%s.%s-py%s.msi" % (fullname, self.plat_name,
+ self.target_version)
+ else:
+ base_name = "%s.%s.msi" % (fullname, self.plat_name)
+ installer_name = os.path.join(self.dist_dir, base_name)
+ return installer_name
diff --git a/setuptools/_distutils/command/bdist_rpm.py b/setuptools/_distutils/command/bdist_rpm.py
new file mode 100644
index 0000000..550cbfa
--- /dev/null
+++ b/setuptools/_distutils/command/bdist_rpm.py
@@ -0,0 +1,579 @@
+"""distutils.command.bdist_rpm
+
+Implements the Distutils 'bdist_rpm' command (create RPM source and binary
+distributions)."""
+
+import subprocess, sys, os
+from distutils.core import Command
+from distutils.debug import DEBUG
+from distutils.file_util import write_file
+from distutils.errors import *
+from distutils.sysconfig import get_python_version
+from distutils import log
+
+class bdist_rpm(Command):
+
+ description = "create an RPM distribution"
+
+ user_options = [
+ ('bdist-base=', None,
+ "base directory for creating built distributions"),
+ ('rpm-base=', None,
+ "base directory for creating RPMs (defaults to \"rpm\" under "
+ "--bdist-base; must be specified for RPM 2)"),
+ ('dist-dir=', 'd',
+ "directory to put final RPM files in "
+ "(and .spec files if --spec-only)"),
+ ('python=', None,
+ "path to Python interpreter to hard-code in the .spec file "
+ "(default: \"python\")"),
+ ('fix-python', None,
+ "hard-code the exact path to the current Python interpreter in "
+ "the .spec file"),
+ ('spec-only', None,
+ "only regenerate spec file"),
+ ('source-only', None,
+ "only generate source RPM"),
+ ('binary-only', None,
+ "only generate binary RPM"),
+ ('use-bzip2', None,
+ "use bzip2 instead of gzip to create source distribution"),
+
+ # More meta-data: too RPM-specific to put in the setup script,
+ # but needs to go in the .spec file -- so we make these options
+ # to "bdist_rpm". The idea is that packagers would put this
+ # info in setup.cfg, although they are of course free to
+ # supply it on the command line.
+ ('distribution-name=', None,
+ "name of the (Linux) distribution to which this "
+ "RPM applies (*not* the name of the module distribution!)"),
+ ('group=', None,
+ "package classification [default: \"Development/Libraries\"]"),
+ ('release=', None,
+ "RPM release number"),
+ ('serial=', None,
+ "RPM serial number"),
+ ('vendor=', None,
+ "RPM \"vendor\" (eg. \"Joe Blow <joe@example.com>\") "
+ "[default: maintainer or author from setup script]"),
+ ('packager=', None,
+ "RPM packager (eg. \"Jane Doe <jane@example.net>\") "
+ "[default: vendor]"),
+ ('doc-files=', None,
+ "list of documentation files (space or comma-separated)"),
+ ('changelog=', None,
+ "RPM changelog"),
+ ('icon=', None,
+ "name of icon file"),
+ ('provides=', None,
+ "capabilities provided by this package"),
+ ('requires=', None,
+ "capabilities required by this package"),
+ ('conflicts=', None,
+ "capabilities which conflict with this package"),
+ ('build-requires=', None,
+ "capabilities required to build this package"),
+ ('obsoletes=', None,
+ "capabilities made obsolete by this package"),
+ ('no-autoreq', None,
+ "do not automatically calculate dependencies"),
+
+ # Actions to take when building RPM
+ ('keep-temp', 'k',
+ "don't clean up RPM build directory"),
+ ('no-keep-temp', None,
+ "clean up RPM build directory [default]"),
+ ('use-rpm-opt-flags', None,
+ "compile with RPM_OPT_FLAGS when building from source RPM"),
+ ('no-rpm-opt-flags', None,
+ "do not pass any RPM CFLAGS to compiler"),
+ ('rpm3-mode', None,
+ "RPM 3 compatibility mode (default)"),
+ ('rpm2-mode', None,
+ "RPM 2 compatibility mode"),
+
+ # Add the hooks necessary for specifying custom scripts
+ ('prep-script=', None,
+ "Specify a script for the PREP phase of RPM building"),
+ ('build-script=', None,
+ "Specify a script for the BUILD phase of RPM building"),
+
+ ('pre-install=', None,
+ "Specify a script for the pre-INSTALL phase of RPM building"),
+ ('install-script=', None,
+ "Specify a script for the INSTALL phase of RPM building"),
+ ('post-install=', None,
+ "Specify a script for the post-INSTALL phase of RPM building"),
+
+ ('pre-uninstall=', None,
+ "Specify a script for the pre-UNINSTALL phase of RPM building"),
+ ('post-uninstall=', None,
+ "Specify a script for the post-UNINSTALL phase of RPM building"),
+
+ ('clean-script=', None,
+ "Specify a script for the CLEAN phase of RPM building"),
+
+ ('verify-script=', None,
+ "Specify a script for the VERIFY phase of the RPM build"),
+
+ # Allow a packager to explicitly force an architecture
+ ('force-arch=', None,
+ "Force an architecture onto the RPM build process"),
+
+ ('quiet', 'q',
+ "Run the INSTALL phase of RPM building in quiet mode"),
+ ]
+
+ boolean_options = ['keep-temp', 'use-rpm-opt-flags', 'rpm3-mode',
+ 'no-autoreq', 'quiet']
+
+ negative_opt = {'no-keep-temp': 'keep-temp',
+ 'no-rpm-opt-flags': 'use-rpm-opt-flags',
+ 'rpm2-mode': 'rpm3-mode'}
+
+
+ def initialize_options(self):
+ self.bdist_base = None
+ self.rpm_base = None
+ self.dist_dir = None
+ self.python = None
+ self.fix_python = None
+ self.spec_only = None
+ self.binary_only = None
+ self.source_only = None
+ self.use_bzip2 = None
+
+ self.distribution_name = None
+ self.group = None
+ self.release = None
+ self.serial = None
+ self.vendor = None
+ self.packager = None
+ self.doc_files = None
+ self.changelog = None
+ self.icon = None
+
+ self.prep_script = None
+ self.build_script = None
+ self.install_script = None
+ self.clean_script = None
+ self.verify_script = None
+ self.pre_install = None
+ self.post_install = None
+ self.pre_uninstall = None
+ self.post_uninstall = None
+ self.prep = None
+ self.provides = None
+ self.requires = None
+ self.conflicts = None
+ self.build_requires = None
+ self.obsoletes = None
+
+ self.keep_temp = 0
+ self.use_rpm_opt_flags = 1
+ self.rpm3_mode = 1
+ self.no_autoreq = 0
+
+ self.force_arch = None
+ self.quiet = 0
+
+ def finalize_options(self):
+ self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))
+ if self.rpm_base is None:
+ if not self.rpm3_mode:
+ raise DistutilsOptionError(
+ "you must specify --rpm-base in RPM 2 mode")
+ self.rpm_base = os.path.join(self.bdist_base, "rpm")
+
+ if self.python is None:
+ if self.fix_python:
+ self.python = sys.executable
+ else:
+ self.python = "python3"
+ elif self.fix_python:
+ raise DistutilsOptionError(
+ "--python and --fix-python are mutually exclusive options")
+
+ if os.name != 'posix':
+ raise DistutilsPlatformError("don't know how to create RPM "
+ "distributions on platform %s" % os.name)
+ if self.binary_only and self.source_only:
+ raise DistutilsOptionError(
+ "cannot supply both '--source-only' and '--binary-only'")
+
+ # don't pass CFLAGS to pure python distributions
+ if not self.distribution.has_ext_modules():
+ self.use_rpm_opt_flags = 0
+
+ self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
+ self.finalize_package_data()
+
+ def finalize_package_data(self):
+ self.ensure_string('group', "Development/Libraries")
+ self.ensure_string('vendor',
+ "%s <%s>" % (self.distribution.get_contact(),
+ self.distribution.get_contact_email()))
+ self.ensure_string('packager')
+ self.ensure_string_list('doc_files')
+ if isinstance(self.doc_files, list):
+ for readme in ('README', 'README.txt'):
+ if os.path.exists(readme) and readme not in self.doc_files:
+ self.doc_files.append(readme)
+
+ self.ensure_string('release', "1")
+ self.ensure_string('serial') # should it be an int?
+
+ self.ensure_string('distribution_name')
+
+ self.ensure_string('changelog')
+ # Format changelog correctly
+ self.changelog = self._format_changelog(self.changelog)
+
+ self.ensure_filename('icon')
+
+ self.ensure_filename('prep_script')
+ self.ensure_filename('build_script')
+ self.ensure_filename('install_script')
+ self.ensure_filename('clean_script')
+ self.ensure_filename('verify_script')
+ self.ensure_filename('pre_install')
+ self.ensure_filename('post_install')
+ self.ensure_filename('pre_uninstall')
+ self.ensure_filename('post_uninstall')
+
+ # XXX don't forget we punted on summaries and descriptions -- they
+ # should be handled here eventually!
+
+ # Now *this* is some meta-data that belongs in the setup script...
+ self.ensure_string_list('provides')
+ self.ensure_string_list('requires')
+ self.ensure_string_list('conflicts')
+ self.ensure_string_list('build_requires')
+ self.ensure_string_list('obsoletes')
+
+ self.ensure_string('force_arch')
+
+ def run(self):
+ if DEBUG:
+ print("before _get_package_data():")
+ print("vendor =", self.vendor)
+ print("packager =", self.packager)
+ print("doc_files =", self.doc_files)
+ print("changelog =", self.changelog)
+
+ # make directories
+ if self.spec_only:
+ spec_dir = self.dist_dir
+ self.mkpath(spec_dir)
+ else:
+ rpm_dir = {}
+ for d in ('SOURCES', 'SPECS', 'BUILD', 'RPMS', 'SRPMS'):
+ rpm_dir[d] = os.path.join(self.rpm_base, d)
+ self.mkpath(rpm_dir[d])
+ spec_dir = rpm_dir['SPECS']
+
+ # Spec file goes into 'dist_dir' if '--spec-only' is specified,
+ # build/rpm.<plat> otherwise.
+ spec_path = os.path.join(spec_dir,
+ "%s.spec" % self.distribution.get_name())
+ self.execute(write_file,
+ (spec_path,
+ self._make_spec_file()),
+ "writing '%s'" % spec_path)
+
+ if self.spec_only: # stop if requested
+ return
+
+ # Make a source distribution and copy to SOURCES directory with
+ # optional icon.
+ saved_dist_files = self.distribution.dist_files[:]
+ sdist = self.reinitialize_command('sdist')
+ if self.use_bzip2:
+ sdist.formats = ['bztar']
+ else:
+ sdist.formats = ['gztar']
+ self.run_command('sdist')
+ self.distribution.dist_files = saved_dist_files
+
+ source = sdist.get_archive_files()[0]
+ source_dir = rpm_dir['SOURCES']
+ self.copy_file(source, source_dir)
+
+ if self.icon:
+ if os.path.exists(self.icon):
+ self.copy_file(self.icon, source_dir)
+ else:
+ raise DistutilsFileError(
+ "icon file '%s' does not exist" % self.icon)
+
+ # build package
+ log.info("building RPMs")
+ rpm_cmd = ['rpmbuild']
+
+ if self.source_only: # what kind of RPMs?
+ rpm_cmd.append('-bs')
+ elif self.binary_only:
+ rpm_cmd.append('-bb')
+ else:
+ rpm_cmd.append('-ba')
+ rpm_cmd.extend(['--define', '__python %s' % self.python])
+ if self.rpm3_mode:
+ rpm_cmd.extend(['--define',
+ '_topdir %s' % os.path.abspath(self.rpm_base)])
+ if not self.keep_temp:
+ rpm_cmd.append('--clean')
+
+ if self.quiet:
+ rpm_cmd.append('--quiet')
+
+ rpm_cmd.append(spec_path)
+ # Determine the binary rpm names that should be built out of this spec
+ # file
+ # Note that some of these may not be really built (if the file
+ # list is empty)
+ nvr_string = "%{name}-%{version}-%{release}"
+ src_rpm = nvr_string + ".src.rpm"
+ non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm"
+ q_cmd = r"rpm -q --qf '%s %s\n' --specfile '%s'" % (
+ src_rpm, non_src_rpm, spec_path)
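+ # Each line of the query output is expected to look roughly like
+ # (names and version illustrative):
+ #   foo-1.0-1.src.rpm x86_64/foo-1.0-1.x86_64.rpm
+ # i.e. "<source rpm> <binary rpm>", one pair per sub-package.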
+
+ out = os.popen(q_cmd)
+ try:
+ binary_rpms = []
+ source_rpm = None
+ while True:
+ line = out.readline()
+ if not line:
+ break
+ l = line.strip().split()
+ assert(len(l) == 2)
+ binary_rpms.append(l[1])
+ # The source rpm is named after the first entry in the spec file
+ if source_rpm is None:
+ source_rpm = l[0]
+
+ status = out.close()
+ if status:
+ raise DistutilsExecError("Failed to execute: %s" % repr(q_cmd))
+
+ finally:
+ out.close()
+
+ self.spawn(rpm_cmd)
+
+ if not self.dry_run:
+ if self.distribution.has_ext_modules():
+ pyversion = get_python_version()
+ else:
+ pyversion = 'any'
+
+ if not self.binary_only:
+ srpm = os.path.join(rpm_dir['SRPMS'], source_rpm)
+ assert(os.path.exists(srpm))
+ self.move_file(srpm, self.dist_dir)
+ filename = os.path.join(self.dist_dir, source_rpm)
+ self.distribution.dist_files.append(
+ ('bdist_rpm', pyversion, filename))
+
+ if not self.source_only:
+ for rpm in binary_rpms:
+ rpm = os.path.join(rpm_dir['RPMS'], rpm)
+ if os.path.exists(rpm):
+ self.move_file(rpm, self.dist_dir)
+ filename = os.path.join(self.dist_dir,
+ os.path.basename(rpm))
+ self.distribution.dist_files.append(
+ ('bdist_rpm', pyversion, filename))
+
+ def _dist_path(self, path):
+ return os.path.join(self.dist_dir, os.path.basename(path))
+
+ def _make_spec_file(self):
+ """Generate the text of an RPM spec file and return it as a
+ list of strings (one per line).
+ """
+ # definitions and headers
+ spec_file = [
+ '%define name ' + self.distribution.get_name(),
+ '%define version ' + self.distribution.get_version().replace('-','_'),
+ '%define unmangled_version ' + self.distribution.get_version(),
+ '%define release ' + self.release.replace('-','_'),
+ '',
+ 'Summary: ' + self.distribution.get_description(),
+ ]
+
+ # Workaround for #14443, which affects some RPM-based systems such as
+ # RHEL6 (and probably derivatives)
+ vendor_hook = subprocess.getoutput('rpm --eval %{__os_install_post}')
+ # Generate a potential replacement value for __os_install_post (whilst
+ # normalizing the whitespace to simplify the test for whether the
+ # invocation of brp-python-bytecompile passes in __python):
+ vendor_hook = '\n'.join([' %s \\' % line.strip()
+ for line in vendor_hook.splitlines()])
+ problem = "brp-python-bytecompile \\\n"
+ fixed = "brp-python-bytecompile %{__python} \\\n"
+ fixed_hook = vendor_hook.replace(problem, fixed)
+ if fixed_hook != vendor_hook:
+ spec_file.append('# Workaround for http://bugs.python.org/issue14443')
+ spec_file.append('%define __os_install_post ' + fixed_hook + '\n')
+
+ # put locale summaries into spec file
+ # XXX not supported for now (hard to put a dictionary
+ # in a config file -- arg!)
+ #for locale in self.summaries.keys():
+ # spec_file.append('Summary(%s): %s' % (locale,
+ # self.summaries[locale]))
+
+ spec_file.extend([
+ 'Name: %{name}',
+ 'Version: %{version}',
+ 'Release: %{release}',])
+
+ # XXX yuck! this filename is available from the "sdist" command,
+ # but only after it has run: and we create the spec file before
+ # running "sdist", in case of --spec-only.
+ if self.use_bzip2:
+ spec_file.append('Source0: %{name}-%{unmangled_version}.tar.bz2')
+ else:
+ spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz')
+
+ spec_file.extend([
+ 'License: ' + self.distribution.get_license(),
+ 'Group: ' + self.group,
+ 'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot',
+ 'Prefix: %{_prefix}', ])
+
+ if not self.force_arch:
+ # noarch if no extension modules
+ if not self.distribution.has_ext_modules():
+ spec_file.append('BuildArch: noarch')
+ else:
+ spec_file.append( 'BuildArch: %s' % self.force_arch )
+
+ for field in ('Vendor',
+ 'Packager',
+ 'Provides',
+ 'Requires',
+ 'Conflicts',
+ 'Obsoletes',
+ ):
+ val = getattr(self, field.lower())
+ if isinstance(val, list):
+ spec_file.append('%s: %s' % (field, ' '.join(val)))
+ elif val is not None:
+ spec_file.append('%s: %s' % (field, val))
+
+
+ if self.distribution.get_url() != 'UNKNOWN':
+ spec_file.append('Url: ' + self.distribution.get_url())
+
+ if self.distribution_name:
+ spec_file.append('Distribution: ' + self.distribution_name)
+
+ if self.build_requires:
+ spec_file.append('BuildRequires: ' +
+ ' '.join(self.build_requires))
+
+ if self.icon:
+ spec_file.append('Icon: ' + os.path.basename(self.icon))
+
+ if self.no_autoreq:
+ spec_file.append('AutoReq: 0')
+
+ spec_file.extend([
+ '',
+ '%description',
+ self.distribution.get_long_description()
+ ])
+
+ # put locale descriptions into spec file
+ # XXX again, suppressed because config file syntax doesn't
+ # easily support this ;-(
+ #for locale in self.descriptions.keys():
+ # spec_file.extend([
+ # '',
+ # '%description -l ' + locale,
+ # self.descriptions[locale],
+ # ])
+
+ # rpm scripts
+ # figure out default build script
+ def_setup_call = "%s %s" % (self.python,os.path.basename(sys.argv[0]))
+ def_build = "%s build" % def_setup_call
+ if self.use_rpm_opt_flags:
+ def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build
+
+ # insert contents of files
+
+ # XXX this is kind of misleading: user-supplied options are files
+ # that we open and interpolate into the spec file, but the defaults
+ # are just text that we drop in as-is. Hmmm.
+
+ install_cmd = ('%s install -O1 --root=$RPM_BUILD_ROOT '
+ '--record=INSTALLED_FILES') % def_setup_call
+
+ script_options = [
+ ('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"),
+ ('build', 'build_script', def_build),
+ ('install', 'install_script', install_cmd),
+ ('clean', 'clean_script', "rm -rf $RPM_BUILD_ROOT"),
+ ('verifyscript', 'verify_script', None),
+ ('pre', 'pre_install', None),
+ ('post', 'post_install', None),
+ ('preun', 'pre_uninstall', None),
+ ('postun', 'post_uninstall', None),
+ ]
+
+ for (rpm_opt, attr, default) in script_options:
+ # Insert contents of file referred to, if no file is referred to
+ # use 'default' as contents of script
+ val = getattr(self, attr)
+ if val or default:
+ spec_file.extend([
+ '',
+ '%' + rpm_opt,])
+ if val:
+ with open(val) as f:
+ spec_file.extend(f.read().split('\n'))
+ else:
+ spec_file.append(default)
+
+
+ # files section
+ spec_file.extend([
+ '',
+ '%files -f INSTALLED_FILES',
+ '%defattr(-,root,root)',
+ ])
+
+ if self.doc_files:
+ spec_file.append('%doc ' + ' '.join(self.doc_files))
+
+ if self.changelog:
+ spec_file.extend([
+ '',
+ '%changelog',])
+ spec_file.extend(self.changelog)
+
+ return spec_file
+
+ def _format_changelog(self, changelog):
+ """Format the changelog correctly and convert it to a list of strings
+ """
+ if not changelog:
+ return changelog
+ new_changelog = []
+ for line in changelog.strip().split('\n'):
+ line = line.strip()
+ if line[0] == '*':
+ new_changelog.extend(['', line])
+ elif line[0] == '-':
+ new_changelog.append(line)
+ else:
+ new_changelog.append(' ' + line)
+
+ # strip trailing newline inserted by first changelog entry
+ if not new_changelog[0]:
+ del new_changelog[0]
+
+ return new_changelog
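
(A minimal standalone sketch of the changelog normalization implemented by
_format_changelog above; the sample entry is hypothetical, not taken from this patch.)

    def format_changelog(changelog):
        new = []
        for line in changelog.strip().split('\n'):
            line = line.strip()
            if line.startswith('*'):
                new.extend(['', line])      # blank line before each entry header
            elif line.startswith('-'):
                new.append(line)            # bullet lines are kept as-is
            else:
                new.append('  ' + line)     # continuation lines are indented
        if new and not new[0]:
            del new[0]                      # drop the leading blank inserted above
        return new

    print(format_changelog("* Mon Jan 1 2024 Jane Doe\n- first change\n  more detail"))
    # ['* Mon Jan 1 2024 Jane Doe', '- first change', '  more detail']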
diff --git a/setuptools/_distutils/command/bdist_wininst.py b/setuptools/_distutils/command/bdist_wininst.py
new file mode 100644
index 0000000..0e9ddaa
--- /dev/null
+++ b/setuptools/_distutils/command/bdist_wininst.py
@@ -0,0 +1,377 @@
+"""distutils.command.bdist_wininst
+
+Implements the Distutils 'bdist_wininst' command: create a windows installer
+exe-program."""
+
+import os
+import sys
+import warnings
+from distutils.core import Command
+from distutils.util import get_platform
+from distutils.dir_util import remove_tree
+from distutils.errors import *
+from distutils.sysconfig import get_python_version
+from distutils import log
+
+class bdist_wininst(Command):
+
+ description = "create an executable installer for MS Windows"
+
+ user_options = [('bdist-dir=', None,
+ "temporary directory for creating the distribution"),
+ ('plat-name=', 'p',
+ "platform name to embed in generated filenames "
+ "(default: %s)" % get_platform()),
+ ('keep-temp', 'k',
+ "keep the pseudo-installation tree around after " +
+ "creating the distribution archive"),
+ ('target-version=', None,
+ "require a specific python version" +
+ " on the target system"),
+ ('no-target-compile', 'c',
+ "do not compile .py to .pyc on the target system"),
+ ('no-target-optimize', 'o',
+ "do not compile .py to .pyo (optimized) "
+ "on the target system"),
+ ('dist-dir=', 'd',
+ "directory to put final built distributions in"),
+ ('bitmap=', 'b',
+ "bitmap to use for the installer instead of python-powered logo"),
+ ('title=', 't',
+ "title to display on the installer background instead of default"),
+ ('skip-build', None,
+ "skip rebuilding everything (for testing/debugging)"),
+ ('install-script=', None,
+ "basename of installation script to be run after "
+ "installation or before deinstallation"),
+ ('pre-install-script=', None,
+ "Fully qualified filename of a script to be run before "
+ "any files are installed. This script need not be in the "
+ "distribution"),
+ ('user-access-control=', None,
+ "specify Vista's UAC handling - 'none'/default=no "
+ "handling, 'auto'=use UAC if target Python installed for "
+ "all users, 'force'=always use UAC"),
+ ]
+
+ boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
+ 'skip-build']
+
+ # bpo-10945: bdist_wininst requires mbcs encoding only available on Windows
+ _unsupported = (sys.platform != "win32")
+
+ def __init__(self, *args, **kw):
+ super().__init__(*args, **kw)
+ warnings.warn("bdist_wininst command is deprecated since Python 3.8, "
+ "use bdist_wheel (wheel packages) instead",
+ DeprecationWarning, 2)
+
+ def initialize_options(self):
+ self.bdist_dir = None
+ self.plat_name = None
+ self.keep_temp = 0
+ self.no_target_compile = 0
+ self.no_target_optimize = 0
+ self.target_version = None
+ self.dist_dir = None
+ self.bitmap = None
+ self.title = None
+ self.skip_build = None
+ self.install_script = None
+ self.pre_install_script = None
+ self.user_access_control = None
+
+
+ def finalize_options(self):
+ self.set_undefined_options('bdist', ('skip_build', 'skip_build'))
+
+ if self.bdist_dir is None:
+ if self.skip_build and self.plat_name:
+ # If build is skipped and plat_name is overridden, bdist will
+ # not see the correct 'plat_name' - so set that up manually.
+ bdist = self.distribution.get_command_obj('bdist')
+ bdist.plat_name = self.plat_name
+ # next the command will be initialized using that name
+ bdist_base = self.get_finalized_command('bdist').bdist_base
+ self.bdist_dir = os.path.join(bdist_base, 'wininst')
+
+ if not self.target_version:
+ self.target_version = ""
+
+ if not self.skip_build and self.distribution.has_ext_modules():
+ short_version = get_python_version()
+ if self.target_version and self.target_version != short_version:
+ raise DistutilsOptionError(
+ "target version can only be %s, or the '--skip-build'" \
+ " option must be specified" % (short_version,))
+ self.target_version = short_version
+
+ self.set_undefined_options('bdist',
+ ('dist_dir', 'dist_dir'),
+ ('plat_name', 'plat_name'),
+ )
+
+ if self.install_script:
+ for script in self.distribution.scripts:
+ if self.install_script == os.path.basename(script):
+ break
+ else:
+ raise DistutilsOptionError(
+ "install_script '%s' not found in scripts"
+ % self.install_script)
+
+ def run(self):
+ if (sys.platform != "win32" and
+ (self.distribution.has_ext_modules() or
+ self.distribution.has_c_libraries())):
+ raise DistutilsPlatformError \
+ ("distribution contains extensions and/or C libraries; "
+ "must be compiled on a Windows 32 platform")
+
+ if not self.skip_build:
+ self.run_command('build')
+
+ install = self.reinitialize_command('install', reinit_subcommands=1)
+ install.root = self.bdist_dir
+ install.skip_build = self.skip_build
+ install.warn_dir = 0
+ install.plat_name = self.plat_name
+
+ install_lib = self.reinitialize_command('install_lib')
+ # we do not want to include pyc or pyo files
+ install_lib.compile = 0
+ install_lib.optimize = 0
+
+ if self.distribution.has_ext_modules():
+ # If we are building an installer for a Python version other
+ # than the one we are currently running, then we need to ensure
+ # our build_lib reflects the other Python version rather than ours.
+ # Note that for target_version!=sys.version, we must have skipped the
+ # build step, so there is no issue with enforcing the build of this
+ # version.
+ target_version = self.target_version
+ if not target_version:
+ assert self.skip_build, "Should have already checked this"
+ target_version = '%d.%d' % sys.version_info[:2]
+ plat_specifier = ".%s-%s" % (self.plat_name, target_version)
+ build = self.get_finalized_command('build')
+ build.build_lib = os.path.join(build.build_base,
+ 'lib' + plat_specifier)
+
+ # Use a custom scheme for the zip-file, because we have to decide
+ # at installation time which scheme to use.
+ for key in ('purelib', 'platlib', 'headers', 'scripts', 'data'):
+ value = key.upper()
+ if key == 'headers':
+ value = value + '/Include/$dist_name'
+ setattr(install,
+ 'install_' + key,
+ value)
+
+ log.info("installing to %s", self.bdist_dir)
+ install.ensure_finalized()
+
+ # avoid warning of 'install_lib' about installing
+ # into a directory not in sys.path
+ sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
+
+ install.run()
+
+ del sys.path[0]
+
+ # And make an archive relative to the root of the
+ # pseudo-installation tree.
+ from tempfile import mktemp
+ archive_basename = mktemp()
+ fullname = self.distribution.get_fullname()
+ arcname = self.make_archive(archive_basename, "zip",
+ root_dir=self.bdist_dir)
+ # create an exe containing the zip-file
+ self.create_exe(arcname, fullname, self.bitmap)
+ if self.distribution.has_ext_modules():
+ pyversion = get_python_version()
+ else:
+ pyversion = 'any'
+ self.distribution.dist_files.append(('bdist_wininst', pyversion,
+ self.get_installer_filename(fullname)))
+ # remove the zip-file again
+ log.debug("removing temporary file '%s'", arcname)
+ os.remove(arcname)
+
+ if not self.keep_temp:
+ remove_tree(self.bdist_dir, dry_run=self.dry_run)
+
+ def get_inidata(self):
+ # Return data describing the installation.
+ lines = []
+ metadata = self.distribution.metadata
+
+ # Write the [metadata] section.
+ lines.append("[metadata]")
+
+ # 'info' will be displayed in the installer's dialog box,
+ # describing the items to be installed.
+ info = (metadata.long_description or '') + '\n'
+
+ # Escape newline characters
+ def escape(s):
+ return s.replace("\n", "\\n")
+
+ for name in ["author", "author_email", "description", "maintainer",
+ "maintainer_email", "name", "url", "version"]:
+ data = getattr(metadata, name, "")
+ if data:
+ info = info + ("\n %s: %s" % \
+ (name.capitalize(), escape(data)))
+ lines.append("%s=%s" % (name, escape(data)))
+
+ # The [setup] section contains entries controlling
+ # the installer runtime.
+ lines.append("\n[Setup]")
+ if self.install_script:
+ lines.append("install_script=%s" % self.install_script)
+ lines.append("info=%s" % escape(info))
+ lines.append("target_compile=%d" % (not self.no_target_compile))
+ lines.append("target_optimize=%d" % (not self.no_target_optimize))
+ if self.target_version:
+ lines.append("target_version=%s" % self.target_version)
+ if self.user_access_control:
+ lines.append("user_access_control=%s" % self.user_access_control)
+
+ title = self.title or self.distribution.get_fullname()
+ lines.append("title=%s" % escape(title))
+ import time
+ import distutils
+ build_info = "Built %s with distutils-%s" % \
+ (time.ctime(time.time()), distutils.__version__)
+ lines.append("build_info=%s" % build_info)
+ return "\n".join(lines)
+
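(A rough standalone sketch of the newline escaping and INI-style layout that
get_inidata above produces; the metadata values here are hypothetical.)

    def escape(s):
        return s.replace("\n", "\\n")

    lines = ["[metadata]",
             "name=%s" % escape("demo"),
             "description=%s" % escape("A demo\npackage"),
             "\n[Setup]",
             "title=%s" % escape("demo-1.0")]
    print("\n".join(lines))
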
+ def create_exe(self, arcname, fullname, bitmap=None):
+ import struct
+
+ self.mkpath(self.dist_dir)
+
+ cfgdata = self.get_inidata()
+
+ installer_name = self.get_installer_filename(fullname)
+ self.announce("creating %s" % installer_name)
+
+ if bitmap:
+ with open(bitmap, "rb") as f:
+ bitmapdata = f.read()
+ bitmaplen = len(bitmapdata)
+ else:
+ bitmaplen = 0
+
+ with open(installer_name, "wb") as file:
+ file.write(self.get_exe_bytes())
+ if bitmap:
+ file.write(bitmapdata)
+
+ # Convert cfgdata from unicode to ascii, mbcs encoded
+ if isinstance(cfgdata, str):
+ cfgdata = cfgdata.encode("mbcs")
+
+ # Append the pre-install script
+ cfgdata = cfgdata + b"\0"
+ if self.pre_install_script:
+ # We need to normalize newlines, so we open in text mode and
+ # convert back to bytes. "latin-1" simply avoids any possible
+ # failures.
+ with open(self.pre_install_script, "r",
+ encoding="latin-1") as script:
+ script_data = script.read().encode("latin-1")
+ cfgdata = cfgdata + script_data + b"\n\0"
+ else:
+ # empty pre-install script
+ cfgdata = cfgdata + b"\0"
+ file.write(cfgdata)
+
+ # The 'magic number' 0x1234567B is used to make sure that the
+ # binary layout of 'cfgdata' is what the wininst.exe binary
+ # expects. If the layout changes, increment that number, make
+ # the corresponding changes to the wininst.exe sources, and
+ # recompile them.
+ header = struct.pack("<iii",
+ 0x1234567B, # tag
+ len(cfgdata), # length
+ bitmaplen, # number of bytes in bitmap
+ )
+ file.write(header)
+ with open(arcname, "rb") as f:
+ file.write(f.read())
+
+ def get_installer_filename(self, fullname):
+ # Factored out to allow overriding in subclasses
+ if self.target_version:
+ # if we create an installer for a specific python version,
+ # it's better to include this in the name
+ installer_name = os.path.join(self.dist_dir,
+ "%s.%s-py%s.exe" %
+ (fullname, self.plat_name, self.target_version))
+ else:
+ installer_name = os.path.join(self.dist_dir,
+ "%s.%s.exe" % (fullname, self.plat_name))
+ return installer_name
+
+ def get_exe_bytes(self):
+ # If a target-version other than the current version has been
+ # specified, then using the MSVC version from *this* build is no good.
+ # Without actually finding and executing the target version and parsing
+ # its sys.version, we just hard-code our knowledge of old versions.
+ # NOTE: Possible alternative is to allow "--target-version" to
+ # specify a Python executable rather than a simple version string.
+ # We can then execute this program to obtain any info we need, such
+ # as the real sys.version string for the build.
+ cur_version = get_python_version()
+
+ # If the target version is *later* than us, then we assume they
+ # use what we use
+ # string compares seem wrong, but are what sysconfig.py itself uses
+ if self.target_version and self.target_version < cur_version:
+ if self.target_version < "2.4":
+ bv = '6.0'
+ elif self.target_version == "2.4":
+ bv = '7.1'
+ elif self.target_version == "2.5":
+ bv = '8.0'
+ elif self.target_version <= "3.2":
+ bv = '9.0'
+ elif self.target_version <= "3.4":
+ bv = '10.0'
+ else:
+ bv = '14.0'
+ else:
+ # for current version - use authoritative check.
+ try:
+ from msvcrt import CRT_ASSEMBLY_VERSION
+ except ImportError:
+ # cross-building, so assume the latest version
+ bv = '14.0'
+ else:
+ # as far as we know, CRT is binary compatible based on
+ # the first field, so assume 'x.0' until proven otherwise
+ major = CRT_ASSEMBLY_VERSION.partition('.')[0]
+ bv = major + '.0'
+
+
+ # wininst-x.y.exe is in the same directory as this file
+ directory = os.path.dirname(__file__)
+ # we must use a wininst-x.y.exe built with the same C compiler
+ # used for python. XXX What about mingw, borland, and so on?
+
+ # if plat_name starts with "win" but is not "win32"
+ # we want to strip "win" and leave the rest (e.g. -amd64)
+ # for all other cases, we don't want any suffix
+ if self.plat_name != 'win32' and self.plat_name[:3] == 'win':
+ sfix = self.plat_name[3:]
+ else:
+ sfix = ''
+
+ filename = os.path.join(directory, "wininst-%s%s.exe" % (bv, sfix))
+ with open(filename, "rb") as f:
+ return f.read()
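
(A small standalone sketch of the installer naming scheme used by
get_installer_filename above; dist_dir, plat_name and the version are hypothetical.)

    import os

    def installer_filename(dist_dir, fullname, plat_name, target_version=None):
        if target_version:
            return os.path.join(dist_dir,
                                "%s.%s-py%s.exe" % (fullname, plat_name, target_version))
        return os.path.join(dist_dir, "%s.%s.exe" % (fullname, plat_name))

    print(installer_filename("dist", "demo-1.0", "win-amd64", "3.8"))
    # dist/demo-1.0.win-amd64-py3.8.exe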
diff --git a/setuptools/_distutils/command/build.py b/setuptools/_distutils/command/build.py
new file mode 100644
index 0000000..4355a63
--- /dev/null
+++ b/setuptools/_distutils/command/build.py
@@ -0,0 +1,157 @@
+"""distutils.command.build
+
+Implements the Distutils 'build' command."""
+
+import sys, os
+from distutils.core import Command
+from distutils.errors import DistutilsOptionError
+from distutils.util import get_platform
+
+
+def show_compilers():
+ from distutils.ccompiler import show_compilers
+ show_compilers()
+
+
+class build(Command):
+
+ description = "build everything needed to install"
+
+ user_options = [
+ ('build-base=', 'b',
+ "base directory for build library"),
+ ('build-purelib=', None,
+ "build directory for platform-neutral distributions"),
+ ('build-platlib=', None,
+ "build directory for platform-specific distributions"),
+ ('build-lib=', None,
+ "build directory for all distribution (defaults to either " +
+ "build-purelib or build-platlib"),
+ ('build-scripts=', None,
+ "build directory for scripts"),
+ ('build-temp=', 't',
+ "temporary build directory"),
+ ('plat-name=', 'p',
+ "platform name to build for, if supported "
+ "(default: %s)" % get_platform()),
+ ('compiler=', 'c',
+ "specify the compiler type"),
+ ('parallel=', 'j',
+ "number of parallel build jobs"),
+ ('debug', 'g',
+ "compile extensions and libraries with debugging information"),
+ ('force', 'f',
+ "forcibly build everything (ignore file timestamps)"),
+ ('executable=', 'e',
+ "specify final destination interpreter path (build.py)"),
+ ]
+
+ boolean_options = ['debug', 'force']
+
+ help_options = [
+ ('help-compiler', None,
+ "list available compilers", show_compilers),
+ ]
+
+ def initialize_options(self):
+ self.build_base = 'build'
+ # these are decided only after 'build_base' has its final value
+ # (unless overridden by the user or client)
+ self.build_purelib = None
+ self.build_platlib = None
+ self.build_lib = None
+ self.build_temp = None
+ self.build_scripts = None
+ self.compiler = None
+ self.plat_name = None
+ self.debug = None
+ self.force = 0
+ self.executable = None
+ self.parallel = None
+
+ def finalize_options(self):
+ if self.plat_name is None:
+ self.plat_name = get_platform()
+ else:
+ # plat-name only supported for windows (other platforms are
+ # supported via ./configure flags, if at all). Avoid misleading
+ # other platforms.
+ if os.name != 'nt':
+ raise DistutilsOptionError(
+ "--plat-name only supported on Windows (try "
+ "using './configure --help' on your platform)")
+
+ plat_specifier = ".%s-%d.%d" % (self.plat_name, *sys.version_info[:2])
+
+ # Make it so a release Python and the same Python built with
+ # --with-pydebug don't share the same build directories. Doing so
+ # confuses the build process for C modules.
+ if hasattr(sys, 'gettotalrefcount'):
+ plat_specifier += '-pydebug'
+
+ # 'build_purelib' and 'build_platlib' just default to 'lib' and
+ # 'lib.<plat>' under the base build directory. We only use one of
+ # them for a given distribution, though --
+ if self.build_purelib is None:
+ self.build_purelib = os.path.join(self.build_base, 'lib')
+ if self.build_platlib is None:
+ self.build_platlib = os.path.join(self.build_base,
+ 'lib' + plat_specifier)
+
+ # 'build_lib' is the actual directory that we will use for this
+ # particular module distribution -- if user didn't supply it, pick
+ # one of 'build_purelib' or 'build_platlib'.
+ if self.build_lib is None:
+ if self.distribution.has_ext_modules():
+ self.build_lib = self.build_platlib
+ else:
+ self.build_lib = self.build_purelib
+
+ # 'build_temp' -- temporary directory for compiler turds,
+ # "build/temp.<plat>"
+ if self.build_temp is None:
+ self.build_temp = os.path.join(self.build_base,
+ 'temp' + plat_specifier)
+ if self.build_scripts is None:
+ self.build_scripts = os.path.join(self.build_base,
+ 'scripts-%d.%d' % sys.version_info[:2])
+
+ if self.executable is None and sys.executable:
+ self.executable = os.path.normpath(sys.executable)
+
+ if isinstance(self.parallel, str):
+ try:
+ self.parallel = int(self.parallel)
+ except ValueError:
+ raise DistutilsOptionError("parallel should be an integer")
+
+ def run(self):
+ # Run all relevant sub-commands. This will be some subset of:
+ # - build_py - pure Python modules
+ # - build_clib - standalone C libraries
+ # - build_ext - Python extensions
+ # - build_scripts - (Python) scripts
+ for cmd_name in self.get_sub_commands():
+ self.run_command(cmd_name)
+
+
+ # -- Predicates for the sub-command list ---------------------------
+
+ def has_pure_modules(self):
+ return self.distribution.has_pure_modules()
+
+ def has_c_libraries(self):
+ return self.distribution.has_c_libraries()
+
+ def has_ext_modules(self):
+ return self.distribution.has_ext_modules()
+
+ def has_scripts(self):
+ return self.distribution.has_scripts()
+
+
+ sub_commands = [('build_py', has_pure_modules),
+ ('build_clib', has_c_libraries),
+ ('build_ext', has_ext_modules),
+ ('build_scripts', has_scripts),
+ ]
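
(A standalone sketch of how finalize_options above derives the per-platform build
directories; the platform string is illustrative and depends on the running interpreter.)

    import os
    import sys

    build_base = 'build'
    plat_specifier = ".%s-%d.%d" % ("linux-x86_64", *sys.version_info[:2])
    build_purelib = os.path.join(build_base, 'lib')
    build_platlib = os.path.join(build_base, 'lib' + plat_specifier)
    build_temp = os.path.join(build_base, 'temp' + plat_specifier)
    print(build_purelib, build_platlib, build_temp)
    # e.g. build/lib build/lib.linux-x86_64-3.10 build/temp.linux-x86_64-3.10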
diff --git a/setuptools/_distutils/command/build_clib.py b/setuptools/_distutils/command/build_clib.py
new file mode 100644
index 0000000..3e20ef2
--- /dev/null
+++ b/setuptools/_distutils/command/build_clib.py
@@ -0,0 +1,209 @@
+"""distutils.command.build_clib
+
+Implements the Distutils 'build_clib' command, to build a C/C++ library
+that is included in the module distribution and needed by an extension
+module."""
+
+
+# XXX this module has *lots* of code ripped-off quite transparently from
+# build_ext.py -- not surprisingly really, as the work required to build
+# a static library from a collection of C source files is not really all
+# that different from what's required to build a shared object file from
+# a collection of C source files. Nevertheless, I haven't done the
+# necessary refactoring to account for the overlap in code between the
+# two modules, mainly because a number of subtle details changed in the
+# cut 'n paste. Sigh.
+
+import os
+from distutils.core import Command
+from distutils.errors import *
+from distutils.sysconfig import customize_compiler
+from distutils import log
+
+def show_compilers():
+ from distutils.ccompiler import show_compilers
+ show_compilers()
+
+
+class build_clib(Command):
+
+ description = "build C/C++ libraries used by Python extensions"
+
+ user_options = [
+ ('build-clib=', 'b',
+ "directory to build C/C++ libraries to"),
+ ('build-temp=', 't',
+ "directory to put temporary build by-products"),
+ ('debug', 'g',
+ "compile with debugging information"),
+ ('force', 'f',
+ "forcibly build everything (ignore file timestamps)"),
+ ('compiler=', 'c',
+ "specify the compiler type"),
+ ]
+
+ boolean_options = ['debug', 'force']
+
+ help_options = [
+ ('help-compiler', None,
+ "list available compilers", show_compilers),
+ ]
+
+ def initialize_options(self):
+ self.build_clib = None
+ self.build_temp = None
+
+ # List of libraries to build
+ self.libraries = None
+
+ # Compilation options for all libraries
+ self.include_dirs = None
+ self.define = None
+ self.undef = None
+ self.debug = None
+ self.force = 0
+ self.compiler = None
+
+
+ def finalize_options(self):
+ # This might be confusing: both build-clib and build-temp default
+ # to build-temp as defined by the "build" command. This is because
+ # I think that C libraries are really just temporary build
+ # by-products, at least from the point of view of building Python
+ # extensions -- but I want to keep my options open.
+ self.set_undefined_options('build',
+ ('build_temp', 'build_clib'),
+ ('build_temp', 'build_temp'),
+ ('compiler', 'compiler'),
+ ('debug', 'debug'),
+ ('force', 'force'))
+
+ self.libraries = self.distribution.libraries
+ if self.libraries:
+ self.check_library_list(self.libraries)
+
+ if self.include_dirs is None:
+ self.include_dirs = self.distribution.include_dirs or []
+ if isinstance(self.include_dirs, str):
+ self.include_dirs = self.include_dirs.split(os.pathsep)
+
+ # XXX same as for build_ext -- what about 'self.define' and
+ # 'self.undef' ?
+
+
+ def run(self):
+ if not self.libraries:
+ return
+
+ # Yech -- this is cut 'n pasted from build_ext.py!
+ from distutils.ccompiler import new_compiler
+ self.compiler = new_compiler(compiler=self.compiler,
+ dry_run=self.dry_run,
+ force=self.force)
+ customize_compiler(self.compiler)
+
+ if self.include_dirs is not None:
+ self.compiler.set_include_dirs(self.include_dirs)
+ if self.define is not None:
+ # 'define' option is a list of (name,value) tuples
+ for (name,value) in self.define:
+ self.compiler.define_macro(name, value)
+ if self.undef is not None:
+ for macro in self.undef:
+ self.compiler.undefine_macro(macro)
+
+ self.build_libraries(self.libraries)
+
+
+ def check_library_list(self, libraries):
+ """Ensure that the list of libraries is valid.
+
+ `libraries` is presumably provided as a command option 'libraries'.
+ This method checks that it is a list of 2-tuples, where the tuples
+ are (library_name, build_info_dict).
+
+ Raise DistutilsSetupError if the structure is invalid anywhere;
+ just returns otherwise.
+ """
+ if not isinstance(libraries, list):
+ raise DistutilsSetupError(
+ "'libraries' option must be a list of tuples")
+
+ for lib in libraries:
+ if not isinstance(lib, tuple) or len(lib) != 2:
+ raise DistutilsSetupError(
+ "each element of 'libraries' must be a 2-tuple")
+
+ name, build_info = lib
+
+ if not isinstance(name, str):
+ raise DistutilsSetupError(
+ "first element of each tuple in 'libraries' "
+ "must be a string (the library name)")
+
+ if '/' in name or (os.sep != '/' and os.sep in name):
+ raise DistutilsSetupError("bad library name '%s': "
+ "may not contain directory separators" % lib[0])
+
+ if not isinstance(build_info, dict):
+ raise DistutilsSetupError(
+ "second element of each tuple in 'libraries' "
+ "must be a dictionary (build info)")
+
+
+ def get_library_names(self):
+ # Assume the library list is valid -- 'check_library_list()' is
+ # called from 'finalize_options()', so it should be!
+ if not self.libraries:
+ return None
+
+ lib_names = []
+ for (lib_name, build_info) in self.libraries:
+ lib_names.append(lib_name)
+ return lib_names
+
+
+ def get_source_files(self):
+ self.check_library_list(self.libraries)
+ filenames = []
+ for (lib_name, build_info) in self.libraries:
+ sources = build_info.get('sources')
+ if sources is None or not isinstance(sources, (list, tuple)):
+ raise DistutilsSetupError(
+ "in 'libraries' option (library '%s'), "
+ "'sources' must be present and must be "
+ "a list of source filenames" % lib_name)
+
+ filenames.extend(sources)
+ return filenames
+
+
+ def build_libraries(self, libraries):
+ for (lib_name, build_info) in libraries:
+ sources = build_info.get('sources')
+ if sources is None or not isinstance(sources, (list, tuple)):
+ raise DistutilsSetupError(
+ "in 'libraries' option (library '%s'), "
+ "'sources' must be present and must be "
+ "a list of source filenames" % lib_name)
+ sources = list(sources)
+
+ log.info("building '%s' library", lib_name)
+
+ # First, compile the source code to object files in the library
+ # directory. (This should probably change to putting object
+ # files in a temporary build directory.)
+ macros = build_info.get('macros')
+ include_dirs = build_info.get('include_dirs')
+ objects = self.compiler.compile(sources,
+ output_dir=self.build_temp,
+ macros=macros,
+ include_dirs=include_dirs,
+ debug=self.debug)
+
+ # Now "link" the object files together into a static library.
+ # (On Unix at least, this isn't really linking -- it just
+ # builds an archive. Whatever.)
+ self.compiler.create_static_lib(objects, lib_name,
+ output_dir=self.build_clib,
+ debug=self.debug)
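
(A sketch of the 'libraries' structure that check_library_list and build_libraries
above expect -- a list of (name, build_info) tuples; the names and sources here are
hypothetical.)

    libraries = [
        ("fastmath", {
            "sources": ["src/fastmath.c"],   # required: list of C source files
            "macros": [("NDEBUG", "1")],     # optional preprocessor macros
            "include_dirs": ["include"],     # optional extra include paths
        }),
    ]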
diff --git a/setuptools/_distutils/command/build_ext.py b/setuptools/_distutils/command/build_ext.py
new file mode 100644
index 0000000..181671b
--- /dev/null
+++ b/setuptools/_distutils/command/build_ext.py
@@ -0,0 +1,755 @@
+"""distutils.command.build_ext
+
+Implements the Distutils 'build_ext' command, for building extension
+modules (currently limited to C extensions, should accommodate C++
+extensions ASAP)."""
+
+import contextlib
+import os
+import re
+import sys
+from distutils.core import Command
+from distutils.errors import *
+from distutils.sysconfig import customize_compiler, get_python_version
+from distutils.sysconfig import get_config_h_filename
+from distutils.dep_util import newer_group
+from distutils.extension import Extension
+from distutils.util import get_platform
+from distutils import log
+from . import py37compat
+
+from site import USER_BASE
+
+# An extension name is just a dot-separated list of Python NAMEs (ie.
+# the same as a fully-qualified module name).
+extension_name_re = re.compile \
+ (r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$')
+
+
+def show_compilers ():
+ from distutils.ccompiler import show_compilers
+ show_compilers()
+
+
+class build_ext(Command):
+
+ description = "build C/C++ extensions (compile/link to build directory)"
+
+ # XXX thoughts on how to deal with complex command-line options like
+ # these, i.e. how to make it so fancy_getopt can suck them off the
+ # command line and make it look like setup.py defined the appropriate
+ # lists of tuples of what-have-you.
+ # - each command needs a callback to process its command-line options
+ # - Command.__init__() needs access to its share of the whole
+ # command line (must ultimately come from
+ # Distribution.parse_command_line())
+ # - it then calls the current command class' option-parsing
+ # callback to deal with weird options like -D, which have to
+ # parse the option text and churn out some custom data
+ # structure
+ # - that data structure (in this case, a list of 2-tuples)
+ # will then be present in the command object by the time
+ # we get to finalize_options() (i.e. the constructor
+ # takes care of both command-line and client options
+ # in between initialize_options() and finalize_options())
+
+ sep_by = " (separated by '%s')" % os.pathsep
+ user_options = [
+ ('build-lib=', 'b',
+ "directory for compiled extension modules"),
+ ('build-temp=', 't',
+ "directory for temporary files (build by-products)"),
+ ('plat-name=', 'p',
+ "platform name to cross-compile for, if supported "
+ "(default: %s)" % get_platform()),
+ ('inplace', 'i',
+ "ignore build-lib and put compiled extensions into the source " +
+ "directory alongside your pure Python modules"),
+ ('include-dirs=', 'I',
+ "list of directories to search for header files" + sep_by),
+ ('define=', 'D',
+ "C preprocessor macros to define"),
+ ('undef=', 'U',
+ "C preprocessor macros to undefine"),
+ ('libraries=', 'l',
+ "external C libraries to link with"),
+ ('library-dirs=', 'L',
+ "directories to search for external C libraries" + sep_by),
+ ('rpath=', 'R',
+ "directories to search for shared C libraries at runtime"),
+ ('link-objects=', 'O',
+ "extra explicit link objects to include in the link"),
+ ('debug', 'g',
+ "compile/link with debugging information"),
+ ('force', 'f',
+ "forcibly build everything (ignore file timestamps)"),
+ ('compiler=', 'c',
+ "specify the compiler type"),
+ ('parallel=', 'j',
+ "number of parallel build jobs"),
+ ('swig-cpp', None,
+ "make SWIG create C++ files (default is C)"),
+ ('swig-opts=', None,
+ "list of SWIG command line options"),
+ ('swig=', None,
+ "path to the SWIG executable"),
+ ('user', None,
+ "add user include, library and rpath")
+ ]
+
+ boolean_options = ['inplace', 'debug', 'force', 'swig-cpp', 'user']
+
+ help_options = [
+ ('help-compiler', None,
+ "list available compilers", show_compilers),
+ ]
+
+ def initialize_options(self):
+ self.extensions = None
+ self.build_lib = None
+ self.plat_name = None
+ self.build_temp = None
+ self.inplace = 0
+ self.package = None
+
+ self.include_dirs = None
+ self.define = None
+ self.undef = None
+ self.libraries = None
+ self.library_dirs = None
+ self.rpath = None
+ self.link_objects = None
+ self.debug = None
+ self.force = None
+ self.compiler = None
+ self.swig = None
+ self.swig_cpp = None
+ self.swig_opts = None
+ self.user = None
+ self.parallel = None
+
+ def finalize_options(self):
+ from distutils import sysconfig
+
+ self.set_undefined_options('build',
+ ('build_lib', 'build_lib'),
+ ('build_temp', 'build_temp'),
+ ('compiler', 'compiler'),
+ ('debug', 'debug'),
+ ('force', 'force'),
+ ('parallel', 'parallel'),
+ ('plat_name', 'plat_name'),
+ )
+
+ if self.package is None:
+ self.package = self.distribution.ext_package
+
+ self.extensions = self.distribution.ext_modules
+
+ # Make sure Python's include directories (for Python.h, pyconfig.h,
+ # etc.) are in the include search path.
+ py_include = sysconfig.get_python_inc()
+ plat_py_include = sysconfig.get_python_inc(plat_specific=1)
+ if self.include_dirs is None:
+ self.include_dirs = self.distribution.include_dirs or []
+ if isinstance(self.include_dirs, str):
+ self.include_dirs = self.include_dirs.split(os.pathsep)
+
+ # If in a virtualenv, add its include directory
+ # Issue 16116
+ if sys.exec_prefix != sys.base_exec_prefix:
+ self.include_dirs.append(os.path.join(sys.exec_prefix, 'include'))
+
+ # Put the Python "system" include dir at the end, so that
+ # any local include dirs take precedence.
+ self.include_dirs.extend(py_include.split(os.path.pathsep))
+ if plat_py_include != py_include:
+ self.include_dirs.extend(
+ plat_py_include.split(os.path.pathsep))
+
+ self.ensure_string_list('libraries')
+ self.ensure_string_list('link_objects')
+
+ # Life is easier if we're not forever checking for None, so
+ # simplify these options to empty lists if unset
+ if self.libraries is None:
+ self.libraries = []
+ if self.library_dirs is None:
+ self.library_dirs = []
+ elif isinstance(self.library_dirs, str):
+ self.library_dirs = self.library_dirs.split(os.pathsep)
+
+ if self.rpath is None:
+ self.rpath = []
+ elif isinstance(self.rpath, str):
+ self.rpath = self.rpath.split(os.pathsep)
+
+ # for extensions under windows use different directories
+ # for Release and Debug builds.
+ # also Python's library directory must be appended to library_dirs
+ if os.name == 'nt':
+ # the 'libs' directory is for binary installs - we assume that
+ # must be the *native* platform. But we don't really support
+ # cross-compiling via a binary install anyway, so we let it go.
+ self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
+ if sys.base_exec_prefix != sys.prefix: # Issue 16116
+ self.library_dirs.append(os.path.join(sys.base_exec_prefix, 'libs'))
+ if self.debug:
+ self.build_temp = os.path.join(self.build_temp, "Debug")
+ else:
+ self.build_temp = os.path.join(self.build_temp, "Release")
+
+ # Append the source distribution include and library directories,
+ # this allows distutils on windows to work in the source tree
+ self.include_dirs.append(os.path.dirname(get_config_h_filename()))
+ self.library_dirs.append(sys.base_exec_prefix)
+
+ # Use the .lib files for the correct architecture
+ if self.plat_name == 'win32':
+ suffix = 'win32'
+ else:
+ # win-amd64
+ suffix = self.plat_name[4:]
+ new_lib = os.path.join(sys.exec_prefix, 'PCbuild')
+ if suffix:
+ new_lib = os.path.join(new_lib, suffix)
+ self.library_dirs.append(new_lib)
+
+ # For extensions under Cygwin, Python's library directory must be
+ # appended to library_dirs
+ if sys.platform[:6] == 'cygwin':
+ if not sysconfig.python_build:
+ # building third party extensions
+ self.library_dirs.append(os.path.join(sys.prefix, "lib",
+ "python" + get_python_version(),
+ "config"))
+ else:
+ # building python standard extensions
+ self.library_dirs.append('.')
+
+ # For building extensions with a shared Python library,
+ # Python's library directory must be appended to library_dirs
+ # See Issues: #1600860, #4366
+ if (sysconfig.get_config_var('Py_ENABLE_SHARED')):
+ if not sysconfig.python_build:
+ # building third party extensions
+ self.library_dirs.append(sysconfig.get_config_var('LIBDIR'))
+ else:
+ # building python standard extensions
+ self.library_dirs.append('.')
+
+ # The argument parsing will result in self.define being a string, but
+ # it has to be a list of 2-tuples. All the preprocessor symbols
+ # specified by the 'define' option will be set to '1'. Multiple
+ # symbols can be separated with commas.
+
+ if self.define:
+ defines = self.define.split(',')
+ self.define = [(symbol, '1') for symbol in defines]
+
+ # The option for macros to undefine is also a string from the
+ # option parsing, but has to be a list. Multiple symbols can also
+ # be separated with commas here.
+ if self.undef:
+ self.undef = self.undef.split(',')
+
+ if self.swig_opts is None:
+ self.swig_opts = []
+ else:
+ self.swig_opts = self.swig_opts.split(' ')
+
+ # Finally add the user include and library directories if requested
+ if self.user:
+ user_include = os.path.join(USER_BASE, "include")
+ user_lib = os.path.join(USER_BASE, "lib")
+ if os.path.isdir(user_include):
+ self.include_dirs.append(user_include)
+ if os.path.isdir(user_lib):
+ self.library_dirs.append(user_lib)
+ self.rpath.append(user_lib)
+
+ if isinstance(self.parallel, str):
+ try:
+ self.parallel = int(self.parallel)
+ except ValueError:
+ raise DistutilsOptionError("parallel should be an integer")
+
+ def run(self):
+ from distutils.ccompiler import new_compiler
+
+ # 'self.extensions', as supplied by setup.py, is a list of
+ # Extension instances. See the documentation for Extension (in
+ # distutils.extension) for details.
+ #
+ # For backwards compatibility with Distutils 0.8.2 and earlier, we
+ # also allow the 'extensions' list to be a list of tuples:
+ # (ext_name, build_info)
+ # where build_info is a dictionary containing everything that
+ # Extension instances do except the name, with a few things being
+ # differently named. We convert these 2-tuples to Extension
+ # instances as needed.
+
+ if not self.extensions:
+ return
+
+ # If we were asked to build any C/C++ libraries, make sure that the
+ # directory where we put them is in the library search path for
+ # linking extensions.
+ if self.distribution.has_c_libraries():
+ build_clib = self.get_finalized_command('build_clib')
+ self.libraries.extend(build_clib.get_library_names() or [])
+ self.library_dirs.append(build_clib.build_clib)
+
+ # Setup the CCompiler object that we'll use to do all the
+ # compiling and linking
+ self.compiler = new_compiler(compiler=self.compiler,
+ verbose=self.verbose,
+ dry_run=self.dry_run,
+ force=self.force)
+ customize_compiler(self.compiler)
+ # If we are cross-compiling, init the compiler now (if we are not
+ # cross-compiling, init would not hurt, but people may rely on
+ # late initialization of compiler even if they shouldn't...)
+ if os.name == 'nt' and self.plat_name != get_platform():
+ self.compiler.initialize(self.plat_name)
+
+ # And make sure that any compile/link-related options (which might
+ # come from the command-line or from the setup script) are set in
+ # that CCompiler object -- that way, they automatically apply to
+ # all compiling and linking done here.
+ if self.include_dirs is not None:
+ self.compiler.set_include_dirs(self.include_dirs)
+ if self.define is not None:
+ # 'define' option is a list of (name,value) tuples
+ for (name, value) in self.define:
+ self.compiler.define_macro(name, value)
+ if self.undef is not None:
+ for macro in self.undef:
+ self.compiler.undefine_macro(macro)
+ if self.libraries is not None:
+ self.compiler.set_libraries(self.libraries)
+ if self.library_dirs is not None:
+ self.compiler.set_library_dirs(self.library_dirs)
+ if self.rpath is not None:
+ self.compiler.set_runtime_library_dirs(self.rpath)
+ if self.link_objects is not None:
+ self.compiler.set_link_objects(self.link_objects)
+
+ # Now actually compile and link everything.
+ self.build_extensions()
+
+ def check_extensions_list(self, extensions):
+ """Ensure that the list of extensions (presumably provided as a
+ command option 'extensions') is valid, i.e. it is a list of
+ Extension objects. We also support the old-style list of 2-tuples,
+ where the tuples are (ext_name, build_info), which are converted to
+ Extension instances here.
+
+ Raise DistutilsSetupError if the structure is invalid anywhere;
+ just returns otherwise.
+ """
+ if not isinstance(extensions, list):
+ raise DistutilsSetupError(
+ "'ext_modules' option must be a list of Extension instances")
+
+ for i, ext in enumerate(extensions):
+ if isinstance(ext, Extension):
+ continue # OK! (assume type-checking done
+ # by Extension constructor)
+
+ if not isinstance(ext, tuple) or len(ext) != 2:
+ raise DistutilsSetupError(
+ "each element of 'ext_modules' option must be an "
+ "Extension instance or 2-tuple")
+
+ ext_name, build_info = ext
+
+ log.warn("old-style (ext_name, build_info) tuple found in "
+ "ext_modules for extension '%s' "
+ "-- please convert to Extension instance", ext_name)
+
+ if not (isinstance(ext_name, str) and
+ extension_name_re.match(ext_name)):
+ raise DistutilsSetupError(
+ "first element of each tuple in 'ext_modules' "
+ "must be the extension name (a string)")
+
+ if not isinstance(build_info, dict):
+ raise DistutilsSetupError(
+ "second element of each tuple in 'ext_modules' "
+ "must be a dictionary (build info)")
+
+ # OK, the (ext_name, build_info) tuple is type-safe: convert it
+ # to an Extension instance.
+ ext = Extension(ext_name, build_info['sources'])
+
+ # Easy stuff: one-to-one mapping from dict elements to
+ # instance attributes.
+ for key in ('include_dirs', 'library_dirs', 'libraries',
+ 'extra_objects', 'extra_compile_args',
+ 'extra_link_args'):
+ val = build_info.get(key)
+ if val is not None:
+ setattr(ext, key, val)
+
+ # Medium-easy stuff: same syntax/semantics, different names.
+ ext.runtime_library_dirs = build_info.get('rpath')
+ if 'def_file' in build_info:
+ log.warn("'def_file' element of build info dict "
+ "no longer supported")
+
+ # Non-trivial stuff: 'macros' split into 'define_macros'
+ # and 'undef_macros'.
+ macros = build_info.get('macros')
+ if macros:
+ ext.define_macros = []
+ ext.undef_macros = []
+ for macro in macros:
+ if not (isinstance(macro, tuple) and len(macro) in (1, 2)):
+ raise DistutilsSetupError(
+ "'macros' element of build info dict "
+ "must be 1- or 2-tuple")
+ if len(macro) == 1:
+ ext.undef_macros.append(macro[0])
+ elif len(macro) == 2:
+ ext.define_macros.append(macro)
+
+ extensions[i] = ext
+
+ def get_source_files(self):
+ self.check_extensions_list(self.extensions)
+ filenames = []
+
+ # Wouldn't it be neat if we knew the names of header files too...
+ for ext in self.extensions:
+ filenames.extend(ext.sources)
+ return filenames
+
+ def get_outputs(self):
+ # Sanity check the 'extensions' list -- can't assume this is being
+ # done in the same run as a 'build_extensions()' call (in fact, we
+ # can probably assume that it *isn't*!).
+ self.check_extensions_list(self.extensions)
+
+ # And build the list of output (built) filenames. Note that this
+ # ignores the 'inplace' flag, and assumes everything goes in the
+ # "build" tree.
+ outputs = []
+ for ext in self.extensions:
+ outputs.append(self.get_ext_fullpath(ext.name))
+ return outputs
+
+ def build_extensions(self):
+ # First, sanity-check the 'extensions' list
+ self.check_extensions_list(self.extensions)
+ if self.parallel:
+ self._build_extensions_parallel()
+ else:
+ self._build_extensions_serial()
+
+ def _build_extensions_parallel(self):
+ workers = self.parallel
+ if self.parallel is True:
+ workers = os.cpu_count() # may return None
+ try:
+ from concurrent.futures import ThreadPoolExecutor
+ except ImportError:
+ workers = None
+
+ if workers is None:
+ self._build_extensions_serial()
+ return
+
+ with ThreadPoolExecutor(max_workers=workers) as executor:
+ futures = [executor.submit(self.build_extension, ext)
+ for ext in self.extensions]
+ for ext, fut in zip(self.extensions, futures):
+ with self._filter_build_errors(ext):
+ fut.result()
+
+ def _build_extensions_serial(self):
+ for ext in self.extensions:
+ with self._filter_build_errors(ext):
+ self.build_extension(ext)
+
+ @contextlib.contextmanager
+ def _filter_build_errors(self, ext):
+ try:
+ yield
+ except (CCompilerError, DistutilsError, CompileError) as e:
+ if not ext.optional:
+ raise
+ self.warn('building extension "%s" failed: %s' %
+ (ext.name, e))
+
+ def build_extension(self, ext):
+ sources = ext.sources
+ if sources is None or not isinstance(sources, (list, tuple)):
+ raise DistutilsSetupError(
+ "in 'ext_modules' option (extension '%s'), "
+ "'sources' must be present and must be "
+ "a list of source filenames" % ext.name)
+ # sort to make the resulting .so file build reproducible
+ sources = sorted(sources)
+
+ ext_path = self.get_ext_fullpath(ext.name)
+ depends = sources + ext.depends
+ if not (self.force or newer_group(depends, ext_path, 'newer')):
+ log.debug("skipping '%s' extension (up-to-date)", ext.name)
+ return
+ else:
+ log.info("building '%s' extension", ext.name)
+
+ # First, scan the sources for SWIG definition files (.i), run
+ # SWIG on 'em to create .c files, and modify the sources list
+ # accordingly.
+ sources = self.swig_sources(sources, ext)
+
+ # Next, compile the source code to object files.
+
+ # XXX not honouring 'define_macros' or 'undef_macros' -- the
+ # CCompiler API needs to change to accommodate this, and I
+ # want to do one thing at a time!
+
+ # Two possible sources for extra compiler arguments:
+ # - 'extra_compile_args' in Extension object
+ # - CFLAGS environment variable (not particularly
+ # elegant, but people seem to expect it and I
+ # guess it's useful)
+ # The environment variable should take precedence, and
+ # any sensible compiler will give precedence to later
+ # command line args. Hence we combine them in order:
+ extra_args = ext.extra_compile_args or []
+
+ macros = ext.define_macros[:]
+ for undef in ext.undef_macros:
+ macros.append((undef,))
+
+ objects = self.compiler.compile(sources,
+ output_dir=self.build_temp,
+ macros=macros,
+ include_dirs=ext.include_dirs,
+ debug=self.debug,
+ extra_postargs=extra_args,
+ depends=ext.depends)
+
+ # XXX outdated variable, kept here in case third-party code
+ # needs it.
+ self._built_objects = objects[:]
+
+ # Now link the object files together into a "shared object" --
+ # of course, first we have to figure out all the other things
+ # that go into the mix.
+ if ext.extra_objects:
+ objects.extend(ext.extra_objects)
+ extra_args = ext.extra_link_args or []
+
+ # Detect target language, if not provided
+ language = ext.language or self.compiler.detect_language(sources)
+
+ self.compiler.link_shared_object(
+ objects, ext_path,
+ libraries=self.get_libraries(ext),
+ library_dirs=ext.library_dirs,
+ runtime_library_dirs=ext.runtime_library_dirs,
+ extra_postargs=extra_args,
+ export_symbols=self.get_export_symbols(ext),
+ debug=self.debug,
+ build_temp=self.build_temp,
+ target_lang=language)
+
+ def swig_sources(self, sources, extension):
+ """Walk the list of source files in 'sources', looking for SWIG
+ interface (.i) files. Run SWIG on all that are found, and
+ return a modified 'sources' list with SWIG source files replaced
+ by the generated C (or C++) files.
+ """
+ new_sources = []
+ swig_sources = []
+ swig_targets = {}
+
+ # XXX this drops generated C/C++ files into the source tree, which
+ # is fine for developers who want to distribute the generated
+ # source -- but there should be an option to put SWIG output in
+ # the temp dir.
+
+ if self.swig_cpp:
+ log.warn("--swig-cpp is deprecated - use --swig-opts=-c++")
+
+ if self.swig_cpp or ('-c++' in self.swig_opts) or \
+ ('-c++' in extension.swig_opts):
+ target_ext = '.cpp'
+ else:
+ target_ext = '.c'
+
+ for source in sources:
+ (base, ext) = os.path.splitext(source)
+ if ext == ".i": # SWIG interface file
+ new_sources.append(base + '_wrap' + target_ext)
+ swig_sources.append(source)
+ swig_targets[source] = new_sources[-1]
+ else:
+ new_sources.append(source)
+
+ if not swig_sources:
+ return new_sources
+
+ swig = self.swig or self.find_swig()
+ swig_cmd = [swig, "-python"]
+ swig_cmd.extend(self.swig_opts)
+ if self.swig_cpp:
+ swig_cmd.append("-c++")
+
+ # Do not override commandline arguments
+ if not self.swig_opts:
+ for o in extension.swig_opts:
+ swig_cmd.append(o)
+
+ for source in swig_sources:
+ target = swig_targets[source]
+ log.info("swigging %s to %s", source, target)
+ self.spawn(swig_cmd + ["-o", target, source])
+
+ return new_sources
+
+ def find_swig(self):
+ """Return the name of the SWIG executable. On Unix, this is
+ just "swig" -- it should be in the PATH. Tries a bit harder on
+ Windows.
+ """
+ if os.name == "posix":
+ return "swig"
+ elif os.name == "nt":
+ # Look for SWIG in its standard installation directory on
+ # Windows (or so I presume!). If we find it there, great;
+ # if not, act like Unix and assume it's in the PATH.
+ for vers in ("1.3", "1.2", "1.1"):
+ fn = os.path.join("c:\\swig%s" % vers, "swig.exe")
+ if os.path.isfile(fn):
+ return fn
+ else:
+ return "swig.exe"
+ else:
+ raise DistutilsPlatformError(
+ "I don't know how to find (much less run) SWIG "
+ "on platform '%s'" % os.name)
+
+ # -- Name generators -----------------------------------------------
+ # (extension names, filenames, whatever)
+ def get_ext_fullpath(self, ext_name):
+ """Returns the path of the filename for a given extension.
+
+ The file is located in `build_lib` or directly in the package
+ (inplace option).
+ """
+ fullname = self.get_ext_fullname(ext_name)
+ modpath = fullname.split('.')
+ filename = self.get_ext_filename(modpath[-1])
+
+ if not self.inplace:
+ # no further work needed
+ # returning :
+ # build_dir/package/path/filename
+ filename = os.path.join(*modpath[:-1]+[filename])
+ return os.path.join(self.build_lib, filename)
+
+ # the inplace option requires to find the package directory
+ # using the build_py command for that
+ package = '.'.join(modpath[0:-1])
+ build_py = self.get_finalized_command('build_py')
+ package_dir = os.path.abspath(build_py.get_package_dir(package))
+
+ # returning
+ # package_dir/filename
+ return os.path.join(package_dir, filename)
+
+ def get_ext_fullname(self, ext_name):
+ """Returns the fullname of a given extension name.
+
+ Adds the `package.` prefix"""
+ if self.package is None:
+ return ext_name
+ else:
+ return self.package + '.' + ext_name
+
+ def get_ext_filename(self, ext_name):
+ r"""Convert the name of an extension (eg. "foo.bar") into the name
+ of the file from which it will be loaded (eg. "foo/bar.so", or
+ "foo\bar.pyd").
+ """
+ from distutils.sysconfig import get_config_var
+ ext_path = ext_name.split('.')
+ ext_suffix = get_config_var('EXT_SUFFIX')
+ return os.path.join(*ext_path) + ext_suffix
+
+ def get_export_symbols(self, ext):
+ """Return the list of symbols that a shared extension has to
+ export. This either uses 'ext.export_symbols' or, if it's not
+ provided, "PyInit_" + module_name. Only relevant on Windows, where
+ the .pyd file (DLL) must export the module "PyInit_" function.
+ """
+ name = ext.name.split('.')[-1]
+ try:
+ # Unicode module name support as defined in PEP-489
+ # https://www.python.org/dev/peps/pep-0489/#export-hook-name
+ name.encode('ascii')
+ except UnicodeEncodeError:
+ suffix = 'U_' + name.encode('punycode').replace(b'-', b'_').decode('ascii')
+ else:
+ suffix = "_" + name
+
+ initfunc_name = "PyInit" + suffix
+ if initfunc_name not in ext.export_symbols:
+ ext.export_symbols.append(initfunc_name)
+ return ext.export_symbols
+
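(A standalone sketch of the PyInit export-symbol naming implemented by
get_export_symbols above, including the PEP 489 punycode path; the module names
are illustrative.)

    def init_symbol(module_name):
        name = module_name.split('.')[-1]
        try:
            name.encode('ascii')
        except UnicodeEncodeError:
            suffix = 'U_' + name.encode('punycode').replace(b'-', b'_').decode('ascii')
        else:
            suffix = '_' + name
        return 'PyInit' + suffix

    print(init_symbol('pkg.spam'))    # PyInit_spam
    print(init_symbol('pkg.日本'))    # PyInitU_<punycode-encoded name>
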
+ def get_libraries(self, ext):
+ """Return the list of libraries to link against when building a
+ shared extension. On most platforms, this is just 'ext.libraries';
+ on Windows, we add the Python library (eg. python20.dll).
+ """
+ # The python library is always needed on Windows. For MSVC, this
+ # is redundant, since the library is mentioned in a pragma in
+ # pyconfig.h that MSVC groks. The other Windows compilers all seem
+ # to need it mentioned explicitly, though, so that's what we do.
+ # Append '_d' to the python import library on debug builds.
+ if sys.platform == "win32":
+ from distutils._msvccompiler import MSVCCompiler
+ if not isinstance(self.compiler, MSVCCompiler):
+ template = "python%d%d"
+ if self.debug:
+ template = template + '_d'
+ pythonlib = (template %
+ (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+ # don't extend ext.libraries, it may be shared with other
+ # extensions, it is a reference to the original list
+ return ext.libraries + [pythonlib]
+ else:
+ # On Android only the main executable and LD_PRELOADs are considered
+ # to be RTLD_GLOBAL, all the dependencies of the main executable
+ # remain RTLD_LOCAL and so the shared libraries must be linked with
+ # libpython when python is built with a shared python library (issue
+ # bpo-21536).
+ # On Cygwin (and if required, other POSIX-like platforms based on
+ # Windows like MinGW) it is simply necessary that all symbols in
+ # shared libraries are resolved at link time.
+ from distutils.sysconfig import get_config_var
+ link_libpython = False
+ if get_config_var('Py_ENABLE_SHARED'):
+ # A native build on an Android device or on Cygwin
+ if hasattr(sys, 'getandroidapilevel'):
+ link_libpython = True
+ elif sys.platform == 'cygwin':
+ link_libpython = True
+ elif '_PYTHON_HOST_PLATFORM' in os.environ:
+ # We are cross-compiling for one of the relevant platforms
+ if get_config_var('ANDROID_API_LEVEL') != 0:
+ link_libpython = True
+ elif get_config_var('MACHDEP') == 'cygwin':
+ link_libpython = True
+
+ if link_libpython:
+ ldversion = get_config_var('LDVERSION')
+ return ext.libraries + ['python' + ldversion]
+
+ return ext.libraries + py37compat.pythonlib()
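
(A sketch of the two 'ext_modules' forms accepted by check_extensions_list above;
the module name and sources are hypothetical.)

    from distutils.extension import Extension

    # Preferred form: Extension instances.
    ext_modules = [Extension("demo._speedups", sources=["demo/_speedups.c"])]

    # Legacy form, converted on the fly: (ext_name, build_info) 2-tuples.
    legacy_ext_modules = [("demo._speedups", {
        "sources": ["demo/_speedups.c"],
        "macros": [("WITH_FAST_PATH", "1")],   # 2-tuples define, 1-tuples undefine
    })]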
diff --git a/setuptools/_distutils/command/build_py.py b/setuptools/_distutils/command/build_py.py
new file mode 100644
index 0000000..7ef9bce
--- /dev/null
+++ b/setuptools/_distutils/command/build_py.py
@@ -0,0 +1,392 @@
+"""distutils.command.build_py
+
+Implements the Distutils 'build_py' command."""
+
+import os
+import importlib.util
+import sys
+import glob
+
+from distutils.core import Command
+from distutils.errors import *
+from distutils.util import convert_path
+from distutils import log
+
+class build_py (Command):
+
+ description = "\"build\" pure Python modules (copy to build directory)"
+
+ user_options = [
+ ('build-lib=', 'd', "directory to \"build\" (copy) to"),
+ ('compile', 'c', "compile .py to .pyc"),
+ ('no-compile', None, "don't compile .py files [default]"),
+ ('optimize=', 'O',
+ "also compile with optimization: -O1 for \"python -O\", "
+ "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
+ ('force', 'f', "forcibly build everything (ignore file timestamps)"),
+ ]
+
+ boolean_options = ['compile', 'force']
+ negative_opt = {'no-compile' : 'compile'}
+
+ def initialize_options(self):
+ self.build_lib = None
+ self.py_modules = None
+ self.package = None
+ self.package_data = None
+ self.package_dir = None
+ self.compile = 0
+ self.optimize = 0
+ self.force = None
+
+ def finalize_options(self):
+ self.set_undefined_options('build',
+ ('build_lib', 'build_lib'),
+ ('force', 'force'))
+
+ # Get the distribution options that are aliases for build_py
+ # options -- list of packages and list of modules.
+ self.packages = self.distribution.packages
+ self.py_modules = self.distribution.py_modules
+ self.package_data = self.distribution.package_data
+ self.package_dir = {}
+ if self.distribution.package_dir:
+ for name, path in self.distribution.package_dir.items():
+ self.package_dir[name] = convert_path(path)
+ self.data_files = self.get_data_files()
+
+ # Ick, copied straight from install_lib.py (fancy_getopt needs a
+ # type system! Hell, *everything* needs a type system!!!)
+ if not isinstance(self.optimize, int):
+ try:
+ self.optimize = int(self.optimize)
+ assert 0 <= self.optimize <= 2
+ except (ValueError, AssertionError):
+ raise DistutilsOptionError("optimize must be 0, 1, or 2")
+
+ def run(self):
+ # XXX copy_file by default preserves atime and mtime. IMHO this is
+ # the right thing to do, but perhaps it should be an option -- in
+ # particular, a site administrator might want installed files to
+ # reflect the time of installation rather than the last
+ # modification time before the installed release.
+
+ # XXX copy_file by default preserves mode, which appears to be the
+ # wrong thing to do: if a file is read-only in the working
+ # directory, we want it to be installed read/write so that the next
+ # installation of the same module distribution can overwrite it
+ # without problems. (This might be a Unix-specific issue.) Thus
+ # we turn off 'preserve_mode' when copying to the build directory,
+ # since the build directory is supposed to be exactly what the
+ # installation will look like (ie. we preserve mode when
+ # installing).
+
+ # Two options control which modules will be installed: 'packages'
+ # and 'py_modules'. The former lets us work with whole packages, not
+ # specifying individual modules at all; the latter is for
+ # specifying modules one-at-a-time.
+
+ if self.py_modules:
+ self.build_modules()
+ if self.packages:
+ self.build_packages()
+ self.build_package_data()
+
+ self.byte_compile(self.get_outputs(include_bytecode=0))
+
+ def get_data_files(self):
+ """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
+ data = []
+ if not self.packages:
+ return data
+ for package in self.packages:
+ # Locate package source directory
+ src_dir = self.get_package_dir(package)
+
+ # Compute package build directory
+ build_dir = os.path.join(*([self.build_lib] + package.split('.')))
+
+ # Length of path to strip from found files
+ plen = 0
+ if src_dir:
+ plen = len(src_dir)+1
+
+ # Strip directory from globbed filenames
+ filenames = [
+ file[plen:] for file in self.find_data_files(package, src_dir)
+ ]
+ data.append((package, src_dir, build_dir, filenames))
+ return data
+
+ def find_data_files(self, package, src_dir):
+ """Return filenames for package's data files in 'src_dir'"""
+ globs = (self.package_data.get('', [])
+ + self.package_data.get(package, []))
+ files = []
+ for pattern in globs:
+ # Each pattern has to be converted to a platform-specific path
+ filelist = glob.glob(os.path.join(glob.escape(src_dir), convert_path(pattern)))
+ # Files that match more than one pattern are only added once
+ files.extend([fn for fn in filelist if fn not in files
+ and os.path.isfile(fn)])
+ return files
+
+ def build_package_data(self):
+ """Copy data files into build directory"""
+ lastdir = None
+ for package, src_dir, build_dir, filenames in self.data_files:
+ for filename in filenames:
+ target = os.path.join(build_dir, filename)
+ self.mkpath(os.path.dirname(target))
+ self.copy_file(os.path.join(src_dir, filename), target,
+ preserve_mode=False)
+
+ def get_package_dir(self, package):
+ """Return the directory, relative to the top of the source
+ distribution, where package 'package' should be found
+ (at least according to the 'package_dir' option, if any)."""
+ path = package.split('.')
+
+ if not self.package_dir:
+ if path:
+ return os.path.join(*path)
+ else:
+ return ''
+ else:
+ tail = []
+ while path:
+ try:
+ pdir = self.package_dir['.'.join(path)]
+ except KeyError:
+ tail.insert(0, path[-1])
+ del path[-1]
+ else:
+ tail.insert(0, pdir)
+ return os.path.join(*tail)
+ else:
+ # Oops, got all the way through 'path' without finding a
+ # match in package_dir. If package_dir defines a directory
+ # for the root (nameless) package, then fallback on it;
+ # otherwise, we might as well have not consulted
+ # package_dir at all, as we just use the directory implied
+ # by 'tail' (which should be the same as the original value
+ # of 'path' at this point).
+ pdir = self.package_dir.get('')
+ if pdir is not None:
+ tail.insert(0, pdir)
+
+ if tail:
+ return os.path.join(*tail)
+ else:
+ return ''
+
+ def check_package(self, package, package_dir):
+ # Empty dir name means current directory, which we can probably
+ # assume exists. Also, os.path.exists and isdir don't know about
+ # my "empty string means current dir" convention, so we have to
+ # circumvent them.
+ if package_dir != "":
+ if not os.path.exists(package_dir):
+ raise DistutilsFileError(
+ "package directory '%s' does not exist" % package_dir)
+ if not os.path.isdir(package_dir):
+ raise DistutilsFileError(
+ "supposed package directory '%s' exists, "
+ "but is not a directory" % package_dir)
+
+ # Require __init__.py for all but the "root package"
+ if package:
+ init_py = os.path.join(package_dir, "__init__.py")
+ if os.path.isfile(init_py):
+ return init_py
+ else:
+ log.warn(("package init file '%s' not found " +
+ "(or not a regular file)"), init_py)
+
+ # Either not in a package at all (__init__.py not expected), or
+ # __init__.py doesn't exist -- so don't return the filename.
+ return None
+
+ def check_module(self, module, module_file):
+ if not os.path.isfile(module_file):
+ log.warn("file %s (for module %s) not found", module_file, module)
+ return False
+ else:
+ return True
+
+ def find_package_modules(self, package, package_dir):
+ self.check_package(package, package_dir)
+ module_files = glob.glob(os.path.join(glob.escape(package_dir), "*.py"))
+ modules = []
+ setup_script = os.path.abspath(self.distribution.script_name)
+
+ for f in module_files:
+ abs_f = os.path.abspath(f)
+ if abs_f != setup_script:
+ module = os.path.splitext(os.path.basename(f))[0]
+ modules.append((package, module, f))
+ else:
+ self.debug_print("excluding %s" % setup_script)
+ return modules
+
+ def find_modules(self):
+ """Finds individually-specified Python modules, ie. those listed by
+ module name in 'self.py_modules'. Returns a list of tuples (package,
+ module_base, filename): 'package' is the dotted name of the package
+ containing the module (empty for top-level modules); 'module_base' is the bare (no
+ packages, no dots) module name, and 'filename' is the path to the
+ ".py" file (relative to the distribution root) that implements the
+ module.
+ """
+ # Map package names to tuples of useful info about the package:
+ # (package_dir, checked)
+ # package_dir - the directory where we'll find source files for
+ # this package
+ # checked - true if we have checked that the package directory
+ # is valid (exists, contains __init__.py, ... ?)
+ packages = {}
+
+ # List of (package, module, filename) tuples to return
+ modules = []
+
+ # We treat modules-in-packages almost the same as toplevel modules,
+ # just the "package" for a toplevel is empty (either an empty
+ # string or empty list, depending on context). Differences:
+ # - don't check for __init__.py in directory for empty package
+ for module in self.py_modules:
+ path = module.split('.')
+ package = '.'.join(path[0:-1])
+ module_base = path[-1]
+
+ try:
+ (package_dir, checked) = packages[package]
+ except KeyError:
+ package_dir = self.get_package_dir(package)
+ checked = 0
+
+ if not checked:
+ init_py = self.check_package(package, package_dir)
+ packages[package] = (package_dir, 1)
+ if init_py:
+ modules.append((package, "__init__", init_py))
+
+ # XXX perhaps we should also check for just .pyc files
+ # (so greedy closed-source bastards can distribute Python
+ # modules too)
+ module_file = os.path.join(package_dir, module_base + ".py")
+ if not self.check_module(module, module_file):
+ continue
+
+ modules.append((package, module_base, module_file))
+
+ return modules
+
+ def find_all_modules(self):
+ """Compute the list of all modules that will be built, whether
+ they are specified one-module-at-a-time ('self.py_modules') or
+ by whole packages ('self.packages'). Return a list of tuples
+ (package, module, module_file), just like 'find_modules()' and
+ 'find_package_modules()' do."""
+ modules = []
+ if self.py_modules:
+ modules.extend(self.find_modules())
+ if self.packages:
+ for package in self.packages:
+ package_dir = self.get_package_dir(package)
+ m = self.find_package_modules(package, package_dir)
+ modules.extend(m)
+ return modules
+
+ def get_source_files(self):
+ return [module[-1] for module in self.find_all_modules()]
+
+ def get_module_outfile(self, build_dir, package, module):
+ outfile_path = [build_dir] + list(package) + [module + ".py"]
+ return os.path.join(*outfile_path)
+
+ def get_outputs(self, include_bytecode=1):
+ modules = self.find_all_modules()
+ outputs = []
+ for (package, module, module_file) in modules:
+ package = package.split('.')
+ filename = self.get_module_outfile(self.build_lib, package, module)
+ outputs.append(filename)
+ if include_bytecode:
+ if self.compile:
+ outputs.append(importlib.util.cache_from_source(
+ filename, optimization=''))
+ if self.optimize > 0:
+ outputs.append(importlib.util.cache_from_source(
+ filename, optimization=self.optimize))
+
+ outputs += [
+ os.path.join(build_dir, filename)
+ for package, src_dir, build_dir, filenames in self.data_files
+ for filename in filenames
+ ]
+
+ return outputs
+
+ def build_module(self, module, module_file, package):
+ if isinstance(package, str):
+ package = package.split('.')
+ elif not isinstance(package, (list, tuple)):
+ raise TypeError(
+ "'package' must be a string (dot-separated), list, or tuple")
+
+ # Now put the module source file into the "build" area -- this is
+ # easy, we just copy it somewhere under self.build_lib (the build
+ # directory for Python source).
+ outfile = self.get_module_outfile(self.build_lib, package, module)
+ dir = os.path.dirname(outfile)
+ self.mkpath(dir)
+ return self.copy_file(module_file, outfile, preserve_mode=0)
+
+ def build_modules(self):
+ modules = self.find_modules()
+ for (package, module, module_file) in modules:
+ # Now "build" the module -- ie. copy the source file to
+ # self.build_lib (the build directory for Python source).
+ # (Actually, it gets copied to the directory for this package
+ # under self.build_lib.)
+ self.build_module(module, module_file, package)
+
+ def build_packages(self):
+ for package in self.packages:
+ # Get list of (package, module, module_file) tuples based on
+ # scanning the package directory. 'package' is only included
+ # in the tuple so that 'find_modules()' and
+ 'find_package_modules()' have a consistent interface; it's
+ # ignored here (apart from a sanity check). Also, 'module' is
+ # the *unqualified* module name (ie. no dots, no package -- we
+ # already know its package!), and 'module_file' is the path to
+ # the .py file, relative to the current directory
+ # (ie. including 'package_dir').
+ package_dir = self.get_package_dir(package)
+ modules = self.find_package_modules(package, package_dir)
+
+ # Now loop over the modules we found, "building" each one (just
+ # copy it to self.build_lib).
+ for (package_, module, module_file) in modules:
+ assert package == package_
+ self.build_module(module, module_file, package)
+
+ def byte_compile(self, files):
+ if sys.dont_write_bytecode:
+ self.warn('byte-compiling is disabled, skipping.')
+ return
+
+ from distutils.util import byte_compile
+ prefix = self.build_lib
+ if prefix[-1] != os.sep:
+ prefix = prefix + os.sep
+
+ # XXX this code is essentially the same as the 'byte_compile()'
+ # method of the "install_lib" command, except for the determination
+ # of the 'prefix' string. Hmmm.
+ if self.compile:
+ byte_compile(files, optimize=0,
+ force=self.force, prefix=prefix, dry_run=self.dry_run)
+ if self.optimize > 0:
+ byte_compile(files, optimize=self.optimize,
+ force=self.force, prefix=prefix, dry_run=self.dry_run)
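For illustration, the fallback walk in get_package_dir above can be reproduced as a small standalone sketch; the package_dir mapping and package names below are hypothetical, not taken from the change itself:

import os

# Hypothetical mapping, as it would come from setup(package_dir=...)
package_dir = {'': 'src', 'pkg.vendored': 'third_party/vendored'}

def resolve_package_dir(package, package_dir):
    """Mirror build_py.get_package_dir: walk the dotted name from most-
    to least-specific, then fall back to the root ('') entry."""
    path = package.split('.') if package else []
    tail = []
    while path:
        key = '.'.join(path)
        if key in package_dir:
            tail.insert(0, package_dir[key])
            return os.path.join(*tail)
        tail.insert(0, path.pop())
    root = package_dir.get('')
    if root is not None:
        tail.insert(0, root)
    return os.path.join(*tail) if tail else ''

print(resolve_package_dir('pkg.vendored.util', package_dir))  # third_party/vendored/util (POSIX separators)
print(resolve_package_dir('pkg.core', package_dir))           # src/pkg/core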
diff --git a/setuptools/_distutils/command/build_scripts.py b/setuptools/_distutils/command/build_scripts.py
new file mode 100644
index 0000000..e3312cf
--- /dev/null
+++ b/setuptools/_distutils/command/build_scripts.py
@@ -0,0 +1,152 @@
+"""distutils.command.build_scripts
+
+Implements the Distutils 'build_scripts' command."""
+
+import os, re
+from stat import ST_MODE
+from distutils import sysconfig
+from distutils.core import Command
+from distutils.dep_util import newer
+from distutils.util import convert_path
+from distutils import log
+import tokenize
+
+# check if Python is called on the first line with this expression
+first_line_re = re.compile(b'^#!.*python[0-9.]*([ \t].*)?$')
+
+class build_scripts(Command):
+
+ description = "\"build\" scripts (copy and fixup #! line)"
+
+ user_options = [
+ ('build-dir=', 'd', "directory to \"build\" (copy) to"),
+ ('force', 'f', "forcibly build everything (ignore file timestamps)"),
+ ('executable=', 'e', "specify final destination interpreter path"),
+ ]
+
+ boolean_options = ['force']
+
+
+ def initialize_options(self):
+ self.build_dir = None
+ self.scripts = None
+ self.force = None
+ self.executable = None
+ self.outfiles = None
+
+ def finalize_options(self):
+ self.set_undefined_options('build',
+ ('build_scripts', 'build_dir'),
+ ('force', 'force'),
+ ('executable', 'executable'))
+ self.scripts = self.distribution.scripts
+
+ def get_source_files(self):
+ return self.scripts
+
+ def run(self):
+ if not self.scripts:
+ return
+ self.copy_scripts()
+
+
+ def copy_scripts(self):
+ r"""Copy each script listed in 'self.scripts'; if it's marked as a
+ Python script in the Unix way (first line matches 'first_line_re',
+ ie. starts with "\#!" and contains "python"), then adjust the first
+ line to refer to the current Python interpreter as we copy.
+ """
+ self.mkpath(self.build_dir)
+ outfiles = []
+ updated_files = []
+ for script in self.scripts:
+ adjust = False
+ script = convert_path(script)
+ outfile = os.path.join(self.build_dir, os.path.basename(script))
+ outfiles.append(outfile)
+
+ if not self.force and not newer(script, outfile):
+ log.debug("not copying %s (up-to-date)", script)
+ continue
+
+ # Always open the file, but ignore failures in dry-run mode --
+ # that way, we'll get accurate feedback if we can read the
+ # script.
+ try:
+ f = open(script, "rb")
+ except OSError:
+ if not self.dry_run:
+ raise
+ f = None
+ else:
+ encoding, lines = tokenize.detect_encoding(f.readline)
+ f.seek(0)
+ first_line = f.readline()
+ if not first_line:
+ self.warn("%s is an empty file (skipping)" % script)
+ continue
+
+ match = first_line_re.match(first_line)
+ if match:
+ adjust = True
+ post_interp = match.group(1) or b''
+
+ if adjust:
+ log.info("copying and adjusting %s -> %s", script,
+ self.build_dir)
+ updated_files.append(outfile)
+ if not self.dry_run:
+ if not sysconfig.python_build:
+ executable = self.executable
+ else:
+ executable = os.path.join(
+ sysconfig.get_config_var("BINDIR"),
+ "python%s%s" % (sysconfig.get_config_var("VERSION"),
+ sysconfig.get_config_var("EXE")))
+ executable = os.fsencode(executable)
+ shebang = b"#!" + executable + post_interp + b"\n"
+ # The Python parser reads a script as UTF-8 until it finds a
+ # #coding:xxx cookie. Since the shebang must be the first line
+ # of the file, the cookie cannot appear before it, so the
+ # shebang has to be decodable from UTF-8.
+ try:
+ shebang.decode('utf-8')
+ except UnicodeDecodeError:
+ raise ValueError(
+ "The shebang ({!r}) is not decodable "
+ "from utf-8".format(shebang))
+ # If the script is encoded to a custom encoding (use a
+ # #coding:xxx cookie), the shebang has to be decodable from
+ # the script encoding too.
+ try:
+ shebang.decode(encoding)
+ except UnicodeDecodeError:
+ raise ValueError(
+ "The shebang ({!r}) is not decodable "
+ "from the script encoding ({})"
+ .format(shebang, encoding))
+ with open(outfile, "wb") as outf:
+ outf.write(shebang)
+ outf.writelines(f.readlines())
+ if f:
+ f.close()
+ else:
+ if f:
+ f.close()
+ updated_files.append(outfile)
+ self.copy_file(script, outfile)
+
+ if os.name == 'posix':
+ for file in outfiles:
+ if self.dry_run:
+ log.info("changing mode of %s", file)
+ else:
+ oldmode = os.stat(file)[ST_MODE] & 0o7777
+ newmode = (oldmode | 0o555) & 0o7777
+ if newmode != oldmode:
+ log.info("changing mode of %s from %o to %o",
+ file, oldmode, newmode)
+ os.chmod(file, newmode)
+ # XXX should we modify self.outfiles?
+ return outfiles, updated_files
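A minimal sketch of the shebang adjustment that copy_scripts performs, using the same first_line_re pattern; the interpreter path passed in below is a made-up example:

import re

# Same pattern as build_scripts.first_line_re (bytes, since scripts are read in binary mode)
first_line_re = re.compile(b'^#!.*python[0-9.]*([ \t].*)?$')

def rewrite_shebang(first_line, executable=b'/usr/local/bin/python3.10'):
    """If the first line is a 'python' shebang, rebuild it to point at
    'executable', keeping any trailing interpreter options (e.g. ' -u')."""
    match = first_line_re.match(first_line)
    if not match:
        return first_line  # not a Python script; left untouched
    post_interp = match.group(1) or b''
    return b'#!' + executable + post_interp + b'\n'

print(rewrite_shebang(b'#!/usr/bin/env python -u\n'))
# b'#!/usr/local/bin/python3.10 -u\n'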
diff --git a/setuptools/_distutils/command/check.py b/setuptools/_distutils/command/check.py
new file mode 100644
index 0000000..525540b
--- /dev/null
+++ b/setuptools/_distutils/command/check.py
@@ -0,0 +1,148 @@
+"""distutils.command.check
+
+Implements the Distutils 'check' command.
+"""
+from distutils.core import Command
+from distutils.errors import DistutilsSetupError
+
+try:
+ # docutils is installed
+ from docutils.utils import Reporter
+ from docutils.parsers.rst import Parser
+ from docutils import frontend
+ from docutils import nodes
+
+ class SilentReporter(Reporter):
+
+ def __init__(self, source, report_level, halt_level, stream=None,
+ debug=0, encoding='ascii', error_handler='replace'):
+ self.messages = []
+ super().__init__(source, report_level, halt_level, stream,
+ debug, encoding, error_handler)
+
+ def system_message(self, level, message, *children, **kwargs):
+ self.messages.append((level, message, children, kwargs))
+ return nodes.system_message(message, level=level,
+ type=self.levels[level],
+ *children, **kwargs)
+
+ HAS_DOCUTILS = True
+except Exception:
+ # Catch all exceptions because exceptions besides ImportError probably
+ # indicate that docutils is not ported to Py3k.
+ HAS_DOCUTILS = False
+
+class check(Command):
+ """This command checks the meta-data of the package.
+ """
+ description = ("perform some checks on the package")
+ user_options = [('metadata', 'm', 'Verify meta-data'),
+ ('restructuredtext', 'r',
+ ('Checks if long string meta-data syntax '
+ 'is reStructuredText-compliant')),
+ ('strict', 's',
+ 'Will exit with an error if a check fails')]
+
+ boolean_options = ['metadata', 'restructuredtext', 'strict']
+
+ def initialize_options(self):
+ """Sets default values for options."""
+ self.restructuredtext = 0
+ self.metadata = 1
+ self.strict = 0
+ self._warnings = 0
+
+ def finalize_options(self):
+ pass
+
+ def warn(self, msg):
+ """Counts the number of warnings that occur."""
+ self._warnings += 1
+ return Command.warn(self, msg)
+
+ def run(self):
+ """Runs the command."""
+ # perform the various tests
+ if self.metadata:
+ self.check_metadata()
+ if self.restructuredtext:
+ if HAS_DOCUTILS:
+ self.check_restructuredtext()
+ elif self.strict:
+ raise DistutilsSetupError('The docutils package is needed.')
+
+ # let's raise an error in strict mode, if we have at least
+ # one warning
+ if self.strict and self._warnings > 0:
+ raise DistutilsSetupError('Please correct your package.')
+
+ def check_metadata(self):
+ """Ensures that all required elements of meta-data are supplied.
+
+ Required fields:
+ name, version, URL
+
+ Recommended fields:
+ (author and author_email) or (maintainer and maintainer_email)
+
+ Warns if any are missing.
+ """
+ metadata = self.distribution.metadata
+
+ missing = []
+ for attr in ('name', 'version', 'url'):
+ if not (hasattr(metadata, attr) and getattr(metadata, attr)):
+ missing.append(attr)
+
+ if missing:
+ self.warn("missing required meta-data: %s" % ', '.join(missing))
+ if metadata.author:
+ if not metadata.author_email:
+ self.warn("missing meta-data: if 'author' supplied, " +
+ "'author_email' should be supplied too")
+ elif metadata.maintainer:
+ if not metadata.maintainer_email:
+ self.warn("missing meta-data: if 'maintainer' supplied, " +
+ "'maintainer_email' should be supplied too")
+ else:
+ self.warn("missing meta-data: either (author and author_email) " +
+ "or (maintainer and maintainer_email) " +
+ "should be supplied")
+
+ def check_restructuredtext(self):
+ """Checks if the long string fields are reST-compliant."""
+ data = self.distribution.get_long_description()
+ for warning in self._check_rst_data(data):
+ line = warning[-1].get('line')
+ if line is None:
+ warning = warning[1]
+ else:
+ warning = '%s (line %s)' % (warning[1], line)
+ self.warn(warning)
+
+ def _check_rst_data(self, data):
+ """Returns warnings when the provided data doesn't compile."""
+ # the include and csv_table directives need this to be a path
+ source_path = self.distribution.script_name or 'setup.py'
+ parser = Parser()
+ settings = frontend.OptionParser(components=(Parser,)).get_default_values()
+ settings.tab_width = 4
+ settings.pep_references = None
+ settings.rfc_references = None
+ reporter = SilentReporter(source_path,
+ settings.report_level,
+ settings.halt_level,
+ stream=settings.warning_stream,
+ debug=settings.debug,
+ encoding=settings.error_encoding,
+ error_handler=settings.error_encoding_error_handler)
+
+ document = nodes.document(settings, reporter, source=source_path)
+ document.note_source(source_path, -1)
+ try:
+ parser.parse(data, document)
+ except AttributeError as e:
+ reporter.messages.append(
+ (-1, 'Could not finish the parsing: %s.' % e, '', {}))
+
+ return reporter.messages
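Roughly, check_metadata above enforces the rules sketched below; this sketch uses a plain dict in place of a real DistributionMetadata object, and the field values are invented for illustration:

def missing_metadata(meta):
    """Apply the same rules as check.check_metadata to a plain dict:
    name/version/url are required, plus either author+author_email
    or maintainer+maintainer_email."""
    warnings = []
    missing = [attr for attr in ('name', 'version', 'url') if not meta.get(attr)]
    if missing:
        warnings.append("missing required meta-data: %s" % ', '.join(missing))
    if meta.get('author'):
        if not meta.get('author_email'):
            warnings.append("'author' supplied but 'author_email' is missing")
    elif meta.get('maintainer'):
        if not meta.get('maintainer_email'):
            warnings.append("'maintainer' supplied but 'maintainer_email' is missing")
    else:
        warnings.append("supply (author, author_email) or (maintainer, maintainer_email)")
    return warnings

print(missing_metadata({'name': 'example-pkg', 'version': '1.0', 'author': 'Jane Doe'}))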
diff --git a/setuptools/_distutils/command/clean.py b/setuptools/_distutils/command/clean.py
new file mode 100644
index 0000000..0cb2701
--- /dev/null
+++ b/setuptools/_distutils/command/clean.py
@@ -0,0 +1,76 @@
+"""distutils.command.clean
+
+Implements the Distutils 'clean' command."""
+
+# contributed by Bastian Kleineidam <calvin@cs.uni-sb.de>, added 2000-03-18
+
+import os
+from distutils.core import Command
+from distutils.dir_util import remove_tree
+from distutils import log
+
+class clean(Command):
+
+ description = "clean up temporary files from 'build' command"
+ user_options = [
+ ('build-base=', 'b',
+ "base build directory (default: 'build.build-base')"),
+ ('build-lib=', None,
+ "build directory for all modules (default: 'build.build-lib')"),
+ ('build-temp=', 't',
+ "temporary build directory (default: 'build.build-temp')"),
+ ('build-scripts=', None,
+ "build directory for scripts (default: 'build.build-scripts')"),
+ ('bdist-base=', None,
+ "temporary directory for built distributions"),
+ ('all', 'a',
+ "remove all build output, not just temporary by-products")
+ ]
+
+ boolean_options = ['all']
+
+ def initialize_options(self):
+ self.build_base = None
+ self.build_lib = None
+ self.build_temp = None
+ self.build_scripts = None
+ self.bdist_base = None
+ self.all = None
+
+ def finalize_options(self):
+ self.set_undefined_options('build',
+ ('build_base', 'build_base'),
+ ('build_lib', 'build_lib'),
+ ('build_scripts', 'build_scripts'),
+ ('build_temp', 'build_temp'))
+ self.set_undefined_options('bdist',
+ ('bdist_base', 'bdist_base'))
+
+ def run(self):
+ # remove the build/temp.<plat> directory (unless it's already
+ # gone)
+ if os.path.exists(self.build_temp):
+ remove_tree(self.build_temp, dry_run=self.dry_run)
+ else:
+ log.debug("'%s' does not exist -- can't clean it",
+ self.build_temp)
+
+ if self.all:
+ # remove build directories
+ for directory in (self.build_lib,
+ self.bdist_base,
+ self.build_scripts):
+ if os.path.exists(directory):
+ remove_tree(directory, dry_run=self.dry_run)
+ else:
+ log.warn("'%s' does not exist -- can't clean it",
+ directory)
+
+ # just for the heck of it, try to remove the base build directory:
+ # we might have emptied it right now, but if not we don't care
+ if not self.dry_run:
+ try:
+ os.rmdir(self.build_base)
+ log.info("removing '%s'", self.build_base)
+ except OSError:
+ pass
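The bare try/except around os.rmdir at the end of clean.run works because os.rmdir refuses to delete a non-empty directory, so the call is effectively "remove the base build directory only if it is already empty". A small sketch of that behaviour, using a throwaway temporary directory:

import os
import tempfile

base = tempfile.mkdtemp()
open(os.path.join(base, 'leftover.txt'), 'w').close()

try:
    os.rmdir(base)
    print("removed", base)
except OSError:
    print("left in place (not empty):", base)

# tidy up the throwaway directory afterwards
os.remove(os.path.join(base, 'leftover.txt'))
os.rmdir(base)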
diff --git a/setuptools/_distutils/command/config.py b/setuptools/_distutils/command/config.py
new file mode 100644
index 0000000..aeda408
--- /dev/null
+++ b/setuptools/_distutils/command/config.py
@@ -0,0 +1,344 @@
+"""distutils.command.config
+
+Implements the Distutils 'config' command, a (mostly) empty command class
+that exists mainly to be sub-classed by specific module distributions and
+applications. The idea is that while every "config" command is different,
+at least they're all named the same, and users always see "config" in the
+list of standard commands. Also, this is a good place to put common
+configure-like tasks: "try to compile this C code", or "figure out where
+this header file lives".
+"""
+
+import os, re
+
+from distutils.core import Command
+from distutils.errors import DistutilsExecError
+from distutils.sysconfig import customize_compiler
+from distutils import log
+
+LANG_EXT = {"c": ".c", "c++": ".cxx"}
+
+class config(Command):
+
+ description = "prepare to build"
+
+ user_options = [
+ ('compiler=', None,
+ "specify the compiler type"),
+ ('cc=', None,
+ "specify the compiler executable"),
+ ('include-dirs=', 'I',
+ "list of directories to search for header files"),
+ ('define=', 'D',
+ "C preprocessor macros to define"),
+ ('undef=', 'U',
+ "C preprocessor macros to undefine"),
+ ('libraries=', 'l',
+ "external C libraries to link with"),
+ ('library-dirs=', 'L',
+ "directories to search for external C libraries"),
+
+ ('noisy', None,
+ "show every action (compile, link, run, ...) taken"),
+ ('dump-source', None,
+ "dump generated source files before attempting to compile them"),
+ ]
+
+
+ # The three standard command methods: since the "config" command
+ # does nothing by default, these are empty.
+
+ def initialize_options(self):
+ self.compiler = None
+ self.cc = None
+ self.include_dirs = None
+ self.libraries = None
+ self.library_dirs = None
+
+ # maximal output for now
+ self.noisy = 1
+ self.dump_source = 1
+
+ # list of temporary files generated along-the-way that we have
+ # to clean at some point
+ self.temp_files = []
+
+ def finalize_options(self):
+ if self.include_dirs is None:
+ self.include_dirs = self.distribution.include_dirs or []
+ elif isinstance(self.include_dirs, str):
+ self.include_dirs = self.include_dirs.split(os.pathsep)
+
+ if self.libraries is None:
+ self.libraries = []
+ elif isinstance(self.libraries, str):
+ self.libraries = [self.libraries]
+
+ if self.library_dirs is None:
+ self.library_dirs = []
+ elif isinstance(self.library_dirs, str):
+ self.library_dirs = self.library_dirs.split(os.pathsep)
+
+ def run(self):
+ pass
+
+ # Utility methods for actual "config" commands. The interfaces are
+ # loosely based on Autoconf macros of similar names. Sub-classes
+ # may use these freely.
+
+ def _check_compiler(self):
+ """Check that 'self.compiler' really is a CCompiler object;
+ if not, make it one.
+ """
+ # We do this late, and only on-demand, because this is an expensive
+ # import.
+ from distutils.ccompiler import CCompiler, new_compiler
+ if not isinstance(self.compiler, CCompiler):
+ self.compiler = new_compiler(compiler=self.compiler,
+ dry_run=self.dry_run, force=1)
+ customize_compiler(self.compiler)
+ if self.include_dirs:
+ self.compiler.set_include_dirs(self.include_dirs)
+ if self.libraries:
+ self.compiler.set_libraries(self.libraries)
+ if self.library_dirs:
+ self.compiler.set_library_dirs(self.library_dirs)
+
+ def _gen_temp_sourcefile(self, body, headers, lang):
+ filename = "_configtest" + LANG_EXT[lang]
+ with open(filename, "w") as file:
+ if headers:
+ for header in headers:
+ file.write("#include <%s>\n" % header)
+ file.write("\n")
+ file.write(body)
+ if body[-1] != "\n":
+ file.write("\n")
+ return filename
+
+ def _preprocess(self, body, headers, include_dirs, lang):
+ src = self._gen_temp_sourcefile(body, headers, lang)
+ out = "_configtest.i"
+ self.temp_files.extend([src, out])
+ self.compiler.preprocess(src, out, include_dirs=include_dirs)
+ return (src, out)
+
+ def _compile(self, body, headers, include_dirs, lang):
+ src = self._gen_temp_sourcefile(body, headers, lang)
+ if self.dump_source:
+ dump_file(src, "compiling '%s':" % src)
+ (obj,) = self.compiler.object_filenames([src])
+ self.temp_files.extend([src, obj])
+ self.compiler.compile([src], include_dirs=include_dirs)
+ return (src, obj)
+
+ def _link(self, body, headers, include_dirs, libraries, library_dirs,
+ lang):
+ (src, obj) = self._compile(body, headers, include_dirs, lang)
+ prog = os.path.splitext(os.path.basename(src))[0]
+ self.compiler.link_executable([obj], prog,
+ libraries=libraries,
+ library_dirs=library_dirs,
+ target_lang=lang)
+
+ if self.compiler.exe_extension is not None:
+ prog = prog + self.compiler.exe_extension
+ self.temp_files.append(prog)
+
+ return (src, obj, prog)
+
+ def _clean(self, *filenames):
+ if not filenames:
+ filenames = self.temp_files
+ self.temp_files = []
+ log.info("removing: %s", ' '.join(filenames))
+ for filename in filenames:
+ try:
+ os.remove(filename)
+ except OSError:
+ pass
+
+
+ # XXX these ignore the dry-run flag: what to do, what to do? even if
+ # you want a dry-run build, you still need some sort of configuration
+ # info. My inclination is to make it up to the real config command to
+ # consult 'dry_run', and assume a default (minimal) configuration if
+ # true. The problem with trying to do it here is that you'd have to
+ # return either true or false from all the 'try' methods, neither of
+ # which is correct.
+
+ # XXX need access to the header search path and maybe default macros.
+
+ def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"):
+ """Construct a source file from 'body' (a string containing lines
+ of C/C++ code) and 'headers' (a list of header files to include)
+ and run it through the preprocessor. Return true if the
+ preprocessor succeeded, false if there were any errors.
+ ('body' probably isn't of much use, but what the heck.)
+ """
+ from distutils.ccompiler import CompileError
+ self._check_compiler()
+ ok = True
+ try:
+ self._preprocess(body, headers, include_dirs, lang)
+ except CompileError:
+ ok = False
+
+ self._clean()
+ return ok
+
+ def search_cpp(self, pattern, body=None, headers=None, include_dirs=None,
+ lang="c"):
+ """Construct a source file (just like 'try_cpp()'), run it through
+ the preprocessor, and return true if any line of the output matches
+ 'pattern'. 'pattern' should either be a compiled regex object or a
+ string containing a regex. If both 'body' and 'headers' are None,
+ preprocesses an empty file -- which can be useful to determine the
+ symbols the preprocessor and compiler set by default.
+ """
+ self._check_compiler()
+ src, out = self._preprocess(body, headers, include_dirs, lang)
+
+ if isinstance(pattern, str):
+ pattern = re.compile(pattern)
+
+ with open(out) as file:
+ match = False
+ while True:
+ line = file.readline()
+ if line == '':
+ break
+ if pattern.search(line):
+ match = True
+ break
+
+ self._clean()
+ return match
+
+ def try_compile(self, body, headers=None, include_dirs=None, lang="c"):
+ """Try to compile a source file built from 'body' and 'headers'.
+ Return true on success, false otherwise.
+ """
+ from distutils.ccompiler import CompileError
+ self._check_compiler()
+ try:
+ self._compile(body, headers, include_dirs, lang)
+ ok = True
+ except CompileError:
+ ok = False
+
+ log.info(ok and "success!" or "failure.")
+ self._clean()
+ return ok
+
+ def try_link(self, body, headers=None, include_dirs=None, libraries=None,
+ library_dirs=None, lang="c"):
+ """Try to compile and link a source file, built from 'body' and
+ 'headers', to executable form. Return true on success, false
+ otherwise.
+ """
+ from distutils.ccompiler import CompileError, LinkError
+ self._check_compiler()
+ try:
+ self._link(body, headers, include_dirs,
+ libraries, library_dirs, lang)
+ ok = True
+ except (CompileError, LinkError):
+ ok = False
+
+ log.info(ok and "success!" or "failure.")
+ self._clean()
+ return ok
+
+ def try_run(self, body, headers=None, include_dirs=None, libraries=None,
+ library_dirs=None, lang="c"):
+ """Try to compile, link to an executable, and run a program
+ built from 'body' and 'headers'. Return true on success, false
+ otherwise.
+ """
+ from distutils.ccompiler import CompileError, LinkError
+ self._check_compiler()
+ try:
+ src, obj, exe = self._link(body, headers, include_dirs,
+ libraries, library_dirs, lang)
+ self.spawn([exe])
+ ok = True
+ except (CompileError, LinkError, DistutilsExecError):
+ ok = False
+
+ log.info(ok and "success!" or "failure.")
+ self._clean()
+ return ok
+
+
+ # -- High-level methods --------------------------------------------
+ # (these are the ones that are actually likely to be useful
+ # when implementing a real-world config command!)
+
+ def check_func(self, func, headers=None, include_dirs=None,
+ libraries=None, library_dirs=None, decl=0, call=0):
+ """Determine if function 'func' is available by constructing a
+ source file that refers to 'func', and compiles and links it.
+ If everything succeeds, returns true; otherwise returns false.
+
+ The constructed source file starts out by including the header
+ files listed in 'headers'. If 'decl' is true, it then declares
+ 'func' (as "int func()"); you probably shouldn't supply 'headers'
+ and set 'decl' true in the same call, or you might get errors about
+ a conflicting declarations for 'func'. Finally, the constructed
+ 'main()' function either references 'func' or (if 'call' is true)
+ calls it. 'libraries' and 'library_dirs' are used when
+ linking.
+ """
+ self._check_compiler()
+ body = []
+ if decl:
+ body.append("int %s ();" % func)
+ body.append("int main () {")
+ if call:
+ body.append(" %s();" % func)
+ else:
+ body.append(" %s;" % func)
+ body.append("}")
+ body = "\n".join(body) + "\n"
+
+ return self.try_link(body, headers, include_dirs,
+ libraries, library_dirs)
+
+ def check_lib(self, library, library_dirs=None, headers=None,
+ include_dirs=None, other_libraries=[]):
+ """Determine if 'library' is available to be linked against,
+ without actually checking that any particular symbols are provided
+ by it. 'headers' will be used in constructing the source file to
+ be compiled, but the only effect of this is to check if all the
+ header files listed are available. Any libraries listed in
+ 'other_libraries' will be included in the link, in case 'library'
+ has symbols that depend on other libraries.
+ """
+ self._check_compiler()
+ return self.try_link("int main (void) { }", headers, include_dirs,
+ [library] + other_libraries, library_dirs)
+
+ def check_header(self, header, include_dirs=None, library_dirs=None,
+ lang="c"):
+ """Determine if the system header file named by 'header'
+ exists and can be found by the preprocessor; return true if so,
+ false otherwise.
+ """
+ return self.try_cpp(body="/* No body */", headers=[header],
+ include_dirs=include_dirs)
+
+def dump_file(filename, head=None):
+ """Dumps the contents of a file to log.info.
+
+ If head is not None, it is logged before the file contents.
+ """
+ if head is None:
+ log.info('%s', filename)
+ else:
+ log.info(head)
+ file = open(filename)
+ try:
+ log.info(file.read())
+ finally:
+ file.close()
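A sketch of the C source that check_func above assembles before handing it to try_link(); the function name is a placeholder, and the helper simply rebuilds the same body string for decl=1, call=1:

def make_check_func_body(func, decl=True, call=True):
    """Rebuild the body string that config.check_func assembles."""
    body = []
    if decl:
        body.append("int %s ();" % func)
    body.append("int main () {")
    if call:
        body.append("  %s();" % func)
    else:
        body.append("  %s;" % func)
    body.append("}")
    return "\n".join(body) + "\n"

print(make_check_func_body("strlcpy"))
# int strlcpy ();
# int main () {
#   strlcpy();
# }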
diff --git a/setuptools/_distutils/command/install.py b/setuptools/_distutils/command/install.py
new file mode 100644
index 0000000..41c17d8
--- /dev/null
+++ b/setuptools/_distutils/command/install.py
@@ -0,0 +1,781 @@
+"""distutils.command.install
+
+Implements the Distutils 'install' command."""
+
+import sys
+import os
+import contextlib
+import sysconfig
+import itertools
+
+from distutils import log
+from distutils.core import Command
+from distutils.debug import DEBUG
+from distutils.sysconfig import get_config_vars
+from distutils.errors import DistutilsPlatformError
+from distutils.file_util import write_file
+from distutils.util import convert_path, subst_vars, change_root
+from distutils.util import get_platform
+from distutils.errors import DistutilsOptionError
+from .. import _collections
+
+from site import USER_BASE
+from site import USER_SITE
+HAS_USER_SITE = True
+
+WINDOWS_SCHEME = {
+ 'purelib': '{base}/Lib/site-packages',
+ 'platlib': '{base}/Lib/site-packages',
+ 'headers': '{base}/Include/{dist_name}',
+ 'scripts': '{base}/Scripts',
+ 'data' : '{base}',
+}
+
+INSTALL_SCHEMES = {
+ 'posix_prefix': {
+ 'purelib': '{base}/lib/{implementation_lower}{py_version_short}/site-packages',
+ 'platlib': '{platbase}/{platlibdir}/{implementation_lower}{py_version_short}/site-packages',
+ 'headers': '{base}/include/{implementation_lower}{py_version_short}{abiflags}/{dist_name}',
+ 'scripts': '{base}/bin',
+ 'data' : '{base}',
+ },
+ 'posix_home': {
+ 'purelib': '{base}/lib/{implementation_lower}',
+ 'platlib': '{base}/{platlibdir}/{implementation_lower}',
+ 'headers': '{base}/include/{implementation_lower}/{dist_name}',
+ 'scripts': '{base}/bin',
+ 'data' : '{base}',
+ },
+ 'nt': WINDOWS_SCHEME,
+ 'pypy': {
+ 'purelib': '{base}/site-packages',
+ 'platlib': '{base}/site-packages',
+ 'headers': '{base}/include/{dist_name}',
+ 'scripts': '{base}/bin',
+ 'data' : '{base}',
+ },
+ 'pypy_nt': {
+ 'purelib': '{base}/site-packages',
+ 'platlib': '{base}/site-packages',
+ 'headers': '{base}/include/{dist_name}',
+ 'scripts': '{base}/Scripts',
+ 'data' : '{base}',
+ },
+ }
+
+# user site schemes
+if HAS_USER_SITE:
+ INSTALL_SCHEMES['nt_user'] = {
+ 'purelib': '{usersite}',
+ 'platlib': '{usersite}',
+ 'headers': '{userbase}/{implementation}{py_version_nodot_plat}/Include/{dist_name}',
+ 'scripts': '{userbase}/{implementation}{py_version_nodot_plat}/Scripts',
+ 'data' : '{userbase}',
+ }
+
+ INSTALL_SCHEMES['posix_user'] = {
+ 'purelib': '{usersite}',
+ 'platlib': '{usersite}',
+ 'headers':
+ '{userbase}/include/{implementation_lower}{py_version_short}{abiflags}/{dist_name}',
+ 'scripts': '{userbase}/bin',
+ 'data' : '{userbase}',
+ }
+
+# The keys to an installation scheme; if any new types of files are to be
+# installed, be sure to add an entry to every installation scheme above,
+# and to SCHEME_KEYS here.
+SCHEME_KEYS = ('purelib', 'platlib', 'headers', 'scripts', 'data')
+
+
+def _load_sysconfig_schemes():
+ with contextlib.suppress(AttributeError):
+ return {
+ scheme: sysconfig.get_paths(scheme, expand=False)
+ for scheme in sysconfig.get_scheme_names()
+ }
+
+
+def _load_schemes():
+ """
+ Extend default schemes with schemes from sysconfig.
+ """
+
+ sysconfig_schemes = _load_sysconfig_schemes() or {}
+
+ return {
+ scheme: {
+ **INSTALL_SCHEMES.get(scheme, {}),
+ **sysconfig_schemes.get(scheme, {}),
+ }
+ for scheme in set(itertools.chain(INSTALL_SCHEMES, sysconfig_schemes))
+ }
+
+
+def _get_implementation():
+ if hasattr(sys, 'pypy_version_info'):
+ return 'PyPy'
+ else:
+ return 'Python'
+
+
+def _select_scheme(ob, name):
+ scheme = _inject_headers(name, _load_scheme(_resolve_scheme(name)))
+ vars(ob).update(_remove_set(ob, _scheme_attrs(scheme)))
+
+
+def _remove_set(ob, attrs):
+ """
+ Include only attrs that are None in ob.
+ """
+ return {
+ key: value
+ for key, value in attrs.items()
+ if getattr(ob, key) is None
+ }
+
+
+def _resolve_scheme(name):
+ os_name, sep, key = name.partition('_')
+ try:
+ resolved = sysconfig.get_preferred_scheme(key)
+ except Exception:
+ resolved = _pypy_hack(name)
+ return resolved
+
+
+def _load_scheme(name):
+ return _load_schemes()[name]
+
+
+def _inject_headers(name, scheme):
+ """
+ Given a scheme name and the resolved scheme,
+ if the scheme does not include headers, resolve
+ the fallback scheme for the name and use headers
+ from it. pypa/distutils#88
+ """
+ # Bypass the preferred scheme, which may not
+ # have defined headers.
+ fallback = _load_scheme(_pypy_hack(name))
+ scheme.setdefault('headers', fallback['headers'])
+ return scheme
+
+
+def _scheme_attrs(scheme):
+ """Resolve install directories by applying the install schemes."""
+ return {
+ f'install_{key}': scheme[key]
+ for key in SCHEME_KEYS
+ }
+
+
+def _pypy_hack(name):
+ PY37 = sys.version_info < (3, 8)
+ old_pypy = hasattr(sys, 'pypy_version_info') and PY37
+ prefix = not name.endswith(('_user', '_home'))
+ pypy_name = 'pypy' + '_nt' * (os.name == 'nt')
+ return pypy_name if old_pypy and prefix else name
+
+
+class install(Command):
+
+ description = "install everything from build directory"
+
+ user_options = [
+ # Select installation scheme and set base director(y|ies)
+ ('prefix=', None,
+ "installation prefix"),
+ ('exec-prefix=', None,
+ "(Unix only) prefix for platform-specific files"),
+ ('home=', None,
+ "(Unix only) home directory to install under"),
+
+ # Or, just set the base director(y|ies)
+ ('install-base=', None,
+ "base installation directory (instead of --prefix or --home)"),
+ ('install-platbase=', None,
+ "base installation directory for platform-specific files " +
+ "(instead of --exec-prefix or --home)"),
+ ('root=', None,
+ "install everything relative to this alternate root directory"),
+
+ # Or, explicitly set the installation scheme
+ ('install-purelib=', None,
+ "installation directory for pure Python module distributions"),
+ ('install-platlib=', None,
+ "installation directory for non-pure module distributions"),
+ ('install-lib=', None,
+ "installation directory for all module distributions " +
+ "(overrides --install-purelib and --install-platlib)"),
+
+ ('install-headers=', None,
+ "installation directory for C/C++ headers"),
+ ('install-scripts=', None,
+ "installation directory for Python scripts"),
+ ('install-data=', None,
+ "installation directory for data files"),
+
+ # Byte-compilation options -- see install_lib.py for details, as
+ # these are duplicated from there (but only install_lib does
+ # anything with them).
+ ('compile', 'c', "compile .py to .pyc [default]"),
+ ('no-compile', None, "don't compile .py files"),
+ ('optimize=', 'O',
+ "also compile with optimization: -O1 for \"python -O\", "
+ "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
+
+ # Miscellaneous control options
+ ('force', 'f',
+ "force installation (overwrite any existing files)"),
+ ('skip-build', None,
+ "skip rebuilding everything (for testing/debugging)"),
+
+ # Where to install documentation (eventually!)
+ #('doc-format=', None, "format of documentation to generate"),
+ #('install-man=', None, "directory for Unix man pages"),
+ #('install-html=', None, "directory for HTML documentation"),
+ #('install-info=', None, "directory for GNU info files"),
+
+ ('record=', None,
+ "filename in which to record list of installed files"),
+ ]
+
+ boolean_options = ['compile', 'force', 'skip-build']
+
+ if HAS_USER_SITE:
+ user_options.append(('user', None,
+ "install in user site-package '%s'" % USER_SITE))
+ boolean_options.append('user')
+
+ negative_opt = {'no-compile' : 'compile'}
+
+
+ def initialize_options(self):
+ """Initializes options."""
+ # High-level options: these select both an installation base
+ # and scheme.
+ self.prefix = None
+ self.exec_prefix = None
+ self.home = None
+ self.user = 0
+
+ # These select only the installation base; it's up to the user to
+ # specify the installation scheme (currently, that means supplying
+ # the --install-{platlib,purelib,scripts,data} options).
+ self.install_base = None
+ self.install_platbase = None
+ self.root = None
+
+ # These options are the actual installation directories; if not
+ # supplied by the user, they are filled in using the installation
+ # scheme implied by prefix/exec-prefix/home and the contents of
+ # that installation scheme.
+ self.install_purelib = None # for pure module distributions
+ self.install_platlib = None # non-pure (dists w/ extensions)
+ self.install_headers = None # for C/C++ headers
+ self.install_lib = None # set to either purelib or platlib
+ self.install_scripts = None
+ self.install_data = None
+ self.install_userbase = USER_BASE
+ self.install_usersite = USER_SITE
+
+ self.compile = None
+ self.optimize = None
+
+ # Deprecated
+ # These two are for putting non-packagized distributions into their
+ # own directory and creating a .pth file if it makes sense.
+ # 'extra_path' comes from the setup file; 'install_path_file' can
+ # be turned off if it makes no sense to install a .pth file. (But
+ # better to install it uselessly than to guess wrong and not
+ # install it when it's necessary and would be used!) Currently,
+ # 'install_path_file' is always true unless some outsider meddles
+ # with it.
+ self.extra_path = None
+ self.install_path_file = 1
+
+ # 'force' forces installation, even if target files are not
+ # out-of-date. 'skip_build' skips running the "build" command,
+ # handy if you know it's not necessary. 'warn_dir' (which is *not*
+ # a user option, it's just there so the bdist_* commands can turn
+ # it off) determines whether we warn about installing to a
+ # directory not in sys.path.
+ self.force = 0
+ self.skip_build = 0
+ self.warn_dir = 1
+
+ # These are only here as a conduit from the 'build' command to the
+ # 'install_*' commands that do the real work. ('build_base' isn't
+ # actually used anywhere, but it might be useful in future.) They
+ # are not user options, because if the user told the install
+ # command where the build directory is, that wouldn't affect the
+ # build command.
+ self.build_base = None
+ self.build_lib = None
+
+ # Not defined yet because we don't know anything about
+ # documentation yet.
+ #self.install_man = None
+ #self.install_html = None
+ #self.install_info = None
+
+ self.record = None
+
+
+ # -- Option finalizing methods -------------------------------------
+ # (This is rather more involved than for most commands,
+ # because this is where the policy for installing third-
+ # party Python modules on various platforms given a wide
+ # array of user input is decided. Yes, it's quite complex!)
+
+ def finalize_options(self):
+ """Finalizes options."""
+ # This method (and its helpers, like 'finalize_unix()',
+ # 'finalize_other()', and 'select_scheme()') is where the default
+ # installation directories for modules, extension modules, and
+ # anything else we care to install from a Python module
+ # distribution are determined. Thus, this code makes a pretty important policy
+ # statement about how third-party stuff is added to a Python
+ # installation! Note that the actual work of installation is done
+ # by the relatively simple 'install_*' commands; they just take
+ # their orders from the installation directory options determined
+ # here.
+
+ # Check for errors/inconsistencies in the options; first, stuff
+ # that's wrong on any platform.
+
+ if ((self.prefix or self.exec_prefix or self.home) and
+ (self.install_base or self.install_platbase)):
+ raise DistutilsOptionError(
+ "must supply either prefix/exec-prefix/home or " +
+ "install-base/install-platbase -- not both")
+
+ if self.home and (self.prefix or self.exec_prefix):
+ raise DistutilsOptionError(
+ "must supply either home or prefix/exec-prefix -- not both")
+
+ if self.user and (self.prefix or self.exec_prefix or self.home or
+ self.install_base or self.install_platbase):
+ raise DistutilsOptionError("can't combine user with prefix, "
+ "exec_prefix/home, or install_(plat)base")
+
+ # Next, stuff that's wrong (or dubious) only on certain platforms.
+ if os.name != "posix":
+ if self.exec_prefix:
+ self.warn("exec-prefix option ignored on this platform")
+ self.exec_prefix = None
+
+ # Now the interesting logic -- so interesting that we farm it out
+ # to other methods. The goal of these methods is to set the final
+ # values for the install_{lib,scripts,data,...} options, using as
+ # input a heady brew of prefix, exec_prefix, home, install_base,
+ # install_platbase, user-supplied versions of
+ # install_{purelib,platlib,lib,scripts,data,...}, and the
+ # install schemes. Phew!
+
+ self.dump_dirs("pre-finalize_{unix,other}")
+
+ if os.name == 'posix':
+ self.finalize_unix()
+ else:
+ self.finalize_other()
+
+ self.dump_dirs("post-finalize_{unix,other}()")
+
+ # Expand configuration variables, tilde, etc. in self.install_base
+ # and self.install_platbase -- that way, we can use $base or
+ # $platbase in the other installation directories and not worry
+ # about needing recursive variable expansion (shudder).
+
+ py_version = sys.version.split()[0]
+ (prefix, exec_prefix) = get_config_vars('prefix', 'exec_prefix')
+ try:
+ abiflags = sys.abiflags
+ except AttributeError:
+ # sys.abiflags may not be defined on all platforms.
+ abiflags = ''
+ local_vars = {
+ 'dist_name': self.distribution.get_name(),
+ 'dist_version': self.distribution.get_version(),
+ 'dist_fullname': self.distribution.get_fullname(),
+ 'py_version': py_version,
+ 'py_version_short': '%d.%d' % sys.version_info[:2],
+ 'py_version_nodot': '%d%d' % sys.version_info[:2],
+ 'sys_prefix': prefix,
+ 'prefix': prefix,
+ 'sys_exec_prefix': exec_prefix,
+ 'exec_prefix': exec_prefix,
+ 'abiflags': abiflags,
+ 'platlibdir': getattr(sys, 'platlibdir', 'lib'),
+ 'implementation_lower': _get_implementation().lower(),
+ 'implementation': _get_implementation(),
+ }
+
+ # vars for compatibility on older Pythons
+ compat_vars = dict(
+ # Python 3.9 and earlier
+ py_version_nodot_plat=getattr(sys, 'winver', '').replace('.', ''),
+ )
+
+ if HAS_USER_SITE:
+ local_vars['userbase'] = self.install_userbase
+ local_vars['usersite'] = self.install_usersite
+
+ self.config_vars = _collections.DictStack(
+ [compat_vars, sysconfig.get_config_vars(), local_vars])
+
+ self.expand_basedirs()
+
+ self.dump_dirs("post-expand_basedirs()")
+
+ # Now define config vars for the base directories so we can expand
+ # everything else.
+ local_vars['base'] = self.install_base
+ local_vars['platbase'] = self.install_platbase
+
+ if DEBUG:
+ from pprint import pprint
+ print("config vars:")
+ pprint(dict(self.config_vars))
+
+ # Expand "~" and configuration variables in the installation
+ # directories.
+ self.expand_dirs()
+
+ self.dump_dirs("post-expand_dirs()")
+
+ # Create directories in the home dir:
+ if self.user:
+ self.create_home_path()
+
+ # Pick the actual directory to install all modules to: either
+ # install_purelib or install_platlib, depending on whether this
+ # module distribution is pure or not. Of course, if the user
+ # already specified install_lib, use their selection.
+ if self.install_lib is None:
+ if self.distribution.has_ext_modules(): # has extensions: non-pure
+ self.install_lib = self.install_platlib
+ else:
+ self.install_lib = self.install_purelib
+
+
+ # Convert directories from Unix /-separated syntax to the local
+ # convention.
+ self.convert_paths('lib', 'purelib', 'platlib',
+ 'scripts', 'data', 'headers',
+ 'userbase', 'usersite')
+
+ # Deprecated
+ # Well, we're not actually fully completely finalized yet: we still
+ # have to deal with 'extra_path', which is the hack for allowing
+ # non-packagized module distributions (hello, Numerical Python!) to
+ # get their own directories.
+ self.handle_extra_path()
+ self.install_libbase = self.install_lib # needed for .pth file
+ self.install_lib = os.path.join(self.install_lib, self.extra_dirs)
+
+ # If a new root directory was supplied, make all the installation
+ # dirs relative to it.
+ if self.root is not None:
+ self.change_roots('libbase', 'lib', 'purelib', 'platlib',
+ 'scripts', 'data', 'headers')
+
+ self.dump_dirs("after prepending root")
+
+ # Find out the build directories, ie. where to install from.
+ self.set_undefined_options('build',
+ ('build_base', 'build_base'),
+ ('build_lib', 'build_lib'))
+
+ # Punt on doc directories for now -- after all, we're punting on
+ # documentation completely!
+
+ def dump_dirs(self, msg):
+ """Dumps the list of user options."""
+ if not DEBUG:
+ return
+ from distutils.fancy_getopt import longopt_xlate
+ log.debug(msg + ":")
+ for opt in self.user_options:
+ opt_name = opt[0]
+ if opt_name[-1] == "=":
+ opt_name = opt_name[0:-1]
+ if opt_name in self.negative_opt:
+ opt_name = self.negative_opt[opt_name]
+ opt_name = opt_name.translate(longopt_xlate)
+ val = not getattr(self, opt_name)
+ else:
+ opt_name = opt_name.translate(longopt_xlate)
+ val = getattr(self, opt_name)
+ log.debug(" %s: %s", opt_name, val)
+
+ def finalize_unix(self):
+ """Finalizes options for posix platforms."""
+ if self.install_base is not None or self.install_platbase is not None:
+ incomplete_scheme = (
+ (
+ self.install_lib is None and
+ self.install_purelib is None and
+ self.install_platlib is None
+ ) or
+ self.install_headers is None or
+ self.install_scripts is None or
+ self.install_data is None
+ )
+ if incomplete_scheme:
+ raise DistutilsOptionError(
+ "install-base or install-platbase supplied, but "
+ "installation scheme is incomplete")
+ return
+
+ if self.user:
+ if self.install_userbase is None:
+ raise DistutilsPlatformError(
+ "User base directory is not specified")
+ self.install_base = self.install_platbase = self.install_userbase
+ self.select_scheme("posix_user")
+ elif self.home is not None:
+ self.install_base = self.install_platbase = self.home
+ self.select_scheme("posix_home")
+ else:
+ if self.prefix is None:
+ if self.exec_prefix is not None:
+ raise DistutilsOptionError(
+ "must not supply exec-prefix without prefix")
+
+ # Allow Fedora to add components to the prefix
+ _prefix_addition = getattr(sysconfig, '_prefix_addition', "")
+
+ self.prefix = (
+ os.path.normpath(sys.prefix) + _prefix_addition)
+ self.exec_prefix = (
+ os.path.normpath(sys.exec_prefix) + _prefix_addition)
+
+ else:
+ if self.exec_prefix is None:
+ self.exec_prefix = self.prefix
+
+ self.install_base = self.prefix
+ self.install_platbase = self.exec_prefix
+ self.select_scheme("posix_prefix")
+
+ def finalize_other(self):
+ """Finalizes options for non-posix platforms"""
+ if self.user:
+ if self.install_userbase is None:
+ raise DistutilsPlatformError(
+ "User base directory is not specified")
+ self.install_base = self.install_platbase = self.install_userbase
+ self.select_scheme(os.name + "_user")
+ elif self.home is not None:
+ self.install_base = self.install_platbase = self.home
+ self.select_scheme("posix_home")
+ else:
+ if self.prefix is None:
+ self.prefix = os.path.normpath(sys.prefix)
+
+ self.install_base = self.install_platbase = self.prefix
+ try:
+ self.select_scheme(os.name)
+ except KeyError:
+ raise DistutilsPlatformError(
+ "I don't know how to install stuff on '%s'" % os.name)
+
+ def select_scheme(self, name):
+ _select_scheme(self, name)
+
+ def _expand_attrs(self, attrs):
+ for attr in attrs:
+ val = getattr(self, attr)
+ if val is not None:
+ if os.name == 'posix' or os.name == 'nt':
+ val = os.path.expanduser(val)
+ val = subst_vars(val, self.config_vars)
+ setattr(self, attr, val)
+
+ def expand_basedirs(self):
+ """Calls `os.path.expanduser` on install_base, install_platbase and
+ root."""
+ self._expand_attrs(['install_base', 'install_platbase', 'root'])
+
+ def expand_dirs(self):
+ """Calls `os.path.expanduser` on install dirs."""
+ self._expand_attrs(['install_purelib', 'install_platlib',
+ 'install_lib', 'install_headers',
+ 'install_scripts', 'install_data',])
+
+ def convert_paths(self, *names):
+ """Call `convert_path` over `names`."""
+ for name in names:
+ attr = "install_" + name
+ setattr(self, attr, convert_path(getattr(self, attr)))
+
+ def handle_extra_path(self):
+ """Set `path_file` and `extra_dirs` using `extra_path`."""
+ if self.extra_path is None:
+ self.extra_path = self.distribution.extra_path
+
+ if self.extra_path is not None:
+ log.warn(
+ "Distribution option extra_path is deprecated. "
+ "See issue27919 for details."
+ )
+ if isinstance(self.extra_path, str):
+ self.extra_path = self.extra_path.split(',')
+
+ if len(self.extra_path) == 1:
+ path_file = extra_dirs = self.extra_path[0]
+ elif len(self.extra_path) == 2:
+ path_file, extra_dirs = self.extra_path
+ else:
+ raise DistutilsOptionError(
+ "'extra_path' option must be a list, tuple, or "
+ "comma-separated string with 1 or 2 elements")
+
+ # convert to local form in case Unix notation used (as it
+ # should be in setup scripts)
+ extra_dirs = convert_path(extra_dirs)
+ else:
+ path_file = None
+ extra_dirs = ''
+
+ # XXX should we warn if path_file and not extra_dirs? (in which
+ # case the path file would be harmless but pointless)
+ self.path_file = path_file
+ self.extra_dirs = extra_dirs
+
+ def change_roots(self, *names):
+ """Change the install directories named in 'names' to be under self.root."""
+ for name in names:
+ attr = "install_" + name
+ setattr(self, attr, change_root(self.root, getattr(self, attr)))
+
+ def create_home_path(self):
+ """Create directories under ~."""
+ if not self.user:
+ return
+ home = convert_path(os.path.expanduser("~"))
+ for name, path in self.config_vars.items():
+ if str(path).startswith(home) and not os.path.isdir(path):
+ self.debug_print("os.makedirs('%s', 0o700)" % path)
+ os.makedirs(path, 0o700)
+
+ # -- Command execution methods -------------------------------------
+
+ def run(self):
+ """Runs the command."""
+ # Obviously have to build before we can install
+ if not self.skip_build:
+ self.run_command('build')
+ # If we built for any other platform, we can't install.
+ build_plat = self.distribution.get_command_obj('build').plat_name
+ # check warn_dir - it is a clue that the 'install' is happening
+ # internally, and not to sys.path, so we don't check the platform
+ # matches what we are running.
+ if self.warn_dir and build_plat != get_platform():
+ raise DistutilsPlatformError("Can't install when "
+ "cross-compiling")
+
+ # Run all sub-commands (at least those that need to be run)
+ for cmd_name in self.get_sub_commands():
+ self.run_command(cmd_name)
+
+ if self.path_file:
+ self.create_path_file()
+
+ # write list of installed files, if requested.
+ if self.record:
+ outputs = self.get_outputs()
+ if self.root: # strip any package prefix
+ root_len = len(self.root)
+ for counter in range(len(outputs)):
+ outputs[counter] = outputs[counter][root_len:]
+ self.execute(write_file,
+ (self.record, outputs),
+ "writing list of installed files to '%s'" %
+ self.record)
+
+ sys_path = map(os.path.normpath, sys.path)
+ sys_path = map(os.path.normcase, sys_path)
+ install_lib = os.path.normcase(os.path.normpath(self.install_lib))
+ if (self.warn_dir and
+ not (self.path_file and self.install_path_file) and
+ install_lib not in sys_path):
+ log.debug(("modules installed to '%s', which is not in "
+ "Python's module search path (sys.path) -- "
+ "you'll have to change the search path yourself"),
+ self.install_lib)
+
+ def create_path_file(self):
+ """Creates the .pth file"""
+ filename = os.path.join(self.install_libbase,
+ self.path_file + ".pth")
+ if self.install_path_file:
+ self.execute(write_file,
+ (filename, [self.extra_dirs]),
+ "creating %s" % filename)
+ else:
+ self.warn("path file '%s' not created" % filename)
+
+
+ # -- Reporting methods ---------------------------------------------
+
+ def get_outputs(self):
+ """Assembles the outputs of all the sub-commands."""
+ outputs = []
+ for cmd_name in self.get_sub_commands():
+ cmd = self.get_finalized_command(cmd_name)
+ # Add the contents of cmd.get_outputs(), ensuring
+ # that outputs doesn't contain duplicate entries
+ for filename in cmd.get_outputs():
+ if filename not in outputs:
+ outputs.append(filename)
+
+ if self.path_file and self.install_path_file:
+ outputs.append(os.path.join(self.install_libbase,
+ self.path_file + ".pth"))
+
+ return outputs
+
+ def get_inputs(self):
+ """Returns the inputs of all the sub-commands"""
+ # XXX gee, this looks familiar ;-(
+ inputs = []
+ for cmd_name in self.get_sub_commands():
+ cmd = self.get_finalized_command(cmd_name)
+ inputs.extend(cmd.get_inputs())
+
+ return inputs
+
+ # -- Predicates for sub-command list -------------------------------
+
+ def has_lib(self):
+ """Returns true if the current distribution has any Python
+ modules to install."""
+ return (self.distribution.has_pure_modules() or
+ self.distribution.has_ext_modules())
+
+ def has_headers(self):
+ """Returns true if the current distribution has any headers to
+ install."""
+ return self.distribution.has_headers()
+
+ def has_scripts(self):
+ """Returns true if the current distribution has any scripts to.
+ install."""
+ return self.distribution.has_scripts()
+
+ def has_data(self):
+ """Returns true if the current distribution has any data to.
+ install."""
+ return self.distribution.has_data_files()
+
+ # 'sub_commands': a list of commands this command might have to run to
+ # get its work done. See cmd.py for more info.
+ sub_commands = [('install_lib', has_lib),
+ ('install_headers', has_headers),
+ ('install_scripts', has_scripts),
+ ('install_data', has_data),
+ ('install_egg_info', lambda self:True),
+ ]
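A minimal standalone sketch of the 'extra_path' normalization above: a comma-separated string or a 1- or 2-element sequence becomes a (path file, extra dirs) pair. The helper and the names are illustrative, not part of the vendored file:

    def split_extra_path(extra_path):
        # Accepts a comma-separated string or a 1- or 2-element sequence.
        if isinstance(extra_path, str):
            extra_path = extra_path.split(',')
        if len(extra_path) == 1:
            return extra_path[0], extra_path[0]   # path file name doubles as the subdir
        if len(extra_path) == 2:
            return tuple(extra_path)              # (path file name, extra dirs)
        raise ValueError("extra_path must have 1 or 2 elements")

    assert split_extra_path("mypkg") == ("mypkg", "mypkg")
    assert split_extra_path("mypkg,lib/mypkg") == ("mypkg", "lib/mypkg")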
diff --git a/setuptools/_distutils/command/install_data.py b/setuptools/_distutils/command/install_data.py
new file mode 100644
index 0000000..947cd76
--- /dev/null
+++ b/setuptools/_distutils/command/install_data.py
@@ -0,0 +1,79 @@
+"""distutils.command.install_data
+
+Implements the Distutils 'install_data' command, for installing
+platform-independent data files."""
+
+# contributed by Bastian Kleineidam
+
+import os
+from distutils.core import Command
+from distutils.util import change_root, convert_path
+
+class install_data(Command):
+
+ description = "install data files"
+
+ user_options = [
+ ('install-dir=', 'd',
+ "base directory for installing data files "
+ "(default: installation base dir)"),
+ ('root=', None,
+ "install everything relative to this alternate root directory"),
+ ('force', 'f', "force installation (overwrite existing files)"),
+ ]
+
+ boolean_options = ['force']
+
+ def initialize_options(self):
+ self.install_dir = None
+ self.outfiles = []
+ self.root = None
+ self.force = 0
+ self.data_files = self.distribution.data_files
+ self.warn_dir = 1
+
+ def finalize_options(self):
+ self.set_undefined_options('install',
+ ('install_data', 'install_dir'),
+ ('root', 'root'),
+ ('force', 'force'),
+ )
+
+ def run(self):
+ self.mkpath(self.install_dir)
+ for f in self.data_files:
+ if isinstance(f, str):
+ # it's a simple file, so copy it
+ f = convert_path(f)
+ if self.warn_dir:
+ self.warn("setup script did not provide a directory for "
+ "'%s' -- installing right in '%s'" %
+ (f, self.install_dir))
+ (out, _) = self.copy_file(f, self.install_dir)
+ self.outfiles.append(out)
+ else:
+ # it's a tuple with path to install to and a list of files
+ dir = convert_path(f[0])
+ if not os.path.isabs(dir):
+ dir = os.path.join(self.install_dir, dir)
+ elif self.root:
+ dir = change_root(self.root, dir)
+ self.mkpath(dir)
+
+ if f[1] == []:
+ # If there are no files listed, the user must be
+ # trying to create an empty directory, so add the
+ # directory to the list of output files.
+ self.outfiles.append(dir)
+ else:
+ # Copy files, adding them to the list of output files.
+ for data in f[1]:
+ data = convert_path(data)
+ (out, _) = self.copy_file(data, dir)
+ self.outfiles.append(out)
+
+ def get_inputs(self):
+ return self.data_files or []
+
+ def get_outputs(self):
+ return self.outfiles
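For reference, these are the two entry shapes that install_data.run() accepts in data_files; the package and file names below are hypothetical:

    data_files = [
        "README.txt",                        # bare string: copied straight into install_dir
        ("share/mypkg", ["data/cfg.ini"]),   # (target dir, [files]): dir is created, files copied
        ("share/mypkg/empty", []),           # empty file list: only the directory is recorded
    ]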
diff --git a/setuptools/_distutils/command/install_egg_info.py b/setuptools/_distutils/command/install_egg_info.py
new file mode 100644
index 0000000..adc0323
--- /dev/null
+++ b/setuptools/_distutils/command/install_egg_info.py
@@ -0,0 +1,84 @@
+"""distutils.command.install_egg_info
+
+Implements the Distutils 'install_egg_info' command, for installing
+a package's PKG-INFO metadata."""
+
+
+from distutils.cmd import Command
+from distutils import log, dir_util
+import os, sys, re
+
+class install_egg_info(Command):
+ """Install an .egg-info file for the package"""
+
+ description = "Install package's PKG-INFO metadata as an .egg-info file"
+ user_options = [
+ ('install-dir=', 'd', "directory to install to"),
+ ]
+
+ def initialize_options(self):
+ self.install_dir = None
+
+ @property
+ def basename(self):
+ """
+ Allow basename to be overridden by child class.
+ Ref pypa/distutils#2.
+ """
+ return "%s-%s-py%d.%d.egg-info" % (
+ to_filename(safe_name(self.distribution.get_name())),
+ to_filename(safe_version(self.distribution.get_version())),
+ *sys.version_info[:2]
+ )
+
+ def finalize_options(self):
+ self.set_undefined_options('install_lib',('install_dir','install_dir'))
+ self.target = os.path.join(self.install_dir, self.basename)
+ self.outputs = [self.target]
+
+ def run(self):
+ target = self.target
+ if os.path.isdir(target) and not os.path.islink(target):
+ dir_util.remove_tree(target, dry_run=self.dry_run)
+ elif os.path.exists(target):
+ self.execute(os.unlink,(self.target,),"Removing "+target)
+ elif not os.path.isdir(self.install_dir):
+ self.execute(os.makedirs, (self.install_dir,),
+ "Creating "+self.install_dir)
+ log.info("Writing %s", target)
+ if not self.dry_run:
+ with open(target, 'w', encoding='UTF-8') as f:
+ self.distribution.metadata.write_pkg_file(f)
+
+ def get_outputs(self):
+ return self.outputs
+
+
+# The following routines are taken from setuptools' pkg_resources module and
+# can be replaced by importing them from pkg_resources once it is included
+# in the stdlib.
+
+def safe_name(name):
+ """Convert an arbitrary string to a standard distribution name
+
+ Any runs of non-alphanumeric/. characters are replaced with a single '-'.
+ """
+ return re.sub('[^A-Za-z0-9.]+', '-', name)
+
+
+def safe_version(version):
+ """Convert an arbitrary string to a standard version string
+
+ Spaces become dots, and all other non-alphanumeric characters become
+ dashes, with runs of multiple dashes condensed to a single dash.
+ """
+ version = version.replace(' ','.')
+ return re.sub('[^A-Za-z0-9.]+', '-', version)
+
+
+def to_filename(name):
+ """Convert a project or version name to its filename-escaped form
+
+ Any '-' characters are currently replaced with '_'.
+ """
+ return name.replace('-','_')
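A small worked example of how safe_name, safe_version, and to_filename combine into the .egg-info basename; the project name and version are made up:

    import re
    import sys

    def safe_name(name):
        return re.sub('[^A-Za-z0-9.]+', '-', name)

    def safe_version(version):
        return re.sub('[^A-Za-z0-9.]+', '-', version.replace(' ', '.'))

    def to_filename(name):
        return name.replace('-', '_')

    basename = "%s-%s-py%d.%d.egg-info" % (
        to_filename(safe_name("My Project")),    # 'My Project' -> 'My-Project' -> 'My_Project'
        to_filename(safe_version("1.0 beta")),   # '1.0 beta'   -> '1.0.beta'
        *sys.version_info[:2],
    )
    # e.g. 'My_Project-1.0.beta-py3.10.egg-info' on CPython 3.10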
diff --git a/setuptools/_distutils/command/install_headers.py b/setuptools/_distutils/command/install_headers.py
new file mode 100644
index 0000000..9bb0b18
--- /dev/null
+++ b/setuptools/_distutils/command/install_headers.py
@@ -0,0 +1,47 @@
+"""distutils.command.install_headers
+
+Implements the Distutils 'install_headers' command, to install C/C++ header
+files to the Python include directory."""
+
+from distutils.core import Command
+
+
+# XXX force is never used
+class install_headers(Command):
+
+ description = "install C/C++ header files"
+
+ user_options = [('install-dir=', 'd',
+ "directory to install header files to"),
+ ('force', 'f',
+ "force installation (overwrite existing files)"),
+ ]
+
+ boolean_options = ['force']
+
+ def initialize_options(self):
+ self.install_dir = None
+ self.force = 0
+ self.outfiles = []
+
+ def finalize_options(self):
+ self.set_undefined_options('install',
+ ('install_headers', 'install_dir'),
+ ('force', 'force'))
+
+
+ def run(self):
+ headers = self.distribution.headers
+ if not headers:
+ return
+
+ self.mkpath(self.install_dir)
+ for header in headers:
+ (out, _) = self.copy_file(header, self.install_dir)
+ self.outfiles.append(out)
+
+ def get_inputs(self):
+ return self.distribution.headers or []
+
+ def get_outputs(self):
+ return self.outfiles
diff --git a/setuptools/_distutils/command/install_lib.py b/setuptools/_distutils/command/install_lib.py
new file mode 100644
index 0000000..6154cf0
--- /dev/null
+++ b/setuptools/_distutils/command/install_lib.py
@@ -0,0 +1,217 @@
+"""distutils.command.install_lib
+
+Implements the Distutils 'install_lib' command
+(install all Python modules)."""
+
+import os
+import importlib.util
+import sys
+
+from distutils.core import Command
+from distutils.errors import DistutilsOptionError
+
+
+# Extension for Python source files.
+PYTHON_SOURCE_EXTENSION = ".py"
+
+class install_lib(Command):
+
+ description = "install all Python modules (extensions and pure Python)"
+
+ # The byte-compilation options are a tad confusing. Here are the
+ # possible scenarios:
+ # 1) no compilation at all (--no-compile --no-optimize)
+ # 2) compile .pyc only (--compile --no-optimize; default)
+ # 3) compile .pyc and "opt-1" .pyc (--compile --optimize)
+ # 4) compile "opt-1" .pyc only (--no-compile --optimize)
+ # 5) compile .pyc and "opt-2" .pyc (--compile --optimize-more)
+ # 6) compile "opt-2" .pyc only (--no-compile --optimize-more)
+ #
+ # The UI for this is two options, 'compile' and 'optimize'.
+ # 'compile' is strictly boolean, and only decides whether to
+ # generate .pyc files. 'optimize' is three-way (0, 1, or 2), and
+ # decides both whether to generate .pyc files and what level of
+ # optimization to use.
+
+ user_options = [
+ ('install-dir=', 'd', "directory to install to"),
+ ('build-dir=','b', "build directory (where to install from)"),
+ ('force', 'f', "force installation (overwrite existing files)"),
+ ('compile', 'c', "compile .py to .pyc [default]"),
+ ('no-compile', None, "don't compile .py files"),
+ ('optimize=', 'O',
+ "also compile with optimization: -O1 for \"python -O\", "
+ "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
+ ('skip-build', None, "skip the build steps"),
+ ]
+
+ boolean_options = ['force', 'compile', 'skip-build']
+ negative_opt = {'no-compile' : 'compile'}
+
+ def initialize_options(self):
+ # let the 'install' command dictate our installation directory
+ self.install_dir = None
+ self.build_dir = None
+ self.force = 0
+ self.compile = None
+ self.optimize = None
+ self.skip_build = None
+
+ def finalize_options(self):
+ # Get all the information we need to install pure Python modules
+ # from the umbrella 'install' command -- build (source) directory,
+ # install (target) directory, and whether to compile .py files.
+ self.set_undefined_options('install',
+ ('build_lib', 'build_dir'),
+ ('install_lib', 'install_dir'),
+ ('force', 'force'),
+ ('compile', 'compile'),
+ ('optimize', 'optimize'),
+ ('skip_build', 'skip_build'),
+ )
+
+ if self.compile is None:
+ self.compile = True
+ if self.optimize is None:
+ self.optimize = False
+
+ if not isinstance(self.optimize, int):
+ try:
+ self.optimize = int(self.optimize)
+ if self.optimize not in (0, 1, 2):
+ raise AssertionError
+ except (ValueError, AssertionError):
+ raise DistutilsOptionError("optimize must be 0, 1, or 2")
+
+ def run(self):
+ # Make sure we have built everything we need first
+ self.build()
+
+ # Install everything: simply dump the entire contents of the build
+ # directory to the installation directory (that's the beauty of
+ # having a build directory!)
+ outfiles = self.install()
+
+ # (Optionally) compile .py to .pyc
+ if outfiles is not None and self.distribution.has_pure_modules():
+ self.byte_compile(outfiles)
+
+ # -- Top-level worker functions ------------------------------------
+ # (called from 'run()')
+
+ def build(self):
+ if not self.skip_build:
+ if self.distribution.has_pure_modules():
+ self.run_command('build_py')
+ if self.distribution.has_ext_modules():
+ self.run_command('build_ext')
+
+ def install(self):
+ if os.path.isdir(self.build_dir):
+ outfiles = self.copy_tree(self.build_dir, self.install_dir)
+ else:
+ self.warn("'%s' does not exist -- no Python modules to install" %
+ self.build_dir)
+ return
+ return outfiles
+
+ def byte_compile(self, files):
+ if sys.dont_write_bytecode:
+ self.warn('byte-compiling is disabled, skipping.')
+ return
+
+ from distutils.util import byte_compile
+
+ # Get the "--root" directory supplied to the "install" command,
+ # and use it as a prefix to strip off the purported filename
+ # encoded in bytecode files. This is far from complete, but it
+ # should at least generate usable bytecode in RPM distributions.
+ install_root = self.get_finalized_command('install').root
+
+ if self.compile:
+ byte_compile(files, optimize=0,
+ force=self.force, prefix=install_root,
+ dry_run=self.dry_run)
+ if self.optimize > 0:
+ byte_compile(files, optimize=self.optimize,
+ force=self.force, prefix=install_root,
+ verbose=self.verbose, dry_run=self.dry_run)
+
+
+ # -- Utility methods -----------------------------------------------
+
+ def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir):
+ if not has_any:
+ return []
+
+ build_cmd = self.get_finalized_command(build_cmd)
+ build_files = build_cmd.get_outputs()
+ build_dir = getattr(build_cmd, cmd_option)
+
+ prefix_len = len(build_dir) + len(os.sep)
+ outputs = []
+ for file in build_files:
+ outputs.append(os.path.join(output_dir, file[prefix_len:]))
+
+ return outputs
+
+ def _bytecode_filenames(self, py_filenames):
+ bytecode_files = []
+ for py_file in py_filenames:
+ # Since build_py handles package data installation, the
+ # list of outputs can contain more than just .py files.
+ # Make sure we only report bytecode for the .py files.
+ ext = os.path.splitext(os.path.normcase(py_file))[1]
+ if ext != PYTHON_SOURCE_EXTENSION:
+ continue
+ if self.compile:
+ bytecode_files.append(importlib.util.cache_from_source(
+ py_file, optimization=''))
+ if self.optimize > 0:
+ bytecode_files.append(importlib.util.cache_from_source(
+ py_file, optimization=self.optimize))
+
+ return bytecode_files
+
+
+ # -- External interface --------------------------------------------
+ # (called by outsiders)
+
+ def get_outputs(self):
+ """Return the list of files that would be installed if this command
+ were actually run. Not affected by the "dry-run" flag or whether
+ modules have actually been built yet.
+ """
+ pure_outputs = \
+ self._mutate_outputs(self.distribution.has_pure_modules(),
+ 'build_py', 'build_lib',
+ self.install_dir)
+ if self.compile:
+ bytecode_outputs = self._bytecode_filenames(pure_outputs)
+ else:
+ bytecode_outputs = []
+
+ ext_outputs = \
+ self._mutate_outputs(self.distribution.has_ext_modules(),
+ 'build_ext', 'build_lib',
+ self.install_dir)
+
+ return pure_outputs + bytecode_outputs + ext_outputs
+
+ def get_inputs(self):
+ """Get the list of files that are input to this command, ie. the
+ files that get installed as they are named in the build tree.
+ The files in this list correspond one-to-one to the output
+ filenames returned by 'get_outputs()'.
+ """
+ inputs = []
+
+ if self.distribution.has_pure_modules():
+ build_py = self.get_finalized_command('build_py')
+ inputs.extend(build_py.get_outputs())
+
+ if self.distribution.has_ext_modules():
+ build_ext = self.get_finalized_command('build_ext')
+ inputs.extend(build_ext.get_outputs())
+
+ return inputs
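The bytecode filenames reported by _bytecode_filenames() come from importlib.util.cache_from_source; a brief illustration for a hypothetical module:

    import importlib.util

    py_file = "mypkg/mod.py"
    plain = importlib.util.cache_from_source(py_file, optimization='')   # the --compile output
    opt1 = importlib.util.cache_from_source(py_file, optimization=1)     # the --optimize output
    # e.g. 'mypkg/__pycache__/mod.cpython-310.pyc' and
    #      'mypkg/__pycache__/mod.cpython-310.opt-1.pyc' on CPython 3.10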
diff --git a/setuptools/_distutils/command/install_scripts.py b/setuptools/_distutils/command/install_scripts.py
new file mode 100644
index 0000000..31a1130
--- /dev/null
+++ b/setuptools/_distutils/command/install_scripts.py
@@ -0,0 +1,60 @@
+"""distutils.command.install_scripts
+
+Implements the Distutils 'install_scripts' command, for installing
+Python scripts."""
+
+# contributed by Bastian Kleineidam
+
+import os
+from distutils.core import Command
+from distutils import log
+from stat import ST_MODE
+
+
+class install_scripts(Command):
+
+ description = "install scripts (Python or otherwise)"
+
+ user_options = [
+ ('install-dir=', 'd', "directory to install scripts to"),
+ ('build-dir=','b', "build directory (where to install from)"),
+ ('force', 'f', "force installation (overwrite existing files)"),
+ ('skip-build', None, "skip the build steps"),
+ ]
+
+ boolean_options = ['force', 'skip-build']
+
+ def initialize_options(self):
+ self.install_dir = None
+ self.force = 0
+ self.build_dir = None
+ self.skip_build = None
+
+ def finalize_options(self):
+ self.set_undefined_options('build', ('build_scripts', 'build_dir'))
+ self.set_undefined_options('install',
+ ('install_scripts', 'install_dir'),
+ ('force', 'force'),
+ ('skip_build', 'skip_build'),
+ )
+
+ def run(self):
+ if not self.skip_build:
+ self.run_command('build_scripts')
+ self.outfiles = self.copy_tree(self.build_dir, self.install_dir)
+ if os.name == 'posix':
+ # Set the executable bits (owner, group, and world) on
+ # all the scripts we just installed.
+ for file in self.get_outputs():
+ if self.dry_run:
+ log.info("changing mode of %s", file)
+ else:
+ mode = ((os.stat(file)[ST_MODE]) | 0o555) & 0o7777
+ log.info("changing mode of %s to %o", file, mode)
+ os.chmod(file, mode)
+
+ def get_inputs(self):
+ return self.distribution.scripts or []
+
+ def get_outputs(self):
+ return self.outfiles or []
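The chmod arithmetic in install_scripts.run() adds read/execute bits for owner, group, and world, capped at 0o7777; a quick check with a made-up starting mode:

    current_mode = 0o600                        # e.g. a script copied as rw-------
    new_mode = (current_mode | 0o555) & 0o7777
    assert oct(new_mode) == '0o755'             # rwxr-xr-x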
diff --git a/setuptools/_distutils/command/py37compat.py b/setuptools/_distutils/command/py37compat.py
new file mode 100644
index 0000000..754715a
--- /dev/null
+++ b/setuptools/_distutils/command/py37compat.py
@@ -0,0 +1,30 @@
+import sys
+
+
+def _pythonlib_compat():
+ """
+ On Python 3.7 and earlier, distutils would include the Python
+ library. See pypa/distutils#9.
+ """
+ from distutils import sysconfig
+ if not sysconfig.get_config_var('Py_ENABLED_SHARED'):
+ return
+
+ yield 'python{}.{}{}'.format(
+ sys.hexversion >> 24,
+ (sys.hexversion >> 16) & 0xff,
+ sysconfig.get_config_var('ABIFLAGS'),
+ )
+
+
+def compose(f1, f2):
+ return lambda *args, **kwargs: f1(f2(*args, **kwargs))
+
+
+pythonlib = (
+ compose(list, _pythonlib_compat)
+ if sys.version_info < (3, 8)
+ and sys.platform != 'darwin'
+ and sys.platform[:3] != 'aix'
+ else list
+)
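A worked example of the version arithmetic in _pythonlib_compat(); the hexversion value and the 'm' ABI flag are illustrative of CPython 3.7:

    hexversion = 0x030709f0                    # hypothetical sys.hexversion for CPython 3.7.9
    major = hexversion >> 24                   # 3
    minor = (hexversion >> 16) & 0xff          # 7
    libname = 'python{}.{}{}'.format(major, minor, 'm')   # ABIFLAGS assumed to be 'm'
    assert libname == 'python3.7m'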
diff --git a/setuptools/_distutils/command/register.py b/setuptools/_distutils/command/register.py
new file mode 100644
index 0000000..0fac94e
--- /dev/null
+++ b/setuptools/_distutils/command/register.py
@@ -0,0 +1,304 @@
+"""distutils.command.register
+
+Implements the Distutils 'register' command (register with the repository).
+"""
+
+# created 2002/10/21, Richard Jones
+
+import getpass
+import io
+import urllib.parse, urllib.request
+from warnings import warn
+
+from distutils.core import PyPIRCCommand
+from distutils.errors import *
+from distutils import log
+
+class register(PyPIRCCommand):
+
+ description = ("register the distribution with the Python package index")
+ user_options = PyPIRCCommand.user_options + [
+ ('list-classifiers', None,
+ 'list the valid Trove classifiers'),
+ ('strict', None ,
+ 'Will stop the registering if the meta-data are not fully compliant')
+ ]
+ boolean_options = PyPIRCCommand.boolean_options + [
+ 'verify', 'list-classifiers', 'strict']
+
+ sub_commands = [('check', lambda self: True)]
+
+ def initialize_options(self):
+ PyPIRCCommand.initialize_options(self)
+ self.list_classifiers = 0
+ self.strict = 0
+
+ def finalize_options(self):
+ PyPIRCCommand.finalize_options(self)
+ # setting options for the `check` subcommand
+ check_options = {'strict': ('register', self.strict),
+ 'restructuredtext': ('register', 1)}
+ self.distribution.command_options['check'] = check_options
+
+ def run(self):
+ self.finalize_options()
+ self._set_config()
+
+ # Run sub commands
+ for cmd_name in self.get_sub_commands():
+ self.run_command(cmd_name)
+
+ if self.dry_run:
+ self.verify_metadata()
+ elif self.list_classifiers:
+ self.classifiers()
+ else:
+ self.send_metadata()
+
+ def check_metadata(self):
+ """Deprecated API."""
+ warn("distutils.command.register.check_metadata is deprecated, \
+ use the check command instead", PendingDeprecationWarning)
+ check = self.distribution.get_command_obj('check')
+ check.ensure_finalized()
+ check.strict = self.strict
+ check.restructuredtext = 1
+ check.run()
+
+ def _set_config(self):
+ ''' Reads the configuration file and set attributes.
+ '''
+ config = self._read_pypirc()
+ if config != {}:
+ self.username = config['username']
+ self.password = config['password']
+ self.repository = config['repository']
+ self.realm = config['realm']
+ self.has_config = True
+ else:
+ if self.repository not in ('pypi', self.DEFAULT_REPOSITORY):
+ raise ValueError('%s not found in .pypirc' % self.repository)
+ if self.repository == 'pypi':
+ self.repository = self.DEFAULT_REPOSITORY
+ self.has_config = False
+
+ def classifiers(self):
+ ''' Fetch the list of classifiers from the server.
+ '''
+ url = self.repository+'?:action=list_classifiers'
+ response = urllib.request.urlopen(url)
+ log.info(self._read_pypi_response(response))
+
+ def verify_metadata(self):
+ ''' Send the metadata to the package index server to be checked.
+ '''
+ # send the info to the server and report the result
+ (code, result) = self.post_to_server(self.build_post_data('verify'))
+ log.info('Server response (%s): %s', code, result)
+
+ def send_metadata(self):
+ ''' Send the metadata to the package index server.
+
+ Well, do the following:
+         1. figure out who the user is, and then
+ 2. send the data as a Basic auth'ed POST.
+
+ First we try to read the username/password from $HOME/.pypirc,
+ which is a ConfigParser-formatted file with a section
+ [distutils] containing username and password entries (both
+ in clear text). Eg:
+
+ [distutils]
+ index-servers =
+ pypi
+
+ [pypi]
+ username: fred
+ password: sekrit
+
+        Otherwise, to figure out who the user is, we offer the user three
+ choices:
+
+ 1. use existing login,
+ 2. register as a new user, or
+ 3. set the password to a random string and email the user.
+
+ '''
+ # see if we can short-cut and get the username/password from the
+ # config
+ if self.has_config:
+ choice = '1'
+ username = self.username
+ password = self.password
+ else:
+ choice = 'x'
+ username = password = ''
+
+ # get the user's login info
+ choices = '1 2 3 4'.split()
+ while choice not in choices:
+ self.announce('''\
+We need to know who you are, so please choose either:
+ 1. use your existing login,
+ 2. register as a new user,
+ 3. have the server generate a new password for you (and email it to you), or
+ 4. quit
+Your selection [default 1]: ''', log.INFO)
+ choice = input()
+ if not choice:
+ choice = '1'
+ elif choice not in choices:
+ print('Please choose one of the four options!')
+
+ if choice == '1':
+ # get the username and password
+ while not username:
+ username = input('Username: ')
+ while not password:
+ password = getpass.getpass('Password: ')
+
+ # set up the authentication
+ auth = urllib.request.HTTPPasswordMgr()
+ host = urllib.parse.urlparse(self.repository)[1]
+ auth.add_password(self.realm, host, username, password)
+ # send the info to the server and report the result
+ code, result = self.post_to_server(self.build_post_data('submit'),
+ auth)
+ self.announce('Server response (%s): %s' % (code, result),
+ log.INFO)
+
+ # possibly save the login
+ if code == 200:
+ if self.has_config:
+ # sharing the password in the distribution instance
+ # so the upload command can reuse it
+ self.distribution.password = password
+ else:
+ self.announce(('I can store your PyPI login so future '
+ 'submissions will be faster.'), log.INFO)
+ self.announce('(the login will be stored in %s)' % \
+ self._get_rc_file(), log.INFO)
+ choice = 'X'
+ while choice.lower() not in 'yn':
+ choice = input('Save your login (y/N)?')
+ if not choice:
+ choice = 'n'
+ if choice.lower() == 'y':
+ self._store_pypirc(username, password)
+
+ elif choice == '2':
+ data = {':action': 'user'}
+ data['name'] = data['password'] = data['email'] = ''
+ data['confirm'] = None
+ while not data['name']:
+ data['name'] = input('Username: ')
+ while data['password'] != data['confirm']:
+ while not data['password']:
+ data['password'] = getpass.getpass('Password: ')
+ while not data['confirm']:
+ data['confirm'] = getpass.getpass(' Confirm: ')
+ if data['password'] != data['confirm']:
+ data['password'] = ''
+ data['confirm'] = None
+ print("Password and confirm don't match!")
+ while not data['email']:
+ data['email'] = input(' EMail: ')
+ code, result = self.post_to_server(data)
+ if code != 200:
+ log.info('Server response (%s): %s', code, result)
+ else:
+ log.info('You will receive an email shortly.')
+ log.info(('Follow the instructions in it to '
+ 'complete registration.'))
+ elif choice == '3':
+ data = {':action': 'password_reset'}
+ data['email'] = ''
+ while not data['email']:
+ data['email'] = input('Your email address: ')
+ code, result = self.post_to_server(data)
+ log.info('Server response (%s): %s', code, result)
+
+ def build_post_data(self, action):
+ # figure the data to send - the metadata plus some additional
+ # information used by the package server
+ meta = self.distribution.metadata
+ data = {
+ ':action': action,
+ 'metadata_version' : '1.0',
+ 'name': meta.get_name(),
+ 'version': meta.get_version(),
+ 'summary': meta.get_description(),
+ 'home_page': meta.get_url(),
+ 'author': meta.get_contact(),
+ 'author_email': meta.get_contact_email(),
+ 'license': meta.get_licence(),
+ 'description': meta.get_long_description(),
+ 'keywords': meta.get_keywords(),
+ 'platform': meta.get_platforms(),
+ 'classifiers': meta.get_classifiers(),
+ 'download_url': meta.get_download_url(),
+ # PEP 314
+ 'provides': meta.get_provides(),
+ 'requires': meta.get_requires(),
+ 'obsoletes': meta.get_obsoletes(),
+ }
+ if data['provides'] or data['requires'] or data['obsoletes']:
+ data['metadata_version'] = '1.1'
+ return data
+
+ def post_to_server(self, data, auth=None):
+ ''' Post a query to the server, and return a string response.
+ '''
+ if 'name' in data:
+ self.announce('Registering %s to %s' % (data['name'],
+ self.repository),
+ log.INFO)
+ # Build up the MIME payload for the urllib2 POST data
+ boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
+ sep_boundary = '\n--' + boundary
+ end_boundary = sep_boundary + '--'
+ body = io.StringIO()
+ for key, value in data.items():
+ # handle multiple entries for the same name
+ if type(value) not in (type([]), type( () )):
+ value = [value]
+ for value in value:
+ value = str(value)
+ body.write(sep_boundary)
+ body.write('\nContent-Disposition: form-data; name="%s"'%key)
+ body.write("\n\n")
+ body.write(value)
+ if value and value[-1] == '\r':
+ body.write('\n') # write an extra newline (lurve Macs)
+ body.write(end_boundary)
+ body.write("\n")
+ body = body.getvalue().encode("utf-8")
+
+ # build the Request
+ headers = {
+ 'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'%boundary,
+ 'Content-length': str(len(body))
+ }
+ req = urllib.request.Request(self.repository, body, headers)
+
+ # handle HTTP and include the Basic Auth handler
+ opener = urllib.request.build_opener(
+ urllib.request.HTTPBasicAuthHandler(password_mgr=auth)
+ )
+ data = ''
+ try:
+ result = opener.open(req)
+ except urllib.error.HTTPError as e:
+ if self.show_response:
+ data = e.fp.read()
+ result = e.code, e.msg
+ except urllib.error.URLError as e:
+ result = 500, str(e)
+ else:
+ if self.show_response:
+ data = self._read_pypi_response(result)
+ result = 200, 'OK'
+ if self.show_response:
+ msg = '\n'.join(('-' * 75, data, '-' * 75))
+ self.announce(msg, log.INFO)
+ return result
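post_to_server() assembles a text multipart/form-data body by hand; a single-field sketch with a made-up value:

    import io

    boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
    sep_boundary = '\n--' + boundary
    end_boundary = sep_boundary + '--'

    body = io.StringIO()
    body.write(sep_boundary)
    body.write('\nContent-Disposition: form-data; name="name"')
    body.write("\n\n")
    body.write("example-project")              # hypothetical field value
    body.write(end_boundary)
    body.write("\n")
    payload = body.getvalue().encode("utf-8")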
diff --git a/setuptools/_distutils/command/sdist.py b/setuptools/_distutils/command/sdist.py
new file mode 100644
index 0000000..b4996fc
--- /dev/null
+++ b/setuptools/_distutils/command/sdist.py
@@ -0,0 +1,494 @@
+"""distutils.command.sdist
+
+Implements the Distutils 'sdist' command (create a source distribution)."""
+
+import os
+import sys
+from glob import glob
+from warnings import warn
+
+from distutils.core import Command
+from distutils import dir_util
+from distutils import file_util
+from distutils import archive_util
+from distutils.text_file import TextFile
+from distutils.filelist import FileList
+from distutils import log
+from distutils.util import convert_path
+from distutils.errors import DistutilsTemplateError, DistutilsOptionError
+
+
+def show_formats():
+ """Print all possible values for the 'formats' option (used by
+ the "--help-formats" command-line option).
+ """
+ from distutils.fancy_getopt import FancyGetopt
+ from distutils.archive_util import ARCHIVE_FORMATS
+ formats = []
+ for format in ARCHIVE_FORMATS.keys():
+ formats.append(("formats=" + format, None,
+ ARCHIVE_FORMATS[format][2]))
+ formats.sort()
+ FancyGetopt(formats).print_help(
+ "List of available source distribution formats:")
+
+
+class sdist(Command):
+
+ description = "create a source distribution (tarball, zip file, etc.)"
+
+ def checking_metadata(self):
+ """Callable used for the check sub-command.
+
+ Placed here so user_options can view it"""
+ return self.metadata_check
+
+ user_options = [
+ ('template=', 't',
+ "name of manifest template file [default: MANIFEST.in]"),
+ ('manifest=', 'm',
+ "name of manifest file [default: MANIFEST]"),
+ ('use-defaults', None,
+ "include the default file set in the manifest "
+ "[default; disable with --no-defaults]"),
+ ('no-defaults', None,
+ "don't include the default file set"),
+ ('prune', None,
+ "specifically exclude files/directories that should not be "
+ "distributed (build tree, RCS/CVS dirs, etc.) "
+ "[default; disable with --no-prune]"),
+ ('no-prune', None,
+ "don't automatically exclude anything"),
+ ('manifest-only', 'o',
+ "just regenerate the manifest and then stop "
+ "(implies --force-manifest)"),
+ ('force-manifest', 'f',
+ "forcibly regenerate the manifest and carry on as usual. "
+ "Deprecated: now the manifest is always regenerated."),
+ ('formats=', None,
+ "formats for source distribution (comma-separated list)"),
+ ('keep-temp', 'k',
+ "keep the distribution tree around after creating " +
+ "archive file(s)"),
+ ('dist-dir=', 'd',
+ "directory to put the source distribution archive(s) in "
+ "[default: dist]"),
+ ('metadata-check', None,
+ "Ensure that all required elements of meta-data "
+ "are supplied. Warn if any missing. [default]"),
+ ('owner=', 'u',
+ "Owner name used when creating a tar file [default: current user]"),
+ ('group=', 'g',
+ "Group name used when creating a tar file [default: current group]"),
+ ]
+
+ boolean_options = ['use-defaults', 'prune',
+ 'manifest-only', 'force-manifest',
+ 'keep-temp', 'metadata-check']
+
+ help_options = [
+ ('help-formats', None,
+ "list available distribution formats", show_formats),
+ ]
+
+ negative_opt = {'no-defaults': 'use-defaults',
+ 'no-prune': 'prune' }
+
+ sub_commands = [('check', checking_metadata)]
+
+ READMES = ('README', 'README.txt', 'README.rst')
+
+ def initialize_options(self):
+ # 'template' and 'manifest' are, respectively, the names of
+ # the manifest template and manifest file.
+ self.template = None
+ self.manifest = None
+
+ # 'use_defaults': if true, we will include the default file set
+ # in the manifest
+ self.use_defaults = 1
+ self.prune = 1
+
+ self.manifest_only = 0
+ self.force_manifest = 0
+
+ self.formats = ['gztar']
+ self.keep_temp = 0
+ self.dist_dir = None
+
+ self.archive_files = None
+ self.metadata_check = 1
+ self.owner = None
+ self.group = None
+
+ def finalize_options(self):
+ if self.manifest is None:
+ self.manifest = "MANIFEST"
+ if self.template is None:
+ self.template = "MANIFEST.in"
+
+ self.ensure_string_list('formats')
+
+ bad_format = archive_util.check_archive_formats(self.formats)
+ if bad_format:
+ raise DistutilsOptionError(
+ "unknown archive format '%s'" % bad_format)
+
+ if self.dist_dir is None:
+ self.dist_dir = "dist"
+
+ def run(self):
+ # 'filelist' contains the list of files that will make up the
+ # manifest
+ self.filelist = FileList()
+
+ # Run sub commands
+ for cmd_name in self.get_sub_commands():
+ self.run_command(cmd_name)
+
+ # Do whatever it takes to get the list of files to process
+ # (process the manifest template, read an existing manifest,
+ # whatever). File list is accumulated in 'self.filelist'.
+ self.get_file_list()
+
+ # If user just wanted us to regenerate the manifest, stop now.
+ if self.manifest_only:
+ return
+
+ # Otherwise, go ahead and create the source distribution tarball,
+ # or zipfile, or whatever.
+ self.make_distribution()
+
+ def check_metadata(self):
+ """Deprecated API."""
+ warn("distutils.command.sdist.check_metadata is deprecated, \
+ use the check command instead", PendingDeprecationWarning)
+ check = self.distribution.get_command_obj('check')
+ check.ensure_finalized()
+ check.run()
+
+ def get_file_list(self):
+ """Figure out the list of files to include in the source
+ distribution, and put it in 'self.filelist'. This might involve
+ reading the manifest template (and writing the manifest), or just
+ reading the manifest, or just using the default file set -- it all
+ depends on the user's options.
+ """
+ # new behavior when using a template:
+ # the file list is recalculated every time because
+ # even if MANIFEST.in or setup.py are not changed
+ # the user might have added some files in the tree that
+ # need to be included.
+ #
+ # This makes --force the default and only behavior with templates.
+ template_exists = os.path.isfile(self.template)
+ if not template_exists and self._manifest_is_not_generated():
+ self.read_manifest()
+ self.filelist.sort()
+ self.filelist.remove_duplicates()
+ return
+
+ if not template_exists:
+ self.warn(("manifest template '%s' does not exist " +
+ "(using default file list)") %
+ self.template)
+ self.filelist.findall()
+
+ if self.use_defaults:
+ self.add_defaults()
+
+ if template_exists:
+ self.read_template()
+
+ if self.prune:
+ self.prune_file_list()
+
+ self.filelist.sort()
+ self.filelist.remove_duplicates()
+ self.write_manifest()
+
+ def add_defaults(self):
+ """Add all the default files to self.filelist:
+        - README, README.txt, or README.rst
+ - setup.py
+ - test/test*.py
+ - all pure Python modules mentioned in setup script
+ - all files pointed by package_data (build_py)
+ - all files defined in data_files.
+ - all files defined as scripts.
+ - all C sources listed as part of extensions or C libraries
+ in the setup script (doesn't catch C headers!)
+        Warns if a README (README, README.txt, or README.rst) or setup.py is
+        missing; everything else is optional.
+ """
+ self._add_defaults_standards()
+ self._add_defaults_optional()
+ self._add_defaults_python()
+ self._add_defaults_data_files()
+ self._add_defaults_ext()
+ self._add_defaults_c_libs()
+ self._add_defaults_scripts()
+
+ @staticmethod
+ def _cs_path_exists(fspath):
+ """
+ Case-sensitive path existence check
+
+ >>> sdist._cs_path_exists(__file__)
+ True
+ >>> sdist._cs_path_exists(__file__.upper())
+ False
+ """
+ if not os.path.exists(fspath):
+ return False
+ # make absolute so we always have a directory
+ abspath = os.path.abspath(fspath)
+ directory, filename = os.path.split(abspath)
+ return filename in os.listdir(directory)
+
+ def _add_defaults_standards(self):
+ standards = [self.READMES, self.distribution.script_name]
+ for fn in standards:
+ if isinstance(fn, tuple):
+ alts = fn
+ got_it = False
+ for fn in alts:
+ if self._cs_path_exists(fn):
+ got_it = True
+ self.filelist.append(fn)
+ break
+
+ if not got_it:
+ self.warn("standard file not found: should have one of " +
+ ', '.join(alts))
+ else:
+ if self._cs_path_exists(fn):
+ self.filelist.append(fn)
+ else:
+ self.warn("standard file '%s' not found" % fn)
+
+ def _add_defaults_optional(self):
+ optional = ['test/test*.py', 'setup.cfg']
+ for pattern in optional:
+ files = filter(os.path.isfile, glob(pattern))
+ self.filelist.extend(files)
+
+ def _add_defaults_python(self):
+ # build_py is used to get:
+ # - python modules
+ # - files defined in package_data
+ build_py = self.get_finalized_command('build_py')
+
+ # getting python files
+ if self.distribution.has_pure_modules():
+ self.filelist.extend(build_py.get_source_files())
+
+ # getting package_data files
+ # (computed in build_py.data_files by build_py.finalize_options)
+ for pkg, src_dir, build_dir, filenames in build_py.data_files:
+ for filename in filenames:
+ self.filelist.append(os.path.join(src_dir, filename))
+
+ def _add_defaults_data_files(self):
+ # getting distribution.data_files
+ if self.distribution.has_data_files():
+ for item in self.distribution.data_files:
+ if isinstance(item, str):
+ # plain file
+ item = convert_path(item)
+ if os.path.isfile(item):
+ self.filelist.append(item)
+ else:
+ # a (dirname, filenames) tuple
+ dirname, filenames = item
+ for f in filenames:
+ f = convert_path(f)
+ if os.path.isfile(f):
+ self.filelist.append(f)
+
+ def _add_defaults_ext(self):
+ if self.distribution.has_ext_modules():
+ build_ext = self.get_finalized_command('build_ext')
+ self.filelist.extend(build_ext.get_source_files())
+
+ def _add_defaults_c_libs(self):
+ if self.distribution.has_c_libraries():
+ build_clib = self.get_finalized_command('build_clib')
+ self.filelist.extend(build_clib.get_source_files())
+
+ def _add_defaults_scripts(self):
+ if self.distribution.has_scripts():
+ build_scripts = self.get_finalized_command('build_scripts')
+ self.filelist.extend(build_scripts.get_source_files())
+
+ def read_template(self):
+ """Read and parse manifest template file named by self.template.
+
+ (usually "MANIFEST.in") The parsing and processing is done by
+ 'self.filelist', which updates itself accordingly.
+ """
+ log.info("reading manifest template '%s'", self.template)
+ template = TextFile(self.template, strip_comments=1, skip_blanks=1,
+ join_lines=1, lstrip_ws=1, rstrip_ws=1,
+ collapse_join=1)
+
+ try:
+ while True:
+ line = template.readline()
+ if line is None: # end of file
+ break
+
+ try:
+ self.filelist.process_template_line(line)
+ # the call above can raise a DistutilsTemplateError for
+ # malformed lines, or a ValueError from the lower-level
+ # convert_path function
+ except (DistutilsTemplateError, ValueError) as msg:
+ self.warn("%s, line %d: %s" % (template.filename,
+ template.current_line,
+ msg))
+ finally:
+ template.close()
+
+ def prune_file_list(self):
+ """Prune off branches that might slip into the file list as created
+ by 'read_template()', but really don't belong there:
+ * the build tree (typically "build")
+ * the release tree itself (only an issue if we ran "sdist"
+ previously with --keep-temp, or it aborted)
+ * any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories
+ """
+ build = self.get_finalized_command('build')
+ base_dir = self.distribution.get_fullname()
+
+ self.filelist.exclude_pattern(None, prefix=build.build_base)
+ self.filelist.exclude_pattern(None, prefix=base_dir)
+
+ if sys.platform == 'win32':
+ seps = r'/|\\'
+ else:
+ seps = '/'
+
+ vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr',
+ '_darcs']
+ vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps)
+ self.filelist.exclude_pattern(vcs_ptrn, is_regex=1)
+
+ def write_manifest(self):
+ """Write the file list in 'self.filelist' (presumably as filled in
+ by 'add_defaults()' and 'read_template()') to the manifest file
+ named by 'self.manifest'.
+ """
+ if self._manifest_is_not_generated():
+ log.info("not writing to manually maintained "
+ "manifest file '%s'" % self.manifest)
+ return
+
+ content = self.filelist.files[:]
+ content.insert(0, '# file GENERATED by distutils, do NOT edit')
+ self.execute(file_util.write_file, (self.manifest, content),
+ "writing manifest file '%s'" % self.manifest)
+
+ def _manifest_is_not_generated(self):
+ # check for special comment used in 3.1.3 and higher
+ if not os.path.isfile(self.manifest):
+ return False
+
+ fp = open(self.manifest)
+ try:
+ first_line = fp.readline()
+ finally:
+ fp.close()
+ return first_line != '# file GENERATED by distutils, do NOT edit\n'
+
+ def read_manifest(self):
+ """Read the manifest file (named by 'self.manifest') and use it to
+ fill in 'self.filelist', the list of files to include in the source
+ distribution.
+ """
+ log.info("reading manifest file '%s'", self.manifest)
+ with open(self.manifest) as manifest:
+ for line in manifest:
+ # ignore comments and blank lines
+ line = line.strip()
+ if line.startswith('#') or not line:
+ continue
+ self.filelist.append(line)
+
+ def make_release_tree(self, base_dir, files):
+ """Create the directory tree that will become the source
+ distribution archive. All directories implied by the filenames in
+ 'files' are created under 'base_dir', and then we hard link or copy
+ (if hard linking is unavailable) those files into place.
+ Essentially, this duplicates the developer's source tree, but in a
+ directory named after the distribution, containing only the files
+ to be distributed.
+ """
+ # Create all the directories under 'base_dir' necessary to
+ # put 'files' there; the 'mkpath()' is just so we don't die
+ # if the manifest happens to be empty.
+ self.mkpath(base_dir)
+ dir_util.create_tree(base_dir, files, dry_run=self.dry_run)
+
+ # And walk over the list of files, either making a hard link (if
+ # os.link exists) to each one that doesn't already exist in its
+ # corresponding location under 'base_dir', or copying each file
+ # that's out-of-date in 'base_dir'. (Usually, all files will be
+ # out-of-date, because by default we blow away 'base_dir' when
+ # we're done making the distribution archives.)
+
+ if hasattr(os, 'link'): # can make hard links on this system
+ link = 'hard'
+ msg = "making hard links in %s..." % base_dir
+ else: # nope, have to copy
+ link = None
+ msg = "copying files to %s..." % base_dir
+
+ if not files:
+ log.warn("no files to distribute -- empty manifest?")
+ else:
+ log.info(msg)
+ for file in files:
+ if not os.path.isfile(file):
+ log.warn("'%s' not a regular file -- skipping", file)
+ else:
+ dest = os.path.join(base_dir, file)
+ self.copy_file(file, dest, link=link)
+
+ self.distribution.metadata.write_pkg_info(base_dir)
+
+ def make_distribution(self):
+ """Create the source distribution(s). First, we create the release
+ tree with 'make_release_tree()'; then, we create all required
+ archive files (according to 'self.formats') from the release tree.
+ Finally, we clean up by blowing away the release tree (unless
+ 'self.keep_temp' is true). The list of archive files created is
+ stored so it can be retrieved later by 'get_archive_files()'.
+ """
+ # Don't warn about missing meta-data here -- should be (and is!)
+ # done elsewhere.
+ base_dir = self.distribution.get_fullname()
+ base_name = os.path.join(self.dist_dir, base_dir)
+
+ self.make_release_tree(base_dir, self.filelist.files)
+ archive_files = [] # remember names of files we create
+        # tar archive must be created last, to avoid it being overwritten and removed
+ if 'tar' in self.formats:
+ self.formats.append(self.formats.pop(self.formats.index('tar')))
+
+ for fmt in self.formats:
+ file = self.make_archive(base_name, fmt, base_dir=base_dir,
+ owner=self.owner, group=self.group)
+ archive_files.append(file)
+ self.distribution.dist_files.append(('sdist', '', file))
+
+ self.archive_files = archive_files
+
+ if not self.keep_temp:
+ dir_util.remove_tree(base_dir, dry_run=self.dry_run)
+
+ def get_archive_files(self):
+ """Return the list of archive files created when the command
+ was run, or None if the command hasn't run yet.
+ """
+ return self.archive_files
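prune_file_list() builds one regex to drop version-control directories; evaluated standalone here with a POSIX path separator and sample paths:

    import re

    seps = '/'
    vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr', '_darcs']
    vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps)

    assert re.search(vcs_ptrn, 'src/.git/config')          # excluded
    assert not re.search(vcs_ptrn, 'src/gitignore.txt')    # kept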
diff --git a/setuptools/_distutils/command/upload.py b/setuptools/_distutils/command/upload.py
new file mode 100644
index 0000000..95e9fda
--- /dev/null
+++ b/setuptools/_distutils/command/upload.py
@@ -0,0 +1,214 @@
+"""
+distutils.command.upload
+
+Implements the Distutils 'upload' subcommand (upload package to a package
+index).
+"""
+
+import os
+import io
+import hashlib
+from base64 import standard_b64encode
+from urllib.request import urlopen, Request, HTTPError
+from urllib.parse import urlparse
+from distutils.errors import DistutilsError, DistutilsOptionError
+from distutils.core import PyPIRCCommand
+from distutils.spawn import spawn
+from distutils import log
+
+
+# PyPI Warehouse supports MD5, SHA256, and Blake2 (blake2-256)
+# https://bugs.python.org/issue40698
+_FILE_CONTENT_DIGESTS = {
+ "md5_digest": getattr(hashlib, "md5", None),
+ "sha256_digest": getattr(hashlib, "sha256", None),
+ "blake2_256_digest": getattr(hashlib, "blake2b", None),
+}
+
+
+class upload(PyPIRCCommand):
+
+ description = "upload binary package to PyPI"
+
+ user_options = PyPIRCCommand.user_options + [
+ ('sign', 's',
+ 'sign files to upload using gpg'),
+ ('identity=', 'i', 'GPG identity used to sign files'),
+ ]
+
+ boolean_options = PyPIRCCommand.boolean_options + ['sign']
+
+ def initialize_options(self):
+ PyPIRCCommand.initialize_options(self)
+ self.username = ''
+ self.password = ''
+ self.show_response = 0
+ self.sign = False
+ self.identity = None
+
+ def finalize_options(self):
+ PyPIRCCommand.finalize_options(self)
+ if self.identity and not self.sign:
+ raise DistutilsOptionError(
+ "Must use --sign for --identity to have meaning"
+ )
+ config = self._read_pypirc()
+ if config != {}:
+ self.username = config['username']
+ self.password = config['password']
+ self.repository = config['repository']
+ self.realm = config['realm']
+
+ # getting the password from the distribution
+ # if previously set by the register command
+ if not self.password and self.distribution.password:
+ self.password = self.distribution.password
+
+ def run(self):
+ if not self.distribution.dist_files:
+ msg = ("Must create and upload files in one command "
+ "(e.g. setup.py sdist upload)")
+ raise DistutilsOptionError(msg)
+ for command, pyversion, filename in self.distribution.dist_files:
+ self.upload_file(command, pyversion, filename)
+
+ def upload_file(self, command, pyversion, filename):
+ # Makes sure the repository URL is compliant
+ schema, netloc, url, params, query, fragments = \
+ urlparse(self.repository)
+ if params or query or fragments:
+ raise AssertionError("Incompatible url %s" % self.repository)
+
+ if schema not in ('http', 'https'):
+ raise AssertionError("unsupported schema " + schema)
+
+ # Sign if requested
+ if self.sign:
+ gpg_args = ["gpg", "--detach-sign", "-a", filename]
+ if self.identity:
+ gpg_args[2:2] = ["--local-user", self.identity]
+ spawn(gpg_args,
+ dry_run=self.dry_run)
+
+ # Fill in the data - send all the meta-data in case we need to
+ # register a new release
+ f = open(filename,'rb')
+ try:
+ content = f.read()
+ finally:
+ f.close()
+
+ meta = self.distribution.metadata
+ data = {
+ # action
+ ':action': 'file_upload',
+ 'protocol_version': '1',
+
+ # identify release
+ 'name': meta.get_name(),
+ 'version': meta.get_version(),
+
+ # file content
+ 'content': (os.path.basename(filename),content),
+ 'filetype': command,
+ 'pyversion': pyversion,
+
+ # additional meta-data
+ 'metadata_version': '1.0',
+ 'summary': meta.get_description(),
+ 'home_page': meta.get_url(),
+ 'author': meta.get_contact(),
+ 'author_email': meta.get_contact_email(),
+ 'license': meta.get_licence(),
+ 'description': meta.get_long_description(),
+ 'keywords': meta.get_keywords(),
+ 'platform': meta.get_platforms(),
+ 'classifiers': meta.get_classifiers(),
+ 'download_url': meta.get_download_url(),
+ # PEP 314
+ 'provides': meta.get_provides(),
+ 'requires': meta.get_requires(),
+ 'obsoletes': meta.get_obsoletes(),
+ }
+
+ data['comment'] = ''
+
+ # file content digests
+ for digest_name, digest_cons in _FILE_CONTENT_DIGESTS.items():
+ if digest_cons is None:
+ continue
+ try:
+ data[digest_name] = digest_cons(content).hexdigest()
+ except ValueError:
+ # hash digest not available or blocked by security policy
+ pass
+
+ if self.sign:
+ with open(filename + ".asc", "rb") as f:
+ data['gpg_signature'] = (os.path.basename(filename) + ".asc",
+ f.read())
+
+ # set up the authentication
+ user_pass = (self.username + ":" + self.password).encode('ascii')
+ # The exact encoding of the authentication string is debated.
+        # In any case, PyPI only accepts ASCII for both username and password.
+ auth = "Basic " + standard_b64encode(user_pass).decode('ascii')
+
+ # Build up the MIME payload for the POST data
+ boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
+ sep_boundary = b'\r\n--' + boundary.encode('ascii')
+ end_boundary = sep_boundary + b'--\r\n'
+ body = io.BytesIO()
+ for key, value in data.items():
+ title = '\r\nContent-Disposition: form-data; name="%s"' % key
+ # handle multiple entries for the same name
+ if not isinstance(value, list):
+ value = [value]
+ for value in value:
+ if type(value) is tuple:
+ title += '; filename="%s"' % value[0]
+ value = value[1]
+ else:
+ value = str(value).encode('utf-8')
+ body.write(sep_boundary)
+ body.write(title.encode('utf-8'))
+ body.write(b"\r\n\r\n")
+ body.write(value)
+ body.write(end_boundary)
+ body = body.getvalue()
+
+ msg = "Submitting %s to %s" % (filename, self.repository)
+ self.announce(msg, log.INFO)
+
+ # build the Request
+ headers = {
+ 'Content-type': 'multipart/form-data; boundary=%s' % boundary,
+ 'Content-length': str(len(body)),
+ 'Authorization': auth,
+ }
+
+ request = Request(self.repository, data=body,
+ headers=headers)
+ # send the data
+ try:
+ result = urlopen(request)
+ status = result.getcode()
+ reason = result.msg
+ except HTTPError as e:
+ status = e.code
+ reason = e.msg
+ except OSError as e:
+ self.announce(str(e), log.ERROR)
+ raise
+
+ if status == 200:
+ self.announce('Server response (%s): %s' % (status, reason),
+ log.INFO)
+ if self.show_response:
+ text = self._read_pypi_response(result)
+ msg = '\n'.join(('-' * 75, text, '-' * 75))
+ self.announce(msg, log.INFO)
+ else:
+ msg = 'Upload failed (%s): %s' % (status, reason)
+ self.announce(msg, log.ERROR)
+ raise DistutilsError(msg)
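A minimal sketch of the Basic-auth header and one content digest assembled in upload_file(); the credentials and file content are made up:

    import hashlib
    from base64 import standard_b64encode

    content = b"example sdist bytes"           # stands in for the uploaded file
    user_pass = "alice:s3cret".encode('ascii')
    auth = "Basic " + standard_b64encode(user_pass).decode('ascii')
    sha256_digest = hashlib.sha256(content).hexdigest()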
diff --git a/setuptools/_distutils/command/wininst-10.0-amd64.exe b/setuptools/_distutils/command/wininst-10.0-amd64.exe
new file mode 100644
index 0000000..6fa0dce
--- /dev/null
+++ b/setuptools/_distutils/command/wininst-10.0-amd64.exe
Binary files differ
diff --git a/setuptools/_distutils/command/wininst-10.0.exe b/setuptools/_distutils/command/wininst-10.0.exe
new file mode 100644
index 0000000..afc3bc6
--- /dev/null
+++ b/setuptools/_distutils/command/wininst-10.0.exe
Binary files differ
diff --git a/setuptools/_distutils/command/wininst-14.0-amd64.exe b/setuptools/_distutils/command/wininst-14.0-amd64.exe
new file mode 100644
index 0000000..253c2e2
--- /dev/null
+++ b/setuptools/_distutils/command/wininst-14.0-amd64.exe
Binary files differ
diff --git a/setuptools/_distutils/command/wininst-14.0.exe b/setuptools/_distutils/command/wininst-14.0.exe
new file mode 100644
index 0000000..46f5f35
--- /dev/null
+++ b/setuptools/_distutils/command/wininst-14.0.exe
Binary files differ
diff --git a/setuptools/_distutils/command/wininst-6.0.exe b/setuptools/_distutils/command/wininst-6.0.exe
new file mode 100644
index 0000000..f57c855
--- /dev/null
+++ b/setuptools/_distutils/command/wininst-6.0.exe
Binary files differ
diff --git a/setuptools/_distutils/command/wininst-7.1.exe b/setuptools/_distutils/command/wininst-7.1.exe
new file mode 100644
index 0000000..1433bc1
--- /dev/null
+++ b/setuptools/_distutils/command/wininst-7.1.exe
Binary files differ
diff --git a/setuptools/_distutils/command/wininst-8.0.exe b/setuptools/_distutils/command/wininst-8.0.exe
new file mode 100644
index 0000000..7403bfa
--- /dev/null
+++ b/setuptools/_distutils/command/wininst-8.0.exe
Binary files differ
diff --git a/setuptools/_distutils/command/wininst-9.0-amd64.exe b/setuptools/_distutils/command/wininst-9.0-amd64.exe
new file mode 100644
index 0000000..94fbd43
--- /dev/null
+++ b/setuptools/_distutils/command/wininst-9.0-amd64.exe
Binary files differ
diff --git a/setuptools/_distutils/command/wininst-9.0.exe b/setuptools/_distutils/command/wininst-9.0.exe
new file mode 100644
index 0000000..2ec261f
--- /dev/null
+++ b/setuptools/_distutils/command/wininst-9.0.exe
Binary files differ
diff --git a/setuptools/_distutils/config.py b/setuptools/_distutils/config.py
new file mode 100644
index 0000000..2171abd
--- /dev/null
+++ b/setuptools/_distutils/config.py
@@ -0,0 +1,130 @@
+"""distutils.pypirc
+
+Provides the PyPIRCCommand class, the base class for the command classes
+that use .pypirc in the distutils.command package.
+"""
+import os
+from configparser import RawConfigParser
+
+from distutils.cmd import Command
+
+DEFAULT_PYPIRC = """\
+[distutils]
+index-servers =
+ pypi
+
+[pypi]
+username:%s
+password:%s
+"""
+
+class PyPIRCCommand(Command):
+ """Base command that knows how to handle the .pypirc file
+ """
+ DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/'
+ DEFAULT_REALM = 'pypi'
+ repository = None
+ realm = None
+
+ user_options = [
+ ('repository=', 'r',
+ "url of repository [default: %s]" % \
+ DEFAULT_REPOSITORY),
+ ('show-response', None,
+ 'display full response text from server')]
+
+ boolean_options = ['show-response']
+
+ def _get_rc_file(self):
+ """Returns rc file path."""
+ return os.path.join(os.path.expanduser('~'), '.pypirc')
+
+ def _store_pypirc(self, username, password):
+ """Creates a default .pypirc file."""
+ rc = self._get_rc_file()
+ with os.fdopen(os.open(rc, os.O_CREAT | os.O_WRONLY, 0o600), 'w') as f:
+ f.write(DEFAULT_PYPIRC % (username, password))
+
+ def _read_pypirc(self):
+ """Reads the .pypirc file."""
+ rc = self._get_rc_file()
+ if os.path.exists(rc):
+ self.announce('Using PyPI login from %s' % rc)
+ repository = self.repository or self.DEFAULT_REPOSITORY
+
+ config = RawConfigParser()
+ config.read(rc)
+ sections = config.sections()
+ if 'distutils' in sections:
+ # let's get the list of servers
+ index_servers = config.get('distutils', 'index-servers')
+ _servers = [server.strip() for server in
+ index_servers.split('\n')
+ if server.strip() != '']
+ if _servers == []:
+ # nothing set, let's try to get the default pypi
+ if 'pypi' in sections:
+ _servers = ['pypi']
+ else:
+ # the file is not properly defined, returning
+ # an empty dict
+ return {}
+ for server in _servers:
+ current = {'server': server}
+ current['username'] = config.get(server, 'username')
+
+ # optional params
+ for key, default in (('repository',
+ self.DEFAULT_REPOSITORY),
+ ('realm', self.DEFAULT_REALM),
+ ('password', None)):
+ if config.has_option(server, key):
+ current[key] = config.get(server, key)
+ else:
+ current[key] = default
+
+ # work around people having "repository" for the "pypi"
+ # section of their config set to the HTTP (rather than
+ # HTTPS) URL
+ if (server == 'pypi' and
+ repository in (self.DEFAULT_REPOSITORY, 'pypi')):
+ current['repository'] = self.DEFAULT_REPOSITORY
+ return current
+
+ if (current['server'] == repository or
+ current['repository'] == repository):
+ return current
+ elif 'server-login' in sections:
+ # old format
+ server = 'server-login'
+ if config.has_option(server, 'repository'):
+ repository = config.get(server, 'repository')
+ else:
+ repository = self.DEFAULT_REPOSITORY
+ return {'username': config.get(server, 'username'),
+ 'password': config.get(server, 'password'),
+ 'repository': repository,
+ 'server': server,
+ 'realm': self.DEFAULT_REALM}
+
+ return {}
+
+ def _read_pypi_response(self, response):
+ """Read and decode a PyPI HTTP response."""
+ import cgi
+ content_type = response.getheader('content-type', 'text/plain')
+ encoding = cgi.parse_header(content_type)[1].get('charset', 'ascii')
+ return response.read().decode(encoding)
+
+ def initialize_options(self):
+ """Initialize options."""
+ self.repository = None
+ self.realm = None
+ self.show_response = 0
+
+ def finalize_options(self):
+ """Finalizes options."""
+ if self.repository is None:
+ self.repository = self.DEFAULT_REPOSITORY
+ if self.realm is None:
+ self.realm = self.DEFAULT_REALM
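The .pypirc layout that _read_pypirc() expects, parsed here from an in-memory string with fake credentials:

    from configparser import RawConfigParser

    sample = (
        "[distutils]\n"
        "index-servers =\n"
        "    pypi\n"
        "\n"
        "[pypi]\n"
        "username: fred\n"
        "password: sekrit\n"
    )
    config = RawConfigParser()
    config.read_string(sample)
    servers = [s.strip() for s in config.get('distutils', 'index-servers').split('\n') if s.strip()]
    assert servers == ['pypi']
    assert config.get('pypi', 'username') == 'fred'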
diff --git a/setuptools/_distutils/core.py b/setuptools/_distutils/core.py
new file mode 100644
index 0000000..f43888e
--- /dev/null
+++ b/setuptools/_distutils/core.py
@@ -0,0 +1,249 @@
+"""distutils.core
+
+The only module that needs to be imported to use the Distutils; provides
+the 'setup' function (which is to be called from the setup script). Also
+indirectly provides the Distribution and Command classes, although they are
+really defined in distutils.dist and distutils.cmd.
+"""
+
+import os
+import sys
+import tokenize
+
+from distutils.debug import DEBUG
+from distutils.errors import *
+
+# Mainly import these so setup scripts can "from distutils.core import" them.
+from distutils.dist import Distribution
+from distutils.cmd import Command
+from distutils.config import PyPIRCCommand
+from distutils.extension import Extension
+
+# This is a barebones help message generated and displayed when the user
+# runs the setup script with no arguments at all. More useful help
+# is generated with various --help options: global help, list commands,
+# and per-command help.
+USAGE = """\
+usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...]
+ or: %(script)s --help [cmd1 cmd2 ...]
+ or: %(script)s --help-commands
+ or: %(script)s cmd --help
+"""
+
+def gen_usage (script_name):
+ script = os.path.basename(script_name)
+ return USAGE % vars()
+
+
+# Some mild magic to control the behaviour of 'setup()' from 'run_setup()'.
+_setup_stop_after = None
+_setup_distribution = None
+
+# Legal keyword arguments for the setup() function
+setup_keywords = ('distclass', 'script_name', 'script_args', 'options',
+ 'name', 'version', 'author', 'author_email',
+ 'maintainer', 'maintainer_email', 'url', 'license',
+ 'description', 'long_description', 'keywords',
+ 'platforms', 'classifiers', 'download_url',
+ 'requires', 'provides', 'obsoletes',
+ )
+
+# Legal keyword arguments for the Extension constructor
+extension_keywords = ('name', 'sources', 'include_dirs',
+ 'define_macros', 'undef_macros',
+ 'library_dirs', 'libraries', 'runtime_library_dirs',
+ 'extra_objects', 'extra_compile_args', 'extra_link_args',
+ 'swig_opts', 'export_symbols', 'depends', 'language')
+
+def setup (**attrs):
+ """The gateway to the Distutils: do everything your setup script needs
+ to do, in a highly flexible and user-driven way. Briefly: create a
+ Distribution instance; find and parse config files; parse the command
+ line; run each Distutils command found there, customized by the options
+ supplied to 'setup()' (as keyword arguments), in config files, and on
+ the command line.
+
+ The Distribution instance might be an instance of a class supplied via
+ the 'distclass' keyword argument to 'setup'; if no such class is
+ supplied, then the Distribution class (in dist.py) is instantiated.
+ All other arguments to 'setup' (except for 'cmdclass') are used to set
+ attributes of the Distribution instance.
+
+ The 'cmdclass' argument, if supplied, is a dictionary mapping command
+ names to command classes. Each command encountered on the command line
+ will be turned into a command class, which is in turn instantiated; any
+ class found in 'cmdclass' is used in place of the default, which is
+ (for command 'foo_bar') class 'foo_bar' in module
+ 'distutils.command.foo_bar'. The command class must provide a
+ 'user_options' attribute which is a list of option specifiers for
+ 'distutils.fancy_getopt'. Any command-line options between the current
+ and the next command are used to set attributes of the current command
+ object.
+
+ When the entire command-line has been successfully parsed, calls the
+ 'run()' method on each command object in turn. This method will be
+ driven entirely by the Distribution object (which each command object
+ has a reference to, thanks to its constructor), and the
+ command-specific options that became attributes of each command
+ object.
+ """
+
+ global _setup_stop_after, _setup_distribution
+
+ # Determine the distribution class -- either caller-supplied or
+ # our Distribution (see below).
+ klass = attrs.get('distclass')
+ if klass:
+ del attrs['distclass']
+ else:
+ klass = Distribution
+
+ if 'script_name' not in attrs:
+ attrs['script_name'] = os.path.basename(sys.argv[0])
+ if 'script_args' not in attrs:
+ attrs['script_args'] = sys.argv[1:]
+
+ # Create the Distribution instance, using the remaining arguments
+ # (ie. everything except distclass) to initialize it
+ try:
+ _setup_distribution = dist = klass(attrs)
+ except DistutilsSetupError as msg:
+ if 'name' not in attrs:
+ raise SystemExit("error in setup command: %s" % msg)
+ else:
+ raise SystemExit("error in %s setup command: %s" % \
+ (attrs['name'], msg))
+
+ if _setup_stop_after == "init":
+ return dist
+
+ # Find and parse the config file(s): they will override options from
+ # the setup script, but be overridden by the command line.
+ dist.parse_config_files()
+
+ if DEBUG:
+ print("options (after parsing config files):")
+ dist.dump_option_dicts()
+
+ if _setup_stop_after == "config":
+ return dist
+
+ # Parse the command line and override config files; any
+ # command-line errors are the end user's fault, so turn them into
+ # SystemExit to suppress tracebacks.
+ try:
+ ok = dist.parse_command_line()
+ except DistutilsArgError as msg:
+ raise SystemExit(gen_usage(dist.script_name) + "\nerror: %s" % msg)
+
+ if DEBUG:
+ print("options (after parsing command line):")
+ dist.dump_option_dicts()
+
+ if _setup_stop_after == "commandline":
+ return dist
+
+ # And finally, run all the commands found on the command line.
+ if ok:
+ return run_commands(dist)
+
+ return dist
+
+# setup ()
+
+
+def run_commands (dist):
+ """Given a Distribution object run all the commands,
+ raising ``SystemExit`` errors in the case of failure.
+
+ This function assumes that either ``sys.argv`` or ``dist.script_args``
+ is already set accordingly.
+ """
+ try:
+ dist.run_commands()
+ except KeyboardInterrupt:
+ raise SystemExit("interrupted")
+ except OSError as exc:
+ if DEBUG:
+ sys.stderr.write("error: %s\n" % (exc,))
+ raise
+ else:
+ raise SystemExit("error: %s" % (exc,))
+
+ except (DistutilsError,
+ CCompilerError) as msg:
+ if DEBUG:
+ raise
+ else:
+ raise SystemExit("error: " + str(msg))
+
+ return dist
+
+
+def run_setup (script_name, script_args=None, stop_after="run"):
+ """Run a setup script in a somewhat controlled environment, and
+ return the Distribution instance that drives things. This is useful
+ if you need to find out the distribution meta-data (passed as
+    keyword args from 'script' to 'setup()'), or the contents of the
+ config files or command-line.
+
+ 'script_name' is a file that will be read and run with 'exec()';
+    'sys.argv[0]' will be replaced with 'script_name' for the duration of the
+ call. 'script_args' is a list of strings; if supplied,
+ 'sys.argv[1:]' will be replaced by 'script_args' for the duration of
+ the call.
+
+ 'stop_after' tells 'setup()' when to stop processing; possible
+ values:
+ init
+ stop after the Distribution instance has been created and
+ populated with the keyword arguments to 'setup()'
+ config
+ stop after config files have been parsed (and their data
+ stored in the Distribution instance)
+ commandline
+ stop after the command-line ('sys.argv[1:]' or 'script_args')
+ have been parsed (and the data stored in the Distribution)
+ run [default]
+ stop after all commands have been run (the same as if 'setup()'
+        had been called in the usual way)
+
+ Returns the Distribution instance, which provides all information
+ used to drive the Distutils.
+ """
+ if stop_after not in ('init', 'config', 'commandline', 'run'):
+ raise ValueError("invalid value for 'stop_after': %r" % (stop_after,))
+
+ global _setup_stop_after, _setup_distribution
+ _setup_stop_after = stop_after
+
+ save_argv = sys.argv.copy()
+ g = {'__file__': script_name, '__name__': '__main__'}
+ try:
+ try:
+ sys.argv[0] = script_name
+ if script_args is not None:
+ sys.argv[1:] = script_args
+ # tokenize.open supports automatic encoding detection
+ with tokenize.open(script_name) as f:
+ code = f.read().replace(r'\r\n', r'\n')
+ exec(code, g)
+ finally:
+ sys.argv = save_argv
+ _setup_stop_after = None
+ except SystemExit:
+ # Hmm, should we do something if exiting with a non-zero code
+ # (ie. error)?
+ pass
+
+ if _setup_distribution is None:
+ raise RuntimeError(("'distutils.core.setup()' was never called -- "
+ "perhaps '%s' is not a Distutils setup script?") % \
+ script_name)
+
+ # I wonder if the setup script's namespace -- g and l -- would be of
+ # any interest to callers?
+ #print "_setup_distribution:", _setup_distribution
+ return _setup_distribution
+
+# run_setup ()
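As a rough usage sketch, run_setup() can parse a setup script and hand back the Distribution without executing any commands; the 'setup.py' path below is a placeholder for a real setup script, not something defined in the module above.

    from distutils.core import run_setup

    # Stop right after the Distribution is created, so no commands execute;
    # 'setup.py' is a hypothetical setup script in the current directory.
    dist = run_setup('setup.py', script_args=[], stop_after='init')
    print(dist.get_name(), dist.get_version())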
diff --git a/setuptools/_distutils/cygwinccompiler.py b/setuptools/_distutils/cygwinccompiler.py
new file mode 100644
index 0000000..c5c86d8
--- /dev/null
+++ b/setuptools/_distutils/cygwinccompiler.py
@@ -0,0 +1,362 @@
+"""distutils.cygwinccompiler
+
+Provides the CygwinCCompiler class, a subclass of UnixCCompiler that
+handles the Cygwin port of the GNU C compiler to Windows. It also contains
+the Mingw32CCompiler class which handles the mingw32 port of GCC (same as
+cygwin in no-cygwin mode).
+"""
+
+# problems:
+#
+# * if you use a msvc compiled python version (1.5.2)
+# 1. you have to insert a __GNUC__ section in its config.h
+# 2. you have to generate an import library for its dll
+# - create a def-file for python??.dll
+# - create an import library using
+# dlltool --dllname python15.dll --def python15.def \
+# --output-lib libpython15.a
+#
+# see also http://starship.python.net/crew/kernr/mingw32/Notes.html
+#
+# * We put export_symbols in a def-file, and don't use
+#   --export-all-symbols because it didn't work reliably in some
+#   tested configurations. And because other Windows compilers also
+#   need their symbols specified, this is no serious problem.
+#
+# tested configurations:
+#
+# * cygwin gcc 2.91.57/ld 2.9.4/dllwrap 0.2.4 works
+# (after patching python's config.h and for C++ some other include files)
+# see also http://starship.python.net/crew/kernr/mingw32/Notes.html
+# * mingw32 gcc 2.95.2/ld 2.9.4/dllwrap 0.2.4 works
+# (ld doesn't support -shared, so we use dllwrap)
+# * cygwin gcc 2.95.2/ld 2.10.90/dllwrap 2.10.90 works now
+# - its dllwrap doesn't work, there is a bug in binutils 2.10.90
+# see also http://sources.redhat.com/ml/cygwin/2000-06/msg01274.html
+#   - using gcc -mdll instead of dllwrap doesn't work without -static because
+#     it tries to link against dlls instead of their import libraries. (If
+#     it finds the dll first.)
+#     By specifying -static we force ld to link against the import libraries;
+#     this is the Windows standard, and the dlls normally do not contain the
+#     necessary symbols.
+# *** only the version of June 2000 shows these problems
+# * cygwin gcc 3.2/ld 2.13.90 works
+# (ld supports -shared)
+# * mingw gcc 3.2/ld 2.13 works
+# (ld supports -shared)
+# * llvm-mingw with Clang 11 works
+# (lld supports -shared)
+
+import os
+import sys
+import copy
+import shlex
+import warnings
+from subprocess import check_output
+
+from distutils.unixccompiler import UnixCCompiler
+from distutils.file_util import write_file
+from distutils.errors import (DistutilsExecError, CCompilerError,
+ CompileError, UnknownFileError)
+from distutils.version import LooseVersion, suppress_known_deprecation
+
+def get_msvcr():
+ """Include the appropriate MSVC runtime library if Python was built
+ with MSVC 7.0 or later.
+ """
+ msc_pos = sys.version.find('MSC v.')
+ if msc_pos != -1:
+ msc_ver = sys.version[msc_pos+6:msc_pos+10]
+ if msc_ver == '1300':
+ # MSVC 7.0
+ return ['msvcr70']
+ elif msc_ver == '1310':
+ # MSVC 7.1
+ return ['msvcr71']
+ elif msc_ver == '1400':
+ # VS2005 / MSVC 8.0
+ return ['msvcr80']
+ elif msc_ver == '1500':
+ # VS2008 / MSVC 9.0
+ return ['msvcr90']
+ elif msc_ver == '1600':
+ # VS2010 / MSVC 10.0
+ return ['msvcr100']
+ elif msc_ver == '1700':
+ # VS2012 / MSVC 11.0
+ return ['msvcr110']
+ elif msc_ver == '1800':
+ # VS2013 / MSVC 12.0
+ return ['msvcr120']
+ elif 1900 <= int(msc_ver) < 2000:
+ # VS2015 / MSVC 14.0
+ return ['ucrt', 'vcruntime140']
+ else:
+ raise ValueError("Unknown MS Compiler version %s " % msc_ver)
+
+
+class CygwinCCompiler(UnixCCompiler):
+ """ Handles the Cygwin port of the GNU C compiler to Windows.
+ """
+ compiler_type = 'cygwin'
+ obj_extension = ".o"
+ static_lib_extension = ".a"
+ shared_lib_extension = ".dll"
+ static_lib_format = "lib%s%s"
+ shared_lib_format = "%s%s"
+ exe_extension = ".exe"
+
+ def __init__(self, verbose=0, dry_run=0, force=0):
+
+ super().__init__(verbose, dry_run, force)
+
+ status, details = check_config_h()
+ self.debug_print("Python's GCC status: %s (details: %s)" %
+ (status, details))
+ if status is not CONFIG_H_OK:
+ self.warn(
+ "Python's pyconfig.h doesn't seem to support your compiler. "
+ "Reason: %s. "
+ "Compiling may fail because of undefined preprocessor macros."
+ % details)
+
+ self.cc = os.environ.get('CC', 'gcc')
+ self.cxx = os.environ.get('CXX', 'g++')
+
+ self.linker_dll = self.cc
+ shared_option = "-shared"
+
+ self.set_executables(compiler='%s -mcygwin -O -Wall' % self.cc,
+ compiler_so='%s -mcygwin -mdll -O -Wall' % self.cc,
+ compiler_cxx='%s -mcygwin -O -Wall' % self.cxx,
+ linker_exe='%s -mcygwin' % self.cc,
+ linker_so=('%s -mcygwin %s' %
+ (self.linker_dll, shared_option)))
+
+ # Include the appropriate MSVC runtime library if Python was built
+ # with MSVC 7.0 or later.
+ self.dll_libraries = get_msvcr()
+
+ @property
+ def gcc_version(self):
+        # Older numpy depended on this existing to check for ancient
+        # gcc versions. This doesn't make much sense with clang etc., so
+        # just hardcode it to something recent.
+ # https://github.com/numpy/numpy/pull/20333
+ warnings.warn(
+ "gcc_version attribute of CygwinCCompiler is deprecated. "
+ "Instead of returning actual gcc version a fixed value 11.2.0 is returned.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ with suppress_known_deprecation():
+ return LooseVersion("11.2.0")
+
+ def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+ """Compiles the source by spawning GCC and windres if needed."""
+ if ext == '.rc' or ext == '.res':
+ # gcc needs '.res' and '.rc' compiled to object files !!!
+ try:
+ self.spawn(["windres", "-i", src, "-o", obj])
+ except DistutilsExecError as msg:
+ raise CompileError(msg)
+ else: # for other files use the C-compiler
+ try:
+ self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
+ extra_postargs)
+ except DistutilsExecError as msg:
+ raise CompileError(msg)
+
+ def link(self, target_desc, objects, output_filename, output_dir=None,
+ libraries=None, library_dirs=None, runtime_library_dirs=None,
+ export_symbols=None, debug=0, extra_preargs=None,
+ extra_postargs=None, build_temp=None, target_lang=None):
+ """Link the objects."""
+ # use separate copies, so we can modify the lists
+ extra_preargs = copy.copy(extra_preargs or [])
+ libraries = copy.copy(libraries or [])
+ objects = copy.copy(objects or [])
+
+ # Additional libraries
+ libraries.extend(self.dll_libraries)
+
+ # handle export symbols by creating a def-file
+ # with executables this only works with gcc/ld as linker
+ if ((export_symbols is not None) and
+ (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
+ # (The linker doesn't do anything if output is up-to-date.
+            # So it would probably be better to check if we really need this,
+            # but to do that we would have to duplicate some unchanged parts
+            # of UnixCCompiler, and that is not what we want.)
+
+ # we want to put some files in the same directory as the
+ # object files are, build_temp doesn't help much
+ # where are the object files
+ temp_dir = os.path.dirname(objects[0])
+ # name of dll to give the helper files the same base name
+ (dll_name, dll_extension) = os.path.splitext(
+ os.path.basename(output_filename))
+
+ # generate the filenames for these files
+ def_file = os.path.join(temp_dir, dll_name + ".def")
+ lib_file = os.path.join(temp_dir, 'lib' + dll_name + ".a")
+
+ # Generate .def file
+ contents = [
+ "LIBRARY %s" % os.path.basename(output_filename),
+ "EXPORTS"]
+ for sym in export_symbols:
+ contents.append(sym)
+ self.execute(write_file, (def_file, contents),
+ "writing %s" % def_file)
+
+            # next, add options for the def-file and for creating import libraries
+
+ # doesn't work: bfd_close build\...\libfoo.a: Invalid operation
+ #extra_preargs.extend(["-Wl,--out-implib,%s" % lib_file])
+            # for gcc/ld the def-file is specified like any other object file
+ objects.append(def_file)
+
+ #end: if ((export_symbols is not None) and
+ # (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
+
+        # Anyone who wants symbols and a much larger output file
+        # should explicitly switch debug mode on;
+        # otherwise we let ld strip the output file
+ # (On my machine: 10KiB < stripped_file < ??100KiB
+ # unstripped_file = stripped_file + XXX KiB
+ # ( XXX=254 for a typical python extension))
+ if not debug:
+ extra_preargs.append("-s")
+
+ UnixCCompiler.link(self, target_desc, objects, output_filename,
+ output_dir, libraries, library_dirs,
+ runtime_library_dirs,
+ None, # export_symbols, we do this in our def-file
+ debug, extra_preargs, extra_postargs, build_temp,
+ target_lang)
+
+ # -- Miscellaneous methods -----------------------------------------
+
+ def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
+ """Adds supports for rc and res files."""
+ if output_dir is None:
+ output_dir = ''
+ obj_names = []
+ for src_name in source_filenames:
+ # use normcase to make sure '.rc' is really '.rc' and not '.RC'
+ base, ext = os.path.splitext(os.path.normcase(src_name))
+ if ext not in (self.src_extensions + ['.rc','.res']):
+ raise UnknownFileError("unknown file type '%s' (from '%s')" % \
+ (ext, src_name))
+ if strip_dir:
+ base = os.path.basename (base)
+ if ext in ('.res', '.rc'):
+ # these need to be compiled to object files
+ obj_names.append (os.path.join(output_dir,
+ base + ext + self.obj_extension))
+ else:
+ obj_names.append (os.path.join(output_dir,
+ base + self.obj_extension))
+ return obj_names
+
+# the same as cygwin plus some additional parameters
+class Mingw32CCompiler(CygwinCCompiler):
+ """ Handles the Mingw32 port of the GNU C compiler to Windows.
+ """
+ compiler_type = 'mingw32'
+
+ def __init__(self, verbose=0, dry_run=0, force=0):
+
+ super().__init__ (verbose, dry_run, force)
+
+ shared_option = "-shared"
+
+ if is_cygwincc(self.cc):
+ raise CCompilerError(
+ 'Cygwin gcc cannot be used with --compiler=mingw32')
+
+ self.set_executables(compiler='%s -O -Wall' % self.cc,
+ compiler_so='%s -mdll -O -Wall' % self.cc,
+ compiler_cxx='%s -O -Wall' % self.cxx,
+ linker_exe='%s' % self.cc,
+ linker_so='%s %s'
+ % (self.linker_dll, shared_option))
+
+ # Maybe we should also append -mthreads, but then the finished
+ # dlls need another dll (mingwm10.dll see Mingw32 docs)
+ # (-mthreads: Support thread-safe exception handling on `Mingw32')
+
+ # no additional libraries needed
+ self.dll_libraries=[]
+
+ # Include the appropriate MSVC runtime library if Python was built
+ # with MSVC 7.0 or later.
+ self.dll_libraries = get_msvcr()
+
+# Because these compilers aren't configured in Python's pyconfig.h file by
+# default, we should at least warn the user if he is using an unmodified
+# version.
+
+CONFIG_H_OK = "ok"
+CONFIG_H_NOTOK = "not ok"
+CONFIG_H_UNCERTAIN = "uncertain"
+
+def check_config_h():
+ """Check if the current Python installation appears amenable to building
+ extensions with GCC.
+
+ Returns a tuple (status, details), where 'status' is one of the following
+ constants:
+
+ - CONFIG_H_OK: all is well, go ahead and compile
+ - CONFIG_H_NOTOK: doesn't look good
+ - CONFIG_H_UNCERTAIN: not sure -- unable to read pyconfig.h
+
+ 'details' is a human-readable string explaining the situation.
+
+ Note there are two ways to conclude "OK": either 'sys.version' contains
+ the string "GCC" (implying that this Python was built with GCC), or the
+ installed "pyconfig.h" contains the string "__GNUC__".
+ """
+
+ # XXX since this function also checks sys.version, it's not strictly a
+ # "pyconfig.h" check -- should probably be renamed...
+
+ from distutils import sysconfig
+
+ # if sys.version contains GCC then python was compiled with GCC, and the
+ # pyconfig.h file should be OK
+ if "GCC" in sys.version:
+ return CONFIG_H_OK, "sys.version mentions 'GCC'"
+
+ # Clang would also work
+ if "Clang" in sys.version:
+ return CONFIG_H_OK, "sys.version mentions 'Clang'"
+
+ # let's see if __GNUC__ is mentioned in python.h
+ fn = sysconfig.get_config_h_filename()
+ try:
+ config_h = open(fn)
+ try:
+ if "__GNUC__" in config_h.read():
+ return CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn
+ else:
+ return CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn
+ finally:
+ config_h.close()
+ except OSError as exc:
+ return (CONFIG_H_UNCERTAIN,
+ "couldn't read '%s': %s" % (fn, exc.strerror))
+
+def is_cygwincc(cc):
+ '''Try to determine if the compiler that would be used is from cygwin.'''
+ out_string = check_output(shlex.split(cc) + ['-dumpmachine'])
+ return out_string.strip().endswith(b'cygwin')
+
+
+get_versions = None
+"""
+A stand-in for the previous get_versions() function to prevent failures
+when monkeypatched. See pypa/setuptools#2969.
+"""
diff --git a/setuptools/_distutils/debug.py b/setuptools/_distutils/debug.py
new file mode 100644
index 0000000..daf1660
--- /dev/null
+++ b/setuptools/_distutils/debug.py
@@ -0,0 +1,5 @@
+import os
+
+# If DISTUTILS_DEBUG is anything other than the empty string, we run in
+# debug mode.
+DEBUG = os.environ.get('DISTUTILS_DEBUG')
diff --git a/setuptools/_distutils/dep_util.py b/setuptools/_distutils/dep_util.py
new file mode 100644
index 0000000..d74f5e4
--- /dev/null
+++ b/setuptools/_distutils/dep_util.py
@@ -0,0 +1,92 @@
+"""distutils.dep_util
+
+Utility functions for simple, timestamp-based dependency checking of files
+and groups of files; also, functions based entirely on such
+timestamp dependency analysis."""
+
+import os
+from distutils.errors import DistutilsFileError
+
+
+def newer (source, target):
+ """Return true if 'source' exists and is more recently modified than
+ 'target', or if 'source' exists and 'target' doesn't. Return false if
+ both exist and 'target' is the same age or younger than 'source'.
+ Raise DistutilsFileError if 'source' does not exist.
+ """
+ if not os.path.exists(source):
+ raise DistutilsFileError("file '%s' does not exist" %
+ os.path.abspath(source))
+ if not os.path.exists(target):
+ return 1
+
+ from stat import ST_MTIME
+ mtime1 = os.stat(source)[ST_MTIME]
+ mtime2 = os.stat(target)[ST_MTIME]
+
+ return mtime1 > mtime2
+
+# newer ()
+
+
+def newer_pairwise (sources, targets):
+ """Walk two filename lists in parallel, testing if each source is newer
+ than its corresponding target. Return a pair of lists (sources,
+ targets) where source is newer than target, according to the semantics
+ of 'newer()'.
+ """
+ if len(sources) != len(targets):
+ raise ValueError("'sources' and 'targets' must be same length")
+
+ # build a pair of lists (sources, targets) where source is newer
+ n_sources = []
+ n_targets = []
+ for i in range(len(sources)):
+ if newer(sources[i], targets[i]):
+ n_sources.append(sources[i])
+ n_targets.append(targets[i])
+
+ return (n_sources, n_targets)
+
+# newer_pairwise ()
+
+
+def newer_group (sources, target, missing='error'):
+ """Return true if 'target' is out-of-date with respect to any file
+ listed in 'sources'. In other words, if 'target' exists and is newer
+ than every file in 'sources', return false; otherwise return true.
+ 'missing' controls what we do when a source file is missing; the
+ default ("error") is to blow up with an OSError from inside 'stat()';
+ if it is "ignore", we silently drop any missing source files; if it is
+ "newer", any missing source files make us assume that 'target' is
+ out-of-date (this is handy in "dry-run" mode: it'll make you pretend to
+ carry out commands that wouldn't work because inputs are missing, but
+ that doesn't matter because you're not actually going to run the
+ commands).
+ """
+ # If the target doesn't even exist, then it's definitely out-of-date.
+ if not os.path.exists(target):
+ return 1
+
+ # Otherwise we have to find out the hard way: if *any* source file
+ # is more recent than 'target', then 'target' is out-of-date and
+ # we can immediately return true. If we fall through to the end
+ # of the loop, then 'target' is up-to-date and we return false.
+ from stat import ST_MTIME
+ target_mtime = os.stat(target)[ST_MTIME]
+ for source in sources:
+ if not os.path.exists(source):
+ if missing == 'error': # blow up when we stat() the file
+ pass
+ elif missing == 'ignore': # missing source dropped from
+ continue # target's dependency list
+ elif missing == 'newer': # missing source means target is
+ return 1 # out-of-date
+
+ source_mtime = os.stat(source)[ST_MTIME]
+ if source_mtime > target_mtime:
+ return 1
+ else:
+ return 0
+
+# newer_group ()
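A self-contained sketch of the timestamp checks above; the file names are throwaway examples created in a temporary directory, not paths referenced elsewhere in this change.

    import os
    import tempfile
    from distutils.dep_util import newer, newer_group

    with tempfile.TemporaryDirectory() as tmp:
        src = os.path.join(tmp, 'app.c')   # placeholder source file
        obj = os.path.join(tmp, 'app.o')   # placeholder target file
        open(src, 'w').close()
        open(obj, 'w').close()
        os.utime(src, (1, 1))   # make the source clearly older ...
        os.utime(obj, (2, 2))   # ... than the target

        print(newer(src, obj))  # False: the target is up to date

        # A missing header counts as "newer" under this policy, forcing a rebuild.
        print(newer_group([src, os.path.join(tmp, 'missing.h')], obj,
                          missing='newer'))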
diff --git a/setuptools/_distutils/dir_util.py b/setuptools/_distutils/dir_util.py
new file mode 100644
index 0000000..d5cd8e3
--- /dev/null
+++ b/setuptools/_distutils/dir_util.py
@@ -0,0 +1,210 @@
+"""distutils.dir_util
+
+Utility functions for manipulating directories and directory trees."""
+
+import os
+import errno
+from distutils.errors import DistutilsFileError, DistutilsInternalError
+from distutils import log
+
+# cache used by mkpath() -- in addition to cheapening redundant calls,
+# eliminates redundant "creating /foo/bar/baz" messages in dry-run mode
+_path_created = {}
+
+# I don't use os.makedirs because a) it's new to Python 1.5.2, and
+# b) it blows up if the directory already exists (I want to silently
+# succeed in that case).
+def mkpath(name, mode=0o777, verbose=1, dry_run=0):
+ """Create a directory and any missing ancestor directories.
+
+ If the directory already exists (or if 'name' is the empty string, which
+ means the current directory, which of course exists), then do nothing.
+ Raise DistutilsFileError if unable to create some directory along the way
+ (eg. some sub-path exists, but is a file rather than a directory).
+ If 'verbose' is true, print a one-line summary of each mkdir to stdout.
+ Return the list of directories actually created.
+ """
+
+ global _path_created
+
+ # Detect a common bug -- name is None
+ if not isinstance(name, str):
+ raise DistutilsInternalError(
+ "mkpath: 'name' must be a string (got %r)" % (name,))
+
+ # XXX what's the better way to handle verbosity? print as we create
+ # each directory in the path (the current behaviour), or only announce
+ # the creation of the whole path? (quite easy to do the latter since
+ # we're not using a recursive algorithm)
+
+ name = os.path.normpath(name)
+ created_dirs = []
+ if os.path.isdir(name) or name == '':
+ return created_dirs
+ if _path_created.get(os.path.abspath(name)):
+ return created_dirs
+
+ (head, tail) = os.path.split(name)
+ tails = [tail] # stack of lone dirs to create
+
+ while head and tail and not os.path.isdir(head):
+ (head, tail) = os.path.split(head)
+ tails.insert(0, tail) # push next higher dir onto stack
+
+ # now 'head' contains the deepest directory that already exists
+ # (that is, the child of 'head' in 'name' is the highest directory
+ # that does *not* exist)
+ for d in tails:
+ #print "head = %s, d = %s: " % (head, d),
+ head = os.path.join(head, d)
+ abs_head = os.path.abspath(head)
+
+ if _path_created.get(abs_head):
+ continue
+
+ if verbose >= 1:
+ log.info("creating %s", head)
+
+ if not dry_run:
+ try:
+ os.mkdir(head, mode)
+ except OSError as exc:
+ if not (exc.errno == errno.EEXIST and os.path.isdir(head)):
+ raise DistutilsFileError(
+ "could not create '%s': %s" % (head, exc.args[-1]))
+ created_dirs.append(head)
+
+ _path_created[abs_head] = 1
+ return created_dirs
+
+def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0):
+ """Create all the empty directories under 'base_dir' needed to put 'files'
+ there.
+
+ 'base_dir' is just the name of a directory which doesn't necessarily
+ exist yet; 'files' is a list of filenames to be interpreted relative to
+ 'base_dir'. 'base_dir' + the directory portion of every file in 'files'
+ will be created if it doesn't already exist. 'mode', 'verbose' and
+ 'dry_run' flags are as for 'mkpath()'.
+ """
+ # First get the list of directories to create
+ need_dir = set()
+ for file in files:
+ need_dir.add(os.path.join(base_dir, os.path.dirname(file)))
+
+ # Now create them
+ for dir in sorted(need_dir):
+ mkpath(dir, mode, verbose=verbose, dry_run=dry_run)
+
+def copy_tree(src, dst, preserve_mode=1, preserve_times=1,
+ preserve_symlinks=0, update=0, verbose=1, dry_run=0):
+ """Copy an entire directory tree 'src' to a new location 'dst'.
+
+ Both 'src' and 'dst' must be directory names. If 'src' is not a
+ directory, raise DistutilsFileError. If 'dst' does not exist, it is
+ created with 'mkpath()'. The end result of the copy is that every
+ file in 'src' is copied to 'dst', and directories under 'src' are
+ recursively copied to 'dst'. Return the list of files that were
+ copied or might have been copied, using their output name. The
+ return value is unaffected by 'update' or 'dry_run': it is simply
+ the list of all files under 'src', with the names changed to be
+ under 'dst'.
+
+ 'preserve_mode' and 'preserve_times' are the same as for
+ 'copy_file'; note that they only apply to regular files, not to
+ directories. If 'preserve_symlinks' is true, symlinks will be
+ copied as symlinks (on platforms that support them!); otherwise
+ (the default), the destination of the symlink will be copied.
+ 'update' and 'verbose' are the same as for 'copy_file'.
+ """
+ from distutils.file_util import copy_file
+
+ if not dry_run and not os.path.isdir(src):
+ raise DistutilsFileError(
+ "cannot copy tree '%s': not a directory" % src)
+ try:
+ names = os.listdir(src)
+ except OSError as e:
+ if dry_run:
+ names = []
+ else:
+ raise DistutilsFileError(
+ "error listing files in '%s': %s" % (src, e.strerror))
+
+ if not dry_run:
+ mkpath(dst, verbose=verbose)
+
+ outputs = []
+
+ for n in names:
+ src_name = os.path.join(src, n)
+ dst_name = os.path.join(dst, n)
+
+ if n.startswith('.nfs'):
+ # skip NFS rename files
+ continue
+
+ if preserve_symlinks and os.path.islink(src_name):
+ link_dest = os.readlink(src_name)
+ if verbose >= 1:
+ log.info("linking %s -> %s", dst_name, link_dest)
+ if not dry_run:
+ os.symlink(link_dest, dst_name)
+ outputs.append(dst_name)
+
+ elif os.path.isdir(src_name):
+ outputs.extend(
+ copy_tree(src_name, dst_name, preserve_mode,
+ preserve_times, preserve_symlinks, update,
+ verbose=verbose, dry_run=dry_run))
+ else:
+ copy_file(src_name, dst_name, preserve_mode,
+ preserve_times, update, verbose=verbose,
+ dry_run=dry_run)
+ outputs.append(dst_name)
+
+ return outputs
+
+def _build_cmdtuple(path, cmdtuples):
+ """Helper for remove_tree()."""
+ for f in os.listdir(path):
+ real_f = os.path.join(path,f)
+ if os.path.isdir(real_f) and not os.path.islink(real_f):
+ _build_cmdtuple(real_f, cmdtuples)
+ else:
+ cmdtuples.append((os.remove, real_f))
+ cmdtuples.append((os.rmdir, path))
+
+def remove_tree(directory, verbose=1, dry_run=0):
+ """Recursively remove an entire directory tree.
+
+ Any errors are ignored (apart from being reported to stdout if 'verbose'
+ is true).
+ """
+ global _path_created
+
+ if verbose >= 1:
+ log.info("removing '%s' (and everything under it)", directory)
+ if dry_run:
+ return
+ cmdtuples = []
+ _build_cmdtuple(directory, cmdtuples)
+ for cmd in cmdtuples:
+ try:
+ cmd[0](cmd[1])
+ # remove dir from cache if it's already there
+ abspath = os.path.abspath(cmd[1])
+ if abspath in _path_created:
+ del _path_created[abspath]
+ except OSError as exc:
+ log.warn("error removing %s: %s", directory, exc)
+
+def ensure_relative(path):
+ """Take the full path 'path', and make it a relative path.
+
+ This is useful to make 'path' the second argument to os.path.join().
+ """
+ drive, path = os.path.splitdrive(path)
+ if path[0:1] == os.sep:
+ path = drive + path[1:]
+ return path
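A minimal sketch of the directory helpers above, again using placeholder paths inside a temporary directory rather than anything from the vendored tree.

    import os
    import tempfile
    from distutils.dir_util import mkpath, copy_tree, remove_tree

    with tempfile.TemporaryDirectory() as tmp:
        src = os.path.join(tmp, 'src', 'pkg')   # hypothetical source tree
        dst = os.path.join(tmp, 'dst')           # hypothetical destination

        # mkpath creates every missing ancestor and returns the directories made.
        print(mkpath(src, verbose=0))
        with open(os.path.join(src, 'data.txt'), 'w') as f:
            f.write('hello')

        # copy_tree mirrors the whole tree and returns the copied file names.
        print(copy_tree(os.path.join(tmp, 'src'), dst, verbose=0))

        # remove_tree deletes the copy; failures are logged rather than raised.
        remove_tree(dst, verbose=0)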
diff --git a/setuptools/_distutils/dist.py b/setuptools/_distutils/dist.py
new file mode 100644
index 0000000..37db4d6
--- /dev/null
+++ b/setuptools/_distutils/dist.py
@@ -0,0 +1,1257 @@
+"""distutils.dist
+
+Provides the Distribution class, which represents the module distribution
+being built/installed/distributed.
+"""
+
+import sys
+import os
+import re
+from email import message_from_file
+
+try:
+ import warnings
+except ImportError:
+ warnings = None
+
+from distutils.errors import *
+from distutils.fancy_getopt import FancyGetopt, translate_longopt
+from distutils.util import check_environ, strtobool, rfc822_escape
+from distutils import log
+from distutils.debug import DEBUG
+
+# Regex to define acceptable Distutils command names. This is not *quite*
+# the same as a Python NAME -- I don't allow leading underscores. The fact
+# that they're very similar is no coincidence; the default naming scheme is
+# to look for a Python module named after the command.
+command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$')
+
+
+def _ensure_list(value, fieldname):
+ if isinstance(value, str):
+ # a string containing comma separated values is okay. It will
+ # be converted to a list by Distribution.finalize_options().
+ pass
+ elif not isinstance(value, list):
+ # passing a tuple or an iterator perhaps, warn and convert
+ typename = type(value).__name__
+ msg = "Warning: '{fieldname}' should be a list, got type '{typename}'"
+ msg = msg.format(**locals())
+ log.log(log.WARN, msg)
+ value = list(value)
+ return value
+
+
+class Distribution:
+ """The core of the Distutils. Most of the work hiding behind 'setup'
+ is really done within a Distribution instance, which farms the work out
+ to the Distutils commands specified on the command line.
+
+ Setup scripts will almost never instantiate Distribution directly,
+ unless the 'setup()' function is totally inadequate to their needs.
+ However, it is conceivable that a setup script might wish to subclass
+ Distribution for some specialized purpose, and then pass the subclass
+ to 'setup()' as the 'distclass' keyword argument. If so, it is
+ necessary to respect the expectations that 'setup' has of Distribution.
+ See the code for 'setup()', in core.py, for details.
+ """
+
+ # 'global_options' describes the command-line options that may be
+ # supplied to the setup script prior to any actual commands.
+ # Eg. "./setup.py -n" or "./setup.py --quiet" both take advantage of
+ # these global options. This list should be kept to a bare minimum,
+ # since every global option is also valid as a command option -- and we
+ # don't want to pollute the commands with too many options that they
+ # have minimal control over.
+ # The fourth entry for verbose means that it can be repeated.
+ global_options = [
+ ('verbose', 'v', "run verbosely (default)", 1),
+ ('quiet', 'q', "run quietly (turns verbosity off)"),
+ ('dry-run', 'n', "don't actually do anything"),
+ ('help', 'h', "show detailed help message"),
+ ('no-user-cfg', None,
+ 'ignore pydistutils.cfg in your home directory'),
+ ]
+
+ # 'common_usage' is a short (2-3 line) string describing the common
+ # usage of the setup script.
+ common_usage = """\
+Common commands: (see '--help-commands' for more)
+
+ setup.py build will build the package underneath 'build/'
+ setup.py install will install the package
+"""
+
+ # options that are not propagated to the commands
+ display_options = [
+ ('help-commands', None,
+ "list all available commands"),
+ ('name', None,
+ "print package name"),
+ ('version', 'V',
+ "print package version"),
+ ('fullname', None,
+ "print <package name>-<version>"),
+ ('author', None,
+ "print the author's name"),
+ ('author-email', None,
+ "print the author's email address"),
+ ('maintainer', None,
+ "print the maintainer's name"),
+ ('maintainer-email', None,
+ "print the maintainer's email address"),
+ ('contact', None,
+ "print the maintainer's name if known, else the author's"),
+ ('contact-email', None,
+ "print the maintainer's email address if known, else the author's"),
+ ('url', None,
+ "print the URL for this package"),
+ ('license', None,
+ "print the license of the package"),
+ ('licence', None,
+ "alias for --license"),
+ ('description', None,
+ "print the package description"),
+ ('long-description', None,
+ "print the long package description"),
+ ('platforms', None,
+ "print the list of platforms"),
+ ('classifiers', None,
+ "print the list of classifiers"),
+ ('keywords', None,
+ "print the list of keywords"),
+ ('provides', None,
+ "print the list of packages/modules provided"),
+ ('requires', None,
+ "print the list of packages/modules required"),
+ ('obsoletes', None,
+ "print the list of packages/modules made obsolete")
+ ]
+ display_option_names = [translate_longopt(x[0]) for x in display_options]
+
+ # negative options are options that exclude other options
+ negative_opt = {'quiet': 'verbose'}
+
+ # -- Creation/initialization methods -------------------------------
+
+ def __init__(self, attrs=None):
+ """Construct a new Distribution instance: initialize all the
+ attributes of a Distribution, and then use 'attrs' (a dictionary
+ mapping attribute names to values) to assign some of those
+ attributes their "real" values. (Any attributes not mentioned in
+ 'attrs' will be assigned to some null value: 0, None, an empty list
+ or dictionary, etc.) Most importantly, initialize the
+ 'command_obj' attribute to the empty dictionary; this will be
+ filled in with real command objects by 'parse_command_line()'.
+ """
+
+ # Default values for our command-line options
+ self.verbose = 1
+ self.dry_run = 0
+ self.help = 0
+ for attr in self.display_option_names:
+ setattr(self, attr, 0)
+
+ # Store the distribution meta-data (name, version, author, and so
+ # forth) in a separate object -- we're getting to have enough
+ # information here (and enough command-line options) that it's
+ # worth it. Also delegate 'get_XXX()' methods to the 'metadata'
+ # object in a sneaky and underhanded (but efficient!) way.
+ self.metadata = DistributionMetadata()
+ for basename in self.metadata._METHOD_BASENAMES:
+ method_name = "get_" + basename
+ setattr(self, method_name, getattr(self.metadata, method_name))
+
+ # 'cmdclass' maps command names to class objects, so we
+ # can 1) quickly figure out which class to instantiate when
+ # we need to create a new command object, and 2) have a way
+ # for the setup script to override command classes
+ self.cmdclass = {}
+
+ # 'command_packages' is a list of packages in which commands
+ # are searched for. The factory for command 'foo' is expected
+ # to be named 'foo' in the module 'foo' in one of the packages
+ # named here. This list is searched from the left; an error
+ # is raised if no named package provides the command being
+ # searched for. (Always access using get_command_packages().)
+ self.command_packages = None
+
+ # 'script_name' and 'script_args' are usually set to sys.argv[0]
+ # and sys.argv[1:], but they can be overridden when the caller is
+ # not necessarily a setup script run from the command-line.
+ self.script_name = None
+ self.script_args = None
+
+ # 'command_options' is where we store command options between
+ # parsing them (from config files, the command-line, etc.) and when
+ # they are actually needed -- ie. when the command in question is
+ # instantiated. It is a dictionary of dictionaries of 2-tuples:
+ # command_options = { command_name : { option : (source, value) } }
+ self.command_options = {}
+
+ # 'dist_files' is the list of (command, pyversion, file) that
+ # have been created by any dist commands run so far. This is
+ # filled regardless of whether the run is dry or not. pyversion
+ # gives sysconfig.get_python_version() if the dist file is
+ # specific to a Python version, 'any' if it is good for all
+ # Python versions on the target platform, and '' for a source
+ # file. pyversion should not be used to specify minimum or
+ # maximum required Python versions; use the metainfo for that
+ # instead.
+ self.dist_files = []
+
+ # These options are really the business of various commands, rather
+ # than of the Distribution itself. We provide aliases for them in
+ # Distribution as a convenience to the developer.
+ self.packages = None
+ self.package_data = {}
+ self.package_dir = None
+ self.py_modules = None
+ self.libraries = None
+ self.headers = None
+ self.ext_modules = None
+ self.ext_package = None
+ self.include_dirs = None
+ self.extra_path = None
+ self.scripts = None
+ self.data_files = None
+ self.password = ''
+
+ # And now initialize bookkeeping stuff that can't be supplied by
+ # the caller at all. 'command_obj' maps command names to
+ # Command instances -- that's how we enforce that every command
+ # class is a singleton.
+ self.command_obj = {}
+
+ # 'have_run' maps command names to boolean values; it keeps track
+ # of whether we have actually run a particular command, to make it
+ # cheap to "run" a command whenever we think we might need to -- if
+ # it's already been done, no need for expensive filesystem
+ # operations, we just check the 'have_run' dictionary and carry on.
+ # It's only safe to query 'have_run' for a command class that has
+ # been instantiated -- a false value will be inserted when the
+ # command object is created, and replaced with a true value when
+ # the command is successfully run. Thus it's probably best to use
+ # '.get()' rather than a straight lookup.
+ self.have_run = {}
+
+ # Now we'll use the attrs dictionary (ultimately, keyword args from
+ # the setup script) to possibly override any or all of these
+ # distribution options.
+
+ if attrs:
+ # Pull out the set of command options and work on them
+ # specifically. Note that this order guarantees that aliased
+ # command options will override any supplied redundantly
+ # through the general options dictionary.
+ options = attrs.get('options')
+ if options is not None:
+ del attrs['options']
+ for (command, cmd_options) in options.items():
+ opt_dict = self.get_option_dict(command)
+ for (opt, val) in cmd_options.items():
+ opt_dict[opt] = ("setup script", val)
+
+ if 'licence' in attrs:
+ attrs['license'] = attrs['licence']
+ del attrs['licence']
+ msg = "'licence' distribution option is deprecated; use 'license'"
+ if warnings is not None:
+ warnings.warn(msg)
+ else:
+ sys.stderr.write(msg + "\n")
+
+ # Now work on the rest of the attributes. Any attribute that's
+ # not already defined is invalid!
+ for (key, val) in attrs.items():
+ if hasattr(self.metadata, "set_" + key):
+ getattr(self.metadata, "set_" + key)(val)
+ elif hasattr(self.metadata, key):
+ setattr(self.metadata, key, val)
+ elif hasattr(self, key):
+ setattr(self, key, val)
+ else:
+ msg = "Unknown distribution option: %s" % repr(key)
+ warnings.warn(msg)
+
+ # no-user-cfg is handled before other command line args
+ # because other args override the config files, and this
+ # one is needed before we can load the config files.
+ # If attrs['script_args'] wasn't passed, assume false.
+ #
+        # This also makes sure we just look at the global options
+ self.want_user_cfg = True
+
+ if self.script_args is not None:
+ for arg in self.script_args:
+ if not arg.startswith('-'):
+ break
+ if arg == '--no-user-cfg':
+ self.want_user_cfg = False
+ break
+
+ self.finalize_options()
+
+ def get_option_dict(self, command):
+ """Get the option dictionary for a given command. If that
+ command's option dictionary hasn't been created yet, then create it
+ and return the new dictionary; otherwise, return the existing
+ option dictionary.
+ """
+ dict = self.command_options.get(command)
+ if dict is None:
+ dict = self.command_options[command] = {}
+ return dict
+
+ def dump_option_dicts(self, header=None, commands=None, indent=""):
+ from pprint import pformat
+
+ if commands is None: # dump all command option dicts
+ commands = sorted(self.command_options.keys())
+
+ if header is not None:
+ self.announce(indent + header)
+ indent = indent + " "
+
+ if not commands:
+ self.announce(indent + "no commands known yet")
+ return
+
+ for cmd_name in commands:
+ opt_dict = self.command_options.get(cmd_name)
+ if opt_dict is None:
+ self.announce(indent +
+ "no option dict for '%s' command" % cmd_name)
+ else:
+ self.announce(indent +
+ "option dict for '%s' command:" % cmd_name)
+ out = pformat(opt_dict)
+ for line in out.split('\n'):
+ self.announce(indent + " " + line)
+
+ # -- Config file finding/parsing methods ---------------------------
+
+ def find_config_files(self):
+ """Find as many configuration files as should be processed for this
+ platform, and return a list of filenames in the order in which they
+ should be parsed. The filenames returned are guaranteed to exist
+ (modulo nasty race conditions).
+
+ There are three possible config files: distutils.cfg in the
+ Distutils installation directory (ie. where the top-level
+        Distutils __init__.py file lives), a file in the user's home
+ directory named .pydistutils.cfg on Unix and pydistutils.cfg
+ on Windows/Mac; and setup.cfg in the current directory.
+
+ The file in the user's home directory can be disabled with the
+ --no-user-cfg option.
+ """
+ files = []
+ check_environ()
+
+ # Where to look for the system-wide Distutils config file
+ sys_dir = os.path.dirname(sys.modules['distutils'].__file__)
+
+ # Look for the system config file
+ sys_file = os.path.join(sys_dir, "distutils.cfg")
+ if os.path.isfile(sys_file):
+ files.append(sys_file)
+
+ # What to call the per-user config file
+ if os.name == 'posix':
+ user_filename = ".pydistutils.cfg"
+ else:
+ user_filename = "pydistutils.cfg"
+
+ # And look for the user config file
+ if self.want_user_cfg:
+ user_file = os.path.join(os.path.expanduser('~'), user_filename)
+ if os.path.isfile(user_file):
+ files.append(user_file)
+
+ # All platforms support local setup.cfg
+ local_file = "setup.cfg"
+ if os.path.isfile(local_file):
+ files.append(local_file)
+
+ if DEBUG:
+ self.announce("using config files: %s" % ', '.join(files))
+
+ return files
+
+ def parse_config_files(self, filenames=None):
+ from configparser import ConfigParser
+
+ # Ignore install directory options if we have a venv
+ if sys.prefix != sys.base_prefix:
+ ignore_options = [
+ 'install-base', 'install-platbase', 'install-lib',
+ 'install-platlib', 'install-purelib', 'install-headers',
+ 'install-scripts', 'install-data', 'prefix', 'exec-prefix',
+ 'home', 'user', 'root']
+ else:
+ ignore_options = []
+
+ ignore_options = frozenset(ignore_options)
+
+ if filenames is None:
+ filenames = self.find_config_files()
+
+ if DEBUG:
+ self.announce("Distribution.parse_config_files():")
+
+ parser = ConfigParser()
+ for filename in filenames:
+ if DEBUG:
+ self.announce(" reading %s" % filename)
+ parser.read(filename)
+ for section in parser.sections():
+ options = parser.options(section)
+ opt_dict = self.get_option_dict(section)
+
+ for opt in options:
+ if opt != '__name__' and opt not in ignore_options:
+                    val = parser.get(section, opt)
+ opt = opt.replace('-', '_')
+ opt_dict[opt] = (filename, val)
+
+ # Make the ConfigParser forget everything (so we retain
+ # the original filenames that options come from)
+ parser.__init__()
+
+ # If there was a "global" section in the config file, use it
+ # to set Distribution options.
+
+ if 'global' in self.command_options:
+ for (opt, (src, val)) in self.command_options['global'].items():
+ alias = self.negative_opt.get(opt)
+ try:
+ if alias:
+ setattr(self, alias, not strtobool(val))
+ elif opt in ('verbose', 'dry_run'): # ugh!
+ setattr(self, opt, strtobool(val))
+ else:
+ setattr(self, opt, val)
+ except ValueError as msg:
+ raise DistutilsOptionError(msg)
+
+ # -- Command-line parsing methods ----------------------------------
+
+ def parse_command_line(self):
+ """Parse the setup script's command line, taken from the
+ 'script_args' instance attribute (which defaults to 'sys.argv[1:]'
+ -- see 'setup()' in core.py). This list is first processed for
+ "global options" -- options that set attributes of the Distribution
+ instance. Then, it is alternately scanned for Distutils commands
+ and options for that command. Each new command terminates the
+ options for the previous command. The allowed options for a
+ command are determined by the 'user_options' attribute of the
+ command class -- thus, we have to be able to load command classes
+ in order to parse the command line. Any error in that 'options'
+ attribute raises DistutilsGetoptError; any error on the
+ command-line raises DistutilsArgError. If no Distutils commands
+ were found on the command line, raises DistutilsArgError. Return
+ true if command-line was successfully parsed and we should carry
+ on with executing commands; false if no errors but we shouldn't
+ execute commands (currently, this only happens if user asks for
+ help).
+ """
+ #
+ # We now have enough information to show the Macintosh dialog
+ # that allows the user to interactively specify the "command line".
+ #
+ toplevel_options = self._get_toplevel_options()
+
+ # We have to parse the command line a bit at a time -- global
+ # options, then the first command, then its options, and so on --
+ # because each command will be handled by a different class, and
+ # the options that are valid for a particular class aren't known
+ # until we have loaded the command class, which doesn't happen
+ # until we know what the command is.
+
+ self.commands = []
+ parser = FancyGetopt(toplevel_options + self.display_options)
+ parser.set_negative_aliases(self.negative_opt)
+ parser.set_aliases({'licence': 'license'})
+ args = parser.getopt(args=self.script_args, object=self)
+ option_order = parser.get_option_order()
+ log.set_verbosity(self.verbose)
+
+ # for display options we return immediately
+ if self.handle_display_options(option_order):
+ return
+ while args:
+ args = self._parse_command_opts(parser, args)
+ if args is None: # user asked for help (and got it)
+ return
+
+ # Handle the cases of --help as a "global" option, ie.
+ # "setup.py --help" and "setup.py --help command ...". For the
+ # former, we show global options (--verbose, --dry-run, etc.)
+ # and display-only options (--name, --version, etc.); for the
+ # latter, we omit the display-only options and show help for
+ # each command listed on the command line.
+ if self.help:
+ self._show_help(parser,
+ display_options=len(self.commands) == 0,
+ commands=self.commands)
+ return
+
+ # Oops, no commands found -- an end-user error
+ if not self.commands:
+ raise DistutilsArgError("no commands supplied")
+
+ # All is well: return true
+ return True
+
+ def _get_toplevel_options(self):
+ """Return the non-display options recognized at the top level.
+
+ This includes options that are recognized *only* at the top
+ level as well as options recognized for commands.
+ """
+ return self.global_options + [
+ ("command-packages=", None,
+ "list of packages that provide distutils commands"),
+ ]
+
+ def _parse_command_opts(self, parser, args):
+ """Parse the command-line options for a single command.
+ 'parser' must be a FancyGetopt instance; 'args' must be the list
+ of arguments, starting with the current command (whose options
+ we are about to parse). Returns a new version of 'args' with
+ the next command at the front of the list; will be the empty
+ list if there are no more commands on the command line. Returns
+ None if the user asked for help on this command.
+ """
+ # late import because of mutual dependence between these modules
+ from distutils.cmd import Command
+
+ # Pull the current command from the head of the command line
+ command = args[0]
+ if not command_re.match(command):
+ raise SystemExit("invalid command name '%s'" % command)
+ self.commands.append(command)
+
+ # Dig up the command class that implements this command, so we
+ # 1) know that it's a valid command, and 2) know which options
+ # it takes.
+ try:
+ cmd_class = self.get_command_class(command)
+ except DistutilsModuleError as msg:
+ raise DistutilsArgError(msg)
+
+ # Require that the command class be derived from Command -- want
+ # to be sure that the basic "command" interface is implemented.
+ if not issubclass(cmd_class, Command):
+ raise DistutilsClassError(
+ "command class %s must subclass Command" % cmd_class)
+
+ # Also make sure that the command object provides a list of its
+ # known options.
+ if not (hasattr(cmd_class, 'user_options') and
+ isinstance(cmd_class.user_options, list)):
+ msg = ("command class %s must provide "
+ "'user_options' attribute (a list of tuples)")
+ raise DistutilsClassError(msg % cmd_class)
+
+ # If the command class has a list of negative alias options,
+ # merge it in with the global negative aliases.
+ negative_opt = self.negative_opt
+ if hasattr(cmd_class, 'negative_opt'):
+ negative_opt = negative_opt.copy()
+ negative_opt.update(cmd_class.negative_opt)
+
+ # Check for help_options in command class. They have a different
+ # format (tuple of four) so we need to preprocess them here.
+ if (hasattr(cmd_class, 'help_options') and
+ isinstance(cmd_class.help_options, list)):
+ help_options = fix_help_options(cmd_class.help_options)
+ else:
+ help_options = []
+
+ # All commands support the global options too, just by adding
+ # in 'global_options'.
+ parser.set_option_table(self.global_options +
+ cmd_class.user_options +
+ help_options)
+ parser.set_negative_aliases(negative_opt)
+ (args, opts) = parser.getopt(args[1:])
+ if hasattr(opts, 'help') and opts.help:
+ self._show_help(parser, display_options=0, commands=[cmd_class])
+ return
+
+ if (hasattr(cmd_class, 'help_options') and
+ isinstance(cmd_class.help_options, list)):
+ help_option_found=0
+ for (help_option, short, desc, func) in cmd_class.help_options:
+ if hasattr(opts, parser.get_attr_name(help_option)):
+ help_option_found=1
+ if callable(func):
+ func()
+ else:
+ raise DistutilsClassError(
+ "invalid help function %r for help option '%s': "
+ "must be a callable object (function, etc.)"
+ % (func, help_option))
+
+ if help_option_found:
+ return
+
+ # Put the options from the command-line into their official
+ # holding pen, the 'command_options' dictionary.
+ opt_dict = self.get_option_dict(command)
+ for (name, value) in vars(opts).items():
+ opt_dict[name] = ("command line", value)
+
+ return args
+
+ def finalize_options(self):
+ """Set final values for all the options on the Distribution
+ instance, analogous to the .finalize_options() method of Command
+ objects.
+ """
+ for attr in ('keywords', 'platforms'):
+ value = getattr(self.metadata, attr)
+ if value is None:
+ continue
+ if isinstance(value, str):
+ value = [elm.strip() for elm in value.split(',')]
+ setattr(self.metadata, attr, value)
+
+ def _show_help(self, parser, global_options=1, display_options=1,
+ commands=[]):
+ """Show help for the setup script command-line in the form of
+ several lists of command-line options. 'parser' should be a
+ FancyGetopt instance; do not expect it to be returned in the
+ same state, as its option table will be reset to make it
+ generate the correct help text.
+
+ If 'global_options' is true, lists the global options:
+ --verbose, --dry-run, etc. If 'display_options' is true, lists
+ the "display-only" options: --name, --version, etc. Finally,
+ lists per-command help for every command name or command class
+ in 'commands'.
+ """
+ # late import because of mutual dependence between these modules
+ from distutils.core import gen_usage
+ from distutils.cmd import Command
+
+ if global_options:
+ if display_options:
+ options = self._get_toplevel_options()
+ else:
+ options = self.global_options
+ parser.set_option_table(options)
+ parser.print_help(self.common_usage + "\nGlobal options:")
+ print('')
+
+ if display_options:
+ parser.set_option_table(self.display_options)
+ parser.print_help(
+ "Information display options (just display " +
+ "information, ignore any commands)")
+ print('')
+
+ for command in self.commands:
+ if isinstance(command, type) and issubclass(command, Command):
+ klass = command
+ else:
+ klass = self.get_command_class(command)
+ if (hasattr(klass, 'help_options') and
+ isinstance(klass.help_options, list)):
+ parser.set_option_table(klass.user_options +
+ fix_help_options(klass.help_options))
+ else:
+ parser.set_option_table(klass.user_options)
+ parser.print_help("Options for '%s' command:" % klass.__name__)
+ print('')
+
+ print(gen_usage(self.script_name))
+
+ def handle_display_options(self, option_order):
+ """If there were any non-global "display-only" options
+ (--help-commands or the metadata display options) on the command
+ line, display the requested info and return true; else return
+ false.
+ """
+ from distutils.core import gen_usage
+
+ # User just wants a list of commands -- we'll print it out and stop
+ # processing now (ie. if they ran "setup --help-commands foo bar",
+ # we ignore "foo bar").
+ if self.help_commands:
+ self.print_commands()
+ print('')
+ print(gen_usage(self.script_name))
+ return 1
+
+ # If user supplied any of the "display metadata" options, then
+ # display that metadata in the order in which the user supplied the
+ # metadata options.
+ any_display_options = 0
+ is_display_option = {}
+ for option in self.display_options:
+ is_display_option[option[0]] = 1
+
+ for (opt, val) in option_order:
+ if val and is_display_option.get(opt):
+ opt = translate_longopt(opt)
+ value = getattr(self.metadata, "get_"+opt)()
+ if opt in ['keywords', 'platforms']:
+ print(','.join(value))
+ elif opt in ('classifiers', 'provides', 'requires',
+ 'obsoletes'):
+ print('\n'.join(value))
+ else:
+ print(value)
+ any_display_options = 1
+
+ return any_display_options
+
+ def print_command_list(self, commands, header, max_length):
+ """Print a subset of the list of all commands -- used by
+ 'print_commands()'.
+ """
+ print(header + ":")
+
+ for cmd in commands:
+ klass = self.cmdclass.get(cmd)
+ if not klass:
+ klass = self.get_command_class(cmd)
+ try:
+ description = klass.description
+ except AttributeError:
+ description = "(no description available)"
+
+ print(" %-*s %s" % (max_length, cmd, description))
+
+ def print_commands(self):
+ """Print out a help message listing all available commands with a
+ description of each. The list is divided into "standard commands"
+ (listed in distutils.command.__all__) and "extra commands"
+ (mentioned in self.cmdclass, but not a standard command). The
+ descriptions come from the command class attribute
+ 'description'.
+ """
+ import distutils.command
+ std_commands = distutils.command.__all__
+ is_std = {}
+ for cmd in std_commands:
+ is_std[cmd] = 1
+
+ extra_commands = []
+ for cmd in self.cmdclass.keys():
+ if not is_std.get(cmd):
+ extra_commands.append(cmd)
+
+ max_length = 0
+ for cmd in (std_commands + extra_commands):
+ if len(cmd) > max_length:
+ max_length = len(cmd)
+
+ self.print_command_list(std_commands,
+ "Standard commands",
+ max_length)
+ if extra_commands:
+ print()
+ self.print_command_list(extra_commands,
+ "Extra commands",
+ max_length)
+
+ def get_command_list(self):
+ """Get a list of (command, description) tuples.
+ The list is divided into "standard commands" (listed in
+ distutils.command.__all__) and "extra commands" (mentioned in
+ self.cmdclass, but not a standard command). The descriptions come
+ from the command class attribute 'description'.
+ """
+ # Currently this is only used on Mac OS, for the Mac-only GUI
+ # Distutils interface (by Jack Jansen)
+ import distutils.command
+ std_commands = distutils.command.__all__
+ is_std = {}
+ for cmd in std_commands:
+ is_std[cmd] = 1
+
+ extra_commands = []
+ for cmd in self.cmdclass.keys():
+ if not is_std.get(cmd):
+ extra_commands.append(cmd)
+
+ rv = []
+ for cmd in (std_commands + extra_commands):
+ klass = self.cmdclass.get(cmd)
+ if not klass:
+ klass = self.get_command_class(cmd)
+ try:
+ description = klass.description
+ except AttributeError:
+ description = "(no description available)"
+ rv.append((cmd, description))
+ return rv
+
+ # -- Command class/object methods ----------------------------------
+
+ def get_command_packages(self):
+ """Return a list of packages from which commands are loaded."""
+ pkgs = self.command_packages
+ if not isinstance(pkgs, list):
+ if pkgs is None:
+ pkgs = ''
+ pkgs = [pkg.strip() for pkg in pkgs.split(',') if pkg != '']
+ if "distutils.command" not in pkgs:
+ pkgs.insert(0, "distutils.command")
+ self.command_packages = pkgs
+ return pkgs
+
+ def get_command_class(self, command):
+ """Return the class that implements the Distutils command named by
+ 'command'. First we check the 'cmdclass' dictionary; if the
+ command is mentioned there, we fetch the class object from the
+ dictionary and return it. Otherwise we load the command module
+ ("distutils.command." + command) and fetch the command class from
+ the module. The loaded class is also stored in 'cmdclass'
+ to speed future calls to 'get_command_class()'.
+
+ Raises DistutilsModuleError if the expected module could not be
+ found, or if that module does not define the expected class.
+ """
+ klass = self.cmdclass.get(command)
+ if klass:
+ return klass
+
+ for pkgname in self.get_command_packages():
+ module_name = "%s.%s" % (pkgname, command)
+ klass_name = command
+
+ try:
+ __import__(module_name)
+ module = sys.modules[module_name]
+ except ImportError:
+ continue
+
+ try:
+ klass = getattr(module, klass_name)
+ except AttributeError:
+ raise DistutilsModuleError(
+ "invalid command '%s' (no class '%s' in module '%s')"
+ % (command, klass_name, module_name))
+
+ self.cmdclass[command] = klass
+ return klass
+
+ raise DistutilsModuleError("invalid command '%s'" % command)
+
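+    # Illustrative sketch (not part of the upstream source): because
+    # 'cmdclass' is consulted first, a setup script can replace any standard
+    # command by registering its own class under the same name:
+    #
+    #     from distutils.command.build_py import build_py
+    #
+    #     class chatty_build_py(build_py):
+    #         def run(self):
+    #             print("about to build pure Python modules")
+    #             super().run()
+    #
+    #     setup(..., cmdclass={"build_py": chatty_build_py})
+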
+ def get_command_obj(self, command, create=1):
+ """Return the command object for 'command'. Normally this object
+ is cached on a previous call to 'get_command_obj()'; if no command
+ object for 'command' is in the cache, then we either create and
+ return it (if 'create' is true) or return None.
+ """
+ cmd_obj = self.command_obj.get(command)
+ if not cmd_obj and create:
+ if DEBUG:
+ self.announce("Distribution.get_command_obj(): "
+ "creating '%s' command object" % command)
+
+ klass = self.get_command_class(command)
+ cmd_obj = self.command_obj[command] = klass(self)
+ self.have_run[command] = 0
+
+ # Set any options that were supplied in config files
+ # or on the command line. (NB. support for error
+ # reporting is lame here: any errors aren't reported
+ # until 'finalize_options()' is called, which means
+ # we won't report the source of the error.)
+ options = self.command_options.get(command)
+ if options:
+ self._set_command_options(cmd_obj, options)
+
+ return cmd_obj
+
+ def _set_command_options(self, command_obj, option_dict=None):
+ """Set the options for 'command_obj' from 'option_dict'. Basically
+ this means copying elements of a dictionary ('option_dict') to
+ attributes of an instance ('command').
+
+ 'command_obj' must be a Command instance. If 'option_dict' is not
+ supplied, uses the standard option dictionary for this command
+ (from 'self.command_options').
+ """
+ command_name = command_obj.get_command_name()
+ if option_dict is None:
+ option_dict = self.get_option_dict(command_name)
+
+ if DEBUG:
+ self.announce(" setting options for '%s' command:" % command_name)
+ for (option, (source, value)) in option_dict.items():
+ if DEBUG:
+ self.announce(" %s = %s (from %s)" % (option, value,
+ source))
+ try:
+ bool_opts = [translate_longopt(o)
+ for o in command_obj.boolean_options]
+ except AttributeError:
+ bool_opts = []
+ try:
+ neg_opt = command_obj.negative_opt
+ except AttributeError:
+ neg_opt = {}
+
+ try:
+ is_string = isinstance(value, str)
+ if option in neg_opt and is_string:
+ setattr(command_obj, neg_opt[option], not strtobool(value))
+ elif option in bool_opts and is_string:
+ setattr(command_obj, option, strtobool(value))
+ elif hasattr(command_obj, option):
+ setattr(command_obj, option, value)
+ else:
+ raise DistutilsOptionError(
+ "error in %s: command '%s' has no such option '%s'"
+ % (source, command_name, option))
+ except ValueError as msg:
+ raise DistutilsOptionError(msg)
+
+ def reinitialize_command(self, command, reinit_subcommands=0):
+ """Reinitializes a command to the state it was in when first
+ returned by 'get_command_obj()': ie., initialized but not yet
+ finalized. This provides the opportunity to sneak option
+ values in programmatically, overriding or supplementing
+ user-supplied values from the config files and command line.
+ You'll have to re-finalize the command object (by calling
+ 'finalize_options()' or 'ensure_finalized()') before using it for
+ real.
+
+ 'command' should be a command name (string) or command object. If
+ 'reinit_subcommands' is true, also reinitializes the command's
+ sub-commands, as declared by the 'sub_commands' class attribute (if
+ it has one). See the "install" command for an example. Only
+ reinitializes the sub-commands that actually matter, ie. those
+ whose test predicates return true.
+
+ Returns the reinitialized command object.
+ """
+ from distutils.cmd import Command
+ if not isinstance(command, Command):
+ command_name = command
+ command = self.get_command_obj(command_name)
+ else:
+ command_name = command.get_command_name()
+
+ if not command.finalized:
+ return command
+ command.initialize_options()
+ command.finalized = 0
+ self.have_run[command_name] = 0
+ self._set_command_options(command)
+
+ if reinit_subcommands:
+ for sub in command.get_sub_commands():
+ self.reinitialize_command(sub, reinit_subcommands)
+
+ return command
+
+ # -- Methods that operate on the Distribution ----------------------
+
+ def announce(self, msg, level=log.INFO):
+ log.log(level, msg)
+
+ def run_commands(self):
+ """Run each command that was seen on the setup script command line.
+ Uses the list of commands found and cache of command objects
+ created by 'get_command_obj()'.
+ """
+ for cmd in self.commands:
+ self.run_command(cmd)
+
+ # -- Methods that operate on its Commands --------------------------
+
+ def run_command(self, command):
+ """Do whatever it takes to run a command (including nothing at all,
+ if the command has already been run). Specifically: if we have
+ already created and run the command named by 'command', return
+ silently without doing anything. If the command named by 'command'
+ doesn't even have a command object yet, create one. Then invoke
+ 'run()' on that command object (or an existing one).
+ """
+ # Already been here, done that? then return silently.
+ if self.have_run.get(command):
+ return
+
+ log.info("running %s", command)
+ cmd_obj = self.get_command_obj(command)
+ cmd_obj.ensure_finalized()
+ cmd_obj.run()
+ self.have_run[command] = 1
+
+ # -- Distribution query methods ------------------------------------
+
+ def has_pure_modules(self):
+ return len(self.packages or self.py_modules or []) > 0
+
+ def has_ext_modules(self):
+ return self.ext_modules and len(self.ext_modules) > 0
+
+ def has_c_libraries(self):
+ return self.libraries and len(self.libraries) > 0
+
+ def has_modules(self):
+ return self.has_pure_modules() or self.has_ext_modules()
+
+ def has_headers(self):
+ return self.headers and len(self.headers) > 0
+
+ def has_scripts(self):
+ return self.scripts and len(self.scripts) > 0
+
+ def has_data_files(self):
+ return self.data_files and len(self.data_files) > 0
+
+ def is_pure(self):
+ return (self.has_pure_modules() and
+ not self.has_ext_modules() and
+ not self.has_c_libraries())
+
+ # -- Metadata query methods ----------------------------------------
+
+ # If you're looking for 'get_name()', 'get_version()', and so forth,
+ # they are defined in a sneaky way: the constructor binds self.get_XXX
+ # to self.metadata.get_XXX. The actual code is in the
+ # DistributionMetadata class, below.
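+    #
+    # Illustrative sketch (not part of the upstream source):
+    #
+    #     dist = Distribution({"name": "demo", "version": "1.0"})
+    #     dist.get_name()              # -> "demo", via dist.metadata.get_name()
+    #     dist.metadata.get_version()  # -> "1.0"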
+
+class DistributionMetadata:
+ """Dummy class to hold the distribution meta-data: name, version,
+ author, and so forth.
+ """
+
+ _METHOD_BASENAMES = ("name", "version", "author", "author_email",
+ "maintainer", "maintainer_email", "url",
+ "license", "description", "long_description",
+ "keywords", "platforms", "fullname", "contact",
+ "contact_email", "classifiers", "download_url",
+ # PEP 314
+ "provides", "requires", "obsoletes",
+ )
+
+ def __init__(self, path=None):
+ if path is not None:
+ self.read_pkg_file(open(path))
+ else:
+ self.name = None
+ self.version = None
+ self.author = None
+ self.author_email = None
+ self.maintainer = None
+ self.maintainer_email = None
+ self.url = None
+ self.license = None
+ self.description = None
+ self.long_description = None
+ self.keywords = None
+ self.platforms = None
+ self.classifiers = None
+ self.download_url = None
+ # PEP 314
+ self.provides = None
+ self.requires = None
+ self.obsoletes = None
+
+ def read_pkg_file(self, file):
+ """Reads the metadata values from a file object."""
+ msg = message_from_file(file)
+
+ def _read_field(name):
+ value = msg[name]
+ if value == 'UNKNOWN':
+ return None
+ return value
+
+ def _read_list(name):
+ values = msg.get_all(name, None)
+ if values == []:
+ return None
+ return values
+
+ metadata_version = msg['metadata-version']
+ self.name = _read_field('name')
+ self.version = _read_field('version')
+ self.description = _read_field('summary')
+ # we are filling author only.
+ self.author = _read_field('author')
+ self.maintainer = None
+ self.author_email = _read_field('author-email')
+ self.maintainer_email = None
+ self.url = _read_field('home-page')
+ self.license = _read_field('license')
+
+ if 'download-url' in msg:
+ self.download_url = _read_field('download-url')
+ else:
+ self.download_url = None
+
+ self.long_description = _read_field('description')
+ self.description = _read_field('summary')
+
+ if 'keywords' in msg:
+ self.keywords = _read_field('keywords').split(',')
+
+ self.platforms = _read_list('platform')
+ self.classifiers = _read_list('classifier')
+
+ # PEP 314 - these fields only exist in 1.1
+ if metadata_version == '1.1':
+ self.requires = _read_list('requires')
+ self.provides = _read_list('provides')
+ self.obsoletes = _read_list('obsoletes')
+ else:
+ self.requires = None
+ self.provides = None
+ self.obsoletes = None
+
+ def write_pkg_info(self, base_dir):
+ """Write the PKG-INFO file into the release tree.
+ """
+ with open(os.path.join(base_dir, 'PKG-INFO'), 'w',
+ encoding='UTF-8') as pkg_info:
+ self.write_pkg_file(pkg_info)
+
+ def write_pkg_file(self, file):
+ """Write the PKG-INFO format data to a file object.
+ """
+ version = '1.0'
+ if (self.provides or self.requires or self.obsoletes or
+ self.classifiers or self.download_url):
+ version = '1.1'
+
+ file.write('Metadata-Version: %s\n' % version)
+ file.write('Name: %s\n' % self.get_name())
+ file.write('Version: %s\n' % self.get_version())
+ file.write('Summary: %s\n' % self.get_description())
+ file.write('Home-page: %s\n' % self.get_url())
+ file.write('Author: %s\n' % self.get_contact())
+ file.write('Author-email: %s\n' % self.get_contact_email())
+ file.write('License: %s\n' % self.get_license())
+ if self.download_url:
+ file.write('Download-URL: %s\n' % self.download_url)
+
+ long_desc = rfc822_escape(self.get_long_description())
+ file.write('Description: %s\n' % long_desc)
+
+ keywords = ','.join(self.get_keywords())
+ if keywords:
+ file.write('Keywords: %s\n' % keywords)
+
+ self._write_list(file, 'Platform', self.get_platforms())
+ self._write_list(file, 'Classifier', self.get_classifiers())
+
+ # PEP 314
+ self._write_list(file, 'Requires', self.get_requires())
+ self._write_list(file, 'Provides', self.get_provides())
+ self._write_list(file, 'Obsoletes', self.get_obsoletes())
+
+ def _write_list(self, file, name, values):
+ for value in values:
+ file.write('%s: %s\n' % (name, value))
+
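+    # Illustrative sketch (not part of the upstream source): for a minimal
+    # distribution with only a name and version set, write_pkg_file() above
+    # produces something along these lines:
+    #
+    #     Metadata-Version: 1.0
+    #     Name: demo
+    #     Version: 1.0
+    #     Summary: UNKNOWN
+    #     Home-page: UNKNOWN
+    #     Author: UNKNOWN
+    #     Author-email: UNKNOWN
+    #     License: UNKNOWN
+    #     Description: UNKNOWN
+    #     Platform: UNKNOWN
+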
+ # -- Metadata query methods ----------------------------------------
+
+ def get_name(self):
+ return self.name or "UNKNOWN"
+
+ def get_version(self):
+ return self.version or "0.0.0"
+
+ def get_fullname(self):
+ return "%s-%s" % (self.get_name(), self.get_version())
+
+ def get_author(self):
+ return self.author or "UNKNOWN"
+
+ def get_author_email(self):
+ return self.author_email or "UNKNOWN"
+
+ def get_maintainer(self):
+ return self.maintainer or "UNKNOWN"
+
+ def get_maintainer_email(self):
+ return self.maintainer_email or "UNKNOWN"
+
+ def get_contact(self):
+ return self.maintainer or self.author or "UNKNOWN"
+
+ def get_contact_email(self):
+ return self.maintainer_email or self.author_email or "UNKNOWN"
+
+ def get_url(self):
+ return self.url or "UNKNOWN"
+
+ def get_license(self):
+ return self.license or "UNKNOWN"
+ get_licence = get_license
+
+ def get_description(self):
+ return self.description or "UNKNOWN"
+
+ def get_long_description(self):
+ return self.long_description or "UNKNOWN"
+
+ def get_keywords(self):
+ return self.keywords or []
+
+ def set_keywords(self, value):
+ self.keywords = _ensure_list(value, 'keywords')
+
+ def get_platforms(self):
+ return self.platforms or ["UNKNOWN"]
+
+ def set_platforms(self, value):
+ self.platforms = _ensure_list(value, 'platforms')
+
+ def get_classifiers(self):
+ return self.classifiers or []
+
+ def set_classifiers(self, value):
+ self.classifiers = _ensure_list(value, 'classifiers')
+
+ def get_download_url(self):
+ return self.download_url or "UNKNOWN"
+
+ # PEP 314
+ def get_requires(self):
+ return self.requires or []
+
+ def set_requires(self, value):
+ import distutils.versionpredicate
+ for v in value:
+ distutils.versionpredicate.VersionPredicate(v)
+ self.requires = list(value)
+
+ def get_provides(self):
+ return self.provides or []
+
+ def set_provides(self, value):
+ value = [v.strip() for v in value]
+ for v in value:
+ import distutils.versionpredicate
+ distutils.versionpredicate.split_provision(v)
+ self.provides = value
+
+ def get_obsoletes(self):
+ return self.obsoletes or []
+
+ def set_obsoletes(self, value):
+ import distutils.versionpredicate
+ for v in value:
+ distutils.versionpredicate.VersionPredicate(v)
+ self.obsoletes = list(value)
+
+def fix_help_options(options):
+ """Convert a 4-tuple 'help_options' list as found in various command
+ classes to the 3-tuple form required by FancyGetopt.
+ """
+ new_options = []
+ for help_tuple in options:
+ new_options.append(help_tuple[0:3])
+ return new_options
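+
+# Illustrative sketch (not part of the upstream source): command classes may
+# declare 'help_options' as 4-tuples whose last element is a callable (the
+# names below are made up):
+#
+#     help_options = [
+#         ("help-formats", None, "list the formats available", show_formats),
+#     ]
+#
+# fix_help_options() strips that callable so the remaining (long, short, help)
+# triples can be handed to FancyGetopt, whose own optional fourth field means
+# "repeatable" rather than "callback".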
diff --git a/setuptools/_distutils/errors.py b/setuptools/_distutils/errors.py
new file mode 100644
index 0000000..8b93059
--- /dev/null
+++ b/setuptools/_distutils/errors.py
@@ -0,0 +1,97 @@
+"""distutils.errors
+
+Provides exceptions used by the Distutils modules. Note that Distutils
+modules may raise standard exceptions; in particular, SystemExit is
+usually raised for errors that are obviously the end-user's fault
+(eg. bad command-line arguments).
+
+This module is safe to use in "from ... import *" mode; it only exports
+symbols whose names start with "Distutils" and end with "Error"."""
+
+class DistutilsError (Exception):
+ """The root of all Distutils evil."""
+ pass
+
+class DistutilsModuleError (DistutilsError):
+ """Unable to load an expected module, or to find an expected class
+ within some module (in particular, command modules and classes)."""
+ pass
+
+class DistutilsClassError (DistutilsError):
+ """Some command class (or possibly distribution class, if anyone
+ feels a need to subclass Distribution) is found not to be holding
+ up its end of the bargain, ie. implementing some part of the
+ "command "interface."""
+ pass
+
+class DistutilsGetoptError (DistutilsError):
+ """The option table provided to 'fancy_getopt()' is bogus."""
+ pass
+
+class DistutilsArgError (DistutilsError):
+ """Raised by fancy_getopt in response to getopt.error -- ie. an
+ error in the command line usage."""
+ pass
+
+class DistutilsFileError (DistutilsError):
+ """Any problems in the filesystem: expected file not found, etc.
+ Typically this is for problems that we detect before OSError
+ could be raised."""
+ pass
+
+class DistutilsOptionError (DistutilsError):
+ """Syntactic/semantic errors in command options, such as use of
+ mutually conflicting options, or inconsistent options,
+ badly-spelled values, etc. No distinction is made between option
+ values originating in the setup script, the command line, config
+ files, or what-have-you -- but if we *know* something originated in
+ the setup script, we'll raise DistutilsSetupError instead."""
+ pass
+
+class DistutilsSetupError (DistutilsError):
+ """For errors that can be definitely blamed on the setup script,
+ such as invalid keyword arguments to 'setup()'."""
+ pass
+
+class DistutilsPlatformError (DistutilsError):
+ """We don't know how to do something on the current platform (but
+ we do know how to do it on some platform) -- eg. trying to compile
+ C files on a platform not supported by a CCompiler subclass."""
+ pass
+
+class DistutilsExecError (DistutilsError):
+ """Any problems executing an external program (such as the C
+ compiler, when compiling C files)."""
+ pass
+
+class DistutilsInternalError (DistutilsError):
+ """Internal inconsistencies or impossibilities (obviously, this
+ should never be seen if the code is working!)."""
+ pass
+
+class DistutilsTemplateError (DistutilsError):
+ """Syntax error in a file list template."""
+
+class DistutilsByteCompileError(DistutilsError):
+ """Byte compile error."""
+
+# Exception classes used by the CCompiler implementation classes
+class CCompilerError (Exception):
+ """Some compile/link operation failed."""
+
+class PreprocessError (CCompilerError):
+ """Failure to preprocess one or more C/C++ files."""
+
+class CompileError (CCompilerError):
+ """Failure to compile one or more C/C++ source files."""
+
+class LibError (CCompilerError):
+ """Failure to create a static library from one or more C/C++ object
+ files."""
+
+class LinkError (CCompilerError):
+ """Failure to link one or more C/C++ object files into an executable
+ or shared library file."""
+
+class UnknownFileError (CCompilerError):
+ """Attempt to process an unknown file type."""
diff --git a/setuptools/_distutils/extension.py b/setuptools/_distutils/extension.py
new file mode 100644
index 0000000..c507da3
--- /dev/null
+++ b/setuptools/_distutils/extension.py
@@ -0,0 +1,240 @@
+"""distutils.extension
+
+Provides the Extension class, used to describe C/C++ extension
+modules in setup scripts."""
+
+import os
+import warnings
+
+# This class is really only used by the "build_ext" command, so it might
+# make sense to put it in distutils.command.build_ext. However, that
+# module is already big enough, and I want to make this class a bit more
+# complex to simplify some common cases ("foo" module in "foo.c") and do
+# better error-checking ("foo.c" actually exists).
+#
+# Also, putting this in build_ext.py means every setup script would have to
+# import that large-ish module (indirectly, through distutils.core) in
+# order to do anything.
+
+class Extension:
+ """Just a collection of attributes that describes an extension
+ module and everything needed to build it (hopefully in a portable
+ way, but there are hooks that let you be as unportable as you need).
+
+ Instance attributes:
+ name : string
+ the full name of the extension, including any packages -- ie.
+ *not* a filename or pathname, but Python dotted name
+ sources : [string]
+ list of source filenames, relative to the distribution root
+ (where the setup script lives), in Unix form (slash-separated)
+ for portability. Source files may be C, C++, SWIG (.i),
+ platform-specific resource files, or whatever else is recognized
+ by the "build_ext" command as source for a Python extension.
+ include_dirs : [string]
+ list of directories to search for C/C++ header files (in Unix
+ form for portability)
+ define_macros : [(name : string, value : string|None)]
+ list of macros to define; each macro is defined using a 2-tuple,
+ where 'value' is either the string to define it to or None to
+ define it without a particular value (equivalent of "#define
+ FOO" in source or -DFOO on Unix C compiler command line)
+ undef_macros : [string]
+ list of macros to undefine explicitly
+ library_dirs : [string]
+ list of directories to search for C/C++ libraries at link time
+ libraries : [string]
+ list of library names (not filenames or paths) to link against
+ runtime_library_dirs : [string]
+ list of directories to search for C/C++ libraries at run time
+ (for shared extensions, this is when the extension is loaded)
+ extra_objects : [string]
+ list of extra files to link with (eg. object files not implied
+ by 'sources', static library that must be explicitly specified,
+ binary resource files, etc.)
+ extra_compile_args : [string]
+ any extra platform- and compiler-specific information to use
+ when compiling the source files in 'sources'. For platforms and
+ compilers where "command line" makes sense, this is typically a
+ list of command-line arguments, but for other platforms it could
+ be anything.
+ extra_link_args : [string]
+ any extra platform- and compiler-specific information to use
+ when linking object files together to create the extension (or
+ to create a new static Python interpreter). Similar
+ interpretation as for 'extra_compile_args'.
+ export_symbols : [string]
+ list of symbols to be exported from a shared extension. Not
+ used on all platforms, and not generally necessary for Python
+ extensions, which typically export exactly one symbol: "init" +
+ extension_name.
+ swig_opts : [string]
+ any extra options to pass to SWIG if a source file has the .i
+ extension.
+ depends : [string]
+ list of files that the extension depends on
+ language : string
+ extension language (i.e. "c", "c++", "objc"). Will be detected
+ from the source extensions if not provided.
+ optional : boolean
+ specifies that a build failure in the extension should not abort the
+ build process, but simply not install the failing extension.
+ """
+
+ # When adding arguments to this constructor, be sure to update
+ # setup_keywords in core.py.
+ def __init__(self, name, sources,
+ include_dirs=None,
+ define_macros=None,
+ undef_macros=None,
+ library_dirs=None,
+ libraries=None,
+ runtime_library_dirs=None,
+ extra_objects=None,
+ extra_compile_args=None,
+ extra_link_args=None,
+ export_symbols=None,
+ swig_opts = None,
+ depends=None,
+ language=None,
+ optional=None,
+ **kw # To catch unknown keywords
+ ):
+ if not isinstance(name, str):
+ raise AssertionError("'name' must be a string")
+ if not (isinstance(sources, list) and
+ all(isinstance(v, str) for v in sources)):
+ raise AssertionError("'sources' must be a list of strings")
+
+ self.name = name
+ self.sources = sources
+ self.include_dirs = include_dirs or []
+ self.define_macros = define_macros or []
+ self.undef_macros = undef_macros or []
+ self.library_dirs = library_dirs or []
+ self.libraries = libraries or []
+ self.runtime_library_dirs = runtime_library_dirs or []
+ self.extra_objects = extra_objects or []
+ self.extra_compile_args = extra_compile_args or []
+ self.extra_link_args = extra_link_args or []
+ self.export_symbols = export_symbols or []
+ self.swig_opts = swig_opts or []
+ self.depends = depends or []
+ self.language = language
+ self.optional = optional
+
+ # If there are unknown keyword options, warn about them
+ if len(kw) > 0:
+ options = [repr(option) for option in kw]
+ options = ', '.join(sorted(options))
+ msg = "Unknown Extension options: %s" % options
+ warnings.warn(msg)
+
+ def __repr__(self):
+ return '<%s.%s(%r) at %#x>' % (
+ self.__class__.__module__,
+ self.__class__.__qualname__,
+ self.name,
+ id(self))
+
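+# Illustrative sketch (not part of the upstream source): a typical extension
+# declaration in a setup script looks like
+#
+#     Extension(
+#         "demo",                            # imported as "import demo"
+#         sources=["src/demo.c"],
+#         include_dirs=["include"],
+#         define_macros=[("NDEBUG", None)],  # equivalent of -DNDEBUG
+#         libraries=["m"],                   # link against libm
+#     )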
+
+def read_setup_file(filename):
+ """Reads a Setup file and returns Extension instances."""
+ from distutils.sysconfig import (parse_makefile, expand_makefile_vars,
+ _variable_rx)
+
+ from distutils.text_file import TextFile
+ from distutils.util import split_quoted
+
+ # First pass over the file to gather "VAR = VALUE" assignments.
+ vars = parse_makefile(filename)
+
+ # Second pass to gobble up the real content: lines of the form
+ # <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...]
+ file = TextFile(filename,
+ strip_comments=1, skip_blanks=1, join_lines=1,
+ lstrip_ws=1, rstrip_ws=1)
+ try:
+ extensions = []
+
+ while True:
+ line = file.readline()
+ if line is None: # eof
+ break
+ if _variable_rx.match(line): # VAR=VALUE, handled in first pass
+ continue
+
+ if line[0] == line[-1] == "*":
+ file.warn("'%s' lines not handled yet" % line)
+ continue
+
+ line = expand_makefile_vars(line, vars)
+ words = split_quoted(line)
+
+ # NB. this parses a slightly different syntax than the old
+ # makesetup script: here, there must be exactly one extension per
+ # line, and it must be the first word of the line. I have no idea
+ # why the old syntax supported multiple extensions per line, as
+ # they all wind up being the same.
+
+ module = words[0]
+ ext = Extension(module, [])
+ append_next_word = None
+
+ for word in words[1:]:
+ if append_next_word is not None:
+ append_next_word.append(word)
+ append_next_word = None
+ continue
+
+ suffix = os.path.splitext(word)[1]
+ switch = word[0:2] ; value = word[2:]
+
+ if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"):
+ # hmm, should we do something about C vs. C++ sources?
+ # or leave it up to the CCompiler implementation to
+ # worry about?
+ ext.sources.append(word)
+ elif switch == "-I":
+ ext.include_dirs.append(value)
+ elif switch == "-D":
+ equals = value.find("=")
+ if equals == -1: # bare "-DFOO" -- no value
+ ext.define_macros.append((value, None))
+ else: # "-DFOO=blah"
+ ext.define_macros.append((value[0:equals],
+                                              value[equals+1:]))
+ elif switch == "-U":
+ ext.undef_macros.append(value)
+ elif switch == "-C": # only here 'cause makesetup has it!
+ ext.extra_compile_args.append(word)
+ elif switch == "-l":
+ ext.libraries.append(value)
+ elif switch == "-L":
+ ext.library_dirs.append(value)
+ elif switch == "-R":
+ ext.runtime_library_dirs.append(value)
+ elif word == "-rpath":
+ append_next_word = ext.runtime_library_dirs
+ elif word == "-Xlinker":
+ append_next_word = ext.extra_link_args
+ elif word == "-Xcompiler":
+ append_next_word = ext.extra_compile_args
+ elif switch == "-u":
+ ext.extra_link_args.append(word)
+ if not value:
+ append_next_word = ext.extra_link_args
+ elif suffix in (".a", ".so", ".sl", ".o", ".dylib"):
+ # NB. a really faithful emulation of makesetup would
+ # append a .o file to extra_objects only if it
+ # had a slash in it; otherwise, it would s/.o/.c/
+ # and append it to sources. Hmmmm.
+ ext.extra_objects.append(word)
+ else:
+ file.warn("unrecognized argument '%s'" % word)
+
+ extensions.append(ext)
+ finally:
+ file.close()
+
+ return extensions
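+
+
+# Illustrative sketch (not part of the upstream source): a Setup file line of
+# the form parsed above, for example
+#
+#     foo foomodule.c -DFOO=1 -Iinclude -lbar -Llib
+#
+# produces Extension("foo", ["foomodule.c"]) with
+# define_macros=[("FOO", "1")], include_dirs=["include"],
+# libraries=["bar"] and library_dirs=["lib"].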
diff --git a/setuptools/_distutils/fancy_getopt.py b/setuptools/_distutils/fancy_getopt.py
new file mode 100644
index 0000000..7d170dd
--- /dev/null
+++ b/setuptools/_distutils/fancy_getopt.py
@@ -0,0 +1,457 @@
+"""distutils.fancy_getopt
+
+Wrapper around the standard getopt module that provides the following
+additional features:
+ * short and long options are tied together
+ * options have help strings, so fancy_getopt could potentially
+ create a complete usage summary
+ * options set attributes of a passed-in object
+"""
+
+import sys, string, re
+import getopt
+from distutils.errors import *
+
+# Much like command_re in distutils.core, this is close to but not quite
+# the same as a Python NAME -- except, in the spirit of most GNU
+# utilities, we use '-' in place of '_'. (The spirit of LISP lives on!)
+# The similarities to NAME are again not a coincidence...
+longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
+longopt_re = re.compile(r'^%s$' % longopt_pat)
+
+# For recognizing "negative alias" options, eg. "quiet=!verbose"
+neg_alias_re = re.compile("^(%s)=!(%s)$" % (longopt_pat, longopt_pat))
+
+# This is used to translate long options to legitimate Python identifiers
+# (for use as attributes of some object).
+longopt_xlate = str.maketrans('-', '_')
+
+class FancyGetopt:
+ """Wrapper around the standard 'getopt()' module that provides some
+ handy extra functionality:
+ * short and long options are tied together
+ * options have help strings, and help text can be assembled
+ from them
+ * options set attributes of a passed-in object
+ * boolean options can have "negative aliases" -- eg. if
+ --quiet is the "negative alias" of --verbose, then "--quiet"
+ on the command line sets 'verbose' to false
+ """
+
+ def __init__(self, option_table=None):
+ # The option table is (currently) a list of tuples. The
+ # tuples may have 3 or four values:
+        # tuples may have three or four values:
+ # if an option takes an argument, its long_option should have '='
+ # appended; short_option should just be a single character, no ':'
+ # in any case. If a long_option doesn't have a corresponding
+ # short_option, short_option should be None. All option tuples
+ # must have long options.
+ self.option_table = option_table
+
+ # 'option_index' maps long option names to entries in the option
+ # table (ie. those 3-tuples).
+ self.option_index = {}
+ if self.option_table:
+ self._build_index()
+
+ # 'alias' records (duh) alias options; {'foo': 'bar'} means
+ # --foo is an alias for --bar
+ self.alias = {}
+
+ # 'negative_alias' keeps track of options that are the boolean
+ # opposite of some other option
+ self.negative_alias = {}
+
+ # These keep track of the information in the option table. We
+ # don't actually populate these structures until we're ready to
+ # parse the command-line, since the 'option_table' passed in here
+ # isn't necessarily the final word.
+ self.short_opts = []
+ self.long_opts = []
+ self.short2long = {}
+ self.attr_name = {}
+ self.takes_arg = {}
+
+ # And 'option_order' is filled up in 'getopt()'; it records the
+ # original order of options (and their values) on the command-line,
+ # but expands short options, converts aliases, etc.
+ self.option_order = []
+
+ def _build_index(self):
+ self.option_index.clear()
+ for option in self.option_table:
+ self.option_index[option[0]] = option
+
+ def set_option_table(self, option_table):
+ self.option_table = option_table
+ self._build_index()
+
+ def add_option(self, long_option, short_option=None, help_string=None):
+ if long_option in self.option_index:
+ raise DistutilsGetoptError(
+ "option conflict: already an option '%s'" % long_option)
+ else:
+ option = (long_option, short_option, help_string)
+ self.option_table.append(option)
+ self.option_index[long_option] = option
+
+ def has_option(self, long_option):
+ """Return true if the option table for this parser has an
+ option with long name 'long_option'."""
+ return long_option in self.option_index
+
+ def get_attr_name(self, long_option):
+ """Translate long option name 'long_option' to the form it
+ has as an attribute of some object: ie., translate hyphens
+ to underscores."""
+ return long_option.translate(longopt_xlate)
+
+ def _check_alias_dict(self, aliases, what):
+ assert isinstance(aliases, dict)
+ for (alias, opt) in aliases.items():
+ if alias not in self.option_index:
+ raise DistutilsGetoptError(("invalid %s '%s': "
+ "option '%s' not defined") % (what, alias, alias))
+ if opt not in self.option_index:
+ raise DistutilsGetoptError(("invalid %s '%s': "
+ "aliased option '%s' not defined") % (what, alias, opt))
+
+ def set_aliases(self, alias):
+ """Set the aliases for this option parser."""
+ self._check_alias_dict(alias, "alias")
+ self.alias = alias
+
+ def set_negative_aliases(self, negative_alias):
+ """Set the negative aliases for this option parser.
+ 'negative_alias' should be a dictionary mapping option names to
+ option names, both the key and value must already be defined
+ in the option table."""
+ self._check_alias_dict(negative_alias, "negative alias")
+ self.negative_alias = negative_alias
+
+ def _grok_option_table(self):
+ """Populate the various data structures that keep tabs on the
+ option table. Called by 'getopt()' before it can do anything
+ worthwhile.
+ """
+ self.long_opts = []
+ self.short_opts = []
+ self.short2long.clear()
+ self.repeat = {}
+
+ for option in self.option_table:
+ if len(option) == 3:
+ long, short, help = option
+ repeat = 0
+ elif len(option) == 4:
+ long, short, help, repeat = option
+ else:
+ # the option table is part of the code, so simply
+ # assert that it is correct
+ raise ValueError("invalid option tuple: %r" % (option,))
+
+ # Type- and value-check the option names
+ if not isinstance(long, str) or len(long) < 2:
+ raise DistutilsGetoptError(("invalid long option '%s': "
+ "must be a string of length >= 2") % long)
+
+ if (not ((short is None) or
+ (isinstance(short, str) and len(short) == 1))):
+ raise DistutilsGetoptError("invalid short option '%s': "
+                    "must be a single character or None" % short)
+
+ self.repeat[long] = repeat
+ self.long_opts.append(long)
+
+ if long[-1] == '=': # option takes an argument?
+ if short: short = short + ':'
+ long = long[0:-1]
+ self.takes_arg[long] = 1
+ else:
+                # Is this option a "negative alias" for some other option (eg.
+ # "quiet" == "!verbose")?
+ alias_to = self.negative_alias.get(long)
+ if alias_to is not None:
+ if self.takes_arg[alias_to]:
+ raise DistutilsGetoptError(
+ "invalid negative alias '%s': "
+ "aliased option '%s' takes a value"
+ % (long, alias_to))
+
+ self.long_opts[-1] = long # XXX redundant?!
+ self.takes_arg[long] = 0
+
+ # If this is an alias option, make sure its "takes arg" flag is
+ # the same as the option it's aliased to.
+ alias_to = self.alias.get(long)
+ if alias_to is not None:
+ if self.takes_arg[long] != self.takes_arg[alias_to]:
+ raise DistutilsGetoptError(
+ "invalid alias '%s': inconsistent with "
+ "aliased option '%s' (one of them takes a value, "
+                        "the other doesn't)"
+ % (long, alias_to))
+
+ # Now enforce some bondage on the long option name, so we can
+ # later translate it to an attribute name on some object. Have
+ # to do this a bit late to make sure we've removed any trailing
+ # '='.
+ if not longopt_re.match(long):
+ raise DistutilsGetoptError(
+ "invalid long option name '%s' "
+                    "(must be letters, numbers, hyphens only)" % long)
+
+ self.attr_name[long] = self.get_attr_name(long)
+ if short:
+ self.short_opts.append(short)
+ self.short2long[short[0]] = long
+
+ def getopt(self, args=None, object=None):
+ """Parse command-line options in args. Store as attributes on object.
+
+ If 'args' is None or not supplied, uses 'sys.argv[1:]'. If
+ 'object' is None or not supplied, creates a new OptionDummy
+ object, stores option values there, and returns a tuple (args,
+ object). If 'object' is supplied, it is modified in place and
+ 'getopt()' just returns 'args'; in both cases, the returned
+ 'args' is a modified copy of the passed-in 'args' list, which
+ is left untouched.
+ """
+ if args is None:
+ args = sys.argv[1:]
+ if object is None:
+ object = OptionDummy()
+ created_object = True
+ else:
+ created_object = False
+
+ self._grok_option_table()
+
+ short_opts = ' '.join(self.short_opts)
+ try:
+ opts, args = getopt.getopt(args, short_opts, self.long_opts)
+ except getopt.error as msg:
+ raise DistutilsArgError(msg)
+
+ for opt, val in opts:
+ if len(opt) == 2 and opt[0] == '-': # it's a short option
+ opt = self.short2long[opt[1]]
+ else:
+ assert len(opt) > 2 and opt[:2] == '--'
+ opt = opt[2:]
+
+ alias = self.alias.get(opt)
+ if alias:
+ opt = alias
+
+ if not self.takes_arg[opt]: # boolean option?
+ assert val == '', "boolean option can't have value"
+ alias = self.negative_alias.get(opt)
+ if alias:
+ opt = alias
+ val = 0
+ else:
+ val = 1
+
+ attr = self.attr_name[opt]
+ # The only repeating option at the moment is 'verbose'.
+ # It has a negative option -q quiet, which should set verbose = 0.
+ if val and self.repeat.get(attr) is not None:
+ val = getattr(object, attr, 0) + 1
+ setattr(object, attr, val)
+ self.option_order.append((opt, val))
+
+ # for opts
+ if created_object:
+ return args, object
+ else:
+ return args
+
+ def get_option_order(self):
+ """Returns the list of (option, value) tuples processed by the
+ previous run of 'getopt()'. Raises RuntimeError if
+ 'getopt()' hasn't been called yet.
+ """
+ if self.option_order is None:
+ raise RuntimeError("'getopt()' hasn't been called yet")
+ else:
+ return self.option_order
+
+ def generate_help(self, header=None):
+ """Generate help text (a list of strings, one per suggested line of
+ output) from the option table for this FancyGetopt object.
+ """
+ # Blithely assume the option table is good: probably wouldn't call
+ # 'generate_help()' unless you've already called 'getopt()'.
+
+ # First pass: determine maximum length of long option names
+ max_opt = 0
+ for option in self.option_table:
+ long = option[0]
+ short = option[1]
+ l = len(long)
+ if long[-1] == '=':
+ l = l - 1
+ if short is not None:
+ l = l + 5 # " (-x)" where short == 'x'
+ if l > max_opt:
+ max_opt = l
+
+ opt_width = max_opt + 2 + 2 + 2 # room for indent + dashes + gutter
+
+ # Typical help block looks like this:
+ # --foo controls foonabulation
+ # Help block for longest option looks like this:
+ # --flimflam set the flim-flam level
+ # and with wrapped text:
+ # --flimflam set the flim-flam level (must be between
+ # 0 and 100, except on Tuesdays)
+ # Options with short names will have the short name shown (but
+ # it doesn't contribute to max_opt):
+ # --foo (-f) controls foonabulation
+ # If adding the short option would make the left column too wide,
+ # we push the explanation off to the next line
+ # --flimflam (-l)
+ # set the flim-flam level
+ # Important parameters:
+ # - 2 spaces before option block start lines
+ # - 2 dashes for each long option name
+ # - min. 2 spaces between option and explanation (gutter)
+ # - 5 characters (incl. space) for short option name
+
+ # Now generate lines of help text. (If 80 columns were good enough
+ # for Jesus, then 78 columns are good enough for me!)
+ line_width = 78
+ text_width = line_width - opt_width
+ big_indent = ' ' * opt_width
+ if header:
+ lines = [header]
+ else:
+ lines = ['Option summary:']
+
+ for option in self.option_table:
+ long, short, help = option[:3]
+ text = wrap_text(help, text_width)
+ if long[-1] == '=':
+ long = long[0:-1]
+
+ # Case 1: no short option at all (makes life easy)
+ if short is None:
+ if text:
+ lines.append(" --%-*s %s" % (max_opt, long, text[0]))
+ else:
+ lines.append(" --%-*s " % (max_opt, long))
+
+ # Case 2: we have a short option, so we have to include it
+ # just after the long option
+ else:
+ opt_names = "%s (-%s)" % (long, short)
+ if text:
+ lines.append(" --%-*s %s" %
+ (max_opt, opt_names, text[0]))
+ else:
+                    lines.append(" --%-*s" % (max_opt, opt_names))
+
+ for l in text[1:]:
+ lines.append(big_indent + l)
+ return lines
+
+ def print_help(self, header=None, file=None):
+ if file is None:
+ file = sys.stdout
+ for line in self.generate_help(header):
+ file.write(line + "\n")
+
+
+def fancy_getopt(options, negative_opt, object, args):
+ parser = FancyGetopt(options)
+ parser.set_negative_aliases(negative_opt)
+ return parser.getopt(args, object)
+
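+# Illustrative sketch (not part of the upstream source): a minimal use of the
+# convenience wrapper above, with --quiet as a negative alias for --verbose:
+#
+#     options = [("verbose", "v", "run verbosely"),
+#                ("quiet", "q", "run quietly"),
+#                ("output=", "o", "write results to FILE")]
+#     values = OptionDummy()
+#     args = fancy_getopt(options, {"quiet": "verbose"}, values,
+#                         ["-o", "out.txt", "--quiet", "leftover"])
+#     # afterwards: values.output == "out.txt", values.verbose == 0,
+#     # and args == ["leftover"]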
+
+WS_TRANS = {ord(_wschar) : ' ' for _wschar in string.whitespace}
+
+def wrap_text(text, width):
+ """wrap_text(text : string, width : int) -> [string]
+
+ Split 'text' into multiple lines of no more than 'width' characters
+ each, and return the list of strings that results.
+ """
+ if text is None:
+ return []
+ if len(text) <= width:
+ return [text]
+
+ text = text.expandtabs()
+ text = text.translate(WS_TRANS)
+ chunks = re.split(r'( +|-+)', text)
+ chunks = [ch for ch in chunks if ch] # ' - ' results in empty strings
+ lines = []
+
+ while chunks:
+ cur_line = [] # list of chunks (to-be-joined)
+ cur_len = 0 # length of current line
+
+ while chunks:
+ l = len(chunks[0])
+ if cur_len + l <= width: # can squeeze (at least) this chunk in
+ cur_line.append(chunks[0])
+ del chunks[0]
+ cur_len = cur_len + l
+ else: # this line is full
+ # drop last chunk if all space
+ if cur_line and cur_line[-1][0] == ' ':
+ del cur_line[-1]
+ break
+
+ if chunks: # any chunks left to process?
+ # if the current line is still empty, then we had a single
+            # chunk that's too big to fit on a line -- so we break
+ # down and break it up at the line width
+ if cur_len == 0:
+ cur_line.append(chunks[0][0:width])
+ chunks[0] = chunks[0][width:]
+
+ # all-whitespace chunks at the end of a line can be discarded
+ # (and we know from the re.split above that if a chunk has
+ # *any* whitespace, it is *all* whitespace)
+ if chunks[0][0] == ' ':
+ del chunks[0]
+
+ # and store this line in the list-of-all-lines -- as a single
+ # string, of course!
+ lines.append(''.join(cur_line))
+
+ return lines
+
+
+def translate_longopt(opt):
+ """Convert a long option name to a valid Python identifier by
+ changing "-" to "_".
+ """
+ return opt.translate(longopt_xlate)
+
+
+class OptionDummy:
+ """Dummy class just used as a place to hold command-line option
+ values as instance attributes."""
+
+ def __init__(self, options=[]):
+ """Create a new OptionDummy instance. The attributes listed in
+ 'options' will be initialized to None."""
+ for opt in options:
+ setattr(self, opt, None)
+
+
+if __name__ == "__main__":
+ text = """\
+Tra-la-la, supercalifragilisticexpialidocious.
+How *do* you spell that odd word, anyways?
+(Someone ask Mary -- she'll know [or she'll
+say, "How should I know?"].)"""
+
+ for w in (10, 20, 30, 40):
+ print("width: %d" % w)
+ print("\n".join(wrap_text(text, w)))
+ print()
diff --git a/setuptools/_distutils/file_util.py b/setuptools/_distutils/file_util.py
new file mode 100644
index 0000000..b3fee35
--- /dev/null
+++ b/setuptools/_distutils/file_util.py
@@ -0,0 +1,238 @@
+"""distutils.file_util
+
+Utility functions for operating on single files.
+"""
+
+import os
+from distutils.errors import DistutilsFileError
+from distutils import log
+
+# for generating verbose output in 'copy_file()'
+_copy_action = { None: 'copying',
+ 'hard': 'hard linking',
+ 'sym': 'symbolically linking' }
+
+
+def _copy_file_contents(src, dst, buffer_size=16*1024):
+ """Copy the file 'src' to 'dst'; both must be filenames. Any error
+ opening either file, reading from 'src', or writing to 'dst', raises
+ DistutilsFileError. Data is read/written in chunks of 'buffer_size'
+ bytes (default 16k). No attempt is made to handle anything apart from
+ regular files.
+ """
+ # Stolen from shutil module in the standard library, but with
+ # custom error-handling added.
+ fsrc = None
+ fdst = None
+ try:
+ try:
+ fsrc = open(src, 'rb')
+ except OSError as e:
+ raise DistutilsFileError("could not open '%s': %s" % (src, e.strerror))
+
+ if os.path.exists(dst):
+ try:
+ os.unlink(dst)
+ except OSError as e:
+ raise DistutilsFileError(
+ "could not delete '%s': %s" % (dst, e.strerror))
+
+ try:
+ fdst = open(dst, 'wb')
+ except OSError as e:
+ raise DistutilsFileError(
+ "could not create '%s': %s" % (dst, e.strerror))
+
+ while True:
+ try:
+ buf = fsrc.read(buffer_size)
+ except OSError as e:
+ raise DistutilsFileError(
+ "could not read from '%s': %s" % (src, e.strerror))
+
+ if not buf:
+ break
+
+ try:
+ fdst.write(buf)
+ except OSError as e:
+ raise DistutilsFileError(
+ "could not write to '%s': %s" % (dst, e.strerror))
+ finally:
+ if fdst:
+ fdst.close()
+ if fsrc:
+ fsrc.close()
+
+def copy_file(src, dst, preserve_mode=1, preserve_times=1, update=0,
+ link=None, verbose=1, dry_run=0):
+ """Copy a file 'src' to 'dst'. If 'dst' is a directory, then 'src' is
+ copied there with the same name; otherwise, it must be a filename. (If
+ the file exists, it will be ruthlessly clobbered.) If 'preserve_mode'
+ is true (the default), the file's mode (type and permission bits, or
+ whatever is analogous on the current platform) is copied. If
+ 'preserve_times' is true (the default), the last-modified and
+ last-access times are copied as well. If 'update' is true, 'src' will
+ only be copied if 'dst' does not exist, or if 'dst' does exist but is
+ older than 'src'.
+
+ 'link' allows you to make hard links (os.link) or symbolic links
+ (os.symlink) instead of copying: set it to "hard" or "sym"; if it is
+ None (the default), files are copied. Don't set 'link' on systems that
+ don't support it: 'copy_file()' doesn't check if hard or symbolic
+ linking is available. If hardlink fails, falls back to
+ _copy_file_contents().
+
+ Under Mac OS, uses the native file copy function in macostools; on
+ other systems, uses '_copy_file_contents()' to copy file contents.
+
+ Return a tuple (dest_name, copied): 'dest_name' is the actual name of
+ the output file, and 'copied' is true if the file was copied (or would
+ have been copied, if 'dry_run' true).
+ """
+ # XXX if the destination file already exists, we clobber it if
+ # copying, but blow up if linking. Hmmm. And I don't know what
+ # macostools.copyfile() does. Should definitely be consistent, and
+ # should probably blow up if destination exists and we would be
+ # changing it (ie. it's not already a hard/soft link to src OR
+ # (not update) and (src newer than dst).
+
+ from distutils.dep_util import newer
+ from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE
+
+ if not os.path.isfile(src):
+ raise DistutilsFileError(
+ "can't copy '%s': doesn't exist or not a regular file" % src)
+
+ if os.path.isdir(dst):
+ dir = dst
+ dst = os.path.join(dst, os.path.basename(src))
+ else:
+ dir = os.path.dirname(dst)
+
+ if update and not newer(src, dst):
+ if verbose >= 1:
+ log.debug("not copying %s (output up-to-date)", src)
+ return (dst, 0)
+
+ try:
+ action = _copy_action[link]
+ except KeyError:
+ raise ValueError("invalid value '%s' for 'link' argument" % link)
+
+ if verbose >= 1:
+ if os.path.basename(dst) == os.path.basename(src):
+ log.info("%s %s -> %s", action, src, dir)
+ else:
+ log.info("%s %s -> %s", action, src, dst)
+
+ if dry_run:
+ return (dst, 1)
+
+ # If linking (hard or symbolic), use the appropriate system call
+ # (Unix only, of course, but that's the caller's responsibility)
+ elif link == 'hard':
+ if not (os.path.exists(dst) and os.path.samefile(src, dst)):
+ try:
+ os.link(src, dst)
+ return (dst, 1)
+ except OSError:
+ # If hard linking fails, fall back on copying file
+ # (some special filesystems don't support hard linking
+ # even under Unix, see issue #8876).
+ pass
+ elif link == 'sym':
+ if not (os.path.exists(dst) and os.path.samefile(src, dst)):
+ os.symlink(src, dst)
+ return (dst, 1)
+
+ # Otherwise (non-Mac, not linking), copy the file contents and
+ # (optionally) copy the times and mode.
+ _copy_file_contents(src, dst)
+ if preserve_mode or preserve_times:
+ st = os.stat(src)
+
+ # According to David Ascher <da@ski.org>, utime() should be done
+ # before chmod() (at least under NT).
+ if preserve_times:
+ os.utime(dst, (st[ST_ATIME], st[ST_MTIME]))
+ if preserve_mode:
+ os.chmod(dst, S_IMODE(st[ST_MODE]))
+
+ return (dst, 1)
+
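+# Illustrative sketch (not part of the upstream source): commands usually go
+# through Command.copy_file(), but the function above can be called directly;
+# assuming a build/ directory already exists:
+#
+#     dest, copied = copy_file("README.txt", "build/", update=1)
+#     # dest == "build/README.txt"; copied is 0 when the destination already
+#     # exists and is at least as new as the source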
+
+# XXX I suspect this is Unix-specific -- need porting help!
+def move_file (src, dst,
+ verbose=1,
+ dry_run=0):
+
+ """Move a file 'src' to 'dst'. If 'dst' is a directory, the file will
+ be moved into it with the same name; otherwise, 'src' is just renamed
+ to 'dst'. Return the new full name of the file.
+
+ Handles cross-device moves on Unix using 'copy_file()'. What about
+ other systems???
+ """
+ from os.path import exists, isfile, isdir, basename, dirname
+ import errno
+
+ if verbose >= 1:
+ log.info("moving %s -> %s", src, dst)
+
+ if dry_run:
+ return dst
+
+ if not isfile(src):
+ raise DistutilsFileError("can't move '%s': not a regular file" % src)
+
+ if isdir(dst):
+ dst = os.path.join(dst, basename(src))
+ elif exists(dst):
+ raise DistutilsFileError(
+ "can't move '%s': destination '%s' already exists" %
+ (src, dst))
+
+ if not isdir(dirname(dst)):
+ raise DistutilsFileError(
+ "can't move '%s': destination '%s' not a valid path" %
+ (src, dst))
+
+ copy_it = False
+ try:
+ os.rename(src, dst)
+ except OSError as e:
+ (num, msg) = e.args
+ if num == errno.EXDEV:
+ copy_it = True
+ else:
+ raise DistutilsFileError(
+ "couldn't move '%s' to '%s': %s" % (src, dst, msg))
+
+ if copy_it:
+ copy_file(src, dst, verbose=verbose)
+ try:
+ os.unlink(src)
+ except OSError as e:
+ (num, msg) = e.args
+ try:
+ os.unlink(dst)
+ except OSError:
+ pass
+ raise DistutilsFileError(
+ "couldn't move '%s' to '%s' by copy/delete: "
+ "delete '%s' failed: %s"
+ % (src, dst, src, msg))
+ return dst
+
+
+def write_file (filename, contents):
+ """Create a file with the specified name and write 'contents' (a
+ sequence of strings without line terminators) to it.
+ """
+ f = open(filename, "w")
+ try:
+ for line in contents:
+ f.write(line + "\n")
+ finally:
+ f.close()
diff --git a/setuptools/_distutils/filelist.py b/setuptools/_distutils/filelist.py
new file mode 100644
index 0000000..82a7738
--- /dev/null
+++ b/setuptools/_distutils/filelist.py
@@ -0,0 +1,355 @@
+"""distutils.filelist
+
+Provides the FileList class, used for poking about the filesystem
+and building lists of files.
+"""
+
+import os
+import re
+import fnmatch
+import functools
+
+from distutils.util import convert_path
+from distutils.errors import DistutilsTemplateError, DistutilsInternalError
+from distutils import log
+
+
+class FileList:
+ """A list of files built by on exploring the filesystem and filtered by
+ applying various patterns to what we find there.
+
+ Instance attributes:
+ dir
+ directory from which files will be taken -- only used if
+ 'allfiles' not supplied to constructor
+ files
+ list of filenames currently being built/filtered/manipulated
+ allfiles
+ complete list of files under consideration (ie. without any
+ filtering applied)
+ """
+
+ def __init__(self, warn=None, debug_print=None):
+        # ignore arguments to FileList, but keep them for backwards
+ # compatibility
+ self.allfiles = None
+ self.files = []
+
+ def set_allfiles(self, allfiles):
+ self.allfiles = allfiles
+
+ def findall(self, dir=os.curdir):
+ self.allfiles = findall(dir)
+
+ def debug_print(self, msg):
+ """Print 'msg' to stdout if the global DEBUG (taken from the
+ DISTUTILS_DEBUG environment variable) flag is true.
+ """
+ from distutils.debug import DEBUG
+ if DEBUG:
+ print(msg)
+
+ # Collection methods
+
+ def append(self, item):
+ self.files.append(item)
+
+ def extend(self, items):
+ self.files.extend(items)
+
+ def sort(self):
+ # Not a strict lexical sort!
+ sortable_files = sorted(map(os.path.split, self.files))
+ self.files = []
+ for sort_tuple in sortable_files:
+ self.files.append(os.path.join(*sort_tuple))
+
+ # Other miscellaneous utility methods
+
+ def remove_duplicates(self):
+ # Assumes list has been sorted!
+ for i in range(len(self.files) - 1, 0, -1):
+ if self.files[i] == self.files[i - 1]:
+ del self.files[i]
+
+ # "File template" methods
+
+ def _parse_template_line(self, line):
+ words = line.split()
+ action = words[0]
+
+ patterns = dir = dir_pattern = None
+
+ if action in ('include', 'exclude',
+ 'global-include', 'global-exclude'):
+ if len(words) < 2:
+ raise DistutilsTemplateError(
+ "'%s' expects <pattern1> <pattern2> ..." % action)
+ patterns = [convert_path(w) for w in words[1:]]
+ elif action in ('recursive-include', 'recursive-exclude'):
+ if len(words) < 3:
+ raise DistutilsTemplateError(
+ "'%s' expects <dir> <pattern1> <pattern2> ..." % action)
+ dir = convert_path(words[1])
+ patterns = [convert_path(w) for w in words[2:]]
+ elif action in ('graft', 'prune'):
+ if len(words) != 2:
+ raise DistutilsTemplateError(
+ "'%s' expects a single <dir_pattern>" % action)
+ dir_pattern = convert_path(words[1])
+ else:
+ raise DistutilsTemplateError("unknown action '%s'" % action)
+
+ return (action, patterns, dir, dir_pattern)
+
+ def process_template_line(self, line):
+ # Parse the line: split it up, make sure the right number of words
+ # is there, and return the relevant words. 'action' is always
+ # defined: it's the first word of the line. Which of the other
+ # three are defined depends on the action; it'll be either
+ # patterns, (dir and patterns), or (dir_pattern).
+ (action, patterns, dir, dir_pattern) = self._parse_template_line(line)
+
+ # OK, now we know that the action is valid and we have the
+ # right number of words on the line for that action -- so we
+ # can proceed with minimal error-checking.
+ if action == 'include':
+ self.debug_print("include " + ' '.join(patterns))
+ for pattern in patterns:
+ if not self.include_pattern(pattern, anchor=1):
+ log.warn("warning: no files found matching '%s'",
+ pattern)
+
+ elif action == 'exclude':
+ self.debug_print("exclude " + ' '.join(patterns))
+ for pattern in patterns:
+ if not self.exclude_pattern(pattern, anchor=1):
+ log.warn(("warning: no previously-included files "
+ "found matching '%s'"), pattern)
+
+ elif action == 'global-include':
+ self.debug_print("global-include " + ' '.join(patterns))
+ for pattern in patterns:
+ if not self.include_pattern(pattern, anchor=0):
+ log.warn(("warning: no files found matching '%s' "
+ "anywhere in distribution"), pattern)
+
+ elif action == 'global-exclude':
+ self.debug_print("global-exclude " + ' '.join(patterns))
+ for pattern in patterns:
+ if not self.exclude_pattern(pattern, anchor=0):
+ log.warn(("warning: no previously-included files matching "
+ "'%s' found anywhere in distribution"),
+ pattern)
+
+ elif action == 'recursive-include':
+ self.debug_print("recursive-include %s %s" %
+ (dir, ' '.join(patterns)))
+ for pattern in patterns:
+ if not self.include_pattern(pattern, prefix=dir):
+ msg = (
+ "warning: no files found matching '%s' "
+ "under directory '%s'"
+ )
+ log.warn(msg, pattern, dir)
+
+ elif action == 'recursive-exclude':
+ self.debug_print("recursive-exclude %s %s" %
+ (dir, ' '.join(patterns)))
+ for pattern in patterns:
+ if not self.exclude_pattern(pattern, prefix=dir):
+ log.warn(("warning: no previously-included files matching "
+ "'%s' found under directory '%s'"),
+ pattern, dir)
+
+ elif action == 'graft':
+ self.debug_print("graft " + dir_pattern)
+ if not self.include_pattern(None, prefix=dir_pattern):
+ log.warn("warning: no directories found matching '%s'",
+ dir_pattern)
+
+ elif action == 'prune':
+ self.debug_print("prune " + dir_pattern)
+ if not self.exclude_pattern(None, prefix=dir_pattern):
+ log.warn(("no previously-included directories found "
+ "matching '%s'"), dir_pattern)
+ else:
+ raise DistutilsInternalError(
+ "this cannot happen: invalid action '%s'" % action)
+
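+    # Illustrative sketch (not part of the upstream source): the template
+    # lines handled above are the familiar MANIFEST.in commands, e.g.
+    #
+    #     include README.txt
+    #     recursive-include src *.c *.h
+    #     graft docs
+    #     prune docs/_build
+    #     global-exclude *.py[co]
+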
+ # Filtering/selection methods
+
+ def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
+ """Select strings (presumably filenames) from 'self.files' that
+ match 'pattern', a Unix-style wildcard (glob) pattern. Patterns
+ are not quite the same as implemented by the 'fnmatch' module: '*'
+ and '?' match non-special characters, where "special" is platform-
+ dependent: slash on Unix; colon, slash, and backslash on
+ DOS/Windows; and colon on Mac OS.
+
+ If 'anchor' is true (the default), then the pattern match is more
+ stringent: "*.py" will match "foo.py" but not "foo/bar.py". If
+ 'anchor' is false, both of these will match.
+
+ If 'prefix' is supplied, then only filenames starting with 'prefix'
+ (itself a pattern) and ending with 'pattern', with anything in between
+ them, will match. 'anchor' is ignored in this case.
+
+ If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
+ 'pattern' is assumed to be either a string containing a regex or a
+ regex object -- no translation is done, the regex is just compiled
+ and used as-is.
+
+ Selected strings will be added to self.files.
+
+ Return True if files are found, False otherwise.
+ """
+ # XXX docstring lying about what the special chars are?
+ files_found = False
+ pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
+ self.debug_print("include_pattern: applying regex r'%s'" %
+ pattern_re.pattern)
+
+ # delayed loading of allfiles list
+ if self.allfiles is None:
+ self.findall()
+
+ for name in self.allfiles:
+ if pattern_re.search(name):
+ self.debug_print(" adding " + name)
+ self.files.append(name)
+ files_found = True
+ return files_found
+
+ def exclude_pattern(
+ self, pattern, anchor=1, prefix=None, is_regex=0):
+ """Remove strings (presumably filenames) from 'files' that match
+ 'pattern'. Other parameters are the same as for
+ 'include_pattern()', above.
+ The list 'self.files' is modified in place.
+ Return True if files are found, False otherwise.
+ """
+ files_found = False
+ pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
+ self.debug_print("exclude_pattern: applying regex r'%s'" %
+ pattern_re.pattern)
+ for i in range(len(self.files)-1, -1, -1):
+ if pattern_re.search(self.files[i]):
+ self.debug_print(" removing " + self.files[i])
+ del self.files[i]
+ files_found = True
+ return files_found
+
+
+# Utility functions
+
+def _find_all_simple(path):
+ """
+ Find all files under 'path'
+ """
+ all_unique = _UniqueDirs.filter(os.walk(path, followlinks=True))
+ results = (
+ os.path.join(base, file)
+ for base, dirs, files in all_unique
+ for file in files
+ )
+ return filter(os.path.isfile, results)
+
+
+class _UniqueDirs(set):
+ """
+ Exclude previously-seen dirs from walk results,
+ avoiding infinite recursion.
+ Ref https://bugs.python.org/issue44497.
+ """
+ def __call__(self, walk_item):
+ """
+ Given an item from an os.walk result, determine
+ if the item represents a unique dir for this instance
+ and if not, prevent further traversal.
+ """
+ base, dirs, files = walk_item
+ stat = os.stat(base)
+ candidate = stat.st_dev, stat.st_ino
+ found = candidate in self
+ if found:
+ del dirs[:]
+ self.add(candidate)
+ return not found
+
+ @classmethod
+ def filter(cls, items):
+ return filter(cls(), items)
+
+
+def findall(dir=os.curdir):
+ """
+ Find all files under 'dir' and return the list of full filenames.
+ Unless dir is '.', return full filenames with dir prepended.
+ """
+ files = _find_all_simple(dir)
+ if dir == os.curdir:
+ make_rel = functools.partial(os.path.relpath, start=dir)
+ files = map(make_rel, files)
+ return list(files)
+
+
+def glob_to_re(pattern):
+ """Translate a shell-like glob pattern to a regular expression; return
+ a string containing the regex. Differs from 'fnmatch.translate()' in
+ that '*' does not match "special characters" (which are
+ platform-specific).
+ """
+ pattern_re = fnmatch.translate(pattern)
+
+ # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
+ # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
+ # and by extension they shouldn't match such "special characters" under
+ # any OS. So change all non-escaped dots in the RE to match any
+ # character except the special characters (currently: just os.sep).
+ sep = os.sep
+ if os.sep == '\\':
+ # we're using a regex to manipulate a regex, so we need
+ # to escape the backslash twice
+ sep = r'\\\\'
+ escaped = r'\1[^%s]' % sep
+ pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
+ return pattern_re
+
+
+def translate_pattern(pattern, anchor=1, prefix=None, is_regex=0):
+ """Translate a shell-like wildcard pattern to a compiled regular
+ expression. Return the compiled regex. If 'is_regex' true,
+ then 'pattern' is directly compiled to a regex (if it's a string)
+ or just returned as-is (assumes it's a regex object).
+ """
+ if is_regex:
+ if isinstance(pattern, str):
+ return re.compile(pattern)
+ else:
+ return pattern
+
+ # ditch start and end characters
+ start, _, end = glob_to_re('_').partition('_')
+
+ if pattern:
+ pattern_re = glob_to_re(pattern)
+ assert pattern_re.startswith(start) and pattern_re.endswith(end)
+ else:
+ pattern_re = ''
+
+ if prefix is not None:
+ prefix_re = glob_to_re(prefix)
+ assert prefix_re.startswith(start) and prefix_re.endswith(end)
+ prefix_re = prefix_re[len(start): len(prefix_re) - len(end)]
+ sep = os.sep
+ if os.sep == '\\':
+ sep = r'\\'
+ pattern_re = pattern_re[len(start): len(pattern_re) - len(end)]
+ pattern_re = r'%s\A%s%s.*%s%s' % (
+ start, prefix_re, sep, pattern_re, end)
+ else: # no prefix -- respect anchor flag
+ if anchor:
+ pattern_re = r'%s\A%s' % (start, pattern_re[len(start):])
+
+ return re.compile(pattern_re)
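
(Editor's aside, not part of the patch: the glob helpers above are easiest to grasp from a small usage sketch. This assumes the vendored module is importable under its usual name distutils.filelist and a POSIX os.sep of '/'; the file names are invented for illustration.)

    from distutils.filelist import translate_pattern

    anchored = translate_pattern('*.py', anchor=1)
    print(bool(anchored.search('foo.py')))       # True
    print(bool(anchored.search('pkg/foo.py')))   # False: '*' never crosses os.sep

    under_pkg = translate_pattern('*.py', prefix='pkg')
    print(bool(under_pkg.search('pkg/sub/foo.py')))  # True: prefix form matches at any depth
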
diff --git a/setuptools/_distutils/log.py b/setuptools/_distutils/log.py
new file mode 100644
index 0000000..a68b156
--- /dev/null
+++ b/setuptools/_distutils/log.py
@@ -0,0 +1,81 @@
+"""A simple log mechanism styled after PEP 282."""
+
+# The class here is styled after PEP 282 so that it could later be
+# replaced with a standard Python logging implementation.
+
+import sys
+
+DEBUG = 1
+INFO = 2
+WARN = 3
+ERROR = 4
+FATAL = 5
+
+
+class Log:
+
+ def __init__(self, threshold=WARN):
+ self.threshold = threshold
+
+ def _log(self, level, msg, args):
+ if level not in (DEBUG, INFO, WARN, ERROR, FATAL):
+ raise ValueError('%s wrong log level' % str(level))
+
+ if level >= self.threshold:
+ if args:
+ msg = msg % args
+ if level in (WARN, ERROR, FATAL):
+ stream = sys.stderr
+ else:
+ stream = sys.stdout
+ try:
+ stream.write('%s\n' % msg)
+ except UnicodeEncodeError:
+ # emulate backslashreplace error handler
+ encoding = stream.encoding
+ msg = msg.encode(encoding, "backslashreplace").decode(encoding)
+ stream.write('%s\n' % msg)
+ stream.flush()
+
+ def log(self, level, msg, *args):
+ self._log(level, msg, args)
+
+ def debug(self, msg, *args):
+ self._log(DEBUG, msg, args)
+
+ def info(self, msg, *args):
+ self._log(INFO, msg, args)
+
+ def warn(self, msg, *args):
+ self._log(WARN, msg, args)
+
+ def error(self, msg, *args):
+ self._log(ERROR, msg, args)
+
+ def fatal(self, msg, *args):
+ self._log(FATAL, msg, args)
+
+
+_global_log = Log()
+log = _global_log.log
+debug = _global_log.debug
+info = _global_log.info
+warn = _global_log.warn
+error = _global_log.error
+fatal = _global_log.fatal
+
+
+def set_threshold(level):
+ # return the old threshold for use from tests
+ old = _global_log.threshold
+ _global_log.threshold = level
+ return old
+
+
+def set_verbosity(v):
+ if v <= 0:
+ set_threshold(WARN)
+ elif v == 1:
+ set_threshold(INFO)
+ elif v >= 2:
+ set_threshold(DEBUG)
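
(Editor's aside, not part of the patch: a minimal sketch of how the module-level helpers above are meant to be used. They all share the single _global_log instance, so changing the threshold affects every caller.)

    from distutils import log

    old = log.set_threshold(log.INFO)   # returns the previous threshold
    log.info("building %s", "widget")   # written to stdout: INFO >= threshold
    log.debug("compiler details")       # suppressed: DEBUG < INFO
    log.warn("something looks odd")     # warnings and above go to stderr
    log.set_threshold(old)              # restore the old threshold
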
diff --git a/setuptools/_distutils/msvc9compiler.py b/setuptools/_distutils/msvc9compiler.py
new file mode 100644
index 0000000..6b62738
--- /dev/null
+++ b/setuptools/_distutils/msvc9compiler.py
@@ -0,0 +1,788 @@
+"""distutils.msvc9compiler
+
+Contains MSVCCompiler, an implementation of the abstract CCompiler class
+for Microsoft Visual Studio 2008.
+
+The module is compatible with VS 2005 and VS 2008. You can find legacy support
+for older versions of VS in distutils.msvccompiler.
+"""
+
+# Written by Perry Stoll
+# hacked by Robin Becker and Thomas Heller to do a better job of
+# finding DevStudio (through the registry)
+# ported to VS2005 and VS 2008 by Christian Heimes
+
+import os
+import subprocess
+import sys
+import re
+
+from distutils.errors import DistutilsExecError, DistutilsPlatformError, \
+ CompileError, LibError, LinkError
+from distutils.ccompiler import CCompiler, gen_lib_options
+from distutils import log
+from distutils.util import get_platform
+
+import winreg
+
+RegOpenKeyEx = winreg.OpenKeyEx
+RegEnumKey = winreg.EnumKey
+RegEnumValue = winreg.EnumValue
+RegError = winreg.error
+
+HKEYS = (winreg.HKEY_USERS,
+ winreg.HKEY_CURRENT_USER,
+ winreg.HKEY_LOCAL_MACHINE,
+ winreg.HKEY_CLASSES_ROOT)
+
+NATIVE_WIN64 = (sys.platform == 'win32' and sys.maxsize > 2**32)
+if NATIVE_WIN64:
+ # Visual C++ is a 32-bit application, so we need to look in
+ # the corresponding registry branch, if we're running a
+ # 64-bit Python on Win64
+ VS_BASE = r"Software\Wow6432Node\Microsoft\VisualStudio\%0.1f"
+ WINSDK_BASE = r"Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows"
+ NET_BASE = r"Software\Wow6432Node\Microsoft\.NETFramework"
+else:
+ VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f"
+ WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows"
+ NET_BASE = r"Software\Microsoft\.NETFramework"
+
+# A map from get_platform() return values to the values accepted by
+# 'vcvarsall.bat'. Note that a cross-compile may combine these (e.g.,
+# 'x86_amd64' is the param to cross-compile on x86 targeting amd64.)
+PLAT_TO_VCVARS = {
+ 'win32' : 'x86',
+ 'win-amd64' : 'amd64',
+}
+
+class Reg:
+ """Helper class to read values from the registry
+ """
+
+ def get_value(cls, path, key):
+ for base in HKEYS:
+ d = cls.read_values(base, path)
+ if d and key in d:
+ return d[key]
+ raise KeyError(key)
+ get_value = classmethod(get_value)
+
+ def read_keys(cls, base, key):
+ """Return list of registry keys."""
+ try:
+ handle = RegOpenKeyEx(base, key)
+ except RegError:
+ return None
+ L = []
+ i = 0
+ while True:
+ try:
+ k = RegEnumKey(handle, i)
+ except RegError:
+ break
+ L.append(k)
+ i += 1
+ return L
+ read_keys = classmethod(read_keys)
+
+ def read_values(cls, base, key):
+ """Return dict of registry keys and values.
+
+ All names are converted to lowercase.
+ """
+ try:
+ handle = RegOpenKeyEx(base, key)
+ except RegError:
+ return None
+ d = {}
+ i = 0
+ while True:
+ try:
+ name, value, type = RegEnumValue(handle, i)
+ except RegError:
+ break
+ name = name.lower()
+ d[cls.convert_mbcs(name)] = cls.convert_mbcs(value)
+ i += 1
+ return d
+ read_values = classmethod(read_values)
+
+ def convert_mbcs(s):
+ dec = getattr(s, "decode", None)
+ if dec is not None:
+ try:
+ s = dec("mbcs")
+ except UnicodeError:
+ pass
+ return s
+ convert_mbcs = staticmethod(convert_mbcs)
+
+class MacroExpander:
+
+ def __init__(self, version):
+ self.macros = {}
+ self.vsbase = VS_BASE % version
+ self.load_macros(version)
+
+ def set_macro(self, macro, path, key):
+ self.macros["$(%s)" % macro] = Reg.get_value(path, key)
+
+ def load_macros(self, version):
+ self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir")
+ self.set_macro("VSInstallDir", self.vsbase + r"\Setup\VS", "productdir")
+ self.set_macro("FrameworkDir", NET_BASE, "installroot")
+ try:
+ if version >= 8.0:
+ self.set_macro("FrameworkSDKDir", NET_BASE,
+ "sdkinstallrootv2.0")
+ else:
+ raise KeyError("sdkinstallrootv2.0")
+ except KeyError:
+ raise DistutilsPlatformError(
+ """Python was built with Visual Studio 2008;
+extensions must be built with a compiler that can generate compatible binaries.
+Visual Studio 2008 was not found on this system. If you have Cygwin installed,
+you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""")
+
+ if version >= 9.0:
+ self.set_macro("FrameworkVersion", self.vsbase, "clr version")
+ self.set_macro("WindowsSdkDir", WINSDK_BASE, "currentinstallfolder")
+ else:
+ p = r"Software\Microsoft\NET Framework Setup\Product"
+ for base in HKEYS:
+ try:
+ h = RegOpenKeyEx(base, p)
+ except RegError:
+ continue
+ key = RegEnumKey(h, 0)
+ d = Reg.get_value(base, r"%s\%s" % (p, key))
+ self.macros["$(FrameworkVersion)"] = d["version"]
+
+ def sub(self, s):
+ for k, v in self.macros.items():
+ s = s.replace(k, v)
+ return s
+
+def get_build_version():
+ """Return the version of MSVC that was used to build Python.
+
+ For Python 2.3 and up, the version number is included in
+ sys.version. For earlier versions, assume the compiler is MSVC 6.
+ """
+ prefix = "MSC v."
+ i = sys.version.find(prefix)
+ if i == -1:
+ return 6
+ i = i + len(prefix)
+ s, rest = sys.version[i:].split(" ", 1)
+ majorVersion = int(s[:-2]) - 6
+ if majorVersion >= 13:
+ # v13 was skipped and should be v14
+ majorVersion += 1
+ minorVersion = int(s[2:3]) / 10.0
+ # I don't think paths are affected by minor version in version 6
+ if majorVersion == 6:
+ minorVersion = 0
+ if majorVersion >= 6:
+ return majorVersion + minorVersion
+ # else we don't know what version of the compiler this is
+ return None
+
+def normalize_and_reduce_paths(paths):
+ """Return a list of normalized paths with duplicates removed.
+
+ The current order of paths is maintained.
+ """
+ # Paths are normalized so things like: /a and /a/ aren't both preserved.
+ reduced_paths = []
+ for p in paths:
+ np = os.path.normpath(p)
+ # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set.
+ if np not in reduced_paths:
+ reduced_paths.append(np)
+ return reduced_paths
+
+def removeDuplicates(variable):
+ """Remove duplicate values of an environment variable.
+ """
+ oldList = variable.split(os.pathsep)
+ newList = []
+ for i in oldList:
+ if i not in newList:
+ newList.append(i)
+ newVariable = os.pathsep.join(newList)
+ return newVariable
+
+def find_vcvarsall(version):
+ """Find the vcvarsall.bat file
+
+    First it tries to find the productdir of VS 2008 in the registry. If
+    that fails, it falls back to the VS90COMNTOOLS env var.
+ """
+ vsbase = VS_BASE % version
+ try:
+ productdir = Reg.get_value(r"%s\Setup\VC" % vsbase,
+ "productdir")
+ except KeyError:
+ log.debug("Unable to find productdir in registry")
+ productdir = None
+
+ if not productdir or not os.path.isdir(productdir):
+ toolskey = "VS%0.f0COMNTOOLS" % version
+ toolsdir = os.environ.get(toolskey, None)
+
+ if toolsdir and os.path.isdir(toolsdir):
+ productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC")
+ productdir = os.path.abspath(productdir)
+ if not os.path.isdir(productdir):
+ log.debug("%s is not a valid directory" % productdir)
+ return None
+ else:
+ log.debug("Env var %s is not set or invalid" % toolskey)
+ if not productdir:
+ log.debug("No productdir found")
+ return None
+ vcvarsall = os.path.join(productdir, "vcvarsall.bat")
+ if os.path.isfile(vcvarsall):
+ return vcvarsall
+ log.debug("Unable to find vcvarsall.bat")
+ return None
+
+def query_vcvarsall(version, arch="x86"):
+ """Launch vcvarsall.bat and read the settings from its environment
+ """
+ vcvarsall = find_vcvarsall(version)
+ interesting = {"include", "lib", "libpath", "path"}
+ result = {}
+
+ if vcvarsall is None:
+ raise DistutilsPlatformError("Unable to find vcvarsall.bat")
+ log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version)
+ popen = subprocess.Popen('"%s" %s & set' % (vcvarsall, arch),
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ try:
+ stdout, stderr = popen.communicate()
+ if popen.wait() != 0:
+ raise DistutilsPlatformError(stderr.decode("mbcs"))
+
+ stdout = stdout.decode("mbcs")
+ for line in stdout.split("\n"):
+ line = Reg.convert_mbcs(line)
+ if '=' not in line:
+ continue
+ line = line.strip()
+ key, value = line.split('=', 1)
+ key = key.lower()
+ if key in interesting:
+ if value.endswith(os.pathsep):
+ value = value[:-1]
+ result[key] = removeDuplicates(value)
+
+ finally:
+ popen.stdout.close()
+ popen.stderr.close()
+
+ if len(result) != len(interesting):
+ raise ValueError(str(list(result.keys())))
+
+ return result
+
+# More globals
+VERSION = get_build_version()
+# MACROS = MacroExpander(VERSION)
+
+class MSVCCompiler(CCompiler):
+ """Concrete class that implements an interface to Microsoft Visual C++,
+ as defined by the CCompiler abstract class."""
+
+ compiler_type = 'msvc'
+
+ # Just set this so CCompiler's constructor doesn't barf. We currently
+ # don't use the 'set_executables()' bureaucracy provided by CCompiler,
+ # as it really isn't necessary for this sort of single-compiler class.
+ # Would be nice to have a consistent interface with UnixCCompiler,
+ # though, so it's worth thinking about.
+ executables = {}
+
+ # Private class data (need to distinguish C from C++ source for compiler)
+ _c_extensions = ['.c']
+ _cpp_extensions = ['.cc', '.cpp', '.cxx']
+ _rc_extensions = ['.rc']
+ _mc_extensions = ['.mc']
+
+ # Needed for the filename generation methods provided by the
+ # base class, CCompiler.
+ src_extensions = (_c_extensions + _cpp_extensions +
+ _rc_extensions + _mc_extensions)
+ res_extension = '.res'
+ obj_extension = '.obj'
+ static_lib_extension = '.lib'
+ shared_lib_extension = '.dll'
+ static_lib_format = shared_lib_format = '%s%s'
+ exe_extension = '.exe'
+
+ def __init__(self, verbose=0, dry_run=0, force=0):
+ super().__init__(verbose, dry_run, force)
+ self.__version = VERSION
+ self.__root = r"Software\Microsoft\VisualStudio"
+ # self.__macros = MACROS
+ self.__paths = []
+ # target platform (.plat_name is consistent with 'bdist')
+ self.plat_name = None
+ self.__arch = None # deprecated name
+ self.initialized = False
+
+ def initialize(self, plat_name=None):
+ # multi-init means we would need to check platform same each time...
+ assert not self.initialized, "don't init multiple times"
+ if self.__version < 8.0:
+ raise DistutilsPlatformError("VC %0.1f is not supported by this module" % self.__version)
+ if plat_name is None:
+ plat_name = get_platform()
+ # sanity check for platforms to prevent obscure errors later.
+ ok_plats = 'win32', 'win-amd64'
+ if plat_name not in ok_plats:
+ raise DistutilsPlatformError("--plat-name must be one of %s" %
+ (ok_plats,))
+
+ if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"):
+ # Assume that the SDK set up everything alright; don't try to be
+ # smarter
+ self.cc = "cl.exe"
+ self.linker = "link.exe"
+ self.lib = "lib.exe"
+ self.rc = "rc.exe"
+ self.mc = "mc.exe"
+ else:
+ # On x86, 'vcvars32.bat amd64' creates an env that doesn't work;
+ # to cross compile, you use 'x86_amd64'.
+ # On AMD64, 'vcvars32.bat amd64' is a native build env; to cross
+ # compile use 'x86' (ie, it runs the x86 compiler directly)
+ if plat_name == get_platform() or plat_name == 'win32':
+ # native build or cross-compile to win32
+ plat_spec = PLAT_TO_VCVARS[plat_name]
+ else:
+ # cross compile from win32 -> some 64bit
+ plat_spec = PLAT_TO_VCVARS[get_platform()] + '_' + \
+ PLAT_TO_VCVARS[plat_name]
+
+ vc_env = query_vcvarsall(VERSION, plat_spec)
+
+ self.__paths = vc_env['path'].split(os.pathsep)
+ os.environ['lib'] = vc_env['lib']
+ os.environ['include'] = vc_env['include']
+
+ if len(self.__paths) == 0:
+ raise DistutilsPlatformError("Python was built with %s, "
+ "and extensions need to be built with the same "
+ "version of the compiler, but it isn't installed."
+ % self.__product)
+
+ self.cc = self.find_exe("cl.exe")
+ self.linker = self.find_exe("link.exe")
+ self.lib = self.find_exe("lib.exe")
+ self.rc = self.find_exe("rc.exe") # resource compiler
+ self.mc = self.find_exe("mc.exe") # message compiler
+ #self.set_path_env_var('lib')
+ #self.set_path_env_var('include')
+
+ # extend the MSVC path with the current path
+ try:
+ for p in os.environ['path'].split(';'):
+ self.__paths.append(p)
+ except KeyError:
+ pass
+ self.__paths = normalize_and_reduce_paths(self.__paths)
+ os.environ['path'] = ";".join(self.__paths)
+
+ self.preprocess_options = None
+ if self.__arch == "x86":
+ self.compile_options = [ '/nologo', '/O2', '/MD', '/W3',
+ '/DNDEBUG']
+ self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3',
+ '/Z7', '/D_DEBUG']
+ else:
+ # Win64
+ self.compile_options = [ '/nologo', '/O2', '/MD', '/W3', '/GS-' ,
+ '/DNDEBUG']
+ self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-',
+ '/Z7', '/D_DEBUG']
+
+ self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
+ if self.__version >= 7:
+ self.ldflags_shared_debug = [
+ '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'
+ ]
+ self.ldflags_static = [ '/nologo']
+
+ self.initialized = True
+
+ # -- Worker methods ------------------------------------------------
+
+ def object_filenames(self,
+ source_filenames,
+ strip_dir=0,
+ output_dir=''):
+ # Copied from ccompiler.py, extended to return .res as 'object'-file
+ # for .rc input file
+ if output_dir is None: output_dir = ''
+ obj_names = []
+ for src_name in source_filenames:
+ (base, ext) = os.path.splitext (src_name)
+ base = os.path.splitdrive(base)[1] # Chop off the drive
+ base = base[os.path.isabs(base):] # If abs, chop off leading /
+ if ext not in self.src_extensions:
+ # Better to raise an exception instead of silently continuing
+ # and later complain about sources and targets having
+ # different lengths
+ raise CompileError ("Don't know how to compile %s" % src_name)
+ if strip_dir:
+ base = os.path.basename (base)
+ if ext in self._rc_extensions:
+ obj_names.append (os.path.join (output_dir,
+ base + self.res_extension))
+ elif ext in self._mc_extensions:
+ obj_names.append (os.path.join (output_dir,
+ base + self.res_extension))
+ else:
+ obj_names.append (os.path.join (output_dir,
+ base + self.obj_extension))
+ return obj_names
+
+
+ def compile(self, sources,
+ output_dir=None, macros=None, include_dirs=None, debug=0,
+ extra_preargs=None, extra_postargs=None, depends=None):
+
+ if not self.initialized:
+ self.initialize()
+ compile_info = self._setup_compile(output_dir, macros, include_dirs,
+ sources, depends, extra_postargs)
+ macros, objects, extra_postargs, pp_opts, build = compile_info
+
+ compile_opts = extra_preargs or []
+ compile_opts.append ('/c')
+ if debug:
+ compile_opts.extend(self.compile_options_debug)
+ else:
+ compile_opts.extend(self.compile_options)
+
+ for obj in objects:
+ try:
+ src, ext = build[obj]
+ except KeyError:
+ continue
+ if debug:
+ # pass the full pathname to MSVC in debug mode,
+ # this allows the debugger to find the source file
+ # without asking the user to browse for it
+ src = os.path.abspath(src)
+
+ if ext in self._c_extensions:
+ input_opt = "/Tc" + src
+ elif ext in self._cpp_extensions:
+ input_opt = "/Tp" + src
+ elif ext in self._rc_extensions:
+ # compile .RC to .RES file
+ input_opt = src
+ output_opt = "/fo" + obj
+ try:
+ self.spawn([self.rc] + pp_opts +
+ [output_opt] + [input_opt])
+ except DistutilsExecError as msg:
+ raise CompileError(msg)
+ continue
+ elif ext in self._mc_extensions:
+ # Compile .MC to .RC file to .RES file.
+ # * '-h dir' specifies the directory for the
+ # generated include file
+ # * '-r dir' specifies the target directory of the
+ # generated RC file and the binary message resource
+ # it includes
+ #
+ # For now (since there are no options to change this),
+ # we use the source-directory for the include file and
+ # the build directory for the RC file and message
+ # resources. This works at least for win32all.
+ h_dir = os.path.dirname(src)
+ rc_dir = os.path.dirname(obj)
+ try:
+ # first compile .MC to .RC and .H file
+ self.spawn([self.mc] +
+ ['-h', h_dir, '-r', rc_dir] + [src])
+ base, _ = os.path.splitext (os.path.basename (src))
+ rc_file = os.path.join (rc_dir, base + '.rc')
+ # then compile .RC to .RES file
+ self.spawn([self.rc] +
+ ["/fo" + obj] + [rc_file])
+
+ except DistutilsExecError as msg:
+ raise CompileError(msg)
+ continue
+ else:
+ # how to handle this file?
+ raise CompileError("Don't know how to compile %s to %s"
+ % (src, obj))
+
+ output_opt = "/Fo" + obj
+ try:
+ self.spawn([self.cc] + compile_opts + pp_opts +
+ [input_opt, output_opt] +
+ extra_postargs)
+ except DistutilsExecError as msg:
+ raise CompileError(msg)
+
+ return objects
+
+
+ def create_static_lib(self,
+ objects,
+ output_libname,
+ output_dir=None,
+ debug=0,
+ target_lang=None):
+
+ if not self.initialized:
+ self.initialize()
+ (objects, output_dir) = self._fix_object_args(objects, output_dir)
+ output_filename = self.library_filename(output_libname,
+ output_dir=output_dir)
+
+ if self._need_link(objects, output_filename):
+ lib_args = objects + ['/OUT:' + output_filename]
+ if debug:
+ pass # XXX what goes here?
+ try:
+ self.spawn([self.lib] + lib_args)
+ except DistutilsExecError as msg:
+ raise LibError(msg)
+ else:
+ log.debug("skipping %s (up-to-date)", output_filename)
+
+
+ def link(self,
+ target_desc,
+ objects,
+ output_filename,
+ output_dir=None,
+ libraries=None,
+ library_dirs=None,
+ runtime_library_dirs=None,
+ export_symbols=None,
+ debug=0,
+ extra_preargs=None,
+ extra_postargs=None,
+ build_temp=None,
+ target_lang=None):
+
+ if not self.initialized:
+ self.initialize()
+ (objects, output_dir) = self._fix_object_args(objects, output_dir)
+ fixed_args = self._fix_lib_args(libraries, library_dirs,
+ runtime_library_dirs)
+ (libraries, library_dirs, runtime_library_dirs) = fixed_args
+
+ if runtime_library_dirs:
+ self.warn ("I don't know what to do with 'runtime_library_dirs': "
+ + str (runtime_library_dirs))
+
+ lib_opts = gen_lib_options(self,
+ library_dirs, runtime_library_dirs,
+ libraries)
+ if output_dir is not None:
+ output_filename = os.path.join(output_dir, output_filename)
+
+ if self._need_link(objects, output_filename):
+ if target_desc == CCompiler.EXECUTABLE:
+ if debug:
+ ldflags = self.ldflags_shared_debug[1:]
+ else:
+ ldflags = self.ldflags_shared[1:]
+ else:
+ if debug:
+ ldflags = self.ldflags_shared_debug
+ else:
+ ldflags = self.ldflags_shared
+
+ export_opts = []
+ for sym in (export_symbols or []):
+ export_opts.append("/EXPORT:" + sym)
+
+ ld_args = (ldflags + lib_opts + export_opts +
+ objects + ['/OUT:' + output_filename])
+
+ # The MSVC linker generates .lib and .exp files, which cannot be
+ # suppressed by any linker switches. The .lib files may even be
+ # needed! Make sure they are generated in the temporary build
+ # directory. Since they have different names for debug and release
+ # builds, they can go into the same directory.
+ build_temp = os.path.dirname(objects[0])
+ if export_symbols is not None:
+ (dll_name, dll_ext) = os.path.splitext(
+ os.path.basename(output_filename))
+ implib_file = os.path.join(
+ build_temp,
+ self.library_filename(dll_name))
+ ld_args.append ('/IMPLIB:' + implib_file)
+
+ self.manifest_setup_ldargs(output_filename, build_temp, ld_args)
+
+ if extra_preargs:
+ ld_args[:0] = extra_preargs
+ if extra_postargs:
+ ld_args.extend(extra_postargs)
+
+ self.mkpath(os.path.dirname(output_filename))
+ try:
+ self.spawn([self.linker] + ld_args)
+ except DistutilsExecError as msg:
+ raise LinkError(msg)
+
+ # embed the manifest
+ # XXX - this is somewhat fragile - if mt.exe fails, distutils
+ # will still consider the DLL up-to-date, but it will not have a
+ # manifest. Maybe we should link to a temp file? OTOH, that
+ # implies a build environment error that shouldn't go undetected.
+ mfinfo = self.manifest_get_embed_info(target_desc, ld_args)
+ if mfinfo is not None:
+ mffilename, mfid = mfinfo
+ out_arg = '-outputresource:%s;%s' % (output_filename, mfid)
+ try:
+ self.spawn(['mt.exe', '-nologo', '-manifest',
+ mffilename, out_arg])
+ except DistutilsExecError as msg:
+ raise LinkError(msg)
+ else:
+ log.debug("skipping %s (up-to-date)", output_filename)
+
+ def manifest_setup_ldargs(self, output_filename, build_temp, ld_args):
+ # If we need a manifest at all, an embedded manifest is recommended.
+ # See MSDN article titled
+ # "How to: Embed a Manifest Inside a C/C++ Application"
+ # (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx)
+ # Ask the linker to generate the manifest in the temp dir, so
+ # we can check it, and possibly embed it, later.
+ temp_manifest = os.path.join(
+ build_temp,
+ os.path.basename(output_filename) + ".manifest")
+ ld_args.append('/MANIFESTFILE:' + temp_manifest)
+
+ def manifest_get_embed_info(self, target_desc, ld_args):
+ # If a manifest should be embedded, return a tuple of
+ # (manifest_filename, resource_id). Returns None if no manifest
+ # should be embedded. See http://bugs.python.org/issue7833 for why
+        # we want to avoid any manifest for extension modules if we can.
+ for arg in ld_args:
+ if arg.startswith("/MANIFESTFILE:"):
+ temp_manifest = arg.split(":", 1)[1]
+ break
+ else:
+ # no /MANIFESTFILE so nothing to do.
+ return None
+ if target_desc == CCompiler.EXECUTABLE:
+ # by default, executables always get the manifest with the
+ # CRT referenced.
+ mfid = 1
+ else:
+ # Extension modules try and avoid any manifest if possible.
+ mfid = 2
+ temp_manifest = self._remove_visual_c_ref(temp_manifest)
+ if temp_manifest is None:
+ return None
+ return temp_manifest, mfid
+
+ def _remove_visual_c_ref(self, manifest_file):
+ try:
+ # Remove references to the Visual C runtime, so they will
+ # fall through to the Visual C dependency of Python.exe.
+ # This way, when installed for a restricted user (e.g.
+ # runtimes are not in WinSxS folder, but in Python's own
+ # folder), the runtimes do not need to be in every folder
+ # with .pyd's.
+ # Returns either the filename of the modified manifest or
+ # None if no manifest should be embedded.
+ manifest_f = open(manifest_file)
+ try:
+ manifest_buf = manifest_f.read()
+ finally:
+ manifest_f.close()
+ pattern = re.compile(
+ r"""<assemblyIdentity.*?name=("|')Microsoft\."""\
+ r"""VC\d{2}\.CRT("|').*?(/>|</assemblyIdentity>)""",
+ re.DOTALL)
+ manifest_buf = re.sub(pattern, "", manifest_buf)
+ pattern = r"<dependentAssembly>\s*</dependentAssembly>"
+ manifest_buf = re.sub(pattern, "", manifest_buf)
+ # Now see if any other assemblies are referenced - if not, we
+ # don't want a manifest embedded.
+ pattern = re.compile(
+ r"""<assemblyIdentity.*?name=(?:"|')(.+?)(?:"|')"""
+ r""".*?(?:/>|</assemblyIdentity>)""", re.DOTALL)
+ if re.search(pattern, manifest_buf) is None:
+ return None
+
+ manifest_f = open(manifest_file, 'w')
+ try:
+ manifest_f.write(manifest_buf)
+ return manifest_file
+ finally:
+ manifest_f.close()
+ except OSError:
+ pass
+
+ # -- Miscellaneous methods -----------------------------------------
+    # These are all used by the 'gen_lib_options()' function, in
+ # ccompiler.py.
+
+ def library_dir_option(self, dir):
+ return "/LIBPATH:" + dir
+
+ def runtime_library_dir_option(self, dir):
+ raise DistutilsPlatformError(
+ "don't know how to set runtime library search path for MSVC++")
+
+ def library_option(self, lib):
+ return self.library_filename(lib)
+
+
+ def find_library_file(self, dirs, lib, debug=0):
+ # Prefer a debugging library if found (and requested), but deal
+ # with it if we don't have one.
+ if debug:
+ try_names = [lib + "_d", lib]
+ else:
+ try_names = [lib]
+ for dir in dirs:
+ for name in try_names:
+ libfile = os.path.join(dir, self.library_filename (name))
+ if os.path.exists(libfile):
+ return libfile
+ else:
+ # Oops, didn't find it in *any* of 'dirs'
+ return None
+
+ # Helper methods for using the MSVC registry settings
+
+ def find_exe(self, exe):
+ """Return path to an MSVC executable program.
+
+ Tries to find the program in several places: first, one of the
+ MSVC program search paths from the registry; next, the directories
+ in the PATH environment variable. If any of those work, return an
+ absolute path that is known to exist. If none of them work, just
+ return the original program name, 'exe'.
+ """
+ for p in self.__paths:
+ fn = os.path.join(os.path.abspath(p), exe)
+ if os.path.isfile(fn):
+ return fn
+
+ # didn't find it; try existing path
+ for p in os.environ['Path'].split(';'):
+ fn = os.path.join(os.path.abspath(p),exe)
+ if os.path.isfile(fn):
+ return fn
+
+ return exe
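
(Editor's aside, not part of the patch: a hedged, Windows-only sketch of the vcvarsall plumbing above. It can only succeed where Visual Studio 2008, or an environment exposing VS90COMNTOOLS, is actually installed.)

    from distutils.msvc9compiler import find_vcvarsall, query_vcvarsall

    if find_vcvarsall(9.0):
        env = query_vcvarsall(9.0, "x86")
        # query_vcvarsall returns exactly the keys 'include', 'lib',
        # 'libpath' and 'path', or raises if any of them is missing
        print(sorted(env))
    else:
        print("vcvarsall.bat not found")
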
diff --git a/setuptools/_distutils/msvccompiler.py b/setuptools/_distutils/msvccompiler.py
new file mode 100644
index 0000000..e1367b8
--- /dev/null
+++ b/setuptools/_distutils/msvccompiler.py
@@ -0,0 +1,643 @@
+"""distutils.msvccompiler
+
+Contains MSVCCompiler, an implementation of the abstract CCompiler class
+for Microsoft Visual Studio.
+"""
+
+# Written by Perry Stoll
+# hacked by Robin Becker and Thomas Heller to do a better job of
+# finding DevStudio (through the registry)
+
+import sys, os
+from distutils.errors import \
+ DistutilsExecError, DistutilsPlatformError, \
+ CompileError, LibError, LinkError
+from distutils.ccompiler import \
+ CCompiler, gen_lib_options
+from distutils import log
+
+_can_read_reg = False
+try:
+ import winreg
+
+ _can_read_reg = True
+ hkey_mod = winreg
+
+ RegOpenKeyEx = winreg.OpenKeyEx
+ RegEnumKey = winreg.EnumKey
+ RegEnumValue = winreg.EnumValue
+ RegError = winreg.error
+
+except ImportError:
+ try:
+ import win32api
+ import win32con
+ _can_read_reg = True
+ hkey_mod = win32con
+
+ RegOpenKeyEx = win32api.RegOpenKeyEx
+ RegEnumKey = win32api.RegEnumKey
+ RegEnumValue = win32api.RegEnumValue
+ RegError = win32api.error
+ except ImportError:
+ log.info("Warning: Can't read registry to find the "
+ "necessary compiler setting\n"
+ "Make sure that Python modules winreg, "
+ "win32api or win32con are installed.")
+ pass
+
+if _can_read_reg:
+ HKEYS = (hkey_mod.HKEY_USERS,
+ hkey_mod.HKEY_CURRENT_USER,
+ hkey_mod.HKEY_LOCAL_MACHINE,
+ hkey_mod.HKEY_CLASSES_ROOT)
+
+def read_keys(base, key):
+ """Return list of registry keys."""
+ try:
+ handle = RegOpenKeyEx(base, key)
+ except RegError:
+ return None
+ L = []
+ i = 0
+ while True:
+ try:
+ k = RegEnumKey(handle, i)
+ except RegError:
+ break
+ L.append(k)
+ i += 1
+ return L
+
+def read_values(base, key):
+ """Return dict of registry keys and values.
+
+ All names are converted to lowercase.
+ """
+ try:
+ handle = RegOpenKeyEx(base, key)
+ except RegError:
+ return None
+ d = {}
+ i = 0
+ while True:
+ try:
+ name, value, type = RegEnumValue(handle, i)
+ except RegError:
+ break
+ name = name.lower()
+ d[convert_mbcs(name)] = convert_mbcs(value)
+ i += 1
+ return d
+
+def convert_mbcs(s):
+ dec = getattr(s, "decode", None)
+ if dec is not None:
+ try:
+ s = dec("mbcs")
+ except UnicodeError:
+ pass
+ return s
+
+class MacroExpander:
+ def __init__(self, version):
+ self.macros = {}
+ self.load_macros(version)
+
+ def set_macro(self, macro, path, key):
+ for base in HKEYS:
+ d = read_values(base, path)
+ if d:
+ self.macros["$(%s)" % macro] = d[key]
+ break
+
+ def load_macros(self, version):
+ vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version
+ self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir")
+ self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir")
+ net = r"Software\Microsoft\.NETFramework"
+ self.set_macro("FrameworkDir", net, "installroot")
+ try:
+ if version > 7.0:
+ self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1")
+ else:
+ self.set_macro("FrameworkSDKDir", net, "sdkinstallroot")
+ except KeyError as exc: #
+ raise DistutilsPlatformError(
+ """Python was built with Visual Studio 2003;
+extensions must be built with a compiler that can generate compatible binaries.
+Visual Studio 2003 was not found on this system. If you have Cygwin installed,
+you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""")
+
+ p = r"Software\Microsoft\NET Framework Setup\Product"
+ for base in HKEYS:
+ try:
+ h = RegOpenKeyEx(base, p)
+ except RegError:
+ continue
+ key = RegEnumKey(h, 0)
+ d = read_values(base, r"%s\%s" % (p, key))
+ self.macros["$(FrameworkVersion)"] = d["version"]
+
+ def sub(self, s):
+ for k, v in self.macros.items():
+ s = s.replace(k, v)
+ return s
+
+def get_build_version():
+ """Return the version of MSVC that was used to build Python.
+
+ For Python 2.3 and up, the version number is included in
+ sys.version. For earlier versions, assume the compiler is MSVC 6.
+ """
+ prefix = "MSC v."
+ i = sys.version.find(prefix)
+ if i == -1:
+ return 6
+ i = i + len(prefix)
+ s, rest = sys.version[i:].split(" ", 1)
+ majorVersion = int(s[:-2]) - 6
+ if majorVersion >= 13:
+ # v13 was skipped and should be v14
+ majorVersion += 1
+ minorVersion = int(s[2:3]) / 10.0
+ # I don't think paths are affected by minor version in version 6
+ if majorVersion == 6:
+ minorVersion = 0
+ if majorVersion >= 6:
+ return majorVersion + minorVersion
+ # else we don't know what version of the compiler this is
+ return None
+
+def get_build_architecture():
+ """Return the processor architecture.
+
+ Possible results are "Intel" or "AMD64".
+ """
+
+ prefix = " bit ("
+ i = sys.version.find(prefix)
+ if i == -1:
+ return "Intel"
+ j = sys.version.find(")", i)
+ return sys.version[i+len(prefix):j]
+
+def normalize_and_reduce_paths(paths):
+ """Return a list of normalized paths with duplicates removed.
+
+ The current order of paths is maintained.
+ """
+ # Paths are normalized so things like: /a and /a/ aren't both preserved.
+ reduced_paths = []
+ for p in paths:
+ np = os.path.normpath(p)
+ # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set.
+ if np not in reduced_paths:
+ reduced_paths.append(np)
+ return reduced_paths
+
+
+class MSVCCompiler(CCompiler):
+ """Concrete class that implements an interface to Microsoft Visual C++,
+ as defined by the CCompiler abstract class."""
+
+ compiler_type = 'msvc'
+
+ # Just set this so CCompiler's constructor doesn't barf. We currently
+ # don't use the 'set_executables()' bureaucracy provided by CCompiler,
+ # as it really isn't necessary for this sort of single-compiler class.
+ # Would be nice to have a consistent interface with UnixCCompiler,
+ # though, so it's worth thinking about.
+ executables = {}
+
+ # Private class data (need to distinguish C from C++ source for compiler)
+ _c_extensions = ['.c']
+ _cpp_extensions = ['.cc', '.cpp', '.cxx']
+ _rc_extensions = ['.rc']
+ _mc_extensions = ['.mc']
+
+ # Needed for the filename generation methods provided by the
+ # base class, CCompiler.
+ src_extensions = (_c_extensions + _cpp_extensions +
+ _rc_extensions + _mc_extensions)
+ res_extension = '.res'
+ obj_extension = '.obj'
+ static_lib_extension = '.lib'
+ shared_lib_extension = '.dll'
+ static_lib_format = shared_lib_format = '%s%s'
+ exe_extension = '.exe'
+
+ def __init__(self, verbose=0, dry_run=0, force=0):
+ super().__init__(verbose, dry_run, force)
+ self.__version = get_build_version()
+ self.__arch = get_build_architecture()
+ if self.__arch == "Intel":
+ # x86
+ if self.__version >= 7:
+ self.__root = r"Software\Microsoft\VisualStudio"
+ self.__macros = MacroExpander(self.__version)
+ else:
+ self.__root = r"Software\Microsoft\Devstudio"
+ self.__product = "Visual Studio version %s" % self.__version
+ else:
+ # Win64. Assume this was built with the platform SDK
+ self.__product = "Microsoft SDK compiler %s" % (self.__version + 6)
+
+ self.initialized = False
+
+ def initialize(self):
+ self.__paths = []
+ if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"):
+ # Assume that the SDK set up everything alright; don't try to be
+ # smarter
+ self.cc = "cl.exe"
+ self.linker = "link.exe"
+ self.lib = "lib.exe"
+ self.rc = "rc.exe"
+ self.mc = "mc.exe"
+ else:
+ self.__paths = self.get_msvc_paths("path")
+
+ if len(self.__paths) == 0:
+ raise DistutilsPlatformError("Python was built with %s, "
+ "and extensions need to be built with the same "
+ "version of the compiler, but it isn't installed."
+ % self.__product)
+
+ self.cc = self.find_exe("cl.exe")
+ self.linker = self.find_exe("link.exe")
+ self.lib = self.find_exe("lib.exe")
+ self.rc = self.find_exe("rc.exe") # resource compiler
+ self.mc = self.find_exe("mc.exe") # message compiler
+ self.set_path_env_var('lib')
+ self.set_path_env_var('include')
+
+ # extend the MSVC path with the current path
+ try:
+ for p in os.environ['path'].split(';'):
+ self.__paths.append(p)
+ except KeyError:
+ pass
+ self.__paths = normalize_and_reduce_paths(self.__paths)
+ os.environ['path'] = ";".join(self.__paths)
+
+ self.preprocess_options = None
+ if self.__arch == "Intel":
+ self.compile_options = [ '/nologo', '/O2', '/MD', '/W3', '/GX' ,
+ '/DNDEBUG']
+ self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX',
+ '/Z7', '/D_DEBUG']
+ else:
+ # Win64
+ self.compile_options = [ '/nologo', '/O2', '/MD', '/W3', '/GS-' ,
+ '/DNDEBUG']
+ self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-',
+ '/Z7', '/D_DEBUG']
+
+ self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
+ if self.__version >= 7:
+ self.ldflags_shared_debug = [
+ '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'
+ ]
+ else:
+ self.ldflags_shared_debug = [
+ '/DLL', '/nologo', '/INCREMENTAL:no', '/pdb:None', '/DEBUG'
+ ]
+ self.ldflags_static = [ '/nologo']
+
+ self.initialized = True
+
+ # -- Worker methods ------------------------------------------------
+
+ def object_filenames(self,
+ source_filenames,
+ strip_dir=0,
+ output_dir=''):
+ # Copied from ccompiler.py, extended to return .res as 'object'-file
+ # for .rc input file
+ if output_dir is None: output_dir = ''
+ obj_names = []
+ for src_name in source_filenames:
+ (base, ext) = os.path.splitext (src_name)
+ base = os.path.splitdrive(base)[1] # Chop off the drive
+ base = base[os.path.isabs(base):] # If abs, chop off leading /
+ if ext not in self.src_extensions:
+ # Better to raise an exception instead of silently continuing
+ # and later complain about sources and targets having
+ # different lengths
+ raise CompileError ("Don't know how to compile %s" % src_name)
+ if strip_dir:
+ base = os.path.basename (base)
+ if ext in self._rc_extensions:
+ obj_names.append (os.path.join (output_dir,
+ base + self.res_extension))
+ elif ext in self._mc_extensions:
+ obj_names.append (os.path.join (output_dir,
+ base + self.res_extension))
+ else:
+ obj_names.append (os.path.join (output_dir,
+ base + self.obj_extension))
+ return obj_names
+
+
+ def compile(self, sources,
+ output_dir=None, macros=None, include_dirs=None, debug=0,
+ extra_preargs=None, extra_postargs=None, depends=None):
+
+ if not self.initialized:
+ self.initialize()
+ compile_info = self._setup_compile(output_dir, macros, include_dirs,
+ sources, depends, extra_postargs)
+ macros, objects, extra_postargs, pp_opts, build = compile_info
+
+ compile_opts = extra_preargs or []
+ compile_opts.append ('/c')
+ if debug:
+ compile_opts.extend(self.compile_options_debug)
+ else:
+ compile_opts.extend(self.compile_options)
+
+ for obj in objects:
+ try:
+ src, ext = build[obj]
+ except KeyError:
+ continue
+ if debug:
+ # pass the full pathname to MSVC in debug mode,
+ # this allows the debugger to find the source file
+ # without asking the user to browse for it
+ src = os.path.abspath(src)
+
+ if ext in self._c_extensions:
+ input_opt = "/Tc" + src
+ elif ext in self._cpp_extensions:
+ input_opt = "/Tp" + src
+ elif ext in self._rc_extensions:
+ # compile .RC to .RES file
+ input_opt = src
+ output_opt = "/fo" + obj
+ try:
+ self.spawn([self.rc] + pp_opts +
+ [output_opt] + [input_opt])
+ except DistutilsExecError as msg:
+ raise CompileError(msg)
+ continue
+ elif ext in self._mc_extensions:
+ # Compile .MC to .RC file to .RES file.
+ # * '-h dir' specifies the directory for the
+ # generated include file
+ # * '-r dir' specifies the target directory of the
+ # generated RC file and the binary message resource
+ # it includes
+ #
+ # For now (since there are no options to change this),
+ # we use the source-directory for the include file and
+ # the build directory for the RC file and message
+ # resources. This works at least for win32all.
+ h_dir = os.path.dirname(src)
+ rc_dir = os.path.dirname(obj)
+ try:
+ # first compile .MC to .RC and .H file
+ self.spawn([self.mc] +
+ ['-h', h_dir, '-r', rc_dir] + [src])
+ base, _ = os.path.splitext (os.path.basename (src))
+ rc_file = os.path.join (rc_dir, base + '.rc')
+ # then compile .RC to .RES file
+ self.spawn([self.rc] +
+ ["/fo" + obj] + [rc_file])
+
+ except DistutilsExecError as msg:
+ raise CompileError(msg)
+ continue
+ else:
+ # how to handle this file?
+ raise CompileError("Don't know how to compile %s to %s"
+ % (src, obj))
+
+ output_opt = "/Fo" + obj
+ try:
+ self.spawn([self.cc] + compile_opts + pp_opts +
+ [input_opt, output_opt] +
+ extra_postargs)
+ except DistutilsExecError as msg:
+ raise CompileError(msg)
+
+ return objects
+
+
+ def create_static_lib(self,
+ objects,
+ output_libname,
+ output_dir=None,
+ debug=0,
+ target_lang=None):
+
+ if not self.initialized:
+ self.initialize()
+ (objects, output_dir) = self._fix_object_args(objects, output_dir)
+ output_filename = self.library_filename(output_libname,
+ output_dir=output_dir)
+
+ if self._need_link(objects, output_filename):
+ lib_args = objects + ['/OUT:' + output_filename]
+ if debug:
+ pass # XXX what goes here?
+ try:
+ self.spawn([self.lib] + lib_args)
+ except DistutilsExecError as msg:
+ raise LibError(msg)
+ else:
+ log.debug("skipping %s (up-to-date)", output_filename)
+
+
+ def link(self,
+ target_desc,
+ objects,
+ output_filename,
+ output_dir=None,
+ libraries=None,
+ library_dirs=None,
+ runtime_library_dirs=None,
+ export_symbols=None,
+ debug=0,
+ extra_preargs=None,
+ extra_postargs=None,
+ build_temp=None,
+ target_lang=None):
+
+ if not self.initialized:
+ self.initialize()
+ (objects, output_dir) = self._fix_object_args(objects, output_dir)
+ fixed_args = self._fix_lib_args(libraries, library_dirs,
+ runtime_library_dirs)
+ (libraries, library_dirs, runtime_library_dirs) = fixed_args
+
+ if runtime_library_dirs:
+ self.warn ("I don't know what to do with 'runtime_library_dirs': "
+ + str (runtime_library_dirs))
+
+ lib_opts = gen_lib_options(self,
+ library_dirs, runtime_library_dirs,
+ libraries)
+ if output_dir is not None:
+ output_filename = os.path.join(output_dir, output_filename)
+
+ if self._need_link(objects, output_filename):
+ if target_desc == CCompiler.EXECUTABLE:
+ if debug:
+ ldflags = self.ldflags_shared_debug[1:]
+ else:
+ ldflags = self.ldflags_shared[1:]
+ else:
+ if debug:
+ ldflags = self.ldflags_shared_debug
+ else:
+ ldflags = self.ldflags_shared
+
+ export_opts = []
+ for sym in (export_symbols or []):
+ export_opts.append("/EXPORT:" + sym)
+
+ ld_args = (ldflags + lib_opts + export_opts +
+ objects + ['/OUT:' + output_filename])
+
+ # The MSVC linker generates .lib and .exp files, which cannot be
+ # suppressed by any linker switches. The .lib files may even be
+ # needed! Make sure they are generated in the temporary build
+ # directory. Since they have different names for debug and release
+ # builds, they can go into the same directory.
+ if export_symbols is not None:
+ (dll_name, dll_ext) = os.path.splitext(
+ os.path.basename(output_filename))
+ implib_file = os.path.join(
+ os.path.dirname(objects[0]),
+ self.library_filename(dll_name))
+ ld_args.append ('/IMPLIB:' + implib_file)
+
+ if extra_preargs:
+ ld_args[:0] = extra_preargs
+ if extra_postargs:
+ ld_args.extend(extra_postargs)
+
+ self.mkpath(os.path.dirname(output_filename))
+ try:
+ self.spawn([self.linker] + ld_args)
+ except DistutilsExecError as msg:
+ raise LinkError(msg)
+
+ else:
+ log.debug("skipping %s (up-to-date)", output_filename)
+
+
+ # -- Miscellaneous methods -----------------------------------------
+    # These are all used by the 'gen_lib_options()' function, in
+ # ccompiler.py.
+
+ def library_dir_option(self, dir):
+ return "/LIBPATH:" + dir
+
+ def runtime_library_dir_option(self, dir):
+ raise DistutilsPlatformError(
+ "don't know how to set runtime library search path for MSVC++")
+
+ def library_option(self, lib):
+ return self.library_filename(lib)
+
+
+ def find_library_file(self, dirs, lib, debug=0):
+ # Prefer a debugging library if found (and requested), but deal
+ # with it if we don't have one.
+ if debug:
+ try_names = [lib + "_d", lib]
+ else:
+ try_names = [lib]
+ for dir in dirs:
+ for name in try_names:
+ libfile = os.path.join(dir, self.library_filename (name))
+ if os.path.exists(libfile):
+ return libfile
+ else:
+ # Oops, didn't find it in *any* of 'dirs'
+ return None
+
+ # Helper methods for using the MSVC registry settings
+
+ def find_exe(self, exe):
+ """Return path to an MSVC executable program.
+
+ Tries to find the program in several places: first, one of the
+ MSVC program search paths from the registry; next, the directories
+ in the PATH environment variable. If any of those work, return an
+ absolute path that is known to exist. If none of them work, just
+ return the original program name, 'exe'.
+ """
+ for p in self.__paths:
+ fn = os.path.join(os.path.abspath(p), exe)
+ if os.path.isfile(fn):
+ return fn
+
+ # didn't find it; try existing path
+ for p in os.environ['Path'].split(';'):
+ fn = os.path.join(os.path.abspath(p),exe)
+ if os.path.isfile(fn):
+ return fn
+
+ return exe
+
+ def get_msvc_paths(self, path, platform='x86'):
+ """Get a list of devstudio directories (include, lib or path).
+
+ Return a list of strings. The list will be empty if unable to
+ access the registry or appropriate registry keys not found.
+ """
+ if not _can_read_reg:
+ return []
+
+ path = path + " dirs"
+ if self.__version >= 7:
+ key = (r"%s\%0.1f\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories"
+ % (self.__root, self.__version))
+ else:
+ key = (r"%s\6.0\Build System\Components\Platforms"
+ r"\Win32 (%s)\Directories" % (self.__root, platform))
+
+ for base in HKEYS:
+ d = read_values(base, key)
+ if d:
+ if self.__version >= 7:
+ return self.__macros.sub(d[path]).split(";")
+ else:
+ return d[path].split(";")
+ # MSVC 6 seems to create the registry entries we need only when
+ # the GUI is run.
+ if self.__version == 6:
+ for base in HKEYS:
+ if read_values(base, r"%s\6.0" % self.__root) is not None:
+ self.warn("It seems you have Visual Studio 6 installed, "
+ "but the expected registry settings are not present.\n"
+ "You must at least run the Visual Studio GUI once "
+ "so that these entries are created.")
+ break
+ return []
+
+ def set_path_env_var(self, name):
+ """Set environment variable 'name' to an MSVC path type value.
+
+ This is equivalent to a SET command prior to execution of spawned
+ commands.
+ """
+
+ if name == "lib":
+ p = self.get_msvc_paths("library")
+ else:
+ p = self.get_msvc_paths(name)
+ if p:
+ os.environ[name] = ';'.join(p)
+
+
+if get_build_version() >= 8.0:
+ log.debug("Importing new compiler from distutils.msvc9compiler")
+ OldMSVCCompiler = MSVCCompiler
+ from distutils.msvc9compiler import MSVCCompiler
+ # get_build_architecture not really relevant now we support cross-compile
+ from distutils.msvc9compiler import MacroExpander
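
(Editor's aside, not part of the patch: the two probe functions above only parse sys.version, so they can be exercised on any platform, assuming the vendored distutils is what ends up on the import path.)

    from distutils.msvccompiler import get_build_version, get_build_architecture

    print(get_build_version())        # e.g. 14.0 for an "MSC v.1900" build; 6 when no MSC tag is present
    print(get_build_architecture())   # "AMD64" on a 64-bit MSC build, otherwise "Intel"
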
diff --git a/setuptools/_distutils/py35compat.py b/setuptools/_distutils/py35compat.py
new file mode 100644
index 0000000..79b2e7f
--- /dev/null
+++ b/setuptools/_distutils/py35compat.py
@@ -0,0 +1,19 @@
+import sys
+import subprocess
+
+
+def __optim_args_from_interpreter_flags():
+ """Return a list of command-line arguments reproducing the current
+ optimization settings in sys.flags."""
+ args = []
+ value = sys.flags.optimize
+ if value > 0:
+ args.append("-" + "O" * value)
+ return args
+
+
+_optim_args_from_interpreter_flags = getattr(
+ subprocess,
+ "_optim_args_from_interpreter_flags",
+ __optim_args_from_interpreter_flags,
+)
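
(Editor's aside, not part of the patch: the shim above prefers the private subprocess helper when it exists and otherwise rebuilds the same answer from sys.flags; a quick check, assuming the vendored package is importable as setuptools._distutils.)

    from setuptools._distutils.py35compat import _optim_args_from_interpreter_flags

    # [] for a normal run, ['-O'] under `python -O`, ['-OO'] under `python -OO`
    print(_optim_args_from_interpreter_flags())
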
diff --git a/setuptools/_distutils/py38compat.py b/setuptools/_distutils/py38compat.py
new file mode 100644
index 0000000..7dbe8ce
--- /dev/null
+++ b/setuptools/_distutils/py38compat.py
@@ -0,0 +1,7 @@
+def aix_platform(osname, version, release):
+ try:
+ import _aix_support
+ return _aix_support.aix_platform()
+ except ImportError:
+ pass
+ return "%s-%s.%s" % (osname, version, release)
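
(Editor's aside, not part of the patch: on interpreters without the private _aix_support module the helper above simply formats the legacy triple, so it can be checked anywhere; the import path is assumed to be the vendored one.)

    from setuptools._distutils.py38compat import aix_platform

    # Falls back to "osname-version.release"; on AIX builds that ship
    # _aix_support, the tag from _aix_support.aix_platform() is used instead.
    print(aix_platform("aix", "7", "2"))   # typically "aix-7.2"
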
diff --git a/setuptools/_distutils/spawn.py b/setuptools/_distutils/spawn.py
new file mode 100644
index 0000000..b2d10e3
--- /dev/null
+++ b/setuptools/_distutils/spawn.py
@@ -0,0 +1,106 @@
+"""distutils.spawn
+
+Provides the 'spawn()' function, a front-end to various platform-
+specific functions for launching another program in a sub-process.
+Also provides the 'find_executable()' to search the path for a given
+executable name.
+"""
+
+import sys
+import os
+import subprocess
+
+from distutils.errors import DistutilsExecError
+from distutils.debug import DEBUG
+from distutils import log
+
+
+def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None):
+ """Run another program, specified as a command list 'cmd', in a new process.
+
+    'cmd' is just the argument list for the new process, i.e.
+ cmd[0] is the program to run and cmd[1:] are the rest of its arguments.
+ There is no way to run a program with a name different from that of its
+ executable.
+
+ If 'search_path' is true (the default), the system's executable
+ search path will be used to find the program; otherwise, cmd[0]
+ must be the exact path to the executable. If 'dry_run' is true,
+ the command will not actually be run.
+
+ Raise DistutilsExecError if running the program fails in any way; just
+ return on success.
+ """
+ # cmd is documented as a list, but just in case some code passes a tuple
+ # in, protect our %-formatting code against horrible death
+ cmd = list(cmd)
+
+ log.info(subprocess.list2cmdline(cmd))
+ if dry_run:
+ return
+
+ if search_path:
+ executable = find_executable(cmd[0])
+ if executable is not None:
+ cmd[0] = executable
+
+ env = env if env is not None else dict(os.environ)
+
+ if sys.platform == 'darwin':
+ from distutils.util import MACOSX_VERSION_VAR, get_macosx_target_ver
+ macosx_target_ver = get_macosx_target_ver()
+ if macosx_target_ver:
+ env[MACOSX_VERSION_VAR] = macosx_target_ver
+
+ try:
+ proc = subprocess.Popen(cmd, env=env)
+ proc.wait()
+ exitcode = proc.returncode
+ except OSError as exc:
+ if not DEBUG:
+ cmd = cmd[0]
+ raise DistutilsExecError(
+ "command %r failed: %s" % (cmd, exc.args[-1])) from exc
+
+ if exitcode:
+ if not DEBUG:
+ cmd = cmd[0]
+ raise DistutilsExecError(
+ "command %r failed with exit code %s" % (cmd, exitcode))
+
+
+def find_executable(executable, path=None):
+ """Tries to find 'executable' in the directories listed in 'path'.
+
+    'path' is a string listing directories separated by 'os.pathsep'; it
+    defaults to os.environ['PATH']. Returns the complete filename, or None
+    if not found.
+ """
+ _, ext = os.path.splitext(executable)
+ if (sys.platform == 'win32') and (ext != '.exe'):
+ executable = executable + '.exe'
+
+ if os.path.isfile(executable):
+ return executable
+
+ if path is None:
+ path = os.environ.get('PATH', None)
+ if path is None:
+ try:
+ path = os.confstr("CS_PATH")
+ except (AttributeError, ValueError):
+ # os.confstr() or CS_PATH is not available
+ path = os.defpath
+ # bpo-35755: Don't use os.defpath if the PATH environment variable is
+ # set to an empty string
+
+ # PATH='' doesn't match, whereas PATH=':' looks in the current directory
+ if not path:
+ return None
+
+ paths = path.split(os.pathsep)
+ for p in paths:
+ f = os.path.join(p, executable)
+ if os.path.isfile(f):
+ # the file exists, we have a shot at spawn working
+ return f
+ return None
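
(Editor's aside, not part of the patch: typical use of the two helpers above is to resolve a program on PATH and then run it, catching the distutils-level error; "python3" here is just an illustrative program name.)

    from distutils.spawn import find_executable, spawn
    from distutils.errors import DistutilsExecError

    exe = find_executable("python3") or "python3"
    try:
        spawn([exe, "--version"])
    except DistutilsExecError as exc:
        print("spawn failed:", exc)
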
diff --git a/setuptools/_distutils/sysconfig.py b/setuptools/_distutils/sysconfig.py
new file mode 100644
index 0000000..9fad383
--- /dev/null
+++ b/setuptools/_distutils/sysconfig.py
@@ -0,0 +1,469 @@
+"""Provide access to Python's configuration information. The specific
+configuration variables available depend heavily on the platform and
+configuration. The values may be retrieved using
+get_config_var(name), and the list of variables is available via
+get_config_vars().keys(). Additional convenience functions are also
+available.
+
+Written by: Fred L. Drake, Jr.
+Email: <fdrake@acm.org>
+"""
+
+import _imp
+import os
+import re
+import sys
+import sysconfig
+
+from .errors import DistutilsPlatformError
+
+IS_PYPY = '__pypy__' in sys.builtin_module_names
+
+# These are needed in a couple of spots, so just compute them once.
+PREFIX = os.path.normpath(sys.prefix)
+EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
+BASE_PREFIX = os.path.normpath(sys.base_prefix)
+BASE_EXEC_PREFIX = os.path.normpath(sys.base_exec_prefix)
+
+# Path to the base directory of the project. On Windows the binary may
+# live in project/PCbuild/win32 or project/PCbuild/amd64.
+# set for cross builds
+if "_PYTHON_PROJECT_BASE" in os.environ:
+ project_base = os.path.abspath(os.environ["_PYTHON_PROJECT_BASE"])
+else:
+ if sys.executable:
+ project_base = os.path.dirname(os.path.abspath(sys.executable))
+ else:
+ # sys.executable can be empty if argv[0] has been changed and Python is
+ # unable to retrieve the real program name
+ project_base = os.getcwd()
+
+
+# python_build: (Boolean) if true, we're either building Python or
+# building an extension with an un-installed Python, so we use
+# different (hard-wired) directories.
+def _is_python_source_dir(d):
+ for fn in ("Setup", "Setup.local"):
+ if os.path.isfile(os.path.join(d, "Modules", fn)):
+ return True
+ return False
+
+_sys_home = getattr(sys, '_home', None)
+
+if os.name == 'nt':
+ def _fix_pcbuild(d):
+ if d and os.path.normcase(d).startswith(
+ os.path.normcase(os.path.join(PREFIX, "PCbuild"))):
+ return PREFIX
+ return d
+ project_base = _fix_pcbuild(project_base)
+ _sys_home = _fix_pcbuild(_sys_home)
+
+def _python_build():
+ if _sys_home:
+ return _is_python_source_dir(_sys_home)
+ return _is_python_source_dir(project_base)
+
+python_build = _python_build()
+
+
+# Calculate the build qualifier flags if they are defined. Adding the flags
+# to the include and lib directories only makes sense for an installation, not
+# an in-source build.
+build_flags = ''
+try:
+ if not python_build:
+ build_flags = sys.abiflags
+except AttributeError:
+ # It's not a configure-based build, so the sys module doesn't have
+ # this attribute, which is fine.
+ pass
+
+def get_python_version():
+ """Return a string containing the major and minor Python version,
+ leaving off the patchlevel. Sample return values could be '1.5'
+ or '2.2'.
+ """
+ return '%d.%d' % sys.version_info[:2]
+
+
+def get_python_inc(plat_specific=0, prefix=None):
+ """Return the directory containing installed Python header files.
+
+ If 'plat_specific' is false (the default), this is the path to the
+ non-platform-specific header files, i.e. Python.h and so on;
+ otherwise, this is the path to platform-specific header files
+ (namely pyconfig.h).
+
+ If 'prefix' is supplied, use it instead of sys.base_prefix or
+ sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
+ """
+ if prefix is None:
+ prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX
+ if os.name == "posix":
+ if IS_PYPY and sys.version_info < (3, 8):
+ return os.path.join(prefix, 'include')
+ if python_build:
+ # Assume the executable is in the build directory. The
+ # pyconfig.h file should be in the same directory. Since
+ # the build directory may not be the source directory, we
+ # must use "srcdir" from the makefile to find the "Include"
+ # directory.
+ if plat_specific:
+ return _sys_home or project_base
+ else:
+ incdir = os.path.join(get_config_var('srcdir'), 'Include')
+ return os.path.normpath(incdir)
+ implementation = 'pypy' if IS_PYPY else 'python'
+ python_dir = implementation + get_python_version() + build_flags
+ return os.path.join(prefix, "include", python_dir)
+ elif os.name == "nt":
+ if python_build:
+ # Include both the include and PC dir to ensure we can find
+ # pyconfig.h
+ return (os.path.join(prefix, "include") + os.path.pathsep +
+ os.path.join(prefix, "PC"))
+ return os.path.join(prefix, "include")
+ else:
+ raise DistutilsPlatformError(
+ "I don't know where Python installs its C header files "
+ "on platform '%s'" % os.name)
+
+
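A short usage sketch (the paths shown are illustrative; actual values depend on the interpreter and platform):

    from distutils import sysconfig

    # Platform-independent headers (Python.h and friends).
    inc = sysconfig.get_python_inc()              # e.g. '/usr/include/python3.10'
    # Platform-specific headers (pyconfig.h).
    plat_inc = sysconfig.get_python_inc(plat_specific=1)
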
+# allow this behavior to be monkey-patched. Ref pypa/distutils#2.
+def _posix_lib(standard_lib, libpython, early_prefix, prefix):
+ if standard_lib:
+ return libpython
+ else:
+ return os.path.join(libpython, "site-packages")
+
+
+def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
+ """Return the directory containing the Python library (standard or
+ site additions).
+
+ If 'plat_specific' is true, return the directory containing
+ platform-specific modules, i.e. any module from a non-pure-Python
+ module distribution; otherwise, return the platform-shared library
+ directory. If 'standard_lib' is true, return the directory
+ containing standard Python library modules; otherwise, return the
+ directory for site-specific modules.
+
+ If 'prefix' is supplied, use it instead of sys.base_prefix or
+ sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
+ """
+
+ if IS_PYPY and sys.version_info < (3, 8):
+ # PyPy-specific schema
+ if prefix is None:
+ prefix = PREFIX
+ if standard_lib:
+ return os.path.join(prefix, "lib-python", sys.version[0])
+ return os.path.join(prefix, 'site-packages')
+
+ early_prefix = prefix
+
+ if prefix is None:
+ if standard_lib:
+ prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX
+ else:
+ prefix = plat_specific and EXEC_PREFIX or PREFIX
+
+ if os.name == "posix":
+ if plat_specific or standard_lib:
+ # Platform-specific modules (any module from a non-pure-Python
+ # module distribution) or standard Python library modules.
+ libdir = getattr(sys, "platlibdir", "lib")
+ else:
+ # Pure Python
+ libdir = "lib"
+ implementation = 'pypy' if IS_PYPY else 'python'
+ libpython = os.path.join(prefix, libdir,
+ implementation + get_python_version())
+ return _posix_lib(standard_lib, libpython, early_prefix, prefix)
+ elif os.name == "nt":
+ if standard_lib:
+ return os.path.join(prefix, "Lib")
+ else:
+ return os.path.join(prefix, "Lib", "site-packages")
+ else:
+ raise DistutilsPlatformError(
+ "I don't know where Python installs its library "
+ "on platform '%s'" % os.name)
+
+
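Likewise for the library directories, a minimal sketch (illustrative paths):

    from distutils import sysconfig

    purelib = sysconfig.get_python_lib()                 # site-packages for pure modules
    platlib = sysconfig.get_python_lib(plat_specific=1)  # site-packages for compiled modules
    stdlib = sysconfig.get_python_lib(standard_lib=1)    # the standard library itself
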
+
+def customize_compiler(compiler):
+ """Do any platform-specific customization of a CCompiler instance.
+
+ Mainly needed on Unix, so we can plug in the information that
+ varies across Unices and is stored in Python's Makefile.
+ """
+ if compiler.compiler_type == "unix":
+ if sys.platform == "darwin":
+ # Perform first-time customization of compiler-related
+ # config vars on OS X now that we know we need a compiler.
+ # This is primarily to support Pythons from binary
+ # installers. The kind and paths to build tools on
+ # the user system may vary significantly from the system
+ # that Python itself was built on. Also the user OS
+ # version and build tools may not support the same set
+ # of CPU architectures for universal builds.
+ global _config_vars
+ # Use get_config_var() to ensure _config_vars is initialized.
+ if not get_config_var('CUSTOMIZED_OSX_COMPILER'):
+ import _osx_support
+ _osx_support.customize_compiler(_config_vars)
+ _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True'
+
+ (cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \
+ get_config_vars('CC', 'CXX', 'CFLAGS',
+ 'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS')
+
+ if 'CC' in os.environ:
+ newcc = os.environ['CC']
+ if('LDSHARED' not in os.environ
+ and ldshared.startswith(cc)):
+ # If CC is overridden, use that as the default
+ # command for LDSHARED as well
+ ldshared = newcc + ldshared[len(cc):]
+ cc = newcc
+ if 'CXX' in os.environ:
+ cxx = os.environ['CXX']
+ if 'LDSHARED' in os.environ:
+ ldshared = os.environ['LDSHARED']
+ if 'CPP' in os.environ:
+ cpp = os.environ['CPP']
+ else:
+ cpp = cc + " -E" # not always
+ if 'LDFLAGS' in os.environ:
+ ldshared = ldshared + ' ' + os.environ['LDFLAGS']
+ if 'CFLAGS' in os.environ:
+ cflags = cflags + ' ' + os.environ['CFLAGS']
+ ldshared = ldshared + ' ' + os.environ['CFLAGS']
+ if 'CPPFLAGS' in os.environ:
+ cpp = cpp + ' ' + os.environ['CPPFLAGS']
+ cflags = cflags + ' ' + os.environ['CPPFLAGS']
+ ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
+ if 'AR' in os.environ:
+ ar = os.environ['AR']
+ if 'ARFLAGS' in os.environ:
+ archiver = ar + ' ' + os.environ['ARFLAGS']
+ else:
+ archiver = ar + ' ' + ar_flags
+
+ cc_cmd = cc + ' ' + cflags
+ compiler.set_executables(
+ preprocessor=cpp,
+ compiler=cc_cmd,
+ compiler_so=cc_cmd + ' ' + ccshared,
+ compiler_cxx=cxx,
+ linker_so=ldshared,
+ linker_exe=cc,
+ archiver=archiver)
+
+ if 'RANLIB' in os.environ and compiler.executables.get('ranlib', None):
+ compiler.set_executables(ranlib=os.environ['RANLIB'])
+
+ compiler.shared_lib_extension = shlib_suffix
+
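A rough sketch of how build commands drive this hook (mirroring what build_ext and build_clib do internally; the compiler type is picked automatically for the platform):

    from distutils.ccompiler import new_compiler
    from distutils.sysconfig import customize_compiler

    compiler = new_compiler()        # e.g. UnixCCompiler on POSIX
    customize_compiler(compiler)     # plug in CC/CFLAGS/LDSHARED from Python's Makefile
    # compiler.compile([...]) and compiler.link_shared_object([...]) now use those settings.
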
+
+def get_config_h_filename():
+ """Return full pathname of installed pyconfig.h file."""
+ if python_build:
+ if os.name == "nt":
+ inc_dir = os.path.join(_sys_home or project_base, "PC")
+ else:
+ inc_dir = _sys_home or project_base
+ return os.path.join(inc_dir, 'pyconfig.h')
+ else:
+ return sysconfig.get_config_h_filename()
+
+
+
+def get_makefile_filename():
+ """Return full pathname of installed Makefile from the Python build."""
+ return sysconfig.get_makefile_filename()
+
+
+def parse_config_h(fp, g=None):
+ """Parse a config.h-style file.
+
+ A dictionary containing name/value pairs is returned. If an
+ optional dictionary is passed in as the second argument, it is
+ used instead of a new dictionary.
+ """
+ return sysconfig.parse_config_h(fp, vars=g)
+
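For example, reading the interpreter's own pyconfig.h (a sketch; requires the header to be installed):

    from distutils import sysconfig

    with open(sysconfig.get_config_h_filename()) as fp:
        defines = sysconfig.parse_config_h(fp)
    # defines is a plain dict, e.g. defines.get('SIZEOF_LONG') is 8 on LP64 systems.
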
+
+# Regexes needed for parsing Makefile (and similar syntaxes,
+# like old-style Setup files).
+_variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
+_findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
+_findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")
+
+def parse_makefile(fn, g=None):
+ """Parse a Makefile-style file.
+
+ A dictionary containing name/value pairs is returned. If an
+ optional dictionary is passed in as the second argument, it is
+ used instead of a new dictionary.
+ """
+ from distutils.text_file import TextFile
+ fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1, errors="surrogateescape")
+
+ if g is None:
+ g = {}
+ done = {}
+ notdone = {}
+
+ while True:
+ line = fp.readline()
+ if line is None: # eof
+ break
+ m = _variable_rx.match(line)
+ if m:
+ n, v = m.group(1, 2)
+ v = v.strip()
+ # `$$' is a literal `$' in make
+ tmpv = v.replace('$$', '')
+
+ if "$" in tmpv:
+ notdone[n] = v
+ else:
+ try:
+ v = int(v)
+ except ValueError:
+ # insert literal `$'
+ done[n] = v.replace('$$', '$')
+ else:
+ done[n] = v
+
+ # Variables with a 'PY_' prefix in the makefile. These need to
+ # be made available without that prefix through sysconfig.
+ # Special care is needed to ensure that variable expansion works, even
+ # if the expansion uses the name without a prefix.
+ renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')
+
+ # do variable interpolation here
+ while notdone:
+ for name in list(notdone):
+ value = notdone[name]
+ m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
+ if m:
+ n = m.group(1)
+ found = True
+ if n in done:
+ item = str(done[n])
+ elif n in notdone:
+ # get it on a subsequent round
+ found = False
+ elif n in os.environ:
+ # do it like make: fall back to environment
+ item = os.environ[n]
+
+ elif n in renamed_variables:
+ if name.startswith('PY_') and name[3:] in renamed_variables:
+ item = ""
+
+ elif 'PY_' + n in notdone:
+ found = False
+
+ else:
+ item = str(done['PY_' + n])
+ else:
+ done[n] = item = ""
+ if found:
+ after = value[m.end():]
+ value = value[:m.start()] + item + after
+ if "$" in after:
+ notdone[name] = value
+ else:
+ try: value = int(value)
+ except ValueError:
+ done[name] = value.strip()
+ else:
+ done[name] = value
+ del notdone[name]
+
+ if name.startswith('PY_') \
+ and name[3:] in renamed_variables:
+
+ name = name[3:]
+ if name not in done:
+ done[name] = value
+ else:
+ # bogus variable reference; just drop it since we can't deal with it
+ del notdone[name]
+
+ fp.close()
+
+ # strip spurious spaces
+ for k, v in done.items():
+ if isinstance(v, str):
+ done[k] = v.strip()
+
+ # save the results in the global dictionary
+ g.update(done)
+ return g
+
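A sketch of the intended use (POSIX only; there is no installed Makefile to parse on Windows):

    from distutils.sysconfig import get_makefile_filename, parse_makefile

    makevars = parse_makefile(get_makefile_filename())
    # Values are strings or ints; $(VAR)/${VAR} references have already been expanded.
    print(makevars.get('CC'), makevars.get('VERSION'))
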
+
+def expand_makefile_vars(s, vars):
+ """Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in
+ 'string' according to 'vars' (a dictionary mapping variable names to
+ values). Variables not present in 'vars' are silently expanded to the
+ empty string. The variable values in 'vars' should not contain further
+ variable expansions; if 'vars' is the output of 'parse_makefile()',
+ you're fine. Returns a variable-expanded version of 's'.
+ """
+
+ # This algorithm does multiple expansion, so if vars['foo'] contains
+ # "${bar}", it will expand ${foo} to ${bar}, and then expand
+ # ${bar}... and so forth. This is fine as long as 'vars' comes from
+ # 'parse_makefile()', which takes care of such expansions eagerly,
+ # according to make's variable expansion semantics.
+
+ while True:
+ m = _findvar1_rx.search(s) or _findvar2_rx.search(s)
+ if m:
+ (beg, end) = m.span()
+ s = s[0:beg] + vars.get(m.group(1)) + s[end:]
+ else:
+ break
+ return s
+
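For instance (a small illustrative mapping; real callers pass the dictionary produced by parse_makefile()):

    from distutils.sysconfig import expand_makefile_vars

    makevars = {'prefix': '/usr/local'}
    expand_makefile_vars('$(prefix)/bin/python3', makevars)   # '/usr/local/bin/python3'
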
+
+_config_vars = None
+
+
+def get_config_vars(*args):
+ """With no arguments, return a dictionary of all configuration
+ variables relevant for the current platform. Generally this includes
+ everything needed to build extensions and install both pure modules and
+ extensions. On Unix, this means every variable defined in Python's
+ installed Makefile; on Windows it's a much smaller set.
+
+ With arguments, return a list of values that result from looking up
+ each argument in the configuration variable dictionary.
+ """
+ global _config_vars
+ if _config_vars is None:
+ _config_vars = sysconfig.get_config_vars().copy()
+
+ if args:
+ vals = []
+ for name in args:
+ vals.append(_config_vars.get(name))
+ return vals
+ else:
+ return _config_vars
+
+def get_config_var(name):
+ """Return the value of a single variable using the dictionary
+ returned by 'get_config_vars()'. Equivalent to
+ get_config_vars().get(name)
+ """
+ if name == 'SO':
+ import warnings
+ warnings.warn('SO is deprecated, use EXT_SUFFIX', DeprecationWarning, 2)
+ return get_config_vars().get(name)
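
Putting the two entry points together, a minimal usage sketch (the EXT_SUFFIX value is illustrative):

    from distutils import sysconfig

    cfg = sysconfig.get_config_vars()                       # full dict, cached after first call
    cc, cflags = sysconfig.get_config_vars('CC', 'CFLAGS')  # selected values, in request order
    sysconfig.get_config_var('EXT_SUFFIX')                  # e.g. '.cpython-310-x86_64-linux-gnu.so'
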
diff --git a/setuptools/_distutils/tests/__init__.py b/setuptools/_distutils/tests/__init__.py
new file mode 100644
index 0000000..c7dcc7e
--- /dev/null
+++ b/setuptools/_distutils/tests/__init__.py
@@ -0,0 +1,42 @@
+"""Test suite for distutils.
+
+This test suite consists of a collection of test modules in the
+distutils.tests package. Each test module has a name starting with
+'test' and contains a function test_suite(). The function is expected
+to return an initialized unittest.TestSuite instance.
+
+Tests for the command classes in the distutils.command package are
+included in distutils.tests as well, instead of using a separate
+distutils.command.tests package, since command identification is done
+by import rather than matching pre-defined names.
+
+"""
+
+import os
+import sys
+import unittest
+from test.support import run_unittest
+
+from .py38compat import save_restore_warnings_filters
+
+
+here = os.path.dirname(__file__) or os.curdir
+
+
+def test_suite():
+ suite = unittest.TestSuite()
+ for fn in os.listdir(here):
+ if fn.startswith("test") and fn.endswith(".py"):
+ modname = "distutils.tests." + fn[:-3]
+ # bpo-40055: Save/restore warnings filters to leave them unchanged.
+ # Importing tests imports docutils which imports pkg_resources
+ # which adds a warnings filter.
+ with save_restore_warnings_filters():
+ __import__(modname)
+ module = sys.modules[modname]
+ suite.addTest(module.test_suite())
+ return suite
+
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
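
Each test module added below follows the convention this loader relies on; roughly (an illustrative skeleton, not one of the real modules):

    # distutils/tests/test_example.py
    import unittest
    from test.support import run_unittest

    class ExampleTestCase(unittest.TestCase):
        def test_something(self):
            self.assertTrue(True)

    def test_suite():
        return unittest.TestLoader().loadTestsFromTestCase(ExampleTestCase)

    if __name__ == "__main__":
        run_unittest(test_suite())
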
diff --git a/setuptools/_distutils/tests/py35compat.py b/setuptools/_distutils/tests/py35compat.py
new file mode 100644
index 0000000..0c75526
--- /dev/null
+++ b/setuptools/_distutils/tests/py35compat.py
@@ -0,0 +1,77 @@
+"""
+Backward compatibility support for Python 3.5
+"""
+
+import sys
+import test.support
+import subprocess
+
+
+# copied from Python 3.9 test.support module
+def _missing_compiler_executable(cmd_names=[]):
+ """Check if the compiler components used to build the interpreter exist.
+
+ Check for the existence of the compiler executables whose names are listed
+ in 'cmd_names' or all the compiler executables when 'cmd_names' is empty
+ and return the first missing executable, or None when none of them is
+ missing.
+
+ """
+ from distutils import ccompiler, sysconfig, spawn
+ compiler = ccompiler.new_compiler()
+ sysconfig.customize_compiler(compiler)
+ for name in compiler.executables:
+ if cmd_names and name not in cmd_names:
+ continue
+ cmd = getattr(compiler, name)
+ if cmd_names:
+ assert cmd is not None, \
+ "the '%s' executable is not configured" % name
+ elif not cmd:
+ continue
+ if spawn.find_executable(cmd[0]) is None:
+ return cmd[0]
+
+
+missing_compiler_executable = vars(test.support).setdefault(
+ 'missing_compiler_executable',
+ _missing_compiler_executable,
+)
+
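A sketch of the typical call pattern in a test that needs a working C toolchain (the import path assumes the vendored copy is importable as distutils.tests, as the other test modules here do):

    import unittest
    from distutils.tests.py35compat import missing_compiler_executable

    cmd = missing_compiler_executable()
    if cmd is not None:
        raise unittest.SkipTest('The %r command is not found' % cmd)
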
+
+try:
+ from test.support import unix_shell
+except ImportError:
+ # Adapted from Python 3.9 test.support module
+ is_android = hasattr(sys, 'getandroidapilevel')
+ unix_shell = (
+ None if sys.platform == 'win32' else
+ '/system/bin/sh' if is_android else
+ '/bin/sh'
+ )
+
+
+# copied from Python 3.9 subprocess module
+def _optim_args_from_interpreter_flags():
+ """Return a list of command-line arguments reproducing the current
+ optimization settings in sys.flags."""
+ args = []
+ value = sys.flags.optimize
+ if value > 0:
+ args.append('-' + 'O' * value)
+ return args
+
+
+vars(subprocess).setdefault(
+ '_optim_args_from_interpreter_flags',
+ _optim_args_from_interpreter_flags,
+)
+
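The behaviour being backported, in brief (values are illustrative):

    import subprocess, sys

    args = subprocess._optim_args_from_interpreter_flags()
    # [] for a normal run, ['-O'] under `python -O`, ['-OO'] under `python -OO`
    subprocess.check_call([sys.executable, *args, '-c', 'pass'])
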
+
+def adapt_glob(regex):
+ """
+ Supply legacy expectation on Python 3.5
+ """
+ if sys.version_info > (3, 6):
+ return regex
+ return regex.replace('(?s:', '').replace(r')\Z', r'\Z(?ms)')
diff --git a/setuptools/_distutils/tests/py38compat.py b/setuptools/_distutils/tests/py38compat.py
new file mode 100644
index 0000000..c949f58
--- /dev/null
+++ b/setuptools/_distutils/tests/py38compat.py
@@ -0,0 +1,62 @@
+# flake8: noqa
+
+import contextlib
+import builtins
+import sys
+import warnings
+
+from test.support import requires_zlib
+import test.support
+
+
+ModuleNotFoundError = getattr(builtins, 'ModuleNotFoundError', ImportError)
+
+try:
+ from test.support.warnings_helper import check_warnings
+except (ModuleNotFoundError, ImportError):
+ from test.support import check_warnings
+
+
+try:
+ from test.support.os_helper import (
+ change_cwd,
+ rmtree,
+ EnvironmentVarGuard,
+ TESTFN,
+ unlink,
+ skip_unless_symlink,
+ temp_dir,
+ create_empty_file,
+ temp_cwd,
+ )
+except (ModuleNotFoundError, ImportError):
+ from test.support import (
+ change_cwd,
+ rmtree,
+ EnvironmentVarGuard,
+ TESTFN,
+ unlink,
+ skip_unless_symlink,
+ temp_dir,
+ create_empty_file,
+ temp_cwd,
+ )
+
+
+# From Python 3.9
+@contextlib.contextmanager
+def _save_restore_warnings_filters():
+ old_filters = warnings.filters[:]
+ try:
+ yield
+ finally:
+ warnings.filters[:] = old_filters
+
+
+try:
+ from test.support.warnings_helper import save_restore_warnings_filters
+except (ModuleNotFoundError, ImportError):
+ save_restore_warnings_filters = _save_restore_warnings_filters
+
+
+if sys.version_info < (3, 9):
+ requires_zlib = lambda: test.support.requires_zlib
diff --git a/setuptools/_distutils/tests/support.py b/setuptools/_distutils/tests/support.py
new file mode 100644
index 0000000..b4410fc
--- /dev/null
+++ b/setuptools/_distutils/tests/support.py
@@ -0,0 +1,210 @@
+"""Support code for distutils test cases."""
+import os
+import sys
+import shutil
+import tempfile
+import unittest
+import sysconfig
+from copy import deepcopy
+
+from . import py38compat as os_helper
+
+from distutils import log
+from distutils.log import DEBUG, INFO, WARN, ERROR, FATAL
+from distutils.core import Distribution
+
+
+class LoggingSilencer(object):
+
+ def setUp(self):
+ super().setUp()
+ self.threshold = log.set_threshold(log.FATAL)
+ # catching warnings
+ # when log will be replaced by logging
+ # we won't need such monkey-patch anymore
+ self._old_log = log.Log._log
+ log.Log._log = self._log
+ self.logs = []
+
+ def tearDown(self):
+ log.set_threshold(self.threshold)
+ log.Log._log = self._old_log
+ super().tearDown()
+
+ def _log(self, level, msg, args):
+ if level not in (DEBUG, INFO, WARN, ERROR, FATAL):
+ raise ValueError('%s wrong log level' % str(level))
+ if not isinstance(msg, str):
+ raise TypeError("msg should be str, not '%.200s'"
+ % (type(msg).__name__))
+ self.logs.append((level, msg, args))
+
+ def get_logs(self, *levels):
+ return [msg % args for level, msg, args
+ in self.logs if level in levels]
+
+ def clear_logs(self):
+ self.logs = []
+
+
+class TempdirManager(object):
+ """Mix-in class that handles temporary directories for test cases.
+
+ This is intended to be used with unittest.TestCase.
+ """
+
+ def setUp(self):
+ super().setUp()
+ self.old_cwd = os.getcwd()
+ self.tempdirs = []
+
+ def tearDown(self):
+ # Restore working dir, for Solaris and derivatives, where rmdir()
+ # on the current directory fails.
+ os.chdir(self.old_cwd)
+ super().tearDown()
+ while self.tempdirs:
+ tmpdir = self.tempdirs.pop()
+ os_helper.rmtree(tmpdir)
+
+ def mkdtemp(self):
+ """Create a temporary directory that will be cleaned up.
+
+ Returns the path of the directory.
+ """
+ d = tempfile.mkdtemp()
+ self.tempdirs.append(d)
+ return d
+
+ def write_file(self, path, content='xxx'):
+ """Writes a file in the given path.
+
+ path can be a string or a sequence.
+ """
+ if isinstance(path, (list, tuple)):
+ path = os.path.join(*path)
+ f = open(path, 'w')
+ try:
+ f.write(content)
+ finally:
+ f.close()
+
+ def create_dist(self, pkg_name='foo', **kw):
+ """Will generate a test environment.
+
+ This function creates:
+ - a Distribution instance using keywords
+ - a temporary directory with a package structure
+
+ It returns the package directory and the distribution
+ instance.
+ """
+ tmp_dir = self.mkdtemp()
+ pkg_dir = os.path.join(tmp_dir, pkg_name)
+ os.mkdir(pkg_dir)
+ dist = Distribution(attrs=kw)
+
+ return pkg_dir, dist
+
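A sketch of how the later test modules combine these mix-ins (illustrative class and file names):

    import unittest
    from distutils.tests import support

    class ExampleTestCase(support.TempdirManager,
                          support.LoggingSilencer,
                          unittest.TestCase):

        def test_layout(self):
            pkg_dir, dist = self.create_dist(name='demo', version='0.1')
            self.write_file((pkg_dir, 'demo.py'), '# demo module')
            # pkg_dir lives under a temporary directory that tearDown() removes.
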
+
+class DummyCommand:
+ """Class to store options for retrieval via set_undefined_options()."""
+
+ def __init__(self, **kwargs):
+ for kw, val in kwargs.items():
+ setattr(self, kw, val)
+
+ def ensure_finalized(self):
+ pass
+
+
+class EnvironGuard(object):
+
+ def setUp(self):
+ super(EnvironGuard, self).setUp()
+ self.old_environ = deepcopy(os.environ)
+
+ def tearDown(self):
+ for key, value in self.old_environ.items():
+ if os.environ.get(key) != value:
+ os.environ[key] = value
+
+ for key in tuple(os.environ.keys()):
+ if key not in self.old_environ:
+ del os.environ[key]
+
+ super(EnvironGuard, self).tearDown()
+
+
+def copy_xxmodule_c(directory):
+ """Helper for tests that need the xxmodule.c source file.
+
+ Example use:
+
+ def test_compile(self):
+ copy_xxmodule_c(self.tmpdir)
+ self.assertIn('xxmodule.c', os.listdir(self.tmpdir))
+
+ If the source file can be found, it will be copied to *directory*. If not,
+ the test will be skipped. Errors during copy are not caught.
+ """
+ filename = _get_xxmodule_path()
+ if filename is None:
+ raise unittest.SkipTest('cannot find xxmodule.c (test must run in '
+ 'the python build dir)')
+ shutil.copy(filename, directory)
+
+
+def _get_xxmodule_path():
+ srcdir = sysconfig.get_config_var('srcdir')
+ candidates = [
+ # use installed copy if available
+ os.path.join(os.path.dirname(__file__), 'xxmodule.c'),
+ # otherwise try using copy from build directory
+ os.path.join(srcdir, 'Modules', 'xxmodule.c'),
+ # srcdir mysteriously can be $srcdir/Lib/distutils/tests when
+ # this file is run from its parent directory, so walk up the
+ # tree to find the real srcdir
+ os.path.join(srcdir, '..', '..', '..', 'Modules', 'xxmodule.c'),
+ ]
+ for path in candidates:
+ if os.path.exists(path):
+ return path
+
+
+def fixup_build_ext(cmd):
+ """Function needed to make build_ext tests pass.
+
+ When Python was built with --enable-shared on Unix, -L. is not enough to
+ find libpython<blah>.so, because regrtest runs in a tempdir, not in the
+ source directory where the .so lives.
+
+ When Python was built in debug mode on Windows, build_ext commands
+ need their debug attribute set, and it is not done automatically for
+ some reason.
+
+ This function handles both of these things. Example use:
+
+ cmd = build_ext(dist)
+ support.fixup_build_ext(cmd)
+ cmd.ensure_finalized()
+
+ Unlike most other Unix platforms, Mac OS X embeds absolute paths
+ to shared libraries into executables, so the fixup is not needed there.
+ """
+ if os.name == 'nt':
+ cmd.debug = sys.executable.endswith('_d.exe')
+ elif sysconfig.get_config_var('Py_ENABLE_SHARED'):
+ # To further add to the shared builds fun on Unix, we can't just add
+ # library_dirs to the Extension() instance because that doesn't get
+ # plumbed through to the final compiler command.
+ runshared = sysconfig.get_config_var('RUNSHARED')
+ if runshared is None:
+ cmd.library_dirs = ['.']
+ else:
+ if sys.platform == 'darwin':
+ cmd.library_dirs = []
+ else:
+ name, equals, value = runshared.partition('=')
+ cmd.library_dirs = [d for d in value.split(os.pathsep) if d]
diff --git a/setuptools/_distutils/tests/test_archive_util.py b/setuptools/_distutils/tests/test_archive_util.py
new file mode 100644
index 0000000..800b901
--- /dev/null
+++ b/setuptools/_distutils/tests/test_archive_util.py
@@ -0,0 +1,393 @@
+# -*- coding: utf-8 -*-
+"""Tests for distutils.archive_util."""
+import unittest
+import os
+import sys
+import tarfile
+from os.path import splitdrive
+import warnings
+
+from distutils import archive_util
+from distutils.archive_util import (check_archive_formats, make_tarball,
+ make_zipfile, make_archive,
+ ARCHIVE_FORMATS)
+from distutils.spawn import find_executable, spawn
+from distutils.tests import support
+from test.support import run_unittest, patch
+from .unix_compat import require_unix_id, require_uid_0, grp, pwd, UID_0_SUPPORT
+
+from .py38compat import change_cwd
+from .py38compat import check_warnings
+
+
+try:
+ import zipfile
+ ZIP_SUPPORT = True
+except ImportError:
+ ZIP_SUPPORT = find_executable('zip')
+
+try:
+ import zlib
+ ZLIB_SUPPORT = True
+except ImportError:
+ ZLIB_SUPPORT = False
+
+try:
+ import bz2
+except ImportError:
+ bz2 = None
+
+try:
+ import lzma
+except ImportError:
+ lzma = None
+
+def can_fs_encode(filename):
+ """
+ Return True if the filename can be saved in the file system.
+ """
+ if os.path.supports_unicode_filenames:
+ return True
+ try:
+ filename.encode(sys.getfilesystemencoding())
+ except UnicodeEncodeError:
+ return False
+ return True
+
+
+class ArchiveUtilTestCase(support.TempdirManager,
+ support.LoggingSilencer,
+ unittest.TestCase):
+
+ @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
+ def test_make_tarball(self, name='archive'):
+ # creating something to tar
+ tmpdir = self._create_files()
+ self._make_tarball(tmpdir, name, '.tar.gz')
+ # trying an uncompressed one
+ self._make_tarball(tmpdir, name, '.tar', compress=None)
+
+ @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
+ def test_make_tarball_gzip(self):
+ tmpdir = self._create_files()
+ self._make_tarball(tmpdir, 'archive', '.tar.gz', compress='gzip')
+
+ @unittest.skipUnless(bz2, 'Need bz2 support to run')
+ def test_make_tarball_bzip2(self):
+ tmpdir = self._create_files()
+ self._make_tarball(tmpdir, 'archive', '.tar.bz2', compress='bzip2')
+
+ @unittest.skipUnless(lzma, 'Need lzma support to run')
+ def test_make_tarball_xz(self):
+ tmpdir = self._create_files()
+ self._make_tarball(tmpdir, 'archive', '.tar.xz', compress='xz')
+
+ @unittest.skipUnless(can_fs_encode('årchiv'),
+ 'File system cannot handle this filename')
+ def test_make_tarball_latin1(self):
+ """
+ Mirror test_make_tarball, except filename contains latin characters.
+ """
+ self.test_make_tarball('årchiv') # note this isn't a real word
+
+ @unittest.skipUnless(can_fs_encode('のアーカイブ'),
+ 'File system cannot handle this filename')
+ def test_make_tarball_extended(self):
+ """
+ Mirror test_make_tarball, except filename contains extended
+ characters outside the latin charset.
+ """
+ self.test_make_tarball('のアーカイブ') # japanese for archive
+
+ def _make_tarball(self, tmpdir, target_name, suffix, **kwargs):
+ tmpdir2 = self.mkdtemp()
+ unittest.skipUnless(splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0],
+ "source and target should be on same drive")
+
+ base_name = os.path.join(tmpdir2, target_name)
+
+ # working with relative paths to avoid tar warnings
+ with change_cwd(tmpdir):
+ make_tarball(splitdrive(base_name)[1], 'dist', **kwargs)
+
+ # check if the compressed tarball was created
+ tarball = base_name + suffix
+ self.assertTrue(os.path.exists(tarball))
+ self.assertEqual(self._tarinfo(tarball), self._created_files)
+
+ def _tarinfo(self, path):
+ tar = tarfile.open(path)
+ try:
+ names = tar.getnames()
+ names.sort()
+ return names
+ finally:
+ tar.close()
+
+ _zip_created_files = ['dist/', 'dist/file1', 'dist/file2',
+ 'dist/sub/', 'dist/sub/file3', 'dist/sub2/']
+ _created_files = [p.rstrip('/') for p in _zip_created_files]
+
+ def _create_files(self):
+ # creating something to tar
+ tmpdir = self.mkdtemp()
+ dist = os.path.join(tmpdir, 'dist')
+ os.mkdir(dist)
+ self.write_file([dist, 'file1'], 'xxx')
+ self.write_file([dist, 'file2'], 'xxx')
+ os.mkdir(os.path.join(dist, 'sub'))
+ self.write_file([dist, 'sub', 'file3'], 'xxx')
+ os.mkdir(os.path.join(dist, 'sub2'))
+ return tmpdir
+
+ @unittest.skipUnless(find_executable('tar') and find_executable('gzip')
+ and ZLIB_SUPPORT,
+ 'Need the tar, gzip and zlib command to run')
+ def test_tarfile_vs_tar(self):
+ tmpdir = self._create_files()
+ tmpdir2 = self.mkdtemp()
+ base_name = os.path.join(tmpdir2, 'archive')
+ old_dir = os.getcwd()
+ os.chdir(tmpdir)
+ try:
+ make_tarball(base_name, 'dist')
+ finally:
+ os.chdir(old_dir)
+
+ # check if the compressed tarball was created
+ tarball = base_name + '.tar.gz'
+ self.assertTrue(os.path.exists(tarball))
+
+ # now create another tarball using `tar`
+ tarball2 = os.path.join(tmpdir, 'archive2.tar.gz')
+ tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist']
+ gzip_cmd = ['gzip', '-f', '-9', 'archive2.tar']
+ old_dir = os.getcwd()
+ os.chdir(tmpdir)
+ try:
+ spawn(tar_cmd)
+ spawn(gzip_cmd)
+ finally:
+ os.chdir(old_dir)
+
+ self.assertTrue(os.path.exists(tarball2))
+ # let's compare both tarballs
+ self.assertEqual(self._tarinfo(tarball), self._created_files)
+ self.assertEqual(self._tarinfo(tarball2), self._created_files)
+
+ # trying an uncompressed one
+ base_name = os.path.join(tmpdir2, 'archive')
+ old_dir = os.getcwd()
+ os.chdir(tmpdir)
+ try:
+ make_tarball(base_name, 'dist', compress=None)
+ finally:
+ os.chdir(old_dir)
+ tarball = base_name + '.tar'
+ self.assertTrue(os.path.exists(tarball))
+
+ # now for a dry_run
+ base_name = os.path.join(tmpdir2, 'archive')
+ old_dir = os.getcwd()
+ os.chdir(tmpdir)
+ try:
+ make_tarball(base_name, 'dist', compress=None, dry_run=True)
+ finally:
+ os.chdir(old_dir)
+ tarball = base_name + '.tar'
+ self.assertTrue(os.path.exists(tarball))
+
+ @unittest.skipUnless(find_executable('compress'),
+ 'The compress program is required')
+ def test_compress_deprecated(self):
+ tmpdir = self._create_files()
+ base_name = os.path.join(self.mkdtemp(), 'archive')
+
+ # using compress and testing the PendingDeprecationWarning
+ old_dir = os.getcwd()
+ os.chdir(tmpdir)
+ try:
+ with check_warnings() as w:
+ warnings.simplefilter("always")
+ make_tarball(base_name, 'dist', compress='compress')
+ finally:
+ os.chdir(old_dir)
+ tarball = base_name + '.tar.Z'
+ self.assertTrue(os.path.exists(tarball))
+ self.assertEqual(len(w.warnings), 1)
+
+ # same test with dry_run
+ os.remove(tarball)
+ old_dir = os.getcwd()
+ os.chdir(tmpdir)
+ try:
+ with check_warnings() as w:
+ warnings.simplefilter("always")
+ make_tarball(base_name, 'dist', compress='compress',
+ dry_run=True)
+ finally:
+ os.chdir(old_dir)
+ self.assertFalse(os.path.exists(tarball))
+ self.assertEqual(len(w.warnings), 1)
+
+ @unittest.skipUnless(ZIP_SUPPORT and ZLIB_SUPPORT,
+ 'Need zip and zlib support to run')
+ def test_make_zipfile(self):
+ # creating something to tar
+ tmpdir = self._create_files()
+ base_name = os.path.join(self.mkdtemp(), 'archive')
+ with change_cwd(tmpdir):
+ make_zipfile(base_name, 'dist')
+
+ # check if the compressed tarball was created
+ tarball = base_name + '.zip'
+ self.assertTrue(os.path.exists(tarball))
+ with zipfile.ZipFile(tarball) as zf:
+ self.assertEqual(sorted(zf.namelist()), self._zip_created_files)
+
+ @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run')
+ def test_make_zipfile_no_zlib(self):
+ patch(self, archive_util.zipfile, 'zlib', None) # force zlib ImportError
+
+ called = []
+ zipfile_class = zipfile.ZipFile
+ def fake_zipfile(*a, **kw):
+ if kw.get('compression', None) == zipfile.ZIP_STORED:
+ called.append((a, kw))
+ return zipfile_class(*a, **kw)
+
+ patch(self, archive_util.zipfile, 'ZipFile', fake_zipfile)
+
+ # create something to tar and compress
+ tmpdir = self._create_files()
+ base_name = os.path.join(self.mkdtemp(), 'archive')
+ with change_cwd(tmpdir):
+ make_zipfile(base_name, 'dist')
+
+ tarball = base_name + '.zip'
+ self.assertEqual(called,
+ [((tarball, "w"), {'compression': zipfile.ZIP_STORED})])
+ self.assertTrue(os.path.exists(tarball))
+ with zipfile.ZipFile(tarball) as zf:
+ self.assertEqual(sorted(zf.namelist()), self._zip_created_files)
+
+ def test_check_archive_formats(self):
+ self.assertEqual(check_archive_formats(['gztar', 'xxx', 'zip']),
+ 'xxx')
+ self.assertIsNone(check_archive_formats(['gztar', 'bztar', 'xztar',
+ 'ztar', 'tar', 'zip']))
+
+ def test_make_archive(self):
+ tmpdir = self.mkdtemp()
+ base_name = os.path.join(tmpdir, 'archive')
+ self.assertRaises(ValueError, make_archive, base_name, 'xxx')
+
+ def test_make_archive_cwd(self):
+ current_dir = os.getcwd()
+ def _breaks(*args, **kw):
+ raise RuntimeError()
+ ARCHIVE_FORMATS['xxx'] = (_breaks, [], 'xxx file')
+ try:
+ try:
+ make_archive('xxx', 'xxx', root_dir=self.mkdtemp())
+ except:
+ pass
+ self.assertEqual(os.getcwd(), current_dir)
+ finally:
+ del ARCHIVE_FORMATS['xxx']
+
+ def test_make_archive_tar(self):
+ base_dir = self._create_files()
+ base_name = os.path.join(self.mkdtemp() , 'archive')
+ res = make_archive(base_name, 'tar', base_dir, 'dist')
+ self.assertTrue(os.path.exists(res))
+ self.assertEqual(os.path.basename(res), 'archive.tar')
+ self.assertEqual(self._tarinfo(res), self._created_files)
+
+ @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
+ def test_make_archive_gztar(self):
+ base_dir = self._create_files()
+ base_name = os.path.join(self.mkdtemp() , 'archive')
+ res = make_archive(base_name, 'gztar', base_dir, 'dist')
+ self.assertTrue(os.path.exists(res))
+ self.assertEqual(os.path.basename(res), 'archive.tar.gz')
+ self.assertEqual(self._tarinfo(res), self._created_files)
+
+ @unittest.skipUnless(bz2, 'Need bz2 support to run')
+ def test_make_archive_bztar(self):
+ base_dir = self._create_files()
+ base_name = os.path.join(self.mkdtemp() , 'archive')
+ res = make_archive(base_name, 'bztar', base_dir, 'dist')
+ self.assertTrue(os.path.exists(res))
+ self.assertEqual(os.path.basename(res), 'archive.tar.bz2')
+ self.assertEqual(self._tarinfo(res), self._created_files)
+
+ @unittest.skipUnless(lzma, 'Need xz support to run')
+ def test_make_archive_xztar(self):
+ base_dir = self._create_files()
+ base_name = os.path.join(self.mkdtemp() , 'archive')
+ res = make_archive(base_name, 'xztar', base_dir, 'dist')
+ self.assertTrue(os.path.exists(res))
+ self.assertEqual(os.path.basename(res), 'archive.tar.xz')
+ self.assertEqual(self._tarinfo(res), self._created_files)
+
+ def test_make_archive_owner_group(self):
+ # testing make_archive with owner and group, with various combinations
+ # this works even if there's no gid/uid support
+ if UID_0_SUPPORT:
+ group = grp.getgrgid(0)[0]
+ owner = pwd.getpwuid(0)[0]
+ else:
+ group = owner = 'root'
+
+ base_dir = self._create_files()
+ root_dir = self.mkdtemp()
+ base_name = os.path.join(self.mkdtemp() , 'archive')
+ res = make_archive(base_name, 'zip', root_dir, base_dir, owner=owner,
+ group=group)
+ self.assertTrue(os.path.exists(res))
+
+ res = make_archive(base_name, 'zip', root_dir, base_dir)
+ self.assertTrue(os.path.exists(res))
+
+ res = make_archive(base_name, 'tar', root_dir, base_dir,
+ owner=owner, group=group)
+ self.assertTrue(os.path.exists(res))
+
+ res = make_archive(base_name, 'tar', root_dir, base_dir,
+ owner='kjhkjhkjg', group='oihohoh')
+ self.assertTrue(os.path.exists(res))
+
+ @unittest.skipUnless(ZLIB_SUPPORT, "Requires zlib")
+ @require_unix_id
+ @require_uid_0
+ def test_tarfile_root_owner(self):
+ tmpdir = self._create_files()
+ base_name = os.path.join(self.mkdtemp(), 'archive')
+ old_dir = os.getcwd()
+ os.chdir(tmpdir)
+ group = grp.getgrgid(0)[0]
+ owner = pwd.getpwuid(0)[0]
+ try:
+ archive_name = make_tarball(base_name, 'dist', compress=None,
+ owner=owner, group=group)
+ finally:
+ os.chdir(old_dir)
+
+ # check if the compressed tarball was created
+ self.assertTrue(os.path.exists(archive_name))
+
+ # now checks the rights
+ archive = tarfile.open(archive_name)
+ try:
+ for member in archive.getmembers():
+ self.assertEqual(member.uid, 0)
+ self.assertEqual(member.gid, 0)
+ finally:
+ archive.close()
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(ArchiveUtilTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_bdist.py b/setuptools/_distutils/tests/test_bdist.py
new file mode 100644
index 0000000..8b7498e
--- /dev/null
+++ b/setuptools/_distutils/tests/test_bdist.py
@@ -0,0 +1,57 @@
+"""Tests for distutils.command.bdist."""
+import os
+import unittest
+from test.support import run_unittest
+import warnings
+
+from distutils.command.bdist import bdist
+from distutils.tests import support
+
+
+class BuildTestCase(support.TempdirManager,
+ unittest.TestCase):
+
+ def test_formats(self):
+ # let's create a command and make sure
+ # we can set the format
+ dist = self.create_dist()[1]
+ cmd = bdist(dist)
+ cmd.formats = ['msi']
+ cmd.ensure_finalized()
+ self.assertEqual(cmd.formats, ['msi'])
+
+ # what formats does bdist offer?
+ formats = ['bztar', 'gztar', 'msi', 'rpm', 'tar',
+ 'wininst', 'xztar', 'zip', 'ztar']
+ found = sorted(cmd.format_command)
+ self.assertEqual(found, formats)
+
+ def test_skip_build(self):
+ # bug #10946: bdist --skip-build should trickle down to subcommands
+ dist = self.create_dist()[1]
+ cmd = bdist(dist)
+ cmd.skip_build = 1
+ cmd.ensure_finalized()
+ dist.command_obj['bdist'] = cmd
+
+ names = ['bdist_dumb', 'bdist_wininst'] # bdist_rpm does not support --skip-build
+ if os.name == 'nt':
+ names.append('bdist_msi')
+
+ for name in names:
+ with warnings.catch_warnings():
+ warnings.filterwarnings('ignore', 'bdist_wininst command is deprecated',
+ DeprecationWarning)
+ subcmd = cmd.get_finalized_command(name)
+ if getattr(subcmd, '_unsupported', False):
+ # command is not supported on this build
+ continue
+ self.assertTrue(subcmd.skip_build,
+ '%s should take --skip-build from bdist' % name)
+
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(BuildTestCase)
+
+if __name__ == '__main__':
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_bdist_dumb.py b/setuptools/_distutils/tests/test_bdist_dumb.py
new file mode 100644
index 0000000..bb860c8
--- /dev/null
+++ b/setuptools/_distutils/tests/test_bdist_dumb.py
@@ -0,0 +1,97 @@
+"""Tests for distutils.command.bdist_dumb."""
+
+import os
+import sys
+import zipfile
+import unittest
+from test.support import run_unittest
+
+from distutils.core import Distribution
+from distutils.command.bdist_dumb import bdist_dumb
+from distutils.tests import support
+
+SETUP_PY = """\
+from distutils.core import setup
+import foo
+
+setup(name='foo', version='0.1', py_modules=['foo'],
+ url='xxx', author='xxx', author_email='xxx')
+
+"""
+
+try:
+ import zlib
+ ZLIB_SUPPORT = True
+except ImportError:
+ ZLIB_SUPPORT = False
+
+
+class BuildDumbTestCase(support.TempdirManager,
+ support.LoggingSilencer,
+ support.EnvironGuard,
+ unittest.TestCase):
+
+ def setUp(self):
+ super(BuildDumbTestCase, self).setUp()
+ self.old_location = os.getcwd()
+ self.old_sys_argv = sys.argv, sys.argv[:]
+
+ def tearDown(self):
+ os.chdir(self.old_location)
+ sys.argv = self.old_sys_argv[0]
+ sys.argv[:] = self.old_sys_argv[1]
+ super(BuildDumbTestCase, self).tearDown()
+
+ @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
+ def test_simple_built(self):
+
+ # let's create a simple package
+ tmp_dir = self.mkdtemp()
+ pkg_dir = os.path.join(tmp_dir, 'foo')
+ os.mkdir(pkg_dir)
+ self.write_file((pkg_dir, 'setup.py'), SETUP_PY)
+ self.write_file((pkg_dir, 'foo.py'), '#')
+ self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py')
+ self.write_file((pkg_dir, 'README'), '')
+
+ dist = Distribution({'name': 'foo', 'version': '0.1',
+ 'py_modules': ['foo'],
+ 'url': 'xxx', 'author': 'xxx',
+ 'author_email': 'xxx'})
+ dist.script_name = 'setup.py'
+ os.chdir(pkg_dir)
+
+ sys.argv = ['setup.py']
+ cmd = bdist_dumb(dist)
+
+ # so the output is the same no matter
+ # what the platform is
+ cmd.format = 'zip'
+
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # see what we have
+ dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
+ base = "%s.%s.zip" % (dist.get_fullname(), cmd.plat_name)
+
+ self.assertEqual(dist_created, [base])
+
+ # now let's check what we have in the zip file
+ fp = zipfile.ZipFile(os.path.join('dist', base))
+ try:
+ contents = fp.namelist()
+ finally:
+ fp.close()
+
+ contents = sorted(filter(None, map(os.path.basename, contents)))
+ wanted = ['foo-0.1-py%s.%s.egg-info' % sys.version_info[:2], 'foo.py']
+ if not sys.dont_write_bytecode:
+ wanted.append('foo.%s.pyc' % sys.implementation.cache_tag)
+ self.assertEqual(contents, sorted(wanted))
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(BuildDumbTestCase)
+
+if __name__ == '__main__':
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_bdist_msi.py b/setuptools/_distutils/tests/test_bdist_msi.py
new file mode 100644
index 0000000..b1831ef
--- /dev/null
+++ b/setuptools/_distutils/tests/test_bdist_msi.py
@@ -0,0 +1,28 @@
+"""Tests for distutils.command.bdist_msi."""
+import sys
+import unittest
+from test.support import run_unittest
+from distutils.tests import support
+
+from .py38compat import check_warnings
+
+
+@unittest.skipUnless(sys.platform == 'win32', 'these tests require Windows')
+class BDistMSITestCase(support.TempdirManager,
+ support.LoggingSilencer,
+ unittest.TestCase):
+
+ def test_minimal(self):
+ # minimal test XXX need more tests
+ from distutils.command.bdist_msi import bdist_msi
+ project_dir, dist = self.create_dist()
+ with check_warnings(("", DeprecationWarning)):
+ cmd = bdist_msi(dist)
+ cmd.ensure_finalized()
+
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(BDistMSITestCase)
+
+if __name__ == '__main__':
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_bdist_rpm.py b/setuptools/_distutils/tests/test_bdist_rpm.py
new file mode 100644
index 0000000..08a7cb4
--- /dev/null
+++ b/setuptools/_distutils/tests/test_bdist_rpm.py
@@ -0,0 +1,138 @@
+"""Tests for distutils.command.bdist_rpm."""
+
+import unittest
+import sys
+import os
+from test.support import run_unittest
+
+from distutils.core import Distribution
+from distutils.command.bdist_rpm import bdist_rpm
+from distutils.tests import support
+from distutils.spawn import find_executable
+
+from .py38compat import requires_zlib
+
+
+SETUP_PY = """\
+from distutils.core import setup
+import foo
+
+setup(name='foo', version='0.1', py_modules=['foo'],
+ url='xxx', author='xxx', author_email='xxx')
+
+"""
+
+class BuildRpmTestCase(support.TempdirManager,
+ support.EnvironGuard,
+ support.LoggingSilencer,
+ unittest.TestCase):
+
+ def setUp(self):
+ try:
+ sys.executable.encode("UTF-8")
+ except UnicodeEncodeError:
+ raise unittest.SkipTest("sys.executable is not encodable to UTF-8")
+
+ super(BuildRpmTestCase, self).setUp()
+ self.old_location = os.getcwd()
+ self.old_sys_argv = sys.argv, sys.argv[:]
+
+ def tearDown(self):
+ os.chdir(self.old_location)
+ sys.argv = self.old_sys_argv[0]
+ sys.argv[:] = self.old_sys_argv[1]
+ super(BuildRpmTestCase, self).tearDown()
+
+ # XXX I have not yet been able to make this test work without
+ # spurious stdout/stderr output under Mac OS X
+ @unittest.skipUnless(sys.platform.startswith('linux'),
+ 'spurious stdout/stderr output under Mac OS X')
+ @requires_zlib()
+ @unittest.skipIf(find_executable('rpm') is None,
+ 'the rpm command is not found')
+ @unittest.skipIf(find_executable('rpmbuild') is None,
+ 'the rpmbuild command is not found')
+ def test_quiet(self):
+ # let's create a package
+ tmp_dir = self.mkdtemp()
+ os.environ['HOME'] = tmp_dir # to confine dir '.rpmdb' creation
+ pkg_dir = os.path.join(tmp_dir, 'foo')
+ os.mkdir(pkg_dir)
+ self.write_file((pkg_dir, 'setup.py'), SETUP_PY)
+ self.write_file((pkg_dir, 'foo.py'), '#')
+ self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py')
+ self.write_file((pkg_dir, 'README'), '')
+
+ dist = Distribution({'name': 'foo', 'version': '0.1',
+ 'py_modules': ['foo'],
+ 'url': 'xxx', 'author': 'xxx',
+ 'author_email': 'xxx'})
+ dist.script_name = 'setup.py'
+ os.chdir(pkg_dir)
+
+ sys.argv = ['setup.py']
+ cmd = bdist_rpm(dist)
+ cmd.fix_python = True
+
+ # running in quiet mode
+ cmd.quiet = 1
+ cmd.ensure_finalized()
+ cmd.run()
+
+ dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
+ self.assertIn('foo-0.1-1.noarch.rpm', dist_created)
+
+ # bug #2945: upload ignores bdist_rpm files
+ self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm'), dist.dist_files)
+ self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm'), dist.dist_files)
+
+ # XXX I have not yet been able to make this test work without
+ # spurious stdout/stderr output under Mac OS X
+ @unittest.skipUnless(sys.platform.startswith('linux'),
+ 'spurious stdout/stderr output under Mac OS X')
+ @requires_zlib()
+ # http://bugs.python.org/issue1533164
+ @unittest.skipIf(find_executable('rpm') is None,
+ 'the rpm command is not found')
+ @unittest.skipIf(find_executable('rpmbuild') is None,
+ 'the rpmbuild command is not found')
+ def test_no_optimize_flag(self):
+ # let's create a package that breaks bdist_rpm
+ tmp_dir = self.mkdtemp()
+ os.environ['HOME'] = tmp_dir # to confine dir '.rpmdb' creation
+ pkg_dir = os.path.join(tmp_dir, 'foo')
+ os.mkdir(pkg_dir)
+ self.write_file((pkg_dir, 'setup.py'), SETUP_PY)
+ self.write_file((pkg_dir, 'foo.py'), '#')
+ self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py')
+ self.write_file((pkg_dir, 'README'), '')
+
+ dist = Distribution({'name': 'foo', 'version': '0.1',
+ 'py_modules': ['foo'],
+ 'url': 'xxx', 'author': 'xxx',
+ 'author_email': 'xxx'})
+ dist.script_name = 'setup.py'
+ os.chdir(pkg_dir)
+
+ sys.argv = ['setup.py']
+ cmd = bdist_rpm(dist)
+ cmd.fix_python = True
+
+ cmd.quiet = 1
+ cmd.ensure_finalized()
+ cmd.run()
+
+ dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
+ self.assertIn('foo-0.1-1.noarch.rpm', dist_created)
+
+ # bug #2945: upload ignores bdist_rpm files
+ self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm'), dist.dist_files)
+ self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm'), dist.dist_files)
+
+ os.remove(os.path.join(pkg_dir, 'dist', 'foo-0.1-1.noarch.rpm'))
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(BuildRpmTestCase)
+
+if __name__ == '__main__':
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_bdist_wininst.py b/setuptools/_distutils/tests/test_bdist_wininst.py
new file mode 100644
index 0000000..59f2516
--- /dev/null
+++ b/setuptools/_distutils/tests/test_bdist_wininst.py
@@ -0,0 +1,40 @@
+"""Tests for distutils.command.bdist_wininst."""
+import sys
+import platform
+import unittest
+from test.support import run_unittest
+
+from .py38compat import check_warnings
+
+from distutils.command.bdist_wininst import bdist_wininst
+from distutils.tests import support
+
+@unittest.skipIf(sys.platform == 'win32' and platform.machine() == 'ARM64',
+ 'bdist_wininst is not supported in this install')
+@unittest.skipIf(getattr(bdist_wininst, '_unsupported', False),
+ 'bdist_wininst is not supported in this install')
+class BuildWinInstTestCase(support.TempdirManager,
+ support.LoggingSilencer,
+ unittest.TestCase):
+
+ def test_get_exe_bytes(self):
+
+ # issue5731: command was broken on non-windows platforms
+ # this test makes sure it works now for every platform
+ # let's create a command
+ pkg_pth, dist = self.create_dist()
+ with check_warnings(("", DeprecationWarning)):
+ cmd = bdist_wininst(dist)
+ cmd.ensure_finalized()
+
+ # let's run the code that finds the right wininst*.exe file
+ # and make sure it finds it and returns its content
+ # no matter what platform we have
+ exe_file = cmd.get_exe_bytes()
+ self.assertGreater(len(exe_file), 10)
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(BuildWinInstTestCase)
+
+if __name__ == '__main__':
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_build.py b/setuptools/_distutils/tests/test_build.py
new file mode 100644
index 0000000..83a9e4f
--- /dev/null
+++ b/setuptools/_distutils/tests/test_build.py
@@ -0,0 +1,56 @@
+"""Tests for distutils.command.build."""
+import unittest
+import os
+import sys
+from test.support import run_unittest
+
+from distutils.command.build import build
+from distutils.tests import support
+from sysconfig import get_platform
+
+class BuildTestCase(support.TempdirManager,
+ support.LoggingSilencer,
+ unittest.TestCase):
+
+ def test_finalize_options(self):
+ pkg_dir, dist = self.create_dist()
+ cmd = build(dist)
+ cmd.finalize_options()
+
+ # if not specified, plat_name gets the current platform
+ self.assertEqual(cmd.plat_name, get_platform())
+
+ # build_purelib is build + lib
+ wanted = os.path.join(cmd.build_base, 'lib')
+ self.assertEqual(cmd.build_purelib, wanted)
+
+ # build_platlib is 'build/lib.platform-x.x[-pydebug]'
+ # examples:
+ # build/lib.macosx-10.3-i386-2.7
+ plat_spec = '.%s-%d.%d' % (cmd.plat_name, *sys.version_info[:2])
+ if hasattr(sys, 'gettotalrefcount'):
+ self.assertTrue(cmd.build_platlib.endswith('-pydebug'))
+ plat_spec += '-pydebug'
+ wanted = os.path.join(cmd.build_base, 'lib' + plat_spec)
+ self.assertEqual(cmd.build_platlib, wanted)
+
+ # by default, build_lib = build_purelib
+ self.assertEqual(cmd.build_lib, cmd.build_purelib)
+
+ # build_temp is build/temp.<plat>
+ wanted = os.path.join(cmd.build_base, 'temp' + plat_spec)
+ self.assertEqual(cmd.build_temp, wanted)
+
+ # build_scripts is build/scripts-x.x
+ wanted = os.path.join(cmd.build_base,
+ 'scripts-%d.%d' % sys.version_info[:2])
+ self.assertEqual(cmd.build_scripts, wanted)
+
+ # executable is os.path.normpath(sys.executable)
+ self.assertEqual(cmd.executable, os.path.normpath(sys.executable))
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(BuildTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_build_clib.py b/setuptools/_distutils/tests/test_build_clib.py
new file mode 100644
index 0000000..d50ead7
--- /dev/null
+++ b/setuptools/_distutils/tests/test_build_clib.py
@@ -0,0 +1,136 @@
+"""Tests for distutils.command.build_clib."""
+import unittest
+import os
+import sys
+
+from test.support import run_unittest
+
+from .py35compat import missing_compiler_executable
+
+from distutils.command.build_clib import build_clib
+from distutils.errors import DistutilsSetupError
+from distutils.tests import support
+
+class BuildCLibTestCase(support.TempdirManager,
+ support.LoggingSilencer,
+ unittest.TestCase):
+
+ def test_check_library_dist(self):
+ pkg_dir, dist = self.create_dist()
+ cmd = build_clib(dist)
+
+ # 'libraries' option must be a list
+ self.assertRaises(DistutilsSetupError, cmd.check_library_list, 'foo')
+
+ # each element of 'libraries' must be a 2-tuple
+ self.assertRaises(DistutilsSetupError, cmd.check_library_list,
+ ['foo1', 'foo2'])
+
+ # first element of each tuple in 'libraries'
+ # must be a string (the library name)
+ self.assertRaises(DistutilsSetupError, cmd.check_library_list,
+ [(1, 'foo1'), ('name', 'foo2')])
+
+ # library name may not contain directory separators
+ self.assertRaises(DistutilsSetupError, cmd.check_library_list,
+ [('name', 'foo1'),
+ ('another/name', 'foo2')])
+
+ # second element of each tuple must be a dictionary (build info)
+ self.assertRaises(DistutilsSetupError, cmd.check_library_list,
+ [('name', {}),
+ ('another', 'foo2')])
+
+ # those work
+ libs = [('name', {}), ('name', {'ok': 'good'})]
+ cmd.check_library_list(libs)
+
+ def test_get_source_files(self):
+ pkg_dir, dist = self.create_dist()
+ cmd = build_clib(dist)
+
+ # in the 'libraries' option, 'sources' must be present and must be
+ # a list of source filenames
+ cmd.libraries = [('name', {})]
+ self.assertRaises(DistutilsSetupError, cmd.get_source_files)
+
+ cmd.libraries = [('name', {'sources': 1})]
+ self.assertRaises(DistutilsSetupError, cmd.get_source_files)
+
+ cmd.libraries = [('name', {'sources': ['a', 'b']})]
+ self.assertEqual(cmd.get_source_files(), ['a', 'b'])
+
+ cmd.libraries = [('name', {'sources': ('a', 'b')})]
+ self.assertEqual(cmd.get_source_files(), ['a', 'b'])
+
+ cmd.libraries = [('name', {'sources': ('a', 'b')}),
+ ('name2', {'sources': ['c', 'd']})]
+ self.assertEqual(cmd.get_source_files(), ['a', 'b', 'c', 'd'])
+
+ def test_build_libraries(self):
+
+ pkg_dir, dist = self.create_dist()
+ cmd = build_clib(dist)
+ class FakeCompiler:
+ def compile(*args, **kw):
+ pass
+ create_static_lib = compile
+
+ cmd.compiler = FakeCompiler()
+
+ # build_libraries also does a bit of type checking
+ lib = [('name', {'sources': 'notvalid'})]
+ self.assertRaises(DistutilsSetupError, cmd.build_libraries, lib)
+
+ lib = [('name', {'sources': list()})]
+ cmd.build_libraries(lib)
+
+ lib = [('name', {'sources': tuple()})]
+ cmd.build_libraries(lib)
+
+ def test_finalize_options(self):
+ pkg_dir, dist = self.create_dist()
+ cmd = build_clib(dist)
+
+ cmd.include_dirs = 'one-dir'
+ cmd.finalize_options()
+ self.assertEqual(cmd.include_dirs, ['one-dir'])
+
+ cmd.include_dirs = None
+ cmd.finalize_options()
+ self.assertEqual(cmd.include_dirs, [])
+
+ cmd.distribution.libraries = 'WONTWORK'
+ self.assertRaises(DistutilsSetupError, cmd.finalize_options)
+
+ @unittest.skipIf(sys.platform == 'win32', "can't test on Windows")
+ def test_run(self):
+ pkg_dir, dist = self.create_dist()
+ cmd = build_clib(dist)
+
+ foo_c = os.path.join(pkg_dir, 'foo.c')
+ self.write_file(foo_c, 'int main(void) { return 1;}\n')
+ cmd.libraries = [('foo', {'sources': [foo_c]})]
+
+ build_temp = os.path.join(pkg_dir, 'build')
+ os.mkdir(build_temp)
+ cmd.build_temp = build_temp
+ cmd.build_clib = build_temp
+
+ # Before we run the command, we want to make sure
+ # all commands are present on the system.
+ ccmd = missing_compiler_executable()
+ if ccmd is not None:
+ self.skipTest('The %r command is not found' % ccmd)
+
+ # this should work
+ cmd.run()
+
+ # let's check the result
+ self.assertIn('libfoo.a', os.listdir(build_temp))
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(BuildCLibTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_build_ext.py b/setuptools/_distutils/tests/test_build_ext.py
new file mode 100644
index 0000000..920e4dc
--- /dev/null
+++ b/setuptools/_distutils/tests/test_build_ext.py
@@ -0,0 +1,550 @@
+import sys
+import os
+from io import StringIO
+import textwrap
+
+from distutils.core import Distribution
+from distutils.command.build_ext import build_ext
+from distutils import sysconfig
+from distutils.tests.support import (TempdirManager, LoggingSilencer,
+ copy_xxmodule_c, fixup_build_ext)
+from distutils.extension import Extension
+from distutils.errors import (
+ CompileError, DistutilsPlatformError, DistutilsSetupError,
+ UnknownFileError)
+
+import unittest
+from test import support
+from . import py38compat as os_helper
+from test.support.script_helper import assert_python_ok
+
+# http://bugs.python.org/issue4373
+# Don't load the xx module more than once.
+ALREADY_TESTED = False
+
+
+class BuildExtTestCase(TempdirManager,
+ LoggingSilencer,
+ unittest.TestCase):
+ def setUp(self):
+ # Create a simple test environment
+ super(BuildExtTestCase, self).setUp()
+ self.tmp_dir = self.mkdtemp()
+ import site
+ self.old_user_base = site.USER_BASE
+ site.USER_BASE = self.mkdtemp()
+ from distutils.command import build_ext
+ build_ext.USER_BASE = site.USER_BASE
+
+ # bpo-30132: On Windows, a .pdb file may be created in the current
+ # working directory. Create a temporary working directory to cleanup
+ # everything at the end of the test.
+ change_cwd = os_helper.change_cwd(self.tmp_dir)
+ change_cwd.__enter__()
+ self.addCleanup(change_cwd.__exit__, None, None, None)
+
+ def tearDown(self):
+ import site
+ site.USER_BASE = self.old_user_base
+ from distutils.command import build_ext
+ build_ext.USER_BASE = self.old_user_base
+ super(BuildExtTestCase, self).tearDown()
+
+ def build_ext(self, *args, **kwargs):
+ return build_ext(*args, **kwargs)
+
+ def test_build_ext(self):
+ cmd = support.missing_compiler_executable()
+ if cmd is not None:
+ self.skipTest('The %r command is not found' % cmd)
+ global ALREADY_TESTED
+ copy_xxmodule_c(self.tmp_dir)
+ xx_c = os.path.join(self.tmp_dir, 'xxmodule.c')
+ xx_ext = Extension('xx', [xx_c])
+ dist = Distribution({'name': 'xx', 'ext_modules': [xx_ext]})
+ dist.package_dir = self.tmp_dir
+ cmd = self.build_ext(dist)
+ fixup_build_ext(cmd)
+ cmd.build_lib = self.tmp_dir
+ cmd.build_temp = self.tmp_dir
+
+ old_stdout = sys.stdout
+ if not support.verbose:
+ # silence compiler output
+ sys.stdout = StringIO()
+ try:
+ cmd.ensure_finalized()
+ cmd.run()
+ finally:
+ sys.stdout = old_stdout
+
+ if ALREADY_TESTED:
+ self.skipTest('Already tested in %s' % ALREADY_TESTED)
+ else:
+ ALREADY_TESTED = type(self).__name__
+
+ code = textwrap.dedent("""
+ tmp_dir = {self.tmp_dir!r}
+
+ import sys
+ import unittest
+ from test import support
+
+ sys.path.insert(0, tmp_dir)
+ import xx
+
+ class Tests(unittest.TestCase):
+ def test_xx(self):
+ for attr in ('error', 'foo', 'new', 'roj'):
+ self.assertTrue(hasattr(xx, attr))
+
+ self.assertEqual(xx.foo(2, 5), 7)
+ self.assertEqual(xx.foo(13,15), 28)
+ self.assertEqual(xx.new().demo(), None)
+ if support.HAVE_DOCSTRINGS:
+ doc = 'This is a template module just for instruction.'
+ self.assertEqual(xx.__doc__, doc)
+ self.assertIsInstance(xx.Null(), xx.Null)
+ self.assertIsInstance(xx.Str(), xx.Str)
+
+
+ unittest.main()
+ """.format(**locals()))
+ assert_python_ok('-c', code)
+
+ def test_solaris_enable_shared(self):
+ dist = Distribution({'name': 'xx'})
+ cmd = self.build_ext(dist)
+ old = sys.platform
+
+ sys.platform = 'sunos' # fooling finalize_options
+ from distutils.sysconfig import _config_vars
+ old_var = _config_vars.get('Py_ENABLE_SHARED')
+ _config_vars['Py_ENABLE_SHARED'] = 1
+ try:
+ cmd.ensure_finalized()
+ finally:
+ sys.platform = old
+ if old_var is None:
+ del _config_vars['Py_ENABLE_SHARED']
+ else:
+ _config_vars['Py_ENABLE_SHARED'] = old_var
+
+ # make sure we get some library dirs under solaris
+ self.assertGreater(len(cmd.library_dirs), 0)
+
+ def test_user_site(self):
+ import site
+ dist = Distribution({'name': 'xx'})
+ cmd = self.build_ext(dist)
+
+ # making sure the user option is there
+ options = [name for name, short, label in
+ cmd.user_options]
+ self.assertIn('user', options)
+
+ # setting a value
+ cmd.user = 1
+
+ # setting user based lib and include
+ lib = os.path.join(site.USER_BASE, 'lib')
+ incl = os.path.join(site.USER_BASE, 'include')
+ os.mkdir(lib)
+ os.mkdir(incl)
+
+ # let's run finalize
+ cmd.ensure_finalized()
+
+ # see if include_dirs and library_dirs
+ # were set
+ self.assertIn(lib, cmd.library_dirs)
+ self.assertIn(lib, cmd.rpath)
+ self.assertIn(incl, cmd.include_dirs)
+
+ def test_optional_extension(self):
+
+ # this extension will fail, but let's ignore this failure
+ # with the optional argument.
+ modules = [Extension('foo', ['xxx'], optional=False)]
+ dist = Distribution({'name': 'xx', 'ext_modules': modules})
+ cmd = self.build_ext(dist)
+ cmd.ensure_finalized()
+ self.assertRaises((UnknownFileError, CompileError),
+ cmd.run) # should raise an error
+
+ modules = [Extension('foo', ['xxx'], optional=True)]
+ dist = Distribution({'name': 'xx', 'ext_modules': modules})
+ cmd = self.build_ext(dist)
+ cmd.ensure_finalized()
+ cmd.run() # should pass
+
+ def test_finalize_options(self):
+ # Make sure Python's include directories (for Python.h, pyconfig.h,
+ # etc.) are in the include search path.
+ modules = [Extension('foo', ['xxx'], optional=False)]
+ dist = Distribution({'name': 'xx', 'ext_modules': modules})
+ cmd = self.build_ext(dist)
+ cmd.finalize_options()
+
+ py_include = sysconfig.get_python_inc()
+ for p in py_include.split(os.path.pathsep):
+ self.assertIn(p, cmd.include_dirs)
+
+ plat_py_include = sysconfig.get_python_inc(plat_specific=1)
+ for p in plat_py_include.split(os.path.pathsep):
+ self.assertIn(p, cmd.include_dirs)
+
+ # make sure cmd.libraries is turned into a list
+ # if it's a string
+ cmd = self.build_ext(dist)
+ cmd.libraries = 'my_lib, other_lib lastlib'
+ cmd.finalize_options()
+ self.assertEqual(cmd.libraries, ['my_lib', 'other_lib', 'lastlib'])
+
+ # make sure cmd.library_dirs is turned into a list
+ # if it's a string
+ cmd = self.build_ext(dist)
+ cmd.library_dirs = 'my_lib_dir%sother_lib_dir' % os.pathsep
+ cmd.finalize_options()
+ self.assertIn('my_lib_dir', cmd.library_dirs)
+ self.assertIn('other_lib_dir', cmd.library_dirs)
+
+ # make sure rpath is turned into a list
+ # if it's a string
+ cmd = self.build_ext(dist)
+ cmd.rpath = 'one%stwo' % os.pathsep
+ cmd.finalize_options()
+ self.assertEqual(cmd.rpath, ['one', 'two'])
+
+ # make sure cmd.link_objects is turned into a list
+ # if it's a string
+ cmd = build_ext(dist)
+ cmd.link_objects = 'one two,three'
+ cmd.finalize_options()
+ self.assertEqual(cmd.link_objects, ['one', 'two', 'three'])
+
+ # XXX more tests to perform for win32
+
+ # make sure define is turned into a list of
+ # 2-tuples if it's a ','-separated string
+ cmd = self.build_ext(dist)
+ cmd.define = 'one,two'
+ cmd.finalize_options()
+ self.assertEqual(cmd.define, [('one', '1'), ('two', '1')])
+
+ # make sure undef is turned into a list of
+ # strings if it's a ','-separated string
+ cmd = self.build_ext(dist)
+ cmd.undef = 'one,two'
+ cmd.finalize_options()
+ self.assertEqual(cmd.undef, ['one', 'two'])
+
+ # make sure swig_opts is turned into a list
+ cmd = self.build_ext(dist)
+ cmd.swig_opts = None
+ cmd.finalize_options()
+ self.assertEqual(cmd.swig_opts, [])
+
+ cmd = self.build_ext(dist)
+ cmd.swig_opts = '1 2'
+ cmd.finalize_options()
+ self.assertEqual(cmd.swig_opts, ['1', '2'])
+
+ def test_check_extensions_list(self):
+ dist = Distribution()
+ cmd = self.build_ext(dist)
+ cmd.finalize_options()
+
+ #'extensions' option must be a list of Extension instances
+ self.assertRaises(DistutilsSetupError,
+ cmd.check_extensions_list, 'foo')
+
+ # each element of 'ext_modules' option must be an
+ # Extension instance or 2-tuple
+ exts = [('bar', 'foo', 'bar'), 'foo']
+ self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts)
+
+ # first element of each tuple in 'ext_modules'
+ # must be the extension name (a string) and match
+ # a valid Python dotted name
+ exts = [('foo-bar', '')]
+ self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts)
+
+ # second element of each tuple in 'ext_modules'
+ # must be a dictionary (build info)
+ exts = [('foo.bar', '')]
+ self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts)
+
+ # ok this one should pass
+ exts = [('foo.bar', {'sources': [''], 'libraries': 'foo',
+ 'some': 'bar'})]
+ cmd.check_extensions_list(exts)
+ ext = exts[0]
+ self.assertIsInstance(ext, Extension)
+
+ # check_extensions_list copies the values from the build info
+ # dict onto ext when the keys are in ('include_dirs', 'library_dirs',
+ # 'libraries', 'extra_objects', 'extra_compile_args', 'extra_link_args')
+ self.assertEqual(ext.libraries, 'foo')
+ self.assertFalse(hasattr(ext, 'some'))
+
+ # 'macros' element of build info dict must be 1- or 2-tuple
+ exts = [('foo.bar', {'sources': [''], 'libraries': 'foo',
+ 'some': 'bar', 'macros': [('1', '2', '3'), 'foo']})]
+ self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts)
+
+ exts[0][1]['macros'] = [('1', '2'), ('3',)]
+ cmd.check_extensions_list(exts)
+ self.assertEqual(exts[0].undef_macros, ['3'])
+ self.assertEqual(exts[0].define_macros, [('1', '2')])
+
+ def test_get_source_files(self):
+ modules = [Extension('foo', ['xxx'], optional=False)]
+ dist = Distribution({'name': 'xx', 'ext_modules': modules})
+ cmd = self.build_ext(dist)
+ cmd.ensure_finalized()
+ self.assertEqual(cmd.get_source_files(), ['xxx'])
+
+ def test_unicode_module_names(self):
+ modules = [
+ Extension('foo', ['aaa'], optional=False),
+ Extension('föö', ['uuu'], optional=False),
+ ]
+ dist = Distribution({'name': 'xx', 'ext_modules': modules})
+ cmd = self.build_ext(dist)
+ cmd.ensure_finalized()
+ self.assertRegex(cmd.get_ext_filename(modules[0].name), r'foo(_d)?\..*')
+ self.assertRegex(cmd.get_ext_filename(modules[1].name), r'föö(_d)?\..*')
+ self.assertEqual(cmd.get_export_symbols(modules[0]), ['PyInit_foo'])
+ self.assertEqual(cmd.get_export_symbols(modules[1]), ['PyInitU_f_1gaa'])
+
+ def test_compiler_option(self):
+ # cmd.compiler is an option and
+ # should not be overridden by a compiler instance
+ # when the command is run
+ dist = Distribution()
+ cmd = self.build_ext(dist)
+ cmd.compiler = 'unix'
+ cmd.ensure_finalized()
+ cmd.run()
+ self.assertEqual(cmd.compiler, 'unix')
+
+ def test_get_outputs(self):
+ cmd = support.missing_compiler_executable()
+ if cmd is not None:
+ self.skipTest('The %r command is not found' % cmd)
+ tmp_dir = self.mkdtemp()
+ c_file = os.path.join(tmp_dir, 'foo.c')
+ self.write_file(c_file, 'void PyInit_foo(void) {}\n')
+ ext = Extension('foo', [c_file], optional=False)
+ dist = Distribution({'name': 'xx',
+ 'ext_modules': [ext]})
+ cmd = self.build_ext(dist)
+ fixup_build_ext(cmd)
+ cmd.ensure_finalized()
+ self.assertEqual(len(cmd.get_outputs()), 1)
+
+ cmd.build_lib = os.path.join(self.tmp_dir, 'build')
+ cmd.build_temp = os.path.join(self.tmp_dir, 'tempt')
+
+ # issue #5977 : distutils build_ext.get_outputs
+ # returns wrong result with --inplace
+ other_tmp_dir = os.path.realpath(self.mkdtemp())
+ old_wd = os.getcwd()
+ os.chdir(other_tmp_dir)
+ try:
+ cmd.inplace = 1
+ cmd.run()
+ so_file = cmd.get_outputs()[0]
+ finally:
+ os.chdir(old_wd)
+ self.assertTrue(os.path.exists(so_file))
+ ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
+ self.assertTrue(so_file.endswith(ext_suffix))
+ so_dir = os.path.dirname(so_file)
+ self.assertEqual(so_dir, other_tmp_dir)
+
+ cmd.inplace = 0
+ cmd.compiler = None
+ cmd.run()
+ so_file = cmd.get_outputs()[0]
+ self.assertTrue(os.path.exists(so_file))
+ self.assertTrue(so_file.endswith(ext_suffix))
+ so_dir = os.path.dirname(so_file)
+ self.assertEqual(so_dir, cmd.build_lib)
+
+ # inplace = 0, cmd.package = 'bar'
+ build_py = cmd.get_finalized_command('build_py')
+ build_py.package_dir = {'': 'bar'}
+ path = cmd.get_ext_fullpath('foo')
+ # checking that the last directory is the build_dir
+ path = os.path.split(path)[0]
+ self.assertEqual(path, cmd.build_lib)
+
+ # inplace = 1, cmd.package = 'bar'
+ cmd.inplace = 1
+ other_tmp_dir = os.path.realpath(self.mkdtemp())
+ old_wd = os.getcwd()
+ os.chdir(other_tmp_dir)
+ try:
+ path = cmd.get_ext_fullpath('foo')
+ finally:
+ os.chdir(old_wd)
+ # checking that the last directory is bar
+ path = os.path.split(path)[0]
+ lastdir = os.path.split(path)[-1]
+ self.assertEqual(lastdir, 'bar')
+
+ def test_ext_fullpath(self):
+ ext = sysconfig.get_config_var('EXT_SUFFIX')
+ # building lxml.etree inplace
+ #etree_c = os.path.join(self.tmp_dir, 'lxml.etree.c')
+ #etree_ext = Extension('lxml.etree', [etree_c])
+ #dist = Distribution({'name': 'lxml', 'ext_modules': [etree_ext]})
+ dist = Distribution()
+ cmd = self.build_ext(dist)
+ cmd.inplace = 1
+ cmd.distribution.package_dir = {'': 'src'}
+ cmd.distribution.packages = ['lxml', 'lxml.html']
+ curdir = os.getcwd()
+ wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext)
+ path = cmd.get_ext_fullpath('lxml.etree')
+ self.assertEqual(wanted, path)
+
+ # building lxml.etree not inplace
+ cmd.inplace = 0
+ cmd.build_lib = os.path.join(curdir, 'tmpdir')
+ wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + ext)
+ path = cmd.get_ext_fullpath('lxml.etree')
+ self.assertEqual(wanted, path)
+
+ # building twisted.runner.portmap not inplace
+ build_py = cmd.get_finalized_command('build_py')
+ build_py.package_dir = {}
+ cmd.distribution.packages = ['twisted', 'twisted.runner.portmap']
+ path = cmd.get_ext_fullpath('twisted.runner.portmap')
+ wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner',
+ 'portmap' + ext)
+ self.assertEqual(wanted, path)
+
+ # building twisted.runner.portmap inplace
+ cmd.inplace = 1
+ path = cmd.get_ext_fullpath('twisted.runner.portmap')
+ wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext)
+ self.assertEqual(wanted, path)
+
+
+ @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX')
+ def test_deployment_target_default(self):
+ # Issue 9516: Test that, in the absence of the environment variable,
+ # an extension module is compiled with the same deployment target as
+ # the interpreter.
+ self._try_compile_deployment_target('==', None)
+
+ @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX')
+ def test_deployment_target_too_low(self):
+ # Issue 9516: Test that an extension module is not allowed to be
+ # compiled with a deployment target less than that of the interpreter.
+ self.assertRaises(DistutilsPlatformError,
+ self._try_compile_deployment_target, '>', '10.1')
+
+ @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX')
+ def test_deployment_target_higher_ok(self):
+ # Issue 9516: Test that an extension module can be compiled with a
+ # deployment target higher than that of the interpreter: the ext
+ # module may depend on some newer OS feature.
+ deptarget = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
+ if deptarget:
+ # increment the minor version number (i.e. 10.6 -> 10.7)
+ deptarget = [int(x) for x in deptarget.split('.')]
+ deptarget[-1] += 1
+ deptarget = '.'.join(str(i) for i in deptarget)
+ self._try_compile_deployment_target('<', deptarget)
+
+ def _try_compile_deployment_target(self, operator, target):
+ orig_environ = os.environ
+ os.environ = orig_environ.copy()
+ self.addCleanup(setattr, os, 'environ', orig_environ)
+
+ if target is None:
+ if os.environ.get('MACOSX_DEPLOYMENT_TARGET'):
+ del os.environ['MACOSX_DEPLOYMENT_TARGET']
+ else:
+ os.environ['MACOSX_DEPLOYMENT_TARGET'] = target
+
+ deptarget_c = os.path.join(self.tmp_dir, 'deptargetmodule.c')
+
+ with open(deptarget_c, 'w') as fp:
+ fp.write(textwrap.dedent('''\
+ #include <AvailabilityMacros.h>
+
+ int dummy;
+
+ #if TARGET %s MAC_OS_X_VERSION_MIN_REQUIRED
+ #else
+ #error "Unexpected target"
+ #endif
+
+ ''' % operator))
+
+ # get the deployment target that the interpreter was built with
+ target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
+ target = tuple(map(int, target.split('.')[0:2]))
+ # format the target value as defined in the Apple
+ # Availability Macros. We can't use the macro names since
+ # at least one value we test with will not exist yet.
+ if target[:2] < (10, 10):
+ # for 10.1 through 10.9.x -> "10n0"
+ target = '%02d%01d0' % target
+ else:
+ # for 10.10 and beyond -> "10nn00"
+ if len(target) >= 2:
+ target = '%02d%02d00' % target
+ else:
+ # 11 and later can have no minor version (11 instead of 11.0)
+ target = '%02d0000' % target
+ deptarget_ext = Extension(
+ 'deptarget',
+ [deptarget_c],
+ extra_compile_args=['-DTARGET=%s'%(target,)],
+ )
+ dist = Distribution({
+ 'name': 'deptarget',
+ 'ext_modules': [deptarget_ext]
+ })
+ dist.package_dir = self.tmp_dir
+ cmd = self.build_ext(dist)
+ cmd.build_lib = self.tmp_dir
+ cmd.build_temp = self.tmp_dir
+
+ try:
+ old_stdout = sys.stdout
+ if not support.verbose:
+ # silence compiler output
+ sys.stdout = StringIO()
+ try:
+ cmd.ensure_finalized()
+ cmd.run()
+ finally:
+ sys.stdout = old_stdout
+
+ except CompileError:
+ self.fail("Wrong deployment target during compilation")
+
+
+class ParallelBuildExtTestCase(BuildExtTestCase):
+
+ def build_ext(self, *args, **kwargs):
+ build_ext = super().build_ext(*args, **kwargs)
+ build_ext.parallel = True
+ return build_ext
+
+
+def test_suite():
+ suite = unittest.TestSuite()
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(BuildExtTestCase))
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(ParallelBuildExtTestCase))
+ return suite
+
+if __name__ == '__main__':
+ support.run_unittest(__name__)
diff --git a/setuptools/_distutils/tests/test_build_py.py b/setuptools/_distutils/tests/test_build_py.py
new file mode 100644
index 0000000..a590a48
--- /dev/null
+++ b/setuptools/_distutils/tests/test_build_py.py
@@ -0,0 +1,179 @@
+"""Tests for distutils.command.build_py."""
+
+import os
+import sys
+import unittest
+
+from distutils.command.build_py import build_py
+from distutils.core import Distribution
+from distutils.errors import DistutilsFileError
+
+from distutils.tests import support
+from test.support import run_unittest
+
+
+class BuildPyTestCase(support.TempdirManager,
+ support.LoggingSilencer,
+ unittest.TestCase):
+
+ def test_package_data(self):
+ sources = self.mkdtemp()
+ f = open(os.path.join(sources, "__init__.py"), "w")
+ try:
+ f.write("# Pretend this is a package.")
+ finally:
+ f.close()
+ f = open(os.path.join(sources, "README.txt"), "w")
+ try:
+ f.write("Info about this package")
+ finally:
+ f.close()
+
+ destination = self.mkdtemp()
+
+ dist = Distribution({"packages": ["pkg"],
+ "package_dir": {"pkg": sources}})
+ # script_name need not exist, it just needs to be initialized
+ dist.script_name = os.path.join(sources, "setup.py")
+ dist.command_obj["build"] = support.DummyCommand(
+ force=0,
+ build_lib=destination)
+ dist.packages = ["pkg"]
+ dist.package_data = {"pkg": ["README.txt"]}
+ dist.package_dir = {"pkg": sources}
+
+ cmd = build_py(dist)
+ cmd.compile = 1
+ cmd.ensure_finalized()
+ self.assertEqual(cmd.package_data, dist.package_data)
+
+ cmd.run()
+
+ # This makes sure the list of outputs includes byte-compiled
+ # files for Python modules but not for package data files
+ # (there shouldn't *be* byte-code files for those!).
+ self.assertEqual(len(cmd.get_outputs()), 3)
+ pkgdest = os.path.join(destination, "pkg")
+ files = os.listdir(pkgdest)
+ pycache_dir = os.path.join(pkgdest, "__pycache__")
+ self.assertIn("__init__.py", files)
+ self.assertIn("README.txt", files)
+ if sys.dont_write_bytecode:
+ self.assertFalse(os.path.exists(pycache_dir))
+ else:
+ pyc_files = os.listdir(pycache_dir)
+ self.assertIn("__init__.%s.pyc" % sys.implementation.cache_tag,
+ pyc_files)
+
+ def test_empty_package_dir(self):
+ # See bugs #1668596/#1720897
+ sources = self.mkdtemp()
+ open(os.path.join(sources, "__init__.py"), "w").close()
+
+ testdir = os.path.join(sources, "doc")
+ os.mkdir(testdir)
+ open(os.path.join(testdir, "testfile"), "w").close()
+
+ os.chdir(sources)
+ dist = Distribution({"packages": ["pkg"],
+ "package_dir": {"pkg": ""},
+ "package_data": {"pkg": ["doc/*"]}})
+ # script_name need not exist, it just needs to be initialized
+ dist.script_name = os.path.join(sources, "setup.py")
+ dist.script_args = ["build"]
+ dist.parse_command_line()
+
+ try:
+ dist.run_commands()
+ except DistutilsFileError:
+ self.fail("failed package_data test when package_dir is ''")
+
+ @unittest.skipIf(sys.dont_write_bytecode, 'byte-compile disabled')
+ def test_byte_compile(self):
+ project_dir, dist = self.create_dist(py_modules=['boiledeggs'])
+ os.chdir(project_dir)
+ self.write_file('boiledeggs.py', 'import antigravity')
+ cmd = build_py(dist)
+ cmd.compile = 1
+ cmd.build_lib = 'here'
+ cmd.finalize_options()
+ cmd.run()
+
+ found = os.listdir(cmd.build_lib)
+ self.assertEqual(sorted(found), ['__pycache__', 'boiledeggs.py'])
+ found = os.listdir(os.path.join(cmd.build_lib, '__pycache__'))
+ self.assertEqual(found,
+ ['boiledeggs.%s.pyc' % sys.implementation.cache_tag])
+
+ @unittest.skipIf(sys.dont_write_bytecode, 'byte-compile disabled')
+ def test_byte_compile_optimized(self):
+ project_dir, dist = self.create_dist(py_modules=['boiledeggs'])
+ os.chdir(project_dir)
+ self.write_file('boiledeggs.py', 'import antigravity')
+ cmd = build_py(dist)
+ cmd.compile = 0
+ cmd.optimize = 1
+ cmd.build_lib = 'here'
+ cmd.finalize_options()
+ cmd.run()
+
+ found = os.listdir(cmd.build_lib)
+ self.assertEqual(sorted(found), ['__pycache__', 'boiledeggs.py'])
+ found = os.listdir(os.path.join(cmd.build_lib, '__pycache__'))
+ expect = 'boiledeggs.{}.opt-1.pyc'.format(sys.implementation.cache_tag)
+ self.assertEqual(sorted(found), [expect])
+
+ def test_dir_in_package_data(self):
+ """
+ A directory in package_data should not be added to the filelist.
+ """
+ # See bug 19286
+ sources = self.mkdtemp()
+ pkg_dir = os.path.join(sources, "pkg")
+
+ os.mkdir(pkg_dir)
+ open(os.path.join(pkg_dir, "__init__.py"), "w").close()
+
+ docdir = os.path.join(pkg_dir, "doc")
+ os.mkdir(docdir)
+ open(os.path.join(docdir, "testfile"), "w").close()
+
+ # create the directory that could be incorrectly detected as a file
+ os.mkdir(os.path.join(docdir, 'otherdir'))
+
+ os.chdir(sources)
+ dist = Distribution({"packages": ["pkg"],
+ "package_data": {"pkg": ["doc/*"]}})
+ # script_name need not exist, it just needs to be initialized
+ dist.script_name = os.path.join(sources, "setup.py")
+ dist.script_args = ["build"]
+ dist.parse_command_line()
+
+ try:
+ dist.run_commands()
+ except DistutilsFileError:
+ self.fail("failed package_data when data dir includes a dir")
+
+ def test_dont_write_bytecode(self):
+ # makes sure byte_compile is not used
+ dist = self.create_dist()[1]
+ cmd = build_py(dist)
+ cmd.compile = 1
+ cmd.optimize = 1
+
+ old_dont_write_bytecode = sys.dont_write_bytecode
+ sys.dont_write_bytecode = True
+ try:
+ cmd.byte_compile([])
+ finally:
+ sys.dont_write_bytecode = old_dont_write_bytecode
+
+ self.assertIn('byte-compiling is disabled',
+ self.logs[0][1] % self.logs[0][2])
+
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(BuildPyTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_build_scripts.py b/setuptools/_distutils/tests/test_build_scripts.py
new file mode 100644
index 0000000..f299e51
--- /dev/null
+++ b/setuptools/_distutils/tests/test_build_scripts.py
@@ -0,0 +1,112 @@
+"""Tests for distutils.command.build_scripts."""
+
+import os
+import unittest
+
+from distutils.command.build_scripts import build_scripts
+from distutils.core import Distribution
+from distutils import sysconfig
+
+from distutils.tests import support
+from test.support import run_unittest
+
+
+class BuildScriptsTestCase(support.TempdirManager,
+ support.LoggingSilencer,
+ unittest.TestCase):
+
+ def test_default_settings(self):
+ cmd = self.get_build_scripts_cmd("/foo/bar", [])
+ self.assertFalse(cmd.force)
+ self.assertIsNone(cmd.build_dir)
+
+ cmd.finalize_options()
+
+ self.assertTrue(cmd.force)
+ self.assertEqual(cmd.build_dir, "/foo/bar")
+
+ def test_build(self):
+ source = self.mkdtemp()
+ target = self.mkdtemp()
+ expected = self.write_sample_scripts(source)
+
+ cmd = self.get_build_scripts_cmd(target,
+ [os.path.join(source, fn)
+ for fn in expected])
+ cmd.finalize_options()
+ cmd.run()
+
+ built = os.listdir(target)
+ for name in expected:
+ self.assertIn(name, built)
+
+ def get_build_scripts_cmd(self, target, scripts):
+ import sys
+ dist = Distribution()
+ dist.scripts = scripts
+ dist.command_obj["build"] = support.DummyCommand(
+ build_scripts=target,
+ force=1,
+ executable=sys.executable
+ )
+ return build_scripts(dist)
+
+ def write_sample_scripts(self, dir):
+ expected = []
+ expected.append("script1.py")
+ self.write_script(dir, "script1.py",
+ ("#! /usr/bin/env python2.3\n"
+ "# bogus script w/ Python sh-bang\n"
+ "pass\n"))
+ expected.append("script2.py")
+ self.write_script(dir, "script2.py",
+ ("#!/usr/bin/python\n"
+ "# bogus script w/ Python sh-bang\n"
+ "pass\n"))
+ expected.append("shell.sh")
+ self.write_script(dir, "shell.sh",
+ ("#!/bin/sh\n"
+ "# bogus shell script w/ sh-bang\n"
+ "exit 0\n"))
+ return expected
+
+ def write_script(self, dir, name, text):
+ f = open(os.path.join(dir, name), "w")
+ try:
+ f.write(text)
+ finally:
+ f.close()
+
+ def test_version_int(self):
+ source = self.mkdtemp()
+ target = self.mkdtemp()
+ expected = self.write_sample_scripts(source)
+
+
+ cmd = self.get_build_scripts_cmd(target,
+ [os.path.join(source, fn)
+ for fn in expected])
+ cmd.finalize_options()
+
+ # http://bugs.python.org/issue4524
+ #
+ # On linux-g++-32 with command line `./configure --enable-ipv6
+ # --with-suffix=3`, python compiles okay but the build scripts
+ # fail when writing the name of the executable
+ old = sysconfig.get_config_vars().get('VERSION')
+ sysconfig._config_vars['VERSION'] = 4
+ try:
+ cmd.run()
+ finally:
+ if old is not None:
+ sysconfig._config_vars['VERSION'] = old
+
+ built = os.listdir(target)
+ for name in expected:
+ self.assertIn(name, built)
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(BuildScriptsTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_check.py b/setuptools/_distutils/tests/test_check.py
new file mode 100644
index 0000000..91bcdce
--- /dev/null
+++ b/setuptools/_distutils/tests/test_check.py
@@ -0,0 +1,163 @@
+"""Tests for distutils.command.check."""
+import os
+import textwrap
+import unittest
+from test.support import run_unittest
+
+from distutils.command.check import check, HAS_DOCUTILS
+from distutils.tests import support
+from distutils.errors import DistutilsSetupError
+
+try:
+ import pygments
+except ImportError:
+ pygments = None
+
+
+HERE = os.path.dirname(__file__)
+
+
+class CheckTestCase(support.LoggingSilencer,
+ support.TempdirManager,
+ unittest.TestCase):
+
+ def _run(self, metadata=None, cwd=None, **options):
+ if metadata is None:
+ metadata = {}
+ if cwd is not None:
+ old_dir = os.getcwd()
+ os.chdir(cwd)
+ pkg_info, dist = self.create_dist(**metadata)
+ cmd = check(dist)
+ cmd.initialize_options()
+ for name, value in options.items():
+ setattr(cmd, name, value)
+ cmd.ensure_finalized()
+ cmd.run()
+ if cwd is not None:
+ os.chdir(old_dir)
+ return cmd
+
+ def test_check_metadata(self):
+ # let's run the command with no metadata at all;
+ # by default, check verifies the metadata, so
+ # we should get some warnings
+ cmd = self._run()
+ self.assertEqual(cmd._warnings, 2)
+
+ # now let's add the required fields
+ # and run it again, to make sure we don't get
+ # any warnings anymore
+ metadata = {'url': 'xxx', 'author': 'xxx',
+ 'author_email': 'xxx',
+ 'name': 'xxx', 'version': 'xxx'}
+ cmd = self._run(metadata)
+ self.assertEqual(cmd._warnings, 0)
+
+ # now with the strict mode, we should
+ # get an error if any metadata is missing
+ self.assertRaises(DistutilsSetupError, self._run, {}, **{'strict': 1})
+
+ # and of course, no error when all metadata are present
+ cmd = self._run(metadata, strict=1)
+ self.assertEqual(cmd._warnings, 0)
+
+ # now a test with non-ASCII characters
+ metadata = {'url': 'xxx', 'author': '\u00c9ric',
+ 'author_email': 'xxx', 'name': 'xxx',
+ 'version': 'xxx',
+ 'description': 'Something about esszet \u00df',
+ 'long_description': 'More things about esszet \u00df'}
+ cmd = self._run(metadata)
+ self.assertEqual(cmd._warnings, 0)
+
+ @unittest.skipUnless(HAS_DOCUTILS, "won't test without docutils")
+ def test_check_document(self):
+ pkg_info, dist = self.create_dist()
+ cmd = check(dist)
+
+ # let's see if it detects broken rest
+ broken_rest = 'title\n===\n\ntest'
+ msgs = cmd._check_rst_data(broken_rest)
+ self.assertEqual(len(msgs), 1)
+
+ # and non-broken rest
+ rest = 'title\n=====\n\ntest'
+ msgs = cmd._check_rst_data(rest)
+ self.assertEqual(len(msgs), 0)
+
+ @unittest.skipUnless(HAS_DOCUTILS, "won't test without docutils")
+ def test_check_restructuredtext(self):
+ # let's see if it detects broken rest in long_description
+ broken_rest = 'title\n===\n\ntest'
+ pkg_info, dist = self.create_dist(long_description=broken_rest)
+ cmd = check(dist)
+ cmd.check_restructuredtext()
+ self.assertEqual(cmd._warnings, 1)
+
+ # let's see if we have an error with strict=1
+ metadata = {'url': 'xxx', 'author': 'xxx',
+ 'author_email': 'xxx',
+ 'name': 'xxx', 'version': 'xxx',
+ 'long_description': broken_rest}
+ self.assertRaises(DistutilsSetupError, self._run, metadata,
+ **{'strict': 1, 'restructuredtext': 1})
+
+ # and non-broken rest, including a non-ASCII character to test #12114
+ metadata['long_description'] = 'title\n=====\n\ntest \u00df'
+ cmd = self._run(metadata, strict=1, restructuredtext=1)
+ self.assertEqual(cmd._warnings, 0)
+
+ # check that includes work to test #31292
+ metadata['long_description'] = 'title\n=====\n\n.. include:: includetest.rst'
+ cmd = self._run(metadata, cwd=HERE, strict=1, restructuredtext=1)
+ self.assertEqual(cmd._warnings, 0)
+
+ @unittest.skipUnless(HAS_DOCUTILS, "won't test without docutils")
+ def test_check_restructuredtext_with_syntax_highlight(self):
+ # Don't fail if there is a `code` or `code-block` directive
+
+ example_rst_docs = []
+ example_rst_docs.append(textwrap.dedent("""\
+ Here's some code:
+
+ .. code:: python
+
+ def foo():
+ pass
+ """))
+ example_rst_docs.append(textwrap.dedent("""\
+ Here's some code:
+
+ .. code-block:: python
+
+ def foo():
+ pass
+ """))
+
+ for rest_with_code in example_rst_docs:
+ pkg_info, dist = self.create_dist(long_description=rest_with_code)
+ cmd = check(dist)
+ cmd.check_restructuredtext()
+ msgs = cmd._check_rst_data(rest_with_code)
+ if pygments is not None:
+ self.assertEqual(len(msgs), 0)
+ else:
+ self.assertEqual(len(msgs), 1)
+ self.assertEqual(
+ str(msgs[0][1]),
+ 'Cannot analyze code. Pygments package not found.'
+ )
+
+ def test_check_all(self):
+
+ metadata = {'url': 'xxx', 'author': 'xxx'}
+ self.assertRaises(DistutilsSetupError, self._run,
+ {}, **{'strict': 1,
+ 'restructuredtext': 1})
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(CheckTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_clean.py b/setuptools/_distutils/tests/test_clean.py
new file mode 100644
index 0000000..9236749
--- /dev/null
+++ b/setuptools/_distutils/tests/test_clean.py
@@ -0,0 +1,49 @@
+"""Tests for distutils.command.clean."""
+import os
+import unittest
+
+from distutils.command.clean import clean
+from distutils.tests import support
+from test.support import run_unittest
+
+class cleanTestCase(support.TempdirManager,
+ support.LoggingSilencer,
+ unittest.TestCase):
+
+ def test_simple_run(self):
+ pkg_dir, dist = self.create_dist()
+ cmd = clean(dist)
+
+ # let's add some elements clean should remove
+ dirs = [(d, os.path.join(pkg_dir, d))
+ for d in ('build_temp', 'build_lib', 'bdist_base',
+ 'build_scripts', 'build_base')]
+
+ for name, path in dirs:
+ os.mkdir(path)
+ setattr(cmd, name, path)
+ if name == 'build_base':
+ continue
+ for f in ('one', 'two', 'three'):
+ self.write_file(os.path.join(path, f))
+
+ # let's run the command
+ cmd.all = 1
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # make sure the files were removed
+ for name, path in dirs:
+ self.assertFalse(os.path.exists(path),
+ '%s was not removed' % path)
+
+ # let's run the command again (should spit warnings but succeed)
+ cmd.all = 1
+ cmd.ensure_finalized()
+ cmd.run()
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(cleanTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_cmd.py b/setuptools/_distutils/tests/test_cmd.py
new file mode 100644
index 0000000..2319214
--- /dev/null
+++ b/setuptools/_distutils/tests/test_cmd.py
@@ -0,0 +1,126 @@
+"""Tests for distutils.cmd."""
+import unittest
+import os
+from test.support import captured_stdout, run_unittest
+
+from distutils.cmd import Command
+from distutils.dist import Distribution
+from distutils.errors import DistutilsOptionError
+from distutils import debug
+
+class MyCmd(Command):
+ def initialize_options(self):
+ pass
+
+class CommandTestCase(unittest.TestCase):
+
+ def setUp(self):
+ dist = Distribution()
+ self.cmd = MyCmd(dist)
+
+ def test_ensure_string_list(self):
+
+ cmd = self.cmd
+ cmd.not_string_list = ['one', 2, 'three']
+ cmd.yes_string_list = ['one', 'two', 'three']
+ cmd.not_string_list2 = object()
+ cmd.yes_string_list2 = 'ok'
+ cmd.ensure_string_list('yes_string_list')
+ cmd.ensure_string_list('yes_string_list2')
+
+ self.assertRaises(DistutilsOptionError,
+ cmd.ensure_string_list, 'not_string_list')
+
+ self.assertRaises(DistutilsOptionError,
+ cmd.ensure_string_list, 'not_string_list2')
+
+ cmd.option1 = 'ok,dok'
+ cmd.ensure_string_list('option1')
+ self.assertEqual(cmd.option1, ['ok', 'dok'])
+
+ cmd.option2 = ['xxx', 'www']
+ cmd.ensure_string_list('option2')
+
+ cmd.option3 = ['ok', 2]
+ self.assertRaises(DistutilsOptionError, cmd.ensure_string_list,
+ 'option3')
+
+
+ def test_make_file(self):
+
+ cmd = self.cmd
+
+ # making sure it raises when infiles is not a string or a list/tuple
+ self.assertRaises(TypeError, cmd.make_file,
+ infiles=1, outfile='', func='func', args=())
+
+ # making sure execute gets called properly
+ def _execute(func, args, exec_msg, level):
+ self.assertEqual(exec_msg, 'generating out from in')
+ cmd.force = True
+ cmd.execute = _execute
+ cmd.make_file(infiles='in', outfile='out', func='func', args=())
+
+ def test_dump_options(self):
+
+ msgs = []
+ def _announce(msg, level):
+ msgs.append(msg)
+ cmd = self.cmd
+ cmd.announce = _announce
+ cmd.option1 = 1
+ cmd.option2 = 1
+ cmd.user_options = [('option1', '', ''), ('option2', '', '')]
+ cmd.dump_options()
+
+ wanted = ["command options for 'MyCmd':", ' option1 = 1',
+ ' option2 = 1']
+ self.assertEqual(msgs, wanted)
+
+ def test_ensure_string(self):
+ cmd = self.cmd
+ cmd.option1 = 'ok'
+ cmd.ensure_string('option1')
+
+ cmd.option2 = None
+ cmd.ensure_string('option2', 'xxx')
+ self.assertTrue(hasattr(cmd, 'option2'))
+
+ cmd.option3 = 1
+ self.assertRaises(DistutilsOptionError, cmd.ensure_string, 'option3')
+
+ def test_ensure_filename(self):
+ cmd = self.cmd
+ cmd.option1 = __file__
+ cmd.ensure_filename('option1')
+ cmd.option2 = 'xxx'
+ self.assertRaises(DistutilsOptionError, cmd.ensure_filename, 'option2')
+
+ def test_ensure_dirname(self):
+ cmd = self.cmd
+ cmd.option1 = os.path.dirname(__file__) or os.curdir
+ cmd.ensure_dirname('option1')
+ cmd.option2 = 'xxx'
+ self.assertRaises(DistutilsOptionError, cmd.ensure_dirname, 'option2')
+
+ def test_debug_print(self):
+ cmd = self.cmd
+ with captured_stdout() as stdout:
+ cmd.debug_print('xxx')
+ stdout.seek(0)
+ self.assertEqual(stdout.read(), '')
+
+ debug.DEBUG = True
+ try:
+ with captured_stdout() as stdout:
+ cmd.debug_print('xxx')
+ stdout.seek(0)
+ self.assertEqual(stdout.read(), 'xxx\n')
+ finally:
+ debug.DEBUG = False
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(CommandTestCase)
+
+if __name__ == '__main__':
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_config.py b/setuptools/_distutils/tests/test_config.py
new file mode 100644
index 0000000..27bd9d4
--- /dev/null
+++ b/setuptools/_distutils/tests/test_config.py
@@ -0,0 +1,141 @@
+"""Tests for distutils.pypirc.pypirc."""
+import os
+import unittest
+
+from distutils.core import PyPIRCCommand
+from distutils.core import Distribution
+from distutils.log import set_threshold
+from distutils.log import WARN
+
+from distutils.tests import support
+from test.support import run_unittest
+
+PYPIRC = """\
+[distutils]
+
+index-servers =
+ server1
+ server2
+ server3
+
+[server1]
+username:me
+password:secret
+
+[server2]
+username:meagain
+password: secret
+realm:acme
+repository:http://another.pypi/
+
+[server3]
+username:cbiggles
+password:yh^%#rest-of-my-password
+"""
+
+PYPIRC_OLD = """\
+[server-login]
+username:tarek
+password:secret
+"""
+
+WANTED = """\
+[distutils]
+index-servers =
+ pypi
+
+[pypi]
+username:tarek
+password:xxx
+"""
+
+
+class BasePyPIRCCommandTestCase(support.TempdirManager,
+ support.LoggingSilencer,
+ support.EnvironGuard,
+ unittest.TestCase):
+
+ def setUp(self):
+ """Patches the environment."""
+ super(BasePyPIRCCommandTestCase, self).setUp()
+ self.tmp_dir = self.mkdtemp()
+ os.environ['HOME'] = self.tmp_dir
+ os.environ['USERPROFILE'] = self.tmp_dir
+ self.rc = os.path.join(self.tmp_dir, '.pypirc')
+ self.dist = Distribution()
+
+ class command(PyPIRCCommand):
+ def __init__(self, dist):
+ super().__init__(dist)
+ def initialize_options(self):
+ pass
+ finalize_options = initialize_options
+
+ self._cmd = command
+ self.old_threshold = set_threshold(WARN)
+
+ def tearDown(self):
+ """Removes the patch."""
+ set_threshold(self.old_threshold)
+ super(BasePyPIRCCommandTestCase, self).tearDown()
+
+
+class PyPIRCCommandTestCase(BasePyPIRCCommandTestCase):
+
+ def test_server_registration(self):
+ # This test makes sure PyPIRCCommand knows how to:
+ # 1. handle several sections in .pypirc
+ # 2. handle the old format
+
+ # new format
+ self.write_file(self.rc, PYPIRC)
+ cmd = self._cmd(self.dist)
+ config = cmd._read_pypirc()
+
+ config = list(sorted(config.items()))
+ waited = [('password', 'secret'), ('realm', 'pypi'),
+ ('repository', 'https://upload.pypi.org/legacy/'),
+ ('server', 'server1'), ('username', 'me')]
+ self.assertEqual(config, waited)
+
+ # old format
+ self.write_file(self.rc, PYPIRC_OLD)
+ config = cmd._read_pypirc()
+ config = list(sorted(config.items()))
+ waited = [('password', 'secret'), ('realm', 'pypi'),
+ ('repository', 'https://upload.pypi.org/legacy/'),
+ ('server', 'server-login'), ('username', 'tarek')]
+ self.assertEqual(config, waited)
+
+ def test_server_empty_registration(self):
+ cmd = self._cmd(self.dist)
+ rc = cmd._get_rc_file()
+ self.assertFalse(os.path.exists(rc))
+ cmd._store_pypirc('tarek', 'xxx')
+ self.assertTrue(os.path.exists(rc))
+ f = open(rc)
+ try:
+ content = f.read()
+ self.assertEqual(content, WANTED)
+ finally:
+ f.close()
+
+ def test_config_interpolation(self):
+ # using the % character in .pypirc should not raise an error (#20120)
+ self.write_file(self.rc, PYPIRC)
+ cmd = self._cmd(self.dist)
+ cmd.repository = 'server3'
+ config = cmd._read_pypirc()
+
+ config = list(sorted(config.items()))
+ waited = [('password', 'yh^%#rest-of-my-password'), ('realm', 'pypi'),
+ ('repository', 'https://upload.pypi.org/legacy/'),
+ ('server', 'server3'), ('username', 'cbiggles')]
+ self.assertEqual(config, waited)
+
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(PyPIRCCommandTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_config_cmd.py b/setuptools/_distutils/tests/test_config_cmd.py
new file mode 100644
index 0000000..2c84719
--- /dev/null
+++ b/setuptools/_distutils/tests/test_config_cmd.py
@@ -0,0 +1,98 @@
+"""Tests for distutils.command.config."""
+import unittest
+import os
+import sys
+from test.support import run_unittest
+
+from .py35compat import missing_compiler_executable
+
+from distutils.command.config import dump_file, config
+from distutils.tests import support
+from distutils import log
+
+class ConfigTestCase(support.LoggingSilencer,
+ support.TempdirManager,
+ unittest.TestCase):
+
+ def _info(self, msg, *args):
+ for line in msg.splitlines():
+ self._logs.append(line)
+
+ def setUp(self):
+ super(ConfigTestCase, self).setUp()
+ self._logs = []
+ self.old_log = log.info
+ log.info = self._info
+
+ def tearDown(self):
+ log.info = self.old_log
+ super(ConfigTestCase, self).tearDown()
+
+ def test_dump_file(self):
+ this_file = os.path.splitext(__file__)[0] + '.py'
+ f = open(this_file)
+ try:
+ numlines = len(f.readlines())
+ finally:
+ f.close()
+
+ dump_file(this_file, 'I am the header')
+ self.assertEqual(len(self._logs), numlines+1)
+
+ @unittest.skipIf(sys.platform == 'win32', "can't test on Windows")
+ def test_search_cpp(self):
+ cmd = missing_compiler_executable(['preprocessor'])
+ if cmd is not None:
+ self.skipTest('The %r command is not found' % cmd)
+ pkg_dir, dist = self.create_dist()
+ cmd = config(dist)
+ cmd._check_compiler()
+ compiler = cmd.compiler
+ if sys.platform[:3] == "aix" and "xlc" in compiler.preprocessor[0].lower():
+ self.skipTest('xlc: The -E option overrides the -P, -o, and -qsyntaxonly options')
+
+ # simple pattern searches
+ match = cmd.search_cpp(pattern='xxx', body='/* xxx */')
+ self.assertEqual(match, 0)
+
+ match = cmd.search_cpp(pattern='_configtest', body='/* xxx */')
+ self.assertEqual(match, 1)
+
+ def test_finalize_options(self):
+ # finalize_options does a bit of transformation
+ # on options
+ pkg_dir, dist = self.create_dist()
+ cmd = config(dist)
+ cmd.include_dirs = 'one%stwo' % os.pathsep
+ cmd.libraries = 'one'
+ cmd.library_dirs = 'three%sfour' % os.pathsep
+ cmd.ensure_finalized()
+
+ self.assertEqual(cmd.include_dirs, ['one', 'two'])
+ self.assertEqual(cmd.libraries, ['one'])
+ self.assertEqual(cmd.library_dirs, ['three', 'four'])
+
+ def test_clean(self):
+ # _clean removes files
+ tmp_dir = self.mkdtemp()
+ f1 = os.path.join(tmp_dir, 'one')
+ f2 = os.path.join(tmp_dir, 'two')
+
+ self.write_file(f1, 'xxx')
+ self.write_file(f2, 'xxx')
+
+ for f in (f1, f2):
+ self.assertTrue(os.path.exists(f))
+
+ pkg_dir, dist = self.create_dist()
+ cmd = config(dist)
+ cmd._clean(f1, f2)
+
+ for f in (f1, f2):
+ self.assertFalse(os.path.exists(f))
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(ConfigTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_core.py b/setuptools/_distutils/tests/test_core.py
new file mode 100644
index 0000000..7270d69
--- /dev/null
+++ b/setuptools/_distutils/tests/test_core.py
@@ -0,0 +1,165 @@
+"""Tests for distutils.core."""
+
+import io
+import distutils.core
+import os
+import shutil
+import sys
+from test.support import captured_stdout, run_unittest
+from . import py38compat as os_helper
+import unittest
+from distutils.tests import support
+from distutils import log
+from distutils.dist import Distribution
+
+# setup script that uses __file__
+setup_using___file__ = """\
+
+__file__
+
+from distutils.core import setup
+setup()
+"""
+
+setup_prints_cwd = """\
+
+import os
+print(os.getcwd())
+
+from distutils.core import setup
+setup()
+"""
+
+setup_does_nothing = """\
+from distutils.core import setup
+setup()
+"""
+
+
+setup_defines_subclass = """\
+from distutils.core import setup
+from distutils.command.install import install as _install
+
+class install(_install):
+ sub_commands = _install.sub_commands + ['cmd']
+
+setup(cmdclass={'install': install})
+"""
+
+setup_within_if_main = """\
+from distutils.core import setup
+
+def main():
+ return setup(name="setup_within_if_main")
+
+if __name__ == "__main__":
+ main()
+"""
+
+class CoreTestCase(support.EnvironGuard, unittest.TestCase):
+
+ def setUp(self):
+ super(CoreTestCase, self).setUp()
+ self.old_stdout = sys.stdout
+ self.cleanup_testfn()
+ self.old_argv = sys.argv, sys.argv[:]
+ self.addCleanup(log.set_threshold, log._global_log.threshold)
+
+ def tearDown(self):
+ sys.stdout = self.old_stdout
+ self.cleanup_testfn()
+ sys.argv = self.old_argv[0]
+ sys.argv[:] = self.old_argv[1]
+ super(CoreTestCase, self).tearDown()
+
+ def cleanup_testfn(self):
+ path = os_helper.TESTFN
+ if os.path.isfile(path):
+ os.remove(path)
+ elif os.path.isdir(path):
+ shutil.rmtree(path)
+
+ def write_setup(self, text, path=os_helper.TESTFN):
+ f = open(path, "w")
+ try:
+ f.write(text)
+ finally:
+ f.close()
+ return path
+
+ def test_run_setup_provides_file(self):
+ # Make sure the script can use __file__; if that's missing, the test
+ # setup.py script will raise NameError.
+ distutils.core.run_setup(
+ self.write_setup(setup_using___file__))
+
+ def test_run_setup_preserves_sys_argv(self):
+ # Make sure run_setup does not clobber sys.argv
+ argv_copy = sys.argv.copy()
+ distutils.core.run_setup(
+ self.write_setup(setup_does_nothing))
+ self.assertEqual(sys.argv, argv_copy)
+
+ def test_run_setup_defines_subclass(self):
+ # Make sure the script can use __file__; if that's missing, the test
+ # setup.py script will raise NameError.
+ dist = distutils.core.run_setup(
+ self.write_setup(setup_defines_subclass))
+ install = dist.get_command_obj('install')
+ self.assertIn('cmd', install.sub_commands)
+
+ def test_run_setup_uses_current_dir(self):
+ # This tests that the setup script is run with the current directory
+ # as its own current directory; this was temporarily broken by a
+ # previous patch when TESTFN did not use the current directory.
+ sys.stdout = io.StringIO()
+ cwd = os.getcwd()
+
+ # Create a directory and write the setup.py file there:
+ os.mkdir(os_helper.TESTFN)
+ setup_py = os.path.join(os_helper.TESTFN, "setup.py")
+ distutils.core.run_setup(
+ self.write_setup(setup_prints_cwd, path=setup_py))
+
+ output = sys.stdout.getvalue()
+ if output.endswith("\n"):
+ output = output[:-1]
+ self.assertEqual(cwd, output)
+
+ def test_run_setup_within_if_main(self):
+ dist = distutils.core.run_setup(
+ self.write_setup(setup_within_if_main), stop_after="config")
+ self.assertIsInstance(dist, Distribution)
+ self.assertEqual(dist.get_name(), "setup_within_if_main")
+
+ def test_run_commands(self):
+ sys.argv = ['setup.py', 'build']
+ dist = distutils.core.run_setup(
+ self.write_setup(setup_within_if_main), stop_after="commandline")
+ self.assertNotIn('build', dist.have_run)
+ distutils.core.run_commands(dist)
+ self.assertIn('build', dist.have_run)
+
+ def test_debug_mode(self):
+ # this covers the code called when DEBUG is set
+ sys.argv = ['setup.py', '--name']
+ with captured_stdout() as stdout:
+ distutils.core.setup(name='bar')
+ stdout.seek(0)
+ self.assertEqual(stdout.read(), 'bar\n')
+
+ distutils.core.DEBUG = True
+ try:
+ with captured_stdout() as stdout:
+ distutils.core.setup(name='bar')
+ finally:
+ distutils.core.DEBUG = False
+ stdout.seek(0)
+ wanted = "options (after parsing config files):\n"
+ self.assertEqual(stdout.readlines()[0], wanted)
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(CoreTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_cygwinccompiler.py b/setuptools/_distutils/tests/test_cygwinccompiler.py
new file mode 100644
index 0000000..8715a53
--- /dev/null
+++ b/setuptools/_distutils/tests/test_cygwinccompiler.py
@@ -0,0 +1,96 @@
+"""Tests for distutils.cygwinccompiler."""
+import unittest
+import sys
+import os
+from test.support import run_unittest
+
+from distutils.cygwinccompiler import (check_config_h,
+ CONFIG_H_OK, CONFIG_H_NOTOK,
+ CONFIG_H_UNCERTAIN,
+ get_msvcr)
+from distutils.tests import support
+
+
+class CygwinCCompilerTestCase(support.TempdirManager,
+ unittest.TestCase):
+
+ def setUp(self):
+ super(CygwinCCompilerTestCase, self).setUp()
+ self.version = sys.version
+ self.python_h = os.path.join(self.mkdtemp(), 'python.h')
+ from distutils import sysconfig
+ self.old_get_config_h_filename = sysconfig.get_config_h_filename
+ sysconfig.get_config_h_filename = self._get_config_h_filename
+
+ def tearDown(self):
+ sys.version = self.version
+ from distutils import sysconfig
+ sysconfig.get_config_h_filename = self.old_get_config_h_filename
+ super(CygwinCCompilerTestCase, self).tearDown()
+
+ def _get_config_h_filename(self):
+ return self.python_h
+
+ def test_check_config_h(self):
+
+ # check_config_h looks for "GCC" in sys.version first
+ # returns CONFIG_H_OK if found
+ sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) \n[GCC '
+ '4.0.1 (Apple Computer, Inc. build 5370)]')
+
+ self.assertEqual(check_config_h()[0], CONFIG_H_OK)
+
+ # then it tries to see if it can find "__GNUC__" in pyconfig.h
+ sys.version = 'something without the *CC word'
+
+ # if the file doesn't exist it returns CONFIG_H_UNCERTAIN
+ self.assertEqual(check_config_h()[0], CONFIG_H_UNCERTAIN)
+
+ # if it exists but does not contain __GNUC__, it returns CONFIG_H_NOTOK
+ self.write_file(self.python_h, 'xxx')
+ self.assertEqual(check_config_h()[0], CONFIG_H_NOTOK)
+
+ # and CONFIG_H_OK if __GNUC__ is found
+ self.write_file(self.python_h, 'xxx __GNUC__ xxx')
+ self.assertEqual(check_config_h()[0], CONFIG_H_OK)
+
+ def test_get_msvcr(self):
+
+ # none
+ sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) '
+ '\n[GCC 4.0.1 (Apple Computer, Inc. build 5370)]')
+ self.assertEqual(get_msvcr(), None)
+
+ # MSVC 7.0
+ sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
+ '[MSC v.1300 32 bits (Intel)]')
+ self.assertEqual(get_msvcr(), ['msvcr70'])
+
+ # MSVC 7.1
+ sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
+ '[MSC v.1310 32 bits (Intel)]')
+ self.assertEqual(get_msvcr(), ['msvcr71'])
+
+ # VS2005 / MSVC 8.0
+ sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
+ '[MSC v.1400 32 bits (Intel)]')
+ self.assertEqual(get_msvcr(), ['msvcr80'])
+
+ # VS2008 / MSVC 9.0
+ sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
+ '[MSC v.1500 32 bits (Intel)]')
+ self.assertEqual(get_msvcr(), ['msvcr90'])
+
+ sys.version = '3.10.0 (tags/v3.10.0:b494f59, Oct 4 2021, 18:46:30) [MSC v.1929 32 bit (Intel)]'
+ self.assertEqual(get_msvcr(), ['ucrt', 'vcruntime140'])
+
+ # unknown
+ sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
+ '[MSC v.2000 32 bits (Intel)]')
+ self.assertRaises(ValueError, get_msvcr)
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(CygwinCCompilerTestCase)
+
+if __name__ == '__main__':
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_dep_util.py b/setuptools/_distutils/tests/test_dep_util.py
new file mode 100644
index 0000000..0d52740
--- /dev/null
+++ b/setuptools/_distutils/tests/test_dep_util.py
@@ -0,0 +1,80 @@
+"""Tests for distutils.dep_util."""
+import unittest
+import os
+
+from distutils.dep_util import newer, newer_pairwise, newer_group
+from distutils.errors import DistutilsFileError
+from distutils.tests import support
+from test.support import run_unittest
+
+class DepUtilTestCase(support.TempdirManager, unittest.TestCase):
+
+ def test_newer(self):
+
+ tmpdir = self.mkdtemp()
+ new_file = os.path.join(tmpdir, 'new')
+ old_file = os.path.abspath(__file__)
+
+ # Raise DistutilsFileError if 'new_file' does not exist.
+ self.assertRaises(DistutilsFileError, newer, new_file, old_file)
+
+ # Return true if 'new_file' exists and is more recently modified than
+ # 'old_file', or if 'new_file' exists and 'old_file' doesn't.
+ self.write_file(new_file)
+ self.assertTrue(newer(new_file, 'I_dont_exist'))
+ self.assertTrue(newer(new_file, old_file))
+
+ # Return false if both exist and 'old_file' is the same age or younger
+ # than 'new_file'.
+ self.assertFalse(newer(old_file, new_file))
+
+ def test_newer_pairwise(self):
+ tmpdir = self.mkdtemp()
+ sources = os.path.join(tmpdir, 'sources')
+ targets = os.path.join(tmpdir, 'targets')
+ os.mkdir(sources)
+ os.mkdir(targets)
+ one = os.path.join(sources, 'one')
+ two = os.path.join(sources, 'two')
+ three = os.path.abspath(__file__) # I am the old file
+ four = os.path.join(targets, 'four')
+ self.write_file(one)
+ self.write_file(two)
+ self.write_file(four)
+
+ self.assertEqual(newer_pairwise([one, two], [three, four]),
+ ([one],[three]))
+
+ def test_newer_group(self):
+ tmpdir = self.mkdtemp()
+ sources = os.path.join(tmpdir, 'sources')
+ os.mkdir(sources)
+ one = os.path.join(sources, 'one')
+ two = os.path.join(sources, 'two')
+ three = os.path.join(sources, 'three')
+ old_file = os.path.abspath(__file__)
+
+ # return true if 'old_file' is out-of-date with respect to any file
+ # listed in 'sources'.
+ self.write_file(one)
+ self.write_file(two)
+ self.write_file(three)
+ self.assertTrue(newer_group([one, two, three], old_file))
+ self.assertFalse(newer_group([one, two, old_file], three))
+
+ # missing handling
+ os.remove(one)
+ self.assertRaises(OSError, newer_group, [one, two, old_file], three)
+
+ self.assertFalse(newer_group([one, two, old_file], three,
+ missing='ignore'))
+
+ self.assertTrue(newer_group([one, two, old_file], three,
+ missing='newer'))
+
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(DepUtilTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_dir_util.py b/setuptools/_distutils/tests/test_dir_util.py
new file mode 100644
index 0000000..1b1f3bb
--- /dev/null
+++ b/setuptools/_distutils/tests/test_dir_util.py
@@ -0,0 +1,139 @@
+"""Tests for distutils.dir_util."""
+import unittest
+import os
+import stat
+import sys
+from unittest.mock import patch
+
+from distutils import dir_util, errors
+from distutils.dir_util import (mkpath, remove_tree, create_tree, copy_tree,
+ ensure_relative)
+
+from distutils import log
+from distutils.tests import support
+from test.support import run_unittest
+
+
+class DirUtilTestCase(support.TempdirManager, unittest.TestCase):
+
+ def _log(self, msg, *args):
+ if len(args) > 0:
+ self._logs.append(msg % args)
+ else:
+ self._logs.append(msg)
+
+ def setUp(self):
+ super(DirUtilTestCase, self).setUp()
+ self._logs = []
+ tmp_dir = self.mkdtemp()
+ self.root_target = os.path.join(tmp_dir, 'deep')
+ self.target = os.path.join(self.root_target, 'here')
+ self.target2 = os.path.join(tmp_dir, 'deep2')
+ self.old_log = log.info
+ log.info = self._log
+
+ def tearDown(self):
+ log.info = self.old_log
+ super(DirUtilTestCase, self).tearDown()
+
+ def test_mkpath_remove_tree_verbosity(self):
+
+ mkpath(self.target, verbose=0)
+ wanted = []
+ self.assertEqual(self._logs, wanted)
+ remove_tree(self.root_target, verbose=0)
+
+ mkpath(self.target, verbose=1)
+ wanted = ['creating %s' % self.root_target,
+ 'creating %s' % self.target]
+ self.assertEqual(self._logs, wanted)
+ self._logs = []
+
+ remove_tree(self.root_target, verbose=1)
+ wanted = ["removing '%s' (and everything under it)" % self.root_target]
+ self.assertEqual(self._logs, wanted)
+
+ @unittest.skipIf(sys.platform.startswith('win'),
+ "This test is only appropriate for POSIX-like systems.")
+ def test_mkpath_with_custom_mode(self):
+ # Get and set the current umask value for testing mode bits.
+ umask = os.umask(0o002)
+ os.umask(umask)
+ mkpath(self.target, 0o700)
+ self.assertEqual(
+ stat.S_IMODE(os.stat(self.target).st_mode), 0o700 & ~umask)
+ mkpath(self.target2, 0o555)
+ self.assertEqual(
+ stat.S_IMODE(os.stat(self.target2).st_mode), 0o555 & ~umask)
+
+ def test_create_tree_verbosity(self):
+
+ create_tree(self.root_target, ['one', 'two', 'three'], verbose=0)
+ self.assertEqual(self._logs, [])
+ remove_tree(self.root_target, verbose=0)
+
+ wanted = ['creating %s' % self.root_target]
+ create_tree(self.root_target, ['one', 'two', 'three'], verbose=1)
+ self.assertEqual(self._logs, wanted)
+
+ remove_tree(self.root_target, verbose=0)
+
+ def test_copy_tree_verbosity(self):
+
+ mkpath(self.target, verbose=0)
+
+ copy_tree(self.target, self.target2, verbose=0)
+ self.assertEqual(self._logs, [])
+
+ remove_tree(self.root_target, verbose=0)
+
+ mkpath(self.target, verbose=0)
+ a_file = os.path.join(self.target, 'ok.txt')
+ with open(a_file, 'w') as f:
+ f.write('some content')
+
+ wanted = ['copying %s -> %s' % (a_file, self.target2)]
+ copy_tree(self.target, self.target2, verbose=1)
+ self.assertEqual(self._logs, wanted)
+
+ remove_tree(self.root_target, verbose=0)
+ remove_tree(self.target2, verbose=0)
+
+ def test_copy_tree_skips_nfs_temp_files(self):
+ mkpath(self.target, verbose=0)
+
+ a_file = os.path.join(self.target, 'ok.txt')
+ nfs_file = os.path.join(self.target, '.nfs123abc')
+ for f in a_file, nfs_file:
+ with open(f, 'w') as fh:
+ fh.write('some content')
+
+ copy_tree(self.target, self.target2)
+ self.assertEqual(os.listdir(self.target2), ['ok.txt'])
+
+ remove_tree(self.root_target, verbose=0)
+ remove_tree(self.target2, verbose=0)
+
+ def test_ensure_relative(self):
+ if os.sep == '/':
+ self.assertEqual(ensure_relative('/home/foo'), 'home/foo')
+ self.assertEqual(ensure_relative('some/path'), 'some/path')
+ else: # \\
+ self.assertEqual(ensure_relative('c:\\home\\foo'), 'c:home\\foo')
+ self.assertEqual(ensure_relative('home\\foo'), 'home\\foo')
+
+ def test_copy_tree_exception_in_listdir(self):
+ """
+ An exception in listdir should raise a DistutilsFileError
+ """
+ with patch("os.listdir", side_effect=OSError()), \
+ self.assertRaises(errors.DistutilsFileError):
+ src = self.tempdirs[-1]
+ dir_util.copy_tree(src, None)
+
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(DirUtilTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_dist.py b/setuptools/_distutils/tests/test_dist.py
new file mode 100644
index 0000000..36155be
--- /dev/null
+++ b/setuptools/_distutils/tests/test_dist.py
@@ -0,0 +1,533 @@
+"""Tests for distutils.dist."""
+import os
+import io
+import sys
+import unittest
+import warnings
+import textwrap
+
+from unittest import mock
+
+from distutils.dist import Distribution, fix_help_options
+from distutils.cmd import Command
+
+from test.support import (
+ captured_stdout, captured_stderr, run_unittest
+)
+from .py38compat import TESTFN
+from distutils.tests import support
+from distutils import log
+
+
+class test_dist(Command):
+ """Sample distutils extension command."""
+
+ user_options = [
+ ("sample-option=", "S", "help text"),
+ ]
+
+ def initialize_options(self):
+ self.sample_option = None
+
+
+class TestDistribution(Distribution):
+ """Distribution subclasses that avoids the default search for
+ configuration files.
+
+ The ._config_files attribute must be set before
+ .parse_config_files() is called.
+ """
+
+ def find_config_files(self):
+ return self._config_files
+
+
+class DistributionTestCase(support.LoggingSilencer,
+ support.TempdirManager,
+ support.EnvironGuard,
+ unittest.TestCase):
+
+ def setUp(self):
+ super(DistributionTestCase, self).setUp()
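+ # Save the original sys.argv list object and a copy of its contents so
+ # tearDown() can restore both the same list and its items in place.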
+ self.argv = sys.argv, sys.argv[:]
+ del sys.argv[1:]
+
+ def tearDown(self):
+ sys.argv = self.argv[0]
+ sys.argv[:] = self.argv[1]
+ super(DistributionTestCase, self).tearDown()
+
+ def create_distribution(self, configfiles=()):
+ d = TestDistribution()
+ d._config_files = configfiles
+ d.parse_config_files()
+ d.parse_command_line()
+ return d
+
+ def test_command_packages_unspecified(self):
+ sys.argv.append("build")
+ d = self.create_distribution()
+ self.assertEqual(d.get_command_packages(), ["distutils.command"])
+
+ def test_command_packages_cmdline(self):
+ from distutils.tests.test_dist import test_dist
+ sys.argv.extend(["--command-packages",
+ "foo.bar,distutils.tests",
+ "test_dist",
+ "-Ssometext",
+ ])
+ d = self.create_distribution()
+ # let's actually try to load our test command:
+ self.assertEqual(d.get_command_packages(),
+ ["distutils.command", "foo.bar", "distutils.tests"])
+ cmd = d.get_command_obj("test_dist")
+ self.assertIsInstance(cmd, test_dist)
+ self.assertEqual(cmd.sample_option, "sometext")
+
+ @unittest.skipIf(
+ 'distutils' not in Distribution.parse_config_files.__module__,
+ 'Cannot test when virtualenv has monkey-patched Distribution.',
+ )
+ def test_venv_install_options(self):
+ sys.argv.append("install")
+ self.addCleanup(os.unlink, TESTFN)
+
+ fakepath = '/somedir'
+
+ with open(TESTFN, "w") as f:
+ print(("[install]\n"
+ "install-base = {0}\n"
+ "install-platbase = {0}\n"
+ "install-lib = {0}\n"
+ "install-platlib = {0}\n"
+ "install-purelib = {0}\n"
+ "install-headers = {0}\n"
+ "install-scripts = {0}\n"
+ "install-data = {0}\n"
+ "prefix = {0}\n"
+ "exec-prefix = {0}\n"
+ "home = {0}\n"
+ "user = {0}\n"
+ "root = {0}").format(fakepath), file=f)
+
+ # Base case: Not in a Virtual Environment
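+ # (sys.prefix == sys.base_prefix, so the [install] options from the
+ # config file should be honored)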
+ with mock.patch.multiple(sys, prefix='/a', base_prefix='/a') as values:
+ d = self.create_distribution([TESTFN])
+
+ option_tuple = (TESTFN, fakepath)
+
+ result_dict = {
+ 'install_base': option_tuple,
+ 'install_platbase': option_tuple,
+ 'install_lib': option_tuple,
+ 'install_platlib': option_tuple,
+ 'install_purelib': option_tuple,
+ 'install_headers': option_tuple,
+ 'install_scripts': option_tuple,
+ 'install_data': option_tuple,
+ 'prefix': option_tuple,
+ 'exec_prefix': option_tuple,
+ 'home': option_tuple,
+ 'user': option_tuple,
+ 'root': option_tuple,
+ }
+
+ self.assertEqual(
+ sorted(d.command_options.get('install').keys()),
+ sorted(result_dict.keys()))
+
+ for (key, value) in d.command_options.get('install').items():
+ self.assertEqual(value, result_dict[key])
+
+ # Test case: In a Virtual Environment
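+ # (sys.prefix != sys.base_prefix simulates a venv, where these
+ # [install] options from the config file are ignored)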
+ with mock.patch.multiple(sys, prefix='/a', base_prefix='/b') as values:
+ d = self.create_distribution([TESTFN])
+
+ for key in result_dict.keys():
+ self.assertNotIn(key, d.command_options.get('install', {}))
+
+ def test_command_packages_configfile(self):
+ sys.argv.append("build")
+ self.addCleanup(os.unlink, TESTFN)
+ f = open(TESTFN, "w")
+ try:
+ print("[global]", file=f)
+ print("command_packages = foo.bar, splat", file=f)
+ finally:
+ f.close()
+
+ d = self.create_distribution([TESTFN])
+ self.assertEqual(d.get_command_packages(),
+ ["distutils.command", "foo.bar", "splat"])
+
+ # ensure command line overrides config:
+ sys.argv[1:] = ["--command-packages", "spork", "build"]
+ d = self.create_distribution([TESTFN])
+ self.assertEqual(d.get_command_packages(),
+ ["distutils.command", "spork"])
+
+ # Setting --command-packages to '' should cause the default to
+ # be used even if a config file specified something else:
+ sys.argv[1:] = ["--command-packages", "", "build"]
+ d = self.create_distribution([TESTFN])
+ self.assertEqual(d.get_command_packages(), ["distutils.command"])
+
+ def test_empty_options(self):
+ # an empty options dictionary should not stay in the
+ # list of attributes
+
+ # catching warnings
+ warns = []
+
+ def _warn(msg):
+ warns.append(msg)
+
+ self.addCleanup(setattr, warnings, 'warn', warnings.warn)
+ warnings.warn = _warn
+ dist = Distribution(attrs={'author': 'xxx', 'name': 'xxx',
+ 'version': 'xxx', 'url': 'xxxx',
+ 'options': {}})
+
+ self.assertEqual(len(warns), 0)
+ self.assertNotIn('options', dir(dist))
+
+ def test_finalize_options(self):
+ attrs = {'keywords': 'one,two',
+ 'platforms': 'one,two'}
+
+ dist = Distribution(attrs=attrs)
+ dist.finalize_options()
+
+ # finalize_options splits platforms and keywords
+ self.assertEqual(dist.metadata.platforms, ['one', 'two'])
+ self.assertEqual(dist.metadata.keywords, ['one', 'two'])
+
+ attrs = {'keywords': 'foo bar',
+ 'platforms': 'foo bar'}
+ dist = Distribution(attrs=attrs)
+ dist.finalize_options()
+ self.assertEqual(dist.metadata.platforms, ['foo bar'])
+ self.assertEqual(dist.metadata.keywords, ['foo bar'])
+
+ def test_get_command_packages(self):
+ dist = Distribution()
+ self.assertEqual(dist.command_packages, None)
+ cmds = dist.get_command_packages()
+ self.assertEqual(cmds, ['distutils.command'])
+ self.assertEqual(dist.command_packages,
+ ['distutils.command'])
+
+ dist.command_packages = 'one,two'
+ cmds = dist.get_command_packages()
+ self.assertEqual(cmds, ['distutils.command', 'one', 'two'])
+
+ def test_announce(self):
+ # make sure announce() rejects an unknown logging level with ValueError
+ dist = Distribution()
+ args = ('ok',)
+ kwargs = {'level': 'ok2'}
+ self.assertRaises(ValueError, dist.announce, args, kwargs)
+
+
+ def test_find_config_files_disable(self):
+ # Ticket #1180: Allow user to disable their home config file.
+ temp_home = self.mkdtemp()
+ if os.name == 'posix':
+ user_filename = os.path.join(temp_home, ".pydistutils.cfg")
+ else:
+ user_filename = os.path.join(temp_home, "pydistutils.cfg")
+
+ with open(user_filename, 'w') as f:
+ f.write('[distutils]\n')
+
+ def _expander(path):
+ return temp_home
+
+ old_expander = os.path.expanduser
+ os.path.expanduser = _expander
+ try:
+ d = Distribution()
+ all_files = d.find_config_files()
+
+ d = Distribution(attrs={'script_args': ['--no-user-cfg']})
+ files = d.find_config_files()
+ finally:
+ os.path.expanduser = old_expander
+
+ # make sure --no-user-cfg disables the user cfg file
+ self.assertEqual(len(all_files)-1, len(files))
+
+class MetadataTestCase(support.TempdirManager, support.EnvironGuard,
+ unittest.TestCase):
+
+ def setUp(self):
+ super(MetadataTestCase, self).setUp()
+ self.argv = sys.argv, sys.argv[:]
+
+ def tearDown(self):
+ sys.argv = self.argv[0]
+ sys.argv[:] = self.argv[1]
+ super(MetadataTestCase, self).tearDown()
+
+ def format_metadata(self, dist):
+ sio = io.StringIO()
+ dist.metadata.write_pkg_file(sio)
+ return sio.getvalue()
+
+ def test_simple_metadata(self):
+ attrs = {"name": "package",
+ "version": "1.0"}
+ dist = Distribution(attrs)
+ meta = self.format_metadata(dist)
+ self.assertIn("Metadata-Version: 1.0", meta)
+ self.assertNotIn("provides:", meta.lower())
+ self.assertNotIn("requires:", meta.lower())
+ self.assertNotIn("obsoletes:", meta.lower())
+
+ def test_provides(self):
+ attrs = {"name": "package",
+ "version": "1.0",
+ "provides": ["package", "package.sub"]}
+ dist = Distribution(attrs)
+ self.assertEqual(dist.metadata.get_provides(),
+ ["package", "package.sub"])
+ self.assertEqual(dist.get_provides(),
+ ["package", "package.sub"])
+ meta = self.format_metadata(dist)
+ self.assertIn("Metadata-Version: 1.1", meta)
+ self.assertNotIn("requires:", meta.lower())
+ self.assertNotIn("obsoletes:", meta.lower())
+
+ def test_provides_illegal(self):
+ self.assertRaises(ValueError, Distribution,
+ {"name": "package",
+ "version": "1.0",
+ "provides": ["my.pkg (splat)"]})
+
+ def test_requires(self):
+ attrs = {"name": "package",
+ "version": "1.0",
+ "requires": ["other", "another (==1.0)"]}
+ dist = Distribution(attrs)
+ self.assertEqual(dist.metadata.get_requires(),
+ ["other", "another (==1.0)"])
+ self.assertEqual(dist.get_requires(),
+ ["other", "another (==1.0)"])
+ meta = self.format_metadata(dist)
+ self.assertIn("Metadata-Version: 1.1", meta)
+ self.assertNotIn("provides:", meta.lower())
+ self.assertIn("Requires: other", meta)
+ self.assertIn("Requires: another (==1.0)", meta)
+ self.assertNotIn("obsoletes:", meta.lower())
+
+ def test_requires_illegal(self):
+ self.assertRaises(ValueError, Distribution,
+ {"name": "package",
+ "version": "1.0",
+ "requires": ["my.pkg (splat)"]})
+
+ def test_requires_to_list(self):
+ attrs = {"name": "package",
+ "requires": iter(["other"])}
+ dist = Distribution(attrs)
+ self.assertIsInstance(dist.metadata.requires, list)
+
+
+ def test_obsoletes(self):
+ attrs = {"name": "package",
+ "version": "1.0",
+ "obsoletes": ["other", "another (<1.0)"]}
+ dist = Distribution(attrs)
+ self.assertEqual(dist.metadata.get_obsoletes(),
+ ["other", "another (<1.0)"])
+ self.assertEqual(dist.get_obsoletes(),
+ ["other", "another (<1.0)"])
+ meta = self.format_metadata(dist)
+ self.assertIn("Metadata-Version: 1.1", meta)
+ self.assertNotIn("provides:", meta.lower())
+ self.assertNotIn("requires:", meta.lower())
+ self.assertIn("Obsoletes: other", meta)
+ self.assertIn("Obsoletes: another (<1.0)", meta)
+
+ def test_obsoletes_illegal(self):
+ self.assertRaises(ValueError, Distribution,
+ {"name": "package",
+ "version": "1.0",
+ "obsoletes": ["my.pkg (splat)"]})
+
+ def test_obsoletes_to_list(self):
+ attrs = {"name": "package",
+ "obsoletes": iter(["other"])}
+ dist = Distribution(attrs)
+ self.assertIsInstance(dist.metadata.obsoletes, list)
+
+ def test_classifier(self):
+ attrs = {'name': 'Boa', 'version': '3.0',
+ 'classifiers': ['Programming Language :: Python :: 3']}
+ dist = Distribution(attrs)
+ self.assertEqual(dist.get_classifiers(),
+ ['Programming Language :: Python :: 3'])
+ meta = self.format_metadata(dist)
+ self.assertIn('Metadata-Version: 1.1', meta)
+
+ def test_classifier_invalid_type(self):
+ attrs = {'name': 'Boa', 'version': '3.0',
+ 'classifiers': ('Programming Language :: Python :: 3',)}
+ with captured_stderr() as error:
+ d = Distribution(attrs)
+ # should have a warning about passing a non-list
+ self.assertIn('should be a list', error.getvalue())
+ # should be converted to a list
+ self.assertIsInstance(d.metadata.classifiers, list)
+ self.assertEqual(d.metadata.classifiers,
+ list(attrs['classifiers']))
+
+ def test_keywords(self):
+ attrs = {'name': 'Monty', 'version': '1.0',
+ 'keywords': ['spam', 'eggs', 'life of brian']}
+ dist = Distribution(attrs)
+ self.assertEqual(dist.get_keywords(),
+ ['spam', 'eggs', 'life of brian'])
+
+ def test_keywords_invalid_type(self):
+ attrs = {'name': 'Monty', 'version': '1.0',
+ 'keywords': ('spam', 'eggs', 'life of brian')}
+ with captured_stderr() as error:
+ d = Distribution(attrs)
+ # should have a warning about passing a non-list
+ self.assertIn('should be a list', error.getvalue())
+ # should be converted to a list
+ self.assertIsInstance(d.metadata.keywords, list)
+ self.assertEqual(d.metadata.keywords, list(attrs['keywords']))
+
+ def test_platforms(self):
+ attrs = {'name': 'Monty', 'version': '1.0',
+ 'platforms': ['GNU/Linux', 'Some Evil Platform']}
+ dist = Distribution(attrs)
+ self.assertEqual(dist.get_platforms(),
+ ['GNU/Linux', 'Some Evil Platform'])
+
+ def test_platforms_invalid_types(self):
+ attrs = {'name': 'Monty', 'version': '1.0',
+ 'platforms': ('GNU/Linux', 'Some Evil Platform')}
+ with captured_stderr() as error:
+ d = Distribution(attrs)
+ # should have a warning about passing a non-list
+ self.assertIn('should be a list', error.getvalue())
+ # should be converted to a list
+ self.assertIsInstance(d.metadata.platforms, list)
+ self.assertEqual(d.metadata.platforms, list(attrs['platforms']))
+
+ def test_download_url(self):
+ attrs = {'name': 'Boa', 'version': '3.0',
+ 'download_url': 'http://example.org/boa'}
+ dist = Distribution(attrs)
+ meta = self.format_metadata(dist)
+ self.assertIn('Metadata-Version: 1.1', meta)
+
+ def test_long_description(self):
+ long_desc = textwrap.dedent("""\
+ example::
+ We start here
+ and continue here
+ and end here.""")
+ attrs = {"name": "package",
+ "version": "1.0",
+ "long_description": long_desc}
+
+ dist = Distribution(attrs)
+ meta = self.format_metadata(dist)
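+ # write_pkg_file() indents continuation lines of the long description
+ # by 8 spaces; strip that indentation so assertIn can find the text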
+ meta = meta.replace('\n' + 8 * ' ', '\n')
+ self.assertIn(long_desc, meta)
+
+ def test_custom_pydistutils(self):
+ # fixes #2166
+ # make sure pydistutils.cfg is found
+ if os.name == 'posix':
+ user_filename = ".pydistutils.cfg"
+ else:
+ user_filename = "pydistutils.cfg"
+
+ temp_dir = self.mkdtemp()
+ user_filename = os.path.join(temp_dir, user_filename)
+ f = open(user_filename, 'w')
+ try:
+ f.write('.')
+ finally:
+ f.close()
+
+ try:
+ dist = Distribution()
+
+ # linux-style
+ if sys.platform in ('linux', 'darwin'):
+ os.environ['HOME'] = temp_dir
+ files = dist.find_config_files()
+ self.assertIn(user_filename, files)
+
+ # win32-style
+ if sys.platform == 'win32':
+ # home drive should be found
+ os.environ['USERPROFILE'] = temp_dir
+ files = dist.find_config_files()
+ self.assertIn(user_filename, files,
+ '%r not found in %r' % (user_filename, files))
+ finally:
+ os.remove(user_filename)
+
+ def test_fix_help_options(self):
+ help_tuples = [('a', 'b', 'c', 'd'), (1, 2, 3, 4)]
+ fancy_options = fix_help_options(help_tuples)
+ self.assertEqual(fancy_options[0], ('a', 'b', 'c'))
+ self.assertEqual(fancy_options[1], (1, 2, 3))
+
+ def test_show_help(self):
+ # smoke test, just makes sure some help is displayed
+ self.addCleanup(log.set_threshold, log._global_log.threshold)
+ dist = Distribution()
+ sys.argv = []
+ dist.help = 1
+ dist.script_name = 'setup.py'
+ with captured_stdout() as s:
+ dist.parse_command_line()
+
+ output = [line for line in s.getvalue().split('\n')
+ if line.strip() != '']
+ self.assertTrue(output)
+
+
+ def test_read_metadata(self):
+ attrs = {"name": "package",
+ "version": "1.0",
+ "long_description": "desc",
+ "description": "xxx",
+ "download_url": "http://example.com",
+ "keywords": ['one', 'two'],
+ "requires": ['foo']}
+
+ dist = Distribution(attrs)
+ metadata = dist.metadata
+
+ # write it out, then read it back in
+ PKG_INFO = io.StringIO()
+ metadata.write_pkg_file(PKG_INFO)
+ PKG_INFO.seek(0)
+ metadata.read_pkg_file(PKG_INFO)
+
+ self.assertEqual(metadata.name, "package")
+ self.assertEqual(metadata.version, "1.0")
+ self.assertEqual(metadata.description, "xxx")
+ self.assertEqual(metadata.download_url, 'http://example.com')
+ self.assertEqual(metadata.keywords, ['one', 'two'])
+ self.assertEqual(metadata.platforms, ['UNKNOWN'])
+ self.assertEqual(metadata.obsoletes, None)
+ self.assertEqual(metadata.requires, ['foo'])
+
+def test_suite():
+ suite = unittest.TestSuite()
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(DistributionTestCase))
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(MetadataTestCase))
+ return suite
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_extension.py b/setuptools/_distutils/tests/test_extension.py
new file mode 100644
index 0000000..78a55da
--- /dev/null
+++ b/setuptools/_distutils/tests/test_extension.py
@@ -0,0 +1,71 @@
+"""Tests for distutils.extension."""
+import unittest
+import os
+import warnings
+
+from test.support import run_unittest
+from distutils.extension import read_setup_file, Extension
+
+from .py38compat import check_warnings
+
+class ExtensionTestCase(unittest.TestCase):
+
+ def test_read_setup_file(self):
+ # trying to read a Setup file
+ # (sample extracted from the PyGame project)
+ setup = os.path.join(os.path.dirname(__file__), 'Setup.sample')
+
+ exts = read_setup_file(setup)
+ names = [ext.name for ext in exts]
+ names.sort()
+
+ # here are the extensions read_setup_file should have created
+ # out of the file
+ wanted = ['_arraysurfarray', '_camera', '_numericsndarray',
+ '_numericsurfarray', 'base', 'bufferproxy', 'cdrom',
+ 'color', 'constants', 'display', 'draw', 'event',
+ 'fastevent', 'font', 'gfxdraw', 'image', 'imageext',
+ 'joystick', 'key', 'mask', 'mixer', 'mixer_music',
+ 'mouse', 'movie', 'overlay', 'pixelarray', 'pypm',
+ 'rect', 'rwobject', 'scrap', 'surface', 'surflock',
+ 'time', 'transform']
+
+ self.assertEqual(names, wanted)
+
+ def test_extension_init(self):
+ # the first argument, which is the name, must be a string
+ self.assertRaises(AssertionError, Extension, 1, [])
+ ext = Extension('name', [])
+ self.assertEqual(ext.name, 'name')
+
+ # the second argument, which is the list of files, must
+ # be a list of strings
+ self.assertRaises(AssertionError, Extension, 'name', 'file')
+ self.assertRaises(AssertionError, Extension, 'name', ['file', 1])
+ ext = Extension('name', ['file1', 'file2'])
+ self.assertEqual(ext.sources, ['file1', 'file2'])
+
+ # other arguments have defaults
+ for attr in ('include_dirs', 'define_macros', 'undef_macros',
+ 'library_dirs', 'libraries', 'runtime_library_dirs',
+ 'extra_objects', 'extra_compile_args', 'extra_link_args',
+ 'export_symbols', 'swig_opts', 'depends'):
+ self.assertEqual(getattr(ext, attr), [])
+
+ self.assertEqual(ext.language, None)
+ self.assertEqual(ext.optional, None)
+
+ # if there are unknown keyword options, warn about them
+ with check_warnings() as w:
+ warnings.simplefilter('always')
+ ext = Extension('name', ['file1', 'file2'], chic=True)
+
+ self.assertEqual(len(w.warnings), 1)
+ self.assertEqual(str(w.warnings[0].message),
+ "Unknown Extension options: 'chic'")
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(ExtensionTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_file_util.py b/setuptools/_distutils/tests/test_file_util.py
new file mode 100644
index 0000000..81b90d6
--- /dev/null
+++ b/setuptools/_distutils/tests/test_file_util.py
@@ -0,0 +1,124 @@
+"""Tests for distutils.file_util."""
+import unittest
+import os
+import errno
+from unittest.mock import patch
+
+from distutils.file_util import move_file, copy_file
+from distutils import log
+from distutils.tests import support
+from distutils.errors import DistutilsFileError
+from test.support import run_unittest
+from .py38compat import unlink
+
+
+class FileUtilTestCase(support.TempdirManager, unittest.TestCase):
+
+ def _log(self, msg, *args):
+ if len(args) > 0:
+ self._logs.append(msg % args)
+ else:
+ self._logs.append(msg)
+
+ def setUp(self):
+ super(FileUtilTestCase, self).setUp()
+ self._logs = []
+ self.old_log = log.info
+ log.info = self._log
+ tmp_dir = self.mkdtemp()
+ self.source = os.path.join(tmp_dir, 'f1')
+ self.target = os.path.join(tmp_dir, 'f2')
+ self.target_dir = os.path.join(tmp_dir, 'd1')
+
+ def tearDown(self):
+ log.info = self.old_log
+ super(FileUtilTestCase, self).tearDown()
+
+ def test_move_file_verbosity(self):
+ f = open(self.source, 'w')
+ try:
+ f.write('some content')
+ finally:
+ f.close()
+
+ move_file(self.source, self.target, verbose=0)
+ wanted = []
+ self.assertEqual(self._logs, wanted)
+
+ # back to original state
+ move_file(self.target, self.source, verbose=0)
+
+ move_file(self.source, self.target, verbose=1)
+ wanted = ['moving %s -> %s' % (self.source, self.target)]
+ self.assertEqual(self._logs, wanted)
+
+ # back to original state
+ move_file(self.target, self.source, verbose=0)
+
+ self._logs = []
+ # now the target is a dir
+ os.mkdir(self.target_dir)
+ move_file(self.source, self.target_dir, verbose=1)
+ wanted = ['moving %s -> %s' % (self.source, self.target_dir)]
+ self.assertEqual(self._logs, wanted)
+
+ def test_move_file_exception_unpacking_rename(self):
+ # see issue 22182
+ with patch("os.rename", side_effect=OSError("wrong", 1)), \
+ self.assertRaises(DistutilsFileError):
+ with open(self.source, 'w') as fobj:
+ fobj.write('spam eggs')
+ move_file(self.source, self.target, verbose=0)
+
+ def test_move_file_exception_unpacking_unlink(self):
+ # see issue 22182
+ with patch("os.rename", side_effect=OSError(errno.EXDEV, "wrong")), \
+ patch("os.unlink", side_effect=OSError("wrong", 1)), \
+ self.assertRaises(DistutilsFileError):
+ with open(self.source, 'w') as fobj:
+ fobj.write('spam eggs')
+ move_file(self.source, self.target, verbose=0)
+
+ def test_copy_file_hard_link(self):
+ with open(self.source, 'w') as f:
+ f.write('some content')
+ # Check first that copy_file() will not fall back on copying the file
+ # instead of creating the hard link.
+ try:
+ os.link(self.source, self.target)
+ except OSError as e:
+ self.skipTest('os.link: %s' % e)
+ else:
+ unlink(self.target)
+ st = os.stat(self.source)
+ copy_file(self.source, self.target, link='hard')
+ st2 = os.stat(self.source)
+ st3 = os.stat(self.target)
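+ # a successful hard link means all three stats refer to the same inode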
+ self.assertTrue(os.path.samestat(st, st2), (st, st2))
+ self.assertTrue(os.path.samestat(st2, st3), (st2, st3))
+ with open(self.source, 'r') as f:
+ self.assertEqual(f.read(), 'some content')
+
+ def test_copy_file_hard_link_failure(self):
+ # If hard linking fails, copy_file() falls back on copying the file
+ # (some special filesystems don't support hard linking even under
+ # Unix, see issue #8876).
+ with open(self.source, 'w') as f:
+ f.write('some content')
+ st = os.stat(self.source)
+ with patch("os.link", side_effect=OSError(0, "linking unsupported")):
+ copy_file(self.source, self.target, link='hard')
+ st2 = os.stat(self.source)
+ st3 = os.stat(self.target)
+ self.assertTrue(os.path.samestat(st, st2), (st, st2))
+ self.assertFalse(os.path.samestat(st2, st3), (st2, st3))
+ for fn in (self.source, self.target):
+ with open(fn, 'r') as f:
+ self.assertEqual(f.read(), 'some content')
+
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(FileUtilTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_filelist.py b/setuptools/_distutils/tests/test_filelist.py
new file mode 100644
index 0000000..a90edcf
--- /dev/null
+++ b/setuptools/_distutils/tests/test_filelist.py
@@ -0,0 +1,353 @@
+"""Tests for distutils.filelist."""
+import os
+import re
+import unittest
+from distutils import debug
+from distutils.log import WARN
+from distutils.errors import DistutilsTemplateError
+from distutils.filelist import glob_to_re, translate_pattern, FileList
+from distutils import filelist
+
+from test.support import captured_stdout, run_unittest
+from distutils.tests import support
+
+from .py35compat import adapt_glob
+from . import py38compat as os_helper
+
+
+MANIFEST_IN = """\
+include ok
+include xo
+exclude xo
+include foo.tmp
+include buildout.cfg
+global-include *.x
+global-include *.txt
+global-exclude *.tmp
+recursive-include f *.oo
+recursive-exclude global *.x
+graft dir
+prune dir3
+"""
+
+
+def make_local_path(s):
+ """Converts '/' in a string to os.sep"""
+ return s.replace('/', os.sep)
+
+
+class FileListTestCase(support.LoggingSilencer,
+ unittest.TestCase):
+
+ def assertNoWarnings(self):
+ self.assertEqual(self.get_logs(WARN), [])
+ self.clear_logs()
+
+ def assertWarnings(self):
+ self.assertGreater(len(self.get_logs(WARN)), 0)
+ self.clear_logs()
+
+ def test_glob_to_re(self):
+ sep = os.sep
+ if os.sep == '\\':
+ sep = re.escape(os.sep)
+
+ for glob, regex in (
+ # simple cases
+ ('foo*', r'(?s:foo[^%(sep)s]*)\Z'),
+ ('foo?', r'(?s:foo[^%(sep)s])\Z'),
+ ('foo??', r'(?s:foo[^%(sep)s][^%(sep)s])\Z'),
+ # special cases
+ (r'foo\\*', r'(?s:foo\\\\[^%(sep)s]*)\Z'),
+ (r'foo\\\*', r'(?s:foo\\\\\\[^%(sep)s]*)\Z'),
+ ('foo????', r'(?s:foo[^%(sep)s][^%(sep)s][^%(sep)s][^%(sep)s])\Z'),
+ (r'foo\\??', r'(?s:foo\\\\[^%(sep)s][^%(sep)s])\Z')):
+ regex = regex % {'sep': sep}
+ self.assertEqual(glob_to_re(glob), adapt_glob(regex))
+
+ def test_process_template_line(self):
+ # testing all MANIFEST.in template patterns
+ file_list = FileList()
+ l = make_local_path
+
+ # simulated file list
+ file_list.allfiles = ['foo.tmp', 'ok', 'xo', 'four.txt',
+ 'buildout.cfg',
+ # filelist does not filter out VCS directories,
+ # it's sdist that does
+ l('.hg/last-message.txt'),
+ l('global/one.txt'),
+ l('global/two.txt'),
+ l('global/files.x'),
+ l('global/here.tmp'),
+ l('f/o/f.oo'),
+ l('dir/graft-one'),
+ l('dir/dir2/graft2'),
+ l('dir3/ok'),
+ l('dir3/sub/ok.txt'),
+ ]
+
+ for line in MANIFEST_IN.split('\n'):
+ if line.strip() == '':
+ continue
+ file_list.process_template_line(line)
+
+ wanted = ['ok',
+ 'buildout.cfg',
+ 'four.txt',
+ l('.hg/last-message.txt'),
+ l('global/one.txt'),
+ l('global/two.txt'),
+ l('f/o/f.oo'),
+ l('dir/graft-one'),
+ l('dir/dir2/graft2'),
+ ]
+
+ self.assertEqual(file_list.files, wanted)
+
+ def test_debug_print(self):
+ file_list = FileList()
+ with captured_stdout() as stdout:
+ file_list.debug_print('xxx')
+ self.assertEqual(stdout.getvalue(), '')
+
+ debug.DEBUG = True
+ try:
+ with captured_stdout() as stdout:
+ file_list.debug_print('xxx')
+ self.assertEqual(stdout.getvalue(), 'xxx\n')
+ finally:
+ debug.DEBUG = False
+
+ def test_set_allfiles(self):
+ file_list = FileList()
+ files = ['a', 'b', 'c']
+ file_list.set_allfiles(files)
+ self.assertEqual(file_list.allfiles, files)
+
+ def test_remove_duplicates(self):
+ file_list = FileList()
+ file_list.files = ['a', 'b', 'a', 'g', 'c', 'g']
+ # files must be sorted beforehand (sdist does it)
+ file_list.sort()
+ file_list.remove_duplicates()
+ self.assertEqual(file_list.files, ['a', 'b', 'c', 'g'])
+
+ def test_translate_pattern(self):
+ # not regex
+ self.assertTrue(hasattr(
+ translate_pattern('a', anchor=True, is_regex=False),
+ 'search'))
+
+ # is a regex
+ regex = re.compile('a')
+ self.assertEqual(
+ translate_pattern(regex, anchor=True, is_regex=True),
+ regex)
+
+ # plain string flagged as regex
+ self.assertTrue(hasattr(
+ translate_pattern('a', anchor=True, is_regex=True),
+ 'search'))
+
+ # glob support
+ self.assertTrue(translate_pattern(
+ '*.py', anchor=True, is_regex=False).search('filelist.py'))
+
+ def test_exclude_pattern(self):
+ # return False if no match
+ file_list = FileList()
+ self.assertFalse(file_list.exclude_pattern('*.py'))
+
+ # return True if files match
+ file_list = FileList()
+ file_list.files = ['a.py', 'b.py']
+ self.assertTrue(file_list.exclude_pattern('*.py'))
+
+ # test excludes
+ file_list = FileList()
+ file_list.files = ['a.py', 'a.txt']
+ file_list.exclude_pattern('*.py')
+ self.assertEqual(file_list.files, ['a.txt'])
+
+ def test_include_pattern(self):
+ # return False if no match
+ file_list = FileList()
+ file_list.set_allfiles([])
+ self.assertFalse(file_list.include_pattern('*.py'))
+
+ # return True if files match
+ file_list = FileList()
+ file_list.set_allfiles(['a.py', 'b.txt'])
+ self.assertTrue(file_list.include_pattern('*.py'))
+
+ # test * matches all files
+ file_list = FileList()
+ self.assertIsNone(file_list.allfiles)
+ file_list.set_allfiles(['a.py', 'b.txt'])
+ file_list.include_pattern('*')
+ self.assertEqual(file_list.allfiles, ['a.py', 'b.txt'])
+
+ def test_process_template(self):
+ l = make_local_path
+ # invalid lines
+ file_list = FileList()
+ for action in ('include', 'exclude', 'global-include',
+ 'global-exclude', 'recursive-include',
+ 'recursive-exclude', 'graft', 'prune', 'blarg'):
+ self.assertRaises(DistutilsTemplateError,
+ file_list.process_template_line, action)
+
+ # include
+ file_list = FileList()
+ file_list.set_allfiles(['a.py', 'b.txt', l('d/c.py')])
+
+ file_list.process_template_line('include *.py')
+ self.assertEqual(file_list.files, ['a.py'])
+ self.assertNoWarnings()
+
+ file_list.process_template_line('include *.rb')
+ self.assertEqual(file_list.files, ['a.py'])
+ self.assertWarnings()
+
+ # exclude
+ file_list = FileList()
+ file_list.files = ['a.py', 'b.txt', l('d/c.py')]
+
+ file_list.process_template_line('exclude *.py')
+ self.assertEqual(file_list.files, ['b.txt', l('d/c.py')])
+ self.assertNoWarnings()
+
+ file_list.process_template_line('exclude *.rb')
+ self.assertEqual(file_list.files, ['b.txt', l('d/c.py')])
+ self.assertWarnings()
+
+ # global-include
+ file_list = FileList()
+ file_list.set_allfiles(['a.py', 'b.txt', l('d/c.py')])
+
+ file_list.process_template_line('global-include *.py')
+ self.assertEqual(file_list.files, ['a.py', l('d/c.py')])
+ self.assertNoWarnings()
+
+ file_list.process_template_line('global-include *.rb')
+ self.assertEqual(file_list.files, ['a.py', l('d/c.py')])
+ self.assertWarnings()
+
+ # global-exclude
+ file_list = FileList()
+ file_list.files = ['a.py', 'b.txt', l('d/c.py')]
+
+ file_list.process_template_line('global-exclude *.py')
+ self.assertEqual(file_list.files, ['b.txt'])
+ self.assertNoWarnings()
+
+ file_list.process_template_line('global-exclude *.rb')
+ self.assertEqual(file_list.files, ['b.txt'])
+ self.assertWarnings()
+
+ # recursive-include
+ file_list = FileList()
+ file_list.set_allfiles(['a.py', l('d/b.py'), l('d/c.txt'),
+ l('d/d/e.py')])
+
+ file_list.process_template_line('recursive-include d *.py')
+ self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')])
+ self.assertNoWarnings()
+
+ file_list.process_template_line('recursive-include e *.py')
+ self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')])
+ self.assertWarnings()
+
+ # recursive-exclude
+ file_list = FileList()
+ file_list.files = ['a.py', l('d/b.py'), l('d/c.txt'), l('d/d/e.py')]
+
+ file_list.process_template_line('recursive-exclude d *.py')
+ self.assertEqual(file_list.files, ['a.py', l('d/c.txt')])
+ self.assertNoWarnings()
+
+ file_list.process_template_line('recursive-exclude e *.py')
+ self.assertEqual(file_list.files, ['a.py', l('d/c.txt')])
+ self.assertWarnings()
+
+ # graft
+ file_list = FileList()
+ file_list.set_allfiles(['a.py', l('d/b.py'), l('d/d/e.py'),
+ l('f/f.py')])
+
+ file_list.process_template_line('graft d')
+ self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')])
+ self.assertNoWarnings()
+
+ file_list.process_template_line('graft e')
+ self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')])
+ self.assertWarnings()
+
+ # prune
+ file_list = FileList()
+ file_list.files = ['a.py', l('d/b.py'), l('d/d/e.py'), l('f/f.py')]
+
+ file_list.process_template_line('prune d')
+ self.assertEqual(file_list.files, ['a.py', l('f/f.py')])
+ self.assertNoWarnings()
+
+ file_list.process_template_line('prune e')
+ self.assertEqual(file_list.files, ['a.py', l('f/f.py')])
+ self.assertWarnings()
+
+
+class FindAllTestCase(unittest.TestCase):
+ @os_helper.skip_unless_symlink
+ def test_missing_symlink(self):
+ with os_helper.temp_cwd():
+ os.symlink('foo', 'bar')
+ self.assertEqual(filelist.findall(), [])
+
+ def test_basic_discovery(self):
+ """
+ When findall is called with no parameters or with
+ '.' as the parameter, the dot should be omitted from
+ the results.
+ """
+ with os_helper.temp_cwd():
+ os.mkdir('foo')
+ file1 = os.path.join('foo', 'file1.txt')
+ os_helper.create_empty_file(file1)
+ os.mkdir('bar')
+ file2 = os.path.join('bar', 'file2.txt')
+ os_helper.create_empty_file(file2)
+ expected = [file2, file1]
+ self.assertEqual(sorted(filelist.findall()), expected)
+
+ def test_non_local_discovery(self):
+ """
+ When findall is called with another path, the full
+ path name should be returned.
+ """
+ with os_helper.temp_dir() as temp_dir:
+ file1 = os.path.join(temp_dir, 'file1.txt')
+ os_helper.create_empty_file(file1)
+ expected = [file1]
+ self.assertEqual(filelist.findall(temp_dir), expected)
+
+ @os_helper.skip_unless_symlink
+ def test_symlink_loop(self):
+ with os_helper.temp_dir() as temp_dir:
+ link = os.path.join(temp_dir, 'link-to-parent')
+ content = os.path.join(temp_dir, 'somefile')
+ os_helper.create_empty_file(content)
+ os.symlink('.', link)
+ files = filelist.findall(temp_dir)
+ assert len(files) == 1
+
+
+def test_suite():
+ return unittest.TestSuite([
+ unittest.TestLoader().loadTestsFromTestCase(FileListTestCase),
+ unittest.TestLoader().loadTestsFromTestCase(FindAllTestCase),
+ ])
+
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_install.py b/setuptools/_distutils/tests/test_install.py
new file mode 100644
index 0000000..5dbc06b
--- /dev/null
+++ b/setuptools/_distutils/tests/test_install.py
@@ -0,0 +1,263 @@
+"""Tests for distutils.command.install."""
+
+import os
+import sys
+import unittest
+import site
+
+from test.support import captured_stdout, run_unittest
+
+from distutils import sysconfig
+from distutils.command.install import install
+from distutils.command import install as install_module
+from distutils.command.build_ext import build_ext
+from distutils.command.install import INSTALL_SCHEMES
+from distutils.core import Distribution
+from distutils.errors import DistutilsOptionError
+from distutils.extension import Extension
+
+from distutils.tests import support
+from test import support as test_support
+
+
+def _make_ext_name(modname):
+ return modname + sysconfig.get_config_var('EXT_SUFFIX')
+
+
+class InstallTestCase(support.TempdirManager,
+ support.EnvironGuard,
+ support.LoggingSilencer,
+ unittest.TestCase):
+
+ def test_home_installation_scheme(self):
+ # This ensures two things:
+ # - that --home generates the desired set of directory names
+ # - that --home is supported on all platforms
+ builddir = self.mkdtemp()
+ destination = os.path.join(builddir, "installation")
+
+ dist = Distribution({"name": "foopkg"})
+ # script_name need not exist, it just needs to be initialized
+ dist.script_name = os.path.join(builddir, "setup.py")
+ dist.command_obj["build"] = support.DummyCommand(
+ build_base=builddir,
+ build_lib=os.path.join(builddir, "lib"),
+ )
+
+ cmd = install(dist)
+ cmd.home = destination
+ cmd.ensure_finalized()
+
+ self.assertEqual(cmd.install_base, destination)
+ self.assertEqual(cmd.install_platbase, destination)
+
+ def check_path(got, expected):
+ got = os.path.normpath(got)
+ expected = os.path.normpath(expected)
+ self.assertEqual(got, expected)
+
+ libdir = os.path.join(destination, "lib", "python")
+ check_path(cmd.install_lib, libdir)
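+ # sys.platlibdir only exists on Python 3.9+; older versions use "lib"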
+ _platlibdir = getattr(sys, "platlibdir", "lib")
+ platlibdir = os.path.join(destination, _platlibdir, "python")
+ check_path(cmd.install_platlib, platlibdir)
+ check_path(cmd.install_purelib, libdir)
+ check_path(cmd.install_headers,
+ os.path.join(destination, "include", "python", "foopkg"))
+ check_path(cmd.install_scripts, os.path.join(destination, "bin"))
+ check_path(cmd.install_data, destination)
+
+ def test_user_site(self):
+ # test install with --user
+ # preparing the environment for the test
+ self.old_user_base = site.USER_BASE
+ self.old_user_site = site.USER_SITE
+ self.tmpdir = self.mkdtemp()
+ self.user_base = os.path.join(self.tmpdir, 'B')
+ self.user_site = os.path.join(self.tmpdir, 'S')
+ site.USER_BASE = self.user_base
+ site.USER_SITE = self.user_site
+ install_module.USER_BASE = self.user_base
+ install_module.USER_SITE = self.user_site
+
+ def _expanduser(path):
+ if path.startswith('~'):
+ return os.path.normpath(self.tmpdir + path[1:])
+ return path
+ self.old_expand = os.path.expanduser
+ os.path.expanduser = _expanduser
+
+ def cleanup():
+ site.USER_BASE = self.old_user_base
+ site.USER_SITE = self.old_user_site
+ install_module.USER_BASE = self.old_user_base
+ install_module.USER_SITE = self.old_user_site
+ os.path.expanduser = self.old_expand
+
+ self.addCleanup(cleanup)
+
+ for key in ('nt_user', 'posix_user'):
+ self.assertIn(key, INSTALL_SCHEMES)
+
+ dist = Distribution({'name': 'xx'})
+ cmd = install(dist)
+
+ # making sure the user option is there
+ options = [name for name, short, label in
+ cmd.user_options]
+ self.assertIn('user', options)
+
+ # setting a value
+ cmd.user = 1
+
+ # user base and site shouldn't be created yet
+ self.assertFalse(os.path.exists(self.user_base))
+ self.assertFalse(os.path.exists(self.user_site))
+
+ # let's run finalize
+ cmd.ensure_finalized()
+
+ # now they should
+ self.assertTrue(os.path.exists(self.user_base))
+ self.assertTrue(os.path.exists(self.user_site))
+
+ self.assertIn('userbase', cmd.config_vars)
+ self.assertIn('usersite', cmd.config_vars)
+
+ actual_headers = os.path.relpath(cmd.install_headers, self.user_base)
+ if os.name == 'nt':
+ site_path = os.path.relpath(
+ os.path.dirname(self.old_user_site), self.old_user_base)
+ include = os.path.join(site_path, 'Include')
+ else:
+ include = sysconfig.get_python_inc(0, '')
+ expect_headers = os.path.join(include, 'xx')
+
+ self.assertEqual(os.path.normcase(actual_headers), os.path.normcase(expect_headers))
+
+ def test_handle_extra_path(self):
+ dist = Distribution({'name': 'xx', 'extra_path': 'path,dirs'})
+ cmd = install(dist)
+
+ # two elements
+ cmd.handle_extra_path()
+ self.assertEqual(cmd.extra_path, ['path', 'dirs'])
+ self.assertEqual(cmd.extra_dirs, 'dirs')
+ self.assertEqual(cmd.path_file, 'path')
+
+ # one element
+ cmd.extra_path = ['path']
+ cmd.handle_extra_path()
+ self.assertEqual(cmd.extra_path, ['path'])
+ self.assertEqual(cmd.extra_dirs, 'path')
+ self.assertEqual(cmd.path_file, 'path')
+
+ # none
+ dist.extra_path = cmd.extra_path = None
+ cmd.handle_extra_path()
+ self.assertEqual(cmd.extra_path, None)
+ self.assertEqual(cmd.extra_dirs, '')
+ self.assertEqual(cmd.path_file, None)
+
+ # three elements (no way!)
+ cmd.extra_path = 'path,dirs,again'
+ self.assertRaises(DistutilsOptionError, cmd.handle_extra_path)
+
+ def test_finalize_options(self):
+ dist = Distribution({'name': 'xx'})
+ cmd = install(dist)
+
+ # must supply either prefix/exec-prefix/home or
+ # install-base/install-platbase -- not both
+ cmd.prefix = 'prefix'
+ cmd.install_base = 'base'
+ self.assertRaises(DistutilsOptionError, cmd.finalize_options)
+
+ # must supply either home or prefix/exec-prefix -- not both
+ cmd.install_base = None
+ cmd.home = 'home'
+ self.assertRaises(DistutilsOptionError, cmd.finalize_options)
+
+ # can't combine user with prefix/exec_prefix/home or
+ # install_(plat)base
+ cmd.prefix = None
+ cmd.user = 'user'
+ self.assertRaises(DistutilsOptionError, cmd.finalize_options)
+
+ def test_record(self):
+ install_dir = self.mkdtemp()
+ project_dir, dist = self.create_dist(py_modules=['hello'],
+ scripts=['sayhi'])
+ os.chdir(project_dir)
+ self.write_file('hello.py', "def main(): print('o hai')")
+ self.write_file('sayhi', 'from hello import main; main()')
+
+ cmd = install(dist)
+ dist.command_obj['install'] = cmd
+ cmd.root = install_dir
+ cmd.record = os.path.join(project_dir, 'filelist')
+ cmd.ensure_finalized()
+ cmd.run()
+
+ f = open(cmd.record)
+ try:
+ content = f.read()
+ finally:
+ f.close()
+
+ found = [os.path.basename(line) for line in content.splitlines()]
+ expected = ['hello.py', 'hello.%s.pyc' % sys.implementation.cache_tag,
+ 'sayhi',
+ 'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2]]
+ self.assertEqual(found, expected)
+
+ def test_record_extensions(self):
+ cmd = test_support.missing_compiler_executable()
+ if cmd is not None:
+ self.skipTest('The %r command is not found' % cmd)
+ install_dir = self.mkdtemp()
+ project_dir, dist = self.create_dist(ext_modules=[
+ Extension('xx', ['xxmodule.c'])])
+ os.chdir(project_dir)
+ support.copy_xxmodule_c(project_dir)
+
+ buildextcmd = build_ext(dist)
+ support.fixup_build_ext(buildextcmd)
+ buildextcmd.ensure_finalized()
+
+ cmd = install(dist)
+ dist.command_obj['install'] = cmd
+ dist.command_obj['build_ext'] = buildextcmd
+ cmd.root = install_dir
+ cmd.record = os.path.join(project_dir, 'filelist')
+ cmd.ensure_finalized()
+ cmd.run()
+
+ f = open(cmd.record)
+ try:
+ content = f.read()
+ finally:
+ f.close()
+
+ found = [os.path.basename(line) for line in content.splitlines()]
+ expected = [_make_ext_name('xx'),
+ 'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2]]
+ self.assertEqual(found, expected)
+
+ def test_debug_mode(self):
+ # this covers the code called when DEBUG is set
+ old_logs_len = len(self.logs)
+ install_module.DEBUG = True
+ try:
+ with captured_stdout():
+ self.test_record()
+ finally:
+ install_module.DEBUG = False
+ self.assertGreater(len(self.logs), old_logs_len)
+
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(InstallTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_install_data.py b/setuptools/_distutils/tests/test_install_data.py
new file mode 100644
index 0000000..6191d2f
--- /dev/null
+++ b/setuptools/_distutils/tests/test_install_data.py
@@ -0,0 +1,75 @@
+"""Tests for distutils.command.install_data."""
+import os
+import unittest
+
+from distutils.command.install_data import install_data
+from distutils.tests import support
+from test.support import run_unittest
+
+class InstallDataTestCase(support.TempdirManager,
+ support.LoggingSilencer,
+ support.EnvironGuard,
+ unittest.TestCase):
+
+ def test_simple_run(self):
+ pkg_dir, dist = self.create_dist()
+ cmd = install_data(dist)
+ cmd.install_dir = inst = os.path.join(pkg_dir, 'inst')
+
+ # data_files can contain
+ # - simple files
+ # - a tuple with a path, and a list of files
+ one = os.path.join(pkg_dir, 'one')
+ self.write_file(one, 'xxx')
+ inst2 = os.path.join(pkg_dir, 'inst2')
+ two = os.path.join(pkg_dir, 'two')
+ self.write_file(two, 'xxx')
+
+ cmd.data_files = [one, (inst2, [two])]
+ self.assertEqual(cmd.get_inputs(), [one, (inst2, [two])])
+
+ # let's run the command
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # let's check the result
+ self.assertEqual(len(cmd.get_outputs()), 2)
+ rtwo = os.path.split(two)[-1]
+ self.assertTrue(os.path.exists(os.path.join(inst2, rtwo)))
+ rone = os.path.split(one)[-1]
+ self.assertTrue(os.path.exists(os.path.join(inst, rone)))
+ cmd.outfiles = []
+
+ # let's try with warn_dir set
+ cmd.warn_dir = 1
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # let's check the result
+ self.assertEqual(len(cmd.get_outputs()), 2)
+ self.assertTrue(os.path.exists(os.path.join(inst2, rtwo)))
+ self.assertTrue(os.path.exists(os.path.join(inst, rone)))
+ cmd.outfiles = []
+
+ # now using root and empty dir
+ cmd.root = os.path.join(pkg_dir, 'root')
+ inst3 = os.path.join(cmd.install_dir, 'inst3')
+ inst4 = os.path.join(pkg_dir, 'inst4')
+ three = os.path.join(cmd.install_dir, 'three')
+ self.write_file(three, 'xx')
+ cmd.data_files = [one, (inst2, [two]),
+ ('inst3', [three]),
+ (inst4, [])]
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # let's check the result
+ self.assertEqual(len(cmd.get_outputs()), 4)
+ self.assertTrue(os.path.exists(os.path.join(inst2, rtwo)))
+ self.assertTrue(os.path.exists(os.path.join(inst, rone)))
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(InstallDataTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_install_headers.py b/setuptools/_distutils/tests/test_install_headers.py
new file mode 100644
index 0000000..1aa4d09
--- /dev/null
+++ b/setuptools/_distutils/tests/test_install_headers.py
@@ -0,0 +1,39 @@
+"""Tests for distutils.command.install_headers."""
+import os
+import unittest
+
+from distutils.command.install_headers import install_headers
+from distutils.tests import support
+from test.support import run_unittest
+
+class InstallHeadersTestCase(support.TempdirManager,
+ support.LoggingSilencer,
+ support.EnvironGuard,
+ unittest.TestCase):
+
+ def test_simple_run(self):
+ # we have two headers
+ header_list = self.mkdtemp()
+ header1 = os.path.join(header_list, 'header1')
+ header2 = os.path.join(header_list, 'header2')
+ self.write_file(header1)
+ self.write_file(header2)
+ headers = [header1, header2]
+
+ pkg_dir, dist = self.create_dist(headers=headers)
+ cmd = install_headers(dist)
+ self.assertEqual(cmd.get_inputs(), headers)
+
+ # let's run the command
+ cmd.install_dir = os.path.join(pkg_dir, 'inst')
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # let's check the results
+ self.assertEqual(len(cmd.get_outputs()), 2)
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(InstallHeadersTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_install_lib.py b/setuptools/_distutils/tests/test_install_lib.py
new file mode 100644
index 0000000..652653f
--- /dev/null
+++ b/setuptools/_distutils/tests/test_install_lib.py
@@ -0,0 +1,115 @@
+"""Tests for distutils.command.install_data."""
+import sys
+import os
+import importlib.util
+import unittest
+
+from distutils.command.install_lib import install_lib
+from distutils.extension import Extension
+from distutils.tests import support
+from distutils.errors import DistutilsOptionError
+from test.support import run_unittest
+
+
+class InstallLibTestCase(support.TempdirManager,
+ support.LoggingSilencer,
+ support.EnvironGuard,
+ unittest.TestCase):
+
+ def test_finalize_options(self):
+ dist = self.create_dist()[1]
+ cmd = install_lib(dist)
+
+ cmd.finalize_options()
+ self.assertEqual(cmd.compile, 1)
+ self.assertEqual(cmd.optimize, 0)
+
+ # optimize must be 0, 1, or 2
+ cmd.optimize = 'foo'
+ self.assertRaises(DistutilsOptionError, cmd.finalize_options)
+ cmd.optimize = '4'
+ self.assertRaises(DistutilsOptionError, cmd.finalize_options)
+
+ cmd.optimize = '2'
+ cmd.finalize_options()
+ self.assertEqual(cmd.optimize, 2)
+
+ @unittest.skipIf(sys.dont_write_bytecode, 'byte-compile disabled')
+ def test_byte_compile(self):
+ project_dir, dist = self.create_dist()
+ os.chdir(project_dir)
+ cmd = install_lib(dist)
+ cmd.compile = cmd.optimize = 1
+
+ f = os.path.join(project_dir, 'foo.py')
+ self.write_file(f, '# python file')
+ cmd.byte_compile([f])
+ pyc_file = importlib.util.cache_from_source('foo.py', optimization='')
+ pyc_opt_file = importlib.util.cache_from_source('foo.py',
+ optimization=cmd.optimize)
+ self.assertTrue(os.path.exists(pyc_file))
+ self.assertTrue(os.path.exists(pyc_opt_file))
+
+ def test_get_outputs(self):
+ project_dir, dist = self.create_dist()
+ os.chdir(project_dir)
+ os.mkdir('spam')
+ cmd = install_lib(dist)
+
+ # setting up a dist environment
+ cmd.compile = cmd.optimize = 1
+ cmd.install_dir = self.mkdtemp()
+ f = os.path.join(project_dir, 'spam', '__init__.py')
+ self.write_file(f, '# python package')
+ cmd.distribution.ext_modules = [Extension('foo', ['xxx'])]
+ cmd.distribution.packages = ['spam']
+ cmd.distribution.script_name = 'setup.py'
+
+ # get_outputs should return 4 elements: spam/__init__.py and .pyc,
+ # foo.import-tag-abiflags.so / foo.pyd
+ outputs = cmd.get_outputs()
+ self.assertEqual(len(outputs), 4, outputs)
+
+ def test_get_inputs(self):
+ project_dir, dist = self.create_dist()
+ os.chdir(project_dir)
+ os.mkdir('spam')
+ cmd = install_lib(dist)
+
+ # setting up a dist environment
+ cmd.compile = cmd.optimize = 1
+ cmd.install_dir = self.mkdtemp()
+ f = os.path.join(project_dir, 'spam', '__init__.py')
+ self.write_file(f, '# python package')
+ cmd.distribution.ext_modules = [Extension('foo', ['xxx'])]
+ cmd.distribution.packages = ['spam']
+ cmd.distribution.script_name = 'setup.py'
+
+ # get_inputs should return 2 elements: spam/__init__.py and
+ # foo.import-tag-abiflags.so / foo.pyd
+ inputs = cmd.get_inputs()
+ self.assertEqual(len(inputs), 2, inputs)
+
+ def test_dont_write_bytecode(self):
+ # makes sure byte_compile is not used
+ dist = self.create_dist()[1]
+ cmd = install_lib(dist)
+ cmd.compile = 1
+ cmd.optimize = 1
+
+ old_dont_write_bytecode = sys.dont_write_bytecode
+ sys.dont_write_bytecode = True
+ try:
+ cmd.byte_compile([])
+ finally:
+ sys.dont_write_bytecode = old_dont_write_bytecode
+
+ self.assertIn('byte-compiling is disabled',
+ self.logs[0][1] % self.logs[0][2])
+
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(InstallLibTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_install_scripts.py b/setuptools/_distutils/tests/test_install_scripts.py
new file mode 100644
index 0000000..648db3b
--- /dev/null
+++ b/setuptools/_distutils/tests/test_install_scripts.py
@@ -0,0 +1,82 @@
+"""Tests for distutils.command.install_scripts."""
+
+import os
+import unittest
+
+from distutils.command.install_scripts import install_scripts
+from distutils.core import Distribution
+
+from distutils.tests import support
+from test.support import run_unittest
+
+
+class InstallScriptsTestCase(support.TempdirManager,
+ support.LoggingSilencer,
+ unittest.TestCase):
+
+ def test_default_settings(self):
+ dist = Distribution()
+ dist.command_obj["build"] = support.DummyCommand(
+ build_scripts="/foo/bar")
+ dist.command_obj["install"] = support.DummyCommand(
+ install_scripts="/splat/funk",
+ force=1,
+ skip_build=1,
+ )
+ cmd = install_scripts(dist)
+ self.assertFalse(cmd.force)
+ self.assertFalse(cmd.skip_build)
+ self.assertIsNone(cmd.build_dir)
+ self.assertIsNone(cmd.install_dir)
+
+ cmd.finalize_options()
+
+ self.assertTrue(cmd.force)
+ self.assertTrue(cmd.skip_build)
+ self.assertEqual(cmd.build_dir, "/foo/bar")
+ self.assertEqual(cmd.install_dir, "/splat/funk")
+
+ def test_installation(self):
+ source = self.mkdtemp()
+ expected = []
+
+ def write_script(name, text):
+ expected.append(name)
+ f = open(os.path.join(source, name), "w")
+ try:
+ f.write(text)
+ finally:
+ f.close()
+
+ write_script("script1.py", ("#! /usr/bin/env python2.3\n"
+ "# bogus script w/ Python sh-bang\n"
+ "pass\n"))
+ write_script("script2.py", ("#!/usr/bin/python\n"
+ "# bogus script w/ Python sh-bang\n"
+ "pass\n"))
+ write_script("shell.sh", ("#!/bin/sh\n"
+ "# bogus shell script w/ sh-bang\n"
+ "exit 0\n"))
+
+ target = self.mkdtemp()
+ dist = Distribution()
+ dist.command_obj["build"] = support.DummyCommand(build_scripts=source)
+ dist.command_obj["install"] = support.DummyCommand(
+ install_scripts=target,
+ force=1,
+ skip_build=1,
+ )
+ cmd = install_scripts(dist)
+ cmd.finalize_options()
+ cmd.run()
+
+ installed = os.listdir(target)
+ for name in expected:
+ self.assertIn(name, installed)
+
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(InstallScriptsTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_log.py b/setuptools/_distutils/tests/test_log.py
new file mode 100644
index 0000000..ec2ae02
--- /dev/null
+++ b/setuptools/_distutils/tests/test_log.py
@@ -0,0 +1,46 @@
+"""Tests for distutils.log"""
+
+import io
+import sys
+import unittest
+from test.support import swap_attr, run_unittest
+
+from distutils import log
+
+class TestLog(unittest.TestCase):
+ def test_non_ascii(self):
+ # Issues #8663, #34421: test that non-encodable text is escaped with
+ # backslashreplace error handler and encodable non-ASCII text is
+ # output as is.
+ for errors in ('strict', 'backslashreplace', 'surrogateescape',
+ 'replace', 'ignore'):
+ with self.subTest(errors=errors):
+ stdout = io.TextIOWrapper(io.BytesIO(),
+ encoding='cp437', errors=errors)
+ stderr = io.TextIOWrapper(io.BytesIO(),
+ encoding='cp437', errors=errors)
+ old_threshold = log.set_threshold(log.DEBUG)
+ try:
+ with swap_attr(sys, 'stdout', stdout), \
+ swap_attr(sys, 'stderr', stderr):
+ log.debug('Dεbug\tMėssãge')
+ log.fatal('Fαtal\tÈrrōr')
+ finally:
+ log.set_threshold(old_threshold)
+
+ stdout.seek(0)
+ self.assertEqual(stdout.read().rstrip(),
+ 'Dεbug\tM?ss?ge' if errors == 'replace' else
+ 'Dεbug\tMssge' if errors == 'ignore' else
+ 'Dεbug\tM\\u0117ss\\xe3ge')
+ stderr.seek(0)
+ self.assertEqual(stderr.read().rstrip(),
+ 'Fαtal\t?rr?r' if errors == 'replace' else
+ 'Fαtal\trrr' if errors == 'ignore' else
+ 'Fαtal\t\\xc8rr\\u014dr')
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(TestLog)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_msvc9compiler.py b/setuptools/_distutils/tests/test_msvc9compiler.py
new file mode 100644
index 0000000..6235405
--- /dev/null
+++ b/setuptools/_distutils/tests/test_msvc9compiler.py
@@ -0,0 +1,184 @@
+"""Tests for distutils.msvc9compiler."""
+import sys
+import unittest
+import os
+
+from distutils.errors import DistutilsPlatformError
+from distutils.tests import support
+from test.support import run_unittest
+
+# A manifest whose only assembly reference is the msvcrt assembly, so the
+# reference should be completely stripped. Note that although the manifest
+# has a <security> element, the assembly reference is still removed - that is
+# currently a "feature", not a bug :)
+_MANIFEST_WITH_ONLY_MSVC_REFERENCE = """\
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
+ manifestVersion="1.0">
+ <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+ <security>
+ <requestedPrivileges>
+ <requestedExecutionLevel level="asInvoker" uiAccess="false">
+ </requestedExecutionLevel>
+ </requestedPrivileges>
+ </security>
+ </trustInfo>
+ <dependency>
+ <dependentAssembly>
+ <assemblyIdentity type="win32" name="Microsoft.VC90.CRT"
+ version="9.0.21022.8" processorArchitecture="x86"
+ publicKeyToken="XXXX">
+ </assemblyIdentity>
+ </dependentAssembly>
+ </dependency>
+</assembly>
+"""
+
+# A manifest with references to assemblies other than msvcrt. When processed,
+# this manifest should be returned with just the msvcrt reference removed.
+_MANIFEST_WITH_MULTIPLE_REFERENCES = """\
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
+ manifestVersion="1.0">
+ <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+ <security>
+ <requestedPrivileges>
+ <requestedExecutionLevel level="asInvoker" uiAccess="false">
+ </requestedExecutionLevel>
+ </requestedPrivileges>
+ </security>
+ </trustInfo>
+ <dependency>
+ <dependentAssembly>
+ <assemblyIdentity type="win32" name="Microsoft.VC90.CRT"
+ version="9.0.21022.8" processorArchitecture="x86"
+ publicKeyToken="XXXX">
+ </assemblyIdentity>
+ </dependentAssembly>
+ </dependency>
+ <dependency>
+ <dependentAssembly>
+ <assemblyIdentity type="win32" name="Microsoft.VC90.MFC"
+ version="9.0.21022.8" processorArchitecture="x86"
+ publicKeyToken="XXXX"></assemblyIdentity>
+ </dependentAssembly>
+ </dependency>
+</assembly>
+"""
+
+_CLEANED_MANIFEST = """\
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
+ manifestVersion="1.0">
+ <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+ <security>
+ <requestedPrivileges>
+ <requestedExecutionLevel level="asInvoker" uiAccess="false">
+ </requestedExecutionLevel>
+ </requestedPrivileges>
+ </security>
+ </trustInfo>
+ <dependency>
+
+ </dependency>
+ <dependency>
+ <dependentAssembly>
+ <assemblyIdentity type="win32" name="Microsoft.VC90.MFC"
+ version="9.0.21022.8" processorArchitecture="x86"
+ publicKeyToken="XXXX"></assemblyIdentity>
+ </dependentAssembly>
+ </dependency>
+</assembly>"""
+
+if sys.platform=="win32":
+ from distutils.msvccompiler import get_build_version
+ if get_build_version()>=8.0:
+ SKIP_MESSAGE = None
+ else:
+ SKIP_MESSAGE = "These tests are only for MSVC8.0 or above"
+else:
+ SKIP_MESSAGE = "These tests are only for win32"
+
+@unittest.skipUnless(SKIP_MESSAGE is None, SKIP_MESSAGE)
+class msvc9compilerTestCase(support.TempdirManager,
+ unittest.TestCase):
+
+ def test_no_compiler(self):
+ # makes sure query_vcvarsall raises
+ # a DistutilsPlatformError if the compiler
+ # is not found
+ from distutils.msvc9compiler import query_vcvarsall
+ def _find_vcvarsall(version):
+ return None
+
+ from distutils import msvc9compiler
+ old_find_vcvarsall = msvc9compiler.find_vcvarsall
+ msvc9compiler.find_vcvarsall = _find_vcvarsall
+ try:
+ self.assertRaises(DistutilsPlatformError, query_vcvarsall,
+ 'wont find this version')
+ finally:
+ msvc9compiler.find_vcvarsall = old_find_vcvarsall
+
+ def test_reg_class(self):
+ from distutils.msvc9compiler import Reg
+ self.assertRaises(KeyError, Reg.get_value, 'xxx', 'xxx')
+
+ # looking for registry values that should exist on all
+ # versions of Windows.
+ path = r'Control Panel\Desktop'
+ v = Reg.get_value(path, 'dragfullwindows')
+ self.assertIn(v, ('0', '1', '2'))
+
+ import winreg
+ HKCU = winreg.HKEY_CURRENT_USER
+ keys = Reg.read_keys(HKCU, 'xxxx')
+ self.assertEqual(keys, None)
+
+ keys = Reg.read_keys(HKCU, r'Control Panel')
+ self.assertIn('Desktop', keys)
+
+ def test_remove_visual_c_ref(self):
+ from distutils.msvc9compiler import MSVCCompiler
+ tempdir = self.mkdtemp()
+ manifest = os.path.join(tempdir, 'manifest')
+ f = open(manifest, 'w')
+ try:
+ f.write(_MANIFEST_WITH_MULTIPLE_REFERENCES)
+ finally:
+ f.close()
+
+ compiler = MSVCCompiler()
+ compiler._remove_visual_c_ref(manifest)
+
+ # see what we got
+ f = open(manifest)
+ try:
+ # removing trailing spaces
+ content = '\n'.join([line.rstrip() for line in f.readlines()])
+ finally:
+ f.close()
+
+ # makes sure the manifest was properly cleaned
+ self.assertEqual(content, _CLEANED_MANIFEST)
+
+ def test_remove_entire_manifest(self):
+ from distutils.msvc9compiler import MSVCCompiler
+ tempdir = self.mkdtemp()
+ manifest = os.path.join(tempdir, 'manifest')
+ f = open(manifest, 'w')
+ try:
+ f.write(_MANIFEST_WITH_ONLY_MSVC_REFERENCE)
+ finally:
+ f.close()
+
+ compiler = MSVCCompiler()
+ got = compiler._remove_visual_c_ref(manifest)
+ self.assertIsNone(got)
+
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(msvc9compilerTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_msvccompiler.py b/setuptools/_distutils/tests/test_msvccompiler.py
new file mode 100644
index 0000000..846e5bb
--- /dev/null
+++ b/setuptools/_distutils/tests/test_msvccompiler.py
@@ -0,0 +1,138 @@
+"""Tests for distutils._msvccompiler."""
+import sys
+import unittest
+import os
+import threading
+
+from distutils.errors import DistutilsPlatformError
+from distutils.tests import support
+from test.support import run_unittest
+
+
+SKIP_MESSAGE = (None if sys.platform == "win32" else
+ "These tests are only for win32")
+
+@unittest.skipUnless(SKIP_MESSAGE is None, SKIP_MESSAGE)
+class msvccompilerTestCase(support.TempdirManager,
+ unittest.TestCase):
+
+ def test_no_compiler(self):
+ import distutils._msvccompiler as _msvccompiler
+ # makes sure query_vcvarsall raises
+ # a DistutilsPlatformError if the compiler
+ # is not found
+ def _find_vcvarsall(plat_spec):
+ return None, None
+
+ old_find_vcvarsall = _msvccompiler._find_vcvarsall
+ _msvccompiler._find_vcvarsall = _find_vcvarsall
+ try:
+ self.assertRaises(DistutilsPlatformError,
+ _msvccompiler._get_vc_env,
+ 'wont find this version')
+ finally:
+ _msvccompiler._find_vcvarsall = old_find_vcvarsall
+
+ def test_get_vc_env_unicode(self):
+ import distutils._msvccompiler as _msvccompiler
+
+ test_var = 'ṰḖṤṪ┅ṼẨṜ'
+ test_value = '₃⁴₅'
+
+ # Ensure we don't early exit from _get_vc_env
+ old_distutils_use_sdk = os.environ.pop('DISTUTILS_USE_SDK', None)
+ os.environ[test_var] = test_value
+ try:
+ env = _msvccompiler._get_vc_env('x86')
+ self.assertIn(test_var.lower(), env)
+ self.assertEqual(test_value, env[test_var.lower()])
+ finally:
+ os.environ.pop(test_var)
+ if old_distutils_use_sdk:
+ os.environ['DISTUTILS_USE_SDK'] = old_distutils_use_sdk
+
+ def test_get_vc2017(self):
+ import distutils._msvccompiler as _msvccompiler
+
+ # This function cannot be mocked, so pass it if we find VS 2017
+ # and mark it skipped if we do not.
+ version, path = _msvccompiler._find_vc2017()
+ if version:
+ self.assertGreaterEqual(version, 15)
+ self.assertTrue(os.path.isdir(path))
+ else:
+ raise unittest.SkipTest("VS 2017 is not installed")
+
+ def test_get_vc2015(self):
+ import distutils._msvccompiler as _msvccompiler
+
+ # This function cannot be mocked, so pass it if we find VS 2015
+ # and mark it skipped if we do not.
+ version, path = _msvccompiler._find_vc2015()
+ if version:
+ self.assertGreaterEqual(version, 14)
+ self.assertTrue(os.path.isdir(path))
+ else:
+ raise unittest.SkipTest("VS 2015 is not installed")
+
+
+class CheckThread(threading.Thread):
+ exc_info = None
+
+ def run(self):
+ try:
+ super().run()
+ except Exception:
+ self.exc_info = sys.exc_info()
+
+ def __bool__(self):
+ return not self.exc_info
+
+
+class TestSpawn(unittest.TestCase):
+ def test_concurrent_safe(self):
+ """
+ Concurrent calls to spawn should have consistent results.
+ """
+ import distutils._msvccompiler as _msvccompiler
+ compiler = _msvccompiler.MSVCCompiler()
+ compiler._paths = "expected"
+ inner_cmd = 'import os; assert os.environ["PATH"] == "expected"'
+ command = [sys.executable, '-c', inner_cmd]
+
+ threads = [
+ CheckThread(target=compiler.spawn, args=[command])
+ for n in range(100)
+ ]
+ for thread in threads:
+ thread.start()
+ for thread in threads:
+ thread.join()
+ assert all(threads)
+
+ def test_concurrent_safe_fallback(self):
+ """
+ If CCompiler.spawn has been monkey-patched without support
+ for an env, it should still execute.
+ """
+ import distutils._msvccompiler as _msvccompiler
+ from distutils import ccompiler
+ compiler = _msvccompiler.MSVCCompiler()
+ compiler._paths = "expected"
+
+ def CCompiler_spawn(self, cmd):
+ "A spawn without an env argument."
+ assert os.environ["PATH"] == "expected"
+
+ with unittest.mock.patch.object(
+ ccompiler.CCompiler, 'spawn', CCompiler_spawn):
+ compiler.spawn(["n/a"])
+
+ assert os.environ.get("PATH") != "expected"
+
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(msvccompilerTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
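The CheckThread helper above is a small, reusable pattern for surfacing exceptions raised in worker threads during a concurrency test. A minimal self-contained sketch of the same idea (the worker function and its assertion are illustrative placeholders, not distutils API):

    import sys
    import threading

    class CheckThread(threading.Thread):
        # Record any exception from the target instead of letting it vanish.
        exc_info = None

        def run(self):
            try:
                super().run()
            except Exception:
                self.exc_info = sys.exc_info()

        def __bool__(self):
            return not self.exc_info

    def worker(n):
        assert n % 2 == 0, "odd input"   # stand-in for compiler.spawn(...)

    threads = [CheckThread(target=worker, args=[n]) for n in range(0, 10, 2)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    assert all(threads), "a worker thread raised"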
diff --git a/setuptools/_distutils/tests/test_register.py b/setuptools/_distutils/tests/test_register.py
new file mode 100644
index 0000000..5770ed5
--- /dev/null
+++ b/setuptools/_distutils/tests/test_register.py
@@ -0,0 +1,325 @@
+"""Tests for distutils.command.register."""
+import os
+import unittest
+import getpass
+import urllib
+import warnings
+
+from test.support import run_unittest
+
+from .py38compat import check_warnings
+
+from distutils.command import register as register_module
+from distutils.command.register import register
+from distutils.errors import DistutilsSetupError
+from distutils.log import INFO
+
+from distutils.tests.test_config import BasePyPIRCCommandTestCase
+
+try:
+ import docutils
+except ImportError:
+ docutils = None
+
+PYPIRC_NOPASSWORD = """\
+[distutils]
+
+index-servers =
+ server1
+
+[server1]
+username:me
+"""
+
+WANTED_PYPIRC = """\
+[distutils]
+index-servers =
+ pypi
+
+[pypi]
+username:tarek
+password:password
+"""
+
+class Inputs(object):
+ """Fakes user inputs."""
+ def __init__(self, *answers):
+ self.answers = answers
+ self.index = 0
+
+ def __call__(self, prompt=''):
+ try:
+ return self.answers[self.index]
+ finally:
+ self.index += 1
+
+class FakeOpener(object):
+ """Fakes a PyPI server"""
+ def __init__(self):
+ self.reqs = []
+
+ def __call__(self, *args):
+ return self
+
+ def open(self, req, data=None, timeout=None):
+ self.reqs.append(req)
+ return self
+
+ def read(self):
+ return b'xxx'
+
+ def getheader(self, name, default=None):
+ return {
+ 'content-type': 'text/plain; charset=utf-8',
+ }.get(name.lower(), default)
+
+
+class RegisterTestCase(BasePyPIRCCommandTestCase):
+
+ def setUp(self):
+ super(RegisterTestCase, self).setUp()
+ # patching the password prompt
+ self._old_getpass = getpass.getpass
+ def _getpass(prompt):
+ return 'password'
+ getpass.getpass = _getpass
+ urllib.request._opener = None
+ self.old_opener = urllib.request.build_opener
+ self.conn = urllib.request.build_opener = FakeOpener()
+
+ def tearDown(self):
+ getpass.getpass = self._old_getpass
+ urllib.request._opener = None
+ urllib.request.build_opener = self.old_opener
+ super(RegisterTestCase, self).tearDown()
+
+ def _get_cmd(self, metadata=None):
+ if metadata is None:
+ metadata = {'url': 'xxx', 'author': 'xxx',
+ 'author_email': 'xxx',
+ 'name': 'xxx', 'version': 'xxx'}
+ pkg_info, dist = self.create_dist(**metadata)
+ return register(dist)
+
+ def test_create_pypirc(self):
+ # this test makes sure a .pypirc file
+ # is created when requested.
+
+ # let's create a register instance
+ cmd = self._get_cmd()
+
+ # we shouldn't have a .pypirc file yet
+ self.assertFalse(os.path.exists(self.rc))
+
+ # patching input and getpass.getpass
+ # so register gets happy
+ #
+ # Here's what we are faking:
+ # use your existing login (choice 1.)
+ # Username : 'tarek'
+ # Password : 'password'
+ # Save your login (y/N)? : 'y'
+ inputs = Inputs('1', 'tarek', 'y')
+ register_module.input = inputs.__call__
+ # let's run the command
+ try:
+ cmd.run()
+ finally:
+ del register_module.input
+
+ # we should have a brand new .pypirc file
+ self.assertTrue(os.path.exists(self.rc))
+
+ # with content similar to WANTED_PYPIRC
+ f = open(self.rc)
+ try:
+ content = f.read()
+ self.assertEqual(content, WANTED_PYPIRC)
+ finally:
+ f.close()
+
+ # now let's make sure the .pypirc file generated
+ # really works: we shouldn't be asked anything
+ # if we run the command again
+ def _no_way(prompt=''):
+ raise AssertionError(prompt)
+ register_module.input = _no_way
+
+ cmd.show_response = 1
+ cmd.run()
+
+ # let's see what the server received: we should
+ # have 2 similar requests
+ self.assertEqual(len(self.conn.reqs), 2)
+ req1 = dict(self.conn.reqs[0].headers)
+ req2 = dict(self.conn.reqs[1].headers)
+
+ self.assertEqual(req1['Content-length'], '1374')
+ self.assertEqual(req2['Content-length'], '1374')
+ self.assertIn(b'xxx', self.conn.reqs[1].data)
+
+ def test_password_not_in_file(self):
+
+ self.write_file(self.rc, PYPIRC_NOPASSWORD)
+ cmd = self._get_cmd()
+ cmd._set_config()
+ cmd.finalize_options()
+ cmd.send_metadata()
+
+ # dist.password should be set
+ # therefore used afterwards by other commands
+ self.assertEqual(cmd.distribution.password, 'password')
+
+ def test_registering(self):
+ # this test runs choice 2
+ cmd = self._get_cmd()
+ inputs = Inputs('2', 'tarek', 'tarek@ziade.org')
+ register_module.input = inputs.__call__
+ try:
+ # let's run the command
+ cmd.run()
+ finally:
+ del register_module.input
+
+ # we should have sent a request
+ self.assertEqual(len(self.conn.reqs), 1)
+ req = self.conn.reqs[0]
+ headers = dict(req.headers)
+ self.assertEqual(headers['Content-length'], '608')
+ self.assertIn(b'tarek', req.data)
+
+ def test_password_reset(self):
+ # this test runs choice 3
+ cmd = self._get_cmd()
+ inputs = Inputs('3', 'tarek@ziade.org')
+ register_module.input = inputs.__call__
+ try:
+ # let's run the command
+ cmd.run()
+ finally:
+ del register_module.input
+
+ # we should have sent a request
+ self.assertEqual(len(self.conn.reqs), 1)
+ req = self.conn.reqs[0]
+ headers = dict(req.headers)
+ self.assertEqual(headers['Content-length'], '290')
+ self.assertIn(b'tarek', req.data)
+
+ @unittest.skipUnless(docutils is not None, 'needs docutils')
+ def test_strict(self):
+ # testing the strict option
+ # when on, the register command stops if
+ # the metadata is incomplete or if
+ # long_description is not reST compliant
+
+ # empty metadata
+ cmd = self._get_cmd({})
+ cmd.ensure_finalized()
+ cmd.strict = 1
+ self.assertRaises(DistutilsSetupError, cmd.run)
+
+ # metadata is OK but long_description is broken
+ metadata = {'url': 'xxx', 'author': 'xxx',
+ 'author_email': 'éxéxé',
+ 'name': 'xxx', 'version': 'xxx',
+ 'long_description': 'title\n==\n\ntext'}
+
+ cmd = self._get_cmd(metadata)
+ cmd.ensure_finalized()
+ cmd.strict = 1
+ self.assertRaises(DistutilsSetupError, cmd.run)
+
+ # now something that works
+ metadata['long_description'] = 'title\n=====\n\ntext'
+ cmd = self._get_cmd(metadata)
+ cmd.ensure_finalized()
+ cmd.strict = 1
+ inputs = Inputs('1', 'tarek', 'y')
+ register_module.input = inputs.__call__
+ # let's run the command
+ try:
+ cmd.run()
+ finally:
+ del register_module.input
+
+ # strict is off by default
+ cmd = self._get_cmd()
+ cmd.ensure_finalized()
+ inputs = Inputs('1', 'tarek', 'y')
+ register_module.input = inputs.__call__
+ # let's run the command
+ try:
+ cmd.run()
+ finally:
+ del register_module.input
+
+ # and finally a Unicode test (bug #12114)
+ metadata = {'url': 'xxx', 'author': '\u00c9ric',
+ 'author_email': 'xxx', 'name': 'xxx',
+ 'version': 'xxx',
+ 'description': 'Something about esszet \u00df',
+ 'long_description': 'More things about esszet \u00df'}
+
+ cmd = self._get_cmd(metadata)
+ cmd.ensure_finalized()
+ cmd.strict = 1
+ inputs = Inputs('1', 'tarek', 'y')
+ register_module.input = inputs.__call__
+ # let's run the command
+ try:
+ cmd.run()
+ finally:
+ del register_module.input
+
+ @unittest.skipUnless(docutils is not None, 'needs docutils')
+ def test_register_invalid_long_description(self):
+ description = ':funkie:`str`' # mimic Sphinx-specific markup
+ metadata = {'url': 'xxx', 'author': 'xxx',
+ 'author_email': 'xxx',
+ 'name': 'xxx', 'version': 'xxx',
+ 'long_description': description}
+ cmd = self._get_cmd(metadata)
+ cmd.ensure_finalized()
+ cmd.strict = True
+ inputs = Inputs('2', 'tarek', 'tarek@ziade.org')
+ register_module.input = inputs
+ self.addCleanup(delattr, register_module, 'input')
+
+ self.assertRaises(DistutilsSetupError, cmd.run)
+
+ def test_check_metadata_deprecated(self):
+ # makes sure check_metadata is deprecated
+ cmd = self._get_cmd()
+ with check_warnings() as w:
+ warnings.simplefilter("always")
+ cmd.check_metadata()
+ self.assertEqual(len(w.warnings), 1)
+
+ def test_list_classifiers(self):
+ cmd = self._get_cmd()
+ cmd.list_classifiers = 1
+ cmd.run()
+ results = self.get_logs(INFO)
+ self.assertEqual(results, ['running check', 'xxx'])
+
+ def test_show_response(self):
+ # test that the --show-response option returns a well-formatted response
+ cmd = self._get_cmd()
+ inputs = Inputs('1', 'tarek', 'y')
+ register_module.input = inputs.__call__
+ cmd.show_response = 1
+ try:
+ cmd.run()
+ finally:
+ del register_module.input
+
+ results = self.get_logs(INFO)
+ self.assertEqual(results[3], 75 * '-' + '\nxxx\n' + 75 * '-')
+
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(RegisterTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
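The Inputs helper above shows how these tests fake interactive prompts: register_module.input is replaced by an Inputs instance that hands back canned answers in order. A minimal sketch of the same idea, where register_interactively is a hypothetical stand-in rather than distutils code:

    class Inputs:
        def __init__(self, *answers):
            self.answers = list(answers)

        def __call__(self, prompt=''):
            return self.answers.pop(0)   # next canned answer

    def register_interactively(ask=input):
        choice = ask('1, 2 or 3? ')
        username = ask('Username: ')
        return choice, username

    inputs = Inputs('1', 'tarek')
    assert register_interactively(ask=inputs) == ('1', 'tarek')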
diff --git a/setuptools/_distutils/tests/test_sdist.py b/setuptools/_distutils/tests/test_sdist.py
new file mode 100644
index 0000000..4c51717
--- /dev/null
+++ b/setuptools/_distutils/tests/test_sdist.py
@@ -0,0 +1,489 @@
+"""Tests for distutils.command.sdist."""
+import os
+import tarfile
+import unittest
+import warnings
+import zipfile
+from os.path import join
+from textwrap import dedent
+from test.support import captured_stdout, run_unittest
+from .unix_compat import require_unix_id, require_uid_0, pwd, grp
+
+from .py38compat import check_warnings
+
+try:
+ import zlib
+ ZLIB_SUPPORT = True
+except ImportError:
+ ZLIB_SUPPORT = False
+
+from distutils.command.sdist import sdist, show_formats
+from distutils.core import Distribution
+from distutils.tests.test_config import BasePyPIRCCommandTestCase
+from distutils.errors import DistutilsOptionError
+from distutils.spawn import find_executable
+from distutils.log import WARN
+from distutils.filelist import FileList
+from distutils.archive_util import ARCHIVE_FORMATS
+
+SETUP_PY = """
+from distutils.core import setup
+import somecode
+
+setup(name='fake')
+"""
+
+MANIFEST = """\
+# file GENERATED by distutils, do NOT edit
+README
+buildout.cfg
+inroot.txt
+setup.py
+data%(sep)sdata.dt
+scripts%(sep)sscript.py
+some%(sep)sfile.txt
+some%(sep)sother_file.txt
+somecode%(sep)s__init__.py
+somecode%(sep)sdoc.dat
+somecode%(sep)sdoc.txt
+"""
+
+class SDistTestCase(BasePyPIRCCommandTestCase):
+
+ def setUp(self):
+ # PyPIRCCommandTestCase creates a temp dir already
+ # and puts it in self.tmp_dir
+ super(SDistTestCase, self).setUp()
+ # setting up an environment
+ self.old_path = os.getcwd()
+ os.mkdir(join(self.tmp_dir, 'somecode'))
+ os.mkdir(join(self.tmp_dir, 'dist'))
+ # a package, and a README
+ self.write_file((self.tmp_dir, 'README'), 'xxx')
+ self.write_file((self.tmp_dir, 'somecode', '__init__.py'), '#')
+ self.write_file((self.tmp_dir, 'setup.py'), SETUP_PY)
+ os.chdir(self.tmp_dir)
+
+ def tearDown(self):
+ # back to normal
+ os.chdir(self.old_path)
+ super(SDistTestCase, self).tearDown()
+
+ def get_cmd(self, metadata=None):
+ """Returns a cmd"""
+ if metadata is None:
+ metadata = {'name': 'fake', 'version': '1.0',
+ 'url': 'xxx', 'author': 'xxx',
+ 'author_email': 'xxx'}
+ dist = Distribution(metadata)
+ dist.script_name = 'setup.py'
+ dist.packages = ['somecode']
+ dist.include_package_data = True
+ cmd = sdist(dist)
+ cmd.dist_dir = 'dist'
+ return dist, cmd
+
+ @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
+ def test_prune_file_list(self):
+ # this test creates a project with some VCS dirs and an NFS rename
+ # file, then launches sdist to check they get pruned on all systems
+
+ # creating VCS directories with some files in them
+ os.mkdir(join(self.tmp_dir, 'somecode', '.svn'))
+ self.write_file((self.tmp_dir, 'somecode', '.svn', 'ok.py'), 'xxx')
+
+ os.mkdir(join(self.tmp_dir, 'somecode', '.hg'))
+ self.write_file((self.tmp_dir, 'somecode', '.hg',
+ 'ok'), 'xxx')
+
+ os.mkdir(join(self.tmp_dir, 'somecode', '.git'))
+ self.write_file((self.tmp_dir, 'somecode', '.git',
+ 'ok'), 'xxx')
+
+ self.write_file((self.tmp_dir, 'somecode', '.nfs0001'), 'xxx')
+
+ # now building a sdist
+ dist, cmd = self.get_cmd()
+
+ # zip is available universally
+ # (tar might not be installed under win32)
+ cmd.formats = ['zip']
+
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # now let's check what we have
+ dist_folder = join(self.tmp_dir, 'dist')
+ files = os.listdir(dist_folder)
+ self.assertEqual(files, ['fake-1.0.zip'])
+
+ zip_file = zipfile.ZipFile(join(dist_folder, 'fake-1.0.zip'))
+ try:
+ content = zip_file.namelist()
+ finally:
+ zip_file.close()
+
+ # making sure everything has been pruned correctly
+ expected = ['', 'PKG-INFO', 'README', 'setup.py',
+ 'somecode/', 'somecode/__init__.py']
+ self.assertEqual(sorted(content), ['fake-1.0/' + x for x in expected])
+
+ @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
+ @unittest.skipIf(find_executable('tar') is None,
+ "The tar command is not found")
+ @unittest.skipIf(find_executable('gzip') is None,
+ "The gzip command is not found")
+ def test_make_distribution(self):
+ # now building a sdist
+ dist, cmd = self.get_cmd()
+
+ # creating a gztar then a tar
+ cmd.formats = ['gztar', 'tar']
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # making sure we have two files
+ dist_folder = join(self.tmp_dir, 'dist')
+ result = os.listdir(dist_folder)
+ result.sort()
+ self.assertEqual(result, ['fake-1.0.tar', 'fake-1.0.tar.gz'])
+
+ os.remove(join(dist_folder, 'fake-1.0.tar'))
+ os.remove(join(dist_folder, 'fake-1.0.tar.gz'))
+
+ # now trying a tar then a gztar
+ cmd.formats = ['tar', 'gztar']
+
+ cmd.ensure_finalized()
+ cmd.run()
+
+ result = os.listdir(dist_folder)
+ result.sort()
+ self.assertEqual(result, ['fake-1.0.tar', 'fake-1.0.tar.gz'])
+
+ @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
+ def test_add_defaults(self):
+
+ # http://bugs.python.org/issue2279
+
+ # add_default should also include
+ # data_files and package_data
+ dist, cmd = self.get_cmd()
+
+ # filling data_files by pointing to files
+ # in package_data
+ dist.package_data = {'': ['*.cfg', '*.dat'],
+ 'somecode': ['*.txt']}
+ self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#')
+ self.write_file((self.tmp_dir, 'somecode', 'doc.dat'), '#')
+
+ # adding some data in data_files
+ data_dir = join(self.tmp_dir, 'data')
+ os.mkdir(data_dir)
+ self.write_file((data_dir, 'data.dt'), '#')
+ some_dir = join(self.tmp_dir, 'some')
+ os.mkdir(some_dir)
+ # make sure VCS directories are pruned (#14004)
+ hg_dir = join(self.tmp_dir, '.hg')
+ os.mkdir(hg_dir)
+ self.write_file((hg_dir, 'last-message.txt'), '#')
+ # a buggy regex used to prevent this from working on Windows (#6884)
+ self.write_file((self.tmp_dir, 'buildout.cfg'), '#')
+ self.write_file((self.tmp_dir, 'inroot.txt'), '#')
+ self.write_file((some_dir, 'file.txt'), '#')
+ self.write_file((some_dir, 'other_file.txt'), '#')
+
+ dist.data_files = [('data', ['data/data.dt',
+ 'buildout.cfg',
+ 'inroot.txt',
+ 'notexisting']),
+ 'some/file.txt',
+ 'some/other_file.txt']
+
+ # adding a script
+ script_dir = join(self.tmp_dir, 'scripts')
+ os.mkdir(script_dir)
+ self.write_file((script_dir, 'script.py'), '#')
+ dist.scripts = [join('scripts', 'script.py')]
+
+ cmd.formats = ['zip']
+ cmd.use_defaults = True
+
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # now let's check what we have
+ dist_folder = join(self.tmp_dir, 'dist')
+ files = os.listdir(dist_folder)
+ self.assertEqual(files, ['fake-1.0.zip'])
+
+ zip_file = zipfile.ZipFile(join(dist_folder, 'fake-1.0.zip'))
+ try:
+ content = zip_file.namelist()
+ finally:
+ zip_file.close()
+
+ # making sure everything was added
+ expected = ['', 'PKG-INFO', 'README', 'buildout.cfg',
+ 'data/', 'data/data.dt', 'inroot.txt',
+ 'scripts/', 'scripts/script.py', 'setup.py',
+ 'some/', 'some/file.txt', 'some/other_file.txt',
+ 'somecode/', 'somecode/__init__.py', 'somecode/doc.dat',
+ 'somecode/doc.txt']
+ self.assertEqual(sorted(content), ['fake-1.0/' + x for x in expected])
+
+ # checking the MANIFEST
+ f = open(join(self.tmp_dir, 'MANIFEST'))
+ try:
+ manifest = f.read()
+ finally:
+ f.close()
+ self.assertEqual(manifest, MANIFEST % {'sep': os.sep})
+
+ @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
+ def test_metadata_check_option(self):
+ # testing the `metadata-check` option
+ dist, cmd = self.get_cmd(metadata={})
+
+ # this should raise some warnings!
+ # with the `check` subcommand
+ cmd.ensure_finalized()
+ cmd.run()
+ warnings = [msg for msg in self.get_logs(WARN) if
+ msg.startswith('warning: check:')]
+ self.assertEqual(len(warnings), 2)
+
+ # trying with a complete set of metadata
+ self.clear_logs()
+ dist, cmd = self.get_cmd()
+ cmd.ensure_finalized()
+ cmd.metadata_check = 0
+ cmd.run()
+ warnings = [msg for msg in self.get_logs(WARN) if
+ msg.startswith('warning: check:')]
+ self.assertEqual(len(warnings), 0)
+
+ def test_check_metadata_deprecated(self):
+ # makes sure check_metadata is deprecated
+ dist, cmd = self.get_cmd()
+ with check_warnings() as w:
+ warnings.simplefilter("always")
+ cmd.check_metadata()
+ self.assertEqual(len(w.warnings), 1)
+
+ def test_show_formats(self):
+ with captured_stdout() as stdout:
+ show_formats()
+
+ # the output should be a header line + one line per format
+ num_formats = len(ARCHIVE_FORMATS.keys())
+ output = [line for line in stdout.getvalue().split('\n')
+ if line.strip().startswith('--formats=')]
+ self.assertEqual(len(output), num_formats)
+
+ def test_finalize_options(self):
+ dist, cmd = self.get_cmd()
+ cmd.finalize_options()
+
+ # default options set by finalize
+ self.assertEqual(cmd.manifest, 'MANIFEST')
+ self.assertEqual(cmd.template, 'MANIFEST.in')
+ self.assertEqual(cmd.dist_dir, 'dist')
+
+ # formats has to be a string splittable on (' ', ',') or
+ # a list of strings
+ cmd.formats = 1
+ self.assertRaises(DistutilsOptionError, cmd.finalize_options)
+ cmd.formats = ['zip']
+ cmd.finalize_options()
+
+ # formats has to be known
+ cmd.formats = 'supazipa'
+ self.assertRaises(DistutilsOptionError, cmd.finalize_options)
+
+ # the following tests make sure there is a nice error message instead
+ # of a traceback when parsing an invalid manifest template
+
+ def _check_template(self, content):
+ dist, cmd = self.get_cmd()
+ os.chdir(self.tmp_dir)
+ self.write_file('MANIFEST.in', content)
+ cmd.ensure_finalized()
+ cmd.filelist = FileList()
+ cmd.read_template()
+ warnings = self.get_logs(WARN)
+ self.assertEqual(len(warnings), 1)
+
+ def test_invalid_template_unknown_command(self):
+ self._check_template('taunt knights *')
+
+ def test_invalid_template_wrong_arguments(self):
+ # this manifest command takes one argument
+ self._check_template('prune')
+
+ @unittest.skipIf(os.name != 'nt', 'test relevant for Windows only')
+ def test_invalid_template_wrong_path(self):
+ # on Windows, trailing slashes are not allowed
+ # this used to crash instead of raising a warning: #8286
+ self._check_template('include examples/')
+
+ @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
+ def test_get_file_list(self):
+ # make sure MANIFEST is recalculated
+ dist, cmd = self.get_cmd()
+
+ # filling data_files by pointing to files in package_data
+ dist.package_data = {'somecode': ['*.txt']}
+ self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#')
+ cmd.formats = ['gztar']
+ cmd.ensure_finalized()
+ cmd.run()
+
+ f = open(cmd.manifest)
+ try:
+ manifest = [line.strip() for line in f.read().split('\n')
+ if line.strip() != '']
+ finally:
+ f.close()
+
+ self.assertEqual(len(manifest), 5)
+
+ # adding a file
+ self.write_file((self.tmp_dir, 'somecode', 'doc2.txt'), '#')
+
+ # make sure build_py is reinitialized, like a fresh run
+ build_py = dist.get_command_obj('build_py')
+ build_py.finalized = False
+ build_py.ensure_finalized()
+
+ cmd.run()
+
+ f = open(cmd.manifest)
+ try:
+ manifest2 = [line.strip() for line in f.read().split('\n')
+ if line.strip() != '']
+ finally:
+ f.close()
+
+ # do we have the new file in MANIFEST?
+ self.assertEqual(len(manifest2), 6)
+ self.assertIn('doc2.txt', manifest2[-1])
+
+ @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
+ def test_manifest_marker(self):
+ # check that autogenerated MANIFESTs have a marker
+ dist, cmd = self.get_cmd()
+ cmd.ensure_finalized()
+ cmd.run()
+
+ f = open(cmd.manifest)
+ try:
+ manifest = [line.strip() for line in f.read().split('\n')
+ if line.strip() != '']
+ finally:
+ f.close()
+
+ self.assertEqual(manifest[0],
+ '# file GENERATED by distutils, do NOT edit')
+
+ @unittest.skipUnless(ZLIB_SUPPORT, "Need zlib support to run")
+ def test_manifest_comments(self):
+ # make sure comments don't cause exceptions or wrong includes
+ contents = dedent("""\
+ # bad.py
+ #bad.py
+ good.py
+ """)
+ dist, cmd = self.get_cmd()
+ cmd.ensure_finalized()
+ self.write_file((self.tmp_dir, cmd.manifest), contents)
+ self.write_file((self.tmp_dir, 'good.py'), '# pick me!')
+ self.write_file((self.tmp_dir, 'bad.py'), "# don't pick me!")
+ self.write_file((self.tmp_dir, '#bad.py'), "# don't pick me!")
+ cmd.run()
+ self.assertEqual(cmd.filelist.files, ['good.py'])
+
+ @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run')
+ def test_manual_manifest(self):
+ # check that a MANIFEST without a marker is left alone
+ dist, cmd = self.get_cmd()
+ cmd.formats = ['gztar']
+ cmd.ensure_finalized()
+ self.write_file((self.tmp_dir, cmd.manifest), 'README.manual')
+ self.write_file((self.tmp_dir, 'README.manual'),
+ 'This project maintains its MANIFEST file itself.')
+ cmd.run()
+ self.assertEqual(cmd.filelist.files, ['README.manual'])
+
+ f = open(cmd.manifest)
+ try:
+ manifest = [line.strip() for line in f.read().split('\n')
+ if line.strip() != '']
+ finally:
+ f.close()
+
+ self.assertEqual(manifest, ['README.manual'])
+
+ archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz')
+ archive = tarfile.open(archive_name)
+ try:
+ filenames = [tarinfo.name for tarinfo in archive]
+ finally:
+ archive.close()
+ self.assertEqual(sorted(filenames), ['fake-1.0', 'fake-1.0/PKG-INFO',
+ 'fake-1.0/README.manual'])
+
+ @unittest.skipUnless(ZLIB_SUPPORT, "requires zlib")
+ @require_unix_id
+ @require_uid_0
+ @unittest.skipIf(find_executable('tar') is None,
+ "The tar command is not found")
+ @unittest.skipIf(find_executable('gzip') is None,
+ "The gzip command is not found")
+ def test_make_distribution_owner_group(self):
+ # now building a sdist
+ dist, cmd = self.get_cmd()
+
+ # creating a gztar and specifying the owner+group
+ cmd.formats = ['gztar']
+ cmd.owner = pwd.getpwuid(0)[0]
+ cmd.group = grp.getgrgid(0)[0]
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # making sure the archive has the right ownership
+ archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz')
+ archive = tarfile.open(archive_name)
+ try:
+ for member in archive.getmembers():
+ self.assertEqual(member.uid, 0)
+ self.assertEqual(member.gid, 0)
+ finally:
+ archive.close()
+
+ # building a sdist again
+ dist, cmd = self.get_cmd()
+
+ # creating a gztar
+ cmd.formats = ['gztar']
+ cmd.ensure_finalized()
+ cmd.run()
+
+ # making sure the archive has the right ownership
+ archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz')
+ archive = tarfile.open(archive_name)
+
+ # note that we are not testing the group ownership here
+ # because it depends on the platform and the container
+ # rights (see #7408)
+ try:
+ for member in archive.getmembers():
+ self.assertEqual(member.uid, os.getuid())
+ finally:
+ archive.close()
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(SDistTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
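The sdist tests drive the command object directly rather than going through setup.py. A minimal sketch of that pattern, assuming a project laid out like the one setUp() above creates (setup.py, README, a 'somecode' package, a 'dist' directory) and the current directory set to it:

    from distutils.core import Distribution
    from distutils.command.sdist import sdist

    dist = Distribution({'name': 'fake', 'version': '1.0',
                         'url': 'xxx', 'author': 'xxx', 'author_email': 'xxx'})
    dist.script_name = 'setup.py'
    dist.packages = ['somecode']

    cmd = sdist(dist)
    cmd.formats = ['zip']   # zip is available everywhere; tar may be missing on Windows
    cmd.dist_dir = 'dist'
    cmd.ensure_finalized()
    cmd.run()               # writes dist/fake-1.0.zip plus a generated MANIFEST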
diff --git a/setuptools/_distutils/tests/test_spawn.py b/setuptools/_distutils/tests/test_spawn.py
new file mode 100644
index 0000000..c5ed8e2
--- /dev/null
+++ b/setuptools/_distutils/tests/test_spawn.py
@@ -0,0 +1,139 @@
+"""Tests for distutils.spawn."""
+import os
+import stat
+import sys
+import unittest.mock
+from test.support import run_unittest
+
+from .py35compat import unix_shell
+from . import py38compat as os_helper
+
+from distutils.spawn import find_executable
+from distutils.spawn import spawn
+from distutils.errors import DistutilsExecError
+from distutils.tests import support
+
+class SpawnTestCase(support.TempdirManager,
+ support.LoggingSilencer,
+ unittest.TestCase):
+
+ @unittest.skipUnless(os.name in ('nt', 'posix'),
+ 'Runs only under posix or nt')
+ def test_spawn(self):
+ tmpdir = self.mkdtemp()
+
+ # creating something executable
+ # through the shell that returns 1
+ if sys.platform != 'win32':
+ exe = os.path.join(tmpdir, 'foo.sh')
+ self.write_file(exe, '#!%s\nexit 1' % unix_shell)
+ else:
+ exe = os.path.join(tmpdir, 'foo.bat')
+ self.write_file(exe, 'exit 1')
+
+ os.chmod(exe, 0o777)
+ self.assertRaises(DistutilsExecError, spawn, [exe])
+
+ # now something that works
+ if sys.platform != 'win32':
+ exe = os.path.join(tmpdir, 'foo.sh')
+ self.write_file(exe, '#!%s\nexit 0' % unix_shell)
+ else:
+ exe = os.path.join(tmpdir, 'foo.bat')
+ self.write_file(exe, 'exit 0')
+
+ os.chmod(exe, 0o777)
+ spawn([exe]) # should work without any error
+
+ def test_find_executable(self):
+ with os_helper.temp_dir() as tmp_dir:
+ # use TESTFN to get a pseudo-unique filename
+ program_noeext = os_helper.TESTFN
+ # Give the temporary program an ".exe" suffix for all.
+ # It's needed on Windows and not harmful on other platforms.
+ program = program_noeext + ".exe"
+
+ filename = os.path.join(tmp_dir, program)
+ with open(filename, "wb"):
+ pass
+ os.chmod(filename, stat.S_IXUSR)
+
+ # test path parameter
+ rv = find_executable(program, path=tmp_dir)
+ self.assertEqual(rv, filename)
+
+ if sys.platform == 'win32':
+ # test without ".exe" extension
+ rv = find_executable(program_noeext, path=tmp_dir)
+ self.assertEqual(rv, filename)
+
+ # test find in the current directory
+ with os_helper.change_cwd(tmp_dir):
+ rv = find_executable(program)
+ self.assertEqual(rv, program)
+
+ # test non-existent program
+ dont_exist_program = "dontexist_" + program
+ rv = find_executable(dont_exist_program, path=tmp_dir)
+ self.assertIsNone(rv)
+
+ # PATH='': no match, except in the current directory
+ with os_helper.EnvironmentVarGuard() as env:
+ env['PATH'] = ''
+ with unittest.mock.patch('distutils.spawn.os.confstr',
+ return_value=tmp_dir, create=True), \
+ unittest.mock.patch('distutils.spawn.os.defpath',
+ tmp_dir):
+ rv = find_executable(program)
+ self.assertIsNone(rv)
+
+ # look in current directory
+ with os_helper.change_cwd(tmp_dir):
+ rv = find_executable(program)
+ self.assertEqual(rv, program)
+
+ # PATH=':': explicitly looks in the current directory
+ with os_helper.EnvironmentVarGuard() as env:
+ env['PATH'] = os.pathsep
+ with unittest.mock.patch('distutils.spawn.os.confstr',
+ return_value='', create=True), \
+ unittest.mock.patch('distutils.spawn.os.defpath', ''):
+ rv = find_executable(program)
+ self.assertIsNone(rv)
+
+ # look in current directory
+ with os_helper.change_cwd(tmp_dir):
+ rv = find_executable(program)
+ self.assertEqual(rv, program)
+
+ # missing PATH: test os.confstr("CS_PATH") and os.defpath
+ with os_helper.EnvironmentVarGuard() as env:
+ env.pop('PATH', None)
+
+ # without confstr
+ with unittest.mock.patch('distutils.spawn.os.confstr',
+ side_effect=ValueError,
+ create=True), \
+ unittest.mock.patch('distutils.spawn.os.defpath',
+ tmp_dir):
+ rv = find_executable(program)
+ self.assertEqual(rv, filename)
+
+ # with confstr
+ with unittest.mock.patch('distutils.spawn.os.confstr',
+ return_value=tmp_dir, create=True), \
+ unittest.mock.patch('distutils.spawn.os.defpath', ''):
+ rv = find_executable(program)
+ self.assertEqual(rv, filename)
+
+ def test_spawn_missing_exe(self):
+ with self.assertRaises(DistutilsExecError) as ctx:
+ spawn(['does-not-exist'])
+ self.assertIn("command 'does-not-exist' failed", str(ctx.exception))
+
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(SpawnTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
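For reference, the two helpers under test have a small surface. A usage sketch (the 'tar' lookup is only an example and may return None on a given machine):

    import sys
    from distutils.spawn import find_executable, spawn
    from distutils.errors import DistutilsExecError

    tar = find_executable('tar')                    # absolute path, or None if not on PATH
    spawn([sys.executable, '-c', 'print("ok")'])    # succeeds silently on exit code 0
    try:
        spawn(['does-not-exist'])
    except DistutilsExecError:
        pass                                        # raised for missing or failing commands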
diff --git a/setuptools/_distutils/tests/test_sysconfig.py b/setuptools/_distutils/tests/test_sysconfig.py
new file mode 100644
index 0000000..9de3cb7
--- /dev/null
+++ b/setuptools/_distutils/tests/test_sysconfig.py
@@ -0,0 +1,309 @@
+"""Tests for distutils.sysconfig."""
+import contextlib
+import os
+import shutil
+import subprocess
+import sys
+import textwrap
+import unittest
+
+from distutils import sysconfig
+from distutils.ccompiler import get_default_compiler
+from distutils.unixccompiler import UnixCCompiler
+from distutils.tests import support
+from test.support import run_unittest, swap_item
+
+from .py38compat import TESTFN
+from .py38compat import check_warnings
+
+
+class SysconfigTestCase(support.EnvironGuard, unittest.TestCase):
+ def setUp(self):
+ super(SysconfigTestCase, self).setUp()
+ self.makefile = None
+
+ def tearDown(self):
+ if self.makefile is not None:
+ os.unlink(self.makefile)
+ self.cleanup_testfn()
+ super(SysconfigTestCase, self).tearDown()
+
+ def cleanup_testfn(self):
+ if os.path.isfile(TESTFN):
+ os.remove(TESTFN)
+ elif os.path.isdir(TESTFN):
+ shutil.rmtree(TESTFN)
+
+ def test_get_config_h_filename(self):
+ config_h = sysconfig.get_config_h_filename()
+ self.assertTrue(os.path.isfile(config_h), config_h)
+
+ @unittest.skipIf(sys.platform == 'win32',
+ 'Makefile only exists on Unix like systems')
+ def test_get_makefile_filename(self):
+ makefile = sysconfig.get_makefile_filename()
+ self.assertTrue(os.path.isfile(makefile), makefile)
+
+ def test_get_python_lib(self):
+ # XXX doesn't work on Linux when Python was never installed before
+ #self.assertTrue(os.path.isdir(lib_dir), lib_dir)
+ # test for pythonxx.lib?
+ self.assertNotEqual(sysconfig.get_python_lib(),
+ sysconfig.get_python_lib(prefix=TESTFN))
+
+ def test_get_config_vars(self):
+ cvars = sysconfig.get_config_vars()
+ self.assertIsInstance(cvars, dict)
+ self.assertTrue(cvars)
+
+ @unittest.skip('sysconfig.IS_PYPY')
+ def test_srcdir(self):
+ # See Issues #15322, #15364.
+ srcdir = sysconfig.get_config_var('srcdir')
+
+ self.assertTrue(os.path.isabs(srcdir), srcdir)
+ self.assertTrue(os.path.isdir(srcdir), srcdir)
+
+ if sysconfig.python_build:
+ # The python executable has not been installed so srcdir
+ # should be a full source checkout.
+ Python_h = os.path.join(srcdir, 'Include', 'Python.h')
+ self.assertTrue(os.path.exists(Python_h), Python_h)
+ self.assertTrue(sysconfig._is_python_source_dir(srcdir))
+ elif os.name == 'posix':
+ self.assertEqual(
+ os.path.dirname(sysconfig.get_makefile_filename()),
+ srcdir)
+
+ def test_srcdir_independent_of_cwd(self):
+ # srcdir should be independent of the current working directory
+ # See Issues #15322, #15364.
+ srcdir = sysconfig.get_config_var('srcdir')
+ cwd = os.getcwd()
+ try:
+ os.chdir('..')
+ srcdir2 = sysconfig.get_config_var('srcdir')
+ finally:
+ os.chdir(cwd)
+ self.assertEqual(srcdir, srcdir2)
+
+ def customize_compiler(self):
+ # make sure AR gets caught
+ class compiler:
+ compiler_type = 'unix'
+ executables = UnixCCompiler.executables
+
+ def __init__(self):
+ self.exes = {}
+
+ def set_executables(self, **kw):
+ for k, v in kw.items():
+ self.exes[k] = v
+
+ sysconfig_vars = {
+ 'AR': 'sc_ar',
+ 'CC': 'sc_cc',
+ 'CXX': 'sc_cxx',
+ 'ARFLAGS': '--sc-arflags',
+ 'CFLAGS': '--sc-cflags',
+ 'CCSHARED': '--sc-ccshared',
+ 'LDSHARED': 'sc_ldshared',
+ 'SHLIB_SUFFIX': 'sc_shutil_suffix',
+
+ # On macOS, disable _osx_support.customize_compiler()
+ 'CUSTOMIZED_OSX_COMPILER': 'True',
+ }
+
+ comp = compiler()
+ with contextlib.ExitStack() as cm:
+ for key, value in sysconfig_vars.items():
+ cm.enter_context(swap_item(sysconfig._config_vars, key, value))
+ sysconfig.customize_compiler(comp)
+
+ return comp
+
+ @unittest.skipUnless(get_default_compiler() == 'unix',
+ 'not testing if default compiler is not unix')
+ def test_customize_compiler(self):
+ # Make sure that sysconfig._config_vars is initialized
+ sysconfig.get_config_vars()
+
+ os.environ['AR'] = 'env_ar'
+ os.environ['CC'] = 'env_cc'
+ os.environ['CPP'] = 'env_cpp'
+ os.environ['CXX'] = 'env_cxx --env-cxx-flags'
+ os.environ['LDSHARED'] = 'env_ldshared'
+ os.environ['LDFLAGS'] = '--env-ldflags'
+ os.environ['ARFLAGS'] = '--env-arflags'
+ os.environ['CFLAGS'] = '--env-cflags'
+ os.environ['CPPFLAGS'] = '--env-cppflags'
+ os.environ['RANLIB'] = 'env_ranlib'
+
+ comp = self.customize_compiler()
+ self.assertEqual(comp.exes['archiver'],
+ 'env_ar --env-arflags')
+ self.assertEqual(comp.exes['preprocessor'],
+ 'env_cpp --env-cppflags')
+ self.assertEqual(comp.exes['compiler'],
+ 'env_cc --sc-cflags --env-cflags --env-cppflags')
+ self.assertEqual(comp.exes['compiler_so'],
+ ('env_cc --sc-cflags '
+ '--env-cflags ''--env-cppflags --sc-ccshared'))
+ self.assertEqual(comp.exes['compiler_cxx'],
+ 'env_cxx --env-cxx-flags')
+ self.assertEqual(comp.exes['linker_exe'],
+ 'env_cc')
+ self.assertEqual(comp.exes['linker_so'],
+ ('env_ldshared --env-ldflags --env-cflags'
+ ' --env-cppflags'))
+ self.assertEqual(comp.shared_lib_extension, 'sc_shutil_suffix')
+
+ if sys.platform == "darwin":
+ self.assertEqual(comp.exes['ranlib'],
+ 'env_ranlib')
+ else:
+ self.assertTrue('ranlib' not in comp.exes)
+
+ del os.environ['AR']
+ del os.environ['CC']
+ del os.environ['CPP']
+ del os.environ['CXX']
+ del os.environ['LDSHARED']
+ del os.environ['LDFLAGS']
+ del os.environ['ARFLAGS']
+ del os.environ['CFLAGS']
+ del os.environ['CPPFLAGS']
+ del os.environ['RANLIB']
+
+ comp = self.customize_compiler()
+ self.assertEqual(comp.exes['archiver'],
+ 'sc_ar --sc-arflags')
+ self.assertEqual(comp.exes['preprocessor'],
+ 'sc_cc -E')
+ self.assertEqual(comp.exes['compiler'],
+ 'sc_cc --sc-cflags')
+ self.assertEqual(comp.exes['compiler_so'],
+ 'sc_cc --sc-cflags --sc-ccshared')
+ self.assertEqual(comp.exes['compiler_cxx'],
+ 'sc_cxx')
+ self.assertEqual(comp.exes['linker_exe'],
+ 'sc_cc')
+ self.assertEqual(comp.exes['linker_so'],
+ 'sc_ldshared')
+ self.assertEqual(comp.shared_lib_extension, 'sc_shutil_suffix')
+ self.assertTrue('ranlib' not in comp.exes)
+
+ def test_parse_makefile_base(self):
+ self.makefile = TESTFN
+ fd = open(self.makefile, 'w')
+ try:
+ fd.write(r"CONFIG_ARGS= '--arg1=optarg1' 'ENV=LIB'" '\n')
+ fd.write('VAR=$OTHER\nOTHER=foo')
+ finally:
+ fd.close()
+ d = sysconfig.parse_makefile(self.makefile)
+ self.assertEqual(d, {'CONFIG_ARGS': "'--arg1=optarg1' 'ENV=LIB'",
+ 'OTHER': 'foo'})
+
+ def test_parse_makefile_literal_dollar(self):
+ self.makefile = TESTFN
+ fd = open(self.makefile, 'w')
+ try:
+ fd.write(r"CONFIG_ARGS= '--arg1=optarg1' 'ENV=\$$LIB'" '\n')
+ fd.write('VAR=$OTHER\nOTHER=foo')
+ finally:
+ fd.close()
+ d = sysconfig.parse_makefile(self.makefile)
+ self.assertEqual(d, {'CONFIG_ARGS': r"'--arg1=optarg1' 'ENV=\$LIB'",
+ 'OTHER': 'foo'})
+
+
+ def test_sysconfig_module(self):
+ import sysconfig as global_sysconfig
+ self.assertEqual(global_sysconfig.get_config_var('CFLAGS'),
+ sysconfig.get_config_var('CFLAGS'))
+ self.assertEqual(global_sysconfig.get_config_var('LDFLAGS'),
+ sysconfig.get_config_var('LDFLAGS'))
+
+ @unittest.skipIf(sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'),
+ 'compiler flags customized')
+ def test_sysconfig_compiler_vars(self):
+ # On OS X, binary installers support extension module building on
+ # various levels of the operating system with differing Xcode
+ # configurations. This requires customization of some of the
+ # compiler configuration directives to suit the environment on
+ # the installed machine. Some of these customizations may require
+ # running external programs and, so, are deferred until needed by
+ # the first extension module build. With Python 3.3, only
+ # the Distutils version of sysconfig is used for extension module
+ # builds, which happens earlier in the Distutils tests. This may
+ # cause the following tests to fail since no tests have caused
+ # the global version of sysconfig to call the customization yet.
+ # The solution for now is to simply skip this test in this case.
+ # The longer-term solution is to only have one version of sysconfig.
+
+ import sysconfig as global_sysconfig
+ if sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'):
+ self.skipTest('compiler flags customized')
+ self.assertEqual(global_sysconfig.get_config_var('LDSHARED'),
+ sysconfig.get_config_var('LDSHARED'))
+ self.assertEqual(global_sysconfig.get_config_var('CC'),
+ sysconfig.get_config_var('CC'))
+
+ @unittest.skipIf(sysconfig.get_config_var('EXT_SUFFIX') is None,
+ 'EXT_SUFFIX required for this test')
+ def test_SO_deprecation(self):
+ self.assertWarns(DeprecationWarning,
+ sysconfig.get_config_var, 'SO')
+
+ @unittest.skipIf(sysconfig.get_config_var('EXT_SUFFIX') is None,
+ 'EXT_SUFFIX required for this test')
+ def test_SO_value(self):
+ with check_warnings(('', DeprecationWarning)):
+ self.assertEqual(sysconfig.get_config_var('SO'),
+ sysconfig.get_config_var('EXT_SUFFIX'))
+
+ @unittest.skipIf(sysconfig.get_config_var('EXT_SUFFIX') is None,
+ 'EXT_SUFFIX required for this test')
+ def test_SO_in_vars(self):
+ vars = sysconfig.get_config_vars()
+ self.assertIsNotNone(vars['SO'])
+ self.assertEqual(vars['SO'], vars['EXT_SUFFIX'])
+
+ def test_customize_compiler_before_get_config_vars(self):
+ # Issue #21923: test that a Distribution compiler
+ # instance can be called without an explicit call to
+ # get_config_vars().
+ with open(TESTFN, 'w') as f:
+ f.writelines(textwrap.dedent('''\
+ from distutils.core import Distribution
+ config = Distribution().get_command_obj('config')
+ # try_compile may pass or it may fail if no compiler
+ # is found but it should not raise an exception.
+ rc = config.try_compile('int x;')
+ '''))
+ p = subprocess.Popen([str(sys.executable), TESTFN],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ universal_newlines=True)
+ outs, errs = p.communicate()
+ self.assertEqual(0, p.returncode, "Subprocess failed: " + outs)
+
+ def test_parse_config_h(self):
+ config_h = sysconfig.get_config_h_filename()
+ input = {}
+ with open(config_h, encoding="utf-8") as f:
+ result = sysconfig.parse_config_h(f, g=input)
+ self.assertTrue(input is result)
+ with open(config_h, encoding="utf-8") as f:
+ result = sysconfig.parse_config_h(f)
+ self.assertTrue(isinstance(result, dict))
+
+def test_suite():
+ suite = unittest.TestSuite()
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(SysconfigTestCase))
+ return suite
+
+
+if __name__ == '__main__':
+ run_unittest(test_suite())
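A brief usage sketch of the distutils.sysconfig helpers exercised above; the values differ per interpreter build, so none of the results hinted at in the comments are guaranteed:

    from distutils import sysconfig

    cflags = sysconfig.get_config_var('CFLAGS')     # a single variable, possibly None
    cvars = sysconfig.get_config_vars()             # the full dict, cached after first call
    config_h = sysconfig.get_config_h_filename()    # path to pyconfig.h
    stdlib = sysconfig.get_python_lib()             # changes when prefix=... is passed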
diff --git a/setuptools/_distutils/tests/test_text_file.py b/setuptools/_distutils/tests/test_text_file.py
new file mode 100644
index 0000000..ebac3d5
--- /dev/null
+++ b/setuptools/_distutils/tests/test_text_file.py
@@ -0,0 +1,107 @@
+"""Tests for distutils.text_file."""
+import os
+import unittest
+from distutils.text_file import TextFile
+from distutils.tests import support
+from test.support import run_unittest
+
+TEST_DATA = """# test file
+
+line 3 \\
+# intervening comment
+ continues on next line
+"""
+
+class TextFileTestCase(support.TempdirManager, unittest.TestCase):
+
+ def test_class(self):
+ # old tests moved from text_file.__main__
+ # so they are really called by the buildbots
+
+ # result 1: no fancy options
+ result1 = ['# test file\n', '\n', 'line 3 \\\n',
+ '# intervening comment\n',
+ ' continues on next line\n']
+
+ # result 2: just strip comments
+ result2 = ["\n",
+ "line 3 \\\n",
+ " continues on next line\n"]
+
+ # result 3: just strip blank lines
+ result3 = ["# test file\n",
+ "line 3 \\\n",
+ "# intervening comment\n",
+ " continues on next line\n"]
+
+ # result 4: default, strip comments, blank lines,
+ # and trailing whitespace
+ result4 = ["line 3 \\",
+ " continues on next line"]
+
+ # result 5: strip comments and blanks, plus join lines (but don't
+ # "collapse" joined lines
+ result5 = ["line 3 continues on next line"]
+
+ # result 6: strip comments and blanks, plus join lines (and
+ # "collapse" joined lines
+ result6 = ["line 3 continues on next line"]
+
+ def test_input(count, description, file, expected_result):
+ result = file.readlines()
+ self.assertEqual(result, expected_result)
+
+ tmpdir = self.mkdtemp()
+ filename = os.path.join(tmpdir, "test.txt")
+ out_file = open(filename, "w")
+ try:
+ out_file.write(TEST_DATA)
+ finally:
+ out_file.close()
+
+ in_file = TextFile(filename, strip_comments=0, skip_blanks=0,
+ lstrip_ws=0, rstrip_ws=0)
+ try:
+ test_input(1, "no processing", in_file, result1)
+ finally:
+ in_file.close()
+
+ in_file = TextFile(filename, strip_comments=1, skip_blanks=0,
+ lstrip_ws=0, rstrip_ws=0)
+ try:
+ test_input(2, "strip comments", in_file, result2)
+ finally:
+ in_file.close()
+
+ in_file = TextFile(filename, strip_comments=0, skip_blanks=1,
+ lstrip_ws=0, rstrip_ws=0)
+ try:
+ test_input(3, "strip blanks", in_file, result3)
+ finally:
+ in_file.close()
+
+ in_file = TextFile(filename)
+ try:
+ test_input(4, "default processing", in_file, result4)
+ finally:
+ in_file.close()
+
+ in_file = TextFile(filename, strip_comments=1, skip_blanks=1,
+ join_lines=1, rstrip_ws=1)
+ try:
+ test_input(5, "join lines without collapsing", in_file, result5)
+ finally:
+ in_file.close()
+
+ in_file = TextFile(filename, strip_comments=1, skip_blanks=1,
+ join_lines=1, rstrip_ws=1, collapse_join=1)
+ try:
+ test_input(6, "join lines with collapsing", in_file, result6)
+ finally:
+ in_file.close()
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(TextFileTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
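The option combinations above map directly onto TextFile keyword arguments. A minimal sketch, assuming a file with the same content as TEST_DATA has been saved as test.txt:

    from distutils.text_file import TextFile

    # strip comments and blank lines, drop trailing whitespace, join
    # backslash-continued lines and collapse their leading whitespace --
    # the combination that produces result6 above
    tf = TextFile('test.txt', strip_comments=1, skip_blanks=1,
                  join_lines=1, rstrip_ws=1, collapse_join=1)
    try:
        lines = tf.readlines()   # ['line 3 continues on next line']
    finally:
        tf.close()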
diff --git a/setuptools/_distutils/tests/test_unixccompiler.py b/setuptools/_distutils/tests/test_unixccompiler.py
new file mode 100644
index 0000000..c8b4c14
--- /dev/null
+++ b/setuptools/_distutils/tests/test_unixccompiler.py
@@ -0,0 +1,289 @@
+"""Tests for distutils.unixccompiler."""
+import os
+import sys
+import unittest
+from test.support import run_unittest
+from unittest.mock import patch
+
+from .py38compat import EnvironmentVarGuard
+
+from distutils import sysconfig
+from distutils.errors import DistutilsPlatformError
+from distutils.unixccompiler import UnixCCompiler
+from distutils.util import _clear_cached_macosx_ver
+
+from . import support
+
+class UnixCCompilerTestCase(support.TempdirManager, unittest.TestCase):
+
+ def setUp(self):
+ super().setUp()
+ self._backup_platform = sys.platform
+ self._backup_get_config_var = sysconfig.get_config_var
+ self._backup_get_config_vars = sysconfig.get_config_vars
+ class CompilerWrapper(UnixCCompiler):
+ def rpath_foo(self):
+ return self.runtime_library_dir_option('/foo')
+ self.cc = CompilerWrapper()
+
+ def tearDown(self):
+ super().tearDown()
+ sys.platform = self._backup_platform
+ sysconfig.get_config_var = self._backup_get_config_var
+ sysconfig.get_config_vars = self._backup_get_config_vars
+
+ @unittest.skipIf(sys.platform == 'win32', "can't test on Windows")
+ def test_runtime_libdir_option(self):
+ # Issue #5900; GitHub Issue #37
+ #
+ # Ensure RUNPATH is added to extension modules with RPATH if
+ # GNU ld is used
+
+ # darwin
+ sys.platform = 'darwin'
+ darwin_ver_var = 'MACOSX_DEPLOYMENT_TARGET'
+ darwin_rpath_flag = '-Wl,-rpath,/foo'
+ darwin_lib_flag = '-L/foo'
+
+ # (macOS version from syscfg, macOS version from env var) -> flag
+ # Version value of None generates two tests: as None and as empty string
+ # Expected flag value of None means a mismatch exception is expected
+ darwin_test_cases = [
+ ((None , None ), darwin_lib_flag),
+ ((None , '11' ), darwin_rpath_flag),
+ (('10' , None ), darwin_lib_flag),
+ (('10.3' , None ), darwin_lib_flag),
+ (('10.3.1', None ), darwin_lib_flag),
+ (('10.5' , None ), darwin_rpath_flag),
+ (('10.5.1', None ), darwin_rpath_flag),
+ (('10.3' , '10.3' ), darwin_lib_flag),
+ (('10.3' , '10.5' ), darwin_rpath_flag),
+ (('10.5' , '10.3' ), darwin_lib_flag),
+ (('10.5' , '11' ), darwin_rpath_flag),
+ (('10.4' , '10' ), None),
+ ]
+
+ def make_darwin_gcv(syscfg_macosx_ver):
+ def gcv(var):
+ if var == darwin_ver_var:
+ return syscfg_macosx_ver
+ return "xxx"
+ return gcv
+
+ def do_darwin_test(syscfg_macosx_ver, env_macosx_ver, expected_flag):
+ env = os.environ
+ msg = "macOS version = (sysconfig=%r, env=%r)" % \
+ (syscfg_macosx_ver, env_macosx_ver)
+
+ # Save
+ old_gcv = sysconfig.get_config_var
+ old_env_macosx_ver = env.get(darwin_ver_var)
+
+ # Setup environment
+ _clear_cached_macosx_ver()
+ sysconfig.get_config_var = make_darwin_gcv(syscfg_macosx_ver)
+ if env_macosx_ver is not None:
+ env[darwin_ver_var] = env_macosx_ver
+ elif darwin_ver_var in env:
+ env.pop(darwin_ver_var)
+
+ # Run the test
+ if expected_flag is not None:
+ self.assertEqual(self.cc.rpath_foo(), expected_flag, msg=msg)
+ else:
+ with self.assertRaisesRegex(DistutilsPlatformError,
+ darwin_ver_var + r' mismatch', msg=msg):
+ self.cc.rpath_foo()
+
+ # Restore
+ if old_env_macosx_ver is not None:
+ env[darwin_ver_var] = old_env_macosx_ver
+ elif darwin_ver_var in env:
+ env.pop(darwin_ver_var)
+ sysconfig.get_config_var = old_gcv
+ _clear_cached_macosx_ver()
+
+ for macosx_vers, expected_flag in darwin_test_cases:
+ syscfg_macosx_ver, env_macosx_ver = macosx_vers
+ do_darwin_test(syscfg_macosx_ver, env_macosx_ver, expected_flag)
+ # Bonus test cases with None interpreted as empty string
+ if syscfg_macosx_ver is None:
+ do_darwin_test("", env_macosx_ver, expected_flag)
+ if env_macosx_ver is None:
+ do_darwin_test(syscfg_macosx_ver, "", expected_flag)
+ if syscfg_macosx_ver is None and env_macosx_ver is None:
+ do_darwin_test("", "", expected_flag)
+
+ old_gcv = sysconfig.get_config_var
+
+ # hp-ux
+ sys.platform = 'hp-ux'
+ def gcv(v):
+ return 'xxx'
+ sysconfig.get_config_var = gcv
+ self.assertEqual(self.cc.rpath_foo(), ['+s', '-L/foo'])
+
+ def gcv(v):
+ return 'gcc'
+ sysconfig.get_config_var = gcv
+ self.assertEqual(self.cc.rpath_foo(), ['-Wl,+s', '-L/foo'])
+
+ def gcv(v):
+ return 'g++'
+ sysconfig.get_config_var = gcv
+ self.assertEqual(self.cc.rpath_foo(), ['-Wl,+s', '-L/foo'])
+
+ sysconfig.get_config_var = old_gcv
+
+ # GCC GNULD
+ sys.platform = 'bar'
+ def gcv(v):
+ if v == 'CC':
+ return 'gcc'
+ elif v == 'GNULD':
+ return 'yes'
+ sysconfig.get_config_var = gcv
+ self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo')
+
+ def gcv(v):
+ if v == 'CC':
+ return 'gcc -pthread -B /bar'
+ elif v == 'GNULD':
+ return 'yes'
+ sysconfig.get_config_var = gcv
+ self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo')
+
+ # GCC non-GNULD
+ sys.platform = 'bar'
+ def gcv(v):
+ if v == 'CC':
+ return 'gcc'
+ elif v == 'GNULD':
+ return 'no'
+ sysconfig.get_config_var = gcv
+ self.assertEqual(self.cc.rpath_foo(), '-Wl,-R/foo')
+
+ # GCC GNULD with fully qualified configuration prefix
+ # see #7617
+ sys.platform = 'bar'
+ def gcv(v):
+ if v == 'CC':
+ return 'x86_64-pc-linux-gnu-gcc-4.4.2'
+ elif v == 'GNULD':
+ return 'yes'
+ sysconfig.get_config_var = gcv
+ self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo')
+
+ # non-GCC GNULD
+ sys.platform = 'bar'
+ def gcv(v):
+ if v == 'CC':
+ return 'cc'
+ elif v == 'GNULD':
+ return 'yes'
+ sysconfig.get_config_var = gcv
+ self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo')
+
+ # non-GCC non-GNULD
+ sys.platform = 'bar'
+ def gcv(v):
+ if v == 'CC':
+ return 'cc'
+ elif v == 'GNULD':
+ return 'no'
+ sysconfig.get_config_var = gcv
+ self.assertEqual(self.cc.rpath_foo(), '-Wl,-R/foo')
+
+ @unittest.skipIf(sys.platform == 'win32', "can't test on Windows")
+ def test_cc_overrides_ldshared(self):
+ # Issue #18080:
+ # ensure that setting CC env variable also changes default linker
+ def gcv(v):
+ if v == 'LDSHARED':
+ return 'gcc-4.2 -bundle -undefined dynamic_lookup '
+ return 'gcc-4.2'
+
+ def gcvs(*args, _orig=sysconfig.get_config_vars):
+ if args:
+ return list(map(sysconfig.get_config_var, args))
+ return _orig()
+ sysconfig.get_config_var = gcv
+ sysconfig.get_config_vars = gcvs
+ with EnvironmentVarGuard() as env:
+ env['CC'] = 'my_cc'
+ del env['LDSHARED']
+ sysconfig.customize_compiler(self.cc)
+ self.assertEqual(self.cc.linker_so[0], 'my_cc')
+
+ @unittest.skipIf(sys.platform == 'win32', "can't test on Windows")
+ def test_cc_overrides_ldshared_for_cxx_correctly(self):
+ """
+ Ensure that setting CC env variable also changes default linker
+ correctly when building C++ extensions.
+
+ pypa/distutils#126
+ """
+ def gcv(v):
+ if v == 'LDSHARED':
+ return 'gcc-4.2 -bundle -undefined dynamic_lookup '
+ elif v == 'CXX':
+ return 'g++-4.2'
+ return 'gcc-4.2'
+
+ def gcvs(*args, _orig=sysconfig.get_config_vars):
+ if args:
+ return list(map(sysconfig.get_config_var, args))
+ return _orig()
+
+ sysconfig.get_config_var = gcv
+ sysconfig.get_config_vars = gcvs
+ with patch.object(self.cc, 'spawn', return_value=None) as mock_spawn, \
+ patch.object(self.cc, '_need_link', return_value=True), \
+ patch.object(self.cc, 'mkpath', return_value=None), \
+ EnvironmentVarGuard() as env:
+ env['CC'] = 'ccache my_cc'
+ env['CXX'] = 'my_cxx'
+ del env['LDSHARED']
+ sysconfig.customize_compiler(self.cc)
+ self.assertEqual(self.cc.linker_so[0:2], ['ccache', 'my_cc'])
+ self.cc.link(None, [], 'a.out', target_lang='c++')
+ call_args = mock_spawn.call_args[0][0]
+ expected = ['my_cxx', '-bundle', '-undefined', 'dynamic_lookup']
+ assert call_args[:4] == expected
+
+ @unittest.skipIf(sys.platform == 'win32', "can't test on Windows")
+ def test_explicit_ldshared(self):
+ # Issue #18080:
+ # ensure that setting CC env variable does not change
+ # explicit LDSHARED setting for linker
+ def gcv(v):
+ if v == 'LDSHARED':
+ return 'gcc-4.2 -bundle -undefined dynamic_lookup '
+ return 'gcc-4.2'
+
+ def gcvs(*args, _orig=sysconfig.get_config_vars):
+ if args:
+ return list(map(sysconfig.get_config_var, args))
+ return _orig()
+ sysconfig.get_config_var = gcv
+ sysconfig.get_config_vars = gcvs
+ with EnvironmentVarGuard() as env:
+ env['CC'] = 'my_cc'
+ env['LDSHARED'] = 'my_ld -bundle -dynamic'
+ sysconfig.customize_compiler(self.cc)
+ self.assertEqual(self.cc.linker_so[0], 'my_ld')
+
+ def test_has_function(self):
+ # Issue https://github.com/pypa/distutils/issues/64:
+ # ensure that setting output_dir does not raise
+ # FileNotFoundError: [Errno 2] No such file or directory: 'a.out'
+ self.cc.output_dir = 'scratch'
+ os.chdir(self.mkdtemp())
+ self.cc.has_function('abort', includes=['stdlib.h'])
+
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(UnixCCompilerTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_upload.py b/setuptools/_distutils/tests/test_upload.py
new file mode 100644
index 0000000..ce3e84a
--- /dev/null
+++ b/setuptools/_distutils/tests/test_upload.py
@@ -0,0 +1,223 @@
+"""Tests for distutils.command.upload."""
+import os
+import unittest
+import unittest.mock as mock
+from urllib.request import HTTPError
+
+from test.support import run_unittest
+
+from distutils.command import upload as upload_mod
+from distutils.command.upload import upload
+from distutils.core import Distribution
+from distutils.errors import DistutilsError
+from distutils.log import ERROR, INFO
+
+from distutils.tests.test_config import PYPIRC, BasePyPIRCCommandTestCase
+
+PYPIRC_LONG_PASSWORD = """\
+[distutils]
+
+index-servers =
+ server1
+ server2
+
+[server1]
+username:me
+password:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+
+[server2]
+username:meagain
+password: secret
+realm:acme
+repository:http://another.pypi/
+"""
+
+
+PYPIRC_NOPASSWORD = """\
+[distutils]
+
+index-servers =
+ server1
+
+[server1]
+username:me
+"""
+
+class FakeOpen(object):
+
+ def __init__(self, url, msg=None, code=None):
+ self.url = url
+ if not isinstance(url, str):
+ self.req = url
+ else:
+ self.req = None
+ self.msg = msg or 'OK'
+ self.code = code or 200
+
+ def getheader(self, name, default=None):
+ return {
+ 'content-type': 'text/plain; charset=utf-8',
+ }.get(name.lower(), default)
+
+ def read(self):
+ return b'xyzzy'
+
+ def getcode(self):
+ return self.code
+
+
+class uploadTestCase(BasePyPIRCCommandTestCase):
+
+ def setUp(self):
+ super(uploadTestCase, self).setUp()
+ self.old_open = upload_mod.urlopen
+ upload_mod.urlopen = self._urlopen
+ self.last_open = None
+ self.next_msg = None
+ self.next_code = None
+
+ def tearDown(self):
+ upload_mod.urlopen = self.old_open
+ super(uploadTestCase, self).tearDown()
+
+ def _urlopen(self, url):
+ self.last_open = FakeOpen(url, msg=self.next_msg, code=self.next_code)
+ return self.last_open
+
+ def test_finalize_options(self):
+
+ # new format
+ self.write_file(self.rc, PYPIRC)
+ dist = Distribution()
+ cmd = upload(dist)
+ cmd.finalize_options()
+ for attr, waited in (('username', 'me'), ('password', 'secret'),
+ ('realm', 'pypi'),
+ ('repository', 'https://upload.pypi.org/legacy/')):
+ self.assertEqual(getattr(cmd, attr), waited)
+
+ def test_saved_password(self):
+ # file with no password
+ self.write_file(self.rc, PYPIRC_NOPASSWORD)
+
+ # make sure it passes
+ dist = Distribution()
+ cmd = upload(dist)
+ cmd.finalize_options()
+ self.assertEqual(cmd.password, None)
+
+ # make sure we get it as well, if another command
+ # initialized it at the dist level
+ dist.password = 'xxx'
+ cmd = upload(dist)
+ cmd.finalize_options()
+ self.assertEqual(cmd.password, 'xxx')
+
+ def test_upload(self):
+ tmp = self.mkdtemp()
+ path = os.path.join(tmp, 'xxx')
+ self.write_file(path)
+ command, pyversion, filename = 'xxx', '2.6', path
+ dist_files = [(command, pyversion, filename)]
+ self.write_file(self.rc, PYPIRC_LONG_PASSWORD)
+
+        # let's run it
+ pkg_dir, dist = self.create_dist(dist_files=dist_files)
+ cmd = upload(dist)
+ cmd.show_response = 1
+ cmd.ensure_finalized()
+ cmd.run()
+
+        # what did we send?
+ headers = dict(self.last_open.req.headers)
+ self.assertGreaterEqual(int(headers['Content-length']), 2162)
+ content_type = headers['Content-type']
+ self.assertTrue(content_type.startswith('multipart/form-data'))
+ self.assertEqual(self.last_open.req.get_method(), 'POST')
+ expected_url = 'https://upload.pypi.org/legacy/'
+ self.assertEqual(self.last_open.req.get_full_url(), expected_url)
+ data = self.last_open.req.data
+        self.assertIn(b'xxx', data)
+ self.assertIn(b'protocol_version', data)
+ self.assertIn(b'sha256_digest', data)
+ self.assertIn(
+ b'cd2eb0837c9b4c962c22d2ff8b5441b7b45805887f051d39bf133b583baf'
+ b'6860',
+ data
+ )
+ if b'md5_digest' in data:
+ self.assertIn(b'f561aaf6ef0bf14d4208bb46a4ccb3ad', data)
+ if b'blake2_256_digest' in data:
+ self.assertIn(
+ b'b6f289a27d4fe90da63c503bfe0a9b761a8f76bb86148565065f040be'
+ b'6d1c3044cf7ded78ef800509bccb4b648e507d88dc6383d67642aadcc'
+ b'ce443f1534330a',
+ data
+ )
+
+ # The PyPI response body was echoed
+ results = self.get_logs(INFO)
+ self.assertEqual(results[-1], 75 * '-' + '\nxyzzy\n' + 75 * '-')
+
+ # bpo-32304: archives whose last byte was b'\r' were corrupted due to
+ # normalization intended for Mac OS 9.
+ def test_upload_correct_cr(self):
+ # content that ends with \r should not be modified.
+ tmp = self.mkdtemp()
+ path = os.path.join(tmp, 'xxx')
+ self.write_file(path, content='yy\r')
+ command, pyversion, filename = 'xxx', '2.6', path
+ dist_files = [(command, pyversion, filename)]
+ self.write_file(self.rc, PYPIRC_LONG_PASSWORD)
+
+ # other fields that ended with \r used to be modified, now are
+ # preserved.
+ pkg_dir, dist = self.create_dist(
+ dist_files=dist_files,
+ description='long description\r'
+ )
+ cmd = upload(dist)
+ cmd.show_response = 1
+ cmd.ensure_finalized()
+ cmd.run()
+
+ headers = dict(self.last_open.req.headers)
+ self.assertGreaterEqual(int(headers['Content-length']), 2172)
+ self.assertIn(b'long description\r', self.last_open.req.data)
+
+ def test_upload_fails(self):
+ self.next_msg = "Not Found"
+ self.next_code = 404
+ self.assertRaises(DistutilsError, self.test_upload)
+
+ def test_wrong_exception_order(self):
+ tmp = self.mkdtemp()
+ path = os.path.join(tmp, 'xxx')
+ self.write_file(path)
+ dist_files = [('xxx', '2.6', path)] # command, pyversion, filename
+ self.write_file(self.rc, PYPIRC_LONG_PASSWORD)
+
+ pkg_dir, dist = self.create_dist(dist_files=dist_files)
+ tests = [
+ (OSError('oserror'), 'oserror', OSError),
+ (HTTPError('url', 400, 'httperror', {}, None),
+ 'Upload failed (400): httperror', DistutilsError),
+ ]
+ for exception, expected, raised_exception in tests:
+ with self.subTest(exception=type(exception).__name__):
+ with mock.patch('distutils.command.upload.urlopen',
+ new=mock.Mock(side_effect=exception)):
+ with self.assertRaises(raised_exception):
+ cmd = upload(dist)
+ cmd.ensure_finalized()
+ cmd.run()
+ results = self.get_logs(ERROR)
+ self.assertIn(expected, results[-1])
+ self.clear_logs()
+
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(uploadTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_util.py b/setuptools/_distutils/tests/test_util.py
new file mode 100644
index 0000000..2738388
--- /dev/null
+++ b/setuptools/_distutils/tests/test_util.py
@@ -0,0 +1,224 @@
+"""Tests for distutils.util."""
+import os
+import sys
+import unittest
+import sysconfig as stdlib_sysconfig
+from copy import copy
+from test.support import run_unittest
+from unittest import mock
+
+from distutils.errors import DistutilsPlatformError, DistutilsByteCompileError
+from distutils.util import (get_platform, convert_path, change_root,
+ check_environ, split_quoted, strtobool,
+ rfc822_escape, byte_compile,
+ grok_environment_error, get_host_platform)
+from distutils import util # used to patch _environ_checked
+from distutils import sysconfig
+from distutils.tests import support
+
+class UtilTestCase(support.EnvironGuard, unittest.TestCase):
+
+ def setUp(self):
+ super(UtilTestCase, self).setUp()
+ # saving the environment
+ self.name = os.name
+ self.platform = sys.platform
+ self.version = sys.version
+ self.sep = os.sep
+ self.join = os.path.join
+ self.isabs = os.path.isabs
+ self.splitdrive = os.path.splitdrive
+ self._config_vars = copy(sysconfig._config_vars)
+
+ # patching os.uname
+ if hasattr(os, 'uname'):
+ self.uname = os.uname
+ self._uname = os.uname()
+ else:
+ self.uname = None
+ self._uname = None
+
+ os.uname = self._get_uname
+
+ def tearDown(self):
+ # getting back the environment
+ os.name = self.name
+ sys.platform = self.platform
+ sys.version = self.version
+ os.sep = self.sep
+ os.path.join = self.join
+ os.path.isabs = self.isabs
+ os.path.splitdrive = self.splitdrive
+ if self.uname is not None:
+ os.uname = self.uname
+ else:
+ del os.uname
+ sysconfig._config_vars = copy(self._config_vars)
+ super(UtilTestCase, self).tearDown()
+
+ def _set_uname(self, uname):
+ self._uname = uname
+
+ def _get_uname(self):
+ return self._uname
+
+ def test_get_host_platform(self):
+ with unittest.mock.patch('os.name', 'nt'):
+ with unittest.mock.patch('sys.version', '... [... (ARM64)]'):
+ self.assertEqual(get_host_platform(), 'win-arm64')
+ with unittest.mock.patch('sys.version', '... [... (ARM)]'):
+ self.assertEqual(get_host_platform(), 'win-arm32')
+
+ with unittest.mock.patch('sys.version_info', (3, 9, 0, 'final', 0)):
+ self.assertEqual(get_host_platform(), stdlib_sysconfig.get_platform())
+
+ def test_get_platform(self):
+ with unittest.mock.patch('os.name', 'nt'):
+ with unittest.mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'x86'}):
+ self.assertEqual(get_platform(), 'win32')
+ with unittest.mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'x64'}):
+ self.assertEqual(get_platform(), 'win-amd64')
+ with unittest.mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'arm'}):
+ self.assertEqual(get_platform(), 'win-arm32')
+ with unittest.mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'arm64'}):
+ self.assertEqual(get_platform(), 'win-arm64')
+
+ def test_convert_path(self):
+ # linux/mac
+ os.sep = '/'
+ def _join(path):
+ return '/'.join(path)
+ os.path.join = _join
+
+ self.assertEqual(convert_path('/home/to/my/stuff'),
+ '/home/to/my/stuff')
+
+ # win
+ os.sep = '\\'
+ def _join(*path):
+ return '\\'.join(path)
+ os.path.join = _join
+
+ self.assertRaises(ValueError, convert_path, '/home/to/my/stuff')
+ self.assertRaises(ValueError, convert_path, 'home/to/my/stuff/')
+
+ self.assertEqual(convert_path('home/to/my/stuff'),
+ 'home\\to\\my\\stuff')
+ self.assertEqual(convert_path('.'),
+ os.curdir)
+
+ def test_change_root(self):
+ # linux/mac
+ os.name = 'posix'
+ def _isabs(path):
+ return path[0] == '/'
+ os.path.isabs = _isabs
+ def _join(*path):
+ return '/'.join(path)
+ os.path.join = _join
+
+ self.assertEqual(change_root('/root', '/old/its/here'),
+ '/root/old/its/here')
+ self.assertEqual(change_root('/root', 'its/here'),
+ '/root/its/here')
+
+ # windows
+ os.name = 'nt'
+ def _isabs(path):
+ return path.startswith('c:\\')
+ os.path.isabs = _isabs
+ def _splitdrive(path):
+ if path.startswith('c:'):
+ return ('', path.replace('c:', ''))
+ return ('', path)
+ os.path.splitdrive = _splitdrive
+ def _join(*path):
+ return '\\'.join(path)
+ os.path.join = _join
+
+ self.assertEqual(change_root('c:\\root', 'c:\\old\\its\\here'),
+ 'c:\\root\\old\\its\\here')
+ self.assertEqual(change_root('c:\\root', 'its\\here'),
+ 'c:\\root\\its\\here')
+
+ # BugsBunny os (it's a great os)
+ os.name = 'BugsBunny'
+ self.assertRaises(DistutilsPlatformError,
+ change_root, 'c:\\root', 'its\\here')
+
+ # XXX platforms to be covered: mac
+
+ def test_check_environ(self):
+ util._environ_checked = 0
+ os.environ.pop('HOME', None)
+
+ check_environ()
+
+ self.assertEqual(os.environ['PLAT'], get_platform())
+ self.assertEqual(util._environ_checked, 1)
+
+ @unittest.skipUnless(os.name == 'posix', 'specific to posix')
+ def test_check_environ_getpwuid(self):
+ util._environ_checked = 0
+ os.environ.pop('HOME', None)
+
+ import pwd
+
+ # only set pw_dir field, other fields are not used
+ result = pwd.struct_passwd((None, None, None, None, None,
+ '/home/distutils', None))
+ with mock.patch.object(pwd, 'getpwuid', return_value=result):
+ check_environ()
+ self.assertEqual(os.environ['HOME'], '/home/distutils')
+
+ util._environ_checked = 0
+ os.environ.pop('HOME', None)
+
+ # bpo-10496: Catch pwd.getpwuid() error
+ with mock.patch.object(pwd, 'getpwuid', side_effect=KeyError):
+ check_environ()
+ self.assertNotIn('HOME', os.environ)
+
+ def test_split_quoted(self):
+ self.assertEqual(split_quoted('""one"" "two" \'three\' \\four'),
+ ['one', 'two', 'three', 'four'])
+
+ def test_strtobool(self):
+ yes = ('y', 'Y', 'yes', 'True', 't', 'true', 'True', 'On', 'on', '1')
+ no = ('n', 'no', 'f', 'false', 'off', '0', 'Off', 'No', 'N')
+
+ for y in yes:
+ self.assertTrue(strtobool(y))
+
+ for n in no:
+ self.assertFalse(strtobool(n))
+
+ def test_rfc822_escape(self):
+ header = 'I am a\npoor\nlonesome\nheader\n'
+ res = rfc822_escape(header)
+ wanted = ('I am a%(8s)spoor%(8s)slonesome%(8s)s'
+ 'header%(8s)s') % {'8s': '\n'+8*' '}
+ self.assertEqual(res, wanted)
+
+ def test_dont_write_bytecode(self):
+        # make sure byte_compile raises a DistutilsByteCompileError
+ # if sys.dont_write_bytecode is True
+ old_dont_write_bytecode = sys.dont_write_bytecode
+ sys.dont_write_bytecode = True
+ try:
+ self.assertRaises(DistutilsByteCompileError, byte_compile, [])
+ finally:
+ sys.dont_write_bytecode = old_dont_write_bytecode
+
+ def test_grok_environment_error(self):
+ # test obsolete function to ensure backward compat (#4931)
+ exc = IOError("Unable to find batch file")
+ msg = grok_environment_error(exc)
+ self.assertEqual(msg, "error: Unable to find batch file")
+
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(UtilTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_version.py b/setuptools/_distutils/tests/test_version.py
new file mode 100644
index 0000000..8405aa3
--- /dev/null
+++ b/setuptools/_distutils/tests/test_version.py
@@ -0,0 +1,95 @@
+"""Tests for distutils.version."""
+import unittest
+import distutils
+from distutils.version import LooseVersion
+from distutils.version import StrictVersion
+from test.support import run_unittest
+
+class VersionTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = distutils.version.suppress_known_deprecation()
+ self.ctx.__enter__()
+
+ def tearDown(self):
+ self.ctx.__exit__(None, None, None)
+
+ def test_prerelease(self):
+ version = StrictVersion('1.2.3a1')
+ self.assertEqual(version.version, (1, 2, 3))
+ self.assertEqual(version.prerelease, ('a', 1))
+ self.assertEqual(str(version), '1.2.3a1')
+
+ version = StrictVersion('1.2.0')
+ self.assertEqual(str(version), '1.2')
+
+ def test_cmp_strict(self):
+ versions = (('1.5.1', '1.5.2b2', -1),
+ ('161', '3.10a', ValueError),
+ ('8.02', '8.02', 0),
+ ('3.4j', '1996.07.12', ValueError),
+ ('3.2.pl0', '3.1.1.6', ValueError),
+ ('2g6', '11g', ValueError),
+ ('0.9', '2.2', -1),
+ ('1.2.1', '1.2', 1),
+ ('1.1', '1.2.2', -1),
+ ('1.2', '1.1', 1),
+ ('1.2.1', '1.2.2', -1),
+ ('1.2.2', '1.2', 1),
+ ('1.2', '1.2.2', -1),
+ ('0.4.0', '0.4', 0),
+ ('1.13++', '5.5.kw', ValueError))
+
+ for v1, v2, wanted in versions:
+ try:
+ res = StrictVersion(v1)._cmp(StrictVersion(v2))
+ except ValueError:
+ if wanted is ValueError:
+ continue
+ else:
+ raise AssertionError(("cmp(%s, %s) "
+ "shouldn't raise ValueError")
+ % (v1, v2))
+ self.assertEqual(res, wanted,
+ 'cmp(%s, %s) should be %s, got %s' %
+ (v1, v2, wanted, res))
+ res = StrictVersion(v1)._cmp(v2)
+ self.assertEqual(res, wanted,
+ 'cmp(%s, %s) should be %s, got %s' %
+ (v1, v2, wanted, res))
+ res = StrictVersion(v1)._cmp(object())
+ self.assertIs(res, NotImplemented,
+ 'cmp(%s, %s) should be NotImplemented, got %s' %
+ (v1, v2, res))
+
+
+ def test_cmp(self):
+ versions = (('1.5.1', '1.5.2b2', -1),
+ ('161', '3.10a', 1),
+ ('8.02', '8.02', 0),
+ ('3.4j', '1996.07.12', -1),
+ ('3.2.pl0', '3.1.1.6', 1),
+ ('2g6', '11g', -1),
+ ('0.960923', '2.2beta29', -1),
+ ('1.13++', '5.5.kw', -1))
+
+
+ for v1, v2, wanted in versions:
+ res = LooseVersion(v1)._cmp(LooseVersion(v2))
+ self.assertEqual(res, wanted,
+ 'cmp(%s, %s) should be %s, got %s' %
+ (v1, v2, wanted, res))
+ res = LooseVersion(v1)._cmp(v2)
+ self.assertEqual(res, wanted,
+ 'cmp(%s, %s) should be %s, got %s' %
+ (v1, v2, wanted, res))
+ res = LooseVersion(v1)._cmp(object())
+ self.assertIs(res, NotImplemented,
+ 'cmp(%s, %s) should be NotImplemented, got %s' %
+ (v1, v2, res))
+
+def test_suite():
+ return unittest.TestLoader().loadTestsFromTestCase(VersionTestCase)
+
+if __name__ == "__main__":
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/test_versionpredicate.py b/setuptools/_distutils/tests/test_versionpredicate.py
new file mode 100644
index 0000000..28ae09d
--- /dev/null
+++ b/setuptools/_distutils/tests/test_versionpredicate.py
@@ -0,0 +1,13 @@
+"""Tests harness for distutils.versionpredicate.
+
+"""
+
+import distutils.versionpredicate
+import doctest
+from test.support import run_unittest
+
+def test_suite():
+ return doctest.DocTestSuite(distutils.versionpredicate)
+
+if __name__ == '__main__':
+ run_unittest(test_suite())
diff --git a/setuptools/_distutils/tests/unix_compat.py b/setuptools/_distutils/tests/unix_compat.py
new file mode 100644
index 0000000..b7718c2
--- /dev/null
+++ b/setuptools/_distutils/tests/unix_compat.py
@@ -0,0 +1,16 @@
+import sys
+import unittest
+
+try:
+ import grp
+ import pwd
+except ImportError:
+ grp = pwd = None
+
+
+UNIX_ID_SUPPORT = grp and pwd
+UID_0_SUPPORT = UNIX_ID_SUPPORT and sys.platform != "cygwin"
+
+require_unix_id = unittest.skipUnless(
+ UNIX_ID_SUPPORT, "Requires grp and pwd support")
+require_uid_0 = unittest.skipUnless(UID_0_SUPPORT, "Requires UID 0 support")
diff --git a/setuptools/_distutils/text_file.py b/setuptools/_distutils/text_file.py
new file mode 100644
index 0000000..93abad3
--- /dev/null
+++ b/setuptools/_distutils/text_file.py
@@ -0,0 +1,286 @@
+"""text_file
+
+provides the TextFile class, which gives an interface to text files
+that (optionally) takes care of stripping comments, ignoring blank
+lines, and joining lines with backslashes."""
+
+import sys, io
+
+
+class TextFile:
+ """Provides a file-like object that takes care of all the things you
+ commonly want to do when processing a text file that has some
+ line-by-line syntax: strip comments (as long as "#" is your
+ comment character), skip blank lines, join adjacent lines by
+ escaping the newline (ie. backslash at end of line), strip
+ leading and/or trailing whitespace. All of these are optional
+ and independently controllable.
+
+ Provides a 'warn()' method so you can generate warning messages that
+ report physical line number, even if the logical line in question
+ spans multiple physical lines. Also provides 'unreadline()' for
+ implementing line-at-a-time lookahead.
+
+ Constructor is called as:
+
+ TextFile (filename=None, file=None, **options)
+
+ It bombs (RuntimeError) if both 'filename' and 'file' are None;
+ 'filename' should be a string, and 'file' a file object (or
+ something that provides 'readline()' and 'close()' methods). It is
+ recommended that you supply at least 'filename', so that TextFile
+ can include it in warning messages. If 'file' is not supplied,
+ TextFile creates its own using 'io.open()'.
+
+ The options are all boolean, and affect the value returned by
+ 'readline()':
+ strip_comments [default: true]
+ strip from "#" to end-of-line, as well as any whitespace
+ leading up to the "#" -- unless it is escaped by a backslash
+ lstrip_ws [default: false]
+ strip leading whitespace from each line before returning it
+ rstrip_ws [default: true]
+ strip trailing whitespace (including line terminator!) from
+ each line before returning it
+      skip_blanks [default: true]
+ skip lines that are empty *after* stripping comments and
+ whitespace. (If both lstrip_ws and rstrip_ws are false,
+ then some lines may consist of solely whitespace: these will
+ *not* be skipped, even if 'skip_blanks' is true.)
+ join_lines [default: false]
+ if a backslash is the last non-newline character on a line
+ after stripping comments and whitespace, join the following line
+ to it to form one "logical line"; if N consecutive lines end
+ with a backslash, then N+1 physical lines will be joined to
+ form one logical line.
+ collapse_join [default: false]
+ strip leading whitespace from lines that are joined to their
+ predecessor; only matters if (join_lines and not lstrip_ws)
+ errors [default: 'strict']
+ error handler used to decode the file content
+
+ Note that since 'rstrip_ws' can strip the trailing newline, the
+ semantics of 'readline()' must differ from those of the builtin file
+ object's 'readline()' method! In particular, 'readline()' returns
+ None for end-of-file: an empty string might just be a blank line (or
+ an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is
+ not."""
+
+ default_options = { 'strip_comments': 1,
+ 'skip_blanks': 1,
+ 'lstrip_ws': 0,
+ 'rstrip_ws': 1,
+ 'join_lines': 0,
+ 'collapse_join': 0,
+ 'errors': 'strict',
+ }
+
+ def __init__(self, filename=None, file=None, **options):
+ """Construct a new TextFile object. At least one of 'filename'
+ (a string) and 'file' (a file-like object) must be supplied.
+        The keyword argument options are described above and affect
+ the values returned by 'readline()'."""
+ if filename is None and file is None:
+ raise RuntimeError("you must supply either or both of 'filename' and 'file'")
+
+ # set values for all options -- either from client option hash
+ # or fallback to default_options
+ for opt in self.default_options.keys():
+ if opt in options:
+ setattr(self, opt, options[opt])
+ else:
+ setattr(self, opt, self.default_options[opt])
+
+ # sanity check client option hash
+ for opt in options.keys():
+ if opt not in self.default_options:
+ raise KeyError("invalid TextFile option '%s'" % opt)
+
+ if file is None:
+ self.open(filename)
+ else:
+ self.filename = filename
+ self.file = file
+ self.current_line = 0 # assuming that file is at BOF!
+
+ # 'linebuf' is a stack of lines that will be emptied before we
+ # actually read from the file; it's only populated by an
+ # 'unreadline()' operation
+ self.linebuf = []
+
+ def open(self, filename):
+ """Open a new file named 'filename'. This overrides both the
+ 'filename' and 'file' arguments to the constructor."""
+ self.filename = filename
+ self.file = io.open(self.filename, 'r', errors=self.errors)
+ self.current_line = 0
+
+ def close(self):
+ """Close the current file and forget everything we know about it
+ (filename, current line number)."""
+ file = self.file
+ self.file = None
+ self.filename = None
+ self.current_line = None
+ file.close()
+
+ def gen_error(self, msg, line=None):
+ outmsg = []
+ if line is None:
+ line = self.current_line
+ outmsg.append(self.filename + ", ")
+ if isinstance(line, (list, tuple)):
+ outmsg.append("lines %d-%d: " % tuple(line))
+ else:
+ outmsg.append("line %d: " % line)
+ outmsg.append(str(msg))
+ return "".join(outmsg)
+
+ def error(self, msg, line=None):
+ raise ValueError("error: " + self.gen_error(msg, line))
+
+ def warn(self, msg, line=None):
+ """Print (to stderr) a warning message tied to the current logical
+ line in the current file. If the current logical line in the
+ file spans multiple physical lines, the warning refers to the
+        whole range, eg. "lines 3-5".  If 'line' is supplied, it overrides
+ the current line number; it may be a list or tuple to indicate a
+ range of physical lines, or an integer for a single physical
+ line."""
+ sys.stderr.write("warning: " + self.gen_error(msg, line) + "\n")
+
+ def readline(self):
+ """Read and return a single logical line from the current file (or
+ from an internal buffer if lines have previously been "unread"
+ with 'unreadline()'). If the 'join_lines' option is true, this
+ may involve reading multiple physical lines concatenated into a
+ single string. Updates the current line number, so calling
+ 'warn()' after 'readline()' emits a warning about the physical
+ line(s) just read. Returns None on end-of-file, since the empty
+        string can occur if 'rstrip_ws' is true but 'skip_blanks' is
+ not."""
+        # If any "unread" lines are waiting in 'linebuf', return the top
+ # one. (We don't actually buffer read-ahead data -- lines only
+ # get put in 'linebuf' if the client explicitly does an
+        # 'unreadline()'.)
+ if self.linebuf:
+ line = self.linebuf[-1]
+ del self.linebuf[-1]
+ return line
+
+ buildup_line = ''
+
+ while True:
+ # read the line, make it None if EOF
+ line = self.file.readline()
+ if line == '':
+ line = None
+
+ if self.strip_comments and line:
+
+ # Look for the first "#" in the line. If none, never
+ # mind. If we find one and it's the first character, or
+ # is not preceded by "\", then it starts a comment --
+ # strip the comment, strip whitespace before it, and
+ # carry on. Otherwise, it's just an escaped "#", so
+ # unescape it (and any other escaped "#"'s that might be
+ # lurking in there) and otherwise leave the line alone.
+
+ pos = line.find("#")
+ if pos == -1: # no "#" -- no comments
+ pass
+
+ # It's definitely a comment -- either "#" is the first
+ # character, or it's elsewhere and unescaped.
+ elif pos == 0 or line[pos-1] != "\\":
+ # Have to preserve the trailing newline, because it's
+ # the job of a later step (rstrip_ws) to remove it --
+ # and if rstrip_ws is false, we'd better preserve it!
+ # (NB. this means that if the final line is all comment
+ # and has no trailing newline, we will think that it's
+ # EOF; I think that's OK.)
+ eol = (line[-1] == '\n') and '\n' or ''
+ line = line[0:pos] + eol
+
+ # If all that's left is whitespace, then skip line
+ # *now*, before we try to join it to 'buildup_line' --
+ # that way constructs like
+ # hello \\
+ # # comment that should be ignored
+ # there
+ # result in "hello there".
+ if line.strip() == "":
+ continue
+ else: # it's an escaped "#"
+ line = line.replace("\\#", "#")
+
+ # did previous line end with a backslash? then accumulate
+ if self.join_lines and buildup_line:
+ # oops: end of file
+ if line is None:
+ self.warn("continuation line immediately precedes "
+ "end-of-file")
+ return buildup_line
+
+ if self.collapse_join:
+ line = line.lstrip()
+ line = buildup_line + line
+
+ # careful: pay attention to line number when incrementing it
+ if isinstance(self.current_line, list):
+ self.current_line[1] = self.current_line[1] + 1
+ else:
+ self.current_line = [self.current_line,
+ self.current_line + 1]
+ # just an ordinary line, read it as usual
+ else:
+ if line is None: # eof
+ return None
+
+ # still have to be careful about incrementing the line number!
+ if isinstance(self.current_line, list):
+ self.current_line = self.current_line[1] + 1
+ else:
+ self.current_line = self.current_line + 1
+
+ # strip whitespace however the client wants (leading and
+ # trailing, or one or the other, or neither)
+ if self.lstrip_ws and self.rstrip_ws:
+ line = line.strip()
+ elif self.lstrip_ws:
+ line = line.lstrip()
+ elif self.rstrip_ws:
+ line = line.rstrip()
+
+ # blank line (whether we rstrip'ed or not)? skip to next line
+ # if appropriate
+ if (line == '' or line == '\n') and self.skip_blanks:
+ continue
+
+ if self.join_lines:
+ if line[-1] == '\\':
+ buildup_line = line[:-1]
+ continue
+
+ if line[-2:] == '\\\n':
+ buildup_line = line[0:-2] + '\n'
+ continue
+
+ # well, I guess there's some actual content there: return it
+ return line
+
+ def readlines(self):
+ """Read and return the list of all logical lines remaining in the
+ current file."""
+ lines = []
+ while True:
+ line = self.readline()
+ if line is None:
+ return lines
+ lines.append(line)
+
+ def unreadline(self, line):
+ """Push 'line' (a string) onto an internal buffer that will be
+ checked by future 'readline()' calls. Handy for implementing
+ a parser with line-at-a-time lookahead."""
+ self.linebuf.append(line)
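+
+# Illustrative usage sketch (not part of the upstream distutils sources;
+# the file name and contents below are hypothetical).  With 'join_lines'
+# and 'collapse_join' enabled, comment and blank lines disappear and a
+# backslash continuation is folded into one logical line:
+#
+#   from distutils.text_file import TextFile
+#
+#   # settings.txt contains:
+#   #     # build settings
+#   #     name = demo \
+#   #            utils
+#   tf = TextFile('settings.txt', join_lines=1, collapse_join=1)
+#   assert tf.readlines() == ['name = demo utils']
+#   tf.close()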
diff --git a/setuptools/_distutils/unixccompiler.py b/setuptools/_distutils/unixccompiler.py
new file mode 100644
index 0000000..715408f
--- /dev/null
+++ b/setuptools/_distutils/unixccompiler.py
@@ -0,0 +1,370 @@
+"""distutils.unixccompiler
+
+Contains the UnixCCompiler class, a subclass of CCompiler that handles
+the "typical" Unix-style command-line C compiler:
+ * macros defined with -Dname[=value]
+ * macros undefined with -Uname
+ * include search directories specified with -Idir
+  * libraries specified with -llib
+ * library search directories specified with -Ldir
+ * compile handled by 'cc' (or similar) executable with -c option:
+ compiles .c to .o
+ * link static library handled by 'ar' command (possibly with 'ranlib')
+ * link shared library handled by 'cc -shared'
+"""
+
+import os, sys, re, shlex
+
+from distutils import sysconfig
+from distutils.dep_util import newer
+from distutils.ccompiler import \
+ CCompiler, gen_preprocess_options, gen_lib_options
+from distutils.errors import \
+ DistutilsExecError, CompileError, LibError, LinkError
+from distutils import log
+from ._macos_compat import compiler_fixup
+
+# XXX Things not currently handled:
+# * optimization/debug/warning flags; we just use whatever's in Python's
+# Makefile and live with it. Is this adequate? If not, we might
+# have to have a bunch of subclasses GNUCCompiler, SGICCompiler,
+# SunCCompiler, and I suspect down that road lies madness.
+# * even if we don't know a warning flag from an optimization flag,
+# we need some way for outsiders to feed preprocessor/compiler/linker
+# flags in to us -- eg. a sysadmin might want to mandate certain flags
+# via a site config file, or a user might want to set something for
+# compiling this module distribution only via the setup.py command
+# line, whatever. As long as these options come from something on the
+# current system, they can be as system-dependent as they like, and we
+# should just happily stuff them into the preprocessor/compiler/linker
+# options and carry on.
+
+
+def _split_env(cmd):
+ """
+ For macOS, split command into 'env' portion (if any)
+ and the rest of the linker command.
+
+ >>> _split_env(['a', 'b', 'c'])
+ ([], ['a', 'b', 'c'])
+ >>> _split_env(['/usr/bin/env', 'A=3', 'gcc'])
+ (['/usr/bin/env', 'A=3'], ['gcc'])
+ """
+ pivot = 0
+ if os.path.basename(cmd[0]) == "env":
+ pivot = 1
+ while '=' in cmd[pivot]:
+ pivot += 1
+ return cmd[:pivot], cmd[pivot:]
+
+
+def _split_aix(cmd):
+ """
+ AIX platforms prefix the compiler with the ld_so_aix
+ script, so split that from the linker command.
+
+ >>> _split_aix(['a', 'b', 'c'])
+ ([], ['a', 'b', 'c'])
+ >>> _split_aix(['/bin/foo/ld_so_aix', 'gcc'])
+ (['/bin/foo/ld_so_aix'], ['gcc'])
+ """
+ pivot = os.path.basename(cmd[0]) == 'ld_so_aix'
+ return cmd[:pivot], cmd[pivot:]
+
+
+def _linker_params(linker_cmd, compiler_cmd):
+ """
+ The linker command usually begins with the compiler
+ command (possibly multiple elements), followed by zero or more
+ params for shared library building.
+
+ If the LDSHARED env variable overrides the linker command,
+ however, the commands may not match.
+
+ Return the best guess of the linker parameters by stripping
+ the linker command. If the compiler command does not
+ match the linker command, assume the linker command is
+ just the first element.
+
+ >>> _linker_params('gcc foo bar'.split(), ['gcc'])
+ ['foo', 'bar']
+ >>> _linker_params('gcc foo bar'.split(), ['other'])
+ ['foo', 'bar']
+ >>> _linker_params('ccache gcc foo bar'.split(), 'ccache gcc'.split())
+ ['foo', 'bar']
+ >>> _linker_params(['gcc'], ['gcc'])
+ []
+ """
+ c_len = len(compiler_cmd)
+ pivot = c_len if linker_cmd[:c_len] == compiler_cmd else 1
+ return linker_cmd[pivot:]
+
+
+class UnixCCompiler(CCompiler):
+
+ compiler_type = 'unix'
+
+ # These are used by CCompiler in two places: the constructor sets
+ # instance attributes 'preprocessor', 'compiler', etc. from them, and
+ # 'set_executable()' allows any of these to be set. The defaults here
+ # are pretty generic; they will probably have to be set by an outsider
+    # (eg. using information discovered by the sysconfig module about building
+ # Python extensions).
+ executables = {'preprocessor' : None,
+ 'compiler' : ["cc"],
+ 'compiler_so' : ["cc"],
+ 'compiler_cxx' : ["cc"],
+ 'linker_so' : ["cc", "-shared"],
+ 'linker_exe' : ["cc"],
+ 'archiver' : ["ar", "-cr"],
+ 'ranlib' : None,
+ }
+
+ if sys.platform[:6] == "darwin":
+ executables['ranlib'] = ["ranlib"]
+
+ # Needed for the filename generation methods provided by the base
+ # class, CCompiler. NB. whoever instantiates/uses a particular
+ # UnixCCompiler instance should set 'shared_lib_ext' -- we set a
+ # reasonable common default here, but it's not necessarily used on all
+ # Unices!
+
+ src_extensions = [".c",".C",".cc",".cxx",".cpp",".m"]
+ obj_extension = ".o"
+ static_lib_extension = ".a"
+ shared_lib_extension = ".so"
+ dylib_lib_extension = ".dylib"
+ xcode_stub_lib_extension = ".tbd"
+ static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s"
+ xcode_stub_lib_format = dylib_lib_format
+ if sys.platform == "cygwin":
+ exe_extension = ".exe"
+
+ def preprocess(self, source, output_file=None, macros=None,
+ include_dirs=None, extra_preargs=None, extra_postargs=None):
+ fixed_args = self._fix_compile_args(None, macros, include_dirs)
+ ignore, macros, include_dirs = fixed_args
+ pp_opts = gen_preprocess_options(macros, include_dirs)
+ pp_args = self.preprocessor + pp_opts
+ if output_file:
+ pp_args.extend(['-o', output_file])
+ if extra_preargs:
+ pp_args[:0] = extra_preargs
+ if extra_postargs:
+ pp_args.extend(extra_postargs)
+ pp_args.append(source)
+
+ # We need to preprocess: either we're being forced to, or we're
+ # generating output to stdout, or there's a target output file and
+ # the source file is newer than the target (or the target doesn't
+ # exist).
+ if self.force or output_file is None or newer(source, output_file):
+ if output_file:
+ self.mkpath(os.path.dirname(output_file))
+ try:
+ self.spawn(pp_args)
+ except DistutilsExecError as msg:
+ raise CompileError(msg)
+
+ def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+ compiler_so = compiler_fixup(
+ self.compiler_so, cc_args + extra_postargs)
+ try:
+ self.spawn(compiler_so + cc_args + [src, '-o', obj] +
+ extra_postargs)
+ except DistutilsExecError as msg:
+ raise CompileError(msg)
+
+ def create_static_lib(self, objects, output_libname,
+ output_dir=None, debug=0, target_lang=None):
+ objects, output_dir = self._fix_object_args(objects, output_dir)
+
+ output_filename = \
+ self.library_filename(output_libname, output_dir=output_dir)
+
+ if self._need_link(objects, output_filename):
+ self.mkpath(os.path.dirname(output_filename))
+ self.spawn(self.archiver +
+ [output_filename] +
+ objects + self.objects)
+
+            # Not many Unices require ranlib anymore -- SunOS 4.x is, I
+            # think, the only major Unix that does.  Maybe we need some
+ # platform intelligence here to skip ranlib if it's not
+ # needed -- or maybe Python's configure script took care of
+ # it for us, hence the check for leading colon.
+ if self.ranlib:
+ try:
+ self.spawn(self.ranlib + [output_filename])
+ except DistutilsExecError as msg:
+ raise LibError(msg)
+ else:
+ log.debug("skipping %s (up-to-date)", output_filename)
+
+ def link(self, target_desc, objects,
+ output_filename, output_dir=None, libraries=None,
+ library_dirs=None, runtime_library_dirs=None,
+ export_symbols=None, debug=0, extra_preargs=None,
+ extra_postargs=None, build_temp=None, target_lang=None):
+ objects, output_dir = self._fix_object_args(objects, output_dir)
+ fixed_args = self._fix_lib_args(libraries, library_dirs,
+ runtime_library_dirs)
+ libraries, library_dirs, runtime_library_dirs = fixed_args
+
+ lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,
+ libraries)
+ if not isinstance(output_dir, (str, type(None))):
+ raise TypeError("'output_dir' must be a string or None")
+ if output_dir is not None:
+ output_filename = os.path.join(output_dir, output_filename)
+
+ if self._need_link(objects, output_filename):
+ ld_args = (objects + self.objects +
+ lib_opts + ['-o', output_filename])
+ if debug:
+ ld_args[:0] = ['-g']
+ if extra_preargs:
+ ld_args[:0] = extra_preargs
+ if extra_postargs:
+ ld_args.extend(extra_postargs)
+ self.mkpath(os.path.dirname(output_filename))
+ try:
+ # Select a linker based on context: linker_exe when
+ # building an executable or linker_so (with shared options)
+ # when building a shared library.
+ building_exe = target_desc == CCompiler.EXECUTABLE
+ linker = (self.linker_exe if building_exe else self.linker_so)[:]
+
+ if target_lang == "c++" and self.compiler_cxx:
+ env, linker_ne = _split_env(linker)
+ aix, linker_na = _split_aix(linker_ne)
+ _, compiler_cxx_ne = _split_env(self.compiler_cxx)
+ _, linker_exe_ne = _split_env(self.linker_exe)
+
+ params = _linker_params(linker_na, linker_exe_ne)
+ linker = env + aix + compiler_cxx_ne + params
+
+ linker = compiler_fixup(linker, ld_args)
+
+ self.spawn(linker + ld_args)
+ except DistutilsExecError as msg:
+ raise LinkError(msg)
+ else:
+ log.debug("skipping %s (up-to-date)", output_filename)
+
+ # -- Miscellaneous methods -----------------------------------------
+    # These are all used by the 'gen_lib_options()' function in
+ # ccompiler.py.
+
+ def library_dir_option(self, dir):
+ return "-L" + dir
+
+ def _is_gcc(self, compiler_name):
+ return "gcc" in compiler_name or "g++" in compiler_name
+
+ def runtime_library_dir_option(self, dir):
+ # XXX Hackish, at the very least. See Python bug #445902:
+ # http://sourceforge.net/tracker/index.php
+ # ?func=detail&aid=445902&group_id=5470&atid=105470
+ # Linkers on different platforms need different options to
+ # specify that directories need to be added to the list of
+ # directories searched for dependencies when a dynamic library
+ # is sought. GCC on GNU systems (Linux, FreeBSD, ...) has to
+ # be told to pass the -R option through to the linker, whereas
+ # other compilers and gcc on other systems just know this.
+ # Other compilers may need something slightly different. At
+ # this time, there's no way to determine this information from
+ # the configuration data stored in the Python installation, so
+ # we use this hack.
+ compiler = os.path.basename(shlex.split(sysconfig.get_config_var("CC"))[0])
+ if sys.platform[:6] == "darwin":
+ from distutils.util import get_macosx_target_ver, split_version
+ macosx_target_ver = get_macosx_target_ver()
+ if macosx_target_ver and split_version(macosx_target_ver) >= [10, 5]:
+ return "-Wl,-rpath," + dir
+ else: # no support for -rpath on earlier macOS versions
+ return "-L" + dir
+ elif sys.platform[:7] == "freebsd":
+ return "-Wl,-rpath=" + dir
+ elif sys.platform[:5] == "hp-ux":
+ if self._is_gcc(compiler):
+ return ["-Wl,+s", "-L" + dir]
+ return ["+s", "-L" + dir]
+
+ # For all compilers, `-Wl` is the presumed way to
+ # pass a compiler option to the linker and `-R` is
+ # the way to pass an RPATH.
+ if sysconfig.get_config_var("GNULD") == "yes":
+ # GNU ld needs an extra option to get a RUNPATH
+ # instead of just an RPATH.
+ return "-Wl,--enable-new-dtags,-R" + dir
+ else:
+ return "-Wl,-R" + dir
+
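+    # Illustrative behaviour (not part of the upstream sources; the exact
+    # result depends on the host toolchain): on a GNU/Linux build where
+    # sysconfig reports GNULD == "yes",
+    # runtime_library_dir_option('/opt/lib') returns
+    # '-Wl,--enable-new-dtags,-R/opt/lib'; on macOS 10.5+ it returns
+    # '-Wl,-rpath,/opt/lib', and on HP-UX it returns a list such as
+    # ['+s', '-L/opt/lib'] (or the '-Wl,+s' form when gcc is the compiler).
+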
+ def library_option(self, lib):
+ return "-l" + lib
+
+ def find_library_file(self, dirs, lib, debug=0):
+ shared_f = self.library_filename(lib, lib_type='shared')
+ dylib_f = self.library_filename(lib, lib_type='dylib')
+ xcode_stub_f = self.library_filename(lib, lib_type='xcode_stub')
+ static_f = self.library_filename(lib, lib_type='static')
+
+ if sys.platform == 'darwin':
+ # On OSX users can specify an alternate SDK using
+ # '-isysroot', calculate the SDK root if it is specified
+ # (and use it further on)
+ #
+ # Note that, as of Xcode 7, Apple SDKs may contain textual stub
+ # libraries with .tbd extensions rather than the normal .dylib
+ # shared libraries installed in /. The Apple compiler tool
+ # chain handles this transparently but it can cause problems
+ # for programs that are being built with an SDK and searching
+ # for specific libraries. Callers of find_library_file need to
+ # keep in mind that the base filename of the returned SDK library
+ # file might have a different extension from that of the library
+ # file installed on the running system, for example:
+ # /Applications/Xcode.app/Contents/Developer/Platforms/
+ # MacOSX.platform/Developer/SDKs/MacOSX10.11.sdk/
+ # usr/lib/libedit.tbd
+ # vs
+ # /usr/lib/libedit.dylib
+ cflags = sysconfig.get_config_var('CFLAGS')
+ m = re.search(r'-isysroot\s*(\S+)', cflags)
+ if m is None:
+ sysroot = '/'
+ else:
+ sysroot = m.group(1)
+
+ for dir in dirs:
+ shared = os.path.join(dir, shared_f)
+ dylib = os.path.join(dir, dylib_f)
+ static = os.path.join(dir, static_f)
+ xcode_stub = os.path.join(dir, xcode_stub_f)
+
+ if sys.platform == 'darwin' and (
+ dir.startswith('/System/') or (
+ dir.startswith('/usr/') and not dir.startswith('/usr/local/'))):
+
+ shared = os.path.join(sysroot, dir[1:], shared_f)
+ dylib = os.path.join(sysroot, dir[1:], dylib_f)
+ static = os.path.join(sysroot, dir[1:], static_f)
+ xcode_stub = os.path.join(sysroot, dir[1:], xcode_stub_f)
+
+ # We're second-guessing the linker here, with not much hard
+ # data to go on: GCC seems to prefer the shared library, so I'm
+ # assuming that *all* Unix C compilers do. And of course I'm
+ # ignoring even GCC's "-static" option. So sue me.
+ if os.path.exists(dylib):
+ return dylib
+ elif os.path.exists(xcode_stub):
+ return xcode_stub
+ elif os.path.exists(shared):
+ return shared
+ elif os.path.exists(static):
+ return static
+
+ # Oops, didn't find it in *any* of 'dirs'
+ return None
diff --git a/setuptools/_distutils/util.py b/setuptools/_distutils/util.py
new file mode 100644
index 0000000..6d506d7
--- /dev/null
+++ b/setuptools/_distutils/util.py
@@ -0,0 +1,496 @@
+"""distutils.util
+
+Miscellaneous utility functions -- anything that doesn't fit into
+one of the other *util.py modules.
+"""
+
+import os
+import re
+import importlib.util
+import string
+import sys
+import sysconfig
+from distutils.errors import DistutilsPlatformError
+from distutils.dep_util import newer
+from distutils.spawn import spawn
+from distutils import log
+from distutils.errors import DistutilsByteCompileError
+from .py35compat import _optim_args_from_interpreter_flags
+
+
+def get_host_platform():
+ """Return a string that identifies the current platform. This is used mainly to
+ distinguish platform-specific build directories and platform-specific built
+ distributions.
+ """
+
+ # We initially exposed platforms as defined in Python 3.9
+ # even with older Python versions when distutils was split out.
+ # Now that we delegate to stdlib sysconfig we need to restore this
+ # in case anyone has started to depend on it.
+
+ if sys.version_info < (3, 8):
+ if os.name == 'nt':
+ if '(arm)' in sys.version.lower():
+ return 'win-arm32'
+ if '(arm64)' in sys.version.lower():
+ return 'win-arm64'
+
+ if sys.version_info < (3, 9):
+ if os.name == "posix" and hasattr(os, 'uname'):
+ osname, host, release, version, machine = os.uname()
+ if osname[:3] == "aix":
+ from .py38compat import aix_platform
+ return aix_platform(osname, version, release)
+
+ return sysconfig.get_platform()
+
+def get_platform():
+ if os.name == 'nt':
+ TARGET_TO_PLAT = {
+ 'x86' : 'win32',
+ 'x64' : 'win-amd64',
+ 'arm' : 'win-arm32',
+ 'arm64': 'win-arm64',
+ }
+ return TARGET_TO_PLAT.get(os.environ.get('VSCMD_ARG_TGT_ARCH')) or get_host_platform()
+ else:
+ return get_host_platform()
+
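+# Illustrative example (not part of the upstream sources; platform tags
+# vary by machine): in a Visual Studio cross-build shell on Windows,
+# VSCMD_ARG_TGT_ARCH selects the target tag, e.g. 'x64' -> 'win-amd64'
+# and 'arm64' -> 'win-arm64'; everywhere else get_platform() simply
+# reports the host, e.g. 'linux-x86_64' on a 64-bit Linux interpreter.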
+
+if sys.platform == 'darwin':
+ _syscfg_macosx_ver = None # cache the version pulled from sysconfig
+MACOSX_VERSION_VAR = 'MACOSX_DEPLOYMENT_TARGET'
+
+def _clear_cached_macosx_ver():
+ """For testing only. Do not call."""
+ global _syscfg_macosx_ver
+ _syscfg_macosx_ver = None
+
+def get_macosx_target_ver_from_syscfg():
+ """Get the version of macOS latched in the Python interpreter configuration.
+    Returns the version as a string, or None if it cannot be obtained. Cached."""
+ global _syscfg_macosx_ver
+ if _syscfg_macosx_ver is None:
+ from distutils import sysconfig
+ ver = sysconfig.get_config_var(MACOSX_VERSION_VAR) or ''
+ if ver:
+ _syscfg_macosx_ver = ver
+ return _syscfg_macosx_ver
+
+def get_macosx_target_ver():
+ """Return the version of macOS for which we are building.
+
+ The target version defaults to the version in sysconfig latched at time
+ the Python interpreter was built, unless overridden by an environment
+ variable. If neither source has a value, then None is returned"""
+
+ syscfg_ver = get_macosx_target_ver_from_syscfg()
+ env_ver = os.environ.get(MACOSX_VERSION_VAR)
+
+ if env_ver:
+ # Validate overridden version against sysconfig version, if have both.
+ # Ensure that the deployment target of the build process is not less
+ # than 10.3 if the interpreter was built for 10.3 or later. This
+ # ensures extension modules are built with correct compatibility
+ # values, specifically LDSHARED which can use
+ # '-undefined dynamic_lookup' which only works on >= 10.3.
+ if syscfg_ver and split_version(syscfg_ver) >= [10, 3] and \
+ split_version(env_ver) < [10, 3]:
+ my_msg = ('$' + MACOSX_VERSION_VAR + ' mismatch: '
+ 'now "%s" but "%s" during configure; '
+ 'must use 10.3 or later'
+ % (env_ver, syscfg_ver))
+ raise DistutilsPlatformError(my_msg)
+ return env_ver
+ return syscfg_ver
+
+
+def split_version(s):
+ """Convert a dot-separated string into a list of numbers for comparisons"""
+ return [int(n) for n in s.split('.')]
+
+
+def convert_path (pathname):
+ """Return 'pathname' as a name that will work on the native filesystem,
+ i.e. split it on '/' and put it back together again using the current
+ directory separator. Needed because filenames in the setup script are
+ always supplied in Unix style, and have to be converted to the local
+ convention before we can actually use them in the filesystem. Raises
+ ValueError on non-Unix-ish systems if 'pathname' either starts or
+ ends with a slash.
+ """
+ if os.sep == '/':
+ return pathname
+ if not pathname:
+ return pathname
+ if pathname[0] == '/':
+ raise ValueError("path '%s' cannot be absolute" % pathname)
+ if pathname[-1] == '/':
+ raise ValueError("path '%s' cannot end with '/'" % pathname)
+
+ paths = pathname.split('/')
+ while '.' in paths:
+ paths.remove('.')
+ if not paths:
+ return os.curdir
+ return os.path.join(*paths)
+
+# convert_path ()
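+#
+# Illustrative example (not part of the upstream sources), assuming a
+# Windows-style platform where os.sep is '\\':
+#
+#   >>> convert_path('pkg/data/file.txt')
+#   'pkg\\data\\file.txt'
+#   >>> convert_path('pkg/./data')
+#   'pkg\\data'
+#
+# On POSIX (os.sep == '/') the pathname is returned unchanged.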
+
+
+def change_root (new_root, pathname):
+ """Return 'pathname' with 'new_root' prepended. If 'pathname' is
+ relative, this is equivalent to "os.path.join(new_root,pathname)".
+ Otherwise, it requires making 'pathname' relative and then joining the
+ two, which is tricky on DOS/Windows and Mac OS.
+ """
+ if os.name == 'posix':
+ if not os.path.isabs(pathname):
+ return os.path.join(new_root, pathname)
+ else:
+ return os.path.join(new_root, pathname[1:])
+
+ elif os.name == 'nt':
+ (drive, path) = os.path.splitdrive(pathname)
+ if path[0] == '\\':
+ path = path[1:]
+ return os.path.join(new_root, path)
+
+ else:
+ raise DistutilsPlatformError("nothing known about platform '%s'" % os.name)
+
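+# Illustrative example (not part of the upstream sources), on a POSIX
+# system:
+#
+#   >>> change_root('/staging', '/usr/lib/python3')
+#   '/staging/usr/lib/python3'
+#   >>> change_root('/staging', 'usr/lib/python3')
+#   '/staging/usr/lib/python3'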
+
+_environ_checked = 0
+def check_environ ():
+ """Ensure that 'os.environ' has all the environment variables we
+ guarantee that users can use in config files, command-line options,
+ etc. Currently this includes:
+ HOME - user's home directory (Unix only)
+ PLAT - description of the current platform, including hardware
+ and OS (see 'get_platform()')
+ """
+ global _environ_checked
+ if _environ_checked:
+ return
+
+ if os.name == 'posix' and 'HOME' not in os.environ:
+ try:
+ import pwd
+ os.environ['HOME'] = pwd.getpwuid(os.getuid())[5]
+ except (ImportError, KeyError):
+ # bpo-10496: if the current user identifier doesn't exist in the
+ # password database, do nothing
+ pass
+
+ if 'PLAT' not in os.environ:
+ os.environ['PLAT'] = get_platform()
+
+ _environ_checked = 1
+
+
+def subst_vars (s, local_vars):
+ """
+    Perform variable substitution on 's'.
+    Variables are indicated by format-style braces ("{var}").
+    Each variable is substituted with the value found in the 'local_vars'
+    dictionary, or in 'os.environ' if it is not in 'local_vars'.
+ 'os.environ' is first checked/augmented to guarantee that it contains
+ certain values: see 'check_environ()'. Raise ValueError for any
+ variables not found in either 'local_vars' or 'os.environ'.
+ """
+ check_environ()
+ lookup = dict(os.environ)
+ lookup.update((name, str(value)) for name, value in local_vars.items())
+ try:
+ return _subst_compat(s).format_map(lookup)
+ except KeyError as var:
+ raise ValueError(f"invalid variable {var}")
+
+# subst_vars ()
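+#
+# Illustrative example (not part of the upstream sources):
+#
+#   >>> subst_vars('{name}-{version}.tar.gz',
+#   ...            {'name': 'demo', 'version': '1.0'})
+#   'demo-1.0.tar.gz'
+#
+# The legacy '$name' spelling is still accepted but emits a
+# DeprecationWarning via _subst_compat() before formatting.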
+
+
+def _subst_compat(s):
+ """
+ Replace shell/Perl-style variable substitution with
+ format-style. For compatibility.
+ """
+ def _subst(match):
+ return f'{{{match.group(1)}}}'
+ repl = re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _subst, s)
+ if repl != s:
+ import warnings
+ warnings.warn(
+ "shell/Perl-style substitions are deprecated",
+ DeprecationWarning,
+ )
+ return repl
+
+
+def grok_environment_error (exc, prefix="error: "):
+ # Function kept for backward compatibility.
+ # Used to try clever things with EnvironmentErrors,
+ # but nowadays str(exception) produces good messages.
+ return prefix + str(exc)
+
+
+# Needed by 'split_quoted()'
+_wordchars_re = _squote_re = _dquote_re = None
+def _init_regex():
+ global _wordchars_re, _squote_re, _dquote_re
+ _wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace)
+ _squote_re = re.compile(r"'(?:[^'\\]|\\.)*'")
+ _dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"')
+
+def split_quoted (s):
+ """Split a string up according to Unix shell-like rules for quotes and
+ backslashes. In short: words are delimited by spaces, as long as those
+ spaces are not escaped by a backslash, or inside a quoted string.
+ Single and double quotes are equivalent, and the quote characters can
+ be backslash-escaped. The backslash is stripped from any two-character
+ escape sequence, leaving only the escaped character. The quote
+ characters are stripped from any quoted string. Returns a list of
+ words.
+ """
+
+ # This is a nice algorithm for splitting up a single string, since it
+ # doesn't require character-by-character examination. It was a little
+ # bit of a brain-bender to get it working right, though...
+ if _wordchars_re is None: _init_regex()
+
+ s = s.strip()
+ words = []
+ pos = 0
+
+ while s:
+ m = _wordchars_re.match(s, pos)
+ end = m.end()
+ if end == len(s):
+ words.append(s[:end])
+ break
+
+ if s[end] in string.whitespace: # unescaped, unquoted whitespace: now
+ words.append(s[:end]) # we definitely have a word delimiter
+ s = s[end:].lstrip()
+ pos = 0
+
+ elif s[end] == '\\': # preserve whatever is being escaped;
+ # will become part of the current word
+ s = s[:end] + s[end+1:]
+ pos = end+1
+
+ else:
+ if s[end] == "'": # slurp singly-quoted string
+ m = _squote_re.match(s, end)
+ elif s[end] == '"': # slurp doubly-quoted string
+ m = _dquote_re.match(s, end)
+ else:
+ raise RuntimeError("this can't happen (bad char '%c')" % s[end])
+
+ if m is None:
+ raise ValueError("bad string (mismatched %s quotes?)" % s[end])
+
+ (beg, end) = m.span()
+ s = s[:beg] + s[beg+1:end-1] + s[end:]
+ pos = m.end() - 2
+
+ if pos >= len(s):
+ words.append(s)
+ break
+
+ return words
+
+# split_quoted ()
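+#
+# Illustrative example (not part of the upstream sources):
+#
+#   >>> split_quoted('gcc -DNAME="my value" -I/opt/include\\ extra')
+#   ['gcc', '-DNAME=my value', '-I/opt/include extra']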
+
+
+def execute (func, args, msg=None, verbose=0, dry_run=0):
+ """Perform some action that affects the outside world (eg. by
+ writing to the filesystem). Such actions are special because they
+ are disabled by the 'dry_run' flag. This method takes care of all
+ that bureaucracy for you; all you have to do is supply the
+ function to call and an argument tuple for it (to embody the
+ "external action" being performed), and an optional message to
+ print.
+ """
+ if msg is None:
+ msg = "%s%r" % (func.__name__, args)
+ if msg[-2:] == ',)': # correct for singleton tuple
+ msg = msg[0:-2] + ')'
+
+ log.info(msg)
+ if not dry_run:
+ func(*args)
+
+
+def strtobool (val):
+ """Convert a string representation of truth to true (1) or false (0).
+
+ True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
+ are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if
+ 'val' is anything else.
+ """
+ val = val.lower()
+ if val in ('y', 'yes', 't', 'true', 'on', '1'):
+ return 1
+ elif val in ('n', 'no', 'f', 'false', 'off', '0'):
+ return 0
+ else:
+ raise ValueError("invalid truth value %r" % (val,))
+
+
+def byte_compile (py_files,
+ optimize=0, force=0,
+ prefix=None, base_dir=None,
+ verbose=1, dry_run=0,
+ direct=None):
+ """Byte-compile a collection of Python source files to .pyc
+ files in a __pycache__ subdirectory. 'py_files' is a list
+ of files to compile; any files that don't end in ".py" are silently
+ skipped. 'optimize' must be one of the following:
+ 0 - don't optimize
+ 1 - normal optimization (like "python -O")
+ 2 - extra optimization (like "python -OO")
+ If 'force' is true, all files are recompiled regardless of
+ timestamps.
+
+ The source filename encoded in each bytecode file defaults to the
+ filenames listed in 'py_files'; you can modify these with 'prefix' and
+    'base_dir'. 'prefix' is a string that will be stripped off of each
+ source filename, and 'base_dir' is a directory name that will be
+ prepended (after 'prefix' is stripped). You can supply either or both
+ (or neither) of 'prefix' and 'base_dir', as you wish.
+
+ If 'dry_run' is true, doesn't actually do anything that would
+ affect the filesystem.
+
+ Byte-compilation is either done directly in this interpreter process
+ with the standard py_compile module, or indirectly by writing a
+ temporary script and executing it. Normally, you should let
+    'byte_compile()' figure out whether to use direct compilation or not (see
+ the source for details). The 'direct' flag is used by the script
+ generated in indirect mode; unless you know what you're doing, leave
+ it set to None.
+ """
+
+ # Late import to fix a bootstrap issue: _posixsubprocess is built by
+ # setup.py, but setup.py uses distutils.
+ import subprocess
+
+ # nothing is done if sys.dont_write_bytecode is True
+ if sys.dont_write_bytecode:
+ raise DistutilsByteCompileError('byte-compiling is disabled.')
+
+ # First, if the caller didn't force us into direct or indirect mode,
+ # figure out which mode we should be in. We take a conservative
+ # approach: choose direct mode *only* if the current interpreter is
+ # in debug mode and optimize is 0. If we're not in debug mode (-O
+ # or -OO), we don't know which level of optimization this
+ # interpreter is running with, so we can't do direct
+ # byte-compilation and be certain that it's the right thing. Thus,
+ # always compile indirectly if the current interpreter is in either
+ # optimize mode, or if either optimization level was requested by
+ # the caller.
+ if direct is None:
+ direct = (__debug__ and optimize == 0)
+
+ # "Indirect" byte-compilation: write a temporary script and then
+ # run it with the appropriate flags.
+ if not direct:
+ try:
+ from tempfile import mkstemp
+ (script_fd, script_name) = mkstemp(".py")
+ except ImportError:
+ from tempfile import mktemp
+ (script_fd, script_name) = None, mktemp(".py")
+ log.info("writing byte-compilation script '%s'", script_name)
+ if not dry_run:
+ if script_fd is not None:
+ script = os.fdopen(script_fd, "w")
+ else:
+ script = open(script_name, "w")
+
+ with script:
+ script.write("""\
+from distutils.util import byte_compile
+files = [
+""")
+
+ # XXX would be nice to write absolute filenames, just for
+ # safety's sake (script should be more robust in the face of
+ # chdir'ing before running it). But this requires abspath'ing
+ # 'prefix' as well, and that breaks the hack in build_lib's
+ # 'byte_compile()' method that carefully tacks on a trailing
+ # slash (os.sep really) to make sure the prefix here is "just
+ # right". This whole prefix business is rather delicate -- the
+ # problem is that it's really a directory, but I'm treating it
+ # as a dumb string, so trailing slashes and so forth matter.
+
+ #py_files = map(os.path.abspath, py_files)
+ #if prefix:
+ # prefix = os.path.abspath(prefix)
+
+ script.write(",\n".join(map(repr, py_files)) + "]\n")
+ script.write("""
+byte_compile(files, optimize=%r, force=%r,
+ prefix=%r, base_dir=%r,
+ verbose=%r, dry_run=0,
+ direct=1)
+""" % (optimize, force, prefix, base_dir, verbose))
+
+ cmd = [sys.executable]
+ cmd.extend(_optim_args_from_interpreter_flags())
+ cmd.append(script_name)
+ spawn(cmd, dry_run=dry_run)
+ execute(os.remove, (script_name,), "removing %s" % script_name,
+ dry_run=dry_run)
+
+ # "Direct" byte-compilation: use the py_compile module to compile
+ # right here, right now. Note that the script generated in indirect
+ # mode simply calls 'byte_compile()' in direct mode, a weird sort of
+ # cross-process recursion. Hey, it works!
+ else:
+ from py_compile import compile
+
+ for file in py_files:
+ if file[-3:] != ".py":
+ # This lets us be lazy and not filter filenames in
+ # the "install_lib" command.
+ continue
+
+ # Terminology from the py_compile module:
+ # cfile - byte-compiled file
+ # dfile - purported source filename (same as 'file' by default)
+ if optimize >= 0:
+ opt = '' if optimize == 0 else optimize
+ cfile = importlib.util.cache_from_source(
+ file, optimization=opt)
+ else:
+ cfile = importlib.util.cache_from_source(file)
+ dfile = file
+ if prefix:
+ if file[:len(prefix)] != prefix:
+ raise ValueError("invalid prefix: filename %r doesn't start with %r"
+ % (file, prefix))
+ dfile = dfile[len(prefix):]
+ if base_dir:
+ dfile = os.path.join(base_dir, dfile)
+
+ cfile_base = os.path.basename(cfile)
+ if direct:
+ if force or newer(file, cfile):
+ log.info("byte-compiling %s to %s", file, cfile_base)
+ if not dry_run:
+ compile(file, cfile, dfile)
+ else:
+ log.debug("skipping byte-compilation of %s to %s",
+ file, cfile_base)
+
+# byte_compile ()
+
+def rfc822_escape (header):
+ """Return a version of the string escaped for inclusion in an
+    RFC-822 header, by ensuring there are 8 spaces after each newline.
+ """
+ lines = header.split('\n')
+ sep = '\n' + 8 * ' '
+ return sep.join(lines)
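+
+# Illustrative example (not part of the upstream sources):
+#
+#   >>> rfc822_escape('Summary line\nsecond line')
+#   'Summary line\n        second line'
+#
+# i.e. each embedded newline is followed by eight spaces so the value
+# remains a single (folded) RFC 822 header field.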
diff --git a/setuptools/_distutils/version.py b/setuptools/_distutils/version.py
new file mode 100644
index 0000000..35e181d
--- /dev/null
+++ b/setuptools/_distutils/version.py
@@ -0,0 +1,363 @@
+#
+# distutils/version.py
+#
+# Implements multiple version numbering conventions for the
+# Python Module Distribution Utilities.
+#
+# $Id$
+#
+
+"""Provides classes to represent module version numbers (one class for
+each style of version numbering). There are currently two such classes
+implemented: StrictVersion and LooseVersion.
+
+Every version number class implements the following interface:
+ * the 'parse' method takes a string and parses it to some internal
+ representation; if the string is an invalid version number,
+ 'parse' raises a ValueError exception
+ * the class constructor takes an optional string argument which,
+ if supplied, is passed to 'parse'
+ * __str__ reconstructs the string that was passed to 'parse' (or
+ an equivalent string -- ie. one that will generate an equivalent
+ version number instance)
+ * __repr__ generates Python code to recreate the version number instance
+ * _cmp compares the current instance with either another instance
+ of the same class or a string (which will be parsed to an instance
+ of the same class, thus must follow the same rules)
+"""
+
+import re
+import warnings
+import contextlib
+
+
+@contextlib.contextmanager
+def suppress_known_deprecation():
+ with warnings.catch_warnings(record=True) as ctx:
+ warnings.filterwarnings(
+ action='default',
+ category=DeprecationWarning,
+ message="distutils Version classes are deprecated.",
+ )
+ yield ctx
+
+
+class Version:
+ """Abstract base class for version numbering classes. Just provides
+ constructor (__init__) and reproducer (__repr__), because those
+ seem to be the same for all version numbering classes; and route
+ rich comparisons to _cmp.
+ """
+
+ def __init__ (self, vstring=None):
+ warnings.warn(
+ "distutils Version classes are deprecated. "
+ "Use packaging.version instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ if vstring:
+ self.parse(vstring)
+
+ def __repr__ (self):
+ return "%s ('%s')" % (self.__class__.__name__, str(self))
+
+ def __eq__(self, other):
+ c = self._cmp(other)
+ if c is NotImplemented:
+ return c
+ return c == 0
+
+ def __lt__(self, other):
+ c = self._cmp(other)
+ if c is NotImplemented:
+ return c
+ return c < 0
+
+ def __le__(self, other):
+ c = self._cmp(other)
+ if c is NotImplemented:
+ return c
+ return c <= 0
+
+ def __gt__(self, other):
+ c = self._cmp(other)
+ if c is NotImplemented:
+ return c
+ return c > 0
+
+ def __ge__(self, other):
+ c = self._cmp(other)
+ if c is NotImplemented:
+ return c
+ return c >= 0
+
+
+# Interface for version-number classes -- must be implemented
+# by the following classes (the concrete ones -- Version should
+# be treated as an abstract class).
+# __init__ (string) - create and take same action as 'parse'
+# (string parameter is optional)
+# parse (string) - convert a string representation to whatever
+# internal representation is appropriate for
+# this style of version numbering
+# __str__ (self) - convert back to a string; should be very similar
+# (if not identical to) the string supplied to parse
+# __repr__ (self) - generate Python code to recreate
+# the instance
+# _cmp (self, other) - compare two version numbers ('other' may
+# be an unparsed version string, or another
+# instance of your version class)
+
+
+class StrictVersion (Version):
+
+ """Version numbering for anal retentives and software idealists.
+ Implements the standard interface for version number classes as
+ described above. A version number consists of two or three
+ dot-separated numeric components, with an optional "pre-release" tag
+ on the end. The pre-release tag consists of the letter 'a' or 'b'
+ followed by a number. If the numeric components of two version
+ numbers are equal, then one with a pre-release tag will always
+ be deemed earlier (lesser) than one without.
+
+ The following are valid version numbers (shown in the order that
+ would be obtained by sorting according to the supplied cmp function):
+
+ 0.4 0.4.0 (these two are equivalent)
+ 0.4.1
+ 0.5a1
+ 0.5b3
+ 0.5
+ 0.9.6
+ 1.0
+ 1.0.4a3
+ 1.0.4b1
+ 1.0.4
+
+ The following are examples of invalid version numbers:
+
+ 1
+ 2.7.2.2
+ 1.3.a4
+ 1.3pl1
+ 1.3c4
+
+ The rationale for this version numbering system will be explained
+ in the distutils documentation.
+ """
+
+ version_re = re.compile(r'^(\d+) \. (\d+) (\. (\d+))? ([ab](\d+))?$',
+ re.VERBOSE | re.ASCII)
+
+
+ def parse (self, vstring):
+ match = self.version_re.match(vstring)
+ if not match:
+ raise ValueError("invalid version number '%s'" % vstring)
+
+ (major, minor, patch, prerelease, prerelease_num) = \
+ match.group(1, 2, 4, 5, 6)
+
+ if patch:
+ self.version = tuple(map(int, [major, minor, patch]))
+ else:
+ self.version = tuple(map(int, [major, minor])) + (0,)
+
+ if prerelease:
+ self.prerelease = (prerelease[0], int(prerelease_num))
+ else:
+ self.prerelease = None
+
+
+ def __str__ (self):
+
+ if self.version[2] == 0:
+ vstring = '.'.join(map(str, self.version[0:2]))
+ else:
+ vstring = '.'.join(map(str, self.version))
+
+ if self.prerelease:
+ vstring = vstring + self.prerelease[0] + str(self.prerelease[1])
+
+ return vstring
+
+
+ def _cmp (self, other):
+ if isinstance(other, str):
+ with suppress_known_deprecation():
+ other = StrictVersion(other)
+ elif not isinstance(other, StrictVersion):
+ return NotImplemented
+
+ if self.version != other.version:
+ # numeric versions don't match
+ # prerelease stuff doesn't matter
+ if self.version < other.version:
+ return -1
+ else:
+ return 1
+
+ # have to compare prerelease
+ # case 1: neither has prerelease; they're equal
+ # case 2: self has prerelease, other doesn't; other is greater
+ # case 3: self doesn't have prerelease, other does: self is greater
+ # case 4: both have prerelease: must compare them!
+
+ if (not self.prerelease and not other.prerelease):
+ return 0
+ elif (self.prerelease and not other.prerelease):
+ return -1
+ elif (not self.prerelease and other.prerelease):
+ return 1
+ elif (self.prerelease and other.prerelease):
+ if self.prerelease == other.prerelease:
+ return 0
+ elif self.prerelease < other.prerelease:
+ return -1
+ else:
+ return 1
+ else:
+ assert False, "never get here"
+
+# end class StrictVersion
+
+
+# The rules according to Greg Stein:
+# 1) a version number has 1 or more numbers separated by a period or by
+# sequences of letters. If only periods, then these are compared
+# left-to-right to determine an ordering.
+# 2) sequences of letters are part of the tuple for comparison and are
+# compared lexicographically
+# 3) recognize the numeric components may have leading zeroes
+#
+# The LooseVersion class below implements these rules: a version number
+# string is split up into a tuple of integer and string components, and
+# comparison is a simple tuple comparison. This means that version
+# numbers behave in a predictable and obvious way, but a way that might
+# not necessarily be how people *want* version numbers to behave. There
+# wouldn't be a problem if people could stick to purely numeric version
+# numbers: just split on period and compare the numbers as tuples.
+# However, people insist on putting letters into their version numbers;
+# the most common purpose seems to be:
+# - indicating a "pre-release" version
+# ('alpha', 'beta', 'a', 'b', 'pre', 'p')
+# - indicating a post-release patch ('p', 'pl', 'patch')
+# but of course this can't cover all version number schemes, and there's
+# no way to know what a programmer means without asking him.
+#
+# The problem is what to do with letters (and other non-numeric
+# characters) in a version number. The current implementation does the
+# obvious and predictable thing: keep them as strings and compare
+# lexically within a tuple comparison. This has the desired effect if
+# an appended letter sequence implies something "post-release":
+# eg. "0.99" < "0.99pl14" < "1.0", and "5.001" < "5.001m" < "5.002".
+#
+# However, if letters in a version number imply a pre-release version,
+# the "obvious" thing isn't correct. Eg. you would expect that
+# "1.5.1" < "1.5.2a2" < "1.5.2", but under the tuple/lexical comparison
+# implemented here, this just isn't so.
+#
+# Two possible solutions come to mind. The first is to tie the
+# comparison algorithm to a particular set of semantic rules, as has
+# been done in the StrictVersion class above. This works great as long
+# as everyone can go along with bondage and discipline. Hopefully a
+# (large) subset of Python module programmers will agree that the
+# particular flavour of bondage and discipline provided by StrictVersion
+# provides enough benefit to be worth using, and will submit their
+# version numbering scheme to its domination. The free-thinking
+# anarchists in the lot will never give in, though, and something needs
+# to be done to accommodate them.
+#
+# Perhaps a "moderately strict" version class could be implemented that
+# lets almost anything slide (syntactically), and makes some heuristic
+# assumptions about non-digits in version number strings. This could
+# sink into special-case-hell, though; if I was as talented and
+# idiosyncratic as Larry Wall, I'd go ahead and implement a class that
+# somehow knows that "1.2.1" < "1.2.2a2" < "1.2.2" < "1.2.2pl3", and is
+# just as happy dealing with things like "2g6" and "1.13++". I don't
+# think I'm smart enough to do it right though.
+#
+# In any case, I've coded the test suite for this module (see
+# ../test/test_version.py) specifically to fail on things like comparing
+# "1.2a2" and "1.2". That's not because the *code* is doing anything
+# wrong, it's because the simple, obvious design doesn't match my
+# complicated, hairy expectations for real-world version numbers. It
+# would be a snap to fix the test suite to say, "Yep, LooseVersion does
+# the Right Thing" (ie. the code matches the conception). But I'd rather
+# have a conception that matches common notions about version numbers.
+
+class LooseVersion (Version):
+
+ """Version numbering for anarchists and software realists.
+ Implements the standard interface for version number classes as
+ described above. A version number consists of a series of numbers,
+ separated by either periods or strings of letters. When comparing
+ version numbers, the numeric components will be compared
+ numerically, and the alphabetic components lexically. The following
+ are all valid version numbers, in no particular order:
+
+ 1.5.1
+ 1.5.2b2
+ 161
+ 3.10a
+ 8.02
+ 3.4j
+ 1996.07.12
+ 3.2.pl0
+ 3.1.1.6
+ 2g6
+ 11g
+ 0.960923
+ 2.2beta29
+ 1.13++
+ 5.5.kw
+ 2.0b1pl0
+
+ In fact, there is no such thing as an invalid version number under
+ this scheme; the rules for comparison are simple and predictable,
+ but may not always give the results you want (for some definition
+ of "want").
+ """
+
+ component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)
+
+ def parse (self, vstring):
+ # I've given up on thinking I can reconstruct the version string
+ # from the parsed tuple -- so I just store the string here for
+ # use by __str__
+ self.vstring = vstring
+ components = [x for x in self.component_re.split(vstring)
+ if x and x != '.']
+ for i, obj in enumerate(components):
+ try:
+ components[i] = int(obj)
+ except ValueError:
+ pass
+
+ self.version = components
+
+
+ def __str__ (self):
+ return self.vstring
+
+
+ def __repr__ (self):
+ return "LooseVersion ('%s')" % str(self)
+
+
+ def _cmp (self, other):
+ if isinstance(other, str):
+ other = LooseVersion(other)
+ elif not isinstance(other, LooseVersion):
+ return NotImplemented
+
+ if self.version == other.version:
+ return 0
+ if self.version < other.version:
+ return -1
+ if self.version > other.version:
+ return 1
+
+
+# end class LooseVersion
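A minimal sketch of the ordering semantics documented above, assuming this vendored module is importable as distutils.version and that suppress_known_deprecation is available, as in the file shown here:

    from distutils.version import LooseVersion, StrictVersion, suppress_known_deprecation

    # Constructing either class emits a DeprecationWarning; the helper above
    # silences only that specific message.
    with suppress_known_deprecation():
        assert StrictVersion("1.0.4a3") < StrictVersion("1.0.4")   # pre-release sorts first
        assert LooseVersion("1.5.1") < LooseVersion("1.5.2b2")     # numeric parts compared as ints
        # The purely lexical handling of letters is why "1.5.2a2" sorts
        # *after* "1.5.2" under LooseVersion, as discussed in the comments above.
        assert LooseVersion("1.5.2") < LooseVersion("1.5.2a2")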
diff --git a/setuptools/_distutils/versionpredicate.py b/setuptools/_distutils/versionpredicate.py
new file mode 100644
index 0000000..55f25d9
--- /dev/null
+++ b/setuptools/_distutils/versionpredicate.py
@@ -0,0 +1,169 @@
+"""Module for parsing and testing package version predicate strings.
+"""
+import re
+import distutils.version
+import operator
+
+
+re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)",
+ re.ASCII)
+# (package) (rest)
+
+re_paren = re.compile(r"^\s*\((.*)\)\s*$") # (list) inside of parentheses
+re_splitComparison = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$")
+# (comp) (version)
+
+
+def splitUp(pred):
+ """Parse a single version comparison.
+
+ Return (comparison string, StrictVersion)
+ """
+ res = re_splitComparison.match(pred)
+ if not res:
+ raise ValueError("bad package restriction syntax: %r" % pred)
+ comp, verStr = res.groups()
+ with distutils.version.suppress_known_deprecation():
+ other = distutils.version.StrictVersion(verStr)
+ return (comp, other)
+
+compmap = {"<": operator.lt, "<=": operator.le, "==": operator.eq,
+ ">": operator.gt, ">=": operator.ge, "!=": operator.ne}
+
+class VersionPredicate:
+ """Parse and test package version predicates.
+
+ >>> v = VersionPredicate('pyepat.abc (>1.0, <3333.3a1, !=1555.1b3)')
+
+ The `name` attribute provides the full dotted name that is given::
+
+ >>> v.name
+ 'pyepat.abc'
+
+ The str() of a `VersionPredicate` provides a normalized
+ human-readable version of the expression::
+
+ >>> print(v)
+ pyepat.abc (> 1.0, < 3333.3a1, != 1555.1b3)
+
+ The `satisfied_by()` method can be used to determine whether a given
+ version number is included in the set described by the version
+ restrictions::
+
+ >>> v.satisfied_by('1.1')
+ True
+ >>> v.satisfied_by('1.4')
+ True
+ >>> v.satisfied_by('1.0')
+ False
+ >>> v.satisfied_by('4444.4')
+ False
+ >>> v.satisfied_by('1555.1b3')
+ False
+
+ `VersionPredicate` is flexible in accepting extra whitespace::
+
+ >>> v = VersionPredicate(' pat( == 0.1 ) ')
+ >>> v.name
+ 'pat'
+ >>> v.satisfied_by('0.1')
+ True
+ >>> v.satisfied_by('0.2')
+ False
+
+ If any version numbers passed in do not conform to the
+ restrictions of `StrictVersion`, a `ValueError` is raised::
+
+ >>> v = VersionPredicate('p1.p2.p3.p4(>=1.0, <=1.3a1, !=1.2zb3)')
+ Traceback (most recent call last):
+ ...
+ ValueError: invalid version number '1.2zb3'
+
+ If the module or package name given does not conform to what's
+ allowed as a legal module or package name, `ValueError` is
+ raised::
+
+ >>> v = VersionPredicate('foo-bar')
+ Traceback (most recent call last):
+ ...
+ ValueError: expected parenthesized list: '-bar'
+
+ >>> v = VersionPredicate('foo bar (12.21)')
+ Traceback (most recent call last):
+ ...
+ ValueError: expected parenthesized list: 'bar (12.21)'
+
+ """
+
+ def __init__(self, versionPredicateStr):
+ """Parse a version predicate string.
+ """
+ # Fields:
+ # name: package name
+ # pred: list of (comparison string, StrictVersion)
+
+ versionPredicateStr = versionPredicateStr.strip()
+ if not versionPredicateStr:
+ raise ValueError("empty package restriction")
+ match = re_validPackage.match(versionPredicateStr)
+ if not match:
+ raise ValueError("bad package name in %r" % versionPredicateStr)
+ self.name, paren = match.groups()
+ paren = paren.strip()
+ if paren:
+ match = re_paren.match(paren)
+ if not match:
+ raise ValueError("expected parenthesized list: %r" % paren)
+ str = match.groups()[0]
+ self.pred = [splitUp(aPred) for aPred in str.split(",")]
+ if not self.pred:
+ raise ValueError("empty parenthesized list in %r"
+ % versionPredicateStr)
+ else:
+ self.pred = []
+
+ def __str__(self):
+ if self.pred:
+ seq = [cond + " " + str(ver) for cond, ver in self.pred]
+ return self.name + " (" + ", ".join(seq) + ")"
+ else:
+ return self.name
+
+ def satisfied_by(self, version):
+ """True if version is compatible with all the predicates in self.
+ The parameter version must be acceptable to the StrictVersion
+ constructor. It may be either a string or StrictVersion.
+ """
+ for cond, ver in self.pred:
+ if not compmap[cond](version, ver):
+ return False
+ return True
+
+
+_provision_rx = None
+
+def split_provision(value):
+ """Return the name and optional version number of a provision.
+
+ The version number, if given, will be returned as a `StrictVersion`
+ instance, otherwise it will be `None`.
+
+ >>> split_provision('mypkg')
+ ('mypkg', None)
+ >>> split_provision(' mypkg( 1.2 ) ')
+ ('mypkg', StrictVersion ('1.2'))
+ """
+ global _provision_rx
+ if _provision_rx is None:
+ _provision_rx = re.compile(
+ r"([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)(?:\s*\(\s*([^)\s]+)\s*\))?$",
+ re.ASCII)
+ value = value.strip()
+ m = _provision_rx.match(value)
+ if not m:
+ raise ValueError("illegal provides specification: %r" % value)
+ ver = m.group(2) or None
+ if ver:
+ with distutils.version.suppress_known_deprecation():
+ ver = distutils.version.StrictVersion(ver)
+ return m.group(1), ver
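A brief sketch complementing the doctests above: satisfied_by() accepts either a string or a StrictVersion instance (assuming the module is importable as distutils.versionpredicate, as with the stdlib copy of this file):

    from distutils.version import StrictVersion, suppress_known_deprecation
    from distutils.versionpredicate import VersionPredicate

    pred = VersionPredicate("mypkg (>=1.0, <2.0)")
    with suppress_known_deprecation():
        assert pred.satisfied_by("1.5")
        assert not pred.satisfied_by(StrictVersion("2.0"))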
diff --git a/setuptools/_entry_points.py b/setuptools/_entry_points.py
new file mode 100644
index 0000000..f087681
--- /dev/null
+++ b/setuptools/_entry_points.py
@@ -0,0 +1,86 @@
+import functools
+import operator
+import itertools
+
+from .extern.jaraco.text import yield_lines
+from .extern.jaraco.functools import pass_none
+from ._importlib import metadata
+from ._itertools import ensure_unique
+from .extern.more_itertools import consume
+
+
+def ensure_valid(ep):
+ """
+ Exercise one of the dynamic properties to trigger
+ the pattern match.
+ """
+ ep.extras
+
+
+def load_group(value, group):
+ """
+ Given a value of an entry point or series of entry points,
+ return each as an EntryPoint.
+ """
+ # normalize to a single sequence of lines
+ lines = yield_lines(value)
+ text = f'[{group}]\n' + '\n'.join(lines)
+ return metadata.EntryPoints._from_text(text)
+
+
+def by_group_and_name(ep):
+ return ep.group, ep.name
+
+
+def validate(eps: metadata.EntryPoints):
+ """
+ Ensure entry points are unique by group and name and validate each.
+ """
+ consume(map(ensure_valid, ensure_unique(eps, key=by_group_and_name)))
+ return eps
+
+
+@functools.singledispatch
+def load(eps):
+ """
+ Given a Distribution.entry_points, produce EntryPoints.
+ """
+ groups = itertools.chain.from_iterable(
+ load_group(value, group)
+ for group, value in eps.items())
+ return validate(metadata.EntryPoints(groups))
+
+
+@load.register(str)
+def _(eps):
+ r"""
+ >>> ep, = load('[console_scripts]\nfoo=bar')
+ >>> ep.group
+ 'console_scripts'
+ >>> ep.name
+ 'foo'
+ >>> ep.value
+ 'bar'
+ """
+ return validate(metadata.EntryPoints(metadata.EntryPoints._from_text(eps)))
+
+
+load.register(type(None), lambda x: x)
+
+
+@pass_none
+def render(eps: metadata.EntryPoints):
+ by_group = operator.attrgetter('group')
+ groups = itertools.groupby(sorted(eps, key=by_group), by_group)
+
+ return '\n'.join(
+ f'[{group}]\n{render_items(items)}\n'
+ for group, items in groups
+ )
+
+
+def render_items(eps):
+ return '\n'.join(
+ f'{ep.name} = {ep.value}'
+ for ep in sorted(eps)
+ )
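A minimal sketch of the round trip provided by this module; setuptools._entry_points is the internal module added in this diff, and the commented output is illustrative:

    from setuptools._entry_points import load, render

    # `load` accepts a dict, a string, or None; `render` writes the
    # normalized entry_points.txt-style text back out.
    eps = load({'console_scripts': ['hello = pkg.cli:main']})
    print(render(eps))
    # [console_scripts]
    # hello = pkg.cli:main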
diff --git a/setuptools/_imp.py b/setuptools/_imp.py
new file mode 100644
index 0000000..47efd79
--- /dev/null
+++ b/setuptools/_imp.py
@@ -0,0 +1,82 @@
+"""
+Re-implementation of find_module and get_frozen_object
+from the deprecated imp module.
+"""
+
+import os
+import importlib.util
+import importlib.machinery
+
+from .py34compat import module_from_spec
+
+
+PY_SOURCE = 1
+PY_COMPILED = 2
+C_EXTENSION = 3
+C_BUILTIN = 6
+PY_FROZEN = 7
+
+
+def find_spec(module, paths):
+ finder = (
+ importlib.machinery.PathFinder().find_spec
+ if isinstance(paths, list) else
+ importlib.util.find_spec
+ )
+ return finder(module, paths)
+
+
+def find_module(module, paths=None):
+ """Just like 'imp.find_module()', but with package support"""
+ spec = find_spec(module, paths)
+ if spec is None:
+ raise ImportError("Can't find %s" % module)
+ if not spec.has_location and hasattr(spec, 'submodule_search_locations'):
+ spec = importlib.util.spec_from_loader('__init__.py', spec.loader)
+
+ kind = -1
+ file = None
+ static = isinstance(spec.loader, type)
+ if spec.origin == 'frozen' or static and issubclass(
+ spec.loader, importlib.machinery.FrozenImporter):
+ kind = PY_FROZEN
+ path = None # imp compatibility
+ suffix = mode = '' # imp compatibility
+ elif spec.origin == 'built-in' or static and issubclass(
+ spec.loader, importlib.machinery.BuiltinImporter):
+ kind = C_BUILTIN
+ path = None # imp compatibility
+ suffix = mode = '' # imp compatibility
+ elif spec.has_location:
+ path = spec.origin
+ suffix = os.path.splitext(path)[1]
+ mode = 'r' if suffix in importlib.machinery.SOURCE_SUFFIXES else 'rb'
+
+ if suffix in importlib.machinery.SOURCE_SUFFIXES:
+ kind = PY_SOURCE
+ elif suffix in importlib.machinery.BYTECODE_SUFFIXES:
+ kind = PY_COMPILED
+ elif suffix in importlib.machinery.EXTENSION_SUFFIXES:
+ kind = C_EXTENSION
+
+ if kind in {PY_SOURCE, PY_COMPILED}:
+ file = open(path, mode)
+ else:
+ path = None
+ suffix = mode = ''
+
+ return file, path, (suffix, mode, kind)
+
+
+def get_frozen_object(module, paths=None):
+ spec = find_spec(module, paths)
+ if not spec:
+ raise ImportError("Can't find %s" % module)
+ return spec.loader.get_code(module)
+
+
+def get_module(module, paths, info):
+ spec = find_spec(module, paths)
+ if not spec:
+ raise ImportError("Can't find %s" % module)
+ return module_from_spec(spec)
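A rough sketch of the imp-style triple reconstructed by find_module() above; 'argparse' is just an arbitrary pure-Python stdlib module chosen for illustration:

    from setuptools._imp import PY_SOURCE, find_module

    file, path, (suffix, mode, kind) = find_module('argparse')
    print(suffix, mode, kind == PY_SOURCE)   # .py r True
    if file:
        file.close()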
diff --git a/setuptools/_importlib.py b/setuptools/_importlib.py
new file mode 100644
index 0000000..c1ac137
--- /dev/null
+++ b/setuptools/_importlib.py
@@ -0,0 +1,36 @@
+import sys
+
+
+def disable_importlib_metadata_finder(metadata):
+ """
+ Ensure importlib_metadata doesn't provide older, incompatible
+ Distributions.
+
+ Workaround for #3102.
+ """
+ try:
+ import importlib_metadata
+ except ImportError:
+ return
+ if importlib_metadata is metadata:
+ return
+ to_remove = [
+ ob
+ for ob in sys.meta_path
+ if isinstance(ob, importlib_metadata.MetadataPathFinder)
+ ]
+ for item in to_remove:
+ sys.meta_path.remove(item)
+
+
+if sys.version_info < (3, 10):
+ from setuptools.extern import importlib_metadata as metadata
+ disable_importlib_metadata_finder(metadata)
+else:
+ import importlib.metadata as metadata # noqa: F401
+
+
+if sys.version_info < (3, 9):
+ from setuptools.extern import importlib_resources as resources
+else:
+ import importlib.resources as resources # noqa: F401
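Whichever branch is taken above, the rest of setuptools can import the same name; a minimal sketch using the internal module path added in this diff:

    from setuptools._importlib import metadata

    # Same API as importlib.metadata / importlib_metadata.
    dist = metadata.distribution('setuptools')
    print(dist.metadata['Name'], dist.version)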
diff --git a/setuptools/_itertools.py b/setuptools/_itertools.py
new file mode 100644
index 0000000..b8bf6d2
--- /dev/null
+++ b/setuptools/_itertools.py
@@ -0,0 +1,23 @@
+from setuptools.extern.more_itertools import consume # noqa: F401
+
+
+# copied from jaraco.itertools 6.1
+def ensure_unique(iterable, key=lambda x: x):
+ """
+ Wrap an iterable to raise a ValueError if non-unique values are encountered.
+
+ >>> list(ensure_unique('abc'))
+ ['a', 'b', 'c']
+ >>> consume(ensure_unique('abca'))
+ Traceback (most recent call last):
+ ...
+ ValueError: Duplicate element 'a' encountered.
+ """
+ seen = set()
+ seen_add = seen.add
+ for element in iterable:
+ k = key(element)
+ if k in seen:
+ raise ValueError(f"Duplicate element {element!r} encountered.")
+ seen_add(k)
+ yield element
diff --git a/setuptools/_path.py b/setuptools/_path.py
new file mode 100644
index 0000000..ede9cb0
--- /dev/null
+++ b/setuptools/_path.py
@@ -0,0 +1,7 @@
+import os
+
+
+def ensure_directory(path):
+ """Ensure that the parent directory of `path` exists"""
+ dirname = os.path.dirname(path)
+ os.makedirs(dirname, exist_ok=True)
diff --git a/setuptools/_reqs.py b/setuptools/_reqs.py
new file mode 100644
index 0000000..ca72417
--- /dev/null
+++ b/setuptools/_reqs.py
@@ -0,0 +1,19 @@
+import setuptools.extern.jaraco.text as text
+
+from pkg_resources import Requirement
+
+
+def parse_strings(strs):
+ """
+ Yield requirement strings for each specification in `strs`.
+
+ `strs` must be a string, or a (possibly-nested) iterable thereof.
+ """
+ return text.join_continuation(map(text.drop_comment, text.yield_lines(strs)))
+
+
+def parse(strs):
+ """
+ Deprecated drop-in replacement for pkg_resources.parse_requirements.
+ """
+ return map(Requirement, parse_strings(strs))
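A minimal sketch of how the two helpers above behave on a multi-line specification (internal module path as added here): blank lines are dropped and trailing comments removed before pkg_resources.Requirement parses each entry.

    from setuptools._reqs import parse, parse_strings

    spec = """
    requests>=2.0 # inline comment, removed by drop_comment
    packaging
    """
    print(list(parse_strings(spec)))
    print([req.project_name for req in parse(spec)])   # ['requests', 'packaging']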
diff --git a/setuptools/_vendor/__pycache__/__init__.cpython-36.pyc b/setuptools/_vendor/__pycache__/__init__.cpython-36.pyc
deleted file mode 100644
index f86f802..0000000
--- a/setuptools/_vendor/__pycache__/__init__.cpython-36.pyc
+++ /dev/null
Binary files differ
diff --git a/setuptools/_vendor/__pycache__/six.cpython-36.pyc b/setuptools/_vendor/__pycache__/six.cpython-36.pyc
deleted file mode 100644
index afda210..0000000
--- a/setuptools/_vendor/__pycache__/six.cpython-36.pyc
+++ /dev/null
Binary files differ
diff --git a/setuptools/_vendor/_validate_pyproject/__init__.py b/setuptools/_vendor/_validate_pyproject/__init__.py
new file mode 100644
index 0000000..dbe6cb4
--- /dev/null
+++ b/setuptools/_vendor/_validate_pyproject/__init__.py
@@ -0,0 +1,34 @@
+from functools import reduce
+from typing import Any, Callable, Dict
+
+from . import formats
+from .error_reporting import detailed_errors, ValidationError
+from .extra_validations import EXTRA_VALIDATIONS
+from .fastjsonschema_exceptions import JsonSchemaException, JsonSchemaValueException
+from .fastjsonschema_validations import validate as _validate
+
+__all__ = [
+ "validate",
+ "FORMAT_FUNCTIONS",
+ "EXTRA_VALIDATIONS",
+ "ValidationError",
+ "JsonSchemaException",
+ "JsonSchemaValueException",
+]
+
+
+FORMAT_FUNCTIONS: Dict[str, Callable[[str], bool]] = {
+ fn.__name__.replace("_", "-"): fn
+ for fn in formats.__dict__.values()
+ if callable(fn) and not fn.__name__.startswith("_")
+}
+
+
+def validate(data: Any) -> bool:
+ """Validate the given ``data`` object using JSON Schema
+ This function raises ``ValidationError`` if ``data`` is invalid.
+ """
+ with detailed_errors():
+ _validate(data, custom_formats=FORMAT_FUNCTIONS)
+ reduce(lambda acc, fn: fn(acc), EXTRA_VALIDATIONS, data)
+ return True
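A minimal sketch of the public entry point defined above, using the vendored import path added in this diff (the data would normally come from parsing pyproject.toml):

    from setuptools._vendor._validate_pyproject import ValidationError, validate

    good = {"build-system": {"requires": ["setuptools", "wheel"]}}
    assert validate(good) is True

    try:
        validate({"build-system": {}})   # "requires" is mandatory
    except ValidationError as exc:
        print(exc.summary)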
diff --git a/setuptools/_vendor/_validate_pyproject/error_reporting.py b/setuptools/_vendor/_validate_pyproject/error_reporting.py
new file mode 100644
index 0000000..3a4d4e9
--- /dev/null
+++ b/setuptools/_vendor/_validate_pyproject/error_reporting.py
@@ -0,0 +1,318 @@
+import io
+import json
+import logging
+import os
+import re
+from contextlib import contextmanager
+from textwrap import indent, wrap
+from typing import Any, Dict, Iterator, List, Optional, Sequence, Union, cast
+
+from .fastjsonschema_exceptions import JsonSchemaValueException
+
+_logger = logging.getLogger(__name__)
+
+_MESSAGE_REPLACEMENTS = {
+ "must be named by propertyName definition": "keys must be named by",
+ "one of contains definition": "at least one item that matches",
+ " same as const definition:": "",
+ "only specified items": "only items matching the definition",
+}
+
+_SKIP_DETAILS = (
+ "must not be empty",
+ "is always invalid",
+ "must not be there",
+)
+
+_NEED_DETAILS = {"anyOf", "oneOf", "contains", "propertyNames", "not", "items"}
+
+_CAMEL_CASE_SPLITTER = re.compile(r"\W+|([A-Z][^A-Z\W]*)")
+_IDENTIFIER = re.compile(r"^[\w_]+$", re.I)
+
+_TOML_JARGON = {
+ "object": "table",
+ "property": "key",
+ "properties": "keys",
+ "property names": "keys",
+}
+
+
+class ValidationError(JsonSchemaValueException):
+ """Report violations of a given JSON schema.
+
+ This class extends :exc:`~fastjsonschema.JsonSchemaValueException`
+ by adding the following properties:
+
+ - ``summary``: an improved version of the ``JsonSchemaValueException`` error message
+ with only the necessary information
+
+ - ``details``: more contextual information about the error like the failing schema
+ itself and the value that violates the schema.
+
+ Depending on the level of the verbosity of the ``logging`` configuration
+ the exception message will be only ``summary`` (default) or a combination of
+ ``summary`` and ``details`` (when the logging level is set to :obj:`logging.DEBUG`).
+ """
+
+ summary = ""
+ details = ""
+ _original_message = ""
+
+ @classmethod
+ def _from_jsonschema(cls, ex: JsonSchemaValueException):
+ formatter = _ErrorFormatting(ex)
+ obj = cls(str(formatter), ex.value, formatter.name, ex.definition, ex.rule)
+ debug_code = os.getenv("JSONSCHEMA_DEBUG_CODE_GENERATION", "false").lower()
+ if debug_code != "false": # pragma: no cover
+ obj.__cause__, obj.__traceback__ = ex.__cause__, ex.__traceback__
+ obj._original_message = ex.message
+ obj.summary = formatter.summary
+ obj.details = formatter.details
+ return obj
+
+
+@contextmanager
+def detailed_errors():
+ try:
+ yield
+ except JsonSchemaValueException as ex:
+ raise ValidationError._from_jsonschema(ex) from None
+
+
+class _ErrorFormatting:
+ def __init__(self, ex: JsonSchemaValueException):
+ self.ex = ex
+ self.name = f"`{self._simplify_name(ex.name)}`"
+ self._original_message = self.ex.message.replace(ex.name, self.name)
+ self._summary = ""
+ self._details = ""
+
+ def __str__(self) -> str:
+ if _logger.getEffectiveLevel() <= logging.DEBUG and self.details:
+ return f"{self.summary}\n\n{self.details}"
+
+ return self.summary
+
+ @property
+ def summary(self) -> str:
+ if not self._summary:
+ self._summary = self._expand_summary()
+
+ return self._summary
+
+ @property
+ def details(self) -> str:
+ if not self._details:
+ self._details = self._expand_details()
+
+ return self._details
+
+ def _simplify_name(self, name):
+ x = len("data.")
+ return name[x:] if name.startswith("data.") else name
+
+ def _expand_summary(self):
+ msg = self._original_message
+
+ for bad, repl in _MESSAGE_REPLACEMENTS.items():
+ msg = msg.replace(bad, repl)
+
+ if any(substring in msg for substring in _SKIP_DETAILS):
+ return msg
+
+ schema = self.ex.rule_definition
+ if self.ex.rule in _NEED_DETAILS and schema:
+ summary = _SummaryWriter(_TOML_JARGON)
+ return f"{msg}:\n\n{indent(summary(schema), ' ')}"
+
+ return msg
+
+ def _expand_details(self) -> str:
+ optional = []
+ desc_lines = self.ex.definition.pop("$$description", [])
+ desc = self.ex.definition.pop("description", None) or " ".join(desc_lines)
+ if desc:
+ description = "\n".join(
+ wrap(
+ desc,
+ width=80,
+ initial_indent=" ",
+ subsequent_indent=" ",
+ break_long_words=False,
+ )
+ )
+ optional.append(f"DESCRIPTION:\n{description}")
+ schema = json.dumps(self.ex.definition, indent=4)
+ value = json.dumps(self.ex.value, indent=4)
+ defaults = [
+ f"GIVEN VALUE:\n{indent(value, ' ')}",
+ f"OFFENDING RULE: {self.ex.rule!r}",
+ f"DEFINITION:\n{indent(schema, ' ')}",
+ ]
+ return "\n\n".join(optional + defaults)
+
+
+class _SummaryWriter:
+ _IGNORE = {"description", "default", "title", "examples"}
+
+ def __init__(self, jargon: Optional[Dict[str, str]] = None):
+ self.jargon: Dict[str, str] = jargon or {}
+ # Clarify confusing terms
+ self._terms = {
+ "anyOf": "at least one of the following",
+ "oneOf": "exactly one of the following",
+ "allOf": "all of the following",
+ "not": "(*NOT* the following)",
+ "prefixItems": f"{self._jargon('items')} (in order)",
+ "items": "items",
+ "contains": "contains at least one of",
+ "propertyNames": (
+ f"non-predefined acceptable {self._jargon('property names')}"
+ ),
+ "patternProperties": f"{self._jargon('properties')} named via pattern",
+ "const": "predefined value",
+ "enum": "one of",
+ }
+ # Attributes that indicate that the definition is easy and can be done
+ # inline (e.g. string and number)
+ self._guess_inline_defs = [
+ "enum",
+ "const",
+ "maxLength",
+ "minLength",
+ "pattern",
+ "format",
+ "minimum",
+ "maximum",
+ "exclusiveMinimum",
+ "exclusiveMaximum",
+ "multipleOf",
+ ]
+
+ def _jargon(self, term: Union[str, List[str]]) -> Union[str, List[str]]:
+ if isinstance(term, list):
+ return [self.jargon.get(t, t) for t in term]
+ return self.jargon.get(term, term)
+
+ def __call__(
+ self,
+ schema: Union[dict, List[dict]],
+ prefix: str = "",
+ *,
+ _path: Sequence[str] = (),
+ ) -> str:
+ if isinstance(schema, list):
+ return self._handle_list(schema, prefix, _path)
+
+ filtered = self._filter_unecessary(schema, _path)
+ simple = self._handle_simple_dict(filtered, _path)
+ if simple:
+ return f"{prefix}{simple}"
+
+ child_prefix = self._child_prefix(prefix, " ")
+ item_prefix = self._child_prefix(prefix, "- ")
+ indent = len(prefix) * " "
+ with io.StringIO() as buffer:
+ for i, (key, value) in enumerate(filtered.items()):
+ child_path = [*_path, key]
+ line_prefix = prefix if i == 0 else indent
+ buffer.write(f"{line_prefix}{self._label(child_path)}:")
+ # ^ just the first item should receive the complete prefix
+ if isinstance(value, dict):
+ filtered = self._filter_unecessary(value, child_path)
+ simple = self._handle_simple_dict(filtered, child_path)
+ buffer.write(
+ f" {simple}"
+ if simple
+ else f"\n{self(value, child_prefix, _path=child_path)}"
+ )
+ elif isinstance(value, list) and (
+ key != "type" or self._is_property(child_path)
+ ):
+ children = self._handle_list(value, item_prefix, child_path)
+ sep = " " if children.startswith("[") else "\n"
+ buffer.write(f"{sep}{children}")
+ else:
+ buffer.write(f" {self._value(value, child_path)}\n")
+ return buffer.getvalue()
+
+ def _is_unecessary(self, path: Sequence[str]) -> bool:
+ if self._is_property(path) or not path: # empty path => instruction @ root
+ return False
+ key = path[-1]
+ return any(key.startswith(k) for k in "$_") or key in self._IGNORE
+
+ def _filter_unecessary(self, schema: dict, path: Sequence[str]):
+ return {
+ key: value
+ for key, value in schema.items()
+ if not self._is_unecessary([*path, key])
+ }
+
+ def _handle_simple_dict(self, value: dict, path: Sequence[str]) -> Optional[str]:
+ inline = any(p in value for p in self._guess_inline_defs)
+ simple = not any(isinstance(v, (list, dict)) for v in value.values())
+ if inline or simple:
+ return f"{{{', '.join(self._inline_attrs(value, path))}}}\n"
+ return None
+
+ def _handle_list(
+ self, schemas: list, prefix: str = "", path: Sequence[str] = ()
+ ) -> str:
+ if self._is_unecessary(path):
+ return ""
+
+ repr_ = repr(schemas)
+ if all(not isinstance(e, (dict, list)) for e in schemas) and len(repr_) < 60:
+ return f"{repr_}\n"
+
+ item_prefix = self._child_prefix(prefix, "- ")
+ return "".join(
+ self(v, item_prefix, _path=[*path, f"[{i}]"]) for i, v in enumerate(schemas)
+ )
+
+ def _is_property(self, path: Sequence[str]):
+ """Check if the given path can correspond to an arbitrarily named property"""
+ counter = 0
+ for key in path[-2::-1]:
+ if key not in {"properties", "patternProperties"}:
+ break
+ counter += 1
+
+ # If the counter is even, the path corresponds to a JSON Schema keyword
+ # otherwise it can be any arbitrary string naming a property
+ return counter % 2 == 1
+
+ def _label(self, path: Sequence[str]) -> str:
+ *parents, key = path
+ if not self._is_property(path):
+ norm_key = _separate_terms(key)
+ return self._terms.get(key) or " ".join(self._jargon(norm_key))
+
+ if parents[-1] == "patternProperties":
+ return f"(regex {key!r})"
+ return repr(key) # property name
+
+ def _value(self, value: Any, path: Sequence[str]) -> str:
+ if path[-1] == "type" and not self._is_property(path):
+ type_ = self._jargon(value)
+ return (
+ f"[{', '.join(type_)}]" if isinstance(value, list) else cast(str, type_)
+ )
+ return repr(value)
+
+ def _inline_attrs(self, schema: dict, path: Sequence[str]) -> Iterator[str]:
+ for key, value in schema.items():
+ child_path = [*path, key]
+ yield f"{self._label(child_path)}: {self._value(value, child_path)}"
+
+ def _child_prefix(self, parent_prefix: str, child_prefix: str) -> str:
+ return len(parent_prefix) * " " + child_prefix
+
+
+def _separate_terms(word: str) -> List[str]:
+ """
+ >>> _separate_terms("FooBar-foo")
+ "foo bar foo"
+ """
+ return [w.lower() for w in _CAMEL_CASE_SPLITTER.split(word) if w]
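A small sketch of the term splitting used by the summary writer above (vendored import path as added in this diff):

    from setuptools._vendor._validate_pyproject.error_reporting import _separate_terms

    # camelCase schema keywords are split into lowercase words before the
    # _TOML_JARGON translation is applied.
    print(_separate_terms("patternProperties"))   # ['pattern', 'properties']
    print(_separate_terms("FooBar-foo"))          # ['foo', 'bar', 'foo']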
diff --git a/setuptools/_vendor/_validate_pyproject/extra_validations.py b/setuptools/_vendor/_validate_pyproject/extra_validations.py
new file mode 100644
index 0000000..48c4e25
--- /dev/null
+++ b/setuptools/_vendor/_validate_pyproject/extra_validations.py
@@ -0,0 +1,36 @@
+"""The purpose of this module is implement PEP 621 validations that are
+difficult to express as a JSON Schema (or that are not supported by the current
+JSON Schema library).
+"""
+
+from typing import Mapping, TypeVar
+
+from .fastjsonschema_exceptions import JsonSchemaValueException
+
+T = TypeVar("T", bound=Mapping)
+
+
+class RedefiningStaticFieldAsDynamic(JsonSchemaValueException):
+ """According to PEP 621:
+
+ Build back-ends MUST raise an error if the metadata specifies a field
+ statically as well as being listed in dynamic.
+ """
+
+
+def validate_project_dynamic(pyproject: T) -> T:
+ project_table = pyproject.get("project", {})
+ dynamic = project_table.get("dynamic", [])
+
+ for field in dynamic:
+ if field in project_table:
+ msg = f"You cannot provide a value for `project.{field}` and "
+ msg += "list it under `project.dynamic` at the same time"
+ name = f"data.project.{field}"
+ value = {field: project_table[field], "...": " # ...", "dynamic": dynamic}
+ raise RedefiningStaticFieldAsDynamic(msg, value, name, rule="PEP 621")
+
+ return pyproject
+
+
+EXTRA_VALIDATIONS = (validate_project_dynamic,)
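A minimal sketch of the extra check above (vendored import path as added in this diff):

    from setuptools._vendor._validate_pyproject.extra_validations import (
        RedefiningStaticFieldAsDynamic,
        validate_project_dynamic,
    )

    pyproject = {"project": {"name": "pkg", "version": "1.0", "dynamic": ["version"]}}
    try:
        validate_project_dynamic(pyproject)
    except RedefiningStaticFieldAsDynamic as exc:
        print(exc.message)   # version is static but also listed in dynamic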
diff --git a/setuptools/_vendor/_validate_pyproject/fastjsonschema_exceptions.py b/setuptools/_vendor/_validate_pyproject/fastjsonschema_exceptions.py
new file mode 100644
index 0000000..d2dddd6
--- /dev/null
+++ b/setuptools/_vendor/_validate_pyproject/fastjsonschema_exceptions.py
@@ -0,0 +1,51 @@
+import re
+
+
+SPLIT_RE = re.compile(r'[\.\[\]]+')
+
+
+class JsonSchemaException(ValueError):
+ """
+ Base exception of ``fastjsonschema`` library.
+ """
+
+
+class JsonSchemaValueException(JsonSchemaException):
+ """
+ Exception raised by validation function. Available properties:
+
+ * ``message`` containing human-readable information about what is wrong (e.g. ``data.property[index] must be smaller than or equal to 42``),
+ * invalid ``value`` (e.g. ``60``),
+ * ``name`` of a path in the data structure (e.g. ``data.property[index]``),
+ * ``path`` as an array in the data structure (e.g. ``['data', 'property', 'index']``),
+ * the whole ``definition`` which the ``value`` has to fulfil (e.g. ``{'type': 'number', 'maximum': 42}``),
+ * ``rule`` which the ``value`` is breaking (e.g. ``maximum``)
+ * and ``rule_definition`` (e.g. ``42``).
+
+ .. versionchanged:: 2.14.0
+ Added all extra properties.
+ """
+
+ def __init__(self, message, value=None, name=None, definition=None, rule=None):
+ super().__init__(message)
+ self.message = message
+ self.value = value
+ self.name = name
+ self.definition = definition
+ self.rule = rule
+
+ @property
+ def path(self):
+ return [item for item in SPLIT_RE.split(self.name) if item != '']
+
+ @property
+ def rule_definition(self):
+ if not self.rule or not self.definition:
+ return None
+ return self.definition.get(self.rule)
+
+
+class JsonSchemaDefinitionException(JsonSchemaException):
+ """
+ Exception raised by generator of validation function.
+ """
diff --git a/setuptools/_vendor/_validate_pyproject/fastjsonschema_validations.py b/setuptools/_vendor/_validate_pyproject/fastjsonschema_validations.py
new file mode 100644
index 0000000..3feda6a
--- /dev/null
+++ b/setuptools/_vendor/_validate_pyproject/fastjsonschema_validations.py
@@ -0,0 +1,1001 @@
+# noqa
+# type: ignore
+# flake8: noqa
+# pylint: skip-file
+# mypy: ignore-errors
+# yapf: disable
+# pylama:skip=1
+
+
+# *** PLEASE DO NOT MODIFY DIRECTLY: Automatically generated code ***
+
+
+VERSION = "2.15.3"
+import re
+from .fastjsonschema_exceptions import JsonSchemaValueException
+
+
+REGEX_PATTERNS = {
+ '^.*$': re.compile('^.*$'),
+ '.+': re.compile('.+'),
+ '^.+$': re.compile('^.+$'),
+ 'idn-email_re_pattern': re.compile('^[^@]+@[^@]+\\.[^@]+\\Z')
+}
+
+NoneType = type(None)
+
+def validate(data, custom_formats={}, name_prefix=None):
+ validate_https___packaging_python_org_en_latest_specifications_declaring_build_dependencies(data, custom_formats, (name_prefix or "data") + "")
+ return data
+
+def validate_https___packaging_python_org_en_latest_specifications_declaring_build_dependencies(data, custom_formats={}, name_prefix=None):
+ if not isinstance(data, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. 
If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['version'], '$$description': ['version is statically defined in the ``version`` field']}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version should be listed in ``dynamic``']}}}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '<https://docs.python.org/3/install/#distutils-configuration-files>`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml <https://ini2toml.readthedocs.io/en/latest/setuptools_pep621.html>`_.', 'It considers only ``setuptools`` `parameters', '<https://setuptools.pypa.io/en/latest/userguide/declarative_config.html>`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': 
{'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 
'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', 
'$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['version'], '$$description': ['version is statically defined in the ``version`` field']}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version should be listed in ``dynamic``']}}}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='type')
+ data_is_dict = isinstance(data, dict)
+ if data_is_dict:
+ data_keys = set(data.keys())
+ if "build-system" in data_keys:
+ data_keys.remove("build-system")
+ data__buildsystem = data["build-system"]
+ if not isinstance(data__buildsystem, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system must be object", value=data__buildsystem, name="" + (name_prefix or "data") + ".build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='type')
+ data__buildsystem_is_dict = isinstance(data__buildsystem, dict)
+ if data__buildsystem_is_dict:
+ data__buildsystem_len = len(data__buildsystem)
+ if not all(prop in data__buildsystem for prop in ['requires']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system must contain ['requires'] properties", value=data__buildsystem, name="" + (name_prefix or "data") + ".build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='required')
+ data__buildsystem_keys = set(data__buildsystem.keys())
+ if "requires" in data__buildsystem_keys:
+ data__buildsystem_keys.remove("requires")
+ data__buildsystem__requires = data__buildsystem["requires"]
+ if not isinstance(data__buildsystem__requires, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.requires must be array", value=data__buildsystem__requires, name="" + (name_prefix or "data") + ".build-system.requires", definition={'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, rule='type')
+ data__buildsystem__requires_is_list = isinstance(data__buildsystem__requires, (list, tuple))
+ if data__buildsystem__requires_is_list:
+ data__buildsystem__requires_len = len(data__buildsystem__requires)
+ for data__buildsystem__requires_x, data__buildsystem__requires_item in enumerate(data__buildsystem__requires):
+ if not isinstance(data__buildsystem__requires_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.requires[{data__buildsystem__requires_x}]".format(**locals()) + " must be string", value=data__buildsystem__requires_item, name="" + (name_prefix or "data") + ".build-system.requires[{data__buildsystem__requires_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if "build-backend" in data__buildsystem_keys:
+ data__buildsystem_keys.remove("build-backend")
+ data__buildsystem__buildbackend = data__buildsystem["build-backend"]
+ if not isinstance(data__buildsystem__buildbackend, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.build-backend must be string", value=data__buildsystem__buildbackend, name="" + (name_prefix or "data") + ".build-system.build-backend", definition={'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, rule='type')
+ if isinstance(data__buildsystem__buildbackend, str):
+ if not custom_formats["pep517-backend-reference"](data__buildsystem__buildbackend):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.build-backend must be pep517-backend-reference", value=data__buildsystem__buildbackend, name="" + (name_prefix or "data") + ".build-system.build-backend", definition={'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, rule='format')
+ if "backend-path" in data__buildsystem_keys:
+ data__buildsystem_keys.remove("backend-path")
+ data__buildsystem__backendpath = data__buildsystem["backend-path"]
+ if not isinstance(data__buildsystem__backendpath, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.backend-path must be array", value=data__buildsystem__backendpath, name="" + (name_prefix or "data") + ".build-system.backend-path", definition={'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}, rule='type')
+ data__buildsystem__backendpath_is_list = isinstance(data__buildsystem__backendpath, (list, tuple))
+ if data__buildsystem__backendpath_is_list:
+ data__buildsystem__backendpath_len = len(data__buildsystem__backendpath)
+ for data__buildsystem__backendpath_x, data__buildsystem__backendpath_item in enumerate(data__buildsystem__backendpath):
+ if not isinstance(data__buildsystem__backendpath_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.backend-path[{data__buildsystem__backendpath_x}]".format(**locals()) + " must be string", value=data__buildsystem__backendpath_item, name="" + (name_prefix or "data") + ".build-system.backend-path[{data__buildsystem__backendpath_x}]".format(**locals()) + "", definition={'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}, rule='type')
+ if data__buildsystem_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system must not contain "+str(data__buildsystem_keys)+" properties", value=data__buildsystem, name="" + (name_prefix or "data") + ".build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='additionalProperties')
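Taken together, the ``build-system`` checks above accept data shaped like the first dict below and reject the second (``requires`` is mandatory and must be an array of strings). This is a sketch of the data shapes only, not of any particular caller; the ``custom_formats`` behaviour is an assumption noted in the comments:

    # Accepted by the checks above: "requires" is present and is a list of
    # strings; "build-backend" is a string; "backend-path" is a list of strings.
    # Note that "build-backend" is additionally passed through
    # custom_formats["pep517-backend-reference"], assumed here to accept a
    # dotted module path such as "setuptools.build_meta".
    good = {
        "requires": ["setuptools>=61", "wheel"],
        "build-backend": "setuptools.build_meta",
        "backend-path": ["_custom_backend"],
    }

    # Rejected: "requires" is missing, so the required-properties check raises
    # JsonSchemaValueException (".build-system must contain ['requires'] properties").
    bad = {"build-backend": "setuptools.build_meta"}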
+ if "project" in data_keys:
+ data_keys.remove("project")
+ data__project = data["project"]
+ validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata(data__project, custom_formats, (name_prefix or "data") + ".project")
+ if "tool" in data_keys:
+ data_keys.remove("tool")
+ data__tool = data["tool"]
+ if not isinstance(data__tool, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".tool must be object", value=data__tool, name="" + (name_prefix or "data") + ".tool", definition={'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '<https://docs.python.org/3/install/#distutils-configuration-files>`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml <https://ini2toml.readthedocs.io/en/latest/setuptools_pep621.html>`_.', 'It considers only ``setuptools`` `parameters', '<https://setuptools.pypa.io/en/latest/userguide/declarative_config.html>`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of 
Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. 
See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}, rule='type')
+ data__tool_is_dict = isinstance(data__tool, dict)
+ if data__tool_is_dict:
+ data__tool_keys = set(data__tool.keys())
+ if "distutils" in data__tool_keys:
+ data__tool_keys.remove("distutils")
+ data__tool__distutils = data__tool["distutils"]
+ validate_https___docs_python_org_3_install(data__tool__distutils, custom_formats, (name_prefix or "data") + ".tool.distutils")
+ if "setuptools" in data__tool_keys:
+ data__tool_keys.remove("setuptools")
+ data__tool__setuptools = data__tool["setuptools"]
+ validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data__tool__setuptools, custom_formats, (name_prefix or "data") + ".tool.setuptools")
+ if data_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. 
If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['version'], '$$description': ['version is statically defined in the ``version`` field']}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version should be listed in ``dynamic``']}}}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '<https://docs.python.org/3/install/#distutils-configuration-files>`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml <https://ini2toml.readthedocs.io/en/latest/setuptools_pep621.html>`_.', 'It considers only ``setuptools`` `parameters', '<https://setuptools.pypa.io/en/latest/userguide/declarative_config.html>`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': 
{'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 
'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', 
'$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['version'], '$$description': ['version is statically defined in the ``version`` field']}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version should be listed in ``dynamic``']}}}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties')
+ return data
+
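The function added above validates the whole ``pyproject.toml`` document, while the one defined next validates the ``tool.setuptools`` table. A minimal usage sketch follows, assuming the generated module is importable as ``setuptools.config._validate_pyproject.fastjsonschema_validations`` and re-exports ``JsonSchemaValueException`` (neither the module path nor that import appears in this hunk), and exercising only fields that do not consult ``custom_formats``:

    from setuptools.config._validate_pyproject.fastjsonschema_validations import (
        JsonSchemaValueException,
        validate_https___setuptools_pypa_io_en_latest_references_keywords_html as validate_tool_setuptools,
    )

    # Fields without "format" constraints, so the empty custom_formats mapping
    # is never consulted by the generated checks.
    ok = {"zip-safe": True, "platforms": ["any"]}
    validate_tool_setuptools(ok, custom_formats={}, name_prefix="tool.setuptools")

    # Type violations surface as JsonSchemaValueException.
    try:
        validate_tool_setuptools({"zip-safe": "yes"}, custom_formats={})
    except JsonSchemaValueException as exc:
        print(exc)  # e.g. "data.zip-safe must be boolean"

A side effect of this code-generation style, visible throughout the hunk, is that every raised exception embeds the full schema fragment it was checking, which keeps the validator free of runtime dependencies at the cost of extremely long source lines.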
+def validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data, custom_formats={}, name_prefix=None):
+ if not isinstance(data, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml <https://ini2toml.readthedocs.io/en/latest/setuptools_pep621.html>`_.', 'It considers only ``setuptools`` `parameters', '<https://setuptools.pypa.io/en/latest/userguide/declarative_config.html>`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='type')
+ data_is_dict = isinstance(data, dict)
+ if data_is_dict:
+ data_keys = set(data.keys())
+ if "platforms" in data_keys:
+ data_keys.remove("platforms")
+ data__platforms = data["platforms"]
+ if not isinstance(data__platforms, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".platforms must be array", value=data__platforms, name="" + (name_prefix or "data") + ".platforms", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+ data__platforms_is_list = isinstance(data__platforms, (list, tuple))
+ if data__platforms_is_list:
+ data__platforms_len = len(data__platforms)
+ for data__platforms_x, data__platforms_item in enumerate(data__platforms):
+ if not isinstance(data__platforms_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".platforms[{data__platforms_x}]".format(**locals()) + " must be string", value=data__platforms_item, name="" + (name_prefix or "data") + ".platforms[{data__platforms_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if "provides" in data_keys:
+ data_keys.remove("provides")
+ data__provides = data["provides"]
+ if not isinstance(data__provides, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".provides must be array", value=data__provides, name="" + (name_prefix or "data") + ".provides", definition={'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, rule='type')
+ data__provides_is_list = isinstance(data__provides, (list, tuple))
+ if data__provides_is_list:
+ data__provides_len = len(data__provides)
+ for data__provides_x, data__provides_item in enumerate(data__provides):
+ if not isinstance(data__provides_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + " must be string", value=data__provides_item, name="" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='type')
+ if isinstance(data__provides_item, str):
+ if not custom_formats["pep508-identifier"](data__provides_item):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + " must be pep508-identifier", value=data__provides_item, name="" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='format')
+ if "obsoletes" in data_keys:
+ data_keys.remove("obsoletes")
+ data__obsoletes = data["obsoletes"]
+ if not isinstance(data__obsoletes, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".obsoletes must be array", value=data__obsoletes, name="" + (name_prefix or "data") + ".obsoletes", definition={'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, rule='type')
+ data__obsoletes_is_list = isinstance(data__obsoletes, (list, tuple))
+ if data__obsoletes_is_list:
+ data__obsoletes_len = len(data__obsoletes)
+ for data__obsoletes_x, data__obsoletes_item in enumerate(data__obsoletes):
+ if not isinstance(data__obsoletes_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + " must be string", value=data__obsoletes_item, name="" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='type')
+ if isinstance(data__obsoletes_item, str):
+ if not custom_formats["pep508-identifier"](data__obsoletes_item):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + " must be pep508-identifier", value=data__obsoletes_item, name="" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='format')
+ if "zip-safe" in data_keys:
+ data_keys.remove("zip-safe")
+ data__zipsafe = data["zip-safe"]
+ if not isinstance(data__zipsafe, (bool)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".zip-safe must be boolean", value=data__zipsafe, name="" + (name_prefix or "data") + ".zip-safe", definition={'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, rule='type')
+ if "script-files" in data_keys:
+ data_keys.remove("script-files")
+ data__scriptfiles = data["script-files"]
+ if not isinstance(data__scriptfiles, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".script-files must be array", value=data__scriptfiles, name="" + (name_prefix or "data") + ".script-files", definition={'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, rule='type')
+ data__scriptfiles_is_list = isinstance(data__scriptfiles, (list, tuple))
+ if data__scriptfiles_is_list:
+ data__scriptfiles_len = len(data__scriptfiles)
+ for data__scriptfiles_x, data__scriptfiles_item in enumerate(data__scriptfiles):
+ if not isinstance(data__scriptfiles_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".script-files[{data__scriptfiles_x}]".format(**locals()) + " must be string", value=data__scriptfiles_item, name="" + (name_prefix or "data") + ".script-files[{data__scriptfiles_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if "eager-resources" in data_keys:
+ data_keys.remove("eager-resources")
+ data__eagerresources = data["eager-resources"]
+ if not isinstance(data__eagerresources, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".eager-resources must be array", value=data__eagerresources, name="" + (name_prefix or "data") + ".eager-resources", definition={'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, rule='type')
+ data__eagerresources_is_list = isinstance(data__eagerresources, (list, tuple))
+ if data__eagerresources_is_list:
+ data__eagerresources_len = len(data__eagerresources)
+ for data__eagerresources_x, data__eagerresources_item in enumerate(data__eagerresources):
+ if not isinstance(data__eagerresources_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".eager-resources[{data__eagerresources_x}]".format(**locals()) + " must be string", value=data__eagerresources_item, name="" + (name_prefix or "data") + ".eager-resources[{data__eagerresources_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if "packages" in data_keys:
+ data_keys.remove("packages")
+ data__packages = data["packages"]
+ data__packages_one_of_count1 = 0
+ if data__packages_one_of_count1 < 2:
+ try:
+ if not isinstance(data__packages, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be array", value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, rule='type')
+ data__packages_is_list = isinstance(data__packages, (list, tuple))
+ if data__packages_is_list:
+ data__packages_len = len(data__packages)
+ for data__packages_x, data__packages_item in enumerate(data__packages):
+ if not isinstance(data__packages_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()) + " must be string", value=data__packages_item, name="" + (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='type')
+ if isinstance(data__packages_item, str):
+ if not custom_formats["python-module-name"](data__packages_item):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()) + " must be python-module-name", value=data__packages_item, name="" + (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='format')
+ data__packages_one_of_count1 += 1
+ except JsonSchemaValueException: pass
+ if data__packages_one_of_count1 < 2:
+ try:
+ validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_find_directive(data__packages, custom_formats, (name_prefix or "data") + ".packages")
+ data__packages_one_of_count1 += 1
+ except JsonSchemaValueException: pass
+ if data__packages_one_of_count1 != 1:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be valid exactly by one definition" + (" (" + str(data__packages_one_of_count1) + " matches found)"), value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, rule='oneOf')
+ if "package-dir" in data_keys:
+ data_keys.remove("package-dir")
+ data__packagedir = data["package-dir"]
+ if not isinstance(data__packagedir, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be object", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='type')
+ data__packagedir_is_dict = isinstance(data__packagedir, dict)
+ if data__packagedir_is_dict:
+ data__packagedir_keys = set(data__packagedir.keys())
+ for data__packagedir_key, data__packagedir_val in data__packagedir.items():
+ if REGEX_PATTERNS['^.*$'].search(data__packagedir_key):
+ if data__packagedir_key in data__packagedir_keys:
+ data__packagedir_keys.remove(data__packagedir_key)
+ if not isinstance(data__packagedir_val, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir.{data__packagedir_key}".format(**locals()) + " must be string", value=data__packagedir_val, name="" + (name_prefix or "data") + ".package-dir.{data__packagedir_key}".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if data__packagedir_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must not contain "+str(data__packagedir_keys)+" properties", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='additionalProperties')
+ data__packagedir_len = len(data__packagedir)
+ if data__packagedir_len != 0:
+ data__packagedir_property_names = True
+ for data__packagedir_key in data__packagedir:
+ try:
+ data__packagedir_key_one_of_count2 = 0
+ if data__packagedir_key_one_of_count2 < 2:
+ try:
+ if isinstance(data__packagedir_key, str):
+ if not custom_formats["python-module-name"](data__packagedir_key):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be python-module-name", value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'format': 'python-module-name'}, rule='format')
+ data__packagedir_key_one_of_count2 += 1
+ except JsonSchemaValueException: pass
+ if data__packagedir_key_one_of_count2 < 2:
+ try:
+ if data__packagedir_key != "":
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be same as const definition: ", value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'const': ''}, rule='const')
+ data__packagedir_key_one_of_count2 += 1
+ except JsonSchemaValueException: pass
+ if data__packagedir_key_one_of_count2 != 1:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be valid exactly by one definition" + (" (" + str(data__packagedir_key_one_of_count2) + " matches found)"), value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, rule='oneOf')
+ except JsonSchemaValueException:
+ data__packagedir_property_names = False
+ if not data__packagedir_property_names:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be named by propertyName definition", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='propertyNames')
+ if "package-data" in data_keys:
+ data_keys.remove("package-data")
+ data__packagedata = data["package-data"]
+ if not isinstance(data__packagedata, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be object", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type')
+ data__packagedata_is_dict = isinstance(data__packagedata, dict)
+ if data__packagedata_is_dict:
+ data__packagedata_keys = set(data__packagedata.keys())
+ for data__packagedata_key, data__packagedata_val in data__packagedata.items():
+ if REGEX_PATTERNS['^.*$'].search(data__packagedata_key):
+ if data__packagedata_key in data__packagedata_keys:
+ data__packagedata_keys.remove(data__packagedata_key)
+ if not isinstance(data__packagedata_val, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data.{data__packagedata_key}".format(**locals()) + " must be array", value=data__packagedata_val, name="" + (name_prefix or "data") + ".package-data.{data__packagedata_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+ data__packagedata_val_is_list = isinstance(data__packagedata_val, (list, tuple))
+ if data__packagedata_val_is_list:
+ data__packagedata_val_len = len(data__packagedata_val)
+ for data__packagedata_val_x, data__packagedata_val_item in enumerate(data__packagedata_val):
+ if not isinstance(data__packagedata_val_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data.{data__packagedata_key}[{data__packagedata_val_x}]".format(**locals()) + " must be string", value=data__packagedata_val_item, name="" + (name_prefix or "data") + ".package-data.{data__packagedata_key}[{data__packagedata_val_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if data__packagedata_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must not contain "+str(data__packagedata_keys)+" properties", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='additionalProperties')
+ data__packagedata_len = len(data__packagedata)
+ if data__packagedata_len != 0:
+ data__packagedata_property_names = True
+ for data__packagedata_key in data__packagedata:
+ try:
+ data__packagedata_key_one_of_count3 = 0
+ if data__packagedata_key_one_of_count3 < 2:
+ try:
+ if isinstance(data__packagedata_key, str):
+ if not custom_formats["python-module-name"](data__packagedata_key):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be python-module-name", value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'format': 'python-module-name'}, rule='format')
+ data__packagedata_key_one_of_count3 += 1
+ except JsonSchemaValueException: pass
+ if data__packagedata_key_one_of_count3 < 2:
+ try:
+ if data__packagedata_key != "*":
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be same as const definition: *", value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'const': '*'}, rule='const')
+ data__packagedata_key_one_of_count3 += 1
+ except JsonSchemaValueException: pass
+ if data__packagedata_key_one_of_count3 != 1:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be valid exactly by one definition" + (" (" + str(data__packagedata_key_one_of_count3) + " matches found)"), value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, rule='oneOf')
+ except JsonSchemaValueException:
+ data__packagedata_property_names = False
+ if not data__packagedata_property_names:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be named by propertyName definition", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='propertyNames')
+ if "include-package-data" in data_keys:
+ data_keys.remove("include-package-data")
+ data__includepackagedata = data["include-package-data"]
+ if not isinstance(data__includepackagedata, (bool)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".include-package-data must be boolean", value=data__includepackagedata, name="" + (name_prefix or "data") + ".include-package-data", definition={'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, rule='type')
+ if "exclude-package-data" in data_keys:
+ data_keys.remove("exclude-package-data")
+ data__excludepackagedata = data["exclude-package-data"]
+ if not isinstance(data__excludepackagedata, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be object", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type')
+ data__excludepackagedata_is_dict = isinstance(data__excludepackagedata, dict)
+ if data__excludepackagedata_is_dict:
+ data__excludepackagedata_keys = set(data__excludepackagedata.keys())
+ for data__excludepackagedata_key, data__excludepackagedata_val in data__excludepackagedata.items():
+ if REGEX_PATTERNS['^.*$'].search(data__excludepackagedata_key):
+ if data__excludepackagedata_key in data__excludepackagedata_keys:
+ data__excludepackagedata_keys.remove(data__excludepackagedata_key)
+ if not isinstance(data__excludepackagedata_val, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}".format(**locals()) + " must be array", value=data__excludepackagedata_val, name="" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+ data__excludepackagedata_val_is_list = isinstance(data__excludepackagedata_val, (list, tuple))
+ if data__excludepackagedata_val_is_list:
+ data__excludepackagedata_val_len = len(data__excludepackagedata_val)
+ for data__excludepackagedata_val_x, data__excludepackagedata_val_item in enumerate(data__excludepackagedata_val):
+ if not isinstance(data__excludepackagedata_val_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}[{data__excludepackagedata_val_x}]".format(**locals()) + " must be string", value=data__excludepackagedata_val_item, name="" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}[{data__excludepackagedata_val_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if data__excludepackagedata_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must not contain "+str(data__excludepackagedata_keys)+" properties", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='additionalProperties')
+ data__excludepackagedata_len = len(data__excludepackagedata)
+ if data__excludepackagedata_len != 0:
+ data__excludepackagedata_property_names = True
+ for data__excludepackagedata_key in data__excludepackagedata:
+ try:
+ data__excludepackagedata_key_one_of_count4 = 0
+ if data__excludepackagedata_key_one_of_count4 < 2:
+ try:
+ if isinstance(data__excludepackagedata_key, str):
+ if not custom_formats["python-module-name"](data__excludepackagedata_key):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be python-module-name", value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'format': 'python-module-name'}, rule='format')
+ data__excludepackagedata_key_one_of_count4 += 1
+ except JsonSchemaValueException: pass
+ if data__excludepackagedata_key_one_of_count4 < 2:
+ try:
+ if data__excludepackagedata_key != "*":
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be same as const definition: *", value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'const': '*'}, rule='const')
+ data__excludepackagedata_key_one_of_count4 += 1
+ except JsonSchemaValueException: pass
+ if data__excludepackagedata_key_one_of_count4 != 1:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be valid exactly by one definition" + (" (" + str(data__excludepackagedata_key_one_of_count4) + " matches found)"), value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, rule='oneOf')
+ except JsonSchemaValueException:
+ data__excludepackagedata_property_names = False
+ if not data__excludepackagedata_property_names:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be named by propertyName definition", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='propertyNames')
+ if "namespace-packages" in data_keys:
+ data_keys.remove("namespace-packages")
+ data__namespacepackages = data["namespace-packages"]
+ if not isinstance(data__namespacepackages, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages must be array", value=data__namespacepackages, name="" + (name_prefix or "data") + ".namespace-packages", definition={'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, rule='type')
+ data__namespacepackages_is_list = isinstance(data__namespacepackages, (list, tuple))
+ if data__namespacepackages_is_list:
+ data__namespacepackages_len = len(data__namespacepackages)
+ for data__namespacepackages_x, data__namespacepackages_item in enumerate(data__namespacepackages):
+ if not isinstance(data__namespacepackages_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + " must be string", value=data__namespacepackages_item, name="" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='type')
+ if isinstance(data__namespacepackages_item, str):
+ if not custom_formats["python-module-name"](data__namespacepackages_item):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + " must be python-module-name", value=data__namespacepackages_item, name="" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='format')
+ if "py-modules" in data_keys:
+ data_keys.remove("py-modules")
+ data__pymodules = data["py-modules"]
+ if not isinstance(data__pymodules, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules must be array", value=data__pymodules, name="" + (name_prefix or "data") + ".py-modules", definition={'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, rule='type')
+ data__pymodules_is_list = isinstance(data__pymodules, (list, tuple))
+ if data__pymodules_is_list:
+ data__pymodules_len = len(data__pymodules)
+ for data__pymodules_x, data__pymodules_item in enumerate(data__pymodules):
+ if not isinstance(data__pymodules_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + " must be string", value=data__pymodules_item, name="" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='type')
+ if isinstance(data__pymodules_item, str):
+ if not custom_formats["python-module-name"](data__pymodules_item):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + " must be python-module-name", value=data__pymodules_item, name="" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='format')
+ if "data-files" in data_keys:
+ data_keys.remove("data-files")
+ data__datafiles = data["data-files"]
+ if not isinstance(data__datafiles, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".data-files must be object", value=data__datafiles, name="" + (name_prefix or "data") + ".data-files", definition={'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type')
+ data__datafiles_is_dict = isinstance(data__datafiles, dict)
+ if data__datafiles_is_dict:
+ data__datafiles_keys = set(data__datafiles.keys())
+ for data__datafiles_key, data__datafiles_val in data__datafiles.items():
+ if REGEX_PATTERNS['^.*$'].search(data__datafiles_key):
+ if data__datafiles_key in data__datafiles_keys:
+ data__datafiles_keys.remove(data__datafiles_key)
+ if not isinstance(data__datafiles_val, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".data-files.{data__datafiles_key}".format(**locals()) + " must be array", value=data__datafiles_val, name="" + (name_prefix or "data") + ".data-files.{data__datafiles_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+ data__datafiles_val_is_list = isinstance(data__datafiles_val, (list, tuple))
+ if data__datafiles_val_is_list:
+ data__datafiles_val_len = len(data__datafiles_val)
+ for data__datafiles_val_x, data__datafiles_val_item in enumerate(data__datafiles_val):
+ if not isinstance(data__datafiles_val_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".data-files.{data__datafiles_key}[{data__datafiles_val_x}]".format(**locals()) + " must be string", value=data__datafiles_val_item, name="" + (name_prefix or "data") + ".data-files.{data__datafiles_key}[{data__datafiles_val_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if "cmdclass" in data_keys:
+ data_keys.remove("cmdclass")
+ data__cmdclass = data["cmdclass"]
+ if not isinstance(data__cmdclass, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".cmdclass must be object", value=data__cmdclass, name="" + (name_prefix or "data") + ".cmdclass", definition={'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, rule='type')
+ data__cmdclass_is_dict = isinstance(data__cmdclass, dict)
+ if data__cmdclass_is_dict:
+ data__cmdclass_keys = set(data__cmdclass.keys())
+ for data__cmdclass_key, data__cmdclass_val in data__cmdclass.items():
+ if REGEX_PATTERNS['^.*$'].search(data__cmdclass_key):
+ if data__cmdclass_key in data__cmdclass_keys:
+ data__cmdclass_keys.remove(data__cmdclass_key)
+ if not isinstance(data__cmdclass_val, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + " must be string", value=data__cmdclass_val, name="" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'python-qualified-identifier'}, rule='type')
+ if isinstance(data__cmdclass_val, str):
+ if not custom_formats["python-qualified-identifier"](data__cmdclass_val):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + " must be python-qualified-identifier", value=data__cmdclass_val, name="" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'python-qualified-identifier'}, rule='format')
+ if "license-files" in data_keys:
+ data_keys.remove("license-files")
+ data__licensefiles = data["license-files"]
+ if not isinstance(data__licensefiles, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".license-files must be array", value=data__licensefiles, name="" + (name_prefix or "data") + ".license-files", definition={'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, rule='type')
+ data__licensefiles_is_list = isinstance(data__licensefiles, (list, tuple))
+ if data__licensefiles_is_list:
+ data__licensefiles_len = len(data__licensefiles)
+ for data__licensefiles_x, data__licensefiles_item in enumerate(data__licensefiles):
+ if not isinstance(data__licensefiles_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".license-files[{data__licensefiles_x}]".format(**locals()) + " must be string", value=data__licensefiles_item, name="" + (name_prefix or "data") + ".license-files[{data__licensefiles_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ else: data["license-files"] = ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*']
+ if "dynamic" in data_keys:
+ data_keys.remove("dynamic")
+ data__dynamic = data["dynamic"]
+ if not isinstance(data__dynamic, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be object", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}, rule='type')
+ data__dynamic_is_dict = isinstance(data__dynamic, dict)
+ if data__dynamic_is_dict:
+ data__dynamic_keys = set(data__dynamic.keys())
+ if "version" in data__dynamic_keys:
+ data__dynamic_keys.remove("version")
+ data__dynamic__version = data__dynamic["version"]
+ data__dynamic__version_one_of_count5 = 0
+ if data__dynamic__version_one_of_count5 < 2:
+ try:
+ validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_attr_directive(data__dynamic__version, custom_formats, (name_prefix or "data") + ".dynamic.version")
+ data__dynamic__version_one_of_count5 += 1
+ except JsonSchemaValueException: pass
+ if data__dynamic__version_one_of_count5 < 2:
+ try:
+ validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__version, custom_formats, (name_prefix or "data") + ".dynamic.version")
+ data__dynamic__version_one_of_count5 += 1
+ except JsonSchemaValueException: pass
+ if data__dynamic__version_one_of_count5 != 1:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.version must be valid exactly by one definition" + (" (" + str(data__dynamic__version_one_of_count5) + " matches found)"), value=data__dynamic__version, name="" + (name_prefix or "data") + ".dynamic.version", definition={'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, rule='oneOf')
+ if "classifiers" in data__dynamic_keys:
+ data__dynamic_keys.remove("classifiers")
+ data__dynamic__classifiers = data__dynamic["classifiers"]
+ validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__classifiers, custom_formats, (name_prefix or "data") + ".dynamic.classifiers")
+ if "description" in data__dynamic_keys:
+ data__dynamic_keys.remove("description")
+ data__dynamic__description = data__dynamic["description"]
+ validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__description, custom_formats, (name_prefix or "data") + ".dynamic.description")
+ if "entry-points" in data__dynamic_keys:
+ data__dynamic_keys.remove("entry-points")
+ data__dynamic__entrypoints = data__dynamic["entry-points"]
+ validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__entrypoints, custom_formats, (name_prefix or "data") + ".dynamic.entry-points")
+ if "readme" in data__dynamic_keys:
+ data__dynamic_keys.remove("readme")
+ data__dynamic__readme = data__dynamic["readme"]
+ data__dynamic__readme_any_of_count6 = 0
+ if not data__dynamic__readme_any_of_count6:
+ try:
+ validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__readme, custom_formats, (name_prefix or "data") + ".dynamic.readme")
+ data__dynamic__readme_any_of_count6 += 1
+ except JsonSchemaValueException: pass
+ if not data__dynamic__readme_any_of_count6:
+ try:
+ data__dynamic__readme_is_dict = isinstance(data__dynamic__readme, dict)
+ if data__dynamic__readme_is_dict:
+ data__dynamic__readme_keys = set(data__dynamic__readme.keys())
+ if "content-type" in data__dynamic__readme_keys:
+ data__dynamic__readme_keys.remove("content-type")
+ data__dynamic__readme__contenttype = data__dynamic__readme["content-type"]
+ if not isinstance(data__dynamic__readme__contenttype, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme.content-type must be string", value=data__dynamic__readme__contenttype, name="" + (name_prefix or "data") + ".dynamic.readme.content-type", definition={'type': 'string'}, rule='type')
+ data__dynamic__readme_any_of_count6 += 1
+ except JsonSchemaValueException: pass
+ if not data__dynamic__readme_any_of_count6:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme cannot be validated by any definition", value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}, rule='anyOf')
+ data__dynamic__readme_is_dict = isinstance(data__dynamic__readme, dict)
+ if data__dynamic__readme_is_dict:
+ data__dynamic__readme_len = len(data__dynamic__readme)
+ if not all(prop in data__dynamic__readme for prop in ['file']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme must contain ['file'] properties", value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}, rule='required')
+ if data__dynamic_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must not contain "+str(data__dynamic_keys)+" properties", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}, rule='additionalProperties')
+ if data_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml <https://ini2toml.readthedocs.io/en/latest/setuptools_pep621.html>`_.', 'It considers only ``setuptools`` `parameters', '<https://setuptools.pypa.io/en/latest/userguide/declarative_config.html>`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': 
['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='additionalProperties')
+ return data
+
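+# The ``file:`` directive schema enforced below accepts a table whose only key
+# is ``file``, holding either a single path or a list of paths that are read
+# and concatenated, e.g. (illustrative values) ``{file = "README.rst"}`` or
+# ``{file = ["README.rst", "CHANGES.rst"]}``; any additional key is rejected.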
+def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data, custom_formats={}, name_prefix=None):
+ if not isinstance(data, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='type')
+ data_is_dict = isinstance(data, dict)
+ if data_is_dict:
+ data_len = len(data)
+ if not all(prop in data for prop in ['file']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['file'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='required')
+ data_keys = set(data.keys())
+ if "file" in data_keys:
+ data_keys.remove("file")
+ data__file = data["file"]
+ data__file_one_of_count7 = 0
+ if data__file_one_of_count7 < 2:
+ try:
+ if not isinstance(data__file, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be string", value=data__file, name="" + (name_prefix or "data") + ".file", definition={'type': 'string'}, rule='type')
+ data__file_one_of_count7 += 1
+ except JsonSchemaValueException: pass
+ if data__file_one_of_count7 < 2:
+ try:
+ if not isinstance(data__file, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be array", value=data__file, name="" + (name_prefix or "data") + ".file", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+ data__file_is_list = isinstance(data__file, (list, tuple))
+ if data__file_is_list:
+ data__file_len = len(data__file)
+ for data__file_x, data__file_item in enumerate(data__file):
+ if not isinstance(data__file_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".file[{data__file_x}]".format(**locals()) + " must be string", value=data__file_item, name="" + (name_prefix or "data") + ".file[{data__file_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ data__file_one_of_count7 += 1
+ except JsonSchemaValueException: pass
+ if data__file_one_of_count7 != 1:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be valid exactly by one definition" + (" (" + str(data__file_one_of_count7) + " matches found)"), value=data__file, name="" + (name_prefix or "data") + ".file", definition={'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}, rule='oneOf')
+ if data_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='additionalProperties')
+ return data
+
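+# The ``attr:`` directive schema enforced below requires a single ``attr`` key
+# whose string value names a module attribute (callables and iterables are
+# supported; unsupported types are cast via ``str()``), e.g. the illustrative
+# ``{attr = "mypkg.__version__"}``.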
+def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_attr_directive(data, custom_formats={}, name_prefix=None):
+ if not isinstance(data, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='type')
+ data_is_dict = isinstance(data, dict)
+ if data_is_dict:
+ data_len = len(data)
+ if not all(prop in data for prop in ['attr']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['attr'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='required')
+ data_keys = set(data.keys())
+ if "attr" in data_keys:
+ data_keys.remove("attr")
+ data__attr = data["attr"]
+ if not isinstance(data__attr, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".attr must be string", value=data__attr, name="" + (name_prefix or "data") + ".attr", definition={'type': 'string'}, rule='type')
+ if data_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='additionalProperties')
+ return data
+
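+# The ``find:`` directive schema enforced below wraps dynamic package
+# discovery: the optional ``where``, ``exclude`` and ``include`` keys take
+# arrays of strings (``exclude``/``include`` may use shell-style wildcards)
+# and ``namespaces`` takes a boolean, e.g. the illustrative
+# ``{find = {where = ["src"], exclude = ["tests*"], namespaces = true}}``.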
+def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_find_directive(data, custom_formats={}, name_prefix=None):
+ if not isinstance(data, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}, rule='type')
+ data_is_dict = isinstance(data, dict)
+ if data_is_dict:
+ data_keys = set(data.keys())
+ if "find" in data_keys:
+ data_keys.remove("find")
+ data__find = data["find"]
+ if not isinstance(data__find, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".find must be object", value=data__find, name="" + (name_prefix or "data") + ".find", definition={'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}, rule='type')
+ data__find_is_dict = isinstance(data__find, dict)
+ if data__find_is_dict:
+ data__find_keys = set(data__find.keys())
+ if "where" in data__find_keys:
+ data__find_keys.remove("where")
+ data__find__where = data__find["where"]
+ if not isinstance(data__find__where, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.where must be array", value=data__find__where, name="" + (name_prefix or "data") + ".find.where", definition={'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, rule='type')
+ data__find__where_is_list = isinstance(data__find__where, (list, tuple))
+ if data__find__where_is_list:
+ data__find__where_len = len(data__find__where)
+ for data__find__where_x, data__find__where_item in enumerate(data__find__where):
+ if not isinstance(data__find__where_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.where[{data__find__where_x}]".format(**locals()) + " must be string", value=data__find__where_item, name="" + (name_prefix or "data") + ".find.where[{data__find__where_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if "exclude" in data__find_keys:
+ data__find_keys.remove("exclude")
+ data__find__exclude = data__find["exclude"]
+ if not isinstance(data__find__exclude, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.exclude must be array", value=data__find__exclude, name="" + (name_prefix or "data") + ".find.exclude", definition={'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, rule='type')
+ data__find__exclude_is_list = isinstance(data__find__exclude, (list, tuple))
+ if data__find__exclude_is_list:
+ data__find__exclude_len = len(data__find__exclude)
+ for data__find__exclude_x, data__find__exclude_item in enumerate(data__find__exclude):
+ if not isinstance(data__find__exclude_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.exclude[{data__find__exclude_x}]".format(**locals()) + " must be string", value=data__find__exclude_item, name="" + (name_prefix or "data") + ".find.exclude[{data__find__exclude_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if "include" in data__find_keys:
+ data__find_keys.remove("include")
+ data__find__include = data__find["include"]
+ if not isinstance(data__find__include, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.include must be array", value=data__find__include, name="" + (name_prefix or "data") + ".find.include", definition={'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, rule='type')
+ data__find__include_is_list = isinstance(data__find__include, (list, tuple))
+ if data__find__include_is_list:
+ data__find__include_len = len(data__find__include)
+ for data__find__include_x, data__find__include_item in enumerate(data__find__include):
+ if not isinstance(data__find__include_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.include[{data__find__include_x}]".format(**locals()) + " must be string", value=data__find__include_item, name="" + (name_prefix or "data") + ".find.include[{data__find__include_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if "namespaces" in data__find_keys:
+ data__find_keys.remove("namespaces")
+ data__find__namespaces = data__find["namespaces"]
+ if not isinstance(data__find__namespaces, (bool)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.namespaces must be boolean", value=data__find__namespaces, name="" + (name_prefix or "data") + ".find.namespaces", definition={'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}, rule='type')
+ if data__find_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".find must not contain "+str(data__find_keys)+" properties", value=data__find, name="" + (name_prefix or "data") + ".find", definition={'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}, rule='additionalProperties')
+ if data_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}, rule='additionalProperties')
+ return data
+
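+# The ``tool.distutils`` schema enforced below is intentionally loose (and
+# noted as not currently implemented): an optional ``global`` table of options
+# applied to all commands, plus arbitrarily named per-command tables, each of
+# which must itself be a table (e.g. an illustrative ``[tool.distutils.sdist]``).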
+def validate_https___docs_python_org_3_install(data, custom_formats={}, name_prefix=None):
+ if not isinstance(data, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '<https://docs.python.org/3/install/#distutils-configuration-files>`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, rule='type')
+ data_is_dict = isinstance(data, dict)
+ if data_is_dict:
+ data_keys = set(data.keys())
+ if "global" in data_keys:
+ data_keys.remove("global")
+ data__global = data["global"]
+ if not isinstance(data__global, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".global must be object", value=data__global, name="" + (name_prefix or "data") + ".global", definition={'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}, rule='type')
+ for data_key, data_val in data.items():
+ if REGEX_PATTERNS['.+'].search(data_key):
+ if data_key in data_keys:
+ data_keys.remove(data_key)
+ if not isinstance(data_val, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be object", value=data_val, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'object'}, rule='type')
+ return data
+
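+# The ``project`` table schema enforced below (PEP 621 metadata) requires
+# ``name`` and, unless ``version`` is given statically, expects ``"version"``
+# to be listed in ``dynamic``; e.g. the illustrative
+# ``{name = "mypkg", version = "0.1.0"}`` or ``{name = "mypkg", dynamic = ["version"]}``.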
+def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata(data, custom_formats={}, name_prefix=None):
+ if not isinstance(data, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. 
whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['version'], '$$description': ['version is statically defined in the ``version`` field']}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version should be listed in ``dynamic``']}}}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='type')
+ data_is_dict = isinstance(data, dict)
+ if data_is_dict:
+ data_len = len(data)
+ if not all(prop in data for prop in ['name']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. 
whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['version'], '$$description': ['version is statically defined in the ``version`` field']}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version should be listed in ``dynamic``']}}}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='required')
+ data_keys = set(data.keys())
+ if "name" in data_keys:
+ data_keys.remove("name")
+ data__name = data["name"]
+ if not isinstance(data__name, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, rule='type')
+ if isinstance(data__name, str):
+ if not custom_formats["pep508-identifier"](data__name):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be pep508-identifier", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, rule='format')
+ if "version" in data_keys:
+ data_keys.remove("version")
+ data__version = data["version"]
+ if not isinstance(data__version, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".version must be string", value=data__version, name="" + (name_prefix or "data") + ".version", definition={'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, rule='type')
+ if isinstance(data__version, str):
+ if not custom_formats["pep440"](data__version):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".version must be pep440", value=data__version, name="" + (name_prefix or "data") + ".version", definition={'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, rule='format')
+ if "description" in data_keys:
+ data_keys.remove("description")
+ data__description = data["description"]
+ if not isinstance(data__description, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".description must be string", value=data__description, name="" + (name_prefix or "data") + ".description", definition={'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, rule='type')
+ if "readme" in data_keys:
+ data_keys.remove("readme")
+ data__readme = data["readme"]
+ data__readme_one_of_count8 = 0
+ if data__readme_one_of_count8 < 2:
+ try:
+ if not isinstance(data__readme, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be string", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, rule='type')
+ data__readme_one_of_count8 += 1
+ except JsonSchemaValueException: pass
+ if data__readme_one_of_count8 < 2:
+ try:
+ if not isinstance(data__readme, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be object", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}, rule='type')
+ data__readme_any_of_count9 = 0
+ if not data__readme_any_of_count9:
+ try:
+ data__readme_is_dict = isinstance(data__readme, dict)
+ if data__readme_is_dict:
+ data__readme_len = len(data__readme)
+ if not all(prop in data__readme for prop in ['file']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain ['file'] properties", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, rule='required')
+ data__readme_keys = set(data__readme.keys())
+ if "file" in data__readme_keys:
+ data__readme_keys.remove("file")
+ data__readme__file = data__readme["file"]
+ if not isinstance(data__readme__file, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.file must be string", value=data__readme__file, name="" + (name_prefix or "data") + ".readme.file", definition={'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}, rule='type')
+ data__readme_any_of_count9 += 1
+ except JsonSchemaValueException: pass
+ if not data__readme_any_of_count9:
+ try:
+ data__readme_is_dict = isinstance(data__readme, dict)
+ if data__readme_is_dict:
+ data__readme_len = len(data__readme)
+ if not all(prop in data__readme for prop in ['text']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain ['text'] properties", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}, rule='required')
+ data__readme_keys = set(data__readme.keys())
+ if "text" in data__readme_keys:
+ data__readme_keys.remove("text")
+ data__readme__text = data__readme["text"]
+ if not isinstance(data__readme__text, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.text must be string", value=data__readme__text, name="" + (name_prefix or "data") + ".readme.text", definition={'type': 'string', 'description': 'Full text describing the project.'}, rule='type')
+ data__readme_any_of_count9 += 1
+ except JsonSchemaValueException: pass
+ if not data__readme_any_of_count9:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme cannot be validated by any definition", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, rule='anyOf')
+ data__readme_is_dict = isinstance(data__readme, dict)
+ if data__readme_is_dict:
+ data__readme_len = len(data__readme)
+ if not all(prop in data__readme for prop in ['content-type']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain ['content-type'] properties", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}, rule='required')
+ data__readme_keys = set(data__readme.keys())
+ if "content-type" in data__readme_keys:
+ data__readme_keys.remove("content-type")
+ data__readme__contenttype = data__readme["content-type"]
+ if not isinstance(data__readme__contenttype, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.content-type must be string", value=data__readme__contenttype, name="" + (name_prefix or "data") + ".readme.content-type", definition={'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}, rule='type')
+ data__readme_one_of_count8 += 1
+ except JsonSchemaValueException: pass
+ if data__readme_one_of_count8 != 1:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be valid exactly by one definition" + (" (" + str(data__readme_one_of_count8) + " matches found)"), value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, rule='oneOf')
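+ # ``readme`` has just been checked against the oneOf above: either a plain
+ # path string (content-type inferred from a ``.md``/``.rst`` suffix) or a
+ # table combining ``file`` or ``text`` with a ``content-type`` key.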
+ if "requires-python" in data_keys:
+ data_keys.remove("requires-python")
+ data__requirespython = data["requires-python"]
+ if not isinstance(data__requirespython, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".requires-python must be string", value=data__requirespython, name="" + (name_prefix or "data") + ".requires-python", definition={'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, rule='type')
+ if isinstance(data__requirespython, str):
+ if not custom_formats["pep508-versionspec"](data__requirespython):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".requires-python must be pep508-versionspec", value=data__requirespython, name="" + (name_prefix or "data") + ".requires-python", definition={'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, rule='format')
+ if "license" in data_keys:
+ data_keys.remove("license")
+ data__license = data["license"]
+ data__license_one_of_count10 = 0
+ if data__license_one_of_count10 < 2:
+ try:
+ data__license_is_dict = isinstance(data__license, dict)
+ if data__license_is_dict:
+ data__license_len = len(data__license)
+ if not all(prop in data__license for prop in ['file']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must contain ['file'] properties", value=data__license, name="" + (name_prefix or "data") + ".license", definition={'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, rule='required')
+ data__license_keys = set(data__license.keys())
+ if "file" in data__license_keys:
+ data__license_keys.remove("file")
+ data__license__file = data__license["file"]
+ if not isinstance(data__license__file, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".license.file must be string", value=data__license__file, name="" + (name_prefix or "data") + ".license.file", definition={'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}, rule='type')
+ data__license_one_of_count10 += 1
+ except JsonSchemaValueException: pass
+ if data__license_one_of_count10 < 2:
+ try:
+ data__license_is_dict = isinstance(data__license, dict)
+ if data__license_is_dict:
+ data__license_len = len(data__license)
+ if not all(prop in data__license for prop in ['text']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must contain ['text'] properties", value=data__license, name="" + (name_prefix or "data") + ".license", definition={'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}, rule='required')
+ data__license_keys = set(data__license.keys())
+ if "text" in data__license_keys:
+ data__license_keys.remove("text")
+ data__license__text = data__license["text"]
+ if not isinstance(data__license__text, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".license.text must be string", value=data__license__text, name="" + (name_prefix or "data") + ".license.text", definition={'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}, rule='type')
+ data__license_one_of_count10 += 1
+ except JsonSchemaValueException: pass
+ if data__license_one_of_count10 != 1:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must be valid exactly by one definition" + (" (" + str(data__license_one_of_count10) + " matches found)"), value=data__license, name="" + (name_prefix or "data") + ".license", definition={'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, rule='oneOf')
+ if "authors" in data_keys:
+ data_keys.remove("authors")
+ data__authors = data["authors"]
+ if not isinstance(data__authors, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".authors must be array", value=data__authors, name="" + (name_prefix or "data") + ".authors", definition={'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, rule='type')
+ data__authors_is_list = isinstance(data__authors, (list, tuple))
+ if data__authors_is_list:
+ data__authors_len = len(data__authors)
+ for data__authors_x, data__authors_item in enumerate(data__authors):
+ validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_author(data__authors_item, custom_formats, (name_prefix or "data") + ".authors[{data__authors_x}]")
+ if "maintainers" in data_keys:
+ data_keys.remove("maintainers")
+ data__maintainers = data["maintainers"]
+ if not isinstance(data__maintainers, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".maintainers must be array", value=data__maintainers, name="" + (name_prefix or "data") + ".maintainers", definition={'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, rule='type')
+ data__maintainers_is_list = isinstance(data__maintainers, (list, tuple))
+ if data__maintainers_is_list:
+ data__maintainers_len = len(data__maintainers)
+ for data__maintainers_x, data__maintainers_item in enumerate(data__maintainers):
+ validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_author(data__maintainers_item, custom_formats, (name_prefix or "data") + ".maintainers[{data__maintainers_x}]")
+ if "keywords" in data_keys:
+ data_keys.remove("keywords")
+ data__keywords = data["keywords"]
+ if not isinstance(data__keywords, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".keywords must be array", value=data__keywords, name="" + (name_prefix or "data") + ".keywords", definition={'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, rule='type')
+ data__keywords_is_list = isinstance(data__keywords, (list, tuple))
+ if data__keywords_is_list:
+ data__keywords_len = len(data__keywords)
+ for data__keywords_x, data__keywords_item in enumerate(data__keywords):
+ if not isinstance(data__keywords_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".keywords[{data__keywords_x}]".format(**locals()) + " must be string", value=data__keywords_item, name="" + (name_prefix or "data") + ".keywords[{data__keywords_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if "classifiers" in data_keys:
+ data_keys.remove("classifiers")
+ data__classifiers = data["classifiers"]
+ if not isinstance(data__classifiers, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".classifiers must be array", value=data__classifiers, name="" + (name_prefix or "data") + ".classifiers", definition={'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, rule='type')
+ data__classifiers_is_list = isinstance(data__classifiers, (list, tuple))
+ if data__classifiers_is_list:
+ data__classifiers_len = len(data__classifiers)
+ for data__classifiers_x, data__classifiers_item in enumerate(data__classifiers):
+ if not isinstance(data__classifiers_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".classifiers[{data__classifiers_x}]".format(**locals()) + " must be string", value=data__classifiers_item, name="" + (name_prefix or "data") + ".classifiers[{data__classifiers_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, rule='type')
+ if isinstance(data__classifiers_item, str):
+ if not custom_formats["trove-classifier"](data__classifiers_item):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".classifiers[{data__classifiers_x}]".format(**locals()) + " must be trove-classifier", value=data__classifiers_item, name="" + (name_prefix or "data") + ".classifiers[{data__classifiers_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, rule='format')
+ if "urls" in data_keys:
+ data_keys.remove("urls")
+ data__urls = data["urls"]
+ if not isinstance(data__urls, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls must be object", value=data__urls, name="" + (name_prefix or "data") + ".urls", definition={'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, rule='type')
+ data__urls_is_dict = isinstance(data__urls, dict)
+ if data__urls_is_dict:
+ data__urls_keys = set(data__urls.keys())
+ for data__urls_key, data__urls_val in data__urls.items():
+ if REGEX_PATTERNS['^.+$'].search(data__urls_key):
+ if data__urls_key in data__urls_keys:
+ data__urls_keys.remove(data__urls_key)
+ if not isinstance(data__urls_val, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + " must be string", value=data__urls_val, name="" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'url'}, rule='type')
+ if isinstance(data__urls_val, str):
+ if not custom_formats["url"](data__urls_val):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + " must be url", value=data__urls_val, name="" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'url'}, rule='format')
+ if data__urls_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls must not contain "+str(data__urls_keys)+" properties", value=data__urls, name="" + (name_prefix or "data") + ".urls", definition={'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, rule='additionalProperties')
+ if "scripts" in data_keys:
+ data_keys.remove("scripts")
+ data__scripts = data["scripts"]
+ validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data__scripts, custom_formats, (name_prefix or "data") + ".scripts")
+ if "gui-scripts" in data_keys:
+ data_keys.remove("gui-scripts")
+ data__guiscripts = data["gui-scripts"]
+ validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data__guiscripts, custom_formats, (name_prefix or "data") + ".gui-scripts")
+ if "entry-points" in data_keys:
+ data_keys.remove("entry-points")
+ data__entrypoints = data["entry-points"]
+ data__entrypoints_is_dict = isinstance(data__entrypoints, dict)
+ if data__entrypoints_is_dict:
+ data__entrypoints_keys = set(data__entrypoints.keys())
+ for data__entrypoints_key, data__entrypoints_val in data__entrypoints.items():
+ if REGEX_PATTERNS['^.+$'].search(data__entrypoints_key):
+ if data__entrypoints_key in data__entrypoints_keys:
+ data__entrypoints_keys.remove(data__entrypoints_key)
+ validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data__entrypoints_val, custom_formats, (name_prefix or "data") + ".entry-points.{data__entrypoints_key}")
+ if data__entrypoints_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".entry-points must not contain "+str(data__entrypoints_keys)+" properties", value=data__entrypoints, name="" + (name_prefix or "data") + ".entry-points", definition={'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, rule='additionalProperties')
+ data__entrypoints_len = len(data__entrypoints)
+ if data__entrypoints_len != 0:
+ data__entrypoints_property_names = True
+ for data__entrypoints_key in data__entrypoints:
+ try:
+ if isinstance(data__entrypoints_key, str):
+ if not custom_formats["python-entrypoint-group"](data__entrypoints_key):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".entry-points must be python-entrypoint-group", value=data__entrypoints_key, name="" + (name_prefix or "data") + ".entry-points", definition={'format': 'python-entrypoint-group'}, rule='format')
+ except JsonSchemaValueException:
+ data__entrypoints_property_names = False
+ if not data__entrypoints_property_names:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".entry-points must be named by propertyName definition", value=data__entrypoints, name="" + (name_prefix or "data") + ".entry-points", definition={'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, rule='propertyNames')
+ if "dependencies" in data_keys:
+ data_keys.remove("dependencies")
+ data__dependencies = data["dependencies"]
+ if not isinstance(data__dependencies, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dependencies must be array", value=data__dependencies, name="" + (name_prefix or "data") + ".dependencies", definition={'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, rule='type')
+ data__dependencies_is_list = isinstance(data__dependencies, (list, tuple))
+ if data__dependencies_is_list:
+ data__dependencies_len = len(data__dependencies)
+ for data__dependencies_x, data__dependencies_item in enumerate(data__dependencies):
+ validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_dependency(data__dependencies_item, custom_formats, (name_prefix or "data") + ".dependencies[{data__dependencies_x}]")
+ if "optional-dependencies" in data_keys:
+ data_keys.remove("optional-dependencies")
+ data__optionaldependencies = data["optional-dependencies"]
+ if not isinstance(data__optionaldependencies, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must be object", value=data__optionaldependencies, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='type')
+ data__optionaldependencies_is_dict = isinstance(data__optionaldependencies, dict)
+ if data__optionaldependencies_is_dict:
+ data__optionaldependencies_keys = set(data__optionaldependencies.keys())
+ for data__optionaldependencies_key, data__optionaldependencies_val in data__optionaldependencies.items():
+ if REGEX_PATTERNS['^.+$'].search(data__optionaldependencies_key):
+ if data__optionaldependencies_key in data__optionaldependencies_keys:
+ data__optionaldependencies_keys.remove(data__optionaldependencies_key)
+ if not isinstance(data__optionaldependencies_val, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies.{data__optionaldependencies_key}".format(**locals()) + " must be array", value=data__optionaldependencies_val, name="" + (name_prefix or "data") + ".optional-dependencies.{data__optionaldependencies_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, rule='type')
+ data__optionaldependencies_val_is_list = isinstance(data__optionaldependencies_val, (list, tuple))
+ if data__optionaldependencies_val_is_list:
+ data__optionaldependencies_val_len = len(data__optionaldependencies_val)
+ for data__optionaldependencies_val_x, data__optionaldependencies_val_item in enumerate(data__optionaldependencies_val):
+ validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_dependency(data__optionaldependencies_val_item, custom_formats, (name_prefix or "data") + ".optional-dependencies.{data__optionaldependencies_key}[{data__optionaldependencies_val_x}]")
+ if data__optionaldependencies_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must not contain "+str(data__optionaldependencies_keys)+" properties", value=data__optionaldependencies, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties')
+ data__optionaldependencies_len = len(data__optionaldependencies)
+ if data__optionaldependencies_len != 0:
+ data__optionaldependencies_property_names = True
+ for data__optionaldependencies_key in data__optionaldependencies:
+ try:
+ if isinstance(data__optionaldependencies_key, str):
+ if not custom_formats["pep508-identifier"](data__optionaldependencies_key):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must be pep508-identifier", value=data__optionaldependencies_key, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'format': 'pep508-identifier'}, rule='format')
+ except JsonSchemaValueException:
+ data__optionaldependencies_property_names = False
+ if not data__optionaldependencies_property_names:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must be named by propertyName definition", value=data__optionaldependencies, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='propertyNames')
+ if "dynamic" in data_keys:
+ data_keys.remove("dynamic")
+ data__dynamic = data["dynamic"]
+ if not isinstance(data__dynamic, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be array", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}, rule='type')
+ data__dynamic_is_list = isinstance(data__dynamic, (list, tuple))
+ if data__dynamic_is_list:
+ data__dynamic_len = len(data__dynamic)
+ for data__dynamic_x, data__dynamic_item in enumerate(data__dynamic):
+ if data__dynamic_item not in ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic[{data__dynamic_x}]".format(**locals()) + " must be one of ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']", value=data__dynamic_item, name="" + (name_prefix or "data") + ".dynamic[{data__dynamic_x}]".format(**locals()) + "", definition={'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}, rule='enum')
+ if data_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. 
whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['version'], '$$description': ['version is statically defined in the ``version`` field']}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version should be listed in ``dynamic``']}}}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='additionalProperties')
+ try:
+ try:
+ data_is_dict = isinstance(data, dict)
+ if data_is_dict:
+ data_len = len(data)
+ if not all(prop in data for prop in ['version']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['version'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['version'], '$$description': ['version is statically defined in the ``version`` field']}, rule='required')
+ except JsonSchemaValueException: pass
+ else:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must NOT match a disallowed definition", value=data, name="" + (name_prefix or "data") + "", definition={'not': {'required': ['version'], '$$description': ['version is statically defined in the ``version`` field']}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, rule='not')
+ except JsonSchemaValueException:
+ pass
+ else:
+ data_is_dict = isinstance(data, dict)
+ if data_is_dict:
+ data_keys = set(data.keys())
+ if "dynamic" in data_keys:
+ data_keys.remove("dynamic")
+ data__dynamic = data["dynamic"]
+ data__dynamic_is_list = isinstance(data__dynamic, (list, tuple))
+ if data__dynamic_is_list:
+ data__dynamic_contains = False
+ for data__dynamic_key in data__dynamic:
+ try:
+ if data__dynamic_key != "version":
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be same as const definition: version", value=data__dynamic_key, name="" + (name_prefix or "data") + ".dynamic", definition={'const': 'version'}, rule='const')
+ data__dynamic_contains = True
+ break
+ except JsonSchemaValueException: pass
+ if not data__dynamic_contains:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must contain one of contains definition", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'contains': {'const': 'version'}, '$$description': ['version should be listed in ``dynamic``']}, rule='contains')
+ return data
+
+def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_dependency(data, custom_formats={}, name_prefix=None):
+ if not isinstance(data, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}, rule='type')
+ if isinstance(data, str):
+ if not custom_formats["pep508"](data):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be pep508", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}, rule='format')
+ return data
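+# Rough usage sketch (doctest-style, not exhaustive): the generated validators
+# return the value on success and raise ``JsonSchemaValueException`` otherwise.
+# This assumes ``formats.pep508`` from the sibling ``formats.py`` file added in
+# this change is supplied as the custom format callback:
+#
+#     >>> from setuptools._vendor._validate_pyproject import formats
+#     >>> validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_dependency(
+#     ...     "requests>=2.0", custom_formats={"pep508": formats.pep508})
+#     'requests>=2.0'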
+
+def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data, custom_formats={}, name_prefix=None):
+ if not isinstance(data, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='type')
+ data_is_dict = isinstance(data, dict)
+ if data_is_dict:
+ data_keys = set(data.keys())
+ for data_key, data_val in data.items():
+ if REGEX_PATTERNS['^.+$'].search(data_key):
+ if data_key in data_keys:
+ data_keys.remove(data_key)
+ if not isinstance(data_val, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be string", value=data_val, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}, rule='type')
+ if isinstance(data_val, str):
+ if not custom_formats["python-entrypoint-reference"](data_val):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be python-entrypoint-reference", value=data_val, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}, rule='format')
+ if data_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='additionalProperties')
+ data_len = len(data)
+ if data_len != 0:
+ data_property_names = True
+ for data_key in data:
+ try:
+ if isinstance(data_key, str):
+ if not custom_formats["python-entrypoint-name"](data_key):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be python-entrypoint-name", value=data_key, name="" + (name_prefix or "data") + "", definition={'format': 'python-entrypoint-name'}, rule='format')
+ except JsonSchemaValueException:
+ data_property_names = False
+ if not data_property_names:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be named by propertyName definition", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='propertyNames')
+ return data
+
+def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_author(data, custom_formats={}, name_prefix=None):
+ if not isinstance(data, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, rule='type')
+ data_is_dict = isinstance(data, dict)
+ if data_is_dict:
+ data_keys = set(data.keys())
+ if "name" in data_keys:
+ data_keys.remove("name")
+ data__name = data["name"]
+ if not isinstance(data__name, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, rule='type')
+ if "email" in data_keys:
+ data_keys.remove("email")
+ data__email = data["email"]
+ if not isinstance(data__email, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".email must be string", value=data__email, name="" + (name_prefix or "data") + ".email", definition={'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}, rule='type')
+ if isinstance(data__email, str):
+ if not REGEX_PATTERNS["idn-email_re_pattern"].match(data__email):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".email must be idn-email", value=data__email, name="" + (name_prefix or "data") + ".email", definition={'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}, rule='format')
+    return data
\ No newline at end of file
diff --git a/setuptools/_vendor/_validate_pyproject/formats.py b/setuptools/_vendor/_validate_pyproject/formats.py
new file mode 100644
index 0000000..a288eb5
--- /dev/null
+++ b/setuptools/_vendor/_validate_pyproject/formats.py
@@ -0,0 +1,252 @@
+import logging
+import os
+import re
+import string
+import typing
+from itertools import chain as _chain
+
+_logger = logging.getLogger(__name__)
+
+# -------------------------------------------------------------------------------------
+# PEP 440
+
+VERSION_PATTERN = r"""
+ v?
+ (?:
+ (?:(?P<epoch>[0-9]+)!)? # epoch
+ (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
+ (?P<pre> # pre-release
+ [-_\.]?
+ (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+ [-_\.]?
+ (?P<pre_n>[0-9]+)?
+ )?
+ (?P<post> # post release
+ (?:-(?P<post_n1>[0-9]+))
+ |
+ (?:
+ [-_\.]?
+ (?P<post_l>post|rev|r)
+ [-_\.]?
+ (?P<post_n2>[0-9]+)?
+ )
+ )?
+ (?P<dev> # dev release
+ [-_\.]?
+ (?P<dev_l>dev)
+ [-_\.]?
+ (?P<dev_n>[0-9]+)?
+ )?
+ )
+ (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
+"""
+
+VERSION_REGEX = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.X | re.I)
+
+
+def pep440(version: str) -> bool:
+ return VERSION_REGEX.match(version) is not None
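+# Doctest-style sketch of the predicate above:
+#
+#     >>> pep440("1.2.3"), pep440("2.0.0rc1"), pep440("1!2.0.post1+local.1")
+#     (True, True, True)
+#     >>> pep440("not-a-version")
+#     False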
+
+
+# -------------------------------------------------------------------------------------
+# PEP 508
+
+PEP508_IDENTIFIER_PATTERN = r"([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])"
+PEP508_IDENTIFIER_REGEX = re.compile(f"^{PEP508_IDENTIFIER_PATTERN}$", re.I)
+
+
+def pep508_identifier(name: str) -> bool:
+ return PEP508_IDENTIFIER_REGEX.match(name) is not None
+
+
+try:
+ try:
+ from packaging import requirements as _req
+ except ImportError: # pragma: no cover
+ # let's try setuptools vendored version
+ from setuptools._vendor.packaging import requirements as _req # type: ignore
+
+ def pep508(value: str) -> bool:
+ try:
+ _req.Requirement(value)
+ return True
+ except _req.InvalidRequirement:
+ return False
+
+except ImportError: # pragma: no cover
+ _logger.warning(
+ "Could not find an installation of `packaging`. Requirements, dependencies and "
+ "versions might not be validated. "
+ "To enforce validation, please install `packaging`."
+ )
+
+ def pep508(value: str) -> bool:
+ return True
+
+
+def pep508_versionspec(value: str) -> bool:
+ """Expression that can be used to specify/lock versions (including ranges)"""
+ if any(c in value for c in (";", "]", "@")):
+ # In PEP 508:
+ # conditional markers, extras and URL specs are not included in the
+ # versionspec
+ return False
+ # Let's pretend we have a dependency called `requirement` with the given
+ # version spec, then we can re-use the pep508 function for validation:
+ return pep508(f"requirement{value}")
+
+
+# -------------------------------------------------------------------------------------
+# PEP 517
+
+
+def pep517_backend_reference(value: str) -> bool:
+ module, _, obj = value.partition(":")
+ identifiers = (i.strip() for i in _chain(module.split("."), obj.split(".")))
+ return all(python_identifier(i) for i in identifiers if i)
+
+
+# -------------------------------------------------------------------------------------
+# Classifiers - PEP 301
+
+
+def _download_classifiers() -> str:
+ import cgi
+ from urllib.request import urlopen
+
+ url = "https://pypi.org/pypi?:action=list_classifiers"
+ with urlopen(url) as response:
+ content_type = response.getheader("content-type", "text/plain")
+ encoding = cgi.parse_header(content_type)[1].get("charset", "utf-8")
+ return response.read().decode(encoding)
+
+
+class _TroveClassifier:
+ """The ``trove_classifiers`` package is the official way of validating classifiers,
+ however this package might not be always available.
+ As a workaround we can still download a list from PyPI.
+ We also don't want to be over strict about it, so simply skipping silently is an
+ option (classifiers will be validated anyway during the upload to PyPI).
+ """
+
+ def __init__(self):
+ self.downloaded: typing.Union[None, False, typing.Set[str]] = None
+ # None => not cached yet
+ # False => cache not available
+ self.__name__ = "trove_classifier" # Emulate a public function
+
+ def __call__(self, value: str) -> bool:
+ if self.downloaded is False:
+ return True
+
+ if os.getenv("NO_NETWORK"):
+ self.downloaded = False
+ msg = (
+ "Install ``trove-classifiers`` to ensure proper validation. "
+ "Skipping download of classifiers list from PyPI (NO_NETWORK)."
+ )
+ _logger.debug(msg)
+ return True
+
+ if self.downloaded is None:
+ msg = (
+ "Install ``trove-classifiers`` to ensure proper validation. "
+ "Meanwhile a list of classifiers will be downloaded from PyPI."
+ )
+ _logger.debug(msg)
+ try:
+ self.downloaded = set(_download_classifiers().splitlines())
+ except Exception:
+ self.downloaded = False
+ _logger.debug("Problem with download, skipping validation")
+ return True
+
+ return value in self.downloaded or value.lower().startswith("private ::")
+
+
+try:
+ from trove_classifiers import classifiers as _trove_classifiers
+
+ def trove_classifier(value: str) -> bool:
+ return value in _trove_classifiers or value.lower().startswith("private ::")
+
+except ImportError: # pragma: no cover
+ trove_classifier = _TroveClassifier()
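+# However ``trove_classifier`` ends up being bound above, it acts as a plain
+# predicate over classifier strings; a rough sketch (exact behaviour depends on
+# whether ``trove_classifiers`` or the PyPI download is available):
+#
+#     >>> trove_classifier("Programming Language :: Python :: 3")
+#     True
+#     >>> trove_classifier("Private :: Do Not Upload")  # "private ::" prefix always passes
+#     True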
+
+
+# -------------------------------------------------------------------------------------
+# Non-PEP related
+
+
+def url(value: str) -> bool:
+ from urllib.parse import urlparse
+
+ try:
+ parts = urlparse(value)
+ if not parts.scheme:
+ _logger.warning(
+ "For maximum compatibility please make sure to include a "
+ "`scheme` prefix in your URL (e.g. 'http://'). "
+ f"Given value: {value}"
+ )
+ if not (value.startswith("/") or value.startswith("\\") or "@" in value):
+ parts = urlparse(f"http://{value}")
+
+ return bool(parts.scheme and parts.netloc)
+ except Exception:
+ return False
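+# Doctest-style sketch: scheme-less host names are retried with an ``http://``
+# prefix (after logging a warning), while bare filesystem paths are rejected:
+#
+#     >>> url("https://pypi.org/project/setuptools/")
+#     True
+#     >>> url("pypi.org/project/setuptools/")
+#     True
+#     >>> url("/usr/local/share")
+#     False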
+
+
+# https://packaging.python.org/specifications/entry-points/
+ENTRYPOINT_PATTERN = r"[^\[\s=]([^=]*[^\s=])?"
+ENTRYPOINT_REGEX = re.compile(f"^{ENTRYPOINT_PATTERN}$", re.I)
+RECOMMEDED_ENTRYPOINT_PATTERN = r"[\w.-]+"
+RECOMMEDED_ENTRYPOINT_REGEX = re.compile(f"^{RECOMMEDED_ENTRYPOINT_PATTERN}$", re.I)
+ENTRYPOINT_GROUP_PATTERN = r"\w+(\.\w+)*"
+ENTRYPOINT_GROUP_REGEX = re.compile(f"^{ENTRYPOINT_GROUP_PATTERN}$", re.I)
+
+
+def python_identifier(value: str) -> bool:
+ return value.isidentifier()
+
+
+def python_qualified_identifier(value: str) -> bool:
+ if value.startswith(".") or value.endswith("."):
+ return False
+ return all(python_identifier(m) for m in value.split("."))
+
+
+def python_module_name(value: str) -> bool:
+ return python_qualified_identifier(value)
+
+
+def python_entrypoint_group(value: str) -> bool:
+ return ENTRYPOINT_GROUP_REGEX.match(value) is not None
+
+
+def python_entrypoint_name(value: str) -> bool:
+ if not ENTRYPOINT_REGEX.match(value):
+ return False
+ if not RECOMMEDED_ENTRYPOINT_REGEX.match(value):
+ msg = f"Entry point `{value}` does not follow recommended pattern: "
+ msg += RECOMMEDED_ENTRYPOINT_PATTERN
+ _logger.warning(msg)
+ return True
+
+
+def python_entrypoint_reference(value: str) -> bool:
+ module, _, rest = value.partition(":")
+ if "[" in rest:
+ obj, _, extras_ = rest.partition("[")
+ if extras_.strip()[-1] != "]":
+ return False
+ extras = (x.strip() for x in extras_.strip(string.whitespace + "[]").split(","))
+ if not all(pep508_identifier(e) for e in extras):
+ return False
+ _logger.warning(f"`{value}` - using extras for entry points is not recommended")
+ else:
+ obj = rest
+
+ module_parts = module.split(".")
+ identifiers = _chain(module_parts, obj.split(".")) if rest else module_parts
+ return all(python_identifier(i.strip()) for i in identifiers)
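+# Doctest-style sketch of the reference syntax checked above (``module`` or
+# ``module:attr``, optionally with extras, which only trigger a warning):
+#
+#     >>> python_entrypoint_reference("pkg.module:obj.attr")
+#     True
+#     >>> python_entrypoint_reference("pkg.module:obj [extra1]")
+#     True
+#     >>> python_entrypoint_reference("pkg-name:obj")
+#     False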
diff --git a/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/top_level.txt b/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/top_level.txt
new file mode 100644
index 0000000..bbb0754
--- /dev/null
+++ b/setuptools/_vendor/importlib_metadata-4.11.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+importlib_metadata
diff --git a/setuptools/_vendor/importlib_metadata/__init__.py b/setuptools/_vendor/importlib_metadata/__init__.py
new file mode 100644
index 0000000..292e0c6
--- /dev/null
+++ b/setuptools/_vendor/importlib_metadata/__init__.py
@@ -0,0 +1,1047 @@
+import os
+import re
+import abc
+import csv
+import sys
+from .. import zipp
+import email
+import pathlib
+import operator
+import textwrap
+import warnings
+import functools
+import itertools
+import posixpath
+import collections
+
+from . import _adapters, _meta
+from ._collections import FreezableDefaultDict, Pair
+from ._compat import (
+ NullFinder,
+ install,
+ pypy_partial,
+)
+from ._functools import method_cache, pass_none
+from ._itertools import always_iterable, unique_everseen
+from ._meta import PackageMetadata, SimplePath
+
+from contextlib import suppress
+from importlib import import_module
+from importlib.abc import MetaPathFinder
+from itertools import starmap
+from typing import List, Mapping, Optional, Union
+
+
+__all__ = [
+ 'Distribution',
+ 'DistributionFinder',
+ 'PackageMetadata',
+ 'PackageNotFoundError',
+ 'distribution',
+ 'distributions',
+ 'entry_points',
+ 'files',
+ 'metadata',
+ 'packages_distributions',
+ 'requires',
+ 'version',
+]
+
+
+class PackageNotFoundError(ModuleNotFoundError):
+ """The package was not found."""
+
+ def __str__(self):
+ return f"No package metadata was found for {self.name}"
+
+ @property
+ def name(self):
+ (name,) = self.args
+ return name
+
+
+class Sectioned:
+ """
+ A simple entry point config parser for performance
+
+ >>> for item in Sectioned.read(Sectioned._sample):
+ ... print(item)
+ Pair(name='sec1', value='# comments ignored')
+ Pair(name='sec1', value='a = 1')
+ Pair(name='sec1', value='b = 2')
+ Pair(name='sec2', value='a = 2')
+
+ >>> res = Sectioned.section_pairs(Sectioned._sample)
+ >>> item = next(res)
+ >>> item.name
+ 'sec1'
+ >>> item.value
+ Pair(name='a', value='1')
+ >>> item = next(res)
+ >>> item.value
+ Pair(name='b', value='2')
+ >>> item = next(res)
+ >>> item.name
+ 'sec2'
+ >>> item.value
+ Pair(name='a', value='2')
+ >>> list(res)
+ []
+ """
+
+ _sample = textwrap.dedent(
+ """
+ [sec1]
+ # comments ignored
+ a = 1
+ b = 2
+
+ [sec2]
+ a = 2
+ """
+ ).lstrip()
+
+ @classmethod
+ def section_pairs(cls, text):
+ return (
+ section._replace(value=Pair.parse(section.value))
+ for section in cls.read(text, filter_=cls.valid)
+ if section.name is not None
+ )
+
+ @staticmethod
+ def read(text, filter_=None):
+ lines = filter(filter_, map(str.strip, text.splitlines()))
+ name = None
+ for value in lines:
+ section_match = value.startswith('[') and value.endswith(']')
+ if section_match:
+ name = value.strip('[]')
+ continue
+ yield Pair(name, value)
+
+ @staticmethod
+ def valid(line):
+ return line and not line.startswith('#')
+
+
+class DeprecatedTuple:
+ """
+ Provide subscript item access for backward compatibility.
+
+ >>> recwarn = getfixture('recwarn')
+ >>> ep = EntryPoint(name='name', value='value', group='group')
+ >>> ep[:]
+ ('name', 'value', 'group')
+ >>> ep[0]
+ 'name'
+ >>> len(recwarn)
+ 1
+ """
+
+ _warn = functools.partial(
+ warnings.warn,
+ "EntryPoint tuple interface is deprecated. Access members by name.",
+ DeprecationWarning,
+ stacklevel=pypy_partial(2),
+ )
+
+ def __getitem__(self, item):
+ self._warn()
+ return self._key()[item]
+
+
+class EntryPoint(DeprecatedTuple):
+ """An entry point as defined by Python packaging conventions.
+
+ See `the packaging docs on entry points
+ <https://packaging.python.org/specifications/entry-points/>`_
+ for more information.
+ """
+
+ pattern = re.compile(
+ r'(?P<module>[\w.]+)\s*'
+ r'(:\s*(?P<attr>[\w.]+)\s*)?'
+ r'((?P<extras>\[.*\])\s*)?$'
+ )
+ """
+ A regular expression describing the syntax for an entry point,
+ which might look like:
+
+ - module
+ - package.module
+ - package.module:attribute
+ - package.module:object.attribute
+ - package.module:attr [extra1, extra2]
+
+ Other combinations are possible as well.
+
+ The expression is lenient about whitespace around the ':',
+ following the attr, and following any extras.
+ """
+
+ dist: Optional['Distribution'] = None
+
+ def __init__(self, name, value, group):
+ vars(self).update(name=name, value=value, group=group)
+
+ def load(self):
+ """Load the entry point from its definition. If only a module
+ is indicated by the value, return that module. Otherwise,
+ return the named object.
+ """
+ match = self.pattern.match(self.value)
+ module = import_module(match.group('module'))
+ attrs = filter(None, (match.group('attr') or '').split('.'))
+ return functools.reduce(getattr, attrs, module)
+
+ @property
+ def module(self):
+ match = self.pattern.match(self.value)
+ return match.group('module')
+
+ @property
+ def attr(self):
+ match = self.pattern.match(self.value)
+ return match.group('attr')
+
+ @property
+ def extras(self):
+ match = self.pattern.match(self.value)
+ return list(re.finditer(r'\w+', match.group('extras') or ''))
+
+ def _for(self, dist):
+ vars(self).update(dist=dist)
+ return self
+
+ def __iter__(self):
+ """
+ Supply iter so one may construct dicts of EntryPoints by name.
+ """
+ msg = (
+ "Construction of dict of EntryPoints is deprecated in "
+ "favor of EntryPoints."
+ )
+ warnings.warn(msg, DeprecationWarning)
+ return iter((self.name, self))
+
+ def matches(self, **params):
+ attrs = (getattr(self, param) for param in params)
+ return all(map(operator.eq, params.values(), attrs))
+
+ def _key(self):
+ return self.name, self.value, self.group
+
+ def __lt__(self, other):
+ return self._key() < other._key()
+
+ def __eq__(self, other):
+ return self._key() == other._key()
+
+ def __setattr__(self, name, value):
+ raise AttributeError("EntryPoint objects are immutable.")
+
+ def __repr__(self):
+ return (
+ f'EntryPoint(name={self.name!r}, value={self.value!r}, '
+ f'group={self.group!r})'
+ )
+
+ def __hash__(self):
+ return hash(self._key())
+
+
+class DeprecatedList(list):
+ """
+ Allow an otherwise immutable object to implement mutability
+ for compatibility.
+
+ >>> recwarn = getfixture('recwarn')
+ >>> dl = DeprecatedList(range(3))
+ >>> dl[0] = 1
+ >>> dl.append(3)
+ >>> del dl[3]
+ >>> dl.reverse()
+ >>> dl.sort()
+ >>> dl.extend([4])
+ >>> dl.pop(-1)
+ 4
+ >>> dl.remove(1)
+ >>> dl += [5]
+ >>> dl + [6]
+ [1, 2, 5, 6]
+ >>> dl + (6,)
+ [1, 2, 5, 6]
+ >>> dl.insert(0, 0)
+ >>> dl
+ [0, 1, 2, 5]
+ >>> dl == [0, 1, 2, 5]
+ True
+ >>> dl == (0, 1, 2, 5)
+ True
+ >>> len(recwarn)
+ 1
+ """
+
+ __slots__ = ()
+
+ _warn = functools.partial(
+ warnings.warn,
+ "EntryPoints list interface is deprecated. Cast to list if needed.",
+ DeprecationWarning,
+ stacklevel=pypy_partial(2),
+ )
+
+ def _wrap_deprecated_method(method_name: str): # type: ignore
+ def wrapped(self, *args, **kwargs):
+ self._warn()
+ return getattr(super(), method_name)(*args, **kwargs)
+
+ return method_name, wrapped
+
+ locals().update(
+ map(
+ _wrap_deprecated_method,
+ '__setitem__ __delitem__ append reverse extend pop remove '
+ '__iadd__ insert sort'.split(),
+ )
+ )
+
+ def __add__(self, other):
+ if not isinstance(other, tuple):
+ self._warn()
+ other = tuple(other)
+ return self.__class__(tuple(self) + other)
+
+ def __eq__(self, other):
+ if not isinstance(other, tuple):
+ self._warn()
+ other = tuple(other)
+
+ return tuple(self).__eq__(other)
+
+
+class EntryPoints(DeprecatedList):
+ """
+ An immutable collection of selectable EntryPoint objects.
+ """
+
+ __slots__ = ()
+
+ def __getitem__(self, name): # -> EntryPoint:
+ """
+ Get the EntryPoint in self matching name.
+ """
+ if isinstance(name, int):
+ warnings.warn(
+ "Accessing entry points by index is deprecated. "
+ "Cast to tuple if needed.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return super().__getitem__(name)
+ try:
+ return next(iter(self.select(name=name)))
+ except StopIteration:
+ raise KeyError(name)
+
+ def select(self, **params):
+ """
+ Select entry points from self that match the
+ given parameters (typically group and/or name).
+ """
+ return EntryPoints(ep for ep in self if ep.matches(**params))
+
+ @property
+ def names(self):
+ """
+ Return the set of all names of all entry points.
+ """
+ return {ep.name for ep in self}
+
+ @property
+ def groups(self):
+ """
+ Return the set of all groups of all entry points.
+
+ For coverage while SelectableGroups is present.
+ >>> EntryPoints().groups
+ set()
+ """
+ return {ep.group for ep in self}
+
+ @classmethod
+ def _from_text_for(cls, text, dist):
+ return cls(ep._for(dist) for ep in cls._from_text(text))
+
+ @staticmethod
+ def _from_text(text):
+ return (
+ EntryPoint(name=item.value.name, value=item.value.value, group=item.name)
+ for item in Sectioned.section_pairs(text or '')
+ )
+
+
+class Deprecated:
+ """
+ Compatibility add-in for mapping to indicate that
+ mapping behavior is deprecated.
+
+ >>> recwarn = getfixture('recwarn')
+ >>> class DeprecatedDict(Deprecated, dict): pass
+ >>> dd = DeprecatedDict(foo='bar')
+ >>> dd.get('baz', None)
+ >>> dd['foo']
+ 'bar'
+ >>> list(dd)
+ ['foo']
+ >>> list(dd.keys())
+ ['foo']
+ >>> 'foo' in dd
+ True
+ >>> list(dd.values())
+ ['bar']
+ >>> len(recwarn)
+ 1
+ """
+
+ _warn = functools.partial(
+ warnings.warn,
+ "SelectableGroups dict interface is deprecated. Use select.",
+ DeprecationWarning,
+ stacklevel=pypy_partial(2),
+ )
+
+ def __getitem__(self, name):
+ self._warn()
+ return super().__getitem__(name)
+
+ def get(self, name, default=None):
+ self._warn()
+ return super().get(name, default)
+
+ def __iter__(self):
+ self._warn()
+ return super().__iter__()
+
+ def __contains__(self, *args):
+ self._warn()
+ return super().__contains__(*args)
+
+ def keys(self):
+ self._warn()
+ return super().keys()
+
+ def values(self):
+ self._warn()
+ return super().values()
+
+
+class SelectableGroups(Deprecated, dict):
+ """
+ A backward- and forward-compatible result from
+ entry_points that fully implements the dict interface.
+ """
+
+ @classmethod
+ def load(cls, eps):
+ by_group = operator.attrgetter('group')
+ ordered = sorted(eps, key=by_group)
+ grouped = itertools.groupby(ordered, by_group)
+ return cls((group, EntryPoints(eps)) for group, eps in grouped)
+
+ @property
+ def _all(self):
+ """
+ Reconstruct a list of all entrypoints from the groups.
+ """
+ groups = super(Deprecated, self).values()
+ return EntryPoints(itertools.chain.from_iterable(groups))
+
+ @property
+ def groups(self):
+ return self._all.groups
+
+ @property
+ def names(self):
+ """
+ for coverage:
+ >>> SelectableGroups().names
+ set()
+ """
+ return self._all.names
+
+ def select(self, **params):
+ if not params:
+ return self
+ return self._all.select(**params)
+
+
+class PackagePath(pathlib.PurePosixPath):
+ """A reference to a path in a package"""
+
+ def read_text(self, encoding='utf-8'):
+ with self.locate().open(encoding=encoding) as stream:
+ return stream.read()
+
+ def read_binary(self):
+ with self.locate().open('rb') as stream:
+ return stream.read()
+
+ def locate(self):
+ """Return a path-like object for this path"""
+ return self.dist.locate_file(self)
+
+
+class FileHash:
+ def __init__(self, spec):
+ self.mode, _, self.value = spec.partition('=')
+
+ def __repr__(self):
+ return f'<FileHash mode: {self.mode} value: {self.value}>'
+
+
+class Distribution:
+ """A Python distribution package."""
+
+ @abc.abstractmethod
+ def read_text(self, filename):
+ """Attempt to load metadata file given by the name.
+
+ :param filename: The name of the file in the distribution info.
+ :return: The text if found, otherwise None.
+ """
+
+ @abc.abstractmethod
+ def locate_file(self, path):
+ """
+ Given a path to a file in this distribution, return a path
+ to it.
+ """
+
+ @classmethod
+ def from_name(cls, name):
+ """Return the Distribution for the given package name.
+
+ :param name: The name of the distribution package to search for.
+ :return: The Distribution instance (or subclass thereof) for the named
+ package, if found.
+ :raises PackageNotFoundError: When the named package's distribution
+ metadata cannot be found.
+ """
+ for resolver in cls._discover_resolvers():
+ dists = resolver(DistributionFinder.Context(name=name))
+ dist = next(iter(dists), None)
+ if dist is not None:
+ return dist
+ else:
+ raise PackageNotFoundError(name)
+
+ @classmethod
+ def discover(cls, **kwargs):
+ """Return an iterable of Distribution objects for all packages.
+
+ Pass a ``context`` or pass keyword arguments for constructing
+ a context.
+
+ :context: A ``DistributionFinder.Context`` object.
+ :return: Iterable of Distribution objects for all packages.
+ """
+ context = kwargs.pop('context', None)
+ if context and kwargs:
+ raise ValueError("cannot accept context and kwargs")
+ context = context or DistributionFinder.Context(**kwargs)
+ return itertools.chain.from_iterable(
+ resolver(context) for resolver in cls._discover_resolvers()
+ )
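+ # For example (illustrative; the distribution name and path are hypothetical):
+ #     Distribution.discover(name='example-dist')
+ #     Distribution.discover(context=DistributionFinder.Context(path=['/some/dir']))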
+
+ @staticmethod
+ def at(path):
+ """Return a Distribution for the indicated metadata path
+
+ :param path: a string or path-like object
+ :return: a concrete Distribution instance for the path
+ """
+ return PathDistribution(pathlib.Path(path))
+
+ @staticmethod
+ def _discover_resolvers():
+ """Search the meta_path for resolvers."""
+ declared = (
+ getattr(finder, 'find_distributions', None) for finder in sys.meta_path
+ )
+ return filter(None, declared)
+
+ @property
+ def metadata(self) -> _meta.PackageMetadata:
+ """Return the parsed metadata for this Distribution.
+
+ The returned object will have keys that name the various bits of
+ metadata. See PEP 566 for details.
+ """
+ text = (
+ self.read_text('METADATA')
+ or self.read_text('PKG-INFO')
+ # This last clause is here to support old egg-info files. Its
+ # effect is simply to use the PathDistribution's self._path attribute
+ # (which points to the egg-info file) unchanged.
+ or self.read_text('')
+ )
+ return _adapters.Message(email.message_from_string(text))
+
+ @property
+ def name(self):
+ """Return the 'Name' metadata for the distribution package."""
+ return self.metadata['Name']
+
+ @property
+ def _normalized_name(self):
+ """Return a normalized version of the name."""
+ return Prepared.normalize(self.name)
+
+ @property
+ def version(self):
+ """Return the 'Version' metadata for the distribution package."""
+ return self.metadata['Version']
+
+ @property
+ def entry_points(self):
+ return EntryPoints._from_text_for(self.read_text('entry_points.txt'), self)
+
+ @property
+ def files(self):
+ """Files in this distribution.
+
+ :return: List of PackagePath for this distribution or None
+
+ Result is `None` if the metadata file that enumerates files
+ (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is
+ missing.
+ Result may be empty if the metadata exists but is empty.
+ """
+
+ def make_file(name, hash=None, size_str=None):
+ result = PackagePath(name)
+ result.hash = FileHash(hash) if hash else None
+ result.size = int(size_str) if size_str else None
+ result.dist = self
+ return result
+
+ @pass_none
+ def make_files(lines):
+ return list(starmap(make_file, csv.reader(lines)))
+
+ return make_files(self._read_files_distinfo() or self._read_files_egginfo())
+
+ def _read_files_distinfo(self):
+ """
+ Read the lines of RECORD
+ """
+ text = self.read_text('RECORD')
+ return text and text.splitlines()
+
+ def _read_files_egginfo(self):
+ """
+ SOURCES.txt might contain literal commas, so wrap each line
+ in quotes.
+ """
+ text = self.read_text('SOURCES.txt')
+ return text and map('"{}"'.format, text.splitlines())
+
+ @property
+ def requires(self):
+ """Generated requirements specified for this Distribution"""
+ reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs()
+ return reqs and list(reqs)
+
+ def _read_dist_info_reqs(self):
+ return self.metadata.get_all('Requires-Dist')
+
+ def _read_egg_info_reqs(self):
+ source = self.read_text('requires.txt')
+ return pass_none(self._deps_from_requires_text)(source)
+
+ @classmethod
+ def _deps_from_requires_text(cls, source):
+ return cls._convert_egg_info_reqs_to_simple_reqs(Sectioned.read(source))
+
+ @staticmethod
+ def _convert_egg_info_reqs_to_simple_reqs(sections):
+ """
+ Historically, setuptools would solicit and store 'extra'
+ requirements, including those with environment markers,
+ in separate sections. More modern tools expect each
+ dependency to be defined separately, with any relevant
+ extras and environment markers attached directly to that
+ requirement. This method converts the former to the
+ latter. See _test_deps_from_requires_text for an example.
+ """
+
+ def make_condition(name):
+ return name and f'extra == "{name}"'
+
+ def quoted_marker(section):
+ section = section or ''
+ extra, sep, markers = section.partition(':')
+ if extra and markers:
+ markers = f'({markers})'
+ conditions = list(filter(None, [markers, make_condition(extra)]))
+ return '; ' + ' and '.join(conditions) if conditions else ''
+
+ def url_req_space(req):
+ """
+ PEP 508 requires a space between the url_spec and the quoted_marker.
+ Ref python/importlib_metadata#357.
+ """
+ # '@' is uniquely indicative of a url_req.
+ return ' ' * ('@' in req)
+
+ for section in sections:
+ space = url_req_space(section.value)
+ yield section.value + space + quoted_marker(section.name)
+
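+# Illustrative example of the conversion performed above: a requires.txt of
+#
+#     [docs]
+#     sphinx
+#
+#     [:python_version < "3.6"]
+#     enum34
+#
+# yields the PEP 508 style requirements
+#
+#     sphinx; extra == "docs"
+#     enum34; python_version < "3.6"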
+
+class DistributionFinder(MetaPathFinder):
+ """
+ A MetaPathFinder capable of discovering installed distributions.
+ """
+
+ class Context:
+ """
+ Keyword arguments presented by the caller to
+ ``distributions()`` or ``Distribution.discover()``
+ to narrow the scope of a search for distributions
+ in all DistributionFinders.
+
+ Each DistributionFinder may expect any parameters
+ and should attempt to honor the canonical
+ parameters defined below when appropriate.
+ """
+
+ name = None
+ """
+ Specific name for which a distribution finder should match.
+ A name of ``None`` matches all distributions.
+ """
+
+ def __init__(self, **kwargs):
+ vars(self).update(kwargs)
+
+ @property
+ def path(self):
+ """
+ The sequence of directory paths that a distribution finder
+ should search.
+
+ Typically refers to Python installed package paths such as
+ "site-packages" directories and defaults to ``sys.path``.
+ """
+ return vars(self).get('path', sys.path)
+
+ @abc.abstractmethod
+ def find_distributions(self, context=Context()):
+ """
+ Find distributions.
+
+ Return an iterable of all Distribution instances capable of
+ loading the metadata for packages matching the ``context``,
+ a DistributionFinder.Context instance.
+ """
+
+
+class FastPath:
+ """
+ Micro-optimized class for searching a path for
+ children.
+
+ >>> FastPath('').children()
+ ['...']
+ """
+
+ @functools.lru_cache() # type: ignore
+ def __new__(cls, root):
+ return super().__new__(cls)
+
+ def __init__(self, root):
+ self.root = str(root)
+
+ def joinpath(self, child):
+ return pathlib.Path(self.root, child)
+
+ def children(self):
+ with suppress(Exception):
+ return os.listdir(self.root or '.')
+ with suppress(Exception):
+ return self.zip_children()
+ return []
+
+ def zip_children(self):
+ zip_path = zipp.Path(self.root)
+ names = zip_path.root.namelist()
+ self.joinpath = zip_path.joinpath
+
+ return dict.fromkeys(child.split(posixpath.sep, 1)[0] for child in names)
+
+ def search(self, name):
+ return self.lookup(self.mtime).search(name)
+
+ @property
+ def mtime(self):
+ with suppress(OSError):
+ return os.stat(self.root).st_mtime
+ self.lookup.cache_clear()
+
+ @method_cache
+ def lookup(self, mtime):
+ return Lookup(self)
+
+
+class Lookup:
+ def __init__(self, path: FastPath):
+ base = os.path.basename(path.root).lower()
+ base_is_egg = base.endswith(".egg")
+ self.infos = FreezableDefaultDict(list)
+ self.eggs = FreezableDefaultDict(list)
+
+ for child in path.children():
+ low = child.lower()
+ if low.endswith((".dist-info", ".egg-info")):
+ # rpartition is faster than splitext and suitable for this purpose.
+ name = low.rpartition(".")[0].partition("-")[0]
+ normalized = Prepared.normalize(name)
+ self.infos[normalized].append(path.joinpath(child))
+ elif base_is_egg and low == "egg-info":
+ name = base.rpartition(".")[0].partition("-")[0]
+ legacy_normalized = Prepared.legacy_normalize(name)
+ self.eggs[legacy_normalized].append(path.joinpath(child))
+
+ self.infos.freeze()
+ self.eggs.freeze()
+
+ def search(self, prepared):
+ infos = (
+ self.infos[prepared.normalized]
+ if prepared
+ else itertools.chain.from_iterable(self.infos.values())
+ )
+ eggs = (
+ self.eggs[prepared.legacy_normalized]
+ if prepared
+ else itertools.chain.from_iterable(self.eggs.values())
+ )
+ return itertools.chain(infos, eggs)
+
+
+class Prepared:
+ """
+ A prepared search for metadata on a possibly-named package.
+ """
+
+ normalized = None
+ legacy_normalized = None
+
+ def __init__(self, name):
+ self.name = name
+ if name is None:
+ return
+ self.normalized = self.normalize(name)
+ self.legacy_normalized = self.legacy_normalize(name)
+
+ @staticmethod
+ def normalize(name):
+ """
+ PEP 503 normalization plus dashes as underscores.
+ """
+ return re.sub(r"[-_.]+", "-", name).lower().replace('-', '_')
+
+ @staticmethod
+ def legacy_normalize(name):
+ """
+ Normalize the package name following the convention used by older
+ packaging tool versions and specs.
+ """
+ return name.lower().replace('-', '_')
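+ # For example (illustrative): normalize("Foo.Bar-Baz") == "foo_bar_baz",
+ # while legacy_normalize("Foo.Bar-Baz") == "foo.bar_baz".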
+
+ def __bool__(self):
+ return bool(self.name)
+
+
+@install
+class MetadataPathFinder(NullFinder, DistributionFinder):
+ """A degenerate finder for distribution packages on the file system.
+
+ This finder supplies only a find_distributions() method for versions
+ of Python that do not have a PathFinder find_distributions().
+ """
+
+ def find_distributions(self, context=DistributionFinder.Context()):
+ """
+ Find distributions.
+
+ Return an iterable of all Distribution instances capable of
+ loading the metadata for packages matching ``context.name``
+ (or all names if ``None`` indicated) along the paths in the list
+ of directories ``context.path``.
+ """
+ found = self._search_paths(context.name, context.path)
+ return map(PathDistribution, found)
+
+ @classmethod
+ def _search_paths(cls, name, paths):
+ """Find metadata directories in paths heuristically."""
+ prepared = Prepared(name)
+ return itertools.chain.from_iterable(
+ path.search(prepared) for path in map(FastPath, paths)
+ )
+
+ def invalidate_caches(cls):
+ FastPath.__new__.cache_clear()
+
+
+class PathDistribution(Distribution):
+ def __init__(self, path: SimplePath):
+ """Construct a distribution.
+
+ :param path: SimplePath indicating the metadata directory.
+ """
+ self._path = path
+
+ def read_text(self, filename):
+ with suppress(
+ FileNotFoundError,
+ IsADirectoryError,
+ KeyError,
+ NotADirectoryError,
+ PermissionError,
+ ):
+ return self._path.joinpath(filename).read_text(encoding='utf-8')
+
+ read_text.__doc__ = Distribution.read_text.__doc__
+
+ def locate_file(self, path):
+ return self._path.parent / path
+
+ @property
+ def _normalized_name(self):
+ """
+ Performance optimization: where possible, resolve the
+ normalized name from the file system path.
+ """
+ stem = os.path.basename(str(self._path))
+ return self._name_from_stem(stem) or super()._normalized_name
+
+ def _name_from_stem(self, stem):
+ name, ext = os.path.splitext(stem)
+ if ext not in ('.dist-info', '.egg-info'):
+ return
+ name, sep, rest = stem.partition('-')
+ return name
+
+
+def distribution(distribution_name):
+ """Get the ``Distribution`` instance for the named package.
+
+ :param distribution_name: The name of the distribution package as a string.
+ :return: A ``Distribution`` instance (or subclass thereof).
+ """
+ return Distribution.from_name(distribution_name)
+
+
+def distributions(**kwargs):
+ """Get all ``Distribution`` instances in the current environment.
+
+ :return: An iterable of ``Distribution`` instances.
+ """
+ return Distribution.discover(**kwargs)
+
+
+def metadata(distribution_name) -> _meta.PackageMetadata:
+ """Get the metadata for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: A PackageMetadata containing the parsed metadata.
+ """
+ return Distribution.from_name(distribution_name).metadata
+
+
+def version(distribution_name):
+ """Get the version string for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: The version string for the package as defined in the package's
+ "Version" metadata key.
+ """
+ return distribution(distribution_name).version
+
+
+def entry_points(**params) -> Union[EntryPoints, SelectableGroups]:
+ """Return EntryPoint objects for all installed packages.
+
+ Pass selection parameters (group or name) to filter the
+ result to entry points matching those properties (see
+ EntryPoints.select()).
+
+ For compatibility, returns ``SelectableGroups`` object unless
+ selection parameters are supplied. In the future, this function
+ will return ``EntryPoints`` instead of ``SelectableGroups``
+ even when no selection parameters are supplied.
+
+ For maximum future compatibility, pass selection parameters
+ or invoke ``.select`` with parameters on the result.
+
+ :return: EntryPoints or SelectableGroups for all installed packages.
+ """
+ norm_name = operator.attrgetter('_normalized_name')
+ unique = functools.partial(unique_everseen, key=norm_name)
+ eps = itertools.chain.from_iterable(
+ dist.entry_points for dist in unique(distributions())
+ )
+ return SelectableGroups.load(eps).select(**params)
+
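+# Illustrative usage sketch; the group and entry point names below are
+# hypothetical and not defined by this module:
+#
+#     eps = entry_points(group='console_scripts')   # EntryPoints for one group
+#     'example-cli' in eps.names                    # membership test by name
+#     eps['example-cli'].load()                     # import the referenced object
+#
+# With no selection parameters, the returned SelectableGroups still supports
+# the dict interface, but calling .select(group=..., name=...) on it is the
+# forward-compatible form.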
+
+def files(distribution_name):
+ """Return a list of files for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: List of files composing the distribution.
+ """
+ return distribution(distribution_name).files
+
+
+def requires(distribution_name):
+ """
+ Return a list of requirements for the named package.
+
+ :return: An iterator of requirements, suitable for
+ packaging.requirement.Requirement.
+ """
+ return distribution(distribution_name).requires
+
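+# Illustrative sketch of the module-level query API above, assuming a
+# hypothetical installed distribution named "example-dist":
+#
+#     version('example-dist')             # e.g. '1.0.0'
+#     metadata('example-dist')['Name']    # 'example-dist'
+#     requires('example-dist')            # list of requirement strings, or None
+#     files('example-dist')               # list of PackagePath objects, or None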
+
+def packages_distributions() -> Mapping[str, List[str]]:
+ """
+ Return a mapping of top-level packages to their
+ distributions.
+
+ >>> import collections.abc
+ >>> pkgs = packages_distributions()
+ >>> all(isinstance(dist, collections.abc.Sequence) for dist in pkgs.values())
+ True
+ """
+ pkg_to_dist = collections.defaultdict(list)
+ for dist in distributions():
+ for pkg in _top_level_declared(dist) or _top_level_inferred(dist):
+ pkg_to_dist[pkg].append(dist.metadata['Name'])
+ return dict(pkg_to_dist)
+
+
+def _top_level_declared(dist):
+ return (dist.read_text('top_level.txt') or '').split()
+
+
+def _top_level_inferred(dist):
+ return {
+ f.parts[0] if len(f.parts) > 1 else f.with_suffix('').name
+ for f in always_iterable(dist.files)
+ if f.suffix == ".py"
+ }
diff --git a/setuptools/_vendor/importlib_metadata/_adapters.py b/setuptools/_vendor/importlib_metadata/_adapters.py
new file mode 100644
index 0000000..aa460d3
--- /dev/null
+++ b/setuptools/_vendor/importlib_metadata/_adapters.py
@@ -0,0 +1,68 @@
+import re
+import textwrap
+import email.message
+
+from ._text import FoldedCase
+
+
+class Message(email.message.Message):
+ multiple_use_keys = set(
+ map(
+ FoldedCase,
+ [
+ 'Classifier',
+ 'Obsoletes-Dist',
+ 'Platform',
+ 'Project-URL',
+ 'Provides-Dist',
+ 'Provides-Extra',
+ 'Requires-Dist',
+ 'Requires-External',
+ 'Supported-Platform',
+ 'Dynamic',
+ ],
+ )
+ )
+ """
+ Keys that may be indicated multiple times per PEP 566.
+ """
+
+ def __new__(cls, orig: email.message.Message):
+ res = super().__new__(cls)
+ vars(res).update(vars(orig))
+ return res
+
+ def __init__(self, *args, **kwargs):
+ self._headers = self._repair_headers()
+
+ # suppress spurious error from mypy
+ def __iter__(self):
+ return super().__iter__()
+
+ def _repair_headers(self):
+ def redent(value):
+ "Correct for RFC822 indentation"
+ if not value or '\n' not in value:
+ return value
+ return textwrap.dedent(' ' * 8 + value)
+
+ headers = [(key, redent(value)) for key, value in vars(self)['_headers']]
+ if self._payload:
+ headers.append(('Description', self.get_payload()))
+ return headers
+
+ @property
+ def json(self):
+ """
+ Convert PackageMetadata to a JSON-compatible format
+ per PEP 566.
+ """
+
+ def transform(key):
+ value = self.get_all(key) if key in self.multiple_use_keys else self[key]
+ if key == 'Keywords':
+ value = re.split(r'\s+', value)
+ tk = key.lower().replace('-', '_')
+ return tk, value
+
+ return dict(map(transform, map(FoldedCase, self)))
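+
+
+# For example (illustrative): a metadata message with the headers
+#
+#     Requires-Dist: packaging
+#     Requires-Dist: wheel
+#     Keywords: sample package
+#
+# is rendered by the transform above as (other keys omitted)
+#
+#     {'requires_dist': ['packaging', 'wheel'], 'keywords': ['sample', 'package']}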
diff --git a/setuptools/_vendor/importlib_metadata/_collections.py b/setuptools/_vendor/importlib_metadata/_collections.py
new file mode 100644
index 0000000..cf0954e
--- /dev/null
+++ b/setuptools/_vendor/importlib_metadata/_collections.py
@@ -0,0 +1,30 @@
+import collections
+
+
+# from jaraco.collections 3.3
+class FreezableDefaultDict(collections.defaultdict):
+ """
+ Often it is desirable to prevent the mutation of
+ a default dict after its initial construction, such
+ as to prevent mutation during iteration.
+
+ >>> dd = FreezableDefaultDict(list)
+ >>> dd[0].append('1')
+ >>> dd.freeze()
+ >>> dd[1]
+ []
+ >>> len(dd)
+ 1
+ """
+
+ def __missing__(self, key):
+ return getattr(self, '_frozen', super().__missing__)(key)
+
+ def freeze(self):
+ self._frozen = lambda key: self.default_factory()
+
+
+class Pair(collections.namedtuple('Pair', 'name value')):
+ @classmethod
+ def parse(cls, text):
+ return cls(*map(str.strip, text.split("=", 1)))
diff --git a/setuptools/_vendor/importlib_metadata/_compat.py b/setuptools/_vendor/importlib_metadata/_compat.py
new file mode 100644
index 0000000..ef3136f
--- /dev/null
+++ b/setuptools/_vendor/importlib_metadata/_compat.py
@@ -0,0 +1,71 @@
+import sys
+import platform
+
+
+__all__ = ['install', 'NullFinder', 'Protocol']
+
+
+try:
+ from typing import Protocol
+except ImportError: # pragma: no cover
+ from ..typing_extensions import Protocol # type: ignore
+
+
+def install(cls):
+ """
+ Class decorator for installation on sys.meta_path.
+
+ Adds the backport DistributionFinder to sys.meta_path and
+ attempts to disable the finder functionality of the stdlib
+ DistributionFinder.
+ """
+ sys.meta_path.append(cls())
+ disable_stdlib_finder()
+ return cls
+
+
+def disable_stdlib_finder():
+ """
+ Give the backport primacy for discovering path-based distributions
+ by monkey-patching the stdlib O_O.
+
+ See #91 for more background for rationale on this sketchy
+ behavior.
+ """
+
+ def matches(finder):
+ return getattr(
+ finder, '__module__', None
+ ) == '_frozen_importlib_external' and hasattr(finder, 'find_distributions')
+
+ for finder in filter(matches, sys.meta_path): # pragma: nocover
+ del finder.find_distributions
+
+
+class NullFinder:
+ """
+ A "Finder" (aka "MetaClassFinder") that never finds any modules,
+ but may find distributions.
+ """
+
+ @staticmethod
+ def find_spec(*args, **kwargs):
+ return None
+
+ # In Python 2, the import system requires finders
+ # to have a find_module() method, but this usage
+ # is deprecated in Python 3 in favor of find_spec().
+ # For the purposes of this finder (i.e. being present
+ # on sys.meta_path but having no other import
+ # system functionality), the two methods are identical.
+ find_module = find_spec
+
+
+def pypy_partial(val):
+ """
+ Adjust for variable stacklevel on partial under PyPy.
+
+ Workaround for #327.
+ """
+ is_pypy = platform.python_implementation() == 'PyPy'
+ return val + is_pypy
diff --git a/setuptools/_vendor/importlib_metadata/_functools.py b/setuptools/_vendor/importlib_metadata/_functools.py
new file mode 100644
index 0000000..71f66bd
--- /dev/null
+++ b/setuptools/_vendor/importlib_metadata/_functools.py
@@ -0,0 +1,104 @@
+import types
+import functools
+
+
+# from jaraco.functools 3.3
+def method_cache(method, cache_wrapper=None):
+ """
+ Wrap lru_cache to support storing the cache data in the object instances.
+
+ Abstracts the common paradigm where the method explicitly saves an
+ underscore-prefixed protected property on first call and returns that
+ subsequently.
+
+ >>> class MyClass:
+ ... calls = 0
+ ...
+ ... @method_cache
+ ... def method(self, value):
+ ... self.calls += 1
+ ... return value
+
+ >>> a = MyClass()
+ >>> a.method(3)
+ 3
+ >>> for x in range(75):
+ ... res = a.method(x)
+ >>> a.calls
+ 75
+
+ Note that the apparent behavior will be exactly like that of lru_cache
+ except that the cache is stored on each instance, so values in one
+ instance will not flush values from another, and when an instance is
+ deleted, so are the cached values for that instance.
+
+ >>> b = MyClass()
+ >>> for x in range(35):
+ ... res = b.method(x)
+ >>> b.calls
+ 35
+ >>> a.method(0)
+ 0
+ >>> a.calls
+ 75
+
+ Note that if method had been decorated with ``functools.lru_cache()``,
+ a.calls would have been 76 (due to the cached value of 0 having been
+ flushed by the 'b' instance).
+
+ Clear the cache with ``.cache_clear()``
+
+ >>> a.method.cache_clear()
+
+ Same for a method that hasn't yet been called.
+
+ >>> c = MyClass()
+ >>> c.method.cache_clear()
+
+ Another cache wrapper may be supplied:
+
+ >>> cache = functools.lru_cache(maxsize=2)
+ >>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache)
+ >>> a = MyClass()
+ >>> a.method2()
+ 3
+
+ Caution - do not subsequently wrap the method with another decorator, such
+ as ``@property``, which changes the semantics of the function.
+
+ See also
+ http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/
+ for another implementation and additional justification.
+ """
+ cache_wrapper = cache_wrapper or functools.lru_cache()
+
+ def wrapper(self, *args, **kwargs):
+ # it's the first call, replace the method with a cached, bound method
+ bound_method = types.MethodType(method, self)
+ cached_method = cache_wrapper(bound_method)
+ setattr(self, method.__name__, cached_method)
+ return cached_method(*args, **kwargs)
+
+ # Support cache clear even before cache has been created.
+ wrapper.cache_clear = lambda: None
+
+ return wrapper
+
+
+# From jaraco.functools 3.3
+def pass_none(func):
+ """
+ Wrap func so it's not called if its first param is None
+
+ >>> print_text = pass_none(print)
+ >>> print_text('text')
+ text
+ >>> print_text(None)
+ """
+
+ @functools.wraps(func)
+ def wrapper(param, *args, **kwargs):
+ if param is not None:
+ return func(param, *args, **kwargs)
+
+ return wrapper
diff --git a/setuptools/_vendor/importlib_metadata/_itertools.py b/setuptools/_vendor/importlib_metadata/_itertools.py
new file mode 100644
index 0000000..d4ca9b9
--- /dev/null
+++ b/setuptools/_vendor/importlib_metadata/_itertools.py
@@ -0,0 +1,73 @@
+from itertools import filterfalse
+
+
+def unique_everseen(iterable, key=None):
+ "List unique elements, preserving order. Remember all elements ever seen."
+ # unique_everseen('AAAABBBCCDAABBB') --> A B C D
+ # unique_everseen('ABBCcAD', str.lower) --> A B C D
+ seen = set()
+ seen_add = seen.add
+ if key is None:
+ for element in filterfalse(seen.__contains__, iterable):
+ seen_add(element)
+ yield element
+ else:
+ for element in iterable:
+ k = key(element)
+ if k not in seen:
+ seen_add(k)
+ yield element
+
+
+# copied from more_itertools 8.8
+def always_iterable(obj, base_type=(str, bytes)):
+ """If *obj* is iterable, return an iterator over its items::
+
+ >>> obj = (1, 2, 3)
+ >>> list(always_iterable(obj))
+ [1, 2, 3]
+
+ If *obj* is not iterable, return a one-item iterable containing *obj*::
+
+ >>> obj = 1
+ >>> list(always_iterable(obj))
+ [1]
+
+ If *obj* is ``None``, return an empty iterable:
+
+ >>> obj = None
+ >>> list(always_iterable(None))
+ []
+
+ By default, binary and text strings are not considered iterable::
+
+ >>> obj = 'foo'
+ >>> list(always_iterable(obj))
+ ['foo']
+
+ If *base_type* is set, objects for which ``isinstance(obj, base_type)``
+ returns ``True`` won't be considered iterable.
+
+ >>> obj = {'a': 1}
+ >>> list(always_iterable(obj)) # Iterate over the dict's keys
+ ['a']
+ >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit
+ [{'a': 1}]
+
+ Set *base_type* to ``None`` to avoid any special handling and treat objects
+ Python considers iterable as iterable:
+
+ >>> obj = 'foo'
+ >>> list(always_iterable(obj, base_type=None))
+ ['f', 'o', 'o']
+ """
+ if obj is None:
+ return iter(())
+
+ if (base_type is not None) and isinstance(obj, base_type):
+ return iter((obj,))
+
+ try:
+ return iter(obj)
+ except TypeError:
+ return iter((obj,))
diff --git a/setuptools/_vendor/importlib_metadata/_meta.py b/setuptools/_vendor/importlib_metadata/_meta.py
new file mode 100644
index 0000000..37ee43e
--- /dev/null
+++ b/setuptools/_vendor/importlib_metadata/_meta.py
@@ -0,0 +1,48 @@
+from ._compat import Protocol
+from typing import Any, Dict, Iterator, List, TypeVar, Union
+
+
+_T = TypeVar("_T")
+
+
+class PackageMetadata(Protocol):
+ def __len__(self) -> int:
+ ... # pragma: no cover
+
+ def __contains__(self, item: str) -> bool:
+ ... # pragma: no cover
+
+ def __getitem__(self, key: str) -> str:
+ ... # pragma: no cover
+
+ def __iter__(self) -> Iterator[str]:
+ ... # pragma: no cover
+
+ def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]:
+ """
+ Return all values associated with a possibly multi-valued key.
+ """
+
+ @property
+ def json(self) -> Dict[str, Union[str, List[str]]]:
+ """
+ A JSON-compatible form of the metadata.
+ """
+
+
+class SimplePath(Protocol):
+ """
+ A minimal subset of pathlib.Path required by PathDistribution.
+ """
+
+ def joinpath(self) -> 'SimplePath':
+ ... # pragma: no cover
+
+ def __truediv__(self) -> 'SimplePath':
+ ... # pragma: no cover
+
+ def parent(self) -> 'SimplePath':
+ ... # pragma: no cover
+
+ def read_text(self) -> str:
+ ... # pragma: no cover
diff --git a/setuptools/_vendor/importlib_metadata/_text.py b/setuptools/_vendor/importlib_metadata/_text.py
new file mode 100644
index 0000000..c88cfbb
--- /dev/null
+++ b/setuptools/_vendor/importlib_metadata/_text.py
@@ -0,0 +1,99 @@
+import re
+
+from ._functools import method_cache
+
+
+# from jaraco.text 3.5
+class FoldedCase(str):
+ """
+ A case insensitive string class; behaves just like str
+ except compares equal when the only variation is case.
+
+ >>> s = FoldedCase('hello world')
+
+ >>> s == 'Hello World'
+ True
+
+ >>> 'Hello World' == s
+ True
+
+ >>> s != 'Hello World'
+ False
+
+ >>> s.index('O')
+ 4
+
+ >>> s.split('O')
+ ['hell', ' w', 'rld']
+
+ >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta']))
+ ['alpha', 'Beta', 'GAMMA']
+
+ Sequence membership is straightforward.
+
+ >>> "Hello World" in [s]
+ True
+ >>> s in ["Hello World"]
+ True
+
+ You may test for set inclusion, but candidate and elements
+ must both be folded.
+
+ >>> FoldedCase("Hello World") in {s}
+ True
+ >>> s in {FoldedCase("Hello World")}
+ True
+
+ String inclusion works as long as the FoldedCase object
+ is on the right.
+
+ >>> "hello" in FoldedCase("Hello World")
+ True
+
+ But not if the FoldedCase object is on the left:
+
+ >>> FoldedCase('hello') in 'Hello World'
+ False
+
+ In that case, use in_:
+
+ >>> FoldedCase('hello').in_('Hello World')
+ True
+
+ >>> FoldedCase('hello') > FoldedCase('Hello')
+ False
+ """
+
+ def __lt__(self, other):
+ return self.lower() < other.lower()
+
+ def __gt__(self, other):
+ return self.lower() > other.lower()
+
+ def __eq__(self, other):
+ return self.lower() == other.lower()
+
+ def __ne__(self, other):
+ return self.lower() != other.lower()
+
+ def __hash__(self):
+ return hash(self.lower())
+
+ def __contains__(self, other):
+ return super().lower().__contains__(other.lower())
+
+ def in_(self, other):
+ "Does self appear in other?"
+ return self in FoldedCase(other)
+
+ # cache lower since it's likely to be called frequently.
+ @method_cache
+ def lower(self):
+ return super().lower()
+
+ def index(self, sub):
+ return self.lower().index(sub.lower())
+
+ def split(self, splitter=' ', maxsplit=0):
+ pattern = re.compile(re.escape(splitter), re.I)
+ return pattern.split(self, maxsplit)
diff --git a/setuptools/_vendor/importlib_resources-5.4.0.dist-info/top_level.txt b/setuptools/_vendor/importlib_resources-5.4.0.dist-info/top_level.txt
new file mode 100644
index 0000000..58ad1bd
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources-5.4.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+importlib_resources
diff --git a/setuptools/_vendor/importlib_resources/__init__.py b/setuptools/_vendor/importlib_resources/__init__.py
new file mode 100644
index 0000000..34e3a99
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/__init__.py
@@ -0,0 +1,36 @@
+"""Read resources contained within a package."""
+
+from ._common import (
+ as_file,
+ files,
+ Package,
+)
+
+from ._legacy import (
+ contents,
+ open_binary,
+ read_binary,
+ open_text,
+ read_text,
+ is_resource,
+ path,
+ Resource,
+)
+
+from .abc import ResourceReader
+
+
+__all__ = [
+ 'Package',
+ 'Resource',
+ 'ResourceReader',
+ 'as_file',
+ 'contents',
+ 'files',
+ 'is_resource',
+ 'open_binary',
+ 'open_text',
+ 'path',
+ 'read_binary',
+ 'read_text',
+]
diff --git a/setuptools/_vendor/importlib_resources/_adapters.py b/setuptools/_vendor/importlib_resources/_adapters.py
new file mode 100644
index 0000000..ea363d8
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/_adapters.py
@@ -0,0 +1,170 @@
+from contextlib import suppress
+from io import TextIOWrapper
+
+from . import abc
+
+
+class SpecLoaderAdapter:
+ """
+ Wrap a package spec so that its underlying loader can be adapted.
+ """
+
+ def __init__(self, spec, adapter=lambda spec: spec.loader):
+ self.spec = spec
+ self.loader = adapter(spec)
+
+ def __getattr__(self, name):
+ return getattr(self.spec, name)
+
+
+class TraversableResourcesLoader:
+ """
+ Adapt a loader to provide TraversableResources.
+ """
+
+ def __init__(self, spec):
+ self.spec = spec
+
+ def get_resource_reader(self, name):
+ return CompatibilityFiles(self.spec)._native()
+
+
+def _io_wrapper(file, mode='r', *args, **kwargs):
+ if mode == 'r':
+ return TextIOWrapper(file, *args, **kwargs)
+ elif mode == 'rb':
+ return file
+ raise ValueError(
+ "Invalid mode value '{}', only 'r' and 'rb' are supported".format(mode)
+ )
+
+
+class CompatibilityFiles:
+ """
+ Adapter for an existing or non-existent resource reader
+ to provide a compatibility .files().
+ """
+
+ class SpecPath(abc.Traversable):
+ """
+ Path tied to a module spec.
+ Can be read and exposes the resource reader children.
+ """
+
+ def __init__(self, spec, reader):
+ self._spec = spec
+ self._reader = reader
+
+ def iterdir(self):
+ if not self._reader:
+ return iter(())
+ return iter(
+ CompatibilityFiles.ChildPath(self._reader, path)
+ for path in self._reader.contents()
+ )
+
+ def is_file(self):
+ return False
+
+ is_dir = is_file
+
+ def joinpath(self, other):
+ if not self._reader:
+ return CompatibilityFiles.OrphanPath(other)
+ return CompatibilityFiles.ChildPath(self._reader, other)
+
+ @property
+ def name(self):
+ return self._spec.name
+
+ def open(self, mode='r', *args, **kwargs):
+ return _io_wrapper(self._reader.open_resource(None), mode, *args, **kwargs)
+
+ class ChildPath(abc.Traversable):
+ """
+ Path tied to a resource reader child.
+ Can be read but doesn't expose any meaningful children.
+ """
+
+ def __init__(self, reader, name):
+ self._reader = reader
+ self._name = name
+
+ def iterdir(self):
+ return iter(())
+
+ def is_file(self):
+ return self._reader.is_resource(self.name)
+
+ def is_dir(self):
+ return not self.is_file()
+
+ def joinpath(self, other):
+ return CompatibilityFiles.OrphanPath(self.name, other)
+
+ @property
+ def name(self):
+ return self._name
+
+ def open(self, mode='r', *args, **kwargs):
+ return _io_wrapper(
+ self._reader.open_resource(self.name), mode, *args, **kwargs
+ )
+
+ class OrphanPath(abc.Traversable):
+ """
+ Orphan path, not tied to a module spec or resource reader.
+ Can't be read and doesn't expose any meaningful children.
+ """
+
+ def __init__(self, *path_parts):
+ if len(path_parts) < 1:
+ raise ValueError('Need at least one path part to construct a path')
+ self._path = path_parts
+
+ def iterdir(self):
+ return iter(())
+
+ def is_file(self):
+ return False
+
+ is_dir = is_file
+
+ def joinpath(self, other):
+ return CompatibilityFiles.OrphanPath(*self._path, other)
+
+ @property
+ def name(self):
+ return self._path[-1]
+
+ def open(self, mode='r', *args, **kwargs):
+ raise FileNotFoundError("Can't open orphan path")
+
+ def __init__(self, spec):
+ self.spec = spec
+
+ @property
+ def _reader(self):
+ with suppress(AttributeError):
+ return self.spec.loader.get_resource_reader(self.spec.name)
+
+ def _native(self):
+ """
+ Return the native reader if it supports files().
+ """
+ reader = self._reader
+ return reader if hasattr(reader, 'files') else self
+
+ def __getattr__(self, attr):
+ return getattr(self._reader, attr)
+
+ def files(self):
+ return CompatibilityFiles.SpecPath(self.spec, self._reader)
+
+
+def wrap_spec(package):
+ """
+ Construct a package spec with traversable compatibility
+ on the spec/loader/reader.
+ """
+ return SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
diff --git a/setuptools/_vendor/importlib_resources/_common.py b/setuptools/_vendor/importlib_resources/_common.py
new file mode 100644
index 0000000..a12e2c7
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/_common.py
@@ -0,0 +1,104 @@
+import os
+import pathlib
+import tempfile
+import functools
+import contextlib
+import types
+import importlib
+
+from typing import Union, Optional
+from .abc import ResourceReader, Traversable
+
+from ._compat import wrap_spec
+
+Package = Union[types.ModuleType, str]
+
+
+def files(package):
+ # type: (Package) -> Traversable
+ """
+ Get a Traversable resource from a package
+ """
+ return from_package(get_package(package))
+
+
+def get_resource_reader(package):
+ # type: (types.ModuleType) -> Optional[ResourceReader]
+ """
+ Return the package's loader if it's a ResourceReader.
+ """
+ # We can't use
+ # an issubclass() check here because apparently abc.'s __subclasscheck__()
+ # hook wants to create a weak reference to the object, but
+ # zipimport.zipimporter does not support weak references, resulting in a
+ # TypeError. That seems terrible.
+ spec = package.__spec__
+ reader = getattr(spec.loader, 'get_resource_reader', None) # type: ignore
+ if reader is None:
+ return None
+ return reader(spec.name) # type: ignore
+
+
+def resolve(cand):
+ # type: (Package) -> types.ModuleType
+ return cand if isinstance(cand, types.ModuleType) else importlib.import_module(cand)
+
+
+def get_package(package):
+ # type: (Package) -> types.ModuleType
+ """Take a package name or module object and return the module.
+
+ Raise an exception if the resolved module is not a package.
+ """
+ resolved = resolve(package)
+ if wrap_spec(resolved).submodule_search_locations is None:
+ raise TypeError(f'{package!r} is not a package')
+ return resolved
+
+
+def from_package(package):
+ """
+ Return a Traversable object for the given package.
+
+ """
+ spec = wrap_spec(package)
+ reader = spec.loader.get_resource_reader(spec.name)
+ return reader.files()
+
+
+@contextlib.contextmanager
+def _tempfile(reader, suffix=''):
+ # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
+ # blocks due to the need to close the temporary file to work on Windows
+ # properly.
+ fd, raw_path = tempfile.mkstemp(suffix=suffix)
+ try:
+ try:
+ os.write(fd, reader())
+ finally:
+ os.close(fd)
+ del reader
+ yield pathlib.Path(raw_path)
+ finally:
+ try:
+ os.remove(raw_path)
+ except FileNotFoundError:
+ pass
+
+
+@functools.singledispatch
+def as_file(path):
+ """
+ Given a Traversable object, return that object as a
+ path on the local file system in a context manager.
+ """
+ return _tempfile(path.read_bytes, suffix=path.name)
+
+
+@as_file.register(pathlib.Path)
+@contextlib.contextmanager
+def _(path):
+ """
+ Degenerate behavior for pathlib.Path objects.
+ """
+ yield path
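+
+
+# Illustrative usage sketch (the package and resource names are hypothetical):
+#
+#     ref = files('example_pkg') / 'data.txt'      # Traversable reference
+#     with as_file(ref) as path:                   # concrete filesystem path
+#         payload = path.read_bytes()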
diff --git a/setuptools/_vendor/importlib_resources/_compat.py b/setuptools/_vendor/importlib_resources/_compat.py
new file mode 100644
index 0000000..cb9fc82
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/_compat.py
@@ -0,0 +1,98 @@
+# flake8: noqa
+
+import abc
+import sys
+import pathlib
+from contextlib import suppress
+
+if sys.version_info >= (3, 10):
+ from zipfile import Path as ZipPath # type: ignore
+else:
+ from ..zipp import Path as ZipPath # type: ignore
+
+
+try:
+ from typing import runtime_checkable # type: ignore
+except ImportError:
+
+ def runtime_checkable(cls): # type: ignore
+ return cls
+
+
+try:
+ from typing import Protocol # type: ignore
+except ImportError:
+ Protocol = abc.ABC # type: ignore
+
+
+class TraversableResourcesLoader:
+ """
+ Adapt loaders to provide TraversableResources and other
+ compatibility.
+
+ Used primarily for Python 3.9 and earlier where the native
+ loaders do not yet implement TraversableResources.
+ """
+
+ def __init__(self, spec):
+ self.spec = spec
+
+ @property
+ def path(self):
+ return self.spec.origin
+
+ def get_resource_reader(self, name):
+ from . import readers, _adapters
+
+ def _zip_reader(spec):
+ with suppress(AttributeError):
+ return readers.ZipReader(spec.loader, spec.name)
+
+ def _namespace_reader(spec):
+ with suppress(AttributeError, ValueError):
+ return readers.NamespaceReader(spec.submodule_search_locations)
+
+ def _available_reader(spec):
+ with suppress(AttributeError):
+ return spec.loader.get_resource_reader(spec.name)
+
+ def _native_reader(spec):
+ reader = _available_reader(spec)
+ return reader if hasattr(reader, 'files') else None
+
+ def _file_reader(spec):
+ try:
+ path = pathlib.Path(self.path)
+ except TypeError:
+ return None
+ if path.exists():
+ return readers.FileReader(self)
+
+ return (
+ # native reader if it supplies 'files'
+ _native_reader(self.spec)
+ or
+ # local ZipReader if a zip module
+ _zip_reader(self.spec)
+ or
+ # local NamespaceReader if a namespace module
+ _namespace_reader(self.spec)
+ or
+ # local FileReader
+ _file_reader(self.spec)
+ # fallback - adapt the spec ResourceReader to TraversableReader
+ or _adapters.CompatibilityFiles(self.spec)
+ )
+
+
+def wrap_spec(package):
+ """
+ Construct a package spec with traversable compatibility
+ on the spec/loader/reader.
+
+ Supersedes _adapters.wrap_spec to use TraversableResourcesLoader
+ from above for older Python compatibility (<3.10).
+ """
+ from . import _adapters
+
+ return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
diff --git a/setuptools/_vendor/importlib_resources/_itertools.py b/setuptools/_vendor/importlib_resources/_itertools.py
new file mode 100644
index 0000000..cce0558
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/_itertools.py
@@ -0,0 +1,35 @@
+from itertools import filterfalse
+
+from typing import (
+ Callable,
+ Iterable,
+ Iterator,
+ Optional,
+ Set,
+ TypeVar,
+ Union,
+)
+
+# Type and type variable definitions
+_T = TypeVar('_T')
+_U = TypeVar('_U')
+
+
+def unique_everseen(
+ iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = None
+) -> Iterator[_T]:
+ "List unique elements, preserving order. Remember all elements ever seen."
+ # unique_everseen('AAAABBBCCDAABBB') --> A B C D
+ # unique_everseen('ABBCcAD', str.lower) --> A B C D
+ seen: Set[Union[_T, _U]] = set()
+ seen_add = seen.add
+ if key is None:
+ for element in filterfalse(seen.__contains__, iterable):
+ seen_add(element)
+ yield element
+ else:
+ for element in iterable:
+ k = key(element)
+ if k not in seen:
+ seen_add(k)
+ yield element
diff --git a/setuptools/_vendor/importlib_resources/_legacy.py b/setuptools/_vendor/importlib_resources/_legacy.py
new file mode 100644
index 0000000..1d5d3f1
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/_legacy.py
@@ -0,0 +1,121 @@
+import functools
+import os
+import pathlib
+import types
+import warnings
+
+from typing import Union, Iterable, ContextManager, BinaryIO, TextIO, Any
+
+from . import _common
+
+Package = Union[types.ModuleType, str]
+Resource = str
+
+
+def deprecated(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ warnings.warn(
+ f"{func.__name__} is deprecated. Use files() instead. "
+ "Refer to https://importlib-resources.readthedocs.io"
+ "/en/latest/using.html#migrating-from-legacy for migration advice.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return func(*args, **kwargs)
+
+ return wrapper
+
+
+def normalize_path(path):
+ # type: (Any) -> str
+ """Normalize a path by ensuring it is a string.
+
+ If the resulting string contains path separators, an exception is raised.
+ """
+ str_path = str(path)
+ parent, file_name = os.path.split(str_path)
+ if parent:
+ raise ValueError(f'{path!r} must be only a file name')
+ return file_name
+
+
+@deprecated
+def open_binary(package: Package, resource: Resource) -> BinaryIO:
+ """Return a file-like object opened for binary reading of the resource."""
+ return (_common.files(package) / normalize_path(resource)).open('rb')
+
+
+@deprecated
+def read_binary(package: Package, resource: Resource) -> bytes:
+ """Return the binary contents of the resource."""
+ return (_common.files(package) / normalize_path(resource)).read_bytes()
+
+
+@deprecated
+def open_text(
+ package: Package,
+ resource: Resource,
+ encoding: str = 'utf-8',
+ errors: str = 'strict',
+) -> TextIO:
+ """Return a file-like object opened for text reading of the resource."""
+ return (_common.files(package) / normalize_path(resource)).open(
+ 'r', encoding=encoding, errors=errors
+ )
+
+
+@deprecated
+def read_text(
+ package: Package,
+ resource: Resource,
+ encoding: str = 'utf-8',
+ errors: str = 'strict',
+) -> str:
+ """Return the decoded string of the resource.
+
+ The decoding-related arguments have the same semantics as those of
+ bytes.decode().
+ """
+ with open_text(package, resource, encoding, errors) as fp:
+ return fp.read()
+
+
+@deprecated
+def contents(package: Package) -> Iterable[str]:
+ """Return an iterable of entries in `package`.
+
+ Note that not all entries are resources. Specifically, directories are
+ not considered resources. Use `is_resource()` on each entry returned here
+ to check if it is a resource or not.
+ """
+ return [path.name for path in _common.files(package).iterdir()]
+
+
+@deprecated
+def is_resource(package: Package, name: str) -> bool:
+ """True if `name` is a resource inside `package`.
+
+ Directories are *not* resources.
+ """
+ resource = normalize_path(name)
+ return any(
+ traversable.name == resource and traversable.is_file()
+ for traversable in _common.files(package).iterdir()
+ )
+
+
+@deprecated
+def path(
+ package: Package,
+ resource: Resource,
+) -> ContextManager[pathlib.Path]:
+ """A context manager providing a file path object to the resource.
+
+ If the resource does not already exist on its own on the file system,
+ a temporary file will be created. If the file was created, the file
+ will be deleted upon exiting the context manager (no exception is
+ raised if the file was deleted prior to the context manager
+ exiting).
+ """
+ return _common.as_file(_common.files(package) / normalize_path(resource))
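+
+
+# Migration sketch (hypothetical package and resource names): the deprecated
+# call
+#
+#     read_text('example_pkg', 'data.txt')
+#
+# is equivalent to the files()-based form the deprecation warning points to:
+#
+#     (_common.files('example_pkg') / 'data.txt').read_text(encoding='utf-8')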
diff --git a/setuptools/_vendor/importlib_resources/abc.py b/setuptools/_vendor/importlib_resources/abc.py
new file mode 100644
index 0000000..d39dc1a
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/abc.py
@@ -0,0 +1,137 @@
+import abc
+from typing import BinaryIO, Iterable, Text
+
+from ._compat import runtime_checkable, Protocol
+
+
+class ResourceReader(metaclass=abc.ABCMeta):
+ """Abstract base class for loaders to provide resource reading support."""
+
+ @abc.abstractmethod
+ def open_resource(self, resource: Text) -> BinaryIO:
+ """Return an opened, file-like object for binary reading.
+
+ The 'resource' argument is expected to represent only a file name.
+ If the resource cannot be found, FileNotFoundError is raised.
+ """
+ # This deliberately raises FileNotFoundError instead of
+ # NotImplementedError so that if this method is accidentally called,
+ # it'll still do the right thing.
+ raise FileNotFoundError
+
+ @abc.abstractmethod
+ def resource_path(self, resource: Text) -> Text:
+ """Return the file system path to the specified resource.
+
+ The 'resource' argument is expected to represent only a file name.
+ If the resource does not exist on the file system, raise
+ FileNotFoundError.
+ """
+ # This deliberately raises FileNotFoundError instead of
+ # NotImplementedError so that if this method is accidentally called,
+ # it'll still do the right thing.
+ raise FileNotFoundError
+
+ @abc.abstractmethod
+ def is_resource(self, path: Text) -> bool:
+ """Return True if the named 'path' is a resource.
+
+ Files are resources, directories are not.
+ """
+ raise FileNotFoundError
+
+ @abc.abstractmethod
+ def contents(self) -> Iterable[str]:
+ """Return an iterable of entries in `package`."""
+ raise FileNotFoundError
+
+
+@runtime_checkable
+class Traversable(Protocol):
+ """
+ An object with a subset of pathlib.Path methods suitable for
+ traversing directories and opening files.
+ """
+
+ @abc.abstractmethod
+ def iterdir(self):
+ """
+ Yield Traversable objects in self
+ """
+
+ def read_bytes(self):
+ """
+ Read contents of self as bytes
+ """
+ with self.open('rb') as strm:
+ return strm.read()
+
+ def read_text(self, encoding=None):
+ """
+ Read contents of self as text
+ """
+ with self.open(encoding=encoding) as strm:
+ return strm.read()
+
+ @abc.abstractmethod
+ def is_dir(self) -> bool:
+ """
+ Return True if self is a directory
+ """
+
+ @abc.abstractmethod
+ def is_file(self) -> bool:
+ """
+ Return True if self is a file
+ """
+
+ @abc.abstractmethod
+ def joinpath(self, child):
+ """
+ Return Traversable child in self
+ """
+
+ def __truediv__(self, child):
+ """
+ Return Traversable child in self
+ """
+ return self.joinpath(child)
+
+ @abc.abstractmethod
+ def open(self, mode='r', *args, **kwargs):
+ """
+ mode may be 'r' or 'rb' to open as text or binary. Return a handle
+ suitable for reading (same as pathlib.Path.open).
+
+ When opening as text, accepts encoding parameters such as those
+ accepted by io.TextIOWrapper.
+ """
+
+ @abc.abstractproperty
+ def name(self) -> str:
+ """
+ The base name of this object without any parent references.
+ """
+
+
+class TraversableResources(ResourceReader):
+ """
+ The required interface for providing traversable
+ resources.
+ """
+
+ @abc.abstractmethod
+ def files(self):
+ """Return a Traversable object for the loaded package."""
+
+ def open_resource(self, resource):
+ return self.files().joinpath(resource).open('rb')
+
+ def resource_path(self, resource):
+ raise FileNotFoundError(resource)
+
+ def is_resource(self, path):
+ return self.files().joinpath(path).is_file()
+
+ def contents(self):
+ return (item.name for item in self.files().iterdir())
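+
+
+# A minimal, hypothetical sketch (the class and paths below are illustrative,
+# not part of this module): a provider that implements only ``files()``
+# inherits ``open_resource``, ``is_resource`` and ``contents`` from the
+# defaults above, since ``pathlib.Path`` already satisfies ``Traversable``.
+#
+#     import pathlib
+#
+#     class DirectoryResources(TraversableResources):
+#         def __init__(self, root):
+#             self.root = pathlib.Path(root)
+#
+#         def files(self):
+#             return self.root
+#
+#     reader = DirectoryResources('/usr/share/example')
+#     with reader.open_resource('data.txt') as stream:
+#         payload = stream.read()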
diff --git a/setuptools/_vendor/importlib_resources/readers.py b/setuptools/_vendor/importlib_resources/readers.py
new file mode 100644
index 0000000..f1190ca
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/readers.py
@@ -0,0 +1,122 @@
+import collections
+import pathlib
+import operator
+
+from . import abc
+
+from ._itertools import unique_everseen
+from ._compat import ZipPath
+
+
+def remove_duplicates(items):
+ return iter(collections.OrderedDict.fromkeys(items))
+
+
+class FileReader(abc.TraversableResources):
+ def __init__(self, loader):
+ self.path = pathlib.Path(loader.path).parent
+
+ def resource_path(self, resource):
+ """
+ Return the file system path to prevent
+ `resources.path()` from creating a temporary
+ copy.
+ """
+ return str(self.path.joinpath(resource))
+
+ def files(self):
+ return self.path
+
+
+class ZipReader(abc.TraversableResources):
+ def __init__(self, loader, module):
+ _, _, name = module.rpartition('.')
+ self.prefix = loader.prefix.replace('\\', '/') + name + '/'
+ self.archive = loader.archive
+
+ def open_resource(self, resource):
+ try:
+ return super().open_resource(resource)
+ except KeyError as exc:
+ raise FileNotFoundError(exc.args[0])
+
+ def is_resource(self, path):
+ # workaround for `zipfile.Path.is_file` returning true
+ # for non-existent paths.
+ target = self.files().joinpath(path)
+ return target.is_file() and target.exists()
+
+ def files(self):
+ return ZipPath(self.archive, self.prefix)
+
+
+class MultiplexedPath(abc.Traversable):
+ """
+ Given a series of Traversable objects, implement a merged
+ version of the interface across all objects. Useful for
+ namespace packages which may be multihomed at a single
+ name.
+ """
+
+ def __init__(self, *paths):
+ self._paths = list(map(pathlib.Path, remove_duplicates(paths)))
+ if not self._paths:
+ message = 'MultiplexedPath must contain at least one path'
+ raise FileNotFoundError(message)
+ if not all(path.is_dir() for path in self._paths):
+ raise NotADirectoryError('MultiplexedPath only supports directories')
+
+ def iterdir(self):
+ files = (file for path in self._paths for file in path.iterdir())
+ return unique_everseen(files, key=operator.attrgetter('name'))
+
+ def read_bytes(self):
+ raise FileNotFoundError(f'{self} is not a file')
+
+ def read_text(self, *args, **kwargs):
+ raise FileNotFoundError(f'{self} is not a file')
+
+ def is_dir(self):
+ return True
+
+ def is_file(self):
+ return False
+
+ def joinpath(self, child):
+ # first try to find child in current paths
+ for file in self.iterdir():
+ if file.name == child:
+ return file
+ # if it does not exist, construct it with the first path
+ return self._paths[0] / child
+
+ __truediv__ = joinpath
+
+ def open(self, *args, **kwargs):
+ raise FileNotFoundError(f'{self} is not a file')
+
+ @property
+ def name(self):
+ return self._paths[0].name
+
+ def __repr__(self):
+ paths = ', '.join(f"'{path}'" for path in self._paths)
+ return f'MultiplexedPath({paths})'
+
+
+class NamespaceReader(abc.TraversableResources):
+ def __init__(self, namespace_path):
+ if 'NamespacePath' not in str(namespace_path):
+ raise ValueError('Invalid path')
+ self.path = MultiplexedPath(*list(namespace_path))
+
+ def resource_path(self, resource):
+ """
+ Return the file system path to prevent
+ `resources.path()` from creating a temporary
+ copy.
+ """
+ return str(self.path.joinpath(resource))
+
+ def files(self):
+ return self.path
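+
+
+# Illustrative sketch only (the directory names are hypothetical):
+# MultiplexedPath presents several existing directories as one Traversable,
+# which is how a namespace package spread across multiple sys.path entries
+# is exposed.
+#
+#     merged = MultiplexedPath('/site-a/pkg', '/site-b/pkg')
+#     names = {entry.name for entry in merged.iterdir()}  # union of both dirs
+#     cfg = merged / 'settings.cfg'  # an existing entry wins; otherwise the
+#                                    # child is rooted at the first path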
diff --git a/setuptools/_vendor/importlib_resources/simple.py b/setuptools/_vendor/importlib_resources/simple.py
new file mode 100644
index 0000000..da073cb
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/simple.py
@@ -0,0 +1,116 @@
+"""
+Interface adapters for low-level readers.
+"""
+
+import abc
+import io
+import itertools
+from typing import BinaryIO, List
+
+from .abc import Traversable, TraversableResources
+
+
+class SimpleReader(abc.ABC):
+ """
+ The minimum, low-level interface required from a resource
+ provider.
+ """
+
+ @abc.abstractproperty
+ def package(self):
+ # type: () -> str
+ """
+ The name of the package for which this reader loads resources.
+ """
+
+ @abc.abstractmethod
+ def children(self):
+ # type: () -> List['SimpleReader']
+ """
+ Obtain an iterable of SimpleReader for available
+ child containers (e.g. directories).
+ """
+
+ @abc.abstractmethod
+ def resources(self):
+ # type: () -> List[str]
+ """
+ Obtain available named resources for this virtual package.
+ """
+
+ @abc.abstractmethod
+ def open_binary(self, resource):
+ # type: (str) -> BinaryIO
+ """
+ Obtain a File-like for a named resource.
+ """
+
+ @property
+ def name(self):
+ return self.package.split('.')[-1]
+
+
+class ResourceHandle(Traversable):
+ """
+ Handle to a named resource in a ResourceReader.
+ """
+
+ def __init__(self, parent, name):
+ # type: (ResourceContainer, str) -> None
+ self.parent = parent
+ self.name = name # type: ignore
+
+ def is_file(self):
+ return True
+
+ def is_dir(self):
+ return False
+
+ def open(self, mode='r', *args, **kwargs):
+ stream = self.parent.reader.open_binary(self.name)
+ if 'b' not in mode:
+            stream = io.TextIOWrapper(stream, *args, **kwargs)
+ return stream
+
+ def joinpath(self, name):
+ raise RuntimeError("Cannot traverse into a resource")
+
+
+class ResourceContainer(Traversable):
+ """
+ Traversable container for a package's resources via its reader.
+ """
+
+ def __init__(self, reader):
+ # type: (SimpleReader) -> None
+ self.reader = reader
+
+ def is_dir(self):
+ return True
+
+ def is_file(self):
+ return False
+
+ def iterdir(self):
+        files = (ResourceHandle(self, name) for name in self.reader.resources())
+ dirs = map(ResourceContainer, self.reader.children())
+ return itertools.chain(files, dirs)
+
+ def open(self, *args, **kwargs):
+ raise IsADirectoryError()
+
+ def joinpath(self, name):
+ return next(
+ traversable for traversable in self.iterdir() if traversable.name == name
+ )
+
+
+class TraversableReader(TraversableResources, SimpleReader):
+ """
+ A TraversableResources based on SimpleReader. Resource providers
+ may derive from this class to provide the TraversableResources
+ interface by supplying the SimpleReader interface.
+ """
+
+ def files(self):
+ return ResourceContainer(self)
diff --git a/setuptools/_vendor/importlib_resources/tests/__init__.py b/setuptools/_vendor/importlib_resources/tests/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/__init__.py
diff --git a/setuptools/_vendor/importlib_resources/tests/_compat.py b/setuptools/_vendor/importlib_resources/tests/_compat.py
new file mode 100644
index 0000000..4c99cff
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/_compat.py
@@ -0,0 +1,19 @@
+import os
+
+
+try:
+ from test.support import import_helper # type: ignore
+except ImportError:
+ # Python 3.9 and earlier
+ class import_helper: # type: ignore
+ from test.support import modules_setup, modules_cleanup
+
+
+try:
+ # Python 3.10
+ from test.support.os_helper import unlink
+except ImportError:
+ from test.support import unlink as _unlink
+
+ def unlink(target):
+ return _unlink(os.fspath(target))
diff --git a/setuptools/_vendor/importlib_resources/tests/data01/__init__.py b/setuptools/_vendor/importlib_resources/tests/data01/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/data01/__init__.py
diff --git a/setuptools/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py b/setuptools/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py
diff --git a/setuptools/_vendor/importlib_resources/tests/data02/__init__.py b/setuptools/_vendor/importlib_resources/tests/data02/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/data02/__init__.py
diff --git a/setuptools/_vendor/importlib_resources/tests/data02/one/__init__.py b/setuptools/_vendor/importlib_resources/tests/data02/one/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/data02/one/__init__.py
diff --git a/setuptools/_vendor/importlib_resources/tests/data02/one/resource1.txt b/setuptools/_vendor/importlib_resources/tests/data02/one/resource1.txt
new file mode 100644
index 0000000..61a813e
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/data02/one/resource1.txt
@@ -0,0 +1 @@
+one resource
diff --git a/setuptools/_vendor/importlib_resources/tests/data02/two/__init__.py b/setuptools/_vendor/importlib_resources/tests/data02/two/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/data02/two/__init__.py
diff --git a/setuptools/_vendor/importlib_resources/tests/data02/two/resource2.txt b/setuptools/_vendor/importlib_resources/tests/data02/two/resource2.txt
new file mode 100644
index 0000000..a80ce46
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/data02/two/resource2.txt
@@ -0,0 +1 @@
+two resource
diff --git a/setuptools/_vendor/importlib_resources/tests/test_compatibilty_files.py b/setuptools/_vendor/importlib_resources/tests/test_compatibilty_files.py
new file mode 100644
index 0000000..d92c7c5
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/test_compatibilty_files.py
@@ -0,0 +1,102 @@
+import io
+import unittest
+
+import importlib_resources as resources
+
+from importlib_resources._adapters import (
+ CompatibilityFiles,
+ wrap_spec,
+)
+
+from . import util
+
+
+class CompatibilityFilesTests(unittest.TestCase):
+ @property
+ def package(self):
+ bytes_data = io.BytesIO(b'Hello, world!')
+ return util.create_package(
+ file=bytes_data,
+ path='some_path',
+ contents=('a', 'b', 'c'),
+ )
+
+ @property
+ def files(self):
+ return resources.files(self.package)
+
+ def test_spec_path_iter(self):
+ self.assertEqual(
+ sorted(path.name for path in self.files.iterdir()),
+ ['a', 'b', 'c'],
+ )
+
+ def test_child_path_iter(self):
+ self.assertEqual(list((self.files / 'a').iterdir()), [])
+
+ def test_orphan_path_iter(self):
+ self.assertEqual(list((self.files / 'a' / 'a').iterdir()), [])
+ self.assertEqual(list((self.files / 'a' / 'a' / 'a').iterdir()), [])
+
+ def test_spec_path_is(self):
+ self.assertFalse(self.files.is_file())
+ self.assertFalse(self.files.is_dir())
+
+ def test_child_path_is(self):
+ self.assertTrue((self.files / 'a').is_file())
+ self.assertFalse((self.files / 'a').is_dir())
+
+ def test_orphan_path_is(self):
+ self.assertFalse((self.files / 'a' / 'a').is_file())
+ self.assertFalse((self.files / 'a' / 'a').is_dir())
+ self.assertFalse((self.files / 'a' / 'a' / 'a').is_file())
+ self.assertFalse((self.files / 'a' / 'a' / 'a').is_dir())
+
+ def test_spec_path_name(self):
+ self.assertEqual(self.files.name, 'testingpackage')
+
+ def test_child_path_name(self):
+ self.assertEqual((self.files / 'a').name, 'a')
+
+ def test_orphan_path_name(self):
+ self.assertEqual((self.files / 'a' / 'b').name, 'b')
+ self.assertEqual((self.files / 'a' / 'b' / 'c').name, 'c')
+
+ def test_spec_path_open(self):
+ self.assertEqual(self.files.read_bytes(), b'Hello, world!')
+ self.assertEqual(self.files.read_text(), 'Hello, world!')
+
+ def test_child_path_open(self):
+ self.assertEqual((self.files / 'a').read_bytes(), b'Hello, world!')
+ self.assertEqual((self.files / 'a').read_text(), 'Hello, world!')
+
+ def test_orphan_path_open(self):
+ with self.assertRaises(FileNotFoundError):
+ (self.files / 'a' / 'b').read_bytes()
+ with self.assertRaises(FileNotFoundError):
+ (self.files / 'a' / 'b' / 'c').read_bytes()
+
+ def test_open_invalid_mode(self):
+ with self.assertRaises(ValueError):
+ self.files.open('0')
+
+ def test_orphan_path_invalid(self):
+ with self.assertRaises(ValueError):
+ CompatibilityFiles.OrphanPath()
+
+ def test_wrap_spec(self):
+ spec = wrap_spec(self.package)
+ self.assertIsInstance(spec.loader.get_resource_reader(None), CompatibilityFiles)
+
+
+class CompatibilityFilesNoReaderTests(unittest.TestCase):
+ @property
+ def package(self):
+ return util.create_package_from_loader(None)
+
+ @property
+ def files(self):
+ return resources.files(self.package)
+
+ def test_spec_path_joinpath(self):
+ self.assertIsInstance(self.files / 'a', CompatibilityFiles.OrphanPath)
diff --git a/setuptools/_vendor/importlib_resources/tests/test_contents.py b/setuptools/_vendor/importlib_resources/tests/test_contents.py
new file mode 100644
index 0000000..525568e
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/test_contents.py
@@ -0,0 +1,43 @@
+import unittest
+import importlib_resources as resources
+
+from . import data01
+from . import util
+
+
+class ContentsTests:
+ expected = {
+ '__init__.py',
+ 'binary.file',
+ 'subdirectory',
+ 'utf-16.file',
+ 'utf-8.file',
+ }
+
+ def test_contents(self):
+ contents = {path.name for path in resources.files(self.data).iterdir()}
+ assert self.expected <= contents
+
+
+class ContentsDiskTests(ContentsTests, unittest.TestCase):
+ def setUp(self):
+ self.data = data01
+
+
+class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase):
+ pass
+
+
+class ContentsNamespaceTests(ContentsTests, unittest.TestCase):
+ expected = {
+ # no __init__ because of namespace design
+ # no subdirectory as incidental difference in fixture
+ 'binary.file',
+ 'utf-16.file',
+ 'utf-8.file',
+ }
+
+ def setUp(self):
+ from . import namespacedata01
+
+ self.data = namespacedata01
diff --git a/setuptools/_vendor/importlib_resources/tests/test_files.py b/setuptools/_vendor/importlib_resources/tests/test_files.py
new file mode 100644
index 0000000..2676b49
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/test_files.py
@@ -0,0 +1,46 @@
+import typing
+import unittest
+
+import importlib_resources as resources
+from importlib_resources.abc import Traversable
+from . import data01
+from . import util
+
+
+class FilesTests:
+ def test_read_bytes(self):
+ files = resources.files(self.data)
+ actual = files.joinpath('utf-8.file').read_bytes()
+ assert actual == b'Hello, UTF-8 world!\n'
+
+ def test_read_text(self):
+ files = resources.files(self.data)
+ actual = files.joinpath('utf-8.file').read_text(encoding='utf-8')
+ assert actual == 'Hello, UTF-8 world!\n'
+
+ @unittest.skipUnless(
+ hasattr(typing, 'runtime_checkable'),
+ "Only suitable when typing supports runtime_checkable",
+ )
+ def test_traversable(self):
+ assert isinstance(resources.files(self.data), Traversable)
+
+
+class OpenDiskTests(FilesTests, unittest.TestCase):
+ def setUp(self):
+ self.data = data01
+
+
+class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase):
+ pass
+
+
+class OpenNamespaceTests(FilesTests, unittest.TestCase):
+ def setUp(self):
+ from . import namespacedata01
+
+ self.data = namespacedata01
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/setuptools/_vendor/importlib_resources/tests/test_open.py b/setuptools/_vendor/importlib_resources/tests/test_open.py
new file mode 100644
index 0000000..87b42c3
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/test_open.py
@@ -0,0 +1,81 @@
+import unittest
+
+import importlib_resources as resources
+from . import data01
+from . import util
+
+
+class CommonBinaryTests(util.CommonTests, unittest.TestCase):
+ def execute(self, package, path):
+ target = resources.files(package).joinpath(path)
+ with target.open('rb'):
+ pass
+
+
+class CommonTextTests(util.CommonTests, unittest.TestCase):
+ def execute(self, package, path):
+ target = resources.files(package).joinpath(path)
+ with target.open():
+ pass
+
+
+class OpenTests:
+ def test_open_binary(self):
+ target = resources.files(self.data) / 'binary.file'
+ with target.open('rb') as fp:
+ result = fp.read()
+ self.assertEqual(result, b'\x00\x01\x02\x03')
+
+ def test_open_text_default_encoding(self):
+ target = resources.files(self.data) / 'utf-8.file'
+ with target.open() as fp:
+ result = fp.read()
+ self.assertEqual(result, 'Hello, UTF-8 world!\n')
+
+ def test_open_text_given_encoding(self):
+ target = resources.files(self.data) / 'utf-16.file'
+ with target.open(encoding='utf-16', errors='strict') as fp:
+ result = fp.read()
+ self.assertEqual(result, 'Hello, UTF-16 world!\n')
+
+ def test_open_text_with_errors(self):
+ # Raises UnicodeError without the 'errors' argument.
+ target = resources.files(self.data) / 'utf-16.file'
+ with target.open(encoding='utf-8', errors='strict') as fp:
+ self.assertRaises(UnicodeError, fp.read)
+ with target.open(encoding='utf-8', errors='ignore') as fp:
+ result = fp.read()
+ self.assertEqual(
+ result,
+ 'H\x00e\x00l\x00l\x00o\x00,\x00 '
+ '\x00U\x00T\x00F\x00-\x001\x006\x00 '
+ '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00',
+ )
+
+ def test_open_binary_FileNotFoundError(self):
+ target = resources.files(self.data) / 'does-not-exist'
+ self.assertRaises(FileNotFoundError, target.open, 'rb')
+
+ def test_open_text_FileNotFoundError(self):
+ target = resources.files(self.data) / 'does-not-exist'
+ self.assertRaises(FileNotFoundError, target.open)
+
+
+class OpenDiskTests(OpenTests, unittest.TestCase):
+ def setUp(self):
+ self.data = data01
+
+
+class OpenDiskNamespaceTests(OpenTests, unittest.TestCase):
+ def setUp(self):
+ from . import namespacedata01
+
+ self.data = namespacedata01
+
+
+class OpenZipTests(OpenTests, util.ZipSetup, unittest.TestCase):
+ pass
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/setuptools/_vendor/importlib_resources/tests/test_path.py b/setuptools/_vendor/importlib_resources/tests/test_path.py
new file mode 100644
index 0000000..4f4d394
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/test_path.py
@@ -0,0 +1,64 @@
+import io
+import unittest
+
+import importlib_resources as resources
+from . import data01
+from . import util
+
+
+class CommonTests(util.CommonTests, unittest.TestCase):
+ def execute(self, package, path):
+ with resources.as_file(resources.files(package).joinpath(path)):
+ pass
+
+
+class PathTests:
+ def test_reading(self):
+ # Path should be readable.
+ # Test also implicitly verifies the returned object is a pathlib.Path
+ # instance.
+ target = resources.files(self.data) / 'utf-8.file'
+ with resources.as_file(target) as path:
+ self.assertTrue(path.name.endswith("utf-8.file"), repr(path))
+ # pathlib.Path.read_text() was introduced in Python 3.5.
+ with path.open('r', encoding='utf-8') as file:
+ text = file.read()
+ self.assertEqual('Hello, UTF-8 world!\n', text)
+
+
+class PathDiskTests(PathTests, unittest.TestCase):
+ data = data01
+
+ def test_natural_path(self):
+ """
+ Guarantee the internal implementation detail that
+ file-system-backed resources do not get the tempdir
+ treatment.
+ """
+ target = resources.files(self.data) / 'utf-8.file'
+ with resources.as_file(target) as path:
+ assert 'data' in str(path)
+
+
+class PathMemoryTests(PathTests, unittest.TestCase):
+ def setUp(self):
+ file = io.BytesIO(b'Hello, UTF-8 world!\n')
+ self.addCleanup(file.close)
+ self.data = util.create_package(
+ file=file, path=FileNotFoundError("package exists only in memory")
+ )
+ self.data.__spec__.origin = None
+ self.data.__spec__.has_location = False
+
+
+class PathZipTests(PathTests, util.ZipSetup, unittest.TestCase):
+ def test_remove_in_context_manager(self):
+ # It is not an error if the file that was temporarily stashed on the
+ # file system is removed inside the `with` stanza.
+ target = resources.files(self.data) / 'utf-8.file'
+ with resources.as_file(target) as path:
+ path.unlink()
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/setuptools/_vendor/importlib_resources/tests/test_read.py b/setuptools/_vendor/importlib_resources/tests/test_read.py
new file mode 100644
index 0000000..41dd6db
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/test_read.py
@@ -0,0 +1,76 @@
+import unittest
+import importlib_resources as resources
+
+from . import data01
+from . import util
+from importlib import import_module
+
+
+class CommonBinaryTests(util.CommonTests, unittest.TestCase):
+ def execute(self, package, path):
+ resources.files(package).joinpath(path).read_bytes()
+
+
+class CommonTextTests(util.CommonTests, unittest.TestCase):
+ def execute(self, package, path):
+ resources.files(package).joinpath(path).read_text()
+
+
+class ReadTests:
+ def test_read_bytes(self):
+ result = resources.files(self.data).joinpath('binary.file').read_bytes()
+ self.assertEqual(result, b'\0\1\2\3')
+
+ def test_read_text_default_encoding(self):
+ result = resources.files(self.data).joinpath('utf-8.file').read_text()
+ self.assertEqual(result, 'Hello, UTF-8 world!\n')
+
+ def test_read_text_given_encoding(self):
+ result = (
+ resources.files(self.data)
+ .joinpath('utf-16.file')
+ .read_text(encoding='utf-16')
+ )
+ self.assertEqual(result, 'Hello, UTF-16 world!\n')
+
+ def test_read_text_with_errors(self):
+ # Raises UnicodeError without the 'errors' argument.
+ target = resources.files(self.data) / 'utf-16.file'
+ self.assertRaises(UnicodeError, target.read_text, encoding='utf-8')
+ result = target.read_text(encoding='utf-8', errors='ignore')
+ self.assertEqual(
+ result,
+ 'H\x00e\x00l\x00l\x00o\x00,\x00 '
+ '\x00U\x00T\x00F\x00-\x001\x006\x00 '
+ '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00',
+ )
+
+
+class ReadDiskTests(ReadTests, unittest.TestCase):
+ data = data01
+
+
+class ReadZipTests(ReadTests, util.ZipSetup, unittest.TestCase):
+ def test_read_submodule_resource(self):
+ submodule = import_module('ziptestdata.subdirectory')
+ result = resources.files(submodule).joinpath('binary.file').read_bytes()
+ self.assertEqual(result, b'\0\1\2\3')
+
+ def test_read_submodule_resource_by_name(self):
+ result = (
+ resources.files('ziptestdata.subdirectory')
+ .joinpath('binary.file')
+ .read_bytes()
+ )
+ self.assertEqual(result, b'\0\1\2\3')
+
+
+class ReadNamespaceTests(ReadTests, unittest.TestCase):
+ def setUp(self):
+ from . import namespacedata01
+
+ self.data = namespacedata01
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/setuptools/_vendor/importlib_resources/tests/test_reader.py b/setuptools/_vendor/importlib_resources/tests/test_reader.py
new file mode 100644
index 0000000..16841a5
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/test_reader.py
@@ -0,0 +1,128 @@
+import os.path
+import sys
+import pathlib
+import unittest
+
+from importlib import import_module
+from importlib_resources.readers import MultiplexedPath, NamespaceReader
+
+
+class MultiplexedPathTest(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ path = pathlib.Path(__file__).parent / 'namespacedata01'
+ cls.folder = str(path)
+
+ def test_init_no_paths(self):
+ with self.assertRaises(FileNotFoundError):
+ MultiplexedPath()
+
+ def test_init_file(self):
+ with self.assertRaises(NotADirectoryError):
+ MultiplexedPath(os.path.join(self.folder, 'binary.file'))
+
+ def test_iterdir(self):
+ contents = {path.name for path in MultiplexedPath(self.folder).iterdir()}
+ try:
+ contents.remove('__pycache__')
+ except (KeyError, ValueError):
+ pass
+ self.assertEqual(contents, {'binary.file', 'utf-16.file', 'utf-8.file'})
+
+ def test_iterdir_duplicate(self):
+ data01 = os.path.abspath(os.path.join(__file__, '..', 'data01'))
+ contents = {
+ path.name for path in MultiplexedPath(self.folder, data01).iterdir()
+ }
+ for remove in ('__pycache__', '__init__.pyc'):
+ try:
+ contents.remove(remove)
+ except (KeyError, ValueError):
+ pass
+ self.assertEqual(
+ contents,
+ {'__init__.py', 'binary.file', 'subdirectory', 'utf-16.file', 'utf-8.file'},
+ )
+
+ def test_is_dir(self):
+ self.assertEqual(MultiplexedPath(self.folder).is_dir(), True)
+
+ def test_is_file(self):
+ self.assertEqual(MultiplexedPath(self.folder).is_file(), False)
+
+ def test_open_file(self):
+ path = MultiplexedPath(self.folder)
+ with self.assertRaises(FileNotFoundError):
+ path.read_bytes()
+ with self.assertRaises(FileNotFoundError):
+ path.read_text()
+ with self.assertRaises(FileNotFoundError):
+ path.open()
+
+ def test_join_path(self):
+ prefix = os.path.abspath(os.path.join(__file__, '..'))
+ data01 = os.path.join(prefix, 'data01')
+ path = MultiplexedPath(self.folder, data01)
+ self.assertEqual(
+ str(path.joinpath('binary.file'))[len(prefix) + 1 :],
+ os.path.join('namespacedata01', 'binary.file'),
+ )
+ self.assertEqual(
+ str(path.joinpath('subdirectory'))[len(prefix) + 1 :],
+ os.path.join('data01', 'subdirectory'),
+ )
+ self.assertEqual(
+ str(path.joinpath('imaginary'))[len(prefix) + 1 :],
+ os.path.join('namespacedata01', 'imaginary'),
+ )
+
+ def test_repr(self):
+ self.assertEqual(
+ repr(MultiplexedPath(self.folder)),
+ f"MultiplexedPath('{self.folder}')",
+ )
+
+ def test_name(self):
+ self.assertEqual(
+ MultiplexedPath(self.folder).name,
+ os.path.basename(self.folder),
+ )
+
+
+class NamespaceReaderTest(unittest.TestCase):
+ site_dir = str(pathlib.Path(__file__).parent)
+
+ @classmethod
+ def setUpClass(cls):
+ sys.path.append(cls.site_dir)
+
+ @classmethod
+ def tearDownClass(cls):
+ sys.path.remove(cls.site_dir)
+
+ def test_init_error(self):
+ with self.assertRaises(ValueError):
+ NamespaceReader(['path1', 'path2'])
+
+ def test_resource_path(self):
+ namespacedata01 = import_module('namespacedata01')
+ reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations)
+
+ root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01'))
+ self.assertEqual(
+ reader.resource_path('binary.file'), os.path.join(root, 'binary.file')
+ )
+ self.assertEqual(
+ reader.resource_path('imaginary'), os.path.join(root, 'imaginary')
+ )
+
+ def test_files(self):
+ namespacedata01 = import_module('namespacedata01')
+ reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations)
+ root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01'))
+ self.assertIsInstance(reader.files(), MultiplexedPath)
+ self.assertEqual(repr(reader.files()), f"MultiplexedPath('{root}')")
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/setuptools/_vendor/importlib_resources/tests/test_resource.py b/setuptools/_vendor/importlib_resources/tests/test_resource.py
new file mode 100644
index 0000000..5affd8b
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/test_resource.py
@@ -0,0 +1,252 @@
+import sys
+import unittest
+import importlib_resources as resources
+import uuid
+import pathlib
+
+from . import data01
+from . import zipdata01, zipdata02
+from . import util
+from importlib import import_module
+from ._compat import import_helper, unlink
+
+
+class ResourceTests:
+ # Subclasses are expected to set the `data` attribute.
+
+ def test_is_file_exists(self):
+ target = resources.files(self.data) / 'binary.file'
+ self.assertTrue(target.is_file())
+
+ def test_is_file_missing(self):
+ target = resources.files(self.data) / 'not-a-file'
+ self.assertFalse(target.is_file())
+
+ def test_is_dir(self):
+ target = resources.files(self.data) / 'subdirectory'
+ self.assertFalse(target.is_file())
+ self.assertTrue(target.is_dir())
+
+
+class ResourceDiskTests(ResourceTests, unittest.TestCase):
+ def setUp(self):
+ self.data = data01
+
+
+class ResourceZipTests(ResourceTests, util.ZipSetup, unittest.TestCase):
+ pass
+
+
+def names(traversable):
+ return {item.name for item in traversable.iterdir()}
+
+
+class ResourceLoaderTests(unittest.TestCase):
+ def test_resource_contents(self):
+ package = util.create_package(
+ file=data01, path=data01.__file__, contents=['A', 'B', 'C']
+ )
+ self.assertEqual(names(resources.files(package)), {'A', 'B', 'C'})
+
+ def test_is_file(self):
+ package = util.create_package(
+ file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F']
+ )
+ self.assertTrue(resources.files(package).joinpath('B').is_file())
+
+ def test_is_dir(self):
+ package = util.create_package(
+ file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F']
+ )
+ self.assertTrue(resources.files(package).joinpath('D').is_dir())
+
+ def test_resource_missing(self):
+ package = util.create_package(
+ file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F']
+ )
+ self.assertFalse(resources.files(package).joinpath('Z').is_file())
+
+
+class ResourceCornerCaseTests(unittest.TestCase):
+ def test_package_has_no_reader_fallback(self):
+ # Test odd ball packages which:
+ # 1. Do not have a ResourceReader as a loader
+ # 2. Are not on the file system
+ # 3. Are not in a zip file
+ module = util.create_package(
+ file=data01, path=data01.__file__, contents=['A', 'B', 'C']
+ )
+ # Give the module a dummy loader.
+ module.__loader__ = object()
+ # Give the module a dummy origin.
+ module.__file__ = '/path/which/shall/not/be/named'
+ module.__spec__.loader = module.__loader__
+ module.__spec__.origin = module.__file__
+ self.assertFalse(resources.files(module).joinpath('A').is_file())
+
+
+class ResourceFromZipsTest01(util.ZipSetupBase, unittest.TestCase):
+ ZIP_MODULE = zipdata01 # type: ignore
+
+ def test_is_submodule_resource(self):
+ submodule = import_module('ziptestdata.subdirectory')
+ self.assertTrue(resources.files(submodule).joinpath('binary.file').is_file())
+
+ def test_read_submodule_resource_by_name(self):
+ self.assertTrue(
+ resources.files('ziptestdata.subdirectory')
+ .joinpath('binary.file')
+ .is_file()
+ )
+
+ def test_submodule_contents(self):
+ submodule = import_module('ziptestdata.subdirectory')
+ self.assertEqual(
+ names(resources.files(submodule)), {'__init__.py', 'binary.file'}
+ )
+
+ def test_submodule_contents_by_name(self):
+ self.assertEqual(
+ names(resources.files('ziptestdata.subdirectory')),
+ {'__init__.py', 'binary.file'},
+ )
+
+
+class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase):
+ ZIP_MODULE = zipdata02 # type: ignore
+
+ def test_unrelated_contents(self):
+ """
+        Test that a zip with two unrelated subpackages returns
+        distinct resources. Ref python/importlib_resources#44.
+ """
+ self.assertEqual(
+ names(resources.files('ziptestdata.one')),
+ {'__init__.py', 'resource1.txt'},
+ )
+ self.assertEqual(
+ names(resources.files('ziptestdata.two')),
+ {'__init__.py', 'resource2.txt'},
+ )
+
+
+class DeletingZipsTest(unittest.TestCase):
+    """Accessing resources in a zip file should not keep an open
+    reference to the zip.
+ """
+
+ ZIP_MODULE = zipdata01
+
+ def setUp(self):
+ modules = import_helper.modules_setup()
+ self.addCleanup(import_helper.modules_cleanup, *modules)
+
+ data_path = pathlib.Path(self.ZIP_MODULE.__file__)
+ data_dir = data_path.parent
+ self.source_zip_path = data_dir / 'ziptestdata.zip'
+ self.zip_path = pathlib.Path(f'{uuid.uuid4()}.zip').absolute()
+ self.zip_path.write_bytes(self.source_zip_path.read_bytes())
+ sys.path.append(str(self.zip_path))
+ self.data = import_module('ziptestdata')
+
+ def tearDown(self):
+ try:
+ sys.path.remove(str(self.zip_path))
+ except ValueError:
+ pass
+
+ try:
+ del sys.path_importer_cache[str(self.zip_path)]
+ del sys.modules[self.data.__name__]
+ except KeyError:
+ pass
+
+ try:
+ unlink(self.zip_path)
+ except OSError:
+ # If the test fails, this will probably fail too
+ pass
+
+ def test_iterdir_does_not_keep_open(self):
+ c = [item.name for item in resources.files('ziptestdata').iterdir()]
+ self.zip_path.unlink()
+ del c
+
+ def test_is_file_does_not_keep_open(self):
+ c = resources.files('ziptestdata').joinpath('binary.file').is_file()
+ self.zip_path.unlink()
+ del c
+
+ def test_is_file_failure_does_not_keep_open(self):
+ c = resources.files('ziptestdata').joinpath('not-present').is_file()
+ self.zip_path.unlink()
+ del c
+
+ @unittest.skip("Desired but not supported.")
+ def test_as_file_does_not_keep_open(self): # pragma: no cover
+ c = resources.as_file(resources.files('ziptestdata') / 'binary.file')
+ self.zip_path.unlink()
+ del c
+
+ def test_entered_path_does_not_keep_open(self):
+ # This is what certifi does on import to make its bundle
+ # available for the process duration.
+ c = resources.as_file(
+ resources.files('ziptestdata') / 'binary.file'
+ ).__enter__()
+ self.zip_path.unlink()
+ del c
+
+ def test_read_binary_does_not_keep_open(self):
+ c = resources.files('ziptestdata').joinpath('binary.file').read_bytes()
+ self.zip_path.unlink()
+ del c
+
+ def test_read_text_does_not_keep_open(self):
+ c = resources.files('ziptestdata').joinpath('utf-8.file').read_text()
+ self.zip_path.unlink()
+ del c
+
+
+class ResourceFromNamespaceTest01(unittest.TestCase):
+ site_dir = str(pathlib.Path(__file__).parent)
+
+ @classmethod
+ def setUpClass(cls):
+ sys.path.append(cls.site_dir)
+
+ @classmethod
+ def tearDownClass(cls):
+ sys.path.remove(cls.site_dir)
+
+ def test_is_submodule_resource(self):
+ self.assertTrue(
+ resources.files(import_module('namespacedata01'))
+ .joinpath('binary.file')
+ .is_file()
+ )
+
+ def test_read_submodule_resource_by_name(self):
+ self.assertTrue(
+ resources.files('namespacedata01').joinpath('binary.file').is_file()
+ )
+
+ def test_submodule_contents(self):
+ contents = names(resources.files(import_module('namespacedata01')))
+ try:
+ contents.remove('__pycache__')
+ except KeyError:
+ pass
+ self.assertEqual(contents, {'binary.file', 'utf-8.file', 'utf-16.file'})
+
+ def test_submodule_contents_by_name(self):
+ contents = names(resources.files('namespacedata01'))
+ try:
+ contents.remove('__pycache__')
+ except KeyError:
+ pass
+ self.assertEqual(contents, {'binary.file', 'utf-8.file', 'utf-16.file'})
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/setuptools/_vendor/importlib_resources/tests/update-zips.py b/setuptools/_vendor/importlib_resources/tests/update-zips.py
new file mode 100644
index 0000000..9ef0224
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/update-zips.py
@@ -0,0 +1,53 @@
+"""
+Generate the zip test data files.
+
+Run to build the tests/zipdataNN/ziptestdata.zip files from
+files in tests/dataNN.
+
+Replaces the file with the working copy, but does not commit anything
+to the source repo.
+"""
+
+import contextlib
+import os
+import pathlib
+import zipfile
+
+
+def main():
+ """
+ >>> from unittest import mock
+ >>> monkeypatch = getfixture('monkeypatch')
+ >>> monkeypatch.setattr(zipfile, 'ZipFile', mock.MagicMock())
+ >>> print(); main() # print workaround for bpo-32509
+ <BLANKLINE>
+ ...data01... -> ziptestdata/...
+ ...
+ ...data02... -> ziptestdata/...
+ ...
+ """
+ suffixes = '01', '02'
+ tuple(map(generate, suffixes))
+
+
+def generate(suffix):
+ root = pathlib.Path(__file__).parent.relative_to(os.getcwd())
+ zfpath = root / f'zipdata{suffix}/ziptestdata.zip'
+ with zipfile.ZipFile(zfpath, 'w') as zf:
+ for src, rel in walk(root / f'data{suffix}'):
+ dst = 'ziptestdata' / pathlib.PurePosixPath(rel.as_posix())
+ print(src, '->', dst)
+ zf.write(src, dst)
+
+
+def walk(datapath):
+ for dirpath, dirnames, filenames in os.walk(datapath):
+ with contextlib.suppress(KeyError):
+ dirnames.remove('__pycache__')
+ for filename in filenames:
+ res = pathlib.Path(dirpath) / filename
+ rel = res.relative_to(datapath)
+ yield res, rel
+
+
+__name__ == '__main__' and main()
diff --git a/setuptools/_vendor/importlib_resources/tests/util.py b/setuptools/_vendor/importlib_resources/tests/util.py
new file mode 100644
index 0000000..c6d83e4
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/util.py
@@ -0,0 +1,178 @@
+import abc
+import importlib
+import io
+import sys
+import types
+from pathlib import Path, PurePath
+
+from . import data01
+from . import zipdata01
+from ..abc import ResourceReader
+from ._compat import import_helper
+
+
+from importlib.machinery import ModuleSpec
+
+
+class Reader(ResourceReader):
+ def __init__(self, **kwargs):
+ vars(self).update(kwargs)
+
+ def get_resource_reader(self, package):
+ return self
+
+ def open_resource(self, path):
+ self._path = path
+ if isinstance(self.file, Exception):
+ raise self.file
+ return self.file
+
+ def resource_path(self, path_):
+ self._path = path_
+ if isinstance(self.path, Exception):
+ raise self.path
+ return self.path
+
+ def is_resource(self, path_):
+ self._path = path_
+ if isinstance(self.path, Exception):
+ raise self.path
+
+ def part(entry):
+ return entry.split('/')
+
+ return any(
+ len(parts) == 1 and parts[0] == path_ for parts in map(part, self._contents)
+ )
+
+ def contents(self):
+ if isinstance(self.path, Exception):
+ raise self.path
+ yield from self._contents
+
+
+def create_package_from_loader(loader, is_package=True):
+ name = 'testingpackage'
+ module = types.ModuleType(name)
+ spec = ModuleSpec(name, loader, origin='does-not-exist', is_package=is_package)
+ module.__spec__ = spec
+ module.__loader__ = loader
+ return module
+
+
+def create_package(file=None, path=None, is_package=True, contents=()):
+ return create_package_from_loader(
+ Reader(file=file, path=path, _contents=contents),
+ is_package,
+ )
+
+
+class CommonTests(metaclass=abc.ABCMeta):
+ """
+ Tests shared by test_open, test_path, and test_read.
+ """
+
+ @abc.abstractmethod
+ def execute(self, package, path):
+ """
+ Call the pertinent legacy API function (e.g. open_text, path)
+ on package and path.
+ """
+
+ def test_package_name(self):
+ # Passing in the package name should succeed.
+ self.execute(data01.__name__, 'utf-8.file')
+
+ def test_package_object(self):
+ # Passing in the package itself should succeed.
+ self.execute(data01, 'utf-8.file')
+
+ def test_string_path(self):
+ # Passing in a string for the path should succeed.
+ path = 'utf-8.file'
+ self.execute(data01, path)
+
+ def test_pathlib_path(self):
+ # Passing in a pathlib.PurePath object for the path should succeed.
+ path = PurePath('utf-8.file')
+ self.execute(data01, path)
+
+ def test_importing_module_as_side_effect(self):
+ # The anchor package can already be imported.
+ del sys.modules[data01.__name__]
+ self.execute(data01.__name__, 'utf-8.file')
+
+ def test_non_package_by_name(self):
+ # The anchor package cannot be a module.
+ with self.assertRaises(TypeError):
+ self.execute(__name__, 'utf-8.file')
+
+ def test_non_package_by_package(self):
+ # The anchor package cannot be a module.
+ with self.assertRaises(TypeError):
+ module = sys.modules['importlib_resources.tests.util']
+ self.execute(module, 'utf-8.file')
+
+ def test_missing_path(self):
+ # Attempting to open or read or request the path for a
+ # non-existent path should succeed if open_resource
+ # can return a viable data stream.
+ bytes_data = io.BytesIO(b'Hello, world!')
+ package = create_package(file=bytes_data, path=FileNotFoundError())
+ self.execute(package, 'utf-8.file')
+ self.assertEqual(package.__loader__._path, 'utf-8.file')
+
+ def test_extant_path(self):
+ # Attempting to open or read or request the path when the
+ # path does exist should still succeed. Does not assert
+ # anything about the result.
+ bytes_data = io.BytesIO(b'Hello, world!')
+ # any path that exists
+ path = __file__
+ package = create_package(file=bytes_data, path=path)
+ self.execute(package, 'utf-8.file')
+ self.assertEqual(package.__loader__._path, 'utf-8.file')
+
+ def test_useless_loader(self):
+ package = create_package(file=FileNotFoundError(), path=FileNotFoundError())
+ with self.assertRaises(FileNotFoundError):
+ self.execute(package, 'utf-8.file')
+
+
+class ZipSetupBase:
+ ZIP_MODULE = None
+
+ @classmethod
+ def setUpClass(cls):
+ data_path = Path(cls.ZIP_MODULE.__file__)
+ data_dir = data_path.parent
+ cls._zip_path = str(data_dir / 'ziptestdata.zip')
+ sys.path.append(cls._zip_path)
+ cls.data = importlib.import_module('ziptestdata')
+
+ @classmethod
+ def tearDownClass(cls):
+ try:
+ sys.path.remove(cls._zip_path)
+ except ValueError:
+ pass
+
+ try:
+ del sys.path_importer_cache[cls._zip_path]
+ del sys.modules[cls.data.__name__]
+ except KeyError:
+ pass
+
+ try:
+ del cls.data
+ del cls._zip_path
+ except AttributeError:
+ pass
+
+ def setUp(self):
+ modules = import_helper.modules_setup()
+ self.addCleanup(import_helper.modules_cleanup, *modules)
+
+
+class ZipSetup(ZipSetupBase):
+ ZIP_MODULE = zipdata01 # type: ignore
diff --git a/setuptools/_vendor/importlib_resources/tests/zipdata01/__init__.py b/setuptools/_vendor/importlib_resources/tests/zipdata01/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/zipdata01/__init__.py
diff --git a/setuptools/_vendor/importlib_resources/tests/zipdata02/__init__.py b/setuptools/_vendor/importlib_resources/tests/zipdata02/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/zipdata02/__init__.py
diff --git a/setuptools/_vendor/jaraco.context-4.1.1.dist-info/top_level.txt b/setuptools/_vendor/jaraco.context-4.1.1.dist-info/top_level.txt
new file mode 100644
index 0000000..f6205a5
--- /dev/null
+++ b/setuptools/_vendor/jaraco.context-4.1.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+jaraco
diff --git a/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/top_level.txt b/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/top_level.txt
new file mode 100644
index 0000000..f6205a5
--- /dev/null
+++ b/setuptools/_vendor/jaraco.functools-3.5.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+jaraco
diff --git a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/top_level.txt b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/top_level.txt
new file mode 100644
index 0000000..f6205a5
--- /dev/null
+++ b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+jaraco
diff --git a/setuptools/_vendor/jaraco/__init__.py b/setuptools/_vendor/jaraco/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/setuptools/_vendor/jaraco/__init__.py
diff --git a/setuptools/_vendor/jaraco/context.py b/setuptools/_vendor/jaraco/context.py
new file mode 100644
index 0000000..87a4e3d
--- /dev/null
+++ b/setuptools/_vendor/jaraco/context.py
@@ -0,0 +1,213 @@
+import os
+import subprocess
+import contextlib
+import functools
+import tempfile
+import shutil
+import operator
+
+
+@contextlib.contextmanager
+def pushd(dir):
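+    """
+    Change the current working directory to ``dir`` for the duration of the
+    context, then restore the original working directory.
+    """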
+ orig = os.getcwd()
+ os.chdir(dir)
+ try:
+ yield dir
+ finally:
+ os.chdir(orig)
+
+
+@contextlib.contextmanager
+def tarball_context(url, target_dir=None, runner=None, pushd=pushd):
+ """
+ Get a tarball, extract it, change to that directory, yield, then
+ clean up.
+ `runner` is the function to invoke commands.
+ `pushd` is a context manager for changing the directory.
+ """
+ if target_dir is None:
+ target_dir = os.path.basename(url).replace('.tar.gz', '').replace('.tgz', '')
+ if runner is None:
+ runner = functools.partial(subprocess.check_call, shell=True)
+    # In the tar command, use --strip-components=1 to strip the first path and
+    # then use -C to cause the files to be extracted to {target_dir}. This
+    # ensures that we always know where the files were extracted.
+ runner('mkdir {target_dir}'.format(**vars()))
+ try:
+ getter = 'wget {url} -O -'
+ extract = 'tar x{compression} --strip-components=1 -C {target_dir}'
+ cmd = ' | '.join((getter, extract))
+ runner(cmd.format(compression=infer_compression(url), **vars()))
+ with pushd(target_dir):
+ yield target_dir
+ finally:
+ runner('rm -Rf {target_dir}'.format(**vars()))
+
+
+def infer_compression(url):
+ """
+ Given a URL or filename, infer the compression code for tar.
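+
+    For example (illustrative file names):
+
+    >>> infer_compression('http://example.com/archive.tar.gz')
+    'z'
+    >>> infer_compression('archive.tar.xz')
+    'J'
+    >>> infer_compression('archive.tar')
+    'z'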
+ """
+ # cheat and just assume it's the last two characters
+ compression_indicator = url[-2:]
+ mapping = dict(gz='z', bz='j', xz='J')
+ # Assume 'z' (gzip) if no match
+ return mapping.get(compression_indicator, 'z')
+
+
+@contextlib.contextmanager
+def temp_dir(remover=shutil.rmtree):
+ """
+ Create a temporary directory context. Pass a custom remover
+ to override the removal behavior.
+ """
+ temp_dir = tempfile.mkdtemp()
+ try:
+ yield temp_dir
+ finally:
+ remover(temp_dir)
+
+
+@contextlib.contextmanager
+def repo_context(url, branch=None, quiet=True, dest_ctx=temp_dir):
+ """
+ Check out the repo indicated by url.
+
+ If dest_ctx is supplied, it should be a context manager
+ to yield the target directory for the check out.
+ """
+ exe = 'git' if 'git' in url else 'hg'
+ with dest_ctx() as repo_dir:
+ cmd = [exe, 'clone', url, repo_dir]
+ if branch:
+ cmd.extend(['--branch', branch])
+ devnull = open(os.path.devnull, 'w')
+ stdout = devnull if quiet else None
+ subprocess.check_call(cmd, stdout=stdout)
+ yield repo_dir
+
+
+@contextlib.contextmanager
+def null():
+ yield
+
+
+class ExceptionTrap:
+ """
+ A context manager that will catch certain exceptions and provide an
+ indication they occurred.
+
+ >>> with ExceptionTrap() as trap:
+ ... raise Exception()
+ >>> bool(trap)
+ True
+
+ >>> with ExceptionTrap() as trap:
+ ... pass
+ >>> bool(trap)
+ False
+
+ >>> with ExceptionTrap(ValueError) as trap:
+ ... raise ValueError("1 + 1 is not 3")
+ >>> bool(trap)
+ True
+
+ >>> with ExceptionTrap(ValueError) as trap:
+ ... raise Exception()
+ Traceback (most recent call last):
+ ...
+ Exception
+
+ >>> bool(trap)
+ False
+ """
+
+ exc_info = None, None, None
+
+ def __init__(self, exceptions=(Exception,)):
+ self.exceptions = exceptions
+
+ def __enter__(self):
+ return self
+
+ @property
+ def type(self):
+ return self.exc_info[0]
+
+ @property
+ def value(self):
+ return self.exc_info[1]
+
+ @property
+ def tb(self):
+ return self.exc_info[2]
+
+ def __exit__(self, *exc_info):
+ type = exc_info[0]
+ matches = type and issubclass(type, self.exceptions)
+ if matches:
+ self.exc_info = exc_info
+ return matches
+
+ def __bool__(self):
+ return bool(self.type)
+
+ def raises(self, func, *, _test=bool):
+ """
+ Wrap func and replace the result with the truth
+ value of the trap (True if an exception occurred).
+
+        First, give the decorator an alias to support Python 3.8
+        syntax.
+
+ >>> raises = ExceptionTrap(ValueError).raises
+
+ Now decorate a function that always fails.
+
+ >>> @raises
+ ... def fail():
+ ... raise ValueError('failed')
+ >>> fail()
+ True
+ """
+
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ with ExceptionTrap(self.exceptions) as trap:
+ func(*args, **kwargs)
+ return _test(trap)
+
+ return wrapper
+
+ def passes(self, func):
+ """
+ Wrap func and replace the result with the truth
+ value of the trap (True if no exception).
+
+        First, give the decorator an alias to support Python 3.8
+        syntax.
+
+ >>> passes = ExceptionTrap(ValueError).passes
+
+ Now decorate a function that always fails.
+
+ >>> @passes
+ ... def fail():
+ ... raise ValueError('failed')
+
+ >>> fail()
+ False
+ """
+ return self.raises(func, _test=operator.not_)
+
+
+class suppress(contextlib.suppress, contextlib.ContextDecorator):
+ """
+ A version of contextlib.suppress with decorator support.
+
+ >>> @suppress(KeyError)
+ ... def key_error():
+ ... {}['']
+ >>> key_error()
+ """
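+
+
+# Illustrative sketch only (the file name below is hypothetical): the context
+# managers in this module compose, e.g. doing work inside a throwaway
+# directory that is removed again on exit.
+#
+#     with temp_dir() as scratch, pushd(scratch):
+#         with open('artifact.txt', 'w') as fh:
+#             fh.write('built here')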
diff --git a/setuptools/_vendor/jaraco/functools.py b/setuptools/_vendor/jaraco/functools.py
new file mode 100644
index 0000000..bbd8b29
--- /dev/null
+++ b/setuptools/_vendor/jaraco/functools.py
@@ -0,0 +1,525 @@
+import functools
+import time
+import inspect
+import collections
+import types
+import itertools
+
+from setuptools.extern import more_itertools
+
+from typing import Callable, TypeVar
+
+
+CallableT = TypeVar("CallableT", bound=Callable[..., object])
+
+
+def compose(*funcs):
+ """
+ Compose any number of unary functions into a single unary function.
+
+ >>> import textwrap
+ >>> expected = str.strip(textwrap.dedent(compose.__doc__))
+ >>> strip_and_dedent = compose(str.strip, textwrap.dedent)
+ >>> strip_and_dedent(compose.__doc__) == expected
+ True
+
+ Compose also allows the innermost function to take arbitrary arguments.
+
+ >>> round_three = lambda x: round(x, ndigits=3)
+ >>> f = compose(round_three, int.__truediv__)
+ >>> [f(3*x, x+1) for x in range(1,10)]
+ [1.5, 2.0, 2.25, 2.4, 2.5, 2.571, 2.625, 2.667, 2.7]
+ """
+
+ def compose_two(f1, f2):
+ return lambda *args, **kwargs: f1(f2(*args, **kwargs))
+
+ return functools.reduce(compose_two, funcs)
+
+
+def method_caller(method_name, *args, **kwargs):
+ """
+ Return a function that will call a named method on the
+ target object with optional positional and keyword
+ arguments.
+
+ >>> lower = method_caller('lower')
+ >>> lower('MyString')
+ 'mystring'
+ """
+
+ def call_method(target):
+ func = getattr(target, method_name)
+ return func(*args, **kwargs)
+
+ return call_method
+
+
+def once(func):
+ """
+ Decorate func so it's only ever called the first time.
+
+    This decorator can ensure that an expensive or non-idempotent function
+    is invoked only once, so subsequent calls are cheap and idempotent.
+
+ >>> add_three = once(lambda a: a+3)
+ >>> add_three(3)
+ 6
+ >>> add_three(9)
+ 6
+ >>> add_three('12')
+ 6
+
+ To reset the stored value, simply clear the property ``saved_result``.
+
+ >>> del add_three.saved_result
+ >>> add_three(9)
+ 12
+ >>> add_three(8)
+ 12
+
+ Or invoke 'reset()' on it.
+
+ >>> add_three.reset()
+ >>> add_three(-3)
+ 0
+ >>> add_three(0)
+ 0
+ """
+
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ if not hasattr(wrapper, 'saved_result'):
+ wrapper.saved_result = func(*args, **kwargs)
+ return wrapper.saved_result
+
+ wrapper.reset = lambda: vars(wrapper).__delitem__('saved_result')
+ return wrapper
+
+
+def method_cache(
+ method: CallableT,
+ cache_wrapper: Callable[
+ [CallableT], CallableT
+ ] = functools.lru_cache(), # type: ignore[assignment]
+) -> CallableT:
+ """
+ Wrap lru_cache to support storing the cache data in the object instances.
+
+ Abstracts the common paradigm where the method explicitly saves an
+ underscore-prefixed protected property on first call and returns that
+ subsequently.
+
+ >>> class MyClass:
+ ... calls = 0
+ ...
+ ... @method_cache
+ ... def method(self, value):
+ ... self.calls += 1
+ ... return value
+
+ >>> a = MyClass()
+ >>> a.method(3)
+ 3
+ >>> for x in range(75):
+ ... res = a.method(x)
+ >>> a.calls
+ 75
+
+ Note that the apparent behavior will be exactly like that of lru_cache
+ except that the cache is stored on each instance, so values in one
+ instance will not flush values from another, and when an instance is
+ deleted, so are the cached values for that instance.
+
+ >>> b = MyClass()
+ >>> for x in range(35):
+ ... res = b.method(x)
+ >>> b.calls
+ 35
+ >>> a.method(0)
+ 0
+ >>> a.calls
+ 75
+
+ Note that if method had been decorated with ``functools.lru_cache()``,
+ a.calls would have been 76 (due to the cached value of 0 having been
+ flushed by the 'b' instance).
+
+ Clear the cache with ``.cache_clear()``
+
+ >>> a.method.cache_clear()
+
+ Same for a method that hasn't yet been called.
+
+ >>> c = MyClass()
+ >>> c.method.cache_clear()
+
+ Another cache wrapper may be supplied:
+
+ >>> cache = functools.lru_cache(maxsize=2)
+ >>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache)
+ >>> a = MyClass()
+ >>> a.method2()
+ 3
+
+ Caution - do not subsequently wrap the method with another decorator, such
+ as ``@property``, which changes the semantics of the function.
+
+ See also
+ http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/
+ for another implementation and additional justification.
+ """
+
+ def wrapper(self: object, *args: object, **kwargs: object) -> object:
+ # it's the first call, replace the method with a cached, bound method
+ bound_method: CallableT = types.MethodType( # type: ignore[assignment]
+ method, self
+ )
+ cached_method = cache_wrapper(bound_method)
+ setattr(self, method.__name__, cached_method)
+ return cached_method(*args, **kwargs)
+
+ # Support cache clear even before cache has been created.
+ wrapper.cache_clear = lambda: None # type: ignore[attr-defined]
+
+ return ( # type: ignore[return-value]
+ _special_method_cache(method, cache_wrapper) or wrapper
+ )
+
+
+def _special_method_cache(method, cache_wrapper):
+ """
+ Because Python treats special methods differently, it's not
+ possible to use instance attributes to implement the cached
+ methods.
+
+ Instead, install the wrapper method under a different name
+ and return a simple proxy to that wrapper.
+
+ https://github.com/jaraco/jaraco.functools/issues/5
+ """
+ name = method.__name__
+ special_names = '__getattr__', '__getitem__'
+ if name not in special_names:
+ return
+
+ wrapper_name = '__cached' + name
+
+ def proxy(self, *args, **kwargs):
+ if wrapper_name not in vars(self):
+ bound = types.MethodType(method, self)
+ cache = cache_wrapper(bound)
+ setattr(self, wrapper_name, cache)
+ else:
+ cache = getattr(self, wrapper_name)
+ return cache(*args, **kwargs)
+
+ return proxy
+
+
+def apply(transform):
+ """
+ Decorate a function with a transform function that is
+ invoked on results returned from the decorated function.
+
+ >>> @apply(reversed)
+ ... def get_numbers(start):
+ ... "doc for get_numbers"
+ ... return range(start, start+3)
+ >>> list(get_numbers(4))
+ [6, 5, 4]
+ >>> get_numbers.__doc__
+ 'doc for get_numbers'
+ """
+
+ def wrap(func):
+ return functools.wraps(func)(compose(transform, func))
+
+ return wrap
+
+
+def result_invoke(action):
+ r"""
+ Decorate a function with an action function that is
+ invoked on the results returned from the decorated
+ function (for its side-effect), then return the original
+ result.
+
+ >>> @result_invoke(print)
+ ... def add_two(a, b):
+ ... return a + b
+ >>> x = add_two(2, 3)
+ 5
+ >>> x
+ 5
+ """
+
+ def wrap(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ result = func(*args, **kwargs)
+ action(result)
+ return result
+
+ return wrapper
+
+ return wrap
+
+
+def call_aside(f, *args, **kwargs):
+ """
+ Call a function for its side effect after initialization.
+
+ >>> @call_aside
+ ... def func(): print("called")
+ called
+ >>> func()
+ called
+
+ Use functools.partial to pass parameters to the initial call
+
+ >>> @functools.partial(call_aside, name='bingo')
+ ... def func(name): print("called with", name)
+ called with bingo
+ """
+ f(*args, **kwargs)
+ return f
+
+
+class Throttler:
+ """
+ Rate-limit a function (or other callable)
+ """
+
+ def __init__(self, func, max_rate=float('Inf')):
+ if isinstance(func, Throttler):
+ func = func.func
+ self.func = func
+ self.max_rate = max_rate
+ self.reset()
+
+ def reset(self):
+ self.last_called = 0
+
+ def __call__(self, *args, **kwargs):
+ self._wait()
+ return self.func(*args, **kwargs)
+
+ def _wait(self):
+ "ensure at least 1/max_rate seconds from last call"
+ elapsed = time.time() - self.last_called
+ must_wait = 1 / self.max_rate - elapsed
+ time.sleep(max(0, must_wait))
+ self.last_called = time.time()
+
+ def __get__(self, obj, type=None):
+ return first_invoke(self._wait, functools.partial(self.func, obj))
+
+
+def first_invoke(func1, func2):
+ """
+ Return a function that when invoked will invoke func1 without
+ any parameters (for its side-effect) and then invoke func2
+ with whatever parameters were passed, returning its result.
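+
+    For example (the callables below are illustrative):
+
+    >>> greet = first_invoke(lambda: print('hi'), lambda name: 'hello ' + name)
+    >>> greet('world')
+    hi
+    'hello world'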
+ """
+
+ def wrapper(*args, **kwargs):
+ func1()
+ return func2(*args, **kwargs)
+
+ return wrapper
+
+
+def retry_call(func, cleanup=lambda: None, retries=0, trap=()):
+ """
+ Given a callable func, trap the indicated exceptions
+ for up to 'retries' times, invoking cleanup on the
+ exception. On the final attempt, allow any exceptions
+ to propagate.
+ """
+ attempts = itertools.count() if retries == float('inf') else range(retries)
+ for attempt in attempts:
+ try:
+ return func()
+ except trap:
+ cleanup()
+
+ return func()
+
+
+def retry(*r_args, **r_kwargs):
+ """
+ Decorator wrapper for retry_call. Accepts arguments to retry_call
+ except func and then returns a decorator for the decorated function.
+
+ Ex:
+
+ >>> @retry(retries=3)
+ ... def my_func(a, b):
+ ... "this is my funk"
+ ... print(a, b)
+ >>> my_func.__doc__
+ 'this is my funk'
+ """
+
+ def decorate(func):
+ @functools.wraps(func)
+ def wrapper(*f_args, **f_kwargs):
+ bound = functools.partial(func, *f_args, **f_kwargs)
+ return retry_call(bound, *r_args, **r_kwargs)
+
+ return wrapper
+
+ return decorate
+
+
+def print_yielded(func):
+ """
+ Convert a generator into a function that prints all yielded elements
+
+ >>> @print_yielded
+ ... def x():
+ ... yield 3; yield None
+ >>> x()
+ 3
+ None
+ """
+ print_all = functools.partial(map, print)
+ print_results = compose(more_itertools.consume, print_all, func)
+ return functools.wraps(func)(print_results)
+
+
+def pass_none(func):
+ """
+ Wrap func so it's not called if its first param is None
+
+ >>> print_text = pass_none(print)
+ >>> print_text('text')
+ text
+ >>> print_text(None)
+ """
+
+ @functools.wraps(func)
+ def wrapper(param, *args, **kwargs):
+ if param is not None:
+ return func(param, *args, **kwargs)
+
+ return wrapper
+
+
+def assign_params(func, namespace):
+ """
+ Assign parameters from namespace where func solicits.
+
+ >>> def func(x, y=3):
+ ... print(x, y)
+ >>> assigned = assign_params(func, dict(x=2, z=4))
+ >>> assigned()
+ 2 3
+
+ The usual errors are raised if a function doesn't receive
+ its required parameters:
+
+ >>> assigned = assign_params(func, dict(y=3, z=4))
+ >>> assigned()
+ Traceback (most recent call last):
+ TypeError: func() ...argument...
+
+ It even works on methods:
+
+ >>> class Handler:
+ ... def meth(self, arg):
+ ... print(arg)
+ >>> assign_params(Handler().meth, dict(arg='crystal', foo='clear'))()
+ crystal
+ """
+ sig = inspect.signature(func)
+ params = sig.parameters.keys()
+ call_ns = {k: namespace[k] for k in params if k in namespace}
+ return functools.partial(func, **call_ns)
+
+
+def save_method_args(method):
+ """
+ Wrap a method such that when it is called, the args and kwargs are
+ saved on the instance.
+
+ >>> class MyClass:
+ ... @save_method_args
+ ... def method(self, a, b):
+ ... print(a, b)
+ >>> my_ob = MyClass()
+ >>> my_ob.method(1, 2)
+ 1 2
+ >>> my_ob._saved_method.args
+ (1, 2)
+ >>> my_ob._saved_method.kwargs
+ {}
+ >>> my_ob.method(a=3, b='foo')
+ 3 foo
+ >>> my_ob._saved_method.args
+ ()
+ >>> my_ob._saved_method.kwargs == dict(a=3, b='foo')
+ True
+
+ The arguments are stored on the instance, allowing for
+ different instances to save different args.
+
+ >>> your_ob = MyClass()
+ >>> your_ob.method({str('x'): 3}, b=[4])
+ {'x': 3} [4]
+ >>> your_ob._saved_method.args
+ ({'x': 3},)
+ >>> my_ob._saved_method.args
+ ()
+ """
+ args_and_kwargs = collections.namedtuple('args_and_kwargs', 'args kwargs')
+
+ @functools.wraps(method)
+ def wrapper(self, *args, **kwargs):
+ attr_name = '_saved_' + method.__name__
+ attr = args_and_kwargs(args, kwargs)
+ setattr(self, attr_name, attr)
+ return method(self, *args, **kwargs)
+
+ return wrapper
+
+
+def except_(*exceptions, replace=None, use=None):
+ """
+ Replace the indicated exceptions, if raised, with the indicated
+ literal replacement or evaluated expression (if present).
+
+ >>> safe_int = except_(ValueError)(int)
+ >>> safe_int('five')
+ >>> safe_int('5')
+ 5
+
+ Specify a literal replacement with ``replace``.
+
+ >>> safe_int_r = except_(ValueError, replace=0)(int)
+ >>> safe_int_r('five')
+ 0
+
+ Provide an expression to ``use`` to pass through particular parameters.
+
+ >>> safe_int_pt = except_(ValueError, use='args[0]')(int)
+ >>> safe_int_pt('five')
+ 'five'
+
+ """
+
+ def decorate(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ try:
+ return func(*args, **kwargs)
+ except exceptions:
+ try:
+ return eval(use)
+ except TypeError:
+ return replace
+
+ return wrapper
+
+ return decorate
diff --git a/setuptools/_vendor/jaraco/text/Lorem ipsum.txt b/setuptools/_vendor/jaraco/text/Lorem ipsum.txt
new file mode 100644
index 0000000..986f944
--- /dev/null
+++ b/setuptools/_vendor/jaraco/text/Lorem ipsum.txt
@@ -0,0 +1,2 @@
+Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
+Curabitur pretium tincidunt lacus. Nulla gravida orci a odio. Nullam varius, turpis et commodo pharetra, est eros bibendum elit, nec luctus magna felis sollicitudin mauris. Integer in mauris eu nibh euismod gravida. Duis ac tellus et risus vulputate vehicula. Donec lobortis risus a elit. Etiam tempor. Ut ullamcorper, ligula eu tempor congue, eros est euismod turpis, id tincidunt sapien risus a quam. Maecenas fermentum consequat mi. Donec fermentum. Pellentesque malesuada nulla a mi. Duis sapien sem, aliquet nec, commodo eget, consequat quis, neque. Aliquam faucibus, elit ut dictum aliquet, felis nisl adipiscing sapien, sed malesuada diam lacus eget erat. Cras mollis scelerisque nunc. Nullam arcu. Aliquam consequat. Curabitur augue lorem, dapibus quis, laoreet et, pretium ac, nisi. Aenean magna nisl, mollis quis, molestie eu, feugiat in, orci. In hac habitasse platea dictumst.
diff --git a/setuptools/_vendor/jaraco/text/__init__.py b/setuptools/_vendor/jaraco/text/__init__.py
new file mode 100644
index 0000000..a0306d5
--- /dev/null
+++ b/setuptools/_vendor/jaraco/text/__init__.py
@@ -0,0 +1,599 @@
+import re
+import itertools
+import textwrap
+import functools
+
+try:
+ from importlib.resources import files # type: ignore
+except ImportError: # pragma: nocover
+ from setuptools.extern.importlib_resources import files # type: ignore
+
+from setuptools.extern.jaraco.functools import compose, method_cache
+from setuptools.extern.jaraco.context import ExceptionTrap
+
+
+def substitution(old, new):
+ """
+ Return a function that will perform a substitution on a string
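+
+ For example:
+
+ >>> substitution('foo', 'bar')('foofoo')
+ 'barbar'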
+ """
+ return lambda s: s.replace(old, new)
+
+
+def multi_substitution(*substitutions):
+ """
+ Take a sequence of pairs specifying substitutions, and create
+ a function that performs those substitutions.
+
+ >>> multi_substitution(('foo', 'bar'), ('bar', 'baz'))('foo')
+ 'baz'
+ """
+ substitutions = itertools.starmap(substitution, substitutions)
+ # compose function applies last function first, so reverse the
+ # substitutions to get the expected order.
+ substitutions = reversed(tuple(substitutions))
+ return compose(*substitutions)
+
+
+class FoldedCase(str):
+ """
+ A case insensitive string class; behaves just like str
+ except compares equal when the only variation is case.
+
+ >>> s = FoldedCase('hello world')
+
+ >>> s == 'Hello World'
+ True
+
+ >>> 'Hello World' == s
+ True
+
+ >>> s != 'Hello World'
+ False
+
+ >>> s.index('O')
+ 4
+
+ >>> s.split('O')
+ ['hell', ' w', 'rld']
+
+ >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta']))
+ ['alpha', 'Beta', 'GAMMA']
+
+ Sequence membership is straightforward.
+
+ >>> "Hello World" in [s]
+ True
+ >>> s in ["Hello World"]
+ True
+
+ You may test for set inclusion, but candidate and elements
+ must both be folded.
+
+ >>> FoldedCase("Hello World") in {s}
+ True
+ >>> s in {FoldedCase("Hello World")}
+ True
+
+ String inclusion works as long as the FoldedCase object
+ is on the right.
+
+ >>> "hello" in FoldedCase("Hello World")
+ True
+
+ But not if the FoldedCase object is on the left:
+
+ >>> FoldedCase('hello') in 'Hello World'
+ False
+
+ In that case, use ``in_``:
+
+ >>> FoldedCase('hello').in_('Hello World')
+ True
+
+ >>> FoldedCase('hello') > FoldedCase('Hello')
+ False
+ """
+
+ def __lt__(self, other):
+ return self.lower() < other.lower()
+
+ def __gt__(self, other):
+ return self.lower() > other.lower()
+
+ def __eq__(self, other):
+ return self.lower() == other.lower()
+
+ def __ne__(self, other):
+ return self.lower() != other.lower()
+
+ def __hash__(self):
+ return hash(self.lower())
+
+ def __contains__(self, other):
+ return super().lower().__contains__(other.lower())
+
+ def in_(self, other):
+ "Does self appear in other?"
+ return self in FoldedCase(other)
+
+ # cache lower since it's likely to be called frequently.
+ @method_cache
+ def lower(self):
+ return super().lower()
+
+ def index(self, sub):
+ return self.lower().index(sub.lower())
+
+ def split(self, splitter=' ', maxsplit=0):
+ pattern = re.compile(re.escape(splitter), re.I)
+ return pattern.split(self, maxsplit)
+
+
+# Python 3.8 compatibility
+_unicode_trap = ExceptionTrap(UnicodeDecodeError)
+
+
+@_unicode_trap.passes
+def is_decodable(value):
+ r"""
+ Return True if the supplied value is decodable (using the default
+ encoding).
+
+ >>> is_decodable(b'\xff')
+ False
+ >>> is_decodable(b'\x32')
+ True
+ """
+ value.decode()
+
+
+def is_binary(value):
+ r"""
+ Return True if the value appears to be binary (that is, it's a byte
+ string and isn't decodable).
+
+ >>> is_binary(b'\xff')
+ True
+ >>> is_binary('\xff')
+ False
+ """
+ return isinstance(value, bytes) and not is_decodable(value)
+
+
+def trim(s):
+ r"""
+ Trim something like a docstring to remove the whitespace that
+ is common due to indentation and formatting.
+
+ >>> trim("\n\tfoo = bar\n\t\tbar = baz\n")
+ 'foo = bar\n\tbar = baz'
+ """
+ return textwrap.dedent(s).strip()
+
+
+def wrap(s):
+ """
+ Wrap lines of text, retaining existing newlines as
+ paragraph markers.
+
+ >>> print(wrap(lorem_ipsum))
+ Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do
+ eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad
+ minim veniam, quis nostrud exercitation ullamco laboris nisi ut
+ aliquip ex ea commodo consequat. Duis aute irure dolor in
+ reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla
+ pariatur. Excepteur sint occaecat cupidatat non proident, sunt in
+ culpa qui officia deserunt mollit anim id est laborum.
+ <BLANKLINE>
+ Curabitur pretium tincidunt lacus. Nulla gravida orci a odio. Nullam
+ varius, turpis et commodo pharetra, est eros bibendum elit, nec luctus
+ magna felis sollicitudin mauris. Integer in mauris eu nibh euismod
+ gravida. Duis ac tellus et risus vulputate vehicula. Donec lobortis
+ risus a elit. Etiam tempor. Ut ullamcorper, ligula eu tempor congue,
+ eros est euismod turpis, id tincidunt sapien risus a quam. Maecenas
+ fermentum consequat mi. Donec fermentum. Pellentesque malesuada nulla
+ a mi. Duis sapien sem, aliquet nec, commodo eget, consequat quis,
+ neque. Aliquam faucibus, elit ut dictum aliquet, felis nisl adipiscing
+ sapien, sed malesuada diam lacus eget erat. Cras mollis scelerisque
+ nunc. Nullam arcu. Aliquam consequat. Curabitur augue lorem, dapibus
+ quis, laoreet et, pretium ac, nisi. Aenean magna nisl, mollis quis,
+ molestie eu, feugiat in, orci. In hac habitasse platea dictumst.
+ """
+ paragraphs = s.splitlines()
+ wrapped = ('\n'.join(textwrap.wrap(para)) for para in paragraphs)
+ return '\n\n'.join(wrapped)
+
+
+def unwrap(s):
+ r"""
+ Given a multi-line string, return an unwrapped version.
+
+ >>> wrapped = wrap(lorem_ipsum)
+ >>> wrapped.count('\n')
+ 20
+ >>> unwrapped = unwrap(wrapped)
+ >>> unwrapped.count('\n')
+ 1
+ >>> print(unwrapped)
+ Lorem ipsum dolor sit amet, consectetur adipiscing ...
+ Curabitur pretium tincidunt lacus. Nulla gravida orci ...
+
+ """
+ paragraphs = re.split(r'\n\n+', s)
+ cleaned = (para.replace('\n', ' ') for para in paragraphs)
+ return '\n'.join(cleaned)
+
+
+
+
+class Splitter(object):
+ """object that will split a string with the given arguments for each call
+
+ >>> s = Splitter(',')
+ >>> s('hello, world, this is your, master calling')
+ ['hello', ' world', ' this is your', ' master calling']
+ """
+
+ def __init__(self, *args):
+ self.args = args
+
+ def __call__(self, s):
+ return s.split(*self.args)
+
+
+def indent(string, prefix=' ' * 4):
+ """
+ >>> indent('foo')
+ ' foo'
+ """
+ return prefix + string
+
+
+class WordSet(tuple):
+ """
+ Given an identifier, return the words that identifier represents,
+ whether in camel case, underscore-separated, etc.
+
+ >>> WordSet.parse("camelCase")
+ ('camel', 'Case')
+
+ >>> WordSet.parse("under_sep")
+ ('under', 'sep')
+
+ Acronyms should be retained.
+
+ >>> WordSet.parse("firstSNL")
+ ('first', 'SNL')
+
+ >>> WordSet.parse("you_and_I")
+ ('you', 'and', 'I')
+
+ >>> WordSet.parse("A simple test")
+ ('A', 'simple', 'test')
+
+ Multiple caps should not interfere with the first cap of another word.
+
+ >>> WordSet.parse("myABCClass")
+ ('my', 'ABC', 'Class')
+
+ The result is a WordSet, so you can get the form you need.
+
+ >>> WordSet.parse("myABCClass").underscore_separated()
+ 'my_ABC_Class'
+
+ >>> WordSet.parse('a-command').camel_case()
+ 'ACommand'
+
+ >>> WordSet.parse('someIdentifier').lowered().space_separated()
+ 'some identifier'
+
+ Slices of the result should return another WordSet.
+
+ >>> WordSet.parse('taken-out-of-context')[1:].underscore_separated()
+ 'out_of_context'
+
+ >>> WordSet.from_class_name(WordSet()).lowered().space_separated()
+ 'word set'
+
+ >>> example = WordSet.parse('figured it out')
+ >>> example.headless_camel_case()
+ 'figuredItOut'
+ >>> example.dash_separated()
+ 'figured-it-out'
+
+ """
+
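+ # Either an optionally-capitalized run of lowercase letters, or a run of
+ # capitals (an acronym) not immediately followed by a lowercase letter.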
+ _pattern = re.compile('([A-Z]?[a-z]+)|([A-Z]+(?![a-z]))')
+
+ def capitalized(self):
+ return WordSet(word.capitalize() for word in self)
+
+ def lowered(self):
+ return WordSet(word.lower() for word in self)
+
+ def camel_case(self):
+ return ''.join(self.capitalized())
+
+ def headless_camel_case(self):
+ words = iter(self)
+ first = next(words).lower()
+ new_words = itertools.chain((first,), WordSet(words).camel_case())
+ return ''.join(new_words)
+
+ def underscore_separated(self):
+ return '_'.join(self)
+
+ def dash_separated(self):
+ return '-'.join(self)
+
+ def space_separated(self):
+ return ' '.join(self)
+
+ def trim_right(self, item):
+ """
+ Remove the item from the end of the set.
+
+ >>> WordSet.parse('foo bar').trim_right('foo')
+ ('foo', 'bar')
+ >>> WordSet.parse('foo bar').trim_right('bar')
+ ('foo',)
+ >>> WordSet.parse('').trim_right('bar')
+ ()
+ """
+ return self[:-1] if self and self[-1] == item else self
+
+ def trim_left(self, item):
+ """
+ Remove the item from the beginning of the set.
+
+ >>> WordSet.parse('foo bar').trim_left('foo')
+ ('bar',)
+ >>> WordSet.parse('foo bar').trim_left('bar')
+ ('foo', 'bar')
+ >>> WordSet.parse('').trim_left('bar')
+ ()
+ """
+ return self[1:] if self and self[0] == item else self
+
+ def trim(self, item):
+ """
+ >>> WordSet.parse('foo bar').trim('foo')
+ ('bar',)
+ """
+ return self.trim_left(item).trim_right(item)
+
+ def __getitem__(self, item):
+ result = super(WordSet, self).__getitem__(item)
+ if isinstance(item, slice):
+ result = WordSet(result)
+ return result
+
+ @classmethod
+ def parse(cls, identifier):
+ matches = cls._pattern.finditer(identifier)
+ return WordSet(match.group(0) for match in matches)
+
+ @classmethod
+ def from_class_name(cls, subject):
+ return cls.parse(subject.__class__.__name__)
+
+
+# for backward compatibility
+words = WordSet.parse
+
+
+def simple_html_strip(s):
+ r"""
+ Remove HTML from the string `s`.
+
+ >>> str(simple_html_strip(''))
+ ''
+
+ >>> print(simple_html_strip('A <bold>stormy</bold> day in paradise'))
+ A stormy day in paradise
+
+ >>> print(simple_html_strip('Somebody <!-- do not --> tell the truth.'))
+ Somebody tell the truth.
+
+ >>> print(simple_html_strip('What about<br/>\nmultiple lines?'))
+ What about
+ multiple lines?
+ """
+ html_stripper = re.compile('(<!--.*?-->)|(<[^>]*>)|([^<]+)', re.DOTALL)
+ texts = (match.group(3) or '' for match in html_stripper.finditer(s))
+ return ''.join(texts)
+
+
+class SeparatedValues(str):
+ """
+ A string separated by a separator. Overrides __iter__ for getting
+ the values.
+
+ >>> list(SeparatedValues('a,b,c'))
+ ['a', 'b', 'c']
+
+ Whitespace is stripped and empty values are discarded.
+
+ >>> list(SeparatedValues(' a, b , c, '))
+ ['a', 'b', 'c']
+ """
+
+ separator = ','
+
+ def __iter__(self):
+ parts = self.split(self.separator)
+ return filter(None, (part.strip() for part in parts))
+
+
+class Stripper:
+ r"""
+ Given a series of lines, find the common prefix and strip it from them.
+
+ >>> lines = [
+ ... 'abcdefg\n',
+ ... 'abc\n',
+ ... 'abcde\n',
+ ... ]
+ >>> res = Stripper.strip_prefix(lines)
+ >>> res.prefix
+ 'abc'
+ >>> list(res.lines)
+ ['defg\n', '\n', 'de\n']
+
+ If no prefix is common, nothing should be stripped.
+
+ >>> lines = [
+ ... 'abcd\n',
+ ... '1234\n',
+ ... ]
+ >>> res = Stripper.strip_prefix(lines)
+ >>> res.prefix
+ ''
+ >>> list(res.lines)
+ ['abcd\n', '1234\n']
+ """
+
+ def __init__(self, prefix, lines):
+ self.prefix = prefix
+ self.lines = map(self, lines)
+
+ @classmethod
+ def strip_prefix(cls, lines):
+ prefix_lines, lines = itertools.tee(lines)
+ prefix = functools.reduce(cls.common_prefix, prefix_lines)
+ return cls(prefix, lines)
+
+ def __call__(self, line):
+ if not self.prefix:
+ return line
+ null, prefix, rest = line.partition(self.prefix)
+ return rest
+
+ @staticmethod
+ def common_prefix(s1, s2):
+ """
+ Return the common prefix of two lines.
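+
+ For example:
+
+ >>> Stripper.common_prefix('abcd', 'abce')
+ 'abc'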
+ """
+ index = min(len(s1), len(s2))
+ while s1[:index] != s2[:index]:
+ index -= 1
+ return s1[:index]
+
+
+def remove_prefix(text, prefix):
+ """
+ Remove the prefix from the text if it exists.
+
+ >>> remove_prefix('underwhelming performance', 'underwhelming ')
+ 'performance'
+
+ >>> remove_prefix('something special', 'sample')
+ 'something special'
+ """
+ null, prefix, rest = text.rpartition(prefix)
+ return rest
+
+
+def remove_suffix(text, suffix):
+ """
+ Remove the suffix from the text if it exists.
+
+ >>> remove_suffix('name.git', '.git')
+ 'name'
+
+ >>> remove_suffix('something special', 'sample')
+ 'something special'
+ """
+ rest, suffix, null = text.partition(suffix)
+ return rest
+
+
+def normalize_newlines(text):
+ r"""
+ Replace alternate newlines with the canonical newline.
+
+ >>> normalize_newlines('Lorem Ipsum\u2029')
+ 'Lorem Ipsum\n'
+ >>> normalize_newlines('Lorem Ipsum\r\n')
+ 'Lorem Ipsum\n'
+ >>> normalize_newlines('Lorem Ipsum\x85')
+ 'Lorem Ipsum\n'
+ """
+ newlines = ['\r\n', '\r', '\n', '\u0085', '\u2028', '\u2029']
+ pattern = '|'.join(newlines)
+ return re.sub(pattern, '\n', text)
+
+
+def _nonblank(str):
+ return str and not str.startswith('#')
+
+
+@functools.singledispatch
+def yield_lines(iterable):
+ r"""
+ Yield valid lines of a string or iterable.
+
+ >>> list(yield_lines(''))
+ []
+ >>> list(yield_lines(['foo', 'bar']))
+ ['foo', 'bar']
+ >>> list(yield_lines('foo\nbar'))
+ ['foo', 'bar']
+ >>> list(yield_lines('\nfoo\n#bar\nbaz #comment'))
+ ['foo', 'baz #comment']
+ >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n']))
+ ['foo', 'bar', 'baz', 'bing']
+ """
+ return itertools.chain.from_iterable(map(yield_lines, iterable))
+
+
+@yield_lines.register(str)
+def _(text):
+ return filter(_nonblank, map(str.strip, text.splitlines()))
+
+
+def drop_comment(line):
+ """
+ Drop comments.
+
+ >>> drop_comment('foo # bar')
+ 'foo'
+
+ A hash without a space may be in a URL.
+
+ >>> drop_comment('http://example.com/foo#bar')
+ 'http://example.com/foo#bar'
+ """
+ return line.partition(' #')[0]
+
+
+def join_continuation(lines):
+ r"""
+ Join lines continued by a trailing backslash.
+
+ >>> list(join_continuation(['foo \\', 'bar', 'baz']))
+ ['foobar', 'baz']
+ >>> list(join_continuation(['foo \\', 'bar \\', 'baz']))
+ ['foobarbaz']
+
+ Not sure why, but...
+ The character preceding the backslash is also elided.
+
+ >>> list(join_continuation(['goo\\', 'dly']))
+ ['godly']
+
+ A terrible idea, but...
+ If no line is available to continue, suppress the lines.
+
+ >>> list(join_continuation(['foo', 'bar\\', 'baz\\']))
+ ['foo']
+ """
+ lines = iter(lines)
+ for item in lines:
+ while item.endswith('\\'):
+ try:
+ item = item[:-2].strip() + next(lines)
+ except StopIteration:
+ return
+ yield item
diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/top_level.txt b/setuptools/_vendor/more_itertools-8.8.0.dist-info/top_level.txt
new file mode 100644
index 0000000..a5035be
--- /dev/null
+++ b/setuptools/_vendor/more_itertools-8.8.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+more_itertools
diff --git a/setuptools/_vendor/more_itertools/__init__.py b/setuptools/_vendor/more_itertools/__init__.py
new file mode 100644
index 0000000..19a169f
--- /dev/null
+++ b/setuptools/_vendor/more_itertools/__init__.py
@@ -0,0 +1,4 @@
+from .more import * # noqa
+from .recipes import * # noqa
+
+__version__ = '8.8.0'
diff --git a/setuptools/_vendor/more_itertools/more.py b/setuptools/_vendor/more_itertools/more.py
new file mode 100644
index 0000000..e6fca4d
--- /dev/null
+++ b/setuptools/_vendor/more_itertools/more.py
@@ -0,0 +1,3824 @@
+import warnings
+
+from collections import Counter, defaultdict, deque, abc
+from collections.abc import Sequence
+from functools import partial, reduce, wraps
+from heapq import merge, heapify, heapreplace, heappop
+from itertools import (
+ chain,
+ compress,
+ count,
+ cycle,
+ dropwhile,
+ groupby,
+ islice,
+ repeat,
+ starmap,
+ takewhile,
+ tee,
+ zip_longest,
+)
+from math import exp, factorial, floor, log
+from queue import Empty, Queue
+from random import random, randrange, uniform
+from operator import itemgetter, mul, sub, gt, lt
+from sys import hexversion, maxsize
+from time import monotonic
+
+from .recipes import (
+ consume,
+ flatten,
+ pairwise,
+ powerset,
+ take,
+ unique_everseen,
+)
+
+__all__ = [
+ 'AbortThread',
+ 'adjacent',
+ 'always_iterable',
+ 'always_reversible',
+ 'bucket',
+ 'callback_iter',
+ 'chunked',
+ 'circular_shifts',
+ 'collapse',
+ 'collate',
+ 'consecutive_groups',
+ 'consumer',
+ 'countable',
+ 'count_cycle',
+ 'mark_ends',
+ 'difference',
+ 'distinct_combinations',
+ 'distinct_permutations',
+ 'distribute',
+ 'divide',
+ 'exactly_n',
+ 'filter_except',
+ 'first',
+ 'groupby_transform',
+ 'ilen',
+ 'interleave_longest',
+ 'interleave',
+ 'intersperse',
+ 'islice_extended',
+ 'iterate',
+ 'ichunked',
+ 'is_sorted',
+ 'last',
+ 'locate',
+ 'lstrip',
+ 'make_decorator',
+ 'map_except',
+ 'map_reduce',
+ 'nth_or_last',
+ 'nth_permutation',
+ 'nth_product',
+ 'numeric_range',
+ 'one',
+ 'only',
+ 'padded',
+ 'partitions',
+ 'set_partitions',
+ 'peekable',
+ 'repeat_last',
+ 'replace',
+ 'rlocate',
+ 'rstrip',
+ 'run_length',
+ 'sample',
+ 'seekable',
+ 'SequenceView',
+ 'side_effect',
+ 'sliced',
+ 'sort_together',
+ 'split_at',
+ 'split_after',
+ 'split_before',
+ 'split_when',
+ 'split_into',
+ 'spy',
+ 'stagger',
+ 'strip',
+ 'substrings',
+ 'substrings_indexes',
+ 'time_limited',
+ 'unique_to_each',
+ 'unzip',
+ 'windowed',
+ 'with_iter',
+ 'UnequalIterablesError',
+ 'zip_equal',
+ 'zip_offset',
+ 'windowed_complete',
+ 'all_unique',
+ 'value_chain',
+ 'product_index',
+ 'combination_index',
+ 'permutation_index',
+]
+
+_marker = object()
+
+
+def chunked(iterable, n, strict=False):
+ """Break *iterable* into lists of length *n*:
+
+ >>> list(chunked([1, 2, 3, 4, 5, 6], 3))
+ [[1, 2, 3], [4, 5, 6]]
+
+ By default, the last yielded list will have fewer than *n* elements
+ if the length of *iterable* is not divisible by *n*:
+
+ >>> list(chunked([1, 2, 3, 4, 5, 6, 7, 8], 3))
+ [[1, 2, 3], [4, 5, 6], [7, 8]]
+
+ To use a fill-in value instead, see the :func:`grouper` recipe.
+
+ If the length of *iterable* is not divisible by *n* and *strict* is
+ ``True``, then ``ValueError`` will be raised before the last
+ list is yielded.
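+
+ For example:
+
+ >>> list(chunked([1, 2, 3, 4], 3, strict=True))
+ Traceback (most recent call last):
+ ...
+ ValueError: iterable is not divisible by n.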
+
+ """
+ iterator = iter(partial(take, n, iter(iterable)), [])
+ if strict:
+
+ def ret():
+ for chunk in iterator:
+ if len(chunk) != n:
+ raise ValueError('iterable is not divisible by n.')
+ yield chunk
+
+ return iter(ret())
+ else:
+ return iterator
+
+
+def first(iterable, default=_marker):
+ """Return the first item of *iterable*, or *default* if *iterable* is
+ empty.
+
+ >>> first([0, 1, 2, 3])
+ 0
+ >>> first([], 'some default')
+ 'some default'
+
+ If *default* is not provided and there are no items in the iterable,
+ raise ``ValueError``.
+
+ :func:`first` is useful when you have a generator of expensive-to-retrieve
+ values and want any arbitrary one. It is marginally shorter than
+ ``next(iter(iterable), default)``.
+
+ """
+ try:
+ return next(iter(iterable))
+ except StopIteration as e:
+ if default is _marker:
+ raise ValueError(
+ 'first() was called on an empty iterable, and no '
+ 'default value was provided.'
+ ) from e
+ return default
+
+
+def last(iterable, default=_marker):
+ """Return the last item of *iterable*, or *default* if *iterable* is
+ empty.
+
+ >>> last([0, 1, 2, 3])
+ 3
+ >>> last([], 'some default')
+ 'some default'
+
+ If *default* is not provided and there are no items in the iterable,
+ raise ``ValueError``.
+ """
+ try:
+ if isinstance(iterable, Sequence):
+ return iterable[-1]
+ # Work around https://bugs.python.org/issue38525
+ elif hasattr(iterable, '__reversed__') and (hexversion != 0x030800F0):
+ return next(reversed(iterable))
+ else:
+ return deque(iterable, maxlen=1)[-1]
+ except (IndexError, TypeError, StopIteration):
+ if default is _marker:
+ raise ValueError(
+ 'last() was called on an empty iterable, and no default was '
+ 'provided.'
+ )
+ return default
+
+
+def nth_or_last(iterable, n, default=_marker):
+ """Return the nth or the last item of *iterable*,
+ or *default* if *iterable* is empty.
+
+ >>> nth_or_last([0, 1, 2, 3], 2)
+ 2
+ >>> nth_or_last([0, 1], 2)
+ 1
+ >>> nth_or_last([], 0, 'some default')
+ 'some default'
+
+ If *default* is not provided and there are no items in the iterable,
+ raise ``ValueError``.
+ """
+ return last(islice(iterable, n + 1), default=default)
+
+
+class peekable:
+ """Wrap an iterator to allow lookahead and prepending elements.
+
+ Call :meth:`peek` on the result to get the value that will be returned
+ by :func:`next`. This won't advance the iterator:
+
+ >>> p = peekable(['a', 'b'])
+ >>> p.peek()
+ 'a'
+ >>> next(p)
+ 'a'
+
+ Pass :meth:`peek` a default value to return that instead of raising
+ ``StopIteration`` when the iterator is exhausted.
+
+ >>> p = peekable([])
+ >>> p.peek('hi')
+ 'hi'
+
+ peekables also offer a :meth:`prepend` method, which "inserts" items
+ at the head of the iterable:
+
+ >>> p = peekable([1, 2, 3])
+ >>> p.prepend(10, 11, 12)
+ >>> next(p)
+ 10
+ >>> p.peek()
+ 11
+ >>> list(p)
+ [11, 12, 1, 2, 3]
+
+ peekables can be indexed. Index 0 is the item that will be returned by
+ :func:`next`, index 1 is the item after that, and so on:
+ The values up to the given index will be cached.
+
+ >>> p = peekable(['a', 'b', 'c', 'd'])
+ >>> p[0]
+ 'a'
+ >>> p[1]
+ 'b'
+ >>> next(p)
+ 'a'
+
+ Negative indexes are supported, but be aware that they will cache the
+ remaining items in the source iterator, which may require significant
+ storage.
+
+ To check whether a peekable is exhausted, check its truth value:
+
+ >>> p = peekable(['a', 'b'])
+ >>> if p: # peekable has items
+ ... list(p)
+ ['a', 'b']
+ >>> if not p: # peekable is exhausted
+ ... list(p)
+ []
+
+ """
+
+ def __init__(self, iterable):
+ self._it = iter(iterable)
+ self._cache = deque()
+
+ def __iter__(self):
+ return self
+
+ def __bool__(self):
+ try:
+ self.peek()
+ except StopIteration:
+ return False
+ return True
+
+ def peek(self, default=_marker):
+ """Return the item that will be next returned from ``next()``.
+
+ Return ``default`` if there are no items left. If ``default`` is not
+ provided, raise ``StopIteration``.
+
+ """
+ if not self._cache:
+ try:
+ self._cache.append(next(self._it))
+ except StopIteration:
+ if default is _marker:
+ raise
+ return default
+ return self._cache[0]
+
+ def prepend(self, *items):
+ """Stack up items to be the next ones returned from ``next()`` or
+ ``self.peek()``. The items will be returned in
+ first in, first out order::
+
+ >>> p = peekable([1, 2, 3])
+ >>> p.prepend(10, 11, 12)
+ >>> next(p)
+ 10
+ >>> list(p)
+ [11, 12, 1, 2, 3]
+
+ It is possible, by prepending items, to "resurrect" a peekable that
+ previously raised ``StopIteration``.
+
+ >>> p = peekable([])
+ >>> next(p)
+ Traceback (most recent call last):
+ ...
+ StopIteration
+ >>> p.prepend(1)
+ >>> next(p)
+ 1
+ >>> next(p)
+ Traceback (most recent call last):
+ ...
+ StopIteration
+
+ """
+ self._cache.extendleft(reversed(items))
+
+ def __next__(self):
+ if self._cache:
+ return self._cache.popleft()
+
+ return next(self._it)
+
+ def _get_slice(self, index):
+ # Normalize the slice's arguments
+ step = 1 if (index.step is None) else index.step
+ if step > 0:
+ start = 0 if (index.start is None) else index.start
+ stop = maxsize if (index.stop is None) else index.stop
+ elif step < 0:
+ start = -1 if (index.start is None) else index.start
+ stop = (-maxsize - 1) if (index.stop is None) else index.stop
+ else:
+ raise ValueError('slice step cannot be zero')
+
+ # If either the start or stop index is negative, we'll need to cache
+ # the rest of the iterable in order to slice from the right side.
+ if (start < 0) or (stop < 0):
+ self._cache.extend(self._it)
+ # Otherwise we'll need to find the rightmost index and cache to that
+ # point.
+ else:
+ n = min(max(start, stop) + 1, maxsize)
+ cache_len = len(self._cache)
+ if n >= cache_len:
+ self._cache.extend(islice(self._it, n - cache_len))
+
+ return list(self._cache)[index]
+
+ def __getitem__(self, index):
+ if isinstance(index, slice):
+ return self._get_slice(index)
+
+ cache_len = len(self._cache)
+ if index < 0:
+ self._cache.extend(self._it)
+ elif index >= cache_len:
+ self._cache.extend(islice(self._it, index + 1 - cache_len))
+
+ return self._cache[index]
+
+
+def collate(*iterables, **kwargs):
+ """Return a sorted merge of the items from each of several already-sorted
+ *iterables*.
+
+ >>> list(collate('ACDZ', 'AZ', 'JKL'))
+ ['A', 'A', 'C', 'D', 'J', 'K', 'L', 'Z', 'Z']
+
+ Works lazily, keeping only the next value from each iterable in memory. Use
+ :func:`collate` to, for example, perform an n-way mergesort of items that
+ don't fit in memory.
+
+ If a *key* function is specified, the iterables will be sorted according
+ to its result:
+
+ >>> key = lambda s: int(s) # Sort by numeric value, not by string
+ >>> list(collate(['1', '10'], ['2', '11'], key=key))
+ ['1', '2', '10', '11']
+
+
+ If the *iterables* are sorted in descending order, set *reverse* to
+ ``True``:
+
+ >>> list(collate([5, 3, 1], [4, 2, 0], reverse=True))
+ [5, 4, 3, 2, 1, 0]
+
+ If the elements of the passed-in iterables are out of order, you might get
+ unexpected results.
+
+ On Python 3.5+, this function is an alias for :func:`heapq.merge`.
+
+ """
+ warnings.warn(
+ "collate is no longer part of more_itertools, use heapq.merge",
+ DeprecationWarning,
+ )
+ return merge(*iterables, **kwargs)
+
+
+def consumer(func):
+ """Decorator that automatically advances a PEP-342-style "reverse iterator"
+ to its first yield point so you don't have to call ``next()`` on it
+ manually.
+
+ >>> @consumer
+ ... def tally():
+ ... i = 0
+ ... while True:
+ ... print('Thing number %s is %s.' % (i, (yield)))
+ ... i += 1
+ ...
+ >>> t = tally()
+ >>> t.send('red')
+ Thing number 0 is red.
+ >>> t.send('fish')
+ Thing number 1 is fish.
+
+ Without the decorator, you would have to call ``next(t)`` before
+ ``t.send()`` could be used.
+
+ """
+
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ gen = func(*args, **kwargs)
+ next(gen)
+ return gen
+
+ return wrapper
+
+
+def ilen(iterable):
+ """Return the number of items in *iterable*.
+
+ >>> ilen(x for x in range(1000000) if x % 3 == 0)
+ 333334
+
+ This consumes the iterable, so handle with care.
+
+ """
+ # This approach was selected because benchmarks showed it's likely the
+ # fastest of the known implementations at the time of writing.
+ # See GitHub tracker: #236, #230.
+ counter = count()
+ deque(zip(iterable, counter), maxlen=0)
+ return next(counter)
+
+
+def iterate(func, start):
+ """Return ``start``, ``func(start)``, ``func(func(start))``, ...
+
+ >>> from itertools import islice
+ >>> list(islice(iterate(lambda x: 2*x, 1), 10))
+ [1, 2, 4, 8, 16, 32, 64, 128, 256, 512]
+
+ """
+ while True:
+ yield start
+ start = func(start)
+
+
+def with_iter(context_manager):
+ """Wrap an iterable in a ``with`` statement, so it closes once exhausted.
+
+ For example, this will close the file when the iterator is exhausted::
+
+ upper_lines = (line.upper() for line in with_iter(open('foo')))
+
+ Any context manager which returns an iterable is a candidate for
+ ``with_iter``.
+
+ """
+ with context_manager as iterable:
+ yield from iterable
+
+
+def one(iterable, too_short=None, too_long=None):
+ """Return the first item from *iterable*, which is expected to contain only
+ that item. Raise an exception if *iterable* is empty or has more than one
+ item.
+
+ :func:`one` is useful for ensuring that an iterable contains only one item.
+ For example, it can be used to retrieve the result of a database query
+ that is expected to return a single row.
+
+ If *iterable* is empty, ``ValueError`` will be raised. You may specify a
+ different exception with the *too_short* keyword:
+
+ >>> it = []
+ >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ ValueError: too few items in iterable (expected 1)
+ >>> too_short = IndexError('too few items')
+ >>> one(it, too_short=too_short) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ IndexError: too few items
+
+ Similarly, if *iterable* contains more than one item, ``ValueError`` will
+ be raised. You may specify a different exception with the *too_long*
+ keyword:
+
+ >>> it = ['too', 'many']
+ >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ ValueError: Expected exactly one item in iterable, but got 'too',
+ 'many', and perhaps more.
+ >>> too_long = RuntimeError
+ >>> one(it, too_long=too_long) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ RuntimeError
+
+ Note that :func:`one` attempts to advance *iterable* twice to ensure there
+ is only one item. See :func:`spy` or :func:`peekable` to check iterable
+ contents less destructively.
+
+ """
+ it = iter(iterable)
+
+ try:
+ first_value = next(it)
+ except StopIteration as e:
+ raise (
+ too_short or ValueError('too few items in iterable (expected 1)')
+ ) from e
+
+ try:
+ second_value = next(it)
+ except StopIteration:
+ pass
+ else:
+ msg = (
+ 'Expected exactly one item in iterable, but got {!r}, {!r}, '
+ 'and perhaps more.'.format(first_value, second_value)
+ )
+ raise too_long or ValueError(msg)
+
+ return first_value
+
+
+def distinct_permutations(iterable, r=None):
+ """Yield successive distinct permutations of the elements in *iterable*.
+
+ >>> sorted(distinct_permutations([1, 0, 1]))
+ [(0, 1, 1), (1, 0, 1), (1, 1, 0)]
+
+ Equivalent to ``set(permutations(iterable))``, except duplicates are not
+ generated and thrown away. For larger input sequences this is much more
+ efficient.
+
+ Duplicate permutations arise when there are duplicated elements in the
+ input iterable. The number of items returned is
+ `n! / (x_1! * x_2! * ... * x_n!)`, where `n` is the total number of
+ items input, and each `x_i` is the count of a distinct item in the input
+ sequence.
+
+ If *r* is given, only the *r*-length permutations are yielded.
+
+ >>> sorted(distinct_permutations([1, 0, 1], r=2))
+ [(0, 1), (1, 0), (1, 1)]
+ >>> sorted(distinct_permutations(range(3), r=2))
+ [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
+
+ """
+ # Algorithm: https://w.wiki/Qai
+ def _full(A):
+ while True:
+ # Yield the permutation we have
+ yield tuple(A)
+
+ # Find the largest index i such that A[i] < A[i + 1]
+ for i in range(size - 2, -1, -1):
+ if A[i] < A[i + 1]:
+ break
+ # If no such index exists, this permutation is the last one
+ else:
+ return
+
+ # Find the largest index j greater than i such that A[i] < A[j]
+ for j in range(size - 1, i, -1):
+ if A[i] < A[j]:
+ break
+
+ # Swap the value of A[i] with that of A[j], then reverse the
+ # sequence from A[i + 1] to form the new permutation
+ A[i], A[j] = A[j], A[i]
+ A[i + 1 :] = A[: i - size : -1] # A[i + 1:][::-1]
+
+ # Algorithm: modified from the above
+ def _partial(A, r):
+ # Split A into the first r items and the last r items
+ head, tail = A[:r], A[r:]
+ right_head_indexes = range(r - 1, -1, -1)
+ left_tail_indexes = range(len(tail))
+
+ while True:
+ # Yield the permutation we have
+ yield tuple(head)
+
+ # Starting from the right, find the first index of the head with
+ # value smaller than the maximum value of the tail - call it i.
+ pivot = tail[-1]
+ for i in right_head_indexes:
+ if head[i] < pivot:
+ break
+ pivot = head[i]
+ else:
+ return
+
+ # Starting from the left, find the first value of the tail
+ # with a value greater than head[i] and swap.
+ for j in left_tail_indexes:
+ if tail[j] > head[i]:
+ head[i], tail[j] = tail[j], head[i]
+ break
+ # If we didn't find one, start from the right and find the first
+ # index of the head with a value greater than head[i] and swap.
+ else:
+ for j in right_head_indexes:
+ if head[j] > head[i]:
+ head[i], head[j] = head[j], head[i]
+ break
+
+ # Reverse head[i + 1:] and swap it with tail[:r - (i + 1)]
+ tail += head[: i - r : -1] # head[i + 1:][::-1]
+ i += 1
+ head[i:], tail[:] = tail[: r - i], tail[r - i :]
+
+ items = sorted(iterable)
+
+ size = len(items)
+ if r is None:
+ r = size
+
+ if 0 < r <= size:
+ return _full(items) if (r == size) else _partial(items, r)
+
+ return iter(() if r else ((),))
+
+
+def intersperse(e, iterable, n=1):
+ """Intersperse filler element *e* among the items in *iterable*, leaving
+ *n* items between each filler element.
+
+ >>> list(intersperse('!', [1, 2, 3, 4, 5]))
+ [1, '!', 2, '!', 3, '!', 4, '!', 5]
+
+ >>> list(intersperse(None, [1, 2, 3, 4, 5], n=2))
+ [1, 2, None, 3, 4, None, 5]
+
+ """
+ if n == 0:
+ raise ValueError('n must be > 0')
+ elif n == 1:
+ # interleave(repeat(e), iterable) -> e, x_0, e, e, x_1, e, x_2...
+ # islice(..., 1, None) -> x_0, e, e, x_1, e, x_2...
+ return islice(interleave(repeat(e), iterable), 1, None)
+ else:
+ # interleave(filler, chunks) -> [e], [x_0, x_1], [e], [x_2, x_3]...
+ # islice(..., 1, None) -> [x_0, x_1], [e], [x_2, x_3]...
+ # flatten(...) -> x_0, x_1, e, x_2, x_3...
+ filler = repeat([e])
+ chunks = chunked(iterable, n)
+ return flatten(islice(interleave(filler, chunks), 1, None))
+
+
+def unique_to_each(*iterables):
+ """Return the elements from each of the input iterables that aren't in the
+ other input iterables.
+
+ For example, suppose you have a set of packages, each with a set of
+ dependencies::
+
+ {'pkg_1': {'A', 'B'}, 'pkg_2': {'B', 'C'}, 'pkg_3': {'B', 'D'}}
+
+ If you remove one package, which dependencies can also be removed?
+
+ If ``pkg_1`` is removed, then ``A`` is no longer necessary - it is not
+ associated with ``pkg_2`` or ``pkg_3``. Similarly, ``C`` is only needed for
+ ``pkg_2``, and ``D`` is only needed for ``pkg_3``::
+
+ >>> unique_to_each({'A', 'B'}, {'B', 'C'}, {'B', 'D'})
+ [['A'], ['C'], ['D']]
+
+ If there are duplicates in one input iterable that aren't in the others
+ they will be duplicated in the output. Input order is preserved::
+
+ >>> unique_to_each("mississippi", "missouri")
+ [['p', 'p'], ['o', 'u', 'r']]
+
+ It is assumed that the elements of each iterable are hashable.
+
+ """
+ pool = [list(it) for it in iterables]
+ counts = Counter(chain.from_iterable(map(set, pool)))
+ uniques = {element for element in counts if counts[element] == 1}
+ return [list(filter(uniques.__contains__, it)) for it in pool]
+
+
+def windowed(seq, n, fillvalue=None, step=1):
+ """Return a sliding window of width *n* over the given iterable.
+
+ >>> all_windows = windowed([1, 2, 3, 4, 5], 3)
+ >>> list(all_windows)
+ [(1, 2, 3), (2, 3, 4), (3, 4, 5)]
+
+ When the window is larger than the iterable, *fillvalue* is used in place
+ of missing values:
+
+ >>> list(windowed([1, 2, 3], 4))
+ [(1, 2, 3, None)]
+
+ Each window will advance in increments of *step*:
+
+ >>> list(windowed([1, 2, 3, 4, 5, 6], 3, fillvalue='!', step=2))
+ [(1, 2, 3), (3, 4, 5), (5, 6, '!')]
+
+ To slide into the iterable's items, use :func:`chain` to add filler items
+ to the left:
+
+ >>> iterable = [1, 2, 3, 4]
+ >>> n = 3
+ >>> padding = [None] * (n - 1)
+ >>> list(windowed(chain(padding, iterable), 3))
+ [(None, None, 1), (None, 1, 2), (1, 2, 3), (2, 3, 4)]
+ """
+ if n < 0:
+ raise ValueError('n must be >= 0')
+ if n == 0:
+ yield tuple()
+ return
+ if step < 1:
+ raise ValueError('step must be >= 1')
+
+ window = deque(maxlen=n)
+ i = n
+ for _ in map(window.append, seq):
+ i -= 1
+ if not i:
+ i = step
+ yield tuple(window)
+
+ size = len(window)
+ if size < n:
+ yield tuple(chain(window, repeat(fillvalue, n - size)))
+ elif 0 < i < min(step, n):
+ window += (fillvalue,) * i
+ yield tuple(window)
+
+
+def substrings(iterable):
+ """Yield all of the substrings of *iterable*.
+
+ >>> [''.join(s) for s in substrings('more')]
+ ['m', 'o', 'r', 'e', 'mo', 'or', 're', 'mor', 'ore', 'more']
+
+ Note that non-string iterables can also be subdivided.
+
+ >>> list(substrings([0, 1, 2]))
+ [(0,), (1,), (2,), (0, 1), (1, 2), (0, 1, 2)]
+
+ """
+ # The length-1 substrings
+ seq = []
+ for item in iter(iterable):
+ seq.append(item)
+ yield (item,)
+ seq = tuple(seq)
+ item_count = len(seq)
+
+ # And the rest
+ for n in range(2, item_count + 1):
+ for i in range(item_count - n + 1):
+ yield seq[i : i + n]
+
+
+def substrings_indexes(seq, reverse=False):
+ """Yield all substrings and their positions in *seq*
+
+ The items yielded will be a tuple of the form ``(substr, i, j)``, where
+ ``substr == seq[i:j]``.
+
+ This function only works for iterables that support slicing, such as
+ ``str`` objects.
+
+ >>> for item in substrings_indexes('more'):
+ ... print(item)
+ ('m', 0, 1)
+ ('o', 1, 2)
+ ('r', 2, 3)
+ ('e', 3, 4)
+ ('mo', 0, 2)
+ ('or', 1, 3)
+ ('re', 2, 4)
+ ('mor', 0, 3)
+ ('ore', 1, 4)
+ ('more', 0, 4)
+
+ Set *reverse* to ``True`` to yield the same items in the opposite order.
+
+
+ """
+ r = range(1, len(seq) + 1)
+ if reverse:
+ r = reversed(r)
+ return (
+ (seq[i : i + L], i, i + L) for L in r for i in range(len(seq) - L + 1)
+ )
+
+
+class bucket:
+ """Wrap *iterable* and return an object that buckets it iterable into
+ child iterables based on a *key* function.
+
+ >>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3']
+ >>> s = bucket(iterable, key=lambda x: x[0]) # Bucket by 1st character
+ >>> sorted(list(s)) # Get the keys
+ ['a', 'b', 'c']
+ >>> a_iterable = s['a']
+ >>> next(a_iterable)
+ 'a1'
+ >>> next(a_iterable)
+ 'a2'
+ >>> list(s['b'])
+ ['b1', 'b2', 'b3']
+
+ The original iterable will be advanced and its items will be cached until
+ they are used by the child iterables. This may require significant storage.
+
+ By default, attempting to select a bucket to which no items belong will
+ exhaust the iterable and cache all values.
+ If you specify a *validator* function, selected buckets will instead be
+ checked against it.
+
+ >>> from itertools import count
+ >>> it = count(1, 2) # Infinite sequence of odd numbers
+ >>> key = lambda x: x % 10 # Bucket by last digit
+ >>> validator = lambda x: x in {1, 3, 5, 7, 9} # Odd digits only
+ >>> s = bucket(it, key=key, validator=validator)
+ >>> 2 in s
+ False
+ >>> list(s[2])
+ []
+
+ """
+
+ def __init__(self, iterable, key, validator=None):
+ self._it = iter(iterable)
+ self._key = key
+ self._cache = defaultdict(deque)
+ self._validator = validator or (lambda x: True)
+
+ def __contains__(self, value):
+ if not self._validator(value):
+ return False
+
+ try:
+ item = next(self[value])
+ except StopIteration:
+ return False
+ else:
+ self._cache[value].appendleft(item)
+
+ return True
+
+ def _get_values(self, value):
+ """
+ Helper to yield items from the parent iterator that match *value*.
+ Items that don't match are stored in the local cache as they
+ are encountered.
+ """
+ while True:
+ # If we've cached some items that match the target value, emit
+ # the first one and evict it from the cache.
+ if self._cache[value]:
+ yield self._cache[value].popleft()
+ # Otherwise we need to advance the parent iterator to search for
+ # a matching item, caching the rest.
+ else:
+ while True:
+ try:
+ item = next(self._it)
+ except StopIteration:
+ return
+ item_value = self._key(item)
+ if item_value == value:
+ yield item
+ break
+ elif self._validator(item_value):
+ self._cache[item_value].append(item)
+
+ def __iter__(self):
+ for item in self._it:
+ item_value = self._key(item)
+ if self._validator(item_value):
+ self._cache[item_value].append(item)
+
+ yield from self._cache.keys()
+
+ def __getitem__(self, value):
+ if not self._validator(value):
+ return iter(())
+
+ return self._get_values(value)
+
+
+def spy(iterable, n=1):
+ """Return a 2-tuple with a list containing the first *n* elements of
+ *iterable*, and an iterator with the same items as *iterable*.
+ This allows you to "look ahead" at the items in the iterable without
+ advancing it.
+
+ There is one item in the list by default:
+
+ >>> iterable = 'abcdefg'
+ >>> head, iterable = spy(iterable)
+ >>> head
+ ['a']
+ >>> list(iterable)
+ ['a', 'b', 'c', 'd', 'e', 'f', 'g']
+
+ You may use unpacking to retrieve items instead of lists:
+
+ >>> (head,), iterable = spy('abcdefg')
+ >>> head
+ 'a'
+ >>> (first, second), iterable = spy('abcdefg', 2)
+ >>> first
+ 'a'
+ >>> second
+ 'b'
+
+ The number of items requested can be larger than the number of items in
+ the iterable:
+
+ >>> iterable = [1, 2, 3, 4, 5]
+ >>> head, iterable = spy(iterable, 10)
+ >>> head
+ [1, 2, 3, 4, 5]
+ >>> list(iterable)
+ [1, 2, 3, 4, 5]
+
+ """
+ it = iter(iterable)
+ head = take(n, it)
+
+ return head.copy(), chain(head, it)
+
+
+def interleave(*iterables):
+ """Return a new iterable yielding from each iterable in turn,
+ until the shortest is exhausted.
+
+ >>> list(interleave([1, 2, 3], [4, 5], [6, 7, 8]))
+ [1, 4, 6, 2, 5, 7]
+
+ For a version that doesn't terminate after the shortest iterable is
+ exhausted, see :func:`interleave_longest`.
+
+ """
+ return chain.from_iterable(zip(*iterables))
+
+
+def interleave_longest(*iterables):
+ """Return a new iterable yielding from each iterable in turn,
+ skipping any that are exhausted.
+
+ >>> list(interleave_longest([1, 2, 3], [4, 5], [6, 7, 8]))
+ [1, 4, 6, 2, 5, 7, 3, 8]
+
+ This function produces the same output as :func:`roundrobin`, but may
+ perform better for some inputs (in particular when the number of iterables
+ is large).
+
+ """
+ i = chain.from_iterable(zip_longest(*iterables, fillvalue=_marker))
+ return (x for x in i if x is not _marker)
+
+
+def collapse(iterable, base_type=None, levels=None):
+ """Flatten an iterable with multiple levels of nesting (e.g., a list of
+ lists of tuples) into non-iterable types.
+
+ >>> iterable = [(1, 2), ([3, 4], [[5], [6]])]
+ >>> list(collapse(iterable))
+ [1, 2, 3, 4, 5, 6]
+
+ Binary and text strings are not considered iterable and
+ will not be collapsed.
+
+ To avoid collapsing other types, specify *base_type*:
+
+ >>> iterable = ['ab', ('cd', 'ef'), ['gh', 'ij']]
+ >>> list(collapse(iterable, base_type=tuple))
+ ['ab', ('cd', 'ef'), 'gh', 'ij']
+
+ Specify *levels* to stop flattening after a certain level:
+
+ >>> iterable = [('a', ['b']), ('c', ['d'])]
+ >>> list(collapse(iterable)) # Fully flattened
+ ['a', 'b', 'c', 'd']
+ >>> list(collapse(iterable, levels=1)) # Only one level flattened
+ ['a', ['b'], 'c', ['d']]
+
+ """
+
+ def walk(node, level):
+ if (
+ ((levels is not None) and (level > levels))
+ or isinstance(node, (str, bytes))
+ or ((base_type is not None) and isinstance(node, base_type))
+ ):
+ yield node
+ return
+
+ try:
+ tree = iter(node)
+ except TypeError:
+ yield node
+ return
+ else:
+ for child in tree:
+ yield from walk(child, level + 1)
+
+ yield from walk(iterable, 0)
+
+
+def side_effect(func, iterable, chunk_size=None, before=None, after=None):
+ """Invoke *func* on each item in *iterable* (or on each *chunk_size* group
+ of items) before yielding the item.
+
+ `func` must be a function that takes a single argument. Its return value
+ will be discarded.
+
+ *before* and *after* are optional functions that take no arguments. They
+ will be executed before iteration starts and after it ends, respectively.
+
+ `side_effect` can be used for logging, updating progress bars, or anything
+ that is not functionally "pure."
+
+ Emitting a status message:
+
+ >>> from more_itertools import consume
+ >>> func = lambda item: print('Received {}'.format(item))
+ >>> consume(side_effect(func, range(2)))
+ Received 0
+ Received 1
+
+ Operating on chunks of items:
+
+ >>> pair_sums = []
+ >>> func = lambda chunk: pair_sums.append(sum(chunk))
+ >>> list(side_effect(func, [0, 1, 2, 3, 4, 5], 2))
+ [0, 1, 2, 3, 4, 5]
+ >>> list(pair_sums)
+ [1, 5, 9]
+
+ Writing to a file-like object:
+
+ >>> from io import StringIO
+ >>> from more_itertools import consume
+ >>> f = StringIO()
+ >>> func = lambda x: print(x, file=f)
+ >>> before = lambda: print(u'HEADER', file=f)
+ >>> after = f.close
+ >>> it = [u'a', u'b', u'c']
+ >>> consume(side_effect(func, it, before=before, after=after))
+ >>> f.closed
+ True
+
+ """
+ try:
+ if before is not None:
+ before()
+
+ if chunk_size is None:
+ for item in iterable:
+ func(item)
+ yield item
+ else:
+ for chunk in chunked(iterable, chunk_size):
+ func(chunk)
+ yield from chunk
+ finally:
+ if after is not None:
+ after()
+
+
+def sliced(seq, n, strict=False):
+ """Yield slices of length *n* from the sequence *seq*.
+
+ >>> list(sliced((1, 2, 3, 4, 5, 6), 3))
+ [(1, 2, 3), (4, 5, 6)]
+
+ By default, the last yielded slice will have fewer than *n* elements
+ if the length of *seq* is not divisible by *n*:
+
+ >>> list(sliced((1, 2, 3, 4, 5, 6, 7, 8), 3))
+ [(1, 2, 3), (4, 5, 6), (7, 8)]
+
+ If the length of *seq* is not divisible by *n* and *strict* is
+ ``True``, then ``ValueError`` will be raised before the last
+ slice is yielded.
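+
+ For example:
+
+ >>> list(sliced((1, 2, 3, 4), 3, strict=True))
+ Traceback (most recent call last):
+ ...
+ ValueError: seq is not divisible by n.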
+
+ This function will only work for iterables that support slicing.
+ For non-sliceable iterables, see :func:`chunked`.
+
+ """
+ iterator = takewhile(len, (seq[i : i + n] for i in count(0, n)))
+ if strict:
+
+ def ret():
+ for _slice in iterator:
+ if len(_slice) != n:
+ raise ValueError("seq is not divisible by n.")
+ yield _slice
+
+ return iter(ret())
+ else:
+ return iterator
+
+
+def split_at(iterable, pred, maxsplit=-1, keep_separator=False):
+ """Yield lists of items from *iterable*, where each list is delimited by
+ an item where callable *pred* returns ``True``.
+
+ >>> list(split_at('abcdcba', lambda x: x == 'b'))
+ [['a'], ['c', 'd', 'c'], ['a']]
+
+ >>> list(split_at(range(10), lambda n: n % 2 == 1))
+ [[0], [2], [4], [6], [8], []]
+
+ At most *maxsplit* splits are done. If *maxsplit* is not specified or -1,
+ then there is no limit on the number of splits:
+
+ >>> list(split_at(range(10), lambda n: n % 2 == 1, maxsplit=2))
+ [[0], [2], [4, 5, 6, 7, 8, 9]]
+
+ By default, the delimiting items are not included in the output.
+ To include them, set *keep_separator* to ``True``.
+
+ >>> list(split_at('abcdcba', lambda x: x == 'b', keep_separator=True))
+ [['a'], ['b'], ['c', 'd', 'c'], ['b'], ['a']]
+
+ """
+ if maxsplit == 0:
+ yield list(iterable)
+ return
+
+ buf = []
+ it = iter(iterable)
+ for item in it:
+ if pred(item):
+ yield buf
+ if keep_separator:
+ yield [item]
+ if maxsplit == 1:
+ yield list(it)
+ return
+ buf = []
+ maxsplit -= 1
+ else:
+ buf.append(item)
+ yield buf
+
+
+def split_before(iterable, pred, maxsplit=-1):
+ """Yield lists of items from *iterable*, where each list ends just before
+ an item for which callable *pred* returns ``True``:
+
+ >>> list(split_before('OneTwo', lambda s: s.isupper()))
+ [['O', 'n', 'e'], ['T', 'w', 'o']]
+
+ >>> list(split_before(range(10), lambda n: n % 3 == 0))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]
+
+ At most *maxsplit* splits are done. If *maxsplit* is not specified or -1,
+ then there is no limit on the number of splits:
+
+ >>> list(split_before(range(10), lambda n: n % 3 == 0, maxsplit=2))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8, 9]]
+ """
+ if maxsplit == 0:
+ yield list(iterable)
+ return
+
+ buf = []
+ it = iter(iterable)
+ for item in it:
+ if pred(item) and buf:
+ yield buf
+ if maxsplit == 1:
+ yield [item] + list(it)
+ return
+ buf = []
+ maxsplit -= 1
+ buf.append(item)
+ if buf:
+ yield buf
+
+
+def split_after(iterable, pred, maxsplit=-1):
+ """Yield lists of items from *iterable*, where each list ends with an
+ item where callable *pred* returns ``True``:
+
+ >>> list(split_after('one1two2', lambda s: s.isdigit()))
+ [['o', 'n', 'e', '1'], ['t', 'w', 'o', '2']]
+
+ >>> list(split_after(range(10), lambda n: n % 3 == 0))
+ [[0], [1, 2, 3], [4, 5, 6], [7, 8, 9]]
+
+ At most *maxsplit* splits are done. If *maxsplit* is not specified or -1,
+ then there is no limit on the number of splits:
+
+ >>> list(split_after(range(10), lambda n: n % 3 == 0, maxsplit=2))
+ [[0], [1, 2, 3], [4, 5, 6, 7, 8, 9]]
+
+ """
+ if maxsplit == 0:
+ yield list(iterable)
+ return
+
+ buf = []
+ it = iter(iterable)
+ for item in it:
+ buf.append(item)
+ if pred(item) and buf:
+ yield buf
+ if maxsplit == 1:
+ yield list(it)
+ return
+ buf = []
+ maxsplit -= 1
+ if buf:
+ yield buf
+
+
+def split_when(iterable, pred, maxsplit=-1):
+ """Split *iterable* into pieces based on the output of *pred*.
+ *pred* should be a function that takes successive pairs of items and
+ returns ``True`` if the iterable should be split in between them.
+
+ For example, to find runs of increasing numbers, split the iterable when
+ element ``i`` is larger than element ``i + 1``:
+
+ >>> list(split_when([1, 2, 3, 3, 2, 5, 2, 4, 2], lambda x, y: x > y))
+ [[1, 2, 3, 3], [2, 5], [2, 4], [2]]
+
+ At most *maxsplit* splits are done. If *maxsplit* is not specified or -1,
+ then there is no limit on the number of splits:
+
+ >>> list(split_when([1, 2, 3, 3, 2, 5, 2, 4, 2],
+ ... lambda x, y: x > y, maxsplit=2))
+ [[1, 2, 3, 3], [2, 5], [2, 4, 2]]
+
+ """
+ if maxsplit == 0:
+ yield list(iterable)
+ return
+
+ it = iter(iterable)
+ try:
+ cur_item = next(it)
+ except StopIteration:
+ return
+
+ buf = [cur_item]
+ for next_item in it:
+ if pred(cur_item, next_item):
+ yield buf
+ if maxsplit == 1:
+ yield [next_item] + list(it)
+ return
+ buf = []
+ maxsplit -= 1
+
+ buf.append(next_item)
+ cur_item = next_item
+
+ yield buf
+
+
+def split_into(iterable, sizes):
+ """Yield a list of sequential items from *iterable* of length 'n' for each
+ integer 'n' in *sizes*.
+
+ >>> list(split_into([1,2,3,4,5,6], [1,2,3]))
+ [[1], [2, 3], [4, 5, 6]]
+
+ If the sum of *sizes* is smaller than the length of *iterable*, then the
+ remaining items of *iterable* will not be returned.
+
+ >>> list(split_into([1,2,3,4,5,6], [2,3]))
+ [[1, 2], [3, 4, 5]]
+
+ If the sum of *sizes* is larger than the length of *iterable*, fewer items
+ will be returned in the iteration that overruns *iterable* and further
+ lists will be empty:
+
+ >>> list(split_into([1,2,3,4], [1,2,3,4]))
+ [[1], [2, 3], [4], []]
+
+ When a ``None`` object is encountered in *sizes*, the returned list will
+ contain items up to the end of *iterable* the same way that itertools.islice
+ does:
+
+ >>> list(split_into([1,2,3,4,5,6,7,8,9,0], [2,3,None]))
+ [[1, 2], [3, 4, 5], [6, 7, 8, 9, 0]]
+
+ :func:`split_into` can be useful for grouping a series of items where the
+ sizes of the groups are not uniform. An example would be where in a row
+ from a table, multiple columns represent elements of the same feature
+ (e.g. a point represented by x,y,z) but the format is not the same for
+ all columns.
+ """
+ # convert the iterable argument into an iterator so its contents can
+ # be consumed by islice in case it is a generator
+ it = iter(iterable)
+
+ for size in sizes:
+ if size is None:
+ yield list(it)
+ return
+ else:
+ yield list(islice(it, size))
+
+
+def padded(iterable, fillvalue=None, n=None, next_multiple=False):
+ """Yield the elements from *iterable*, followed by *fillvalue*, such that
+ at least *n* items are emitted.
+
+ >>> list(padded([1, 2, 3], '?', 5))
+ [1, 2, 3, '?', '?']
+
+ If *next_multiple* is ``True``, *fillvalue* will be emitted until the
+ number of items emitted is a multiple of *n*::
+
+ >>> list(padded([1, 2, 3, 4], n=3, next_multiple=True))
+ [1, 2, 3, 4, None, None]
+
+ If *n* is ``None``, *fillvalue* will be emitted indefinitely.
+
+ """
+ it = iter(iterable)
+ if n is None:
+ yield from chain(it, repeat(fillvalue))
+ elif n < 1:
+ raise ValueError('n must be at least 1')
+ else:
+ item_count = 0
+ for item in it:
+ yield item
+ item_count += 1
+
+ remaining = (n - item_count) % n if next_multiple else n - item_count
+ for _ in range(remaining):
+ yield fillvalue
+
+
+def repeat_last(iterable, default=None):
+ """After the *iterable* is exhausted, keep yielding its last element.
+
+ >>> list(islice(repeat_last(range(3)), 5))
+ [0, 1, 2, 2, 2]
+
+ If the iterable is empty, yield *default* forever::
+
+ >>> list(islice(repeat_last(range(0), 42), 5))
+ [42, 42, 42, 42, 42]
+
+ """
+ item = _marker
+ for item in iterable:
+ yield item
+ final = default if item is _marker else item
+ yield from repeat(final)
+
+
+def distribute(n, iterable):
+ """Distribute the items from *iterable* among *n* smaller iterables.
+
+ >>> group_1, group_2 = distribute(2, [1, 2, 3, 4, 5, 6])
+ >>> list(group_1)
+ [1, 3, 5]
+ >>> list(group_2)
+ [2, 4, 6]
+
+ If the length of *iterable* is not evenly divisible by *n*, then the
+ length of the returned iterables will not be identical:
+
+ >>> children = distribute(3, [1, 2, 3, 4, 5, 6, 7])
+ >>> [list(c) for c in children]
+ [[1, 4, 7], [2, 5], [3, 6]]
+
+ If the length of *iterable* is smaller than *n*, then the last returned
+ iterables will be empty:
+
+ >>> children = distribute(5, [1, 2, 3])
+ >>> [list(c) for c in children]
+ [[1], [2], [3], [], []]
+
+ This function uses :func:`itertools.tee` and may require significant
+ storage. If you need the order of items in the smaller iterables to match
+ the original iterable, see :func:`divide`.
+
+ """
+ if n < 1:
+ raise ValueError('n must be at least 1')
+
+ children = tee(iterable, n)
+ return [islice(it, index, None, n) for index, it in enumerate(children)]
+
+
+def stagger(iterable, offsets=(-1, 0, 1), longest=False, fillvalue=None):
+ """Yield tuples whose elements are offset from *iterable*.
+ The amount by which the `i`-th item in each tuple is offset is given by
+ the `i`-th item in *offsets*.
+
+ >>> list(stagger([0, 1, 2, 3]))
+ [(None, 0, 1), (0, 1, 2), (1, 2, 3)]
+ >>> list(stagger(range(8), offsets=(0, 2, 4)))
+ [(0, 2, 4), (1, 3, 5), (2, 4, 6), (3, 5, 7)]
+
+ By default, the sequence will end when the final element of a tuple is the
+ last item in the iterable. To continue until the first element of a tuple
+ is the last item in the iterable, set *longest* to ``True``::
+
+ >>> list(stagger([0, 1, 2, 3], longest=True))
+ [(None, 0, 1), (0, 1, 2), (1, 2, 3), (2, 3, None), (3, None, None)]
+
+ By default, ``None`` will be used to replace offsets beyond the end of the
+ sequence. Specify *fillvalue* to use some other value.
+
+ """
+ children = tee(iterable, len(offsets))
+
+ return zip_offset(
+ *children, offsets=offsets, longest=longest, fillvalue=fillvalue
+ )
+
+
+class UnequalIterablesError(ValueError):
+ def __init__(self, details=None):
+ msg = 'Iterables have different lengths'
+ if details is not None:
+ msg += (': index 0 has length {}; index {} has length {}').format(
+ *details
+ )
+
+ super().__init__(msg)
+
+
+def _zip_equal_generator(iterables):
+ for combo in zip_longest(*iterables, fillvalue=_marker):
+ for val in combo:
+ if val is _marker:
+ raise UnequalIterablesError()
+ yield combo
+
+
+def zip_equal(*iterables):
+ """``zip`` the input *iterables* together, but raise
+ ``UnequalIterablesError`` if they aren't all the same length.
+
+ >>> it_1 = range(3)
+ >>> it_2 = iter('abc')
+ >>> list(zip_equal(it_1, it_2))
+ [(0, 'a'), (1, 'b'), (2, 'c')]
+
+ >>> it_1 = range(3)
+ >>> it_2 = iter('abcd')
+ >>> list(zip_equal(it_1, it_2)) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ more_itertools.more.UnequalIterablesError: Iterables have different
+ lengths
+
+ """
+ if hexversion >= 0x30A00A6:
+ warnings.warn(
+ (
+ 'zip_equal will be removed in a future version of '
+ 'more-itertools. Use the builtin zip function with '
+ 'strict=True instead.'
+ ),
+ DeprecationWarning,
+ )
+ # Check whether the iterables are all the same size.
+ try:
+ first_size = len(iterables[0])
+ for i, it in enumerate(iterables[1:], 1):
+ size = len(it)
+ if size != first_size:
+ break
+ else:
+ # If we didn't break out, we can use the built-in zip.
+ return zip(*iterables)
+
+ # If we did break out, there was a mismatch.
+ raise UnequalIterablesError(details=(first_size, i, size))
+ # If any one of the iterables didn't have a length, start reading
+ # them until one runs out.
+ except TypeError:
+ return _zip_equal_generator(iterables)
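+
+
+ # Editor's note: an illustrative sketch, not part of the upstream module.
+ # As the deprecation warning above suggests, on Python 3.10+ the built-in
+ # zip() accepts strict=True and raises ValueError on a length mismatch,
+ # making it a replacement for zip_equal().
+ def _zip_strict_example():
+ try:
+ return list(zip(range(3), 'abcd', strict=True))
+ except ValueError:
+ return 'length mismatch detected'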
+
+
+def zip_offset(*iterables, offsets, longest=False, fillvalue=None):
+ """``zip`` the input *iterables* together, but offset the `i`-th iterable
+ by the `i`-th item in *offsets*.
+
+ >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1)))
+ [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e')]
+
+ This can be used as a lightweight alternative to SciPy or pandas to analyze
+ data sets in which some series have a lead or lag relationship.
+
+ By default, the sequence will end when the shortest iterable is exhausted.
+ To continue until the longest iterable is exhausted, set *longest* to
+ ``True``.
+
+ >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1), longest=True))
+ [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e'), (None, 'f')]
+
+ By default, ``None`` will be used to replace offsets beyond the end of the
+ sequence. Specify *fillvalue* to use some other value.
+
+ """
+ if len(iterables) != len(offsets):
+ raise ValueError("Number of iterables and offsets didn't match")
+
+ staggered = []
+ for it, n in zip(iterables, offsets):
+ if n < 0:
+ staggered.append(chain(repeat(fillvalue, -n), it))
+ elif n > 0:
+ staggered.append(islice(it, n, None))
+ else:
+ staggered.append(it)
+
+ if longest:
+ return zip_longest(*staggered, fillvalue=fillvalue)
+
+ return zip(*staggered)
+
+
+def sort_together(iterables, key_list=(0,), key=None, reverse=False):
+ """Return the input iterables sorted together, with *key_list* as the
+ priority for sorting. All iterables are trimmed to the length of the
+ shortest one.
+
+ This can be used like the sorting function in a spreadsheet. If each
+ iterable represents a column of data, the key list determines which
+ columns are used for sorting.
+
+ By default, all iterables are sorted using the ``0``-th iterable::
+
+ >>> iterables = [(4, 3, 2, 1), ('a', 'b', 'c', 'd')]
+ >>> sort_together(iterables)
+ [(1, 2, 3, 4), ('d', 'c', 'b', 'a')]
+
+ Set a different key list to sort according to another iterable.
+ Specifying multiple keys dictates how ties are broken::
+
+ >>> iterables = [(3, 1, 2), (0, 1, 0), ('c', 'b', 'a')]
+ >>> sort_together(iterables, key_list=(1, 2))
+ [(2, 3, 1), (0, 0, 1), ('a', 'c', 'b')]
+
+ To sort by a function of the elements of the iterable, pass a *key*
+ function. Its arguments are the elements of the iterables corresponding to
+ the key list::
+
+ >>> names = ('a', 'b', 'c')
+ >>> lengths = (1, 2, 3)
+ >>> widths = (5, 2, 1)
+ >>> def area(length, width):
+ ... return length * width
+ >>> sort_together([names, lengths, widths], key_list=(1, 2), key=area)
+ [('c', 'b', 'a'), (3, 2, 1), (1, 2, 5)]
+
+ Set *reverse* to ``True`` to sort in descending order.
+
+ >>> sort_together([(1, 2, 3), ('c', 'b', 'a')], reverse=True)
+ [(3, 2, 1), ('a', 'b', 'c')]
+
+ """
+ if key is None:
+ # if there is no key function, the key argument to sorted is an
+ # itemgetter
+ key_argument = itemgetter(*key_list)
+ else:
+ # if there is a key function, call it with the items at the offsets
+ # specified by the key function as arguments
+ key_list = list(key_list)
+ if len(key_list) == 1:
+ # if key_list contains a single item, pass the item at that offset
+ # as the only argument to the key function
+ key_offset = key_list[0]
+ key_argument = lambda zipped_items: key(zipped_items[key_offset])
+ else:
+ # if key_list contains multiple items, use itemgetter to return a
+ # tuple of items, which we pass as *args to the key function
+ get_key_items = itemgetter(*key_list)
+ key_argument = lambda zipped_items: key(
+ *get_key_items(zipped_items)
+ )
+
+ return list(
+ zip(*sorted(zip(*iterables), key=key_argument, reverse=reverse))
+ )
+
+
+def unzip(iterable):
+ """The inverse of :func:`zip`, this function disaggregates the elements
+ of the zipped *iterable*.
+
+ The ``i``-th iterable contains the ``i``-th element from each element
+ of the zipped iterable. The first element is used to determine the
+ length of the remaining elements.
+
+ >>> iterable = [('a', 1), ('b', 2), ('c', 3), ('d', 4)]
+ >>> letters, numbers = unzip(iterable)
+ >>> list(letters)
+ ['a', 'b', 'c', 'd']
+ >>> list(numbers)
+ [1, 2, 3, 4]
+
+ This is similar to using ``zip(*iterable)``, but it avoids reading
+ *iterable* into memory. Note, however, that this function uses
+ :func:`itertools.tee` and thus may require significant storage.
+
+ """
+ head, iterable = spy(iter(iterable))
+ if not head:
+ # empty iterable, e.g. zip([], [], [])
+ return ()
+ # spy returns a one-length iterable as head
+ head = head[0]
+ iterables = tee(iterable, len(head))
+
+ def itemgetter(i):
+ def getter(obj):
+ try:
+ return obj[i]
+ except IndexError:
+ # basically if we have an iterable like
+ # iter([(1, 2, 3), (4, 5), (6,)])
+ # the second unzipped iterable would fail at the third tuple
+ # since it would try to access tup[1]
+ # same with the third unzipped iterable and the second tuple
+ # to support these "improperly zipped" iterables,
+ # we create a custom itemgetter
+ # which just stops the unzipped iterables
+ # at first length mismatch
+ raise StopIteration
+
+ return getter
+
+ return tuple(map(itemgetter(i), it) for i, it in enumerate(iterables))
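+
+
+ # Editor's note: an illustrative sketch, not part of the upstream module.
+ # It demonstrates the "improperly zipped" behaviour described above: each
+ # unzipped iterable simply stops at the first length mismatch.
+ def _unzip_ragged_example():
+ firsts, seconds = unzip([(1, 2), (3, 4), (5,)])
+ # Expected: ([1, 3, 5], [2, 4])
+ return list(firsts), list(seconds)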
+
+
+def divide(n, iterable):
+ """Divide the elements from *iterable* into *n* parts, maintaining
+ order.
+
+ >>> group_1, group_2 = divide(2, [1, 2, 3, 4, 5, 6])
+ >>> list(group_1)
+ [1, 2, 3]
+ >>> list(group_2)
+ [4, 5, 6]
+
+ If the length of *iterable* is not evenly divisible by *n*, then the
+ length of the returned iterables will not be identical:
+
+ >>> children = divide(3, [1, 2, 3, 4, 5, 6, 7])
+ >>> [list(c) for c in children]
+ [[1, 2, 3], [4, 5], [6, 7]]
+
+ If the length of *iterable* is smaller than *n*, then the last returned
+ iterables will be empty:
+
+ >>> children = divide(5, [1, 2, 3])
+ >>> [list(c) for c in children]
+ [[1], [2], [3], [], []]
+
+ This function will exhaust the iterable before returning and may require
+ significant storage. If order is not important, see :func:`distribute`,
+ which does not first pull the iterable into memory.
+
+ """
+ if n < 1:
+ raise ValueError('n must be at least 1')
+
+ try:
+ iterable[:0]
+ except TypeError:
+ seq = tuple(iterable)
+ else:
+ seq = iterable
+
+ q, r = divmod(len(seq), n)
+
+ ret = []
+ stop = 0
+ for i in range(1, n + 1):
+ start = stop
+ stop += q + 1 if i <= r else q
+ ret.append(iter(seq[start:stop]))
+
+ return ret
+
+
+def always_iterable(obj, base_type=(str, bytes)):
+ """If *obj* is iterable, return an iterator over its items::
+
+ >>> obj = (1, 2, 3)
+ >>> list(always_iterable(obj))
+ [1, 2, 3]
+
+ If *obj* is not iterable, return a one-item iterable containing *obj*::
+
+ >>> obj = 1
+ >>> list(always_iterable(obj))
+ [1]
+
+ If *obj* is ``None``, return an empty iterable:
+
+ >>> obj = None
+ >>> list(always_iterable(None))
+ []
+
+ By default, binary and text strings are not considered iterable::
+
+ >>> obj = 'foo'
+ >>> list(always_iterable(obj))
+ ['foo']
+
+ If *base_type* is set, objects for which ``isinstance(obj, base_type)``
+ returns ``True`` won't be considered iterable.
+
+ >>> obj = {'a': 1}
+ >>> list(always_iterable(obj)) # Iterate over the dict's keys
+ ['a']
+ >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit
+ [{'a': 1}]
+
+ Set *base_type* to ``None`` to avoid any special handling and treat objects
+ Python considers iterable as iterable:
+
+ >>> obj = 'foo'
+ >>> list(always_iterable(obj, base_type=None))
+ ['f', 'o', 'o']
+ """
+ if obj is None:
+ return iter(())
+
+ if (base_type is not None) and isinstance(obj, base_type):
+ return iter((obj,))
+
+ try:
+ return iter(obj)
+ except TypeError:
+ return iter((obj,))
+
+
+def adjacent(predicate, iterable, distance=1):
+ """Return an iterable over `(bool, item)` tuples where the `item` is
+ drawn from *iterable* and the `bool` indicates whether
+ that item satisfies the *predicate* or is adjacent to an item that does.
+
+ For example, to find whether items are adjacent to a ``3``::
+
+ >>> list(adjacent(lambda x: x == 3, range(6)))
+ [(False, 0), (False, 1), (True, 2), (True, 3), (True, 4), (False, 5)]
+
+ Set *distance* to change what counts as adjacent. For example, to find
+ whether items are two places away from a ``3``:
+
+ >>> list(adjacent(lambda x: x == 3, range(6), distance=2))
+ [(False, 0), (True, 1), (True, 2), (True, 3), (True, 4), (True, 5)]
+
+ This is useful for contextualizing the results of a search function.
+ For example, a code comparison tool might want to identify lines that
+ have changed, but also the surrounding lines, to give the viewer of the
+ diff some context.
+
+ The predicate function will only be called once for each item in the
+ iterable.
+
+ See also :func:`groupby_transform`, which can be used with this function
+ to group ranges of items with the same `bool` value.
+
+ """
+ # Allow distance=0 mainly for testing that it reproduces results with map()
+ if distance < 0:
+ raise ValueError('distance must be at least 0')
+
+ i1, i2 = tee(iterable)
+ padding = [False] * distance
+ selected = chain(padding, map(predicate, i1), padding)
+ adjacent_to_selected = map(any, windowed(selected, 2 * distance + 1))
+ return zip(adjacent_to_selected, i2)
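+
+
+ # Editor's note: an illustrative sketch, not part of the upstream module.
+ # It combines adjacent() with groupby_transform(), as the adjacent()
+ # docstring suggests, to group runs of items that share the same bool value.
+ def _adjacent_groups_example():
+ from operator import itemgetter
+ marked = adjacent(lambda x: x == 3, range(6))
+ grouper = groupby_transform(marked, itemgetter(0), itemgetter(1))
+ # Expected: [(False, [0, 1]), (True, [2, 3, 4]), (False, [5])]
+ return [(k, list(g)) for k, g in grouper]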
+
+
+def groupby_transform(iterable, keyfunc=None, valuefunc=None, reducefunc=None):
+ """An extension of :func:`itertools.groupby` that can apply transformations
+ to the grouped data.
+
+ * *keyfunc* is a function computing a key value for each item in *iterable*
+ * *valuefunc* is a function that transforms the individual items from
+ *iterable* after grouping
+ * *reducefunc* is a function that transforms each group of items
+
+ >>> iterable = 'aAAbBBcCC'
+ >>> keyfunc = lambda k: k.upper()
+ >>> valuefunc = lambda v: v.lower()
+ >>> reducefunc = lambda g: ''.join(g)
+ >>> list(groupby_transform(iterable, keyfunc, valuefunc, reducefunc))
+ [('A', 'aaa'), ('B', 'bbb'), ('C', 'ccc')]
+
+ Each optional argument defaults to an identity function if not specified.
+
+ :func:`groupby_transform` is useful when grouping elements of an iterable
+ using a separate iterable as the key. To do this, :func:`zip` the iterables
+ and pass a *keyfunc* that extracts the first element and a *valuefunc*
+ that extracts the second element::
+
+ >>> from operator import itemgetter
+ >>> keys = [0, 0, 1, 1, 1, 2, 2, 2, 3]
+ >>> values = 'abcdefghi'
+ >>> iterable = zip(keys, values)
+ >>> grouper = groupby_transform(iterable, itemgetter(0), itemgetter(1))
+ >>> [(k, ''.join(g)) for k, g in grouper]
+ [(0, 'ab'), (1, 'cde'), (2, 'fgh'), (3, 'i')]
+
+ Note that the order of items in the iterable is significant.
+ Only adjacent items are grouped together, so if you don't want any
+ duplicate groups, you should sort the iterable by the key function.
+
+ """
+ ret = groupby(iterable, keyfunc)
+ if valuefunc:
+ ret = ((k, map(valuefunc, g)) for k, g in ret)
+ if reducefunc:
+ ret = ((k, reducefunc(g)) for k, g in ret)
+
+ return ret
+
+
+class numeric_range(abc.Sequence, abc.Hashable):
+ """An extension of the built-in ``range()`` function whose arguments can
+ be any orderable numeric type.
+
+ With only *stop* specified, *start* defaults to ``0`` and *step*
+ defaults to ``1``. The output items will match the type of *stop*:
+
+ >>> list(numeric_range(3.5))
+ [0.0, 1.0, 2.0, 3.0]
+
+ With only *start* and *stop* specified, *step* defaults to ``1``. The
+ output items will match the type of *start*:
+
+ >>> from decimal import Decimal
+ >>> start = Decimal('2.1')
+ >>> stop = Decimal('5.1')
+ >>> list(numeric_range(start, stop))
+ [Decimal('2.1'), Decimal('3.1'), Decimal('4.1')]
+
+ With *start*, *stop*, and *step* specified the output items will match
+ the type of ``start + step``:
+
+ >>> from fractions import Fraction
+ >>> start = Fraction(1, 2) # Start at 1/2
+ >>> stop = Fraction(5, 2) # End at 5/2
+ >>> step = Fraction(1, 2) # Count by 1/2
+ >>> list(numeric_range(start, stop, step))
+ [Fraction(1, 2), Fraction(1, 1), Fraction(3, 2), Fraction(2, 1)]
+
+ If *step* is zero, ``ValueError`` is raised. Negative steps are supported:
+
+ >>> list(numeric_range(3, -1, -1.0))
+ [3.0, 2.0, 1.0, 0.0]
+
+ Be aware of the limitations of floating point numbers; the representation
+ of the yielded numbers may be surprising.
+
+ ``datetime.datetime`` objects can be used for *start* and *stop*, if *step*
+ is a ``datetime.timedelta`` object:
+
+ >>> import datetime
+ >>> start = datetime.datetime(2019, 1, 1)
+ >>> stop = datetime.datetime(2019, 1, 3)
+ >>> step = datetime.timedelta(days=1)
+ >>> items = iter(numeric_range(start, stop, step))
+ >>> next(items)
+ datetime.datetime(2019, 1, 1, 0, 0)
+ >>> next(items)
+ datetime.datetime(2019, 1, 2, 0, 0)
+
+ """
+
+ _EMPTY_HASH = hash(range(0, 0))
+
+ def __init__(self, *args):
+ argc = len(args)
+ if argc == 1:
+ (self._stop,) = args
+ self._start = type(self._stop)(0)
+ self._step = type(self._stop - self._start)(1)
+ elif argc == 2:
+ self._start, self._stop = args
+ self._step = type(self._stop - self._start)(1)
+ elif argc == 3:
+ self._start, self._stop, self._step = args
+ elif argc == 0:
+ raise TypeError(
+ 'numeric_range expected at least '
+ '1 argument, got {}'.format(argc)
+ )
+ else:
+ raise TypeError(
+ 'numeric_range expected at most '
+ '3 arguments, got {}'.format(argc)
+ )
+
+ self._zero = type(self._step)(0)
+ if self._step == self._zero:
+ raise ValueError('numeric_range() arg 3 must not be zero')
+ self._growing = self._step > self._zero
+ self._init_len()
+
+ def __bool__(self):
+ if self._growing:
+ return self._start < self._stop
+ else:
+ return self._start > self._stop
+
+ def __contains__(self, elem):
+ if self._growing:
+ if self._start <= elem < self._stop:
+ return (elem - self._start) % self._step == self._zero
+ else:
+ if self._start >= elem > self._stop:
+ return (self._start - elem) % (-self._step) == self._zero
+
+ return False
+
+ def __eq__(self, other):
+ if isinstance(other, numeric_range):
+ empty_self = not bool(self)
+ empty_other = not bool(other)
+ if empty_self or empty_other:
+ return empty_self and empty_other # True if both empty
+ else:
+ return (
+ self._start == other._start
+ and self._step == other._step
+ and self._get_by_index(-1) == other._get_by_index(-1)
+ )
+ else:
+ return False
+
+ def __getitem__(self, key):
+ if isinstance(key, int):
+ return self._get_by_index(key)
+ elif isinstance(key, slice):
+ step = self._step if key.step is None else key.step * self._step
+
+ if key.start is None or key.start <= -self._len:
+ start = self._start
+ elif key.start >= self._len:
+ start = self._stop
+ else: # -self._len < key.start < self._len
+ start = self._get_by_index(key.start)
+
+ if key.stop is None or key.stop >= self._len:
+ stop = self._stop
+ elif key.stop <= -self._len:
+ stop = self._start
+ else: # -self._len < key.stop < self._len
+ stop = self._get_by_index(key.stop)
+
+ return numeric_range(start, stop, step)
+ else:
+ raise TypeError(
+ 'numeric range indices must be '
+ 'integers or slices, not {}'.format(type(key).__name__)
+ )
+
+ def __hash__(self):
+ if self:
+ return hash((self._start, self._get_by_index(-1), self._step))
+ else:
+ return self._EMPTY_HASH
+
+ def __iter__(self):
+ values = (self._start + (n * self._step) for n in count())
+ if self._growing:
+ return takewhile(partial(gt, self._stop), values)
+ else:
+ return takewhile(partial(lt, self._stop), values)
+
+ def __len__(self):
+ return self._len
+
+ def _init_len(self):
+ if self._growing:
+ start = self._start
+ stop = self._stop
+ step = self._step
+ else:
+ start = self._stop
+ stop = self._start
+ step = -self._step
+ distance = stop - start
+ if distance <= self._zero:
+ self._len = 0
+ else: # distance > 0 and step > 0: regular euclidean division
+ q, r = divmod(distance, step)
+ self._len = int(q) + int(r != self._zero)
+
+ def __reduce__(self):
+ return numeric_range, (self._start, self._stop, self._step)
+
+ def __repr__(self):
+ if self._step == 1:
+ return "numeric_range({}, {})".format(
+ repr(self._start), repr(self._stop)
+ )
+ else:
+ return "numeric_range({}, {}, {})".format(
+ repr(self._start), repr(self._stop), repr(self._step)
+ )
+
+ def __reversed__(self):
+ return iter(
+ numeric_range(
+ self._get_by_index(-1), self._start - self._step, -self._step
+ )
+ )
+
+ def count(self, value):
+ return int(value in self)
+
+ def index(self, value):
+ if self._growing:
+ if self._start <= value < self._stop:
+ q, r = divmod(value - self._start, self._step)
+ if r == self._zero:
+ return int(q)
+ else:
+ if self._start >= value > self._stop:
+ q, r = divmod(self._start - value, -self._step)
+ if r == self._zero:
+ return int(q)
+
+ raise ValueError("{} is not in numeric range".format(value))
+
+ def _get_by_index(self, i):
+ if i < 0:
+ i += self._len
+ if i < 0 or i >= self._len:
+ raise IndexError("numeric range object index out of range")
+ return self._start + i * self._step
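+
+
+ # Editor's note: an illustrative sketch, not part of the upstream module.
+ # It exercises the slicing support implemented by numeric_range.__getitem__,
+ # which the class docstring does not demonstrate.
+ def _numeric_range_slice_example():
+ r = numeric_range(1, 10, 2) # 1, 3, 5, 7, 9
+ # Slicing returns another numeric_range; expected list: [3, 5]
+ return list(r[1:3])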
+
+
+def count_cycle(iterable, n=None):
+ """Cycle through the items from *iterable* up to *n* times, yielding
+ the number of completed cycles along with each item. If *n* is omitted the
+ process repeats indefinitely.
+
+ >>> list(count_cycle('AB', 3))
+ [(0, 'A'), (0, 'B'), (1, 'A'), (1, 'B'), (2, 'A'), (2, 'B')]
+
+ """
+ iterable = tuple(iterable)
+ if not iterable:
+ return iter(())
+ counter = count() if n is None else range(n)
+ return ((i, item) for i in counter for item in iterable)
+
+
+def mark_ends(iterable):
+ """Yield 3-tuples of the form ``(is_first, is_last, item)``.
+
+ >>> list(mark_ends('ABC'))
+ [(True, False, 'A'), (False, False, 'B'), (False, True, 'C')]
+
+ Use this when looping over an iterable to take special action on its first
+ and/or last items:
+
+ >>> iterable = ['Header', 100, 200, 'Footer']
+ >>> total = 0
+ >>> for is_first, is_last, item in mark_ends(iterable):
+ ... if is_first:
+ ... continue # Skip the header
+ ... if is_last:
+ ... continue # Skip the footer
+ ... total += item
+ >>> print(total)
+ 300
+ """
+ it = iter(iterable)
+
+ try:
+ b = next(it)
+ except StopIteration:
+ return
+
+ try:
+ for i in count():
+ a = b
+ b = next(it)
+ yield i == 0, False, a
+
+ except StopIteration:
+ yield i == 0, True, a
+
+
+def locate(iterable, pred=bool, window_size=None):
+ """Yield the index of each item in *iterable* for which *pred* returns
+ ``True``.
+
+ *pred* defaults to :func:`bool`, which will select truthy items:
+
+ >>> list(locate([0, 1, 1, 0, 1, 0, 0]))
+ [1, 2, 4]
+
+ Set *pred* to a custom function to, e.g., find the indexes for a particular
+ item.
+
+ >>> list(locate(['a', 'b', 'c', 'b'], lambda x: x == 'b'))
+ [1, 3]
+
+ If *window_size* is given, then the *pred* function will be called with
+ that many items. This enables searching for sub-sequences:
+
+ >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3]
+ >>> pred = lambda *args: args == (1, 2, 3)
+ >>> list(locate(iterable, pred=pred, window_size=3))
+ [1, 5, 9]
+
+ Use with :func:`seekable` to find indexes and then retrieve the associated
+ items:
+
+ >>> from itertools import count
+ >>> from more_itertools import seekable
+ >>> source = (3 * n + 1 if (n % 2) else n // 2 for n in count())
+ >>> it = seekable(source)
+ >>> pred = lambda x: x > 100
+ >>> indexes = locate(it, pred=pred)
+ >>> i = next(indexes)
+ >>> it.seek(i)
+ >>> next(it)
+ 106
+
+ """
+ if window_size is None:
+ return compress(count(), map(pred, iterable))
+
+ if window_size < 1:
+ raise ValueError('window size must be at least 1')
+
+ it = windowed(iterable, window_size, fillvalue=_marker)
+ return compress(count(), starmap(pred, it))
+
+
+def lstrip(iterable, pred):
+ """Yield the items from *iterable*, but strip any from the beginning
+ for which *pred* returns ``True``.
+
+ For example, to remove a set of items from the start of an iterable:
+
+ >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
+ >>> pred = lambda x: x in {None, False, ''}
+ >>> list(lstrip(iterable, pred))
+ [1, 2, None, 3, False, None]
+
+ This function is analogous to :func:`str.lstrip`, and is essentially
+ a wrapper for :func:`itertools.dropwhile`.
+
+ """
+ return dropwhile(pred, iterable)
+
+
+def rstrip(iterable, pred):
+ """Yield the items from *iterable*, but strip any from the end
+ for which *pred* returns ``True``.
+
+ For example, to remove a set of items from the end of an iterable:
+
+ >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
+ >>> pred = lambda x: x in {None, False, ''}
+ >>> list(rstrip(iterable, pred))
+ [None, False, None, 1, 2, None, 3]
+
+ This function is analogous to :func:`str.rstrip`.
+
+ """
+ cache = []
+ cache_append = cache.append
+ cache_clear = cache.clear
+ for x in iterable:
+ if pred(x):
+ cache_append(x)
+ else:
+ yield from cache
+ cache_clear()
+ yield x
+
+
+def strip(iterable, pred):
+ """Yield the items from *iterable*, but strip any from the
+ beginning and end for which *pred* returns ``True``.
+
+ For example, to remove a set of items from both ends of an iterable:
+
+ >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
+ >>> pred = lambda x: x in {None, False, ''}
+ >>> list(strip(iterable, pred))
+ [1, 2, None, 3]
+
+ This function is analogous to :func:`str.strip`.
+
+ """
+ return rstrip(lstrip(iterable, pred), pred)
+
+
+class islice_extended:
+ """An extension of :func:`itertools.islice` that supports negative values
+ for *stop*, *start*, and *step*.
+
+ >>> iterable = iter('abcdefgh')
+ >>> list(islice_extended(iterable, -4, -1))
+ ['e', 'f', 'g']
+
+ Slices with negative values require some caching of *iterable*, but this
+ function takes care to minimize the amount of memory required.
+
+ For example, you can use a negative step with an infinite iterator:
+
+ >>> from itertools import count
+ >>> list(islice_extended(count(), 110, 99, -2))
+ [110, 108, 106, 104, 102, 100]
+
+ You can also use slice notation directly:
+
+ >>> iterable = map(str, count())
+ >>> it = islice_extended(iterable)[10:20:2]
+ >>> list(it)
+ ['10', '12', '14', '16', '18']
+
+ """
+
+ def __init__(self, iterable, *args):
+ it = iter(iterable)
+ if args:
+ self._iterable = _islice_helper(it, slice(*args))
+ else:
+ self._iterable = it
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ return next(self._iterable)
+
+ def __getitem__(self, key):
+ if isinstance(key, slice):
+ return islice_extended(_islice_helper(self._iterable, key))
+
+ raise TypeError('islice_extended.__getitem__ argument must be a slice')
+
+
+def _islice_helper(it, s):
+ start = s.start
+ stop = s.stop
+ if s.step == 0:
+ raise ValueError('step argument must be a non-zero integer or None.')
+ step = s.step or 1
+
+ if step > 0:
+ start = 0 if (start is None) else start
+
+ if start < 0:
+ # Consume all but the last -start items
+ cache = deque(enumerate(it, 1), maxlen=-start)
+ len_iter = cache[-1][0] if cache else 0
+
+ # Adjust start to be positive
+ i = max(len_iter + start, 0)
+
+ # Adjust stop to be positive
+ if stop is None:
+ j = len_iter
+ elif stop >= 0:
+ j = min(stop, len_iter)
+ else:
+ j = max(len_iter + stop, 0)
+
+ # Slice the cache
+ n = j - i
+ if n <= 0:
+ return
+
+ for index, item in islice(cache, 0, n, step):
+ yield item
+ elif (stop is not None) and (stop < 0):
+ # Advance to the start position
+ next(islice(it, start, start), None)
+
+ # When stop is negative, we have to carry -stop items while
+ # iterating
+ cache = deque(islice(it, -stop), maxlen=-stop)
+
+ for index, item in enumerate(it):
+ cached_item = cache.popleft()
+ if index % step == 0:
+ yield cached_item
+ cache.append(item)
+ else:
+ # When both start and stop are positive we have the normal case
+ yield from islice(it, start, stop, step)
+ else:
+ start = -1 if (start is None) else start
+
+ if (stop is not None) and (stop < 0):
+ # Consume all but the last items
+ n = -stop - 1
+ cache = deque(enumerate(it, 1), maxlen=n)
+ len_iter = cache[-1][0] if cache else 0
+
+ # If start and stop are both negative they are comparable and
+ # we can just slice. Otherwise we can adjust start to be negative
+ # and then slice.
+ if start < 0:
+ i, j = start, stop
+ else:
+ i, j = min(start - len_iter, -1), None
+
+ for index, item in list(cache)[i:j:step]:
+ yield item
+ else:
+ # Advance to the stop position
+ if stop is not None:
+ m = stop + 1
+ next(islice(it, m, m), None)
+
+ # stop is positive, so if start is negative they are not comparable
+ # and we need the rest of the items.
+ if start < 0:
+ i = start
+ n = None
+ # stop is None and start is positive, so we just need items up to
+ # the start index.
+ elif stop is None:
+ i = None
+ n = start + 1
+ # Both stop and start are positive, so they are comparable.
+ else:
+ i = None
+ n = start - stop
+ if n <= 0:
+ return
+
+ cache = list(islice(it, n))
+
+ yield from cache[i::step]
+
+
+def always_reversible(iterable):
+ """An extension of :func:`reversed` that supports all iterables, not
+ just those which implement the ``Reversible`` or ``Sequence`` protocols.
+
+ >>> print(*always_reversible(x for x in range(3)))
+ 2 1 0
+
+ If the iterable is already reversible, this function returns the
+ result of :func:`reversed()`. If the iterable is not reversible,
+ this function will cache the remaining items in the iterable and
+ yield them in reverse order, which may require significant storage.
+ """
+ try:
+ return reversed(iterable)
+ except TypeError:
+ return reversed(list(iterable))
+
+
+def consecutive_groups(iterable, ordering=lambda x: x):
+ """Yield groups of consecutive items using :func:`itertools.groupby`.
+ The *ordering* function determines whether two items are adjacent by
+ returning their position.
+
+ By default, the ordering function is the identity function. This is
+ suitable for finding runs of numbers:
+
+ >>> iterable = [1, 10, 11, 12, 20, 30, 31, 32, 33, 40]
+ >>> for group in consecutive_groups(iterable):
+ ... print(list(group))
+ [1]
+ [10, 11, 12]
+ [20]
+ [30, 31, 32, 33]
+ [40]
+
+ For finding runs of adjacent letters, try using the :meth:`index` method
+ of a string of letters:
+
+ >>> from string import ascii_lowercase
+ >>> iterable = 'abcdfgilmnop'
+ >>> ordering = ascii_lowercase.index
+ >>> for group in consecutive_groups(iterable, ordering):
+ ... print(list(group))
+ ['a', 'b', 'c', 'd']
+ ['f', 'g']
+ ['i']
+ ['l', 'm', 'n', 'o', 'p']
+
+ Each group of consecutive items is an iterator that shares its source with
+ *iterable*. When an output group is advanced, the previous group is
+ no longer available unless its elements are copied (e.g., into a ``list``).
+
+ >>> iterable = [1, 2, 11, 12, 21, 22]
+ >>> saved_groups = []
+ >>> for group in consecutive_groups(iterable):
+ ... saved_groups.append(list(group)) # Copy group elements
+ >>> saved_groups
+ [[1, 2], [11, 12], [21, 22]]
+
+ """
+ for k, g in groupby(
+ enumerate(iterable), key=lambda x: x[0] - ordering(x[1])
+ ):
+ yield map(itemgetter(1), g)
+
+
+def difference(iterable, func=sub, *, initial=None):
+ """This function is the inverse of :func:`itertools.accumulate`. By default
+ it will compute the first difference of *iterable* using
+ :func:`operator.sub`:
+
+ >>> from itertools import accumulate
+ >>> iterable = accumulate([0, 1, 2, 3, 4]) # produces 0, 1, 3, 6, 10
+ >>> list(difference(iterable))
+ [0, 1, 2, 3, 4]
+
+ *func* defaults to :func:`operator.sub`, but other functions can be
+ specified. They will be applied as follows::
+
+ A, B, C, D, ... --> A, func(B, A), func(C, B), func(D, C), ...
+
+ For example, to do progressive division:
+
+ >>> iterable = [1, 2, 6, 24, 120]
+ >>> func = lambda x, y: x // y
+ >>> list(difference(iterable, func))
+ [1, 2, 3, 4, 5]
+
+ If the *initial* keyword is set, the first element will be skipped when
+ computing successive differences.
+
+ >>> it = [10, 11, 13, 16] # from accumulate([1, 2, 3], initial=10)
+ >>> list(difference(it, initial=10))
+ [1, 2, 3]
+
+ """
+ a, b = tee(iterable)
+ try:
+ first = [next(b)]
+ except StopIteration:
+ return iter([])
+
+ if initial is not None:
+ first = []
+
+ return chain(first, starmap(func, zip(b, a)))
+
+
+class SequenceView(Sequence):
+ """Return a read-only view of the sequence object *target*.
+
+ :class:`SequenceView` objects are analogous to Python's built-in
+ "dictionary view" types. They provide a dynamic view of a sequence's items,
+ meaning that when the sequence updates, so does the view.
+
+ >>> seq = ['0', '1', '2']
+ >>> view = SequenceView(seq)
+ >>> view
+ SequenceView(['0', '1', '2'])
+ >>> seq.append('3')
+ >>> view
+ SequenceView(['0', '1', '2', '3'])
+
+ Sequence views support indexing, slicing, and length queries. They act
+ like the underlying sequence, except they don't allow assignment:
+
+ >>> view[1]
+ '1'
+ >>> view[1:-1]
+ ['1', '2']
+ >>> len(view)
+ 4
+
+ Sequence views are useful as an alternative to copying, as they don't
+ require (much) extra storage.
+
+ """
+
+ def __init__(self, target):
+ if not isinstance(target, Sequence):
+ raise TypeError
+ self._target = target
+
+ def __getitem__(self, index):
+ return self._target[index]
+
+ def __len__(self):
+ return len(self._target)
+
+ def __repr__(self):
+ return '{}({})'.format(self.__class__.__name__, repr(self._target))
+
+
+class seekable:
+ """Wrap an iterator to allow for seeking backward and forward. This
+ progressively caches the items in the source iterable so they can be
+ re-visited.
+
+ Call :meth:`seek` with an index to seek to that position in the source
+ iterable.
+
+ To "reset" an iterator, seek to ``0``:
+
+ >>> from itertools import count
+ >>> it = seekable((str(n) for n in count()))
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+ >>> it.seek(0)
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+ >>> next(it)
+ '3'
+
+ You can also seek forward:
+
+ >>> it = seekable((str(n) for n in range(20)))
+ >>> it.seek(10)
+ >>> next(it)
+ '10'
+ >>> it.seek(20) # Seeking past the end of the source isn't a problem
+ >>> list(it)
+ []
+ >>> it.seek(0) # Resetting works even after hitting the end
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+
+ Call :meth:`peek` to look ahead one item without advancing the iterator:
+
+ >>> it = seekable('1234')
+ >>> it.peek()
+ '1'
+ >>> list(it)
+ ['1', '2', '3', '4']
+ >>> it.peek(default='empty')
+ 'empty'
+
+ Before the iterator reaches its end, calling :func:`bool` on it will return
+ ``True``. After it is exhausted, it will return ``False``:
+
+ >>> it = seekable('5678')
+ >>> bool(it)
+ True
+ >>> list(it)
+ ['5', '6', '7', '8']
+ >>> bool(it)
+ False
+
+ You may view the contents of the cache with the :meth:`elements` method.
+ That returns a :class:`SequenceView`, a view that updates automatically:
+
+ >>> it = seekable((str(n) for n in range(10)))
+ >>> next(it), next(it), next(it)
+ ('0', '1', '2')
+ >>> elements = it.elements()
+ >>> elements
+ SequenceView(['0', '1', '2'])
+ >>> next(it)
+ '3'
+ >>> elements
+ SequenceView(['0', '1', '2', '3'])
+
+ By default, the cache grows as the source iterable progresses, so beware of
+ wrapping very large or infinite iterables. Supply *maxlen* to limit the
+ size of the cache (this of course limits how far back you can seek).
+
+ >>> from itertools import count
+ >>> it = seekable((str(n) for n in count()), maxlen=2)
+ >>> next(it), next(it), next(it), next(it)
+ ('0', '1', '2', '3')
+ >>> list(it.elements())
+ ['2', '3']
+ >>> it.seek(0)
+ >>> next(it), next(it), next(it), next(it)
+ ('2', '3', '4', '5')
+ >>> next(it)
+ '6'
+
+ """
+
+ def __init__(self, iterable, maxlen=None):
+ self._source = iter(iterable)
+ if maxlen is None:
+ self._cache = []
+ else:
+ self._cache = deque([], maxlen)
+ self._index = None
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ if self._index is not None:
+ try:
+ item = self._cache[self._index]
+ except IndexError:
+ self._index = None
+ else:
+ self._index += 1
+ return item
+
+ item = next(self._source)
+ self._cache.append(item)
+ return item
+
+ def __bool__(self):
+ try:
+ self.peek()
+ except StopIteration:
+ return False
+ return True
+
+ def peek(self, default=_marker):
+ try:
+ peeked = next(self)
+ except StopIteration:
+ if default is _marker:
+ raise
+ return default
+ if self._index is None:
+ self._index = len(self._cache)
+ self._index -= 1
+ return peeked
+
+ def elements(self):
+ return SequenceView(self._cache)
+
+ def seek(self, index):
+ self._index = index
+ remainder = index - len(self._cache)
+ if remainder > 0:
+ consume(self, remainder)
+
+
+class run_length:
+ """
+ :func:`run_length.encode` compresses an iterable with run-length encoding.
+ It yields groups of repeated items with the count of how many times they
+ were repeated:
+
+ >>> uncompressed = 'abbcccdddd'
+ >>> list(run_length.encode(uncompressed))
+ [('a', 1), ('b', 2), ('c', 3), ('d', 4)]
+
+ :func:`run_length.decode` decompresses an iterable that was previously
+ compressed with run-length encoding. It yields the items of the
+ decompressed iterable:
+
+ >>> compressed = [('a', 1), ('b', 2), ('c', 3), ('d', 4)]
+ >>> list(run_length.decode(compressed))
+ ['a', 'b', 'b', 'c', 'c', 'c', 'd', 'd', 'd', 'd']
+
+ """
+
+ @staticmethod
+ def encode(iterable):
+ return ((k, ilen(g)) for k, g in groupby(iterable))
+
+ @staticmethod
+ def decode(iterable):
+ return chain.from_iterable(repeat(k, n) for k, n in iterable)
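+
+
+ # Editor's note: an illustrative sketch, not part of the upstream module.
+ # It checks the round-trip property of run-length encoding: decoding an
+ # encoded iterable reproduces the original items.
+ def _run_length_round_trip_example(data='abbcccdddd'):
+ encoded = list(run_length.encode(data))
+ return list(run_length.decode(encoded)) == list(data) # Expected: True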
+
+
+def exactly_n(iterable, n, predicate=bool):
+ """Return ``True`` if exactly ``n`` items in the iterable are ``True``
+ according to the *predicate* function.
+
+ >>> exactly_n([True, True, False], 2)
+ True
+ >>> exactly_n([True, True, False], 1)
+ False
+ >>> exactly_n([0, 1, 2, 3, 4, 5], 3, lambda x: x < 3)
+ True
+
+ The iterable will be advanced until ``n + 1`` truthy items are encountered,
+ so avoid calling it on infinite iterables.
+
+ """
+ return len(take(n + 1, filter(predicate, iterable))) == n
+
+
+def circular_shifts(iterable):
+ """Return a list of circular shifts of *iterable*.
+
+ >>> circular_shifts(range(4))
+ [(0, 1, 2, 3), (1, 2, 3, 0), (2, 3, 0, 1), (3, 0, 1, 2)]
+ """
+ lst = list(iterable)
+ return take(len(lst), windowed(cycle(lst), len(lst)))
+
+
+def make_decorator(wrapping_func, result_index=0):
+ """Return a decorator version of *wrapping_func*, which is a function that
+ modifies an iterable. *result_index* is the position in that function's
+ signature where the iterable goes.
+
+ This lets you use itertools on the "production end," i.e. at function
+ definition. This can augment what the function returns without changing the
+ function's code.
+
+ For example, to produce a decorator version of :func:`chunked`:
+
+ >>> from more_itertools import chunked
+ >>> chunker = make_decorator(chunked, result_index=0)
+ >>> @chunker(3)
+ ... def iter_range(n):
+ ... return iter(range(n))
+ ...
+ >>> list(iter_range(9))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
+
+ To only allow truthy items to be returned:
+
+ >>> truth_serum = make_decorator(filter, result_index=1)
+ >>> @truth_serum(bool)
+ ... def boolean_test():
+ ... return [0, 1, '', ' ', False, True]
+ ...
+ >>> list(boolean_test())
+ [1, ' ', True]
+
+ The :func:`peekable` and :func:`seekable` wrappers make for practical
+ decorators:
+
+ >>> from more_itertools import peekable
+ >>> peekable_function = make_decorator(peekable)
+ >>> @peekable_function()
+ ... def str_range(*args):
+ ... return (str(x) for x in range(*args))
+ ...
+ >>> it = str_range(1, 20, 2)
+ >>> next(it), next(it), next(it)
+ ('1', '3', '5')
+ >>> it.peek()
+ '7'
+ >>> next(it)
+ '7'
+
+ """
+ # See https://sites.google.com/site/bbayles/index/decorator_factory for
+ # notes on how this works.
+ def decorator(*wrapping_args, **wrapping_kwargs):
+ def outer_wrapper(f):
+ def inner_wrapper(*args, **kwargs):
+ result = f(*args, **kwargs)
+ wrapping_args_ = list(wrapping_args)
+ wrapping_args_.insert(result_index, result)
+ return wrapping_func(*wrapping_args_, **wrapping_kwargs)
+
+ return inner_wrapper
+
+ return outer_wrapper
+
+ return decorator
+
+
+def map_reduce(iterable, keyfunc, valuefunc=None, reducefunc=None):
+ """Return a dictionary that maps the items in *iterable* to categories
+ defined by *keyfunc*, transforms them with *valuefunc*, and
+ then summarizes them by category with *reducefunc*.
+
+ *valuefunc* defaults to the identity function if it is unspecified.
+ If *reducefunc* is unspecified, no summarization takes place:
+
+ >>> keyfunc = lambda x: x.upper()
+ >>> result = map_reduce('abbccc', keyfunc)
+ >>> sorted(result.items())
+ [('A', ['a']), ('B', ['b', 'b']), ('C', ['c', 'c', 'c'])]
+
+ Specifying *valuefunc* transforms the categorized items:
+
+ >>> keyfunc = lambda x: x.upper()
+ >>> valuefunc = lambda x: 1
+ >>> result = map_reduce('abbccc', keyfunc, valuefunc)
+ >>> sorted(result.items())
+ [('A', [1]), ('B', [1, 1]), ('C', [1, 1, 1])]
+
+ Specifying *reducefunc* summarizes the categorized items:
+
+ >>> keyfunc = lambda x: x.upper()
+ >>> valuefunc = lambda x: 1
+ >>> reducefunc = sum
+ >>> result = map_reduce('abbccc', keyfunc, valuefunc, reducefunc)
+ >>> sorted(result.items())
+ [('A', 1), ('B', 2), ('C', 3)]
+
+ You may want to filter the input iterable before applying the map/reduce
+ procedure:
+
+ >>> all_items = range(30)
+ >>> items = [x for x in all_items if 10 <= x <= 20] # Filter
+ >>> keyfunc = lambda x: x % 2 # Evens map to 0; odds to 1
+ >>> categories = map_reduce(items, keyfunc=keyfunc)
+ >>> sorted(categories.items())
+ [(0, [10, 12, 14, 16, 18, 20]), (1, [11, 13, 15, 17, 19])]
+ >>> summaries = map_reduce(items, keyfunc=keyfunc, reducefunc=sum)
+ >>> sorted(summaries.items())
+ [(0, 90), (1, 75)]
+
+ Note that all items in the iterable are gathered into a list before the
+ summarization step, which may require significant storage.
+
+ The returned object is a :obj:`collections.defaultdict` with the
+ ``default_factory`` set to ``None``, such that it behaves like a normal
+ dictionary.
+
+ """
+ valuefunc = (lambda x: x) if (valuefunc is None) else valuefunc
+
+ ret = defaultdict(list)
+ for item in iterable:
+ key = keyfunc(item)
+ value = valuefunc(item)
+ ret[key].append(value)
+
+ if reducefunc is not None:
+ for key, value_list in ret.items():
+ ret[key] = reducefunc(value_list)
+
+ ret.default_factory = None
+ return ret
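+
+
+ # Editor's note: an illustrative sketch, not part of the upstream module.
+ # It shows the consequence of setting default_factory to None on the
+ # returned defaultdict: missing keys raise KeyError, like a normal dict.
+ def _map_reduce_missing_key_example():
+ result = map_reduce('abbccc', lambda x: x.upper())
+ try:
+ return result['Z']
+ except KeyError:
+ return 'missing keys raise KeyError'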
+
+
+def rlocate(iterable, pred=bool, window_size=None):
+ """Yield the index of each item in *iterable* for which *pred* returns
+ ``True``, starting from the right and moving left.
+
+ *pred* defaults to :func:`bool`, which will select truthy items:
+
+ >>> list(rlocate([0, 1, 1, 0, 1, 0, 0])) # Truthy at 1, 2, and 4
+ [4, 2, 1]
+
+ Set *pred* to a custom function to, e.g., find the indexes for a particular
+ item:
+
+ >>> iterable = iter('abcb')
+ >>> pred = lambda x: x == 'b'
+ >>> list(rlocate(iterable, pred))
+ [3, 1]
+
+ If *window_size* is given, then the *pred* function will be called with
+ that many items. This enables searching for sub-sequences:
+
+ >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3]
+ >>> pred = lambda *args: args == (1, 2, 3)
+ >>> list(rlocate(iterable, pred=pred, window_size=3))
+ [9, 5, 1]
+
+ Beware, this function won't return anything for infinite iterables.
+ If *iterable* is reversible, ``rlocate`` will reverse it and search from
+ the right. Otherwise, it will search from the left and return the results
+ in reverse order.
+
+ See :func:`locate` for other example applications.
+
+ """
+ if window_size is None:
+ try:
+ len_iter = len(iterable)
+ return (len_iter - i - 1 for i in locate(reversed(iterable), pred))
+ except TypeError:
+ pass
+
+ return reversed(list(locate(iterable, pred, window_size)))
+
+
+def replace(iterable, pred, substitutes, count=None, window_size=1):
+ """Yield the items from *iterable*, replacing the items for which *pred*
+ returns ``True`` with the items from the iterable *substitutes*.
+
+ >>> iterable = [1, 1, 0, 1, 1, 0, 1, 1]
+ >>> pred = lambda x: x == 0
+ >>> substitutes = (2, 3)
+ >>> list(replace(iterable, pred, substitutes))
+ [1, 1, 2, 3, 1, 1, 2, 3, 1, 1]
+
+ If *count* is given, the number of replacements will be limited:
+
+ >>> iterable = [1, 1, 0, 1, 1, 0, 1, 1, 0]
+ >>> pred = lambda x: x == 0
+ >>> substitutes = [None]
+ >>> list(replace(iterable, pred, substitutes, count=2))
+ [1, 1, None, 1, 1, None, 1, 1, 0]
+
+ Use *window_size* to control the number of items passed as arguments to
+ *pred*. This allows for locating and replacing subsequences.
+
+ >>> iterable = [0, 1, 2, 5, 0, 1, 2, 5]
+ >>> window_size = 3
+ >>> pred = lambda *args: args == (0, 1, 2) # 3 items passed to pred
+ >>> substitutes = [3, 4] # Splice in these items
+ >>> list(replace(iterable, pred, substitutes, window_size=window_size))
+ [3, 4, 5, 3, 4, 5]
+
+ """
+ if window_size < 1:
+ raise ValueError('window_size must be at least 1')
+
+ # Save the substitutes iterable, since it's used more than once
+ substitutes = tuple(substitutes)
+
+ # Add padding such that the number of windows matches the length of the
+ # iterable
+ it = chain(iterable, [_marker] * (window_size - 1))
+ windows = windowed(it, window_size)
+
+ n = 0
+ for w in windows:
+ # If the current window matches our predicate (and we haven't hit
+ # our maximum number of replacements), splice in the substitutes
+ # and then consume the following windows that overlap with this one.
+ # For example, if the iterable is (0, 1, 2, 3, 4...)
+ # and the window size is 2, we have (0, 1), (1, 2), (2, 3)...
+ # If the predicate matches on (0, 1), we need to zap (0, 1) and (1, 2)
+ if pred(*w):
+ if (count is None) or (n < count):
+ n += 1
+ yield from substitutes
+ consume(windows, window_size - 1)
+ continue
+
+ # If there was no match (or we've reached the replacement limit),
+ # yield the first item from the window.
+ if w and (w[0] is not _marker):
+ yield w[0]
+
+
+def partitions(iterable):
+ """Yield all possible order-preserving partitions of *iterable*.
+
+ >>> iterable = 'abc'
+ >>> for part in partitions(iterable):
+ ... print([''.join(p) for p in part])
+ ['abc']
+ ['a', 'bc']
+ ['ab', 'c']
+ ['a', 'b', 'c']
+
+ This is unrelated to :func:`partition`.
+
+ """
+ sequence = list(iterable)
+ n = len(sequence)
+ for i in powerset(range(1, n)):
+ yield [sequence[i:j] for i, j in zip((0,) + i, i + (n,))]
+
+
+def set_partitions(iterable, k=None):
+ """
+ Yield the set partitions of *iterable* into *k* parts. Set partitions are
+ not order-preserving.
+
+ >>> iterable = 'abc'
+ >>> for part in set_partitions(iterable, 2):
+ ... print([''.join(p) for p in part])
+ ['a', 'bc']
+ ['ab', 'c']
+ ['b', 'ac']
+
+
+ If *k* is not given, every set partition is generated.
+
+ >>> iterable = 'abc'
+ >>> for part in set_partitions(iterable):
+ ... print([''.join(p) for p in part])
+ ['abc']
+ ['a', 'bc']
+ ['ab', 'c']
+ ['b', 'ac']
+ ['a', 'b', 'c']
+
+ """
+ L = list(iterable)
+ n = len(L)
+ if k is not None:
+ if k < 1:
+ raise ValueError(
+ "Can't partition in a negative or zero number of groups"
+ )
+ elif k > n:
+ return
+
+ def set_partitions_helper(L, k):
+ n = len(L)
+ if k == 1:
+ yield [L]
+ elif n == k:
+ yield [[s] for s in L]
+ else:
+ e, *M = L
+ for p in set_partitions_helper(M, k - 1):
+ yield [[e], *p]
+ for p in set_partitions_helper(M, k):
+ for i in range(len(p)):
+ yield p[:i] + [[e] + p[i]] + p[i + 1 :]
+
+ if k is None:
+ for k in range(1, n + 1):
+ yield from set_partitions_helper(L, k)
+ else:
+ yield from set_partitions_helper(L, k)
+
+
+class time_limited:
+ """
+ Yield items from *iterable* until *limit_seconds* have passed.
+ If the time limit expires before all items have been yielded, the
+ ``timed_out`` parameter will be set to ``True``.
+
+ >>> from time import sleep
+ >>> def generator():
+ ... yield 1
+ ... yield 2
+ ... sleep(0.2)
+ ... yield 3
+ >>> iterable = time_limited(0.1, generator())
+ >>> list(iterable)
+ [1, 2]
+ >>> iterable.timed_out
+ True
+
+ Note that the time is checked before each item is yielded, and iteration
+ stops if the time elapsed is greater than *limit_seconds*. If your time
+ limit is 1 second, but it takes 2 seconds to generate the first item from
+ the iterable, the function will run for 2 seconds and not yield anything.
+
+ """
+
+ def __init__(self, limit_seconds, iterable):
+ if limit_seconds < 0:
+ raise ValueError('limit_seconds must be non-negative')
+ self.limit_seconds = limit_seconds
+ self._iterable = iter(iterable)
+ self._start_time = monotonic()
+ self.timed_out = False
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ item = next(self._iterable)
+ if monotonic() - self._start_time > self.limit_seconds:
+ self.timed_out = True
+ raise StopIteration
+
+ return item
+
+
+def only(iterable, default=None, too_long=None):
+ """If *iterable* has only one item, return it.
+ If it has zero items, return *default*.
+ If it has more than one item, raise the exception given by *too_long*,
+ which is ``ValueError`` by default.
+
+ >>> only([], default='missing')
+ 'missing'
+ >>> only([1])
+ 1
+ >>> only([1, 2]) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ ValueError: Expected exactly one item in iterable, but got 1, 2,
+ and perhaps more.
+ >>> only([1, 2], too_long=TypeError) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ TypeError
+
+ Note that :func:`only` attempts to advance *iterable* twice to ensure there
+ is only one item. See :func:`spy` or :func:`peekable` to check
+ iterable contents less destructively.
+ """
+ it = iter(iterable)
+ first_value = next(it, default)
+
+ try:
+ second_value = next(it)
+ except StopIteration:
+ pass
+ else:
+ msg = (
+ 'Expected exactly one item in iterable, but got {!r}, {!r}, '
+ 'and perhaps more.'.format(first_value, second_value)
+ )
+ raise too_long or ValueError(msg)
+
+ return first_value
+
+
+def ichunked(iterable, n):
+ """Break *iterable* into sub-iterables with *n* elements each.
+ :func:`ichunked` is like :func:`chunked`, but it yields iterables
+ instead of lists.
+
+ If the sub-iterables are read in order, the elements of *iterable*
+ won't be stored in memory.
+ If they are read out of order, :func:`itertools.tee` is used to cache
+ elements as necessary.
+
+ >>> from itertools import count
+ >>> all_chunks = ichunked(count(), 4)
+ >>> c_1, c_2, c_3 = next(all_chunks), next(all_chunks), next(all_chunks)
+ >>> list(c_2) # c_1's elements have been cached; c_3's haven't been
+ [4, 5, 6, 7]
+ >>> list(c_1)
+ [0, 1, 2, 3]
+ >>> list(c_3)
+ [8, 9, 10, 11]
+
+ """
+ source = iter(iterable)
+
+ while True:
+ # Check to see whether we're at the end of the source iterable
+ item = next(source, _marker)
+ if item is _marker:
+ return
+
+ # Clone the source and yield an n-length slice
+ source, it = tee(chain([item], source))
+ yield islice(it, n)
+
+ # Advance the source iterable
+ consume(source, n)
+
+
+def distinct_combinations(iterable, r):
+ """Yield the distinct combinations of *r* items taken from *iterable*.
+
+ >>> list(distinct_combinations([0, 0, 1], 2))
+ [(0, 0), (0, 1)]
+
+ Equivalent to ``set(combinations(iterable, r))``, except duplicates are not
+ generated and thrown away. For larger input sequences this is much more
+ efficient.
+
+ """
+ if r < 0:
+ raise ValueError('r must be non-negative')
+ elif r == 0:
+ yield ()
+ return
+ pool = tuple(iterable)
+ generators = [unique_everseen(enumerate(pool), key=itemgetter(1))]
+ current_combo = [None] * r
+ level = 0
+ while generators:
+ try:
+ cur_idx, p = next(generators[-1])
+ except StopIteration:
+ generators.pop()
+ level -= 1
+ continue
+ current_combo[level] = p
+ if level + 1 == r:
+ yield tuple(current_combo)
+ else:
+ generators.append(
+ unique_everseen(
+ enumerate(pool[cur_idx + 1 :], cur_idx + 1),
+ key=itemgetter(1),
+ )
+ )
+ level += 1
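+
+
+ # Editor's note: an illustrative sketch, not part of the upstream module.
+ # It checks the equivalence claimed in the docstring: distinct_combinations()
+ # yields exactly the distinct tuples produced by itertools.combinations().
+ def _distinct_combinations_check(pool=(0, 0, 1, 1), r=2):
+ from itertools import combinations
+ expected = set(combinations(pool, r))
+ return set(distinct_combinations(pool, r)) == expected # Expected: True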
+
+
+def filter_except(validator, iterable, *exceptions):
+ """Yield the items from *iterable* for which the *validator* function does
+ not raise one of the specified *exceptions*.
+
+ *validator* is called for each item in *iterable*.
+ It should be a function that accepts one argument and raises an exception
+ if that item is not valid.
+
+ >>> iterable = ['1', '2', 'three', '4', None]
+ >>> list(filter_except(int, iterable, ValueError, TypeError))
+ ['1', '2', '4']
+
+ If an exception other than one given by *exceptions* is raised by
+ *validator*, it is raised like normal.
+ """
+ for item in iterable:
+ try:
+ validator(item)
+ except exceptions:
+ pass
+ else:
+ yield item
+
+
+def map_except(function, iterable, *exceptions):
+ """Transform each item from *iterable* with *function* and yield the
+ result, unless *function* raises one of the specified *exceptions*.
+
+ *function* is called to transform each item in *iterable*.
+ It should accept one argument.
+
+ >>> iterable = ['1', '2', 'three', '4', None]
+ >>> list(map_except(int, iterable, ValueError, TypeError))
+ [1, 2, 4]
+
+ If an exception other than one given by *exceptions* is raised by
+ *function*, it is raised like normal.
+ """
+ for item in iterable:
+ try:
+ yield function(item)
+ except exceptions:
+ pass
+
+
+def _sample_unweighted(iterable, k):
+ # Implementation of "Algorithm L" from the 1994 paper by Kim-Hung Li:
+ # "Reservoir-Sampling Algorithms of Time Complexity O(n(1+log(N/n)))".
+
+ # Fill up the reservoir (collection of samples) with the first `k` samples
+ reservoir = take(k, iterable)
+
+ # Generate random number that's the largest in a sample of k U(0,1) numbers
+ # Largest order statistic: https://en.wikipedia.org/wiki/Order_statistic
+ W = exp(log(random()) / k)
+
+ # The number of elements to skip before changing the reservoir is a random
+ # number with a geometric distribution. Sample it using random() and logs.
+ next_index = k + floor(log(random()) / log(1 - W))
+
+ for index, element in enumerate(iterable, k):
+
+ if index == next_index:
+ reservoir[randrange(k)] = element
+ # The new W is the largest in a sample of k U(0, `old_W`) numbers
+ W *= exp(log(random()) / k)
+ next_index += floor(log(random()) / log(1 - W)) + 1
+
+ return reservoir
+
+
+def _sample_weighted(iterable, k, weights):
+ # Implementation of "A-ExpJ" from the 2006 paper by Efraimidis et al. :
+ # "Weighted random sampling with a reservoir".
+
+ # Log-transform for numerical stability for weights that are small/large
+ weight_keys = (log(random()) / weight for weight in weights)
+
+ # Fill up the reservoir (collection of samples) with the first `k`
+ # weight-keys and elements, then heapify the list.
+ reservoir = take(k, zip(weight_keys, iterable))
+ heapify(reservoir)
+
+ # The number of jumps before changing the reservoir is a random variable
+ # with an exponential distribution. Sample it using random() and logs.
+ smallest_weight_key, _ = reservoir[0]
+ weights_to_skip = log(random()) / smallest_weight_key
+
+ for weight, element in zip(weights, iterable):
+ if weight >= weights_to_skip:
+ # The notation here is consistent with the paper, but we store
+ # the weight-keys in log-space for better numerical stability.
+ smallest_weight_key, _ = reservoir[0]
+ t_w = exp(weight * smallest_weight_key)
+ r_2 = uniform(t_w, 1) # generate U(t_w, 1)
+ weight_key = log(r_2) / weight
+ heapreplace(reservoir, (weight_key, element))
+ smallest_weight_key, _ = reservoir[0]
+ weights_to_skip = log(random()) / smallest_weight_key
+ else:
+ weights_to_skip -= weight
+
+ # Equivalent to [element for weight_key, element in sorted(reservoir)]
+ return [heappop(reservoir)[1] for _ in range(k)]
+
+
+def sample(iterable, k, weights=None):
+ """Return a *k*-length list of elements chosen (without replacement)
+ from the *iterable*. Like :func:`random.sample`, but works on iterables
+ of unknown length.
+
+ >>> iterable = range(100)
+ >>> sample(iterable, 5) # doctest: +SKIP
+ [81, 60, 96, 16, 4]
+
+ An iterable with *weights* may also be given:
+
+ >>> iterable = range(100)
+ >>> weights = (i * i + 1 for i in range(100))
+ >>> sample(iterable, 5, weights=weights) # doctest: +SKIP
+ [79, 67, 74, 66, 78]
+
+ The algorithm can also be used to generate weighted random permutations.
+ The relative weight of each item determines the probability that it
+ appears late in the permutation.
+
+ >>> data = "abcdefgh"
+ >>> weights = range(1, len(data) + 1)
+ >>> sample(data, k=len(data), weights=weights) # doctest: +SKIP
+ ['c', 'a', 'b', 'e', 'g', 'd', 'h', 'f']
+ """
+ if k == 0:
+ return []
+
+ iterable = iter(iterable)
+ if weights is None:
+ return _sample_unweighted(iterable, k)
+ else:
+ weights = iter(weights)
+ return _sample_weighted(iterable, k, weights)
+
+
+def is_sorted(iterable, key=None, reverse=False):
+ """Returns ``True`` if the items of iterable are in sorted order, and
+ ``False`` otherwise. *key* and *reverse* have the same meaning that they do
+ in the built-in :func:`sorted` function.
+
+ >>> is_sorted(['1', '2', '3', '4', '5'], key=int)
+ True
+ >>> is_sorted([5, 4, 3, 1, 2], reverse=True)
+ False
+
+ The function returns ``False`` after encountering the first out-of-order
+ item. If there are no out-of-order items, the iterable is exhausted.
+ """
+
+ compare = lt if reverse else gt
+ it = iterable if (key is None) else map(key, iterable)
+ return not any(starmap(compare, pairwise(it)))
+
+
+class AbortThread(BaseException):
+ pass
+
+
+class callback_iter:
+ """Convert a function that uses callbacks to an iterator.
+
+ Let *func* be a function that takes a `callback` keyword argument.
+ For example:
+
+ >>> def func(callback=None):
+ ... for i, c in [(1, 'a'), (2, 'b'), (3, 'c')]:
+ ... if callback:
+ ... callback(i, c)
+ ... return 4
+
+
+ Use ``with callback_iter(func)`` to get an iterator over the parameters
+ that are delivered to the callback.
+
+ >>> with callback_iter(func) as it:
+ ... for args, kwargs in it:
+ ... print(args)
+ (1, 'a')
+ (2, 'b')
+ (3, 'c')
+
+ The function will be called in a background thread. The ``done`` property
+ indicates whether it has completed execution.
+
+ >>> it.done
+ True
+
+ If it completes successfully, its return value will be available
+ in the ``result`` property.
+
+ >>> it.result
+ 4
+
+ Notes:
+
+ * If the function uses some keyword argument besides ``callback``, supply
+ *callback_kwd*.
+ * If it finished executing, but raised an exception, accessing the
+ ``result`` property will raise the same exception.
+ * If it hasn't finished executing, accessing the ``result``
+ property from within the ``with`` block will raise ``RuntimeError``.
+ * If it hasn't finished executing, accessing the ``result`` property from
+ outside the ``with`` block will raise a
+ ``more_itertools.AbortThread`` exception.
+    * Provide *wait_seconds* to adjust how frequently it is polled for
+ output.
+
+ """
+
+ def __init__(self, func, callback_kwd='callback', wait_seconds=0.1):
+ self._func = func
+ self._callback_kwd = callback_kwd
+ self._aborted = False
+ self._future = None
+ self._wait_seconds = wait_seconds
+ self._executor = __import__("concurrent.futures").futures.ThreadPoolExecutor(max_workers=1)
+ self._iterator = self._reader()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self._aborted = True
+ self._executor.shutdown()
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ return next(self._iterator)
+
+ @property
+ def done(self):
+ if self._future is None:
+ return False
+ return self._future.done()
+
+ @property
+ def result(self):
+ if not self.done:
+ raise RuntimeError('Function has not yet completed')
+
+ return self._future.result()
+
+ def _reader(self):
+ q = Queue()
+
+ def callback(*args, **kwargs):
+ if self._aborted:
+ raise AbortThread('canceled by user')
+
+ q.put((args, kwargs))
+
+ self._future = self._executor.submit(
+ self._func, **{self._callback_kwd: callback}
+ )
+
+ while True:
+ try:
+ item = q.get(timeout=self._wait_seconds)
+ except Empty:
+ pass
+ else:
+ q.task_done()
+ yield item
+
+ if self._future.done():
+ break
+
+ remaining = []
+ while True:
+ try:
+ item = q.get_nowait()
+ except Empty:
+ break
+ else:
+ q.task_done()
+ remaining.append(item)
+ q.join()
+ yield from remaining
+
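+# Illustrative sketch, not part of upstream more_itertools: any function that
+# reports progress through a keyword callback can be consumed lazily.  The
+# ``slow_job`` function below is hypothetical.
+#
+#     >>> def slow_job(progress=None):
+#     ...     for pct in (25, 50, 75, 100):
+#     ...         if progress:
+#     ...             progress(pct)
+#     ...     return 'done'
+#     >>> with callback_iter(slow_job, callback_kwd='progress') as it:
+#     ...     for args, kwargs in it:
+#     ...         print(args)
+#     (25,)
+#     (50,)
+#     (75,)
+#     (100,)
+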
+
+def windowed_complete(iterable, n):
+ """
+ Yield ``(beginning, middle, end)`` tuples, where:
+
+ * Each ``middle`` has *n* items from *iterable*
+ * Each ``beginning`` has the items before the ones in ``middle``
+ * Each ``end`` has the items after the ones in ``middle``
+
+ >>> iterable = range(7)
+ >>> n = 3
+ >>> for beginning, middle, end in windowed_complete(iterable, n):
+ ... print(beginning, middle, end)
+ () (0, 1, 2) (3, 4, 5, 6)
+ (0,) (1, 2, 3) (4, 5, 6)
+ (0, 1) (2, 3, 4) (5, 6)
+ (0, 1, 2) (3, 4, 5) (6,)
+ (0, 1, 2, 3) (4, 5, 6) ()
+
+    Note that *n* must be at least 0 and at most equal to the length of
+ *iterable*.
+
+ This function will exhaust the iterable and may require significant
+ storage.
+ """
+ if n < 0:
+ raise ValueError('n must be >= 0')
+
+ seq = tuple(iterable)
+ size = len(seq)
+
+ if n > size:
+ raise ValueError('n must be <= len(seq)')
+
+ for i in range(size - n + 1):
+ beginning = seq[:i]
+ middle = seq[i : i + n]
+ end = seq[i + n :]
+ yield beginning, middle, end
+
+
+def all_unique(iterable, key=None):
+ """
+ Returns ``True`` if all the elements of *iterable* are unique (no two
+ elements are equal).
+
+ >>> all_unique('ABCB')
+ False
+
+ If a *key* function is specified, it will be used to make comparisons.
+
+ >>> all_unique('ABCb')
+ True
+ >>> all_unique('ABCb', str.lower)
+ False
+
+ The function returns as soon as the first non-unique element is
+ encountered. Iterables with a mix of hashable and unhashable items can
+ be used, but the function will be slower for unhashable items.
+ """
+ seenset = set()
+ seenset_add = seenset.add
+ seenlist = []
+ seenlist_add = seenlist.append
+ for element in map(key, iterable) if key else iterable:
+ try:
+ if element in seenset:
+ return False
+ seenset_add(element)
+ except TypeError:
+ if element in seenlist:
+ return False
+ seenlist_add(element)
+ return True
+
+
+def nth_product(index, *args):
+ """Equivalent to ``list(product(*args))[index]``.
+
+ The products of *args* can be ordered lexicographically.
+ :func:`nth_product` computes the product at sort position *index* without
+ computing the previous products.
+
+ >>> nth_product(8, range(2), range(2), range(2), range(2))
+ (1, 0, 0, 0)
+
+ ``IndexError`` will be raised if the given *index* is invalid.
+ """
+ pools = list(map(tuple, reversed(args)))
+ ns = list(map(len, pools))
+
+ c = reduce(mul, ns)
+
+ if index < 0:
+ index += c
+
+ if not 0 <= index < c:
+ raise IndexError
+
+ result = []
+ for pool, n in zip(pools, ns):
+ result.append(pool[index % n])
+ index //= n
+
+ return tuple(reversed(result))
+
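+# Illustrative sketch, not part of upstream more_itertools: *index* is treated
+# as a mixed-radix number whose digits select one item from each pool.
+#
+#     >>> nth_product(5, 'ab', 'cd', 'ef')
+#     ('b', 'c', 'f')
+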
+
+def nth_permutation(iterable, r, index):
+ """Equivalent to ``list(permutations(iterable, r))[index]```
+
+ The subsequences of *iterable* that are of length *r* where order is
+ important can be ordered lexicographically. :func:`nth_permutation`
+ computes the subsequence at sort position *index* directly, without
+ computing the previous subsequences.
+
+ >>> nth_permutation('ghijk', 2, 5)
+ ('h', 'i')
+
+    ``ValueError`` will be raised if *r* is negative or greater than the length
+ of *iterable*.
+ ``IndexError`` will be raised if the given *index* is invalid.
+ """
+ pool = list(iterable)
+ n = len(pool)
+
+ if r is None or r == n:
+ r, c = n, factorial(n)
+ elif not 0 <= r < n:
+ raise ValueError
+ else:
+ c = factorial(n) // factorial(n - r)
+
+ if index < 0:
+ index += c
+
+ if not 0 <= index < c:
+ raise IndexError
+
+ if c == 0:
+ return tuple()
+
+ result = [0] * r
+ q = index * factorial(n) // c if r < n else index
+ for d in range(1, n + 1):
+ q, i = divmod(q, d)
+ if 0 <= n - d < r:
+ result[n - d] = i
+ if q == 0:
+ break
+
+ return tuple(map(pool.pop, result))
+
+
+def value_chain(*args):
+ """Yield all arguments passed to the function in the same order in which
+ they were passed. If an argument itself is iterable then iterate over its
+ values.
+
+ >>> list(value_chain(1, 2, 3, [4, 5, 6]))
+ [1, 2, 3, 4, 5, 6]
+
+ Binary and text strings are not considered iterable and are emitted
+ as-is:
+
+ >>> list(value_chain('12', '34', ['56', '78']))
+ ['12', '34', '56', '78']
+
+
+ Multiple levels of nesting are not flattened.
+
+ """
+ for value in args:
+ if isinstance(value, (str, bytes)):
+ yield value
+ continue
+ try:
+ yield from value
+ except TypeError:
+ yield value
+
+
+def product_index(element, *args):
+ """Equivalent to ``list(product(*args)).index(element)``
+
+ The products of *args* can be ordered lexicographically.
+ :func:`product_index` computes the first index of *element* without
+ computing the previous products.
+
+ >>> product_index([8, 2], range(10), range(5))
+ 42
+
+ ``ValueError`` will be raised if the given *element* isn't in the product
+ of *args*.
+ """
+ index = 0
+
+ for x, pool in zip_longest(element, args, fillvalue=_marker):
+ if x is _marker or pool is _marker:
+ raise ValueError('element is not a product of args')
+
+ pool = tuple(pool)
+ index = index * len(pool) + pool.index(x)
+
+ return index
+
+
+def combination_index(element, iterable):
+ """Equivalent to ``list(combinations(iterable, r)).index(element)``
+
+ The subsequences of *iterable* that are of length *r* can be ordered
+ lexicographically. :func:`combination_index` computes the index of the
+ first *element*, without computing the previous combinations.
+
+ >>> combination_index('adf', 'abcdefg')
+ 10
+
+ ``ValueError`` will be raised if the given *element* isn't one of the
+ combinations of *iterable*.
+ """
+ element = enumerate(element)
+ k, y = next(element, (None, None))
+ if k is None:
+ return 0
+
+ indexes = []
+ pool = enumerate(iterable)
+ for n, x in pool:
+ if x == y:
+ indexes.append(n)
+ tmp, y = next(element, (None, None))
+ if tmp is None:
+ break
+ else:
+ k = tmp
+ else:
+ raise ValueError('element is not a combination of iterable')
+
+ n, _ = last(pool, default=(n, None))
+
+    # Python versions below 3.8 don't have math.comb
+ index = 1
+ for i, j in enumerate(reversed(indexes), start=1):
+ j = n - j
+ if i <= j:
+ index += factorial(j) // (factorial(i) * factorial(j - i))
+
+ return factorial(n + 1) // (factorial(k + 1) * factorial(n - k)) - index
+
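+# Illustrative note, not part of upstream more_itertools: the factorial
+# expression above computes the binomial coefficient "j choose i"; on
+# Python 3.8+ it equals ``math.comb(j, i)``.
+#
+#     >>> from math import factorial
+#     >>> factorial(5) // (factorial(2) * factorial(3))
+#     10
+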
+
+def permutation_index(element, iterable):
+ """Equivalent to ``list(permutations(iterable, r)).index(element)```
+
+ The subsequences of *iterable* that are of length *r* where order is
+ important can be ordered lexicographically. :func:`permutation_index`
+ computes the index of the first *element* directly, without computing
+ the previous permutations.
+
+ >>> permutation_index([1, 3, 2], range(5))
+ 19
+
+ ``ValueError`` will be raised if the given *element* isn't one of the
+ permutations of *iterable*.
+ """
+ index = 0
+ pool = list(iterable)
+ for i, x in zip(range(len(pool), -1, -1), element):
+ r = pool.index(x)
+ index = index * i + r
+ del pool[r]
+
+ return index
+
+
+class countable:
+ """Wrap *iterable* and keep a count of how many items have been consumed.
+
+ The ``items_seen`` attribute starts at ``0`` and increments as the iterable
+ is consumed:
+
+ >>> iterable = map(str, range(10))
+ >>> it = countable(iterable)
+ >>> it.items_seen
+ 0
+ >>> next(it), next(it)
+ ('0', '1')
+ >>> list(it)
+ ['2', '3', '4', '5', '6', '7', '8', '9']
+ >>> it.items_seen
+ 10
+ """
+
+ def __init__(self, iterable):
+ self._it = iter(iterable)
+ self.items_seen = 0
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ item = next(self._it)
+ self.items_seen += 1
+
+ return item
diff --git a/setuptools/_vendor/more_itertools/recipes.py b/setuptools/_vendor/more_itertools/recipes.py
new file mode 100644
index 0000000..521abd7
--- /dev/null
+++ b/setuptools/_vendor/more_itertools/recipes.py
@@ -0,0 +1,620 @@
+"""Imported from the recipes section of the itertools documentation.
+
+All functions taken from the recipes section of the itertools library docs
+[1]_.
+Some backward-compatible usability improvements have been made.
+
+.. [1] http://docs.python.org/library/itertools.html#recipes
+
+"""
+import warnings
+from collections import deque
+from itertools import (
+ chain,
+ combinations,
+ count,
+ cycle,
+ groupby,
+ islice,
+ repeat,
+ starmap,
+ tee,
+ zip_longest,
+)
+import operator
+from random import randrange, sample, choice
+
+__all__ = [
+ 'all_equal',
+ 'consume',
+ 'convolve',
+ 'dotproduct',
+ 'first_true',
+ 'flatten',
+ 'grouper',
+ 'iter_except',
+ 'ncycles',
+ 'nth',
+ 'nth_combination',
+ 'padnone',
+ 'pad_none',
+ 'pairwise',
+ 'partition',
+ 'powerset',
+ 'prepend',
+ 'quantify',
+ 'random_combination_with_replacement',
+ 'random_combination',
+ 'random_permutation',
+ 'random_product',
+ 'repeatfunc',
+ 'roundrobin',
+ 'tabulate',
+ 'tail',
+ 'take',
+ 'unique_everseen',
+ 'unique_justseen',
+]
+
+
+def take(n, iterable):
+ """Return first *n* items of the iterable as a list.
+
+ >>> take(3, range(10))
+ [0, 1, 2]
+
+ If there are fewer than *n* items in the iterable, all of them are
+ returned.
+
+ >>> take(10, range(3))
+ [0, 1, 2]
+
+ """
+ return list(islice(iterable, n))
+
+
+def tabulate(function, start=0):
+ """Return an iterator over the results of ``func(start)``,
+ ``func(start + 1)``, ``func(start + 2)``...
+
+ *func* should be a function that accepts one integer argument.
+
+ If *start* is not specified it defaults to 0. It will be incremented each
+ time the iterator is advanced.
+
+ >>> square = lambda x: x ** 2
+ >>> iterator = tabulate(square, -3)
+ >>> take(4, iterator)
+ [9, 4, 1, 0]
+
+ """
+ return map(function, count(start))
+
+
+def tail(n, iterable):
+ """Return an iterator over the last *n* items of *iterable*.
+
+ >>> t = tail(3, 'ABCDEFG')
+ >>> list(t)
+ ['E', 'F', 'G']
+
+ """
+ return iter(deque(iterable, maxlen=n))
+
+
+def consume(iterator, n=None):
+ """Advance *iterable* by *n* steps. If *n* is ``None``, consume it
+ entirely.
+
+ Efficiently exhausts an iterator without returning values. Defaults to
+ consuming the whole iterator, but an optional second argument may be
+ provided to limit consumption.
+
+ >>> i = (x for x in range(10))
+ >>> next(i)
+ 0
+ >>> consume(i, 3)
+ >>> next(i)
+ 4
+ >>> consume(i)
+ >>> next(i)
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ StopIteration
+
+ If the iterator has fewer items remaining than the provided limit, the
+ whole iterator will be consumed.
+
+ >>> i = (x for x in range(3))
+ >>> consume(i, 5)
+ >>> next(i)
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ StopIteration
+
+ """
+ # Use functions that consume iterators at C speed.
+ if n is None:
+ # feed the entire iterator into a zero-length deque
+ deque(iterator, maxlen=0)
+ else:
+ # advance to the empty slice starting at position n
+ next(islice(iterator, n, n), None)
+
+
+def nth(iterable, n, default=None):
+ """Returns the nth item or a default value.
+
+ >>> l = range(10)
+ >>> nth(l, 3)
+ 3
+ >>> nth(l, 20, "zebra")
+ 'zebra'
+
+ """
+ return next(islice(iterable, n, None), default)
+
+
+def all_equal(iterable):
+ """
+ Returns ``True`` if all the elements are equal to each other.
+
+ >>> all_equal('aaaa')
+ True
+ >>> all_equal('aaab')
+ False
+
+ """
+ g = groupby(iterable)
+ return next(g, True) and not next(g, False)
+
+
+def quantify(iterable, pred=bool):
+ """Return the how many times the predicate is true.
+
+ >>> quantify([True, False, True])
+ 2
+
+ """
+ return sum(map(pred, iterable))
+
+
+def pad_none(iterable):
+ """Returns the sequence of elements and then returns ``None`` indefinitely.
+
+ >>> take(5, pad_none(range(3)))
+ [0, 1, 2, None, None]
+
+ Useful for emulating the behavior of the built-in :func:`map` function.
+
+ See also :func:`padded`.
+
+ """
+ return chain(iterable, repeat(None))
+
+
+padnone = pad_none
+
+
+def ncycles(iterable, n):
+ """Returns the sequence elements *n* times
+
+ >>> list(ncycles(["a", "b"], 3))
+ ['a', 'b', 'a', 'b', 'a', 'b']
+
+ """
+ return chain.from_iterable(repeat(tuple(iterable), n))
+
+
+def dotproduct(vec1, vec2):
+ """Returns the dot product of the two iterables.
+
+ >>> dotproduct([10, 10], [20, 20])
+ 400
+
+ """
+ return sum(map(operator.mul, vec1, vec2))
+
+
+def flatten(listOfLists):
+ """Return an iterator flattening one level of nesting in a list of lists.
+
+ >>> list(flatten([[0, 1], [2, 3]]))
+ [0, 1, 2, 3]
+
+ See also :func:`collapse`, which can flatten multiple levels of nesting.
+
+ """
+ return chain.from_iterable(listOfLists)
+
+
+def repeatfunc(func, times=None, *args):
+ """Call *func* with *args* repeatedly, returning an iterable over the
+ results.
+
+ If *times* is specified, the iterable will terminate after that many
+ repetitions:
+
+ >>> from operator import add
+ >>> times = 4
+ >>> args = 3, 5
+ >>> list(repeatfunc(add, times, *args))
+ [8, 8, 8, 8]
+
+ If *times* is ``None`` the iterable will not terminate:
+
+ >>> from random import randrange
+ >>> times = None
+ >>> args = 1, 11
+ >>> take(6, repeatfunc(randrange, times, *args)) # doctest:+SKIP
+ [2, 4, 8, 1, 8, 4]
+
+ """
+ if times is None:
+ return starmap(func, repeat(args))
+ return starmap(func, repeat(args, times))
+
+
+def _pairwise(iterable):
+ """Returns an iterator of paired items, overlapping, from the original
+
+ >>> take(4, pairwise(count()))
+ [(0, 1), (1, 2), (2, 3), (3, 4)]
+
+ On Python 3.10 and above, this is an alias for :func:`itertools.pairwise`.
+
+ """
+ a, b = tee(iterable)
+ next(b, None)
+ yield from zip(a, b)
+
+
+try:
+ from itertools import pairwise as itertools_pairwise
+except ImportError:
+ pairwise = _pairwise
+else:
+
+ def pairwise(iterable):
+ yield from itertools_pairwise(iterable)
+
+ pairwise.__doc__ = _pairwise.__doc__
+
+
+def grouper(iterable, n, fillvalue=None):
+ """Collect data into fixed-length chunks or blocks.
+
+ >>> list(grouper('ABCDEFG', 3, 'x'))
+ [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]
+
+ """
+ if isinstance(iterable, int):
+ warnings.warn(
+ "grouper expects iterable as first parameter", DeprecationWarning
+ )
+ n, iterable = iterable, n
+ args = [iter(iterable)] * n
+ return zip_longest(fillvalue=fillvalue, *args)
+
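+# Illustrative note, not part of upstream more_itertools: the ``isinstance``
+# check above keeps the legacy ``grouper(n, iterable)`` argument order working,
+# although it emits a ``DeprecationWarning``.
+#
+#     >>> list(grouper(3, 'ABCDEF'))  # legacy order, warns
+#     [('A', 'B', 'C'), ('D', 'E', 'F')]
+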
+
+def roundrobin(*iterables):
+ """Yields an item from each iterable, alternating between them.
+
+ >>> list(roundrobin('ABC', 'D', 'EF'))
+ ['A', 'D', 'E', 'B', 'F', 'C']
+
+ This function produces the same output as :func:`interleave_longest`, but
+ may perform better for some inputs (in particular when the number of
+ iterables is small).
+
+ """
+ # Recipe credited to George Sakkis
+ pending = len(iterables)
+ nexts = cycle(iter(it).__next__ for it in iterables)
+ while pending:
+ try:
+ for next in nexts:
+ yield next()
+ except StopIteration:
+ pending -= 1
+ nexts = cycle(islice(nexts, pending))
+
+
+def partition(pred, iterable):
+ """
+ Returns a 2-tuple of iterables derived from the input iterable.
+ The first yields the items that have ``pred(item) == False``.
+ The second yields the items that have ``pred(item) == True``.
+
+ >>> is_odd = lambda x: x % 2 != 0
+ >>> iterable = range(10)
+ >>> even_items, odd_items = partition(is_odd, iterable)
+ >>> list(even_items), list(odd_items)
+ ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])
+
+ If *pred* is None, :func:`bool` is used.
+
+ >>> iterable = [0, 1, False, True, '', ' ']
+ >>> false_items, true_items = partition(None, iterable)
+ >>> list(false_items), list(true_items)
+ ([0, False, ''], [1, True, ' '])
+
+ """
+ if pred is None:
+ pred = bool
+
+ evaluations = ((pred(x), x) for x in iterable)
+ t1, t2 = tee(evaluations)
+ return (
+ (x for (cond, x) in t1 if not cond),
+ (x for (cond, x) in t2 if cond),
+ )
+
+
+def powerset(iterable):
+ """Yields all possible subsets of the iterable.
+
+ >>> list(powerset([1, 2, 3]))
+ [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]
+
+ :func:`powerset` will operate on iterables that aren't :class:`set`
+ instances, so repeated elements in the input will produce repeated elements
+ in the output. Use :func:`unique_everseen` on the input to avoid generating
+ duplicates:
+
+ >>> seq = [1, 1, 0]
+ >>> list(powerset(seq))
+ [(), (1,), (1,), (0,), (1, 1), (1, 0), (1, 0), (1, 1, 0)]
+ >>> from more_itertools import unique_everseen
+ >>> list(powerset(unique_everseen(seq)))
+ [(), (1,), (0,), (1, 0)]
+
+ """
+ s = list(iterable)
+ return chain.from_iterable(combinations(s, r) for r in range(len(s) + 1))
+
+
+def unique_everseen(iterable, key=None):
+ """
+ Yield unique elements, preserving order.
+
+ >>> list(unique_everseen('AAAABBBCCDAABBB'))
+ ['A', 'B', 'C', 'D']
+ >>> list(unique_everseen('ABBCcAD', str.lower))
+ ['A', 'B', 'C', 'D']
+
+ Sequences with a mix of hashable and unhashable items can be used.
+ The function will be slower (i.e., `O(n^2)`) for unhashable items.
+
+ Remember that ``list`` objects are unhashable - you can use the *key*
+ parameter to transform the list to a tuple (which is hashable) to
+ avoid a slowdown.
+
+ >>> iterable = ([1, 2], [2, 3], [1, 2])
+ >>> list(unique_everseen(iterable)) # Slow
+ [[1, 2], [2, 3]]
+ >>> list(unique_everseen(iterable, key=tuple)) # Faster
+ [[1, 2], [2, 3]]
+
+    Similarly, you may want to convert unhashable ``set`` objects with
+ ``key=frozenset``. For ``dict`` objects,
+ ``key=lambda x: frozenset(x.items())`` can be used.
+
+ """
+ seenset = set()
+ seenset_add = seenset.add
+ seenlist = []
+ seenlist_add = seenlist.append
+ use_key = key is not None
+
+ for element in iterable:
+ k = key(element) if use_key else element
+ try:
+ if k not in seenset:
+ seenset_add(k)
+ yield element
+ except TypeError:
+ if k not in seenlist:
+ seenlist_add(k)
+ yield element
+
+
+def unique_justseen(iterable, key=None):
+ """Yields elements in order, ignoring serial duplicates
+
+ >>> list(unique_justseen('AAAABBBCCDAABBB'))
+ ['A', 'B', 'C', 'D', 'A', 'B']
+ >>> list(unique_justseen('ABBCcAD', str.lower))
+ ['A', 'B', 'C', 'A', 'D']
+
+ """
+ return map(next, map(operator.itemgetter(1), groupby(iterable, key)))
+
+
+def iter_except(func, exception, first=None):
+ """Yields results from a function repeatedly until an exception is raised.
+
+ Converts a call-until-exception interface to an iterator interface.
+ Like ``iter(func, sentinel)``, but uses an exception instead of a sentinel
+ to end the loop.
+
+ >>> l = [0, 1, 2]
+ >>> list(iter_except(l.pop, IndexError))
+ [2, 1, 0]
+
+ """
+ try:
+ if first is not None:
+ yield first()
+ while 1:
+ yield func()
+ except exception:
+ pass
+
+
+def first_true(iterable, default=None, pred=None):
+ """
+ Returns the first true value in the iterable.
+
+ If no true value is found, returns *default*
+
+ If *pred* is not None, returns the first item for which
+    ``pred(item) == True``.
+
+ >>> first_true(range(10))
+ 1
+ >>> first_true(range(10), pred=lambda x: x > 5)
+ 6
+ >>> first_true(range(10), default='missing', pred=lambda x: x > 9)
+ 'missing'
+
+ """
+ return next(filter(pred, iterable), default)
+
+
+def random_product(*args, repeat=1):
+ """Draw an item at random from each of the input iterables.
+
+ >>> random_product('abc', range(4), 'XYZ') # doctest:+SKIP
+ ('c', 3, 'Z')
+
+ If *repeat* is provided as a keyword argument, that many items will be
+ drawn from each iterable.
+
+ >>> random_product('abcd', range(4), repeat=2) # doctest:+SKIP
+ ('a', 2, 'd', 3)
+
+    This is equivalent to taking a random selection from
+    ``itertools.product(*args, repeat=repeat)``.
+
+ """
+ pools = [tuple(pool) for pool in args] * repeat
+ return tuple(choice(pool) for pool in pools)
+
+
+def random_permutation(iterable, r=None):
+ """Return a random *r* length permutation of the elements in *iterable*.
+
+ If *r* is not specified or is ``None``, then *r* defaults to the length of
+ *iterable*.
+
+ >>> random_permutation(range(5)) # doctest:+SKIP
+ (3, 4, 0, 1, 2)
+
+    This is equivalent to taking a random selection from
+ ``itertools.permutations(iterable, r)``.
+
+ """
+ pool = tuple(iterable)
+ r = len(pool) if r is None else r
+ return tuple(sample(pool, r))
+
+
+def random_combination(iterable, r):
+ """Return a random *r* length subsequence of the elements in *iterable*.
+
+ >>> random_combination(range(5), 3) # doctest:+SKIP
+ (2, 3, 4)
+
+    This is equivalent to taking a random selection from
+ ``itertools.combinations(iterable, r)``.
+
+ """
+ pool = tuple(iterable)
+ n = len(pool)
+ indices = sorted(sample(range(n), r))
+ return tuple(pool[i] for i in indices)
+
+
+def random_combination_with_replacement(iterable, r):
+ """Return a random *r* length subsequence of elements in *iterable*,
+ allowing individual elements to be repeated.
+
+ >>> random_combination_with_replacement(range(3), 5) # doctest:+SKIP
+ (0, 0, 1, 2, 2)
+
+    This is equivalent to taking a random selection from
+ ``itertools.combinations_with_replacement(iterable, r)``.
+
+ """
+ pool = tuple(iterable)
+ n = len(pool)
+ indices = sorted(randrange(n) for i in range(r))
+ return tuple(pool[i] for i in indices)
+
+
+def nth_combination(iterable, r, index):
+ """Equivalent to ``list(combinations(iterable, r))[index]``.
+
+ The subsequences of *iterable* that are of length *r* can be ordered
+ lexicographically. :func:`nth_combination` computes the subsequence at
+ sort position *index* directly, without computing the previous
+ subsequences.
+
+ >>> nth_combination(range(5), 3, 5)
+ (0, 3, 4)
+
+    ``ValueError`` will be raised if *r* is negative or greater than the length
+ of *iterable*.
+ ``IndexError`` will be raised if the given *index* is invalid.
+ """
+ pool = tuple(iterable)
+ n = len(pool)
+ if (r < 0) or (r > n):
+ raise ValueError
+
+ c = 1
+ k = min(r, n - r)
+ for i in range(1, k + 1):
+ c = c * (n - k + i) // i
+
+ if index < 0:
+ index += c
+
+ if (index < 0) or (index >= c):
+ raise IndexError
+
+ result = []
+ while r:
+ c, n, r = c * r // n, n - 1, r - 1
+ while index >= c:
+ index -= c
+ c, n = c * (n - r) // n, n - 1
+ result.append(pool[-1 - n])
+
+ return tuple(result)
+
+
+def prepend(value, iterator):
+ """Yield *value*, followed by the elements in *iterator*.
+
+ >>> value = '0'
+ >>> iterator = ['1', '2', '3']
+ >>> list(prepend(value, iterator))
+ ['0', '1', '2', '3']
+
+ To prepend multiple values, see :func:`itertools.chain`
+ or :func:`value_chain`.
+
+ """
+ return chain([value], iterator)
+
+
+def convolve(signal, kernel):
+ """Convolve the iterable *signal* with the iterable *kernel*.
+
+ >>> signal = (1, 2, 3, 4, 5)
+ >>> kernel = [3, 2, 1]
+ >>> list(convolve(signal, kernel))
+ [3, 8, 14, 20, 26, 14, 5]
+
+ Note: the input arguments are not interchangeable, as the *kernel*
+ is immediately consumed and stored.
+
+ """
+ kernel = tuple(kernel)[::-1]
+ n = len(kernel)
+ window = deque([0], maxlen=n) * n
+ for x in chain(signal, repeat(0, n - 1)):
+ window.append(x)
+ yield sum(map(operator.mul, kernel, window))
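+
+
+# Illustrative note, not part of upstream more_itertools: this is a "full"
+# convolution, so the output has ``len(signal) + len(kernel) - 1`` terms.
+# A two-point moving average is a typical use:
+#
+#     >>> list(convolve([1, 2, 3, 4], [0.5, 0.5]))
+#     [0.5, 1.5, 2.5, 3.5, 2.0]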
diff --git a/setuptools/_vendor/nspektr-0.3.0.dist-info/top_level.txt b/setuptools/_vendor/nspektr-0.3.0.dist-info/top_level.txt
new file mode 100644
index 0000000..b10ef50
--- /dev/null
+++ b/setuptools/_vendor/nspektr-0.3.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+nspektr
diff --git a/setuptools/_vendor/nspektr/__init__.py b/setuptools/_vendor/nspektr/__init__.py
new file mode 100644
index 0000000..938bbdb
--- /dev/null
+++ b/setuptools/_vendor/nspektr/__init__.py
@@ -0,0 +1,145 @@
+import itertools
+import functools
+import contextlib
+
+from setuptools.extern.packaging.requirements import Requirement
+from setuptools.extern.packaging.version import Version
+from setuptools.extern.more_itertools import always_iterable
+from setuptools.extern.jaraco.context import suppress
+from setuptools.extern.jaraco.functools import apply
+
+from ._compat import metadata, repair_extras
+
+
+def resolve(req: Requirement) -> metadata.Distribution:
+ """
+ Resolve the requirement to its distribution.
+
+ Ignore exception detail for Python 3.9 compatibility.
+
+ >>> resolve(Requirement('pytest<3')) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ importlib.metadata.PackageNotFoundError: No package metadata was found for pytest<3
+ """
+ dist = metadata.distribution(req.name)
+ if not req.specifier.contains(Version(dist.version), prereleases=True):
+ raise metadata.PackageNotFoundError(str(req))
+ dist.extras = req.extras # type: ignore
+ return dist
+
+
+@apply(bool)
+@suppress(metadata.PackageNotFoundError)
+def is_satisfied(req: Requirement):
+ return resolve(req)
+
+
+unsatisfied = functools.partial(itertools.filterfalse, is_satisfied)
+
+
+class NullMarker:
+ @classmethod
+ def wrap(cls, req: Requirement):
+ return req.marker or cls()
+
+ def evaluate(self, *args, **kwargs):
+ return True
+
+
+def find_direct_dependencies(dist, extras=None):
+ """
+ Find direct, declared dependencies for dist.
+ """
+ simple = (
+ req
+ for req in map(Requirement, always_iterable(dist.requires))
+ if NullMarker.wrap(req).evaluate(dict(extra=None))
+ )
+ extra_deps = (
+ req
+ for req in map(Requirement, always_iterable(dist.requires))
+ for extra in always_iterable(getattr(dist, 'extras', extras))
+ if NullMarker.wrap(req).evaluate(dict(extra=extra))
+ )
+ return itertools.chain(simple, extra_deps)
+
+
+def traverse(items, visit):
+ """
+ Given an iterable of items, traverse the items.
+
+ For each item, visit is called to return any additional items
+ to include in the traversal.
+ """
+ while True:
+ try:
+ item = next(items)
+ except StopIteration:
+ return
+ yield item
+ items = itertools.chain(items, visit(item))
+
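+# Illustrative sketch, not part of upstream nspektr: ``traverse`` keeps
+# chaining whatever ``visit`` returns onto the end of the work list, so the
+# walk is breadth-first.  A toy visit function:
+#
+#     >>> list(traverse(iter([1]), lambda n: [n + 1] if n < 3 else []))
+#     [1, 2, 3]
+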
+
+def find_req_dependencies(req):
+ with contextlib.suppress(metadata.PackageNotFoundError):
+ dist = resolve(req)
+ yield from find_direct_dependencies(dist)
+
+
+def find_dependencies(dist, extras=None):
+ """
+ Find all reachable dependencies for dist.
+
+ dist is an importlib.metadata.Distribution (or similar).
+ TODO: create a suitable protocol for type hint.
+
+ >>> deps = find_dependencies(resolve(Requirement('nspektr')))
+ >>> all(isinstance(dep, Requirement) for dep in deps)
+ True
+ >>> not any('pytest' in str(dep) for dep in deps)
+ True
+ >>> test_deps = find_dependencies(resolve(Requirement('nspektr[testing]')))
+ >>> any('pytest' in str(dep) for dep in test_deps)
+ True
+ """
+
+ def visit(req, seen=set()):
+ if req in seen:
+ return ()
+ seen.add(req)
+ return find_req_dependencies(req)
+
+ return traverse(find_direct_dependencies(dist, extras), visit)
+
+
+class Unresolved(Exception):
+ def __iter__(self):
+ return iter(self.args[0])
+
+
+def missing(ep):
+ """
+ Generate the unresolved dependencies (if any) of ep.
+ """
+ return unsatisfied(find_dependencies(ep.dist, repair_extras(ep.extras)))
+
+
+def check(ep):
+ """
+ >>> ep, = metadata.entry_points(group='console_scripts', name='pip')
+ >>> check(ep)
+ >>> dist = metadata.distribution('nspektr')
+
+ Since 'docs' extras are not installed, requesting them should fail.
+
+ >>> ep = metadata.EntryPoint(
+ ... group=None, name=None, value='nspektr [docs]')._for(dist)
+ >>> check(ep)
+ Traceback (most recent call last):
+ ...
+ nspektr.Unresolved: [...]
+ """
+ missed = list(missing(ep))
+ if missed:
+ raise Unresolved(missed)
diff --git a/setuptools/_vendor/nspektr/_compat.py b/setuptools/_vendor/nspektr/_compat.py
new file mode 100644
index 0000000..3278379
--- /dev/null
+++ b/setuptools/_vendor/nspektr/_compat.py
@@ -0,0 +1,21 @@
+import contextlib
+import sys
+
+
+if sys.version_info >= (3, 10):
+ import importlib.metadata as metadata
+else:
+ import setuptools.extern.importlib_metadata as metadata # type: ignore # noqa: F401
+
+
+def repair_extras(extras):
+ """
+ Repair extras that appear as match objects.
+
+ python/importlib_metadata#369 revealed a flaw in the EntryPoint
+ implementation. This function wraps the extras to ensure
+ they are proper strings even on older implementations.
+ """
+ with contextlib.suppress(AttributeError):
+ return list(item.group(0) for item in extras)
+ return extras
diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/top_level.txt b/setuptools/_vendor/ordered_set-3.1.1.dist-info/top_level.txt
new file mode 100644
index 0000000..1c191ee
--- /dev/null
+++ b/setuptools/_vendor/ordered_set-3.1.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+ordered_set
diff --git a/setuptools/_vendor/ordered_set.py b/setuptools/_vendor/ordered_set.py
new file mode 100644
index 0000000..1487600
--- /dev/null
+++ b/setuptools/_vendor/ordered_set.py
@@ -0,0 +1,488 @@
+"""
+An OrderedSet is a custom MutableSet that remembers its order, so that every
+entry has an index that can be looked up.
+
+Based on a recipe originally posted to ActiveState Recipes by Raymond Hettiger,
+and released under the MIT license.
+"""
+import itertools as it
+from collections import deque
+
+try:
+ # Python 3
+ from collections.abc import MutableSet, Sequence
+except ImportError:
+ # Python 2.7
+ from collections import MutableSet, Sequence
+
+SLICE_ALL = slice(None)
+__version__ = "3.1"
+
+
+def is_iterable(obj):
+ """
+ Are we being asked to look up a list of things, instead of a single thing?
+ We check for the `__iter__` attribute so that this can cover types that
+ don't have to be known by this module, such as NumPy arrays.
+
+ Strings, however, should be considered as atomic values to look up, not
+ iterables. The same goes for tuples, since they are immutable and therefore
+ valid entries.
+
+ We don't need to check for the Python 2 `unicode` type, because it doesn't
+ have an `__iter__` attribute anyway.
+ """
+ return (
+ hasattr(obj, "__iter__")
+ and not isinstance(obj, str)
+ and not isinstance(obj, tuple)
+ )
+
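+# Illustrative note, not part of upstream ordered_set: a few concrete cases of
+# the rule above.
+#
+#     >>> is_iterable([1, 2]), is_iterable("ab"), is_iterable((1, 2))
+#     (True, False, False)
+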
+
+class OrderedSet(MutableSet, Sequence):
+ """
+ An OrderedSet is a custom MutableSet that remembers its order, so that
+ every entry has an index that can be looked up.
+
+ Example:
+ >>> OrderedSet([1, 1, 2, 3, 2])
+ OrderedSet([1, 2, 3])
+ """
+
+ def __init__(self, iterable=None):
+ self.items = []
+ self.map = {}
+ if iterable is not None:
+ self |= iterable
+
+ def __len__(self):
+ """
+ Returns the number of unique elements in the ordered set
+
+ Example:
+ >>> len(OrderedSet([]))
+ 0
+ >>> len(OrderedSet([1, 2]))
+ 2
+ """
+ return len(self.items)
+
+ def __getitem__(self, index):
+ """
+ Get the item at a given index.
+
+ If `index` is a slice, you will get back that slice of items, as a
+ new OrderedSet.
+
+ If `index` is a list or a similar iterable, you'll get a list of
+ items corresponding to those indices. This is similar to NumPy's
+ "fancy indexing". The result is not an OrderedSet because you may ask
+ for duplicate indices, and the number of elements returned should be
+ the number of elements asked for.
+
+ Example:
+ >>> oset = OrderedSet([1, 2, 3])
+ >>> oset[1]
+ 2
+ """
+ if isinstance(index, slice) and index == SLICE_ALL:
+ return self.copy()
+ elif is_iterable(index):
+ return [self.items[i] for i in index]
+ elif hasattr(index, "__index__") or isinstance(index, slice):
+ result = self.items[index]
+ if isinstance(result, list):
+ return self.__class__(result)
+ else:
+ return result
+ else:
+ raise TypeError("Don't know how to index an OrderedSet by %r" % index)
+
+ def copy(self):
+ """
+ Return a shallow copy of this object.
+
+ Example:
+ >>> this = OrderedSet([1, 2, 3])
+ >>> other = this.copy()
+ >>> this == other
+ True
+ >>> this is other
+ False
+ """
+ return self.__class__(self)
+
+ def __getstate__(self):
+ if len(self) == 0:
+ # The state can't be an empty list.
+ # We need to return a truthy value, or else __setstate__ won't be run.
+ #
+ # This could have been done more gracefully by always putting the state
+ # in a tuple, but this way is backwards- and forwards- compatible with
+ # previous versions of OrderedSet.
+ return (None,)
+ else:
+ return list(self)
+
+ def __setstate__(self, state):
+ if state == (None,):
+ self.__init__([])
+ else:
+ self.__init__(state)
+
+ def __contains__(self, key):
+ """
+ Test if the item is in this ordered set
+
+ Example:
+ >>> 1 in OrderedSet([1, 3, 2])
+ True
+ >>> 5 in OrderedSet([1, 3, 2])
+ False
+ """
+ return key in self.map
+
+ def add(self, key):
+ """
+ Add `key` as an item to this OrderedSet, then return its index.
+
+ If `key` is already in the OrderedSet, return the index it already
+ had.
+
+ Example:
+ >>> oset = OrderedSet()
+ >>> oset.append(3)
+ 0
+ >>> print(oset)
+ OrderedSet([3])
+ """
+ if key not in self.map:
+ self.map[key] = len(self.items)
+ self.items.append(key)
+ return self.map[key]
+
+ append = add
+
+ def update(self, sequence):
+ """
+ Update the set with the given iterable sequence, then return the index
+ of the last element inserted.
+
+ Example:
+ >>> oset = OrderedSet([1, 2, 3])
+ >>> oset.update([3, 1, 5, 1, 4])
+ 4
+ >>> print(oset)
+ OrderedSet([1, 2, 3, 5, 4])
+ """
+ item_index = None
+ try:
+ for item in sequence:
+ item_index = self.add(item)
+ except TypeError:
+ raise ValueError(
+ "Argument needs to be an iterable, got %s" % type(sequence)
+ )
+ return item_index
+
+ def index(self, key):
+ """
+ Get the index of a given entry, raising an IndexError if it's not
+ present.
+
+ `key` can be an iterable of entries that is not a string, in which case
+ this returns a list of indices.
+
+ Example:
+ >>> oset = OrderedSet([1, 2, 3])
+ >>> oset.index(2)
+ 1
+ """
+ if is_iterable(key):
+ return [self.index(subkey) for subkey in key]
+ return self.map[key]
+
+ # Provide some compatibility with pd.Index
+ get_loc = index
+ get_indexer = index
+
+ def pop(self):
+ """
+ Remove and return the last element from the set.
+
+ Raises KeyError if the set is empty.
+
+ Example:
+ >>> oset = OrderedSet([1, 2, 3])
+ >>> oset.pop()
+ 3
+ """
+ if not self.items:
+ raise KeyError("Set is empty")
+
+ elem = self.items[-1]
+ del self.items[-1]
+ del self.map[elem]
+ return elem
+
+ def discard(self, key):
+ """
+ Remove an element. Do not raise an exception if absent.
+
+ The MutableSet mixin uses this to implement the .remove() method, which
+ *does* raise an error when asked to remove a non-existent item.
+
+ Example:
+ >>> oset = OrderedSet([1, 2, 3])
+ >>> oset.discard(2)
+ >>> print(oset)
+ OrderedSet([1, 3])
+ >>> oset.discard(2)
+ >>> print(oset)
+ OrderedSet([1, 3])
+ """
+ if key in self:
+ i = self.map[key]
+ del self.items[i]
+ del self.map[key]
+ for k, v in self.map.items():
+ if v >= i:
+ self.map[k] = v - 1
+
+ def clear(self):
+ """
+ Remove all items from this OrderedSet.
+ """
+ del self.items[:]
+ self.map.clear()
+
+ def __iter__(self):
+ """
+ Example:
+ >>> list(iter(OrderedSet([1, 2, 3])))
+ [1, 2, 3]
+ """
+ return iter(self.items)
+
+ def __reversed__(self):
+ """
+ Example:
+ >>> list(reversed(OrderedSet([1, 2, 3])))
+ [3, 2, 1]
+ """
+ return reversed(self.items)
+
+ def __repr__(self):
+ if not self:
+ return "%s()" % (self.__class__.__name__,)
+ return "%s(%r)" % (self.__class__.__name__, list(self))
+
+ def __eq__(self, other):
+ """
+ Returns true if the containers have the same items. If `other` is a
+ Sequence, then order is checked, otherwise it is ignored.
+
+ Example:
+ >>> oset = OrderedSet([1, 3, 2])
+ >>> oset == [1, 3, 2]
+ True
+ >>> oset == [1, 2, 3]
+ False
+ >>> oset == [2, 3]
+ False
+ >>> oset == OrderedSet([3, 2, 1])
+ False
+ """
+ # In Python 2 deque is not a Sequence, so treat it as one for
+ # consistent behavior with Python 3.
+ if isinstance(other, (Sequence, deque)):
+ # Check that this OrderedSet contains the same elements, in the
+ # same order, as the other object.
+ return list(self) == list(other)
+ try:
+ other_as_set = set(other)
+ except TypeError:
+ # If `other` can't be converted into a set, it's not equal.
+ return False
+ else:
+ return set(self) == other_as_set
+
+ def union(self, *sets):
+ """
+ Combines all unique items.
+        Each item's order is defined by its first appearance.
+
+ Example:
+ >>> oset = OrderedSet.union(OrderedSet([3, 1, 4, 1, 5]), [1, 3], [2, 0])
+ >>> print(oset)
+ OrderedSet([3, 1, 4, 5, 2, 0])
+ >>> oset.union([8, 9])
+ OrderedSet([3, 1, 4, 5, 2, 0, 8, 9])
+ >>> oset | {10}
+ OrderedSet([3, 1, 4, 5, 2, 0, 10])
+ """
+ cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
+ containers = map(list, it.chain([self], sets))
+ items = it.chain.from_iterable(containers)
+ return cls(items)
+
+ def __and__(self, other):
+ # the parent implementation of this is backwards
+ return self.intersection(other)
+
+ def intersection(self, *sets):
+ """
+ Returns elements in common between all sets. Order is defined only
+ by the first set.
+
+ Example:
+ >>> oset = OrderedSet.intersection(OrderedSet([0, 1, 2, 3]), [1, 2, 3])
+ >>> print(oset)
+ OrderedSet([1, 2, 3])
+ >>> oset.intersection([2, 4, 5], [1, 2, 3, 4])
+ OrderedSet([2])
+ >>> oset.intersection()
+ OrderedSet([1, 2, 3])
+ """
+ cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
+ if sets:
+ common = set.intersection(*map(set, sets))
+ items = (item for item in self if item in common)
+ else:
+ items = self
+ return cls(items)
+
+ def difference(self, *sets):
+ """
+ Returns all elements that are in this set but not the others.
+
+ Example:
+ >>> OrderedSet([1, 2, 3]).difference(OrderedSet([2]))
+ OrderedSet([1, 3])
+ >>> OrderedSet([1, 2, 3]).difference(OrderedSet([2]), OrderedSet([3]))
+ OrderedSet([1])
+ >>> OrderedSet([1, 2, 3]) - OrderedSet([2])
+ OrderedSet([1, 3])
+ >>> OrderedSet([1, 2, 3]).difference()
+ OrderedSet([1, 2, 3])
+ """
+ cls = self.__class__
+ if sets:
+ other = set.union(*map(set, sets))
+ items = (item for item in self if item not in other)
+ else:
+ items = self
+ return cls(items)
+
+ def issubset(self, other):
+ """
+ Report whether another set contains this set.
+
+ Example:
+ >>> OrderedSet([1, 2, 3]).issubset({1, 2})
+ False
+ >>> OrderedSet([1, 2, 3]).issubset({1, 2, 3, 4})
+ True
+ >>> OrderedSet([1, 2, 3]).issubset({1, 4, 3, 5})
+ False
+ """
+ if len(self) > len(other): # Fast check for obvious cases
+ return False
+ return all(item in other for item in self)
+
+ def issuperset(self, other):
+ """
+ Report whether this set contains another set.
+
+ Example:
+ >>> OrderedSet([1, 2]).issuperset([1, 2, 3])
+ False
+ >>> OrderedSet([1, 2, 3, 4]).issuperset({1, 2, 3})
+ True
+ >>> OrderedSet([1, 4, 3, 5]).issuperset({1, 2, 3})
+ False
+ """
+ if len(self) < len(other): # Fast check for obvious cases
+ return False
+ return all(item in self for item in other)
+
+ def symmetric_difference(self, other):
+ """
+ Return the symmetric difference of two OrderedSets as a new set.
+ That is, the new set will contain all elements that are in exactly
+ one of the sets.
+
+ Their order will be preserved, with elements from `self` preceding
+ elements from `other`.
+
+ Example:
+ >>> this = OrderedSet([1, 4, 3, 5, 7])
+ >>> other = OrderedSet([9, 7, 1, 3, 2])
+ >>> this.symmetric_difference(other)
+ OrderedSet([4, 5, 9, 2])
+ """
+ cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
+ diff1 = cls(self).difference(other)
+ diff2 = cls(other).difference(self)
+ return diff1.union(diff2)
+
+ def _update_items(self, items):
+ """
+ Replace the 'items' list of this OrderedSet with a new one, updating
+ self.map accordingly.
+ """
+ self.items = items
+ self.map = {item: idx for (idx, item) in enumerate(items)}
+
+ def difference_update(self, *sets):
+ """
+ Update this OrderedSet to remove items from one or more other sets.
+
+ Example:
+ >>> this = OrderedSet([1, 2, 3])
+ >>> this.difference_update(OrderedSet([2, 4]))
+ >>> print(this)
+ OrderedSet([1, 3])
+
+ >>> this = OrderedSet([1, 2, 3, 4, 5])
+ >>> this.difference_update(OrderedSet([2, 4]), OrderedSet([1, 4, 6]))
+ >>> print(this)
+ OrderedSet([3, 5])
+ """
+ items_to_remove = set()
+ for other in sets:
+ items_to_remove |= set(other)
+ self._update_items([item for item in self.items if item not in items_to_remove])
+
+ def intersection_update(self, other):
+ """
+ Update this OrderedSet to keep only items in another set, preserving
+ their order in this set.
+
+ Example:
+ >>> this = OrderedSet([1, 4, 3, 5, 7])
+ >>> other = OrderedSet([9, 7, 1, 3, 2])
+ >>> this.intersection_update(other)
+ >>> print(this)
+ OrderedSet([1, 3, 7])
+ """
+ other = set(other)
+ self._update_items([item for item in self.items if item in other])
+
+ def symmetric_difference_update(self, other):
+ """
+ Update this OrderedSet to remove items from another set, then
+ add items from the other set that were not present in this set.
+
+ Example:
+ >>> this = OrderedSet([1, 4, 3, 5, 7])
+ >>> other = OrderedSet([9, 7, 1, 3, 2])
+ >>> this.symmetric_difference_update(other)
+ >>> print(this)
+ OrderedSet([4, 5, 9, 2])
+ """
+ items_to_add = [item for item in other if item not in self]
+ items_to_remove = set(other)
+ self._update_items(
+ [item for item in self.items if item not in items_to_remove] + items_to_add
+ )
diff --git a/setuptools/_vendor/packaging-21.3.dist-info/top_level.txt b/setuptools/_vendor/packaging-21.3.dist-info/top_level.txt
new file mode 100644
index 0000000..748809f
--- /dev/null
+++ b/setuptools/_vendor/packaging-21.3.dist-info/top_level.txt
@@ -0,0 +1 @@
+packaging
diff --git a/setuptools/_vendor/packaging/__about__.py b/setuptools/_vendor/packaging/__about__.py
index 95d330e..3551bc2 100644
--- a/setuptools/_vendor/packaging/__about__.py
+++ b/setuptools/_vendor/packaging/__about__.py
@@ -1,21 +1,26 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
-from __future__ import absolute_import, division, print_function
__all__ = [
- "__title__", "__summary__", "__uri__", "__version__", "__author__",
- "__email__", "__license__", "__copyright__",
+ "__title__",
+ "__summary__",
+ "__uri__",
+ "__version__",
+ "__author__",
+ "__email__",
+ "__license__",
+ "__copyright__",
]
__title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"
-__version__ = "16.8"
+__version__ = "21.3"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
-__license__ = "BSD or Apache License, Version 2.0"
-__copyright__ = "Copyright 2014-2016 %s" % __author__
+__license__ = "BSD-2-Clause or Apache-2.0"
+__copyright__ = "2014-2019 %s" % __author__
diff --git a/setuptools/_vendor/packaging/__init__.py b/setuptools/_vendor/packaging/__init__.py
index 5ee6220..3c50c5d 100644
--- a/setuptools/_vendor/packaging/__init__.py
+++ b/setuptools/_vendor/packaging/__init__.py
@@ -1,14 +1,25 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
-from __future__ import absolute_import, division, print_function
from .__about__ import (
- __author__, __copyright__, __email__, __license__, __summary__, __title__,
- __uri__, __version__
+ __author__,
+ __copyright__,
+ __email__,
+ __license__,
+ __summary__,
+ __title__,
+ __uri__,
+ __version__,
)
__all__ = [
- "__title__", "__summary__", "__uri__", "__version__", "__author__",
- "__email__", "__license__", "__copyright__",
+ "__title__",
+ "__summary__",
+ "__uri__",
+ "__version__",
+ "__author__",
+ "__email__",
+ "__license__",
+ "__copyright__",
]
diff --git a/setuptools/_vendor/packaging/__pycache__/__about__.cpython-36.pyc b/setuptools/_vendor/packaging/__pycache__/__about__.cpython-36.pyc
deleted file mode 100644
index d88ad13..0000000
--- a/setuptools/_vendor/packaging/__pycache__/__about__.cpython-36.pyc
+++ /dev/null
Binary files differ
diff --git a/setuptools/_vendor/packaging/__pycache__/__init__.cpython-36.pyc b/setuptools/_vendor/packaging/__pycache__/__init__.cpython-36.pyc
deleted file mode 100644
index e9a878d..0000000
--- a/setuptools/_vendor/packaging/__pycache__/__init__.cpython-36.pyc
+++ /dev/null
Binary files differ
diff --git a/setuptools/_vendor/packaging/__pycache__/_compat.cpython-36.pyc b/setuptools/_vendor/packaging/__pycache__/_compat.cpython-36.pyc
deleted file mode 100644
index ca59089..0000000
--- a/setuptools/_vendor/packaging/__pycache__/_compat.cpython-36.pyc
+++ /dev/null
Binary files differ
diff --git a/setuptools/_vendor/packaging/__pycache__/_structures.cpython-36.pyc b/setuptools/_vendor/packaging/__pycache__/_structures.cpython-36.pyc
deleted file mode 100644
index 2640f23..0000000
--- a/setuptools/_vendor/packaging/__pycache__/_structures.cpython-36.pyc
+++ /dev/null
Binary files differ
diff --git a/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc b/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc
deleted file mode 100644
index c5139c2..0000000
--- a/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc
+++ /dev/null
Binary files differ
diff --git a/setuptools/_vendor/packaging/__pycache__/version.cpython-36.pyc b/setuptools/_vendor/packaging/__pycache__/version.cpython-36.pyc
deleted file mode 100644
index d5f01d8..0000000
--- a/setuptools/_vendor/packaging/__pycache__/version.cpython-36.pyc
+++ /dev/null
Binary files differ
diff --git a/setuptools/_vendor/packaging/_compat.py b/setuptools/_vendor/packaging/_compat.py
deleted file mode 100644
index 210bb80..0000000
--- a/setuptools/_vendor/packaging/_compat.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-import sys
-
-
-PY2 = sys.version_info[0] == 2
-PY3 = sys.version_info[0] == 3
-
-# flake8: noqa
-
-if PY3:
- string_types = str,
-else:
- string_types = basestring,
-
-
-def with_metaclass(meta, *bases):
- """
- Create a base class with a metaclass.
- """
- # This requires a bit of explanation: the basic idea is to make a dummy
- # metaclass for one level of class instantiation that replaces itself with
- # the actual metaclass.
- class metaclass(meta):
- def __new__(cls, name, this_bases, d):
- return meta(name, bases, d)
- return type.__new__(metaclass, 'temporary_class', (), {})
diff --git a/setuptools/_vendor/packaging/_manylinux.py b/setuptools/_vendor/packaging/_manylinux.py
new file mode 100644
index 0000000..4c379aa
--- /dev/null
+++ b/setuptools/_vendor/packaging/_manylinux.py
@@ -0,0 +1,301 @@
+import collections
+import functools
+import os
+import re
+import struct
+import sys
+import warnings
+from typing import IO, Dict, Iterator, NamedTuple, Optional, Tuple
+
+
+# Python does not provide platform information at sufficient granularity to
+# identify the architecture of the running executable in some cases, so we
+# determine it dynamically by reading the information from the running
+# process. This only applies on Linux, which uses the ELF format.
+class _ELFFileHeader:
+ # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
+ class _InvalidELFFileHeader(ValueError):
+ """
+ An invalid ELF file header was found.
+ """
+
+ ELF_MAGIC_NUMBER = 0x7F454C46
+ ELFCLASS32 = 1
+ ELFCLASS64 = 2
+ ELFDATA2LSB = 1
+ ELFDATA2MSB = 2
+ EM_386 = 3
+ EM_S390 = 22
+ EM_ARM = 40
+ EM_X86_64 = 62
+ EF_ARM_ABIMASK = 0xFF000000
+ EF_ARM_ABI_VER5 = 0x05000000
+ EF_ARM_ABI_FLOAT_HARD = 0x00000400
+
+ def __init__(self, file: IO[bytes]) -> None:
+ def unpack(fmt: str) -> int:
+ try:
+ data = file.read(struct.calcsize(fmt))
+ result: Tuple[int, ...] = struct.unpack(fmt, data)
+ except struct.error:
+ raise _ELFFileHeader._InvalidELFFileHeader()
+ return result[0]
+
+ self.e_ident_magic = unpack(">I")
+ if self.e_ident_magic != self.ELF_MAGIC_NUMBER:
+ raise _ELFFileHeader._InvalidELFFileHeader()
+ self.e_ident_class = unpack("B")
+ if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}:
+ raise _ELFFileHeader._InvalidELFFileHeader()
+ self.e_ident_data = unpack("B")
+ if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}:
+ raise _ELFFileHeader._InvalidELFFileHeader()
+ self.e_ident_version = unpack("B")
+ self.e_ident_osabi = unpack("B")
+ self.e_ident_abiversion = unpack("B")
+ self.e_ident_pad = file.read(7)
+ format_h = "<H" if self.e_ident_data == self.ELFDATA2LSB else ">H"
+ format_i = "<I" if self.e_ident_data == self.ELFDATA2LSB else ">I"
+ format_q = "<Q" if self.e_ident_data == self.ELFDATA2LSB else ">Q"
+ format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q
+ self.e_type = unpack(format_h)
+ self.e_machine = unpack(format_h)
+ self.e_version = unpack(format_i)
+ self.e_entry = unpack(format_p)
+ self.e_phoff = unpack(format_p)
+ self.e_shoff = unpack(format_p)
+ self.e_flags = unpack(format_i)
+ self.e_ehsize = unpack(format_h)
+ self.e_phentsize = unpack(format_h)
+ self.e_phnum = unpack(format_h)
+ self.e_shentsize = unpack(format_h)
+ self.e_shnum = unpack(format_h)
+ self.e_shstrndx = unpack(format_h)
+
+
+def _get_elf_header() -> Optional[_ELFFileHeader]:
+ try:
+ with open(sys.executable, "rb") as f:
+ elf_header = _ELFFileHeader(f)
+ except (OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader):
+ return None
+ return elf_header
+
+
+def _is_linux_armhf() -> bool:
+ # hard-float ABI can be detected from the ELF header of the running
+ # process
+ # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
+ elf_header = _get_elf_header()
+ if elf_header is None:
+ return False
+ result = elf_header.e_ident_class == elf_header.ELFCLASS32
+ result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
+ result &= elf_header.e_machine == elf_header.EM_ARM
+ result &= (
+ elf_header.e_flags & elf_header.EF_ARM_ABIMASK
+ ) == elf_header.EF_ARM_ABI_VER5
+ result &= (
+ elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD
+ ) == elf_header.EF_ARM_ABI_FLOAT_HARD
+ return result
+
+
+def _is_linux_i686() -> bool:
+ elf_header = _get_elf_header()
+ if elf_header is None:
+ return False
+ result = elf_header.e_ident_class == elf_header.ELFCLASS32
+ result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
+ result &= elf_header.e_machine == elf_header.EM_386
+ return result
+
+
+def _have_compatible_abi(arch: str) -> bool:
+ if arch == "armv7l":
+ return _is_linux_armhf()
+ if arch == "i686":
+ return _is_linux_i686()
+ return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
+
+
+# If glibc ever changes its major version, we need to know what the last
+# minor version was, so we can build the complete list of all versions.
+# For now, guess what the highest minor version might be, assume it will
+# be 50 for testing. Once this actually happens, update the dictionary
+# with the actual value.
+_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)
+
+
+class _GLibCVersion(NamedTuple):
+ major: int
+ minor: int
+
+
+def _glibc_version_string_confstr() -> Optional[str]:
+ """
+ Primary implementation of glibc_version_string using os.confstr.
+ """
+ # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
+ # to be broken or missing. This strategy is used in the standard library
+ # platform module.
+ # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
+ try:
+ # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17".
+ version_string = os.confstr("CS_GNU_LIBC_VERSION")
+ assert version_string is not None
+ _, version = version_string.split()
+ except (AssertionError, AttributeError, OSError, ValueError):
+ # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
+ return None
+ return version
+
+
+def _glibc_version_string_ctypes() -> Optional[str]:
+ """
+ Fallback implementation of glibc_version_string using ctypes.
+ """
+ try:
+ import ctypes
+ except ImportError:
+ return None
+
+ # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
+ # manpage says, "If filename is NULL, then the returned handle is for the
+ # main program". This way we can let the linker do the work to figure out
+ # which libc our process is actually using.
+ #
+ # We must also handle the special case where the executable is not a
+ # dynamically linked executable. This can occur when using musl libc,
+ # for example. In this situation, dlopen() will error, leading to an
+ # OSError. Interestingly, at least in the case of musl, there is no
+ # errno set on the OSError. The single string argument used to construct
+ # OSError comes from libc itself and is therefore not portable to
+ # hard code here. In any case, failure to call dlopen() means we
+    # can't proceed, so we bail on our attempt.
+ try:
+ process_namespace = ctypes.CDLL(None)
+ except OSError:
+ return None
+
+ try:
+ gnu_get_libc_version = process_namespace.gnu_get_libc_version
+ except AttributeError:
+ # Symbol doesn't exist -> therefore, we are not linked to
+ # glibc.
+ return None
+
+ # Call gnu_get_libc_version, which returns a string like "2.5"
+ gnu_get_libc_version.restype = ctypes.c_char_p
+ version_str: str = gnu_get_libc_version()
+ # py2 / py3 compatibility:
+ if not isinstance(version_str, str):
+ version_str = version_str.decode("ascii")
+
+ return version_str
+
+
+def _glibc_version_string() -> Optional[str]:
+ """Returns glibc version string, or None if not using glibc."""
+ return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
+
+
+def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
+ """Parse glibc version.
+
+ We use a regexp instead of str.split because we want to discard any
+ random junk that might come after the minor version -- this might happen
+ in patched/forked versions of glibc (e.g. Linaro's version of glibc
+ uses version strings like "2.20-2014.11"). See gh-3588.
+ """
+ m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
+ if not m:
+ warnings.warn(
+ "Expected glibc version with 2 components major.minor,"
+ " got: %s" % version_str,
+ RuntimeWarning,
+ )
+ return -1, -1
+ return int(m.group("major")), int(m.group("minor"))
+
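+# Illustrative note, not part of upstream packaging: patched glibc builds keep
+# only the leading major.minor pair, e.g. for Linaro's "2.20-2014.11" string:
+#
+#     >>> _parse_glibc_version("2.20-2014.11")
+#     (2, 20)
+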
+
+@functools.lru_cache()
+def _get_glibc_version() -> Tuple[int, int]:
+ version_str = _glibc_version_string()
+ if version_str is None:
+ return (-1, -1)
+ return _parse_glibc_version(version_str)
+
+
+# From PEP 513, PEP 600
+def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
+ sys_glibc = _get_glibc_version()
+ if sys_glibc < version:
+ return False
+ # Check for presence of _manylinux module.
+ try:
+ import _manylinux # noqa
+ except ImportError:
+ return True
+ if hasattr(_manylinux, "manylinux_compatible"):
+ result = _manylinux.manylinux_compatible(version[0], version[1], arch)
+ if result is not None:
+ return bool(result)
+ return True
+ if version == _GLibCVersion(2, 5):
+ if hasattr(_manylinux, "manylinux1_compatible"):
+ return bool(_manylinux.manylinux1_compatible)
+ if version == _GLibCVersion(2, 12):
+ if hasattr(_manylinux, "manylinux2010_compatible"):
+ return bool(_manylinux.manylinux2010_compatible)
+ if version == _GLibCVersion(2, 17):
+ if hasattr(_manylinux, "manylinux2014_compatible"):
+ return bool(_manylinux.manylinux2014_compatible)
+ return True
+
+
+_LEGACY_MANYLINUX_MAP = {
+ # CentOS 7 w/ glibc 2.17 (PEP 599)
+ (2, 17): "manylinux2014",
+ # CentOS 6 w/ glibc 2.12 (PEP 571)
+ (2, 12): "manylinux2010",
+ # CentOS 5 w/ glibc 2.5 (PEP 513)
+ (2, 5): "manylinux1",
+}
+
+
+def platform_tags(linux: str, arch: str) -> Iterator[str]:
+ if not _have_compatible_abi(arch):
+ return
+ # Oldest glibc to be supported regardless of architecture is (2, 17).
+ too_old_glibc2 = _GLibCVersion(2, 16)
+ if arch in {"x86_64", "i686"}:
+ # On x86/i686 also oldest glibc to be supported is (2, 5).
+ too_old_glibc2 = _GLibCVersion(2, 4)
+ current_glibc = _GLibCVersion(*_get_glibc_version())
+ glibc_max_list = [current_glibc]
+ # We can assume compatibility across glibc major versions.
+ # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
+ #
+ # Build a list of maximum glibc versions so that we can
+ # output the canonical list of all glibc from current_glibc
+ # down to too_old_glibc2, including all intermediary versions.
+ for glibc_major in range(current_glibc.major - 1, 1, -1):
+ glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
+ glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
+ for glibc_max in glibc_max_list:
+ if glibc_max.major == too_old_glibc2.major:
+ min_minor = too_old_glibc2.minor
+ else:
+ # For other glibc major versions oldest supported is (x, 0).
+ min_minor = -1
+ for glibc_minor in range(glibc_max.minor, min_minor, -1):
+ glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
+ tag = "manylinux_{}_{}".format(*glibc_version)
+ if _is_compatible(tag, arch, glibc_version):
+ yield linux.replace("linux", tag)
+ # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
+ if glibc_version in _LEGACY_MANYLINUX_MAP:
+ legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
+ if _is_compatible(legacy_tag, arch, glibc_version):
+ yield linux.replace("linux", legacy_tag)
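For orientation, a minimal usage sketch of the helper added above (assuming the vendored module is importable as setuptools._vendor.packaging._manylinux; on a musl or otherwise non-glibc interpreter the generator simply yields nothing):

    from setuptools._vendor.packaging import _manylinux

    # On a glibc 2.31 x86_64 host this yields roughly manylinux_2_31_x86_64
    # down to manylinux_2_5_x86_64, plus the legacy manylinux2014/2010/1 aliases.
    for tag in _manylinux.platform_tags("linux_x86_64", "x86_64"):
        print(tag)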
diff --git a/setuptools/_vendor/packaging/_musllinux.py b/setuptools/_vendor/packaging/_musllinux.py
new file mode 100644
index 0000000..8ac3059
--- /dev/null
+++ b/setuptools/_vendor/packaging/_musllinux.py
@@ -0,0 +1,136 @@
+"""PEP 656 support.
+
+This module implements logic to detect if the currently running Python is
+linked against musl, and what musl version is used.
+"""
+
+import contextlib
+import functools
+import operator
+import os
+import re
+import struct
+import subprocess
+import sys
+from typing import IO, Iterator, NamedTuple, Optional, Tuple
+
+
+def _read_unpacked(f: IO[bytes], fmt: str) -> Tuple[int, ...]:
+ return struct.unpack(fmt, f.read(struct.calcsize(fmt)))
+
+
+def _parse_ld_musl_from_elf(f: IO[bytes]) -> Optional[str]:
+ """Detect musl libc location by parsing the Python executable.
+
+ Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
+ ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
+ """
+ f.seek(0)
+ try:
+ ident = _read_unpacked(f, "16B")
+ except struct.error:
+ return None
+ if ident[:4] != tuple(b"\x7fELF"): # Invalid magic, not ELF.
+ return None
+ f.seek(struct.calcsize("HHI"), 1) # Skip file type, machine, and version.
+
+ try:
+ # e_fmt: Format for program header.
+ # p_fmt: Format for section header.
+ # p_idx: Indexes to find p_type, p_offset, and p_filesz.
+ e_fmt, p_fmt, p_idx = {
+ 1: ("IIIIHHH", "IIIIIIII", (0, 1, 4)), # 32-bit.
+ 2: ("QQQIHHH", "IIQQQQQQ", (0, 2, 5)), # 64-bit.
+ }[ident[4]]
+ except KeyError:
+ return None
+ else:
+ p_get = operator.itemgetter(*p_idx)
+
+ # Find the interpreter section and return its content.
+ try:
+ _, e_phoff, _, _, _, e_phentsize, e_phnum = _read_unpacked(f, e_fmt)
+ except struct.error:
+ return None
+ for i in range(e_phnum + 1):
+ f.seek(e_phoff + e_phentsize * i)
+ try:
+ p_type, p_offset, p_filesz = p_get(_read_unpacked(f, p_fmt))
+ except struct.error:
+ return None
+ if p_type != 3: # Not PT_INTERP.
+ continue
+ f.seek(p_offset)
+ interpreter = os.fsdecode(f.read(p_filesz)).strip("\0")
+ if "musl" not in interpreter:
+ return None
+ return interpreter
+ return None
+
+
+class _MuslVersion(NamedTuple):
+ major: int
+ minor: int
+
+
+def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
+ lines = [n for n in (n.strip() for n in output.splitlines()) if n]
+ if len(lines) < 2 or lines[0][:4] != "musl":
+ return None
+ m = re.match(r"Version (\d+)\.(\d+)", lines[1])
+ if not m:
+ return None
+ return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
+
+
+@functools.lru_cache()
+def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
+ """Detect currently-running musl runtime version.
+
+ This is done by checking the specified executable's dynamic linking
+ information, and invoking the loader to parse its output for a version
+ string. If the loader is musl, the output would be something like::
+
+ musl libc (x86_64)
+ Version 1.2.2
+ Dynamic Program Loader
+ """
+ with contextlib.ExitStack() as stack:
+ try:
+ f = stack.enter_context(open(executable, "rb"))
+ except OSError:
+ return None
+ ld = _parse_ld_musl_from_elf(f)
+ if not ld:
+ return None
+ proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True)
+ return _parse_musl_version(proc.stderr)
+
+
+def platform_tags(arch: str) -> Iterator[str]:
+ """Generate musllinux tags compatible to the current platform.
+
+ :param arch: Should be the part of platform tag after the ``linux_``
+ prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a
+ prerequisite for the current platform to be musllinux-compatible.
+
+ :returns: An iterator of compatible musllinux tags.
+ """
+ sys_musl = _get_musl_version(sys.executable)
+ if sys_musl is None: # Python not dynamically linked against musl.
+ return
+ for minor in range(sys_musl.minor, -1, -1):
+ yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
+
+
+if __name__ == "__main__": # pragma: no cover
+ import sysconfig
+
+ plat = sysconfig.get_platform()
+ assert plat.startswith("linux-"), "not linux"
+
+ print("plat:", plat)
+ print("musl:", _get_musl_version(sys.executable))
+ print("tags:", end=" ")
+ for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])):
+ print(t, end="\n ")
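A small illustration of the version-parsing step above, using the loader banner quoted in the _get_musl_version docstring (illustrative values only):

    from setuptools._vendor.packaging._musllinux import _parse_musl_version

    banner = "musl libc (x86_64)\nVersion 1.2.2\nDynamic Program Loader"
    _parse_musl_version(banner)      # -> _MuslVersion(major=1, minor=2)
    _parse_musl_version("ldd 2.31")  # -> None (not a musl loader banner)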
diff --git a/setuptools/_vendor/packaging/_structures.py b/setuptools/_vendor/packaging/_structures.py
index ccc2786..90a6465 100644
--- a/setuptools/_vendor/packaging/_structures.py
+++ b/setuptools/_vendor/packaging/_structures.py
@@ -1,68 +1,61 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
-from __future__ import absolute_import, division, print_function
-class Infinity(object):
-
- def __repr__(self):
+class InfinityType:
+ def __repr__(self) -> str:
return "Infinity"
- def __hash__(self):
+ def __hash__(self) -> int:
return hash(repr(self))
- def __lt__(self, other):
+ def __lt__(self, other: object) -> bool:
return False
- def __le__(self, other):
+ def __le__(self, other: object) -> bool:
return False
- def __eq__(self, other):
+ def __eq__(self, other: object) -> bool:
return isinstance(other, self.__class__)
- def __ne__(self, other):
- return not isinstance(other, self.__class__)
-
- def __gt__(self, other):
+ def __gt__(self, other: object) -> bool:
return True
- def __ge__(self, other):
+ def __ge__(self, other: object) -> bool:
return True
- def __neg__(self):
+ def __neg__(self: object) -> "NegativeInfinityType":
return NegativeInfinity
-Infinity = Infinity()
+Infinity = InfinityType()
-class NegativeInfinity(object):
- def __repr__(self):
+class NegativeInfinityType:
+ def __repr__(self) -> str:
return "-Infinity"
- def __hash__(self):
+ def __hash__(self) -> int:
return hash(repr(self))
- def __lt__(self, other):
+ def __lt__(self, other: object) -> bool:
return True
- def __le__(self, other):
+ def __le__(self, other: object) -> bool:
return True
- def __eq__(self, other):
+ def __eq__(self, other: object) -> bool:
return isinstance(other, self.__class__)
- def __ne__(self, other):
- return not isinstance(other, self.__class__)
-
- def __gt__(self, other):
+ def __gt__(self, other: object) -> bool:
return False
- def __ge__(self, other):
+ def __ge__(self, other: object) -> bool:
return False
- def __neg__(self):
+ def __neg__(self: object) -> InfinityType:
return Infinity
-NegativeInfinity = NegativeInfinity()
+
+NegativeInfinity = NegativeInfinityType()
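The two singletons above order against arbitrary objects purely through their own comparison methods; packaging uses them as padding values when building version sort keys. A quick sketch of the intended ordering (interactive use, not part of the diff):

    from setuptools._vendor.packaging._structures import Infinity, NegativeInfinity

    NegativeInfinity < (1, 2, 0) < Infinity       # True for any comparable value
    sorted([Infinity, (2, 0), NegativeInfinity])  # [NegativeInfinity, (2, 0), Infinity]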
diff --git a/setuptools/_vendor/packaging/markers.py b/setuptools/_vendor/packaging/markers.py
index 031332a..eb0541b 100644
--- a/setuptools/_vendor/packaging/markers.py
+++ b/setuptools/_vendor/packaging/markers.py
@@ -1,26 +1,37 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
-from __future__ import absolute_import, division, print_function
import operator
import os
import platform
import sys
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union
+
+from setuptools.extern.pyparsing import ( # noqa: N817
+ Forward,
+ Group,
+ Literal as L,
+ ParseException,
+ ParseResults,
+ QuotedString,
+ ZeroOrMore,
+ stringEnd,
+ stringStart,
+)
-from setuptools.extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd
-from setuptools.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString
-from setuptools.extern.pyparsing import Literal as L # noqa
-
-from ._compat import string_types
-from .specifiers import Specifier, InvalidSpecifier
-
+from .specifiers import InvalidSpecifier, Specifier
__all__ = [
- "InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName",
- "Marker", "default_environment",
+ "InvalidMarker",
+ "UndefinedComparison",
+ "UndefinedEnvironmentName",
+ "Marker",
+ "default_environment",
]
+Operator = Callable[[str, str], bool]
+
class InvalidMarker(ValueError):
"""
@@ -41,78 +52,67 @@ class UndefinedEnvironmentName(ValueError):
"""
-class Node(object):
-
- def __init__(self, value):
+class Node:
+ def __init__(self, value: Any) -> None:
self.value = value
- def __str__(self):
+ def __str__(self) -> str:
return str(self.value)
- def __repr__(self):
- return "<{0}({1!r})>".format(self.__class__.__name__, str(self))
+ def __repr__(self) -> str:
+ return f"<{self.__class__.__name__}('{self}')>"
- def serialize(self):
+ def serialize(self) -> str:
raise NotImplementedError
class Variable(Node):
-
- def serialize(self):
+ def serialize(self) -> str:
return str(self)
class Value(Node):
-
- def serialize(self):
- return '"{0}"'.format(self)
+ def serialize(self) -> str:
+ return f'"{self}"'
class Op(Node):
-
- def serialize(self):
+ def serialize(self) -> str:
return str(self)
VARIABLE = (
- L("implementation_version") |
- L("platform_python_implementation") |
- L("implementation_name") |
- L("python_full_version") |
- L("platform_release") |
- L("platform_version") |
- L("platform_machine") |
- L("platform_system") |
- L("python_version") |
- L("sys_platform") |
- L("os_name") |
- L("os.name") | # PEP-345
- L("sys.platform") | # PEP-345
- L("platform.version") | # PEP-345
- L("platform.machine") | # PEP-345
- L("platform.python_implementation") | # PEP-345
- L("python_implementation") | # undocumented setuptools legacy
- L("extra")
+ L("implementation_version")
+ | L("platform_python_implementation")
+ | L("implementation_name")
+ | L("python_full_version")
+ | L("platform_release")
+ | L("platform_version")
+ | L("platform_machine")
+ | L("platform_system")
+ | L("python_version")
+ | L("sys_platform")
+ | L("os_name")
+ | L("os.name") # PEP-345
+ | L("sys.platform") # PEP-345
+ | L("platform.version") # PEP-345
+ | L("platform.machine") # PEP-345
+ | L("platform.python_implementation") # PEP-345
+ | L("python_implementation") # undocumented setuptools legacy
+ | L("extra") # PEP-508
)
ALIASES = {
- 'os.name': 'os_name',
- 'sys.platform': 'sys_platform',
- 'platform.version': 'platform_version',
- 'platform.machine': 'platform_machine',
- 'platform.python_implementation': 'platform_python_implementation',
- 'python_implementation': 'platform_python_implementation'
+ "os.name": "os_name",
+ "sys.platform": "sys_platform",
+ "platform.version": "platform_version",
+ "platform.machine": "platform_machine",
+ "platform.python_implementation": "platform_python_implementation",
+ "python_implementation": "platform_python_implementation",
}
VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
VERSION_CMP = (
- L("===") |
- L("==") |
- L(">=") |
- L("<=") |
- L("!=") |
- L("~=") |
- L(">") |
- L("<")
+ L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<")
)
MARKER_OP = VERSION_CMP | L("not in") | L("in")
@@ -138,22 +138,28 @@ MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)
MARKER = stringStart + MARKER_EXPR + stringEnd
-def _coerce_parse_result(results):
+def _coerce_parse_result(results: Union[ParseResults, List[Any]]) -> List[Any]:
if isinstance(results, ParseResults):
return [_coerce_parse_result(i) for i in results]
else:
return results
-def _format_marker(marker, first=True):
- assert isinstance(marker, (list, tuple, string_types))
+def _format_marker(
+ marker: Union[List[str], Tuple[Node, ...], str], first: Optional[bool] = True
+) -> str:
+
+ assert isinstance(marker, (list, tuple, str))
# Sometimes we have a structure like [[...]] which is a single item list
# where the single item is itself its own list. In that case we want to skip
# the rest of this function so that we don't get extraneous () on the
# outside.
- if (isinstance(marker, list) and len(marker) == 1 and
- isinstance(marker[0], (list, tuple))):
+ if (
+ isinstance(marker, list)
+ and len(marker) == 1
+ and isinstance(marker[0], (list, tuple))
+ ):
return _format_marker(marker[0])
if isinstance(marker, list):
@@ -168,7 +174,7 @@ def _format_marker(marker, first=True):
return marker
-_operators = {
+_operators: Dict[str, Operator] = {
"in": lambda lhs, rhs: lhs in rhs,
"not in": lambda lhs, rhs: lhs not in rhs,
"<": operator.lt,
@@ -180,7 +186,7 @@ _operators = {
}
-def _eval_op(lhs, op, rhs):
+def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
try:
spec = Specifier("".join([op.serialize(), rhs]))
except InvalidSpecifier:
@@ -188,34 +194,36 @@ def _eval_op(lhs, op, rhs):
else:
return spec.contains(lhs)
- oper = _operators.get(op.serialize())
+ oper: Optional[Operator] = _operators.get(op.serialize())
if oper is None:
- raise UndefinedComparison(
- "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
- )
+ raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
return oper(lhs, rhs)
-_undefined = object()
+class Undefined:
+ pass
+
+
+_undefined = Undefined()
-def _get_env(environment, name):
- value = environment.get(name, _undefined)
+def _get_env(environment: Dict[str, str], name: str) -> str:
+ value: Union[str, Undefined] = environment.get(name, _undefined)
- if value is _undefined:
+ if isinstance(value, Undefined):
raise UndefinedEnvironmentName(
- "{0!r} does not exist in evaluation environment.".format(name)
+ f"{name!r} does not exist in evaluation environment."
)
return value
-def _evaluate_markers(markers, environment):
- groups = [[]]
+def _evaluate_markers(markers: List[Any], environment: Dict[str, str]) -> bool:
+ groups: List[List[bool]] = [[]]
for marker in markers:
- assert isinstance(marker, (list, tuple, string_types))
+ assert isinstance(marker, (list, tuple, str))
if isinstance(marker, list):
groups[-1].append(_evaluate_markers(marker, environment))
@@ -238,22 +246,17 @@ def _evaluate_markers(markers, environment):
return any(all(item) for item in groups)
-def format_full_version(info):
- version = '{0.major}.{0.minor}.{0.micro}'.format(info)
+def format_full_version(info: "sys._version_info") -> str:
+ version = "{0.major}.{0.minor}.{0.micro}".format(info)
kind = info.releaselevel
- if kind != 'final':
+ if kind != "final":
version += kind[0] + str(info.serial)
return version
-def default_environment():
- if hasattr(sys, 'implementation'):
- iver = format_full_version(sys.implementation.version)
- implementation_name = sys.implementation.name
- else:
- iver = '0'
- implementation_name = ''
-
+def default_environment() -> Dict[str, str]:
+ iver = format_full_version(sys.implementation.version)
+ implementation_name = sys.implementation.name
return {
"implementation_name": implementation_name,
"implementation_version": iver,
@@ -264,28 +267,28 @@ def default_environment():
"platform_version": platform.version(),
"python_full_version": platform.python_version(),
"platform_python_implementation": platform.python_implementation(),
- "python_version": platform.python_version()[:3],
+ "python_version": ".".join(platform.python_version_tuple()[:2]),
"sys_platform": sys.platform,
}
-class Marker(object):
-
- def __init__(self, marker):
+class Marker:
+ def __init__(self, marker: str) -> None:
try:
self._markers = _coerce_parse_result(MARKER.parseString(marker))
except ParseException as e:
- err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
- marker, marker[e.loc:e.loc + 8])
- raise InvalidMarker(err_str)
+ raise InvalidMarker(
+ f"Invalid marker: {marker!r}, parse error at "
+ f"{marker[e.loc : e.loc + 8]!r}"
+ )
- def __str__(self):
+ def __str__(self) -> str:
return _format_marker(self._markers)
- def __repr__(self):
- return "<Marker({0!r})>".format(str(self))
+ def __repr__(self) -> str:
+ return f"<Marker('{self}')>"
- def evaluate(self, environment=None):
+ def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
"""Evaluate a marker.
Return the boolean from evaluating the given marker against the
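As a reference point for the typed API above, a minimal marker-evaluation sketch (standard packaging behaviour; the override dict below merely updates default_environment() for that call):

    from setuptools._vendor.packaging.markers import Marker

    m = Marker('python_version >= "3.6" and sys_platform != "win32"')
    m.evaluate()  # evaluated against default_environment()
    m.evaluate({"python_version": "3.5", "sys_platform": "linux"})  # False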
diff --git a/setuptools/_vendor/packaging/requirements.py b/setuptools/_vendor/packaging/requirements.py
index 5b49341..0d93231 100644
--- a/setuptools/_vendor/packaging/requirements.py
+++ b/setuptools/_vendor/packaging/requirements.py
@@ -1,15 +1,24 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
-from __future__ import absolute_import, division, print_function
-import string
import re
-
-from setuptools.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException
-from setuptools.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine
-from setuptools.extern.pyparsing import Literal as L # noqa
-from setuptools.extern.six.moves.urllib import parse as urlparse
+import string
+import urllib.parse
+from typing import List, Optional as TOptional, Set
+
+from setuptools.extern.pyparsing import ( # noqa
+ Combine,
+ Literal as L,
+ Optional,
+ ParseException,
+ Regex,
+ Word,
+ ZeroOrMore,
+ originalTextFor,
+ stringEnd,
+ stringStart,
+)
from .markers import MARKER_EXPR, Marker
from .specifiers import LegacySpecifier, Specifier, SpecifierSet
@@ -38,8 +47,8 @@ IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
NAME = IDENTIFIER("name")
EXTRA = IDENTIFIER
-URI = Regex(r'[^ ]+')("url")
-URL = (AT + URI)
+URI = Regex(r"[^ ]+")("url")
+URL = AT + URI
EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
@@ -48,31 +57,34 @@ VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
-VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE),
- joinString=",", adjacent=False)("_raw_spec")
-_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
-_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '')
+VERSION_MANY = Combine(
+ VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False
+)("_raw_spec")
+_VERSION_SPEC = Optional((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)
+_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "")
VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
MARKER_EXPR.setParseAction(
- lambda s, l, t: Marker(s[t._original_start:t._original_end])
+ lambda s, l, t: Marker(s[t._original_start : t._original_end])
)
-MARKER_SEPERATOR = SEMICOLON
-MARKER = MARKER_SEPERATOR + MARKER_EXPR
+MARKER_SEPARATOR = SEMICOLON
+MARKER = MARKER_SEPARATOR + MARKER_EXPR
VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
URL_AND_MARKER = URL + Optional(MARKER)
-NAMED_REQUIREMENT = \
- NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
+NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
+# setuptools.extern.pyparsing isn't thread safe during initialization, so we do it eagerly, see
+# issue #104
+REQUIREMENT.parseString("x[]")
-class Requirement(object):
+class Requirement:
"""Parse a requirement.
Parse a given requirement string into its parts, such as name, specifier,
@@ -85,43 +97,50 @@ class Requirement(object):
# the thing as well as the version? What about the markers?
# TODO: Can we normalize the name and extra name?
- def __init__(self, requirement_string):
+ def __init__(self, requirement_string: str) -> None:
try:
req = REQUIREMENT.parseString(requirement_string)
except ParseException as e:
raise InvalidRequirement(
- "Invalid requirement, parse error at \"{0!r}\"".format(
- requirement_string[e.loc:e.loc + 8]))
+ f'Parse error at "{ requirement_string[e.loc : e.loc + 8]!r}": {e.msg}'
+ )
- self.name = req.name
+ self.name: str = req.name
if req.url:
- parsed_url = urlparse.urlparse(req.url)
- if not (parsed_url.scheme and parsed_url.netloc) or (
- not parsed_url.scheme and not parsed_url.netloc):
- raise InvalidRequirement("Invalid URL given")
- self.url = req.url
+ parsed_url = urllib.parse.urlparse(req.url)
+ if parsed_url.scheme == "file":
+ if urllib.parse.urlunparse(parsed_url) != req.url:
+ raise InvalidRequirement("Invalid URL given")
+ elif not (parsed_url.scheme and parsed_url.netloc) or (
+ not parsed_url.scheme and not parsed_url.netloc
+ ):
+ raise InvalidRequirement(f"Invalid URL: {req.url}")
+ self.url: TOptional[str] = req.url
else:
self.url = None
- self.extras = set(req.extras.asList() if req.extras else [])
- self.specifier = SpecifierSet(req.specifier)
- self.marker = req.marker if req.marker else None
+ self.extras: Set[str] = set(req.extras.asList() if req.extras else [])
+ self.specifier: SpecifierSet = SpecifierSet(req.specifier)
+ self.marker: TOptional[Marker] = req.marker if req.marker else None
- def __str__(self):
- parts = [self.name]
+ def __str__(self) -> str:
+ parts: List[str] = [self.name]
if self.extras:
- parts.append("[{0}]".format(",".join(sorted(self.extras))))
+ formatted_extras = ",".join(sorted(self.extras))
+ parts.append(f"[{formatted_extras}]")
if self.specifier:
parts.append(str(self.specifier))
if self.url:
- parts.append("@ {0}".format(self.url))
+ parts.append(f"@ {self.url}")
+ if self.marker:
+ parts.append(" ")
if self.marker:
- parts.append("; {0}".format(self.marker))
+ parts.append(f"; {self.marker}")
return "".join(parts)
- def __repr__(self):
- return "<Requirement({0!r})>".format(str(self))
+ def __repr__(self) -> str:
+ return f"<Requirement('{self}')>"
diff --git a/setuptools/_vendor/packaging/specifiers.py b/setuptools/_vendor/packaging/specifiers.py
index 7f5a76c..0e218a6 100644
--- a/setuptools/_vendor/packaging/specifiers.py
+++ b/setuptools/_vendor/packaging/specifiers.py
@@ -1,15 +1,33 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
-from __future__ import absolute_import, division, print_function
import abc
import functools
import itertools
import re
-
-from ._compat import string_types, with_metaclass
-from .version import Version, LegacyVersion, parse
+import warnings
+from typing import (
+ Callable,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Pattern,
+ Set,
+ Tuple,
+ TypeVar,
+ Union,
+)
+
+from .utils import canonicalize_version
+from .version import LegacyVersion, Version, parse
+
+ParsedVersion = Union[Version, LegacyVersion]
+UnparsedVersion = Union[Version, LegacyVersion, str]
+VersionTypeVar = TypeVar("VersionTypeVar", bound=UnparsedVersion)
+CallableOperator = Callable[[ParsedVersion, str], bool]
class InvalidSpecifier(ValueError):
@@ -18,57 +36,51 @@ class InvalidSpecifier(ValueError):
"""
-class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
-
+class BaseSpecifier(metaclass=abc.ABCMeta):
@abc.abstractmethod
- def __str__(self):
+ def __str__(self) -> str:
"""
Returns the str representation of this Specifier like object. This
should be representative of the Specifier itself.
"""
@abc.abstractmethod
- def __hash__(self):
+ def __hash__(self) -> int:
"""
Returns a hash value for this Specifier like object.
"""
@abc.abstractmethod
- def __eq__(self, other):
+ def __eq__(self, other: object) -> bool:
"""
Returns a boolean representing whether or not the two Specifier like
objects are equal.
"""
- @abc.abstractmethod
- def __ne__(self, other):
- """
- Returns a boolean representing whether or not the two Specifier like
- objects are not equal.
- """
-
@abc.abstractproperty
- def prereleases(self):
+ def prereleases(self) -> Optional[bool]:
"""
Returns whether or not pre-releases as a whole are allowed by this
specifier.
"""
@prereleases.setter
- def prereleases(self, value):
+ def prereleases(self, value: bool) -> None:
"""
Sets whether or not pre-releases as a whole are allowed by this
specifier.
"""
@abc.abstractmethod
- def contains(self, item, prereleases=None):
+ def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
"""
Determines if the given item is contained within this specifier.
"""
@abc.abstractmethod
- def filter(self, iterable, prereleases=None):
+ def filter(
+ self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
+ ) -> Iterable[VersionTypeVar]:
"""
Takes an iterable of items and filters them so that only items which
are contained within this specifier are allowed in it.
@@ -77,14 +89,15 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
class _IndividualSpecifier(BaseSpecifier):
- _operators = {}
+ _operators: Dict[str, str] = {}
+ _regex: Pattern[str]
- def __init__(self, spec="", prereleases=None):
+ def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
match = self._regex.search(spec)
if not match:
- raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))
+ raise InvalidSpecifier(f"Invalid specifier: '{spec}'")
- self._spec = (
+ self._spec: Tuple[str, str] = (
match.group("operator").strip(),
match.group("version").strip(),
)
@@ -92,94 +105,93 @@ class _IndividualSpecifier(BaseSpecifier):
# Store whether or not this Specifier should accept prereleases
self._prereleases = prereleases
- def __repr__(self):
+ def __repr__(self) -> str:
pre = (
- ", prereleases={0!r}".format(self.prereleases)
+ f", prereleases={self.prereleases!r}"
if self._prereleases is not None
else ""
)
- return "<{0}({1!r}{2})>".format(
- self.__class__.__name__,
- str(self),
- pre,
- )
-
- def __str__(self):
- return "{0}{1}".format(*self._spec)
+ return f"<{self.__class__.__name__}({str(self)!r}{pre})>"
- def __hash__(self):
- return hash(self._spec)
+ def __str__(self) -> str:
+ return "{}{}".format(*self._spec)
- def __eq__(self, other):
- if isinstance(other, string_types):
- try:
- other = self.__class__(other)
- except InvalidSpecifier:
- return NotImplemented
- elif not isinstance(other, self.__class__):
- return NotImplemented
+ @property
+ def _canonical_spec(self) -> Tuple[str, str]:
+ return self._spec[0], canonicalize_version(self._spec[1])
- return self._spec == other._spec
+ def __hash__(self) -> int:
+ return hash(self._canonical_spec)
- def __ne__(self, other):
- if isinstance(other, string_types):
+ def __eq__(self, other: object) -> bool:
+ if isinstance(other, str):
try:
- other = self.__class__(other)
+ other = self.__class__(str(other))
except InvalidSpecifier:
return NotImplemented
elif not isinstance(other, self.__class__):
return NotImplemented
- return self._spec != other._spec
+ return self._canonical_spec == other._canonical_spec
- def _get_operator(self, op):
- return getattr(self, "_compare_{0}".format(self._operators[op]))
+ def _get_operator(self, op: str) -> CallableOperator:
+ operator_callable: CallableOperator = getattr(
+ self, f"_compare_{self._operators[op]}"
+ )
+ return operator_callable
- def _coerce_version(self, version):
+ def _coerce_version(self, version: UnparsedVersion) -> ParsedVersion:
if not isinstance(version, (LegacyVersion, Version)):
version = parse(version)
return version
@property
- def operator(self):
+ def operator(self) -> str:
return self._spec[0]
@property
- def version(self):
+ def version(self) -> str:
return self._spec[1]
@property
- def prereleases(self):
+ def prereleases(self) -> Optional[bool]:
return self._prereleases
@prereleases.setter
- def prereleases(self, value):
+ def prereleases(self, value: bool) -> None:
self._prereleases = value
- def __contains__(self, item):
+ def __contains__(self, item: str) -> bool:
return self.contains(item)
- def contains(self, item, prereleases=None):
+ def contains(
+ self, item: UnparsedVersion, prereleases: Optional[bool] = None
+ ) -> bool:
+
# Determine if prereleases are to be allowed or not.
if prereleases is None:
prereleases = self.prereleases
# Normalize item to a Version or LegacyVersion, this allows us to have
# a shortcut for ``"2.0" in Specifier(">=2")
- item = self._coerce_version(item)
+ normalized_item = self._coerce_version(item)
# Determine if we should be supporting prereleases in this specifier
# or not, if we do not support prereleases then we can short circuit
# logic if this version is a prerelease.
- if item.is_prerelease and not prereleases:
+ if normalized_item.is_prerelease and not prereleases:
return False
# Actually do the comparison to determine if this item is contained
# within this Specifier or not.
- return self._get_operator(self.operator)(item, self.version)
+ operator_callable: CallableOperator = self._get_operator(self.operator)
+ return operator_callable(normalized_item, self.version)
+
+ def filter(
+ self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
+ ) -> Iterable[VersionTypeVar]:
- def filter(self, iterable, prereleases=None):
yielded = False
found_prereleases = []
@@ -192,13 +204,14 @@ class _IndividualSpecifier(BaseSpecifier):
if self.contains(parsed_version, **kw):
# If our version is a prerelease, and we were not set to allow
- # prereleases, then we'll store it for later incase nothing
+ # prereleases, then we'll store it for later in case nothing
# else matches this specifier.
- if (parsed_version.is_prerelease and not
- (prereleases or self.prereleases)):
+ if parsed_version.is_prerelease and not (
+ prereleases or self.prereleases
+ ):
found_prereleases.append(version)
# Either this is not a prerelease, or we should have been
- # accepting prereleases from the begining.
+ # accepting prereleases from the beginning.
else:
yielded = True
yield version
@@ -213,8 +226,7 @@ class _IndividualSpecifier(BaseSpecifier):
class LegacySpecifier(_IndividualSpecifier):
- _regex_str = (
- r"""
+ _regex_str = r"""
(?P<operator>(==|!=|<=|>=|<|>))
\s*
(?P<version>
@@ -225,10 +237,8 @@ class LegacySpecifier(_IndividualSpecifier):
# them, and a comma since it's a version separator.
)
"""
- )
- _regex = re.compile(
- r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+ _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
_operators = {
"==": "equal",
@@ -239,43 +249,56 @@ class LegacySpecifier(_IndividualSpecifier):
">": "greater_than",
}
- def _coerce_version(self, version):
+ def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
+ super().__init__(spec, prereleases)
+
+ warnings.warn(
+ "Creating a LegacyVersion has been deprecated and will be "
+ "removed in the next major release",
+ DeprecationWarning,
+ )
+
+ def _coerce_version(self, version: UnparsedVersion) -> LegacyVersion:
if not isinstance(version, LegacyVersion):
version = LegacyVersion(str(version))
return version
- def _compare_equal(self, prospective, spec):
+ def _compare_equal(self, prospective: LegacyVersion, spec: str) -> bool:
return prospective == self._coerce_version(spec)
- def _compare_not_equal(self, prospective, spec):
+ def _compare_not_equal(self, prospective: LegacyVersion, spec: str) -> bool:
return prospective != self._coerce_version(spec)
- def _compare_less_than_equal(self, prospective, spec):
+ def _compare_less_than_equal(self, prospective: LegacyVersion, spec: str) -> bool:
return prospective <= self._coerce_version(spec)
- def _compare_greater_than_equal(self, prospective, spec):
+ def _compare_greater_than_equal(
+ self, prospective: LegacyVersion, spec: str
+ ) -> bool:
return prospective >= self._coerce_version(spec)
- def _compare_less_than(self, prospective, spec):
+ def _compare_less_than(self, prospective: LegacyVersion, spec: str) -> bool:
return prospective < self._coerce_version(spec)
- def _compare_greater_than(self, prospective, spec):
+ def _compare_greater_than(self, prospective: LegacyVersion, spec: str) -> bool:
return prospective > self._coerce_version(spec)
-def _require_version_compare(fn):
+def _require_version_compare(
+ fn: Callable[["Specifier", ParsedVersion, str], bool]
+) -> Callable[["Specifier", ParsedVersion, str], bool]:
@functools.wraps(fn)
- def wrapped(self, prospective, spec):
+ def wrapped(self: "Specifier", prospective: ParsedVersion, spec: str) -> bool:
if not isinstance(prospective, Version):
return False
return fn(self, prospective, spec)
+
return wrapped
class Specifier(_IndividualSpecifier):
- _regex_str = (
- r"""
+ _regex_str = r"""
(?P<operator>(~=|==|!=|<=|>=|<|>|===))
(?P<version>
(?:
@@ -367,10 +390,8 @@ class Specifier(_IndividualSpecifier):
)
)
"""
- )
- _regex = re.compile(
- r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+ _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
_operators = {
"~=": "compatible",
@@ -384,7 +405,8 @@ class Specifier(_IndividualSpecifier):
}
@_require_version_compare
- def _compare_compatible(self, prospective, spec):
+ def _compare_compatible(self, prospective: ParsedVersion, spec: str) -> bool:
+
# Compatible releases have an equivalent combination of >= and ==. That
# is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
# implement this in terms of the other specifiers instead of
@@ -392,76 +414,86 @@ class Specifier(_IndividualSpecifier):
# the other specifiers.
# We want everything but the last item in the version, but we want to
- # ignore post and dev releases and we want to treat the pre-release as
- # it's own separate segment.
+ # ignore suffix segments.
prefix = ".".join(
- list(
- itertools.takewhile(
- lambda x: (not x.startswith("post") and not
- x.startswith("dev")),
- _version_split(spec),
- )
- )[:-1]
+ list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
)
# Add the prefix notation to the end of our string
prefix += ".*"
- return (self._get_operator(">=")(prospective, spec) and
- self._get_operator("==")(prospective, prefix))
+ return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
+ prospective, prefix
+ )
@_require_version_compare
- def _compare_equal(self, prospective, spec):
+ def _compare_equal(self, prospective: ParsedVersion, spec: str) -> bool:
+
# We need special logic to handle prefix matching
if spec.endswith(".*"):
# In the case of prefix matching we want to ignore local segment.
prospective = Version(prospective.public)
# Split the spec out by dots, and pretend that there is an implicit
# dot in between a release segment and a pre-release segment.
- spec = _version_split(spec[:-2]) # Remove the trailing .*
+ split_spec = _version_split(spec[:-2]) # Remove the trailing .*
# Split the prospective version out by dots, and pretend that there
# is an implicit dot in between a release segment and a pre-release
# segment.
- prospective = _version_split(str(prospective))
+ split_prospective = _version_split(str(prospective))
# Shorten the prospective version to be the same length as the spec
# so that we can determine if the specifier is a prefix of the
# prospective version or not.
- prospective = prospective[:len(spec)]
+ shortened_prospective = split_prospective[: len(split_spec)]
# Pad out our two sides with zeros so that they both equal the same
# length.
- spec, prospective = _pad_version(spec, prospective)
+ padded_spec, padded_prospective = _pad_version(
+ split_spec, shortened_prospective
+ )
+
+ return padded_prospective == padded_spec
else:
# Convert our spec string into a Version
- spec = Version(spec)
+ spec_version = Version(spec)
# If the specifier does not have a local segment, then we want to
# act as if the prospective version also does not have a local
# segment.
- if not spec.local:
+ if not spec_version.local:
prospective = Version(prospective.public)
- return prospective == spec
+ return prospective == spec_version
@_require_version_compare
- def _compare_not_equal(self, prospective, spec):
+ def _compare_not_equal(self, prospective: ParsedVersion, spec: str) -> bool:
return not self._compare_equal(prospective, spec)
@_require_version_compare
- def _compare_less_than_equal(self, prospective, spec):
- return prospective <= Version(spec)
+ def _compare_less_than_equal(self, prospective: ParsedVersion, spec: str) -> bool:
+
+ # NB: Local version identifiers are NOT permitted in the version
+ # specifier, so local version labels can be universally removed from
+ # the prospective version.
+ return Version(prospective.public) <= Version(spec)
@_require_version_compare
- def _compare_greater_than_equal(self, prospective, spec):
- return prospective >= Version(spec)
+ def _compare_greater_than_equal(
+ self, prospective: ParsedVersion, spec: str
+ ) -> bool:
+
+ # NB: Local version identifiers are NOT permitted in the version
+ # specifier, so local version labels can be universally removed from
+ # the prospective version.
+ return Version(prospective.public) >= Version(spec)
@_require_version_compare
- def _compare_less_than(self, prospective, spec):
+ def _compare_less_than(self, prospective: ParsedVersion, spec_str: str) -> bool:
+
# Convert our spec to a Version instance, since we'll want to work with
# it as a version.
- spec = Version(spec)
+ spec = Version(spec_str)
# Check to see if the prospective version is less than the spec
# version. If it's not we can short circuit and just return False now
@@ -483,10 +515,11 @@ class Specifier(_IndividualSpecifier):
return True
@_require_version_compare
- def _compare_greater_than(self, prospective, spec):
+ def _compare_greater_than(self, prospective: ParsedVersion, spec_str: str) -> bool:
+
# Convert our spec to a Version instance, since we'll want to work with
# it as a version.
- spec = Version(spec)
+ spec = Version(spec_str)
# Check to see if the prospective version is greater than the spec
# version. If it's not we can short circuit and just return False now
@@ -503,7 +536,7 @@ class Specifier(_IndividualSpecifier):
return False
# Ensure that we do not allow a local version of the version mentioned
- # in the specifier, which is techincally greater than, to match.
+ # in the specifier, which is technically greater than, to match.
if prospective.local is not None:
if Version(prospective.base_version) == Version(spec.base_version):
return False
@@ -513,11 +546,12 @@ class Specifier(_IndividualSpecifier):
# same version in the spec.
return True
- def _compare_arbitrary(self, prospective, spec):
+ def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
return str(prospective).lower() == str(spec).lower()
@property
- def prereleases(self):
+ def prereleases(self) -> bool:
+
# If there is an explicit prereleases set for this, then we'll just
# blindly use that.
if self._prereleases is not None:
@@ -541,15 +575,15 @@ class Specifier(_IndividualSpecifier):
return False
@prereleases.setter
- def prereleases(self, value):
+ def prereleases(self, value: bool) -> None:
self._prereleases = value
_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
-def _version_split(version):
- result = []
+def _version_split(version: str) -> List[str]:
+ result: List[str] = []
for item in version.split("."):
match = _prefix_regex.search(item)
if match:
@@ -559,7 +593,13 @@ def _version_split(version):
return result
-def _pad_version(left, right):
+def _is_not_suffix(segment: str) -> bool:
+ return not any(
+ segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
+ )
+
+
+def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
left_split, right_split = [], []
# Get the release segment of our versions
@@ -567,36 +607,29 @@ def _pad_version(left, right):
right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
# Get the rest of our versions
- left_split.append(left[len(left_split[0]):])
- right_split.append(right[len(right_split[0]):])
+ left_split.append(left[len(left_split[0]) :])
+ right_split.append(right[len(right_split[0]) :])
# Insert our padding
- left_split.insert(
- 1,
- ["0"] * max(0, len(right_split[0]) - len(left_split[0])),
- )
- right_split.insert(
- 1,
- ["0"] * max(0, len(left_split[0]) - len(right_split[0])),
- )
+ left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
+ right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
- return (
- list(itertools.chain(*left_split)),
- list(itertools.chain(*right_split)),
- )
+ return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
class SpecifierSet(BaseSpecifier):
+ def __init__(
+ self, specifiers: str = "", prereleases: Optional[bool] = None
+ ) -> None:
- def __init__(self, specifiers="", prereleases=None):
- # Split on , to break each indidivual specifier into it's own item, and
+ # Split on , to break each individual specifier into its own item, and
# strip each item to remove leading/trailing whitespace.
- specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
+ split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
# Parsed each individual specifier, attempting first to make it a
# Specifier and falling back to a LegacySpecifier.
- parsed = set()
- for specifier in specifiers:
+ parsed: Set[_IndividualSpecifier] = set()
+ for specifier in split_specifiers:
try:
parsed.add(Specifier(specifier))
except InvalidSpecifier:
@@ -609,23 +642,23 @@ class SpecifierSet(BaseSpecifier):
# we accept prereleases or not.
self._prereleases = prereleases
- def __repr__(self):
+ def __repr__(self) -> str:
pre = (
- ", prereleases={0!r}".format(self.prereleases)
+ f", prereleases={self.prereleases!r}"
if self._prereleases is not None
else ""
)
- return "<SpecifierSet({0!r}{1})>".format(str(self), pre)
+ return f"<SpecifierSet({str(self)!r}{pre})>"
- def __str__(self):
+ def __str__(self) -> str:
return ",".join(sorted(str(s) for s in self._specs))
- def __hash__(self):
+ def __hash__(self) -> int:
return hash(self._specs)
- def __and__(self, other):
- if isinstance(other, string_types):
+ def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
+ if isinstance(other, str):
other = SpecifierSet(other)
elif not isinstance(other, SpecifierSet):
return NotImplemented
@@ -647,34 +680,23 @@ class SpecifierSet(BaseSpecifier):
return specifier
- def __eq__(self, other):
- if isinstance(other, string_types):
- other = SpecifierSet(other)
- elif isinstance(other, _IndividualSpecifier):
+ def __eq__(self, other: object) -> bool:
+ if isinstance(other, (str, _IndividualSpecifier)):
other = SpecifierSet(str(other))
elif not isinstance(other, SpecifierSet):
return NotImplemented
return self._specs == other._specs
- def __ne__(self, other):
- if isinstance(other, string_types):
- other = SpecifierSet(other)
- elif isinstance(other, _IndividualSpecifier):
- other = SpecifierSet(str(other))
- elif not isinstance(other, SpecifierSet):
- return NotImplemented
-
- return self._specs != other._specs
-
- def __len__(self):
+ def __len__(self) -> int:
return len(self._specs)
- def __iter__(self):
+ def __iter__(self) -> Iterator[_IndividualSpecifier]:
return iter(self._specs)
@property
- def prereleases(self):
+ def prereleases(self) -> Optional[bool]:
+
# If we have been given an explicit prerelease modifier, then we'll
# pass that through here.
if self._prereleases is not None:
@@ -691,13 +713,16 @@ class SpecifierSet(BaseSpecifier):
return any(s.prereleases for s in self._specs)
@prereleases.setter
- def prereleases(self, value):
+ def prereleases(self, value: bool) -> None:
self._prereleases = value
- def __contains__(self, item):
+ def __contains__(self, item: UnparsedVersion) -> bool:
return self.contains(item)
- def contains(self, item, prereleases=None):
+ def contains(
+ self, item: UnparsedVersion, prereleases: Optional[bool] = None
+ ) -> bool:
+
# Ensure that our item is a Version or LegacyVersion instance.
if not isinstance(item, (LegacyVersion, Version)):
item = parse(item)
@@ -721,12 +746,12 @@ class SpecifierSet(BaseSpecifier):
# given version is contained within all of them.
# Note: This use of all() here means that an empty set of specifiers
# will always return True, this is an explicit design decision.
- return all(
- s.contains(item, prereleases=prereleases)
- for s in self._specs
- )
+ return all(s.contains(item, prereleases=prereleases) for s in self._specs)
+
+ def filter(
+ self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
+ ) -> Iterable[VersionTypeVar]:
- def filter(self, iterable, prereleases=None):
# Determine if we're forcing a prerelease or not, if we're not forcing
# one for this particular filter call, then we'll use whatever the
# SpecifierSet thinks for whether or not we should support prereleases.
@@ -744,8 +769,11 @@ class SpecifierSet(BaseSpecifier):
# which will filter out any pre-releases, unless there are no final
# releases, and which will filter out LegacyVersion in general.
else:
- filtered = []
- found_prereleases = []
+ filtered: List[VersionTypeVar] = []
+ found_prereleases: List[VersionTypeVar] = []
+
+ item: UnparsedVersion
+ parsed_version: Union[Version, LegacyVersion]
for item in iterable:
# Ensure that we have some kind of Version class for this item.
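A short sketch of the SpecifierSet behaviour touched by this hunk, including the prerelease filtering discussed in the comments (values follow standard packaging semantics):

    from setuptools._vendor.packaging.specifiers import SpecifierSet

    spec = SpecifierSet(">=1.0,!=1.3.4.*,<2.0")
    "1.6" in spec                                          # True
    "1.3.4.1" in spec                                      # False (prefix exclusion)
    list(spec.filter(["1.1", "1.3.4", "1.5a1", "2.1"]))    # ['1.1']
    list(spec.filter(["1.1", "1.5a1"], prereleases=True))  # ['1.1', '1.5a1']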
diff --git a/setuptools/_vendor/packaging/tags.py b/setuptools/_vendor/packaging/tags.py
new file mode 100644
index 0000000..9a3d25a
--- /dev/null
+++ b/setuptools/_vendor/packaging/tags.py
@@ -0,0 +1,487 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import logging
+import platform
+import sys
+import sysconfig
+from importlib.machinery import EXTENSION_SUFFIXES
+from typing import (
+ Dict,
+ FrozenSet,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Sequence,
+ Tuple,
+ Union,
+ cast,
+)
+
+from . import _manylinux, _musllinux
+
+logger = logging.getLogger(__name__)
+
+PythonVersion = Sequence[int]
+MacVersion = Tuple[int, int]
+
+INTERPRETER_SHORT_NAMES: Dict[str, str] = {
+ "python": "py", # Generic.
+ "cpython": "cp",
+ "pypy": "pp",
+ "ironpython": "ip",
+ "jython": "jy",
+}
+
+
+_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32
+
+
+class Tag:
+ """
+ A representation of the tag triple for a wheel.
+
+ Instances are considered immutable and thus are hashable. Equality checking
+ is also supported.
+ """
+
+ __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]
+
+ def __init__(self, interpreter: str, abi: str, platform: str) -> None:
+ self._interpreter = interpreter.lower()
+ self._abi = abi.lower()
+ self._platform = platform.lower()
+ # The __hash__ of every single element in a Set[Tag] will be evaluated each time
+ # that a set calls its `.disjoint()` method, which may be called hundreds of
+ # times when scanning a page of links for packages with tags matching that
+ # Set[Tag]. Pre-computing the value here produces significant speedups for
+ # downstream consumers.
+ self._hash = hash((self._interpreter, self._abi, self._platform))
+
+ @property
+ def interpreter(self) -> str:
+ return self._interpreter
+
+ @property
+ def abi(self) -> str:
+ return self._abi
+
+ @property
+ def platform(self) -> str:
+ return self._platform
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, Tag):
+ return NotImplemented
+
+ return (
+ (self._hash == other._hash) # Short-circuit ASAP for perf reasons.
+ and (self._platform == other._platform)
+ and (self._abi == other._abi)
+ and (self._interpreter == other._interpreter)
+ )
+
+ def __hash__(self) -> int:
+ return self._hash
+
+ def __str__(self) -> str:
+ return f"{self._interpreter}-{self._abi}-{self._platform}"
+
+ def __repr__(self) -> str:
+ return f"<{self} @ {id(self)}>"
+
+
+def parse_tag(tag: str) -> FrozenSet[Tag]:
+ """
+ Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.
+
+ Returning a set is required due to the possibility that the tag is a
+ compressed tag set.
+ """
+ tags = set()
+ interpreters, abis, platforms = tag.split("-")
+ for interpreter in interpreters.split("."):
+ for abi in abis.split("."):
+ for platform_ in platforms.split("."):
+ tags.add(Tag(interpreter, abi, platform_))
+ return frozenset(tags)
+
+
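To ground the Tag and parse_tag docstrings above, a small sketch of the expected round trip (standard packaging.tags behaviour):

    from setuptools._vendor.packaging.tags import Tag, parse_tag

    t = Tag("cp39", "cp39", "manylinux_2_17_x86_64")
    str(t)  # 'cp39-cp39-manylinux_2_17_x86_64'
    # A compressed tag set expands into a frozenset of individual Tags.
    parse_tag("py2.py3-none-any") == {Tag("py2", "none", "any"), Tag("py3", "none", "any")}  # True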
+def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
+ value = sysconfig.get_config_var(name)
+ if value is None and warn:
+ logger.debug(
+ "Config variable '%s' is unset, Python ABI tag may be incorrect", name
+ )
+ return value
+
+
+def _normalize_string(string: str) -> str:
+ return string.replace(".", "_").replace("-", "_")
+
+
+def _abi3_applies(python_version: PythonVersion) -> bool:
+ """
+ Determine if the Python version supports abi3.
+
+ PEP 384 was first implemented in Python 3.2.
+ """
+ return len(python_version) > 1 and tuple(python_version) >= (3, 2)
+
+
+def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
+ py_version = tuple(py_version) # To allow for version comparison.
+ abis = []
+ version = _version_nodot(py_version[:2])
+ debug = pymalloc = ucs4 = ""
+ with_debug = _get_config_var("Py_DEBUG", warn)
+ has_refcount = hasattr(sys, "gettotalrefcount")
+ # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
+ # extension modules is the best option.
+ # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
+ has_ext = "_d.pyd" in EXTENSION_SUFFIXES
+ if with_debug or (with_debug is None and (has_refcount or has_ext)):
+ debug = "d"
+ if py_version < (3, 8):
+ with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
+ if with_pymalloc or with_pymalloc is None:
+ pymalloc = "m"
+ if py_version < (3, 3):
+ unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
+ if unicode_size == 4 or (
+ unicode_size is None and sys.maxunicode == 0x10FFFF
+ ):
+ ucs4 = "u"
+ elif debug:
+ # Debug builds can also load "normal" extension modules.
+ # We can also assume no UCS-4 or pymalloc requirement.
+ abis.append(f"cp{version}")
+ abis.insert(
+ 0,
+ "cp{version}{debug}{pymalloc}{ucs4}".format(
+ version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
+ ),
+ )
+ return abis
+
+
+def cpython_tags(
+ python_version: Optional[PythonVersion] = None,
+ abis: Optional[Iterable[str]] = None,
+ platforms: Optional[Iterable[str]] = None,
+ *,
+ warn: bool = False,
+) -> Iterator[Tag]:
+ """
+ Yields the tags for a CPython interpreter.
+
+ The tags consist of:
+ - cp<python_version>-<abi>-<platform>
+ - cp<python_version>-abi3-<platform>
+ - cp<python_version>-none-<platform>
+ - cp<less than python_version>-abi3-<platform> # Older Python versions down to 3.2.
+
+ If python_version only specifies a major version then user-provided ABIs and
+ the 'none' ABItag will be used.
+
+ If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
+ their normal position and not at the beginning.
+ """
+ if not python_version:
+ python_version = sys.version_info[:2]
+
+ interpreter = f"cp{_version_nodot(python_version[:2])}"
+
+ if abis is None:
+ if len(python_version) > 1:
+ abis = _cpython_abis(python_version, warn)
+ else:
+ abis = []
+ abis = list(abis)
+ # 'abi3' and 'none' are explicitly handled later.
+ for explicit_abi in ("abi3", "none"):
+ try:
+ abis.remove(explicit_abi)
+ except ValueError:
+ pass
+
+ platforms = list(platforms or platform_tags())
+ for abi in abis:
+ for platform_ in platforms:
+ yield Tag(interpreter, abi, platform_)
+ if _abi3_applies(python_version):
+ yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
+ yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)
+
+ if _abi3_applies(python_version):
+ for minor_version in range(python_version[1] - 1, 1, -1):
+ for platform_ in platforms:
+ interpreter = "cp{version}".format(
+ version=_version_nodot((python_version[0], minor_version))
+ )
+ yield Tag(interpreter, "abi3", platform_)
+
+
+def _generic_abi() -> Iterator[str]:
+ abi = sysconfig.get_config_var("SOABI")
+ if abi:
+ yield _normalize_string(abi)
+
+
+def generic_tags(
+ interpreter: Optional[str] = None,
+ abis: Optional[Iterable[str]] = None,
+ platforms: Optional[Iterable[str]] = None,
+ *,
+ warn: bool = False,
+) -> Iterator[Tag]:
+ """
+ Yields the tags for a generic interpreter.
+
+ The tags consist of:
+ - <interpreter>-<abi>-<platform>
+
+ The "none" ABI will be added if it was not explicitly provided.
+ """
+ if not interpreter:
+ interp_name = interpreter_name()
+ interp_version = interpreter_version(warn=warn)
+ interpreter = "".join([interp_name, interp_version])
+ if abis is None:
+ abis = _generic_abi()
+ platforms = list(platforms or platform_tags())
+ abis = list(abis)
+ if "none" not in abis:
+ abis.append("none")
+ for abi in abis:
+ for platform_ in platforms:
+ yield Tag(interpreter, abi, platform_)
+
+
+def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
+ """
+ Yields Python versions in descending order.
+
+ After the latest version, the major-only version will be yielded, and then
+ all previous versions of that major version.
+ """
+ if len(py_version) > 1:
+ yield f"py{_version_nodot(py_version[:2])}"
+ yield f"py{py_version[0]}"
+ if len(py_version) > 1:
+ for minor in range(py_version[1] - 1, -1, -1):
+ yield f"py{_version_nodot((py_version[0], minor))}"
+
+
+def compatible_tags(
+ python_version: Optional[PythonVersion] = None,
+ interpreter: Optional[str] = None,
+ platforms: Optional[Iterable[str]] = None,
+) -> Iterator[Tag]:
+ """
+ Yields the sequence of tags that are compatible with a specific version of Python.
+
+ The tags consist of:
+ - py*-none-<platform>
+ - <interpreter>-none-any # ... if `interpreter` is provided.
+ - py*-none-any
+ """
+ if not python_version:
+ python_version = sys.version_info[:2]
+ platforms = list(platforms or platform_tags())
+ for version in _py_interpreter_range(python_version):
+ for platform_ in platforms:
+ yield Tag(version, "none", platform_)
+ if interpreter:
+ yield Tag(interpreter, "none", "any")
+ for version in _py_interpreter_range(python_version):
+ yield Tag(version, "none", "any")
+
+
+def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
+ if not is_32bit:
+ return arch
+
+ if arch.startswith("ppc"):
+ return "ppc"
+
+ return "i386"
+
+
+def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
+ formats = [cpu_arch]
+ if cpu_arch == "x86_64":
+ if version < (10, 4):
+ return []
+ formats.extend(["intel", "fat64", "fat32"])
+
+ elif cpu_arch == "i386":
+ if version < (10, 4):
+ return []
+ formats.extend(["intel", "fat32", "fat"])
+
+ elif cpu_arch == "ppc64":
+ # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
+ if version > (10, 5) or version < (10, 4):
+ return []
+ formats.append("fat64")
+
+ elif cpu_arch == "ppc":
+ if version > (10, 6):
+ return []
+ formats.extend(["fat32", "fat"])
+
+ if cpu_arch in {"arm64", "x86_64"}:
+ formats.append("universal2")
+
+ if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
+ formats.append("universal")
+
+ return formats
+
+
+def mac_platforms(
+ version: Optional[MacVersion] = None, arch: Optional[str] = None
+) -> Iterator[str]:
+ """
+ Yields the platform tags for a macOS system.
+
+ The `version` parameter is a two-item tuple specifying the macOS version to
+ generate platform tags for. The `arch` parameter is the CPU architecture to
+ generate platform tags for. Both parameters default to the appropriate value
+ for the current system.
+ """
+ version_str, _, cpu_arch = platform.mac_ver()
+ if version is None:
+ version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
+ else:
+ version = version
+ if arch is None:
+ arch = _mac_arch(cpu_arch)
+ else:
+ arch = arch
+
+ if (10, 0) <= version and version < (11, 0):
+ # Prior to Mac OS 11, each yearly release of Mac OS bumped the
+ # "minor" version number. The major version was always 10.
+ for minor_version in range(version[1], -1, -1):
+ compat_version = 10, minor_version
+ binary_formats = _mac_binary_formats(compat_version, arch)
+ for binary_format in binary_formats:
+ yield "macosx_{major}_{minor}_{binary_format}".format(
+ major=10, minor=minor_version, binary_format=binary_format
+ )
+
+ if version >= (11, 0):
+ # Starting with Mac OS 11, each yearly release bumps the major version
+ # number. The minor versions are now the midyear updates.
+ for major_version in range(version[0], 10, -1):
+ compat_version = major_version, 0
+ binary_formats = _mac_binary_formats(compat_version, arch)
+ for binary_format in binary_formats:
+ yield "macosx_{major}_{minor}_{binary_format}".format(
+ major=major_version, minor=0, binary_format=binary_format
+ )
+
+ if version >= (11, 0):
+ # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
+ # Arm64 support was introduced in 11.0, so no Arm binaries from previous
+ # releases exist.
+ #
+ # However, the "universal2" binary format can have a
+ # macOS version earlier than 11.0 when the x86_64 part of the binary supports
+ # that version of macOS.
+ if arch == "x86_64":
+ for minor_version in range(16, 3, -1):
+ compat_version = 10, minor_version
+ binary_formats = _mac_binary_formats(compat_version, arch)
+ for binary_format in binary_formats:
+ yield "macosx_{major}_{minor}_{binary_format}".format(
+ major=compat_version[0],
+ minor=compat_version[1],
+ binary_format=binary_format,
+ )
+ else:
+ for minor_version in range(16, 3, -1):
+ compat_version = 10, minor_version
+ binary_format = "universal2"
+ yield "macosx_{major}_{minor}_{binary_format}".format(
+ major=compat_version[0],
+ minor=compat_version[1],
+ binary_format=binary_format,
+ )
+
+
+def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
+ linux = _normalize_string(sysconfig.get_platform())
+ if is_32bit:
+ if linux == "linux_x86_64":
+ linux = "linux_i686"
+ elif linux == "linux_aarch64":
+ linux = "linux_armv7l"
+ _, arch = linux.split("_", 1)
+ yield from _manylinux.platform_tags(linux, arch)
+ yield from _musllinux.platform_tags(arch)
+ yield linux
+
+
+def _generic_platforms() -> Iterator[str]:
+ yield _normalize_string(sysconfig.get_platform())
+
+
+def platform_tags() -> Iterator[str]:
+ """
+ Provides the platform tags for this installation.
+ """
+ if platform.system() == "Darwin":
+ return mac_platforms()
+ elif platform.system() == "Linux":
+ return _linux_platforms()
+ else:
+ return _generic_platforms()
+
+
+def interpreter_name() -> str:
+ """
+ Returns the name of the running interpreter.
+ """
+ name = sys.implementation.name
+ return INTERPRETER_SHORT_NAMES.get(name) or name
+
+
+def interpreter_version(*, warn: bool = False) -> str:
+ """
+ Returns the version of the running interpreter.
+ """
+ version = _get_config_var("py_version_nodot", warn=warn)
+ if version:
+ version = str(version)
+ else:
+ version = _version_nodot(sys.version_info[:2])
+ return version
+
+
+def _version_nodot(version: PythonVersion) -> str:
+ return "".join(map(str, version))
+
+
+def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
+ """
+ Returns the sequence of tag triples for the running interpreter.
+
+ The order of the sequence corresponds to priority order for the
+ interpreter, from most to least important.
+ """
+
+ interp_name = interpreter_name()
+ if interp_name == "cp":
+ yield from cpython_tags(warn=warn)
+ else:
+ yield from generic_tags()
+
+ if interp_name == "pp":
+ yield from compatible_tags(interpreter="pp3")
+ else:
+ yield from compatible_tags()
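
For orientation, the tag-generation API added above can be exercised directly. The sketch below targets the standalone packaging distribution; the vendored copy under setuptools/_vendor/packaging behaves the same but is an internal detail, and the example values in the comments are illustrative only.

    from packaging import tags

    # Most-preferred tag for the running interpreter, yielded first by sys_tags(),
    # e.g. something like cp310-cp310-manylinux_2_17_x86_64.
    best = next(tags.sys_tags())
    print(best.interpreter, best.abi, best.platform)

    # compatible_tags() supplies the pure-Python fallbacks; on a Python 3
    # interpreter "py3-none-any" appears somewhere in the sequence.
    print(any(str(t) == "py3-none-any" for t in tags.sys_tags()))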
diff --git a/setuptools/_vendor/packaging/utils.py b/setuptools/_vendor/packaging/utils.py
index 942387c..bab11b8 100644
--- a/setuptools/_vendor/packaging/utils.py
+++ b/setuptools/_vendor/packaging/utils.py
@@ -1,14 +1,136 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
-from __future__ import absolute_import, division, print_function
import re
+from typing import FrozenSet, NewType, Tuple, Union, cast
+
+from .tags import Tag, parse_tag
+from .version import InvalidVersion, Version
+
+BuildTag = Union[Tuple[()], Tuple[int, str]]
+NormalizedName = NewType("NormalizedName", str)
+
+
+class InvalidWheelFilename(ValueError):
+ """
+ An invalid wheel filename was found, users should refer to PEP 427.
+ """
+
+
+class InvalidSdistFilename(ValueError):
+ """
+ An invalid sdist filename was found, users should refer to the packaging user guide.
+ """
_canonicalize_regex = re.compile(r"[-_.]+")
+# PEP 427: The build number must start with a digit.
+_build_tag_regex = re.compile(r"(\d+)(.*)")
-def canonicalize_name(name):
+def canonicalize_name(name: str) -> NormalizedName:
# This is taken from PEP 503.
- return _canonicalize_regex.sub("-", name).lower()
+ value = _canonicalize_regex.sub("-", name).lower()
+ return cast(NormalizedName, value)
+
+
+def canonicalize_version(version: Union[Version, str]) -> str:
+ """
+ This is very similar to Version.__str__, but has one subtle difference
+ with the way it handles the release segment.
+ """
+ if isinstance(version, str):
+ try:
+ parsed = Version(version)
+ except InvalidVersion:
+ # Legacy versions cannot be normalized
+ return version
+ else:
+ parsed = version
+
+ parts = []
+
+ # Epoch
+ if parsed.epoch != 0:
+ parts.append(f"{parsed.epoch}!")
+
+ # Release segment
+ # NB: This strips trailing '.0's to normalize
+ parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in parsed.release)))
+
+ # Pre-release
+ if parsed.pre is not None:
+ parts.append("".join(str(x) for x in parsed.pre))
+
+ # Post-release
+ if parsed.post is not None:
+ parts.append(f".post{parsed.post}")
+
+ # Development release
+ if parsed.dev is not None:
+ parts.append(f".dev{parsed.dev}")
+
+ # Local version segment
+ if parsed.local is not None:
+ parts.append(f"+{parsed.local}")
+
+ return "".join(parts)
+
+
+def parse_wheel_filename(
+ filename: str,
+) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
+ if not filename.endswith(".whl"):
+ raise InvalidWheelFilename(
+ f"Invalid wheel filename (extension must be '.whl'): {filename}"
+ )
+
+ filename = filename[:-4]
+ dashes = filename.count("-")
+ if dashes not in (4, 5):
+ raise InvalidWheelFilename(
+ f"Invalid wheel filename (wrong number of parts): {filename}"
+ )
+
+ parts = filename.split("-", dashes - 2)
+ name_part = parts[0]
+ # See PEP 427 for the rules on escaping the project name
+ if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
+ raise InvalidWheelFilename(f"Invalid project name: {filename}")
+ name = canonicalize_name(name_part)
+ version = Version(parts[1])
+ if dashes == 5:
+ build_part = parts[2]
+ build_match = _build_tag_regex.match(build_part)
+ if build_match is None:
+ raise InvalidWheelFilename(
+ f"Invalid build number: {build_part} in '{filename}'"
+ )
+ build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
+ else:
+ build = ()
+ tags = parse_tag(parts[-1])
+ return (name, version, build, tags)
+
+
+def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
+ if filename.endswith(".tar.gz"):
+ file_stem = filename[: -len(".tar.gz")]
+ elif filename.endswith(".zip"):
+ file_stem = filename[: -len(".zip")]
+ else:
+ raise InvalidSdistFilename(
+ f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
+ f" {filename}"
+ )
+
+ # We are requiring a PEP 440 version, which cannot contain dashes,
+ # so we split on the last dash.
+ name_part, sep, version_part = file_stem.rpartition("-")
+ if not sep:
+ raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")
+
+ name = canonicalize_name(name_part)
+ version = Version(version_part)
+ return (name, version)
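
The new wheel/sdist filename helpers above can be sketched as follows, again against the standalone packaging distribution; the filenames are arbitrary examples, not taken from this change.

    from packaging.utils import (
        canonicalize_name,
        canonicalize_version,
        parse_sdist_filename,
        parse_wheel_filename,
    )

    print(canonicalize_name("Typing_Extensions"))   # PEP 503 normalization -> typing-extensions
    print(canonicalize_version("1.19.0"))           # trailing ".0" release parts dropped -> 1.19

    name, version, build, wheel_tags = parse_wheel_filename("pip-22.0.4-py3-none-any.whl")
    print(name, version, build, [str(t) for t in wheel_tags])
    # -> pip 22.0.4 () ['py3-none-any']

    print(parse_sdist_filename("setuptools-61.1.0.tar.gz"))
    # -> ('setuptools', <Version('61.1.0')>)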
diff --git a/setuptools/_vendor/packaging/version.py b/setuptools/_vendor/packaging/version.py
index 83b5ee8..de9a09a 100644
--- a/setuptools/_vendor/packaging/version.py
+++ b/setuptools/_vendor/packaging/version.py
@@ -1,27 +1,45 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
-from __future__ import absolute_import, division, print_function
import collections
import itertools
import re
-
-from ._structures import Infinity
-
-
-__all__ = [
- "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
+import warnings
+from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union
+
+from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
+
+__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]
+
+InfiniteTypes = Union[InfinityType, NegativeInfinityType]
+PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
+SubLocalType = Union[InfiniteTypes, int, str]
+LocalType = Union[
+ NegativeInfinityType,
+ Tuple[
+ Union[
+ SubLocalType,
+ Tuple[SubLocalType, str],
+ Tuple[NegativeInfinityType, SubLocalType],
+ ],
+ ...,
+ ],
+]
+CmpKey = Tuple[
+ int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
+]
+LegacyCmpKey = Tuple[int, Tuple[str, ...]]
+VersionComparisonMethod = Callable[
+ [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool
]
-
_Version = collections.namedtuple(
- "_Version",
- ["epoch", "release", "dev", "pre", "post", "local"],
+ "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
)
-def parse(version):
+def parse(version: str) -> Union["LegacyVersion", "Version"]:
"""
Parse the given version string and return either a :class:`Version` object
or a :class:`LegacyVersion` object depending on if the given version is
@@ -39,79 +57,126 @@ class InvalidVersion(ValueError):
"""
-class _BaseVersion(object):
+class _BaseVersion:
+ _key: Union[CmpKey, LegacyCmpKey]
- def __hash__(self):
+ def __hash__(self) -> int:
return hash(self._key)
- def __lt__(self, other):
- return self._compare(other, lambda s, o: s < o)
+ # Please keep the duplicated `isinstance` check
+ # in the six comparisons hereunder
+ # unless you find a way to avoid adding overhead function calls.
+ def __lt__(self, other: "_BaseVersion") -> bool:
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
- def __le__(self, other):
- return self._compare(other, lambda s, o: s <= o)
+ return self._key < other._key
- def __eq__(self, other):
- return self._compare(other, lambda s, o: s == o)
+ def __le__(self, other: "_BaseVersion") -> bool:
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
- def __ge__(self, other):
- return self._compare(other, lambda s, o: s >= o)
+ return self._key <= other._key
- def __gt__(self, other):
- return self._compare(other, lambda s, o: s > o)
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
- def __ne__(self, other):
- return self._compare(other, lambda s, o: s != o)
+ return self._key == other._key
- def _compare(self, other, method):
+ def __ge__(self, other: "_BaseVersion") -> bool:
if not isinstance(other, _BaseVersion):
return NotImplemented
- return method(self._key, other._key)
+ return self._key >= other._key
+ def __gt__(self, other: "_BaseVersion") -> bool:
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
-class LegacyVersion(_BaseVersion):
+ return self._key > other._key
+
+ def __ne__(self, other: object) -> bool:
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
+
+ return self._key != other._key
- def __init__(self, version):
+
+class LegacyVersion(_BaseVersion):
+ def __init__(self, version: str) -> None:
self._version = str(version)
self._key = _legacy_cmpkey(self._version)
- def __str__(self):
+ warnings.warn(
+ "Creating a LegacyVersion has been deprecated and will be "
+ "removed in the next major release",
+ DeprecationWarning,
+ )
+
+ def __str__(self) -> str:
return self._version
- def __repr__(self):
- return "<LegacyVersion({0})>".format(repr(str(self)))
+ def __repr__(self) -> str:
+ return f"<LegacyVersion('{self}')>"
@property
- def public(self):
+ def public(self) -> str:
return self._version
@property
- def base_version(self):
+ def base_version(self) -> str:
return self._version
@property
- def local(self):
+ def epoch(self) -> int:
+ return -1
+
+ @property
+ def release(self) -> None:
return None
@property
- def is_prerelease(self):
+ def pre(self) -> None:
+ return None
+
+ @property
+ def post(self) -> None:
+ return None
+
+ @property
+ def dev(self) -> None:
+ return None
+
+ @property
+ def local(self) -> None:
+ return None
+
+ @property
+ def is_prerelease(self) -> bool:
+ return False
+
+ @property
+ def is_postrelease(self) -> bool:
return False
@property
- def is_postrelease(self):
+ def is_devrelease(self) -> bool:
return False
-_legacy_version_component_re = re.compile(
- r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
-)
+_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
_legacy_version_replacement_map = {
- "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
+ "pre": "c",
+ "preview": "c",
+ "-": "final-",
+ "rc": "c",
+ "dev": "@",
}
-def _parse_version_parts(s):
+def _parse_version_parts(s: str) -> Iterator[str]:
for part in _legacy_version_component_re.split(s):
part = _legacy_version_replacement_map.get(part, part)
@@ -128,7 +193,8 @@ def _parse_version_parts(s):
yield "*final"
-def _legacy_cmpkey(version):
+def _legacy_cmpkey(version: str) -> LegacyCmpKey:
+
    # We hardcode an epoch of -1 here. A PEP 440 version can only have an epoch
    # greater than or equal to 0. This will effectively put the LegacyVersion,
    # which uses the de facto standard originally implemented by setuptools,
@@ -137,7 +203,7 @@ def _legacy_cmpkey(version):
    # This scheme is taken from pkg_resources.parse_version in setuptools prior to
    # its adoption of the packaging library.
- parts = []
+ parts: List[str] = []
for part in _parse_version_parts(version.lower()):
if part.startswith("*"):
# remove "-" before a prerelease tag
@@ -150,9 +216,9 @@ def _legacy_cmpkey(version):
parts.pop()
parts.append(part)
- parts = tuple(parts)
- return epoch, parts
+ return epoch, tuple(parts)
+
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
@@ -190,33 +256,24 @@ VERSION_PATTERN = r"""
class Version(_BaseVersion):
- _regex = re.compile(
- r"^\s*" + VERSION_PATTERN + r"\s*$",
- re.VERBOSE | re.IGNORECASE,
- )
+ _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+ def __init__(self, version: str) -> None:
- def __init__(self, version):
# Validate the version and parse it into pieces
match = self._regex.search(version)
if not match:
- raise InvalidVersion("Invalid version: '{0}'".format(version))
+ raise InvalidVersion(f"Invalid version: '{version}'")
# Store the parsed out pieces of the version
self._version = _Version(
epoch=int(match.group("epoch")) if match.group("epoch") else 0,
release=tuple(int(i) for i in match.group("release").split(".")),
- pre=_parse_letter_version(
- match.group("pre_l"),
- match.group("pre_n"),
- ),
+ pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
post=_parse_letter_version(
- match.group("post_l"),
- match.group("post_n1") or match.group("post_n2"),
- ),
- dev=_parse_letter_version(
- match.group("dev_l"),
- match.group("dev_n"),
+ match.group("post_l"), match.group("post_n1") or match.group("post_n2")
),
+ dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
local=_parse_local_version(match.group("local")),
)
@@ -230,72 +287,113 @@ class Version(_BaseVersion):
self._version.local,
)
- def __repr__(self):
- return "<Version({0})>".format(repr(str(self)))
+ def __repr__(self) -> str:
+ return f"<Version('{self}')>"
- def __str__(self):
+ def __str__(self) -> str:
parts = []
# Epoch
- if self._version.epoch != 0:
- parts.append("{0}!".format(self._version.epoch))
+ if self.epoch != 0:
+ parts.append(f"{self.epoch}!")
# Release segment
- parts.append(".".join(str(x) for x in self._version.release))
+ parts.append(".".join(str(x) for x in self.release))
# Pre-release
- if self._version.pre is not None:
- parts.append("".join(str(x) for x in self._version.pre))
+ if self.pre is not None:
+ parts.append("".join(str(x) for x in self.pre))
# Post-release
- if self._version.post is not None:
- parts.append(".post{0}".format(self._version.post[1]))
+ if self.post is not None:
+ parts.append(f".post{self.post}")
# Development release
- if self._version.dev is not None:
- parts.append(".dev{0}".format(self._version.dev[1]))
+ if self.dev is not None:
+ parts.append(f".dev{self.dev}")
# Local version segment
- if self._version.local is not None:
- parts.append(
- "+{0}".format(".".join(str(x) for x in self._version.local))
- )
+ if self.local is not None:
+ parts.append(f"+{self.local}")
return "".join(parts)
@property
- def public(self):
+ def epoch(self) -> int:
+ _epoch: int = self._version.epoch
+ return _epoch
+
+ @property
+ def release(self) -> Tuple[int, ...]:
+ _release: Tuple[int, ...] = self._version.release
+ return _release
+
+ @property
+ def pre(self) -> Optional[Tuple[str, int]]:
+ _pre: Optional[Tuple[str, int]] = self._version.pre
+ return _pre
+
+ @property
+ def post(self) -> Optional[int]:
+ return self._version.post[1] if self._version.post else None
+
+ @property
+ def dev(self) -> Optional[int]:
+ return self._version.dev[1] if self._version.dev else None
+
+ @property
+ def local(self) -> Optional[str]:
+ if self._version.local:
+ return ".".join(str(x) for x in self._version.local)
+ else:
+ return None
+
+ @property
+ def public(self) -> str:
return str(self).split("+", 1)[0]
@property
- def base_version(self):
+ def base_version(self) -> str:
parts = []
# Epoch
- if self._version.epoch != 0:
- parts.append("{0}!".format(self._version.epoch))
+ if self.epoch != 0:
+ parts.append(f"{self.epoch}!")
# Release segment
- parts.append(".".join(str(x) for x in self._version.release))
+ parts.append(".".join(str(x) for x in self.release))
return "".join(parts)
@property
- def local(self):
- version_string = str(self)
- if "+" in version_string:
- return version_string.split("+", 1)[1]
+ def is_prerelease(self) -> bool:
+ return self.dev is not None or self.pre is not None
+
+ @property
+ def is_postrelease(self) -> bool:
+ return self.post is not None
+
+ @property
+ def is_devrelease(self) -> bool:
+ return self.dev is not None
+
+ @property
+ def major(self) -> int:
+ return self.release[0] if len(self.release) >= 1 else 0
@property
- def is_prerelease(self):
- return bool(self._version.dev or self._version.pre)
+ def minor(self) -> int:
+ return self.release[1] if len(self.release) >= 2 else 0
@property
- def is_postrelease(self):
- return bool(self._version.post)
+ def micro(self) -> int:
+ return self.release[2] if len(self.release) >= 3 else 0
-def _parse_letter_version(letter, number):
+def _parse_letter_version(
+ letter: str, number: Union[str, bytes, SupportsInt]
+) -> Optional[Tuple[str, int]]:
+
if letter:
# We consider there to be an implicit 0 in a pre-release if there is
# not a numeral associated with it.
@@ -325,34 +423,40 @@ def _parse_letter_version(letter, number):
return letter, int(number)
+ return None
+
-_local_version_seperators = re.compile(r"[\._-]")
+_local_version_separators = re.compile(r"[\._-]")
-def _parse_local_version(local):
+def _parse_local_version(local: str) -> Optional[LocalType]:
"""
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
"""
if local is not None:
return tuple(
part.lower() if not part.isdigit() else int(part)
- for part in _local_version_seperators.split(local)
+ for part in _local_version_separators.split(local)
)
+ return None
+
+def _cmpkey(
+ epoch: int,
+ release: Tuple[int, ...],
+ pre: Optional[Tuple[str, int]],
+ post: Optional[Tuple[str, int]],
+ dev: Optional[Tuple[str, int]],
+ local: Optional[Tuple[SubLocalType]],
+) -> CmpKey:
-def _cmpkey(epoch, release, pre, post, dev, local):
# When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll reverse the list, drop all of the now-leading
    # zeros until we come to something non-zero, then re-reverse the rest back into
    # the correct order, make it a tuple and use
# that for our sorting key.
- release = tuple(
- reversed(list(
- itertools.dropwhile(
- lambda x: x == 0,
- reversed(release),
- )
- ))
+ _release = tuple(
+ reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
)
# We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
@@ -360,23 +464,31 @@ def _cmpkey(epoch, release, pre, post, dev, local):
# if there is not a pre or a post segment. If we have one of those then
# the normal sorting rules will handle this case correctly.
if pre is None and post is None and dev is not None:
- pre = -Infinity
+ _pre: PrePostDevType = NegativeInfinity
# Versions without a pre-release (except as noted above) should sort after
# those with one.
elif pre is None:
- pre = Infinity
+ _pre = Infinity
+ else:
+ _pre = pre
# Versions without a post segment should sort before those with one.
if post is None:
- post = -Infinity
+ _post: PrePostDevType = NegativeInfinity
+
+ else:
+ _post = post
# Versions without a development segment should sort after those with one.
if dev is None:
- dev = Infinity
+ _dev: PrePostDevType = Infinity
+
+ else:
+ _dev = dev
if local is None:
# Versions without a local segment should sort before those with one.
- local = -Infinity
+ _local: LocalType = NegativeInfinity
else:
# Versions with a local segment need that segment parsed to implement
# the sorting rules in PEP440.
@@ -385,9 +497,8 @@ def _cmpkey(epoch, release, pre, post, dev, local):
# - Numeric segments sort numerically
# - Shorter versions sort before longer versions when the prefixes
# match exactly
- local = tuple(
- (i, "") if isinstance(i, int) else (-Infinity, i)
- for i in local
+ _local = tuple(
+ (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
)
- return epoch, release, pre, post, dev, local
+ return epoch, _release, _pre, _post, _dev, _local
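
To illustrate the typed Version API above (including the new major/minor/micro properties and the LegacyVersion deprecation warning), here is a minimal sketch against the standalone packaging distribution; the version strings are made-up examples.

    from packaging.version import Version, parse

    v = Version("1.4.0.post2.dev1+ubuntu.1")
    print(v.release, v.major, v.minor, v.micro)   # (1, 4, 0) 1 4 0
    print(v.post, v.dev, v.local)                 # 2 1 ubuntu.1
    print(v.public, v.base_version)               # 1.4.0.post2.dev1 1.4.0
    print(Version("1.0a1") < Version("1.0"))      # True: pre-releases sort before the final release

    # Non-PEP 440 strings still fall back to LegacyVersion, but constructing one
    # now emits a DeprecationWarning.
    legacy = parse("latest")
    print(type(legacy).__name__, legacy.epoch)    # LegacyVersion -1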
diff --git a/setuptools/_vendor/pyparsing-2.2.1.dist-info/LICENSE.txt b/setuptools/_vendor/pyparsing-2.2.1.dist-info/LICENSE.txt
new file mode 100644
index 0000000..bbc959e
--- /dev/null
+++ b/setuptools/_vendor/pyparsing-2.2.1.dist-info/LICENSE.txt
@@ -0,0 +1,18 @@
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/setuptools/_vendor/pyparsing-2.2.1.dist-info/top_level.txt b/setuptools/_vendor/pyparsing-2.2.1.dist-info/top_level.txt
new file mode 100644
index 0000000..210dfec
--- /dev/null
+++ b/setuptools/_vendor/pyparsing-2.2.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+pyparsing
diff --git a/setuptools/_vendor/pyparsing.py b/setuptools/_vendor/pyparsing.py
index a212243..cf75e1e 100644
--- a/setuptools/_vendor/pyparsing.py
+++ b/setuptools/_vendor/pyparsing.py
@@ -1,6 +1,6 @@
# module pyparsing.py
#
-# Copyright (c) 2003-2016 Paul T. McGuire
+# Copyright (c) 2003-2018 Paul T. McGuire
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@@ -25,6 +25,7 @@
__doc__ = \
"""
pyparsing module - Classes and methods to define and execute parsing grammars
+=============================================================================
The pyparsing module is an alternative approach to creating and executing simple grammars,
vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you
@@ -58,10 +59,23 @@ The pyparsing module handles some of the problems that are typically vexing when
- extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.)
- quoted strings
- embedded comments
+
+
+Getting Started -
+-----------------
+Visit the classes L{ParserElement} and L{ParseResults} to see the base classes that most other pyparsing
+classes inherit from. Use the docstrings for examples of how to:
+ - construct literal match expressions from L{Literal} and L{CaselessLiteral} classes
+ - construct character word-group expressions using the L{Word} class
+ - see how to create repetitive expressions using L{ZeroOrMore} and L{OneOrMore} classes
+ - use L{'+'<And>}, L{'|'<MatchFirst>}, L{'^'<Or>}, and L{'&'<Each>} operators to combine simple expressions into more complex ones
+ - associate names with your parsed results using L{ParserElement.setResultsName}
+ - find some helpful expression short-cuts like L{delimitedList} and L{oneOf}
+ - find more useful common expressions in the L{pyparsing_common} namespace class
"""
-__version__ = "2.1.10"
-__versionTime__ = "07 Oct 2016 01:31 UTC"
+__version__ = "2.2.1"
+__versionTime__ = "18 Sep 2018 00:49 UTC"
__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>"
import string
@@ -83,6 +97,15 @@ except ImportError:
from threading import RLock
try:
+ # Python 3
+ from collections.abc import Iterable
+ from collections.abc import MutableMapping
+except ImportError:
+ # Python 2.7
+ from collections import Iterable
+ from collections import MutableMapping
+
+try:
from collections import OrderedDict as _OrderedDict
except ImportError:
try:
@@ -144,7 +167,7 @@ else:
except UnicodeEncodeError:
# Else encode it
ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace')
- xmlcharref = Regex('&#\d+;')
+ xmlcharref = Regex(r'&#\d+;')
xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:])
return xmlcharref.transformString(ret)
@@ -809,7 +832,7 @@ class ParseResults(object):
return None
def getName(self):
- """
+ r"""
Returns the results name for this token expression. Useful when several
different expressions might match at a particular location.
@@ -940,7 +963,7 @@ class ParseResults(object):
def __dir__(self):
return (dir(type(self)) + list(self.keys()))
-collections.MutableMapping.register(ParseResults)
+MutableMapping.register(ParseResults)
def col (loc,strg):
"""Returns current column within a string, counting newlines as line separators.
@@ -1025,11 +1048,11 @@ def _trim_arity(func, maxargs=2):
# special handling for Python 3.5.0 - extra deep call stack by 1
offset = -3 if system_version == (3,5,0) else -2
frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset]
- return [(frame_summary.filename, frame_summary.lineno)]
+ return [frame_summary[:2]]
def extract_tb(tb, limit=0):
frames = traceback.extract_tb(tb, limit=limit)
frame_summary = frames[-1]
- return [(frame_summary.filename, frame_summary.lineno)]
+ return [frame_summary[:2]]
else:
extract_stack = traceback.extract_stack
extract_tb = traceback.extract_tb
@@ -1226,7 +1249,7 @@ class ParserElement(object):
def setParseAction( self, *fns, **kwargs ):
"""
- Define action to perform when successfully matching parse element definition.
+ Define one or more actions to perform when successfully matching parse element definition.
Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)},
C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where:
- s = the original string being parsed (see note below)
@@ -1264,7 +1287,7 @@ class ParserElement(object):
def addParseAction( self, *fns, **kwargs ):
"""
- Add parse action to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}.
+ Add one or more parse actions to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}.
See examples in L{I{copy}<copy>}.
"""
@@ -1374,7 +1397,7 @@ class ParserElement(object):
else:
preloc = loc
tokensStart = preloc
- if self.mayIndexError or loc >= len(instring):
+ if self.mayIndexError or preloc >= len(instring):
try:
loc,tokens = self.parseImpl( instring, preloc, doActions )
except IndexError:
@@ -1408,7 +1431,6 @@ class ParserElement(object):
self.resultsName,
asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
modal=self.modalResults )
-
if debugging:
#~ print ("Matched",self,"->",retTokens.asList())
if (self.debugActions[1] ):
@@ -1443,10 +1465,14 @@ class ParserElement(object):
def clear(self):
cache.clear()
+
+ def cache_len(self):
+ return len(cache)
self.get = types.MethodType(get, self)
self.set = types.MethodType(set, self)
self.clear = types.MethodType(clear, self)
+ self.__len__ = types.MethodType(cache_len, self)
if _OrderedDict is not None:
class _FifoCache(object):
@@ -1460,15 +1486,22 @@ class ParserElement(object):
def set(self, key, value):
cache[key] = value
- if len(cache) > size:
- cache.popitem(False)
+ while len(cache) > size:
+ try:
+ cache.popitem(False)
+ except KeyError:
+ pass
def clear(self):
cache.clear()
+ def cache_len(self):
+ return len(cache)
+
self.get = types.MethodType(get, self)
self.set = types.MethodType(set, self)
self.clear = types.MethodType(clear, self)
+ self.__len__ = types.MethodType(cache_len, self)
else:
class _FifoCache(object):
@@ -1483,7 +1516,7 @@ class ParserElement(object):
def set(self, key, value):
cache[key] = value
- if len(cache) > size:
+ while len(key_fifo) > size:
cache.pop(key_fifo.popleft(), None)
key_fifo.append(key)
@@ -1491,9 +1524,13 @@ class ParserElement(object):
cache.clear()
key_fifo.clear()
+ def cache_len(self):
+ return len(cache)
+
self.get = types.MethodType(get, self)
self.set = types.MethodType(set, self)
self.clear = types.MethodType(clear, self)
+ self.__len__ = types.MethodType(cache_len, self)
# argument cache for optimizing repeated calls when backtracking through recursive expressions
packrat_cache = {} # this is set later by enabledPackrat(); this is here so that resetCache() doesn't fail
@@ -1743,8 +1780,12 @@ class ParserElement(object):
cap_word = Word(alphas.upper(), alphas.lower())
print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))
+
+ # the sum() builtin can be used to merge results into a single ParseResults object
+ print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")))
prints::
- ['More', 'Iron', 'Lead', 'Gold', 'I']
+ [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']]
+ ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity']
"""
try:
return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ])
@@ -1819,7 +1860,7 @@ class ParserElement(object):
warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
SyntaxWarning, stacklevel=2)
return None
- return And( [ self, And._ErrorStop(), other ] )
+ return self + And._ErrorStop() + other
def __rsub__(self, other ):
"""
@@ -2722,7 +2763,7 @@ class Word(Token):
class Regex(Token):
- """
+ r"""
Token for matching strings that match a given regular expression.
Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module.
If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as
@@ -2911,7 +2952,7 @@ class QuotedString(Token):
# replace escaped characters
if self.escChar:
- ret = re.sub(self.escCharReplacePattern,"\g<1>",ret)
+ ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret)
# replace escaped quotes
if self.escQuote:
@@ -3223,7 +3264,7 @@ class ParseExpression(ParserElement):
if isinstance( exprs, basestring ):
self.exprs = [ ParserElement._literalStringClass( exprs ) ]
- elif isinstance( exprs, collections.Iterable ):
+ elif isinstance( exprs, Iterable ):
exprs = list(exprs)
# if sequence of strings provided, wrap with Literal
if all(isinstance(expr, basestring) for expr in exprs):
@@ -4374,7 +4415,7 @@ def traceParseAction(f):
@traceParseAction
def remove_duplicate_chars(tokens):
- return ''.join(sorted(set(''.join(tokens)))
+ return ''.join(sorted(set(''.join(tokens))))
wds = OneOrMore(wd).setParseAction(remove_duplicate_chars)
print(wds.parseString("slkdjs sld sldd sdlf sdljf"))
@@ -4564,7 +4605,7 @@ def oneOf( strs, caseless=False, useRegex=True ):
symbols = []
if isinstance(strs,basestring):
symbols = strs.split()
- elif isinstance(strs, collections.Iterable):
+ elif isinstance(strs, Iterable):
symbols = list(strs)
else:
warnings.warn("Invalid argument to oneOf, expected string or iterable",
@@ -4715,7 +4756,7 @@ stringEnd = StringEnd().setName("stringEnd")
_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1])
_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16)))
_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8)))
-_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | Word(printables, excludeChars=r'\]', exact=1) | Regex(r"\w", re.UNICODE)
+_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1)
_charRange = Group(_singleChar + Suppress("-") + _singleChar)
_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]"
@@ -5020,7 +5061,9 @@ def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ):
constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}.
- parseAction is the parse action to be associated with
expressions matching this operator expression (the
- parse action tuple member may be omitted)
+ parse action tuple member may be omitted); if the parse action
+ is passed a tuple or list of functions, this is equivalent to
+ calling C{setParseAction(*fn)} (L{ParserElement.setParseAction})
- lpar - expression for matching left-parentheses (default=C{Suppress('(')})
- rpar - expression for matching right-parentheses (default=C{Suppress(')')})
@@ -5093,7 +5136,10 @@ def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ):
else:
raise ValueError("operator must indicate right or left associativity")
if pa:
- matchExpr.setParseAction( pa )
+ if isinstance(pa, (tuple, list)):
+ matchExpr.setParseAction(*pa)
+ else:
+ matchExpr.setParseAction(pa)
thisExpr <<= ( matchExpr.setName(termName) | lastExpr )
lastExpr = thisExpr
ret <<= lastExpr
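
The updated searchString docstring above can be reproduced with the standalone pyparsing 2.2.x distribution (the vendored setuptools/_vendor/pyparsing.py is internal); a small sketch:

    from pyparsing import Word, alphas

    cap_word = Word(alphas.upper(), alphas.lower())
    hits = cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")
    print(hits)       # [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']]
    print(sum(hits))  # the sum() builtin merges them: ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity']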
diff --git a/setuptools/_vendor/six.py b/setuptools/_vendor/six.py
deleted file mode 100644
index 190c023..0000000
--- a/setuptools/_vendor/six.py
+++ /dev/null
@@ -1,868 +0,0 @@
-"""Utilities for writing code that runs on Python 2 and 3"""
-
-# Copyright (c) 2010-2015 Benjamin Peterson
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in all
-# copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-
-from __future__ import absolute_import
-
-import functools
-import itertools
-import operator
-import sys
-import types
-
-__author__ = "Benjamin Peterson <benjamin@python.org>"
-__version__ = "1.10.0"
-
-
-# Useful for very coarse version differentiation.
-PY2 = sys.version_info[0] == 2
-PY3 = sys.version_info[0] == 3
-PY34 = sys.version_info[0:2] >= (3, 4)
-
-if PY3:
- string_types = str,
- integer_types = int,
- class_types = type,
- text_type = str
- binary_type = bytes
-
- MAXSIZE = sys.maxsize
-else:
- string_types = basestring,
- integer_types = (int, long)
- class_types = (type, types.ClassType)
- text_type = unicode
- binary_type = str
-
- if sys.platform.startswith("java"):
- # Jython always uses 32 bits.
- MAXSIZE = int((1 << 31) - 1)
- else:
- # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
- class X(object):
-
- def __len__(self):
- return 1 << 31
- try:
- len(X())
- except OverflowError:
- # 32-bit
- MAXSIZE = int((1 << 31) - 1)
- else:
- # 64-bit
- MAXSIZE = int((1 << 63) - 1)
- del X
-
-
-def _add_doc(func, doc):
- """Add documentation to a function."""
- func.__doc__ = doc
-
-
-def _import_module(name):
- """Import module, returning the module after the last dot."""
- __import__(name)
- return sys.modules[name]
-
-
-class _LazyDescr(object):
-
- def __init__(self, name):
- self.name = name
-
- def __get__(self, obj, tp):
- result = self._resolve()
- setattr(obj, self.name, result) # Invokes __set__.
- try:
- # This is a bit ugly, but it avoids running this again by
- # removing this descriptor.
- delattr(obj.__class__, self.name)
- except AttributeError:
- pass
- return result
-
-
-class MovedModule(_LazyDescr):
-
- def __init__(self, name, old, new=None):
- super(MovedModule, self).__init__(name)
- if PY3:
- if new is None:
- new = name
- self.mod = new
- else:
- self.mod = old
-
- def _resolve(self):
- return _import_module(self.mod)
-
- def __getattr__(self, attr):
- _module = self._resolve()
- value = getattr(_module, attr)
- setattr(self, attr, value)
- return value
-
-
-class _LazyModule(types.ModuleType):
-
- def __init__(self, name):
- super(_LazyModule, self).__init__(name)
- self.__doc__ = self.__class__.__doc__
-
- def __dir__(self):
- attrs = ["__doc__", "__name__"]
- attrs += [attr.name for attr in self._moved_attributes]
- return attrs
-
- # Subclasses should override this
- _moved_attributes = []
-
-
-class MovedAttribute(_LazyDescr):
-
- def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
- super(MovedAttribute, self).__init__(name)
- if PY3:
- if new_mod is None:
- new_mod = name
- self.mod = new_mod
- if new_attr is None:
- if old_attr is None:
- new_attr = name
- else:
- new_attr = old_attr
- self.attr = new_attr
- else:
- self.mod = old_mod
- if old_attr is None:
- old_attr = name
- self.attr = old_attr
-
- def _resolve(self):
- module = _import_module(self.mod)
- return getattr(module, self.attr)
-
-
-class _SixMetaPathImporter(object):
-
- """
- A meta path importer to import six.moves and its submodules.
-
- This class implements a PEP302 finder and loader. It should be compatible
- with Python 2.5 and all existing versions of Python3
- """
-
- def __init__(self, six_module_name):
- self.name = six_module_name
- self.known_modules = {}
-
- def _add_module(self, mod, *fullnames):
- for fullname in fullnames:
- self.known_modules[self.name + "." + fullname] = mod
-
- def _get_module(self, fullname):
- return self.known_modules[self.name + "." + fullname]
-
- def find_module(self, fullname, path=None):
- if fullname in self.known_modules:
- return self
- return None
-
- def __get_module(self, fullname):
- try:
- return self.known_modules[fullname]
- except KeyError:
- raise ImportError("This loader does not know module " + fullname)
-
- def load_module(self, fullname):
- try:
- # in case of a reload
- return sys.modules[fullname]
- except KeyError:
- pass
- mod = self.__get_module(fullname)
- if isinstance(mod, MovedModule):
- mod = mod._resolve()
- else:
- mod.__loader__ = self
- sys.modules[fullname] = mod
- return mod
-
- def is_package(self, fullname):
- """
- Return true, if the named module is a package.
-
- We need this method to get correct spec objects with
- Python 3.4 (see PEP451)
- """
- return hasattr(self.__get_module(fullname), "__path__")
-
- def get_code(self, fullname):
- """Return None
-
- Required, if is_package is implemented"""
- self.__get_module(fullname) # eventually raises ImportError
- return None
- get_source = get_code # same as get_code
-
-_importer = _SixMetaPathImporter(__name__)
-
-
-class _MovedItems(_LazyModule):
-
- """Lazy loading of moved objects"""
- __path__ = [] # mark as package
-
-
-_moved_attributes = [
- MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
- MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
- MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
- MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
- MovedAttribute("intern", "__builtin__", "sys"),
- MovedAttribute("map", "itertools", "builtins", "imap", "map"),
- MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
- MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
- MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
- MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
- MovedAttribute("reduce", "__builtin__", "functools"),
- MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
- MovedAttribute("StringIO", "StringIO", "io"),
- MovedAttribute("UserDict", "UserDict", "collections"),
- MovedAttribute("UserList", "UserList", "collections"),
- MovedAttribute("UserString", "UserString", "collections"),
- MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
- MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
- MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
- MovedModule("builtins", "__builtin__"),
- MovedModule("configparser", "ConfigParser"),
- MovedModule("copyreg", "copy_reg"),
- MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
- MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
- MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
- MovedModule("http_cookies", "Cookie", "http.cookies"),
- MovedModule("html_entities", "htmlentitydefs", "html.entities"),
- MovedModule("html_parser", "HTMLParser", "html.parser"),
- MovedModule("http_client", "httplib", "http.client"),
- MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
- MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
- MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
- MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
- MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
- MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
- MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
- MovedModule("cPickle", "cPickle", "pickle"),
- MovedModule("queue", "Queue"),
- MovedModule("reprlib", "repr"),
- MovedModule("socketserver", "SocketServer"),
- MovedModule("_thread", "thread", "_thread"),
- MovedModule("tkinter", "Tkinter"),
- MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
- MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
- MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
- MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
- MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
- MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
- MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
- MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
- MovedModule("tkinter_colorchooser", "tkColorChooser",
- "tkinter.colorchooser"),
- MovedModule("tkinter_commondialog", "tkCommonDialog",
- "tkinter.commondialog"),
- MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
- MovedModule("tkinter_font", "tkFont", "tkinter.font"),
- MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
- MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
- "tkinter.simpledialog"),
- MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
- MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
- MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
- MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
- MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
- MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
-]
-# Add windows specific modules.
-if sys.platform == "win32":
- _moved_attributes += [
- MovedModule("winreg", "_winreg"),
- ]
-
-for attr in _moved_attributes:
- setattr(_MovedItems, attr.name, attr)
- if isinstance(attr, MovedModule):
- _importer._add_module(attr, "moves." + attr.name)
-del attr
-
-_MovedItems._moved_attributes = _moved_attributes
-
-moves = _MovedItems(__name__ + ".moves")
-_importer._add_module(moves, "moves")
-
-
-class Module_six_moves_urllib_parse(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_parse"""
-
-
-_urllib_parse_moved_attributes = [
- MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
- MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
- MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
- MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
- MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
- MovedAttribute("urljoin", "urlparse", "urllib.parse"),
- MovedAttribute("urlparse", "urlparse", "urllib.parse"),
- MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
- MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
- MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
- MovedAttribute("quote", "urllib", "urllib.parse"),
- MovedAttribute("quote_plus", "urllib", "urllib.parse"),
- MovedAttribute("unquote", "urllib", "urllib.parse"),
- MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
- MovedAttribute("urlencode", "urllib", "urllib.parse"),
- MovedAttribute("splitquery", "urllib", "urllib.parse"),
- MovedAttribute("splittag", "urllib", "urllib.parse"),
- MovedAttribute("splituser", "urllib", "urllib.parse"),
- MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
- MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
- MovedAttribute("uses_params", "urlparse", "urllib.parse"),
- MovedAttribute("uses_query", "urlparse", "urllib.parse"),
- MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
-]
-for attr in _urllib_parse_moved_attributes:
- setattr(Module_six_moves_urllib_parse, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
- "moves.urllib_parse", "moves.urllib.parse")
-
-
-class Module_six_moves_urllib_error(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_error"""
-
-
-_urllib_error_moved_attributes = [
- MovedAttribute("URLError", "urllib2", "urllib.error"),
- MovedAttribute("HTTPError", "urllib2", "urllib.error"),
- MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
-]
-for attr in _urllib_error_moved_attributes:
- setattr(Module_six_moves_urllib_error, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
- "moves.urllib_error", "moves.urllib.error")
-
-
-class Module_six_moves_urllib_request(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_request"""
-
-
-_urllib_request_moved_attributes = [
- MovedAttribute("urlopen", "urllib2", "urllib.request"),
- MovedAttribute("install_opener", "urllib2", "urllib.request"),
- MovedAttribute("build_opener", "urllib2", "urllib.request"),
- MovedAttribute("pathname2url", "urllib", "urllib.request"),
- MovedAttribute("url2pathname", "urllib", "urllib.request"),
- MovedAttribute("getproxies", "urllib", "urllib.request"),
- MovedAttribute("Request", "urllib2", "urllib.request"),
- MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
- MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
- MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
- MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
- MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
- MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
- MovedAttribute("FileHandler", "urllib2", "urllib.request"),
- MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
- MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
- MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
- MovedAttribute("urlretrieve", "urllib", "urllib.request"),
- MovedAttribute("urlcleanup", "urllib", "urllib.request"),
- MovedAttribute("URLopener", "urllib", "urllib.request"),
- MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
- MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
-]
-for attr in _urllib_request_moved_attributes:
- setattr(Module_six_moves_urllib_request, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
- "moves.urllib_request", "moves.urllib.request")
-
-
-class Module_six_moves_urllib_response(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_response"""
-
-
-_urllib_response_moved_attributes = [
- MovedAttribute("addbase", "urllib", "urllib.response"),
- MovedAttribute("addclosehook", "urllib", "urllib.response"),
- MovedAttribute("addinfo", "urllib", "urllib.response"),
- MovedAttribute("addinfourl", "urllib", "urllib.response"),
-]
-for attr in _urllib_response_moved_attributes:
- setattr(Module_six_moves_urllib_response, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
- "moves.urllib_response", "moves.urllib.response")
-
-
-class Module_six_moves_urllib_robotparser(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_robotparser"""
-
-
-_urllib_robotparser_moved_attributes = [
- MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
-]
-for attr in _urllib_robotparser_moved_attributes:
- setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
- "moves.urllib_robotparser", "moves.urllib.robotparser")
-
-
-class Module_six_moves_urllib(types.ModuleType):
-
- """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
- __path__ = [] # mark as package
- parse = _importer._get_module("moves.urllib_parse")
- error = _importer._get_module("moves.urllib_error")
- request = _importer._get_module("moves.urllib_request")
- response = _importer._get_module("moves.urllib_response")
- robotparser = _importer._get_module("moves.urllib_robotparser")
-
- def __dir__(self):
- return ['parse', 'error', 'request', 'response', 'robotparser']
-
-_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
- "moves.urllib")
-
-
-def add_move(move):
- """Add an item to six.moves."""
- setattr(_MovedItems, move.name, move)
-
-
-def remove_move(name):
- """Remove item from six.moves."""
- try:
- delattr(_MovedItems, name)
- except AttributeError:
- try:
- del moves.__dict__[name]
- except KeyError:
- raise AttributeError("no such move, %r" % (name,))
-
-
-if PY3:
- _meth_func = "__func__"
- _meth_self = "__self__"
-
- _func_closure = "__closure__"
- _func_code = "__code__"
- _func_defaults = "__defaults__"
- _func_globals = "__globals__"
-else:
- _meth_func = "im_func"
- _meth_self = "im_self"
-
- _func_closure = "func_closure"
- _func_code = "func_code"
- _func_defaults = "func_defaults"
- _func_globals = "func_globals"
-
-
-try:
- advance_iterator = next
-except NameError:
- def advance_iterator(it):
- return it.next()
-next = advance_iterator
-
-
-try:
- callable = callable
-except NameError:
- def callable(obj):
- return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
-
-
-if PY3:
- def get_unbound_function(unbound):
- return unbound
-
- create_bound_method = types.MethodType
-
- def create_unbound_method(func, cls):
- return func
-
- Iterator = object
-else:
- def get_unbound_function(unbound):
- return unbound.im_func
-
- def create_bound_method(func, obj):
- return types.MethodType(func, obj, obj.__class__)
-
- def create_unbound_method(func, cls):
- return types.MethodType(func, None, cls)
-
- class Iterator(object):
-
- def next(self):
- return type(self).__next__(self)
-
- callable = callable
-_add_doc(get_unbound_function,
- """Get the function out of a possibly unbound function""")
-
-
-get_method_function = operator.attrgetter(_meth_func)
-get_method_self = operator.attrgetter(_meth_self)
-get_function_closure = operator.attrgetter(_func_closure)
-get_function_code = operator.attrgetter(_func_code)
-get_function_defaults = operator.attrgetter(_func_defaults)
-get_function_globals = operator.attrgetter(_func_globals)
-
-
-if PY3:
- def iterkeys(d, **kw):
- return iter(d.keys(**kw))
-
- def itervalues(d, **kw):
- return iter(d.values(**kw))
-
- def iteritems(d, **kw):
- return iter(d.items(**kw))
-
- def iterlists(d, **kw):
- return iter(d.lists(**kw))
-
- viewkeys = operator.methodcaller("keys")
-
- viewvalues = operator.methodcaller("values")
-
- viewitems = operator.methodcaller("items")
-else:
- def iterkeys(d, **kw):
- return d.iterkeys(**kw)
-
- def itervalues(d, **kw):
- return d.itervalues(**kw)
-
- def iteritems(d, **kw):
- return d.iteritems(**kw)
-
- def iterlists(d, **kw):
- return d.iterlists(**kw)
-
- viewkeys = operator.methodcaller("viewkeys")
-
- viewvalues = operator.methodcaller("viewvalues")
-
- viewitems = operator.methodcaller("viewitems")
-
-_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
-_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
-_add_doc(iteritems,
- "Return an iterator over the (key, value) pairs of a dictionary.")
-_add_doc(iterlists,
- "Return an iterator over the (key, [values]) pairs of a dictionary.")
-
-
-if PY3:
- def b(s):
- return s.encode("latin-1")
-
- def u(s):
- return s
- unichr = chr
- import struct
- int2byte = struct.Struct(">B").pack
- del struct
- byte2int = operator.itemgetter(0)
- indexbytes = operator.getitem
- iterbytes = iter
- import io
- StringIO = io.StringIO
- BytesIO = io.BytesIO
- _assertCountEqual = "assertCountEqual"
- if sys.version_info[1] <= 1:
- _assertRaisesRegex = "assertRaisesRegexp"
- _assertRegex = "assertRegexpMatches"
- else:
- _assertRaisesRegex = "assertRaisesRegex"
- _assertRegex = "assertRegex"
-else:
- def b(s):
- return s
- # Workaround for standalone backslash
-
- def u(s):
- return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
- unichr = unichr
- int2byte = chr
-
- def byte2int(bs):
- return ord(bs[0])
-
- def indexbytes(buf, i):
- return ord(buf[i])
- iterbytes = functools.partial(itertools.imap, ord)
- import StringIO
- StringIO = BytesIO = StringIO.StringIO
- _assertCountEqual = "assertItemsEqual"
- _assertRaisesRegex = "assertRaisesRegexp"
- _assertRegex = "assertRegexpMatches"
-_add_doc(b, """Byte literal""")
-_add_doc(u, """Text literal""")
-
-
-def assertCountEqual(self, *args, **kwargs):
- return getattr(self, _assertCountEqual)(*args, **kwargs)
-
-
-def assertRaisesRegex(self, *args, **kwargs):
- return getattr(self, _assertRaisesRegex)(*args, **kwargs)
-
-
-def assertRegex(self, *args, **kwargs):
- return getattr(self, _assertRegex)(*args, **kwargs)
-
-
-if PY3:
- exec_ = getattr(moves.builtins, "exec")
-
- def reraise(tp, value, tb=None):
- if value is None:
- value = tp()
- if value.__traceback__ is not tb:
- raise value.with_traceback(tb)
- raise value
-
-else:
- def exec_(_code_, _globs_=None, _locs_=None):
- """Execute code in a namespace."""
- if _globs_ is None:
- frame = sys._getframe(1)
- _globs_ = frame.f_globals
- if _locs_ is None:
- _locs_ = frame.f_locals
- del frame
- elif _locs_ is None:
- _locs_ = _globs_
- exec("""exec _code_ in _globs_, _locs_""")
-
- exec_("""def reraise(tp, value, tb=None):
- raise tp, value, tb
-""")
-
-
-if sys.version_info[:2] == (3, 2):
- exec_("""def raise_from(value, from_value):
- if from_value is None:
- raise value
- raise value from from_value
-""")
-elif sys.version_info[:2] > (3, 2):
- exec_("""def raise_from(value, from_value):
- raise value from from_value
-""")
-else:
- def raise_from(value, from_value):
- raise value
-
-
-print_ = getattr(moves.builtins, "print", None)
-if print_ is None:
- def print_(*args, **kwargs):
- """The new-style print function for Python 2.4 and 2.5."""
- fp = kwargs.pop("file", sys.stdout)
- if fp is None:
- return
-
- def write(data):
- if not isinstance(data, basestring):
- data = str(data)
- # If the file has an encoding, encode unicode with it.
- if (isinstance(fp, file) and
- isinstance(data, unicode) and
- fp.encoding is not None):
- errors = getattr(fp, "errors", None)
- if errors is None:
- errors = "strict"
- data = data.encode(fp.encoding, errors)
- fp.write(data)
- want_unicode = False
- sep = kwargs.pop("sep", None)
- if sep is not None:
- if isinstance(sep, unicode):
- want_unicode = True
- elif not isinstance(sep, str):
- raise TypeError("sep must be None or a string")
- end = kwargs.pop("end", None)
- if end is not None:
- if isinstance(end, unicode):
- want_unicode = True
- elif not isinstance(end, str):
- raise TypeError("end must be None or a string")
- if kwargs:
- raise TypeError("invalid keyword arguments to print()")
- if not want_unicode:
- for arg in args:
- if isinstance(arg, unicode):
- want_unicode = True
- break
- if want_unicode:
- newline = unicode("\n")
- space = unicode(" ")
- else:
- newline = "\n"
- space = " "
- if sep is None:
- sep = space
- if end is None:
- end = newline
- for i, arg in enumerate(args):
- if i:
- write(sep)
- write(arg)
- write(end)
-if sys.version_info[:2] < (3, 3):
- _print = print_
-
- def print_(*args, **kwargs):
- fp = kwargs.get("file", sys.stdout)
- flush = kwargs.pop("flush", False)
- _print(*args, **kwargs)
- if flush and fp is not None:
- fp.flush()
-
-_add_doc(reraise, """Reraise an exception.""")
-
-if sys.version_info[0:2] < (3, 4):
- def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
- updated=functools.WRAPPER_UPDATES):
- def wrapper(f):
- f = functools.wraps(wrapped, assigned, updated)(f)
- f.__wrapped__ = wrapped
- return f
- return wrapper
-else:
- wraps = functools.wraps
-
-
-def with_metaclass(meta, *bases):
- """Create a base class with a metaclass."""
- # This requires a bit of explanation: the basic idea is to make a dummy
- # metaclass for one level of class instantiation that replaces itself with
- # the actual metaclass.
- class metaclass(meta):
-
- def __new__(cls, name, this_bases, d):
- return meta(name, bases, d)
- return type.__new__(metaclass, 'temporary_class', (), {})
-
-
-def add_metaclass(metaclass):
- """Class decorator for creating a class with a metaclass."""
- def wrapper(cls):
- orig_vars = cls.__dict__.copy()
- slots = orig_vars.get('__slots__')
- if slots is not None:
- if isinstance(slots, str):
- slots = [slots]
- for slots_var in slots:
- orig_vars.pop(slots_var)
- orig_vars.pop('__dict__', None)
- orig_vars.pop('__weakref__', None)
- return metaclass(cls.__name__, cls.__bases__, orig_vars)
- return wrapper
-
-
-def python_2_unicode_compatible(klass):
- """
- A decorator that defines __unicode__ and __str__ methods under Python 2.
- Under Python 3 it does nothing.
-
- To support Python 2 and 3 with a single code base, define a __str__ method
- returning text and apply this decorator to the class.
- """
- if PY2:
- if '__str__' not in klass.__dict__:
- raise ValueError("@python_2_unicode_compatible cannot be applied "
- "to %s because it doesn't define __str__()." %
- klass.__name__)
- klass.__unicode__ = klass.__str__
- klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
- return klass
-
-
-# Complete the moves implementation.
-# This code is at the end of this module to speed up module loading.
-# Turn this module into a package.
-__path__ = [] # required for PEP 302 and PEP 451
-__package__ = __name__ # see PEP 366 @ReservedAssignment
-if globals().get("__spec__") is not None:
- __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
-# Remove other six meta path importers, since they cause problems. This can
-# happen if six is removed from sys.modules and then reloaded. (Setuptools does
-# this for some reason.)
-if sys.meta_path:
- for i, importer in enumerate(sys.meta_path):
- # Here's some real nastiness: Another "instance" of the six module might
- # be floating around. Therefore, we can't use isinstance() to check for
- # the six meta path importer, since the other six instance will have
- # inserted an importer with different class.
- if (type(importer).__name__ == "_SixMetaPathImporter" and
- importer.name == __name__):
- del sys.meta_path[i]
- break
- del i, importer
-# Finally, add the importer to the meta path import hook.
-sys.meta_path.append(_importer)
diff --git a/setuptools/_vendor/tomli/__init__.py b/setuptools/_vendor/tomli/__init__.py
new file mode 100644
index 0000000..4c6ec97
--- /dev/null
+++ b/setuptools/_vendor/tomli/__init__.py
@@ -0,0 +1,11 @@
+# SPDX-License-Identifier: MIT
+# SPDX-FileCopyrightText: 2021 Taneli Hukkinen
+# Licensed to PSF under a Contributor Agreement.
+
+__all__ = ("loads", "load", "TOMLDecodeError")
+__version__ = "2.0.1" # DO NOT EDIT THIS LINE MANUALLY. LET bump2version UTILITY DO IT
+
+from ._parser import TOMLDecodeError, load, loads
+
+# Pretend this exception was created here.
+TOMLDecodeError.__module__ = __name__
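(The newly vendored tomli package exposes loads(), load() and TOMLDecodeError, mirroring the public tomli 2.0.1 API. A minimal usage sketch follows; it assumes the vendored copy is importable as plain "tomli", whereas inside setuptools it is normally reached through the vendoring/extern machinery instead.)

    import tomli  # assumption: vendored copy importable under the plain name "tomli"

    # loads() parses TOML from a str into plain dicts/lists/scalars.
    doc = tomli.loads('title = "example"\n[owner]\nname = "tom"')
    assert doc == {"title": "example", "owner": {"name": "tom"}}

    # load() requires a binary file object; a text-mode file raises TypeError
    # (see the check in _parser.load below).
    # with open("pyproject.toml", "rb") as f:
    #     data = tomli.load(f)
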
diff --git a/setuptools/_vendor/tomli/_parser.py b/setuptools/_vendor/tomli/_parser.py
new file mode 100644
index 0000000..f1bb0aa
--- /dev/null
+++ b/setuptools/_vendor/tomli/_parser.py
@@ -0,0 +1,691 @@
+# SPDX-License-Identifier: MIT
+# SPDX-FileCopyrightText: 2021 Taneli Hukkinen
+# Licensed to PSF under a Contributor Agreement.
+
+from __future__ import annotations
+
+from collections.abc import Iterable
+import string
+from types import MappingProxyType
+from typing import Any, BinaryIO, NamedTuple
+
+from ._re import (
+ RE_DATETIME,
+ RE_LOCALTIME,
+ RE_NUMBER,
+ match_to_datetime,
+ match_to_localtime,
+ match_to_number,
+)
+from ._types import Key, ParseFloat, Pos
+
+ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127))
+
+# Neither of these sets include quotation mark or backslash. They are
+# currently handled as separate cases in the parser functions.
+ILLEGAL_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t")
+ILLEGAL_MULTILINE_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t\n")
+
+ILLEGAL_LITERAL_STR_CHARS = ILLEGAL_BASIC_STR_CHARS
+ILLEGAL_MULTILINE_LITERAL_STR_CHARS = ILLEGAL_MULTILINE_BASIC_STR_CHARS
+
+ILLEGAL_COMMENT_CHARS = ILLEGAL_BASIC_STR_CHARS
+
+TOML_WS = frozenset(" \t")
+TOML_WS_AND_NEWLINE = TOML_WS | frozenset("\n")
+BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + "-_")
+KEY_INITIAL_CHARS = BARE_KEY_CHARS | frozenset("\"'")
+HEXDIGIT_CHARS = frozenset(string.hexdigits)
+
+BASIC_STR_ESCAPE_REPLACEMENTS = MappingProxyType(
+ {
+ "\\b": "\u0008", # backspace
+ "\\t": "\u0009", # tab
+ "\\n": "\u000A", # linefeed
+ "\\f": "\u000C", # form feed
+ "\\r": "\u000D", # carriage return
+ '\\"': "\u0022", # quote
+ "\\\\": "\u005C", # backslash
+ }
+)
+
+
+class TOMLDecodeError(ValueError):
+ """An error raised if a document is not valid TOML."""
+
+
+def load(__fp: BinaryIO, *, parse_float: ParseFloat = float) -> dict[str, Any]:
+ """Parse TOML from a binary file object."""
+ b = __fp.read()
+ try:
+ s = b.decode()
+ except AttributeError:
+ raise TypeError(
+ "File must be opened in binary mode, e.g. use `open('foo.toml', 'rb')`"
+ ) from None
+ return loads(s, parse_float=parse_float)
+
+
+def loads(__s: str, *, parse_float: ParseFloat = float) -> dict[str, Any]: # noqa: C901
+ """Parse TOML from a string."""
+
+ # The spec allows converting "\r\n" to "\n", even in string
+ # literals. Let's do so to simplify parsing.
+ src = __s.replace("\r\n", "\n")
+ pos = 0
+ out = Output(NestedDict(), Flags())
+ header: Key = ()
+ parse_float = make_safe_parse_float(parse_float)
+
+ # Parse one statement at a time
+ # (typically means one line in TOML source)
+ while True:
+ # 1. Skip line leading whitespace
+ pos = skip_chars(src, pos, TOML_WS)
+
+ # 2. Parse rules. Expect one of the following:
+ # - end of file
+ # - end of line
+ # - comment
+ # - key/value pair
+ # - append dict to list (and move to its namespace)
+ # - create dict (and move to its namespace)
+ # Skip trailing whitespace when applicable.
+ try:
+ char = src[pos]
+ except IndexError:
+ break
+ if char == "\n":
+ pos += 1
+ continue
+ if char in KEY_INITIAL_CHARS:
+ pos = key_value_rule(src, pos, out, header, parse_float)
+ pos = skip_chars(src, pos, TOML_WS)
+ elif char == "[":
+ try:
+ second_char: str | None = src[pos + 1]
+ except IndexError:
+ second_char = None
+ out.flags.finalize_pending()
+ if second_char == "[":
+ pos, header = create_list_rule(src, pos, out)
+ else:
+ pos, header = create_dict_rule(src, pos, out)
+ pos = skip_chars(src, pos, TOML_WS)
+ elif char != "#":
+ raise suffixed_err(src, pos, "Invalid statement")
+
+ # 3. Skip comment
+ pos = skip_comment(src, pos)
+
+ # 4. Expect end of line or end of file
+ try:
+ char = src[pos]
+ except IndexError:
+ break
+ if char != "\n":
+ raise suffixed_err(
+ src, pos, "Expected newline or end of document after a statement"
+ )
+ pos += 1
+
+ return out.data.dict
+
+
+class Flags:
+ """Flags that map to parsed keys/namespaces."""
+
+ # Marks an immutable namespace (inline array or inline table).
+ FROZEN = 0
+ # Marks a nest that has been explicitly created and can no longer
+ # be opened using the "[table]" syntax.
+ EXPLICIT_NEST = 1
+
+ def __init__(self) -> None:
+ self._flags: dict[str, dict] = {}
+ self._pending_flags: set[tuple[Key, int]] = set()
+
+ def add_pending(self, key: Key, flag: int) -> None:
+ self._pending_flags.add((key, flag))
+
+ def finalize_pending(self) -> None:
+ for key, flag in self._pending_flags:
+ self.set(key, flag, recursive=False)
+ self._pending_flags.clear()
+
+ def unset_all(self, key: Key) -> None:
+ cont = self._flags
+ for k in key[:-1]:
+ if k not in cont:
+ return
+ cont = cont[k]["nested"]
+ cont.pop(key[-1], None)
+
+ def set(self, key: Key, flag: int, *, recursive: bool) -> None: # noqa: A003
+ cont = self._flags
+ key_parent, key_stem = key[:-1], key[-1]
+ for k in key_parent:
+ if k not in cont:
+ cont[k] = {"flags": set(), "recursive_flags": set(), "nested": {}}
+ cont = cont[k]["nested"]
+ if key_stem not in cont:
+ cont[key_stem] = {"flags": set(), "recursive_flags": set(), "nested": {}}
+ cont[key_stem]["recursive_flags" if recursive else "flags"].add(flag)
+
+ def is_(self, key: Key, flag: int) -> bool:
+ if not key:
+ return False # document root has no flags
+ cont = self._flags
+ for k in key[:-1]:
+ if k not in cont:
+ return False
+ inner_cont = cont[k]
+ if flag in inner_cont["recursive_flags"]:
+ return True
+ cont = inner_cont["nested"]
+ key_stem = key[-1]
+ if key_stem in cont:
+ cont = cont[key_stem]
+ return flag in cont["flags"] or flag in cont["recursive_flags"]
+ return False
+
+
+class NestedDict:
+ def __init__(self) -> None:
+ # The parsed content of the TOML document
+ self.dict: dict[str, Any] = {}
+
+ def get_or_create_nest(
+ self,
+ key: Key,
+ *,
+ access_lists: bool = True,
+ ) -> dict:
+ cont: Any = self.dict
+ for k in key:
+ if k not in cont:
+ cont[k] = {}
+ cont = cont[k]
+ if access_lists and isinstance(cont, list):
+ cont = cont[-1]
+ if not isinstance(cont, dict):
+ raise KeyError("There is no nest behind this key")
+ return cont
+
+ def append_nest_to_list(self, key: Key) -> None:
+ cont = self.get_or_create_nest(key[:-1])
+ last_key = key[-1]
+ if last_key in cont:
+ list_ = cont[last_key]
+ if not isinstance(list_, list):
+ raise KeyError("An object other than list found behind this key")
+ list_.append({})
+ else:
+ cont[last_key] = [{}]
+
+
+class Output(NamedTuple):
+ data: NestedDict
+ flags: Flags
+
+
+def skip_chars(src: str, pos: Pos, chars: Iterable[str]) -> Pos:
+ try:
+ while src[pos] in chars:
+ pos += 1
+ except IndexError:
+ pass
+ return pos
+
+
+def skip_until(
+ src: str,
+ pos: Pos,
+ expect: str,
+ *,
+ error_on: frozenset[str],
+ error_on_eof: bool,
+) -> Pos:
+ try:
+ new_pos = src.index(expect, pos)
+ except ValueError:
+ new_pos = len(src)
+ if error_on_eof:
+ raise suffixed_err(src, new_pos, f"Expected {expect!r}") from None
+
+ if not error_on.isdisjoint(src[pos:new_pos]):
+ while src[pos] not in error_on:
+ pos += 1
+ raise suffixed_err(src, pos, f"Found invalid character {src[pos]!r}")
+ return new_pos
+
+
+def skip_comment(src: str, pos: Pos) -> Pos:
+ try:
+ char: str | None = src[pos]
+ except IndexError:
+ char = None
+ if char == "#":
+ return skip_until(
+ src, pos + 1, "\n", error_on=ILLEGAL_COMMENT_CHARS, error_on_eof=False
+ )
+ return pos
+
+
+def skip_comments_and_array_ws(src: str, pos: Pos) -> Pos:
+ while True:
+ pos_before_skip = pos
+ pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE)
+ pos = skip_comment(src, pos)
+ if pos == pos_before_skip:
+ return pos
+
+
+def create_dict_rule(src: str, pos: Pos, out: Output) -> tuple[Pos, Key]:
+ pos += 1 # Skip "["
+ pos = skip_chars(src, pos, TOML_WS)
+ pos, key = parse_key(src, pos)
+
+ if out.flags.is_(key, Flags.EXPLICIT_NEST) or out.flags.is_(key, Flags.FROZEN):
+ raise suffixed_err(src, pos, f"Cannot declare {key} twice")
+ out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False)
+ try:
+ out.data.get_or_create_nest(key)
+ except KeyError:
+ raise suffixed_err(src, pos, "Cannot overwrite a value") from None
+
+ if not src.startswith("]", pos):
+ raise suffixed_err(src, pos, "Expected ']' at the end of a table declaration")
+ return pos + 1, key
+
+
+def create_list_rule(src: str, pos: Pos, out: Output) -> tuple[Pos, Key]:
+ pos += 2 # Skip "[["
+ pos = skip_chars(src, pos, TOML_WS)
+ pos, key = parse_key(src, pos)
+
+ if out.flags.is_(key, Flags.FROZEN):
+ raise suffixed_err(src, pos, f"Cannot mutate immutable namespace {key}")
+ # Free the namespace now that it points to another empty list item...
+ out.flags.unset_all(key)
+ # ...but this key precisely is still prohibited from table declaration
+ out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False)
+ try:
+ out.data.append_nest_to_list(key)
+ except KeyError:
+ raise suffixed_err(src, pos, "Cannot overwrite a value") from None
+
+ if not src.startswith("]]", pos):
+ raise suffixed_err(src, pos, "Expected ']]' at the end of an array declaration")
+ return pos + 2, key
+
+
+def key_value_rule(
+ src: str, pos: Pos, out: Output, header: Key, parse_float: ParseFloat
+) -> Pos:
+ pos, key, value = parse_key_value_pair(src, pos, parse_float)
+ key_parent, key_stem = key[:-1], key[-1]
+ abs_key_parent = header + key_parent
+
+ relative_path_cont_keys = (header + key[:i] for i in range(1, len(key)))
+ for cont_key in relative_path_cont_keys:
+ # Check that dotted key syntax does not redefine an existing table
+ if out.flags.is_(cont_key, Flags.EXPLICIT_NEST):
+ raise suffixed_err(src, pos, f"Cannot redefine namespace {cont_key}")
+ # Containers in the relative path can't be opened with the table syntax or
+ # dotted key/value syntax in following table sections.
+ out.flags.add_pending(cont_key, Flags.EXPLICIT_NEST)
+
+ if out.flags.is_(abs_key_parent, Flags.FROZEN):
+ raise suffixed_err(
+ src, pos, f"Cannot mutate immutable namespace {abs_key_parent}"
+ )
+
+ try:
+ nest = out.data.get_or_create_nest(abs_key_parent)
+ except KeyError:
+ raise suffixed_err(src, pos, "Cannot overwrite a value") from None
+ if key_stem in nest:
+ raise suffixed_err(src, pos, "Cannot overwrite a value")
+ # Mark inline table and array namespaces recursively immutable
+ if isinstance(value, (dict, list)):
+ out.flags.set(header + key, Flags.FROZEN, recursive=True)
+ nest[key_stem] = value
+ return pos
+
+
+def parse_key_value_pair(
+ src: str, pos: Pos, parse_float: ParseFloat
+) -> tuple[Pos, Key, Any]:
+ pos, key = parse_key(src, pos)
+ try:
+ char: str | None = src[pos]
+ except IndexError:
+ char = None
+ if char != "=":
+ raise suffixed_err(src, pos, "Expected '=' after a key in a key/value pair")
+ pos += 1
+ pos = skip_chars(src, pos, TOML_WS)
+ pos, value = parse_value(src, pos, parse_float)
+ return pos, key, value
+
+
+def parse_key(src: str, pos: Pos) -> tuple[Pos, Key]:
+ pos, key_part = parse_key_part(src, pos)
+ key: Key = (key_part,)
+ pos = skip_chars(src, pos, TOML_WS)
+ while True:
+ try:
+ char: str | None = src[pos]
+ except IndexError:
+ char = None
+ if char != ".":
+ return pos, key
+ pos += 1
+ pos = skip_chars(src, pos, TOML_WS)
+ pos, key_part = parse_key_part(src, pos)
+ key += (key_part,)
+ pos = skip_chars(src, pos, TOML_WS)
+
+
+def parse_key_part(src: str, pos: Pos) -> tuple[Pos, str]:
+ try:
+ char: str | None = src[pos]
+ except IndexError:
+ char = None
+ if char in BARE_KEY_CHARS:
+ start_pos = pos
+ pos = skip_chars(src, pos, BARE_KEY_CHARS)
+ return pos, src[start_pos:pos]
+ if char == "'":
+ return parse_literal_str(src, pos)
+ if char == '"':
+ return parse_one_line_basic_str(src, pos)
+ raise suffixed_err(src, pos, "Invalid initial character for a key part")
+
+
+def parse_one_line_basic_str(src: str, pos: Pos) -> tuple[Pos, str]:
+ pos += 1
+ return parse_basic_str(src, pos, multiline=False)
+
+
+def parse_array(src: str, pos: Pos, parse_float: ParseFloat) -> tuple[Pos, list]:
+ pos += 1
+ array: list = []
+
+ pos = skip_comments_and_array_ws(src, pos)
+ if src.startswith("]", pos):
+ return pos + 1, array
+ while True:
+ pos, val = parse_value(src, pos, parse_float)
+ array.append(val)
+ pos = skip_comments_and_array_ws(src, pos)
+
+ c = src[pos : pos + 1]
+ if c == "]":
+ return pos + 1, array
+ if c != ",":
+ raise suffixed_err(src, pos, "Unclosed array")
+ pos += 1
+
+ pos = skip_comments_and_array_ws(src, pos)
+ if src.startswith("]", pos):
+ return pos + 1, array
+
+
+def parse_inline_table(src: str, pos: Pos, parse_float: ParseFloat) -> tuple[Pos, dict]:
+ pos += 1
+ nested_dict = NestedDict()
+ flags = Flags()
+
+ pos = skip_chars(src, pos, TOML_WS)
+ if src.startswith("}", pos):
+ return pos + 1, nested_dict.dict
+ while True:
+ pos, key, value = parse_key_value_pair(src, pos, parse_float)
+ key_parent, key_stem = key[:-1], key[-1]
+ if flags.is_(key, Flags.FROZEN):
+ raise suffixed_err(src, pos, f"Cannot mutate immutable namespace {key}")
+ try:
+ nest = nested_dict.get_or_create_nest(key_parent, access_lists=False)
+ except KeyError:
+ raise suffixed_err(src, pos, "Cannot overwrite a value") from None
+ if key_stem in nest:
+ raise suffixed_err(src, pos, f"Duplicate inline table key {key_stem!r}")
+ nest[key_stem] = value
+ pos = skip_chars(src, pos, TOML_WS)
+ c = src[pos : pos + 1]
+ if c == "}":
+ return pos + 1, nested_dict.dict
+ if c != ",":
+ raise suffixed_err(src, pos, "Unclosed inline table")
+ if isinstance(value, (dict, list)):
+ flags.set(key, Flags.FROZEN, recursive=True)
+ pos += 1
+ pos = skip_chars(src, pos, TOML_WS)
+
+
+def parse_basic_str_escape(
+ src: str, pos: Pos, *, multiline: bool = False
+) -> tuple[Pos, str]:
+ escape_id = src[pos : pos + 2]
+ pos += 2
+ if multiline and escape_id in {"\\ ", "\\\t", "\\\n"}:
+ # Skip whitespace until next non-whitespace character or end of
+ # the doc. Error if non-whitespace is found before newline.
+ if escape_id != "\\\n":
+ pos = skip_chars(src, pos, TOML_WS)
+ try:
+ char = src[pos]
+ except IndexError:
+ return pos, ""
+ if char != "\n":
+ raise suffixed_err(src, pos, "Unescaped '\\' in a string")
+ pos += 1
+ pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE)
+ return pos, ""
+ if escape_id == "\\u":
+ return parse_hex_char(src, pos, 4)
+ if escape_id == "\\U":
+ return parse_hex_char(src, pos, 8)
+ try:
+ return pos, BASIC_STR_ESCAPE_REPLACEMENTS[escape_id]
+ except KeyError:
+ raise suffixed_err(src, pos, "Unescaped '\\' in a string") from None
+
+
+def parse_basic_str_escape_multiline(src: str, pos: Pos) -> tuple[Pos, str]:
+ return parse_basic_str_escape(src, pos, multiline=True)
+
+
+def parse_hex_char(src: str, pos: Pos, hex_len: int) -> tuple[Pos, str]:
+ hex_str = src[pos : pos + hex_len]
+ if len(hex_str) != hex_len or not HEXDIGIT_CHARS.issuperset(hex_str):
+ raise suffixed_err(src, pos, "Invalid hex value")
+ pos += hex_len
+ hex_int = int(hex_str, 16)
+ if not is_unicode_scalar_value(hex_int):
+ raise suffixed_err(src, pos, "Escaped character is not a Unicode scalar value")
+ return pos, chr(hex_int)
+
+
+def parse_literal_str(src: str, pos: Pos) -> tuple[Pos, str]:
+ pos += 1 # Skip starting apostrophe
+ start_pos = pos
+ pos = skip_until(
+ src, pos, "'", error_on=ILLEGAL_LITERAL_STR_CHARS, error_on_eof=True
+ )
+ return pos + 1, src[start_pos:pos] # Skip ending apostrophe
+
+
+def parse_multiline_str(src: str, pos: Pos, *, literal: bool) -> tuple[Pos, str]:
+ pos += 3
+ if src.startswith("\n", pos):
+ pos += 1
+
+ if literal:
+ delim = "'"
+ end_pos = skip_until(
+ src,
+ pos,
+ "'''",
+ error_on=ILLEGAL_MULTILINE_LITERAL_STR_CHARS,
+ error_on_eof=True,
+ )
+ result = src[pos:end_pos]
+ pos = end_pos + 3
+ else:
+ delim = '"'
+ pos, result = parse_basic_str(src, pos, multiline=True)
+
+ # Add at maximum two extra apostrophes/quotes if the end sequence
+ # is 4 or 5 chars long instead of just 3.
+ if not src.startswith(delim, pos):
+ return pos, result
+ pos += 1
+ if not src.startswith(delim, pos):
+ return pos, result + delim
+ pos += 1
+ return pos, result + (delim * 2)
+
+
+def parse_basic_str(src: str, pos: Pos, *, multiline: bool) -> tuple[Pos, str]:
+ if multiline:
+ error_on = ILLEGAL_MULTILINE_BASIC_STR_CHARS
+ parse_escapes = parse_basic_str_escape_multiline
+ else:
+ error_on = ILLEGAL_BASIC_STR_CHARS
+ parse_escapes = parse_basic_str_escape
+ result = ""
+ start_pos = pos
+ while True:
+ try:
+ char = src[pos]
+ except IndexError:
+ raise suffixed_err(src, pos, "Unterminated string") from None
+ if char == '"':
+ if not multiline:
+ return pos + 1, result + src[start_pos:pos]
+ if src.startswith('"""', pos):
+ return pos + 3, result + src[start_pos:pos]
+ pos += 1
+ continue
+ if char == "\\":
+ result += src[start_pos:pos]
+ pos, parsed_escape = parse_escapes(src, pos)
+ result += parsed_escape
+ start_pos = pos
+ continue
+ if char in error_on:
+ raise suffixed_err(src, pos, f"Illegal character {char!r}")
+ pos += 1
+
+
+def parse_value( # noqa: C901
+ src: str, pos: Pos, parse_float: ParseFloat
+) -> tuple[Pos, Any]:
+ try:
+ char: str | None = src[pos]
+ except IndexError:
+ char = None
+
+ # IMPORTANT: order conditions based on speed of checking and likelihood
+
+ # Basic strings
+ if char == '"':
+ if src.startswith('"""', pos):
+ return parse_multiline_str(src, pos, literal=False)
+ return parse_one_line_basic_str(src, pos)
+
+ # Literal strings
+ if char == "'":
+ if src.startswith("'''", pos):
+ return parse_multiline_str(src, pos, literal=True)
+ return parse_literal_str(src, pos)
+
+ # Booleans
+ if char == "t":
+ if src.startswith("true", pos):
+ return pos + 4, True
+ if char == "f":
+ if src.startswith("false", pos):
+ return pos + 5, False
+
+ # Arrays
+ if char == "[":
+ return parse_array(src, pos, parse_float)
+
+ # Inline tables
+ if char == "{":
+ return parse_inline_table(src, pos, parse_float)
+
+ # Dates and times
+ datetime_match = RE_DATETIME.match(src, pos)
+ if datetime_match:
+ try:
+ datetime_obj = match_to_datetime(datetime_match)
+ except ValueError as e:
+ raise suffixed_err(src, pos, "Invalid date or datetime") from e
+ return datetime_match.end(), datetime_obj
+ localtime_match = RE_LOCALTIME.match(src, pos)
+ if localtime_match:
+ return localtime_match.end(), match_to_localtime(localtime_match)
+
+ # Integers and "normal" floats.
+ # The regex will greedily match any type starting with a decimal
+ # char, so needs to be located after handling of dates and times.
+ number_match = RE_NUMBER.match(src, pos)
+ if number_match:
+ return number_match.end(), match_to_number(number_match, parse_float)
+
+ # Special floats
+ first_three = src[pos : pos + 3]
+ if first_three in {"inf", "nan"}:
+ return pos + 3, parse_float(first_three)
+ first_four = src[pos : pos + 4]
+ if first_four in {"-inf", "+inf", "-nan", "+nan"}:
+ return pos + 4, parse_float(first_four)
+
+ raise suffixed_err(src, pos, "Invalid value")
+
+
+def suffixed_err(src: str, pos: Pos, msg: str) -> TOMLDecodeError:
+ """Return a `TOMLDecodeError` where error message is suffixed with
+ coordinates in source."""
+
+ def coord_repr(src: str, pos: Pos) -> str:
+ if pos >= len(src):
+ return "end of document"
+ line = src.count("\n", 0, pos) + 1
+ if line == 1:
+ column = pos + 1
+ else:
+ column = pos - src.rindex("\n", 0, pos)
+ return f"line {line}, column {column}"
+
+ return TOMLDecodeError(f"{msg} (at {coord_repr(src, pos)})")
+
+
+def is_unicode_scalar_value(codepoint: int) -> bool:
+ return (0 <= codepoint <= 55295) or (57344 <= codepoint <= 1114111)
+
+
+def make_safe_parse_float(parse_float: ParseFloat) -> ParseFloat:
+ """A decorator to make `parse_float` safe.
+
+ `parse_float` must not return dicts or lists, because these types
+ would be mixed with parsed TOML tables and arrays, thus confusing
+ the parser. The returned decorated callable raises `ValueError`
+ instead of returning illegal types.
+ """
+ # The default `float` callable never returns illegal types. Optimize it.
+ if parse_float is float: # type: ignore[comparison-overlap]
+ return float
+
+ def safe_parse_float(float_str: str) -> Any:
+ float_value = parse_float(float_str)
+ if isinstance(float_value, (dict, list)):
+ raise ValueError("parse_float must not return dicts or lists")
+ return float_value
+
+ return safe_parse_float
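(make_safe_parse_float() wraps a caller-supplied parse_float hook so it can never return a dict or list, which would be confused with parsed tables and arrays. The hook is the standard tomli way to parse TOML floats into something other than float; a hedged sketch, under the same plain "tomli" import assumption as above:)

    from decimal import Decimal
    import tomli  # same import assumption as above

    # parse_float is applied to every float-looking number in the document.
    doc = tomli.loads("pi = 3.14159", parse_float=Decimal)
    assert doc["pi"] == Decimal("3.14159")

    # A parse_float returning a dict or list is rejected by the safe wrapper:
    # tomli.loads("x = 1.0", parse_float=lambda s: [s])  # raises ValueError
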
diff --git a/setuptools/_vendor/tomli/_re.py b/setuptools/_vendor/tomli/_re.py
new file mode 100644
index 0000000..994bb74
--- /dev/null
+++ b/setuptools/_vendor/tomli/_re.py
@@ -0,0 +1,107 @@
+# SPDX-License-Identifier: MIT
+# SPDX-FileCopyrightText: 2021 Taneli Hukkinen
+# Licensed to PSF under a Contributor Agreement.
+
+from __future__ import annotations
+
+from datetime import date, datetime, time, timedelta, timezone, tzinfo
+from functools import lru_cache
+import re
+from typing import Any
+
+from ._types import ParseFloat
+
+# E.g.
+# - 00:32:00.999999
+# - 00:32:00
+_TIME_RE_STR = r"([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(?:\.([0-9]{1,6})[0-9]*)?"
+
+RE_NUMBER = re.compile(
+ r"""
+0
+(?:
+ x[0-9A-Fa-f](?:_?[0-9A-Fa-f])* # hex
+ |
+ b[01](?:_?[01])* # bin
+ |
+ o[0-7](?:_?[0-7])* # oct
+)
+|
+[+-]?(?:0|[1-9](?:_?[0-9])*) # dec, integer part
+(?P<floatpart>
+ (?:\.[0-9](?:_?[0-9])*)? # optional fractional part
+ (?:[eE][+-]?[0-9](?:_?[0-9])*)? # optional exponent part
+)
+""",
+ flags=re.VERBOSE,
+)
+RE_LOCALTIME = re.compile(_TIME_RE_STR)
+RE_DATETIME = re.compile(
+ rf"""
+([0-9]{{4}})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01]) # date, e.g. 1988-10-27
+(?:
+ [Tt ]
+ {_TIME_RE_STR}
+ (?:([Zz])|([+-])([01][0-9]|2[0-3]):([0-5][0-9]))? # optional time offset
+)?
+""",
+ flags=re.VERBOSE,
+)
+
+
+def match_to_datetime(match: re.Match) -> datetime | date:
+ """Convert a `RE_DATETIME` match to `datetime.datetime` or `datetime.date`.
+
+ Raises ValueError if the match does not correspond to a valid date
+ or datetime.
+ """
+ (
+ year_str,
+ month_str,
+ day_str,
+ hour_str,
+ minute_str,
+ sec_str,
+ micros_str,
+ zulu_time,
+ offset_sign_str,
+ offset_hour_str,
+ offset_minute_str,
+ ) = match.groups()
+ year, month, day = int(year_str), int(month_str), int(day_str)
+ if hour_str is None:
+ return date(year, month, day)
+ hour, minute, sec = int(hour_str), int(minute_str), int(sec_str)
+ micros = int(micros_str.ljust(6, "0")) if micros_str else 0
+ if offset_sign_str:
+ tz: tzinfo | None = cached_tz(
+ offset_hour_str, offset_minute_str, offset_sign_str
+ )
+ elif zulu_time:
+ tz = timezone.utc
+ else: # local date-time
+ tz = None
+ return datetime(year, month, day, hour, minute, sec, micros, tzinfo=tz)
+
+
+@lru_cache(maxsize=None)
+def cached_tz(hour_str: str, minute_str: str, sign_str: str) -> timezone:
+ sign = 1 if sign_str == "+" else -1
+ return timezone(
+ timedelta(
+ hours=sign * int(hour_str),
+ minutes=sign * int(minute_str),
+ )
+ )
+
+
+def match_to_localtime(match: re.Match) -> time:
+ hour_str, minute_str, sec_str, micros_str = match.groups()
+ micros = int(micros_str.ljust(6, "0")) if micros_str else 0
+ return time(int(hour_str), int(minute_str), int(sec_str), micros)
+
+
+def match_to_number(match: re.Match, parse_float: ParseFloat) -> Any:
+ if match.group("floatpart"):
+ return parse_float(match.group())
+ return int(match.group(), 0)
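(RE_DATETIME, RE_LOCALTIME and RE_NUMBER feed the match_to_* converters above; through loads() they yield datetime.date/datetime/time objects and int/float values. A short sketch of the resulting types, again assuming the vendored package is importable as plain "tomli":)

    import datetime
    import tomli  # same import assumption as above

    doc = tomli.loads(
        "when = 1979-05-27T07:32:00Z\n"   # offset date-time -> aware datetime
        "day = 1979-05-27\n"              # local date -> datetime.date
        "hexval = 0xDEAD_BEEF\n"          # hex integer with underscores
    )
    assert doc["when"] == datetime.datetime(
        1979, 5, 27, 7, 32, tzinfo=datetime.timezone.utc
    )
    assert doc["day"] == datetime.date(1979, 5, 27)
    assert doc["hexval"] == 0xDEADBEEF
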
diff --git a/setuptools/_vendor/tomli/_types.py b/setuptools/_vendor/tomli/_types.py
new file mode 100644
index 0000000..d949412
--- /dev/null
+++ b/setuptools/_vendor/tomli/_types.py
@@ -0,0 +1,10 @@
+# SPDX-License-Identifier: MIT
+# SPDX-FileCopyrightText: 2021 Taneli Hukkinen
+# Licensed to PSF under a Contributor Agreement.
+
+from typing import Any, Callable, Tuple
+
+# Type annotations
+ParseFloat = Callable[[str], Any]
+Key = Tuple[str, ...]
+Pos = int
diff --git a/setuptools/_vendor/typing_extensions.py b/setuptools/_vendor/typing_extensions.py
new file mode 100644
index 0000000..9f1c7aa
--- /dev/null
+++ b/setuptools/_vendor/typing_extensions.py
@@ -0,0 +1,2296 @@
+import abc
+import collections
+import collections.abc
+import operator
+import sys
+import typing
+
+# After PEP 560, internal typing API was substantially reworked.
+# This is especially important for Protocol class which uses internal APIs
+# quite extensively.
+PEP_560 = sys.version_info[:3] >= (3, 7, 0)
+
+if PEP_560:
+ GenericMeta = type
+else:
+ # 3.6
+ from typing import GenericMeta, _type_vars # noqa
+
+# The two functions below are copies of typing internal helpers.
+# They are needed by _ProtocolMeta
+
+
+def _no_slots_copy(dct):
+ dict_copy = dict(dct)
+ if '__slots__' in dict_copy:
+ for slot in dict_copy['__slots__']:
+ dict_copy.pop(slot, None)
+ return dict_copy
+
+
+def _check_generic(cls, parameters):
+ if not cls.__parameters__:
+ raise TypeError(f"{cls} is not a generic class")
+ alen = len(parameters)
+ elen = len(cls.__parameters__)
+ if alen != elen:
+ raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments for {cls};"
+ f" actual {alen}, expected {elen}")
+
+
+# Please keep __all__ alphabetized within each category.
+__all__ = [
+ # Super-special typing primitives.
+ 'ClassVar',
+ 'Concatenate',
+ 'Final',
+ 'ParamSpec',
+ 'Self',
+ 'Type',
+
+ # ABCs (from collections.abc).
+ 'Awaitable',
+ 'AsyncIterator',
+ 'AsyncIterable',
+ 'Coroutine',
+ 'AsyncGenerator',
+ 'AsyncContextManager',
+ 'ChainMap',
+
+ # Concrete collection types.
+ 'ContextManager',
+ 'Counter',
+ 'Deque',
+ 'DefaultDict',
+ 'OrderedDict',
+ 'TypedDict',
+
+ # Structural checks, a.k.a. protocols.
+ 'SupportsIndex',
+
+ # One-off things.
+ 'Annotated',
+ 'final',
+ 'IntVar',
+ 'Literal',
+ 'NewType',
+ 'overload',
+ 'Protocol',
+ 'runtime',
+ 'runtime_checkable',
+ 'Text',
+ 'TypeAlias',
+ 'TypeGuard',
+ 'TYPE_CHECKING',
+]
+
+if PEP_560:
+ __all__.extend(["get_args", "get_origin", "get_type_hints"])
+
+# 3.6.2+
+if hasattr(typing, 'NoReturn'):
+ NoReturn = typing.NoReturn
+# 3.6.0-3.6.1
+else:
+ class _NoReturn(typing._FinalTypingBase, _root=True):
+ """Special type indicating functions that never return.
+ Example::
+
+ from typing import NoReturn
+
+ def stop() -> NoReturn:
+ raise Exception('no way')
+
+ This type is invalid in other positions, e.g., ``List[NoReturn]``
+ will fail in static type checkers.
+ """
+ __slots__ = ()
+
+ def __instancecheck__(self, obj):
+ raise TypeError("NoReturn cannot be used with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError("NoReturn cannot be used with issubclass().")
+
+ NoReturn = _NoReturn(_root=True)
+
+# Some unconstrained type variables. These are used by the container types.
+# (These are not for export.)
+T = typing.TypeVar('T') # Any type.
+KT = typing.TypeVar('KT') # Key type.
+VT = typing.TypeVar('VT') # Value type.
+T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers.
+T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant.
+
+ClassVar = typing.ClassVar
+
+# On older versions of typing there is an internal class named "Final".
+# 3.8+
+if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7):
+ Final = typing.Final
+# 3.7
+elif sys.version_info[:2] >= (3, 7):
+ class _FinalForm(typing._SpecialForm, _root=True):
+
+ def __repr__(self):
+ return 'typing_extensions.' + self._name
+
+ def __getitem__(self, parameters):
+ item = typing._type_check(parameters,
+ f'{self._name} accepts only single type')
+ return typing._GenericAlias(self, (item,))
+
+ Final = _FinalForm('Final',
+ doc="""A special typing construct to indicate that a name
+ cannot be re-assigned or overridden in a subclass.
+ For example:
+
+ MAX_SIZE: Final = 9000
+ MAX_SIZE += 1 # Error reported by type checker
+
+ class Connection:
+ TIMEOUT: Final[int] = 10
+ class FastConnector(Connection):
+ TIMEOUT = 1 # Error reported by type checker
+
+ There is no runtime checking of these properties.""")
+# 3.6
+else:
+ class _Final(typing._FinalTypingBase, _root=True):
+ """A special typing construct to indicate that a name
+ cannot be re-assigned or overridden in a subclass.
+ For example:
+
+ MAX_SIZE: Final = 9000
+ MAX_SIZE += 1 # Error reported by type checker
+
+ class Connection:
+ TIMEOUT: Final[int] = 10
+ class FastConnector(Connection):
+ TIMEOUT = 1 # Error reported by type checker
+
+ There is no runtime checking of these properties.
+ """
+
+ __slots__ = ('__type__',)
+
+ def __init__(self, tp=None, **kwds):
+ self.__type__ = tp
+
+ def __getitem__(self, item):
+ cls = type(self)
+ if self.__type__ is None:
+ return cls(typing._type_check(item,
+ f'{cls.__name__[1:]} accepts only single type.'),
+ _root=True)
+ raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted')
+
+ def _eval_type(self, globalns, localns):
+ new_tp = typing._eval_type(self.__type__, globalns, localns)
+ if new_tp == self.__type__:
+ return self
+ return type(self)(new_tp, _root=True)
+
+ def __repr__(self):
+ r = super().__repr__()
+ if self.__type__ is not None:
+ r += f'[{typing._type_repr(self.__type__)}]'
+ return r
+
+ def __hash__(self):
+ return hash((type(self).__name__, self.__type__))
+
+ def __eq__(self, other):
+ if not isinstance(other, _Final):
+ return NotImplemented
+ if self.__type__ is not None:
+ return self.__type__ == other.__type__
+ return self is other
+
+ Final = _Final(_root=True)
+
+
+# 3.8+
+if hasattr(typing, 'final'):
+ final = typing.final
+# 3.6-3.7
+else:
+ def final(f):
+ """This decorator can be used to indicate to type checkers that
+ the decorated method cannot be overridden, and decorated class
+ cannot be subclassed. For example:
+
+ class Base:
+ @final
+ def done(self) -> None:
+ ...
+ class Sub(Base):
+ def done(self) -> None: # Error reported by type checker
+ ...
+ @final
+ class Leaf:
+ ...
+ class Other(Leaf): # Error reported by type checker
+ ...
+
+ There is no runtime checking of these properties.
+ """
+ return f
+
+
+def IntVar(name):
+ return typing.TypeVar(name)
+
+
+# 3.8+:
+if hasattr(typing, 'Literal'):
+ Literal = typing.Literal
+# 3.7:
+elif sys.version_info[:2] >= (3, 7):
+ class _LiteralForm(typing._SpecialForm, _root=True):
+
+ def __repr__(self):
+ return 'typing_extensions.' + self._name
+
+ def __getitem__(self, parameters):
+ return typing._GenericAlias(self, parameters)
+
+ Literal = _LiteralForm('Literal',
+ doc="""A type that can be used to indicate to type checkers
+ that the corresponding value has a value literally equivalent
+ to the provided parameter. For example:
+
+ var: Literal[4] = 4
+
+ The type checker understands that 'var' is literally equal to
+ the value 4 and no other value.
+
+ Literal[...] cannot be subclassed. There is no runtime
+ checking verifying that the parameter is actually a value
+ instead of a type.""")
+# 3.6:
+else:
+ class _Literal(typing._FinalTypingBase, _root=True):
+ """A type that can be used to indicate to type checkers that the
+ corresponding value has a value literally equivalent to the
+ provided parameter. For example:
+
+ var: Literal[4] = 4
+
+ The type checker understands that 'var' is literally equal to the
+ value 4 and no other value.
+
+ Literal[...] cannot be subclassed. There is no runtime checking
+ verifying that the parameter is actually a value instead of a type.
+ """
+
+ __slots__ = ('__values__',)
+
+ def __init__(self, values=None, **kwds):
+ self.__values__ = values
+
+ def __getitem__(self, values):
+ cls = type(self)
+ if self.__values__ is None:
+ if not isinstance(values, tuple):
+ values = (values,)
+ return cls(values, _root=True)
+ raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted')
+
+ def _eval_type(self, globalns, localns):
+ return self
+
+ def __repr__(self):
+ r = super().__repr__()
+ if self.__values__ is not None:
+ r += f'[{", ".join(map(typing._type_repr, self.__values__))}]'
+ return r
+
+ def __hash__(self):
+ return hash((type(self).__name__, self.__values__))
+
+ def __eq__(self, other):
+ if not isinstance(other, _Literal):
+ return NotImplemented
+ if self.__values__ is not None:
+ return self.__values__ == other.__values__
+ return self is other
+
+ Literal = _Literal(_root=True)
+
+
+_overload_dummy = typing._overload_dummy # noqa
+overload = typing.overload
+
+
+# This is not a real generic class. Don't use outside annotations.
+Type = typing.Type
+
+# Various ABCs mimicking those in collections.abc.
+# A few are simply re-exported for completeness.
+
+
+class _ExtensionsGenericMeta(GenericMeta):
+ def __subclasscheck__(self, subclass):
+ """This mimics a more modern GenericMeta.__subclasscheck__() logic
+ (that does not have problems with recursion) to work around interactions
+ between collections, typing, and typing_extensions on older
+ versions of Python, see https://github.com/python/typing/issues/501.
+ """
+ if self.__origin__ is not None:
+ if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']:
+ raise TypeError("Parameterized generics cannot be used with class "
+ "or instance checks")
+ return False
+ if not self.__extra__:
+ return super().__subclasscheck__(subclass)
+ res = self.__extra__.__subclasshook__(subclass)
+ if res is not NotImplemented:
+ return res
+ if self.__extra__ in subclass.__mro__:
+ return True
+ for scls in self.__extra__.__subclasses__():
+ if isinstance(scls, GenericMeta):
+ continue
+ if issubclass(subclass, scls):
+ return True
+ return False
+
+
+Awaitable = typing.Awaitable
+Coroutine = typing.Coroutine
+AsyncIterable = typing.AsyncIterable
+AsyncIterator = typing.AsyncIterator
+
+# 3.6.1+
+if hasattr(typing, 'Deque'):
+ Deque = typing.Deque
+# 3.6.0
+else:
+ class Deque(collections.deque, typing.MutableSequence[T],
+ metaclass=_ExtensionsGenericMeta,
+ extra=collections.deque):
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Deque:
+ return collections.deque(*args, **kwds)
+ return typing._generic_new(collections.deque, cls, *args, **kwds)
+
+ContextManager = typing.ContextManager
+# 3.6.2+
+if hasattr(typing, 'AsyncContextManager'):
+ AsyncContextManager = typing.AsyncContextManager
+# 3.6.0-3.6.1
+else:
+ from _collections_abc import _check_methods as _check_methods_in_mro # noqa
+
+ class AsyncContextManager(typing.Generic[T_co]):
+ __slots__ = ()
+
+ async def __aenter__(self):
+ return self
+
+ @abc.abstractmethod
+ async def __aexit__(self, exc_type, exc_value, traceback):
+ return None
+
+ @classmethod
+ def __subclasshook__(cls, C):
+ if cls is AsyncContextManager:
+ return _check_methods_in_mro(C, "__aenter__", "__aexit__")
+ return NotImplemented
+
+DefaultDict = typing.DefaultDict
+
+# 3.7.2+
+if hasattr(typing, 'OrderedDict'):
+ OrderedDict = typing.OrderedDict
+# 3.7.0-3.7.2
+elif (3, 7, 0) <= sys.version_info[:3] < (3, 7, 2):
+ OrderedDict = typing._alias(collections.OrderedDict, (KT, VT))
+# 3.6
+else:
+ class OrderedDict(collections.OrderedDict, typing.MutableMapping[KT, VT],
+ metaclass=_ExtensionsGenericMeta,
+ extra=collections.OrderedDict):
+
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is OrderedDict:
+ return collections.OrderedDict(*args, **kwds)
+ return typing._generic_new(collections.OrderedDict, cls, *args, **kwds)
+
+# 3.6.2+
+if hasattr(typing, 'Counter'):
+ Counter = typing.Counter
+# 3.6.0-3.6.1
+else:
+ class Counter(collections.Counter,
+ typing.Dict[T, int],
+ metaclass=_ExtensionsGenericMeta, extra=collections.Counter):
+
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Counter:
+ return collections.Counter(*args, **kwds)
+ return typing._generic_new(collections.Counter, cls, *args, **kwds)
+
+# 3.6.1+
+if hasattr(typing, 'ChainMap'):
+ ChainMap = typing.ChainMap
+elif hasattr(collections, 'ChainMap'):
+ class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT],
+ metaclass=_ExtensionsGenericMeta,
+ extra=collections.ChainMap):
+
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is ChainMap:
+ return collections.ChainMap(*args, **kwds)
+ return typing._generic_new(collections.ChainMap, cls, *args, **kwds)
+
+# 3.6.1+
+if hasattr(typing, 'AsyncGenerator'):
+ AsyncGenerator = typing.AsyncGenerator
+# 3.6.0
+else:
+ class AsyncGenerator(AsyncIterator[T_co], typing.Generic[T_co, T_contra],
+ metaclass=_ExtensionsGenericMeta,
+ extra=collections.abc.AsyncGenerator):
+ __slots__ = ()
+
+NewType = typing.NewType
+Text = typing.Text
+TYPE_CHECKING = typing.TYPE_CHECKING
+
+
+def _gorg(cls):
+ """This function exists for compatibility with old typing versions."""
+ assert isinstance(cls, GenericMeta)
+ if hasattr(cls, '_gorg'):
+ return cls._gorg
+ while cls.__origin__ is not None:
+ cls = cls.__origin__
+ return cls
+
+
+_PROTO_WHITELIST = ['Callable', 'Awaitable',
+ 'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator',
+ 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible',
+ 'ContextManager', 'AsyncContextManager']
+
+
+def _get_protocol_attrs(cls):
+ attrs = set()
+ for base in cls.__mro__[:-1]: # without object
+ if base.__name__ in ('Protocol', 'Generic'):
+ continue
+ annotations = getattr(base, '__annotations__', {})
+ for attr in list(base.__dict__.keys()) + list(annotations.keys()):
+ if (not attr.startswith('_abc_') and attr not in (
+ '__abstractmethods__', '__annotations__', '__weakref__',
+ '_is_protocol', '_is_runtime_protocol', '__dict__',
+ '__args__', '__slots__',
+ '__next_in_mro__', '__parameters__', '__origin__',
+ '__orig_bases__', '__extra__', '__tree_hash__',
+ '__doc__', '__subclasshook__', '__init__', '__new__',
+ '__module__', '_MutableMapping__marker', '_gorg')):
+ attrs.add(attr)
+ return attrs
+
+
+def _is_callable_members_only(cls):
+ return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls))
+
+
+# 3.8+
+if hasattr(typing, 'Protocol'):
+ Protocol = typing.Protocol
+# 3.7
+elif PEP_560:
+ from typing import _collect_type_vars # noqa
+
+ def _no_init(self, *args, **kwargs):
+ if type(self)._is_protocol:
+ raise TypeError('Protocols cannot be instantiated')
+
+ class _ProtocolMeta(abc.ABCMeta):
+ # This metaclass is a bit unfortunate and exists only because of the lack
+ # of __instancehook__.
+ def __instancecheck__(cls, instance):
+ # We need this method for situations where attributes are
+ # assigned in __init__.
+ if ((not getattr(cls, '_is_protocol', False) or
+ _is_callable_members_only(cls)) and
+ issubclass(instance.__class__, cls)):
+ return True
+ if cls._is_protocol:
+ if all(hasattr(instance, attr) and
+ (not callable(getattr(cls, attr, None)) or
+ getattr(instance, attr) is not None)
+ for attr in _get_protocol_attrs(cls)):
+ return True
+ return super().__instancecheck__(instance)
+
+ class Protocol(metaclass=_ProtocolMeta):
+ # There is quite a lot of overlapping code with typing.Generic.
+ # Unfortunately it is hard to avoid this while these live in two different
+ # modules. The duplicated code will be removed when Protocol is moved to typing.
+ """Base class for protocol classes. Protocol classes are defined as::
+
+ class Proto(Protocol):
+ def meth(self) -> int:
+ ...
+
+ Such classes are primarily used with static type checkers that recognize
+ structural subtyping (static duck-typing), for example::
+
+ class C:
+ def meth(self) -> int:
+ return 0
+
+ def func(x: Proto) -> int:
+ return x.meth()
+
+ func(C()) # Passes static type check
+
+ See PEP 544 for details. Protocol classes decorated with
+ @typing_extensions.runtime act as simple-minded runtime protocol that checks
+ only the presence of given attributes, ignoring their type signatures.
+
+ Protocol classes can be generic, they are defined as::
+
+ class GenProto(Protocol[T]):
+ def meth(self) -> T:
+ ...
+ """
+ __slots__ = ()
+ _is_protocol = True
+
+ def __new__(cls, *args, **kwds):
+ if cls is Protocol:
+ raise TypeError("Type Protocol cannot be instantiated; "
+ "it can only be used as a base class")
+ return super().__new__(cls)
+
+ @typing._tp_cache
+ def __class_getitem__(cls, params):
+ if not isinstance(params, tuple):
+ params = (params,)
+ if not params and cls is not typing.Tuple:
+ raise TypeError(
+ f"Parameter list to {cls.__qualname__}[...] cannot be empty")
+ msg = "Parameters to generic types must be types."
+ params = tuple(typing._type_check(p, msg) for p in params) # noqa
+ if cls is Protocol:
+ # Generic can only be subscripted with unique type variables.
+ if not all(isinstance(p, typing.TypeVar) for p in params):
+ i = 0
+ while isinstance(params[i], typing.TypeVar):
+ i += 1
+ raise TypeError(
+ "Parameters to Protocol[...] must all be type variables."
+ f" Parameter {i + 1} is {params[i]}")
+ if len(set(params)) != len(params):
+ raise TypeError(
+ "Parameters to Protocol[...] must all be unique")
+ else:
+ # Subscripting a regular Generic subclass.
+ _check_generic(cls, params)
+ return typing._GenericAlias(cls, params)
+
+ def __init_subclass__(cls, *args, **kwargs):
+ tvars = []
+ if '__orig_bases__' in cls.__dict__:
+ error = typing.Generic in cls.__orig_bases__
+ else:
+ error = typing.Generic in cls.__bases__
+ if error:
+ raise TypeError("Cannot inherit from plain Generic")
+ if '__orig_bases__' in cls.__dict__:
+ tvars = _collect_type_vars(cls.__orig_bases__)
+ # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn].
+ # If found, tvars must be a subset of it.
+ # If not found, tvars is it.
+ # Also check for and reject plain Generic,
+ # and reject multiple Generic[...] and/or Protocol[...].
+ gvars = None
+ for base in cls.__orig_bases__:
+ if (isinstance(base, typing._GenericAlias) and
+ base.__origin__ in (typing.Generic, Protocol)):
+ # for error messages
+ the_base = base.__origin__.__name__
+ if gvars is not None:
+ raise TypeError(
+ "Cannot inherit from Generic[...]"
+ " and/or Protocol[...] multiple types.")
+ gvars = base.__parameters__
+ if gvars is None:
+ gvars = tvars
+ else:
+ tvarset = set(tvars)
+ gvarset = set(gvars)
+ if not tvarset <= gvarset:
+ s_vars = ', '.join(str(t) for t in tvars if t not in gvarset)
+ s_args = ', '.join(str(g) for g in gvars)
+ raise TypeError(f"Some type variables ({s_vars}) are"
+ f" not listed in {the_base}[{s_args}]")
+ tvars = gvars
+ cls.__parameters__ = tuple(tvars)
+
+ # Determine if this is a protocol or a concrete subclass.
+ if not cls.__dict__.get('_is_protocol', None):
+ cls._is_protocol = any(b is Protocol for b in cls.__bases__)
+
+ # Set (or override) the protocol subclass hook.
+ def _proto_hook(other):
+ if not cls.__dict__.get('_is_protocol', None):
+ return NotImplemented
+ if not getattr(cls, '_is_runtime_protocol', False):
+ if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']:
+ return NotImplemented
+ raise TypeError("Instance and class checks can only be used with"
+ " @runtime protocols")
+ if not _is_callable_members_only(cls):
+ if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']:
+ return NotImplemented
+ raise TypeError("Protocols with non-method members"
+ " don't support issubclass()")
+ if not isinstance(other, type):
+ # Same error as for issubclass(1, int)
+ raise TypeError('issubclass() arg 1 must be a class')
+ for attr in _get_protocol_attrs(cls):
+ for base in other.__mro__:
+ if attr in base.__dict__:
+ if base.__dict__[attr] is None:
+ return NotImplemented
+ break
+ annotations = getattr(base, '__annotations__', {})
+ if (isinstance(annotations, typing.Mapping) and
+ attr in annotations and
+ isinstance(other, _ProtocolMeta) and
+ other._is_protocol):
+ break
+ else:
+ return NotImplemented
+ return True
+ if '__subclasshook__' not in cls.__dict__:
+ cls.__subclasshook__ = _proto_hook
+
+ # We have nothing more to do for non-protocols.
+ if not cls._is_protocol:
+ return
+
+ # Check consistency of bases.
+ for base in cls.__bases__:
+ if not (base in (object, typing.Generic) or
+ base.__module__ == 'collections.abc' and
+ base.__name__ in _PROTO_WHITELIST or
+ isinstance(base, _ProtocolMeta) and base._is_protocol):
+ raise TypeError('Protocols can only inherit from other'
+ f' protocols, got {repr(base)}')
+ cls.__init__ = _no_init
+# 3.6
+else:
+ from typing import _next_in_mro, _type_check # noqa
+
+ def _no_init(self, *args, **kwargs):
+ if type(self)._is_protocol:
+ raise TypeError('Protocols cannot be instantiated')
+
+ class _ProtocolMeta(GenericMeta):
+ """Internal metaclass for Protocol.
+
+ This exists so Protocol classes can be generic without deriving
+ from Generic.
+ """
+ def __new__(cls, name, bases, namespace,
+ tvars=None, args=None, origin=None, extra=None, orig_bases=None):
+ # This is just a version copied from GenericMeta.__new__ that
+ # includes "Protocol" special treatment. (Comments removed for brevity.)
+ assert extra is None # Protocols should not have extra
+ if tvars is not None:
+ assert origin is not None
+ assert all(isinstance(t, typing.TypeVar) for t in tvars), tvars
+ else:
+ tvars = _type_vars(bases)
+ gvars = None
+ for base in bases:
+ if base is typing.Generic:
+ raise TypeError("Cannot inherit from plain Generic")
+ if (isinstance(base, GenericMeta) and
+ base.__origin__ in (typing.Generic, Protocol)):
+ if gvars is not None:
+ raise TypeError(
+ "Cannot inherit from Generic[...] or"
+ " Protocol[...] multiple times.")
+ gvars = base.__parameters__
+ if gvars is None:
+ gvars = tvars
+ else:
+ tvarset = set(tvars)
+ gvarset = set(gvars)
+ if not tvarset <= gvarset:
+ s_vars = ", ".join(str(t) for t in tvars if t not in gvarset)
+ s_args = ", ".join(str(g) for g in gvars)
+ cls_name = "Generic" if any(b.__origin__ is typing.Generic
+ for b in bases) else "Protocol"
+ raise TypeError(f"Some type variables ({s_vars}) are"
+ f" not listed in {cls_name}[{s_args}]")
+ tvars = gvars
+
+ initial_bases = bases
+ if (extra is not None and type(extra) is abc.ABCMeta and
+ extra not in bases):
+ bases = (extra,) + bases
+ bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b
+ for b in bases)
+ if any(isinstance(b, GenericMeta) and b is not typing.Generic for b in bases):
+ bases = tuple(b for b in bases if b is not typing.Generic)
+ namespace.update({'__origin__': origin, '__extra__': extra})
+ self = super(GenericMeta, cls).__new__(cls, name, bases, namespace,
+ _root=True)
+ super(GenericMeta, self).__setattr__('_gorg',
+ self if not origin else
+ _gorg(origin))
+ self.__parameters__ = tvars
+ self.__args__ = tuple(... if a is typing._TypingEllipsis else
+ () if a is typing._TypingEmpty else
+ a for a in args) if args else None
+ self.__next_in_mro__ = _next_in_mro(self)
+ if orig_bases is None:
+ self.__orig_bases__ = initial_bases
+ elif origin is not None:
+ self._abc_registry = origin._abc_registry
+ self._abc_cache = origin._abc_cache
+ if hasattr(self, '_subs_tree'):
+ self.__tree_hash__ = (hash(self._subs_tree()) if origin else
+ super(GenericMeta, self).__hash__())
+ return self
+
+ def __init__(cls, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ if not cls.__dict__.get('_is_protocol', None):
+ cls._is_protocol = any(b is Protocol or
+ isinstance(b, _ProtocolMeta) and
+ b.__origin__ is Protocol
+ for b in cls.__bases__)
+ if cls._is_protocol:
+ for base in cls.__mro__[1:]:
+ if not (base in (object, typing.Generic) or
+ base.__module__ == 'collections.abc' and
+ base.__name__ in _PROTO_WHITELIST or
+ isinstance(base, typing.TypingMeta) and base._is_protocol or
+ isinstance(base, GenericMeta) and
+ base.__origin__ is typing.Generic):
+ raise TypeError(f'Protocols can only inherit from other'
+ f' protocols, got {repr(base)}')
+
+ cls.__init__ = _no_init
+
+ def _proto_hook(other):
+ if not cls.__dict__.get('_is_protocol', None):
+ return NotImplemented
+ if not isinstance(other, type):
+ # Same error as for issubclass(1, int)
+ raise TypeError('issubclass() arg 1 must be a class')
+ for attr in _get_protocol_attrs(cls):
+ for base in other.__mro__:
+ if attr in base.__dict__:
+ if base.__dict__[attr] is None:
+ return NotImplemented
+ break
+ annotations = getattr(base, '__annotations__', {})
+ if (isinstance(annotations, typing.Mapping) and
+ attr in annotations and
+ isinstance(other, _ProtocolMeta) and
+ other._is_protocol):
+ break
+ else:
+ return NotImplemented
+ return True
+ if '__subclasshook__' not in cls.__dict__:
+ cls.__subclasshook__ = _proto_hook
+
+ def __instancecheck__(self, instance):
+ # We need this method for situations where attributes are
+ # assigned in __init__.
+ if ((not getattr(self, '_is_protocol', False) or
+ _is_callable_members_only(self)) and
+ issubclass(instance.__class__, self)):
+ return True
+ if self._is_protocol:
+ if all(hasattr(instance, attr) and
+ (not callable(getattr(self, attr, None)) or
+ getattr(instance, attr) is not None)
+ for attr in _get_protocol_attrs(self)):
+ return True
+ return super(GenericMeta, self).__instancecheck__(instance)
+
+ def __subclasscheck__(self, cls):
+ if self.__origin__ is not None:
+ if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']:
+ raise TypeError("Parameterized generics cannot be used with class "
+ "or instance checks")
+ return False
+ if (self.__dict__.get('_is_protocol', None) and
+ not self.__dict__.get('_is_runtime_protocol', None)):
+ if sys._getframe(1).f_globals['__name__'] in ['abc',
+ 'functools',
+ 'typing']:
+ return False
+ raise TypeError("Instance and class checks can only be used with"
+ " @runtime protocols")
+ if (self.__dict__.get('_is_runtime_protocol', None) and
+ not _is_callable_members_only(self)):
+ if sys._getframe(1).f_globals['__name__'] in ['abc',
+ 'functools',
+ 'typing']:
+ return super(GenericMeta, self).__subclasscheck__(cls)
+ raise TypeError("Protocols with non-method members"
+ " don't support issubclass()")
+ return super(GenericMeta, self).__subclasscheck__(cls)
+
+ @typing._tp_cache
+ def __getitem__(self, params):
+ # We also need to copy this from GenericMeta.__getitem__ to get
+ # special treatment of "Protocol". (Comments removed for brevity.)
+ if not isinstance(params, tuple):
+ params = (params,)
+ if not params and _gorg(self) is not typing.Tuple:
+ raise TypeError(
+ f"Parameter list to {self.__qualname__}[...] cannot be empty")
+ msg = "Parameters to generic types must be types."
+ params = tuple(_type_check(p, msg) for p in params)
+ if self in (typing.Generic, Protocol):
+ if not all(isinstance(p, typing.TypeVar) for p in params):
+ raise TypeError(
+ f"Parameters to {repr(self)}[...] must all be type variables")
+ if len(set(params)) != len(params):
+ raise TypeError(
+ f"Parameters to {repr(self)}[...] must all be unique")
+ tvars = params
+ args = params
+ elif self in (typing.Tuple, typing.Callable):
+ tvars = _type_vars(params)
+ args = params
+ elif self.__origin__ in (typing.Generic, Protocol):
+ raise TypeError(f"Cannot subscript already-subscripted {repr(self)}")
+ else:
+ _check_generic(self, params)
+ tvars = _type_vars(params)
+ args = params
+
+ prepend = (self,) if self.__origin__ is None else ()
+ return self.__class__(self.__name__,
+ prepend + self.__bases__,
+ _no_slots_copy(self.__dict__),
+ tvars=tvars,
+ args=args,
+ origin=self,
+ extra=self.__extra__,
+ orig_bases=self.__orig_bases__)
+
+ class Protocol(metaclass=_ProtocolMeta):
+ """Base class for protocol classes. Protocol classes are defined as::
+
+ class Proto(Protocol):
+ def meth(self) -> int:
+ ...
+
+ Such classes are primarily used with static type checkers that recognize
+ structural subtyping (static duck-typing), for example::
+
+ class C:
+ def meth(self) -> int:
+ return 0
+
+ def func(x: Proto) -> int:
+ return x.meth()
+
+ func(C()) # Passes static type check
+
+ See PEP 544 for details. Protocol classes decorated with
+    @typing_extensions.runtime act as simple-minded runtime protocols that check
+    only the presence of given attributes, ignoring their type signatures.
+
+    Protocol classes can be generic; they are defined as::
+
+ class GenProto(Protocol[T]):
+ def meth(self) -> T:
+ ...
+ """
+ __slots__ = ()
+ _is_protocol = True
+
+ def __new__(cls, *args, **kwds):
+ if _gorg(cls) is Protocol:
+ raise TypeError("Type Protocol cannot be instantiated; "
+ "it can be used only as a base class")
+ return typing._generic_new(cls.__next_in_mro__, cls, *args, **kwds)
+
+
+# 3.8+
+if hasattr(typing, 'runtime_checkable'):
+ runtime_checkable = typing.runtime_checkable
+# 3.6-3.7
+else:
+ def runtime_checkable(cls):
+ """Mark a protocol class as a runtime protocol, so that it
+ can be used with isinstance() and issubclass(). Raise TypeError
+ if applied to a non-protocol class.
+
+ This allows a simple-minded structural check very similar to the
+ one-offs in collections.abc such as Hashable.
+ """
+ if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol:
+ raise TypeError('@runtime_checkable can be only applied to protocol classes,'
+ f' got {cls!r}')
+ cls._is_runtime_protocol = True
+ return cls
+
+
+# Exists for backwards compatibility.
+runtime = runtime_checkable
+
+
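For illustration, a minimal sketch of how the backported ``Protocol``/``runtime_checkable`` pair is meant to be used (the ``Closeable`` protocol and ``Resource`` class below are hypothetical)::

    from typing_extensions import Protocol, runtime_checkable

    @runtime_checkable
    class Closeable(Protocol):
        def close(self) -> None: ...

    class Resource:
        def close(self) -> None:
            print("closed")

    # Only the presence of ``close`` is checked, not its signature.
    assert isinstance(Resource(), Closeable)
    assert not isinstance(object(), Closeable)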
+# 3.8+
+if hasattr(typing, 'SupportsIndex'):
+ SupportsIndex = typing.SupportsIndex
+# 3.6-3.7
+else:
+ @runtime_checkable
+ class SupportsIndex(Protocol):
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def __index__(self) -> int:
+ pass
+
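A short usage sketch for the ``SupportsIndex`` backport above -- any object whose ``__index__`` returns an ``int`` passes the runtime check (``OneBased`` is a hypothetical example class)::

    from typing_extensions import SupportsIndex

    class OneBased:
        def __init__(self, value: int) -> None:
            self.value = value

        def __index__(self) -> int:
            # Consumed implicitly by slicing, hex(), range(), etc.
            return self.value - 1

    assert isinstance(3, SupportsIndex)           # int defines __index__
    assert isinstance(OneBased(5), SupportsIndex)
    assert not isinstance(3.5, SupportsIndex)     # float does not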
+
+if sys.version_info >= (3, 9, 2):
+ # The standard library TypedDict in Python 3.8 does not store runtime information
+ # about which (if any) keys are optional. See https://bugs.python.org/issue38834
+ # The standard library TypedDict in Python 3.9.0/1 does not honour the "total"
+ # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059
+ TypedDict = typing.TypedDict
+else:
+ def _check_fails(cls, other):
+ try:
+ if sys._getframe(1).f_globals['__name__'] not in ['abc',
+ 'functools',
+ 'typing']:
+ # Typed dicts are only for static structural subtyping.
+ raise TypeError('TypedDict does not support instance and class checks')
+ except (AttributeError, ValueError):
+ pass
+ return False
+
+ def _dict_new(*args, **kwargs):
+ if not args:
+ raise TypeError('TypedDict.__new__(): not enough arguments')
+        _, args = args[0], args[1:]  # allow the "cls" keyword to be passed
+ return dict(*args, **kwargs)
+
+ _dict_new.__text_signature__ = '($cls, _typename, _fields=None, /, **kwargs)'
+
+ def _typeddict_new(*args, total=True, **kwargs):
+ if not args:
+ raise TypeError('TypedDict.__new__(): not enough arguments')
+        _, args = args[0], args[1:]  # allow the "cls" keyword to be passed
+ if args:
+            typename, args = args[0], args[1:]  # allow the "_typename" keyword to be passed
+ elif '_typename' in kwargs:
+ typename = kwargs.pop('_typename')
+ import warnings
+ warnings.warn("Passing '_typename' as keyword argument is deprecated",
+ DeprecationWarning, stacklevel=2)
+ else:
+ raise TypeError("TypedDict.__new__() missing 1 required positional "
+ "argument: '_typename'")
+ if args:
+ try:
+                fields, = args  # allow the "_fields" keyword to be passed
+ except ValueError:
+ raise TypeError('TypedDict.__new__() takes from 2 to 3 '
+ f'positional arguments but {len(args) + 2} '
+ 'were given')
+ elif '_fields' in kwargs and len(kwargs) == 1:
+ fields = kwargs.pop('_fields')
+ import warnings
+ warnings.warn("Passing '_fields' as keyword argument is deprecated",
+ DeprecationWarning, stacklevel=2)
+ else:
+ fields = None
+
+ if fields is None:
+ fields = kwargs
+ elif kwargs:
+ raise TypeError("TypedDict takes either a dict or keyword arguments,"
+ " but not both")
+
+ ns = {'__annotations__': dict(fields)}
+ try:
+ # Setting correct module is necessary to make typed dict classes pickleable.
+ ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__')
+ except (AttributeError, ValueError):
+ pass
+
+ return _TypedDictMeta(typename, (), ns, total=total)
+
+ _typeddict_new.__text_signature__ = ('($cls, _typename, _fields=None,'
+ ' /, *, total=True, **kwargs)')
+
+ class _TypedDictMeta(type):
+ def __init__(cls, name, bases, ns, total=True):
+ super().__init__(name, bases, ns)
+
+ def __new__(cls, name, bases, ns, total=True):
+ # Create new typed dict class object.
+ # This method is called directly when TypedDict is subclassed,
+ # or via _typeddict_new when TypedDict is instantiated. This way
+ # TypedDict supports all three syntaxes described in its docstring.
+ # Subclasses and instances of TypedDict return actual dictionaries
+ # via _dict_new.
+ ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new
+ tp_dict = super().__new__(cls, name, (dict,), ns)
+
+ annotations = {}
+ own_annotations = ns.get('__annotations__', {})
+ own_annotation_keys = set(own_annotations.keys())
+ msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
+ own_annotations = {
+ n: typing._type_check(tp, msg) for n, tp in own_annotations.items()
+ }
+ required_keys = set()
+ optional_keys = set()
+
+ for base in bases:
+ annotations.update(base.__dict__.get('__annotations__', {}))
+ required_keys.update(base.__dict__.get('__required_keys__', ()))
+ optional_keys.update(base.__dict__.get('__optional_keys__', ()))
+
+ annotations.update(own_annotations)
+ if total:
+ required_keys.update(own_annotation_keys)
+ else:
+ optional_keys.update(own_annotation_keys)
+
+ tp_dict.__annotations__ = annotations
+ tp_dict.__required_keys__ = frozenset(required_keys)
+ tp_dict.__optional_keys__ = frozenset(optional_keys)
+ if not hasattr(tp_dict, '__total__'):
+ tp_dict.__total__ = total
+ return tp_dict
+
+ __instancecheck__ = __subclasscheck__ = _check_fails
+
+ TypedDict = _TypedDictMeta('TypedDict', (dict,), {})
+ TypedDict.__module__ = __name__
+ TypedDict.__doc__ = \
+ """A simple typed name space. At runtime it is equivalent to a plain dict.
+
+ TypedDict creates a dictionary type that expects all of its
+ instances to have a certain set of keys, with each key
+ associated with a value of a consistent type. This expectation
+ is not checked at runtime but is only enforced by type checkers.
+ Usage::
+
+ class Point2D(TypedDict):
+ x: int
+ y: int
+ label: str
+
+ a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK
+ b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check
+
+ assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
+
+ The type info can be accessed via the Point2D.__annotations__ dict, and
+ the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
+ TypedDict supports two additional equivalent forms::
+
+ Point2D = TypedDict('Point2D', x=int, y=int, label=str)
+ Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
+
+        The class syntax is only supported in Python 3.6+, while the other two
+        syntax forms work for Python 2.7 and 3.2+.
+ """
+
+
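A brief sketch of the runtime introspection the backported ``TypedDict`` provides (the ``Movie`` class is hypothetical; on Python 3.9.2+ the standard-library ``TypedDict`` behaves the same way)::

    from typing_extensions import TypedDict

    class Movie(TypedDict, total=False):
        title: str
        year: int

    m: Movie = {'title': 'Blade Runner'}      # 'year' may be omitted
    assert Movie.__required_keys__ == frozenset()
    assert Movie.__optional_keys__ == frozenset({'title', 'year'})
    assert Movie.__total__ is False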
+# Python 3.9+ has PEP 593 (Annotated and modified get_type_hints)
+if hasattr(typing, 'Annotated'):
+ Annotated = typing.Annotated
+ get_type_hints = typing.get_type_hints
+ # Not exported and not a public API, but needed for get_origin() and get_args()
+ # to work.
+ _AnnotatedAlias = typing._AnnotatedAlias
+# 3.7-3.8
+elif PEP_560:
+ class _AnnotatedAlias(typing._GenericAlias, _root=True):
+ """Runtime representation of an annotated type.
+
+ At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
+ with extra annotations. The alias behaves like a normal typing alias,
+ instantiating is the same as instantiating the underlying type, binding
+ it to types is also the same.
+ """
+ def __init__(self, origin, metadata):
+ if isinstance(origin, _AnnotatedAlias):
+ metadata = origin.__metadata__ + metadata
+ origin = origin.__origin__
+ super().__init__(origin, origin)
+ self.__metadata__ = metadata
+
+ def copy_with(self, params):
+ assert len(params) == 1
+ new_type = params[0]
+ return _AnnotatedAlias(new_type, self.__metadata__)
+
+ def __repr__(self):
+ return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, "
+ f"{', '.join(repr(a) for a in self.__metadata__)}]")
+
+ def __reduce__(self):
+ return operator.getitem, (
+ Annotated, (self.__origin__,) + self.__metadata__
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, _AnnotatedAlias):
+ return NotImplemented
+ if self.__origin__ != other.__origin__:
+ return False
+ return self.__metadata__ == other.__metadata__
+
+ def __hash__(self):
+ return hash((self.__origin__, self.__metadata__))
+
+ class Annotated:
+ """Add context specific metadata to a type.
+
+ Example: Annotated[int, runtime_check.Unsigned] indicates to the
+ hypothetical runtime_check module that this type is an unsigned int.
+ Every other consumer of this type can ignore this metadata and treat
+ this type as int.
+
+        The first argument to Annotated must be a valid type (and will be in
+        the __origin__ field); the remaining arguments are kept as a tuple in
+        the __metadata__ field.
+
+ Details:
+
+ - It's an error to call `Annotated` with less than two arguments.
+ - Nested Annotated are flattened::
+
+ Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
+
+ - Instantiating an annotated type is equivalent to instantiating the
+ underlying type::
+
+ Annotated[C, Ann1](5) == C(5)
+
+ - Annotated can be used as a generic type alias::
+
+ Optimized = Annotated[T, runtime.Optimize()]
+ Optimized[int] == Annotated[int, runtime.Optimize()]
+
+ OptimizedList = Annotated[List[T], runtime.Optimize()]
+ OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
+ """
+
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwargs):
+ raise TypeError("Type Annotated cannot be instantiated.")
+
+ @typing._tp_cache
+ def __class_getitem__(cls, params):
+ if not isinstance(params, tuple) or len(params) < 2:
+ raise TypeError("Annotated[...] should be used "
+ "with at least two arguments (a type and an "
+ "annotation).")
+ msg = "Annotated[t, ...]: t must be a type."
+ origin = typing._type_check(params[0], msg)
+ metadata = tuple(params[1:])
+ return _AnnotatedAlias(origin, metadata)
+
+ def __init_subclass__(cls, *args, **kwargs):
+ raise TypeError(
+ f"Cannot subclass {cls.__module__}.Annotated"
+ )
+
+ def _strip_annotations(t):
+ """Strips the annotations from a given type.
+ """
+ if isinstance(t, _AnnotatedAlias):
+ return _strip_annotations(t.__origin__)
+ if isinstance(t, typing._GenericAlias):
+ stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
+ if stripped_args == t.__args__:
+ return t
+ res = t.copy_with(stripped_args)
+ res._special = t._special
+ return res
+ return t
+
+ def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
+ """Return type hints for an object.
+
+ This is often the same as obj.__annotations__, but it handles
+ forward references encoded as string literals, adds Optional[t] if a
+ default value equal to None is set and recursively replaces all
+ 'Annotated[T, ...]' with 'T' (unless 'include_extras=True').
+
+ The argument may be a module, class, method, or function. The annotations
+ are returned as a dictionary. For classes, annotations include also
+ inherited members.
+
+ TypeError is raised if the argument is not of a type that can contain
+ annotations, and an empty dictionary is returned if no annotations are
+ present.
+
+ BEWARE -- the behavior of globalns and localns is counterintuitive
+ (unless you are familiar with how eval() and exec() work). The
+ search order is locals first, then globals.
+
+ - If no dict arguments are passed, an attempt is made to use the
+ globals from obj (or the respective module's globals for classes),
+ and these are also used as the locals. If the object does not appear
+ to have globals, an empty dictionary is used.
+
+ - If one dict argument is passed, it is used for both globals and
+ locals.
+
+ - If two dict arguments are passed, they specify globals and
+ locals, respectively.
+ """
+ hint = typing.get_type_hints(obj, globalns=globalns, localns=localns)
+ if include_extras:
+ return hint
+ return {k: _strip_annotations(t) for k, t in hint.items()}
+# 3.6
+else:
+
+ def _is_dunder(name):
+ """Returns True if name is a __dunder_variable_name__."""
+ return len(name) > 4 and name.startswith('__') and name.endswith('__')
+
+ # Prior to Python 3.7 types did not have `copy_with`. A lot of the equality
+    # checks, argument expansion etc. are done on the _subs_tree. As a result we
+ # can't provide a get_type_hints function that strips out annotations.
+
+ class AnnotatedMeta(typing.GenericMeta):
+ """Metaclass for Annotated"""
+
+ def __new__(cls, name, bases, namespace, **kwargs):
+ if any(b is not object for b in bases):
+ raise TypeError("Cannot subclass " + str(Annotated))
+ return super().__new__(cls, name, bases, namespace, **kwargs)
+
+ @property
+ def __metadata__(self):
+ return self._subs_tree()[2]
+
+ def _tree_repr(self, tree):
+ cls, origin, metadata = tree
+ if not isinstance(origin, tuple):
+ tp_repr = typing._type_repr(origin)
+ else:
+ tp_repr = origin[0]._tree_repr(origin)
+ metadata_reprs = ", ".join(repr(arg) for arg in metadata)
+ return f'{cls}[{tp_repr}, {metadata_reprs}]'
+
+ def _subs_tree(self, tvars=None, args=None): # noqa
+ if self is Annotated:
+ return Annotated
+ res = super()._subs_tree(tvars=tvars, args=args)
+ # Flatten nested Annotated
+ if isinstance(res[1], tuple) and res[1][0] is Annotated:
+ sub_tp = res[1][1]
+ sub_annot = res[1][2]
+ return (Annotated, sub_tp, sub_annot + res[2])
+ return res
+
+ def _get_cons(self):
+ """Return the class used to create instance of this type."""
+ if self.__origin__ is None:
+ raise TypeError("Cannot get the underlying type of a "
+ "non-specialized Annotated type.")
+ tree = self._subs_tree()
+ while isinstance(tree, tuple) and tree[0] is Annotated:
+ tree = tree[1]
+ if isinstance(tree, tuple):
+ return tree[0]
+ else:
+ return tree
+
+ @typing._tp_cache
+ def __getitem__(self, params):
+ if not isinstance(params, tuple):
+ params = (params,)
+ if self.__origin__ is not None: # specializing an instantiated type
+ return super().__getitem__(params)
+ elif not isinstance(params, tuple) or len(params) < 2:
+ raise TypeError("Annotated[...] should be instantiated "
+ "with at least two arguments (a type and an "
+ "annotation).")
+ else:
+ msg = "Annotated[t, ...]: t must be a type."
+ tp = typing._type_check(params[0], msg)
+ metadata = tuple(params[1:])
+ return self.__class__(
+ self.__name__,
+ self.__bases__,
+ _no_slots_copy(self.__dict__),
+ tvars=_type_vars((tp,)),
+ # Metadata is a tuple so it won't be touched by _replace_args et al.
+ args=(tp, metadata),
+ origin=self,
+ )
+
+ def __call__(self, *args, **kwargs):
+ cons = self._get_cons()
+ result = cons(*args, **kwargs)
+ try:
+ result.__orig_class__ = self
+ except AttributeError:
+ pass
+ return result
+
+ def __getattr__(self, attr):
+ # For simplicity we just don't relay all dunder names
+ if self.__origin__ is not None and not _is_dunder(attr):
+ return getattr(self._get_cons(), attr)
+ raise AttributeError(attr)
+
+ def __setattr__(self, attr, value):
+ if _is_dunder(attr) or attr.startswith('_abc_'):
+ super().__setattr__(attr, value)
+ elif self.__origin__ is None:
+ raise AttributeError(attr)
+ else:
+ setattr(self._get_cons(), attr, value)
+
+ def __instancecheck__(self, obj):
+ raise TypeError("Annotated cannot be used with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError("Annotated cannot be used with issubclass().")
+
+ class Annotated(metaclass=AnnotatedMeta):
+ """Add context specific metadata to a type.
+
+ Example: Annotated[int, runtime_check.Unsigned] indicates to the
+ hypothetical runtime_check module that this type is an unsigned int.
+ Every other consumer of this type can ignore this metadata and treat
+ this type as int.
+
+ The first argument to Annotated must be a valid type, the remaining
+ arguments are kept as a tuple in the __metadata__ field.
+
+ Details:
+
+ - It's an error to call `Annotated` with less than two arguments.
+ - Nested Annotated are flattened::
+
+ Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
+
+ - Instantiating an annotated type is equivalent to instantiating the
+ underlying type::
+
+ Annotated[C, Ann1](5) == C(5)
+
+ - Annotated can be used as a generic type alias::
+
+ Optimized = Annotated[T, runtime.Optimize()]
+ Optimized[int] == Annotated[int, runtime.Optimize()]
+
+ OptimizedList = Annotated[List[T], runtime.Optimize()]
+ OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
+ """
+
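To make the branches above concrete, a small sketch of how ``Annotated`` metadata is stripped by (or preserved through) ``get_type_hints`` on Python 3.7 and later -- on 3.6 the metadata cannot be stripped, as the comment above notes. ``MaxLen`` is a hypothetical marker::

    from typing_extensions import Annotated, get_type_hints

    class MaxLen:
        def __init__(self, n: int) -> None:
            self.n = n

    def greet(name: Annotated[str, MaxLen(10)]) -> str:
        return 'hello ' + name

    hints = get_type_hints(greet)                         # metadata stripped
    assert hints['name'] is str
    extras = get_type_hints(greet, include_extras=True)   # metadata kept
    # extras['name'] is Annotated[str, MaxLen(10)]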
+# Python 3.8 has get_origin() and get_args() but those implementations aren't
+# Annotated-aware, so we can't use those. Python 3.9's versions don't support
+# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do.
+if sys.version_info[:2] >= (3, 10):
+ get_origin = typing.get_origin
+ get_args = typing.get_args
+# 3.7-3.9
+elif PEP_560:
+ try:
+ # 3.9+
+ from typing import _BaseGenericAlias
+ except ImportError:
+ _BaseGenericAlias = typing._GenericAlias
+ try:
+ # 3.9+
+ from typing import GenericAlias
+ except ImportError:
+ GenericAlias = typing._GenericAlias
+
+ def get_origin(tp):
+ """Get the unsubscripted version of a type.
+
+ This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
+ and Annotated. Return None for unsupported types. Examples::
+
+ get_origin(Literal[42]) is Literal
+ get_origin(int) is None
+ get_origin(ClassVar[int]) is ClassVar
+ get_origin(Generic) is Generic
+ get_origin(Generic[T]) is Generic
+ get_origin(Union[T, int]) is Union
+ get_origin(List[Tuple[T, T]][int]) == list
+ get_origin(P.args) is P
+ """
+ if isinstance(tp, _AnnotatedAlias):
+ return Annotated
+ if isinstance(tp, (typing._GenericAlias, GenericAlias, _BaseGenericAlias,
+ ParamSpecArgs, ParamSpecKwargs)):
+ return tp.__origin__
+ if tp is typing.Generic:
+ return typing.Generic
+ return None
+
+ def get_args(tp):
+ """Get type arguments with all substitutions performed.
+
+ For unions, basic simplifications used by Union constructor are performed.
+ Examples::
+ get_args(Dict[str, int]) == (str, int)
+ get_args(int) == ()
+ get_args(Union[int, Union[T, int], str][int]) == (int, str)
+ get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
+ get_args(Callable[[], T][int]) == ([], int)
+ """
+ if isinstance(tp, _AnnotatedAlias):
+ return (tp.__origin__,) + tp.__metadata__
+ if isinstance(tp, (typing._GenericAlias, GenericAlias)):
+ if getattr(tp, "_special", False):
+ return ()
+ res = tp.__args__
+ if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis:
+ res = (list(res[:-1]), res[-1])
+ return res
+ return ()
+
+
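A few representative results from the helpers above (mirroring the docstring examples; assuming Python 3.7+)::

    from typing import Dict, Union
    from typing_extensions import Annotated, get_args, get_origin

    assert get_origin(Dict[str, int]) is dict
    assert get_args(Dict[str, int]) == (str, int)
    assert get_origin(Union[int, str]) is Union
    assert get_origin(Annotated[int, 'meta']) is Annotated
    assert get_args(Annotated[int, 'meta']) == (int, 'meta')
    assert get_origin(int) is None and get_args(int) == ()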
+# 3.10+
+if hasattr(typing, 'TypeAlias'):
+ TypeAlias = typing.TypeAlias
+# 3.9
+elif sys.version_info[:2] >= (3, 9):
+ class _TypeAliasForm(typing._SpecialForm, _root=True):
+ def __repr__(self):
+ return 'typing_extensions.' + self._name
+
+ @_TypeAliasForm
+ def TypeAlias(self, parameters):
+ """Special marker indicating that an assignment should
+ be recognized as a proper type alias definition by type
+ checkers.
+
+ For example::
+
+ Predicate: TypeAlias = Callable[..., bool]
+
+ It's invalid when used anywhere except as in the example above.
+ """
+ raise TypeError(f"{self} is not subscriptable")
+# 3.7-3.8
+elif sys.version_info[:2] >= (3, 7):
+ class _TypeAliasForm(typing._SpecialForm, _root=True):
+ def __repr__(self):
+ return 'typing_extensions.' + self._name
+
+ TypeAlias = _TypeAliasForm('TypeAlias',
+ doc="""Special marker indicating that an assignment should
+ be recognized as a proper type alias definition by type
+ checkers.
+
+ For example::
+
+ Predicate: TypeAlias = Callable[..., bool]
+
+ It's invalid when used anywhere except as in the example
+ above.""")
+# 3.6
+else:
+ class _TypeAliasMeta(typing.TypingMeta):
+ """Metaclass for TypeAlias"""
+
+ def __repr__(self):
+ return 'typing_extensions.TypeAlias'
+
+ class _TypeAliasBase(typing._FinalTypingBase, metaclass=_TypeAliasMeta, _root=True):
+ """Special marker indicating that an assignment should
+ be recognized as a proper type alias definition by type
+ checkers.
+
+ For example::
+
+ Predicate: TypeAlias = Callable[..., bool]
+
+ It's invalid when used anywhere except as in the example above.
+ """
+ __slots__ = ()
+
+ def __instancecheck__(self, obj):
+ raise TypeError("TypeAlias cannot be used with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError("TypeAlias cannot be used with issubclass().")
+
+ def __repr__(self):
+ return 'typing_extensions.TypeAlias'
+
+ TypeAlias = _TypeAliasBase(_root=True)
+
+
+# Python 3.10+ has PEP 612
+if hasattr(typing, 'ParamSpecArgs'):
+ ParamSpecArgs = typing.ParamSpecArgs
+ ParamSpecKwargs = typing.ParamSpecKwargs
+# 3.6-3.9
+else:
+ class _Immutable:
+ """Mixin to indicate that object should not be copied."""
+ __slots__ = ()
+
+ def __copy__(self):
+ return self
+
+ def __deepcopy__(self, memo):
+ return self
+
+ class ParamSpecArgs(_Immutable):
+ """The args for a ParamSpec object.
+
+ Given a ParamSpec object P, P.args is an instance of ParamSpecArgs.
+
+ ParamSpecArgs objects have a reference back to their ParamSpec:
+
+ P.args.__origin__ is P
+
+ This type is meant for runtime introspection and has no special meaning to
+ static type checkers.
+ """
+ def __init__(self, origin):
+ self.__origin__ = origin
+
+ def __repr__(self):
+ return f"{self.__origin__.__name__}.args"
+
+ class ParamSpecKwargs(_Immutable):
+ """The kwargs for a ParamSpec object.
+
+ Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs.
+
+ ParamSpecKwargs objects have a reference back to their ParamSpec:
+
+ P.kwargs.__origin__ is P
+
+ This type is meant for runtime introspection and has no special meaning to
+ static type checkers.
+ """
+ def __init__(self, origin):
+ self.__origin__ = origin
+
+ def __repr__(self):
+ return f"{self.__origin__.__name__}.kwargs"
+
+# 3.10+
+if hasattr(typing, 'ParamSpec'):
+ ParamSpec = typing.ParamSpec
+# 3.6-3.9
+else:
+
+ # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
+ class ParamSpec(list):
+ """Parameter specification variable.
+
+ Usage::
+
+ P = ParamSpec('P')
+
+ Parameter specification variables exist primarily for the benefit of static
+ type checkers. They are used to forward the parameter types of one
+ callable to another callable, a pattern commonly found in higher order
+ functions and decorators. They are only valid when used in ``Concatenate``,
+    or as the first argument to ``Callable``. In Python 3.10 and higher,
+ they are also supported in user-defined Generics at runtime.
+ See class Generic for more information on generic types. An
+ example for annotating a decorator::
+
+ T = TypeVar('T')
+ P = ParamSpec('P')
+
+ def add_logging(f: Callable[P, T]) -> Callable[P, T]:
+ '''A type-safe decorator to add logging to a function.'''
+ def inner(*args: P.args, **kwargs: P.kwargs) -> T:
+ logging.info(f'{f.__name__} was called')
+ return f(*args, **kwargs)
+ return inner
+
+ @add_logging
+ def add_two(x: float, y: float) -> float:
+ '''Add two numbers together.'''
+ return x + y
+
+ Parameter specification variables defined with covariant=True or
+ contravariant=True can be used to declare covariant or contravariant
+ generic types. These keyword arguments are valid, but their actual semantics
+ are yet to be decided. See PEP 612 for details.
+
+ Parameter specification variables can be introspected. e.g.:
+
+       P.__name__ == 'P'
+ P.__bound__ == None
+ P.__covariant__ == False
+ P.__contravariant__ == False
+
+ Note that only parameter specification variables defined in global scope can
+ be pickled.
+ """
+
+ # Trick Generic __parameters__.
+ __class__ = typing.TypeVar
+
+ @property
+ def args(self):
+ return ParamSpecArgs(self)
+
+ @property
+ def kwargs(self):
+ return ParamSpecKwargs(self)
+
+ def __init__(self, name, *, bound=None, covariant=False, contravariant=False):
+ super().__init__([self])
+ self.__name__ = name
+ self.__covariant__ = bool(covariant)
+ self.__contravariant__ = bool(contravariant)
+ if bound:
+ self.__bound__ = typing._type_check(bound, 'Bound must be a type.')
+ else:
+ self.__bound__ = None
+
+ # for pickling:
+ try:
+ def_mod = sys._getframe(1).f_globals.get('__name__', '__main__')
+ except (AttributeError, ValueError):
+ def_mod = None
+ if def_mod != 'typing_extensions':
+ self.__module__ = def_mod
+
+ def __repr__(self):
+ if self.__covariant__:
+ prefix = '+'
+ elif self.__contravariant__:
+ prefix = '-'
+ else:
+ prefix = '~'
+ return prefix + self.__name__
+
+ def __hash__(self):
+ return object.__hash__(self)
+
+ def __eq__(self, other):
+ return self is other
+
+ def __reduce__(self):
+ return self.__name__
+
+ # Hack to get typing._type_check to pass.
+ def __call__(self, *args, **kwargs):
+ pass
+
+ if not PEP_560:
+ # Only needed in 3.6.
+ def _get_type_vars(self, tvars):
+ if self not in tvars:
+ tvars.append(self)
+
+
+# 3.6-3.9
+if not hasattr(typing, 'Concatenate'):
+ # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
+ class _ConcatenateGenericAlias(list):
+
+ # Trick Generic into looking into this for __parameters__.
+ if PEP_560:
+ __class__ = typing._GenericAlias
+ else:
+ __class__ = typing._TypingBase
+
+ # Flag in 3.8.
+ _special = False
+ # Attribute in 3.6 and earlier.
+ _gorg = typing.Generic
+
+ def __init__(self, origin, args):
+ super().__init__(args)
+ self.__origin__ = origin
+ self.__args__ = args
+
+ def __repr__(self):
+ _type_repr = typing._type_repr
+ return (f'{_type_repr(self.__origin__)}'
+ f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]')
+
+ def __hash__(self):
+ return hash((self.__origin__, self.__args__))
+
+ # Hack to get typing._type_check to pass in Generic.
+ def __call__(self, *args, **kwargs):
+ pass
+
+ @property
+ def __parameters__(self):
+ return tuple(
+ tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec))
+ )
+
+ if not PEP_560:
+ # Only required in 3.6.
+ def _get_type_vars(self, tvars):
+ if self.__origin__ and self.__parameters__:
+ typing._get_type_vars(self.__parameters__, tvars)
+
+
+# 3.6-3.9
+@typing._tp_cache
+def _concatenate_getitem(self, parameters):
+ if parameters == ():
+ raise TypeError("Cannot take a Concatenate of no types.")
+ if not isinstance(parameters, tuple):
+ parameters = (parameters,)
+ if not isinstance(parameters[-1], ParamSpec):
+ raise TypeError("The last parameter to Concatenate should be a "
+ "ParamSpec variable.")
+ msg = "Concatenate[arg, ...]: each arg must be a type."
+ parameters = tuple(typing._type_check(p, msg) for p in parameters)
+ return _ConcatenateGenericAlias(self, parameters)
+
+
+# 3.10+
+if hasattr(typing, 'Concatenate'):
+ Concatenate = typing.Concatenate
+ _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa
+# 3.9
+elif sys.version_info[:2] >= (3, 9):
+ @_TypeAliasForm
+ def Concatenate(self, parameters):
+ """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
+ higher order function which adds, removes or transforms parameters of a
+ callable.
+
+ For example::
+
+ Callable[Concatenate[int, P], int]
+
+ See PEP 612 for detailed information.
+ """
+ return _concatenate_getitem(self, parameters)
+# 3.7-8
+elif sys.version_info[:2] >= (3, 7):
+ class _ConcatenateForm(typing._SpecialForm, _root=True):
+ def __repr__(self):
+ return 'typing_extensions.' + self._name
+
+ def __getitem__(self, parameters):
+ return _concatenate_getitem(self, parameters)
+
+ Concatenate = _ConcatenateForm(
+ 'Concatenate',
+ doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
+ higher order function which adds, removes or transforms parameters of a
+ callable.
+
+ For example::
+
+ Callable[Concatenate[int, P], int]
+
+ See PEP 612 for detailed information.
+ """)
+# 3.6
+else:
+ class _ConcatenateAliasMeta(typing.TypingMeta):
+ """Metaclass for Concatenate."""
+
+ def __repr__(self):
+ return 'typing_extensions.Concatenate'
+
+ class _ConcatenateAliasBase(typing._FinalTypingBase,
+ metaclass=_ConcatenateAliasMeta,
+ _root=True):
+ """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
+ higher order function which adds, removes or transforms parameters of a
+ callable.
+
+ For example::
+
+ Callable[Concatenate[int, P], int]
+
+ See PEP 612 for detailed information.
+ """
+ __slots__ = ()
+
+ def __instancecheck__(self, obj):
+ raise TypeError("Concatenate cannot be used with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError("Concatenate cannot be used with issubclass().")
+
+ def __repr__(self):
+ return 'typing_extensions.Concatenate'
+
+ def __getitem__(self, parameters):
+ return _concatenate_getitem(self, parameters)
+
+ Concatenate = _ConcatenateAliasBase(_root=True)
+
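A minimal, hypothetical sketch of the pattern ``Concatenate`` expresses -- a decorator that supplies the leading argument itself and forwards the rest of the signature (meaningful mainly to static checkers; at runtime the alias is only introspectable)::

    from typing import Callable, TypeVar
    from typing_extensions import Concatenate, ParamSpec

    P = ParamSpec('P')
    T = TypeVar('T')

    def with_request_id(f: Callable[Concatenate[int, P], T]) -> Callable[P, T]:
        def inner(*args: P.args, **kwargs: P.kwargs) -> T:
            return f(42, *args, **kwargs)   # 42 stands in for a real request id
        return inner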
+# 3.10+
+if hasattr(typing, 'TypeGuard'):
+ TypeGuard = typing.TypeGuard
+# 3.9
+elif sys.version_info[:2] >= (3, 9):
+ class _TypeGuardForm(typing._SpecialForm, _root=True):
+ def __repr__(self):
+ return 'typing_extensions.' + self._name
+
+ @_TypeGuardForm
+ def TypeGuard(self, parameters):
+ """Special typing form used to annotate the return type of a user-defined
+ type guard function. ``TypeGuard`` only accepts a single type argument.
+ At runtime, functions marked this way should return a boolean.
+
+ ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
+ type checkers to determine a more precise type of an expression within a
+ program's code flow. Usually type narrowing is done by analyzing
+ conditional code flow and applying the narrowing to a block of code. The
+ conditional expression here is sometimes referred to as a "type guard".
+
+ Sometimes it would be convenient to use a user-defined boolean function
+ as a type guard. Such a function should use ``TypeGuard[...]`` as its
+ return type to alert static type checkers to this intention.
+
+ Using ``-> TypeGuard`` tells the static type checker that for a given
+ function:
+
+ 1. The return value is a boolean.
+ 2. If the return value is ``True``, the type of its argument
+ is the type inside ``TypeGuard``.
+
+ For example::
+
+ def is_str(val: Union[str, float]):
+ # "isinstance" type guard
+ if isinstance(val, str):
+ # Type of ``val`` is narrowed to ``str``
+ ...
+ else:
+ # Else, type of ``val`` is narrowed to ``float``.
+ ...
+
+ Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
+ form of ``TypeA`` (it can even be a wider form) and this may lead to
+ type-unsafe results. The main reason is to allow for things like
+ narrowing ``List[object]`` to ``List[str]`` even though the latter is not
+ a subtype of the former, since ``List`` is invariant. The responsibility of
+ writing type-safe type guards is left to the user.
+
+ ``TypeGuard`` also works with type variables. For more information, see
+ PEP 647 (User-Defined Type Guards).
+ """
+ item = typing._type_check(parameters, f'{self} accepts only single type.')
+ return typing._GenericAlias(self, (item,))
+# 3.7-3.8
+elif sys.version_info[:2] >= (3, 7):
+ class _TypeGuardForm(typing._SpecialForm, _root=True):
+
+ def __repr__(self):
+ return 'typing_extensions.' + self._name
+
+ def __getitem__(self, parameters):
+ item = typing._type_check(parameters,
+ f'{self._name} accepts only a single type')
+ return typing._GenericAlias(self, (item,))
+
+ TypeGuard = _TypeGuardForm(
+ 'TypeGuard',
+ doc="""Special typing form used to annotate the return type of a user-defined
+ type guard function. ``TypeGuard`` only accepts a single type argument.
+ At runtime, functions marked this way should return a boolean.
+
+ ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
+ type checkers to determine a more precise type of an expression within a
+ program's code flow. Usually type narrowing is done by analyzing
+ conditional code flow and applying the narrowing to a block of code. The
+ conditional expression here is sometimes referred to as a "type guard".
+
+ Sometimes it would be convenient to use a user-defined boolean function
+ as a type guard. Such a function should use ``TypeGuard[...]`` as its
+ return type to alert static type checkers to this intention.
+
+ Using ``-> TypeGuard`` tells the static type checker that for a given
+ function:
+
+ 1. The return value is a boolean.
+ 2. If the return value is ``True``, the type of its argument
+ is the type inside ``TypeGuard``.
+
+ For example::
+
+ def is_str(val: Union[str, float]):
+ # "isinstance" type guard
+ if isinstance(val, str):
+ # Type of ``val`` is narrowed to ``str``
+ ...
+ else:
+ # Else, type of ``val`` is narrowed to ``float``.
+ ...
+
+ Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
+ form of ``TypeA`` (it can even be a wider form) and this may lead to
+ type-unsafe results. The main reason is to allow for things like
+ narrowing ``List[object]`` to ``List[str]`` even though the latter is not
+ a subtype of the former, since ``List`` is invariant. The responsibility of
+ writing type-safe type guards is left to the user.
+
+ ``TypeGuard`` also works with type variables. For more information, see
+ PEP 647 (User-Defined Type Guards).
+ """)
+# 3.6
+else:
+ class _TypeGuard(typing._FinalTypingBase, _root=True):
+ """Special typing form used to annotate the return type of a user-defined
+ type guard function. ``TypeGuard`` only accepts a single type argument.
+ At runtime, functions marked this way should return a boolean.
+
+ ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
+ type checkers to determine a more precise type of an expression within a
+ program's code flow. Usually type narrowing is done by analyzing
+ conditional code flow and applying the narrowing to a block of code. The
+ conditional expression here is sometimes referred to as a "type guard".
+
+ Sometimes it would be convenient to use a user-defined boolean function
+ as a type guard. Such a function should use ``TypeGuard[...]`` as its
+ return type to alert static type checkers to this intention.
+
+ Using ``-> TypeGuard`` tells the static type checker that for a given
+ function:
+
+ 1. The return value is a boolean.
+ 2. If the return value is ``True``, the type of its argument
+ is the type inside ``TypeGuard``.
+
+ For example::
+
+ def is_str(val: Union[str, float]):
+ # "isinstance" type guard
+ if isinstance(val, str):
+ # Type of ``val`` is narrowed to ``str``
+ ...
+ else:
+ # Else, type of ``val`` is narrowed to ``float``.
+ ...
+
+ Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
+ form of ``TypeA`` (it can even be a wider form) and this may lead to
+ type-unsafe results. The main reason is to allow for things like
+ narrowing ``List[object]`` to ``List[str]`` even though the latter is not
+ a subtype of the former, since ``List`` is invariant. The responsibility of
+ writing type-safe type guards is left to the user.
+
+ ``TypeGuard`` also works with type variables. For more information, see
+ PEP 647 (User-Defined Type Guards).
+ """
+
+ __slots__ = ('__type__',)
+
+ def __init__(self, tp=None, **kwds):
+ self.__type__ = tp
+
+ def __getitem__(self, item):
+ cls = type(self)
+ if self.__type__ is None:
+ return cls(typing._type_check(item,
+ f'{cls.__name__[1:]} accepts only a single type.'),
+ _root=True)
+ raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted')
+
+ def _eval_type(self, globalns, localns):
+ new_tp = typing._eval_type(self.__type__, globalns, localns)
+ if new_tp == self.__type__:
+ return self
+ return type(self)(new_tp, _root=True)
+
+ def __repr__(self):
+ r = super().__repr__()
+ if self.__type__ is not None:
+ r += f'[{typing._type_repr(self.__type__)}]'
+ return r
+
+ def __hash__(self):
+ return hash((type(self).__name__, self.__type__))
+
+ def __eq__(self, other):
+ if not isinstance(other, _TypeGuard):
+ return NotImplemented
+ if self.__type__ is not None:
+ return self.__type__ == other.__type__
+ return self is other
+
+ TypeGuard = _TypeGuard(_root=True)
+
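Because the docstring example above only demonstrates ``isinstance()`` narrowing, here is the canonical PEP 647 style of guard that actually uses ``TypeGuard`` as a return annotation::

    from typing import List, Union
    from typing_extensions import TypeGuard

    def is_str_list(val: List[object]) -> TypeGuard[List[str]]:
        """Determine whether all objects in the list are strings."""
        return all(isinstance(x, str) for x in val)

    def upper_all(val: Union[List[object], List[str]]) -> List[str]:
        if is_str_list(val):
            # A static checker narrows ``val`` to List[str] here.
            return [x.upper() for x in val]
        return []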
+if hasattr(typing, "Self"):
+ Self = typing.Self
+elif sys.version_info[:2] >= (3, 7):
+    # Vendored from CPython typing._SpecialForm
+ class _SpecialForm(typing._Final, _root=True):
+ __slots__ = ('_name', '__doc__', '_getitem')
+
+ def __init__(self, getitem):
+ self._getitem = getitem
+ self._name = getitem.__name__
+ self.__doc__ = getitem.__doc__
+
+ def __getattr__(self, item):
+ if item in {'__name__', '__qualname__'}:
+ return self._name
+
+ raise AttributeError(item)
+
+ def __mro_entries__(self, bases):
+ raise TypeError(f"Cannot subclass {self!r}")
+
+ def __repr__(self):
+ return f'typing_extensions.{self._name}'
+
+ def __reduce__(self):
+ return self._name
+
+ def __call__(self, *args, **kwds):
+ raise TypeError(f"Cannot instantiate {self!r}")
+
+ def __or__(self, other):
+ return typing.Union[self, other]
+
+ def __ror__(self, other):
+ return typing.Union[other, self]
+
+ def __instancecheck__(self, obj):
+ raise TypeError(f"{self} cannot be used with isinstance()")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError(f"{self} cannot be used with issubclass()")
+
+ @typing._tp_cache
+ def __getitem__(self, parameters):
+ return self._getitem(self, parameters)
+
+ @_SpecialForm
+ def Self(self, params):
+ """Used to spell the type of "self" in classes.
+
+ Example::
+
+ from typing import Self
+
+ class ReturnsSelf:
+ def parse(self, data: bytes) -> Self:
+ ...
+ return self
+
+ """
+
+ raise TypeError(f"{self} is not subscriptable")
+else:
+ class _Self(typing._FinalTypingBase, _root=True):
+ """Used to spell the type of "self" in classes.
+
+ Example::
+
+ from typing import Self
+
+ class ReturnsSelf:
+ def parse(self, data: bytes) -> Self:
+ ...
+ return self
+
+ """
+
+ __slots__ = ()
+
+ def __instancecheck__(self, obj):
+ raise TypeError(f"{self} cannot be used with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError(f"{self} cannot be used with issubclass().")
+
+ Self = _Self(_root=True)
+
+
+if hasattr(typing, 'Required'):
+ Required = typing.Required
+ NotRequired = typing.NotRequired
+elif sys.version_info[:2] >= (3, 9):
+ class _ExtensionsSpecialForm(typing._SpecialForm, _root=True):
+ def __repr__(self):
+ return 'typing_extensions.' + self._name
+
+ @_ExtensionsSpecialForm
+ def Required(self, parameters):
+ """A special typing construct to mark a key of a total=False TypedDict
+ as required. For example:
+
+ class Movie(TypedDict, total=False):
+ title: Required[str]
+ year: int
+
+ m = Movie(
+ title='The Matrix', # typechecker error if key is omitted
+ year=1999,
+ )
+
+ There is no runtime checking that a required key is actually provided
+ when instantiating a related TypedDict.
+ """
+ item = typing._type_check(parameters, f'{self._name} accepts only single type')
+ return typing._GenericAlias(self, (item,))
+
+ @_ExtensionsSpecialForm
+ def NotRequired(self, parameters):
+ """A special typing construct to mark a key of a TypedDict as
+ potentially missing. For example:
+
+ class Movie(TypedDict):
+ title: str
+ year: NotRequired[int]
+
+ m = Movie(
+ title='The Matrix', # typechecker error if key is omitted
+ year=1999,
+ )
+ """
+ item = typing._type_check(parameters, f'{self._name} accepts only single type')
+ return typing._GenericAlias(self, (item,))
+
+elif sys.version_info[:2] >= (3, 7):
+ class _RequiredForm(typing._SpecialForm, _root=True):
+ def __repr__(self):
+ return 'typing_extensions.' + self._name
+
+ def __getitem__(self, parameters):
+ item = typing._type_check(parameters,
+ '{} accepts only single type'.format(self._name))
+ return typing._GenericAlias(self, (item,))
+
+ Required = _RequiredForm(
+ 'Required',
+ doc="""A special typing construct to mark a key of a total=False TypedDict
+ as required. For example:
+
+ class Movie(TypedDict, total=False):
+ title: Required[str]
+ year: int
+
+ m = Movie(
+ title='The Matrix', # typechecker error if key is omitted
+ year=1999,
+ )
+
+ There is no runtime checking that a required key is actually provided
+ when instantiating a related TypedDict.
+ """)
+ NotRequired = _RequiredForm(
+ 'NotRequired',
+ doc="""A special typing construct to mark a key of a TypedDict as
+ potentially missing. For example:
+
+ class Movie(TypedDict):
+ title: str
+ year: NotRequired[int]
+
+ m = Movie(
+ title='The Matrix', # typechecker error if key is omitted
+ year=1999,
+ )
+ """)
+else:
+    # NOTE: Modeled after _Final's implementation when _FinalTypingBase is available
+ class _MaybeRequired(typing._FinalTypingBase, _root=True):
+ __slots__ = ('__type__',)
+
+ def __init__(self, tp=None, **kwds):
+ self.__type__ = tp
+
+ def __getitem__(self, item):
+ cls = type(self)
+ if self.__type__ is None:
+ return cls(typing._type_check(item,
+ '{} accepts only single type.'.format(cls.__name__[1:])),
+ _root=True)
+ raise TypeError('{} cannot be further subscripted'
+ .format(cls.__name__[1:]))
+
+ def _eval_type(self, globalns, localns):
+ new_tp = typing._eval_type(self.__type__, globalns, localns)
+ if new_tp == self.__type__:
+ return self
+ return type(self)(new_tp, _root=True)
+
+ def __repr__(self):
+ r = super().__repr__()
+ if self.__type__ is not None:
+ r += '[{}]'.format(typing._type_repr(self.__type__))
+ return r
+
+ def __hash__(self):
+ return hash((type(self).__name__, self.__type__))
+
+ def __eq__(self, other):
+ if not isinstance(other, type(self)):
+ return NotImplemented
+ if self.__type__ is not None:
+ return self.__type__ == other.__type__
+ return self is other
+
+ class _Required(_MaybeRequired, _root=True):
+ """A special typing construct to mark a key of a total=False TypedDict
+ as required. For example:
+
+ class Movie(TypedDict, total=False):
+ title: Required[str]
+ year: int
+
+ m = Movie(
+ title='The Matrix', # typechecker error if key is omitted
+ year=1999,
+ )
+
+ There is no runtime checking that a required key is actually provided
+ when instantiating a related TypedDict.
+ """
+
+ class _NotRequired(_MaybeRequired, _root=True):
+ """A special typing construct to mark a key of a TypedDict as
+ potentially missing. For example:
+
+ class Movie(TypedDict):
+ title: str
+ year: NotRequired[int]
+
+ m = Movie(
+ title='The Matrix', # typechecker error if key is omitted
+ year=1999,
+ )
+ """
+
+ Required = _Required(_root=True)
+ NotRequired = _NotRequired(_root=True)
diff --git a/setuptools/_vendor/vendored.txt b/setuptools/_vendor/vendored.txt
index be3e72e..cf0e531 100644
--- a/setuptools/_vendor/vendored.txt
+++ b/setuptools/_vendor/vendored.txt
@@ -1,3 +1,14 @@
-packaging==16.8
-pyparsing==2.1.10
-six==1.10.0
+packaging==21.3
+pyparsing==2.2.1
+ordered-set==3.1.1
+more_itertools==8.8.0
+jaraco.text==3.7.0
+importlib_resources==5.4.0
+importlib_metadata==4.11.1
+nspektr==0.3.0
+# required for importlib_metadata on older Pythons
+typing_extensions==4.0.1
+# required for importlib_resources and _metadata on older Pythons
+zipp==3.7.0
+tomli==2.0.1
+# validate-pyproject[all]==0.6 # Special handling in tools/vendored, don't uncomment or remove
diff --git a/setuptools/_vendor/zipp-3.7.0.dist-info/top_level.txt b/setuptools/_vendor/zipp-3.7.0.dist-info/top_level.txt
new file mode 100644
index 0000000..e82f676
--- /dev/null
+++ b/setuptools/_vendor/zipp-3.7.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+zipp
diff --git a/setuptools/_vendor/zipp.py b/setuptools/_vendor/zipp.py
new file mode 100644
index 0000000..26b723c
--- /dev/null
+++ b/setuptools/_vendor/zipp.py
@@ -0,0 +1,329 @@
+import io
+import posixpath
+import zipfile
+import itertools
+import contextlib
+import sys
+import pathlib
+
+if sys.version_info < (3, 7):
+ from collections import OrderedDict
+else:
+ OrderedDict = dict
+
+
+__all__ = ['Path']
+
+
+def _parents(path):
+ """
+ Given a path with elements separated by
+ posixpath.sep, generate all parents of that path.
+
+ >>> list(_parents('b/d'))
+ ['b']
+ >>> list(_parents('/b/d/'))
+ ['/b']
+ >>> list(_parents('b/d/f/'))
+ ['b/d', 'b']
+ >>> list(_parents('b'))
+ []
+ >>> list(_parents(''))
+ []
+ """
+ return itertools.islice(_ancestry(path), 1, None)
+
+
+def _ancestry(path):
+ """
+ Given a path with elements separated by
+ posixpath.sep, generate all elements of that path
+
+ >>> list(_ancestry('b/d'))
+ ['b/d', 'b']
+ >>> list(_ancestry('/b/d/'))
+ ['/b/d', '/b']
+ >>> list(_ancestry('b/d/f/'))
+ ['b/d/f', 'b/d', 'b']
+ >>> list(_ancestry('b'))
+ ['b']
+ >>> list(_ancestry(''))
+ []
+ """
+ path = path.rstrip(posixpath.sep)
+ while path and path != posixpath.sep:
+ yield path
+ path, tail = posixpath.split(path)
+
+
+_dedupe = OrderedDict.fromkeys
+"""Deduplicate an iterable in original order"""
+
+
+def _difference(minuend, subtrahend):
+ """
+ Return items in minuend not in subtrahend, retaining order
+ with O(1) lookup.
+ """
+ return itertools.filterfalse(set(subtrahend).__contains__, minuend)
+
+
+class CompleteDirs(zipfile.ZipFile):
+ """
+ A ZipFile subclass that ensures that implied directories
+ are always included in the namelist.
+ """
+
+ @staticmethod
+ def _implied_dirs(names):
+ parents = itertools.chain.from_iterable(map(_parents, names))
+ as_dirs = (p + posixpath.sep for p in parents)
+ return _dedupe(_difference(as_dirs, names))
+
+ def namelist(self):
+ names = super(CompleteDirs, self).namelist()
+ return names + list(self._implied_dirs(names))
+
+ def _name_set(self):
+ return set(self.namelist())
+
+ def resolve_dir(self, name):
+ """
+ If the name represents a directory, return that name
+ as a directory (with the trailing slash).
+ """
+ names = self._name_set()
+ dirname = name + '/'
+ dir_match = name not in names and dirname in names
+ return dirname if dir_match else name
+
+ @classmethod
+ def make(cls, source):
+ """
+ Given a source (filename or zipfile), return an
+ appropriate CompleteDirs subclass.
+ """
+ if isinstance(source, CompleteDirs):
+ return source
+
+ if not isinstance(source, zipfile.ZipFile):
+ return cls(_pathlib_compat(source))
+
+ # Only allow for FastLookup when supplied zipfile is read-only
+ if 'r' not in source.mode:
+ cls = CompleteDirs
+
+ source.__class__ = cls
+ return source
+
+
+class FastLookup(CompleteDirs):
+ """
+ ZipFile subclass to ensure implicit
+ dirs exist and are resolved rapidly.
+ """
+
+ def namelist(self):
+ with contextlib.suppress(AttributeError):
+ return self.__names
+ self.__names = super(FastLookup, self).namelist()
+ return self.__names
+
+ def _name_set(self):
+ with contextlib.suppress(AttributeError):
+ return self.__lookup
+ self.__lookup = super(FastLookup, self)._name_set()
+ return self.__lookup
+
+
+def _pathlib_compat(path):
+ """
+ For path-like objects, convert to a filename for compatibility
+ on Python 3.6.1 and earlier.
+ """
+ try:
+ return path.__fspath__()
+ except AttributeError:
+ return str(path)
+
+
+class Path:
+ """
+ A pathlib-compatible interface for zip files.
+
+ Consider a zip file with this structure::
+
+ .
+ ├── a.txt
+ └── b
+ ├── c.txt
+ └── d
+ └── e.txt
+
+ >>> data = io.BytesIO()
+ >>> zf = zipfile.ZipFile(data, 'w')
+ >>> zf.writestr('a.txt', 'content of a')
+ >>> zf.writestr('b/c.txt', 'content of c')
+ >>> zf.writestr('b/d/e.txt', 'content of e')
+ >>> zf.filename = 'mem/abcde.zip'
+
+ Path accepts the zipfile object itself or a filename
+
+ >>> root = Path(zf)
+
+ From there, several path operations are available.
+
+ Directory iteration (including the zip file itself):
+
+ >>> a, b = root.iterdir()
+ >>> a
+ Path('mem/abcde.zip', 'a.txt')
+ >>> b
+ Path('mem/abcde.zip', 'b/')
+
+ name property:
+
+ >>> b.name
+ 'b'
+
+ join with divide operator:
+
+ >>> c = b / 'c.txt'
+ >>> c
+ Path('mem/abcde.zip', 'b/c.txt')
+ >>> c.name
+ 'c.txt'
+
+ Read text:
+
+ >>> c.read_text()
+ 'content of c'
+
+ existence:
+
+ >>> c.exists()
+ True
+ >>> (b / 'missing.txt').exists()
+ False
+
+ Coercion to string:
+
+ >>> import os
+ >>> str(c).replace(os.sep, posixpath.sep)
+ 'mem/abcde.zip/b/c.txt'
+
+ At the root, ``name``, ``filename``, and ``parent``
+ resolve to the zipfile. Note these attributes are not
+ valid and will raise a ``ValueError`` if the zipfile
+ has no filename.
+
+ >>> root.name
+ 'abcde.zip'
+ >>> str(root.filename).replace(os.sep, posixpath.sep)
+ 'mem/abcde.zip'
+ >>> str(root.parent)
+ 'mem'
+ """
+
+ __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"
+
+ def __init__(self, root, at=""):
+ """
+ Construct a Path from a ZipFile or filename.
+
+ Note: When the source is an existing ZipFile object,
+ its type (__class__) will be mutated to a
+ specialized type. If the caller wishes to retain the
+ original type, the caller should either create a
+ separate ZipFile object or pass a filename.
+ """
+ self.root = FastLookup.make(root)
+ self.at = at
+
+ def open(self, mode='r', *args, pwd=None, **kwargs):
+ """
+ Open this entry as text or binary following the semantics
+ of ``pathlib.Path.open()`` by passing arguments through
+ to io.TextIOWrapper().
+ """
+ if self.is_dir():
+ raise IsADirectoryError(self)
+ zip_mode = mode[0]
+ if not self.exists() and zip_mode == 'r':
+ raise FileNotFoundError(self)
+ stream = self.root.open(self.at, zip_mode, pwd=pwd)
+ if 'b' in mode:
+ if args or kwargs:
+ raise ValueError("encoding args invalid for binary operation")
+ return stream
+ return io.TextIOWrapper(stream, *args, **kwargs)
+
+ @property
+ def name(self):
+ return pathlib.Path(self.at).name or self.filename.name
+
+ @property
+ def suffix(self):
+ return pathlib.Path(self.at).suffix or self.filename.suffix
+
+ @property
+ def suffixes(self):
+ return pathlib.Path(self.at).suffixes or self.filename.suffixes
+
+ @property
+ def stem(self):
+ return pathlib.Path(self.at).stem or self.filename.stem
+
+ @property
+ def filename(self):
+ return pathlib.Path(self.root.filename).joinpath(self.at)
+
+ def read_text(self, *args, **kwargs):
+ with self.open('r', *args, **kwargs) as strm:
+ return strm.read()
+
+ def read_bytes(self):
+ with self.open('rb') as strm:
+ return strm.read()
+
+ def _is_child(self, path):
+ return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/")
+
+ def _next(self, at):
+ return self.__class__(self.root, at)
+
+ def is_dir(self):
+ return not self.at or self.at.endswith("/")
+
+ def is_file(self):
+ return self.exists() and not self.is_dir()
+
+ def exists(self):
+ return self.at in self.root._name_set()
+
+ def iterdir(self):
+ if not self.is_dir():
+ raise ValueError("Can't listdir a file")
+ subs = map(self._next, self.root.namelist())
+ return filter(self._is_child, subs)
+
+ def __str__(self):
+ return posixpath.join(self.root.filename, self.at)
+
+ def __repr__(self):
+ return self.__repr.format(self=self)
+
+ def joinpath(self, *other):
+ next = posixpath.join(self.at, *map(_pathlib_compat, other))
+ return self._next(self.root.resolve_dir(next))
+
+ __truediv__ = joinpath
+
+ @property
+ def parent(self):
+ if not self.at:
+ return self.filename.parent
+ parent_at = posixpath.dirname(self.at.rstrip('/'))
+ if parent_at:
+ parent_at += '/'
+ return self._next(parent_at)
diff --git a/setuptools/archive_util.py b/setuptools/archive_util.py
index 8143604..73b2db7 100755..100644
--- a/setuptools/archive_util.py
+++ b/setuptools/archive_util.py
@@ -8,7 +8,7 @@ import posixpath
import contextlib
from distutils.errors import DistutilsError
-from pkg_resources import ensure_directory
+from ._path import ensure_directory
__all__ = [
"unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
@@ -25,7 +25,8 @@ def default_filter(src, dst):
return dst
-def unpack_archive(filename, extract_dir, progress_filter=default_filter,
+def unpack_archive(
+ filename, extract_dir, progress_filter=default_filter,
drivers=None):
"""Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
@@ -124,6 +125,56 @@ def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
os.chmod(target, unix_attributes)
+def _resolve_tar_file_or_dir(tar_obj, tar_member_obj):
+ """Resolve any links and extract link targets as normal files."""
+ while tar_member_obj is not None and (
+ tar_member_obj.islnk() or tar_member_obj.issym()):
+ linkpath = tar_member_obj.linkname
+ if tar_member_obj.issym():
+ base = posixpath.dirname(tar_member_obj.name)
+ linkpath = posixpath.join(base, linkpath)
+ linkpath = posixpath.normpath(linkpath)
+ tar_member_obj = tar_obj._getmember(linkpath)
+
+ is_file_or_dir = (
+ tar_member_obj is not None and
+ (tar_member_obj.isfile() or tar_member_obj.isdir())
+ )
+ if is_file_or_dir:
+ return tar_member_obj
+
+ raise LookupError('Got unknown file type')
+
+
+def _iter_open_tar(tar_obj, extract_dir, progress_filter):
+ """Emit member-destination pairs from a tar archive."""
+ # don't do any chowning!
+ tar_obj.chown = lambda *args: None
+
+ with contextlib.closing(tar_obj):
+ for member in tar_obj:
+ name = member.name
+ # don't extract absolute paths or ones with .. in them
+ if name.startswith('/') or '..' in name.split('/'):
+ continue
+
+ prelim_dst = os.path.join(extract_dir, *name.split('/'))
+
+ try:
+ member = _resolve_tar_file_or_dir(tar_obj, member)
+ except LookupError:
+ continue
+
+ final_dst = progress_filter(name, prelim_dst)
+ if not final_dst:
+ continue
+
+ if final_dst.endswith(os.sep):
+ final_dst = final_dst[:-1]
+
+ yield member, final_dst
+
+
def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
"""Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
@@ -133,41 +184,22 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
"""
try:
tarobj = tarfile.open(filename)
- except tarfile.TarError:
+ except tarfile.TarError as e:
raise UnrecognizedFormat(
"%s is not a compressed or uncompressed tar file" % (filename,)
- )
- with contextlib.closing(tarobj):
- # don't do any chowning!
- tarobj.chown = lambda *args: None
- for member in tarobj:
- name = member.name
- # don't extract absolute paths or ones with .. in them
- if not name.startswith('/') and '..' not in name.split('/'):
- prelim_dst = os.path.join(extract_dir, *name.split('/'))
-
- # resolve any links and to extract the link targets as normal
- # files
- while member is not None and (member.islnk() or member.issym()):
- linkpath = member.linkname
- if member.issym():
- base = posixpath.dirname(member.name)
- linkpath = posixpath.join(base, linkpath)
- linkpath = posixpath.normpath(linkpath)
- member = tarobj._getmember(linkpath)
-
- if member is not None and (member.isfile() or member.isdir()):
- final_dst = progress_filter(name, prelim_dst)
- if final_dst:
- if final_dst.endswith(os.sep):
- final_dst = final_dst[:-1]
- try:
- # XXX Ugh
- tarobj._extract_member(member, final_dst)
- except tarfile.ExtractError:
- # chown/chmod/mkfifo/mknode/makedev failed
- pass
- return True
+ ) from e
+
+ for member, final_dst in _iter_open_tar(
+ tarobj, extract_dir, progress_filter,
+ ):
+ try:
+ # XXX Ugh
+ tarobj._extract_member(member, final_dst)
+ except tarfile.ExtractError:
+ # chown/chmod/mkfifo/mknode/makedev failed
+ pass
+
+ return True
extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
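For context, a hedged sketch of how these drivers are usually reached through ``unpack_archive``, with a filter that both reports progress and can veto individual members (the paths are hypothetical)::

    from setuptools.archive_util import unpack_archive

    def my_filter(src, dst):
        # Return the (possibly rewritten) destination, or a false value
        # to skip this member entirely.
        if src.endswith('.pyc'):
            return None
        print('extracting', src)
        return dst

    unpack_archive('dist/example-1.0.tar.gz', 'build/example-1.0',
                   progress_filter=my_filter)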
diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index 609ea1e..5dc65e2 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -26,14 +26,28 @@ bug reports or API stability):
Again, this is not a formal definition! Just a "taste" of the module.
"""
+import io
import os
import sys
import tokenize
import shutil
import contextlib
+import tempfile
+import warnings
import setuptools
import distutils
+from ._reqs import parse_strings
+from .extern.more_itertools import always_iterable
+
+
+__all__ = ['get_requires_for_build_sdist',
+ 'get_requires_for_build_wheel',
+ 'prepare_metadata_for_build_wheel',
+ 'build_wheel',
+ 'build_sdist',
+ '__legacy__',
+ 'SetupRequirementsError']
class SetupRequirementsError(BaseException):
@@ -43,7 +57,9 @@ class SetupRequirementsError(BaseException):
class Distribution(setuptools.dist.Distribution):
def fetch_build_eggs(self, specifiers):
- raise SetupRequirementsError(specifiers)
+ specifier_list = list(parse_strings(specifiers))
+
+ raise SetupRequirementsError(specifier_list)
@classmethod
@contextlib.contextmanager
@@ -61,36 +77,20 @@ class Distribution(setuptools.dist.Distribution):
distutils.core.Distribution = orig
-def _run_setup(setup_script='setup.py'):
- # Note that we can reuse our build directory between calls
- # Correctness comes first, then optimization later
- __file__ = setup_script
- __name__ = '__main__'
- f = getattr(tokenize, 'open', open)(__file__)
- code = f.read().replace('\\r\\n', '\\n')
- f.close()
- exec(compile(code, __file__, 'exec'), locals())
-
-
-def _fix_config(config_settings):
- config_settings = config_settings or {}
- config_settings.setdefault('--global-option', [])
- return config_settings
-
-
-def _get_build_requires(config_settings):
- config_settings = _fix_config(config_settings)
- requirements = ['setuptools', 'wheel']
+@contextlib.contextmanager
+def no_install_setup_requires():
+ """Temporarily disable installing setup_requires
- sys.argv = sys.argv[:1] + ['egg_info'] + \
- config_settings["--global-option"]
+ Under PEP 517, the backend reports build dependencies to the frontend,
+ and the frontend is responsible for ensuring they're installed.
+ So setuptools (acting as a backend) should not try to install them.
+ """
+ orig = setuptools._install_setup_requires
+ setuptools._install_setup_requires = lambda attrs: None
try:
- with Distribution.patch():
- _run_setup()
- except SetupRequirementsError as e:
- requirements += e.specifiers
-
- return requirements
+ yield
+ finally:
+ setuptools._install_setup_requires = orig
def _get_immediate_subdirectories(a_dir):
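
no_install_setup_requires is the usual patch-and-restore idiom: swap a hook for a no-op and guarantee restoration with try/finally even if the body raises. A generic sketch against a hypothetical stand-in object (only the attribute name mirrors the hook patched above):

    import contextlib
    import types

    # Hypothetical stand-in for the real setuptools module.
    fake_setuptools = types.SimpleNamespace(
        _install_setup_requires=lambda attrs: print('would install', attrs))

    @contextlib.contextmanager
    def no_install(mod):
        orig = mod._install_setup_requires
        mod._install_setup_requires = lambda attrs: None   # no-op while active
        try:
            yield
        finally:
            mod._install_setup_requires = orig              # always restored

    with no_install(fake_setuptools):
        fake_setuptools._install_setup_requires({'setup_requires': ['demo']})
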
@@ -98,75 +98,207 @@ def _get_immediate_subdirectories(a_dir):
if os.path.isdir(os.path.join(a_dir, name))]
-def get_requires_for_build_wheel(config_settings=None):
- config_settings = _fix_config(config_settings)
- return _get_build_requires(config_settings)
+def _file_with_extension(directory, extension):
+ matching = (
+ f for f in os.listdir(directory)
+ if f.endswith(extension)
+ )
+ try:
+ file, = matching
+ except ValueError:
+ raise ValueError(
+ 'No distribution was found. Ensure that `setup.py` '
+ 'is not empty and that it calls `setup()`.')
+ return file
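
_file_with_extension relies on single-element tuple unpacking to assert that exactly one artifact matches; zero or several matches raise ValueError. A tiny illustration with a hypothetical directory listing:

    files = ['demo-0.1.tar.gz']                            # hypothetical dist dir
    name, = (f for f in files if f.endswith('.tar.gz'))    # ValueError if 0 or >1
    print(name)
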
-def get_requires_for_build_sdist(config_settings=None):
- config_settings = _fix_config(config_settings)
- return _get_build_requires(config_settings)
+def _open_setup_script(setup_script):
+ if not os.path.exists(setup_script):
+ # Supply a default setup.py
+ return io.StringIO(u"from setuptools import setup; setup()")
+ return getattr(tokenize, 'open', open)(setup_script)
-def prepare_metadata_for_build_wheel(metadata_directory, config_settings=None):
- sys.argv = sys.argv[:1] + ['dist_info', '--egg-base', metadata_directory]
- _run_setup()
-
- dist_info_directory = metadata_directory
- while True:
- dist_infos = [f for f in os.listdir(dist_info_directory)
- if f.endswith('.dist-info')]
- if len(dist_infos) == 0 and \
- len(_get_immediate_subdirectories(dist_info_directory)) == 1:
- dist_info_directory = os.path.join(
- dist_info_directory, os.listdir(dist_info_directory)[0])
- continue
+@contextlib.contextmanager
+def suppress_known_deprecation():
+ with warnings.catch_warnings():
+ warnings.filterwarnings('ignore', 'setup.py install is deprecated')
+ yield
- assert len(dist_infos) == 1
- break
- # PEP 517 requires that the .dist-info directory be placed in the
- # metadata_directory. To comply, we MUST copy the directory to the root
- if dist_info_directory != metadata_directory:
- shutil.move(
- os.path.join(dist_info_directory, dist_infos[0]),
- metadata_directory)
- shutil.rmtree(dist_info_directory, ignore_errors=True)
+class _BuildMetaBackend:
- return dist_infos[0]
+ @staticmethod
+ def _fix_config(config_settings):
+ """
+ Ensure config settings meet certain expectations.
+
+ >>> fc = _BuildMetaBackend._fix_config
+ >>> fc(None)
+ {'--global-option': []}
+ >>> fc({})
+ {'--global-option': []}
+ >>> fc({'--global-option': 'foo'})
+ {'--global-option': ['foo']}
+ >>> fc({'--global-option': ['foo']})
+ {'--global-option': ['foo']}
+ """
+ config_settings = config_settings or {}
+ config_settings['--global-option'] = list(always_iterable(
+ config_settings.get('--global-option')))
+ return config_settings
+ def _get_build_requires(self, config_settings, requirements):
+ config_settings = self._fix_config(config_settings)
-def build_wheel(wheel_directory, config_settings=None,
- metadata_directory=None):
- config_settings = _fix_config(config_settings)
- wheel_directory = os.path.abspath(wheel_directory)
- sys.argv = sys.argv[:1] + ['bdist_wheel'] + \
- config_settings["--global-option"]
- _run_setup()
- if wheel_directory != 'dist':
- shutil.rmtree(wheel_directory)
- shutil.copytree('dist', wheel_directory)
+ sys.argv = sys.argv[:1] + ['egg_info'] + \
+ config_settings["--global-option"]
+ try:
+ with Distribution.patch():
+ self.run_setup()
+ except SetupRequirementsError as e:
+ requirements += e.specifiers
+
+ return requirements
+
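
_get_build_requires discovers setup_requires without installing anything: under Distribution.patch(), any call to fetch_build_eggs raises SetupRequirementsError, and the caught specifiers are appended to the requirements reported to the frontend. A self-contained sketch using a local stand-in for that exception class:

    class _SetupRequirementsError(BaseException):
        # Stand-in mirroring SetupRequirementsError defined earlier in this
        # module: it just records the requested specifiers.
        def __init__(self, specifiers):
            self.specifiers = specifiers

    requirements = ['wheel']
    try:
        raise _SetupRequirementsError(['demo>=1.0'])   # what fetch_build_eggs does
    except _SetupRequirementsError as e:
        requirements += e.specifiers
    print(requirements)   # ['wheel', 'demo>=1.0']
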
+ def run_setup(self, setup_script='setup.py'):
+ # Note that we can reuse our build directory between calls
+ # Correctness comes first, then optimization later
+ __file__ = setup_script
+ __name__ = '__main__'
+
+ with _open_setup_script(__file__) as f:
+ code = f.read().replace(r'\r\n', r'\n')
+
+ exec(compile(code, __file__, 'exec'), locals())
+
+ def get_requires_for_build_wheel(self, config_settings=None):
+ return self._get_build_requires(
+ config_settings, requirements=['wheel'])
+
+ def get_requires_for_build_sdist(self, config_settings=None):
+ return self._get_build_requires(config_settings, requirements=[])
+
+ def prepare_metadata_for_build_wheel(self, metadata_directory,
+ config_settings=None):
+ sys.argv = sys.argv[:1] + [
+ 'dist_info', '--egg-base', metadata_directory]
+ with no_install_setup_requires():
+ self.run_setup()
+
+ dist_info_directory = metadata_directory
+ while True:
+ dist_infos = [f for f in os.listdir(dist_info_directory)
+ if f.endswith('.dist-info')]
+
+ if (
+ len(dist_infos) == 0 and
+ len(_get_immediate_subdirectories(dist_info_directory)) == 1
+ ):
+
+ dist_info_directory = os.path.join(
+ dist_info_directory, os.listdir(dist_info_directory)[0])
+ continue
+
+ assert len(dist_infos) == 1
+ break
+
+ # PEP 517 requires that the .dist-info directory be placed in the
+ # metadata_directory. To comply, we MUST copy the directory to the root
+ if dist_info_directory != metadata_directory:
+ shutil.move(
+ os.path.join(dist_info_directory, dist_infos[0]),
+ metadata_directory)
+ shutil.rmtree(dist_info_directory, ignore_errors=True)
+
+ return dist_infos[0]
+
+ def _build_with_temp_dir(self, setup_command, result_extension,
+ result_directory, config_settings):
+ config_settings = self._fix_config(config_settings)
+ result_directory = os.path.abspath(result_directory)
+
+ # Build in a temporary directory, then copy to the target.
+ os.makedirs(result_directory, exist_ok=True)
+ with tempfile.TemporaryDirectory(dir=result_directory) as tmp_dist_dir:
+ sys.argv = (sys.argv[:1] + setup_command +
+ ['--dist-dir', tmp_dist_dir] +
+ config_settings["--global-option"])
+ with no_install_setup_requires():
+ self.run_setup()
+
+ result_basename = _file_with_extension(
+ tmp_dist_dir, result_extension)
+ result_path = os.path.join(result_directory, result_basename)
+ if os.path.exists(result_path):
+ # os.rename will fail overwriting on non-Unix.
+ os.remove(result_path)
+ os.rename(os.path.join(tmp_dist_dir, result_basename), result_path)
+
+ return result_basename
+
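
_build_with_temp_dir builds into a temporary directory created inside the destination and then renames the single result into place, removing any pre-existing file first because os.rename cannot overwrite on Windows. A standalone sketch of that pattern with hypothetical paths:

    import os
    import tempfile

    dest = os.path.abspath('dist')
    os.makedirs(dest, exist_ok=True)
    with tempfile.TemporaryDirectory(dir=dest) as tmp:
        artifact = os.path.join(tmp, 'demo-0.1.tar.gz')   # produced by the build
        open(artifact, 'wb').close()
        final = os.path.join(dest, os.path.basename(artifact))
        if os.path.exists(final):
            os.remove(final)        # os.rename would fail overwriting on Windows
        os.rename(artifact, final)
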
+ def build_wheel(self, wheel_directory, config_settings=None,
+ metadata_directory=None):
+ with suppress_known_deprecation():
+ return self._build_with_temp_dir(['bdist_wheel'], '.whl',
+ wheel_directory, config_settings)
+
+ def build_sdist(self, sdist_directory, config_settings=None):
+ return self._build_with_temp_dir(['sdist', '--formats', 'gztar'],
+ '.tar.gz', sdist_directory,
+ config_settings)
+
+
+class _BuildMetaLegacyBackend(_BuildMetaBackend):
+ """Compatibility backend for setuptools
+
+    This is a version of setuptools.build_meta that endeavors to maintain
+    backwards compatibility with pre-PEP 517 modes of invocation. It exists
+    as a temporary bridge between the old packaging mechanism and the new
+    packaging mechanism, and will eventually be removed.
+ """
+ def run_setup(self, setup_script='setup.py'):
+ # In order to maintain compatibility with scripts assuming that
+ # the setup.py script is in a directory on the PYTHONPATH, inject
+ # '' into sys.path. (pypa/setuptools#1642)
+ sys_path = list(sys.path) # Save the original path
+
+ script_dir = os.path.dirname(os.path.abspath(setup_script))
+ if script_dir not in sys.path:
+ sys.path.insert(0, script_dir)
+
+ # Some setup.py scripts (e.g. in pygame and numpy) use sys.argv[0] to
+ # get the directory of the source code. They expect it to refer to the
+ # setup.py script.
+ sys_argv_0 = sys.argv[0]
+ sys.argv[0] = setup_script
- wheels = [f for f in os.listdir(wheel_directory)
- if f.endswith('.whl')]
+ try:
+ super(_BuildMetaLegacyBackend,
+ self).run_setup(setup_script=setup_script)
+ finally:
+ # While PEP 517 frontends should be calling each hook in a fresh
+ # subprocess according to the standard (and thus it should not be
+ # strictly necessary to restore the old sys.path), we'll restore
+ # the original path so that the path manipulation does not persist
+ # within the hook after run_setup is called.
+ sys.path[:] = sys_path
+ sys.argv[0] = sys_argv_0
- assert len(wheels) == 1
- return wheels[0]
+# The primary backend
+_BACKEND = _BuildMetaBackend()
-def build_sdist(sdist_directory, config_settings=None):
- config_settings = _fix_config(config_settings)
- sdist_directory = os.path.abspath(sdist_directory)
- sys.argv = sys.argv[:1] + ['sdist'] + \
- config_settings["--global-option"]
- _run_setup()
- if sdist_directory != 'dist':
- shutil.rmtree(sdist_directory)
- shutil.copytree('dist', sdist_directory)
+get_requires_for_build_wheel = _BACKEND.get_requires_for_build_wheel
+get_requires_for_build_sdist = _BACKEND.get_requires_for_build_sdist
+prepare_metadata_for_build_wheel = _BACKEND.prepare_metadata_for_build_wheel
+build_wheel = _BACKEND.build_wheel
+build_sdist = _BACKEND.build_sdist
- sdists = [f for f in os.listdir(sdist_directory)
- if f.endswith('.tar.gz')]
- assert len(sdists) == 1
- return sdists[0]
+# The legacy backend
+__legacy__ = _BuildMetaLegacyBackend()
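
With the backend now a class instance whose bound methods are re-exported at module level, a PEP 517 frontend imports setuptools.build_meta and calls the hooks. Real frontends do this in a fresh subprocess from the project root with the reported requirements already installed; the in-process sketch below assumes it is run from a directory containing the project's setup.py or setup.cfg:

    import tempfile
    from setuptools import build_meta

    reqs = build_meta.get_requires_for_build_wheel()   # e.g. ['wheel']
    target = tempfile.mkdtemp()
    wheel_name = build_meta.build_wheel(target)        # file created under target
    print(reqs, wheel_name)
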
diff --git a/setuptools/cli-arm64.exe b/setuptools/cli-arm64.exe
new file mode 100644
index 0000000..7a87ce4
--- /dev/null
+++ b/setuptools/cli-arm64.exe
Binary files differ
diff --git a/setuptools/command/__init__.py b/setuptools/command/__init__.py
index fe619e2..b966dce 100644
--- a/setuptools/command/__init__.py
+++ b/setuptools/command/__init__.py
@@ -1,16 +1,6 @@
-__all__ = [
- 'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop',
- 'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts',
- 'sdist', 'setopt', 'test', 'install_egg_info', 'install_scripts',
- 'register', 'bdist_wininst', 'upload_docs', 'upload', 'build_clib',
- 'dist_info',
-]
-
from distutils.command.bdist import bdist
import sys
-from setuptools.command import install_scripts
-
if 'egg' not in bdist.format_commands:
bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
bdist.format_commands.append('egg')
diff --git a/setuptools/command/alias.py b/setuptools/command/alias.py
index 4532b1c..452a924 100755..100644
--- a/setuptools/command/alias.py
+++ b/setuptools/command/alias.py
@@ -1,7 +1,5 @@
from distutils.errors import DistutilsOptionError
-from setuptools.extern.six.moves import map
-
from setuptools.command.setopt import edit_config, option_base, config_file
diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py
index 423b818..11a1c6b 100644
--- a/setuptools/command/bdist_egg.py
+++ b/setuptools/command/bdist_egg.py
@@ -2,7 +2,6 @@
Build .egg distributions"""
-from distutils.errors import DistutilsSetupError
from distutils.dir_util import remove_tree, mkpath
from distutils import log
from types import CodeType
@@ -12,24 +11,16 @@ import re
import textwrap
import marshal
-from setuptools.extern import six
-
-from pkg_resources import get_build_platform, Distribution, ensure_directory
-from pkg_resources import EntryPoint
+from pkg_resources import get_build_platform, Distribution
from setuptools.extension import Library
from setuptools import Command
+from .._path import ensure_directory
-try:
- # Python 2.7 or >=3.2
- from sysconfig import get_path, get_python_version
+from sysconfig import get_path, get_python_version
- def _get_purelib():
- return get_path("purelib")
-except ImportError:
- from distutils.sysconfig import get_python_lib, get_python_version
- def _get_purelib():
- return get_python_lib(False)
+def _get_purelib():
+ return get_path("purelib")
def strip_module(filename):
@@ -54,10 +45,12 @@ def write_stub(resource, pyfile):
_stub_template = textwrap.dedent("""
def __bootstrap__():
global __bootstrap__, __loader__, __file__
- import sys, pkg_resources, imp
+ import sys, pkg_resources, importlib.util
__file__ = pkg_resources.resource_filename(__name__, %r)
__loader__ = None; del __bootstrap__, __loader__
- imp.load_dynamic(__name__,__file__)
+ spec = importlib.util.spec_from_file_location(__name__,__file__)
+ mod = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(mod)
__bootstrap__()
""").lstrip()
with open(pyfile, 'w') as f:
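
The regenerated stub replaces imp.load_dynamic with the importlib.util three-step: build a spec from the file location, create a module from the spec, then execute it. The same pattern as a standalone helper (hypothetical name), runnable for any importable file path:

    import importlib.util

    def load_from_path(name, path):
        # spec_from_file_location -> module_from_spec -> exec_module is the
        # modern replacement for imp.load_dynamic()/imp.load_source().
        spec = importlib.util.spec_from_file_location(name, path)
        mod = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(mod)
        return mod
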
@@ -158,7 +151,7 @@ class bdist_egg(Command):
self.run_command(cmdname)
return cmd
- def run(self):
+ def run(self): # noqa: C901 # is too complex (14) # FIXME
# Generate metadata first
self.run_command("egg_info")
# We run install_lib before install_data, because some data hacks
@@ -273,43 +266,7 @@ class bdist_egg(Command):
return analyze_egg(self.bdist_dir, self.stubs)
def gen_header(self):
- epm = EntryPoint.parse_map(self.distribution.entry_points or '')
- ep = epm.get('setuptools.installation', {}).get('eggsecutable')
- if ep is None:
- return 'w' # not an eggsecutable, do it the usual way.
-
- if not ep.attrs or ep.extras:
- raise DistutilsSetupError(
- "eggsecutable entry point (%r) cannot have 'extras' "
- "or refer to a module" % (ep,)
- )
-
- pyver = sys.version[:3]
- pkg = ep.module_name
- full = '.'.join(ep.attrs)
- base = ep.attrs[0]
- basename = os.path.basename(self.egg_output)
-
- header = (
- "#!/bin/sh\n"
- 'if [ `basename $0` = "%(basename)s" ]\n'
- 'then exec python%(pyver)s -c "'
- "import sys, os; sys.path.insert(0, os.path.abspath('$0')); "
- "from %(pkg)s import %(base)s; sys.exit(%(full)s())"
- '" "$@"\n'
- 'else\n'
- ' echo $0 is not the correct name for this egg file.\n'
- ' echo Please rename it back to %(basename)s and try again.\n'
- ' exec false\n'
- 'fi\n'
- ) % locals()
-
- if not self.dry_run:
- mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run)
- f = open(self.egg_output, 'w')
- f.write(header)
- f.close()
- return 'a'
+ return 'w'
def copy_metadata_to(self, target_dir):
"Copy metadata (egg info) to the target_dir"
@@ -411,9 +368,7 @@ def scan_module(egg_dir, base, name, stubs):
return True # Extension module
pkg = base[len(egg_dir) + 1:].replace(os.sep, '.')
module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
- if sys.version_info < (3, 3):
- skip = 8 # skip magic & date
- elif sys.version_info < (3, 7):
+ if sys.version_info < (3, 7):
skip = 12 # skip magic & date & file size
else:
skip = 16 # skip magic & reserved? & date & file size
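
The skip count reflects the .pyc header layout: 12 bytes (magic, date, source size) on Python 3 before 3.7, and 16 bytes once PEP 552 added a flags field. A sketch of skipping that header before unmarshalling, assuming a .pyc produced by the running interpreter:

    import marshal
    import sys

    def read_code_object(pyc_path):
        skip = 12 if sys.version_info < (3, 7) else 16   # same logic as scan_module
        with open(pyc_path, 'rb') as f:
            f.read(skip)                                  # discard the header
            return marshal.load(f)                        # the module's code object
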
@@ -444,7 +399,7 @@ def iter_symbols(code):
for name in code.co_names:
yield name
for const in code.co_consts:
- if isinstance(const, six.string_types):
+ if isinstance(const, str):
yield const
elif isinstance(const, CodeType):
for name in iter_symbols(const):
diff --git a/setuptools/command/bdist_rpm.py b/setuptools/command/bdist_rpm.py
index 7073092..98bf5de 100755..100644
--- a/setuptools/command/bdist_rpm.py
+++ b/setuptools/command/bdist_rpm.py
@@ -1,4 +1,7 @@
import distutils.command.bdist_rpm as orig
+import warnings
+
+from setuptools import SetuptoolsDeprecationWarning
class bdist_rpm(orig.bdist_rpm):
@@ -8,36 +11,30 @@ class bdist_rpm(orig.bdist_rpm):
1. Run egg_info to ensure the name and version are properly calculated.
2. Always run 'install' using --single-version-externally-managed to
disable eggs in RPM distributions.
- 3. Replace dash with underscore in the version numbers for better RPM
- compatibility.
"""
def run(self):
+ warnings.warn(
+ "bdist_rpm is deprecated and will be removed in a future "
+ "version. Use bdist_wheel (wheel packages) instead.",
+ SetuptoolsDeprecationWarning,
+ )
+
# ensure distro name is up-to-date
self.run_command('egg_info')
orig.bdist_rpm.run(self)
def _make_spec_file(self):
- version = self.distribution.get_version()
- rpmversion = version.replace('-', '_')
spec = orig.bdist_rpm._make_spec_file(self)
- line23 = '%define version ' + version
- line24 = '%define version ' + rpmversion
spec = [
line.replace(
- "Source0: %{name}-%{version}.tar",
- "Source0: %{name}-%{unmangled_version}.tar"
- ).replace(
"setup.py install ",
"setup.py install --single-version-externally-managed "
).replace(
"%setup",
"%setup -n %{name}-%{unmangled_version}"
- ).replace(line23, line24)
+ )
for line in spec
]
- insert_loc = spec.index(line24) + 1
- unmangled_version = "%define unmangled_version " + version
- spec.insert(insert_loc, unmangled_version)
return spec
diff --git a/setuptools/command/bdist_wininst.py b/setuptools/command/bdist_wininst.py
deleted file mode 100755
index 073de97..0000000
--- a/setuptools/command/bdist_wininst.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import distutils.command.bdist_wininst as orig
-
-
-class bdist_wininst(orig.bdist_wininst):
- def reinitialize_command(self, command, reinit_subcommands=0):
- """
- Supplement reinitialize_command to work around
- http://bugs.python.org/issue20819
- """
- cmd = self.distribution.reinitialize_command(
- command, reinit_subcommands)
- if command in ('install', 'install_lib'):
- cmd.install_lib = None
- return cmd
-
- def run(self):
- self._is_running = True
- try:
- orig.bdist_wininst.run(self)
- finally:
- self._is_running = False
diff --git a/setuptools/command/build_clib.py b/setuptools/command/build_clib.py
index 09caff6..67ce244 100644
--- a/setuptools/command/build_clib.py
+++ b/setuptools/command/build_clib.py
@@ -25,9 +25,9 @@ class build_clib(orig.build_clib):
sources = build_info.get('sources')
if sources is None or not isinstance(sources, (list, tuple)):
raise DistutilsSetupError(
- "in 'libraries' option (library '%s'), "
- "'sources' must be present and must be "
- "a list of source filenames" % lib_name)
+ "in 'libraries' option (library '%s'), "
+ "'sources' must be present and must be "
+ "a list of source filenames" % lib_name)
sources = list(sources)
log.info("building '%s' library", lib_name)
@@ -38,9 +38,9 @@ class build_clib(orig.build_clib):
obj_deps = build_info.get('obj_deps', dict())
if not isinstance(obj_deps, dict):
raise DistutilsSetupError(
- "in 'libraries' option (library '%s'), "
- "'obj_deps' must be a dictionary of "
- "type 'source: list'" % lib_name)
+ "in 'libraries' option (library '%s'), "
+ "'obj_deps' must be a dictionary of "
+ "type 'source: list'" % lib_name)
dependencies = []
# Get the global dependencies that are specified by the '' key.
@@ -48,9 +48,9 @@ class build_clib(orig.build_clib):
global_deps = obj_deps.get('', list())
if not isinstance(global_deps, (list, tuple)):
raise DistutilsSetupError(
- "in 'libraries' option (library '%s'), "
- "'obj_deps' must be a dictionary of "
- "type 'source: list'" % lib_name)
+ "in 'libraries' option (library '%s'), "
+ "'obj_deps' must be a dictionary of "
+ "type 'source: list'" % lib_name)
# Build the list to be used by newer_pairwise_group
# each source will be auto-added to its dependencies.
@@ -60,39 +60,42 @@ class build_clib(orig.build_clib):
extra_deps = obj_deps.get(source, list())
if not isinstance(extra_deps, (list, tuple)):
raise DistutilsSetupError(
- "in 'libraries' option (library '%s'), "
- "'obj_deps' must be a dictionary of "
- "type 'source: list'" % lib_name)
+ "in 'libraries' option (library '%s'), "
+ "'obj_deps' must be a dictionary of "
+ "type 'source: list'" % lib_name)
src_deps.extend(extra_deps)
dependencies.append(src_deps)
expected_objects = self.compiler.object_filenames(
- sources,
- output_dir=self.build_temp
- )
+ sources,
+ output_dir=self.build_temp,
+ )
- if newer_pairwise_group(dependencies, expected_objects) != ([], []):
+ if (
+ newer_pairwise_group(dependencies, expected_objects)
+ != ([], [])
+ ):
# First, compile the source code to object files in the library
# directory. (This should probably change to putting object
# files in a temporary build directory.)
macros = build_info.get('macros')
include_dirs = build_info.get('include_dirs')
cflags = build_info.get('cflags')
- objects = self.compiler.compile(
- sources,
- output_dir=self.build_temp,
- macros=macros,
- include_dirs=include_dirs,
- extra_postargs=cflags,
- debug=self.debug
- )
+ self.compiler.compile(
+ sources,
+ output_dir=self.build_temp,
+ macros=macros,
+ include_dirs=include_dirs,
+ extra_postargs=cflags,
+ debug=self.debug
+ )
# Now "link" the object files together into a static library.
# (On Unix at least, this isn't really linking -- it just
# builds an archive. Whatever.)
self.compiler.create_static_lib(
- expected_objects,
- lib_name,
- output_dir=self.build_clib,
- debug=self.debug
- )
+ expected_objects,
+ lib_name,
+ output_dir=self.build_clib,
+ debug=self.debug
+ )
diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py
index ea97b37..c59eff8 100644
--- a/setuptools/command/build_ext.py
+++ b/setuptools/command/build_ext.py
@@ -1,7 +1,7 @@
import os
import sys
import itertools
-import imp
+from importlib.machinery import EXTENSION_SUFFIXES
from distutils.command.build_ext import build_ext as _du_build_ext
from distutils.file_util import copy_file
from distutils.ccompiler import new_compiler
@@ -10,7 +10,6 @@ from distutils.errors import DistutilsError
from distutils import log
from setuptools.extension import Library
-from setuptools.extern import six
try:
# Attempt to use Cython for building extensions, if available
@@ -23,7 +22,7 @@ except ImportError:
# make sure _config_vars is initialized
get_config_var("LDSHARED")
-from distutils.sysconfig import _config_vars as _CONFIG_VARS
+from distutils.sysconfig import _config_vars as _CONFIG_VARS # noqa
def _customize_compiler_for_shlib(compiler):
@@ -59,12 +58,14 @@ elif os.name != 'nt':
except ImportError:
pass
-if_dl = lambda s: s if have_rtld else ''
+
+def if_dl(s):
+ return s if have_rtld else ''
def get_abi3_suffix():
"""Return the file extension for an abi3-compliant Extension()"""
- for suffix, _, _ in (s for s in imp.get_suffixes() if s[2] == imp.C_EXTENSION):
+ for suffix in EXTENSION_SUFFIXES:
if '.abi3' in suffix: # Unix
return suffix
elif suffix == '.pyd': # Windows
@@ -103,18 +104,20 @@ class build_ext(_build_ext):
self.write_stub(package_dir or os.curdir, ext, True)
def get_ext_filename(self, fullname):
- filename = _build_ext.get_ext_filename(self, fullname)
+ so_ext = os.getenv('SETUPTOOLS_EXT_SUFFIX')
+ if so_ext:
+ filename = os.path.join(*fullname.split('.')) + so_ext
+ else:
+ filename = _build_ext.get_ext_filename(self, fullname)
+ so_ext = get_config_var('EXT_SUFFIX')
+
if fullname in self.ext_map:
ext = self.ext_map[fullname]
- use_abi3 = (
- six.PY3
- and getattr(ext, 'py_limited_api')
- and get_abi3_suffix()
- )
+ use_abi3 = getattr(ext, 'py_limited_api') and get_abi3_suffix()
if use_abi3:
- so_ext = _get_config_var_837('EXT_SUFFIX')
filename = filename[:-len(so_ext)]
- filename = filename + get_abi3_suffix()
+ so_ext = get_abi3_suffix()
+ filename = filename + so_ext
if isinstance(ext, Library):
fn, ext = os.path.splitext(filename)
return self.shlib_compiler.library_filename(fn, libtype)
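
For a py_limited_api extension, the platform-specific suffix reported by EXT_SUFFIX (or the SETUPTOOLS_EXT_SUFFIX override) is stripped and replaced by the abi3 suffix. A worked example with hypothetical values; the real suffixes vary per interpreter and platform:

    ext_suffix = '.cpython-310-x86_64-linux-gnu.so'   # e.g. get_config_var('EXT_SUFFIX')
    abi3_suffix = '.abi3.so'                          # e.g. get_abi3_suffix() on Unix
    filename = 'pkg/_native' + ext_suffix
    filename = filename[:-len(ext_suffix)] + abi3_suffix
    assert filename == 'pkg/_native.abi3.so'
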
@@ -245,7 +248,8 @@ class build_ext(_build_ext):
'\n'.join([
"def __bootstrap__():",
" global __bootstrap__, __file__, __loader__",
- " import sys, os, pkg_resources, imp" + if_dl(", dl"),
+ " import sys, os, pkg_resources, importlib.util" +
+ if_dl(", dl"),
" __file__ = pkg_resources.resource_filename"
"(__name__,%r)"
% os.path.basename(ext._file_name),
@@ -257,7 +261,10 @@ class build_ext(_build_ext):
" try:",
" os.chdir(os.path.dirname(__file__))",
if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"),
- " imp.load_dynamic(__name__,__file__)",
+ " spec = importlib.util.spec_from_file_location(",
+ " __name__, __file__)",
+ " mod = importlib.util.module_from_spec(spec)",
+ " spec.loader.exec_module(mod)",
" finally:",
if_dl(" sys.setdlopenflags(old_flags)"),
" os.chdir(old_dir)",
@@ -319,13 +326,3 @@ else:
self.create_static_lib(
objects, basename, output_dir, debug, target_lang
)
-
-
-def _get_config_var_837(name):
- """
- In https://github.com/pypa/setuptools/pull/837, we discovered
- Python 3.3.0 exposes the extension suffix under the name 'SO'.
- """
- if sys.version_info < (3, 3, 1):
- name = 'SO'
- return get_config_var(name)
diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py
index b0314fd..c3fdc09 100644
--- a/setuptools/command/build_py.py
+++ b/setuptools/command/build_py.py
@@ -7,20 +7,15 @@ import textwrap
import io
import distutils.errors
import itertools
+import stat
+from setuptools.extern.more_itertools import unique_everseen
-from setuptools.extern import six
-from setuptools.extern.six.moves import map, filter, filterfalse
-try:
- from setuptools.lib2to3_ex import Mixin2to3
-except ImportError:
+def make_writable(target):
+ os.chmod(target, os.stat(target).st_mode | stat.S_IWRITE)
- class Mixin2to3:
- def run_2to3(self, files, doctests=True):
- "do nothing"
-
-class build_py(orig.build_py, Mixin2to3):
+class build_py(orig.build_py):
"""Enhanced 'build_py' command that includes data files with packages
The data files are specified via a 'package_data' argument to 'setup()'.
@@ -33,12 +28,10 @@ class build_py(orig.build_py, Mixin2to3):
def finalize_options(self):
orig.build_py.finalize_options(self)
self.package_data = self.distribution.package_data
- self.exclude_package_data = (self.distribution.exclude_package_data or
- {})
+ self.exclude_package_data = self.distribution.exclude_package_data or {}
if 'data_files' in self.__dict__:
del self.__dict__['data_files']
self.__updated_files = []
- self.__doctests_2to3 = []
def run(self):
"""Build modules, packages, and copy data files to build directory"""
@@ -52,10 +45,6 @@ class build_py(orig.build_py, Mixin2to3):
self.build_packages()
self.build_package_data()
- self.run_2to3(self.__updated_files, False)
- self.run_2to3(self.__updated_files, True)
- self.run_2to3(self.__doctests_2to3, True)
-
# Only compile actual .py files, using our base class' idea of what our
# output files are.
self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0))
@@ -68,11 +57,7 @@ class build_py(orig.build_py, Mixin2to3):
return orig.build_py.__getattr__(self, attr)
def build_module(self, module, module_file, package):
- if six.PY2 and isinstance(package, six.string_types):
- # avoid errors on Python 2 when unicode is passed (#190)
- package = package.split('.')
- outfile, copied = orig.build_py.build_module(self, module, module_file,
- package)
+ outfile, copied = orig.build_py.build_module(self, module, module_file, package)
if copied:
self.__updated_files.append(outfile)
return outfile, copied
@@ -82,6 +67,16 @@ class build_py(orig.build_py, Mixin2to3):
self.analyze_manifest()
return list(map(self._get_pkg_data_files, self.packages or ()))
+ def get_data_files_without_manifest(self):
+ """
+ Generate list of ``(package,src_dir,build_dir,filenames)`` tuples,
+ but without triggering any attempt to analyze or build the manifest.
+ """
+ # Prevent eventual errors from unset `manifest_files`
+ # (that would otherwise be set by `analyze_manifest`)
+ self.__dict__.setdefault('manifest_files', {})
+ return list(map(self._get_pkg_data_files, self.packages or ()))
+
def _get_pkg_data_files(self, package):
# Locate package source directory
src_dir = self.get_package_dir(package)
@@ -121,10 +116,8 @@ class build_py(orig.build_py, Mixin2to3):
self.mkpath(os.path.dirname(target))
srcfile = os.path.join(src_dir, filename)
outf, copied = self.copy_file(srcfile, target)
+ make_writable(target)
srcfile = os.path.abspath(srcfile)
- if (copied and
- srcfile in self.distribution.convert_2to3_doctests):
- self.__doctests_2to3.append(outf)
def analyze_manifest(self):
self.manifest_files = mf = {}
@@ -201,20 +194,13 @@ class build_py(orig.build_py, Mixin2to3):
package,
src_dir,
)
- match_groups = (
- fnmatch.filter(files, pattern)
- for pattern in patterns
- )
+ match_groups = (fnmatch.filter(files, pattern) for pattern in patterns)
# flatten the groups of matches into an iterable of matches
matches = itertools.chain.from_iterable(match_groups)
bad = set(matches)
- keepers = (
- fn
- for fn in files
- if fn not in bad
- )
+ keepers = (fn for fn in files if fn not in bad)
# ditch dupes
- return list(_unique_everseen(keepers))
+ return list(unique_everseen(keepers))
@staticmethod
def _get_platform_patterns(spec, package, src_dir):
@@ -235,36 +221,22 @@ class build_py(orig.build_py, Mixin2to3):
)
-# from Python docs
-def _unique_everseen(iterable, key=None):
- "List unique elements, preserving order. Remember all elements ever seen."
- # unique_everseen('AAAABBBCCDAABBB') --> A B C D
- # unique_everseen('ABBCcAD', str.lower) --> A B C D
- seen = set()
- seen_add = seen.add
- if key is None:
- for element in filterfalse(seen.__contains__, iterable):
- seen_add(element)
- yield element
- else:
- for element in iterable:
- k = key(element)
- if k not in seen:
- seen_add(k)
- yield element
-
-
def assert_relative(path):
if not os.path.isabs(path):
return path
from distutils.errors import DistutilsSetupError
- msg = textwrap.dedent("""
+ msg = (
+ textwrap.dedent(
+ """
Error: setup script specifies an absolute path:
%s
setup() arguments must *always* be /-separated paths relative to the
setup.py directory, *never* absolute paths.
- """).lstrip() % path
+ """
+ ).lstrip()
+ % path
+ )
raise DistutilsSetupError(msg)
diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py
index 959c932..24fb0a7 100755..100644
--- a/setuptools/command/develop.py
+++ b/setuptools/command/develop.py
@@ -5,9 +5,7 @@ import os
import glob
import io
-from setuptools.extern import six
-
-from pkg_resources import Distribution, PathMetadata, normalize_path
+import pkg_resources
from setuptools.command.easy_install import easy_install
from setuptools import namespaces
import setuptools
@@ -63,9 +61,10 @@ class develop(namespaces.DevelopInstaller, easy_install):
if self.egg_path is None:
self.egg_path = os.path.abspath(ei.egg_base)
- target = normalize_path(self.egg_base)
- egg_path = normalize_path(os.path.join(self.install_dir,
- self.egg_path))
+ target = pkg_resources.normalize_path(self.egg_base)
+ egg_path = pkg_resources.normalize_path(
+ os.path.join(self.install_dir, self.egg_path)
+ )
if egg_path != target:
raise DistutilsOptionError(
"--egg-path must be a relative path from the install"
@@ -73,10 +72,10 @@ class develop(namespaces.DevelopInstaller, easy_install):
)
# Make a distribution for the package's source
- self.dist = Distribution(
+ self.dist = pkg_resources.Distribution(
target,
- PathMetadata(target, os.path.abspath(ei.egg_info)),
- project_name=ei.egg_name
+ pkg_resources.PathMetadata(target, os.path.abspath(ei.egg_info)),
+ project_name=ei.egg_name,
)
self.setup_path = self._resolve_setup_path(
@@ -95,47 +94,25 @@ class develop(namespaces.DevelopInstaller, easy_install):
path_to_setup = egg_base.replace(os.sep, '/').rstrip('/')
if path_to_setup != os.curdir:
path_to_setup = '../' * (path_to_setup.count('/') + 1)
- resolved = normalize_path(
+ resolved = pkg_resources.normalize_path(
os.path.join(install_dir, egg_path, path_to_setup)
)
- if resolved != normalize_path(os.curdir):
+ if resolved != pkg_resources.normalize_path(os.curdir):
raise DistutilsOptionError(
"Can't get a consistent path to setup script from"
- " installation directory", resolved, normalize_path(os.curdir))
+ " installation directory",
+ resolved,
+ pkg_resources.normalize_path(os.curdir),
+ )
return path_to_setup
def install_for_development(self):
- if six.PY3 and getattr(self.distribution, 'use_2to3', False):
- # If we run 2to3 we can not do this inplace:
-
- # Ensure metadata is up-to-date
- self.reinitialize_command('build_py', inplace=0)
- self.run_command('build_py')
- bpy_cmd = self.get_finalized_command("build_py")
- build_path = normalize_path(bpy_cmd.build_lib)
-
- # Build extensions
- self.reinitialize_command('egg_info', egg_base=build_path)
- self.run_command('egg_info')
-
- self.reinitialize_command('build_ext', inplace=0)
- self.run_command('build_ext')
-
- # Fixup egg-link and easy-install.pth
- ei_cmd = self.get_finalized_command("egg_info")
- self.egg_path = build_path
- self.dist.location = build_path
- # XXX
- self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info)
- else:
- # Without 2to3 inplace works fine:
- self.run_command('egg_info')
+ self.run_command('egg_info')
- # Build extensions in-place
- self.reinitialize_command('build_ext', inplace=1)
- self.run_command('build_ext')
+ # Build extensions in-place
+ self.reinitialize_command('build_ext', inplace=1)
+ self.run_command('build_ext')
- self.install_site_py() # ensure that target dir is site-safe
if setuptools.bootstrap_install_from:
self.easy_install(setuptools.bootstrap_install_from)
setuptools.bootstrap_install_from = None
@@ -157,8 +134,7 @@ class develop(namespaces.DevelopInstaller, easy_install):
egg_link_file = open(self.egg_link)
contents = [line.rstrip() for line in egg_link_file]
egg_link_file.close()
- if contents not in ([self.egg_path],
- [self.egg_path, self.setup_path]):
+ if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
log.warn("Link points to %s: uninstall aborted", contents)
return
if not self.dry_run:
@@ -192,12 +168,13 @@ class develop(namespaces.DevelopInstaller, easy_install):
return easy_install.install_wrapper_scripts(self, dist)
-class VersionlessRequirement(object):
+class VersionlessRequirement:
"""
Adapt a pkg_resources.Distribution to simply return the project
name as the 'requirement' so that scripts will work across
multiple versions.
+ >>> from pkg_resources import Distribution
>>> dist = Distribution(project_name='foo', version='1.0')
>>> str(dist.as_requirement())
'foo==1.0'
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 85ee40f..107850a 100755..100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
"""
Easy Install
------------
@@ -7,7 +6,7 @@ A tool for doing automatic download/extract/build of distutils-based Python
packages. For detailed documentation, see the accompanying EasyInstall.txt
file, or visit the `EasyInstall home page`__.
-__ https://setuptools.readthedocs.io/en/latest/easy_install.html
+__ https://setuptools.pypa.io/en/latest/deprecated/easy_install.html
"""
@@ -18,10 +17,10 @@ from distutils.errors import (
DistutilsArgError, DistutilsOptionError,
DistutilsError, DistutilsPlatformError,
)
-from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS
from distutils import log, dir_util
from distutils.command.build_scripts import first_line_re
from distutils.spawn import find_executable
+from distutils.command import install
import sys
import os
import zipimport
@@ -39,14 +38,16 @@ import contextlib
import subprocess
import shlex
import io
+import configparser
+import sysconfig
-from setuptools.extern import six
-from setuptools.extern.six.moves import configparser, map
+
+from sysconfig import get_path
+
+from setuptools import SetuptoolsDeprecationWarning
from setuptools import Command
from setuptools.sandbox import run_setup
-from setuptools.py31compat import get_path, get_config_vars
-from setuptools.py27compat import rmtree_safe
from setuptools.command import setopt
from setuptools.archive_util import unpack_archive
from setuptools.package_index import (
@@ -55,19 +56,22 @@ from setuptools.package_index import (
from setuptools.command import bdist_egg, egg_info
from setuptools.wheel import Wheel
from pkg_resources import (
- yield_lines, normalize_path, resource_string, ensure_directory,
+ normalize_path, resource_string,
get_distribution, find_distributions, Environment, Requirement,
Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound,
VersionConflict, DEVELOP_DIST,
)
-import pkg_resources.py31compat
+import pkg_resources
+from .._path import ensure_directory
+from ..extern.jaraco.text import yield_lines
+
# Turn on PEP440Warnings
warnings.filterwarnings("default", category=pkg_resources.PEP440Warning)
__all__ = [
- 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
- 'main', 'get_exe_prefixes',
+ 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
+ 'get_exe_prefixes',
]
@@ -75,47 +79,20 @@ def is_64bit():
return struct.calcsize("P") == 8
-def samefile(p1, p2):
- """
- Determine if two paths reference the same file.
-
- Augments os.path.samefile to work on Windows and
- suppresses errors if the path doesn't exist.
- """
- both_exist = os.path.exists(p1) and os.path.exists(p2)
- use_samefile = hasattr(os.path, 'samefile') and both_exist
- if use_samefile:
- return os.path.samefile(p1, p2)
- norm_p1 = os.path.normpath(os.path.normcase(p1))
- norm_p2 = os.path.normpath(os.path.normcase(p2))
- return norm_p1 == norm_p2
-
-
-if six.PY2:
-
- def _to_ascii(s):
- return s
+def _to_bytes(s):
+ return s.encode('utf8')
- def isascii(s):
- try:
- six.text_type(s, 'ascii')
- return True
- except UnicodeError:
- return False
-else:
- def _to_ascii(s):
- return s.encode('ascii')
-
- def isascii(s):
- try:
- s.encode('ascii')
- return True
- except UnicodeError:
- return False
+def isascii(s):
+ try:
+ s.encode('ascii')
+ return True
+ except UnicodeError:
+ return False
-_one_liner = lambda text: textwrap.dedent(text).strip().replace('\n', '; ')
+def _one_liner(text):
+ return textwrap.dedent(text).strip().replace('\n', '; ')
class easy_install(Command):
@@ -150,23 +127,26 @@ class easy_install(Command):
"allow building eggs from local checkouts"),
('version', None, "print version information and exit"),
('no-find-links', None,
- "Don't load find-links defined in packages being installed")
+ "Don't load find-links defined in packages being installed"),
+ ('user', None, "install in user site-package '%s'" % site.USER_SITE)
]
boolean_options = [
'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy',
'editable',
- 'no-deps', 'local-snapshots-ok', 'version'
+ 'no-deps', 'local-snapshots-ok', 'version',
+ 'user'
]
- if site.ENABLE_USER_SITE:
- help_msg = "install in user site-package '%s'" % site.USER_SITE
- user_options.append(('user', None, help_msg))
- boolean_options.append('user')
-
negative_opt = {'always-unzip': 'zip-ok'}
create_index = PackageIndex
def initialize_options(self):
+ warnings.warn(
+ "easy_install command is deprecated. "
+ "Use build and pip and other standards-based tools.",
+ EasyInstallDeprecationWarning,
+ )
+
# the --user option seems to be an opt-in one,
# so the default should be False.
self.user = 0
@@ -202,7 +182,6 @@ class easy_install(Command):
self.pth_file = self.always_copy_from = None
self.site_dirs = None
self.installed_projects = {}
- self.sitepy_installed = False
# Always read easy_install options, even if we are subclassed, or have
# an independent instance created. This ensures that defaults will
# always come from the standard configuration file(s)' "easy_install"
@@ -235,37 +214,52 @@ class easy_install(Command):
"""
Render the Setuptools version and installation details, then exit.
"""
- ver = sys.version[:3]
+ ver = '{}.{}'.format(*sys.version_info)
dist = get_distribution('setuptools')
tmpl = 'setuptools {dist.version} from {dist.location} (Python {ver})'
print(tmpl.format(**locals()))
raise SystemExit()
- def finalize_options(self):
+ def finalize_options(self): # noqa: C901 # is too complex (25) # FIXME
self.version and self._render_version()
py_version = sys.version.split()[0]
- prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix')
- self.config_vars = {
+ self.config_vars = dict(sysconfig.get_config_vars())
+
+ self.config_vars.update({
'dist_name': self.distribution.get_name(),
'dist_version': self.distribution.get_version(),
'dist_fullname': self.distribution.get_fullname(),
'py_version': py_version,
- 'py_version_short': py_version[0:3],
- 'py_version_nodot': py_version[0] + py_version[2],
- 'sys_prefix': prefix,
- 'prefix': prefix,
- 'sys_exec_prefix': exec_prefix,
- 'exec_prefix': exec_prefix,
+ 'py_version_short': f'{sys.version_info.major}.{sys.version_info.minor}',
+ 'py_version_nodot': f'{sys.version_info.major}{sys.version_info.minor}',
+ 'sys_prefix': self.config_vars['prefix'],
+ 'sys_exec_prefix': self.config_vars['exec_prefix'],
# Only python 3.2+ has abiflags
'abiflags': getattr(sys, 'abiflags', ''),
- }
+ 'platlibdir': getattr(sys, 'platlibdir', 'lib'),
+ })
+ with contextlib.suppress(AttributeError):
+ # only for distutils outside stdlib
+ self.config_vars.update({
+ 'implementation_lower': install._get_implementation().lower(),
+ 'implementation': install._get_implementation(),
+ })
+
+ # pypa/distutils#113 Python 3.9 compat
+ self.config_vars.setdefault(
+ 'py_version_nodot_plat',
+ getattr(sys, 'windir', '').replace('.', ''),
+ )
if site.ENABLE_USER_SITE:
self.config_vars['userbase'] = self.install_userbase
self.config_vars['usersite'] = self.install_usersite
+ elif self.user:
+ log.warn("WARNING: The user site-packages directory is disabled.")
+
self._fix_install_dir_for_user_site()
self.expand_basedirs()
@@ -299,24 +293,9 @@ class easy_install(Command):
self.script_dir = self.install_scripts
# default --record from the install command
self.set_undefined_options('install', ('record', 'record'))
- # Should this be moved to the if statement below? It's not used
- # elsewhere
- normpath = map(normalize_path, sys.path)
self.all_site_dirs = get_site_dirs()
- if self.site_dirs is not None:
- site_dirs = [
- os.path.expanduser(s.strip()) for s in
- self.site_dirs.split(',')
- ]
- for d in site_dirs:
- if not os.path.isdir(d):
- log.warn("%s (in --site-dirs) does not exist", d)
- elif normalize_path(d) not in normpath:
- raise DistutilsOptionError(
- d + " (in --site-dirs) is not on sys.path"
- )
- else:
- self.all_site_dirs.append(normalize_path(d))
+ self.all_site_dirs.extend(self._process_site_dirs(self.site_dirs))
+
if not self.editable:
self.check_site_dir()
self.index_url = self.index_url or "https://pypi.org/simple/"
@@ -336,7 +315,7 @@ class easy_install(Command):
self.local_index = Environment(self.shadow_path + sys.path)
if self.find_links is not None:
- if isinstance(self.find_links, six.string_types):
+ if isinstance(self.find_links, str):
self.find_links = self.find_links.split()
else:
self.find_links = []
@@ -345,13 +324,7 @@ class easy_install(Command):
if not self.no_find_links:
self.package_index.add_find_links(self.find_links)
self.set_undefined_options('install_lib', ('optimize', 'optimize'))
- if not isinstance(self.optimize, int):
- try:
- self.optimize = int(self.optimize)
- if not (0 <= self.optimize <= 2):
- raise ValueError
- except ValueError:
- raise DistutilsOptionError("--optimize must be 0, 1, or 2")
+ self.optimize = self._validate_optimize(self.optimize)
if self.editable and not self.build_directory:
raise DistutilsArgError(
@@ -363,6 +336,39 @@ class easy_install(Command):
self.outputs = []
+ @staticmethod
+ def _process_site_dirs(site_dirs):
+ if site_dirs is None:
+ return
+
+ normpath = map(normalize_path, sys.path)
+ site_dirs = [
+ os.path.expanduser(s.strip()) for s in
+ site_dirs.split(',')
+ ]
+ for d in site_dirs:
+ if not os.path.isdir(d):
+ log.warn("%s (in --site-dirs) does not exist", d)
+ elif normalize_path(d) not in normpath:
+ raise DistutilsOptionError(
+ d + " (in --site-dirs) is not on sys.path"
+ )
+ else:
+ yield normalize_path(d)
+
+ @staticmethod
+ def _validate_optimize(value):
+ try:
+ value = int(value)
+ if value not in range(3):
+ raise ValueError
+ except ValueError as e:
+ raise DistutilsOptionError(
+ "--optimize must be 0, 1, or 2"
+ ) from e
+
+ return value
+
def _fix_install_dir_for_user_site(self):
"""
Fix the install_dir if "--user" was used.
@@ -375,7 +381,7 @@ class easy_install(Command):
msg = "User base directory is not specified"
raise DistutilsPlatformError(msg)
self.install_base = self.install_platbase = self.install_userbase
- scheme_name = os.name.replace('posix', 'unix') + '_user'
+ scheme_name = f'{os.name}_user'
self.select_scheme(scheme_name)
def _expand_attrs(self, attrs):
@@ -404,7 +410,13 @@ class easy_install(Command):
]
self._expand_attrs(dirs)
- def run(self):
+ def run(self, show_deprecation=True):
+ if show_deprecation:
+ self.announce(
+ "WARNING: The easy_install command is deprecated "
+ "and will be removed in a future version.",
+ log.WARN,
+ )
if self.verbose != self.distribution.verbose:
log.set_verbosity(self.verbose)
try:
@@ -441,12 +453,18 @@ class easy_install(Command):
def warn_deprecated_options(self):
pass
- def check_site_dir(self):
+ def check_site_dir(self): # noqa: C901 # is too complex (12) # FIXME
"""Verify that self.install_dir is .pth-capable dir, if needed"""
instdir = normalize_path(self.install_dir)
pth_file = os.path.join(instdir, 'easy-install.pth')
+ if not os.path.exists(instdir):
+ try:
+ os.makedirs(instdir)
+ except (OSError, IOError):
+ self.cant_write_to_target()
+
# Is it a configured, PYTHONPATH, implicit, or explicit site dir?
is_site_dir = instdir in self.all_site_dirs
@@ -466,8 +484,9 @@ class easy_install(Command):
self.cant_write_to_target()
if not is_site_dir and not self.multi_version:
- # Can't install non-multi to non-site dir
- raise DistutilsError(self.no_default_version_msg())
+ # Can't install non-multi to non-site dir with easy_install
+ pythonpath = os.environ.get('PYTHONPATH', '')
+ log.warn(self.__no_default_msg, self.install_dir, pythonpath)
if is_site_dir:
if self.pth_file is None:
@@ -475,12 +494,8 @@ class easy_install(Command):
else:
self.pth_file = None
- if instdir not in map(normalize_path, _pythonpath()):
- # only PYTHONPATH dirs need a site.py, so pretend it's there
- self.sitepy_installed = True
- elif self.multi_version and not os.path.exists(pth_file):
- self.sitepy_installed = True # don't need site.py in this case
- self.pth_file = None # and don't create a .pth file
+ if self.multi_version and not os.path.exists(pth_file):
+ self.pth_file = None # don't create a .pth file
self.install_dir = instdir
__cant_write_msg = textwrap.dedent("""
@@ -495,13 +510,13 @@ class easy_install(Command):
the distutils default setting) was:
%s
- """).lstrip()
+ """).lstrip() # noqa
__not_exists_id = textwrap.dedent("""
This directory does not currently exist. Please create it and try again, or
choose a different installation directory (using the -d or --install-dir
option).
- """).lstrip()
+ """).lstrip() # noqa
__access_msg = textwrap.dedent("""
Perhaps your account does not have write access to this directory? If the
@@ -514,10 +529,10 @@ class easy_install(Command):
For information on other options, you may wish to consult the
documentation at:
- https://setuptools.readthedocs.io/en/latest/easy_install.html
+ https://setuptools.pypa.io/en/latest/deprecated/easy_install.html
Please make the appropriate changes for your system and try again.
- """).lstrip()
+ """).lstrip() # noqa
def cant_write_to_target(self):
msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,)
@@ -545,7 +560,7 @@ class easy_install(Command):
if ok_exists:
os.unlink(ok_file)
dirname = os.path.dirname(ok_file)
- pkg_resources.py31compat.makedirs(dirname, exist_ok=True)
+ os.makedirs(dirname, exist_ok=True)
f = open(pth_file, 'w')
except (OSError, IOError):
self.cant_write_to_target()
@@ -629,17 +644,14 @@ class easy_install(Command):
@contextlib.contextmanager
def _tmpdir(self):
- tmpdir = tempfile.mkdtemp(prefix=six.u("easy_install-"))
+ tmpdir = tempfile.mkdtemp(prefix=u"easy_install-")
try:
# cast to str as workaround for #709 and #710 and #712
yield str(tmpdir)
finally:
- os.path.exists(tmpdir) and rmtree(rmtree_safe(tmpdir))
+ os.path.exists(tmpdir) and rmtree(tmpdir)
def easy_install(self, spec, deps=False):
- if not self.editable:
- self.install_site_py()
-
with self._tmpdir() as tmpdir:
if not isinstance(spec, Requirement):
if URL_SCHEME(spec):
@@ -709,15 +721,16 @@ class easy_install(Command):
return dist
def select_scheme(self, name):
- """Sets the install directories by applying the install schemes."""
- # it's the caller's problem if they supply a bad name!
- scheme = INSTALL_SCHEMES[name]
- for key in SCHEME_KEYS:
- attrname = 'install_' + key
- if getattr(self, attrname) is None:
- setattr(self, attrname, scheme[key])
-
- def process_distribution(self, requirement, dist, deps=True, *info):
+ try:
+ install._select_scheme(self, name)
+ except AttributeError:
+ # stdlib distutils
+ install.install.select_scheme(self, name.replace('posix', 'unix'))
+
+ # FIXME: 'easy_install.process_distribution' is too complex (12)
+ def process_distribution( # noqa: C901
+ self, requirement, dist, deps=True, *info,
+ ):
self.update_pth(dist)
self.package_index.add(dist)
if dist in self.local_index[dist.key]:
@@ -746,9 +759,9 @@ class easy_install(Command):
[requirement], self.local_index, self.easy_install
)
except DistributionNotFound as e:
- raise DistutilsError(str(e))
+ raise DistutilsError(str(e)) from e
except VersionConflict as e:
- raise DistutilsError(e.report())
+ raise DistutilsError(e.report()) from e
if self.always_copy or self.always_copy_from:
# Force all the relevant distros to be copied or activated
for dist in distros:
@@ -802,7 +815,7 @@ class easy_install(Command):
if is_script:
body = self._load_template(dev_path) % locals()
script_text = ScriptWriter.get_header(script_text) + body
- self.write_script(script_name, _to_ascii(script_text), 'b')
+ self.write_script(script_name, _to_bytes(script_text), 'b')
@staticmethod
def _load_template(dev_path):
@@ -841,12 +854,19 @@ class easy_install(Command):
def install_eggs(self, spec, dist_filename, tmpdir):
# .egg dirs or files are already built, so just return them
- if dist_filename.lower().endswith('.egg'):
- return [self.install_egg(dist_filename, tmpdir)]
- elif dist_filename.lower().endswith('.exe'):
- return [self.install_exe(dist_filename, tmpdir)]
- elif dist_filename.lower().endswith('.whl'):
- return [self.install_wheel(dist_filename, tmpdir)]
+ installer_map = {
+ '.egg': self.install_egg,
+ '.exe': self.install_exe,
+ '.whl': self.install_wheel,
+ }
+ try:
+ install_dist = installer_map[
+ dist_filename.lower()[-4:]
+ ]
+ except KeyError:
+ pass
+ else:
+ return [install_dist(dist_filename, tmpdir)]
# Anything else, try to extract and build
setup_base = tmpdir
@@ -891,7 +911,8 @@ class easy_install(Command):
metadata = EggMetadata(zipimport.zipimporter(egg_path))
return Distribution.from_filename(egg_path, metadata=metadata)
- def install_egg(self, egg_path, tmpdir):
+ # FIXME: 'easy_install.install_egg' is too complex (11)
+ def install_egg(self, egg_path, tmpdir): # noqa: C901
destination = os.path.join(
self.install_dir,
os.path.basename(egg_path),
@@ -901,7 +922,9 @@ class easy_install(Command):
ensure_directory(destination)
dist = self.egg_distribution(egg_path)
- if not samefile(egg_path, destination):
+ if not (
+ os.path.exists(destination) and os.path.samefile(egg_path, destination)
+ ):
if os.path.isdir(destination) and not os.path.islink(destination):
dir_util.remove_tree(destination, dry_run=self.dry_run)
elif os.path.exists(destination):
@@ -990,7 +1013,8 @@ class easy_install(Command):
# install the .egg
return self.install_egg(egg_path, tmpdir)
- def exe_to_egg(self, dist_filename, egg_tmp):
+ # FIXME: 'easy_install.exe_to_egg' is too complex (12)
+ def exe_to_egg(self, dist_filename, egg_tmp): # noqa: C901
"""Extract a bdist_wininst to the directories an egg would use"""
# Check for .pth file and set up prefix translations
prefixes = get_exe_prefixes(dist_filename)
@@ -1081,13 +1105,13 @@ class easy_install(Command):
pkg_resources.require("%(name)s") # latest installed version
pkg_resources.require("%(name)s==%(version)s") # this exact version
pkg_resources.require("%(name)s>=%(version)s") # this version or higher
- """).lstrip()
+ """).lstrip() # noqa
__id_warning = textwrap.dedent("""
Note also that the installation directory must be on sys.path at runtime for
this to work. (e.g. by being the application's script directory, by being on
PYTHONPATH, or by being added to sys.path by your code.)
- """)
+ """) # noqa
def installation_report(self, req, dist, what="Installed"):
"""Helpful installation message for display to package users"""
@@ -1112,7 +1136,7 @@ class easy_install(Command):
%(python)s setup.py develop
See the setuptools documentation for the "develop" command for more info.
- """).lstrip()
+ """).lstrip() # noqa
def report_editable(self, spec, setup_script):
dirname = os.path.dirname(setup_script)
@@ -1137,7 +1161,9 @@ class easy_install(Command):
try:
run_setup(setup_script, args)
except SystemExit as v:
- raise DistutilsError("Setup script exited with %s" % (v.args[0],))
+ raise DistutilsError(
+ "Setup script exited with %s" % (v.args[0],)
+ ) from v
def build_and_install(self, setup_script, setup_base):
args = ['bdist_egg', '--dist-dir']
@@ -1174,29 +1200,30 @@ class easy_install(Command):
# to the setup.cfg file.
ei_opts = self.distribution.get_option_dict('easy_install').copy()
fetch_directives = (
- 'find_links', 'site_dirs', 'index_url', 'optimize',
- 'site_dirs', 'allow_hosts',
+ 'find_links', 'site_dirs', 'index_url', 'optimize', 'allow_hosts',
)
fetch_options = {}
for key, val in ei_opts.items():
if key not in fetch_directives:
continue
- fetch_options[key.replace('_', '-')] = val[1]
+ fetch_options[key] = val[1]
# create a settings dictionary suitable for `edit_config`
settings = dict(easy_install=fetch_options)
cfg_filename = os.path.join(base, 'setup.cfg')
setopt.edit_config(cfg_filename, settings)
- def update_pth(self, dist):
+ def update_pth(self, dist): # noqa: C901 # is too complex (11) # FIXME
if self.pth_file is None:
return
for d in self.pth_file[dist.key]: # drop old entries
- if self.multi_version or d.location != dist.location:
- log.info("Removing %s from easy-install.pth file", d)
- self.pth_file.remove(d)
- if d.location in self.shadow_path:
- self.shadow_path.remove(d.location)
+ if not self.multi_version and d.location == dist.location:
+ continue
+
+ log.info("Removing %s from easy-install.pth file", d)
+ self.pth_file.remove(d)
+ if d.location in self.shadow_path:
+ self.shadow_path.remove(d.location)
if not self.multi_version:
if dist.location in self.pth_file.paths:
@@ -1210,19 +1237,21 @@ class easy_install(Command):
if dist.location not in self.shadow_path:
self.shadow_path.append(dist.location)
- if not self.dry_run:
+ if self.dry_run:
+ return
- self.pth_file.save()
+ self.pth_file.save()
- if dist.key == 'setuptools':
- # Ensure that setuptools itself never becomes unavailable!
- # XXX should this check for latest version?
- filename = os.path.join(self.install_dir, 'setuptools.pth')
- if os.path.islink(filename):
- os.unlink(filename)
- f = open(filename, 'wt')
- f.write(self.pth_file.make_relative(dist.location) + '\n')
- f.close()
+ if dist.key != 'setuptools':
+ return
+
+ # Ensure that setuptools itself never becomes unavailable!
+ # XXX should this check for latest version?
+ filename = os.path.join(self.install_dir, 'setuptools.pth')
+ if os.path.islink(filename):
+ os.unlink(filename)
+ with open(filename, 'wt') as f:
+ f.write(self.pth_file.make_relative(dist.location) + '\n')
def unpack_progress(self, src, dst):
# Progress filter for unpacking
@@ -1293,53 +1322,18 @@ class easy_install(Command):
* You can set up the installation directory to support ".pth" files by
using one of the approaches described here:
- https://setuptools.readthedocs.io/en/latest/easy_install.html#custom-installation-locations
+ https://setuptools.pypa.io/en/latest/deprecated/easy_install.html#custom-installation-locations
- Please make the appropriate changes for your system and try again.""").lstrip()
-
- def no_default_version_msg(self):
- template = self.__no_default_msg
- return template % (self.install_dir, os.environ.get('PYTHONPATH', ''))
-
- def install_site_py(self):
- """Make sure there's a site.py in the target dir, if needed"""
-
- if self.sitepy_installed:
- return # already did it, or don't need to
-
- sitepy = os.path.join(self.install_dir, "site.py")
- source = resource_string("setuptools", "site-patch.py")
- source = source.decode('utf-8')
- current = ""
-
- if os.path.exists(sitepy):
- log.debug("Checking existing site.py in %s", self.install_dir)
- with io.open(sitepy) as strm:
- current = strm.read()
-
- if not current.startswith('def __boot():'):
- raise DistutilsError(
- "%s is not a setuptools-generated site.py; please"
- " remove it." % sitepy
- )
-
- if current != source:
- log.info("Creating %s", sitepy)
- if not self.dry_run:
- ensure_directory(sitepy)
- with io.open(sitepy, 'w', encoding='utf-8') as strm:
- strm.write(source)
- self.byte_compile([sitepy])
-
- self.sitepy_installed = True
+ Please make the appropriate changes for your system and try again.
+ """).strip()
def create_home_path(self):
"""Create directories under ~."""
if not self.user:
return
home = convert_path(os.path.expanduser("~"))
- for name, path in six.iteritems(self.config_vars):
+ for path in only_strs(self.config_vars.values()):
if path.startswith(home) and not os.path.isdir(path):
self.debug_print("os.makedirs('%s', 0o700)" % path)
os.makedirs(path, 0o700)
@@ -1361,7 +1355,7 @@ class easy_install(Command):
if self.prefix:
# Set default install_dir/scripts from --prefix
- config_vars = config_vars.copy()
+ config_vars = dict(config_vars)
config_vars['base'] = self.prefix
scheme = self.INSTALL_SCHEMES.get(os.name, self.DEFAULT_SCHEME)
for attr, val in scheme.items():
@@ -1398,58 +1392,63 @@ def get_site_dirs():
if sys.exec_prefix != sys.prefix:
prefixes.append(sys.exec_prefix)
for prefix in prefixes:
- if prefix:
- if sys.platform in ('os2emx', 'riscos'):
- sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
- elif os.sep == '/':
- sitedirs.extend([
- os.path.join(
- prefix,
- "lib",
- "python" + sys.version[:3],
- "site-packages",
- ),
- os.path.join(prefix, "lib", "site-python"),
- ])
- else:
- sitedirs.extend([
+ if not prefix:
+ continue
+
+ if sys.platform in ('os2emx', 'riscos'):
+ sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
+ elif os.sep == '/':
+ sitedirs.extend([
+ os.path.join(
prefix,
- os.path.join(prefix, "lib", "site-packages"),
- ])
- if sys.platform == 'darwin':
- # for framework builds *only* we add the standard Apple
- # locations. Currently only per-user, but /Library and
- # /Network/Library could be added too
- if 'Python.framework' in prefix:
- home = os.environ.get('HOME')
- if home:
- home_sp = os.path.join(
- home,
- 'Library',
- 'Python',
- sys.version[:3],
- 'site-packages',
- )
- sitedirs.append(home_sp)
+ "lib",
+ "python{}.{}".format(*sys.version_info),
+ "site-packages",
+ ),
+ os.path.join(prefix, "lib", "site-python"),
+ ])
+ else:
+ sitedirs.extend([
+ prefix,
+ os.path.join(prefix, "lib", "site-packages"),
+ ])
+ if sys.platform != 'darwin':
+ continue
+
+ # for framework builds *only* we add the standard Apple
+ # locations. Currently only per-user, but /Library and
+ # /Network/Library could be added too
+ if 'Python.framework' not in prefix:
+ continue
+
+ home = os.environ.get('HOME')
+ if not home:
+ continue
+
+ home_sp = os.path.join(
+ home,
+ 'Library',
+ 'Python',
+ '{}.{}'.format(*sys.version_info),
+ 'site-packages',
+ )
+ sitedirs.append(home_sp)
lib_paths = get_path('purelib'), get_path('platlib')
- for site_lib in lib_paths:
- if site_lib not in sitedirs:
- sitedirs.append(site_lib)
+
+ sitedirs.extend(s for s in lib_paths if s not in sitedirs)
if site.ENABLE_USER_SITE:
sitedirs.append(site.USER_SITE)
- try:
+ with contextlib.suppress(AttributeError):
sitedirs.extend(site.getsitepackages())
- except AttributeError:
- pass
sitedirs = list(map(normalize_path, sitedirs))
return sitedirs
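A small aside on the version-string handling in this hunk (example code, not part of the patch): slicing sys.version[:3], as the removed lines did, silently drops a digit on Python 3.10 and later, while formatting sys.version_info keeps the full minor version.

import sys
# hypothetical values shown for an interpreter reporting "3.10.4 (...)"
old_style = "python" + sys.version[:3]                # "python3.1"  -- truncated on 3.10+
new_style = "python{}.{}".format(*sys.version_info)   # "python3.10" -- always complete
print(old_style, new_style)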
-def expand_paths(inputs):
+def expand_paths(inputs): # noqa: C901 # is too complex (11) # FIXME
"""Yield sys.path directories that might contain "old-style" packages"""
seen = {}
@@ -1481,13 +1480,18 @@ def expand_paths(inputs):
# Yield existing non-dupe, non-import directory lines from it
for line in lines:
- if not line.startswith("import"):
- line = normalize_path(line.rstrip())
- if line not in seen:
- seen[line] = 1
- if not os.path.isdir(line):
- continue
- yield line, os.listdir(line)
+ if line.startswith("import"):
+ continue
+
+ line = normalize_path(line.rstrip())
+ if line in seen:
+ continue
+
+ seen[line] = 1
+ if not os.path.isdir(line):
+ continue
+
+ yield line, os.listdir(line)
def extract_wininst_cfg(dist_filename):
@@ -1520,7 +1524,7 @@ def extract_wininst_cfg(dist_filename):
# Now the config is in bytes, but for RawConfigParser, it should
# be text, so decode it.
config = config.decode(sys.getfilesystemencoding())
- cfg.readfp(six.StringIO(config))
+ cfg.read_file(io.StringIO(config))
except configparser.Error:
return None
if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
@@ -1555,9 +1559,7 @@ def get_exe_prefixes(exe_filename):
if name.endswith('-nspkg.pth'):
continue
if parts[0].upper() in ('PURELIB', 'PLATLIB'):
- contents = z.read(name)
- if six.PY3:
- contents = contents.decode()
+ contents = z.read(name).decode()
for pth in yield_lines(contents):
pth = pth.strip().replace('\\', '/')
if not pth.startswith('import'):
@@ -1580,7 +1582,7 @@ class PthDistributions(Environment):
self.sitedirs = list(map(normalize_path, sitedirs))
self.basedir = normalize_path(os.path.dirname(self.filename))
self._load()
- Environment.__init__(self, [], None, None)
+ super().__init__([], None, None)
for path in yield_lines(self.paths):
list(map(self.add, find_distributions(path, True)))
@@ -1653,14 +1655,14 @@ class PthDistributions(Environment):
if new_path:
self.paths.append(dist.location)
self.dirty = True
- Environment.add(self, dist)
+ super().add(dist)
def remove(self, dist):
"""Remove `dist` from the distribution map"""
while dist.location in self.paths:
self.paths.remove(dist.location)
self.dirty = True
- Environment.remove(self, dist)
+ super().remove(dist)
def make_relative(self, path):
npath, last = os.path.split(normalize_path(path))
@@ -1721,7 +1723,8 @@ def auto_chmod(func, arg, exc):
chmod(arg, stat.S_IWRITE)
return func(arg)
et, ev, _ = sys.exc_info()
- six.reraise(et, (ev[0], ev[1] + (" %s %s" % (func, arg))))
+ # TODO: This code doesn't make sense. What is it trying to do?
+ raise (ev[0], ev[1] + (" %s %s" % (func, arg)))
def update_dist_caches(dist_path, fix_zipimporter_caches):
@@ -2049,7 +2052,7 @@ class WindowsCommandSpec(CommandSpec):
split_args = dict(posix=False)
-class ScriptWriter(object):
+class ScriptWriter:
"""
Encapsulates behavior around writing entry point scripts for console and
gui apps.
@@ -2057,24 +2060,45 @@ class ScriptWriter(object):
template = textwrap.dedent(r"""
# EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r
- __requires__ = %(spec)r
import re
import sys
- from pkg_resources import load_entry_point
+
+ # for compatibility with easy_install; see #2198
+ __requires__ = %(spec)r
+
+ try:
+ from importlib.metadata import distribution
+ except ImportError:
+ try:
+ from importlib_metadata import distribution
+ except ImportError:
+ from pkg_resources import load_entry_point
+
+
+ def importlib_load_entry_point(spec, group, name):
+ dist_name, _, _ = spec.partition('==')
+ matches = (
+ entry_point
+ for entry_point in distribution(dist_name).entry_points
+ if entry_point.group == group and entry_point.name == name
+ )
+ return next(matches).load()
+
+
+ globals().setdefault('load_entry_point', importlib_load_entry_point)
+
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
- sys.exit(
- load_entry_point(%(spec)r, %(group)r, %(name)r)()
- )
- """).lstrip()
+ sys.exit(load_entry_point(%(spec)r, %(group)r, %(name)r)())
+ """).lstrip()
command_spec_class = CommandSpec
@classmethod
def get_script_args(cls, dist, executable=None, wininst=False):
# for backward compatibility
- warnings.warn("Use get_args", DeprecationWarning)
+ warnings.warn("Use get_args", EasyInstallDeprecationWarning)
writer = (WindowsScriptWriter if wininst else ScriptWriter).best()
header = cls.get_script_header("", executable, wininst)
return writer.get_args(dist, header)
@@ -2082,12 +2106,11 @@ class ScriptWriter(object):
@classmethod
def get_script_header(cls, script_text, executable=None, wininst=False):
# for backward compatibility
- warnings.warn("Use get_header", DeprecationWarning)
+ warnings.warn(
+ "Use get_header", EasyInstallDeprecationWarning, stacklevel=2)
if wininst:
executable = "python.exe"
- cmd = cls.command_spec_class.best().from_param(executable)
- cmd.install_options(script_text)
- return cmd.as_header()
+ return cls.get_header(script_text, executable)
@classmethod
def get_args(cls, dist, header=None):
@@ -2119,7 +2142,7 @@ class ScriptWriter(object):
@classmethod
def get_writer(cls, force_windows):
# for backward compatibility
- warnings.warn("Use best", DeprecationWarning)
+ warnings.warn("Use best", EasyInstallDeprecationWarning)
return WindowsScriptWriter.best() if force_windows else cls.best()
@classmethod
@@ -2151,7 +2174,7 @@ class WindowsScriptWriter(ScriptWriter):
@classmethod
def get_writer(cls):
# for backward compatibility
- warnings.warn("Use best", DeprecationWarning)
+ warnings.warn("Use best", EasyInstallDeprecationWarning)
return cls.best()
@classmethod
@@ -2186,7 +2209,7 @@ class WindowsScriptWriter(ScriptWriter):
@classmethod
def _adjust_header(cls, type_, orig_header):
"""
- Make sure 'pythonw' is used for gui and and 'python' is used for
+ Make sure 'pythonw' is used for gui and 'python' is used for
console (regardless of what sys.executable is).
"""
pattern = 'pythonw.exe'
@@ -2256,7 +2279,10 @@ def get_win_launcher(type):
"""
launcher_fn = '%s.exe' % type
if is_64bit():
- launcher_fn = launcher_fn.replace(".", "-64.")
+ if get_platform() == "win-arm64":
+ launcher_fn = launcher_fn.replace(".", "-arm64.")
+ else:
+ launcher_fn = launcher_fn.replace(".", "-64.")
else:
launcher_fn = launcher_fn.replace(".", "-32.")
return resource_string('setuptools', launcher_fn)
@@ -2264,10 +2290,7 @@ def get_win_launcher(type):
def load_launcher_manifest(name):
manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml')
- if six.PY2:
- return manifest % vars()
- else:
- return manifest.decode('utf-8') % vars()
+ return manifest.decode('utf-8') % vars()
def rmtree(path, ignore_errors=False, onerror=auto_chmod):
@@ -2280,55 +2303,14 @@ def current_umask():
return tmp
-def bootstrap():
- # This function is called when setuptools*.egg is run using /bin/sh
- import setuptools
-
- argv0 = os.path.dirname(setuptools.__path__[0])
- sys.argv[0] = argv0
- sys.argv.append(argv0)
- main()
-
-
-def main(argv=None, **kw):
- from setuptools import setup
- from setuptools.dist import Distribution
-
- class DistributionWithoutHelpCommands(Distribution):
- common_usage = ""
-
- def _show_help(self, *args, **kw):
- with _patch_usage():
- Distribution._show_help(self, *args, **kw)
-
- if argv is None:
- argv = sys.argv[1:]
-
- with _patch_usage():
- setup(
- script_args=['-q', 'easy_install', '-v'] + argv,
- script_name=sys.argv[0] or 'easy_install',
- distclass=DistributionWithoutHelpCommands,
- **kw
- )
-
-
-@contextlib.contextmanager
-def _patch_usage():
- import distutils.core
- USAGE = textwrap.dedent("""
- usage: %(script)s [options] requirement_or_url ...
- or: %(script)s --help
- """).lstrip()
+def only_strs(values):
+ """
+ Exclude non-str values. Ref #3063.
+ """
+ return filter(lambda val: isinstance(val, str), values)
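To make the new helper concrete (example code, not part of the patch): only_strs() protects create_home_path() from non-string entries in config_vars, which would otherwise break path.startswith(); the dictionary below is made-up example data.

def only_strs(values):
    return filter(lambda val: isinstance(val, str), values)

config_vars = {'userbase': '/home/user/.local', 'some_numeric_var': 310}
assert list(only_strs(config_vars.values())) == ['/home/user/.local']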
- def gen_usage(script_name):
- return USAGE % dict(
- script=os.path.basename(script_name),
- )
- saved = distutils.core.gen_usage
- distutils.core.gen_usage = gen_usage
- try:
- yield
- finally:
- distutils.core.gen_usage = saved
+class EasyInstallDeprecationWarning(SetuptoolsDeprecationWarning):
+ """
+ Warning for EasyInstall deprecations, bypassing suppression.
+ """
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index f3e604d..6338965 100755..100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -8,6 +8,7 @@ from distutils.util import convert_path
from distutils import log
import distutils.errors
import distutils.filelist
+import functools
import os
import re
import sys
@@ -16,8 +17,8 @@ import warnings
import time
import collections
-from setuptools.extern import six
-from setuptools.extern.six.moves import map
+from .._importlib import metadata
+from .. import _entry_points
from setuptools import Command
from setuptools.command.sdist import sdist
@@ -25,15 +26,17 @@ from setuptools.command.sdist import walk_revctrl
from setuptools.command.setopt import edit_config
from setuptools.command import bdist_egg
from pkg_resources import (
- parse_requirements, safe_name, parse_version,
- safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
+ Requirement, safe_name, parse_version,
+ safe_version, to_filename)
import setuptools.unicode_utils as unicode_utils
from setuptools.glob import glob
from setuptools.extern import packaging
+from setuptools.extern.jaraco.text import yield_lines
+from setuptools import SetuptoolsDeprecationWarning
-def translate_pattern(glob):
+def translate_pattern(glob): # noqa: C901 # is too complex (14) # FIXME
"""
Translate a file path glob like '*.txt' in to a regular expression.
This differs from fnmatch.translate which allows wildcards to match
@@ -113,10 +116,41 @@ def translate_pattern(glob):
pat += sep
pat += r'\Z'
- return re.compile(pat, flags=re.MULTILINE|re.DOTALL)
+ return re.compile(pat, flags=re.MULTILINE | re.DOTALL)
-class egg_info(Command):
+class InfoCommon:
+ tag_build = None
+ tag_date = None
+
+ @property
+ def name(self):
+ return safe_name(self.distribution.get_name())
+
+ def tagged_version(self):
+ return safe_version(self._maybe_tag(self.distribution.get_version()))
+
+ def _maybe_tag(self, version):
+ """
+ egg_info may be called more than once for a distribution,
+ in which case the version string already contains all tags.
+ """
+ return (
+ version if self.vtags and version.endswith(self.vtags)
+ else version + self.vtags
+ )
+
+ def tags(self):
+ version = ''
+ if self.tag_build:
+ version += self.tag_build
+ if self.tag_date:
+ version += time.strftime("-%Y%m%d")
+ return version
+ vtags = property(tags)
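A trimmed-down stand-in showing why _maybe_tag() is safe to call repeatedly (example code, not part of the patch; the ".dev" build tag is hypothetical): egg_info may run more than once, and the second run must not append the tag again.

class InfoCommonDemo:
    tag_build = ".dev"
    tag_date = None

    def tags(self):
        version = ''
        if self.tag_build:
            version += self.tag_build
        return version
    vtags = property(tags)

    def _maybe_tag(self, version):
        # append the tag only if the version does not already carry it
        return (
            version if self.vtags and version.endswith(self.vtags)
            else version + self.vtags
        )

demo = InfoCommonDemo()
assert demo._maybe_tag("1.0") == "1.0.dev"      # first egg_info run appends the tag
assert demo._maybe_tag("1.0.dev") == "1.0.dev"  # a second run leaves it untouched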
+
+
+class egg_info(InfoCommon, Command):
description = "create a distribution's .egg-info directory"
user_options = [
@@ -133,14 +167,11 @@ class egg_info(Command):
}
def initialize_options(self):
- self.egg_name = None
- self.egg_version = None
self.egg_base = None
+ self.egg_name = None
self.egg_info = None
- self.tag_build = None
- self.tag_date = 0
+ self.egg_version = None
self.broken_egg_info = False
- self.vtags = None
####################################
# allow the 'tag_svn_revision' to be detected and
@@ -168,25 +199,23 @@ class egg_info(Command):
edit_config(filename, dict(egg_info=egg_info))
def finalize_options(self):
- self.egg_name = safe_name(self.distribution.get_name())
- self.vtags = self.tags()
+ # Note: we need to capture the current value returned
+ # by `self.tagged_version()`, so we can later update
+ # `self.distribution.metadata.version` without
+ # repercussions.
+ self.egg_name = self.name
self.egg_version = self.tagged_version()
-
parsed_version = parse_version(self.egg_version)
try:
is_version = isinstance(parsed_version, packaging.version.Version)
- spec = (
- "%s==%s" if is_version else "%s===%s"
- )
- list(
- parse_requirements(spec % (self.egg_name, self.egg_version))
- )
- except ValueError:
+ spec = "%s==%s" if is_version else "%s===%s"
+ Requirement(spec % (self.egg_name, self.egg_version))
+ except ValueError as e:
raise distutils.errors.DistutilsOptionError(
"Invalid distribution name or version syntax: %s-%s" %
(self.egg_name, self.egg_version)
- )
+ ) from e
if self.egg_base is None:
dirs = self.distribution.package_dir
@@ -241,8 +270,7 @@ class egg_info(Command):
to the file.
"""
log.info("writing %s to %s", what, filename)
- if six.PY3:
- data = data.encode("utf-8")
+ data = data.encode("utf-8")
if not self.dry_run:
f = open(filename, 'wb')
f.write(data)
@@ -254,20 +282,12 @@ class egg_info(Command):
if not self.dry_run:
os.unlink(filename)
- def tagged_version(self):
- version = self.distribution.get_version()
- # egg_info may be called more than once for a distribution,
- # in which case the version string already contains all tags.
- if self.vtags and version.endswith(self.vtags):
- return safe_version(version)
- return safe_version(version + self.vtags)
-
def run(self):
self.mkpath(self.egg_info)
- installer = self.distribution.fetch_build_egg
- for ep in iter_entry_points('egg_info.writers'):
- ep.require(installer=installer)
- writer = ep.resolve()
+ os.utime(self.egg_info, None)
+ for ep in metadata.entry_points(group='egg_info.writers'):
+ self.distribution._install_dependencies(ep)
+ writer = ep.load()
writer(self, ep.name, os.path.join(self.egg_info, ep.name))
# Get rid of native_libs.txt if it was put there by older bdist_egg
@@ -277,14 +297,6 @@ class egg_info(Command):
self.find_sources()
- def tags(self):
- version = ''
- if self.tag_build:
- version += self.tag_build
- if self.tag_date:
- version += time.strftime("-%Y%m%d")
- return version
-
def find_sources(self):
"""Generate SOURCES.txt manifest file"""
manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
@@ -320,70 +332,74 @@ class FileList(_FileList):
# patterns, (dir and patterns), or (dir_pattern).
(action, patterns, dir, dir_pattern) = self._parse_template_line(line)
+ action_map = {
+ 'include': self.include,
+ 'exclude': self.exclude,
+ 'global-include': self.global_include,
+ 'global-exclude': self.global_exclude,
+ 'recursive-include': functools.partial(
+ self.recursive_include, dir,
+ ),
+ 'recursive-exclude': functools.partial(
+ self.recursive_exclude, dir,
+ ),
+ 'graft': self.graft,
+ 'prune': self.prune,
+ }
+ log_map = {
+ 'include': "warning: no files found matching '%s'",
+ 'exclude': (
+ "warning: no previously-included files found "
+ "matching '%s'"
+ ),
+ 'global-include': (
+ "warning: no files found matching '%s' "
+ "anywhere in distribution"
+ ),
+ 'global-exclude': (
+ "warning: no previously-included files matching "
+ "'%s' found anywhere in distribution"
+ ),
+ 'recursive-include': (
+ "warning: no files found matching '%s' "
+ "under directory '%s'"
+ ),
+ 'recursive-exclude': (
+ "warning: no previously-included files matching "
+ "'%s' found under directory '%s'"
+ ),
+ 'graft': "warning: no directories found matching '%s'",
+ 'prune': "no previously-included directories found matching '%s'",
+ }
+
+ try:
+ process_action = action_map[action]
+ except KeyError:
+ raise DistutilsInternalError(
+ "this cannot happen: invalid action '{action!s}'".
+ format(action=action),
+ )
+
# OK, now we know that the action is valid and we have the
# right number of words on the line for that action -- so we
# can proceed with minimal error-checking.
- if action == 'include':
- self.debug_print("include " + ' '.join(patterns))
- for pattern in patterns:
- if not self.include(pattern):
- log.warn("warning: no files found matching '%s'", pattern)
-
- elif action == 'exclude':
- self.debug_print("exclude " + ' '.join(patterns))
- for pattern in patterns:
- if not self.exclude(pattern):
- log.warn(("warning: no previously-included files "
- "found matching '%s'"), pattern)
-
- elif action == 'global-include':
- self.debug_print("global-include " + ' '.join(patterns))
- for pattern in patterns:
- if not self.global_include(pattern):
- log.warn(("warning: no files found matching '%s' "
- "anywhere in distribution"), pattern)
-
- elif action == 'global-exclude':
- self.debug_print("global-exclude " + ' '.join(patterns))
- for pattern in patterns:
- if not self.global_exclude(pattern):
- log.warn(("warning: no previously-included files matching "
- "'%s' found anywhere in distribution"),
- pattern)
-
- elif action == 'recursive-include':
- self.debug_print("recursive-include %s %s" %
- (dir, ' '.join(patterns)))
- for pattern in patterns:
- if not self.recursive_include(dir, pattern):
- log.warn(("warning: no files found matching '%s' "
- "under directory '%s'"),
- pattern, dir)
-
- elif action == 'recursive-exclude':
- self.debug_print("recursive-exclude %s %s" %
- (dir, ' '.join(patterns)))
- for pattern in patterns:
- if not self.recursive_exclude(dir, pattern):
- log.warn(("warning: no previously-included files matching "
- "'%s' found under directory '%s'"),
- pattern, dir)
-
- elif action == 'graft':
- self.debug_print("graft " + dir_pattern)
- if not self.graft(dir_pattern):
- log.warn("warning: no directories found matching '%s'",
- dir_pattern)
-
- elif action == 'prune':
- self.debug_print("prune " + dir_pattern)
- if not self.prune(dir_pattern):
- log.warn(("no previously-included directories found "
- "matching '%s'"), dir_pattern)
-
- else:
- raise DistutilsInternalError(
- "this cannot happen: invalid action '%s'" % action)
+
+ action_is_recursive = action.startswith('recursive-')
+ if action in {'graft', 'prune'}:
+ patterns = [dir_pattern]
+ extra_log_args = (dir, ) if action_is_recursive else ()
+ log_tmpl = log_map[action]
+
+ self.debug_print(
+ ' '.join(
+ [action] +
+ ([dir] if action_is_recursive else []) +
+ patterns,
+ )
+ )
+ for pattern in patterns:
+ if not process_action(pattern):
+ log.warn(log_tmpl, pattern, *extra_log_args)
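The dispatch-table pattern used above, reduced to a minimal sketch (example code, not part of the patch; the directory and pattern are example values): functools.partial pre-binds the directory for the recursive-* actions so every table entry can be called uniformly as fn(pattern).

import functools

def recursive_include(dir, pattern):
    print("including %r under %r" % (pattern, dir))
    return True

action_map = {
    'include': lambda pattern: True,
    'recursive-include': functools.partial(recursive_include, 'src'),
}
action_map['recursive-include']('*.txt')   # -> including '*.txt' under 'src'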
def _remove_files(self, predicate):
"""
@@ -524,6 +540,7 @@ class manifest_maker(sdist):
self.add_defaults()
if os.path.exists(self.template):
self.read_template()
+ self.add_license_files()
self.prune_file_list()
self.filelist.sort()
self.filelist.remove_duplicates()
@@ -565,9 +582,22 @@ class manifest_maker(sdist):
self.filelist.extend(rcfiles)
elif os.path.exists(self.manifest):
self.read_manifest()
+
+ if os.path.exists("setup.py"):
+ # setup.py should be included by default, even if it's not
+ # the script called to create the sdist
+ self.filelist.append("setup.py")
+
ei_cmd = self.get_finalized_command('egg_info')
self.filelist.graft(ei_cmd.egg_info)
+ def add_license_files(self):
+ license_files = self.distribution.metadata.license_files or []
+ for lf in license_files:
+ log.info("adding license file '%s'", lf)
+ pass
+ self.filelist.extend(license_files)
+
def prune_file_list(self):
build = self.get_finalized_command('build')
base_dir = self.distribution.get_fullname()
@@ -577,6 +607,27 @@ class manifest_maker(sdist):
self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,
is_regex=1)
+ def _safe_data_files(self, build_py):
+ """
+ The parent class implementation of this method
+ (``sdist``) will try to include data files, which
+ might cause recursion problems when
+ ``include_package_data=True``.
+
+ Therefore, avoid triggering any attempt of
+ analyzing/building the manifest again.
+ """
+ if hasattr(build_py, 'get_data_files_without_manifest'):
+ return build_py.get_data_files_without_manifest()
+
+ warnings.warn(
+ "Custom 'build_py' does not implement "
+ "'get_data_files_without_manifest'.\nPlease extend command classes"
+ " from setuptools instead of distutils.",
+ SetuptoolsDeprecationWarning
+ )
+ return build_py.get_data_files()
+
def write_file(filename, contents):
"""Create a file with the specified name and write 'contents' (a
@@ -620,14 +671,16 @@ def warn_depends_obsolete(cmd, basename, filename):
def _write_requirements(stream, reqs):
lines = yield_lines(reqs or ())
- append_cr = lambda line: line + '\n'
+
+ def append_cr(line):
+ return line + '\n'
lines = map(append_cr, lines)
stream.writelines(lines)
def write_requirements(cmd, basename, filename):
dist = cmd.distribution
- data = six.StringIO()
+ data = io.StringIO()
_write_requirements(data, dist.install_requires)
extras_require = dist.extras_require or {}
for extra in sorted(extras_require):
@@ -665,20 +718,9 @@ def write_arg(cmd, basename, filename, force=False):
def write_entries(cmd, basename, filename):
- ep = cmd.distribution.entry_points
-
- if isinstance(ep, six.string_types) or ep is None:
- data = ep
- elif ep is not None:
- data = []
- for section, contents in sorted(ep.items()):
- if not isinstance(contents, six.string_types):
- contents = EntryPoint.parse_group(section, contents)
- contents = '\n'.join(sorted(map(str, contents.values())))
- data.append('[%s]\n%s\n\n' % (section, contents))
- data = ''.join(data)
-
- cmd.write_or_delete_file('entry points', filename, data, True)
+ eps = _entry_points.load(cmd.distribution.entry_points)
+ defn = _entry_points.render(eps)
+ cmd.write_or_delete_file('entry points', filename, defn, True)
def get_pkg_info_revision():
@@ -686,7 +728,8 @@ def get_pkg_info_revision():
Get a -r### off of PKG-INFO Version in case this is an sdist of
a subversion revision.
"""
- warnings.warn("get_pkg_info_revision is deprecated.", DeprecationWarning)
+ warnings.warn(
+ "get_pkg_info_revision is deprecated.", EggInfoDeprecationWarning)
if os.path.exists('PKG-INFO'):
with io.open('PKG-INFO') as f:
for line in f:
@@ -694,3 +737,7 @@ def get_pkg_info_revision():
if match:
return int(match.group(1))
return 0
+
+
+class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning):
+ """Deprecated behavior warning for EggInfo, bypassing suppression."""
diff --git a/setuptools/command/install.py b/setuptools/command/install.py
index 31a5ddb..35e54d2 100644
--- a/setuptools/command/install.py
+++ b/setuptools/command/install.py
@@ -30,6 +30,13 @@ class install(orig.install):
_nc = dict(new_commands)
def initialize_options(self):
+
+ warnings.warn(
+ "setup.py install is deprecated. "
+ "Use build and pip and other standards-based tools.",
+ setuptools.SetuptoolsDeprecationWarning,
+ )
+
orig.install.initialize_options(self)
self.old_and_unmanageable = None
self.single_version_externally_managed = None
@@ -114,7 +121,7 @@ class install(orig.install):
args.insert(0, setuptools.bootstrap_install_from)
cmd.args = args
- cmd.run()
+ cmd.run(show_deprecation=False)
setuptools.bootstrap_install_from = None
diff --git a/setuptools/command/install_egg_info.py b/setuptools/command/install_egg_info.py
index edc4718..65ede40 100755..100644
--- a/setuptools/command/install_egg_info.py
+++ b/setuptools/command/install_egg_info.py
@@ -4,6 +4,7 @@ import os
from setuptools import Command
from setuptools import namespaces
from setuptools.archive_util import unpack_archive
+from .._path import ensure_directory
import pkg_resources
@@ -37,7 +38,7 @@ class install_egg_info(namespaces.Installer, Command):
elif os.path.exists(self.target):
self.execute(os.unlink, (self.target,), "Removing " + self.target)
if not self.dry_run:
- pkg_resources.ensure_directory(self.target)
+ ensure_directory(self.target)
self.execute(
self.copytree, (), "Copying %s to %s" % (self.source, self.target)
)
diff --git a/setuptools/command/install_lib.py b/setuptools/command/install_lib.py
index 2b31c3e..2e9d875 100644
--- a/setuptools/command/install_lib.py
+++ b/setuptools/command/install_lib.py
@@ -1,5 +1,5 @@
import os
-import imp
+import sys
from itertools import product, starmap
import distutils.command.install_lib as orig
@@ -74,10 +74,11 @@ class install_lib(orig.install_lib):
yield '__init__.pyc'
yield '__init__.pyo'
- if not hasattr(imp, 'get_tag'):
+ if not hasattr(sys, 'implementation'):
return
- base = os.path.join('__pycache__', '__init__.' + imp.get_tag())
+ base = os.path.join(
+ '__pycache__', '__init__.' + sys.implementation.cache_tag)
yield base + '.pyc'
yield base + '.pyo'
yield base + '.opt-1.pyc'
diff --git a/setuptools/command/install_scripts.py b/setuptools/command/install_scripts.py
index 1623427..aeb0e42 100755..100644
--- a/setuptools/command/install_scripts.py
+++ b/setuptools/command/install_scripts.py
@@ -1,9 +1,11 @@
from distutils import log
import distutils.command.install_scripts as orig
+from distutils.errors import DistutilsModuleError
import os
import sys
-from pkg_resources import Distribution, PathMetadata, ensure_directory
+from pkg_resources import Distribution, PathMetadata
+from .._path import ensure_directory
class install_scripts(orig.install_scripts):
@@ -32,8 +34,11 @@ class install_scripts(orig.install_scripts):
)
bs_cmd = self.get_finalized_command('build_scripts')
exec_param = getattr(bs_cmd, 'executable', None)
- bw_cmd = self.get_finalized_command("bdist_wininst")
- is_wininst = getattr(bw_cmd, '_is_running', False)
+ try:
+ bw_cmd = self.get_finalized_command("bdist_wininst")
+ is_wininst = getattr(bw_cmd, '_is_running', False)
+ except (ImportError, DistutilsModuleError):
+ is_wininst = False
writer = ei.ScriptWriter
if is_wininst:
exec_param = "python.exe"
diff --git a/setuptools/command/py36compat.py b/setuptools/command/py36compat.py
index 61063e7..343547a 100644
--- a/setuptools/command/py36compat.py
+++ b/setuptools/command/py36compat.py
@@ -3,8 +3,6 @@ from glob import glob
from distutils.util import convert_path
from distutils.command import sdist
-from setuptools.extern.six.moves import filter
-
class sdist_add_defaults:
"""
@@ -132,5 +130,5 @@ class sdist_add_defaults:
if hasattr(sdist.sdist, '_add_defaults_standards'):
# disable the functionality already available upstream
- class sdist_add_defaults:
+ class sdist_add_defaults: # noqa
pass
diff --git a/setuptools/command/register.py b/setuptools/command/register.py
index 8d6336a..b8266b9 100755..100644
--- a/setuptools/command/register.py
+++ b/setuptools/command/register.py
@@ -1,10 +1,18 @@
+from distutils import log
import distutils.command.register as orig
+from setuptools.errors import RemovedCommandError
+
class register(orig.register):
- __doc__ = orig.register.__doc__
+ """Formerly used to register packages on PyPI."""
def run(self):
- # Make sure that we are using valid current name/version info
- self.run_command('egg_info')
- orig.register.run(self)
+ msg = (
+ "The register command has been removed, use twine to upload "
+ + "instead (https://pypi.org/p/twine)"
+ )
+
+ self.announce("ERROR: " + msg, log.ERROR)
+
+ raise RemovedCommandError(msg)
diff --git a/setuptools/command/rotate.py b/setuptools/command/rotate.py
index b89353f..74795ba 100755..100644
--- a/setuptools/command/rotate.py
+++ b/setuptools/command/rotate.py
@@ -4,8 +4,6 @@ from distutils.errors import DistutilsOptionError
import os
import shutil
-from setuptools.extern import six
-
from setuptools import Command
@@ -36,9 +34,9 @@ class rotate(Command):
raise DistutilsOptionError("Must specify number of files to keep")
try:
self.keep = int(self.keep)
- except ValueError:
- raise DistutilsOptionError("--keep must be an integer")
- if isinstance(self.match, six.string_types):
+ except ValueError as e:
+ raise DistutilsOptionError("--keep must be an integer") from e
+ if isinstance(self.match, str):
self.match = [
convert_path(p.strip()) for p in self.match.split(',')
]
diff --git a/setuptools/command/saveopts.py b/setuptools/command/saveopts.py
index 611cec5..611cec5 100755..100644
--- a/setuptools/command/saveopts.py
+++ b/setuptools/command/saveopts.py
diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py
index bcfae4d..0ffeacf 100755..100644
--- a/setuptools/command/sdist.py
+++ b/setuptools/command/sdist.py
@@ -5,18 +5,16 @@ import sys
import io
import contextlib
-from setuptools.extern import six
-
from .py36compat import sdist_add_defaults
-import pkg_resources
+from .._importlib import metadata
_default_revctrl = list
def walk_revctrl(dirname=''):
"""Find all files under revision control"""
- for ep in pkg_resources.iter_entry_points('setuptools.file_finders'):
+ for ep in metadata.entry_points(group='setuptools.file_finders'):
for item in ep.load()(dirname):
yield item
@@ -33,6 +31,10 @@ class sdist(sdist_add_defaults, orig.sdist):
('dist-dir=', 'd',
"directory to put the source distribution archive(s) in "
"[default: dist]"),
+ ('owner=', 'u',
+ "Owner name used when creating a tar file [default: current user]"),
+ ('group=', 'g',
+ "Group name used when creating a tar file [default: current group]"),
]
negative_opt = {}
@@ -98,49 +100,44 @@ class sdist(sdist_add_defaults, orig.sdist):
if orig_val is not NoValue:
setattr(os, 'link', orig_val)
- def __read_template_hack(self):
- # This grody hack closes the template file (MANIFEST.in) if an
- # exception occurs during read_template.
- # Doing so prevents an error when easy_install attempts to delete the
- # file.
- try:
- orig.sdist.read_template(self)
- except Exception:
- _, _, tb = sys.exc_info()
- tb.tb_next.tb_frame.f_locals['template'].close()
- raise
-
- # Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle
- # has been fixed, so only override the method if we're using an earlier
- # Python.
- has_leaky_handle = (
- sys.version_info < (2, 7, 2)
- or (3, 0) <= sys.version_info < (3, 1, 4)
- or (3, 2) <= sys.version_info < (3, 2, 1)
- )
- if has_leaky_handle:
- read_template = __read_template_hack
+ def _add_defaults_optional(self):
+ super()._add_defaults_optional()
+ if os.path.isfile('pyproject.toml'):
+ self.filelist.append('pyproject.toml')
def _add_defaults_python(self):
"""getting python files"""
if self.distribution.has_pure_modules():
build_py = self.get_finalized_command('build_py')
self.filelist.extend(build_py.get_source_files())
- # This functionality is incompatible with include_package_data, and
- # will in fact create an infinite recursion if include_package_data
- # is True. Use of include_package_data will imply that
- # distutils-style automatic handling of package_data is disabled
- if not self.distribution.include_package_data:
- for _, src_dir, _, filenames in build_py.data_files:
- self.filelist.extend([os.path.join(src_dir, filename)
- for filename in filenames])
+ self._add_data_files(self._safe_data_files(build_py))
+
+ def _safe_data_files(self, build_py):
+ """
+ Since the ``sdist`` class is also used to compute the MANIFEST
+ (via :obj:`setuptools.command.egg_info.manifest_maker`),
+ there might be recursion problems when trying to obtain the list of
+ data_files and ``include_package_data=True`` (which in turn depends on
+ the files included in the MANIFEST).
+
+ To avoid that, ``manifest_maker`` should be able to override this
+ method and avoid recursive attempts to build/analyze the MANIFEST.
+ """
+ return build_py.data_files
+
+ def _add_data_files(self, data_files):
+ """
+ Add data files as found in build_py.data_files.
+ """
+ self.filelist.extend(
+ os.path.join(src_dir, name)
+ for _, src_dir, _, filenames in data_files
+ for name in filenames
+ )
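For reference, the shape of build_py.data_files that _add_data_files() flattens (example code, not part of the patch; the entries are made up): a list of (package, src_dir, build_dir, filenames) tuples reduced to the individual source paths that end up in SOURCES.txt.

import os

data_files = [
    ('pkg', 'src/pkg', 'build/lib/pkg', ['data.json', 'template.html']),
]
flattened = [
    os.path.join(src_dir, name)
    for _, src_dir, _, filenames in data_files
    for name in filenames
]
assert flattened == [os.path.join('src/pkg', 'data.json'),
                     os.path.join('src/pkg', 'template.html')]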
def _add_defaults_data_files(self):
try:
- if six.PY2:
- sdist_add_defaults._add_defaults_data_files(self)
- else:
- super()._add_defaults_data_files()
+ super()._add_defaults_data_files()
except TypeError:
log.warn("data_files contains unexpected objects")
@@ -186,12 +183,11 @@ class sdist(sdist_add_defaults, orig.sdist):
manifest = open(self.manifest, 'rb')
for line in manifest:
# The manifest must contain UTF-8. See #303.
- if six.PY3:
- try:
- line = line.decode('UTF-8')
- except UnicodeDecodeError:
- log.warn("%r not UTF-8 decodable -- skipping" % line)
- continue
+ try:
+ line = line.decode('UTF-8')
+ except UnicodeDecodeError:
+ log.warn("%r not UTF-8 decodable -- skipping" % line)
+ continue
# ignore comments and blank lines
line = line.strip()
if line.startswith('#') or not line:
diff --git a/setuptools/command/setopt.py b/setuptools/command/setopt.py
index 7e57cc0..6358c04 100755..100644
--- a/setuptools/command/setopt.py
+++ b/setuptools/command/setopt.py
@@ -3,8 +3,7 @@ from distutils import log
from distutils.errors import DistutilsOptionError
import distutils
import os
-
-from setuptools.extern.six.moves import configparser
+import configparser
from setuptools import Command
@@ -40,6 +39,7 @@ def edit_config(filename, settings, dry_run=False):
"""
log.debug("Reading configuration from %s", filename)
opts = configparser.RawConfigParser()
+ opts.optionxform = lambda x: x
opts.read([filename])
for section, options in settings.items():
if options is None:
diff --git a/setuptools/command/test.py b/setuptools/command/test.py
index 51aee1f..652f3e4 100644
--- a/setuptools/command/test.py
+++ b/setuptools/command/test.py
@@ -8,17 +8,22 @@ from distutils.errors import DistutilsError, DistutilsOptionError
from distutils import log
from unittest import TestLoader
-from setuptools.extern import six
-from setuptools.extern.six.moves import map, filter
-
-from pkg_resources import (resource_listdir, resource_exists, normalize_path,
- working_set, _namespace_packages, evaluate_marker,
- add_activation_listener, require, EntryPoint)
+from pkg_resources import (
+ resource_listdir,
+ resource_exists,
+ normalize_path,
+ working_set,
+ evaluate_marker,
+ add_activation_listener,
+ require,
+)
+from .._importlib import metadata
from setuptools import Command
+from setuptools.extern.more_itertools import unique_everseen
+from setuptools.extern.jaraco.functools import pass_none
class ScanningLoader(TestLoader):
-
def __init__(self):
TestLoader.__init__(self)
self._visited = set()
@@ -58,7 +63,7 @@ class ScanningLoader(TestLoader):
# adapted from jaraco.classes.properties:NonDataProperty
-class NonDataProperty(object):
+class NonDataProperty:
def __init__(self, fget):
self.fget = fget
@@ -71,12 +76,15 @@ class NonDataProperty(object):
class test(Command):
"""Command to run unit tests after in-place build"""
- description = "run unit tests after in-place build"
+ description = "run unit tests after in-place build (deprecated)"
user_options = [
('test-module=', 'm', "Run 'test_suite' in specified module"),
- ('test-suite=', 's',
- "Run single test, case or suite (e.g. 'module.test_suite')"),
+ (
+ 'test-suite=',
+ 's',
+ "Run single test, case or suite (e.g. 'module.test_suite')",
+ ),
('test-runner=', 'r', "Test runner to use"),
]
@@ -126,30 +134,11 @@ class test(Command):
@contextlib.contextmanager
def project_on_sys_path(self, include_dists=[]):
- with_2to3 = six.PY3 and getattr(self.distribution, 'use_2to3', False)
-
- if with_2to3:
- # If we run 2to3 we can not do this inplace:
+ self.run_command('egg_info')
- # Ensure metadata is up-to-date
- self.reinitialize_command('build_py', inplace=0)
- self.run_command('build_py')
- bpy_cmd = self.get_finalized_command("build_py")
- build_path = normalize_path(bpy_cmd.build_lib)
-
- # Build extensions
- self.reinitialize_command('egg_info', egg_base=build_path)
- self.run_command('egg_info')
-
- self.reinitialize_command('build_ext', inplace=0)
- self.run_command('build_ext')
- else:
- # Without 2to3 inplace works fine:
- self.run_command('egg_info')
-
- # Build extensions in-place
- self.reinitialize_command('build_ext', inplace=1)
- self.run_command('build_ext')
+ # Build extensions in-place
+ self.reinitialize_command('build_ext', inplace=1)
+ self.run_command('build_ext')
ei_cmd = self.get_finalized_command("egg_info")
@@ -184,7 +173,7 @@ class test(Command):
orig_pythonpath = os.environ.get('PYTHONPATH', nothing)
current_pythonpath = os.environ.get('PYTHONPATH', '')
try:
- prefix = os.pathsep.join(paths)
+ prefix = os.pathsep.join(unique_everseen(paths))
to_join = filter(None, [prefix, current_pythonpath])
new_path = os.pathsep.join(to_join)
if new_path:
@@ -205,12 +194,21 @@ class test(Command):
ir_d = dist.fetch_build_eggs(dist.install_requires)
tr_d = dist.fetch_build_eggs(dist.tests_require or [])
er_d = dist.fetch_build_eggs(
- v for k, v in dist.extras_require.items()
+ v
+ for k, v in dist.extras_require.items()
if k.startswith(':') and evaluate_marker(k[1:])
)
return itertools.chain(ir_d, tr_d, er_d)
def run(self):
+ self.announce(
+ "WARNING: Testing via this command is deprecated and will be "
+ "removed in a future version. Users looking for a generic test "
+ "entry point independent of test runner are encouraged to use "
+ "tox.",
+ log.WARN,
+ )
+
installed_dists = self.install_dists(self.distribution)
cmd = ' '.join(self._argv)
@@ -226,23 +224,10 @@ class test(Command):
self.run_tests()
def run_tests(self):
- # Purge modules under test from sys.modules. The test loader will
- # re-import them from the build location. Required when 2to3 is used
- # with namespace packages.
- if six.PY3 and getattr(self.distribution, 'use_2to3', False):
- module = self.test_suite.split('.')[0]
- if module in _namespace_packages:
- del_modules = []
- if module in sys.modules:
- del_modules.append(module)
- module += '.'
- for name in sys.modules:
- if name.startswith(module):
- del_modules.append(name)
- list(map(sys.modules.__delitem__, del_modules))
-
test = unittest.main(
- None, None, self._argv,
+ None,
+ None,
+ self._argv,
testLoader=self._resolve_as_ep(self.test_loader),
testRunner=self._resolve_as_ep(self.test_runner),
exit=False,
@@ -257,12 +242,10 @@ class test(Command):
return ['unittest'] + self.test_args
@staticmethod
+ @pass_none
def _resolve_as_ep(val):
"""
Load the indicated attribute value, called, as a as if it were
specified as an entry point.
"""
- if val is None:
- return
- parsed = EntryPoint.parse("x=" + val)
- return parsed.resolve()()
+ return metadata.EntryPoint(value=val, name=None, group=None).load()()
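A sketch of how an importlib.metadata EntryPoint resolves a dotted "module:attr" string, as the rewritten _resolve_as_ep does (example code, not part of the patch; 'unittest:TestLoader' is an example value, and the @pass_none decorator above additionally short-circuits None inputs).

from importlib.metadata import EntryPoint

loader_cls = EntryPoint(value='unittest:TestLoader', name=None, group=None).load()

import unittest
assert loader_cls is unittest.TestLoader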
diff --git a/setuptools/command/upload.py b/setuptools/command/upload.py
index a44173a..ec7f81e 100644
--- a/setuptools/command/upload.py
+++ b/setuptools/command/upload.py
@@ -1,42 +1,17 @@
-import getpass
+from distutils import log
from distutils.command import upload as orig
+from setuptools.errors import RemovedCommandError
+
class upload(orig.upload):
- """
- Override default upload behavior to obtain password
- in a variety of different ways.
- """
+ """Formerly used to upload packages to PyPI."""
- def finalize_options(self):
- orig.upload.finalize_options(self)
- self.username = (
- self.username or
- getpass.getuser()
- )
- # Attempt to obtain password. Short circuit evaluation at the first
- # sign of success.
- self.password = (
- self.password or
- self._load_password_from_keyring() or
- self._prompt_for_password()
+ def run(self):
+ msg = (
+ "The upload command has been removed, use twine to upload "
+ + "instead (https://pypi.org/p/twine)"
)
- def _load_password_from_keyring(self):
- """
- Attempt to load password from keyring. Suppress Exceptions.
- """
- try:
- keyring = __import__('keyring')
- return keyring.get_password(self.repository, self.username)
- except Exception:
- pass
-
- def _prompt_for_password(self):
- """
- Prompt for a password on the tty. Suppress Exceptions.
- """
- try:
- return getpass.getpass()
- except (Exception, KeyboardInterrupt):
- pass
+ self.announce("ERROR: " + msg, log.ERROR)
+ raise RemovedCommandError(msg)
diff --git a/setuptools/command/upload_docs.py b/setuptools/command/upload_docs.py
index 07aa564..a548000 100644
--- a/setuptools/command/upload_docs.py
+++ b/setuptools/command/upload_docs.py
@@ -2,7 +2,7 @@
"""upload_docs
Implements a Distutils 'upload_docs' subcommand (upload documentation to
-PyPI's pythonhosted.org).
+sites other than PyPi such as devpi).
"""
from base64 import standard_b64encode
@@ -15,17 +15,18 @@ import tempfile
import shutil
import itertools
import functools
+import http.client
+import urllib.parse
+import warnings
-from setuptools.extern import six
-from setuptools.extern.six.moves import http_client, urllib
+from .._importlib import metadata
+from .. import SetuptoolsDeprecationWarning
-from pkg_resources import iter_entry_points
from .upload import upload
def _encode(s):
- errors = 'surrogateescape' if six.PY3 else 'strict'
- return s.encode('utf-8', errors)
+ return s.encode('utf-8', 'surrogateescape')
class upload_docs(upload):
@@ -33,7 +34,7 @@ class upload_docs(upload):
# supported by Warehouse (and won't be).
DEFAULT_REPOSITORY = 'https://pypi.python.org/pypi/'
- description = 'Upload documentation to PyPI'
+ description = 'Upload documentation to sites other than PyPi such as devpi'
user_options = [
('repository=', 'r',
@@ -45,9 +46,10 @@ class upload_docs(upload):
boolean_options = upload.boolean_options
def has_sphinx(self):
- if self.upload_dir is None:
- for ep in iter_entry_points('distutils.commands', 'build_sphinx'):
- return True
+ return bool(
+ self.upload_dir is None
+ and metadata.entry_points(group='distutils.commands', name='build_sphinx')
+ )
sub_commands = [('build_sphinx', has_sphinx)]
@@ -61,7 +63,7 @@ class upload_docs(upload):
if self.upload_dir is None:
if self.has_sphinx():
build_sphinx = self.get_finalized_command('build_sphinx')
- self.target_dir = build_sphinx.builder_target_dir
+ self.target_dir = dict(build_sphinx.builder_target_dirs)['html']
else:
build = self.get_finalized_command('build')
self.target_dir = os.path.join(build.build_base, 'docs')
@@ -69,7 +71,7 @@ class upload_docs(upload):
self.ensure_dirname('upload_dir')
self.target_dir = self.upload_dir
if 'pypi.python.org' in self.repository:
- log.warn("Upload_docs command is deprecated. Use RTD instead.")
+ log.warn("Upload_docs command is deprecated for PyPi. Use RTD instead.")
self.announce('Using upload directory %s' % self.target_dir)
def create_zipfile(self, filename):
@@ -89,6 +91,12 @@ class upload_docs(upload):
zip_file.close()
def run(self):
+ warnings.warn(
+ "upload_docs is deprecated and will be removed in a future "
+ "version. Use tools like httpie or curl instead.",
+ SetuptoolsDeprecationWarning,
+ )
+
# Run sub commands
for cmd_name in self.get_sub_commands():
self.run_command(cmd_name)
@@ -127,8 +135,8 @@ class upload_docs(upload):
"""
Build up the MIME payload for the POST data
"""
- boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
- sep_boundary = b'\n--' + boundary
+ boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
+ sep_boundary = b'\n--' + boundary.encode('ascii')
end_boundary = sep_boundary + b'--'
end_items = end_boundary, b"\n",
builder = functools.partial(
@@ -138,7 +146,7 @@ class upload_docs(upload):
part_groups = map(builder, data.items())
parts = itertools.chain.from_iterable(part_groups)
body_items = itertools.chain(parts, end_items)
- content_type = 'multipart/form-data; boundary=%s' % boundary.decode('ascii')
+ content_type = 'multipart/form-data; boundary=%s' % boundary
return b''.join(body_items), content_type
def upload_file(self, filename):
@@ -152,9 +160,7 @@ class upload_docs(upload):
}
# set up the authentication
credentials = _encode(self.username + ':' + self.password)
- credentials = standard_b64encode(credentials)
- if six.PY3:
- credentials = credentials.decode('ascii')
+ credentials = standard_b64encode(credentials).decode('ascii')
auth = "Basic " + credentials
body, ct = self._build_multipart(data)
@@ -169,9 +175,9 @@ class upload_docs(upload):
urllib.parse.urlparse(self.repository)
assert not params and not query and not fragments
if schema == 'http':
- conn = http_client.HTTPConnection(netloc)
+ conn = http.client.HTTPConnection(netloc)
elif schema == 'https':
- conn = http_client.HTTPSConnection(netloc)
+ conn = http.client.HTTPSConnection(netloc)
else:
raise AssertionError("unsupported schema " + schema)
diff --git a/setuptools/config/__init__.py b/setuptools/config/__init__.py
new file mode 100644
index 0000000..35458d8
--- /dev/null
+++ b/setuptools/config/__init__.py
@@ -0,0 +1,35 @@
+"""For backward compatibility, expose main functions from
+``setuptools.config.setupcfg``
+"""
+import warnings
+from functools import wraps
+from textwrap import dedent
+from typing import Callable, TypeVar, cast
+
+from .._deprecation_warning import SetuptoolsDeprecationWarning
+from . import setupcfg
+
+Fn = TypeVar("Fn", bound=Callable)
+
+__all__ = ('parse_configuration', 'read_configuration')
+
+
+def _deprecation_notice(fn: Fn) -> Fn:
+ @wraps(fn)
+ def _wrapper(*args, **kwargs):
+ msg = f"""\
+ As setuptools moves its configuration towards `pyproject.toml`,
+ `{__name__}.{fn.__name__}` became deprecated.
+
+ For the time being, you can use the `{setupcfg.__name__}` module
+ to access a backward compatible API, but this module is provisional
+ and might be removed in the future.
+ """
+ warnings.warn(dedent(msg), SetuptoolsDeprecationWarning)
+ return fn(*args, **kwargs)
+
+ return cast(Fn, _wrapper)
+
+
+read_configuration = _deprecation_notice(setupcfg.read_configuration)
+parse_configuration = _deprecation_notice(setupcfg.parse_configuration)
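A self-contained rework of the deprecation-wrapper pattern this new module introduces, so its effect on callers is visible (example code, not part of the patch; the names below are hypothetical stand-ins, not the setuptools API).

import warnings
from functools import wraps

def _deprecation_notice(fn):
    @wraps(fn)
    def _wrapper(*args, **kwargs):
        warnings.warn(f"{fn.__name__} is deprecated", DeprecationWarning)
        return fn(*args, **kwargs)
    return _wrapper

def read_configuration(path):
    return {"metadata": {}}        # stand-in for the real parser

read_configuration = _deprecation_notice(read_configuration)

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    read_configuration("setup.cfg")
assert any(w.category is DeprecationWarning for w in caught)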
diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
new file mode 100644
index 0000000..c8ddab4
--- /dev/null
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -0,0 +1,249 @@
+"""Translation layer between pyproject config and setuptools distribution and
+metadata objects.
+
+The distribution and metadata objects are modeled after (an old version of)
+core metadata, therefore configs in the format specified for ``pyproject.toml``
+need to be processed before being applied.
+"""
+import logging
+import os
+from collections.abc import Mapping
+from email.headerregistry import Address
+from functools import partial
+from itertools import chain
+from types import MappingProxyType
+from typing import (TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set, Tuple,
+ Type, Union)
+
+if TYPE_CHECKING:
+ from setuptools._importlib import metadata # noqa
+ from setuptools.dist import Distribution # noqa
+
+EMPTY: Mapping = MappingProxyType({}) # Immutable dict-like
+_Path = Union[os.PathLike, str]
+_DictOrStr = Union[dict, str]
+_CorrespFn = Callable[["Distribution", Any, _Path], None]
+_Correspondence = Union[str, _CorrespFn]
+
+_logger = logging.getLogger(__name__)
+
+
+def apply(dist: "Distribution", config: dict, filename: _Path) -> "Distribution":
+ """Apply configuration dict read with :func:`read_configuration`"""
+
+ root_dir = os.path.dirname(filename) or "."
+ tool_table = config.get("tool", {}).get("setuptools", {})
+ project_table = config.get("project", {}).copy()
+ _unify_entry_points(project_table)
+ for field, value in project_table.items():
+ norm_key = json_compatible_key(field)
+ corresp = PYPROJECT_CORRESPONDENCE.get(norm_key, norm_key)
+ if callable(corresp):
+ corresp(dist, value, root_dir)
+ else:
+ _set_config(dist, corresp, value)
+
+ for field, value in tool_table.items():
+ norm_key = json_compatible_key(field)
+ norm_key = TOOL_TABLE_RENAMES.get(norm_key, norm_key)
+ _set_config(dist, norm_key, value)
+
+ _copy_command_options(config, dist, filename)
+
+ current_directory = os.getcwd()
+ os.chdir(root_dir)
+ try:
+ dist._finalize_requires()
+ dist._finalize_license_files()
+ finally:
+ os.chdir(current_directory)
+
+ return dist
+
+
+def json_compatible_key(key: str) -> str:
+ """As defined in :pep:`566#json-compatible-metadata`"""
+ return key.lower().replace("-", "_")
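For illustration (example code, not part of the patch; the keys are examples), the PEP 566-style normalisation applied to pyproject.toml field names before they are matched against setuptools/distutils attribute names:

def json_compatible_key(key):
    return key.lower().replace("-", "_")

assert json_compatible_key("Requires-Python") == "requires_python"
assert json_compatible_key("optional-dependencies") == "optional_dependencies"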
+
+
+def _set_config(dist: "Distribution", field: str, value: Any):
+ setter = getattr(dist.metadata, f"set_{field}", None)
+ if setter:
+ setter(value)
+ elif hasattr(dist.metadata, field) or field in SETUPTOOLS_PATCHES:
+ setattr(dist.metadata, field, value)
+ else:
+ setattr(dist, field, value)
+
+
+_CONTENT_TYPES = {
+ ".md": "text/markdown",
+ ".rst": "text/x-rst",
+ ".txt": "text/plain",
+}
+
+
+def _guess_content_type(file: str) -> Optional[str]:
+ _, ext = os.path.splitext(file.lower())
+ if not ext:
+ return None
+
+ if ext in _CONTENT_TYPES:
+ return _CONTENT_TYPES[ext]
+
+ valid = ", ".join(f"{k} ({v})" for k, v in _CONTENT_TYPES.items())
+ msg = f"only the following file extensions are recognized: {valid}."
+ raise ValueError(f"Undefined content type for {file}, {msg}")
+
+
+def _long_description(dist: "Distribution", val: _DictOrStr, root_dir: _Path):
+ from setuptools.config import expand
+
+ if isinstance(val, str):
+ text = expand.read_files(val, root_dir)
+ ctype = _guess_content_type(val)
+ else:
+ text = val.get("text") or expand.read_files(val.get("file", []), root_dir)
+ ctype = val["content-type"]
+
+ _set_config(dist, "long_description", text)
+ if ctype:
+ _set_config(dist, "long_description_content_type", ctype)
+
+
+def _license(dist: "Distribution", val: dict, root_dir: _Path):
+ from setuptools.config import expand
+
+ if "file" in val:
+ _set_config(dist, "license", expand.read_files([val["file"]], root_dir))
+ else:
+ _set_config(dist, "license", val["text"])
+
+
+def _people(dist: "Distribution", val: List[dict], _root_dir: _Path, kind: str):
+ field = []
+ email_field = []
+ for person in val:
+ if "name" not in person:
+ email_field.append(person["email"])
+ elif "email" not in person:
+ field.append(person["name"])
+ else:
+ addr = Address(display_name=person["name"], addr_spec=person["email"])
+ email_field.append(str(addr))
+
+ if field:
+ _set_config(dist, kind, ", ".join(field))
+ if email_field:
+ _set_config(dist, f"{kind}_email", ", ".join(email_field))
+
+
+def _project_urls(dist: "Distribution", val: dict, _root_dir):
+ special = {"downloadurl": "download_url", "homepage": "url"}
+ for key, url in val.items():
+ norm_key = json_compatible_key(key).replace("_", "")
+ _set_config(dist, special.get(norm_key, key), url)
+ # If `homepage` is missing, distutils will warn the following message:
+ # "warning: check: missing required meta-data: url"
+ # In the context of PEP 621, users might ask themselves: "which url?".
+ # Let's add a warning before distutils check to help users understand the problem:
+ if not dist.metadata.url:
+ msg = (
+ "Missing `Homepage` url.\nIt is advisable to link some kind of reference "
+ "for your project (e.g. source code or documentation).\n"
+ )
+ _logger.warning(msg)
+ _set_config(dist, "project_urls", val.copy())
+
+
+def _python_requires(dist: "Distribution", val: dict, _root_dir):
+ from setuptools.extern.packaging.specifiers import SpecifierSet
+
+ _set_config(dist, "python_requires", SpecifierSet(val))
+
+
+def _unify_entry_points(project_table: dict):
+ project = project_table
+ entry_points = project.pop("entry-points", project.pop("entry_points", {}))
+ renaming = {"scripts": "console_scripts", "gui_scripts": "gui_scripts"}
+ for key, value in list(project.items()): # eager to allow modifications
+ norm_key = json_compatible_key(key)
+ if norm_key in renaming and value:
+ entry_points[renaming[norm_key]] = project.pop(key)
+
+ if entry_points:
+ project["entry-points"] = {
+ name: [f"{k} = {v}" for k, v in group.items()]
+ for name, group in entry_points.items()
+ }
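A simplified view of what _unify_entry_points() produces for a typical [project.scripts] table (example code, not part of the patch; package names are examples, and the real function also normalises keys and handles gui-scripts):

project = {
    "name": "mypkg",
    "scripts": {"mycli": "mypkg.cli:main"},
    "entry-points": {"mypkg.plugins": {"default": "mypkg.plugins:Default"}},
}
entry_points = project.pop("entry-points", {})
if "scripts" in project:
    entry_points["console_scripts"] = project.pop("scripts")

rendered = {
    name: [f"{k} = {v}" for k, v in group.items()]
    for name, group in entry_points.items()
}
assert rendered["console_scripts"] == ["mycli = mypkg.cli:main"]
assert rendered["mypkg.plugins"] == ["default = mypkg.plugins:Default"]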
+
+
+def _copy_command_options(pyproject: dict, dist: "Distribution", filename: _Path):
+ tool_table = pyproject.get("tool", {})
+ cmdclass = tool_table.get("setuptools", {}).get("cmdclass", {})
+ valid_options = _valid_command_options(cmdclass)
+
+ cmd_opts = dist.command_options
+ for cmd, config in pyproject.get("tool", {}).get("distutils", {}).items():
+ cmd = json_compatible_key(cmd)
+ valid = valid_options.get(cmd, set())
+ cmd_opts.setdefault(cmd, {})
+ for key, value in config.items():
+ key = json_compatible_key(key)
+ cmd_opts[cmd][key] = (str(filename), value)
+ if key not in valid:
+ # To avoid removing options that are specified dynamically we
+ # just log a warn...
+ _logger.warning(f"Command option {cmd}.{key} is not defined")
+
+
+def _valid_command_options(cmdclass: Mapping = EMPTY) -> Dict[str, Set[str]]:
+ from .._importlib import metadata
+ from setuptools.dist import Distribution
+
+ valid_options = {"global": _normalise_cmd_options(Distribution.global_options)}
+
+ unloaded_entry_points = metadata.entry_points(group='distutils.commands')
+ loaded_entry_points = (_load_ep(ep) for ep in unloaded_entry_points)
+ entry_points = (ep for ep in loaded_entry_points if ep)
+ for cmd, cmd_class in chain(entry_points, cmdclass.items()):
+ opts = valid_options.get(cmd, set())
+ opts = opts | _normalise_cmd_options(getattr(cmd_class, "user_options", []))
+ valid_options[cmd] = opts
+
+ return valid_options
+
+
+def _load_ep(ep: "metadata.EntryPoint") -> Optional[Tuple[str, Type]]:
+ # Ignore all the errors
+ try:
+ return (ep.name, ep.load())
+ except Exception as ex:
+ msg = f"{ex.__class__.__name__} while trying to load entry-point {ep.name}"
+ _logger.warning(f"{msg}: {ex}")
+ return None
+
+
+def _normalise_cmd_option_key(name: str) -> str:
+ return json_compatible_key(name).strip("_=")
+
+
+def _normalise_cmd_options(desc: List[Tuple[str, Optional[str], str]]) -> Set[str]:
+ return {_normalise_cmd_option_key(fancy_option[0]) for fancy_option in desc}
+
+
+PYPROJECT_CORRESPONDENCE: Dict[str, _Correspondence] = {
+ "readme": _long_description,
+ "license": _license,
+ "authors": partial(_people, kind="author"),
+ "maintainers": partial(_people, kind="maintainer"),
+ "urls": _project_urls,
+ "dependencies": "install_requires",
+ "optional_dependencies": "extras_require",
+ "requires_python": _python_requires,
+}
+
+TOOL_TABLE_RENAMES = {"script_files": "scripts"}
+
+SETUPTOOLS_PATCHES = {"long_description_content_type", "project_urls",
+ "provides_extras", "license_file", "license_files"}
diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
new file mode 100644
index 0000000..ff9b2c9
--- /dev/null
+++ b/setuptools/config/expand.py
@@ -0,0 +1,458 @@
+"""Utility functions to expand configuration directives or special values
+(such as glob patterns).
+
+We can split the process of interpreting configuration files into 2 steps:
+
+1. Parsing the file contents from strings into value objects
+ that can be understood by Python (for example a string with a comma
+ separated list of keywords into an actual Python list of strings).
+
+2. The expansion (or post-processing) of these values according to the
+ semantics ``setuptools`` assigns to them (for example a configuration field
+ with the ``file:`` directive should be expanded from a list of file paths to
+ a single string with the contents of those files concatenated)
+
+This module focuses on the second step, and therefore allows sharing the expansion
+functions among several configuration file formats.
+"""
+import ast
+import importlib
+import io
+import os
+import sys
+import warnings
+from glob import iglob
+from configparser import ConfigParser
+from importlib.machinery import ModuleSpec
+from itertools import chain
+from typing import (
+ TYPE_CHECKING,
+ Callable,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Mapping,
+ Optional,
+ Tuple,
+ TypeVar,
+ Union,
+ cast
+)
+from types import ModuleType
+
+from distutils.errors import DistutilsOptionError
+
+if TYPE_CHECKING:
+ from setuptools.dist import Distribution # noqa
+ from setuptools.discovery import ConfigDiscovery # noqa
+ from distutils.dist import DistributionMetadata # noqa
+
+chain_iter = chain.from_iterable
+_Path = Union[str, os.PathLike]
+_K = TypeVar("_K")
+_V = TypeVar("_V", covariant=True)
+
+
+class StaticModule:
+ """Proxy to a module object that avoids executing arbitrary code."""
+
+ def __init__(self, name: str, spec: ModuleSpec):
+ with open(spec.origin) as strm: # type: ignore
+ src = strm.read()
+ module = ast.parse(src)
+ vars(self).update(locals())
+ del self.self
+
+ def __getattr__(self, attr):
+ """Attempt to load an attribute "statically", via :func:`ast.literal_eval`."""
+ try:
+ assignment_expressions = (
+ statement
+ for statement in self.module.body
+ if isinstance(statement, ast.Assign)
+ )
+ expressions_with_target = (
+ (statement, target)
+ for statement in assignment_expressions
+ for target in statement.targets
+ )
+ matching_values = (
+ statement.value
+ for statement, target in expressions_with_target
+ if isinstance(target, ast.Name) and target.id == attr
+ )
+ return next(ast.literal_eval(value) for value in matching_values)
+ except Exception as e:
+ raise AttributeError(f"{self.name} has no attribute {attr}") from e
+
+
+def glob_relative(
+ patterns: Iterable[str], root_dir: Optional[_Path] = None
+) -> List[str]:
+ """Expand the list of glob patterns, but preserving relative paths.
+
+ :param list[str] patterns: List of glob patterns
+ :param str root_dir: Path to which globs should be relative
+ (current directory by default)
+ :rtype: list
+ """
+ glob_characters = {'*', '?', '[', ']', '{', '}'}
+ expanded_values = []
+ root_dir = root_dir or os.getcwd()
+ for value in patterns:
+
+ # Has globby characters?
+ if any(char in value for char in glob_characters):
+ # then expand the glob pattern while keeping paths *relative*:
+ glob_path = os.path.abspath(os.path.join(root_dir, value))
+ expanded_values.extend(sorted(
+ os.path.relpath(path, root_dir).replace(os.sep, "/")
+ for path in iglob(glob_path, recursive=True)))
+
+ else:
+ # take the value as-is
+ path = os.path.relpath(value, root_dir).replace(os.sep, "/")
+ expanded_values.append(path)
+
+ return expanded_values
+
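+# Minimal usage sketch (file names hypothetical; results depend on what actually
+# exists under ``root_dir``):
+#
+#     glob_relative(["docs/*.rst"], root_dir="/hypothetical/proj")
+#     # -> e.g. ['docs/api.rst', 'docs/index.rst'] (expanded, sorted, kept relative)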
+
+def read_files(filepaths: Union[str, bytes, Iterable[_Path]], root_dir=None) -> str:
+ """Return the content of the files concatenated using ``\n`` as str
+
+ This function is sandboxed and won't reach anything outside ``root_dir``
+
+ (By default ``root_dir`` is the current directory).
+ """
+ from setuptools.extern.more_itertools import always_iterable
+
+ root_dir = os.path.abspath(root_dir or os.getcwd())
+ _filepaths = (os.path.join(root_dir, path) for path in always_iterable(filepaths))
+ return '\n'.join(
+ _read_file(path)
+ for path in _filter_existing_files(_filepaths)
+ if _assert_local(path, root_dir)
+ )
+
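+# Minimal usage sketch (file names hypothetical): concatenate the given files,
+# warning about missing ones and rejecting anything outside ``root_dir``:
+#
+#     long_description = read_files(["README.rst", "CHANGELOG.rst"], root_dir)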
+
+def _filter_existing_files(filepaths: Iterable[_Path]) -> Iterator[_Path]:
+ for path in filepaths:
+ if os.path.isfile(path):
+ yield path
+ else:
+ warnings.warn(f"File {path!r} cannot be found")
+
+
+def _read_file(filepath: Union[bytes, _Path]) -> str:
+ with io.open(filepath, encoding='utf-8') as f:
+ return f.read()
+
+
+def _assert_local(filepath: _Path, root_dir: str):
+ if not os.path.abspath(filepath).startswith(root_dir):
+ msg = f"Cannot access {filepath!r} (or anything outside {root_dir!r})"
+ raise DistutilsOptionError(msg)
+
+ return True
+
+
+def read_attr(
+ attr_desc: str,
+ package_dir: Optional[Mapping[str, str]] = None,
+ root_dir: Optional[_Path] = None
+):
+ """Reads the value of an attribute from a module.
+
+ This function will try to read the attribute statically first
+ (via :func:`ast.literal_eval`), and only evaluate the module if it fails.
+
+ Examples:
+ read_attr("package.attr")
+ read_attr("package.module.attr")
+
+ :param str attr_desc: Dot-separated string describing how to reach the
+ attribute (see examples above)
+ :param dict[str, str] package_dir: Mapping of package names to their
+ location on disk (represented by paths relative to ``root_dir``).
+ :param str root_dir: Path to directory containing all the packages in
+ ``package_dir`` (current directory by default).
+ :rtype: str
+ """
+ root_dir = root_dir or os.getcwd()
+ attrs_path = attr_desc.strip().split('.')
+ attr_name = attrs_path.pop()
+ module_name = '.'.join(attrs_path)
+ module_name = module_name or '__init__'
+ _parent_path, path, module_name = _find_module(module_name, package_dir, root_dir)
+ spec = _find_spec(module_name, path)
+
+ try:
+ return getattr(StaticModule(module_name, spec), attr_name)
+ except Exception:
+ # fallback to evaluate module
+ module = _load_spec(spec, module_name)
+ return getattr(module, attr_name)
+
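+# Minimal usage sketch, assuming a hypothetical src-layout project where
+# ``src/mypkg/__init__.py`` contains ``__version__ = "1.2.3"``:
+#
+#     read_attr("mypkg.__version__", {"": "src"}, root_dir="/hypothetical/proj")
+#     # -> "1.2.3" (read via AST when possible, by importing the module otherwise)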
+
+def _find_spec(module_name: str, module_path: Optional[_Path]) -> ModuleSpec:
+ spec = importlib.util.spec_from_file_location(module_name, module_path)
+ spec = spec or importlib.util.find_spec(module_name)
+
+ if spec is None:
+ raise ModuleNotFoundError(module_name)
+
+ return spec
+
+
+def _load_spec(spec: ModuleSpec, module_name: str) -> ModuleType:
+ name = getattr(spec, "__name__", module_name)
+ if name in sys.modules:
+ return sys.modules[name]
+ module = importlib.util.module_from_spec(spec)
+ sys.modules[name] = module # cache (it also ensures `==` works on loaded items)
+ spec.loader.exec_module(module) # type: ignore
+ return module
+
+
+def _find_module(
+ module_name: str, package_dir: Optional[Mapping[str, str]], root_dir: _Path
+) -> Tuple[_Path, Optional[str], str]:
+ """Given a module (that could normally be imported by ``module_name``
+ after the build is complete), find the path to the parent directory where
+ it is contained and the canonical name that could be used to import it
+ considering the ``package_dir`` in the build configuration and ``root_dir``
+ """
+ parent_path = root_dir
+ module_parts = module_name.split('.')
+ if package_dir:
+ if module_parts[0] in package_dir:
+ # A custom path was specified for the module we want to import
+ custom_path = package_dir[module_parts[0]]
+ parts = custom_path.rsplit('/', 1)
+ if len(parts) > 1:
+ parent_path = os.path.join(root_dir, parts[0])
+ parent_module = parts[1]
+ else:
+ parent_module = custom_path
+ module_name = ".".join([parent_module, *module_parts[1:]])
+ elif '' in package_dir:
+ # A custom parent directory was specified for all root modules
+ parent_path = os.path.join(root_dir, package_dir[''])
+
+ path_start = os.path.join(parent_path, *module_name.split("."))
+ candidates = chain(
+ (f"{path_start}.py", os.path.join(path_start, "__init__.py")),
+ iglob(f"{path_start}.*")
+ )
+ module_path = next((x for x in candidates if os.path.isfile(x)), None)
+ return parent_path, module_path, module_name
+
+
+def resolve_class(
+ qualified_class_name: str,
+ package_dir: Optional[Mapping[str, str]] = None,
+ root_dir: Optional[_Path] = None
+) -> Callable:
+ """Given a qualified class name, return the associated class object"""
+ root_dir = root_dir or os.getcwd()
+ idx = qualified_class_name.rfind('.')
+ class_name = qualified_class_name[idx + 1 :]
+ pkg_name = qualified_class_name[:idx]
+
+ _parent_path, path, module_name = _find_module(pkg_name, package_dir, root_dir)
+ module = _load_spec(_find_spec(module_name, path), module_name)
+ return getattr(module, class_name)
+
+
+def cmdclass(
+ values: Dict[str, str],
+ package_dir: Optional[Mapping[str, str]] = None,
+ root_dir: Optional[_Path] = None
+) -> Dict[str, Callable]:
+ """Given a dictionary mapping command names to strings for qualified class
+ names, apply :func:`resolve_class` to the dict values.
+ """
+ return {k: resolve_class(v, package_dir, root_dir) for k, v in values.items()}
+
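+# Minimal usage sketch (module/class names hypothetical): the result maps each
+# command name to the actual class object resolved by :func:`resolve_class`:
+#
+#     cmdclass({"sdist": "mypkg_build.CustomSdist"}, {"": "src"}, root_dir)
+#     # -> {"sdist": <class 'mypkg_build.CustomSdist'>}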
+
+def find_packages(
+ *,
+ namespaces=True,
+ fill_package_dir: Optional[Dict[str, str]] = None,
+ root_dir: Optional[_Path] = None,
+ **kwargs
+) -> List[str]:
+ """Works similarly to :func:`setuptools.find_packages`, but with all
+ arguments given as keyword arguments. Moreover, ``where`` can be given
+ as a list (the results will be simply concatenated).
+
+ When the additional keyword argument ``namespaces`` is ``True``, it will
+ behave like :func:`setuptools.find_namespace_packages` (i.e. include
+ implicit namespaces as per :pep:`420`).
+
+ The ``where`` argument will be considered relative to ``root_dir`` (or the current
+ working directory when ``root_dir`` is not given).
+
+ If the ``fill_package_dir`` argument is passed, this function will consider it as a
+ data structure similar to the ``package_dir`` configuration parameter and fill in
+ any missing package locations.
+
+ :rtype: list
+ """
+ from setuptools.discovery import construct_package_dir
+ from setuptools.extern.more_itertools import unique_everseen, always_iterable
+
+ if namespaces:
+ from setuptools.discovery import PEP420PackageFinder as PackageFinder
+ else:
+ from setuptools.discovery import PackageFinder # type: ignore
+
+ root_dir = root_dir or os.curdir
+ where = kwargs.pop('where', ['.'])
+ packages: List[str] = []
+ fill_package_dir = {} if fill_package_dir is None else fill_package_dir
+
+ for path in unique_everseen(always_iterable(where)):
+ package_path = _nest_path(root_dir, path)
+ pkgs = PackageFinder.find(package_path, **kwargs)
+ packages.extend(pkgs)
+ if pkgs and not (
+ fill_package_dir.get("") == path
+ or os.path.samefile(package_path, root_dir)
+ ):
+ fill_package_dir.update(construct_package_dir(pkgs, path))
+
+ return packages
+
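+# Minimal usage sketch, assuming a hypothetical project with ``src/mypkg/``:
+#
+#     pkg_dir: Dict[str, str] = {}
+#     find_packages(where=["src"], root_dir="/hypothetical/proj",
+#                   fill_package_dir=pkg_dir)
+#     # -> ['mypkg', ...]; pkg_dir is filled in with roughly {'mypkg': 'src/mypkg'}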
+
+def _nest_path(parent: _Path, path: _Path) -> str:
+ path = parent if path == "." else os.path.join(parent, path)
+ return os.path.normpath(path)
+
+
+def version(value: Union[Callable, Iterable[Union[str, int]], str]) -> str:
+ """When getting the version directly from an attribute,
+ it should be normalised to a string.
+ """
+ if callable(value):
+ value = value()
+
+ value = cast(Iterable[Union[str, int]], value)
+
+ if not isinstance(value, str):
+ if hasattr(value, '__iter__'):
+ value = '.'.join(map(str, value))
+ else:
+ value = '%s' % value
+
+ return value
+
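+# Quick sketch of the normalisation performed above:
+#
+#     version("1.2.3")        # -> "1.2.3"
+#     version((1, 2, 3))      # -> "1.2.3"
+#     version(lambda: "0.4")  # -> "0.4"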
+
+def canonic_package_data(package_data: dict) -> dict:
+ if "*" in package_data:
+ package_data[""] = package_data.pop("*")
+ return package_data
+
+
+def canonic_data_files(
+ data_files: Union[list, dict], root_dir: Optional[_Path] = None
+) -> List[Tuple[str, List[str]]]:
+ """For compatibility with ``setup.py``, ``data_files`` should be a list
+ of pairs instead of a dict.
+
+ This function also expands glob patterns.
+ """
+ if isinstance(data_files, list):
+ return data_files
+
+ return [
+ (dest, glob_relative(patterns, root_dir))
+ for dest, patterns in data_files.items()
+ ]
+
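+# Minimal sketch of the dict -> list-of-pairs conversion (paths hypothetical):
+#
+#     canonic_data_files({"share/doc/mypkg": ["docs/*.rst"]}, root_dir)
+#     # -> [("share/doc/mypkg", ["docs/index.rst", ...])] with globs expanded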
+
+def entry_points(text: str, text_source="entry-points") -> Dict[str, dict]:
+ """Given the contents of entry-points file,
+ process it into a 2-level dictionary (``dict[str, dict[str, str]]``).
+ The first level keys are entry-point groups, the second level keys are
+ entry-point names, and the second level values are references to objects
+ (that correspond to the entry-point value).
+ """
+ parser = ConfigParser(default_section=None, delimiters=("=",)) # type: ignore
+ parser.optionxform = str # case sensitive
+ parser.read_string(text, text_source)
+ groups = {k: dict(v.items()) for k, v in parser.items()}
+ groups.pop(parser.default_section, None)
+ return groups
+
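+# Minimal parsing sketch (group/name/value hypothetical):
+#
+#     entry_points("[console_scripts]\nmycli = mypkg.cli:main\n")
+#     # -> {"console_scripts": {"mycli": "mypkg.cli:main"}}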
+
+class EnsurePackagesDiscovered:
+ """Some expand functions require all the packages to already be discovered before
+ they run, e.g. :func:`read_attr`, :func:`resolve_class`, :func:`cmdclass`.
+
+ Therefore in some cases we will need to run autodiscovery during the evaluation of
+ the configuration. However, it is better to postpone calling package discovery as
+ much as possible, because some parameters can influence it (e.g. ``package_dir``),
+ and those might not have been processed yet.
+ """
+
+ def __init__(self, distribution: "Distribution"):
+ self._dist = distribution
+ self._called = False
+
+ def __call__(self):
+ """Trigger the automatic package discovery, if it is still necessary."""
+ if not self._called:
+ self._called = True
+ self._dist.set_defaults(name=False) # Skip name, we can still be parsing
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, _exc_type, _exc_value, _traceback):
+ if self._called:
+ self._dist.set_defaults.analyse_name() # Now we can set a default name
+
+ def _get_package_dir(self) -> Mapping[str, str]:
+ self()
+ pkg_dir = self._dist.package_dir
+ return {} if pkg_dir is None else pkg_dir
+
+ @property
+ def package_dir(self) -> Mapping[str, str]:
+ """Proxy to ``package_dir`` that may trigger auto-discovery when used."""
+ return LazyMappingProxy(self._get_package_dir)
+
+
+class LazyMappingProxy(Mapping[_K, _V]):
+ """Mapping proxy that delays resolving the target object, until really needed.
+
+ >>> def obtain_mapping():
+ ... print("Running expensive function!")
+ ... return {"key": "value", "other key": "other value"}
+ >>> mapping = LazyMappingProxy(obtain_mapping)
+ >>> mapping["key"]
+ Running expensive function!
+ 'value'
+ >>> mapping["other key"]
+ 'other value'
+ """
+
+ def __init__(self, obtain_mapping_value: Callable[[], Mapping[_K, _V]]):
+ self._obtain = obtain_mapping_value
+ self._value: Optional[Mapping[_K, _V]] = None
+
+ def _target(self) -> Mapping[_K, _V]:
+ if self._value is None:
+ self._value = self._obtain()
+ return self._value
+
+ def __getitem__(self, key: _K) -> _V:
+ return self._target()[key]
+
+ def __len__(self) -> int:
+ return len(self._target())
+
+ def __iter__(self) -> Iterator[_K]:
+ return iter(self._target())
diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
new file mode 100644
index 0000000..c7f8cb6
--- /dev/null
+++ b/setuptools/config/pyprojecttoml.py
@@ -0,0 +1,338 @@
+"""Load setuptools configuration from ``pyproject.toml`` files"""
+import logging
+import os
+import warnings
+from contextlib import contextmanager
+from functools import partial
+from typing import TYPE_CHECKING, Callable, Dict, Optional, Mapping, Union
+
+from setuptools.errors import FileError, OptionError
+
+from . import expand as _expand
+from ._apply_pyprojecttoml import apply
+
+if TYPE_CHECKING:
+ from setuptools.dist import Distribution # noqa
+
+_Path = Union[str, os.PathLike]
+_logger = logging.getLogger(__name__)
+
+
+def load_file(filepath: _Path) -> dict:
+ from setuptools.extern import tomli # type: ignore
+
+ with open(filepath, "rb") as file:
+ return tomli.load(file)
+
+
+def validate(config: dict, filepath: _Path):
+ from setuptools.extern._validate_pyproject import validate as _validate
+
+ try:
+ return _validate(config)
+ except Exception as ex:
+ if ex.__class__.__name__ != "ValidationError":
+ # Workaround for the fact that `extern` can duplicate imports
+ ex_cls = ex.__class__.__name__
+ error = ValueError(f"invalid pyproject.toml config: {ex_cls} - {ex}")
+ raise error from None
+
+ _logger.error(f"configuration error: {ex.summary}") # type: ignore
+ _logger.debug(ex.details) # type: ignore
+ error = ValueError(f"invalid pyproject.toml config: {ex.name}") # type: ignore
+ raise error from None
+
+
+def apply_configuration(
+ dist: "Distribution", filepath: _Path, ignore_option_errors=False,
+) -> "Distribution":
+ """Apply the configuration from a ``pyproject.toml`` file into an existing
+ distribution object.
+ """
+ config = read_configuration(filepath, True, ignore_option_errors, dist)
+ return apply(dist, config, filepath)
+
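+# Minimal usage sketch, reading a hypothetical ``pyproject.toml`` and applying it
+# to a freshly created distribution object:
+#
+#     from setuptools.dist import Distribution
+#     dist = apply_configuration(Distribution(), "/hypothetical/proj/pyproject.toml")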
+
+def read_configuration(
+ filepath: _Path,
+ expand=True,
+ ignore_option_errors=False,
+ dist: Optional["Distribution"] = None,
+):
+ """Read given configuration file and returns options from it as a dict.
+
+ :param str|unicode filepath: Path to configuration file in the ``pyproject.toml``
+ format.
+
+ :param bool expand: Whether to expand directives and other computed values
+ (i.e. post-process the given configuration)
+
+ :param bool ignore_option_errors: Whether to silently ignore
+ options, values of which could not be resolved (e.g. due to exceptions
+ in directives such as file:, attr:, etc.).
+ If False exceptions are propagated as expected.
+
+ :param Distribution|None: Distribution object to which the configuration refers.
+ If not given, a dummy object will be created and discarded after the
+ configuration is read. This is used for auto-discovery of packages in the case
+ a dynamic configuration (e.g. ``attr`` or ``cmdclass``) is expanded.
+ When ``expand=False`` this object is simply ignored.
+
+ :rtype: dict
+ """
+ filepath = os.path.abspath(filepath)
+
+ if not os.path.isfile(filepath):
+ raise FileError(f"Configuration file {filepath!r} does not exist.")
+
+ asdict = load_file(filepath) or {}
+ project_table = asdict.get("project", {})
+ tool_table = asdict.get("tool", {})
+ setuptools_table = tool_table.get("setuptools", {})
+ if not asdict or not (project_table or setuptools_table):
+ return {} # User is not using pyproject to configure setuptools
+
+ # TODO: Remove once the feature stabilizes
+ msg = (
+ "Support for project metadata in `pyproject.toml` is still experimental "
+ "and may be removed (or change) in future releases."
+ )
+ warnings.warn(msg, _ExperimentalProjectMetadata)
+
+ # There is an overall sense in the community that making include_package_data=True
+ # the default would be an improvement.
+ # `ini2toml` backfills include_package_data=False when nothing is explicitly given,
+ # therefore setting a default here is backwards compatible.
+ if dist and getattr(dist, "include_package_data") is not None:
+ setuptools_table.setdefault("include-package-data", dist.include_package_data)
+ else:
+ setuptools_table.setdefault("include-package-data", True)
+ # Persist changes:
+ asdict["tool"] = tool_table
+ tool_table["setuptools"] = setuptools_table
+
+ with _ignore_errors(ignore_option_errors):
+ # Don't complain about unrelated errors (e.g. tools not using the "tool" table)
+ subset = {"project": project_table, "tool": {"setuptools": setuptools_table}}
+ validate(subset, filepath)
+
+ if expand:
+ root_dir = os.path.dirname(filepath)
+ return expand_configuration(asdict, root_dir, ignore_option_errors, dist)
+
+ return asdict
+
+
+def expand_configuration(
+ config: dict,
+ root_dir: Optional[_Path] = None,
+ ignore_option_errors=False,
+ dist: Optional["Distribution"] = None,
+) -> dict:
+ """Given a configuration with unresolved fields (e.g. dynamic, cmdclass, ...)
+ find their final values.
+
+ :param dict config: Dict containing the configuration for the distribution
+ :param str root_dir: Top-level directory for the distribution/project
+ (the same directory where ``pyproject.toml`` is placed)
+ :param bool ignore_option_errors: see :func:`read_configuration`
+ :param Distribution|None: Distribution object to which the configuration refers.
+ If not given, a dummy object will be created and discarded after the
+ configuration is read. Used in the case a dynamic configuration
+ (e.g. ``attr`` or ``cmdclass``) needs to be expanded.
+
+ :rtype: dict
+ """
+ root_dir = root_dir or os.getcwd()
+ project_cfg = config.get("project", {})
+ setuptools_cfg = config.get("tool", {}).get("setuptools", {})
+ ignore = ignore_option_errors
+
+ _expand_packages(setuptools_cfg, root_dir, ignore)
+ _canonic_package_data(setuptools_cfg)
+ _canonic_package_data(setuptools_cfg, "exclude-package-data")
+
+ # A distribution object is required for discovering the correct package_dir
+ dist = _ensure_dist(dist, project_cfg, root_dir)
+
+ with _EnsurePackagesDiscovered(dist, setuptools_cfg) as ensure_discovered:
+ package_dir = ensure_discovered.package_dir
+ process = partial(_process_field, ignore_option_errors=ignore)
+ cmdclass = partial(_expand.cmdclass, package_dir=package_dir, root_dir=root_dir)
+ data_files = partial(_expand.canonic_data_files, root_dir=root_dir)
+
+ process(setuptools_cfg, "data-files", data_files)
+ process(setuptools_cfg, "cmdclass", cmdclass)
+ _expand_all_dynamic(project_cfg, setuptools_cfg, package_dir, root_dir, ignore)
+
+ return config
+
+
+def _ensure_dist(
+ dist: Optional["Distribution"], project_cfg: dict, root_dir: _Path
+) -> "Distribution":
+ from setuptools.dist import Distribution
+
+ attrs = {"src_root": root_dir, "name": project_cfg.get("name", None)}
+ return dist or Distribution(attrs)
+
+
+class _EnsurePackagesDiscovered(_expand.EnsurePackagesDiscovered):
+ def __init__(self, distribution: "Distribution", setuptools_cfg: dict):
+ super().__init__(distribution)
+ self._setuptools_cfg = setuptools_cfg
+
+ def __enter__(self):
+ """When entering the context, the values of ``packages``, ``py_modules`` and
+ ``package_dir`` that are missing in ``dist`` are copied from ``setuptools_cfg``.
+ """
+ dist, cfg = self._dist, self._setuptools_cfg
+ package_dir: Dict[str, str] = cfg.setdefault("package-dir", {})
+ package_dir.update(dist.package_dir or {})
+ dist.package_dir = package_dir # needs to be the same object
+
+ dist.set_defaults._ignore_ext_modules() # pyproject.toml-specific behaviour
+
+ # Set `py_modules` and `packages` in dist to short-circuit auto-discovery,
+ # but avoid overwriting empty lists purposefully set by users.
+ if dist.py_modules is None:
+ dist.py_modules = cfg.get("py-modules")
+ if dist.packages is None:
+ dist.packages = cfg.get("packages")
+
+ return super().__enter__()
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ """When exiting the context, if values of ``packages``, ``py_modules`` and
+ ``package_dir`` are missing in ``setuptools_cfg``, copy from ``dist``.
+ """
+ # If anything was discovered set them back, so they count in the final config.
+ self._setuptools_cfg.setdefault("packages", self._dist.packages)
+ self._setuptools_cfg.setdefault("py-modules", self._dist.py_modules)
+ return super().__exit__(exc_type, exc_value, traceback)
+
+
+def _expand_all_dynamic(
+ project_cfg: dict,
+ setuptools_cfg: dict,
+ package_dir: Mapping[str, str],
+ root_dir: _Path,
+ ignore_option_errors: bool,
+):
+ ignore = ignore_option_errors
+ dynamic_cfg = setuptools_cfg.get("dynamic", {})
+ pkg_dir = package_dir
+ special = (
+ "readme",
+ "version",
+ "entry-points",
+ "scripts",
+ "gui-scripts",
+ "classifiers",
+ )
+ # readme, version, entry-points, scripts and classifiers need special handling
+ dynamic = project_cfg.get("dynamic", [])
+ regular_dynamic = (x for x in dynamic if x not in special)
+
+ for field in regular_dynamic:
+ value = _expand_dynamic(dynamic_cfg, field, pkg_dir, root_dir, ignore)
+ project_cfg[field] = value
+
+ if "version" in dynamic and "version" in dynamic_cfg:
+ version = _expand_dynamic(dynamic_cfg, "version", pkg_dir, root_dir, ignore)
+ project_cfg["version"] = _expand.version(version)
+
+ if "readme" in dynamic:
+ project_cfg["readme"] = _expand_readme(dynamic_cfg, root_dir, ignore)
+
+ if "entry-points" in dynamic:
+ field = "entry-points"
+ value = _expand_dynamic(dynamic_cfg, field, pkg_dir, root_dir, ignore)
+ project_cfg.update(_expand_entry_points(value, dynamic))
+
+ if "classifiers" in dynamic:
+ value = _expand_dynamic(dynamic_cfg, "classifiers", pkg_dir, root_dir, ignore)
+ project_cfg["classifiers"] = (value or "").splitlines()
+
+
+def _expand_dynamic(
+ dynamic_cfg: dict,
+ field: str,
+ package_dir: Mapping[str, str],
+ root_dir: _Path,
+ ignore_option_errors: bool,
+):
+ if field in dynamic_cfg:
+ directive = dynamic_cfg[field]
+ with _ignore_errors(ignore_option_errors):
+ if "file" in directive:
+ return _expand.read_files(directive["file"], root_dir)
+ if "attr" in directive:
+ return _expand.read_attr(directive["attr"], package_dir, root_dir)
+ elif not ignore_option_errors:
+ msg = f"Impossible to expand dynamic value of {field!r}. "
+ msg += f"No configuration found for `tool.setuptools.dynamic.{field}`"
+ raise OptionError(msg)
+ return None
+
+
+def _expand_readme(
+ dynamic_cfg: dict, root_dir: _Path, ignore_option_errors: bool
+) -> Dict[str, str]:
+ ignore = ignore_option_errors
+ return {
+ "text": _expand_dynamic(dynamic_cfg, "readme", {}, root_dir, ignore),
+ "content-type": dynamic_cfg["readme"].get("content-type", "text/x-rst"),
+ }
+
+
+def _expand_entry_points(text: str, dynamic: set):
+ groups = _expand.entry_points(text)
+ expanded = {"entry-points": groups}
+ if "scripts" in dynamic and "console_scripts" in groups:
+ expanded["scripts"] = groups.pop("console_scripts")
+ if "gui-scripts" in dynamic and "gui_scripts" in groups:
+ expanded["gui-scripts"] = groups.pop("gui_scripts")
+ return expanded
+
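+# Rough sketch of the result when ``scripts`` is also listed as dynamic
+# (entry-point contents hypothetical):
+#
+#     _expand_entry_points("[console_scripts]\nmycli = mypkg.cli:main\n",
+#                          {"scripts", "entry-points"})
+#     # -> {"entry-points": {}, "scripts": {"mycli": "mypkg.cli:main"}}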
+
+def _expand_packages(setuptools_cfg: dict, root_dir: _Path, ignore_option_errors=False):
+ packages = setuptools_cfg.get("packages")
+ if packages is None or isinstance(packages, (list, tuple)):
+ return
+
+ find = packages.get("find")
+ if isinstance(find, dict):
+ find["root_dir"] = root_dir
+ find["fill_package_dir"] = setuptools_cfg.setdefault("package-dir", {})
+ with _ignore_errors(ignore_option_errors):
+ setuptools_cfg["packages"] = _expand.find_packages(**find)
+
+
+def _process_field(
+ container: dict, field: str, fn: Callable, ignore_option_errors=False
+):
+ if field in container:
+ with _ignore_errors(ignore_option_errors):
+ container[field] = fn(container[field])
+
+
+def _canonic_package_data(setuptools_cfg, field="package-data"):
+ package_data = setuptools_cfg.get(field, {})
+ return _expand.canonic_package_data(package_data)
+
+
+@contextmanager
+def _ignore_errors(ignore_option_errors: bool):
+ if not ignore_option_errors:
+ yield
+ return
+
+ try:
+ yield
+ except Exception as ex:
+ _logger.debug(f"ignored error: {ex.__class__.__name__} - {ex}")
+
+
+class _ExperimentalProjectMetadata(UserWarning):
+ """Explicitly inform users that `pyproject.toml` configuration is experimental"""
diff --git a/setuptools/config.py b/setuptools/config/setupcfg.py
index 8eddcae..5ecf626 100644
--- a/setuptools/config.py
+++ b/setuptools/config/setupcfg.py
@@ -1,17 +1,40 @@
-from __future__ import absolute_import, unicode_literals
-import io
+"""Load setuptools configuration from ``setup.cfg`` files"""
import os
-import sys
+
+import warnings
+import functools
from collections import defaultdict
from functools import partial
-from importlib import import_module
+from functools import wraps
+from typing import (TYPE_CHECKING, Callable, Any, Dict, Generic, Iterable, List,
+ Optional, Tuple, TypeVar, Union)
from distutils.errors import DistutilsOptionError, DistutilsFileError
-from setuptools.extern.six import string_types
+from setuptools.extern.packaging.version import Version, InvalidVersion
+from setuptools.extern.packaging.specifiers import SpecifierSet
+
+from . import expand
+
+if TYPE_CHECKING:
+ from setuptools.dist import Distribution # noqa
+ from distutils.dist import DistributionMetadata # noqa
+
+_Path = Union[str, os.PathLike]
+SingleCommandOptions = Dict["str", Tuple["str", Any]]
+"""Dict that associate the name of the options of a particular command to a
+tuple. The first element of the tuple indicates the origin of the option value
+(e.g. the name of the configuration file where it was read from),
+while the second element of the tuple is the option value itself
+"""
+AllCommandOptions = Dict["str", SingleCommandOptions] # cmd name => its options
+Target = TypeVar("Target", bound=Union["Distribution", "DistributionMetadata"])
def read_configuration(
- filepath, find_others=False, ignore_option_errors=False):
+ filepath: _Path,
+ find_others=False,
+ ignore_option_errors=False
+) -> dict:
"""Read given configuration file and returns options from it as a dict.
:param str|unicode filepath: Path to configuration file
@@ -27,37 +50,65 @@ def read_configuration(
:rtype: dict
"""
- from setuptools.dist import Distribution, _Distribution
+ from setuptools.dist import Distribution
+
+ dist = Distribution()
+ filenames = dist.find_config_files() if find_others else []
+ handlers = _apply(dist, filepath, filenames, ignore_option_errors)
+ return configuration_to_dict(handlers)
+
+
+def apply_configuration(dist: "Distribution", filepath: _Path) -> "Distribution":
+ """Apply the configuration from a ``setup.cfg`` file into an existing
+ distribution object.
+ """
+ _apply(dist, filepath)
+ dist._finalize_requires()
+ return dist
+
+
+def _apply(
+ dist: "Distribution", filepath: _Path,
+ other_files: Iterable[_Path] = (),
+ ignore_option_errors: bool = False
+) -> Tuple["ConfigHandler", ...]:
+ """Read configuration from ``filepath`` and applies to the ``dist`` object."""
+ from setuptools.dist import _Distribution
filepath = os.path.abspath(filepath)
if not os.path.isfile(filepath):
- raise DistutilsFileError(
- 'Configuration file %s does not exist.' % filepath)
+ raise DistutilsFileError('Configuration file %s does not exist.' % filepath)
current_directory = os.getcwd()
os.chdir(os.path.dirname(filepath))
+ filenames = [*other_files, filepath]
try:
- dist = Distribution()
-
- filenames = dist.find_config_files() if find_others else []
- if filepath not in filenames:
- filenames.append(filepath)
-
_Distribution.parse_config_files(dist, filenames=filenames)
-
handlers = parse_configuration(
- dist, dist.command_options,
- ignore_option_errors=ignore_option_errors)
-
+ dist, dist.command_options, ignore_option_errors=ignore_option_errors
+ )
+ dist._finalize_license_files()
finally:
os.chdir(current_directory)
- return configuration_to_dict(handlers)
+ return handlers
-def configuration_to_dict(handlers):
+def _get_option(target_obj: Target, key: str):
+ """
+ Given a target object and option key, get that option from
+ the target object, either through a get_{key} method or
+ from an attribute directly.
+ """
+ getter_name = 'get_{key}'.format(**locals())
+ by_attribute = functools.partial(getattr, target_obj, key)
+ getter = getattr(target_obj, getter_name, by_attribute)
+ return getter()
+
+
+def configuration_to_dict(handlers: Tuple["ConfigHandler", ...]) -> dict:
"""Returns configuration data gathered by given handlers as a dict.
:param list[ConfigHandler] handlers: Handlers list,
@@ -65,29 +116,21 @@ def configuration_to_dict(handlers):
:rtype: dict
"""
- config_dict = defaultdict(dict)
+ config_dict: dict = defaultdict(dict)
for handler in handlers:
-
- obj_alias = handler.section_prefix
- target_obj = handler.target_obj
-
for option in handler.set_options:
- getter = getattr(target_obj, 'get_%s' % option, None)
-
- if getter is None:
- value = getattr(target_obj, option)
-
- else:
- value = getter()
-
- config_dict[obj_alias][option] = value
+ value = _get_option(handler.target_obj, option)
+ config_dict[handler.section_prefix][option] = value
return config_dict
def parse_configuration(
- distribution, command_options, ignore_option_errors=False):
+ distribution: "Distribution",
+ command_options: AllCommandOptions,
+ ignore_option_errors=False
+) -> Tuple["ConfigMetadataHandler", "ConfigOptionsHandler"]:
"""Performs additional parsing of configuration options
for a distribution.
@@ -101,35 +144,55 @@ def parse_configuration(
If False exceptions are propagated as expected.
:rtype: list
"""
- meta = ConfigMetadataHandler(
- distribution.metadata, command_options, ignore_option_errors)
- meta.parse()
+ with expand.EnsurePackagesDiscovered(distribution) as ensure_discovered:
+ options = ConfigOptionsHandler(
+ distribution,
+ command_options,
+ ignore_option_errors,
+ ensure_discovered,
+ )
- options = ConfigOptionsHandler(
- distribution, command_options, ignore_option_errors)
- options.parse()
+ options.parse()
+ if not distribution.package_dir:
+ distribution.package_dir = options.package_dir # Filled by `find_packages`
+
+ meta = ConfigMetadataHandler(
+ distribution.metadata,
+ command_options,
+ ignore_option_errors,
+ ensure_discovered,
+ distribution.package_dir,
+ distribution.src_root,
+ )
+ meta.parse()
return meta, options
-class ConfigHandler(object):
+class ConfigHandler(Generic[Target]):
"""Handles metadata supplied in configuration files."""
- section_prefix = None
+ section_prefix: str
"""Prefix for config sections handled by this handler.
Must be provided by class heirs.
"""
- aliases = {}
+ aliases: Dict[str, str] = {}
"""Options aliases.
For compatibility with various packages. E.g.: d2to1 and pbr.
Note: `-` in keys is replaced with `_` by config parser.
"""
- def __init__(self, target_obj, options, ignore_option_errors=False):
- sections = {}
+ def __init__(
+ self,
+ target_obj: Target,
+ options: AllCommandOptions,
+ ignore_option_errors,
+ ensure_discovered: expand.EnsurePackagesDiscovered,
+ ):
+ sections: AllCommandOptions = {}
section_prefix = self.section_prefix
for section_name, section_options in options.items():
@@ -142,13 +205,15 @@ class ConfigHandler(object):
self.ignore_option_errors = ignore_option_errors
self.target_obj = target_obj
self.sections = sections
- self.set_options = []
+ self.set_options: List[str] = []
+ self.ensure_discovered = ensure_discovered
@property
def parsers(self):
"""Metadata item name to parser function mapping."""
raise NotImplementedError(
- '%s must provide .parsers property' % self.__class__.__name__)
+ '%s must provide .parsers property' % self.__class__.__name__
+ )
def __setitem__(self, option_name, value):
unknown = tuple()
@@ -221,7 +286,8 @@ class ConfigHandler(object):
key, sep, val = line.partition(separator)
if sep != separator:
raise DistutilsOptionError(
- 'Unable to parse option value to dict: %s' % value)
+ 'Unable to parse option value to dict: %s' % value
+ )
result[key.strip()] = val.strip()
return result
@@ -237,7 +303,30 @@ class ConfigHandler(object):
return value in ('1', 'true', 'yes')
@classmethod
- def _parse_file(cls, value):
+ def _exclude_files_parser(cls, key):
+ """Returns a parser function to make sure field inputs
+ are not files.
+
+ Parses a value after getting the key so error messages are
+ more informative.
+
+ :param key:
+ :rtype: callable
+ """
+
+ def parser(value):
+ exclude_directive = 'file:'
+ if value.startswith(exclude_directive):
+ raise ValueError(
+ 'Only strings are accepted for the {0} field, '
+ 'files are not accepted'.format(key)
+ )
+ return value
+
+ return parser
+
+ @classmethod
+ def _parse_file(cls, value, root_dir: _Path):
"""Represents value as a string, allowing including text
from nearest files using `file:` directive.
@@ -245,7 +334,6 @@ class ConfigHandler(object):
directory with setup.py.
Examples:
- file: LICENSE
file: README.rst, CHANGELOG.md, src/file.txt
:param str value:
@@ -253,34 +341,17 @@ class ConfigHandler(object):
"""
include_directive = 'file:'
- if not isinstance(value, string_types):
+ if not isinstance(value, str):
return value
if not value.startswith(include_directive):
return value
- spec = value[len(include_directive):]
- filepaths = (os.path.abspath(path.strip()) for path in spec.split(','))
- return '\n'.join(
- cls._read_file(path)
- for path in filepaths
- if (cls._assert_local(path) or True)
- and os.path.isfile(path)
- )
+ spec = value[len(include_directive) :]
+ filepaths = (path.strip() for path in spec.split(','))
+ return expand.read_files(filepaths, root_dir)
- @staticmethod
- def _assert_local(filepath):
- if not filepath.startswith(os.getcwd()):
- raise DistutilsOptionError(
- '`file:` directive can not access %s' % filepath)
-
- @staticmethod
- def _read_file(filepath):
- with io.open(filepath, encoding='utf-8') as f:
- return f.read()
-
- @classmethod
- def _parse_attr(cls, value):
+ def _parse_attr(self, value, package_dir, root_dir: _Path):
"""Represents value as a module attribute.
Examples:
@@ -294,21 +365,11 @@ class ConfigHandler(object):
if not value.startswith(attr_directive):
return value
- attrs_path = value.replace(attr_directive, '').strip().split('.')
- attr_name = attrs_path.pop()
-
- module_name = '.'.join(attrs_path)
- module_name = module_name or '__init__'
+ attr_desc = value.replace(attr_directive, '')
- sys.path.insert(0, os.getcwd())
- try:
- module = import_module(module_name)
- value = getattr(module, attr_name)
-
- finally:
- sys.path = sys.path[1:]
-
- return value
+ # Make sure package_dir is populated correctly, so `attr:` directives can work
+ package_dir.update(self.ensure_discovered.package_dir)
+ return expand.read_attr(attr_desc, package_dir, root_dir)
@classmethod
def _get_parser_compound(cls, *parse_methods):
@@ -319,6 +380,7 @@ class ConfigHandler(object):
:param parse_methods:
:rtype: callable
"""
+
def parse(value):
parsed = value
@@ -368,21 +430,38 @@ class ConfigHandler(object):
if section_name: # [section.option] variant
method_postfix = '_%s' % section_name
- section_parser_method = getattr(
+ section_parser_method: Optional[Callable] = getattr(
self,
- # Dots in section names are tranlsated into dunderscores.
+ # Dots in section names are translated into dunderscores.
('parse_section%s' % method_postfix).replace('.', '__'),
- None)
+ None,
+ )
if section_parser_method is None:
raise DistutilsOptionError(
- 'Unsupported distribution option section: [%s.%s]' % (
- self.section_prefix, section_name))
+ 'Unsupported distribution option section: [%s.%s]'
+ % (self.section_prefix, section_name)
+ )
section_parser_method(section_options)
+ def _deprecated_config_handler(self, func, msg, warning_class):
+ """this function will wrap around parameters that are deprecated
+
+ :param msg: deprecation message
+ :param warning_class: class of warning exception to be raised
+ :param func: function to be wrapped around
+ """
+
+ @wraps(func)
+ def config_handler(*args, **kwargs):
+ warnings.warn(msg, warning_class)
+ return func(*args, **kwargs)
+
+ return config_handler
-class ConfigMetadataHandler(ConfigHandler):
+
+class ConfigMetadataHandler(ConfigHandler["DistributionMetadata"]):
section_prefix = 'metadata'
@@ -399,21 +478,47 @@ class ConfigMetadataHandler(ConfigHandler):
"""
+ def __init__(
+ self,
+ target_obj: "DistributionMetadata",
+ options: AllCommandOptions,
+ ignore_option_errors: bool,
+ ensure_discovered: expand.EnsurePackagesDiscovered,
+ package_dir: Optional[dict] = None,
+ root_dir: _Path = os.curdir
+ ):
+ super().__init__(target_obj, options, ignore_option_errors, ensure_discovered)
+ self.package_dir = package_dir
+ self.root_dir = root_dir
+
@property
def parsers(self):
"""Metadata item name to parser function mapping."""
parse_list = self._parse_list
- parse_file = self._parse_file
+ parse_file = partial(self._parse_file, root_dir=self.root_dir)
parse_dict = self._parse_dict
+ exclude_files_parser = self._exclude_files_parser
return {
'platforms': parse_list,
'keywords': parse_list,
'provides': parse_list,
- 'requires': parse_list,
+ 'requires': self._deprecated_config_handler(
+ parse_list,
+ "The requires parameter is deprecated, please use "
+ "install_requires for runtime dependencies.",
+ DeprecationWarning,
+ ),
'obsoletes': parse_list,
'classifiers': self._get_parser_compound(parse_file, parse_list),
- 'license': parse_file,
+ 'license': exclude_files_parser('license'),
+ 'license_file': self._deprecated_config_handler(
+ exclude_files_parser('license_file'),
+ "The license_file parameter is deprecated, "
+ "use license_files instead.",
+ DeprecationWarning,
+ ),
+ 'license_files': parse_list,
'description': parse_file,
'long_description': parse_file,
'version': self._parse_version,
@@ -427,24 +532,41 @@ class ConfigMetadataHandler(ConfigHandler):
:rtype: str
"""
- version = self._parse_attr(value)
+ version = self._parse_file(value, self.root_dir)
- if callable(version):
- version = version()
+ if version != value:
+ version = version.strip()
+ # Be strict about versions loaded from file because it's easy to
+ # accidentally include newlines and other unintended content
+ try:
+ Version(version)
+ except InvalidVersion:
+ tmpl = (
+ 'Version loaded from {value} does not '
+ 'comply with PEP 440: {version}'
+ )
+ raise DistutilsOptionError(tmpl.format(**locals()))
- if not isinstance(version, string_types):
- if hasattr(version, '__iter__'):
- version = '.'.join(map(str, version))
- else:
- version = '%s' % version
+ return version
- return version
+ return expand.version(self._parse_attr(value, self.package_dir, self.root_dir))
-class ConfigOptionsHandler(ConfigHandler):
+class ConfigOptionsHandler(ConfigHandler["Distribution"]):
section_prefix = 'options'
+ def __init__(
+ self,
+ target_obj: "Distribution",
+ options: AllCommandOptions,
+ ignore_option_errors: bool,
+ ensure_discovered: expand.EnsurePackagesDiscovered,
+ ):
+ super().__init__(target_obj, options, ignore_option_errors, ensure_discovered)
+ self.root_dir = target_obj.src_root
+ self.package_dir: Dict[str, str] = {} # To be filled by `find_packages`
+
@property
def parsers(self):
"""Metadata item name to parser function mapping."""
@@ -452,15 +574,13 @@ class ConfigOptionsHandler(ConfigHandler):
parse_list_semicolon = partial(self._parse_list, separator=';')
parse_bool = self._parse_bool
parse_dict = self._parse_dict
+ parse_cmdclass = self._parse_cmdclass
+ parse_file = partial(self._parse_file, root_dir=self.root_dir)
return {
'zip_safe': parse_bool,
- 'use_2to3': parse_bool,
'include_package_data': parse_bool,
'package_dir': parse_dict,
- 'use_2to3_fixers': parse_list,
- 'use_2to3_exclude_fixers': parse_list,
- 'convert_2to3_doctests': parse_list,
'scripts': parse_list,
'eager_resources': parse_list,
'dependency_links': parse_list,
@@ -469,28 +589,40 @@ class ConfigOptionsHandler(ConfigHandler):
'setup_requires': parse_list_semicolon,
'tests_require': parse_list_semicolon,
'packages': self._parse_packages,
- 'entry_points': self._parse_file,
+ 'entry_points': parse_file,
'py_modules': parse_list,
+ 'python_requires': SpecifierSet,
+ 'cmdclass': parse_cmdclass,
}
+ def _parse_cmdclass(self, value):
+ package_dir = self.ensure_discovered.package_dir
+ return expand.cmdclass(self._parse_dict(value), package_dir, self.root_dir)
+
def _parse_packages(self, value):
"""Parses `packages` option value.
:param value:
:rtype: list
"""
- find_directive = 'find:'
+ find_directives = ['find:', 'find_namespace:']
+ trimmed_value = value.strip()
- if not value.startswith(find_directive):
+ if trimmed_value not in find_directives:
return self._parse_list(value)
# Read function arguments from a dedicated section.
find_kwargs = self.parse_section_packages__find(
- self.sections.get('packages.find', {}))
+ self.sections.get('packages.find', {})
+ )
- from setuptools import find_packages
+ find_kwargs.update(
+ namespaces=(trimmed_value == find_directives[1]),
+ root_dir=self.root_dir,
+ fill_package_dir=self.package_dir,
+ )
- return find_packages(**find_kwargs)
+ return expand.find_packages(**find_kwargs)
def parse_section_packages__find(self, section_options):
"""Parses `packages.find` configuration file section.
@@ -499,13 +631,13 @@ class ConfigOptionsHandler(ConfigHandler):
:param dict section_options:
"""
- section_data = self._parse_section_to_dict(
- section_options, self._parse_list)
+ section_data = self._parse_section_to_dict(section_options, self._parse_list)
valid_keys = ['where', 'include', 'exclude']
find_kwargs = dict(
- [(k, v) for k, v in section_data.items() if k in valid_keys and v])
+ [(k, v) for k, v in section_data.items() if k in valid_keys and v]
+ )
where = find_kwargs.get('where')
if where is not None:
@@ -522,14 +654,8 @@ class ConfigOptionsHandler(ConfigHandler):
self['entry_points'] = parsed
def _parse_package_data(self, section_options):
- parsed = self._parse_section_to_dict(section_options, self._parse_list)
-
- root = parsed.get('*')
- if root:
- parsed[''] = root
- del parsed['*']
-
- return parsed
+ package_data = self._parse_section_to_dict(section_options, self._parse_list)
+ return expand.canonic_package_data(package_data)
def parse_section_package_data(self, section_options):
"""Parses `package_data` configuration file section.
@@ -543,8 +669,7 @@ class ConfigOptionsHandler(ConfigHandler):
:param dict section_options:
"""
- self['exclude_package_data'] = self._parse_package_data(
- section_options)
+ self['exclude_package_data'] = self._parse_package_data(section_options)
def parse_section_extras_require(self, section_options):
"""Parses `extras_require` configuration file section.
@@ -553,4 +678,13 @@ class ConfigOptionsHandler(ConfigHandler):
"""
parse_list = partial(self._parse_list, separator=';')
self['extras_require'] = self._parse_section_to_dict(
- section_options, parse_list)
+ section_options, parse_list
+ )
+
+ def parse_section_data_files(self, section_options):
+ """Parses `data_files` configuration file section.
+
+ :param dict section_options:
+ """
+ parsed = self._parse_section_to_dict(section_options, self._parse_list)
+ self['data_files'] = expand.canonic_data_files(parsed, self.root_dir)
diff --git a/setuptools/dep_util.py b/setuptools/dep_util.py
index 2931c13..521eb71 100644
--- a/setuptools/dep_util.py
+++ b/setuptools/dep_util.py
@@ -1,5 +1,6 @@
from distutils.dep_util import newer_group
+
# yes, this was almost entirely copy-pasted from
# 'newer_pairwise()', this is just another convenience
# function.
@@ -10,7 +11,8 @@ def newer_pairwise_group(sources_groups, targets):
of 'newer_group()'.
"""
if len(sources_groups) != len(targets):
- raise ValueError("'sources_group' and 'targets' must be the same length")
+ raise ValueError(
+ "'sources_group' and 'targets' must be the same length")
# build a pair of lists (sources_groups, targets) where source is newer
n_sources = []
diff --git a/setuptools/depends.py b/setuptools/depends.py
index 45e7052..adffd12 100644
--- a/setuptools/depends.py
+++ b/setuptools/depends.py
@@ -1,10 +1,12 @@
import sys
-import imp
import marshal
-from distutils.version import StrictVersion
-from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
+import contextlib
+import dis
-from .py33compat import Bytecode
+from setuptools.extern.packaging import version
+
+from ._imp import find_module, PY_COMPILED, PY_FROZEN, PY_SOURCE
+from . import _imp
__all__ = [
@@ -15,11 +17,12 @@ __all__ = [
class Require:
"""A prerequisite to building or installing a distribution"""
- def __init__(self, name, requested_version, module, homepage='',
+ def __init__(
+ self, name, requested_version, module, homepage='',
attribute=None, format=None):
if format is None and requested_version is not None:
- format = StrictVersion
+ format = version.Version
if format is not None:
requested_version = format(requested_version)
@@ -38,7 +41,7 @@ class Require:
def version_ok(self, version):
"""Is 'version' sufficiently up-to-date?"""
return self.attribute is None or self.format is None or \
- str(version) != "unknown" and version >= self.requested_version
+ str(version) != "unknown" and self.format(version) >= self.requested_version
def get_version(self, paths=None, default="unknown"):
"""Get version number of installed module, 'None', or 'default'
@@ -76,26 +79,18 @@ class Require:
version = self.get_version(paths)
if version is None:
return False
- return self.version_ok(version)
-
-
-def find_module(module, paths=None):
- """Just like 'imp.find_module()', but with package support"""
-
- parts = module.split('.')
+ return self.version_ok(str(version))
- while parts:
- part = parts.pop(0)
- f, path, (suffix, mode, kind) = info = imp.find_module(part, paths)
- if kind == PKG_DIRECTORY:
- parts = parts or ['__init__']
- paths = [path]
-
- elif parts:
- raise ImportError("Can't find %r in %s" % (parts, module))
+def maybe_close(f):
+ @contextlib.contextmanager
+ def empty():
+ yield
+ return
+ if not f:
+ return empty()
- return info
+ return contextlib.closing(f)
def get_module_constant(module, symbol, default=-1, paths=None):
@@ -106,28 +101,23 @@ def get_module_constant(module, symbol, default=-1, paths=None):
constant. Otherwise, return 'default'."""
try:
- f, path, (suffix, mode, kind) = find_module(module, paths)
+ f, path, (suffix, mode, kind) = info = find_module(module, paths)
except ImportError:
# Module doesn't exist
return None
- try:
+ with maybe_close(f):
if kind == PY_COMPILED:
f.read(8) # skip magic & date
code = marshal.load(f)
elif kind == PY_FROZEN:
- code = imp.get_frozen_object(module)
+ code = _imp.get_frozen_object(module, paths)
elif kind == PY_SOURCE:
code = compile(f.read(), path, 'exec')
else:
# Not something we can parse; we'll have to import it. :(
- if module not in sys.modules:
- imp.load_module(module, f, path, (suffix, mode, kind))
- return getattr(sys.modules[module], symbol, None)
-
- finally:
- if f:
- f.close()
+ imported = _imp.get_module(module, paths, info)
+ return getattr(imported, symbol, None)
return extract_constant(code, symbol, default)
@@ -156,7 +146,7 @@ def extract_constant(code, symbol, default=-1):
const = default
- for byte_code in Bytecode(code):
+ for byte_code in dis.Bytecode(code):
op = byte_code.opcode
arg = byte_code.arg
diff --git a/setuptools/discovery.py b/setuptools/discovery.py
new file mode 100644
index 0000000..95c3c7f
--- /dev/null
+++ b/setuptools/discovery.py
@@ -0,0 +1,588 @@
+"""Automatic discovery of Python modules and packages (for inclusion in the
+distribution) and other config values.
+
+For the purposes of this module, the following nomenclature is used:
+
+- "src-layout": a directory representing a Python project that contains a "src"
+ folder. Everything under the "src" folder is meant to be included in the
+ distribution when packaging the project. Example::
+
+ .
+ ├── tox.ini
+ ├── pyproject.toml
+ └── src/
+ └── mypkg/
+ ├── __init__.py
+ ├── mymodule.py
+ └── my_data_file.txt
+
+- "flat-layout": a Python project that does not use "src-layout" but instead
+ has a directory under the project root for each package::
+
+ .
+ ├── tox.ini
+ ├── pyproject.toml
+ └── mypkg/
+ ├── __init__.py
+ ├── mymodule.py
+ └── my_data_file.txt
+
+- "single-module": a project that contains a single Python script direct under
+ the project root (no directory used)::
+
+ .
+ ├── tox.ini
+ ├── pyproject.toml
+ └── mymodule.py
+
+"""
+
+import itertools
+import os
+from fnmatch import fnmatchcase
+from glob import glob
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Callable, Dict, Iterator, Iterable, List, Optional, Tuple, Union
+
+import _distutils_hack.override # noqa: F401
+
+from distutils import log
+from distutils.util import convert_path
+
+_Path = Union[str, os.PathLike]
+_Filter = Callable[[str], bool]
+StrIter = Iterator[str]
+
+chain_iter = itertools.chain.from_iterable
+
+if TYPE_CHECKING:
+ from setuptools import Distribution # noqa
+
+
+def _valid_name(path: _Path) -> bool:
+ # Ignore invalid names that cannot be imported directly
+ return os.path.basename(path).isidentifier()
+
+
+class _Finder:
+ """Base class that exposes functionality for module/package finders"""
+
+ ALWAYS_EXCLUDE: Tuple[str, ...] = ()
+ DEFAULT_EXCLUDE: Tuple[str, ...] = ()
+
+ @classmethod
+ def find(
+ cls,
+ where: _Path = '.',
+ exclude: Iterable[str] = (),
+ include: Iterable[str] = ('*',)
+ ) -> List[str]:
+ """Return a list of all Python items (packages or modules, depending on
+ the finder implementation) found within directory 'where'.
+
+ 'where' is the root directory which will be searched.
+ It should be supplied as a "cross-platform" (i.e. URL-style) path;
+ it will be converted to the appropriate local path syntax.
+
+ 'exclude' is a sequence of names to exclude; '*' can be used
+ as a wildcard in the names.
+ When finding packages, 'foo.*' will exclude all subpackages of 'foo'
+ (but not 'foo' itself).
+
+ 'include' is a sequence of names to include.
+ If it's specified, only the named items will be included.
+ If it's not specified, all found items will be included.
+ 'include' can contain shell style wildcard patterns just like
+ 'exclude'.
+ """
+
+ exclude = exclude or cls.DEFAULT_EXCLUDE
+ return list(
+ cls._find_iter(
+ convert_path(str(where)),
+ cls._build_filter(*cls.ALWAYS_EXCLUDE, *exclude),
+ cls._build_filter(*include),
+ )
+ )
+
+ @classmethod
+ def _find_iter(cls, where: _Path, exclude: _Filter, include: _Filter) -> StrIter:
+ raise NotImplementedError
+
+ @staticmethod
+ def _build_filter(*patterns: str) -> _Filter:
+ """
+ Given a list of patterns, return a callable that will be true only if
+ the input matches at least one of the patterns.
+ """
+ return lambda name: any(fnmatchcase(name, pat) for pat in patterns)
+
+
+class PackageFinder(_Finder):
+ """
+ Generate a list of all Python packages found within a directory
+ """
+
+ ALWAYS_EXCLUDE = ("ez_setup", "*__pycache__")
+
+ @classmethod
+ def _find_iter(cls, where: _Path, exclude: _Filter, include: _Filter) -> StrIter:
+ """
+ All the packages found in 'where' that pass the 'include' filter, but
+ not the 'exclude' filter.
+ """
+ for root, dirs, files in os.walk(str(where), followlinks=True):
+ # Copy dirs to iterate over it, then empty dirs.
+ all_dirs = dirs[:]
+ dirs[:] = []
+
+ for dir in all_dirs:
+ full_path = os.path.join(root, dir)
+ rel_path = os.path.relpath(full_path, where)
+ package = rel_path.replace(os.path.sep, '.')
+
+ # Skip directory trees that are not valid packages
+ if '.' in dir or not cls._looks_like_package(full_path, package):
+ continue
+
+ # Should this package be included?
+ if include(package) and not exclude(package):
+ yield package
+
+ # Keep searching subdirectories, as there may be more packages
+ # down there, even if the parent was excluded.
+ dirs.append(dir)
+
+ @staticmethod
+ def _looks_like_package(path: _Path, _package_name: str) -> bool:
+ """Does a directory look like a package?"""
+ return os.path.isfile(os.path.join(path, '__init__.py'))
+
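+# Minimal usage sketch (directory names hypothetical): assuming
+# ``src/mypkg/__init__.py`` and ``src/mypkg/tests/__init__.py`` exist,
+#
+#     PackageFinder.find(where="src", exclude=["*.tests"])
+#     # -> ['mypkg'] (subpackages matching the exclude pattern are dropped)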
+
+class PEP420PackageFinder(PackageFinder):
+ @staticmethod
+ def _looks_like_package(_path: _Path, _package_name: str) -> bool:
+ return True
+
+
+class ModuleFinder(_Finder):
+ """Find isolated Python modules.
+ This finder will **not** recurse into subdirectories.
+ """
+
+ @classmethod
+ def _find_iter(cls, where: _Path, exclude: _Filter, include: _Filter) -> StrIter:
+ for file in glob(os.path.join(where, "*.py")):
+ module, _ext = os.path.splitext(os.path.basename(file))
+
+ if not cls._looks_like_module(module):
+ continue
+
+ if include(module) and not exclude(module):
+ yield module
+
+ _looks_like_module = staticmethod(_valid_name)
+
+
+# We have to be extra careful in the case of flat layout to not include files
+# and directories not meant for distribution (e.g. tool-related)
+
+
+class FlatLayoutPackageFinder(PEP420PackageFinder):
+ _EXCLUDE = (
+ "ci",
+ "bin",
+ "doc",
+ "docs",
+ "documentation",
+ "manpages",
+ "news",
+ "changelog",
+ "test",
+ "tests",
+ "unit_test",
+ "unit_tests",
+ "example",
+ "examples",
+ "scripts",
+ "tools",
+ "util",
+ "utils",
+ "python",
+ "build",
+ "dist",
+ "venv",
+ "env",
+ "requirements",
+ # ---- Task runners / Build tools ----
+ "tasks", # invoke
+ "fabfile", # fabric
+ "site_scons", # SCons
+ # ---- Other tools ----
+ "benchmark",
+ "benchmarks",
+ "exercise",
+ "exercises",
+ # ---- Hidden directories/Private packages ----
+ "[._]*",
+ )
+
+ DEFAULT_EXCLUDE = tuple(chain_iter((p, f"{p}.*") for p in _EXCLUDE))
+ """Reserved package names"""
+
+ @staticmethod
+ def _looks_like_package(_path: _Path, package_name: str) -> bool:
+ names = package_name.split('.')
+ # Consider PEP 561
+ root_pkg_is_valid = names[0].isidentifier() or names[0].endswith("-stubs")
+ return root_pkg_is_valid and all(name.isidentifier() for name in names[1:])
+
+
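(Editorially, as a sketch only: `chain_iter` is defined earlier in this file as `itertools.chain.from_iterable`, so the expression above simply pairs each reserved name with a wildcard that also excludes its subpackages. Abbreviated illustration:)

    from itertools import chain

    _EXCLUDE = ("docs", "tests", "build")  # abbreviated for illustration
    tuple(chain.from_iterable((p, f"{p}.*") for p in _EXCLUDE))
    # -> ('docs', 'docs.*', 'tests', 'tests.*', 'build', 'build.*')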
+class FlatLayoutModuleFinder(ModuleFinder):
+ DEFAULT_EXCLUDE = (
+ "setup",
+ "conftest",
+ "test",
+ "tests",
+ "example",
+ "examples",
+ "build",
+ # ---- Task runners ----
+ "toxfile",
+ "noxfile",
+ "pavement",
+ "dodo",
+ "tasks",
+ "fabfile",
+ # ---- Other tools ----
+ "[Ss][Cc]onstruct", # SCons
+ "conanfile", # Connan: C/C++ build tool
+ "manage", # Django
+ "benchmark",
+ "benchmarks",
+ "exercise",
+ "exercises",
+ # ---- Hidden files/Private modules ----
+ "[._]*",
+ )
+ """Reserved top-level module names"""
+
+
+def _find_packages_within(root_pkg: str, pkg_dir: _Path) -> List[str]:
+ nested = PEP420PackageFinder.find(pkg_dir)
+ return [root_pkg] + [".".join((root_pkg, n)) for n in nested]
+
+
+class ConfigDiscovery:
+ """Fill-in metadata and options that can be automatically derived
+ (from other metadata/options, the file system or conventions)
+ """
+
+ def __init__(self, distribution: "Distribution"):
+ self.dist = distribution
+ self._called = False
+ self._disabled = False
+ self._skip_ext_modules = False
+
+ def _disable(self):
+ """Internal API to disable automatic discovery"""
+ self._disabled = True
+
+ def _ignore_ext_modules(self):
+ """Internal API to disregard ext_modules.
+
+ Normally auto-discovery would not be triggered if ``ext_modules`` are set
+ (this is done for backward compatibility with existing packages relying on
+ ``setup.py`` or ``setup.cfg``). However, ``setuptools`` can call this function
+ to ignore given ``ext_modules`` and proceed with the auto-discovery if
+ ``packages`` and ``py_modules`` are not given (e.g. when using pyproject.toml
+ metadata).
+ """
+ self._skip_ext_modules = True
+
+ @property
+ def _root_dir(self) -> _Path:
+ # The best is to wait until `src_root` is set in dist, before using _root_dir.
+ return self.dist.src_root or os.curdir
+
+ @property
+ def _package_dir(self) -> Dict[str, str]:
+ if self.dist.package_dir is None:
+ return {}
+ return self.dist.package_dir
+
+ def __call__(self, force=False, name=True, ignore_ext_modules=False):
+ """Automatically discover missing configuration fields
+ and modify the given ``distribution`` object in-place.
+
+ Note that by default this will only have an effect the first time the
+ ``ConfigDiscovery`` object is called.
+
+ To repeatedly invoke automatic discovery (e.g. when the project
+ directory changes), please use ``force=True`` (or create a new
+ ``ConfigDiscovery`` instance).
+ """
+ if force is False and (self._called or self._disabled):
+ # Avoid overhead of multiple calls
+ return
+
+ self._analyse_package_layout(ignore_ext_modules)
+ if name:
+ self.analyse_name() # depends on ``packages`` and ``py_modules``
+
+ self._called = True
+
+ def _explicitly_specified(self, ignore_ext_modules: bool) -> bool:
+ """``True`` if the user has specified some form of package/module listing"""
+ ignore_ext_modules = ignore_ext_modules or self._skip_ext_modules
+ ext_modules = not (self.dist.ext_modules is None or ignore_ext_modules)
+ return (
+ self.dist.packages is not None
+ or self.dist.py_modules is not None
+ or ext_modules
+ or hasattr(self.dist, "configuration") and self.dist.configuration
+ # ^ Some projects use numpy.distutils.misc_util.Configuration
+ )
+
+ def _analyse_package_layout(self, ignore_ext_modules: bool) -> bool:
+ if self._explicitly_specified(ignore_ext_modules):
+ # Nothing to discover; for backward compatibility, automatic discovery
+ # only runs when no packages/modules are explicitly given
+ return True
+
+ log.debug(
+ "No `packages` or `py_modules` configuration, performing "
+ "automatic discovery."
+ )
+
+ return (
+ self._analyse_explicit_layout()
+ or self._analyse_src_layout()
+ # flat-layout is the trickiest for discovery so it should be last
+ or self._analyse_flat_layout()
+ )
+
+ def _analyse_explicit_layout(self) -> bool:
+ """The user can explicitly give a package layout via ``package_dir``"""
+ package_dir = self._package_dir.copy() # don't modify directly
+ package_dir.pop("", None) # This falls under the "src-layout" umbrella
+ root_dir = self._root_dir
+
+ if not package_dir:
+ return False
+
+ log.debug(f"`explicit-layout` detected -- analysing {package_dir}")
+ pkgs = chain_iter(
+ _find_packages_within(pkg, os.path.join(root_dir, parent_dir))
+ for pkg, parent_dir in package_dir.items()
+ )
+ self.dist.packages = list(pkgs)
+ log.debug(f"discovered packages -- {self.dist.packages}")
+ return True
+
+ def _analyse_src_layout(self) -> bool:
+ """Try to find all packages or modules under the ``src`` directory
+ (or anything pointed by ``package_dir[""]``).
+
+ The "src-layout" is relatively safe for automatic discovery.
+ We assume that everything within is meant to be included in the
+ distribution.
+
+ If ``package_dir[""]`` is not given, but the ``src`` directory exists,
+ this function will set ``package_dir[""] = "src"``.
+ """
+ package_dir = self._package_dir
+ src_dir = os.path.join(self._root_dir, package_dir.get("", "src"))
+ if not os.path.isdir(src_dir):
+ return False
+
+ log.debug(f"`src-layout` detected -- analysing {src_dir}")
+ package_dir.setdefault("", os.path.basename(src_dir))
+ self.dist.package_dir = package_dir # persist eventual modifications
+ self.dist.packages = PEP420PackageFinder.find(src_dir)
+ self.dist.py_modules = ModuleFinder.find(src_dir)
+ log.debug(f"discovered packages -- {self.dist.packages}")
+ log.debug(f"discovered py_modules -- {self.dist.py_modules}")
+ return True
+
+ def _analyse_flat_layout(self) -> bool:
+ """Try to find all packages and modules under the project root.
+
+ Since the ``flat-layout`` is more dangerous in terms of accidentally including
+ extra files/directories, this function is more conservative and will raise an
+ error if multiple packages or modules are found.
+
+ This assumes that multi-package dists are uncommon and refuses to support
+ that use case, in order to prevent unintended errors.
+ """
+ log.debug(f"`flat-layout` detected -- analysing {self._root_dir}")
+ return self._analyse_flat_packages() or self._analyse_flat_modules()
+
+ def _analyse_flat_packages(self) -> bool:
+ self.dist.packages = FlatLayoutPackageFinder.find(self._root_dir)
+ top_level = remove_nested_packages(remove_stubs(self.dist.packages))
+ log.debug(f"discovered packages -- {self.dist.packages}")
+ self._ensure_no_accidental_inclusion(top_level, "packages")
+ return bool(top_level)
+
+ def _analyse_flat_modules(self) -> bool:
+ self.dist.py_modules = FlatLayoutModuleFinder.find(self._root_dir)
+ log.debug(f"discovered py_modules -- {self.dist.py_modules}")
+ self._ensure_no_accidental_inclusion(self.dist.py_modules, "modules")
+ return bool(self.dist.py_modules)
+
+ def _ensure_no_accidental_inclusion(self, detected: List[str], kind: str):
+ if len(detected) > 1:
+ from inspect import cleandoc
+ from setuptools.errors import PackageDiscoveryError
+
+ msg = f"""Multiple top-level {kind} discovered in a flat-layout: {detected}.
+
+ To avoid accidental inclusion of unwanted files or directories,
+ setuptools will not proceed with this build.
+
+ If you are trying to create a single distribution with multiple {kind}
+ on purpose, you should not rely on automatic discovery.
+ Instead, consider the following options:
+
+ 1. set up custom discovery (`find` directive with `include` or `exclude`)
+ 2. use a `src-layout`
+ 3. explicitly set `py_modules` or `packages` with a list of names
+
+ For more information, look for "package discovery" in the setuptools docs.
+ """
+ raise PackageDiscoveryError(cleandoc(msg))
+
+ def analyse_name(self):
+ """The packages/modules are the essential contribution of the author.
+ Therefore the name of the distribution can be derived from them.
+ """
+ if self.dist.metadata.name or self.dist.name:
+ # get_name() is not reliable (can return "UNKNOWN")
+ return None
+
+ log.debug("No `name` configuration, performing automatic discovery")
+
+ name = (
+ self._find_name_single_package_or_module()
+ or self._find_name_from_packages()
+ )
+ if name:
+ self.dist.metadata.name = name
+ self.dist.name = name
+
+ def _find_name_single_package_or_module(self) -> Optional[str]:
+ """Exactly one module or package"""
+ for field in ('packages', 'py_modules'):
+ items = getattr(self.dist, field, None) or []
+ if items and len(items) == 1:
+ log.debug(f"Single module/package detected, name: {items[0]}")
+ return items[0]
+
+ return None
+
+ def _find_name_from_packages(self) -> Optional[str]:
+ """Try to find the root package that is not a PEP 420 namespace"""
+ if not self.dist.packages:
+ return None
+
+ packages = remove_stubs(sorted(self.dist.packages, key=len))
+ package_dir = self.dist.package_dir or {}
+
+ parent_pkg = find_parent_package(packages, package_dir, self._root_dir)
+ if parent_pkg:
+ log.debug(f"Common parent package detected, name: {parent_pkg}")
+ return parent_pkg
+
+ log.warn("No parent package detected, impossible to derive `name`")
+ return None
+
+
+def remove_nested_packages(packages: List[str]) -> List[str]:
+ """Remove nested packages from a list of packages.
+
+ >>> remove_nested_packages(["a", "a.b1", "a.b2", "a.b1.c1"])
+ ['a']
+ >>> remove_nested_packages(["a", "b", "c.d", "c.d.e.f", "g.h", "a.a1"])
+ ['a', 'b', 'c.d', 'g.h']
+ """
+ pkgs = sorted(packages, key=len)
+ top_level = pkgs[:]
+ size = len(pkgs)
+ for i, name in enumerate(reversed(pkgs)):
+ if any(name.startswith(f"{other}.") for other in top_level):
+ top_level.pop(size - i - 1)
+
+ return top_level
+
+
+def remove_stubs(packages: List[str]) -> List[str]:
+ """Remove type stubs (:pep:`561`) from a list of packages.
+
+ >>> remove_stubs(["a", "a.b", "a-stubs", "a-stubs.b.c", "b", "c-stubs"])
+ ['a', 'a.b', 'b']
+ """
+ return [pkg for pkg in packages if not pkg.split(".")[0].endswith("-stubs")]
+
+
+def find_parent_package(
+ packages: List[str], package_dir: Dict[str, str], root_dir: _Path
+) -> Optional[str]:
+ """Find the parent package that is not a namespace."""
+ packages = sorted(packages, key=len)
+ common_ancestors = []
+ for i, name in enumerate(packages):
+ if not all(n.startswith(f"{name}.") for n in packages[i+1:]):
+ # Since packages are sorted by length, this condition is able
+ # to find a list of all common ancestors.
+ # When there is divergence (e.g. multiple root packages)
+ # the list will be empty
+ break
+ common_ancestors.append(name)
+
+ for name in common_ancestors:
+ pkg_path = find_package_path(name, package_dir, root_dir)
+ init = os.path.join(pkg_path, "__init__.py")
+ if os.path.isfile(init):
+ return name
+
+ return None
+
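(A small illustration of the ancestor logic above, with hypothetical package lists; the result also depends on an `__init__.py` existing on disk:)

    from setuptools.discovery import find_parent_package

    find_parent_package(["a", "a.b", "a.b.c"], {}, ".")
    # -> "a" when ./a/__init__.py exists (first non-namespace ancestor wins)
    find_parent_package(["a", "b"], {}, ".")
    # -> None (diverging root packages, so there is no common ancestor)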
+
+def find_package_path(name: str, package_dir: Dict[str, str], root_dir: _Path) -> str:
+ """Given a package name, return the path where it should be found on
+ disk, considering the ``package_dir`` option.
+
+ >>> path = find_package_path("my.pkg", {"": "root/is/nested"}, ".")
+ >>> path.replace(os.sep, "/")
+ './root/is/nested/my/pkg'
+
+ >>> path = find_package_path("my.pkg", {"my": "root/is/nested"}, ".")
+ >>> path.replace(os.sep, "/")
+ './root/is/nested/pkg'
+
+ >>> path = find_package_path("my.pkg", {"my.pkg": "root/is/nested"}, ".")
+ >>> path.replace(os.sep, "/")
+ './root/is/nested'
+
+ >>> path = find_package_path("other.pkg", {"my.pkg": "root/is/nested"}, ".")
+ >>> path.replace(os.sep, "/")
+ './other/pkg'
+ """
+ parts = name.split(".")
+ for i in range(len(parts), 0, -1):
+ # Look backwards, the most specific package_dir first
+ partial_name = ".".join(parts[:i])
+ if partial_name in package_dir:
+ parent = package_dir[partial_name]
+ return os.path.join(root_dir, parent, *parts[i:])
+
+ parent = package_dir.get("") or ""
+ return os.path.join(root_dir, *parent.split("/"), *parts)
+
+
+def construct_package_dir(packages: List[str], package_path: _Path) -> Dict[str, str]:
+ parent_pkgs = remove_nested_packages(packages)
+ prefix = Path(package_path).parts
+ return {pkg: "/".join([*prefix, *pkg.split(".")]) for pkg in parent_pkgs}
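(Taken together, a rough sketch of how the discovery machinery above is exercised when nothing is configured; illustrative only, assuming a src-layout project and the attribute names used in this file:)

    from setuptools.dist import Distribution

    dist = Distribution()   # no name/packages/py_modules supplied
    dist.set_defaults()     # ConfigDiscovery.__call__, attached in Distribution.__init__
    # _analyse_src_layout() sets package_dir[""] = "src" and fills
    # dist.packages / dist.py_modules; analyse_name() then derives
    # metadata.name when a single top-level package is found.
    print(dist.packages, dist.metadata.name)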
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 321ab6b..865a19d 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -1,6 +1,8 @@
# -*- coding: utf-8 -*-
__all__ = ['Distribution']
+import io
+import sys
import re
import os
import warnings
@@ -9,97 +11,196 @@ import distutils.log
import distutils.core
import distutils.cmd
import distutils.dist
+import distutils.command
+from distutils.util import strtobool
+from distutils.debug import DEBUG
+from distutils.fancy_getopt import translate_longopt
+from glob import iglob
import itertools
+import textwrap
+from typing import List, Optional, TYPE_CHECKING
+from pathlib import Path
+
from collections import defaultdict
-from distutils.errors import (
- DistutilsOptionError, DistutilsPlatformError, DistutilsSetupError,
-)
+from email import message_from_file
+
+from distutils.errors import DistutilsOptionError, DistutilsSetupError
from distutils.util import rfc822_escape
-from distutils.version import StrictVersion
-from setuptools.extern import six
from setuptools.extern import packaging
-from setuptools.extern.six.moves import map, filter, filterfalse
+from setuptools.extern import ordered_set
+from setuptools.extern.more_itertools import unique_everseen, partition
+from setuptools.extern import nspektr
+
+from ._importlib import metadata
-from setuptools.depends import Require
+from . import SetuptoolsDeprecationWarning
+
+import setuptools
+import setuptools.command
from setuptools import windows_support
from setuptools.monkey import get_unpatched
-from setuptools.config import parse_configuration
+from setuptools.config import setupcfg, pyprojecttoml
+from setuptools.discovery import ConfigDiscovery
+
import pkg_resources
-from .py36compat import Distribution_parse_config_files
+from setuptools.extern.packaging import version
+from . import _reqs
+from . import _entry_points
+
+if TYPE_CHECKING:
+ from email.message import Message
__import__('setuptools.extern.packaging.specifiers')
__import__('setuptools.extern.packaging.version')
def _get_unpatched(cls):
- warnings.warn("Do not call this function", DeprecationWarning)
+ warnings.warn("Do not call this function", DistDeprecationWarning)
return get_unpatched(cls)
-def get_metadata_version(dist_md):
- if dist_md.long_description_content_type or dist_md.provides_extras:
- return StrictVersion('2.1')
- elif (dist_md.maintainer is not None or
- dist_md.maintainer_email is not None or
- getattr(dist_md, 'python_requires', None) is not None):
- return StrictVersion('1.2')
- elif (dist_md.provides or dist_md.requires or dist_md.obsoletes or
- dist_md.classifiers or dist_md.download_url):
- return StrictVersion('1.1')
+def get_metadata_version(self):
+ mv = getattr(self, 'metadata_version', None)
+ if mv is None:
+ mv = version.Version('2.1')
+ self.metadata_version = mv
+ return mv
+
+
+def rfc822_unescape(content: str) -> str:
+ """Reverse RFC-822 escaping by removing leading whitespaces from content."""
+ lines = content.splitlines()
+ if len(lines) == 1:
+ return lines[0].lstrip()
+ return '\n'.join((lines[0].lstrip(), textwrap.dedent('\n'.join(lines[1:]))))
+
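(A small sketch of the round trip this helper undoes, assuming both helpers are importable from their modules as defined here; `distutils.util.rfc822_escape` indents continuation lines when writing PKG-INFO:)

    from distutils.util import rfc822_escape
    from setuptools.dist import rfc822_unescape

    text = "first line\nsecond line"
    escaped = rfc822_escape(text)            # 'first line\n        second line'
    assert rfc822_unescape(escaped) == text  # round-trips back to the original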
+
+def _read_field_from_msg(msg: "Message", field: str) -> Optional[str]:
+ """Read Message header field."""
+ value = msg[field]
+ if value == 'UNKNOWN':
+ return None
+ return value
+
+
+def _read_field_unescaped_from_msg(msg: "Message", field: str) -> Optional[str]:
+ """Read Message header field and apply rfc822_unescape."""
+ value = _read_field_from_msg(msg, field)
+ if value is None:
+ return value
+ return rfc822_unescape(value)
+
+
+def _read_list_from_msg(msg: "Message", field: str) -> Optional[List[str]]:
+ """Read Message header field and return all results as list."""
+ values = msg.get_all(field, None)
+ if values == []:
+ return None
+ return values
+
+
+def _read_payload_from_msg(msg: "Message") -> Optional[str]:
+ value = msg.get_payload().strip()
+ if value == 'UNKNOWN':
+ return None
+ return value
+
+
+def read_pkg_file(self, file):
+ """Reads the metadata values from a file object."""
+ msg = message_from_file(file)
+
+ self.metadata_version = version.Version(msg['metadata-version'])
+ self.name = _read_field_from_msg(msg, 'name')
+ self.version = _read_field_from_msg(msg, 'version')
+ self.description = _read_field_from_msg(msg, 'summary')
+ # we are filling author only.
+ self.author = _read_field_from_msg(msg, 'author')
+ self.maintainer = None
+ self.author_email = _read_field_from_msg(msg, 'author-email')
+ self.maintainer_email = None
+ self.url = _read_field_from_msg(msg, 'home-page')
+ self.download_url = _read_field_from_msg(msg, 'download-url')
+ self.license = _read_field_unescaped_from_msg(msg, 'license')
+
+ self.long_description = _read_field_unescaped_from_msg(msg, 'description')
+ if (
+ self.long_description is None and
+ self.metadata_version >= version.Version('2.1')
+ ):
+ self.long_description = _read_payload_from_msg(msg)
+ self.description = _read_field_from_msg(msg, 'summary')
+
+ if 'keywords' in msg:
+ self.keywords = _read_field_from_msg(msg, 'keywords').split(',')
+
+ self.platforms = _read_list_from_msg(msg, 'platform')
+ self.classifiers = _read_list_from_msg(msg, 'classifier')
+
+ # PEP 314 - these fields only exist in 1.1
+ if self.metadata_version == version.Version('1.1'):
+ self.requires = _read_list_from_msg(msg, 'requires')
+ self.provides = _read_list_from_msg(msg, 'provides')
+ self.obsoletes = _read_list_from_msg(msg, 'obsoletes')
+ else:
+ self.requires = None
+ self.provides = None
+ self.obsoletes = None
- return StrictVersion('1.0')
+ self.license_files = _read_list_from_msg(msg, 'license-file')
-# Based on Python 3.5 version
-def write_pkg_file(self, file):
- """Write the PKG-INFO format data to a file object.
+def single_line(val):
"""
- version = get_metadata_version(self)
-
- file.write('Metadata-Version: %s\n' % version)
- file.write('Name: %s\n' % self.get_name())
- file.write('Version: %s\n' % self.get_version())
- file.write('Summary: %s\n' % self.get_description())
- file.write('Home-page: %s\n' % self.get_url())
+ Quick and dirty validation for the Summary field (see pypa/setuptools#1390).
+ """
+ if '\n' in val:
+ # TODO: Replace with `raise ValueError("newlines not allowed")`
+ # after reviewing #2893.
+ warnings.warn("newlines not allowed and will break in the future")
+ val = val.strip().split('\n')[0]
+ return val
- if version < StrictVersion('1.2'):
- file.write('Author: %s\n' % self.get_contact())
- file.write('Author-email: %s\n' % self.get_contact_email())
- else:
- optional_fields = (
- ('Author', 'author'),
- ('Author-email', 'author_email'),
- ('Maintainer', 'maintainer'),
- ('Maintainer-email', 'maintainer_email'),
- )
- for field, attr in optional_fields:
- attr_val = getattr(self, attr)
- if six.PY2:
- attr_val = self._encode_field(attr_val)
+# Based on Python 3.5 version
+def write_pkg_file(self, file): # noqa: C901 # is too complex (14) # FIXME
+ """Write the PKG-INFO format data to a file object."""
+ version = self.get_metadata_version()
+
+ def write_field(key, value):
+ file.write("%s: %s\n" % (key, value))
+
+ write_field('Metadata-Version', str(version))
+ write_field('Name', self.get_name())
+ write_field('Version', self.get_version())
+ write_field('Summary', single_line(self.get_description()))
+
+ optional_fields = (
+ ('Home-page', 'url'),
+ ('Download-URL', 'download_url'),
+ ('Author', 'author'),
+ ('Author-email', 'author_email'),
+ ('Maintainer', 'maintainer'),
+ ('Maintainer-email', 'maintainer_email'),
+ )
- if attr_val is not None:
- file.write('%s: %s\n' % (field, attr_val))
+ for field, attr in optional_fields:
+ attr_val = getattr(self, attr, None)
+ if attr_val is not None:
+ write_field(field, attr_val)
- file.write('License: %s\n' % self.get_license())
- if self.download_url:
- file.write('Download-URL: %s\n' % self.download_url)
+ license = rfc822_escape(self.get_license())
+ write_field('License', license)
for project_url in self.project_urls.items():
- file.write('Project-URL: %s, %s\n' % project_url)
-
- long_desc = rfc822_escape(self.get_long_description())
- file.write('Description: %s\n' % long_desc)
+ write_field('Project-URL', '%s, %s' % project_url)
keywords = ','.join(self.get_keywords())
if keywords:
- file.write('Keywords: %s\n' % keywords)
+ write_field('Keywords', keywords)
- if version >= StrictVersion('1.2'):
- for platform in self.get_platforms():
- file.write('Platform: %s\n' % platform)
- else:
- self._write_list(file, 'Platform', self.get_platforms())
+ for platform in self.get_platforms():
+ write_field('Platform', platform)
self._write_list(file, 'Classifier', self.get_classifiers())
@@ -110,17 +211,18 @@ def write_pkg_file(self, file):
# Setuptools specific for PEP 345
if hasattr(self, 'python_requires'):
- file.write('Requires-Python: %s\n' % self.python_requires)
+ write_field('Requires-Python', self.python_requires)
# PEP 566
if self.long_description_content_type:
- file.write(
- 'Description-Content-Type: %s\n' %
- self.long_description_content_type
- )
+ write_field('Description-Content-Type', self.long_description_content_type)
if self.provides_extras:
for extra in self.provides_extras:
- file.write('Provides-Extra: %s\n' % extra)
+ write_field('Provides-Extra', extra)
+
+ self._write_list(file, 'License-File', self.license_files or [])
+
+ file.write("\n%s\n\n" % self.get_long_description())
sequence = tuple, list
@@ -128,23 +230,26 @@ sequence = tuple, list
def check_importable(dist, attr, value):
try:
- ep = pkg_resources.EntryPoint.parse('x=' + value)
+ ep = metadata.EntryPoint(value=value, name=None, group=None)
assert not ep.extras
- except (TypeError, ValueError, AttributeError, AssertionError):
+ except (TypeError, ValueError, AttributeError, AssertionError) as e:
raise DistutilsSetupError(
- "%r must be importable 'module:attrs' string (got %r)"
- % (attr, value)
- )
+ "%r must be importable 'module:attrs' string (got %r)" % (attr, value)
+ ) from e
def assert_string_list(dist, attr, value):
- """Verify that value is a string list or None"""
+ """Verify that value is a string list"""
try:
+ # verify that value is a list or tuple to exclude unordered
+ # or single-use iterables
+ assert isinstance(value, (list, tuple))
+ # verify that elements of value are strings
assert ''.join(value) != value
- except (TypeError, ValueError, AttributeError, AssertionError):
+ except (TypeError, ValueError, AttributeError, AssertionError) as e:
raise DistutilsSetupError(
"%r must be a list of strings (got %r)" % (attr, value)
- )
+ ) from e
def check_nsp(dist, attr, value):
@@ -154,14 +259,16 @@ def check_nsp(dist, attr, value):
for nsp in ns_packages:
if not dist.has_contents_for(nsp):
raise DistutilsSetupError(
- "Distribution contains no modules or packages for " +
- "namespace package %r" % nsp
+ "Distribution contains no modules or packages for "
+ + "namespace package %r" % nsp
)
parent, sep, child = nsp.rpartition('.')
if parent and parent not in ns_packages:
distutils.log.warn(
"WARNING: %r is declared as a package namespace, but %r"
- " is not: please correct this in setup.py", nsp, parent
+ " is not: please correct this in setup.py",
+ nsp,
+ parent,
)
@@ -169,19 +276,19 @@ def check_extras(dist, attr, value):
"""Verify that extras_require mapping is valid"""
try:
list(itertools.starmap(_check_extra, value.items()))
- except (TypeError, ValueError, AttributeError):
+ except (TypeError, ValueError, AttributeError) as e:
raise DistutilsSetupError(
"'extras_require' must be a dictionary whose values are "
"strings or lists of strings containing valid project/version "
"requirement specifiers."
- )
+ ) from e
def _check_extra(extra, reqs):
name, sep, marker = extra.partition(':')
if marker and pkg_resources.invalid_marker(marker):
raise DistutilsSetupError("Invalid environment marker: " + marker)
- list(pkg_resources.parse_requirements(reqs))
+ list(_reqs.parse(reqs))
def assert_bool(dist, attr, value):
@@ -191,10 +298,17 @@ def assert_bool(dist, attr, value):
raise DistutilsSetupError(tmpl.format(attr=attr, value=value))
+def invalid_unless_false(dist, attr, value):
+ if not value:
+ warnings.warn(f"{attr} is ignored.", DistDeprecationWarning)
+ return
+ raise DistutilsSetupError(f"{attr} is invalid.")
+
+
def check_requirements(dist, attr, value):
"""Verify that install_requires is a valid requirements list"""
try:
- list(pkg_resources.parse_requirements(value))
+ list(_reqs.parse(value))
if isinstance(value, (dict, set)):
raise TypeError("Unordered types are not allowed")
except (TypeError, ValueError) as error:
@@ -202,50 +316,46 @@ def check_requirements(dist, attr, value):
"{attr!r} must be a string or list of strings "
"containing valid project/version requirement specifiers; {error}"
)
- raise DistutilsSetupError(tmpl.format(attr=attr, error=error))
+ raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) from error
def check_specifier(dist, attr, value):
"""Verify that value is a valid version specifier"""
try:
packaging.specifiers.SpecifierSet(value)
- except packaging.specifiers.InvalidSpecifier as error:
+ except (packaging.specifiers.InvalidSpecifier, AttributeError) as error:
tmpl = (
- "{attr!r} must be a string "
- "containing valid version specifiers; {error}"
+ "{attr!r} must be a string " "containing valid version specifiers; {error}"
)
- raise DistutilsSetupError(tmpl.format(attr=attr, error=error))
+ raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) from error
def check_entry_points(dist, attr, value):
"""Verify that entry_points map is parseable"""
try:
- pkg_resources.EntryPoint.parse_map(value)
- except ValueError as e:
- raise DistutilsSetupError(e)
+ _entry_points.load(value)
+ except Exception as e:
+ raise DistutilsSetupError(e) from e
def check_test_suite(dist, attr, value):
- if not isinstance(value, six.string_types):
+ if not isinstance(value, str):
raise DistutilsSetupError("test_suite must be a string")
def check_package_data(dist, attr, value):
"""Verify that value is a dictionary of package names to glob lists"""
- if isinstance(value, dict):
- for k, v in value.items():
- if not isinstance(k, str):
- break
- try:
- iter(v)
- except TypeError:
- break
- else:
- return
- raise DistutilsSetupError(
- attr + " must be a dictionary mapping package names to lists of "
- "wildcard patterns"
- )
+ if not isinstance(value, dict):
+ raise DistutilsSetupError(
+ "{!r} must be a dictionary mapping package names to lists of "
+ "string wildcard patterns".format(attr)
+ )
+ for k, v in value.items():
+ if not isinstance(k, str):
+ raise DistutilsSetupError(
+ "keys of {!r} dict must be strings (got {!r})".format(attr, k)
+ )
+ assert_string_list(dist, 'values of {!r} dict'.format(attr), v)
def check_packages(dist, attr, value):
@@ -253,15 +363,16 @@ def check_packages(dist, attr, value):
if not re.match(r'\w+(\.\w+)*', pkgname):
distutils.log.warn(
"WARNING: %r not a valid package name; please use only "
- ".-separated package names in setup.py", pkgname
+ ".-separated package names in setup.py",
+ pkgname,
)
_Distribution = get_unpatched(distutils.core.Distribution)
-class Distribution(Distribution_parse_config_files, _Distribution):
- """Distribution with support for features, tests, and package data
+class Distribution(_Distribution):
+ """Distribution with support for tests and package data
This is an enhanced version of 'distutils.dist.Distribution' that
effectively adds the following new optional keyword arguments to 'setup()':
@@ -288,21 +399,6 @@ class Distribution(Distribution_parse_config_files, _Distribution):
EasyInstall and requests one of your extras, the corresponding
additional requirements will be installed if needed.
- 'features' **deprecated** -- a dictionary mapping option names to
- 'setuptools.Feature'
- objects. Features are a portion of the distribution that can be
- included or excluded based on user options, inter-feature dependencies,
- and availability on the current system. Excluded features are omitted
- from all setup commands, including source and binary distributions, so
- you can create multiple distributions from the same source tree.
- Feature names should be valid Python identifiers, except that they may
- contain the '-' (minus) sign. Features can be included or excluded
- via the command line options '--with-X' and '--without-X', where 'X' is
- the name of the feature. Whether a feature is included by default, and
- whether you are allowed to control this from the command line, is
- determined by the Feature object. See the 'Feature' class for more
- information.
-
'test_suite' -- the name of a test suite to run for the 'test' command.
If the user runs 'python setup.py test', the package will be installed,
and the named test suite will be run. The format is the same as
@@ -324,10 +420,17 @@ class Distribution(Distribution_parse_config_files, _Distribution):
for manipulating the distribution's contents. For example, the 'include()'
and 'exclude()' methods can be thought of as in-place add and subtract
commands that add or remove packages, modules, extensions, and so on from
- the distribution. They are used by the feature subsystem to configure the
- distribution for the included and excluded features.
+ the distribution.
"""
+ _DISTUTILS_UNSUPPORTED_METADATA = {
+ 'long_description_content_type': lambda: None,
+ 'project_urls': dict,
+ 'provides_extras': ordered_set.OrderedSet,
+ 'license_file': lambda: None,
+ 'license_files': lambda: None,
+ }
+
_patched_dist = None
def patch_missing_pkg_info(self, attrs):
@@ -348,55 +451,85 @@ class Distribution(Distribution_parse_config_files, _Distribution):
if not have_package_data:
self.package_data = {}
attrs = attrs or {}
- if 'features' in attrs or 'require_features' in attrs:
- Feature.warn_deprecated()
- self.require_features = []
- self.features = {}
self.dist_files = []
+ # Filter-out setuptools' specific options.
self.src_root = attrs.pop("src_root", None)
self.patch_missing_pkg_info(attrs)
- self.project_urls = attrs.get('project_urls', {})
self.dependency_links = attrs.pop('dependency_links', [])
self.setup_requires = attrs.pop('setup_requires', [])
- for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
+ for ep in metadata.entry_points(group='distutils.setup_keywords'):
vars(self).setdefault(ep.name, None)
- _Distribution.__init__(self, attrs)
-
- # The project_urls attribute may not be supported in distutils, so
- # prime it here from our value if not automatically set
- self.metadata.project_urls = getattr(
- self.metadata, 'project_urls', self.project_urls)
- self.metadata.long_description_content_type = attrs.get(
- 'long_description_content_type'
+ _Distribution.__init__(
+ self,
+ {
+ k: v
+ for k, v in attrs.items()
+ if k not in self._DISTUTILS_UNSUPPORTED_METADATA
+ },
)
- self.metadata.provides_extras = getattr(
- self.metadata, 'provides_extras', set()
+
+ self.set_defaults = ConfigDiscovery(self)
+
+ self._set_metadata_defaults(attrs)
+
+ self.metadata.version = self._normalize_version(
+ self._validate_version(self.metadata.version)
)
+ self._finalize_requires()
+
+ def _validate_metadata(self):
+ required = {"name"}
+ provided = {
+ key
+ for key in vars(self.metadata)
+ if getattr(self.metadata, key, None) is not None
+ }
+ missing = required - provided
- if isinstance(self.metadata.version, numbers.Number):
+ if missing:
+ msg = f"Required package metadata is missing: {missing}"
+ raise DistutilsSetupError(msg)
+
+ def _set_metadata_defaults(self, attrs):
+ """
+ Fill in missing metadata fields not supported by distutils.
+ Some fields may have been set by other tools (e.g. pbr).
+ Those fields (vars(self.metadata)) take precedence over
+ the supplied attrs.
+ """
+ for option, default in self._DISTUTILS_UNSUPPORTED_METADATA.items():
+ vars(self.metadata).setdefault(option, attrs.get(option, default()))
+
+ @staticmethod
+ def _normalize_version(version):
+ if isinstance(version, setuptools.sic) or version is None:
+ return version
+
+ normalized = str(packaging.version.Version(version))
+ if version != normalized:
+ tmpl = "Normalizing '{version}' to '{normalized}'"
+ warnings.warn(tmpl.format(**locals()))
+ return normalized
+ return version
+
+ @staticmethod
+ def _validate_version(version):
+ if isinstance(version, numbers.Number):
# Some people apparently take "version number" too literally :)
- self.metadata.version = str(self.metadata.version)
+ version = str(version)
- if self.metadata.version is not None:
+ if version is not None:
try:
- ver = packaging.version.Version(self.metadata.version)
- normalized_version = str(ver)
- if self.metadata.version != normalized_version:
- warnings.warn(
- "Normalizing '%s' to '%s'" % (
- self.metadata.version,
- normalized_version,
- )
- )
- self.metadata.version = normalized_version
+ packaging.version.Version(version)
except (packaging.version.InvalidVersion, TypeError):
warnings.warn(
"The version specified (%r) is an invalid version, this "
"may not work as expected with newer versions of "
"setuptools, pip, and PyPI. Please see PEP 440 for more "
- "details." % self.metadata.version
+ "details." % version
)
- self._finalize_requires()
+ return setuptools.sic(version)
+ return version
def _finalize_requires(self):
"""
@@ -429,7 +562,7 @@ class Distribution(Distribution_parse_config_files, _Distribution):
for section, v in spec_ext_reqs.items():
# Do not strip empty sections.
self._tmp_extras_require[section]
- for r in pkg_resources.parse_requirements(v):
+ for r in _reqs.parse(v):
suffix = self._suffix_for(r)
self._tmp_extras_require[section + suffix].append(r)
@@ -455,9 +588,9 @@ class Distribution(Distribution_parse_config_files, _Distribution):
return not req.marker
spec_inst_reqs = getattr(self, 'install_requires', None) or ()
- inst_reqs = list(pkg_resources.parse_requirements(spec_inst_reqs))
+ inst_reqs = list(_reqs.parse(spec_inst_reqs))
simple_reqs = filter(is_simple_req, inst_reqs)
- complex_reqs = filterfalse(is_simple_req, inst_reqs)
+ complex_reqs = itertools.filterfalse(is_simple_req, inst_reqs)
self.install_requires = list(map(str, simple_reqs))
for r in complex_reqs:
@@ -474,32 +607,241 @@ class Distribution(Distribution_parse_config_files, _Distribution):
req.marker = None
return req
+ def _finalize_license_files(self):
+ """Compute names of all license files which should be included."""
+ license_files: Optional[List[str]] = self.metadata.license_files
+ patterns: List[str] = license_files if license_files else []
+
+ license_file: Optional[str] = self.metadata.license_file
+ if license_file and license_file not in patterns:
+ patterns.append(license_file)
+
+ if license_files is None and license_file is None:
+ # Default patterns match the ones wheel uses
+ # See https://wheel.readthedocs.io/en/stable/user_guide.html
+ # -> 'Including license files in the generated wheel file'
+ patterns = ('LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*')
+
+ self.metadata.license_files = list(
+ unique_everseen(self._expand_patterns(patterns))
+ )
+
+ @staticmethod
+ def _expand_patterns(patterns):
+ """
+ >>> list(Distribution._expand_patterns(['LICENSE']))
+ ['LICENSE']
+ >>> list(Distribution._expand_patterns(['setup.cfg', 'LIC*']))
+ ['setup.cfg', 'LICENSE']
+ """
+ return (
+ path
+ for pattern in patterns
+ for path in sorted(iglob(pattern))
+ if not path.endswith('~') and os.path.isfile(path)
+ )
+
+ # FIXME: 'Distribution._parse_config_files' is too complex (14)
+ def _parse_config_files(self, filenames=None): # noqa: C901
+ """
+ Adapted from distutils.dist.Distribution.parse_config_files,
+ this method provides the same functionality in subtly-improved
+ ways.
+ """
+ from configparser import ConfigParser
+
+ # Ignore install directory options if we have a venv
+ ignore_options = (
+ []
+ if sys.prefix == sys.base_prefix
+ else [
+ 'install-base',
+ 'install-platbase',
+ 'install-lib',
+ 'install-platlib',
+ 'install-purelib',
+ 'install-headers',
+ 'install-scripts',
+ 'install-data',
+ 'prefix',
+ 'exec-prefix',
+ 'home',
+ 'user',
+ 'root',
+ ]
+ )
+
+ ignore_options = frozenset(ignore_options)
+
+ if filenames is None:
+ filenames = self.find_config_files()
+
+ if DEBUG:
+ self.announce("Distribution.parse_config_files():")
+
+ parser = ConfigParser()
+ parser.optionxform = str
+ for filename in filenames:
+ with io.open(filename, encoding='utf-8') as reader:
+ if DEBUG:
+ self.announce(" reading {filename}".format(**locals()))
+ parser.read_file(reader)
+ for section in parser.sections():
+ options = parser.options(section)
+ opt_dict = self.get_option_dict(section)
+
+ for opt in options:
+ if opt == '__name__' or opt in ignore_options:
+ continue
+
+ val = parser.get(section, opt)
+ opt = self.warn_dash_deprecation(opt, section)
+ opt = self.make_option_lowercase(opt, section)
+ opt_dict[opt] = (filename, val)
+
+ # Make the ConfigParser forget everything (so we retain
+ # the original filenames that options come from)
+ parser.__init__()
+
+ if 'global' not in self.command_options:
+ return
+
+ # If there was a "global" section in the config file, use it
+ # to set Distribution options.
+
+ for (opt, (src, val)) in self.command_options['global'].items():
+ alias = self.negative_opt.get(opt)
+ if alias:
+ val = not strtobool(val)
+ elif opt in ('verbose', 'dry_run'): # ugh!
+ val = strtobool(val)
+
+ try:
+ setattr(self, alias or opt, val)
+ except ValueError as e:
+ raise DistutilsOptionError(e) from e
+
+ def warn_dash_deprecation(self, opt, section):
+ if section in (
+ 'options.extras_require',
+ 'options.data_files',
+ ):
+ return opt
+
+ underscore_opt = opt.replace('-', '_')
+ commands = list(itertools.chain(
+ distutils.command.__all__,
+ self._setuptools_commands(),
+ ))
+ if (
+ not section.startswith('options')
+ and section != 'metadata'
+ and section not in commands
+ ):
+ return underscore_opt
+
+ if '-' in opt:
+ warnings.warn(
+ "Usage of dash-separated '%s' will not be supported in future "
+ "versions. Please use the underscore name '%s' instead"
+ % (opt, underscore_opt)
+ )
+ return underscore_opt
+
+ def _setuptools_commands(self):
+ try:
+ return metadata.distribution('setuptools').entry_points.names
+ except metadata.PackageNotFoundError:
+ # during bootstrapping, distribution doesn't exist
+ return []
+
+ def make_option_lowercase(self, opt, section):
+ if section != 'metadata' or opt.islower():
+ return opt
+
+ lowercase_opt = opt.lower()
+ warnings.warn(
+ "Usage of uppercase key '%s' in '%s' will be deprecated in future "
+ "versions. Please use lowercase '%s' instead"
+ % (opt, section, lowercase_opt)
+ )
+ return lowercase_opt
+
+ # FIXME: 'Distribution._set_command_options' is too complex (14)
+ def _set_command_options(self, command_obj, option_dict=None): # noqa: C901
+ """
+ Set the options for 'command_obj' from 'option_dict'. Basically
+ this means copying elements of a dictionary ('option_dict') to
+ attributes of an instance ('command').
+
+ 'command_obj' must be a Command instance. If 'option_dict' is not
+ supplied, uses the standard option dictionary for this command
+ (from 'self.command_options').
+
+ (Adopted from distutils.dist.Distribution._set_command_options)
+ """
+ command_name = command_obj.get_command_name()
+ if option_dict is None:
+ option_dict = self.get_option_dict(command_name)
+
+ if DEBUG:
+ self.announce(" setting options for '%s' command:" % command_name)
+ for (option, (source, value)) in option_dict.items():
+ if DEBUG:
+ self.announce(" %s = %s (from %s)" % (option, value, source))
+ try:
+ bool_opts = [translate_longopt(o) for o in command_obj.boolean_options]
+ except AttributeError:
+ bool_opts = []
+ try:
+ neg_opt = command_obj.negative_opt
+ except AttributeError:
+ neg_opt = {}
+
+ try:
+ is_string = isinstance(value, str)
+ if option in neg_opt and is_string:
+ setattr(command_obj, neg_opt[option], not strtobool(value))
+ elif option in bool_opts and is_string:
+ setattr(command_obj, option, strtobool(value))
+ elif hasattr(command_obj, option):
+ setattr(command_obj, option, value)
+ else:
+ raise DistutilsOptionError(
+ "error in %s: command '%s' has no such option '%s'"
+ % (source, command_name, option)
+ )
+ except ValueError as e:
+ raise DistutilsOptionError(e) from e
+
def parse_config_files(self, filenames=None, ignore_option_errors=False):
"""Parses configuration files from various levels
and loads configuration.
-
"""
- _Distribution.parse_config_files(self, filenames=filenames)
+ tomlfiles = []
+ standard_project_metadata = Path(self.src_root or os.curdir, "pyproject.toml")
+ if filenames is not None:
+ parts = partition(lambda f: Path(f).suffix == ".toml", filenames)
+ filenames = list(parts[0]) # 1st element => predicate is False
+ tomlfiles = list(parts[1]) # 2nd element => predicate is True
+ elif standard_project_metadata.exists():
+ tomlfiles = [standard_project_metadata]
+
+ self._parse_config_files(filenames=filenames)
+
+ setupcfg.parse_configuration(
+ self, self.command_options, ignore_option_errors=ignore_option_errors
+ )
+ for filename in tomlfiles:
+ pyprojecttoml.apply_configuration(self, filename, ignore_option_errors)
- parse_configuration(self, self.command_options,
- ignore_option_errors=ignore_option_errors)
self._finalize_requires()
-
- def parse_command_line(self):
- """Process features after parsing command line options"""
- result = _Distribution.parse_command_line(self)
- if self.features:
- self._finalize_features()
- return result
-
- def _feature_attrname(self, name):
- """Convert feature name to corresponding option attribute name"""
- return 'with_' + name.replace('-', '_')
+ self._finalize_license_files()
def fetch_build_eggs(self, requires):
"""Resolve pre-setup requirements"""
resolved_dists = pkg_resources.working_set.resolve(
- pkg_resources.parse_requirements(requires),
+ _reqs.parse(requires),
installer=self.fetch_build_egg,
replace_conflicting=True,
)
@@ -508,23 +850,52 @@ class Distribution(Distribution_parse_config_files, _Distribution):
return resolved_dists
def finalize_options(self):
- _Distribution.finalize_options(self)
- if self.features:
- self._set_global_opts_from_features()
+ """
+ Allow plugins to apply arbitrary operations to the
+ distribution. Each hook may optionally define an 'order'
+ to influence the order of execution. Smaller numbers
+ go first and the default is 0.
+ """
+ group = 'setuptools.finalize_distribution_options'
+
+ def by_order(hook):
+ return getattr(hook, 'order', 0)
- for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
+ defined = metadata.entry_points(group=group)
+ filtered = itertools.filterfalse(self._removed, defined)
+ loaded = map(lambda e: e.load(), filtered)
+ for ep in sorted(loaded, key=by_order):
+ ep(self)
+
+ @staticmethod
+ def _removed(ep):
+ """
+ When removing an entry point, if metadata is loaded
+ from an older version of Setuptools, that removed
+ entry point will attempt to be loaded and will fail.
+ See #2765 for more details.
+ """
+ removed = {
+ # removed 2021-09-05
+ '2to3_doctests',
+ }
+ return ep.name in removed
+
+ def _finalize_setup_keywords(self):
+ for ep in metadata.entry_points(group='distutils.setup_keywords'):
value = getattr(self, ep.name, None)
if value is not None:
- ep.require(installer=self.fetch_build_egg)
+ self._install_dependencies(ep)
ep.load()(self, ep.name, value)
- if getattr(self, 'convert_2to3_doctests', None):
- # XXX may convert to set here when we can rely on set being builtin
- self.convert_2to3_doctests = [
- os.path.abspath(p)
- for p in self.convert_2to3_doctests
- ]
- else:
- self.convert_2to3_doctests = []
+
+ def _install_dependencies(self, ep):
+ """
+ Given an entry point, ensure that any declared extras for
+ its distribution are installed.
+ """
+ for req in nspektr.missing(ep):
+ # fetch_build_egg expects pkg_resources.Requirement
+ self.fetch_build_egg(pkg_resources.Requirement(str(req)))
def get_egg_cache_dir(self):
egg_cache_dir = os.path.join(os.curdir, '.eggs')
@@ -533,139 +904,55 @@ class Distribution(Distribution_parse_config_files, _Distribution):
windows_support.hide_file(egg_cache_dir)
readme_txt_filename = os.path.join(egg_cache_dir, 'README.txt')
with open(readme_txt_filename, 'w') as f:
- f.write('This directory contains eggs that were downloaded '
- 'by setuptools to build, test, and run plug-ins.\n\n')
- f.write('This directory caches those eggs to prevent '
- 'repeated downloads.\n\n')
+ f.write(
+ 'This directory contains eggs that were downloaded '
+ 'by setuptools to build, test, and run plug-ins.\n\n'
+ )
+ f.write(
+ 'This directory caches those eggs to prevent '
+ 'repeated downloads.\n\n'
+ )
f.write('However, it is safe to delete this directory.\n\n')
return egg_cache_dir
def fetch_build_egg(self, req):
"""Fetch an egg needed for building"""
- from setuptools.command.easy_install import easy_install
- dist = self.__class__({'script_args': ['easy_install']})
- opts = dist.get_option_dict('easy_install')
- opts.clear()
- opts.update(
- (k, v)
- for k, v in self.get_option_dict('easy_install').items()
- if k in (
- # don't use any other settings
- 'find_links', 'site_dirs', 'index_url',
- 'optimize', 'site_dirs', 'allow_hosts',
- ))
- if self.dependency_links:
- links = self.dependency_links[:]
- if 'find_links' in opts:
- links = opts['find_links'][1] + links
- opts['find_links'] = ('setup', links)
- install_dir = self.get_egg_cache_dir()
- cmd = easy_install(
- dist, args=["x"], install_dir=install_dir,
- exclude_scripts=True,
- always_copy=False, build_directory=None, editable=False,
- upgrade=False, multi_version=True, no_report=True, user=False
- )
- cmd.ensure_finalized()
- return cmd.easy_install(req)
-
- def _set_global_opts_from_features(self):
- """Add --with-X/--without-X options based on optional features"""
-
- go = []
- no = self.negative_opt.copy()
-
- for name, feature in self.features.items():
- self._set_feature(name, None)
- feature.validate(self)
-
- if feature.optional:
- descr = feature.description
- incdef = ' (default)'
- excdef = ''
- if not feature.include_by_default():
- excdef, incdef = incdef, excdef
-
- new = (
- ('with-' + name, None, 'include ' + descr + incdef),
- ('without-' + name, None, 'exclude ' + descr + excdef),
- )
- go.extend(new)
- no['without-' + name] = 'with-' + name
-
- self.global_options = self.feature_options = go + self.global_options
- self.negative_opt = self.feature_negopt = no
+ from setuptools.installer import fetch_build_egg
- def _finalize_features(self):
- """Add/remove features and resolve dependencies between them"""
-
- # First, flag all the enabled items (and thus their dependencies)
- for name, feature in self.features.items():
- enabled = self.feature_is_included(name)
- if enabled or (enabled is None and feature.include_by_default()):
- feature.include_in(self)
- self._set_feature(name, 1)
-
- # Then disable the rest, so that off-by-default features don't
- # get flagged as errors when they're required by an enabled feature
- for name, feature in self.features.items():
- if not self.feature_is_included(name):
- feature.exclude_from(self)
- self._set_feature(name, 0)
+ return fetch_build_egg(self, req)
def get_command_class(self, command):
"""Pluggable version of get_command_class()"""
if command in self.cmdclass:
return self.cmdclass[command]
- eps = pkg_resources.iter_entry_points('distutils.commands', command)
+ eps = metadata.entry_points(group='distutils.commands', name=command)
for ep in eps:
- ep.require(installer=self.fetch_build_egg)
+ self._install_dependencies(ep)
self.cmdclass[command] = cmdclass = ep.load()
return cmdclass
else:
return _Distribution.get_command_class(self, command)
def print_commands(self):
- for ep in pkg_resources.iter_entry_points('distutils.commands'):
+ for ep in metadata.entry_points(group='distutils.commands'):
if ep.name not in self.cmdclass:
- # don't require extras as the commands won't be invoked
- cmdclass = ep.resolve()
+ cmdclass = ep.load()
self.cmdclass[ep.name] = cmdclass
return _Distribution.print_commands(self)
def get_command_list(self):
- for ep in pkg_resources.iter_entry_points('distutils.commands'):
+ for ep in metadata.entry_points(group='distutils.commands'):
if ep.name not in self.cmdclass:
- # don't require extras as the commands won't be invoked
- cmdclass = ep.resolve()
+ cmdclass = ep.load()
self.cmdclass[ep.name] = cmdclass
return _Distribution.get_command_list(self)
- def _set_feature(self, name, status):
- """Set feature's inclusion status"""
- setattr(self, self._feature_attrname(name), status)
-
- def feature_is_included(self, name):
- """Return 1 if feature is included, 0 if excluded, 'None' if unknown"""
- return getattr(self, self._feature_attrname(name))
-
- def include_feature(self, name):
- """Request inclusion of feature named 'name'"""
-
- if self.feature_is_included(name) == 0:
- descr = self.features[name].description
- raise DistutilsOptionError(
- descr + " is required, but was excluded or is not available"
- )
- self.features[name].include_in(self)
- self._set_feature(name, 1)
-
def include(self, **attrs):
"""Add items to distribution that are named in keyword arguments
- For example, 'dist.exclude(py_modules=["x"])' would add 'x' to
+ For example, 'dist.include(py_modules=["x"])' would add 'x' to
the distribution's 'py_modules' attribute, if it was not already
there.
@@ -690,19 +977,18 @@ class Distribution(Distribution_parse_config_files, _Distribution):
pfx = package + '.'
if self.packages:
self.packages = [
- p for p in self.packages
- if p != package and not p.startswith(pfx)
+ p for p in self.packages if p != package and not p.startswith(pfx)
]
if self.py_modules:
self.py_modules = [
- p for p in self.py_modules
- if p != package and not p.startswith(pfx)
+ p for p in self.py_modules if p != package and not p.startswith(pfx)
]
if self.ext_modules:
self.ext_modules = [
- p for p in self.ext_modules
+ p
+ for p in self.ext_modules
if p.name != package and not p.name.startswith(pfx)
]
@@ -723,10 +1009,8 @@ class Distribution(Distribution_parse_config_files, _Distribution):
)
try:
old = getattr(self, name)
- except AttributeError:
- raise DistutilsSetupError(
- "%s: No such distribution setting" % name
- )
+ except AttributeError as e:
+ raise DistutilsSetupError("%s: No such distribution setting" % name) from e
if old is not None and not isinstance(old, sequence):
raise DistutilsSetupError(
name + ": this setting cannot be changed via include/exclude"
@@ -738,15 +1022,11 @@ class Distribution(Distribution_parse_config_files, _Distribution):
"""Handle 'include()' for list/tuple attrs without a special handler"""
if not isinstance(value, sequence):
- raise DistutilsSetupError(
- "%s: setting must be a list (%r)" % (name, value)
- )
+ raise DistutilsSetupError("%s: setting must be a list (%r)" % (name, value))
try:
old = getattr(self, name)
- except AttributeError:
- raise DistutilsSetupError(
- "%s: No such distribution setting" % name
- )
+ except AttributeError as e:
+ raise DistutilsSetupError("%s: No such distribution setting" % name) from e
if old is None:
setattr(self, name, value)
elif not isinstance(old, sequence):
@@ -799,6 +1079,7 @@ class Distribution(Distribution_parse_config_files, _Distribution):
src, alias = aliases[command]
del aliases[command] # ensure each alias can expand only once!
import shlex
+
args[:1] = shlex.split(alias, True)
command = args[0]
@@ -879,11 +1160,10 @@ class Distribution(Distribution_parse_config_files, _Distribution):
"""
import sys
- if six.PY2 or self.help_commands:
+ if self.help_commands:
return _Distribution.handle_display_options(self, option_order)
# Stdout may be StringIO (e.g. in tests)
- import io
if not isinstance(sys.stdout, io.TextIOWrapper):
return _Distribution.handle_display_options(self, option_order)
@@ -899,163 +1179,23 @@ class Distribution(Distribution_parse_config_files, _Distribution):
line_buffering = sys.stdout.line_buffering
sys.stdout = io.TextIOWrapper(
- sys.stdout.detach(), 'utf-8', errors, newline, line_buffering)
+ sys.stdout.detach(), 'utf-8', errors, newline, line_buffering
+ )
try:
return _Distribution.handle_display_options(self, option_order)
finally:
sys.stdout = io.TextIOWrapper(
- sys.stdout.detach(), encoding, errors, newline, line_buffering)
-
-
-class Feature:
- """
- **deprecated** -- The `Feature` facility was never completely implemented
- or supported, `has reported issues
- <https://github.com/pypa/setuptools/issues/58>`_ and will be removed in
- a future version.
-
- A subset of the distribution that can be excluded if unneeded/wanted
-
- Features are created using these keyword arguments:
-
- 'description' -- a short, human readable description of the feature, to
- be used in error messages, and option help messages.
-
- 'standard' -- if true, the feature is included by default if it is
- available on the current system. Otherwise, the feature is only
- included if requested via a command line '--with-X' option, or if
- another included feature requires it. The default setting is 'False'.
-
- 'available' -- if true, the feature is available for installation on the
- current system. The default setting is 'True'.
-
- 'optional' -- if true, the feature's inclusion can be controlled from the
- command line, using the '--with-X' or '--without-X' options. If
- false, the feature's inclusion status is determined automatically,
- based on 'availabile', 'standard', and whether any other feature
- requires it. The default setting is 'True'.
-
- 'require_features' -- a string or sequence of strings naming features
- that should also be included if this feature is included. Defaults to
- empty list. May also contain 'Require' objects that should be
- added/removed from the distribution.
-
- 'remove' -- a string or list of strings naming packages to be removed
- from the distribution if this feature is *not* included. If the
- feature *is* included, this argument is ignored. This argument exists
- to support removing features that "crosscut" a distribution, such as
- defining a 'tests' feature that removes all the 'tests' subpackages
- provided by other features. The default for this argument is an empty
- list. (Note: the named package(s) or modules must exist in the base
- distribution when the 'setup()' function is initially called.)
-
- other keywords -- any other keyword arguments are saved, and passed to
- the distribution's 'include()' and 'exclude()' methods when the
- feature is included or excluded, respectively. So, for example, you
- could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be
- added or removed from the distribution as appropriate.
-
- A feature must include at least one 'requires', 'remove', or other
- keyword argument. Otherwise, it can't affect the distribution in any way.
- Note also that you can subclass 'Feature' to create your own specialized
- feature types that modify the distribution in other ways when included or
- excluded. See the docstrings for the various methods here for more detail.
- Aside from the methods, the only feature attributes that distributions look
- at are 'description' and 'optional'.
- """
-
- @staticmethod
- def warn_deprecated():
- msg = (
- "Features are deprecated and will be removed in a future "
- "version. See https://github.com/pypa/setuptools/issues/65."
- )
- warnings.warn(msg, DeprecationWarning, stacklevel=3)
-
- def __init__(
- self, description, standard=False, available=True,
- optional=True, require_features=(), remove=(), **extras):
- self.warn_deprecated()
-
- self.description = description
- self.standard = standard
- self.available = available
- self.optional = optional
- if isinstance(require_features, (str, Require)):
- require_features = require_features,
-
- self.require_features = [
- r for r in require_features if isinstance(r, str)
- ]
- er = [r for r in require_features if not isinstance(r, str)]
- if er:
- extras['require_features'] = er
-
- if isinstance(remove, str):
- remove = remove,
- self.remove = remove
- self.extras = extras
-
- if not remove and not require_features and not extras:
- raise DistutilsSetupError(
- "Feature %s: must define 'require_features', 'remove', or "
- "at least one of 'packages', 'py_modules', etc."
+ sys.stdout.detach(), encoding, errors, newline, line_buffering
)
- def include_by_default(self):
- """Should this feature be included by default?"""
- return self.available and self.standard
-
- def include_in(self, dist):
- """Ensure feature and its requirements are included in distribution
-
- You may override this in a subclass to perform additional operations on
- the distribution. Note that this method may be called more than once
- per feature, and so should be idempotent.
-
- """
-
- if not self.available:
- raise DistutilsPlatformError(
- self.description + " is required, "
- "but is not available on this platform"
- )
+ def run_command(self, command):
+ self.set_defaults()
+ # Postpone defaults until all explicit configuration is considered
+ # (setup() args, config files, command line and plugins)
- dist.include(**self.extras)
+ super().run_command(command)
- for f in self.require_features:
- dist.include_feature(f)
- def exclude_from(self, dist):
- """Ensure feature is excluded from distribution
-
- You may override this in a subclass to perform additional operations on
- the distribution. This method will be called at most once per
- feature, and only after all included features have been asked to
- include themselves.
- """
-
- dist.exclude(**self.extras)
-
- if self.remove:
- for item in self.remove:
- dist.exclude_package(item)
-
- def validate(self, dist):
- """Verify that feature makes sense in context of distribution
-
- This method is called by the distribution just before it parses its
- command line. It checks to ensure that the 'remove' attribute, if any,
- contains only valid package/module names that are present in the base
- distribution when 'setup()' is called. You may override it in a
- subclass to perform any other required validation of the feature
- against a target distribution.
- """
-
- for item in self.remove:
- if not dist.has_contents_for(item):
- raise DistutilsSetupError(
- "%s wants to be able to remove %s, but the distribution"
- " doesn't contain any packages or modules under %s"
- % (self.description, item, item)
- )
+class DistDeprecationWarning(SetuptoolsDeprecationWarning):
+ """Class for warning about deprecations in dist in
+ setuptools. Not ignored by default, unlike DeprecationWarning."""
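
For readers unfamiliar with the API being deleted above, a minimal sketch of how the deprecated features/Feature machinery was typically declared in a setup.py (illustrative only: the project name, the "speedups" feature and its extension module are hypothetical, and this stops working once Features are removed):

    # Hypothetical setup.py exercising the now-removed Feature keyword.
    from setuptools import setup, Extension
    from setuptools.dist import Feature   # gone in newer setuptools releases

    speedups = Feature(
        "optional C speedups",   # description shown in --help output
        standard=True,           # included unless --without-speedups is given
        optional=True,           # user may toggle via --with/--without-speedups
        ext_modules=[Extension("pkg._speed", ["pkg/_speed.c"])],
    )

    setup(
        name="pkg",
        packages=["pkg"],
        features={"speedups": speedups},
    )

Extras such as ext_modules were forwarded to dist.include()/dist.exclude() exactly as the docstring above describes.
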
diff --git a/setuptools/errors.py b/setuptools/errors.py
new file mode 100644
index 0000000..ec7fb3b
--- /dev/null
+++ b/setuptools/errors.py
@@ -0,0 +1,58 @@
+"""setuptools.errors
+
+Provides exceptions used by setuptools modules.
+"""
+
+from distutils import errors as _distutils_errors
+
+
+# Re-export errors from distutils to facilitate the migration to PEP632
+
+ByteCompileError = _distutils_errors.DistutilsByteCompileError
+CCompilerError = _distutils_errors.CCompilerError
+ClassError = _distutils_errors.DistutilsClassError
+CompileError = _distutils_errors.CompileError
+ExecError = _distutils_errors.DistutilsExecError
+FileError = _distutils_errors.DistutilsFileError
+InternalError = _distutils_errors.DistutilsInternalError
+LibError = _distutils_errors.LibError
+LinkError = _distutils_errors.LinkError
+ModuleError = _distutils_errors.DistutilsModuleError
+OptionError = _distutils_errors.DistutilsOptionError
+PlatformError = _distutils_errors.DistutilsPlatformError
+PreprocessError = _distutils_errors.PreprocessError
+SetupError = _distutils_errors.DistutilsSetupError
+TemplateError = _distutils_errors.DistutilsTemplateError
+UnknownFileError = _distutils_errors.UnknownFileError
+
+# The root error class in the hierarchy
+BaseError = _distutils_errors.DistutilsError
+
+
+class RemovedCommandError(BaseError, RuntimeError):
+ """Error used for commands that have been removed in setuptools.
+
+ Since ``setuptools`` is built on ``distutils``, simply removing a command
+ from ``setuptools`` will make the behavior fall back to ``distutils``; this
+ error is raised if a command exists in ``distutils`` but has been actively
+ removed in ``setuptools``.
+ """
+
+
+class PackageDiscoveryError(BaseError, RuntimeError):
+ """Impossible to perform automatic discovery of packages and/or modules.
+
+ The current project layout or given discovery options can lead to problems when
+ scanning the project directory.
+
+    Setuptools might also refuse to complete auto-discovery if an error-prone condition
+ is detected (e.g. when a project is organised as a flat-layout but contains
+ multiple directories that can be taken as top-level packages inside a single
+ distribution [*]_). In these situations the users are encouraged to be explicit
+ about which packages to include or to make the discovery parameters more specific.
+
+ .. [*] Since multi-package distributions are uncommon it is very likely that the
+ developers did not intend for all the directories to be packaged, and are just
+ leaving auxiliary code in the repository top-level, such as maintenance-related
+ scripts.
+ """
diff --git a/setuptools/extension.py b/setuptools/extension.py
index 2946889..f696c9c 100644
--- a/setuptools/extension.py
+++ b/setuptools/extension.py
@@ -4,8 +4,6 @@ import distutils.core
import distutils.errors
import distutils.extension
-from setuptools.extern.six.moves import map
-
from .monkey import get_unpatched
@@ -36,7 +34,7 @@ class Extension(_Extension):
# The *args is needed for compatibility as calls may use positional
# arguments. py_limited_api may be set only via keyword.
self.py_limited_api = kw.pop("py_limited_api", False)
- _Extension.__init__(self, name, sources, *args, **kw)
+ super().__init__(name, sources, *args, **kw)
def _convert_pyx_sources_to_lang(self):
"""
diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py
index da3d668..f09b7fa 100644
--- a/setuptools/extern/__init__.py
+++ b/setuptools/extern/__init__.py
@@ -1,3 +1,4 @@
+import importlib.util
import sys
@@ -20,17 +21,10 @@ class VendorImporter:
yield self.vendor_pkg + '.'
yield ''
- def find_module(self, fullname, path=None):
- """
- Return self when fullname starts with root_name and the
- target module is one vendored through this importer.
- """
+ def _module_matches_namespace(self, fullname):
+ """Figure out if the target module is vendored."""
root, base, target = fullname.partition(self.root_name + '.')
- if root:
- return
- if not any(map(target.startswith, self.vendored_names)):
- return
- return self
+ return not root and any(map(target.startswith, self.vendored_names))
def load_module(self, fullname):
"""
@@ -43,13 +37,6 @@ class VendorImporter:
__import__(extant)
mod = sys.modules[extant]
sys.modules[fullname] = mod
- # mysterious hack:
- # Remove the reference to the extant package/module
- # on later Python versions to cause relative imports
- # in the vendor package to resolve the same modules
- # as those going through this importer.
- if sys.version_info > (3, 3):
- del sys.modules[extant]
return mod
except ImportError:
pass
@@ -61,6 +48,19 @@ class VendorImporter:
"distribution.".format(**locals())
)
+ def create_module(self, spec):
+ return self.load_module(spec.name)
+
+ def exec_module(self, module):
+ pass
+
+ def find_spec(self, fullname, path=None, target=None):
+ """Return a module spec for vendored names."""
+ return (
+ importlib.util.spec_from_loader(fullname, self)
+ if self._module_matches_namespace(fullname) else None
+ )
+
def install(self):
"""
Install this importer into sys.meta_path if not already present.
@@ -69,5 +69,9 @@ class VendorImporter:
sys.meta_path.append(self)
-names = 'six', 'packaging', 'pyparsing',
+names = (
+ 'packaging', 'pyparsing', 'ordered_set', 'more_itertools', 'importlib_metadata',
+ 'zipp', 'importlib_resources', 'jaraco', 'typing_extensions', 'nspektr',
+ 'tomli', '_validate_pyproject',
+)
VendorImporter(__name__, names, 'setuptools._vendor').install()
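
The switch from find_module/load_module to find_spec/create_module/exec_module follows the PEP 451 finder/loader protocol. A rough, self-contained sketch of that protocol using a hypothetical "demo_ns" namespace (the real VendorImporter instead redirects names under setuptools.extern to the vendored packages listed above):

    # Toy PEP 451 meta-path finder/loader; not the actual VendorImporter.
    import importlib.util
    import sys

    class DemoNamespaceImporter:
        root = "demo_ns"

        def find_spec(self, fullname, path=None, target=None):
            if fullname != self.root and not fullname.startswith(self.root + "."):
                return None
            is_pkg = fullname == self.root
            return importlib.util.spec_from_loader(fullname, self, is_package=is_pkg)

        def create_module(self, spec):
            return None                      # default module creation is fine

        def exec_module(self, module):
            module.note = "loaded by DemoNamespaceImporter"

    sys.meta_path.append(DemoNamespaceImporter())

    import demo_ns.anything                  # satisfied entirely by the finder
    print(demo_ns.anything.note)
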
diff --git a/setuptools/glibc.py b/setuptools/glibc.py
deleted file mode 100644
index a134591..0000000
--- a/setuptools/glibc.py
+++ /dev/null
@@ -1,86 +0,0 @@
-# This file originally from pip:
-# https://github.com/pypa/pip/blob/8f4f15a5a95d7d5b511ceaee9ed261176c181970/src/pip/_internal/utils/glibc.py
-from __future__ import absolute_import
-
-import ctypes
-import re
-import warnings
-
-
-def glibc_version_string():
- "Returns glibc version string, or None if not using glibc."
-
- # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
- # manpage says, "If filename is NULL, then the returned handle is for the
- # main program". This way we can let the linker do the work to figure out
- # which libc our process is actually using.
- process_namespace = ctypes.CDLL(None)
- try:
- gnu_get_libc_version = process_namespace.gnu_get_libc_version
- except AttributeError:
- # Symbol doesn't exist -> therefore, we are not linked to
- # glibc.
- return None
-
- # Call gnu_get_libc_version, which returns a string like "2.5"
- gnu_get_libc_version.restype = ctypes.c_char_p
- version_str = gnu_get_libc_version()
- # py2 / py3 compatibility:
- if not isinstance(version_str, str):
- version_str = version_str.decode("ascii")
-
- return version_str
-
-
-# Separated out from have_compatible_glibc for easier unit testing
-def check_glibc_version(version_str, required_major, minimum_minor):
- # Parse string and check against requested version.
- #
- # We use a regexp instead of str.split because we want to discard any
- # random junk that might come after the minor version -- this might happen
- # in patched/forked versions of glibc (e.g. Linaro's version of glibc
- # uses version strings like "2.20-2014.11"). See gh-3588.
- m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
- if not m:
- warnings.warn("Expected glibc version with 2 components major.minor,"
- " got: %s" % version_str, RuntimeWarning)
- return False
- return (int(m.group("major")) == required_major and
- int(m.group("minor")) >= minimum_minor)
-
-
-def have_compatible_glibc(required_major, minimum_minor):
- version_str = glibc_version_string()
- if version_str is None:
- return False
- return check_glibc_version(version_str, required_major, minimum_minor)
-
-
-# platform.libc_ver regularly returns completely nonsensical glibc
-# versions. E.g. on my computer, platform says:
-#
-# ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
-# ('glibc', '2.7')
-# ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
-# ('glibc', '2.9')
-#
-# But the truth is:
-#
-# ~$ ldd --version
-# ldd (Debian GLIBC 2.22-11) 2.22
-#
-# This is unfortunate, because it means that the linehaul data on libc
-# versions that was generated by pip 8.1.2 and earlier is useless and
-# misleading. Solution: instead of using platform, use our code that actually
-# works.
-def libc_ver():
- """Try to determine the glibc version
-
- Returns a tuple of strings (lib, version) which default to empty strings
- in case the lookup fails.
- """
- glibc_version = glibc_version_string()
- if glibc_version is None:
- return ("", "")
- else:
- return ("glibc", glibc_version)
diff --git a/setuptools/glob.py b/setuptools/glob.py
index 6c781de..87062b8 100644
--- a/setuptools/glob.py
+++ b/setuptools/glob.py
@@ -3,14 +3,12 @@ Filename globbing utility. Mostly a copy of `glob` from Python 3.5.
Changes include:
* `yield from` and PEP3102 `*` removed.
- * `bytes` changed to `six.binary_type`.
* Hidden files are not ignored.
"""
import os
import re
import fnmatch
-from setuptools.extern.six import binary_type
__all__ = ["glob", "iglob", "escape"]
@@ -49,6 +47,8 @@ def iglob(pathname, recursive=False):
def _iglob(pathname, recursive):
dirname, basename = os.path.split(pathname)
+ glob_in_dir = glob2 if recursive and _isrecursive(basename) else glob1
+
if not has_magic(pathname):
if basename:
if os.path.lexists(pathname):
@@ -58,13 +58,9 @@ def _iglob(pathname, recursive):
if os.path.isdir(dirname):
yield pathname
return
+
if not dirname:
- if recursive and _isrecursive(basename):
- for x in glob2(dirname, basename):
- yield x
- else:
- for x in glob1(dirname, basename):
- yield x
+ yield from glob_in_dir(dirname, basename)
return
# `os.path.split()` returns the argument itself as a dirname if it is a
# drive or UNC path. Prevent an infinite recursion if a drive or UNC path
@@ -73,12 +69,7 @@ def _iglob(pathname, recursive):
dirs = _iglob(dirname, recursive)
else:
dirs = [dirname]
- if has_magic(basename):
- if recursive and _isrecursive(basename):
- glob_in_dir = glob2
- else:
- glob_in_dir = glob1
- else:
+ if not has_magic(basename):
glob_in_dir = glob0
for dirname in dirs:
for name in glob_in_dir(dirname, basename):
@@ -92,7 +83,7 @@ def _iglob(pathname, recursive):
def glob1(dirname, pattern):
if not dirname:
- if isinstance(pattern, binary_type):
+ if isinstance(pattern, bytes):
dirname = os.curdir.encode('ASCII')
else:
dirname = os.curdir
@@ -129,8 +120,8 @@ def glob2(dirname, pattern):
# Recursively yields relative pathnames inside a literal directory.
def _rlistdir(dirname):
if not dirname:
- if isinstance(dirname, binary_type):
- dirname = binary_type(os.curdir, 'ASCII')
+ if isinstance(dirname, bytes):
+ dirname = os.curdir.encode('ASCII')
else:
dirname = os.curdir
try:
@@ -149,7 +140,7 @@ magic_check_bytes = re.compile(b'([*?[])')
def has_magic(s):
- if isinstance(s, binary_type):
+ if isinstance(s, bytes):
match = magic_check_bytes.search(s)
else:
match = magic_check.search(s)
@@ -157,7 +148,7 @@ def has_magic(s):
def _isrecursive(pattern):
- if isinstance(pattern, binary_type):
+ if isinstance(pattern, bytes):
return pattern == b'**'
else:
return pattern == '**'
@@ -169,7 +160,7 @@ def escape(pathname):
# Escaping is done by wrapping any of "*?[" between square brackets.
# Metacharacters do not work in the drive part and shouldn't be escaped.
drive, pathname = os.path.splitdrive(pathname)
- if isinstance(pathname, binary_type):
+ if isinstance(pathname, bytes):
pathname = magic_check_bytes.sub(br'[\1]', pathname)
else:
pathname = magic_check.sub(r'[\1]', pathname)
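
The escape() shown above mirrors the standard library's rule: each of the metacharacters *, ? and [ is neutralised by wrapping it in square brackets, while has_magic() merely tests whether any of them is present. A quick illustration against the stdlib implementation, which should behave the same way:

    # Illustration of the escaping rule; the stdlib glob.escape uses the same one.
    import glob
    import re

    magic_check = re.compile(r"([*?[])")

    print(glob.escape("data[2020]*.csv"))              # data[[]2020][*].csv
    print(bool(magic_check.search("data[2020]*.csv"))) # True  -> needs globbing
    print(bool(magic_check.search("plain_name.csv")))  # False -> literal path
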
diff --git a/setuptools/gui-arm64.exe b/setuptools/gui-arm64.exe
new file mode 100644
index 0000000..5730f11
--- /dev/null
+++ b/setuptools/gui-arm64.exe
Binary files differ
diff --git a/setuptools/installer.py b/setuptools/installer.py
new file mode 100644
index 0000000..b7096df
--- /dev/null
+++ b/setuptools/installer.py
@@ -0,0 +1,104 @@
+import glob
+import os
+import subprocess
+import sys
+import tempfile
+import warnings
+from distutils import log
+from distutils.errors import DistutilsError
+
+import pkg_resources
+from setuptools.wheel import Wheel
+from ._deprecation_warning import SetuptoolsDeprecationWarning
+
+
+def _fixup_find_links(find_links):
+ """Ensure find-links option end-up being a list of strings."""
+ if isinstance(find_links, str):
+ return find_links.split()
+ assert isinstance(find_links, (tuple, list))
+ return find_links
+
+
+def fetch_build_egg(dist, req): # noqa: C901 # is too complex (16) # FIXME
+ """Fetch an egg needed for building.
+
+ Use pip/wheel to fetch/build a wheel."""
+ warnings.warn(
+ "setuptools.installer is deprecated. Requirements should "
+ "be satisfied by a PEP 517 installer.",
+ SetuptoolsDeprecationWarning,
+ )
+ # Warn if wheel is not available
+ try:
+ pkg_resources.get_distribution('wheel')
+ except pkg_resources.DistributionNotFound:
+ dist.announce('WARNING: The wheel package is not available.', log.WARN)
+ # Ignore environment markers; if supplied, it is required.
+ req = strip_marker(req)
+ # Take easy_install options into account, but do not override relevant
+ # pip environment variables (like PIP_INDEX_URL or PIP_QUIET); they'll
+ # take precedence.
+ opts = dist.get_option_dict('easy_install')
+ if 'allow_hosts' in opts:
+ raise DistutilsError('the `allow-hosts` option is not supported '
+ 'when using pip to install requirements.')
+ quiet = 'PIP_QUIET' not in os.environ and 'PIP_VERBOSE' not in os.environ
+ if 'PIP_INDEX_URL' in os.environ:
+ index_url = None
+ elif 'index_url' in opts:
+ index_url = opts['index_url'][1]
+ else:
+ index_url = None
+ find_links = (
+ _fixup_find_links(opts['find_links'][1])[:] if 'find_links' in opts
+ else []
+ )
+ if dist.dependency_links:
+ find_links.extend(dist.dependency_links)
+ eggs_dir = os.path.realpath(dist.get_egg_cache_dir())
+ environment = pkg_resources.Environment()
+ for egg_dist in pkg_resources.find_distributions(eggs_dir):
+ if egg_dist in req and environment.can_add(egg_dist):
+ return egg_dist
+ with tempfile.TemporaryDirectory() as tmpdir:
+ cmd = [
+ sys.executable, '-m', 'pip',
+ '--disable-pip-version-check',
+ 'wheel', '--no-deps',
+ '-w', tmpdir,
+ ]
+ if quiet:
+ cmd.append('--quiet')
+ if index_url is not None:
+ cmd.extend(('--index-url', index_url))
+ for link in find_links or []:
+ cmd.extend(('--find-links', link))
+ # If requirement is a PEP 508 direct URL, directly pass
+ # the URL to pip, as `req @ url` does not work on the
+ # command line.
+ cmd.append(req.url or str(req))
+ try:
+ subprocess.check_call(cmd)
+ except subprocess.CalledProcessError as e:
+ raise DistutilsError(str(e)) from e
+ wheel = Wheel(glob.glob(os.path.join(tmpdir, '*.whl'))[0])
+ dist_location = os.path.join(eggs_dir, wheel.egg_name())
+ wheel.install_as_egg(dist_location)
+ dist_metadata = pkg_resources.PathMetadata(
+ dist_location, os.path.join(dist_location, 'EGG-INFO'))
+ dist = pkg_resources.Distribution.from_filename(
+ dist_location, metadata=dist_metadata)
+ return dist
+
+
+def strip_marker(req):
+ """
+ Return a new requirement without the environment marker to avoid
+ calling pip with something like `babel; extra == "i18n"`, which
+ would always be ignored.
+ """
+ # create a copy to avoid mutating the input
+ req = pkg_resources.Requirement.parse(str(req))
+ req.marker = None
+ return req
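
strip_marker() works because pkg_resources.Requirement round-trips through str() and exposes the parsed marker as an attribute; clearing it leaves a requirement string pip will actually install. Roughly:

    # Sketch of what strip_marker() does to a marker-qualified requirement.
    import pkg_resources

    req = pkg_resources.Requirement.parse('babel; extra == "i18n"')
    print(str(req))            # babel; extra == "i18n"

    bare = pkg_resources.Requirement.parse(str(req))   # copy, as in strip_marker()
    bare.marker = None
    print(str(bare))           # babel
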
diff --git a/setuptools/launch.py b/setuptools/launch.py
index 308283e..0208fdf 100644
--- a/setuptools/launch.py
+++ b/setuptools/launch.py
@@ -25,7 +25,8 @@ def run():
sys.argv[:] = sys.argv[1:]
open_ = getattr(tokenize, 'open', open)
- script = open_(script_name).read()
+ with open_(script_name) as fid:
+ script = fid.read()
norm_script = script.replace('\\r\\n', '\\n')
code = compile(norm_script, script_name, 'exec')
exec(code, namespace)
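
The pattern applied in this fix (open the script with tokenize.open so its PEP 263 coding cookie is honoured, read it inside a context manager, then compile and exec it) reduces to a few lines. A sketch with a hypothetical script path:

    # Sketch of the read/compile/exec pattern used by setuptools/launch.py.
    import tokenize

    script_name = "setup.py"                    # hypothetical target script
    with tokenize.open(script_name) as fid:     # decodes per the coding cookie
        source = fid.read()

    code = compile(source, script_name, "exec")
    exec(code, {"__file__": script_name, "__name__": "__main__"})
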
diff --git a/setuptools/lib2to3_ex.py b/setuptools/lib2to3_ex.py
deleted file mode 100644
index 4b1a73f..0000000
--- a/setuptools/lib2to3_ex.py
+++ /dev/null
@@ -1,62 +0,0 @@
-"""
-Customized Mixin2to3 support:
-
- - adds support for converting doctests
-
-
-This module raises an ImportError on Python 2.
-"""
-
-from distutils.util import Mixin2to3 as _Mixin2to3
-from distutils import log
-from lib2to3.refactor import RefactoringTool, get_fixers_from_package
-
-import setuptools
-
-
-class DistutilsRefactoringTool(RefactoringTool):
- def log_error(self, msg, *args, **kw):
- log.error(msg, *args)
-
- def log_message(self, msg, *args):
- log.info(msg, *args)
-
- def log_debug(self, msg, *args):
- log.debug(msg, *args)
-
-
-class Mixin2to3(_Mixin2to3):
- def run_2to3(self, files, doctests=False):
- # See of the distribution option has been set, otherwise check the
- # setuptools default.
- if self.distribution.use_2to3 is not True:
- return
- if not files:
- return
- log.info("Fixing " + " ".join(files))
- self.__build_fixer_names()
- self.__exclude_fixers()
- if doctests:
- if setuptools.run_2to3_on_doctests:
- r = DistutilsRefactoringTool(self.fixer_names)
- r.refactor(files, write=True, doctests_only=True)
- else:
- _Mixin2to3.run_2to3(self, files)
-
- def __build_fixer_names(self):
- if self.fixer_names:
- return
- self.fixer_names = []
- for p in setuptools.lib2to3_fixer_packages:
- self.fixer_names.extend(get_fixers_from_package(p))
- if self.distribution.use_2to3_fixers is not None:
- for p in self.distribution.use_2to3_fixers:
- self.fixer_names.extend(get_fixers_from_package(p))
-
- def __exclude_fixers(self):
- excluded_fixers = getattr(self, 'exclude_fixers', [])
- if self.distribution.use_2to3_exclude_fixers is not None:
- excluded_fixers.extend(self.distribution.use_2to3_exclude_fixers)
- for fixer_name in excluded_fixers:
- if fixer_name in self.fixer_names:
- self.fixer_names.remove(fixer_name)
diff --git a/setuptools/logging.py b/setuptools/logging.py
new file mode 100644
index 0000000..15b5761
--- /dev/null
+++ b/setuptools/logging.py
@@ -0,0 +1,36 @@
+import sys
+import logging
+import distutils.log
+from . import monkey
+
+
+def _not_warning(record):
+ return record.levelno < logging.WARNING
+
+
+def configure():
+ """
+ Configure logging to emit warning and above to stderr
+ and everything else to stdout. This behavior is provided
+    for compatibility with distutils.log but may change in
+ the future.
+ """
+ err_handler = logging.StreamHandler()
+ err_handler.setLevel(logging.WARNING)
+ out_handler = logging.StreamHandler(sys.stdout)
+ out_handler.addFilter(_not_warning)
+ handlers = err_handler, out_handler
+ logging.basicConfig(
+ format="{message}", style='{', handlers=handlers, level=logging.DEBUG)
+ monkey.patch_func(set_threshold, distutils.log, 'set_threshold')
+
+ # For some reason `distutils.log` module is getting cached in `distutils.dist`
+ # and then loaded again when patched,
+ # implying: id(distutils.log) != id(distutils.dist.log).
+ # Make sure the same module object is used everywhere:
+ distutils.dist.log = distutils.log
+
+
+def set_threshold(level):
+ logging.root.setLevel(level*10)
+ return set_threshold.unpatched(level)
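
The handler pair above sends WARNING and higher to stderr and everything below it to stdout, while set_threshold maps distutils' 1-5 verbosity scale onto the stdlib's 10-50 levels (distutils WARN = 3 becomes logging.WARNING = 30). A standalone sketch of the same stdout/stderr split:

    # Standalone sketch of the stderr/stdout routing configured above.
    import logging
    import sys

    def below_warning(record):
        return record.levelno < logging.WARNING

    err_handler = logging.StreamHandler()            # defaults to sys.stderr
    err_handler.setLevel(logging.WARNING)
    out_handler = logging.StreamHandler(sys.stdout)
    out_handler.addFilter(below_warning)

    logging.basicConfig(format="{message}", style="{",
                        handlers=[err_handler, out_handler],
                        level=logging.DEBUG)

    logging.info("routine progress -> stdout")
    logging.warning("something suspicious -> stderr")
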
diff --git a/setuptools/monkey.py b/setuptools/monkey.py
index 08ed50d..fb36dc1 100644
--- a/setuptools/monkey.py
+++ b/setuptools/monkey.py
@@ -10,8 +10,6 @@ import functools
from importlib import import_module
import inspect
-from setuptools.extern import six
-
import setuptools
__all__ = []
@@ -37,7 +35,7 @@ def _get_mro(cls):
def get_unpatched(item):
lookup = (
- get_unpatched_class if isinstance(item, six.class_types) else
+ get_unpatched_class if isinstance(item, type) else
get_unpatched_function if isinstance(item, types.FunctionType) else
lambda item: None
)
@@ -75,8 +73,6 @@ def patch_all():
needs_warehouse = (
sys.version_info < (2, 7, 13)
or
- (3, 0) < sys.version_info < (3, 3, 7)
- or
(3, 4) < sys.version_info < (3, 4, 6)
or
(3, 5) < sys.version_info <= (3, 5, 3)
@@ -86,7 +82,7 @@ def patch_all():
warehouse = 'https://upload.pypi.org/legacy/'
distutils.config.PyPIRCCommand.DEFAULT_REPOSITORY = warehouse
- _patch_distribution_metadata_write_pkg_file()
+ _patch_distribution_metadata()
# Install Distribution throughout the distutils
for module in distutils.dist, distutils.core, distutils.cmd:
@@ -103,11 +99,11 @@ def patch_all():
patch_for_msvc_specialized_compiler()
-def _patch_distribution_metadata_write_pkg_file():
- """Patch write_pkg_file to also write Requires-Python/Requires-External"""
- distutils.dist.DistributionMetadata.write_pkg_file = (
- setuptools.dist.write_pkg_file
- )
+def _patch_distribution_metadata():
+ """Patch write_pkg_file and read_pkg_file for higher metadata standards"""
+ for attr in ('write_pkg_file', 'read_pkg_file', 'get_metadata_version'):
+ new_val = getattr(setuptools.dist, attr)
+ setattr(distutils.dist.DistributionMetadata, attr, new_val)
def patch_func(replacement, target_mod, func_name):
@@ -140,7 +136,7 @@ def patch_for_msvc_specialized_compiler():
msvc = import_module('setuptools.msvc')
if platform.system() != 'Windows':
- # Compilers only availables on Microsoft Windows
+ # Compilers only available on Microsoft Windows
return
def patch_params(mod_name, func_name):
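
patch_func (used earlier by setuptools.logging.configure) appears to keep a reference to the original callable on the replacement, which is why set_threshold.unpatched(level) works. A simplified sketch of that monkey-patching idiom, shown here against a hypothetical target rather than distutils:

    # Simplified sketch of the patch-and-keep-original idiom; not the actual
    # setuptools.monkey implementation.
    import math

    def patch_func(replacement, target_mod, func_name):
        original = getattr(target_mod, func_name)
        replacement.unpatched = original          # keep the original reachable
        setattr(target_mod, func_name, replacement)

    def traced_sqrt(x):
        print("sqrt called with", x)
        return traced_sqrt.unpatched(x)            # delegate to the real sqrt

    patch_func(traced_sqrt, math, "sqrt")
    print(math.sqrt(9.0))                          # prints the trace, then 3.0
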
diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index 5e20b3f..281ea1c 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -11,31 +11,34 @@ Microsoft Visual C++ 9.0:
Microsoft Visual C++ 10.0:
Microsoft Windows SDK 7.1 (x86, x64, ia64)
-Microsoft Visual C++ 14.0:
+Microsoft Visual C++ 14.X:
Microsoft Visual C++ Build Tools 2015 (x86, x64, arm)
- Microsoft Visual Studio 2017 (x86, x64, arm, arm64)
Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64)
+ Microsoft Visual Studio Build Tools 2019 (x86, x64, arm, arm64)
+
+This may also support compilers shipped with compatible Visual Studio versions.
"""
-import os
+import json
+from io import open
+from os import listdir, pathsep
+from os.path import join, isfile, isdir, dirname
import sys
+import contextlib
import platform
import itertools
+import subprocess
import distutils.errors
from setuptools.extern.packaging.version import LegacyVersion
-
-from setuptools.extern.six.moves import filterfalse
+from setuptools.extern.more_itertools import unique_everseen
from .monkey import get_unpatched
if platform.system() == 'Windows':
- from setuptools.extern.six.moves import winreg
- safe_env = os.environ
+ import winreg
+ from os import environ
else:
- """
- Mock winreg and environ so the module can be imported
- on this platform.
- """
+ # Mock winreg and environ so the module can be imported on this platform.
class winreg:
HKEY_USERS = None
@@ -43,7 +46,7 @@ else:
HKEY_LOCAL_MACHINE = None
HKEY_CLASSES_ROOT = None
- safe_env = dict()
+ environ = dict()
_msvc9_suppress_errors = (
# msvc9compiler isn't available on some platforms
@@ -63,15 +66,13 @@ except _msvc9_suppress_errors:
def msvc9_find_vcvarsall(version):
"""
Patched "distutils.msvc9compiler.find_vcvarsall" to use the standalone
- compiler build for Python (VCForPython). Fall back to original behavior
- when the standalone compiler is not available.
+ compiler build for Python
+ (VCForPython / Microsoft Visual C++ Compiler for Python 2.7).
- Redirect the path of "vcvarsall.bat".
+ Fall back to original behavior when the standalone compiler is not
+ available.
- Known supported compilers
- -------------------------
- Microsoft Visual C++ 9.0:
- Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64)
+ Redirect the path of "vcvarsall.bat".
Parameters
----------
@@ -80,24 +81,25 @@ def msvc9_find_vcvarsall(version):
Return
------
- vcvarsall.bat path: str
+ str
+ vcvarsall.bat path
"""
- VC_BASE = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f'
- key = VC_BASE % ('', version)
+ vc_base = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f'
+ key = vc_base % ('', version)
try:
# Per-user installs register the compiler path here
productdir = Reg.get_value(key, "installdir")
except KeyError:
try:
# All-user installs on a 64-bit system register here
- key = VC_BASE % ('Wow6432Node\\', version)
+ key = vc_base % ('Wow6432Node\\', version)
productdir = Reg.get_value(key, "installdir")
except KeyError:
productdir = None
if productdir:
- vcvarsall = os.path.os.path.join(productdir, "vcvarsall.bat")
- if os.path.isfile(vcvarsall):
+ vcvarsall = join(productdir, "vcvarsall.bat")
+ if isfile(vcvarsall):
return vcvarsall
return get_unpatched(msvc9_find_vcvarsall)(version)
@@ -106,20 +108,10 @@ def msvc9_find_vcvarsall(version):
def msvc9_query_vcvarsall(ver, arch='x86', *args, **kwargs):
"""
Patched "distutils.msvc9compiler.query_vcvarsall" for support extra
- compilers.
+ Microsoft Visual C++ 9.0 and 10.0 compilers.
Set environment without use of "vcvarsall.bat".
- Known supported compilers
- -------------------------
- Microsoft Visual C++ 9.0:
- Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64)
- Microsoft Windows SDK 6.1 (x86, x64, ia64)
- Microsoft Windows SDK 7.0 (x86, x64, ia64)
-
- Microsoft Visual C++ 10.0:
- Microsoft Windows SDK 7.1 (x86, x64, ia64)
-
Parameters
----------
ver: float
@@ -129,9 +121,10 @@ def msvc9_query_vcvarsall(ver, arch='x86', *args, **kwargs):
Return
------
- environment: dict
+ dict
+ environment
"""
- # Try to get environement from vcvarsall.bat (Classical way)
+ # Try to get environment from vcvarsall.bat (Classical way)
try:
orig = get_unpatched(msvc9_query_vcvarsall)
return orig(ver, arch, *args, **kwargs)
@@ -150,20 +143,163 @@ def msvc9_query_vcvarsall(ver, arch='x86', *args, **kwargs):
raise
+def _msvc14_find_vc2015():
+ """Python 3.8 "distutils/_msvccompiler.py" backport"""
+ try:
+ key = winreg.OpenKey(
+ winreg.HKEY_LOCAL_MACHINE,
+ r"Software\Microsoft\VisualStudio\SxS\VC7",
+ 0,
+ winreg.KEY_READ | winreg.KEY_WOW64_32KEY
+ )
+ except OSError:
+ return None, None
+
+ best_version = 0
+ best_dir = None
+ with key:
+ for i in itertools.count():
+ try:
+ v, vc_dir, vt = winreg.EnumValue(key, i)
+ except OSError:
+ break
+ if v and vt == winreg.REG_SZ and isdir(vc_dir):
+ try:
+ version = int(float(v))
+ except (ValueError, TypeError):
+ continue
+ if version >= 14 and version > best_version:
+ best_version, best_dir = version, vc_dir
+ return best_version, best_dir
+
+
+def _msvc14_find_vc2017():
+ """Python 3.8 "distutils/_msvccompiler.py" backport
+
+ Returns "15, path" based on the result of invoking vswhere.exe
+ If no install is found, returns "None, None"
+
+ The version is returned to avoid unnecessarily changing the function
+ result. It may be ignored when the path is not None.
+
+ If vswhere.exe is not available, by definition, VS 2017 is not
+ installed.
+ """
+ root = environ.get("ProgramFiles(x86)") or environ.get("ProgramFiles")
+ if not root:
+ return None, None
+
+ try:
+ path = subprocess.check_output([
+ join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"),
+ "-latest",
+ "-prerelease",
+ "-requiresAny",
+ "-requires", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64",
+ "-requires", "Microsoft.VisualStudio.Workload.WDExpress",
+ "-property", "installationPath",
+ "-products", "*",
+ ]).decode(encoding="mbcs", errors="strict").strip()
+ except (subprocess.CalledProcessError, OSError, UnicodeDecodeError):
+ return None, None
+
+ path = join(path, "VC", "Auxiliary", "Build")
+ if isdir(path):
+ return 15, path
+
+ return None, None
+
+
+PLAT_SPEC_TO_RUNTIME = {
+ 'x86': 'x86',
+ 'x86_amd64': 'x64',
+ 'x86_arm': 'arm',
+ 'x86_arm64': 'arm64'
+}
+
+
+def _msvc14_find_vcvarsall(plat_spec):
+ """Python 3.8 "distutils/_msvccompiler.py" backport"""
+ _, best_dir = _msvc14_find_vc2017()
+ vcruntime = None
+
+ if plat_spec in PLAT_SPEC_TO_RUNTIME:
+ vcruntime_plat = PLAT_SPEC_TO_RUNTIME[plat_spec]
+ else:
+ vcruntime_plat = 'x64' if 'amd64' in plat_spec else 'x86'
+
+ if best_dir:
+ vcredist = join(best_dir, "..", "..", "redist", "MSVC", "**",
+ vcruntime_plat, "Microsoft.VC14*.CRT",
+ "vcruntime140.dll")
+ try:
+ import glob
+ vcruntime = glob.glob(vcredist, recursive=True)[-1]
+ except (ImportError, OSError, LookupError):
+ vcruntime = None
+
+ if not best_dir:
+ best_version, best_dir = _msvc14_find_vc2015()
+ if best_version:
+ vcruntime = join(best_dir, 'redist', vcruntime_plat,
+ "Microsoft.VC140.CRT", "vcruntime140.dll")
+
+ if not best_dir:
+ return None, None
+
+ vcvarsall = join(best_dir, "vcvarsall.bat")
+ if not isfile(vcvarsall):
+ return None, None
+
+ if not vcruntime or not isfile(vcruntime):
+ vcruntime = None
+
+ return vcvarsall, vcruntime
+
+
+def _msvc14_get_vc_env(plat_spec):
+ """Python 3.8 "distutils/_msvccompiler.py" backport"""
+ if "DISTUTILS_USE_SDK" in environ:
+ return {
+ key.lower(): value
+ for key, value in environ.items()
+ }
+
+ vcvarsall, vcruntime = _msvc14_find_vcvarsall(plat_spec)
+ if not vcvarsall:
+ raise distutils.errors.DistutilsPlatformError(
+ "Unable to find vcvarsall.bat"
+ )
+
+ try:
+ out = subprocess.check_output(
+ 'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec),
+ stderr=subprocess.STDOUT,
+ ).decode('utf-16le', errors='replace')
+ except subprocess.CalledProcessError as exc:
+ raise distutils.errors.DistutilsPlatformError(
+ "Error executing {}".format(exc.cmd)
+ ) from exc
+
+ env = {
+ key.lower(): value
+ for key, _, value in
+ (line.partition('=') for line in out.splitlines())
+ if key and value
+ }
+
+ if vcruntime:
+ env['py_vcruntime_redist'] = vcruntime
+ return env
+
+
def msvc14_get_vc_env(plat_spec):
"""
Patched "distutils._msvccompiler._get_vc_env" for support extra
- compilers.
+ Microsoft Visual C++ 14.X compilers.
Set environment without use of "vcvarsall.bat".
- Known supported compilers
- -------------------------
- Microsoft Visual C++ 14.0:
- Microsoft Visual C++ Build Tools 2015 (x86, x64, arm)
- Microsoft Visual Studio 2017 (x86, x64, arm, arm64)
- Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64)
-
Parameters
----------
plat_spec: str
@@ -171,18 +307,13 @@ def msvc14_get_vc_env(plat_spec):
Return
------
- environment: dict
+ dict
+ environment
"""
- # Try to get environment from vcvarsall.bat (Classical way)
- try:
- return get_unpatched(msvc14_get_vc_env)(plat_spec)
- except distutils.errors.DistutilsPlatformError:
- # Pass error Vcvarsall.bat is missing
- pass
- # If error, try to set environment directly
+ # Always use backport from CPython 3.8
try:
- return EnvironmentInfo(plat_spec, vc_min_ver=14.0).return_env()
+ return _msvc14_get_vc_env(plat_spec)
except distutils.errors.DistutilsPlatformError as exc:
_augment_exception(exc, 14.0)
raise
@@ -211,15 +342,15 @@ def _augment_exception(exc, version, arch=''):
if "vcvarsall" in message.lower() or "visual c" in message.lower():
# Special error message if MSVC++ not installed
- tmpl = 'Microsoft Visual C++ {version:0.1f} is required.'
+ tmpl = 'Microsoft Visual C++ {version:0.1f} or greater is required.'
message = tmpl.format(**locals())
msdownload = 'www.microsoft.com/download/details.aspx?id=%d'
if version == 9.0:
if arch.lower().find('ia64') > -1:
# For VC++ 9.0, if IA64 support is needed, redirect user
- # to Windows SDK 7.0
- message += ' Get it with "Microsoft Windows SDK 7.0": '
- message += msdownload % 3138
+ # to Windows SDK 7.0.
+ # Note: No download link available from Microsoft.
+ message += ' Get it with "Microsoft Windows SDK 7.0"'
else:
# For VC++ 9.0 redirect user to Vc++ for Python 2.7 :
# This redirection link is maintained by Microsoft.
@@ -230,36 +361,60 @@ def _augment_exception(exc, version, arch=''):
message += ' Get it with "Microsoft Windows SDK 7.1": '
message += msdownload % 8279
elif version >= 14.0:
- # For VC++ 14.0 Redirect user to Visual C++ Build Tools
- message += (' Get it with "Microsoft Visual C++ Build Tools": '
- r'http://landinghub.visualstudio.com/'
- 'visual-cpp-build-tools')
+ # For VC++ 14.X Redirect user to latest Visual C++ Build Tools
+ message += (' Get it with "Microsoft C++ Build Tools": '
+ r'https://visualstudio.microsoft.com'
+ r'/visual-cpp-build-tools/')
exc.args = (message, )
class PlatformInfo:
"""
- Current and Target Architectures informations.
+ Current and Target Architectures information.
Parameters
----------
arch: str
Target architecture.
"""
- current_cpu = safe_env.get('processor_architecture', '').lower()
+ current_cpu = environ.get('processor_architecture', '').lower()
def __init__(self, arch):
self.arch = arch.lower().replace('x64', 'amd64')
@property
def target_cpu(self):
+ """
+ Return Target CPU architecture.
+
+ Return
+ ------
+ str
+ Target CPU
+ """
return self.arch[self.arch.find('_') + 1:]
def target_is_x86(self):
+ """
+        Return True if target CPU is x86 32 bits.
+
+ Return
+ ------
+ bool
+ CPU is x86 32 bits
+ """
return self.target_cpu == 'x86'
def current_is_x86(self):
+ """
+        Return True if current CPU is x86 32 bits.
+
+ Return
+ ------
+ bool
+ CPU is x86 32 bits
+ """
return self.current_cpu == 'x86'
def current_dir(self, hidex86=False, x64=False):
@@ -275,8 +430,8 @@ class PlatformInfo:
Return
------
- subfolder: str
- '\target', or '' (see hidex86 parameter)
+ str
+ subfolder: '\target', or '' (see hidex86 parameter)
"""
return (
'' if (self.current_cpu == 'x86' and hidex86) else
@@ -297,8 +452,8 @@ class PlatformInfo:
Return
------
- subfolder: str
- '\current', or '' (see hidex86 parameter)
+ str
+ subfolder: '\current', or '' (see hidex86 parameter)
"""
return (
'' if (self.target_cpu == 'x86' and hidex86) else
@@ -313,13 +468,13 @@ class PlatformInfo:
Parameters
----------
forcex86: bool
- Use 'x86' as current architecture even if current acritecture is
+ Use 'x86' as current architecture even if current architecture is
not x86.
Return
------
- subfolder: str
- '' if target architecture is current architecture,
+ str
+ subfolder: '' if target architecture is current architecture,
'\current_target' if not.
"""
current = 'x86' if forcex86 else self.current_cpu
@@ -331,7 +486,7 @@ class PlatformInfo:
class RegistryInfo:
"""
- Microsoft Visual Studio related registry informations.
+ Microsoft Visual Studio related registry information.
Parameters
----------
@@ -350,6 +505,11 @@ class RegistryInfo:
def visualstudio(self):
"""
Microsoft Visual Studio root registry key.
+
+ Return
+ ------
+ str
+ Registry key
"""
return 'VisualStudio'
@@ -357,27 +517,47 @@ class RegistryInfo:
def sxs(self):
"""
Microsoft Visual Studio SxS registry key.
+
+ Return
+ ------
+ str
+ Registry key
"""
- return os.path.join(self.visualstudio, 'SxS')
+ return join(self.visualstudio, 'SxS')
@property
def vc(self):
"""
Microsoft Visual C++ VC7 registry key.
+
+ Return
+ ------
+ str
+ Registry key
"""
- return os.path.join(self.sxs, 'VC7')
+ return join(self.sxs, 'VC7')
@property
def vs(self):
"""
Microsoft Visual Studio VS7 registry key.
+
+ Return
+ ------
+ str
+ Registry key
"""
- return os.path.join(self.sxs, 'VS7')
+ return join(self.sxs, 'VS7')
@property
def vc_for_python(self):
"""
Microsoft Visual C++ for Python registry key.
+
+ Return
+ ------
+ str
+ Registry key
"""
return r'DevDiv\VCForPython'
@@ -385,6 +565,11 @@ class RegistryInfo:
def microsoft_sdk(self):
"""
Microsoft SDK registry key.
+
+ Return
+ ------
+ str
+ Registry key
"""
return 'Microsoft SDKs'
@@ -392,20 +577,35 @@ class RegistryInfo:
def windows_sdk(self):
"""
Microsoft Windows/Platform SDK registry key.
+
+ Return
+ ------
+ str
+ Registry key
"""
- return os.path.join(self.microsoft_sdk, 'Windows')
+ return join(self.microsoft_sdk, 'Windows')
@property
def netfx_sdk(self):
"""
Microsoft .NET Framework SDK registry key.
+
+ Return
+ ------
+ str
+ Registry key
"""
- return os.path.join(self.microsoft_sdk, 'NETFXSDK')
+ return join(self.microsoft_sdk, 'NETFXSDK')
@property
def windows_kits_roots(self):
"""
Microsoft Windows Kits Roots registry key.
+
+ Return
+ ------
+ str
+ Registry key
"""
return r'Windows Kits\Installed Roots'
@@ -422,10 +622,11 @@ class RegistryInfo:
Return
------
- str: value
+ str
+ Registry key
"""
node64 = '' if self.pi.current_is_x86() or x86 else 'Wow6432Node'
- return os.path.join('Software', node64, 'Microsoft', key)
+ return join('Software', node64, 'Microsoft', key)
def lookup(self, key, name):
"""
@@ -440,18 +641,21 @@ class RegistryInfo:
Return
------
- str: value
+ str
+ value
"""
- KEY_READ = winreg.KEY_READ
+ key_read = winreg.KEY_READ
openkey = winreg.OpenKey
+ closekey = winreg.CloseKey
ms = self.microsoft
for hkey in self.HKEYS:
+ bkey = None
try:
- bkey = openkey(hkey, ms(key), 0, KEY_READ)
+ bkey = openkey(hkey, ms(key), 0, key_read)
except (OSError, IOError):
if not self.pi.current_is_x86():
try:
- bkey = openkey(hkey, ms(key, True), 0, KEY_READ)
+ bkey = openkey(hkey, ms(key, True), 0, key_read)
except (OSError, IOError):
continue
else:
@@ -460,11 +664,14 @@ class RegistryInfo:
return winreg.QueryValueEx(bkey, name)[0]
except (OSError, IOError):
pass
+ finally:
+ if bkey:
+ closekey(bkey)
class SystemInfo:
"""
- Microsoft Windows and Visual Studio related system inormations.
+ Microsoft Windows and Visual Studio related system information.
Parameters
----------
@@ -475,83 +682,160 @@ class SystemInfo:
"""
# Variables and properties in this class use originals CamelCase variables
- # names from Microsoft source files for more easy comparaison.
- WinDir = safe_env.get('WinDir', '')
- ProgramFiles = safe_env.get('ProgramFiles', '')
- ProgramFilesx86 = safe_env.get('ProgramFiles(x86)', ProgramFiles)
+ # names from Microsoft source files for more easy comparison.
+ WinDir = environ.get('WinDir', '')
+ ProgramFiles = environ.get('ProgramFiles', '')
+ ProgramFilesx86 = environ.get('ProgramFiles(x86)', ProgramFiles)
def __init__(self, registry_info, vc_ver=None):
self.ri = registry_info
self.pi = self.ri.pi
- self.vc_ver = vc_ver or self._find_latest_available_vc_ver()
- def _find_latest_available_vc_ver(self):
- try:
- return self.find_available_vc_vers()[-1]
- except IndexError:
- err = 'No Microsoft Visual C++ version found'
- raise distutils.errors.DistutilsPlatformError(err)
+ self.known_vs_paths = self.find_programdata_vs_vers()
+
+ # Except for VS15+, VC version is aligned with VS version
+ self.vs_ver = self.vc_ver = (
+ vc_ver or self._find_latest_available_vs_ver())
- def find_available_vc_vers(self):
+ def _find_latest_available_vs_ver(self):
"""
- Find all available Microsoft Visual C++ versions.
+ Find the latest VC version
+
+ Return
+ ------
+ float
+ version
+ """
+ reg_vc_vers = self.find_reg_vs_vers()
+
+ if not (reg_vc_vers or self.known_vs_paths):
+ raise distutils.errors.DistutilsPlatformError(
+ 'No Microsoft Visual C++ version found')
+
+ vc_vers = set(reg_vc_vers)
+ vc_vers.update(self.known_vs_paths)
+ return sorted(vc_vers)[-1]
+
+ def find_reg_vs_vers(self):
+ """
+ Find Microsoft Visual Studio versions available in registry.
+
+ Return
+ ------
+ list of float
+ Versions
"""
ms = self.ri.microsoft
vckeys = (self.ri.vc, self.ri.vc_for_python, self.ri.vs)
- vc_vers = []
- for hkey in self.ri.HKEYS:
- for key in vckeys:
- try:
- bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ)
- except (OSError, IOError):
- continue
+ vs_vers = []
+ for hkey, key in itertools.product(self.ri.HKEYS, vckeys):
+ try:
+ bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ)
+ except (OSError, IOError):
+ continue
+ with bkey:
subkeys, values, _ = winreg.QueryInfoKey(bkey)
for i in range(values):
- try:
+ with contextlib.suppress(ValueError):
ver = float(winreg.EnumValue(bkey, i)[0])
- if ver not in vc_vers:
- vc_vers.append(ver)
- except ValueError:
- pass
+ if ver not in vs_vers:
+ vs_vers.append(ver)
for i in range(subkeys):
- try:
+ with contextlib.suppress(ValueError):
ver = float(winreg.EnumKey(bkey, i))
- if ver not in vc_vers:
- vc_vers.append(ver)
- except ValueError:
- pass
- return sorted(vc_vers)
+ if ver not in vs_vers:
+ vs_vers.append(ver)
+ return sorted(vs_vers)
+
+ def find_programdata_vs_vers(self):
+ r"""
+ Find Visual studio 2017+ versions from information in
+ "C:\ProgramData\Microsoft\VisualStudio\Packages\_Instances".
+
+ Return
+ ------
+ dict
+ float version as key, path as value.
+ """
+ vs_versions = {}
+ instances_dir = \
+ r'C:\ProgramData\Microsoft\VisualStudio\Packages\_Instances'
+
+ try:
+ hashed_names = listdir(instances_dir)
+
+ except (OSError, IOError):
+            # Directory does not exist with all Visual Studio versions
+ return vs_versions
+
+ for name in hashed_names:
+ try:
+ # Get VS installation path from "state.json" file
+ state_path = join(instances_dir, name, 'state.json')
+ with open(state_path, 'rt', encoding='utf-8') as state_file:
+ state = json.load(state_file)
+ vs_path = state['installationPath']
+
+ # Raises OSError if this VS installation does not contain VC
+ listdir(join(vs_path, r'VC\Tools\MSVC'))
+
+ # Store version and path
+ vs_versions[self._as_float_version(
+ state['installationVersion'])] = vs_path
+
+ except (OSError, IOError, KeyError):
+ # Skip if "state.json" file is missing or bad format
+ continue
+
+ return vs_versions
+
+ @staticmethod
+ def _as_float_version(version):
+ """
+ Return a string version as a simplified float version (major.minor)
+
+ Parameters
+ ----------
+ version: str
+ Version.
+
+ Return
+ ------
+ float
+ version
+ """
+ return float('.'.join(version.split('.')[:2]))
@property
def VSInstallDir(self):
"""
Microsoft Visual Studio directory.
+
+ Return
+ ------
+ str
+ path
"""
# Default path
- name = 'Microsoft Visual Studio %0.1f' % self.vc_ver
- default = os.path.join(self.ProgramFilesx86, name)
+ default = join(self.ProgramFilesx86,
+ 'Microsoft Visual Studio %0.1f' % self.vs_ver)
# Try to get path from registry, if fail use default path
- return self.ri.lookup(self.ri.vs, '%0.1f' % self.vc_ver) or default
+ return self.ri.lookup(self.ri.vs, '%0.1f' % self.vs_ver) or default
@property
def VCInstallDir(self):
"""
Microsoft Visual C++ directory.
- """
- self.VSInstallDir
-
- guess_vc = self._guess_vc() or self._guess_vc_legacy()
- # Try to get "VC++ for Python" path from registry as default path
- reg_path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver)
- python_vc = self.ri.lookup(reg_path, 'installdir')
- default_vc = os.path.join(python_vc, 'VC') if python_vc else guess_vc
-
- # Try to get path from registry, if fail use default path
- path = self.ri.lookup(self.ri.vc, '%0.1f' % self.vc_ver) or default_vc
+ Return
+ ------
+ str
+ path
+ """
+ path = self._guess_vc() or self._guess_vc_legacy()
- if not os.path.isdir(path):
+ if not isdir(path):
msg = 'Microsoft Visual C++ directory not found'
raise distutils.errors.DistutilsPlatformError(msg)
@@ -559,186 +843,256 @@ class SystemInfo:
def _guess_vc(self):
"""
- Locate Visual C for 2017
+ Locate Visual C++ for VS2017+.
+
+ Return
+ ------
+ str
+ path
"""
- if self.vc_ver <= 14.0:
- return
+ if self.vs_ver <= 14.0:
+ return ''
+
+ try:
+ # First search in known VS paths
+ vs_dir = self.known_vs_paths[self.vs_ver]
+ except KeyError:
+ # Else, search with path from registry
+ vs_dir = self.VSInstallDir
+
+ guess_vc = join(vs_dir, r'VC\Tools\MSVC')
- default = r'VC\Tools\MSVC'
- guess_vc = os.path.join(self.VSInstallDir, default)
# Subdir with VC exact version as name
try:
- vc_exact_ver = os.listdir(guess_vc)[-1]
- return os.path.join(guess_vc, vc_exact_ver)
+ # Update the VC version with real one instead of VS version
+ vc_ver = listdir(guess_vc)[-1]
+ self.vc_ver = self._as_float_version(vc_ver)
+ return join(guess_vc, vc_ver)
except (OSError, IOError, IndexError):
- pass
+ return ''
def _guess_vc_legacy(self):
"""
- Locate Visual C for versions prior to 2017
+ Locate Visual C++ for versions prior to 2017.
+
+ Return
+ ------
+ str
+ path
"""
- default = r'Microsoft Visual Studio %0.1f\VC' % self.vc_ver
- return os.path.join(self.ProgramFilesx86, default)
+ default = join(self.ProgramFilesx86,
+ r'Microsoft Visual Studio %0.1f\VC' % self.vs_ver)
+
+ # Try to get "VC++ for Python" path from registry as default path
+ reg_path = join(self.ri.vc_for_python, '%0.1f' % self.vs_ver)
+ python_vc = self.ri.lookup(reg_path, 'installdir')
+ default_vc = join(python_vc, 'VC') if python_vc else default
+
+ # Try to get path from registry, if fail use default path
+ return self.ri.lookup(self.ri.vc, '%0.1f' % self.vs_ver) or default_vc
@property
def WindowsSdkVersion(self):
"""
Microsoft Windows SDK versions for specified MSVC++ version.
- """
- if self.vc_ver <= 9.0:
- return ('7.0', '6.1', '6.0a')
- elif self.vc_ver == 10.0:
- return ('7.1', '7.0a')
- elif self.vc_ver == 11.0:
- return ('8.0', '8.0a')
- elif self.vc_ver == 12.0:
- return ('8.1', '8.1a')
- elif self.vc_ver >= 14.0:
- return ('10.0', '8.1')
+
+ Return
+ ------
+ tuple of str
+ versions
+ """
+ if self.vs_ver <= 9.0:
+ return '7.0', '6.1', '6.0a'
+ elif self.vs_ver == 10.0:
+ return '7.1', '7.0a'
+ elif self.vs_ver == 11.0:
+ return '8.0', '8.0a'
+ elif self.vs_ver == 12.0:
+ return '8.1', '8.1a'
+ elif self.vs_ver >= 14.0:
+ return '10.0', '8.1'
@property
def WindowsSdkLastVersion(self):
"""
- Microsoft Windows SDK last version
+ Microsoft Windows SDK last version.
+
+ Return
+ ------
+ str
+ version
"""
- return self._use_last_dir_name(os.path.join(
- self.WindowsSdkDir, 'lib'))
+ return self._use_last_dir_name(join(self.WindowsSdkDir, 'lib'))
- @property
- def WindowsSdkDir(self):
+ @property # noqa: C901
+ def WindowsSdkDir(self): # noqa: C901 # is too complex (12) # FIXME
"""
Microsoft Windows SDK directory.
+
+ Return
+ ------
+ str
+ path
"""
sdkdir = ''
for ver in self.WindowsSdkVersion:
# Try to get it from registry
- loc = os.path.join(self.ri.windows_sdk, 'v%s' % ver)
+ loc = join(self.ri.windows_sdk, 'v%s' % ver)
sdkdir = self.ri.lookup(loc, 'installationfolder')
if sdkdir:
break
- if not sdkdir or not os.path.isdir(sdkdir):
+ if not sdkdir or not isdir(sdkdir):
# Try to get "VC++ for Python" version from registry
- path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver)
+ path = join(self.ri.vc_for_python, '%0.1f' % self.vc_ver)
install_base = self.ri.lookup(path, 'installdir')
if install_base:
- sdkdir = os.path.join(install_base, 'WinSDK')
- if not sdkdir or not os.path.isdir(sdkdir):
+ sdkdir = join(install_base, 'WinSDK')
+ if not sdkdir or not isdir(sdkdir):
# If fail, use default new path
for ver in self.WindowsSdkVersion:
intver = ver[:ver.rfind('.')]
- path = r'Microsoft SDKs\Windows Kits\%s' % (intver)
- d = os.path.join(self.ProgramFiles, path)
- if os.path.isdir(d):
+ path = r'Microsoft SDKs\Windows Kits\%s' % intver
+ d = join(self.ProgramFiles, path)
+ if isdir(d):
sdkdir = d
- if not sdkdir or not os.path.isdir(sdkdir):
+ if not sdkdir or not isdir(sdkdir):
# If fail, use default old path
for ver in self.WindowsSdkVersion:
path = r'Microsoft SDKs\Windows\v%s' % ver
- d = os.path.join(self.ProgramFiles, path)
- if os.path.isdir(d):
+ d = join(self.ProgramFiles, path)
+ if isdir(d):
sdkdir = d
if not sdkdir:
# If fail, use Platform SDK
- sdkdir = os.path.join(self.VCInstallDir, 'PlatformSDK')
+ sdkdir = join(self.VCInstallDir, 'PlatformSDK')
return sdkdir
@property
def WindowsSDKExecutablePath(self):
"""
Microsoft Windows SDK executable directory.
+
+ Return
+ ------
+ str
+ path
"""
# Find WinSDK NetFx Tools registry dir name
- if self.vc_ver <= 11.0:
+ if self.vs_ver <= 11.0:
netfxver = 35
arch = ''
else:
netfxver = 40
- hidex86 = True if self.vc_ver <= 12.0 else False
+ hidex86 = True if self.vs_ver <= 12.0 else False
arch = self.pi.current_dir(x64=True, hidex86=hidex86)
fx = 'WinSDK-NetFx%dTools%s' % (netfxver, arch.replace('\\', '-'))
- # liste all possibles registry paths
+ # list all possibles registry paths
regpaths = []
- if self.vc_ver >= 14.0:
+ if self.vs_ver >= 14.0:
for ver in self.NetFxSdkVersion:
- regpaths += [os.path.join(self.ri.netfx_sdk, ver, fx)]
+ regpaths += [join(self.ri.netfx_sdk, ver, fx)]
for ver in self.WindowsSdkVersion:
- regpaths += [os.path.join(self.ri.windows_sdk, 'v%sA' % ver, fx)]
+ regpaths += [join(self.ri.windows_sdk, 'v%sA' % ver, fx)]
# Return installation folder from the more recent path
for path in regpaths:
execpath = self.ri.lookup(path, 'installationfolder')
if execpath:
- break
- return execpath
+ return execpath
@property
def FSharpInstallDir(self):
"""
Microsoft Visual F# directory.
+
+ Return
+ ------
+ str
+ path
"""
- path = r'%0.1f\Setup\F#' % self.vc_ver
- path = os.path.join(self.ri.visualstudio, path)
+ path = join(self.ri.visualstudio, r'%0.1f\Setup\F#' % self.vs_ver)
return self.ri.lookup(path, 'productdir') or ''
@property
def UniversalCRTSdkDir(self):
"""
Microsoft Universal CRT SDK directory.
+
+ Return
+ ------
+ str
+ path
"""
# Set Kit Roots versions for specified MSVC++ version
- if self.vc_ver >= 14.0:
- vers = ('10', '81')
- else:
- vers = ()
+ vers = ('10', '81') if self.vs_ver >= 14.0 else ()
# Find path of the more recent Kit
for ver in vers:
sdkdir = self.ri.lookup(self.ri.windows_kits_roots,
'kitsroot%s' % ver)
if sdkdir:
- break
- return sdkdir or ''
+ return sdkdir or ''
@property
def UniversalCRTSdkLastVersion(self):
"""
- Microsoft Universal C Runtime SDK last version
+ Microsoft Universal C Runtime SDK last version.
+
+ Return
+ ------
+ str
+ version
"""
- return self._use_last_dir_name(os.path.join(
- self.UniversalCRTSdkDir, 'lib'))
+ return self._use_last_dir_name(join(self.UniversalCRTSdkDir, 'lib'))
@property
def NetFxSdkVersion(self):
"""
Microsoft .NET Framework SDK versions.
+
+ Return
+ ------
+ tuple of str
+ versions
"""
- # Set FxSdk versions for specified MSVC++ version
- if self.vc_ver >= 14.0:
- return ('4.6.1', '4.6')
- else:
- return ()
+ # Set FxSdk versions for specified VS version
+ return (('4.7.2', '4.7.1', '4.7',
+ '4.6.2', '4.6.1', '4.6',
+ '4.5.2', '4.5.1', '4.5')
+ if self.vs_ver >= 14.0 else ())
@property
def NetFxSdkDir(self):
"""
Microsoft .NET Framework SDK directory.
+
+ Return
+ ------
+ str
+ path
"""
+ sdkdir = ''
for ver in self.NetFxSdkVersion:
- loc = os.path.join(self.ri.netfx_sdk, ver)
+ loc = join(self.ri.netfx_sdk, ver)
sdkdir = self.ri.lookup(loc, 'kitsinstallationfolder')
if sdkdir:
break
- return sdkdir or ''
+ return sdkdir
@property
def FrameworkDir32(self):
"""
Microsoft .NET Framework 32bit directory.
+
+ Return
+ ------
+ str
+ path
"""
# Default path
- guess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework')
+ guess_fw = join(self.WinDir, r'Microsoft.NET\Framework')
# Try to get path from registry, if fail use default path
return self.ri.lookup(self.ri.vc, 'frameworkdir32') or guess_fw
@@ -747,9 +1101,14 @@ class SystemInfo:
def FrameworkDir64(self):
"""
Microsoft .NET Framework 64bit directory.
+
+ Return
+ ------
+ str
+ path
"""
# Default path
- guess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework64')
+ guess_fw = join(self.WinDir, r'Microsoft.NET\Framework64')
# Try to get path from registry, if fail use default path
return self.ri.lookup(self.ri.vc, 'frameworkdir64') or guess_fw
@@ -758,6 +1117,11 @@ class SystemInfo:
def FrameworkVersion32(self):
"""
Microsoft .NET Framework 32bit versions.
+
+ Return
+ ------
+ tuple of str
+ versions
"""
return self._find_dot_net_versions(32)
@@ -765,6 +1129,11 @@ class SystemInfo:
def FrameworkVersion64(self):
"""
Microsoft .NET Framework 64bit versions.
+
+ Return
+ ------
+ tuple of str
+ versions
"""
return self._find_dot_net_versions(64)
@@ -776,6 +1145,11 @@ class SystemInfo:
----------
bits: int
Platform number of bits: 32 or 64.
+
+ Return
+ ------
+ tuple of str
+ versions
"""
# Find actual .NET version in registry
reg_ver = self.ri.lookup(self.ri.vc, 'frameworkver%d' % bits)
@@ -783,18 +1157,17 @@ class SystemInfo:
ver = reg_ver or self._use_last_dir_name(dot_net_dir, 'v') or ''
# Set .NET versions for specified MSVC++ version
- if self.vc_ver >= 12.0:
- frameworkver = (ver, 'v4.0')
- elif self.vc_ver >= 10.0:
- frameworkver = ('v4.0.30319' if ver.lower()[:2] != 'v4' else ver,
- 'v3.5')
- elif self.vc_ver == 9.0:
- frameworkver = ('v3.5', 'v2.0.50727')
- if self.vc_ver == 8.0:
- frameworkver = ('v3.0', 'v2.0.50727')
- return frameworkver
-
- def _use_last_dir_name(self, path, prefix=''):
+ if self.vs_ver >= 12.0:
+ return ver, 'v4.0'
+ elif self.vs_ver >= 10.0:
+ return 'v4.0.30319' if ver.lower()[:2] != 'v4' else ver, 'v3.5'
+ elif self.vs_ver == 9.0:
+ return 'v3.5', 'v2.0.50727'
+ elif self.vs_ver == 8.0:
+ return 'v3.0', 'v2.0.50727'
+
+ @staticmethod
+ def _use_last_dir_name(path, prefix=''):
"""
Return name of the last dir in path or '' if no dir found.
@@ -803,12 +1176,17 @@ class SystemInfo:
path: str
Use dirs in this path
prefix: str
- Use only dirs startings by this prefix
+ Use only dirs starting by this prefix
+
+ Return
+ ------
+ str
+ name
"""
matching_dirs = (
dir_name
- for dir_name in reversed(os.listdir(path))
- if os.path.isdir(os.path.join(path, dir_name)) and
+ for dir_name in reversed(listdir(path))
+ if isdir(join(path, dir_name)) and
dir_name.startswith(prefix)
)
return next(matching_dirs, None) or ''
@@ -819,7 +1197,7 @@ class EnvironmentInfo:
Return environment variables for specified Microsoft Visual C++ version
and platform : Lib, Include, Path and libpath.
- This function is compatible with Microsoft Visual C++ 9.0 to 14.0.
+ This function is compatible with Microsoft Visual C++ 9.0 to 14.X.
Script created by analysing Microsoft environment configuration files like
"vcvars[...].bat", "SetEnv.Cmd", "vcbuildtools.bat", ...
@@ -836,7 +1214,7 @@ class EnvironmentInfo:
"""
# Variables and properties in this class use originals CamelCase variables
- # names from Microsoft source files for more easy comparaison.
+ # names from Microsoft source files for more easy comparison.
def __init__(self, arch, vc_ver=None, vc_min_ver=0):
self.pi = PlatformInfo(arch)
@@ -848,204 +1226,257 @@ class EnvironmentInfo:
raise distutils.errors.DistutilsPlatformError(err)
@property
+ def vs_ver(self):
+ """
+ Microsoft Visual Studio.
+
+ Return
+ ------
+ float
+ version
+ """
+ return self.si.vs_ver
+
+ @property
def vc_ver(self):
"""
Microsoft Visual C++ version.
+
+ Return
+ ------
+ float
+ version
"""
return self.si.vc_ver
@property
def VSTools(self):
"""
- Microsoft Visual Studio Tools
+ Microsoft Visual Studio Tools.
+
+ Return
+ ------
+ list of str
+ paths
"""
paths = [r'Common7\IDE', r'Common7\Tools']
- if self.vc_ver >= 14.0:
+ if self.vs_ver >= 14.0:
arch_subdir = self.pi.current_dir(hidex86=True, x64=True)
paths += [r'Common7\IDE\CommonExtensions\Microsoft\TestWindow']
paths += [r'Team Tools\Performance Tools']
paths += [r'Team Tools\Performance Tools%s' % arch_subdir]
- return [os.path.join(self.si.VSInstallDir, path) for path in paths]
+ return [join(self.si.VSInstallDir, path) for path in paths]
@property
def VCIncludes(self):
"""
- Microsoft Visual C++ & Microsoft Foundation Class Includes
+ Microsoft Visual C++ & Microsoft Foundation Class Includes.
+
+ Return
+ ------
+ list of str
+ paths
"""
- return [os.path.join(self.si.VCInstallDir, 'Include'),
- os.path.join(self.si.VCInstallDir, r'ATLMFC\Include')]
+ return [join(self.si.VCInstallDir, 'Include'),
+ join(self.si.VCInstallDir, r'ATLMFC\Include')]
@property
def VCLibraries(self):
"""
- Microsoft Visual C++ & Microsoft Foundation Class Libraries
+ Microsoft Visual C++ & Microsoft Foundation Class Libraries.
+
+ Return
+ ------
+ list of str
+ paths
"""
- if self.vc_ver >= 15.0:
+ if self.vs_ver >= 15.0:
arch_subdir = self.pi.target_dir(x64=True)
else:
arch_subdir = self.pi.target_dir(hidex86=True)
paths = ['Lib%s' % arch_subdir, r'ATLMFC\Lib%s' % arch_subdir]
- if self.vc_ver >= 14.0:
+ if self.vs_ver >= 14.0:
paths += [r'Lib\store%s' % arch_subdir]
- return [os.path.join(self.si.VCInstallDir, path) for path in paths]
+ return [join(self.si.VCInstallDir, path) for path in paths]
@property
def VCStoreRefs(self):
"""
- Microsoft Visual C++ store references Libraries
+ Microsoft Visual C++ store references Libraries.
+
+ Return
+ ------
+ list of str
+ paths
"""
- if self.vc_ver < 14.0:
+ if self.vs_ver < 14.0:
return []
- return [os.path.join(self.si.VCInstallDir, r'Lib\store\references')]
+ return [join(self.si.VCInstallDir, r'Lib\store\references')]
@property
def VCTools(self):
"""
- Microsoft Visual C++ Tools
+ Microsoft Visual C++ Tools.
+
+ Return
+ ------
+ list of str
+ paths
"""
si = self.si
- tools = [os.path.join(si.VCInstallDir, 'VCPackages')]
+ tools = [join(si.VCInstallDir, 'VCPackages')]
- forcex86 = True if self.vc_ver <= 10.0 else False
+ forcex86 = True if self.vs_ver <= 10.0 else False
arch_subdir = self.pi.cross_dir(forcex86)
if arch_subdir:
- tools += [os.path.join(si.VCInstallDir, 'Bin%s' % arch_subdir)]
+ tools += [join(si.VCInstallDir, 'Bin%s' % arch_subdir)]
- if self.vc_ver == 14.0:
+ if self.vs_ver == 14.0:
path = 'Bin%s' % self.pi.current_dir(hidex86=True)
- tools += [os.path.join(si.VCInstallDir, path)]
+ tools += [join(si.VCInstallDir, path)]
- elif self.vc_ver >= 15.0:
+ elif self.vs_ver >= 15.0:
host_dir = (r'bin\HostX86%s' if self.pi.current_is_x86() else
r'bin\HostX64%s')
- tools += [os.path.join(
+ tools += [join(
si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))]
if self.pi.current_cpu != self.pi.target_cpu:
- tools += [os.path.join(
+ tools += [join(
si.VCInstallDir, host_dir % self.pi.current_dir(x64=True))]
else:
- tools += [os.path.join(si.VCInstallDir, 'Bin')]
+ tools += [join(si.VCInstallDir, 'Bin')]
return tools
@property
def OSLibraries(self):
"""
- Microsoft Windows SDK Libraries
+ Microsoft Windows SDK Libraries.
+
+ Return
+ ------
+ list of str
+ paths
"""
- if self.vc_ver <= 10.0:
+ if self.vs_ver <= 10.0:
arch_subdir = self.pi.target_dir(hidex86=True, x64=True)
- return [os.path.join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)]
+ return [join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)]
else:
arch_subdir = self.pi.target_dir(x64=True)
- lib = os.path.join(self.si.WindowsSdkDir, 'lib')
+ lib = join(self.si.WindowsSdkDir, 'lib')
libver = self._sdk_subdir
- return [os.path.join(lib, '%sum%s' % (libver , arch_subdir))]
+ return [join(lib, '%sum%s' % (libver, arch_subdir))]
@property
def OSIncludes(self):
"""
- Microsoft Windows SDK Include
+ Microsoft Windows SDK Include.
+
+ Return
+ ------
+ list of str
+ paths
"""
- include = os.path.join(self.si.WindowsSdkDir, 'include')
+ include = join(self.si.WindowsSdkDir, 'include')
- if self.vc_ver <= 10.0:
- return [include, os.path.join(include, 'gl')]
+ if self.vs_ver <= 10.0:
+ return [include, join(include, 'gl')]
else:
- if self.vc_ver >= 14.0:
+ if self.vs_ver >= 14.0:
sdkver = self._sdk_subdir
else:
sdkver = ''
- return [os.path.join(include, '%sshared' % sdkver),
- os.path.join(include, '%sum' % sdkver),
- os.path.join(include, '%swinrt' % sdkver)]
+ return [join(include, '%sshared' % sdkver),
+ join(include, '%sum' % sdkver),
+ join(include, '%swinrt' % sdkver)]
@property
def OSLibpath(self):
"""
- Microsoft Windows SDK Libraries Paths
+ Microsoft Windows SDK Libraries Paths.
+
+ Return
+ ------
+ list of str
+ paths
"""
- ref = os.path.join(self.si.WindowsSdkDir, 'References')
+ ref = join(self.si.WindowsSdkDir, 'References')
libpath = []
- if self.vc_ver <= 9.0:
+ if self.vs_ver <= 9.0:
libpath += self.OSLibraries
- if self.vc_ver >= 11.0:
- libpath += [os.path.join(ref, r'CommonConfiguration\Neutral')]
+ if self.vs_ver >= 11.0:
+ libpath += [join(ref, r'CommonConfiguration\Neutral')]
- if self.vc_ver >= 14.0:
+ if self.vs_ver >= 14.0:
libpath += [
ref,
- os.path.join(self.si.WindowsSdkDir, 'UnionMetadata'),
- os.path.join(
- ref,
- 'Windows.Foundation.UniversalApiContract',
- '1.0.0.0',
- ),
- os.path.join(
- ref,
- 'Windows.Foundation.FoundationContract',
- '1.0.0.0',
- ),
- os.path.join(
- ref,
- 'Windows.Networking.Connectivity.WwanContract',
- '1.0.0.0',
- ),
- os.path.join(
- self.si.WindowsSdkDir,
- 'ExtensionSDKs',
- 'Microsoft.VCLibs',
- '%0.1f' % self.vc_ver,
- 'References',
- 'CommonConfiguration',
- 'neutral',
- ),
+ join(self.si.WindowsSdkDir, 'UnionMetadata'),
+ join(
+ ref, 'Windows.Foundation.UniversalApiContract', '1.0.0.0'),
+ join(ref, 'Windows.Foundation.FoundationContract', '1.0.0.0'),
+ join(
+ ref, 'Windows.Networking.Connectivity.WwanContract',
+ '1.0.0.0'),
+ join(
+ self.si.WindowsSdkDir, 'ExtensionSDKs', 'Microsoft.VCLibs',
+ '%0.1f' % self.vs_ver, 'References', 'CommonConfiguration',
+ 'neutral'),
]
return libpath
@property
def SdkTools(self):
"""
- Microsoft Windows SDK Tools
+ Microsoft Windows SDK Tools.
+
+ Return
+ ------
+ list of str
+ paths
"""
return list(self._sdk_tools())
def _sdk_tools(self):
"""
- Microsoft Windows SDK Tools paths generator
+ Microsoft Windows SDK Tools paths generator.
+
+ Return
+ ------
+ generator of str
+ paths
"""
- if self.vc_ver < 15.0:
- bin_dir = 'Bin' if self.vc_ver <= 11.0 else r'Bin\x86'
- yield os.path.join(self.si.WindowsSdkDir, bin_dir)
+ if self.vs_ver < 15.0:
+ bin_dir = 'Bin' if self.vs_ver <= 11.0 else r'Bin\x86'
+ yield join(self.si.WindowsSdkDir, bin_dir)
if not self.pi.current_is_x86():
arch_subdir = self.pi.current_dir(x64=True)
path = 'Bin%s' % arch_subdir
- yield os.path.join(self.si.WindowsSdkDir, path)
+ yield join(self.si.WindowsSdkDir, path)
- if self.vc_ver == 10.0 or self.vc_ver == 11.0:
+ if self.vs_ver in (10.0, 11.0):
if self.pi.target_is_x86():
arch_subdir = ''
else:
arch_subdir = self.pi.current_dir(hidex86=True, x64=True)
path = r'Bin\NETFX 4.0 Tools%s' % arch_subdir
- yield os.path.join(self.si.WindowsSdkDir, path)
+ yield join(self.si.WindowsSdkDir, path)
- elif self.vc_ver >= 15.0:
- path = os.path.join(self.si.WindowsSdkDir, 'Bin')
+ elif self.vs_ver >= 15.0:
+ path = join(self.si.WindowsSdkDir, 'Bin')
arch_subdir = self.pi.current_dir(x64=True)
sdkver = self.si.WindowsSdkLastVersion
- yield os.path.join(path, '%s%s' % (sdkver, arch_subdir))
+ yield join(path, '%s%s' % (sdkver, arch_subdir))
if self.si.WindowsSDKExecutablePath:
yield self.si.WindowsSDKExecutablePath
@@ -1053,7 +1484,12 @@ class EnvironmentInfo:
@property
def _sdk_subdir(self):
"""
- Microsoft Windows SDK version subdir
+ Microsoft Windows SDK version subdir.
+
+ Return
+ ------
+ str
+ subdir
"""
ucrtver = self.si.WindowsSdkLastVersion
return ('%s\\' % ucrtver) if ucrtver else ''
@@ -1061,22 +1497,32 @@ class EnvironmentInfo:
@property
def SdkSetup(self):
"""
- Microsoft Windows SDK Setup
+ Microsoft Windows SDK Setup.
+
+ Return
+ ------
+ list of str
+ paths
"""
- if self.vc_ver > 9.0:
+ if self.vs_ver > 9.0:
return []
- return [os.path.join(self.si.WindowsSdkDir, 'Setup')]
+ return [join(self.si.WindowsSdkDir, 'Setup')]
@property
def FxTools(self):
"""
- Microsoft .NET Framework Tools
+ Microsoft .NET Framework Tools.
+
+ Return
+ ------
+ list of str
+ paths
"""
pi = self.pi
si = self.si
- if self.vc_ver <= 10.0:
+ if self.vs_ver <= 10.0:
include32 = True
include64 = not pi.target_is_x86() and not pi.current_is_x86()
else:
@@ -1085,102 +1531,142 @@ class EnvironmentInfo:
tools = []
if include32:
- tools += [os.path.join(si.FrameworkDir32, ver)
+ tools += [join(si.FrameworkDir32, ver)
for ver in si.FrameworkVersion32]
if include64:
- tools += [os.path.join(si.FrameworkDir64, ver)
+ tools += [join(si.FrameworkDir64, ver)
for ver in si.FrameworkVersion64]
return tools
@property
def NetFxSDKLibraries(self):
"""
- Microsoft .Net Framework SDK Libraries
+ Microsoft .Net Framework SDK Libraries.
+
+ Return
+ ------
+ list of str
+ paths
"""
- if self.vc_ver < 14.0 or not self.si.NetFxSdkDir:
+ if self.vs_ver < 14.0 or not self.si.NetFxSdkDir:
return []
arch_subdir = self.pi.target_dir(x64=True)
- return [os.path.join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)]
+ return [join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)]
@property
def NetFxSDKIncludes(self):
"""
- Microsoft .Net Framework SDK Includes
+ Microsoft .Net Framework SDK Includes.
+
+ Return
+ ------
+ list of str
+ paths
"""
- if self.vc_ver < 14.0 or not self.si.NetFxSdkDir:
+ if self.vs_ver < 14.0 or not self.si.NetFxSdkDir:
return []
- return [os.path.join(self.si.NetFxSdkDir, r'include\um')]
+ return [join(self.si.NetFxSdkDir, r'include\um')]
@property
def VsTDb(self):
"""
- Microsoft Visual Studio Team System Database
+ Microsoft Visual Studio Team System Database.
+
+ Return
+ ------
+ list of str
+ paths
"""
- return [os.path.join(self.si.VSInstallDir, r'VSTSDB\Deploy')]
+ return [join(self.si.VSInstallDir, r'VSTSDB\Deploy')]
@property
def MSBuild(self):
"""
- Microsoft Build Engine
+ Microsoft Build Engine.
+
+ Return
+ ------
+ list of str
+ paths
"""
- if self.vc_ver < 12.0:
+ if self.vs_ver < 12.0:
return []
- elif self.vc_ver < 15.0:
+ elif self.vs_ver < 15.0:
base_path = self.si.ProgramFilesx86
arch_subdir = self.pi.current_dir(hidex86=True)
else:
base_path = self.si.VSInstallDir
arch_subdir = ''
- path = r'MSBuild\%0.1f\bin%s' % (self.vc_ver, arch_subdir)
- build = [os.path.join(base_path, path)]
+ path = r'MSBuild\%0.1f\bin%s' % (self.vs_ver, arch_subdir)
+ build = [join(base_path, path)]
- if self.vc_ver >= 15.0:
+ if self.vs_ver >= 15.0:
# Add Roslyn C# & Visual Basic Compiler
- build += [os.path.join(base_path, path, 'Roslyn')]
+ build += [join(base_path, path, 'Roslyn')]
return build
@property
def HTMLHelpWorkshop(self):
"""
- Microsoft HTML Help Workshop
+ Microsoft HTML Help Workshop.
+
+ Return
+ ------
+ list of str
+ paths
"""
- if self.vc_ver < 11.0:
+ if self.vs_ver < 11.0:
return []
- return [os.path.join(self.si.ProgramFilesx86, 'HTML Help Workshop')]
+ return [join(self.si.ProgramFilesx86, 'HTML Help Workshop')]
@property
def UCRTLibraries(self):
"""
- Microsoft Universal C Runtime SDK Libraries
+ Microsoft Universal C Runtime SDK Libraries.
+
+ Return
+ ------
+ list of str
+ paths
"""
- if self.vc_ver < 14.0:
+ if self.vs_ver < 14.0:
return []
arch_subdir = self.pi.target_dir(x64=True)
- lib = os.path.join(self.si.UniversalCRTSdkDir, 'lib')
+ lib = join(self.si.UniversalCRTSdkDir, 'lib')
ucrtver = self._ucrt_subdir
- return [os.path.join(lib, '%sucrt%s' % (ucrtver, arch_subdir))]
+ return [join(lib, '%sucrt%s' % (ucrtver, arch_subdir))]
@property
def UCRTIncludes(self):
"""
- Microsoft Universal C Runtime SDK Include
+ Microsoft Universal C Runtime SDK Include.
+
+ Return
+ ------
+ list of str
+ paths
"""
- if self.vc_ver < 14.0:
+ if self.vs_ver < 14.0:
return []
- include = os.path.join(self.si.UniversalCRTSdkDir, 'include')
- return [os.path.join(include, '%sucrt' % self._ucrt_subdir)]
+ include = join(self.si.UniversalCRTSdkDir, 'include')
+ return [join(include, '%sucrt' % self._ucrt_subdir)]
@property
def _ucrt_subdir(self):
"""
- Microsoft Universal C Runtime SDK version subdir
+ Microsoft Universal C Runtime SDK version subdir.
+
+ Return
+ ------
+ str
+ subdir
"""
ucrtver = self.si.UniversalCRTSdkLastVersion
return ('%s\\' % ucrtver) if ucrtver else ''
@@ -1188,31 +1674,52 @@ class EnvironmentInfo:
@property
def FSharp(self):
"""
- Microsoft Visual F#
+ Microsoft Visual F#.
+
+ Return
+ ------
+ list of str
+ paths
"""
- if self.vc_ver < 11.0 and self.vc_ver > 12.0:
+ if 11.0 > self.vs_ver > 12.0:
return []
- return self.si.FSharpInstallDir
+ return [self.si.FSharpInstallDir]
@property
def VCRuntimeRedist(self):
"""
- Microsoft Visual C++ runtime redistribuable dll
- """
- arch_subdir = self.pi.target_dir(x64=True)
- if self.vc_ver < 15:
- redist_path = self.si.VCInstallDir
- vcruntime = 'redist%s\\Microsoft.VC%d0.CRT\\vcruntime%d0.dll'
- else:
- redist_path = self.si.VCInstallDir.replace('\\Tools', '\\Redist')
- vcruntime = 'onecore%s\\Microsoft.VC%d0.CRT\\vcruntime%d0.dll'
-
- # Visual Studio 2017 is still Visual C++ 14.0
- dll_ver = 14.0 if self.vc_ver == 15 else self.vc_ver
+ Microsoft Visual C++ runtime redistributable dll.
- vcruntime = vcruntime % (arch_subdir, self.vc_ver, dll_ver)
- return os.path.join(redist_path, vcruntime)
+ Return
+ ------
+ str
+ path
+ """
+ vcruntime = 'vcruntime%d0.dll' % self.vc_ver
+ arch_subdir = self.pi.target_dir(x64=True).strip('\\')
+
+ # Installation prefixes candidates
+ prefixes = []
+ tools_path = self.si.VCInstallDir
+ redist_path = dirname(tools_path.replace(r'\Tools', r'\Redist'))
+ if isdir(redist_path):
+ # Redist version may not be exactly the same as tools
+ redist_path = join(redist_path, listdir(redist_path)[-1])
+ prefixes += [redist_path, join(redist_path, 'onecore')]
+
+ prefixes += [join(tools_path, 'redist')] # VS14 legacy path
+
+ # CRT directory
+ crt_dirs = ('Microsoft.VC%d.CRT' % (self.vc_ver * 10),
+                    # Sometimes stored in a directory named after the VS version instead of VC
+ 'Microsoft.VC%d.CRT' % (int(self.vs_ver) * 10))
+
+ # vcruntime path
+ for prefix, crt_dir in itertools.product(prefixes, crt_dirs):
+ path = join(prefix, arch_subdir, crt_dir, vcruntime)
+ if isfile(path):
+ return path
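
The rewritten VCRuntimeRedist property probes every combination of installation prefix and CRT directory and returns the first vcruntime DLL that actually exists (or None). A hedged sketch of that first-existing-candidate pattern; the prefixes and directory names below are examples, not real installations:

    import itertools
    from os.path import isfile, join

    def find_first_existing(prefixes, crt_dirs, arch_subdir, dll_name):
        # Try every (prefix, CRT dir) pair and return the first file that exists.
        for prefix, crt_dir in itertools.product(prefixes, crt_dirs):
            candidate = join(prefix, arch_subdir, crt_dir, dll_name)
            if isfile(candidate):
                return candidate
        return None  # like the property above, fall through to None when nothing matches

    prefixes = [r'C:\VS\VC\Redist\MSVC\14.16.27012',
                r'C:\VS\VC\Redist\MSVC\14.16.27012\onecore',
                r'C:\VS\VC\Tools\MSVC\14.16.27023\redist']
    crt_dirs = ['Microsoft.VC141.CRT', 'Microsoft.VC150.CRT']
    print(find_first_existing(prefixes, crt_dirs, 'x64', 'vcruntime140.dll'))
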
def return_env(self, exists=True):
"""
@@ -1222,6 +1729,11 @@ class EnvironmentInfo:
----------
exists: bool
It True, only return existing paths.
+
+ Return
+ ------
+ dict
+ environment
"""
env = dict(
include=self._build_paths('include',
@@ -1255,7 +1767,7 @@ class EnvironmentInfo:
self.FSharp],
exists),
)
- if self.vc_ver >= 14 and os.path.isfile(self.VCRuntimeRedist):
+ if self.vs_ver >= 14 and isfile(self.VCRuntimeRedist):
env['py_vcruntime_redist'] = self.VCRuntimeRedist
return env
@@ -1266,37 +1778,28 @@ class EnvironmentInfo:
unique, extant, directories from those paths and from
the environment variable. Raise an error if no paths
are resolved.
+
+ Parameters
+ ----------
+ name: str
+ Environment variable name
+ spec_path_lists: list of str
+ Paths
+ exists: bool
+ It True, only return existing paths.
+
+ Return
+ ------
+ str
+ Pathsep-separated paths
"""
# flatten spec_path_lists
spec_paths = itertools.chain.from_iterable(spec_path_lists)
- env_paths = safe_env.get(name, '').split(os.pathsep)
+ env_paths = environ.get(name, '').split(pathsep)
paths = itertools.chain(spec_paths, env_paths)
- extant_paths = list(filter(os.path.isdir, paths)) if exists else paths
+ extant_paths = list(filter(isdir, paths)) if exists else paths
if not extant_paths:
msg = "%s environment variable is empty" % name.upper()
raise distutils.errors.DistutilsPlatformError(msg)
- unique_paths = self._unique_everseen(extant_paths)
- return os.pathsep.join(unique_paths)
-
- # from Python docs
- def _unique_everseen(self, iterable, key=None):
- """
- List unique elements, preserving order.
- Remember all elements ever seen.
-
- _unique_everseen('AAAABBBCCDAABBB') --> A B C D
-
- _unique_everseen('ABBCcAD', str.lower) --> A B C D
- """
- seen = set()
- seen_add = seen.add
- if key is None:
- for element in filterfalse(seen.__contains__, iterable):
- seen_add(element)
- yield element
- else:
- for element in iterable:
- k = key(element)
- if k not in seen:
- seen_add(k)
- yield element
+ unique_paths = unique_everseen(extant_paths)
+ return pathsep.join(unique_paths)
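
_build_paths now flattens the candidate path lists, appends the current environment variable, keeps only existing directories, de-duplicates with unique_everseen (from the vendored more_itertools, replacing the removed local copy) and joins with the platform path separator. A small sketch of that pipeline, assuming the more_itertools package is importable directly and using example directories:

    import itertools
    import os
    from more_itertools import unique_everseen  # vendored as setuptools.extern.more_itertools above

    spec_path_lists = [['/usr/bin', '/usr/local/bin'], ['/usr/bin']]
    env_paths = os.environ.get('PATH', '').split(os.pathsep)

    paths = itertools.chain(itertools.chain.from_iterable(spec_path_lists), env_paths)
    extant = filter(os.path.isdir, paths)              # keep only directories that exist
    merged = os.pathsep.join(unique_everseen(extant))  # first occurrence of each dir wins
    print(merged)
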
diff --git a/setuptools/namespaces.py b/setuptools/namespaces.py
index dc16106..44939e1 100755..100644
--- a/setuptools/namespaces.py
+++ b/setuptools/namespaces.py
@@ -2,8 +2,6 @@ import os
from distutils import log
import itertools
-from setuptools.extern.six.moves import map
-
flatten = itertools.chain.from_iterable
@@ -47,13 +45,17 @@ class Installer:
"p = os.path.join(%(root)s, *%(pth)r)",
"importlib = has_mfs and __import__('importlib.util')",
"has_mfs and __import__('importlib.machinery')",
- "m = has_mfs and "
+ (
+ "m = has_mfs and "
"sys.modules.setdefault(%(pkg)r, "
- "importlib.util.module_from_spec("
- "importlib.machinery.PathFinder.find_spec(%(pkg)r, "
- "[os.path.dirname(p)])))",
- "m = m or "
- "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
+ "importlib.util.module_from_spec("
+ "importlib.machinery.PathFinder.find_spec(%(pkg)r, "
+ "[os.path.dirname(p)])))"
+ ),
+ (
+ "m = m or "
+ "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))"
+ ),
"mp = (m or []) and m.__dict__.setdefault('__path__',[])",
"(p not in mp) and mp.append(p)",
)
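
These template fragments are joined into a single line of Python and written to a *-nspkg.pth file, with the %(...) placeholders filled in per namespace package. A trimmed, hedged sketch of that substitution; the helper below and the 'acme.plugins' package are illustrative, not the exact Installer code:

    # Trimmed template: the importlib/has_mfs branches of the real template are omitted.
    tmpl_lines = (
        "import sys, types, os",
        "p = os.path.join(%(root)s, *%(pth)r)",
        "m = sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
        "mp = m.__dict__.setdefault('__path__', [])",
        "(p not in mp) and mp.append(p)",
    )

    def gen_nspkg_line(pkg, root="sys._getframe(1).f_locals['sitedir']"):
        pth = tuple(pkg.split('.'))
        return ';'.join(tmpl_lines) % dict(pkg=pkg, pth=pth, root=root) + '\n'

    print(gen_nspkg_line('acme.plugins'))
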
@@ -68,8 +70,6 @@ class Installer:
return "sys._getframe(1).f_locals['sitedir']"
def _gen_nspkg_line(self, pkg):
- # ensure pkg is not a unicode string under Python 2.7
- pkg = str(pkg)
pth = tuple(pkg.split('.'))
root = self._get_root()
tmpl_lines = self._nspkg_tmpl
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index b6407be..14881d2 100755..100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -2,36 +2,39 @@
import sys
import os
import re
+import io
import shutil
import socket
import base64
import hashlib
import itertools
+import warnings
+import configparser
+import html
+import http.client
+import urllib.parse
+import urllib.request
+import urllib.error
from functools import wraps
-from setuptools.extern import six
-from setuptools.extern.six.moves import urllib, http_client, configparser, map
-
import setuptools
from pkg_resources import (
CHECKOUT_DIST, Distribution, BINARY_DIST, normalize_path, SOURCE_DIST,
Environment, find_distributions, safe_name, safe_version,
- to_filename, Requirement, DEVELOP_DIST, EGG_DIST,
+ to_filename, Requirement, DEVELOP_DIST, EGG_DIST, parse_version,
)
-from setuptools import ssl_support
from distutils import log
from distutils.errors import DistutilsError
from fnmatch import translate
-from setuptools.py27compat import get_all_headers
-from setuptools.py33compat import unescape
from setuptools.wheel import Wheel
+from setuptools.extern.more_itertools import unique_everseen
+
EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.+!]+)$')
-HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)
-# this is here to fix emacs' cruddy broken syntax highlighting
+HREF = re.compile(r"""href\s*=\s*['"]?([^'"> ]+)""", re.I)
PYPI_MD5 = re.compile(
- '<a href="([^"#]+)">([^<]+)</a>\n\\s+\\(<a (?:title="MD5 hash"\n\\s+)'
- 'href="[^?]+\\?:action=show_md5&amp;digest=([0-9a-f]{32})">md5</a>\\)'
+ r'<a href="([^"#]+)">([^<]+)</a>\n\s+\(<a (?:title="MD5 hash"\n\s+)'
+ r'href="[^?]+\?:action=show_md5&amp;digest=([0-9a-f]{32})">md5</a>\)'
)
URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):', re.I).match
EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()
@@ -44,16 +47,17 @@ __all__ = [
_SOCKET_TIMEOUT = 15
_tmpl = "setuptools/{setuptools.__version__} Python-urllib/{py_major}"
-user_agent = _tmpl.format(py_major=sys.version[:3], setuptools=setuptools)
+user_agent = _tmpl.format(
+ py_major='{}.{}'.format(*sys.version_info), setuptools=setuptools)
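
The user-agent change is more than cosmetic: slicing sys.version breaks on two-digit minor versions, while formatting from sys.version_info does not.

    import sys

    old_style = sys.version[:3]                    # '3.1' on Python 3.10 and later
    new_style = '{}.{}'.format(*sys.version_info)  # '3.10', '3.11', ...
    print(old_style, new_style)
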
def parse_requirement_arg(spec):
try:
return Requirement.parse(spec)
- except ValueError:
+ except ValueError as e:
raise DistutilsError(
"Not a URL, existing file, or requirement spec: %r" % (spec,)
- )
+ ) from e
def parse_bdist_wininst(name):
@@ -158,7 +162,7 @@ def interpret_distro_name(
# Generate alternative interpretations of a source distro name
# Because some packages are ambiguous as to name/versions split
# e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc.
- # So, we generate each possible interepretation (e.g. "adns, python-1.1.0"
+ # So, we generate each possible interpretation (e.g. "adns, python-1.1.0"
# "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice,
# the spurious interpretations should be ignored, because in the event
# there's also an "adns" package, the spurious "python-1.1.0" version will
@@ -180,25 +184,6 @@ def interpret_distro_name(
)
-# From Python 2.7 docs
-def unique_everseen(iterable, key=None):
- "List unique elements, preserving order. Remember all elements ever seen."
- # unique_everseen('AAAABBBCCDAABBB') --> A B C D
- # unique_everseen('ABBCcAD', str.lower) --> A B C D
- seen = set()
- seen_add = seen.add
- if key is None:
- for element in six.moves.filterfalse(seen.__contains__, iterable):
- seen_add(element)
- yield element
- else:
- for element in iterable:
- k = key(element)
- if k not in seen:
- seen_add(k)
- yield element
-
-
def unique_values(func):
"""
Wrap a function returning an iterable such that the resulting iterable
@@ -235,7 +220,7 @@ def find_external_links(url, page):
yield urllib.parse.urljoin(url, htmldecode(match.group(1)))
-class ContentChecker(object):
+class ContentChecker:
"""
A null content checker that defines the interface for checking content
"""
@@ -300,24 +285,25 @@ class PackageIndex(Environment):
self, index_url="https://pypi.org/simple/", hosts=('*',),
ca_bundle=None, verify_ssl=True, *args, **kw
):
- Environment.__init__(self, *args, **kw)
+ super().__init__(*args, **kw)
self.index_url = index_url + "/" [:not index_url.endswith('/')]
self.scanned_urls = {}
self.fetched_urls = {}
self.package_pages = {}
self.allows = re.compile('|'.join(map(translate, hosts))).match
self.to_scan = []
- use_ssl = (
- verify_ssl
- and ssl_support.is_available
- and (ca_bundle or ssl_support.find_ca_bundle())
- )
- if use_ssl:
- self.opener = ssl_support.opener_for(ca_bundle)
- else:
- self.opener = urllib.request.urlopen
+ self.opener = urllib.request.urlopen
+
+ def add(self, dist):
+ # ignore invalid versions
+ try:
+ parse_version(dist.version)
+ except Exception:
+ return
+ return super().add(dist)
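
The new add() override silently drops distributions whose version string cannot be parsed, so a single malformed upload no longer aborts an index scan. A hedged sketch of the guard; whether a given string actually raises depends on the pkg_resources/packaging release in use:

    from pkg_resources import parse_version

    def version_is_usable(version):
        # Mirror the guard above: any parse failure means "ignore this distribution".
        try:
            parse_version(version)
        except Exception:
            return False
        return True

    print(version_is_usable('1.0.post1'))        # True
    print(version_is_usable('not a version!!'))  # False on releases that reject legacy versions
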
- def process_url(self, url, retrieve=False):
+ # FIXME: 'PackageIndex.process_url' is too complex (14)
+ def process_url(self, url, retrieve=False): # noqa: C901
"""Evaluate a URL as a possible download, and maybe retrieve it"""
if url in self.scanned_urls and not retrieve:
return
@@ -346,6 +332,8 @@ class PackageIndex(Environment):
f = self.open_url(url, tmpl % url)
if f is None:
return
+ if isinstance(f, urllib.error.HTTPError) and f.code == 401:
+ self.info("Authentication error: %s" % f.msg)
self.fetched_urls[f.url] = True
if 'html' not in f.headers.get('content-type', '').lower():
f.close() # not html, we can't process it
@@ -423,49 +411,53 @@ class PackageIndex(Environment):
dist.precedence = SOURCE_DIST
self.add(dist)
+ def _scan(self, link):
+ # Process a URL to see if it's for a package page
+ NO_MATCH_SENTINEL = None, None
+ if not link.startswith(self.index_url):
+ return NO_MATCH_SENTINEL
+
+ parts = list(map(
+ urllib.parse.unquote, link[len(self.index_url):].split('/')
+ ))
+ if len(parts) != 2 or '#' in parts[1]:
+ return NO_MATCH_SENTINEL
+
+ # it's a package page, sanitize and index it
+ pkg = safe_name(parts[0])
+ ver = safe_version(parts[1])
+ self.package_pages.setdefault(pkg.lower(), {})[link] = True
+ return to_filename(pkg), to_filename(ver)
+
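
_scan, factored out of process_index so it can be exercised on its own, only treats a link as a package page when the part after the index URL unquotes and splits into exactly a project and a version. A small sketch with example URLs:

    import urllib.parse

    index_url = 'https://pypi.org/simple/'
    link = 'https://pypi.org/simple/sample-dist/1.0'

    parts = [urllib.parse.unquote(p) for p in link[len(index_url):].split('/')]
    if len(parts) == 2 and '#' not in parts[1]:
        pkg, ver = parts
        print('package page:', pkg, ver)   # -> package page: sample-dist 1.0
    else:
        print('not a package page')
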
def process_index(self, url, page):
"""Process the contents of a PyPI page"""
- def scan(link):
- # Process a URL to see if it's for a package page
- if link.startswith(self.index_url):
- parts = list(map(
- urllib.parse.unquote, link[len(self.index_url):].split('/')
- ))
- if len(parts) == 2 and '#' not in parts[1]:
- # it's a package page, sanitize and index it
- pkg = safe_name(parts[0])
- ver = safe_version(parts[1])
- self.package_pages.setdefault(pkg.lower(), {})[link] = True
- return to_filename(pkg), to_filename(ver)
- return None, None
-
# process an index page into the package-page index
for match in HREF.finditer(page):
try:
- scan(urllib.parse.urljoin(url, htmldecode(match.group(1))))
+ self._scan(urllib.parse.urljoin(url, htmldecode(match.group(1))))
except ValueError:
pass
- pkg, ver = scan(url) # ensure this page is in the page index
- if pkg:
- # process individual package page
- for new_url in find_external_links(url, page):
- # Process the found URL
- base, frag = egg_info_for_url(new_url)
- if base.endswith('.py') and not frag:
- if ver:
- new_url += '#egg=%s-%s' % (pkg, ver)
- else:
- self.need_version_info(url)
- self.scan_url(new_url)
-
- return PYPI_MD5.sub(
- lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1, 3, 2), page
- )
- else:
+ pkg, ver = self._scan(url) # ensure this page is in the page index
+ if not pkg:
return "" # no sense double-scanning non-package pages
+ # process individual package page
+ for new_url in find_external_links(url, page):
+ # Process the found URL
+ base, frag = egg_info_for_url(new_url)
+ if base.endswith('.py') and not frag:
+ if ver:
+ new_url += '#egg=%s-%s' % (pkg, ver)
+ else:
+ self.need_version_info(url)
+ self.scan_url(new_url)
+
+ return PYPI_MD5.sub(
+ lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1, 3, 2), page
+ )
+
def need_version_info(self, url):
self.scan_all(
"Page at %s links to .py file(s) without version info; an index "
@@ -586,7 +578,7 @@ class PackageIndex(Environment):
spec = parse_requirement_arg(spec)
return getattr(self.fetch_distribution(spec, tmpdir), 'location', None)
- def fetch_distribution(
+ def fetch_distribution( # noqa: C901 # is too complex (14) # FIXME
self, requirement, tmpdir, force_scan=False, source=False,
develop_ok=False, local_index=None):
"""Obtain a distribution suitable for fulfilling `requirement`
@@ -688,8 +680,7 @@ class PackageIndex(Environment):
# Make sure the file has been downloaded to the temp dir.
if os.path.dirname(filename) != tmpdir:
dst = os.path.join(tmpdir, basename)
- from setuptools.command.easy_install import samefile
- if not samefile(filename, dst):
+ if not (os.path.exists(dst) and os.path.samefile(filename, dst)):
shutil.copy2(filename, dst)
filename = dst
@@ -735,7 +726,7 @@ class PackageIndex(Environment):
size = -1
if "content-length" in headers:
# Some servers return multiple Content-Length headers :(
- sizes = get_all_headers(headers, 'Content-Length')
+ sizes = headers.get_all('Content-Length')
size = max(map(int, sizes))
self.reporthook(url, filename, blocknum, bs, size)
with open(filename, 'wb') as tfp:
@@ -757,17 +748,18 @@ class PackageIndex(Environment):
def reporthook(self, url, filename, blocknum, blksize, size):
pass # no-op
- def open_url(self, url, warning=None):
+ # FIXME:
+ def open_url(self, url, warning=None): # noqa: C901 # is too complex (12)
if url.startswith('file:'):
return local_open(url)
try:
return open_with_auth(url, self.opener)
- except (ValueError, http_client.InvalidURL) as v:
+ except (ValueError, http.client.InvalidURL) as v:
msg = ' '.join([str(arg) for arg in v.args])
if warning:
self.warn(warning, msg)
else:
- raise DistutilsError('%s %s' % (url, msg))
+ raise DistutilsError('%s %s' % (url, msg)) from v
except urllib.error.HTTPError as v:
return v
except urllib.error.URLError as v:
@@ -775,8 +767,8 @@ class PackageIndex(Environment):
self.warn(warning, v.reason)
else:
raise DistutilsError("Download error for %s: %s"
- % (url, v.reason))
- except http_client.BadStatusLine as v:
+ % (url, v.reason)) from v
+ except http.client.BadStatusLine as v:
if warning:
self.warn(warning, v.line)
else:
@@ -784,13 +776,13 @@ class PackageIndex(Environment):
'%s returned a bad status line. The server might be '
'down, %s' %
(url, v.line)
- )
- except (http_client.HTTPException, socket.error) as v:
+ ) from v
+ except (http.client.HTTPException, socket.error) as v:
if warning:
self.warn(warning, v)
else:
raise DistutilsError("Download error for %s: %s"
- % (url, v))
+ % (url, v)) from v
def _download_url(self, scheme, url, tmpdir):
# Determine download filename
@@ -847,13 +839,14 @@ class PackageIndex(Environment):
raise DistutilsError("Unexpected HTML page found at " + url)
def _download_svn(self, url, filename):
+ warnings.warn("SVN download support is deprecated", UserWarning)
url = url.split('#', 1)[0] # remove any fragment for svn's sake
creds = ''
if url.lower().startswith('svn:') and '@' in url:
scheme, netloc, path, p, q, f = urllib.parse.urlparse(url)
if not netloc and path.startswith('//') and '/' in path[2:]:
netloc, path = path[2:].split('/', 1)
- auth, host = urllib.parse.splituser(netloc)
+ auth, host = _splituser(netloc)
if auth:
if ':' in auth:
user, pw = auth.split(':', 1)
@@ -894,7 +887,7 @@ class PackageIndex(Environment):
if rev is not None:
self.info("Checking out %s", rev)
- os.system("(cd %s && git checkout --quiet %s)" % (
+ os.system("git -C %s checkout --quiet %s" % (
filename,
rev,
))
@@ -910,7 +903,7 @@ class PackageIndex(Environment):
if rev is not None:
self.info("Updating to %s", rev)
- os.system("(cd %s && hg up -C -r %s -q)" % (
+ os.system("hg --cwd %s up -C -r %s -q" % (
filename,
rev,
))
@@ -933,12 +926,19 @@ entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub
def decode_entity(match):
- what = match.group(1)
- return unescape(what)
+ what = match.group(0)
+ return html.unescape(what)
def htmldecode(text):
- """Decode HTML entities in the given text."""
+ """
+ Decode HTML entities in the given text.
+
+ >>> htmldecode(
+ ... 'https://../package_name-0.1.2.tar.gz'
+ ... '?tokena=A&amp;tokenb=B">package_name-0.1.2.tar.gz')
+ 'https://../package_name-0.1.2.tar.gz?tokena=A&tokenb=B">package_name-0.1.2.tar.gz'
+ """
return entity_sub(decode_entity, text)
@@ -959,8 +959,7 @@ def socket_timeout(timeout=15):
def _encode_auth(auth):
"""
- A function compatible with Python 2.3-3.3 that will encode
- auth from a URL suitable for an HTTP header.
+ Encode auth from a URL suitable for an HTTP header.
>>> str(_encode_auth('username%3Apassword'))
'dXNlcm5hbWU6cGFzc3dvcmQ='
@@ -972,15 +971,14 @@ def _encode_auth(auth):
auth_s = urllib.parse.unquote(auth)
# convert to bytes
auth_bytes = auth_s.encode()
- # use the legacy interface for Python 2.3 support
- encoded_bytes = base64.encodestring(auth_bytes)
+ encoded_bytes = base64.b64encode(auth_bytes)
# convert back to a string
encoded = encoded_bytes.decode()
# strip the trailing carriage return
return encoded.replace('\n', '')
-class Credential(object):
+class Credential:
"""
A username/password pair. Use like a namedtuple.
"""
@@ -1003,7 +1001,7 @@ class PyPIConfig(configparser.RawConfigParser):
Load from ~/.pypirc
"""
defaults = dict.fromkeys(['username', 'password', 'repository'], '')
- configparser.RawConfigParser.__init__(self, defaults)
+ super().__init__(defaults)
rc = os.path.join(os.path.expanduser('~'), '.pypirc')
if os.path.exists(rc):
@@ -1038,15 +1036,16 @@ class PyPIConfig(configparser.RawConfigParser):
def open_with_auth(url, opener=urllib.request.urlopen):
"""Open a urllib2 request, handling HTTP authentication"""
- scheme, netloc, path, params, query, frag = urllib.parse.urlparse(url)
+ parsed = urllib.parse.urlparse(url)
+ scheme, netloc, path, params, query, frag = parsed
- # Double scheme does not raise on Mac OS X as revealed by a
+ # Double scheme does not raise on macOS as revealed by a
# failing test. We would expect "nonnumeric port". Refs #20.
if netloc.endswith(':'):
- raise http_client.InvalidURL("nonnumeric port: ''")
+ raise http.client.InvalidURL("nonnumeric port: ''")
if scheme in ('http', 'https'):
- auth, host = urllib.parse.splituser(netloc)
+ auth, address = _splituser(netloc)
else:
auth = None
@@ -1059,7 +1058,7 @@ def open_with_auth(url, opener=urllib.request.urlopen):
if auth:
auth = "Basic " + _encode_auth(auth)
- parts = scheme, host, path, params, query, frag
+ parts = scheme, address, path, params, query, frag
new_url = urllib.parse.urlunparse(parts)
request = urllib.request.Request(new_url)
request.add_header("Authorization", auth)
@@ -1073,13 +1072,21 @@ def open_with_auth(url, opener=urllib.request.urlopen):
# Put authentication info back into request URL if same host,
# so that links found on the page will work
s2, h2, path2, param2, query2, frag2 = urllib.parse.urlparse(fp.url)
- if s2 == scheme and h2 == host:
+ if s2 == scheme and h2 == address:
parts = s2, netloc, path2, param2, query2, frag2
fp.url = urllib.parse.urlunparse(parts)
return fp
+# copy of urllib.parse._splituser from Python 3.8
+def _splituser(host):
+ """splituser('user[:passwd]@host[:port]')
+ --> 'user[:passwd]', 'host[:port]'."""
+ user, delim, host = host.rpartition('@')
+ return (user if delim else None), host
+
+
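
_splituser replaces the removed urllib.parse.splituser helper; rpartition('@') splits on the last '@', so credentials that themselves contain '@' stay on the user side. Quick behaviour check (inputs are examples):

    def _splituser(host):
        """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
        user, delim, host = host.rpartition('@')
        return (user if delim else None), host

    print(_splituser('alice:s3cr3t@example.org:443'))  # ('alice:s3cr3t', 'example.org:443')
    print(_splituser('example.org'))                   # (None, 'example.org')
    print(_splituser('user@pass@host'))                # ('user@pass', 'host')
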
# adding a timeout to avoid freezing package_index
open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth)
@@ -1115,5 +1122,5 @@ def local_open(url):
status, message, body = 404, "Path not found", "Not found"
headers = {'content-type': 'text/html'}
- body_stream = six.StringIO(body)
+ body_stream = io.StringIO(body)
return urllib.error.HTTPError(url, status, message, headers, body_stream)
diff --git a/setuptools/pep425tags.py b/setuptools/pep425tags.py
deleted file mode 100644
index 3bdd328..0000000
--- a/setuptools/pep425tags.py
+++ /dev/null
@@ -1,317 +0,0 @@
-# This file originally from pip:
-# https://github.com/pypa/pip/blob/8f4f15a5a95d7d5b511ceaee9ed261176c181970/src/pip/_internal/pep425tags.py
-"""Generate and work with PEP 425 Compatibility Tags."""
-from __future__ import absolute_import
-
-import distutils.util
-from distutils import log
-import platform
-import re
-import sys
-import sysconfig
-import warnings
-from collections import OrderedDict
-
-from . import glibc
-
-_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')
-
-
-def get_config_var(var):
- try:
- return sysconfig.get_config_var(var)
- except IOError as e: # Issue #1074
- warnings.warn("{}".format(e), RuntimeWarning)
- return None
-
-
-def get_abbr_impl():
- """Return abbreviated implementation name."""
- if hasattr(sys, 'pypy_version_info'):
- pyimpl = 'pp'
- elif sys.platform.startswith('java'):
- pyimpl = 'jy'
- elif sys.platform == 'cli':
- pyimpl = 'ip'
- else:
- pyimpl = 'cp'
- return pyimpl
-
-
-def get_impl_ver():
- """Return implementation version."""
- impl_ver = get_config_var("py_version_nodot")
- if not impl_ver or get_abbr_impl() == 'pp':
- impl_ver = ''.join(map(str, get_impl_version_info()))
- return impl_ver
-
-
-def get_impl_version_info():
- """Return sys.version_info-like tuple for use in decrementing the minor
- version."""
- if get_abbr_impl() == 'pp':
- # as per https://github.com/pypa/pip/issues/2882
- return (sys.version_info[0], sys.pypy_version_info.major,
- sys.pypy_version_info.minor)
- else:
- return sys.version_info[0], sys.version_info[1]
-
-
-def get_impl_tag():
- """
- Returns the Tag for this specific implementation.
- """
- return "{}{}".format(get_abbr_impl(), get_impl_ver())
-
-
-def get_flag(var, fallback, expected=True, warn=True):
- """Use a fallback method for determining SOABI flags if the needed config
- var is unset or unavailable."""
- val = get_config_var(var)
- if val is None:
- if warn:
- log.debug("Config variable '%s' is unset, Python ABI tag may "
- "be incorrect", var)
- return fallback()
- return val == expected
-
-
-def get_abi_tag():
- """Return the ABI tag based on SOABI (if available) or emulate SOABI
- (CPython 2, PyPy)."""
- soabi = get_config_var('SOABI')
- impl = get_abbr_impl()
- if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'):
- d = ''
- m = ''
- u = ''
- if get_flag('Py_DEBUG',
- lambda: hasattr(sys, 'gettotalrefcount'),
- warn=(impl == 'cp')):
- d = 'd'
- if get_flag('WITH_PYMALLOC',
- lambda: impl == 'cp',
- warn=(impl == 'cp')):
- m = 'm'
- if get_flag('Py_UNICODE_SIZE',
- lambda: sys.maxunicode == 0x10ffff,
- expected=4,
- warn=(impl == 'cp' and
- sys.version_info < (3, 3))) \
- and sys.version_info < (3, 3):
- u = 'u'
- abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
- elif soabi and soabi.startswith('cpython-'):
- abi = 'cp' + soabi.split('-')[1]
- elif soabi:
- abi = soabi.replace('.', '_').replace('-', '_')
- else:
- abi = None
- return abi
-
-
-def _is_running_32bit():
- return sys.maxsize == 2147483647
-
-
-def get_platform():
- """Return our platform name 'win32', 'linux_x86_64'"""
- if sys.platform == 'darwin':
- # distutils.util.get_platform() returns the release based on the value
- # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may
- # be significantly older than the user's current machine.
- release, _, machine = platform.mac_ver()
- split_ver = release.split('.')
-
- if machine == "x86_64" and _is_running_32bit():
- machine = "i386"
- elif machine == "ppc64" and _is_running_32bit():
- machine = "ppc"
-
- return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine)
-
- # XXX remove distutils dependency
- result = distutils.util.get_platform().replace('.', '_').replace('-', '_')
- if result == "linux_x86_64" and _is_running_32bit():
- # 32 bit Python program (running on a 64 bit Linux): pip should only
- # install and run 32 bit compiled extensions in that case.
- result = "linux_i686"
-
- return result
-
-
-def is_manylinux1_compatible():
- # Only Linux, and only x86-64 / i686
- if get_platform() not in {"linux_x86_64", "linux_i686"}:
- return False
-
- # Check for presence of _manylinux module
- try:
- import _manylinux
- return bool(_manylinux.manylinux1_compatible)
- except (ImportError, AttributeError):
- # Fall through to heuristic check below
- pass
-
- # Check glibc version. CentOS 5 uses glibc 2.5.
- return glibc.have_compatible_glibc(2, 5)
-
-
-def get_darwin_arches(major, minor, machine):
- """Return a list of supported arches (including group arches) for
- the given major, minor and machine architecture of an macOS machine.
- """
- arches = []
-
- def _supports_arch(major, minor, arch):
- # Looking at the application support for macOS versions in the chart
- # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears
- # our timeline looks roughly like:
- #
- # 10.0 - Introduces ppc support.
- # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64
- # and x86_64 support is CLI only, and cannot be used for GUI
- # applications.
- # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications.
- # 10.6 - Drops support for ppc64
- # 10.7 - Drops support for ppc
- #
- # Given that we do not know if we're installing a CLI or a GUI
- # application, we must be conservative and assume it might be a GUI
- # application and behave as if ppc64 and x86_64 support did not occur
- # until 10.5.
- #
- # Note: The above information is taken from the "Application support"
- # column in the chart not the "Processor support" since I believe
- # that we care about what instruction sets an application can use
- # not which processors the OS supports.
- if arch == 'ppc':
- return (major, minor) <= (10, 5)
- if arch == 'ppc64':
- return (major, minor) == (10, 5)
- if arch == 'i386':
- return (major, minor) >= (10, 4)
- if arch == 'x86_64':
- return (major, minor) >= (10, 5)
- if arch in groups:
- for garch in groups[arch]:
- if _supports_arch(major, minor, garch):
- return True
- return False
-
- groups = OrderedDict([
- ("fat", ("i386", "ppc")),
- ("intel", ("x86_64", "i386")),
- ("fat64", ("x86_64", "ppc64")),
- ("fat32", ("x86_64", "i386", "ppc")),
- ])
-
- if _supports_arch(major, minor, machine):
- arches.append(machine)
-
- for garch in groups:
- if machine in groups[garch] and _supports_arch(major, minor, garch):
- arches.append(garch)
-
- arches.append('universal')
-
- return arches
-
-
-def get_supported(versions=None, noarch=False, platform=None,
- impl=None, abi=None):
- """Return a list of supported tags for each version specified in
- `versions`.
-
- :param versions: a list of string versions, of the form ["33", "32"],
- or None. The first version will be assumed to support our ABI.
- :param platform: specify the exact platform you want valid
- tags for, or None. If None, use the local system platform.
- :param impl: specify the exact implementation you want valid
- tags for, or None. If None, use the local interpreter impl.
- :param abi: specify the exact abi you want valid
- tags for, or None. If None, use the local interpreter abi.
- """
- supported = []
-
- # Versions must be given with respect to the preference
- if versions is None:
- versions = []
- version_info = get_impl_version_info()
- major = version_info[:-1]
- # Support all previous minor Python versions.
- for minor in range(version_info[-1], -1, -1):
- versions.append(''.join(map(str, major + (minor,))))
-
- impl = impl or get_abbr_impl()
-
- abis = []
-
- abi = abi or get_abi_tag()
- if abi:
- abis[0:0] = [abi]
-
- abi3s = set()
- import imp
- for suffix in imp.get_suffixes():
- if suffix[0].startswith('.abi'):
- abi3s.add(suffix[0].split('.', 2)[1])
-
- abis.extend(sorted(list(abi3s)))
-
- abis.append('none')
-
- if not noarch:
- arch = platform or get_platform()
- if arch.startswith('macosx'):
- # support macosx-10.6-intel on macosx-10.9-x86_64
- match = _osx_arch_pat.match(arch)
- if match:
- name, major, minor, actual_arch = match.groups()
- tpl = '{}_{}_%i_%s'.format(name, major)
- arches = []
- for m in reversed(range(int(minor) + 1)):
- for a in get_darwin_arches(int(major), m, actual_arch):
- arches.append(tpl % (m, a))
- else:
- # arch pattern didn't match (?!)
- arches = [arch]
- elif platform is None and is_manylinux1_compatible():
- arches = [arch.replace('linux', 'manylinux1'), arch]
- else:
- arches = [arch]
-
- # Current version, current API (built specifically for our Python):
- for abi in abis:
- for arch in arches:
- supported.append(('%s%s' % (impl, versions[0]), abi, arch))
-
- # abi3 modules compatible with older version of Python
- for version in versions[1:]:
- # abi3 was introduced in Python 3.2
- if version in {'31', '30'}:
- break
- for abi in abi3s: # empty set if not Python 3
- for arch in arches:
- supported.append(("%s%s" % (impl, version), abi, arch))
-
- # Has binaries, does not use the Python API:
- for arch in arches:
- supported.append(('py%s' % (versions[0][0]), 'none', arch))
-
- # No abi / arch, but requires our implementation:
- supported.append(('%s%s' % (impl, versions[0]), 'none', 'any'))
- # Tagged specifically as being cross-version compatible
- # (with just the major version specified)
- supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))
-
- # No abi / arch, generic Python
- for i, version in enumerate(versions):
- supported.append(('py%s' % (version,), 'none', 'any'))
- if i == 0:
- supported.append(('py%s' % (version[0]), 'none', 'any'))
-
- return supported
-
-
-implementation_tag = get_impl_tag()
diff --git a/setuptools/py27compat.py b/setuptools/py27compat.py
deleted file mode 100644
index 2985011..0000000
--- a/setuptools/py27compat.py
+++ /dev/null
@@ -1,28 +0,0 @@
-"""
-Compatibility Support for Python 2.7 and earlier
-"""
-
-import platform
-
-from setuptools.extern import six
-
-
-def get_all_headers(message, key):
- """
- Given an HTTPMessage, return all headers matching a given key.
- """
- return message.get_all(key)
-
-
-if six.PY2:
- def get_all_headers(message, key):
- return message.getheaders(key)
-
-
-linux_py2_ascii = (
- platform.system() == 'Linux' and
- six.PY2
-)
-
-rmtree_safe = str if linux_py2_ascii else lambda x: x
-"""Workaround for http://bugs.python.org/issue24672"""
diff --git a/setuptools/py31compat.py b/setuptools/py31compat.py
deleted file mode 100644
index 4ea9532..0000000
--- a/setuptools/py31compat.py
+++ /dev/null
@@ -1,41 +0,0 @@
-__all__ = ['get_config_vars', 'get_path']
-
-try:
- # Python 2.7 or >=3.2
- from sysconfig import get_config_vars, get_path
-except ImportError:
- from distutils.sysconfig import get_config_vars, get_python_lib
-
- def get_path(name):
- if name not in ('platlib', 'purelib'):
- raise ValueError("Name must be purelib or platlib")
- return get_python_lib(name == 'platlib')
-
-
-try:
- # Python >=3.2
- from tempfile import TemporaryDirectory
-except ImportError:
- import shutil
- import tempfile
-
- class TemporaryDirectory(object):
- """
- Very simple temporary directory context manager.
- Will try to delete afterward, but will also ignore OS and similar
- errors on deletion.
- """
-
- def __init__(self):
- self.name = None # Handle mkdtemp raising an exception
- self.name = tempfile.mkdtemp()
-
- def __enter__(self):
- return self.name
-
- def __exit__(self, exctype, excvalue, exctrace):
- try:
- shutil.rmtree(self.name, True)
- except OSError: # removal errors are not the only possible
- pass
- self.name = None
diff --git a/setuptools/py33compat.py b/setuptools/py33compat.py
deleted file mode 100644
index 2a73ebb..0000000
--- a/setuptools/py33compat.py
+++ /dev/null
@@ -1,54 +0,0 @@
-import dis
-import array
-import collections
-
-try:
- import html
-except ImportError:
- html = None
-
-from setuptools.extern import six
-from setuptools.extern.six.moves import html_parser
-
-
-OpArg = collections.namedtuple('OpArg', 'opcode arg')
-
-
-class Bytecode_compat(object):
- def __init__(self, code):
- self.code = code
-
- def __iter__(self):
- """Yield '(op,arg)' pair for each operation in code object 'code'"""
-
- bytes = array.array('b', self.code.co_code)
- eof = len(self.code.co_code)
-
- ptr = 0
- extended_arg = 0
-
- while ptr < eof:
-
- op = bytes[ptr]
-
- if op >= dis.HAVE_ARGUMENT:
-
- arg = bytes[ptr + 1] + bytes[ptr + 2] * 256 + extended_arg
- ptr += 3
-
- if op == dis.EXTENDED_ARG:
- long_type = six.integer_types[-1]
- extended_arg = arg * long_type(65536)
- continue
-
- else:
- arg = None
- ptr += 1
-
- yield OpArg(op, arg)
-
-
-Bytecode = getattr(dis, 'Bytecode', Bytecode_compat)
-
-
-unescape = getattr(html, 'unescape', html_parser.HTMLParser().unescape)
diff --git a/setuptools/py34compat.py b/setuptools/py34compat.py
new file mode 100644
index 0000000..3ad9172
--- /dev/null
+++ b/setuptools/py34compat.py
@@ -0,0 +1,13 @@
+import importlib
+
+try:
+ import importlib.util
+except ImportError:
+ pass
+
+
+try:
+ module_from_spec = importlib.util.module_from_spec
+except AttributeError:
+ def module_from_spec(spec):
+ return spec.loader.load_module(spec.name)
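
The new py34compat shim only falls back to the legacy load_module API when importlib.util.module_from_spec is missing. A hedged usage sketch on a modern interpreter; the temporary module below is made up, and exec_module applies to the modern branch shown here (on the legacy branch load_module already executes the module):

    import importlib.util
    import os
    import tempfile

    try:
        module_from_spec = importlib.util.module_from_spec
    except AttributeError:                      # very old interpreters only
        def module_from_spec(spec):
            return spec.loader.load_module(spec.name)

    with tempfile.TemporaryDirectory() as tmp:
        path = os.path.join(tmp, 'demo_mod.py')
        with open(path, 'w') as f:
            f.write('ANSWER = 42\n')
        spec = importlib.util.spec_from_file_location('demo_mod', path)
        module = module_from_spec(spec)
        spec.loader.exec_module(module)         # modern branch: execute explicitly
        print(module.ANSWER)                    # -> 42
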
diff --git a/setuptools/py36compat.py b/setuptools/py36compat.py
deleted file mode 100644
index f527969..0000000
--- a/setuptools/py36compat.py
+++ /dev/null
@@ -1,82 +0,0 @@
-import sys
-from distutils.errors import DistutilsOptionError
-from distutils.util import strtobool
-from distutils.debug import DEBUG
-
-
-class Distribution_parse_config_files:
- """
- Mix-in providing forward-compatibility for functionality to be
- included by default on Python 3.7.
-
- Do not edit the code in this class except to update functionality
- as implemented in distutils.
- """
- def parse_config_files(self, filenames=None):
- from configparser import ConfigParser
-
- # Ignore install directory options if we have a venv
- if sys.prefix != sys.base_prefix:
- ignore_options = [
- 'install-base', 'install-platbase', 'install-lib',
- 'install-platlib', 'install-purelib', 'install-headers',
- 'install-scripts', 'install-data', 'prefix', 'exec-prefix',
- 'home', 'user', 'root']
- else:
- ignore_options = []
-
- ignore_options = frozenset(ignore_options)
-
- if filenames is None:
- filenames = self.find_config_files()
-
- if DEBUG:
- self.announce("Distribution.parse_config_files():")
-
- parser = ConfigParser(interpolation=None)
- for filename in filenames:
- if DEBUG:
- self.announce(" reading %s" % filename)
- parser.read(filename)
- for section in parser.sections():
- options = parser.options(section)
- opt_dict = self.get_option_dict(section)
-
- for opt in options:
- if opt != '__name__' and opt not in ignore_options:
- val = parser.get(section,opt)
- opt = opt.replace('-', '_')
- opt_dict[opt] = (filename, val)
-
- # Make the ConfigParser forget everything (so we retain
- # the original filenames that options come from)
- parser.__init__()
-
- # If there was a "global" section in the config file, use it
- # to set Distribution options.
-
- if 'global' in self.command_options:
- for (opt, (src, val)) in self.command_options['global'].items():
- alias = self.negative_opt.get(opt)
- try:
- if alias:
- setattr(self, alias, not strtobool(val))
- elif opt in ('verbose', 'dry_run'): # ugh!
- setattr(self, opt, strtobool(val))
- else:
- setattr(self, opt, val)
- except ValueError as msg:
- raise DistutilsOptionError(msg)
-
-
-if sys.version_info < (3,):
- # Python 2 behavior is sufficient
- class Distribution_parse_config_files:
- pass
-
-
-if False:
- # When updated behavior is available upstream,
- # disable override here.
- class Distribution_parse_config_files:
- pass
diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py
index 685f3f7..034fc80 100755..100644
--- a/setuptools/sandbox.py
+++ b/setuptools/sandbox.py
@@ -8,11 +8,11 @@ import re
import contextlib
import pickle
import textwrap
+import builtins
-from setuptools.extern import six
-from setuptools.extern.six.moves import builtins, map
-
-import pkg_resources.py31compat
+import pkg_resources
+from distutils.errors import DistutilsError
+from pkg_resources import working_set
if sys.platform.startswith('java'):
import org.python.modules.posix.PosixModule as _os
@@ -23,12 +23,13 @@ try:
except NameError:
_file = None
_open = open
-from distutils.errors import DistutilsError
-from pkg_resources import working_set
__all__ = [
- "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup",
+ "AbstractSandbox",
+ "DirectorySandbox",
+ "SandboxViolation",
+ "run_setup",
]
@@ -70,7 +71,7 @@ def override_temp(replacement):
"""
Monkey-patch tempfile.tempdir with replacement, ensuring it exists
"""
- pkg_resources.py31compat.makedirs(replacement, exist_ok=True)
+ os.makedirs(replacement, exist_ok=True)
saved = tempfile.tempdir
@@ -108,6 +109,7 @@ class UnpickleableException(Exception):
except Exception:
# get UnpickleableException inside the sandbox
from setuptools.sandbox import UnpickleableException as cls
+
return cls.dump(cls, cls(repr(exc)))
@@ -138,7 +140,7 @@ class ExceptionSaver:
return
type, exc = map(pickle.loads, self._saved)
- six.reraise(type, exc, self._tb)
+ raise exc.with_traceback(self._tb)
@contextlib.contextmanager
@@ -156,7 +158,8 @@ def save_modules():
sys.modules.update(saved)
# remove any modules imported since
del_modules = (
- mod_name for mod_name in sys.modules
+ mod_name
+ for mod_name in sys.modules
if mod_name not in saved
# exclude any encodings modules. See #285
and not mod_name.startswith('encodings.')
@@ -185,8 +188,8 @@ def setup_context(setup_dir):
temp_dir = os.path.join(setup_dir, 'temp')
with save_pkg_resources_state():
with save_modules():
- hide_setuptools()
with save_path():
+ hide_setuptools()
with save_argv():
with override_temp(temp_dir):
with pushd(setup_dir):
@@ -195,6 +198,15 @@ def setup_context(setup_dir):
yield
+_MODULES_TO_HIDE = {
+ 'setuptools',
+ 'distutils',
+ 'pkg_resources',
+ 'Cython',
+ '_distutils_hack',
+}
+
+
def _needs_hiding(mod_name):
"""
>>> _needs_hiding('setuptools')
@@ -212,8 +224,8 @@ def _needs_hiding(mod_name):
>>> _needs_hiding('Cython')
True
"""
- pattern = re.compile(r'(setuptools|pkg_resources|distutils|Cython)(\.|$)')
- return bool(pattern.match(mod_name))
+ base_module = mod_name.split('.', 1)[0]
+ return base_module in _MODULES_TO_HIDE
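
_needs_hiding now checks only the top-level package name against an explicit set instead of a regular expression, which also makes room for _distutils_hack. A tiny sketch of the check (module names are examples):

    _MODULES_TO_HIDE = {'setuptools', 'distutils', 'pkg_resources', 'Cython', '_distutils_hack'}

    def needs_hiding(mod_name):
        return mod_name.split('.', 1)[0] in _MODULES_TO_HIDE

    print(needs_hiding('setuptools.command.easy_install'))  # True
    print(needs_hiding('pkg_resourcesextra'))                # False: prefix alone is not enough
    print(needs_hiding('os.path'))                           # False
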
def hide_setuptools():
@@ -223,6 +235,10 @@ def hide_setuptools():
necessary to avoid issues such as #315 where setuptools upgrading itself
would fail to find a function declared in the metadata.
"""
+ _distutils_hack = sys.modules.get('_distutils_hack', None)
+ if _distutils_hack is not None:
+ _distutils_hack.remove_shim()
+
modules = filter(_needs_hiding, sys.modules)
_clear_modules(modules)
@@ -238,15 +254,8 @@ def run_setup(setup_script, args):
working_set.__init__()
working_set.callbacks.append(lambda dist: dist.activate())
- # __file__ should be a byte string on Python 2 (#712)
- dunder_file = (
- setup_script
- if isinstance(setup_script, str) else
- setup_script.encode(sys.getfilesystemencoding())
- )
-
with DirectorySandbox(setup_dir):
- ns = dict(__file__=dunder_file, __name__='__main__')
+ ns = dict(__file__=setup_script, __name__='__main__')
_execfile(setup_script, ns)
except SystemExit as v:
if v.args and v.args[0]:
@@ -261,7 +270,8 @@ class AbstractSandbox:
def __init__(self):
self._attrs = [
- name for name in dir(_os)
+ name
+ for name in dir(_os)
if not name.startswith('_') and hasattr(self, name)
]
@@ -316,9 +326,25 @@ class AbstractSandbox:
_file = _mk_single_path_wrapper('file', _file)
_open = _mk_single_path_wrapper('open', _open)
for name in [
- "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir",
- "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat",
- "startfile", "mkfifo", "mknod", "pathconf", "access"
+ "stat",
+ "listdir",
+ "chdir",
+ "open",
+ "chmod",
+ "chown",
+ "mkdir",
+ "remove",
+ "unlink",
+ "rmdir",
+ "utime",
+ "lchown",
+ "chroot",
+ "lstat",
+ "startfile",
+ "mkfifo",
+ "mknod",
+ "pathconf",
+ "access",
]:
if hasattr(_os, name):
locals()[name] = _mk_single_path_wrapper(name)
@@ -369,12 +395,12 @@ class AbstractSandbox:
"""Called for path pairs like rename, link, and symlink operations"""
return (
self._remap_input(operation + '-from', src, *args, **kw),
- self._remap_input(operation + '-to', dst, *args, **kw)
+ self._remap_input(operation + '-to', dst, *args, **kw),
)
if hasattr(os, 'devnull'):
- _EXCEPTIONS = [os.devnull,]
+ _EXCEPTIONS = [os.devnull]
else:
_EXCEPTIONS = []
@@ -382,28 +408,38 @@ else:
class DirectorySandbox(AbstractSandbox):
"""Restrict operations to a single subdirectory - pseudo-chroot"""
- write_ops = dict.fromkeys([
- "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir",
- "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam",
- ])
+ write_ops = dict.fromkeys(
+ [
+ "open",
+ "chmod",
+ "chown",
+ "mkdir",
+ "remove",
+ "unlink",
+ "rmdir",
+ "utime",
+ "lchown",
+ "chroot",
+ "mkfifo",
+ "mknod",
+ "tempnam",
+ ]
+ )
- _exception_patterns = [
- # Allow lib2to3 to attempt to save a pickled grammar object (#121)
- r'.*lib2to3.*\.pickle$',
- ]
+ _exception_patterns = []
"exempt writing to paths that match the pattern"
def __init__(self, sandbox, exceptions=_EXCEPTIONS):
self._sandbox = os.path.normcase(os.path.realpath(sandbox))
self._prefix = os.path.join(self._sandbox, '')
self._exceptions = [
- os.path.normcase(os.path.realpath(path))
- for path in exceptions
+ os.path.normcase(os.path.realpath(path)) for path in exceptions
]
AbstractSandbox.__init__(self)
def _violation(self, operation, *args, **kw):
from setuptools.sandbox import SandboxViolation
+
raise SandboxViolation(operation, args, kw)
if _file:
@@ -436,12 +472,10 @@ class DirectorySandbox(AbstractSandbox):
def _exempted(self, filepath):
start_matches = (
- filepath.startswith(exception)
- for exception in self._exceptions
+ filepath.startswith(exception) for exception in self._exceptions
)
pattern_matches = (
- re.match(pattern, filepath)
- for pattern in self._exception_patterns
+ re.match(pattern, filepath) for pattern in self._exception_patterns
)
candidates = itertools.chain(start_matches, pattern_matches)
return any(candidates)
@@ -466,15 +500,19 @@ class DirectorySandbox(AbstractSandbox):
WRITE_FLAGS = functools.reduce(
- operator.or_, [getattr(_os, a, 0) for a in
- "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()]
+ operator.or_,
+ [
+ getattr(_os, a, 0)
+ for a in "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()
+ ],
)
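
WRITE_FLAGS OR-combines every write-related open flag the platform defines (flags missing on the current platform, such as O_TEMPORARY outside Windows, contribute 0 through the getattr default); the sandbox then tests an os.open call's flags against this mask before allowing it. A minimal sketch of that mask and test:

    import functools
    import operator
    import os

    WRITE_FLAGS = functools.reduce(
        operator.or_,
        [getattr(os, a, 0)
         for a in "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()],
    )

    def would_write(flags):
        # True when an os.open() with these flags could modify the filesystem.
        return bool(flags & WRITE_FLAGS)

    print(would_write(os.O_RDONLY))               # False
    print(would_write(os.O_WRONLY | os.O_CREAT))  # True
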
class SandboxViolation(DistutilsError):
"""A setup script attempted to modify the filesystem outside the sandbox"""
- tmpl = textwrap.dedent("""
+ tmpl = textwrap.dedent(
+ """
SandboxViolation: {cmd}{args!r} {kwargs}
The package setup script has attempted to modify files on your system
@@ -484,7 +522,8 @@ class SandboxViolation(DistutilsError):
support alternate installation locations even if you run its setup
script by hand. Please inform the package's author and the EasyInstall
maintainers to find out if a fix or workaround is available.
- """).lstrip()
+ """
+ ).lstrip()
def __str__(self):
cmd, args, kwargs = self.args
diff --git a/setuptools/script (dev).tmpl b/setuptools/script (dev).tmpl
index d58b1bb..39a24b0 100644
--- a/setuptools/script (dev).tmpl
+++ b/setuptools/script (dev).tmpl
@@ -2,4 +2,5 @@
__requires__ = %(spec)r
__import__('pkg_resources').require(%(spec)r)
__file__ = %(dev_path)r
-exec(compile(open(__file__).read(), __file__, 'exec'))
+with open(__file__) as f:
+ exec(compile(f.read(), __file__, 'exec'))
diff --git a/setuptools/site-patch.py b/setuptools/site-patch.py
deleted file mode 100644
index 0d2d2ff..0000000
--- a/setuptools/site-patch.py
+++ /dev/null
@@ -1,74 +0,0 @@
-def __boot():
- import sys
- import os
- PYTHONPATH = os.environ.get('PYTHONPATH')
- if PYTHONPATH is None or (sys.platform == 'win32' and not PYTHONPATH):
- PYTHONPATH = []
- else:
- PYTHONPATH = PYTHONPATH.split(os.pathsep)
-
- pic = getattr(sys, 'path_importer_cache', {})
- stdpath = sys.path[len(PYTHONPATH):]
- mydir = os.path.dirname(__file__)
-
- for item in stdpath:
- if item == mydir or not item:
- continue # skip if current dir. on Windows, or my own directory
- importer = pic.get(item)
- if importer is not None:
- loader = importer.find_module('site')
- if loader is not None:
- # This should actually reload the current module
- loader.load_module('site')
- break
- else:
- try:
- import imp # Avoid import loop in Python >= 3.3
- stream, path, descr = imp.find_module('site', [item])
- except ImportError:
- continue
- if stream is None:
- continue
- try:
- # This should actually reload the current module
- imp.load_module('site', stream, path, descr)
- finally:
- stream.close()
- break
- else:
- raise ImportError("Couldn't find the real 'site' module")
-
- known_paths = dict([(makepath(item)[1], 1) for item in sys.path]) # 2.2 comp
-
- oldpos = getattr(sys, '__egginsert', 0) # save old insertion position
- sys.__egginsert = 0 # and reset the current one
-
- for item in PYTHONPATH:
- addsitedir(item)
-
- sys.__egginsert += oldpos # restore effective old position
-
- d, nd = makepath(stdpath[0])
- insert_at = None
- new_path = []
-
- for item in sys.path:
- p, np = makepath(item)
-
- if np == nd and insert_at is None:
- # We've hit the first 'system' path entry, so added entries go here
- insert_at = len(new_path)
-
- if np in known_paths or insert_at is None:
- new_path.append(item)
- else:
- # new path after the insert point, back-insert it
- new_path.insert(insert_at, item)
- insert_at += 1
-
- sys.path[:] = new_path
-
-
-if __name__ == 'site':
- __boot()
- del __boot
diff --git a/setuptools/ssl_support.py b/setuptools/ssl_support.py
deleted file mode 100644
index 6362f1f..0000000
--- a/setuptools/ssl_support.py
+++ /dev/null
@@ -1,260 +0,0 @@
-import os
-import socket
-import atexit
-import re
-import functools
-
-from setuptools.extern.six.moves import urllib, http_client, map, filter
-
-from pkg_resources import ResolutionError, ExtractionError
-
-try:
- import ssl
-except ImportError:
- ssl = None
-
-__all__ = [
- 'VerifyingHTTPSHandler', 'find_ca_bundle', 'is_available', 'cert_paths',
- 'opener_for'
-]
-
-cert_paths = """
-/etc/pki/tls/certs/ca-bundle.crt
-/etc/ssl/certs/ca-certificates.crt
-/usr/share/ssl/certs/ca-bundle.crt
-/usr/local/share/certs/ca-root.crt
-/etc/ssl/cert.pem
-/System/Library/OpenSSL/certs/cert.pem
-/usr/local/share/certs/ca-root-nss.crt
-/etc/ssl/ca-bundle.pem
-""".strip().split()
-
-try:
- HTTPSHandler = urllib.request.HTTPSHandler
- HTTPSConnection = http_client.HTTPSConnection
-except AttributeError:
- HTTPSHandler = HTTPSConnection = object
-
-is_available = ssl is not None and object not in (HTTPSHandler, HTTPSConnection)
-
-
-try:
- from ssl import CertificateError, match_hostname
-except ImportError:
- try:
- from backports.ssl_match_hostname import CertificateError
- from backports.ssl_match_hostname import match_hostname
- except ImportError:
- CertificateError = None
- match_hostname = None
-
-if not CertificateError:
-
- class CertificateError(ValueError):
- pass
-
-
-if not match_hostname:
-
- def _dnsname_match(dn, hostname, max_wildcards=1):
- """Matching according to RFC 6125, section 6.4.3
-
- http://tools.ietf.org/html/rfc6125#section-6.4.3
- """
- pats = []
- if not dn:
- return False
-
- # Ported from python3-syntax:
- # leftmost, *remainder = dn.split(r'.')
- parts = dn.split(r'.')
- leftmost = parts[0]
- remainder = parts[1:]
-
- wildcards = leftmost.count('*')
- if wildcards > max_wildcards:
- # Issue #17980: avoid denials of service by refusing more
- # than one wildcard per fragment. A survey of established
- # policy among SSL implementations showed it to be a
- # reasonable choice.
- raise CertificateError(
- "too many wildcards in certificate DNS name: " + repr(dn))
-
- # speed up common case w/o wildcards
- if not wildcards:
- return dn.lower() == hostname.lower()
-
- # RFC 6125, section 6.4.3, subitem 1.
- # The client SHOULD NOT attempt to match a presented identifier in which
- # the wildcard character comprises a label other than the left-most label.
- if leftmost == '*':
- # When '*' is a fragment by itself, it matches a non-empty dotless
- # fragment.
- pats.append('[^.]+')
- elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
- # RFC 6125, section 6.4.3, subitem 3.
- # The client SHOULD NOT attempt to match a presented identifier
- # where the wildcard character is embedded within an A-label or
- # U-label of an internationalized domain name.
- pats.append(re.escape(leftmost))
- else:
- # Otherwise, '*' matches any dotless string, e.g. www*
- pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
-
- # add the remaining fragments, ignore any wildcards
- for frag in remainder:
- pats.append(re.escape(frag))
-
- pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
- return pat.match(hostname)
-
- def match_hostname(cert, hostname):
- """Verify that *cert* (in decoded format as returned by
- SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
- rules are followed, but IP addresses are not accepted for *hostname*.
-
- CertificateError is raised on failure. On success, the function
- returns nothing.
- """
- if not cert:
- raise ValueError("empty or no certificate")
- dnsnames = []
- san = cert.get('subjectAltName', ())
- for key, value in san:
- if key == 'DNS':
- if _dnsname_match(value, hostname):
- return
- dnsnames.append(value)
- if not dnsnames:
- # The subject is only checked when there is no dNSName entry
- # in subjectAltName
- for sub in cert.get('subject', ()):
- for key, value in sub:
- # XXX according to RFC 2818, the most specific Common Name
- # must be used.
- if key == 'commonName':
- if _dnsname_match(value, hostname):
- return
- dnsnames.append(value)
- if len(dnsnames) > 1:
- raise CertificateError("hostname %r "
- "doesn't match either of %s"
- % (hostname, ', '.join(map(repr, dnsnames))))
- elif len(dnsnames) == 1:
- raise CertificateError("hostname %r "
- "doesn't match %r"
- % (hostname, dnsnames[0]))
- else:
- raise CertificateError("no appropriate commonName or "
- "subjectAltName fields were found")
-
-
-class VerifyingHTTPSHandler(HTTPSHandler):
- """Simple verifying handler: no auth, subclasses, timeouts, etc."""
-
- def __init__(self, ca_bundle):
- self.ca_bundle = ca_bundle
- HTTPSHandler.__init__(self)
-
- def https_open(self, req):
- return self.do_open(
- lambda host, **kw: VerifyingHTTPSConn(host, self.ca_bundle, **kw), req
- )
-
-
-class VerifyingHTTPSConn(HTTPSConnection):
- """Simple verifying connection: no auth, subclasses, timeouts, etc."""
-
- def __init__(self, host, ca_bundle, **kw):
- HTTPSConnection.__init__(self, host, **kw)
- self.ca_bundle = ca_bundle
-
- def connect(self):
- sock = socket.create_connection(
- (self.host, self.port), getattr(self, 'source_address', None)
- )
-
- # Handle the socket if a (proxy) tunnel is present
- if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None):
- self.sock = sock
- self._tunnel()
- # http://bugs.python.org/issue7776: Python>=3.4.1 and >=2.7.7
- # change self.host to mean the proxy server host when tunneling is
- # being used. Adapt, since we are interested in the destination
- # host for the match_hostname() comparison.
- actual_host = self._tunnel_host
- else:
- actual_host = self.host
-
- if hasattr(ssl, 'create_default_context'):
- ctx = ssl.create_default_context(cafile=self.ca_bundle)
- self.sock = ctx.wrap_socket(sock, server_hostname=actual_host)
- else:
- # This is for python < 2.7.9 and < 3.4?
- self.sock = ssl.wrap_socket(
- sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle
- )
- try:
- match_hostname(self.sock.getpeercert(), actual_host)
- except CertificateError:
- self.sock.shutdown(socket.SHUT_RDWR)
- self.sock.close()
- raise
-
-
-def opener_for(ca_bundle=None):
- """Get a urlopen() replacement that uses ca_bundle for verification"""
- return urllib.request.build_opener(
- VerifyingHTTPSHandler(ca_bundle or find_ca_bundle())
- ).open
-
-
-# from jaraco.functools
-def once(func):
- @functools.wraps(func)
- def wrapper(*args, **kwargs):
- if not hasattr(func, 'always_returns'):
- func.always_returns = func(*args, **kwargs)
- return func.always_returns
- return wrapper
-
-
-@once
-def get_win_certfile():
- try:
- import wincertstore
- except ImportError:
- return None
-
- class CertFile(wincertstore.CertFile):
- def __init__(self):
- super(CertFile, self).__init__()
- atexit.register(self.close)
-
- def close(self):
- try:
- super(CertFile, self).close()
- except OSError:
- pass
-
- _wincerts = CertFile()
- _wincerts.addstore('CA')
- _wincerts.addstore('ROOT')
- return _wincerts.name
-
-
-def find_ca_bundle():
- """Return an existing CA bundle path, or None"""
- extant_cert_paths = filter(os.path.isfile, cert_paths)
- return (
- get_win_certfile()
- or next(extant_cert_paths, None)
- or _certifi_where()
- )
-
-
-def _certifi_where():
- try:
- return __import__('certifi').where()
- except (ImportError, ResolutionError, ExtractionError):
- pass
diff --git a/setuptools/tests/__init__.py b/setuptools/tests/__init__.py
index 54dd7d2..564adf2 100644
--- a/setuptools/tests/__init__.py
+++ b/setuptools/tests/__init__.py
@@ -2,5 +2,9 @@ import locale
import pytest
+
+__all__ = ['fail_on_ascii']
+
+
is_ascii = locale.getpreferredencoding() == 'ANSI_X3.4-1968'
fail_on_ascii = pytest.mark.xfail(is_ascii, reason="Test fails in this locale")
diff --git a/setuptools/tests/config/__init__.py b/setuptools/tests/config/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/setuptools/tests/config/__init__.py
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
new file mode 100644
index 0000000..044f801
--- /dev/null
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -0,0 +1,254 @@
+"""Make sure that applying the configuration from pyproject.toml is equivalent to
+applying a similar configuration from setup.cfg
+"""
+import io
+import re
+from pathlib import Path
+from urllib.request import urlopen
+from unittest.mock import Mock
+
+import pytest
+from ini2toml.api import Translator
+
+import setuptools  # noqa -- ensure the monkey patch to metadata is applied

+from setuptools.dist import Distribution
+from setuptools.config import setupcfg, pyprojecttoml
+from setuptools.config import expand
+
+
+EXAMPLES = (Path(__file__).parent / "setupcfg_examples.txt").read_text()
+EXAMPLE_URLS = [x for x in EXAMPLES.splitlines() if not x.startswith("#")]
+DOWNLOAD_DIR = Path(__file__).parent / "downloads"
+
+
+def makedist(path):
+ return Distribution({"src_root": path})
+
+
+@pytest.mark.parametrize("url", EXAMPLE_URLS)
+@pytest.mark.filterwarnings("ignore")
+@pytest.mark.uses_network
+def test_apply_pyproject_equivalent_to_setupcfg(url, monkeypatch, tmp_path):
+ monkeypatch.setattr(expand, "read_attr", Mock(return_value="0.0.1"))
+ setupcfg_example = retrieve_file(url, DOWNLOAD_DIR)
+ pyproject_example = Path(tmp_path, "pyproject.toml")
+ toml_config = Translator().translate(setupcfg_example.read_text(), "setup.cfg")
+ pyproject_example.write_text(toml_config)
+
+ dist_toml = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject_example)
+ dist_cfg = setupcfg.apply_configuration(makedist(tmp_path), setupcfg_example)
+
+ pkg_info_toml = core_metadata(dist_toml)
+ pkg_info_cfg = core_metadata(dist_cfg)
+ assert pkg_info_toml == pkg_info_cfg
+
+ if any(getattr(d, "license_files", None) for d in (dist_toml, dist_cfg)):
+ assert set(dist_toml.license_files) == set(dist_cfg.license_files)
+
+ if any(getattr(d, "entry_points", None) for d in (dist_toml, dist_cfg)):
+ print(dist_cfg.entry_points)
+ ep_toml = {(k, *sorted(i.replace(" ", "") for i in v))
+ for k, v in dist_toml.entry_points.items()}
+ ep_cfg = {(k, *sorted(i.replace(" ", "") for i in v))
+ for k, v in dist_cfg.entry_points.items()}
+ assert ep_toml == ep_cfg
+
+ if any(getattr(d, "package_data", None) for d in (dist_toml, dist_cfg)):
+ pkg_data_toml = {(k, *sorted(v)) for k, v in dist_toml.package_data.items()}
+ pkg_data_cfg = {(k, *sorted(v)) for k, v in dist_cfg.package_data.items()}
+ assert pkg_data_toml == pkg_data_cfg
+
+ if any(getattr(d, "data_files", None) for d in (dist_toml, dist_cfg)):
+ data_files_toml = {(k, *sorted(v)) for k, v in dist_toml.data_files}
+ data_files_cfg = {(k, *sorted(v)) for k, v in dist_cfg.data_files}
+ assert data_files_toml == data_files_cfg
+
+ assert set(dist_toml.install_requires) == set(dist_cfg.install_requires)
+ if any(getattr(d, "extras_require", None) for d in (dist_toml, dist_cfg)):
+ if (
+ "testing" in dist_toml.extras_require
+ and "testing" not in dist_cfg.extras_require
+ ):
+ # ini2toml can automatically convert `tests_require` to `testing` extra
+ dist_toml.extras_require.pop("testing")
+ extra_req_toml = {(k, *sorted(v)) for k, v in dist_toml.extras_require.items()}
+ extra_req_cfg = {(k, *sorted(v)) for k, v in dist_cfg.extras_require.items()}
+ assert extra_req_toml == extra_req_cfg
+
+
+PEP621_EXAMPLE = """\
+[project]
+name = "spam"
+version = "2020.0.0"
+description = "Lovely Spam! Wonderful Spam!"
+readme = "README.rst"
+requires-python = ">=3.8"
+license = {file = "LICENSE.txt"}
+keywords = ["egg", "bacon", "sausage", "tomatoes", "Lobster Thermidor"]
+authors = [
+ {email = "hi@pradyunsg.me"},
+ {name = "Tzu-Ping Chung"}
+]
+maintainers = [
+ {name = "Brett Cannon", email = "brett@python.org"}
+]
+classifiers = [
+ "Development Status :: 4 - Beta",
+ "Programming Language :: Python"
+]
+
+dependencies = [
+ "httpx",
+ "gidgethub[httpx]>4.0.0",
+ "django>2.1; os_name != 'nt'",
+ "django>2.0; os_name == 'nt'"
+]
+
+[project.optional-dependencies]
+test = [
+ "pytest < 5.0.0",
+ "pytest-cov[all]"
+]
+
+[project.urls]
+homepage = "http://example.com"
+documentation = "http://readthedocs.org"
+repository = "http://github.com"
+changelog = "http://github.com/me/spam/blob/master/CHANGELOG.md"
+
+[project.scripts]
+spam-cli = "spam:main_cli"
+
+[project.gui-scripts]
+spam-gui = "spam:main_gui"
+
+[project.entry-points."spam.magical"]
+tomatoes = "spam:main_tomatoes"
+"""
+
+PEP621_EXAMPLE_SCRIPT = """
+def main_cli(): pass
+def main_gui(): pass
+def main_tomatoes(): pass
+"""
+
+
+def _pep621_example_project(tmp_path, readme="README.rst"):
+ pyproject = tmp_path / "pyproject.toml"
+ text = PEP621_EXAMPLE
+ replacements = {'readme = "README.rst"': f'readme = "{readme}"'}
+ for orig, subst in replacements.items():
+ text = text.replace(orig, subst)
+ pyproject.write_text(text)
+
+ (tmp_path / readme).write_text("hello world")
+ (tmp_path / "LICENSE.txt").write_text("--- LICENSE stub ---")
+ (tmp_path / "spam.py").write_text(PEP621_EXAMPLE_SCRIPT)
+ return pyproject
+
+
+def test_pep621_example(tmp_path):
+ """Make sure the example in PEP 621 works"""
+ pyproject = _pep621_example_project(tmp_path)
+ dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+ assert dist.metadata.license == "--- LICENSE stub ---"
+ assert set(dist.metadata.license_files) == {"LICENSE.txt"}
+
+
+@pytest.mark.parametrize(
+ "readme, ctype",
+ [
+ ("Readme.txt", "text/plain"),
+ ("readme.md", "text/markdown"),
+ ("text.rst", "text/x-rst"),
+ ]
+)
+def test_readme_content_type(tmp_path, readme, ctype):
+ pyproject = _pep621_example_project(tmp_path, readme)
+ dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+ assert dist.metadata.long_description_content_type == ctype
+
+
+def test_undefined_content_type(tmp_path):
+ pyproject = _pep621_example_project(tmp_path, "README.tex")
+ with pytest.raises(ValueError, match="Undefined content type for README.tex"):
+ pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+
+
+def test_no_explicit_content_type_for_missing_extension(tmp_path):
+ pyproject = _pep621_example_project(tmp_path, "README")
+ dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+ assert dist.metadata.long_description_content_type is None
+
+
+# TODO: After PEP 639 is accepted, we have to move the license-files
+# to the `project` table instead of `tool.setuptools`
+def test_license_and_license_files(tmp_path):
+ pyproject = _pep621_example_project(tmp_path, "README")
+ text = pyproject.read_text(encoding="utf-8")
+
+ # Sanity-check
+ assert 'license = {file = "LICENSE.txt"}' in text
+ assert "[tool.setuptools]" not in text
+
+ text += '\n[tool.setuptools]\nlicense-files = ["_FILE*"]\n'
+ pyproject.write_text(text, encoding="utf-8")
+ (tmp_path / "_FILE.txt").touch()
+ (tmp_path / "_FILE.rst").touch()
+
+ # Would normally match the `license_files` glob patterns, but we want to exclude it
+ # by being explicit. On the other hand, its contents should be added to `license`
+ (tmp_path / "LICENSE.txt").write_text("LicenseRef-Proprietary\n", encoding="utf-8")
+
+ dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+ assert set(dist.metadata.license_files) == {"_FILE.rst", "_FILE.txt"}
+ assert dist.metadata.license == "LicenseRef-Proprietary\n"
+
+
+# --- Auxiliary Functions ---
+
+
+NAME_REMOVE = ("http://", "https://", "github.com/", "/raw/")
+
+
+def retrieve_file(url, download_dir):
+ file_name = url.strip()
+ for part in NAME_REMOVE:
+ file_name = file_name.replace(part, '').strip().strip('/:').strip()
+ file_name = re.sub(r"[^\-_\.\w\d]+", "_", file_name)
+ path = Path(download_dir, file_name)
+ if not path.exists():
+ download_dir.mkdir(exist_ok=True, parents=True)
+ download(url, path)
+ return path
+
+
+def download(url, dest):
+ with urlopen(url) as f:
+ data = f.read()
+
+ with open(dest, "wb") as f:
+ f.write(data)
+
+ assert Path(dest).exists()
+
+
+def core_metadata(dist) -> str:
+ with io.StringIO() as buffer:
+ dist.metadata.write_pkg_file(buffer)
+ value = "\n".join(buffer.getvalue().strip().splitlines())
+
+ # ---- DIFF NORMALISATION ----
+ # PEP 621 is very particular about author/maintainer metadata conversion, so skip
+ value = re.sub(r"^(Author|Maintainer)(-email)?:.*$", "", value, flags=re.M)
+ # May be redundant with Home-page
+ value = re.sub(r"^Project-URL: Homepage,.*$", "", value, flags=re.M)
+ # May be missing in original (relying on default) but backfilled in the TOML
+ value = re.sub(r"^Description-Content-Type:.*$", "", value, flags=re.M)
+ # ini2toml can automatically convert `tests_require` to `testing` extra
+ value = value.replace("Provides-Extra: testing\n", "")
+ # Remove empty lines
+ value = re.sub(r"^\s*$", "", value, flags=re.M)
+ value = re.sub(r"^\n", "", value, flags=re.M)
+
+ return value
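
For reference, a minimal sketch of the setup.cfg-to-TOML translation step the test above relies on (the config text here is hypothetical):

    from ini2toml.api import Translator

    setup_cfg = "[metadata]\nname = myproj\nversion = 0.0.1\n"
    toml_config = Translator().translate(setup_cfg, "setup.cfg")
    # `toml_config` is pyproject.toml text of the kind the test feeds to
    # pyprojecttoml.apply_configuration above.
    print(toml_config)
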
diff --git a/setuptools/tests/config/test_expand.py b/setuptools/tests/config/test_expand.py
new file mode 100644
index 0000000..3a59edb
--- /dev/null
+++ b/setuptools/tests/config/test_expand.py
@@ -0,0 +1,155 @@
+import os
+
+import pytest
+
+from distutils.errors import DistutilsOptionError
+from setuptools.config import expand
+from setuptools.discovery import find_package_path
+
+
+def write_files(files, root_dir):
+ for file, content in files.items():
+ path = root_dir / file
+ path.parent.mkdir(exist_ok=True, parents=True)
+ path.write_text(content)
+
+
+def test_glob_relative(tmp_path, monkeypatch):
+ files = {
+ "dir1/dir2/dir3/file1.txt",
+ "dir1/dir2/file2.txt",
+ "dir1/file3.txt",
+ "a.ini",
+ "b.ini",
+ "dir1/c.ini",
+ "dir1/dir2/a.ini",
+ }
+
+ write_files({k: "" for k in files}, tmp_path)
+ patterns = ["**/*.txt", "[ab].*", "**/[ac].ini"]
+ monkeypatch.chdir(tmp_path)
+ assert set(expand.glob_relative(patterns)) == files
+ # Make sure the same APIs work outside cwd
+ assert set(expand.glob_relative(patterns, tmp_path)) == files
+
+
+def test_read_files(tmp_path, monkeypatch):
+
+ dir_ = tmp_path / "dir_"
+ (tmp_path / "_dir").mkdir(exist_ok=True)
+ (tmp_path / "a.txt").touch()
+ files = {
+ "a.txt": "a",
+ "dir1/b.txt": "b",
+ "dir1/dir2/c.txt": "c"
+ }
+ write_files(files, dir_)
+
+ with monkeypatch.context() as m:
+ m.chdir(dir_)
+ assert expand.read_files(list(files)) == "a\nb\nc"
+
+ cannot_access_msg = r"Cannot access '.*\.\..a\.txt'"
+ with pytest.raises(DistutilsOptionError, match=cannot_access_msg):
+ expand.read_files(["../a.txt"])
+
+ # Make sure the same APIs work outside cwd
+ assert expand.read_files(list(files), dir_) == "a\nb\nc"
+ with pytest.raises(DistutilsOptionError, match=cannot_access_msg):
+ expand.read_files(["../a.txt"], dir_)
+
+
+class TestReadAttr:
+ def test_read_attr(self, tmp_path, monkeypatch):
+ files = {
+ "pkg/__init__.py": "",
+ "pkg/sub/__init__.py": "VERSION = '0.1.1'",
+ "pkg/sub/mod.py": (
+ "VALUES = {'a': 0, 'b': {42}, 'c': (0, 1, 1)}\n"
+ "raise SystemExit(1)"
+ ),
+ }
+ write_files(files, tmp_path)
+
+ with monkeypatch.context() as m:
+ m.chdir(tmp_path)
+ # Make sure it can read the attr statically without evaluating the module
+ assert expand.read_attr('pkg.sub.VERSION') == '0.1.1'
+ values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'})
+
+ assert values['a'] == 0
+ assert values['b'] == {42}
+
+ # Make sure the same APIs work outside cwd
+ assert expand.read_attr('pkg.sub.VERSION', root_dir=tmp_path) == '0.1.1'
+ values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'}, tmp_path)
+ assert values['c'] == (0, 1, 1)
+
+ def test_import_order(self, tmp_path):
+ """
+ Sometimes the import machinery will import the parent package of a nested
+ module, which triggers side-effects and might create problems (see issue #3176)
+
+ ``read_attr`` should bypass these limitations by resolving modules statically
+ (via ast.literal_eval).
+ """
+ files = {
+ "src/pkg/__init__.py": "from .main import func\nfrom .about import version",
+ "src/pkg/main.py": "import super_complicated_dep\ndef func(): return 42",
+ "src/pkg/about.py": "version = '42'",
+ }
+ write_files(files, tmp_path)
+ attr_desc = "pkg.about.version"
+ package_dir = {"": "src"}
+ # `import super_complicated_dep` should not run, otherwise the build fails
+ assert expand.read_attr(attr_desc, package_dir, tmp_path) == "42"
+
+
+@pytest.mark.parametrize(
+ 'package_dir, file, module, return_value',
+ [
+ ({"": "src"}, "src/pkg/main.py", "pkg.main", 42),
+ ({"pkg": "lib"}, "lib/main.py", "pkg.main", 13),
+ ({}, "single_module.py", "single_module", 70),
+ ({}, "flat_layout/pkg.py", "flat_layout.pkg", 836),
+ ]
+)
+def test_resolve_class(tmp_path, package_dir, file, module, return_value):
+ files = {file: f"class Custom:\n def testing(self): return {return_value}"}
+ write_files(files, tmp_path)
+ cls = expand.resolve_class(f"{module}.Custom", package_dir, tmp_path)
+ assert cls().testing() == return_value
+
+
+@pytest.mark.parametrize(
+ 'args, pkgs',
+ [
+ ({"where": ["."], "namespaces": False}, {"pkg", "other"}),
+ ({"where": [".", "dir1"], "namespaces": False}, {"pkg", "other", "dir2"}),
+ ({"namespaces": True}, {"pkg", "other", "dir1", "dir1.dir2"}),
+ ({}, {"pkg", "other", "dir1", "dir1.dir2"}), # default value for `namespaces`
+ ]
+)
+def test_find_packages(tmp_path, monkeypatch, args, pkgs):
+ files = {
+ "pkg/__init__.py",
+ "other/__init__.py",
+ "dir1/dir2/__init__.py",
+ }
+ write_files({k: "" for k in files}, tmp_path)
+
+ package_dir = {}
+ kwargs = {"root_dir": tmp_path, "fill_package_dir": package_dir, **args}
+ where = kwargs.get("where", ["."])
+ assert set(expand.find_packages(**kwargs)) == pkgs
+ for pkg in pkgs:
+ pkg_path = find_package_path(pkg, package_dir, tmp_path)
+ assert os.path.exists(pkg_path)
+
+ # Make sure the same APIs work outside cwd
+ where = [
+ str((tmp_path / p).resolve()).replace(os.sep, "/") # ensure posix-style paths
+ for p in args.pop("where", ["."])
+ ]
+
+ assert set(expand.find_packages(where=where, **args)) == pkgs
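
A small, self-contained sketch of the static attribute resolution exercised by these tests (the directory layout is hypothetical):

    from pathlib import Path
    from setuptools.config import expand

    root = Path("/tmp/demo-project")  # hypothetical project root
    (root / "src/pkg").mkdir(parents=True, exist_ok=True)
    (root / "src/pkg/__init__.py").write_text("VERSION = '1.0'")

    # Resolved statically (via ast.literal_eval), so the module is never imported.
    assert expand.read_attr("pkg.VERSION", {"": "src"}, root) == "1.0"
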
diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py
new file mode 100644
index 0000000..1b5b90e
--- /dev/null
+++ b/setuptools/tests/config/test_pyprojecttoml.py
@@ -0,0 +1,350 @@
+import logging
+from configparser import ConfigParser
+from inspect import cleandoc
+
+import pytest
+import tomli_w
+from path import Path as _Path
+
+from setuptools.config.pyprojecttoml import (
+ read_configuration,
+ expand_configuration,
+ validate,
+)
+from setuptools.errors import OptionError
+
+
+import setuptools # noqa -- force distutils.core to be patched
+import distutils.core
+
+EXAMPLE = """
+[project]
+name = "myproj"
+keywords = ["some", "key", "words"]
+dynamic = ["version", "readme"]
+requires-python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+dependencies = [
+ 'importlib-metadata>=0.12;python_version<"3.8"',
+ 'importlib-resources>=1.0;python_version<"3.7"',
+ 'pathlib2>=2.3.3,<3;python_version < "3.4" and sys.platform != "win32"',
+]
+
+[project.optional-dependencies]
+docs = [
+ "sphinx>=3",
+ "sphinx-argparse>=0.2.5",
+ "sphinx-rtd-theme>=0.4.3",
+]
+testing = [
+ "pytest>=1",
+ "coverage>=3,<5",
+]
+
+[project.scripts]
+exec = "pkg.__main__:exec"
+
+[build-system]
+requires = ["setuptools", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools]
+package-dir = {"" = "src"}
+zip-safe = true
+platforms = ["any"]
+
+[tool.setuptools.packages.find]
+where = ["src"]
+
+[tool.setuptools.cmdclass]
+sdist = "pkg.mod.CustomSdist"
+
+[tool.setuptools.dynamic.version]
+attr = "pkg.__version__.VERSION"
+
+[tool.setuptools.dynamic.readme]
+file = ["README.md"]
+content-type = "text/markdown"
+
+[tool.setuptools.package-data]
+"*" = ["*.txt"]
+
+[tool.setuptools.data-files]
+"data" = ["_files/*.txt"]
+
+[tool.distutils.sdist]
+formats = "gztar"
+
+[tool.distutils.bdist_wheel]
+universal = true
+"""
+
+
+def create_example(path, pkg_root):
+ pyproject = path / "pyproject.toml"
+
+ files = [
+ f"{pkg_root}/pkg/__init__.py",
+ "_files/file.txt",
+ ]
+ if pkg_root != ".": # flat-layout will raise error for multi-package dist
+ # Ensure namespaces are discovered
+ files.append(f"{pkg_root}/other/nested/__init__.py")
+
+ for file in files:
+ (path / file).parent.mkdir(exist_ok=True, parents=True)
+ (path / file).touch()
+
+ pyproject.write_text(EXAMPLE)
+ (path / "README.md").write_text("hello world")
+ (path / f"{pkg_root}/pkg/mod.py").write_text("class CustomSdist: pass")
+ (path / f"{pkg_root}/pkg/__version__.py").write_text("VERSION = (3, 10)")
+ (path / f"{pkg_root}/pkg/__main__.py").write_text("def exec(): print('hello')")
+
+
+def verify_example(config, path, pkg_root):
+ pyproject = path / "pyproject.toml"
+ pyproject.write_text(tomli_w.dumps(config), encoding="utf-8")
+ expanded = expand_configuration(config, path)
+ expanded_project = expanded["project"]
+ assert read_configuration(pyproject, expand=True) == expanded
+ assert expanded_project["version"] == "3.10"
+ assert expanded_project["readme"]["text"] == "hello world"
+ assert "packages" in expanded["tool"]["setuptools"]
+ if pkg_root == ".":
+ # Auto-discovery will raise an error for a multi-package dist
+ assert set(expanded["tool"]["setuptools"]["packages"]) == {"pkg"}
+ else:
+ assert set(expanded["tool"]["setuptools"]["packages"]) == {
+ "pkg",
+ "other",
+ "other.nested",
+ }
+ assert expanded["tool"]["setuptools"]["include-package-data"] is True
+ assert "" in expanded["tool"]["setuptools"]["package-data"]
+ assert "*" not in expanded["tool"]["setuptools"]["package-data"]
+ assert expanded["tool"]["setuptools"]["data-files"] == [
+ ("data", ["_files/file.txt"])
+ ]
+
+
+def test_read_configuration(tmp_path):
+ create_example(tmp_path, "src")
+ pyproject = tmp_path / "pyproject.toml"
+
+ config = read_configuration(pyproject, expand=False)
+ assert config["project"].get("version") is None
+ assert config["project"].get("readme") is None
+
+ verify_example(config, tmp_path, "src")
+
+
+@pytest.mark.parametrize(
+ "pkg_root, opts",
+ [
+ (".", {}),
+ ("src", {}),
+ ("lib", {"packages": {"find": {"where": ["lib"]}}}),
+ ],
+)
+def test_discovered_package_dir_with_attr_directive_in_config(tmp_path, pkg_root, opts):
+ create_example(tmp_path, pkg_root)
+
+ pyproject = tmp_path / "pyproject.toml"
+
+ config = read_configuration(pyproject, expand=False)
+ assert config["project"].get("version") is None
+ assert config["project"].get("readme") is None
+ config["tool"]["setuptools"].pop("packages", None)
+ config["tool"]["setuptools"].pop("package-dir", None)
+
+ config["tool"]["setuptools"].update(opts)
+ verify_example(config, tmp_path, pkg_root)
+
+
+ENTRY_POINTS = {
+ "console_scripts": {"a": "mod.a:func"},
+ "gui_scripts": {"b": "mod.b:func"},
+ "other": {"c": "mod.c:func [extra]"},
+}
+
+
+def test_expand_entry_point(tmp_path):
+ entry_points = ConfigParser()
+ entry_points.read_dict(ENTRY_POINTS)
+ with open(tmp_path / "entry-points.txt", "w") as f:
+ entry_points.write(f)
+
+ tool = {"setuptools": {"dynamic": {"entry-points": {"file": "entry-points.txt"}}}}
+ project = {"dynamic": ["scripts", "gui-scripts", "entry-points"]}
+ pyproject = {"project": project, "tool": tool}
+ expanded = expand_configuration(pyproject, tmp_path)
+ expanded_project = expanded["project"]
+ assert len(expanded_project["scripts"]) == 1
+ assert expanded_project["scripts"]["a"] == "mod.a:func"
+ assert len(expanded_project["gui-scripts"]) == 1
+ assert expanded_project["gui-scripts"]["b"] == "mod.b:func"
+ assert len(expanded_project["entry-points"]) == 1
+ assert expanded_project["entry-points"]["other"]["c"] == "mod.c:func [extra]"
+
+ project = {"dynamic": ["entry-points"]}
+ pyproject = {"project": project, "tool": tool}
+ expanded = expand_configuration(pyproject, tmp_path)
+ expanded_project = expanded["project"]
+ assert len(expanded_project["entry-points"]) == 3
+ assert "scripts" not in expanded_project
+ assert "gui-scripts" not in expanded_project
+
+
+class TestClassifiers:
+ def test_dynamic(self, tmp_path):
+ # Let's create a project example that has dynamic classifiers
+ # coming from a txt file.
+ create_example(tmp_path, "src")
+ classifiers = """\
+ Framework :: Flask
+ Programming Language :: Haskell
+ """
+ (tmp_path / "classifiers.txt").write_text(cleandoc(classifiers))
+
+ pyproject = tmp_path / "pyproject.toml"
+ config = read_configuration(pyproject, expand=False)
+ dynamic = config["project"]["dynamic"]
+ config["project"]["dynamic"] = list({*dynamic, "classifiers"})
+ dynamic_config = config["tool"]["setuptools"]["dynamic"]
+ dynamic_config["classifiers"] = {"file": "classifiers.txt"}
+
+ # When the configuration is expanded,
+ # each line of the file should be a different classifier.
+ validate(config, pyproject)
+ expanded = expand_configuration(config, tmp_path)
+
+ assert set(expanded["project"]["classifiers"]) == {
+ "Framework :: Flask",
+ "Programming Language :: Haskell",
+ }
+
+ def test_dynamic_without_config(self, tmp_path):
+ config = """
+ [project]
+ name = "myproj"
+ version = '42'
+ dynamic = ["classifiers"]
+ """
+
+ pyproject = tmp_path / "pyproject.toml"
+ pyproject.write_text(cleandoc(config))
+ with pytest.raises(OptionError, match="No configuration found"):
+ read_configuration(pyproject)
+
+ def test_dynamic_without_file(self, tmp_path):
+ config = """
+ [project]
+ name = "myproj"
+ version = '42'
+ dynamic = ["classifiers"]
+
+ [tool.setuptools.dynamic]
+ classifiers = {file = ["classifiers.txt"]}
+ """
+
+ pyproject = tmp_path / "pyproject.toml"
+ pyproject.write_text(cleandoc(config))
+ with pytest.warns(UserWarning, match="File .*classifiers.txt. cannot be found"):
+ expanded = read_configuration(pyproject)
+ assert not expanded["project"]["classifiers"]
+
+
+@pytest.mark.parametrize(
+ "example",
+ (
+ """
+ [project]
+ name = "myproj"
+ version = "1.2"
+
+ [my-tool.that-disrespect.pep518]
+ value = 42
+ """,
+ ),
+)
+def test_ignore_unrelated_config(tmp_path, example):
+ pyproject = tmp_path / "pyproject.toml"
+ pyproject.write_text(cleandoc(example))
+
+ # Make sure no error is raised due to 3rd party configs in pyproject.toml
+ assert read_configuration(pyproject) is not None
+
+
+@pytest.mark.parametrize(
+ "example, error_msg, value_shown_in_debug",
+ [
+ (
+ """
+ [project]
+ name = "myproj"
+ version = "1.2"
+ requires = ['pywin32; platform_system=="Windows"' ]
+ """,
+ "configuration error: `project` must not contain {'requires'} properties",
+ '"requires": ["pywin32; platform_system==\\"Windows\\""]',
+ ),
+ ],
+)
+def test_invalid_example(tmp_path, caplog, example, error_msg, value_shown_in_debug):
+ caplog.set_level(logging.DEBUG)
+ pyproject = tmp_path / "pyproject.toml"
+ pyproject.write_text(cleandoc(example))
+
+ caplog.clear()
+ with pytest.raises(ValueError, match="invalid pyproject.toml"):
+ read_configuration(pyproject)
+
+ # Make sure the logs give guidance to the user
+ error_log = caplog.record_tuples[0]
+ assert error_log[1] == logging.ERROR
+ assert error_msg in error_log[2]
+
+ debug_log = caplog.record_tuples[1]
+ assert debug_log[1] == logging.DEBUG
+ debug_msg = "".join(line.strip() for line in debug_log[2].splitlines())
+ assert value_shown_in_debug in debug_msg
+
+
+@pytest.mark.parametrize("config", ("", "[tool.something]\nvalue = 42"))
+def test_empty(tmp_path, config):
+ pyproject = tmp_path / "pyproject.toml"
+ pyproject.write_text(config)
+
+ # Make sure no error is raised
+ assert read_configuration(pyproject) == {}
+
+
+@pytest.mark.parametrize("config", ("[project]\nname = 'myproj'\nversion='42'\n",))
+def test_include_package_data_by_default(tmp_path, config):
+ """Builds with ``pyproject.toml`` should consider ``include-package-data=True`` as
+ default.
+ """
+ pyproject = tmp_path / "pyproject.toml"
+ pyproject.write_text(config)
+
+ config = read_configuration(pyproject)
+ assert config["tool"]["setuptools"]["include-package-data"] is True
+
+
+def test_include_package_data_in_setuppy(tmp_path):
+ """Builds with ``pyproject.toml`` should consider ``include_package_data`` set in
+ ``setup.py``.
+
+ See https://github.com/pypa/setuptools/issues/3197#issuecomment-1079023889
+ """
+ pyproject = tmp_path / "pyproject.toml"
+ pyproject.write_text("[project]\nname = 'myproj'\nversion='42'\n")
+ setuppy = tmp_path / "setup.py"
+ setuppy.write_text("__import__('setuptools').setup(include_package_data=False)")
+
+ with _Path(tmp_path):
+ dist = distutils.core.run_setup("setup.py", {}, stop_after="config")
+
+ assert dist.get_name() == "myproj"
+ assert dist.get_version() == "42"
+ assert dist.include_package_data is False
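
A hedged sketch of the read/expand entry point used throughout these tests (the path and file contents are illustrative):

    from pathlib import Path
    from setuptools.config.pyprojecttoml import read_configuration

    pyproject = Path("/tmp/demo/pyproject.toml")  # hypothetical path
    pyproject.parent.mkdir(parents=True, exist_ok=True)
    pyproject.write_text("[project]\nname = 'myproj'\nversion = '42'\n")

    config = read_configuration(pyproject, expand=True)
    assert config["project"]["name"] == "myproj"
    # Defaults such as include-package-data are filled in during expansion.
    assert config["tool"]["setuptools"]["include-package-data"] is True
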
diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py
new file mode 100644
index 0000000..1f35f83
--- /dev/null
+++ b/setuptools/tests/config/test_setupcfg.py
@@ -0,0 +1,924 @@
+import configparser
+import contextlib
+import inspect
+from pathlib import Path
+from unittest.mock import Mock, patch
+
+import pytest
+
+from distutils.errors import DistutilsOptionError, DistutilsFileError
+from setuptools.dist import Distribution, _Distribution
+from setuptools.config.setupcfg import ConfigHandler, read_configuration
+from ..textwrap import DALS
+
+
+class ErrConfigHandler(ConfigHandler):
+ """Erroneous handler. Fails to implement required methods."""
+ section_prefix = "**err**"
+
+
+def make_package_dir(name, base_dir, ns=False):
+ dir_package = base_dir
+ for dir_name in name.split('/'):
+ dir_package = dir_package.mkdir(dir_name)
+ init_file = None
+ if not ns:
+ init_file = dir_package.join('__init__.py')
+ init_file.write('')
+ return dir_package, init_file
+
+
+def fake_env(
+ tmpdir, setup_cfg, setup_py=None, encoding='ascii', package_path='fake_package'
+):
+
+ if setup_py is None:
+ setup_py = 'from setuptools import setup\n' 'setup()\n'
+
+ tmpdir.join('setup.py').write(setup_py)
+ config = tmpdir.join('setup.cfg')
+ config.write(setup_cfg.encode(encoding), mode='wb')
+
+ package_dir, init_file = make_package_dir(package_path, tmpdir)
+
+ init_file.write(
+ 'VERSION = (1, 2, 3)\n'
+ '\n'
+ 'VERSION_MAJOR = 1'
+ '\n'
+ 'def get_version():\n'
+ ' return [3, 4, 5, "dev"]\n'
+ '\n'
+ )
+
+ return package_dir, config
+
+
+@contextlib.contextmanager
+def get_dist(tmpdir, kwargs_initial=None, parse=True):
+ kwargs_initial = kwargs_initial or {}
+
+ with tmpdir.as_cwd():
+ dist = Distribution(kwargs_initial)
+ dist.script_name = 'setup.py'
+ parse and dist.parse_config_files()
+
+ yield dist
+
+
+def test_parsers_implemented():
+
+ with pytest.raises(NotImplementedError):
+ handler = ErrConfigHandler(None, {}, False, Mock())
+ handler.parsers
+
+
+class TestConfigurationReader:
+ def test_basic(self, tmpdir):
+ _, config = fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'version = 10.1.1\n'
+ 'keywords = one, two\n'
+ '\n'
+ '[options]\n'
+ 'scripts = bin/a.py, bin/b.py\n',
+ )
+ config_dict = read_configuration('%s' % config)
+ assert config_dict['metadata']['version'] == '10.1.1'
+ assert config_dict['metadata']['keywords'] == ['one', 'two']
+ assert config_dict['options']['scripts'] == ['bin/a.py', 'bin/b.py']
+
+ def test_no_config(self, tmpdir):
+ with pytest.raises(DistutilsFileError):
+ read_configuration('%s' % tmpdir.join('setup.cfg'))
+
+ def test_ignore_errors(self, tmpdir):
+ _, config = fake_env(
+ tmpdir,
+ '[metadata]\n' 'version = attr: none.VERSION\n' 'keywords = one, two\n',
+ )
+ with pytest.raises(ImportError):
+ read_configuration('%s' % config)
+
+ config_dict = read_configuration('%s' % config, ignore_option_errors=True)
+
+ assert config_dict['metadata']['keywords'] == ['one', 'two']
+ assert 'version' not in config_dict['metadata']
+
+ config.remove()
+
+
+class TestMetadata:
+ def test_basic(self, tmpdir):
+
+ fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'version = 10.1.1\n'
+ 'description = Some description\n'
+ 'long_description_content_type = text/something\n'
+ 'long_description = file: README\n'
+ 'name = fake_name\n'
+ 'keywords = one, two\n'
+ 'provides = package, package.sub\n'
+ 'license = otherlic\n'
+ 'download_url = http://test.test.com/test/\n'
+ 'maintainer_email = test@test.com\n',
+ )
+
+ tmpdir.join('README').write('readme contents\nline2')
+
+ meta_initial = {
+ # This will be used so `otherlic` won't replace it.
+ 'license': 'BSD 3-Clause License',
+ }
+
+ with get_dist(tmpdir, meta_initial) as dist:
+ metadata = dist.metadata
+
+ assert metadata.version == '10.1.1'
+ assert metadata.description == 'Some description'
+ assert metadata.long_description_content_type == 'text/something'
+ assert metadata.long_description == 'readme contents\nline2'
+ assert metadata.provides == ['package', 'package.sub']
+ assert metadata.license == 'BSD 3-Clause License'
+ assert metadata.name == 'fake_name'
+ assert metadata.keywords == ['one', 'two']
+ assert metadata.download_url == 'http://test.test.com/test/'
+ assert metadata.maintainer_email == 'test@test.com'
+
+ def test_license_cfg(self, tmpdir):
+ fake_env(
+ tmpdir,
+ DALS(
+ """
+ [metadata]
+ name=foo
+ version=0.0.1
+ license=Apache 2.0
+ """
+ ),
+ )
+
+ with get_dist(tmpdir) as dist:
+ metadata = dist.metadata
+
+ assert metadata.name == "foo"
+ assert metadata.version == "0.0.1"
+ assert metadata.license == "Apache 2.0"
+
+ def test_file_mixed(self, tmpdir):
+
+ fake_env(
+ tmpdir,
+ '[metadata]\n' 'long_description = file: README.rst, CHANGES.rst\n' '\n',
+ )
+
+ tmpdir.join('README.rst').write('readme contents\nline2')
+ tmpdir.join('CHANGES.rst').write('changelog contents\nand stuff')
+
+ with get_dist(tmpdir) as dist:
+ assert dist.metadata.long_description == (
+ 'readme contents\nline2\n' 'changelog contents\nand stuff'
+ )
+
+ def test_file_sandboxed(self, tmpdir):
+
+ tmpdir.ensure("README")
+ project = tmpdir.join('depth1', 'depth2')
+ project.ensure(dir=True)
+ fake_env(project, '[metadata]\n' 'long_description = file: ../../README\n')
+
+ with get_dist(project, parse=False) as dist:
+ with pytest.raises(DistutilsOptionError):
+ dist.parse_config_files() # file: out of sandbox
+
+ def test_aliases(self, tmpdir):
+
+ fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'author_email = test@test.com\n'
+ 'home_page = http://test.test.com/test/\n'
+ 'summary = Short summary\n'
+ 'platform = a, b\n'
+ 'classifier =\n'
+ ' Framework :: Django\n'
+ ' Programming Language :: Python :: 3.5\n',
+ )
+
+ with get_dist(tmpdir) as dist:
+ metadata = dist.metadata
+ assert metadata.author_email == 'test@test.com'
+ assert metadata.url == 'http://test.test.com/test/'
+ assert metadata.description == 'Short summary'
+ assert metadata.platforms == ['a', 'b']
+ assert metadata.classifiers == [
+ 'Framework :: Django',
+ 'Programming Language :: Python :: 3.5',
+ ]
+
+ def test_multiline(self, tmpdir):
+
+ fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'name = fake_name\n'
+ 'keywords =\n'
+ ' one\n'
+ ' two\n'
+ 'classifiers =\n'
+ ' Framework :: Django\n'
+ ' Programming Language :: Python :: 3.5\n',
+ )
+ with get_dist(tmpdir) as dist:
+ metadata = dist.metadata
+ assert metadata.keywords == ['one', 'two']
+ assert metadata.classifiers == [
+ 'Framework :: Django',
+ 'Programming Language :: Python :: 3.5',
+ ]
+
+ def test_dict(self, tmpdir):
+
+ fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'project_urls =\n'
+ ' Link One = https://example.com/one/\n'
+ ' Link Two = https://example.com/two/\n',
+ )
+ with get_dist(tmpdir) as dist:
+ metadata = dist.metadata
+ assert metadata.project_urls == {
+ 'Link One': 'https://example.com/one/',
+ 'Link Two': 'https://example.com/two/',
+ }
+
+ def test_version(self, tmpdir):
+
+ package_dir, config = fake_env(
+ tmpdir, '[metadata]\n' 'version = attr: fake_package.VERSION\n'
+ )
+
+ sub_a = package_dir.mkdir('subpkg_a')
+ sub_a.join('__init__.py').write('')
+ sub_a.join('mod.py').write('VERSION = (2016, 11, 26)')
+
+ sub_b = package_dir.mkdir('subpkg_b')
+ sub_b.join('__init__.py').write('')
+ sub_b.join('mod.py').write(
+ 'import third_party_module\n' 'VERSION = (2016, 11, 26)'
+ )
+
+ with get_dist(tmpdir) as dist:
+ assert dist.metadata.version == '1.2.3'
+
+ config.write('[metadata]\n' 'version = attr: fake_package.get_version\n')
+ with get_dist(tmpdir) as dist:
+ assert dist.metadata.version == '3.4.5.dev'
+
+ config.write('[metadata]\n' 'version = attr: fake_package.VERSION_MAJOR\n')
+ with get_dist(tmpdir) as dist:
+ assert dist.metadata.version == '1'
+
+ config.write(
+ '[metadata]\n' 'version = attr: fake_package.subpkg_a.mod.VERSION\n'
+ )
+ with get_dist(tmpdir) as dist:
+ assert dist.metadata.version == '2016.11.26'
+
+ config.write(
+ '[metadata]\n' 'version = attr: fake_package.subpkg_b.mod.VERSION\n'
+ )
+ with get_dist(tmpdir) as dist:
+ assert dist.metadata.version == '2016.11.26'
+
+ def test_version_file(self, tmpdir):
+
+ _, config = fake_env(
+ tmpdir, '[metadata]\n' 'version = file: fake_package/version.txt\n'
+ )
+ tmpdir.join('fake_package', 'version.txt').write('1.2.3\n')
+
+ with get_dist(tmpdir) as dist:
+ assert dist.metadata.version == '1.2.3'
+
+ tmpdir.join('fake_package', 'version.txt').write('1.2.3\n4.5.6\n')
+ with pytest.raises(DistutilsOptionError):
+ with get_dist(tmpdir) as dist:
+ dist.metadata.version
+
+ def test_version_with_package_dir_simple(self, tmpdir):
+
+ _, config = fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'version = attr: fake_package_simple.VERSION\n'
+ '[options]\n'
+ 'package_dir =\n'
+ ' = src\n',
+ package_path='src/fake_package_simple',
+ )
+
+ with get_dist(tmpdir) as dist:
+ assert dist.metadata.version == '1.2.3'
+
+ def test_version_with_package_dir_rename(self, tmpdir):
+
+ _, config = fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'version = attr: fake_package_rename.VERSION\n'
+ '[options]\n'
+ 'package_dir =\n'
+ ' fake_package_rename = fake_dir\n',
+ package_path='fake_dir',
+ )
+
+ with get_dist(tmpdir) as dist:
+ assert dist.metadata.version == '1.2.3'
+
+ def test_version_with_package_dir_complex(self, tmpdir):
+
+ _, config = fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'version = attr: fake_package_complex.VERSION\n'
+ '[options]\n'
+ 'package_dir =\n'
+ ' fake_package_complex = src/fake_dir\n',
+ package_path='src/fake_dir',
+ )
+
+ with get_dist(tmpdir) as dist:
+ assert dist.metadata.version == '1.2.3'
+
+ def test_unknown_meta_item(self, tmpdir):
+
+ fake_env(tmpdir, '[metadata]\n' 'name = fake_name\n' 'unknown = some\n')
+ with get_dist(tmpdir, parse=False) as dist:
+ dist.parse_config_files() # Skip unknown.
+
+ def test_usupported_section(self, tmpdir):
+
+ fake_env(tmpdir, '[metadata.some]\n' 'key = val\n')
+ with get_dist(tmpdir, parse=False) as dist:
+ with pytest.raises(DistutilsOptionError):
+ dist.parse_config_files()
+
+ def test_classifiers(self, tmpdir):
+ expected = set(
+ [
+ 'Framework :: Django',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.5',
+ ]
+ )
+
+ # From file.
+ _, config = fake_env(tmpdir, '[metadata]\n' 'classifiers = file: classifiers\n')
+
+ tmpdir.join('classifiers').write(
+ 'Framework :: Django\n'
+ 'Programming Language :: Python :: 3\n'
+ 'Programming Language :: Python :: 3.5\n'
+ )
+
+ with get_dist(tmpdir) as dist:
+ assert set(dist.metadata.classifiers) == expected
+
+ # From list notation
+ config.write(
+ '[metadata]\n'
+ 'classifiers =\n'
+ ' Framework :: Django\n'
+ ' Programming Language :: Python :: 3\n'
+ ' Programming Language :: Python :: 3.5\n'
+ )
+ with get_dist(tmpdir) as dist:
+ assert set(dist.metadata.classifiers) == expected
+
+ def test_deprecated_config_handlers(self, tmpdir):
+ fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'version = 10.1.1\n'
+ 'description = Some description\n'
+ 'requires = some, requirement\n',
+ )
+
+ with pytest.deprecated_call():
+ with get_dist(tmpdir) as dist:
+ metadata = dist.metadata
+
+ assert metadata.version == '10.1.1'
+ assert metadata.description == 'Some description'
+ assert metadata.requires == ['some', 'requirement']
+
+ def test_interpolation(self, tmpdir):
+ fake_env(tmpdir, '[metadata]\n' 'description = %(message)s\n')
+ with pytest.raises(configparser.InterpolationMissingOptionError):
+ with get_dist(tmpdir):
+ pass
+
+ def test_non_ascii_1(self, tmpdir):
+ fake_env(tmpdir, '[metadata]\n' 'description = éàïôñ\n', encoding='utf-8')
+ with get_dist(tmpdir):
+ pass
+
+ def test_non_ascii_3(self, tmpdir):
+ fake_env(tmpdir, '\n' '# -*- coding: invalid\n')
+ with get_dist(tmpdir):
+ pass
+
+ def test_non_ascii_4(self, tmpdir):
+ fake_env(
+ tmpdir,
+ '# -*- coding: utf-8\n' '[metadata]\n' 'description = éàïôñ\n',
+ encoding='utf-8',
+ )
+ with get_dist(tmpdir) as dist:
+ assert dist.metadata.description == 'éàïôñ'
+
+ def test_not_utf8(self, tmpdir):
+ """
+ Config files not encoded in UTF-8 will fail
+ """
+ fake_env(
+ tmpdir,
+ '# vim: set fileencoding=iso-8859-15 :\n'
+ '[metadata]\n'
+ 'description = éàïôñ\n',
+ encoding='iso-8859-15',
+ )
+ with pytest.raises(UnicodeDecodeError):
+ with get_dist(tmpdir):
+ pass
+
+ def test_warn_dash_deprecation(self, tmpdir):
+ # warn_dash_deprecation() is a method in setuptools.dist
+ # remove this test and the method when no longer needed
+ fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'author-email = test@test.com\n'
+ 'maintainer_email = foo@foo.com\n',
+ )
+ msg = (
+ "Usage of dash-separated 'author-email' will not be supported "
+ "in future versions. "
+ "Please use the underscore name 'author_email' instead"
+ )
+ with pytest.warns(UserWarning, match=msg):
+ with get_dist(tmpdir) as dist:
+ metadata = dist.metadata
+
+ assert metadata.author_email == 'test@test.com'
+ assert metadata.maintainer_email == 'foo@foo.com'
+
+ def test_make_option_lowercase(self, tmpdir):
+ # remove this test and the method make_option_lowercase() in setuptools.dist
+ # when no longer needed
+ fake_env(
+ tmpdir, '[metadata]\n' 'Name = foo\n' 'description = Some description\n'
+ )
+ msg = (
+ "Usage of uppercase key 'Name' in 'metadata' will be deprecated in "
+ "future versions. "
+ "Please use lowercase 'name' instead"
+ )
+ with pytest.warns(UserWarning, match=msg):
+ with get_dist(tmpdir) as dist:
+ metadata = dist.metadata
+
+ assert metadata.name == 'foo'
+ assert metadata.description == 'Some description'
+
+
+class TestOptions:
+ def test_basic(self, tmpdir):
+
+ fake_env(
+ tmpdir,
+ '[options]\n'
+ 'zip_safe = True\n'
+ 'include_package_data = yes\n'
+ 'package_dir = b=c, =src\n'
+ 'packages = pack_a, pack_b.subpack\n'
+ 'namespace_packages = pack1, pack2\n'
+ 'scripts = bin/one.py, bin/two.py\n'
+ 'eager_resources = bin/one.py, bin/two.py\n'
+ 'install_requires = docutils>=0.3; pack ==1.1, ==1.3; hey\n'
+ 'tests_require = mock==0.7.2; pytest\n'
+ 'setup_requires = docutils>=0.3; spack ==1.1, ==1.3; there\n'
+ 'dependency_links = http://some.com/here/1, '
+ 'http://some.com/there/2\n'
+ 'python_requires = >=1.0, !=2.8\n'
+ 'py_modules = module1, module2\n',
+ )
+ with get_dist(tmpdir) as dist:
+ assert dist.zip_safe
+ assert dist.include_package_data
+ assert dist.package_dir == {'': 'src', 'b': 'c'}
+ assert dist.packages == ['pack_a', 'pack_b.subpack']
+ assert dist.namespace_packages == ['pack1', 'pack2']
+ assert dist.scripts == ['bin/one.py', 'bin/two.py']
+ assert dist.dependency_links == (
+ ['http://some.com/here/1', 'http://some.com/there/2']
+ )
+ assert dist.install_requires == (
+ ['docutils>=0.3', 'pack==1.1,==1.3', 'hey']
+ )
+ assert dist.setup_requires == (
+ ['docutils>=0.3', 'spack ==1.1, ==1.3', 'there']
+ )
+ assert dist.tests_require == ['mock==0.7.2', 'pytest']
+ assert dist.python_requires == '>=1.0, !=2.8'
+ assert dist.py_modules == ['module1', 'module2']
+
+ def test_multiline(self, tmpdir):
+ fake_env(
+ tmpdir,
+ '[options]\n'
+ 'package_dir = \n'
+ ' b=c\n'
+ ' =src\n'
+ 'packages = \n'
+ ' pack_a\n'
+ ' pack_b.subpack\n'
+ 'namespace_packages = \n'
+ ' pack1\n'
+ ' pack2\n'
+ 'scripts = \n'
+ ' bin/one.py\n'
+ ' bin/two.py\n'
+ 'eager_resources = \n'
+ ' bin/one.py\n'
+ ' bin/two.py\n'
+ 'install_requires = \n'
+ ' docutils>=0.3\n'
+ ' pack ==1.1, ==1.3\n'
+ ' hey\n'
+ 'tests_require = \n'
+ ' mock==0.7.2\n'
+ ' pytest\n'
+ 'setup_requires = \n'
+ ' docutils>=0.3\n'
+ ' spack ==1.1, ==1.3\n'
+ ' there\n'
+ 'dependency_links = \n'
+ ' http://some.com/here/1\n'
+ ' http://some.com/there/2\n',
+ )
+ with get_dist(tmpdir) as dist:
+ assert dist.package_dir == {'': 'src', 'b': 'c'}
+ assert dist.packages == ['pack_a', 'pack_b.subpack']
+ assert dist.namespace_packages == ['pack1', 'pack2']
+ assert dist.scripts == ['bin/one.py', 'bin/two.py']
+ assert dist.dependency_links == (
+ ['http://some.com/here/1', 'http://some.com/there/2']
+ )
+ assert dist.install_requires == (
+ ['docutils>=0.3', 'pack==1.1,==1.3', 'hey']
+ )
+ assert dist.setup_requires == (
+ ['docutils>=0.3', 'spack ==1.1, ==1.3', 'there']
+ )
+ assert dist.tests_require == ['mock==0.7.2', 'pytest']
+
+ def test_package_dir_fail(self, tmpdir):
+ fake_env(tmpdir, '[options]\n' 'package_dir = a b\n')
+ with get_dist(tmpdir, parse=False) as dist:
+ with pytest.raises(DistutilsOptionError):
+ dist.parse_config_files()
+
+ def test_package_data(self, tmpdir):
+ fake_env(
+ tmpdir,
+ '[options.package_data]\n'
+ '* = *.txt, *.rst\n'
+ 'hello = *.msg\n'
+ '\n'
+ '[options.exclude_package_data]\n'
+ '* = fake1.txt, fake2.txt\n'
+ 'hello = *.dat\n',
+ )
+
+ with get_dist(tmpdir) as dist:
+ assert dist.package_data == {
+ '': ['*.txt', '*.rst'],
+ 'hello': ['*.msg'],
+ }
+ assert dist.exclude_package_data == {
+ '': ['fake1.txt', 'fake2.txt'],
+ 'hello': ['*.dat'],
+ }
+
+ def test_packages(self, tmpdir):
+ fake_env(tmpdir, '[options]\n' 'packages = find:\n')
+
+ with get_dist(tmpdir) as dist:
+ assert dist.packages == ['fake_package']
+
+ def test_find_directive(self, tmpdir):
+ dir_package, config = fake_env(tmpdir, '[options]\n' 'packages = find:\n')
+
+ dir_sub_one, _ = make_package_dir('sub_one', dir_package)
+ dir_sub_two, _ = make_package_dir('sub_two', dir_package)
+
+ with get_dist(tmpdir) as dist:
+ assert set(dist.packages) == set(
+ ['fake_package', 'fake_package.sub_two', 'fake_package.sub_one']
+ )
+
+ config.write(
+ '[options]\n'
+ 'packages = find:\n'
+ '\n'
+ '[options.packages.find]\n'
+ 'where = .\n'
+ 'include =\n'
+ ' fake_package.sub_one\n'
+ ' two\n'
+ )
+ with get_dist(tmpdir) as dist:
+ assert dist.packages == ['fake_package.sub_one']
+
+ config.write(
+ '[options]\n'
+ 'packages = find:\n'
+ '\n'
+ '[options.packages.find]\n'
+ 'exclude =\n'
+ ' fake_package.sub_one\n'
+ )
+ with get_dist(tmpdir) as dist:
+ assert set(dist.packages) == set(['fake_package', 'fake_package.sub_two'])
+
+ def test_find_namespace_directive(self, tmpdir):
+ dir_package, config = fake_env(
+ tmpdir, '[options]\n' 'packages = find_namespace:\n'
+ )
+
+ dir_sub_one, _ = make_package_dir('sub_one', dir_package)
+ dir_sub_two, _ = make_package_dir('sub_two', dir_package, ns=True)
+
+ with get_dist(tmpdir) as dist:
+ assert set(dist.packages) == {
+ 'fake_package',
+ 'fake_package.sub_two',
+ 'fake_package.sub_one',
+ }
+
+ config.write(
+ '[options]\n'
+ 'packages = find_namespace:\n'
+ '\n'
+ '[options.packages.find]\n'
+ 'where = .\n'
+ 'include =\n'
+ ' fake_package.sub_one\n'
+ ' two\n'
+ )
+ with get_dist(tmpdir) as dist:
+ assert dist.packages == ['fake_package.sub_one']
+
+ config.write(
+ '[options]\n'
+ 'packages = find_namespace:\n'
+ '\n'
+ '[options.packages.find]\n'
+ 'exclude =\n'
+ ' fake_package.sub_one\n'
+ )
+ with get_dist(tmpdir) as dist:
+ assert set(dist.packages) == {'fake_package', 'fake_package.sub_two'}
+
+ def test_extras_require(self, tmpdir):
+ fake_env(
+ tmpdir,
+ '[options.extras_require]\n'
+ 'pdf = ReportLab>=1.2; RXP\n'
+ 'rest = \n'
+ ' docutils>=0.3\n'
+ ' pack ==1.1, ==1.3\n',
+ )
+
+ with get_dist(tmpdir) as dist:
+ assert dist.extras_require == {
+ 'pdf': ['ReportLab>=1.2', 'RXP'],
+ 'rest': ['docutils>=0.3', 'pack==1.1,==1.3'],
+ }
+ assert dist.metadata.provides_extras == set(['pdf', 'rest'])
+
+ def test_dash_preserved_extras_require(self, tmpdir):
+ fake_env(tmpdir, '[options.extras_require]\n' 'foo-a = foo\n' 'foo_b = test\n')
+
+ with get_dist(tmpdir) as dist:
+ assert dist.extras_require == {'foo-a': ['foo'], 'foo_b': ['test']}
+
+ def test_entry_points(self, tmpdir):
+ _, config = fake_env(
+ tmpdir,
+ '[options.entry_points]\n'
+ 'group1 = point1 = pack.module:func, '
+ '.point2 = pack.module2:func_rest [rest]\n'
+ 'group2 = point3 = pack.module:func2\n',
+ )
+
+ with get_dist(tmpdir) as dist:
+ assert dist.entry_points == {
+ 'group1': [
+ 'point1 = pack.module:func',
+ '.point2 = pack.module2:func_rest [rest]',
+ ],
+ 'group2': ['point3 = pack.module:func2'],
+ }
+
+ expected = (
+ '[blogtool.parsers]\n'
+ '.rst = some.nested.module:SomeClass.some_classmethod[reST]\n'
+ )
+
+ tmpdir.join('entry_points').write(expected)
+
+ # From file.
+ config.write('[options]\n' 'entry_points = file: entry_points\n')
+
+ with get_dist(tmpdir) as dist:
+ assert dist.entry_points == expected
+
+ def test_case_sensitive_entry_points(self, tmpdir):
+ _, config = fake_env(
+ tmpdir,
+ '[options.entry_points]\n'
+ 'GROUP1 = point1 = pack.module:func, '
+ '.point2 = pack.module2:func_rest [rest]\n'
+ 'group2 = point3 = pack.module:func2\n',
+ )
+
+ with get_dist(tmpdir) as dist:
+ assert dist.entry_points == {
+ 'GROUP1': [
+ 'point1 = pack.module:func',
+ '.point2 = pack.module2:func_rest [rest]',
+ ],
+ 'group2': ['point3 = pack.module:func2'],
+ }
+
+ def test_data_files(self, tmpdir):
+ fake_env(
+ tmpdir,
+ '[options.data_files]\n'
+ 'cfg =\n'
+ ' a/b.conf\n'
+ ' c/d.conf\n'
+ 'data = e/f.dat, g/h.dat\n',
+ )
+
+ with get_dist(tmpdir) as dist:
+ expected = [
+ ('cfg', ['a/b.conf', 'c/d.conf']),
+ ('data', ['e/f.dat', 'g/h.dat']),
+ ]
+ assert sorted(dist.data_files) == sorted(expected)
+
+ def test_data_files_globby(self, tmpdir):
+ fake_env(
+ tmpdir,
+ '[options.data_files]\n'
+ 'cfg =\n'
+ ' a/b.conf\n'
+ ' c/d.conf\n'
+ 'data = *.dat\n'
+ 'icons = \n'
+ ' *.ico\n'
+ 'audio = \n'
+ ' *.wav\n'
+ ' sounds.db\n'
+ )
+
+ # Create dummy files for glob()'s sake:
+ tmpdir.join('a.dat').write('')
+ tmpdir.join('b.dat').write('')
+ tmpdir.join('c.dat').write('')
+ tmpdir.join('a.ico').write('')
+ tmpdir.join('b.ico').write('')
+ tmpdir.join('c.ico').write('')
+ tmpdir.join('beep.wav').write('')
+ tmpdir.join('boop.wav').write('')
+ tmpdir.join('sounds.db').write('')
+
+ with get_dist(tmpdir) as dist:
+ expected = [
+ ('cfg', ['a/b.conf', 'c/d.conf']),
+ ('data', ['a.dat', 'b.dat', 'c.dat']),
+ ('icons', ['a.ico', 'b.ico', 'c.ico']),
+ ('audio', ['beep.wav', 'boop.wav', 'sounds.db']),
+ ]
+ assert sorted(dist.data_files) == sorted(expected)
+
+ def test_python_requires_simple(self, tmpdir):
+ fake_env(
+ tmpdir,
+ DALS(
+ """
+ [options]
+ python_requires=>=2.7
+ """
+ ),
+ )
+ with get_dist(tmpdir) as dist:
+ dist.parse_config_files()
+
+ def test_python_requires_compound(self, tmpdir):
+ fake_env(
+ tmpdir,
+ DALS(
+ """
+ [options]
+ python_requires=>=2.7,!=3.0.*
+ """
+ ),
+ )
+ with get_dist(tmpdir) as dist:
+ dist.parse_config_files()
+
+ def test_python_requires_invalid(self, tmpdir):
+ fake_env(
+ tmpdir,
+ DALS(
+ """
+ [options]
+ python_requires=invalid
+ """
+ ),
+ )
+ with pytest.raises(Exception):
+ with get_dist(tmpdir) as dist:
+ dist.parse_config_files()
+
+ def test_cmdclass(self, tmpdir):
+ module_path = Path(tmpdir, "src/custom_build.py") # auto discovery for src
+ module_path.parent.mkdir(parents=True, exist_ok=True)
+ module_path.write_text(
+ "from distutils.core import Command\n"
+ "class CustomCmd(Command): pass\n"
+ )
+
+ setup_cfg = """
+ [options]
+ cmdclass =
+ customcmd = custom_build.CustomCmd
+ """
+ fake_env(tmpdir, inspect.cleandoc(setup_cfg))
+
+ with get_dist(tmpdir) as dist:
+ cmdclass = dist.cmdclass['customcmd']
+ assert cmdclass.__name__ == "CustomCmd"
+ assert cmdclass.__module__ == "custom_build"
+ assert module_path.samefile(inspect.getfile(cmdclass))
+
+
+saved_dist_init = _Distribution.__init__
+
+
+class TestExternalSetters:
+ # During creation of the setuptools Distribution() object, we call
+ # the init of the parent distutils Distribution object via
+ # _Distribution.__init__().
+ #
+ # It's possible distutils calls out to various keyword
+ # implementations (i.e. distutils.setup_keywords entry points)
+ # that may set a range of variables.
+ #
+ # This wraps distutils' Distribution.__init__ and simulates
+ # pbr or something else setting these values.
+ def _fake_distribution_init(self, dist, attrs):
+ saved_dist_init(dist, attrs)
+ # see self._DISTUTILS_UNSUPPORTED_METADATA
+ setattr(dist.metadata, 'long_description_content_type', 'text/something')
+ # Test overwrite setup() args
+ setattr(
+ dist.metadata,
+ 'project_urls',
+ {
+ 'Link One': 'https://example.com/one/',
+ 'Link Two': 'https://example.com/two/',
+ },
+ )
+ return None
+
+ @patch.object(_Distribution, '__init__', autospec=True)
+ def test_external_setters(self, mock_parent_init, tmpdir):
+ mock_parent_init.side_effect = self._fake_distribution_init
+
+ dist = Distribution(attrs={'project_urls': {'will_be': 'ignored'}})
+
+ assert dist.metadata.long_description_content_type == 'text/something'
+ assert dist.metadata.project_urls == {
+ 'Link One': 'https://example.com/one/',
+ 'Link Two': 'https://example.com/two/',
+ }
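Editorial note: the tests above exercise setuptools' declarative setup.cfg handling. A minimal, hedged sketch of reading such a file programmatically with the read_configuration helper (also referenced by the removed test_config.py below); the file contents and dictionary keys shown are illustrative assumptions, not part of this change:

# Hedged sketch: parse a declarative setup.cfg similar to the ones used above.
from setuptools.config import read_configuration

with open("setup.cfg", "w") as f:  # hypothetical project configuration
    f.write(
        "[metadata]\n"
        "name = demo\n"
        "version = 0.0.1\n"
        "\n"
        "[options.entry_points]\n"
        "console_scripts =\n"
        "    demo = demo.cli:main\n"
    )

config = read_configuration("setup.cfg")
print(config["metadata"]["version"])      # '0.0.1'
print(config["options"]["entry_points"])  # parsed console_scripts mapping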
diff --git a/setuptools/tests/contexts.py b/setuptools/tests/contexts.py
index 535ae10..5894882 100644
--- a/setuptools/tests/contexts.py
+++ b/setuptools/tests/contexts.py
@@ -4,9 +4,10 @@ import shutil
import sys
import contextlib
import site
+import io
-from setuptools.extern import six
import pkg_resources
+from filelock import FileLock
@contextlib.contextmanager
@@ -58,8 +59,8 @@ def quiet():
old_stdout = sys.stdout
old_stderr = sys.stderr
- new_stdout = sys.stdout = six.StringIO()
- new_stderr = sys.stderr = six.StringIO()
+ new_stdout = sys.stdout = io.StringIO()
+ new_stderr = sys.stderr = io.StringIO()
try:
yield new_stdout, new_stderr
finally:
@@ -96,3 +97,29 @@ def suppress_exceptions(*excs):
yield
except excs:
pass
+
+
+def multiproc(request):
+ """
+ Return True if running under xdist and multiple
+ workers are used.
+ """
+ try:
+ worker_id = request.getfixturevalue('worker_id')
+ except Exception:
+ return False
+ return worker_id != 'master'
+
+
+@contextlib.contextmanager
+def session_locked_tmp_dir(request, tmp_path_factory, name):
+ """Uses a file lock to guarantee only one worker can access a temp dir"""
+ # get the temp directory shared by all workers
+ base = tmp_path_factory.getbasetemp()
+ shared_dir = base.parent if multiproc(request) else base
+
+ locked_dir = shared_dir / name
+ with FileLock(locked_dir.with_suffix(".lock")):
+ # ^-- prevent multiple workers from accessing the directory at once
+ locked_dir.mkdir(exist_ok=True, parents=True)
+ yield locked_dir
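Editorial note: session_locked_tmp_dir (added above) is what lets expensive artifacts be shared safely across pytest-xdist workers. A minimal sketch of a hypothetical session fixture built on top of it; the fixture and file names are assumptions, not part of this change:

import pytest

from setuptools.tests import contexts

@pytest.fixture(scope="session")
def shared_artifact(tmp_path_factory, request):
    # Only the worker currently holding the file lock performs the expensive
    # step; later workers find the marker file already present and reuse it.
    with contexts.session_locked_tmp_dir(request, tmp_path_factory, "artifact") as tmp:
        marker = tmp / "done.txt"
        if not marker.exists():
            marker.write_text("built by exactly one xdist worker")
        return marker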
diff --git a/setuptools/tests/environment.py b/setuptools/tests/environment.py
index c67898c..bcf2960 100644
--- a/setuptools/tests/environment.py
+++ b/setuptools/tests/environment.py
@@ -1,9 +1,38 @@
import os
import sys
+import subprocess
import unicodedata
-
from subprocess import Popen as _Popen, PIPE as _PIPE
+import jaraco.envs
+
+
+class VirtualEnv(jaraco.envs.VirtualEnv):
+ name = '.env'
+ # Some versions of PyPy import distutils on startup, implicitly
+ # importing setuptools and thus leading to BackendInvalid errors
+ # when upgrading setuptools. Bypass this behavior by avoiding the
+ # early availability and the need to upgrade.
+ create_opts = ['--no-setuptools']
+
+ def run(self, cmd, *args, **kwargs):
+ cmd = [self.exe(cmd[0])] + cmd[1:]
+ kwargs = {"cwd": self.root, **kwargs} # Allow overriding
+ # In some environments (e.g. downstream distro packaging), where:
+ # - tox isn't used to run tests and
+ # - PYTHONPATH is set to point to a specific setuptools codebase and
+ # - no custom env is explicitly set by a test
+ # PYTHONPATH will leak into the spawned processes.
+ # In that case tests look for modules in the wrong place (on PYTHONPATH).
+ # Unless the test sets its own special env, pass a copy of the existing
+ # environment with PYTHONPATH removed to the subprocesses.
+ if "env" not in kwargs:
+ env = dict(os.environ)
+ if "PYTHONPATH" in env:
+ del env["PYTHONPATH"]
+ kwargs["env"] = env
+ return subprocess.check_output(cmd, *args, **kwargs)
+
def _which_dirs(cmd):
result = set()
@@ -29,7 +58,7 @@ def run_setup_py(cmd, pypath=None, path=None,
if pypath is not None:
env["PYTHONPATH"] = pypath
- # overide the execution path if needed
+ # override the execution path if needed
if path is not None:
env["PATH"] = path
if not env.get("PATH", ""):
@@ -46,6 +75,8 @@ def run_setup_py(cmd, pypath=None, path=None,
cmd, stdout=_PIPE, stderr=_PIPE, shell=shell, env=env,
)
+ if isinstance(data_stream, tuple):
+ data_stream = slice(*data_stream)
data = proc.communicate()[data_stream]
except OSError:
return 1, ''
diff --git a/setuptools/tests/files.py b/setuptools/tests/files.py
deleted file mode 100644
index f5f0e6b..0000000
--- a/setuptools/tests/files.py
+++ /dev/null
@@ -1,39 +0,0 @@
-import os
-
-
-from setuptools.extern.six import binary_type
-import pkg_resources.py31compat
-
-
-def build_files(file_defs, prefix=""):
- """
- Build a set of files/directories, as described by the file_defs dictionary.
-
- Each key/value pair in the dictionary is interpreted as a filename/contents
- pair. If the contents value is a dictionary, a directory is created, and the
- dictionary interpreted as the files within it, recursively.
-
- For example:
-
- {"README.txt": "A README file",
- "foo": {
- "__init__.py": "",
- "bar": {
- "__init__.py": "",
- },
- "baz.py": "# Some code",
- }
- }
- """
- for name, contents in file_defs.items():
- full_name = os.path.join(prefix, name)
- if isinstance(contents, dict):
- pkg_resources.py31compat.makedirs(full_name, exist_ok=True)
- build_files(contents, prefix=full_name)
- else:
- if isinstance(contents, binary_type):
- with open(full_name, 'wb') as f:
- f.write(contents)
- else:
- with open(full_name, 'w') as f:
- f.write(contents)
diff --git a/setuptools/tests/fixtures.py b/setuptools/tests/fixtures.py
index 5204c8d..25ab49f 100644
--- a/setuptools/tests/fixtures.py
+++ b/setuptools/tests/fixtures.py
@@ -1,9 +1,16 @@
+import os
+import contextlib
+import sys
+import subprocess
+from pathlib import Path
+
import pytest
+import path
-from . import contexts
+from . import contexts, environment
-@pytest.yield_fixture
+@pytest.fixture
def user_override(monkeypatch):
"""
Override site.USER_BASE and site.USER_SITE with temporary directories in
@@ -17,7 +24,117 @@ def user_override(monkeypatch):
yield
-@pytest.yield_fixture
+@pytest.fixture
def tmpdir_cwd(tmpdir):
with tmpdir.as_cwd() as orig:
yield orig
+
+
+@pytest.fixture(autouse=True, scope="session")
+def workaround_xdist_376(request):
+ """
+ Workaround pytest-dev/pytest-xdist#376
+
+ ``pytest-xdist`` tends to inject '' into ``sys.path``,
+ which may break certain isolation expectations.
+ Remove the entry so the import
+ machinery behaves the same irrespective of xdist.
+ """
+ if not request.config.pluginmanager.has_plugin('xdist'):
+ return
+
+ with contextlib.suppress(ValueError):
+ sys.path.remove('')
+
+
+@pytest.fixture
+def sample_project(tmp_path):
+ """
+ Clone the 'sampleproject' and return a path to it.
+ """
+ cmd = ['git', 'clone', 'https://github.com/pypa/sampleproject']
+ try:
+ subprocess.check_call(cmd, cwd=str(tmp_path))
+ except Exception:
+ pytest.skip("Unable to clone sampleproject")
+ return tmp_path / 'sampleproject'
+
+
+# sdist and wheel artifacts should be stable across a round of tests
+# so we can build them once per session and use the files as "readonly"
+
+
+@pytest.fixture(scope="session")
+def setuptools_sdist(tmp_path_factory, request):
+ if os.getenv("PRE_BUILT_SETUPTOOLS_SDIST"):
+ return Path(os.getenv("PRE_BUILT_SETUPTOOLS_SDIST")).resolve()
+
+ with contexts.session_locked_tmp_dir(
+ request, tmp_path_factory, "sdist_build") as tmp:
+ dist = next(tmp.glob("*.tar.gz"), None)
+ if dist:
+ return dist
+
+ subprocess.check_call([
+ sys.executable, "-m", "build", "--sdist",
+ "--outdir", str(tmp), str(request.config.rootdir)
+ ])
+ return next(tmp.glob("*.tar.gz"))
+
+
+@pytest.fixture(scope="session")
+def setuptools_wheel(tmp_path_factory, request):
+ if os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL"):
+ return Path(os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL")).resolve()
+
+ with contexts.session_locked_tmp_dir(
+ request, tmp_path_factory, "wheel_build") as tmp:
+ dist = next(tmp.glob("*.whl"), None)
+ if dist:
+ return dist
+
+ subprocess.check_call([
+ sys.executable, "-m", "build", "--wheel",
+ "--outdir", str(tmp) , str(request.config.rootdir)
+ ])
+ return next(tmp.glob("*.whl"))
+
+
+@pytest.fixture
+def venv(tmp_path, setuptools_wheel):
+ """Virtual env with the version of setuptools under test installed"""
+ env = environment.VirtualEnv()
+ env.root = path.Path(tmp_path / 'venv')
+ env.req = str(setuptools_wheel)
+ # In some environments (e.g. downstream distro packaging),
+ # where tox isn't used to run tests and PYTHONPATH is set to point to
+ # a specific setuptools codebase, PYTHONPATH will leak into the spawned
+ # processes.
+ # env.create() should install the just-created setuptools
+ # wheel, but it doesn't if it finds another existing matching setuptools
+ # installation present on PYTHONPATH:
+ # `setuptools is already installed with the same version as the provided
+ # wheel. Use --force-reinstall to force an installation of the wheel.`
+ # The context manager below prevents PYTHONPATH from leaking into the created environment.
+ with contexts.environment(PYTHONPATH=None):
+ return env.create()
+
+
+@pytest.fixture
+def venv_without_setuptools(tmp_path):
+ """Virtual env without any version of setuptools installed"""
+ env = environment.VirtualEnv()
+ env.root = path.Path(tmp_path / 'venv_without_setuptools')
+ env.create_opts = ['--no-setuptools']
+ env.ensure_env()
+ return env
+
+
+@pytest.fixture
+def bare_venv(tmp_path):
+ """Virtual env without any common packages installed"""
+ env = environment.VirtualEnv()
+ env.root = path.Path(tmp_path / 'bare_venv')
+ env.create_opts = ['--no-setuptools', '--no-pip', '--no-wheel', '--no-seed']
+ env.ensure_env()
+ return env
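Editorial note: with the fixtures above in place, an integration-style test can request `venv` to get an isolated interpreter with the setuptools wheel under test installed. A hypothetical example; the test name and command are assumptions:

def test_pip_works_in_venv(venv):
    # venv.run() proxies subprocess.check_output with the venv's interpreter,
    # as defined in setuptools/tests/environment.py above.
    out = venv.run(["python", "-m", "pip", "list"], text=True)
    assert "setuptools" in out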
diff --git a/setuptools/tests/integration/__init__.py b/setuptools/tests/integration/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/setuptools/tests/integration/__init__.py
diff --git a/setuptools/tests/integration/helpers.py b/setuptools/tests/integration/helpers.py
new file mode 100644
index 0000000..24c02be
--- /dev/null
+++ b/setuptools/tests/integration/helpers.py
@@ -0,0 +1,75 @@
+"""Reusable functions and classes for different types of integration tests.
+
+For example, ``Archive`` can be used to check the contents of a distribution built
+with setuptools, and ``run`` will always try to be as verbose as possible to
+facilitate debugging.
+"""
+import os
+import subprocess
+import tarfile
+from zipfile import ZipFile
+from pathlib import Path
+
+
+def run(cmd, env=None):
+ r = subprocess.run(
+ cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ universal_newlines=True,
+ env={**os.environ, **(env or {})}
+ # ^-- allow overwriting instead of discarding the current env
+ )
+
+ out = r.stdout + "\n" + r.stderr
+ # pytest omits stdout/err by default; if the test fails, they help with debugging
+ print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
+ print(f"Command: {cmd}\nreturn code: {r.returncode}\n\n{out}")
+
+ if r.returncode == 0:
+ return out
+ raise subprocess.CalledProcessError(r.returncode, cmd, r.stdout, r.stderr)
+
+
+class Archive:
+ """Compatibility layer for ZipFile/Info and TarFile/Info"""
+ def __init__(self, filename):
+ self._filename = filename
+ if filename.endswith("tar.gz"):
+ self._obj = tarfile.open(filename, "r:gz")
+ elif filename.endswith("zip"):
+ self._obj = ZipFile(filename)
+ else:
+ raise ValueError(f"{filename} doesn't seem to be a zip or tar.gz")
+
+ def __iter__(self):
+ if hasattr(self._obj, "infolist"):
+ return iter(self._obj.infolist())
+ return iter(self._obj)
+
+ def get_name(self, zip_or_tar_info):
+ if hasattr(zip_or_tar_info, "filename"):
+ return zip_or_tar_info.filename
+ return zip_or_tar_info.name
+
+ def get_content(self, zip_or_tar_info):
+ if hasattr(self._obj, "extractfile"):
+ content = self._obj.extractfile(zip_or_tar_info)
+ if content is None:
+ msg = f"Invalid {zip_or_tar_info.name} in {self._filename}"
+ raise ValueError(msg)
+ return str(content.read(), "utf-8")
+ return str(self._obj.read(zip_or_tar_info), "utf-8")
+
+
+def get_sdist_members(sdist_path):
+ with tarfile.open(sdist_path, "r:gz") as tar:
+ files = [Path(f) for f in tar.getnames()]
+ # remove root folder
+ relative_files = ("/".join(f.parts[1:]) for f in files)
+ return {f for f in relative_files if f}
+
+
+def get_wheel_members(wheel_path):
+ with ZipFile(wheel_path) as zipfile:
+ return set(zipfile.namelist())
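Editorial note: a quick sketch of how these helpers might be used to inspect a built distribution. The sdist filename is an illustrative assumption (e.g. the output of `python -m build --sdist`):

from setuptools.tests.integration.helpers import Archive, get_sdist_members

sdist = "example-1.0.tar.gz"        # hypothetical sdist produced beforehand
print(get_sdist_members(sdist))     # member paths with the root folder stripped

archive = Archive(sdist)            # the same API also works for .zip archives
for member in archive:
    name = archive.get_name(member)
    if name.endswith("pyproject.toml"):
        print(archive.get_content(member))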
diff --git a/setuptools/tests/integration/test_pip_install_sdist.py b/setuptools/tests/integration/test_pip_install_sdist.py
new file mode 100644
index 0000000..9d11047
--- /dev/null
+++ b/setuptools/tests/integration/test_pip_install_sdist.py
@@ -0,0 +1,219 @@
+"""Integration tests for setuptools that focus on building packages via pip.
+
+The idea behind these tests is not to exhaustively check all the possible
+combinations of packages, operating systems, supporting libraries, etc., but
+rather to check a limited number of popular packages and how they interact with
+the exposed public API. This way, if any change in the API is introduced, we hope to
+identify backward compatibility problems before publishing a release.
+
+The number of tested packages is purposefully kept small, to minimise duration
+and the associated maintenance cost (changes in the way these packages define
+their build process may require changes in the tests).
+"""
+import json
+import os
+import shutil
+import sys
+from enum import Enum
+from glob import glob
+from hashlib import md5
+from urllib.request import urlopen
+
+import pytest
+from packaging.requirements import Requirement
+
+from .helpers import Archive, run
+
+
+pytestmark = pytest.mark.integration
+
+LATEST, = list(Enum("v", "LATEST"))
+"""Default version to be checked"""
+# There are positive and negative aspects of checking the latest version of the
+# packages.
+# The main positive aspect is that the latest version might have already
+# removed the use of APIs deprecated in previous releases of setuptools.
+
+
+# Packages to be tested:
+# (Please note that the test environment cannot support EVERY library required for
+# compiling binary extensions. In Ubuntu/Debian nomenclature, we only assume
+# that `build-essential`, `gfortran` and `libopenblas-dev` are installed,
+# due to their relevance to the numerical/scientific programming ecosystem)
+EXAMPLES = [
+ ("pandas", LATEST), # cython + custom build_ext
+ ("sphinx", LATEST), # custom setup.py
+ ("pip", LATEST), # just in case...
+ ("pytest", LATEST), # uses setuptools_scm
+ ("mypy", LATEST), # custom build_py + ext_modules
+
+ # --- Popular packages: https://hugovk.github.io/top-pypi-packages/ ---
+ ("botocore", LATEST),
+ ("kiwisolver", "1.3.2"), # build_ext, version pinned due to setup_requires
+ ("brotli", LATEST), # not in the list but used by urllib3
+
+ # When adding packages to this list, make sure they expose a `__version__`
+ # attribute, or modify the tests below
+]
+
+
+# Some packages have "optional" dependencies that modify their build behaviour
+# and are not listed in pyproject.toml; others still use `setup_requires`
+EXTRA_BUILD_DEPS = {
+ "sphinx": ("babel>=1.3",),
+ "kiwisolver": ("cppy>=1.1.0",)
+}
+
+
+VIRTUALENV = (sys.executable, "-m", "virtualenv")
+
+
+# By default, pip will try to build packages in isolation (PEP 517), which
+# means it will download the previous stable version of setuptools.
+# `pip` flags can avoid that (the version of setuptools under test
+# should be the one to be used)
+SDIST_OPTIONS = (
+ "--ignore-installed",
+ "--no-build-isolation",
+ # We don't need "--no-binary :all:" since we specify the path to the sdist.
+ # It also helps with performance, since dependencies can come from wheels.
+)
+# The downside of `--no-build-isolation` is that pip will not download build
+# dependencies. The test script will have to also handle that.
+
+
+@pytest.fixture
+def venv_python(tmp_path):
+ run([*VIRTUALENV, str(tmp_path / ".venv")])
+ possible_path = (str(p.parent) for p in tmp_path.glob(".venv/*/python*"))
+ return shutil.which("python", path=os.pathsep.join(possible_path))
+
+
+@pytest.fixture(autouse=True)
+def _prepare(tmp_path, venv_python, monkeypatch, request):
+ download_path = os.getenv("DOWNLOAD_PATH", str(tmp_path))
+ os.makedirs(download_path, exist_ok=True)
+
+ # Environment vars used for building some of the packages
+ monkeypatch.setenv("USE_MYPYC", "1")
+
+ def _debug_info():
+ # Let's provide the maximum amount of information possible in case
+ # it is necessary to debug the tests directly from the CI logs.
+ print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
+ print("Temporary directory:")
+ print(*tmp_path.glob("*"), sep="\n")  # a bare map(print, ...) is lazy and would print nothing
+ print("Virtual environment:")
+ run([venv_python, "-m", "pip", "freeze"])
+ request.addfinalizer(_debug_info)
+
+
+ALREADY_LOADED = ("pytest", "mypy") # loaded by pytest/pytest-enabler
+
+
+@pytest.mark.parametrize('package, version', EXAMPLES)
+@pytest.mark.uses_network
+def test_install_sdist(package, version, tmp_path, venv_python, setuptools_wheel):
+ venv_pip = (venv_python, "-m", "pip")
+ sdist = retrieve_sdist(package, version, tmp_path)
+ deps = build_deps(package, sdist)
+ if deps:
+ print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
+ print("Dependencies:", deps)
+ run([*venv_pip, "install", *deps])
+
+ # Use a virtualenv to simulate PEP 517 isolation,
+ # but install a fresh setuptools wheel to ensure the version under development is used
+ run([*venv_pip, "install", "-I", setuptools_wheel])
+ run([*venv_pip, "install", *SDIST_OPTIONS, sdist])
+
+ # Execute a simple script to make sure the package was installed correctly
+ script = f"import {package}; print(getattr({package}, '__version__', 0))"
+ run([venv_python, "-c", script])
+
+
+# ---- Helper Functions ----
+
+
+def retrieve_sdist(package, version, tmp_path):
+ """Either use cached sdist file or download it from PyPI"""
+ # `pip download` cannot be used due to
+ # https://github.com/pypa/pip/issues/1884
+ # https://discuss.python.org/t/pep-625-file-name-of-a-source-distribution/4686
+ # We have to find the correct distribution file and download it
+ download_path = os.getenv("DOWNLOAD_PATH", str(tmp_path))
+ dist = retrieve_pypi_sdist_metadata(package, version)
+
+ # Remove old files to prevent the cache from growing indefinitely
+ for file in glob(os.path.join(download_path, f"{package}*")):
+ if dist["filename"] != file:
+ os.unlink(file)
+
+ dist_file = os.path.join(download_path, dist["filename"])
+ if not os.path.exists(dist_file):
+ download(dist["url"], dist_file, dist["md5_digest"])
+ return dist_file
+
+
+def retrieve_pypi_sdist_metadata(package, version):
+ # https://warehouse.pypa.io/api-reference/json.html
+ id_ = package if version is LATEST else f"{package}/{version}"
+ with urlopen(f"https://pypi.org/pypi/{id_}/json") as f:
+ metadata = json.load(f)
+
+ if metadata["info"]["yanked"]:
+ raise ValueError(f"Release for {package} {version} was yanked")
+
+ version = metadata["info"]["version"]
+ release = metadata["releases"][version]
+ dists = [d for d in release if d["packagetype"] == "sdist"]
+ if len(dists) == 0:
+ raise ValueError(f"No sdist found for {package} {version}")
+
+ for dist in dists:
+ if dist["filename"].endswith(".tar.gz"):
+ return dist
+
+ # Not all packages publish a tar.gz sdist
+ return dist
+
+
+def download(url, dest, md5_digest):
+ with urlopen(url) as f:
+ data = f.read()
+
+ assert md5(data).hexdigest() == md5_digest
+
+ with open(dest, "wb") as f:
+ f.write(data)
+
+ assert os.path.exists(dest)
+
+
+def build_deps(package, sdist_file):
+ """Find out what the build dependencies for a package are.
+
+ We need to "manually" install them, since pip will not install build
+ deps with `--no-build-isolation`.
+ """
+ import tomli as toml
+
+ # delay importing, since pytest discovery phase may hit this file from a
+ # testenv without tomli
+
+ archive = Archive(sdist_file)
+ pyproject = _read_pyproject(archive)
+
+ info = toml.loads(pyproject)
+ deps = info.get("build-system", {}).get("requires", [])
+ deps += EXTRA_BUILD_DEPS.get(package, [])
+ # Remove setuptools from requirements (and deduplicate)
+ requirements = {Requirement(d).name: d for d in deps}
+ return [v for k, v in requirements.items() if k != "setuptools"]
+
+
+def _read_pyproject(archive):
+ for member in archive:
+ if os.path.basename(archive.get_name(member)) == "pyproject.toml":
+ return archive.get_content(member)
+ return ""
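Editorial note: for reference, the PyPI JSON API lookup performed by retrieve_pypi_sdist_metadata() boils down to roughly the following sketch. The package name is illustrative; the structure mirrors the code above:

import json
from urllib.request import urlopen

with urlopen("https://pypi.org/pypi/kiwisolver/json") as f:  # hypothetical package
    metadata = json.load(f)

version = metadata["info"]["version"]
sdists = [d for d in metadata["releases"][version] if d["packagetype"] == "sdist"]
print(sdists[0]["filename"], sdists[0]["url"], sdists[0]["md5_digest"])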
diff --git a/setuptools/tests/namespaces.py b/setuptools/tests/namespaces.py
index ef5ecda..245cf8e 100644
--- a/setuptools/tests/namespaces.py
+++ b/setuptools/tests/namespaces.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, unicode_literals
-
import textwrap
diff --git a/setuptools/tests/server.py b/setuptools/tests/server.py
index 3531212..6717c05 100644
--- a/setuptools/tests/server.py
+++ b/setuptools/tests/server.py
@@ -1,13 +1,15 @@
"""Basic http server for tests to simulate PyPI or custom indexes
"""
+import os
import time
import threading
+import http.server
+import urllib.parse
+import urllib.request
-from setuptools.extern.six.moves import BaseHTTPServer, SimpleHTTPServer
-
-class IndexServer(BaseHTTPServer.HTTPServer):
+class IndexServer(http.server.HTTPServer):
"""Basic single-threaded http server simulating a package index
You can use this server in unittest like this::
@@ -19,10 +21,11 @@ class IndexServer(BaseHTTPServer.HTTPServer):
s.stop()
"""
- def __init__(self, server_address=('', 0),
- RequestHandlerClass=SimpleHTTPServer.SimpleHTTPRequestHandler):
- BaseHTTPServer.HTTPServer.__init__(self, server_address,
- RequestHandlerClass)
+ def __init__(
+ self, server_address=('', 0),
+ RequestHandlerClass=http.server.SimpleHTTPRequestHandler):
+ http.server.HTTPServer.__init__(
+ self, server_address, RequestHandlerClass)
self._run = True
def start(self):
@@ -44,29 +47,44 @@ class IndexServer(BaseHTTPServer.HTTPServer):
return 'http://127.0.0.1:%s/setuptools/tests/indexes/' % port
-class RequestRecorder(BaseHTTPServer.BaseHTTPRequestHandler):
+class RequestRecorder(http.server.BaseHTTPRequestHandler):
def do_GET(self):
requests = vars(self.server).setdefault('requests', [])
requests.append(self)
self.send_response(200, 'OK')
-class MockServer(BaseHTTPServer.HTTPServer, threading.Thread):
+class MockServer(http.server.HTTPServer, threading.Thread):
"""
A simple HTTP Server that records the requests made to it.
"""
- def __init__(self, server_address=('', 0),
+ def __init__(
+ self, server_address=('', 0),
RequestHandlerClass=RequestRecorder):
- BaseHTTPServer.HTTPServer.__init__(self, server_address,
- RequestHandlerClass)
+ http.server.HTTPServer.__init__(
+ self, server_address, RequestHandlerClass)
threading.Thread.__init__(self)
- self.setDaemon(True)
+ self.daemon = True
self.requests = []
def run(self):
self.serve_forever()
@property
+ def netloc(self):
+ return 'localhost:%s' % self.server_port
+
+ @property
def url(self):
- return 'http://localhost:%(server_port)s/' % vars(self)
+ return 'http://%s/' % self.netloc
+
+
+def path_to_url(path, authority=None):
+ """ Convert a path to a file: URL. """
+ path = os.path.normpath(os.path.abspath(path))
+ base = 'file:'
+ if authority is not None:
+ base += '//' + authority
+ url = urllib.parse.urljoin(base, urllib.request.pathname2url(path))
+ return url
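Editorial note: a brief usage sketch for the server additions above. The printed values are illustrative, and the path_to_url output is shown for a POSIX path:

from setuptools.tests.server import MockServer, path_to_url

server = MockServer()
server.start()                     # threading.Thread.start() -> serve_forever()
print(server.netloc)               # e.g. 'localhost:54321'
print(server.url)                  # e.g. 'http://localhost:54321/'
print(path_to_url("/tmp/index"))   # 'file:///tmp/index'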
diff --git a/setuptools/tests/test_archive_util.py b/setuptools/tests/test_archive_util.py
index b789e9a..7f99624 100644
--- a/setuptools/tests/test_archive_util.py
+++ b/setuptools/tests/test_archive_util.py
@@ -3,8 +3,6 @@
import tarfile
import io
-from setuptools.extern import six
-
import pytest
from setuptools import archive_util
@@ -22,8 +20,6 @@ def tarfile_with_unicode(tmpdir):
data = b""
filename = "testimäge.png"
- if six.PY2:
- filename = filename.decode('utf-8')
t = tarfile.TarInfo(filename)
t.size = len(data)
@@ -39,4 +35,4 @@ def tarfile_with_unicode(tmpdir):
@pytest.mark.xfail(reason="#710 and #712")
def test_unicode_files(tarfile_with_unicode, tmpdir):
target = tmpdir / 'out'
- archive_util.unpack_archive(tarfile_with_unicode, six.text_type(target))
+ archive_util.unpack_archive(tarfile_with_unicode, str(target))
diff --git a/setuptools/tests/test_bdist_deprecations.py b/setuptools/tests/test_bdist_deprecations.py
new file mode 100644
index 0000000..1a900c6
--- /dev/null
+++ b/setuptools/tests/test_bdist_deprecations.py
@@ -0,0 +1,27 @@
+"""develop tests
+"""
+import mock
+import sys
+
+import pytest
+
+from setuptools.dist import Distribution
+from setuptools import SetuptoolsDeprecationWarning
+
+
+@pytest.mark.skipif(sys.platform == 'win32', reason='non-Windows only')
+@mock.patch('distutils.command.bdist_rpm.bdist_rpm')
+def test_bdist_rpm_warning(distutils_cmd, tmpdir_cwd):
+ dist = Distribution(
+ dict(
+ script_name='setup.py',
+ script_args=['bdist_rpm'],
+ name='foo',
+ py_modules=['hi'],
+ )
+ )
+ dist.parse_command_line()
+ with pytest.warns(SetuptoolsDeprecationWarning):
+ dist.run_commands()
+
+ distutils_cmd.run.assert_called_once()
diff --git a/setuptools/tests/test_bdist_egg.py b/setuptools/tests/test_bdist_egg.py
index 54742aa..67f788c 100644
--- a/setuptools/tests/test_bdist_egg.py
+++ b/setuptools/tests/test_bdist_egg.py
@@ -13,7 +13,7 @@ from . import contexts
SETUP_PY = """\
from setuptools import setup
-setup(name='foo', py_modules=['hi'])
+setup(py_modules=['hi'])
"""
@@ -42,7 +42,7 @@ class Test:
# let's see if we got our egg link at the right place
[content] = os.listdir('dist')
- assert re.match(r'foo-0.0.0-py[23].\d.egg$', content)
+ assert re.match(r'foo-0.0.0-py[23].\d+.egg$', content)
@pytest.mark.xfail(
os.environ.get('PYTHONDONTWRITEBYTECODE'),
@@ -52,7 +52,6 @@ class Test:
dist = Distribution(dict(
script_name='setup.py',
script_args=['bdist_egg', '--exclude-source-files'],
- name='foo',
py_modules=['hi'],
))
with contexts.quiet():
diff --git a/setuptools/tests/test_build_clib.py b/setuptools/tests/test_build_clib.py
index aebcc35..48bea2b 100644
--- a/setuptools/tests/test_build_clib.py
+++ b/setuptools/tests/test_build_clib.py
@@ -1,6 +1,4 @@
import pytest
-import os
-import shutil
import mock
from distutils.errors import DistutilsSetupError
@@ -10,8 +8,7 @@ from setuptools.dist import Distribution
class TestBuildCLib:
@mock.patch(
- 'setuptools.command.build_clib.newer_pairwise_group'
- )
+ 'setuptools.command.build_clib.newer_pairwise_group')
def test_build_libraries(self, mock_newer):
dist = Distribution()
cmd = build_clib(dist)
@@ -40,13 +37,14 @@ class TestBuildCLib:
# with that out of the way, let's see if the crude dependency
# system works
cmd.compiler = mock.MagicMock(spec=cmd.compiler)
- mock_newer.return_value = ([],[])
+ mock_newer.return_value = ([], [])
obj_deps = {'': ('global.h',), 'example.c': ('example.h',)}
- libs = [('example', {'sources': ['example.c'] ,'obj_deps': obj_deps})]
+ libs = [('example', {'sources': ['example.c'], 'obj_deps': obj_deps})]
cmd.build_libraries(libs)
- assert [['example.c', 'global.h', 'example.h']] in mock_newer.call_args[0]
+ assert [['example.c', 'global.h', 'example.h']] in \
+ mock_newer.call_args[0]
assert not cmd.compiler.compile.called
assert cmd.compiler.create_static_lib.call_count == 1
diff --git a/setuptools/tests/test_build_ext.py b/setuptools/tests/test_build_ext.py
index 6025715..3177a2c 100644
--- a/setuptools/tests/test_build_ext.py
+++ b/setuptools/tests/test_build_ext.py
@@ -1,13 +1,20 @@
+import os
import sys
import distutils.command.build_ext as orig
from distutils.sysconfig import get_config_var
-from setuptools.extern import six
+from jaraco import path
from setuptools.command.build_ext import build_ext, get_abi3_suffix
from setuptools.dist import Distribution
from setuptools.extension import Extension
+from . import environment
+from .textwrap import DALS
+
+
+IS_PYPY = '__pypy__' in sys.builtin_module_names
+
class TestBuildExt:
def test_get_ext_filename(self):
@@ -37,9 +44,107 @@ class TestBuildExt:
assert 'spam.eggs' in cmd.ext_map
res = cmd.get_ext_filename('spam.eggs')
- if six.PY2 or not get_abi3_suffix():
- assert res.endswith(get_config_var('SO'))
+ if not get_abi3_suffix():
+ assert res.endswith(get_config_var('EXT_SUFFIX'))
elif sys.platform == 'win32':
assert res.endswith('eggs.pyd')
else:
assert 'abi3' in res
+
+ def test_ext_suffix_override(self):
+ """
+ The SETUPTOOLS_EXT_SUFFIX environment variable always overrides
+ the default extension-module filename suffix.
+ """
+ dist = Distribution()
+ cmd = build_ext(dist)
+ cmd.ext_map['for_abi3'] = ext = Extension(
+ 'for_abi3',
+ ['s.c'],
+ # Override shouldn't affect abi3 modules
+ py_limited_api=True,
+ )
+ # Mock value needed to pass tests
+ ext._links_to_dynamic = False
+
+ if not IS_PYPY:
+ expect = cmd.get_ext_filename('for_abi3')
+ else:
+ # PyPy builds do not use ABI3 tag, so they will
+ # also get the overridden suffix.
+ expect = 'for_abi3.test-suffix'
+
+ try:
+ os.environ['SETUPTOOLS_EXT_SUFFIX'] = '.test-suffix'
+ res = cmd.get_ext_filename('normal')
+ assert 'normal.test-suffix' == res
+ res = cmd.get_ext_filename('for_abi3')
+ assert expect == res
+ finally:
+ del os.environ['SETUPTOOLS_EXT_SUFFIX']
+
+
+def test_build_ext_config_handling(tmpdir_cwd):
+ files = {
+ 'setup.py': DALS(
+ """
+ from setuptools import Extension, setup
+ setup(
+ name='foo',
+ version='0.0.0',
+ ext_modules=[Extension('foo', ['foo.c'])],
+ )
+ """),
+ 'foo.c': DALS(
+ """
+ #include "Python.h"
+
+ #if PY_MAJOR_VERSION >= 3
+
+ static struct PyModuleDef moduledef = {
+ PyModuleDef_HEAD_INIT,
+ "foo",
+ NULL,
+ 0,
+ NULL,
+ NULL,
+ NULL,
+ NULL,
+ NULL
+ };
+
+ #define INITERROR return NULL
+
+ PyMODINIT_FUNC PyInit_foo(void)
+
+ #else
+
+ #define INITERROR return
+
+ void initfoo(void)
+
+ #endif
+ {
+ #if PY_MAJOR_VERSION >= 3
+ PyObject *module = PyModule_Create(&moduledef);
+ #else
+ PyObject *module = Py_InitModule("extension", NULL);
+ #endif
+ if (module == NULL)
+ INITERROR;
+ #if PY_MAJOR_VERSION >= 3
+ return module;
+ #endif
+ }
+ """),
+ 'setup.cfg': DALS(
+ """
+ [build]
+ build_base = foo_build
+ """),
+ }
+ path.build(files)
+ code, output = environment.run_setup_py(
+ cmd=['build'], data_stream=(0, 2),
+ )
+ assert code == 0, '\nSTDOUT:\n%s\nSTDERR:\n%s' % output
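Editorial note: the SETUPTOOLS_EXT_SUFFIX behaviour tested above can be sketched in isolation as follows; the suffix value is an illustrative assumption:

import os

from setuptools.dist import Distribution
from setuptools.command.build_ext import build_ext

os.environ["SETUPTOOLS_EXT_SUFFIX"] = ".custom.so"  # hypothetical override
try:
    cmd = build_ext(Distribution())
    # For a module not using the stable ABI, the override wins:
    print(cmd.get_ext_filename("normal"))            # 'normal.custom.so'
finally:
    del os.environ["SETUPTOOLS_EXT_SUFFIX"]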
diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index 659c1a6..36940e7 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -1,17 +1,35 @@
import os
+import sys
+import shutil
+import signal
+import tarfile
+import importlib
+import contextlib
+from concurrent import futures
+import re
+from zipfile import ZipFile
import pytest
+from jaraco import path
-from .files import build_files
from .textwrap import DALS
+SETUP_SCRIPT_STUB = "__import__('setuptools').setup()"
-futures = pytest.importorskip('concurrent.futures')
-importlib = pytest.importorskip('importlib')
+TIMEOUT = int(os.getenv("TIMEOUT_BACKEND_TEST", "180")) # in seconds
+IS_PYPY = '__pypy__' in sys.builtin_module_names
-class BuildBackendBase(object):
- def __init__(self, cwd=None, env={}, backend_name='setuptools.build_meta'):
+
+pytestmark = pytest.mark.skipif(
+ sys.platform == "win32" and IS_PYPY,
+ reason="The combination of PyPy + Windows + pytest-xdist + ProcessPoolExecutor "
+ "is flaky and problematic"
+)
+
+
+class BuildBackendBase:
+ def __init__(self, cwd='.', env={}, backend_name='setuptools.build_meta'):
self.cwd = cwd
self.env = env
self.backend_name = backend_name
@@ -19,108 +37,730 @@ class BuildBackendBase(object):
class BuildBackend(BuildBackendBase):
"""PEP 517 Build Backend"""
+
def __init__(self, *args, **kwargs):
super(BuildBackend, self).__init__(*args, **kwargs)
- self.pool = futures.ProcessPoolExecutor()
+ self.pool = futures.ProcessPoolExecutor(max_workers=1)
def __getattr__(self, name):
"""Handles aribrary function invocations on the build backend."""
+
def method(*args, **kw):
root = os.path.abspath(self.cwd)
caller = BuildBackendCaller(root, self.env, self.backend_name)
- return self.pool.submit(caller, name, *args, **kw).result()
+ pid = None
+ try:
+ pid = self.pool.submit(os.getpid).result(TIMEOUT)
+ return self.pool.submit(caller, name, *args, **kw).result(TIMEOUT)
+ except futures.TimeoutError:
+ self.pool.shutdown(wait=False) # doesn't stop already running processes
+ self._kill(pid)
+ pytest.xfail(f"Backend did not respond before timeout ({TIMEOUT} s)")
+ except (futures.process.BrokenProcessPool, MemoryError, OSError):
+ if IS_PYPY:
+ pytest.xfail("PyPy frequently fails tests with ProcessPoolExecutor")
+ raise
return method
+ def _kill(self, pid):
+ if pid is None:
+ return
+ with contextlib.suppress(ProcessLookupError, OSError):
+ os.kill(pid, signal.SIGTERM if os.name == "nt" else signal.SIGKILL)
+
class BuildBackendCaller(BuildBackendBase):
+ def __init__(self, *args, **kwargs):
+ super(BuildBackendCaller, self).__init__(*args, **kwargs)
+
+ (self.backend_name, _,
+ self.backend_obj) = self.backend_name.partition(':')
+
def __call__(self, name, *args, **kw):
"""Handles aribrary function invocations on the build backend."""
os.chdir(self.cwd)
os.environ.update(self.env)
mod = importlib.import_module(self.backend_name)
- return getattr(mod, name)(*args, **kw)
+ if self.backend_obj:
+ backend = getattr(mod, self.backend_obj)
+ else:
+ backend = mod
-defns = [{
- 'setup.py': DALS("""
+ return getattr(backend, name)(*args, **kw)
+
+
+defns = [
+ { # simple setup.py script
+ 'setup.py': DALS("""
+ __import__('setuptools').setup(
+ name='foo',
+ version='0.0.0',
+ py_modules=['hello'],
+ setup_requires=['six'],
+ )
+ """),
+ 'hello.py': DALS("""
+ def run():
+ print('hello')
+ """),
+ },
+ { # setup.py that relies on __name__
+ 'setup.py': DALS("""
+ assert __name__ == '__main__'
+ __import__('setuptools').setup(
+ name='foo',
+ version='0.0.0',
+ py_modules=['hello'],
+ setup_requires=['six'],
+ )
+ """),
+ 'hello.py': DALS("""
+ def run():
+ print('hello')
+ """),
+ },
+ { # setup.py script that runs arbitrary code
+ 'setup.py': DALS("""
+ variable = True
+ def function():
+ return variable
+ assert variable
+ __import__('setuptools').setup(
+ name='foo',
+ version='0.0.0',
+ py_modules=['hello'],
+ setup_requires=['six'],
+ )
+ """),
+ 'hello.py': DALS("""
+ def run():
+ print('hello')
+ """),
+ },
+ { # setup.py script that constructs temp files to be included in the distribution
+ 'setup.py': DALS("""
+ # Some packages construct files on the fly, include them in the package,
+ # and immediately remove them after `setup()` (e.g. pybind11==2.9.1).
+ # Therefore, we cannot use `distutils.core.run_setup(..., stop_after=...)`
+ # to obtain a distribution object first, and then run the distutils
+ # commands later, because these files will be removed in the meantime.
+
+ with open('world.py', 'w') as f:
+ f.write('x = 42')
+
+ try:
__import__('setuptools').setup(
name='foo',
- py_modules=['hello'],
+ version='0.0.0',
+ py_modules=['world'],
setup_requires=['six'],
)
+ finally:
+ # Some packages will clean temporary files
+ __import__('os').unlink('world.py')
+ """),
+ },
+ { # setup.cfg only
+ 'setup.cfg': DALS("""
+ [metadata]
+ name = foo
+ version = 0.0.0
+
+ [options]
+ py_modules=hello
+ setup_requires=six
+ """),
+ 'hello.py': DALS("""
+ def run():
+ print('hello')
+ """)
+ },
+ { # setup.cfg and setup.py
+ 'setup.cfg': DALS("""
+ [metadata]
+ name = foo
+ version = 0.0.0
+
+ [options]
+ py_modules=hello
+ setup_requires=six
+ """),
+ 'setup.py': "__import__('setuptools').setup()",
+ 'hello.py': DALS("""
+ def run():
+ print('hello')
+ """)
+ },
+]
+
+
+class TestBuildMetaBackend:
+ backend_name = 'setuptools.build_meta'
+
+ def get_build_backend(self):
+ return BuildBackend(backend_name=self.backend_name)
+
+ @pytest.fixture(params=defns)
+ def build_backend(self, tmpdir, request):
+ path.build(request.param, prefix=str(tmpdir))
+ with tmpdir.as_cwd():
+ yield self.get_build_backend()
+
+ def test_get_requires_for_build_wheel(self, build_backend):
+ actual = build_backend.get_requires_for_build_wheel()
+ expected = ['six', 'wheel']
+ assert sorted(actual) == sorted(expected)
+
+ def test_get_requires_for_build_sdist(self, build_backend):
+ actual = build_backend.get_requires_for_build_sdist()
+ expected = ['six']
+ assert sorted(actual) == sorted(expected)
+
+ def test_build_wheel(self, build_backend):
+ dist_dir = os.path.abspath('pip-wheel')
+ os.makedirs(dist_dir)
+ wheel_name = build_backend.build_wheel(dist_dir)
+
+ wheel_file = os.path.join(dist_dir, wheel_name)
+ assert os.path.isfile(wheel_file)
+
+ # Temporary files should be removed
+ assert not os.path.isfile('world.py')
+
+ with ZipFile(wheel_file) as zipfile:
+ wheel_contents = set(zipfile.namelist())
+
+ # Each one of the examples has a single module
+ # that should be included in the distribution
+ python_scripts = (f for f in wheel_contents if f.endswith('.py'))
+ modules = [f for f in python_scripts if not f.endswith('setup.py')]
+ assert len(modules) == 1
+
+ @pytest.mark.parametrize('build_type', ('wheel', 'sdist'))
+ def test_build_with_existing_file_present(self, build_type, tmpdir_cwd):
+ # Building a sdist/wheel should still succeed if there's
+ # already a sdist/wheel in the destination directory.
+ files = {
+ 'setup.py': "from setuptools import setup\nsetup()",
+ 'VERSION': "0.0.1",
+ 'setup.cfg': DALS("""
+ [metadata]
+ name = foo
+ version = file: VERSION
+ """),
+ 'pyproject.toml': DALS("""
+ [build-system]
+ requires = ["setuptools", "wheel"]
+ build-backend = "setuptools.build_meta"
+ """),
+ }
+
+ path.build(files)
+
+ dist_dir = os.path.abspath('preexisting-' + build_type)
+
+ build_backend = self.get_build_backend()
+ build_method = getattr(build_backend, 'build_' + build_type)
+
+ # Build a first sdist/wheel.
+ # Note: this also checks that the destination directory is
+ # successfully created if it does not exist already.
+ first_result = build_method(dist_dir)
+
+ # Change version.
+ with open("VERSION", "wt") as version_file:
+ version_file.write("0.0.2")
+
+ # Build a *second* sdist/wheel.
+ second_result = build_method(dist_dir)
+
+ assert os.path.isfile(os.path.join(dist_dir, first_result))
+ assert first_result != second_result
+
+ # And if rebuilding the exact same sdist/wheel?
+ open(os.path.join(dist_dir, second_result), 'w').close()
+ third_result = build_method(dist_dir)
+ assert third_result == second_result
+ assert os.path.getsize(os.path.join(dist_dir, third_result)) > 0
+
+ @pytest.mark.parametrize("setup_script", [None, SETUP_SCRIPT_STUB])
+ def test_build_with_pyproject_config(self, tmpdir, setup_script):
+ files = {
+ 'pyproject.toml': DALS("""
+ [build-system]
+ requires = ["setuptools", "wheel"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "foo"
+ license = {text = "MIT"}
+ description = "This is a Python package"
+ dynamic = ["version", "readme"]
+ classifiers = [
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers"
+ ]
+ urls = {Homepage = "http://github.com"}
+ dependencies = [
+ "appdirs",
+ ]
+
+ [project.optional-dependencies]
+ all = [
+ "tomli>=1",
+ "pyscaffold>=4,<5",
+ 'importlib; python_version == "2.6"',
+ ]
+
+ [project.scripts]
+ foo = "foo.cli:main"
+
+ [tool.setuptools]
+ zip-safe = false
+ package-dir = {"" = "src"}
+ packages = {find = {where = ["src"]}}
+ license-files = ["LICENSE*"]
+
+ [tool.setuptools.dynamic]
+ version = {attr = "foo.__version__"}
+ readme = {file = "README.rst"}
+
+ [tool.distutils.sdist]
+ formats = "gztar"
+
+ [tool.distutils.bdist_wheel]
+ universal = true
+ """),
+ "MANIFEST.in": DALS("""
+ global-include *.py *.txt
+ global-exclude *.py[cod]
+ """),
+ "README.rst": "This is a ``README``",
+ "LICENSE.txt": "---- placeholder MIT license ----",
+ "src": {
+ "foo": {
+ "__init__.py": "__version__ = '0.1'",
+ "cli.py": "def main(): print('hello world')",
+ "data.txt": "def main(): print('hello world')",
+ }
+ }
+ }
+ if setup_script:
+ files["setup.py"] = setup_script
+
+ build_backend = self.get_build_backend()
+ with tmpdir.as_cwd():
+ path.build(files)
+ sdist_path = build_backend.build_sdist("temp")
+ wheel_file = build_backend.build_wheel("temp")
+
+ with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar:
+ sdist_contents = set(tar.getnames())
+
+ with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile:
+ wheel_contents = set(zipfile.namelist())
+ metadata = str(zipfile.read("foo-0.1.dist-info/METADATA"), "utf-8")
+ license = str(zipfile.read("foo-0.1.dist-info/LICENSE.txt"), "utf-8")
+ epoints = str(zipfile.read("foo-0.1.dist-info/entry_points.txt"), "utf-8")
+
+ assert sdist_contents - {"foo-0.1/setup.py"} == {
+ 'foo-0.1',
+ 'foo-0.1/LICENSE.txt',
+ 'foo-0.1/MANIFEST.in',
+ 'foo-0.1/PKG-INFO',
+ 'foo-0.1/README.rst',
+ 'foo-0.1/pyproject.toml',
+ 'foo-0.1/setup.cfg',
+ 'foo-0.1/src',
+ 'foo-0.1/src/foo',
+ 'foo-0.1/src/foo/__init__.py',
+ 'foo-0.1/src/foo/cli.py',
+ 'foo-0.1/src/foo/data.txt',
+ 'foo-0.1/src/foo.egg-info',
+ 'foo-0.1/src/foo.egg-info/PKG-INFO',
+ 'foo-0.1/src/foo.egg-info/SOURCES.txt',
+ 'foo-0.1/src/foo.egg-info/dependency_links.txt',
+ 'foo-0.1/src/foo.egg-info/entry_points.txt',
+ 'foo-0.1/src/foo.egg-info/requires.txt',
+ 'foo-0.1/src/foo.egg-info/top_level.txt',
+ 'foo-0.1/src/foo.egg-info/not-zip-safe',
+ }
+ assert wheel_contents == {
+ "foo/__init__.py",
+ "foo/cli.py",
+ "foo/data.txt", # include_package_data defaults to True
+ "foo-0.1.dist-info/LICENSE.txt",
+ "foo-0.1.dist-info/METADATA",
+ "foo-0.1.dist-info/WHEEL",
+ "foo-0.1.dist-info/entry_points.txt",
+ "foo-0.1.dist-info/top_level.txt",
+ "foo-0.1.dist-info/RECORD",
+ }
+ assert license == "---- placeholder MIT license ----"
+ for line in (
+ "Summary: This is a Python package",
+ "License: MIT",
+ "Classifier: Intended Audience :: Developers",
+ "Requires-Dist: appdirs",
+ "Requires-Dist: tomli (>=1) ; extra == 'all'",
+ "Requires-Dist: importlib ; (python_version == \"2.6\") and extra == 'all'"
+ ):
+ assert line in metadata
+
+ assert metadata.strip().endswith("This is a ``README``")
+ assert epoints.strip() == "[console_scripts]\nfoo = foo.cli:main"
+
+ def test_static_metadata_in_pyproject_config(self, tmpdir):
+ # Make sure static metadata in pyproject.toml is not overwritten by setup.py
+ # as required by PEP 621
+ files = {
+ 'pyproject.toml': DALS("""
+ [build-system]
+ requires = ["setuptools", "wheel"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "foo"
+ description = "This is a Python package"
+ version = "42"
+ dependencies = ["six"]
"""),
'hello.py': DALS("""
def run():
print('hello')
"""),
- },
- {
'setup.py': DALS("""
- assert __name__ == '__main__'
__import__('setuptools').setup(
- name='foo',
- py_modules=['hello'],
- setup_requires=['six'],
+ name='bar',
+ version='13',
)
"""),
- 'hello.py': DALS("""
- def run():
- print('hello')
+ }
+ build_backend = self.get_build_backend()
+ with tmpdir.as_cwd():
+ path.build(files)
+ sdist_path = build_backend.build_sdist("temp")
+ wheel_file = build_backend.build_wheel("temp")
+
+ assert (tmpdir / "temp/foo-42.tar.gz").exists()
+ assert (tmpdir / "temp/foo-42-py3-none-any.whl").exists()
+ assert not (tmpdir / "temp/bar-13.tar.gz").exists()
+ assert not (tmpdir / "temp/bar-42.tar.gz").exists()
+ assert not (tmpdir / "temp/foo-13.tar.gz").exists()
+ assert not (tmpdir / "temp/bar-13-py3-none-any.whl").exists()
+ assert not (tmpdir / "temp/bar-42-py3-none-any.whl").exists()
+ assert not (tmpdir / "temp/foo-13-py3-none-any.whl").exists()
+
+ with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar:
+ pkg_info = str(tar.extractfile('foo-42/PKG-INFO').read(), "utf-8")
+ members = tar.getnames()
+ assert "bar-13/PKG-INFO" not in members
+
+ with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile:
+ metadata = str(zipfile.read("foo-42.dist-info/METADATA"), "utf-8")
+ members = zipfile.namelist()
+ assert "bar-13.dist-info/METADATA" not in members
+
+ for file in pkg_info, metadata:
+ for line in ("Name: foo", "Version: 42"):
+ assert line in file
+ for line in ("Name: bar", "Version: 13"):
+ assert line not in file
+
+ def test_build_sdist(self, build_backend):
+ dist_dir = os.path.abspath('pip-sdist')
+ os.makedirs(dist_dir)
+ sdist_name = build_backend.build_sdist(dist_dir)
+
+ assert os.path.isfile(os.path.join(dist_dir, sdist_name))
+
+ def test_prepare_metadata_for_build_wheel(self, build_backend):
+ dist_dir = os.path.abspath('pip-dist-info')
+ os.makedirs(dist_dir)
+
+ dist_info = build_backend.prepare_metadata_for_build_wheel(dist_dir)
+
+ assert os.path.isfile(os.path.join(dist_dir, dist_info, 'METADATA'))
+
+ def test_build_sdist_explicit_dist(self, build_backend):
+ # Explicitly specifying the dist folder should work;
+ # the sdist_directory folder and ``--dist-dir`` can be the same
+ dist_dir = os.path.abspath('dist')
+ sdist_name = build_backend.build_sdist(dist_dir)
+ assert os.path.isfile(os.path.join(dist_dir, sdist_name))
+
+ def test_build_sdist_version_change(self, build_backend):
+ sdist_into_directory = os.path.abspath("out_sdist")
+ os.makedirs(sdist_into_directory)
+
+ sdist_name = build_backend.build_sdist(sdist_into_directory)
+ assert os.path.isfile(os.path.join(sdist_into_directory, sdist_name))
+
+ # If setup.py changes, a subsequent call to the build backend
+ # should still succeed, given that the
+ # sdist_directory the frontend specifies is empty
+ setup_loc = os.path.abspath("setup.py")
+ if not os.path.exists(setup_loc):
+ setup_loc = os.path.abspath("setup.cfg")
+
+ with open(setup_loc, 'rt') as file_handler:
+ content = file_handler.read()
+ with open(setup_loc, 'wt') as file_handler:
+ file_handler.write(
+ content.replace("version='0.0.0'", "version='0.0.1'"))
+
+ shutil.rmtree(sdist_into_directory)
+ os.makedirs(sdist_into_directory)
+
+ sdist_name = build_backend.build_sdist("out_sdist")
+ assert os.path.isfile(
+ os.path.join(os.path.abspath("out_sdist"), sdist_name))
+
+ def test_build_sdist_pyproject_toml_exists(self, tmpdir_cwd):
+ files = {
+ 'setup.py': DALS("""
+ __import__('setuptools').setup(
+ name='foo',
+ version='0.0.0',
+ py_modules=['hello']
+ )"""),
+ 'hello.py': '',
+ 'pyproject.toml': DALS("""
+ [build-system]
+ requires = ["setuptools", "wheel"]
+ build-backend = "setuptools.build_meta"
"""),
- },
- {
+ }
+ path.build(files)
+ build_backend = self.get_build_backend()
+ targz_path = build_backend.build_sdist("temp")
+ with tarfile.open(os.path.join("temp", targz_path)) as tar:
+ assert any('pyproject.toml' in name for name in tar.getnames())
+
+ def test_build_sdist_setup_py_exists(self, tmpdir_cwd):
+ # If build_sdist is called from a script other than setup.py,
+ # ensure setup.py is included
+ path.build(defns[0])
+
+ build_backend = self.get_build_backend()
+ targz_path = build_backend.build_sdist("temp")
+ with tarfile.open(os.path.join("temp", targz_path)) as tar:
+ assert any('setup.py' in name for name in tar.getnames())
+
+ def test_build_sdist_setup_py_manifest_excluded(self, tmpdir_cwd):
+ # Ensure that MANIFEST.in can exclude setup.py
+ files = {
+ 'setup.py': DALS("""
+ __import__('setuptools').setup(
+ name='foo',
+ version='0.0.0',
+ py_modules=['hello']
+ )"""),
+ 'hello.py': '',
+ 'MANIFEST.in': DALS("""
+ exclude setup.py
+ """)
+ }
+
+ path.build(files)
+
+ build_backend = self.get_build_backend()
+ targz_path = build_backend.build_sdist("temp")
+ with tarfile.open(os.path.join("temp", targz_path)) as tar:
+ assert not any('setup.py' in name for name in tar.getnames())
+
+ def test_build_sdist_builds_targz_even_if_zip_indicated(self, tmpdir_cwd):
+ files = {
'setup.py': DALS("""
- variable = True
- def function():
- return variable
- assert variable
__import__('setuptools').setup(
name='foo',
- py_modules=['hello'],
- setup_requires=['six'],
+ version='0.0.0',
+ py_modules=['hello']
+ )"""),
+ 'hello.py': '',
+ 'setup.cfg': DALS("""
+ [sdist]
+ formats=zip
+ """)
+ }
+
+ path.build(files)
+
+ build_backend = self.get_build_backend()
+ build_backend.build_sdist("temp")
+
+ _relative_path_import_files = {
+ 'setup.py': DALS("""
+ __import__('setuptools').setup(
+ name='foo',
+ version=__import__('hello').__version__,
+ py_modules=['hello']
+ )"""),
+ 'hello.py': '__version__ = "0.0.0"',
+ 'setup.cfg': DALS("""
+ [sdist]
+ formats=zip
+ """)
+ }
+
+ def test_build_sdist_relative_path_import(self, tmpdir_cwd):
+ path.build(self._relative_path_import_files)
+ build_backend = self.get_build_backend()
+ with pytest.raises(ImportError, match="^No module named 'hello'$"):
+ build_backend.build_sdist("temp")
+
+ @pytest.mark.parametrize('setup_literal, requirements', [
+ ("'foo'", ['foo']),
+ ("['foo']", ['foo']),
+ (r"'foo\n'", ['foo']),
+ (r"'foo\n\n'", ['foo']),
+ ("['foo', 'bar']", ['foo', 'bar']),
+ (r"'# Has a comment line\nfoo'", ['foo']),
+ (r"'foo # Has an inline comment'", ['foo']),
+ (r"'foo \\\n >=3.0'", ['foo>=3.0']),
+ (r"'foo\nbar'", ['foo', 'bar']),
+ (r"'foo\nbar\n'", ['foo', 'bar']),
+ (r"['foo\n', 'bar\n']", ['foo', 'bar']),
+ ])
+ @pytest.mark.parametrize('use_wheel', [True, False])
+ def test_setup_requires(self, setup_literal, requirements, use_wheel,
+ tmpdir_cwd):
+
+ files = {
+ 'setup.py': DALS("""
+ from setuptools import setup
+
+ setup(
+ name="qux",
+ version="0.0.0",
+ py_modules=["hello"],
+ setup_requires={setup_literal},
)
- """),
+ """).format(setup_literal=setup_literal),
'hello.py': DALS("""
- def run():
- print('hello')
- """),
- }]
+ def run():
+ print('hello')
+ """),
+ }
+
+ path.build(files)
+
+ build_backend = self.get_build_backend()
+
+ if use_wheel:
+ base_requirements = ['wheel']
+ get_requires = build_backend.get_requires_for_build_wheel
+ else:
+ base_requirements = []
+ get_requires = build_backend.get_requires_for_build_sdist
+
+ # Ensure that the build requirements are properly parsed
+ expected = sorted(base_requirements + requirements)
+ actual = get_requires()
+
+ assert expected == sorted(actual)
+
+ def test_setup_requires_with_auto_discovery(self, tmpdir_cwd):
+ # Make sure patches introduced to retrieve setup_requires don't accidentally
+ # activate auto-discovery and cause problems due to the incomplete set of
+ # attributes passed to MinimalDistribution
+ files = {
+ 'pyproject.toml': DALS("""
+ [project]
+ name = "proj"
+ version = "42"
+ """),
+ "setup.py": DALS("""
+ __import__('setuptools').setup(
+ setup_requires=["foo"],
+ py_modules = ["hello", "world"]
+ )
+ """),
+ 'hello.py': "'hello'",
+ 'world.py': "'world'",
+ }
+ path.build(files)
+ build_backend = self.get_build_backend()
+ setup_requires = build_backend.get_requires_for_build_wheel()
+ assert setup_requires == ["wheel", "foo"]
+
+ def test_dont_install_setup_requires(self, tmpdir_cwd):
+ files = {
+ 'setup.py': DALS("""
+ from setuptools import setup
+
+ setup(
+ name="qux",
+ version="0.0.0",
+ py_modules=["hello"],
+ setup_requires=["does-not-exist >99"],
+ )
+ """),
+ 'hello.py': DALS("""
+ def run():
+ print('hello')
+ """),
+ }
+
+ path.build(files)
+
+ build_backend = self.get_build_backend()
+ dist_dir = os.path.abspath('pip-dist-info')
+ os.makedirs(dist_dir)
-@pytest.fixture(params=defns)
-def build_backend(tmpdir, request):
- build_files(request.param, prefix=str(tmpdir))
- with tmpdir.as_cwd():
- yield BuildBackend(cwd='.')
+ # does-not-exist can't be satisfied, so if it attempts to install
+ # setup_requires, it will fail.
+ build_backend.prepare_metadata_for_build_wheel(dist_dir)
+ _sys_argv_0_passthrough = {
+ 'setup.py': DALS("""
+ import os
+ import sys
-def test_get_requires_for_build_wheel(build_backend):
- actual = build_backend.get_requires_for_build_wheel()
- expected = ['six', 'setuptools', 'wheel']
- assert sorted(actual) == sorted(expected)
+ __import__('setuptools').setup(
+ name='foo',
+ version='0.0.0',
+ )
+ sys_argv = os.path.abspath(sys.argv[0])
+ file_path = os.path.abspath('setup.py')
+ assert sys_argv == file_path
+ """)
+ }
-def test_build_wheel(build_backend):
- dist_dir = os.path.abspath('pip-wheel')
- os.makedirs(dist_dir)
- wheel_name = build_backend.build_wheel(dist_dir)
+ def test_sys_argv_passthrough(self, tmpdir_cwd):
+ path.build(self._sys_argv_0_passthrough)
+ build_backend = self.get_build_backend()
+ with pytest.raises(AssertionError):
+ build_backend.build_sdist("temp")
- assert os.path.isfile(os.path.join(dist_dir, wheel_name))
+ @pytest.mark.parametrize('build_hook', ('build_sdist', 'build_wheel'))
+ def test_build_with_empty_setuppy(self, build_backend, build_hook):
+ files = {'setup.py': ''}
+ path.build(files)
+ with pytest.raises(
+ ValueError,
+ match=re.escape('No distribution was found.')):
+ getattr(build_backend, build_hook)("temp")
-def test_build_sdist(build_backend):
- dist_dir = os.path.abspath('pip-sdist')
- os.makedirs(dist_dir)
- sdist_name = build_backend.build_sdist(dist_dir)
- assert os.path.isfile(os.path.join(dist_dir, sdist_name))
+class TestBuildMetaLegacyBackend(TestBuildMetaBackend):
+ backend_name = 'setuptools.build_meta:__legacy__'
+ # build_meta_legacy-specific tests
+ def test_build_sdist_relative_path_import(self, tmpdir_cwd):
+ # This must fail in build_meta, but must pass in build_meta_legacy
+ path.build(self._relative_path_import_files)
-def test_prepare_metadata_for_build_wheel(build_backend):
- dist_dir = os.path.abspath('pip-dist-info')
- os.makedirs(dist_dir)
+ build_backend = self.get_build_backend()
+ build_backend.build_sdist("temp")
- dist_info = build_backend.prepare_metadata_for_build_wheel(dist_dir)
+ def test_sys_argv_passthrough(self, tmpdir_cwd):
+ path.build(self._sys_argv_0_passthrough)
- assert os.path.isfile(os.path.join(dist_dir, dist_info, 'METADATA'))
+ build_backend = self.get_build_backend()
+ build_backend.build_sdist("temp")
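Editorial note: the backends exercised by these tests are the ordinary PEP 517 hooks. A hedged sketch of invoking them directly from a project directory; the path is an assumption, and the `wheel` package must be available for build_wheel:

import os

from setuptools import build_meta

os.chdir("/path/to/project")  # hypothetical project with setup.cfg/setup.py/pyproject.toml
print(build_meta.get_requires_for_build_wheel())  # e.g. ['wheel', ...]
sdist_name = build_meta.build_sdist("dist")       # returns the sdist's basename
wheel_name = build_meta.build_wheel("dist")       # returns the wheel's basename
print(sdist_name, wheel_name)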
diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py
index cc701ae..19c8b78 100644
--- a/setuptools/tests/test_build_py.py
+++ b/setuptools/tests/test_build_py.py
@@ -1,17 +1,13 @@
import os
+import stat
+import shutil
import pytest
from setuptools.dist import Distribution
-@pytest.yield_fixture
-def tmpdir_as_cwd(tmpdir):
- with tmpdir.as_cwd():
- yield tmpdir
-
-
-def test_directories_in_package_data_glob(tmpdir_as_cwd):
+def test_directories_in_package_data_glob(tmpdir_cwd):
"""
Directories matching the glob in package_data should
not be included in the package data.
@@ -22,9 +18,64 @@ def test_directories_in_package_data_glob(tmpdir_as_cwd):
script_name='setup.py',
script_args=['build_py'],
packages=[''],
- name='foo',
package_data={'': ['path/*']},
))
os.makedirs('path/subpath')
dist.parse_command_line()
dist.run_commands()
+
+
+def test_read_only(tmpdir_cwd):
+ """
+ Ensure read-only flag is not preserved in copy
+ for package modules and package data, as that
+ causes problems with deleting read-only files on
+ Windows.
+
+ #1451
+ """
+ dist = Distribution(dict(
+ script_name='setup.py',
+ script_args=['build_py'],
+ packages=['pkg'],
+ package_data={'pkg': ['data.dat']},
+ ))
+ os.makedirs('pkg')
+ open('pkg/__init__.py', 'w').close()
+ open('pkg/data.dat', 'w').close()
+ os.chmod('pkg/__init__.py', stat.S_IREAD)
+ os.chmod('pkg/data.dat', stat.S_IREAD)
+ dist.parse_command_line()
+ dist.run_commands()
+ shutil.rmtree('build')
+
+
+@pytest.mark.xfail(
+ 'platform.system() == "Windows"',
+ reason="On Windows, files do not have executable bits",
+ raises=AssertionError,
+ strict=True,
+)
+def test_executable_data(tmpdir_cwd):
+ """
+ Ensure executable bit is preserved in copy for
+ package data, as users rely on it for scripts.
+
+ #2041
+ """
+ dist = Distribution(dict(
+ script_name='setup.py',
+ script_args=['build_py'],
+ packages=['pkg'],
+ package_data={'pkg': ['run-me']},
+ ))
+ os.makedirs('pkg')
+ open('pkg/__init__.py', 'w').close()
+ open('pkg/run-me', 'w').close()
+ os.chmod('pkg/run-me', 0o700)
+
+ dist.parse_command_line()
+ dist.run_commands()
+
+ assert os.stat('build/lib/pkg/run-me').st_mode & stat.S_IEXEC, \
+ "Script is not executable"
diff --git a/setuptools/tests/test_config.py b/setuptools/tests/test_config.py
deleted file mode 100644
index abb953a..0000000
--- a/setuptools/tests/test_config.py
+++ /dev/null
@@ -1,583 +0,0 @@
-import contextlib
-import pytest
-from distutils.errors import DistutilsOptionError, DistutilsFileError
-from setuptools.dist import Distribution
-from setuptools.config import ConfigHandler, read_configuration
-
-
-class ErrConfigHandler(ConfigHandler):
- """Erroneous handler. Fails to implement required methods."""
-
-
-def make_package_dir(name, base_dir):
- dir_package = base_dir.mkdir(name)
- init_file = dir_package.join('__init__.py')
- init_file.write('')
- return dir_package, init_file
-
-
-def fake_env(tmpdir, setup_cfg, setup_py=None):
-
- if setup_py is None:
- setup_py = (
- 'from setuptools import setup\n'
- 'setup()\n'
- )
-
- tmpdir.join('setup.py').write(setup_py)
- config = tmpdir.join('setup.cfg')
- config.write(setup_cfg)
-
- package_dir, init_file = make_package_dir('fake_package', tmpdir)
-
- init_file.write(
- 'VERSION = (1, 2, 3)\n'
- '\n'
- 'VERSION_MAJOR = 1'
- '\n'
- 'def get_version():\n'
- ' return [3, 4, 5, "dev"]\n'
- '\n'
- )
- return package_dir, config
-
-
-@contextlib.contextmanager
-def get_dist(tmpdir, kwargs_initial=None, parse=True):
- kwargs_initial = kwargs_initial or {}
-
- with tmpdir.as_cwd():
- dist = Distribution(kwargs_initial)
- dist.script_name = 'setup.py'
- parse and dist.parse_config_files()
-
- yield dist
-
-
-def test_parsers_implemented():
-
- with pytest.raises(NotImplementedError):
- handler = ErrConfigHandler(None, {})
- handler.parsers
-
-
-class TestConfigurationReader:
-
- def test_basic(self, tmpdir):
- _, config = fake_env(
- tmpdir,
- '[metadata]\n'
- 'version = 10.1.1\n'
- 'keywords = one, two\n'
- '\n'
- '[options]\n'
- 'scripts = bin/a.py, bin/b.py\n'
- )
- config_dict = read_configuration('%s' % config)
- assert config_dict['metadata']['version'] == '10.1.1'
- assert config_dict['metadata']['keywords'] == ['one', 'two']
- assert config_dict['options']['scripts'] == ['bin/a.py', 'bin/b.py']
-
- def test_no_config(self, tmpdir):
- with pytest.raises(DistutilsFileError):
- read_configuration('%s' % tmpdir.join('setup.cfg'))
-
- def test_ignore_errors(self, tmpdir):
- _, config = fake_env(
- tmpdir,
- '[metadata]\n'
- 'version = attr: none.VERSION\n'
- 'keywords = one, two\n'
- )
- with pytest.raises(ImportError):
- read_configuration('%s' % config)
-
- config_dict = read_configuration(
- '%s' % config, ignore_option_errors=True)
-
- assert config_dict['metadata']['keywords'] == ['one', 'two']
- assert 'version' not in config_dict['metadata']
-
- config.remove()
-
-
-class TestMetadata:
-
- def test_basic(self, tmpdir):
-
- fake_env(
- tmpdir,
- '[metadata]\n'
- 'version = 10.1.1\n'
- 'description = Some description\n'
- 'long_description_content_type = text/something\n'
- 'long_description = file: README\n'
- 'name = fake_name\n'
- 'keywords = one, two\n'
- 'provides = package, package.sub\n'
- 'license = otherlic\n'
- 'download_url = http://test.test.com/test/\n'
- 'maintainer_email = test@test.com\n'
- )
-
- tmpdir.join('README').write('readme contents\nline2')
-
- meta_initial = {
- # This will be used so `otherlic` won't replace it.
- 'license': 'BSD 3-Clause License',
- }
-
- with get_dist(tmpdir, meta_initial) as dist:
- metadata = dist.metadata
-
- assert metadata.version == '10.1.1'
- assert metadata.description == 'Some description'
- assert metadata.long_description_content_type == 'text/something'
- assert metadata.long_description == 'readme contents\nline2'
- assert metadata.provides == ['package', 'package.sub']
- assert metadata.license == 'BSD 3-Clause License'
- assert metadata.name == 'fake_name'
- assert metadata.keywords == ['one', 'two']
- assert metadata.download_url == 'http://test.test.com/test/'
- assert metadata.maintainer_email == 'test@test.com'
-
- def test_file_mixed(self, tmpdir):
-
- fake_env(
- tmpdir,
- '[metadata]\n'
- 'long_description = file: README.rst, CHANGES.rst\n'
- '\n'
- )
-
- tmpdir.join('README.rst').write('readme contents\nline2')
- tmpdir.join('CHANGES.rst').write('changelog contents\nand stuff')
-
- with get_dist(tmpdir) as dist:
- assert dist.metadata.long_description == (
- 'readme contents\nline2\n'
- 'changelog contents\nand stuff'
- )
-
- def test_file_sandboxed(self, tmpdir):
-
- fake_env(
- tmpdir,
- '[metadata]\n'
- 'long_description = file: ../../README\n'
- )
-
- with get_dist(tmpdir, parse=False) as dist:
- with pytest.raises(DistutilsOptionError):
- dist.parse_config_files() # file: out of sandbox
-
- def test_aliases(self, tmpdir):
-
- fake_env(
- tmpdir,
- '[metadata]\n'
- 'author-email = test@test.com\n'
- 'home-page = http://test.test.com/test/\n'
- 'summary = Short summary\n'
- 'platform = a, b\n'
- 'classifier =\n'
- ' Framework :: Django\n'
- ' Programming Language :: Python :: 3.5\n'
- )
-
- with get_dist(tmpdir) as dist:
- metadata = dist.metadata
- assert metadata.author_email == 'test@test.com'
- assert metadata.url == 'http://test.test.com/test/'
- assert metadata.description == 'Short summary'
- assert metadata.platforms == ['a', 'b']
- assert metadata.classifiers == [
- 'Framework :: Django',
- 'Programming Language :: Python :: 3.5',
- ]
-
- def test_multiline(self, tmpdir):
-
- fake_env(
- tmpdir,
- '[metadata]\n'
- 'name = fake_name\n'
- 'keywords =\n'
- ' one\n'
- ' two\n'
- 'classifiers =\n'
- ' Framework :: Django\n'
- ' Programming Language :: Python :: 3.5\n'
- )
- with get_dist(tmpdir) as dist:
- metadata = dist.metadata
- assert metadata.keywords == ['one', 'two']
- assert metadata.classifiers == [
- 'Framework :: Django',
- 'Programming Language :: Python :: 3.5',
- ]
-
- def test_dict(self, tmpdir):
-
- fake_env(
- tmpdir,
- '[metadata]\n'
- 'project_urls =\n'
- ' Link One = https://example.com/one/\n'
- ' Link Two = https://example.com/two/\n'
- )
- with get_dist(tmpdir) as dist:
- metadata = dist.metadata
- assert metadata.project_urls == {
- 'Link One': 'https://example.com/one/',
- 'Link Two': 'https://example.com/two/',
- }
-
- def test_version(self, tmpdir):
-
- _, config = fake_env(
- tmpdir,
- '[metadata]\n'
- 'version = attr: fake_package.VERSION\n'
- )
- with get_dist(tmpdir) as dist:
- assert dist.metadata.version == '1.2.3'
-
- config.write(
- '[metadata]\n'
- 'version = attr: fake_package.get_version\n'
- )
- with get_dist(tmpdir) as dist:
- assert dist.metadata.version == '3.4.5.dev'
-
- config.write(
- '[metadata]\n'
- 'version = attr: fake_package.VERSION_MAJOR\n'
- )
- with get_dist(tmpdir) as dist:
- assert dist.metadata.version == '1'
-
- subpack = tmpdir.join('fake_package').mkdir('subpackage')
- subpack.join('__init__.py').write('')
- subpack.join('submodule.py').write('VERSION = (2016, 11, 26)')
-
- config.write(
- '[metadata]\n'
- 'version = attr: fake_package.subpackage.submodule.VERSION\n'
- )
- with get_dist(tmpdir) as dist:
- assert dist.metadata.version == '2016.11.26'
-
- def test_unknown_meta_item(self, tmpdir):
-
- fake_env(
- tmpdir,
- '[metadata]\n'
- 'name = fake_name\n'
- 'unknown = some\n'
- )
- with get_dist(tmpdir, parse=False) as dist:
- dist.parse_config_files() # Skip unknown.
-
- def test_usupported_section(self, tmpdir):
-
- fake_env(
- tmpdir,
- '[metadata.some]\n'
- 'key = val\n'
- )
- with get_dist(tmpdir, parse=False) as dist:
- with pytest.raises(DistutilsOptionError):
- dist.parse_config_files()
-
- def test_classifiers(self, tmpdir):
- expected = set([
- 'Framework :: Django',
- 'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.5',
- ])
-
- # From file.
- _, config = fake_env(
- tmpdir,
- '[metadata]\n'
- 'classifiers = file: classifiers\n'
- )
-
- tmpdir.join('classifiers').write(
- 'Framework :: Django\n'
- 'Programming Language :: Python :: 3\n'
- 'Programming Language :: Python :: 3.5\n'
- )
-
- with get_dist(tmpdir) as dist:
- assert set(dist.metadata.classifiers) == expected
-
- # From list notation
- config.write(
- '[metadata]\n'
- 'classifiers =\n'
- ' Framework :: Django\n'
- ' Programming Language :: Python :: 3\n'
- ' Programming Language :: Python :: 3.5\n'
- )
- with get_dist(tmpdir) as dist:
- assert set(dist.metadata.classifiers) == expected
-
-
-class TestOptions:
-
- def test_basic(self, tmpdir):
-
- fake_env(
- tmpdir,
- '[options]\n'
- 'zip_safe = True\n'
- 'use_2to3 = 1\n'
- 'include_package_data = yes\n'
- 'package_dir = b=c, =src\n'
- 'packages = pack_a, pack_b.subpack\n'
- 'namespace_packages = pack1, pack2\n'
- 'use_2to3_fixers = your.fixers, or.here\n'
- 'use_2to3_exclude_fixers = one.here, two.there\n'
- 'convert_2to3_doctests = src/tests/one.txt, src/two.txt\n'
- 'scripts = bin/one.py, bin/two.py\n'
- 'eager_resources = bin/one.py, bin/two.py\n'
- 'install_requires = docutils>=0.3; pack ==1.1, ==1.3; hey\n'
- 'tests_require = mock==0.7.2; pytest\n'
- 'setup_requires = docutils>=0.3; spack ==1.1, ==1.3; there\n'
- 'dependency_links = http://some.com/here/1, '
- 'http://some.com/there/2\n'
- 'python_requires = >=1.0, !=2.8\n'
- 'py_modules = module1, module2\n'
- )
- with get_dist(tmpdir) as dist:
- assert dist.zip_safe
- assert dist.use_2to3
- assert dist.include_package_data
- assert dist.package_dir == {'': 'src', 'b': 'c'}
- assert dist.packages == ['pack_a', 'pack_b.subpack']
- assert dist.namespace_packages == ['pack1', 'pack2']
- assert dist.use_2to3_fixers == ['your.fixers', 'or.here']
- assert dist.use_2to3_exclude_fixers == ['one.here', 'two.there']
- assert dist.convert_2to3_doctests == ([
- 'src/tests/one.txt', 'src/two.txt'])
- assert dist.scripts == ['bin/one.py', 'bin/two.py']
- assert dist.dependency_links == ([
- 'http://some.com/here/1',
- 'http://some.com/there/2'
- ])
- assert dist.install_requires == ([
- 'docutils>=0.3',
- 'pack==1.1,==1.3',
- 'hey'
- ])
- assert dist.setup_requires == ([
- 'docutils>=0.3',
- 'spack ==1.1, ==1.3',
- 'there'
- ])
- assert dist.tests_require == ['mock==0.7.2', 'pytest']
- assert dist.python_requires == '>=1.0, !=2.8'
- assert dist.py_modules == ['module1', 'module2']
-
- def test_multiline(self, tmpdir):
- fake_env(
- tmpdir,
- '[options]\n'
- 'package_dir = \n'
- ' b=c\n'
- ' =src\n'
- 'packages = \n'
- ' pack_a\n'
- ' pack_b.subpack\n'
- 'namespace_packages = \n'
- ' pack1\n'
- ' pack2\n'
- 'use_2to3_fixers = \n'
- ' your.fixers\n'
- ' or.here\n'
- 'use_2to3_exclude_fixers = \n'
- ' one.here\n'
- ' two.there\n'
- 'convert_2to3_doctests = \n'
- ' src/tests/one.txt\n'
- ' src/two.txt\n'
- 'scripts = \n'
- ' bin/one.py\n'
- ' bin/two.py\n'
- 'eager_resources = \n'
- ' bin/one.py\n'
- ' bin/two.py\n'
- 'install_requires = \n'
- ' docutils>=0.3\n'
- ' pack ==1.1, ==1.3\n'
- ' hey\n'
- 'tests_require = \n'
- ' mock==0.7.2\n'
- ' pytest\n'
- 'setup_requires = \n'
- ' docutils>=0.3\n'
- ' spack ==1.1, ==1.3\n'
- ' there\n'
- 'dependency_links = \n'
- ' http://some.com/here/1\n'
- ' http://some.com/there/2\n'
- )
- with get_dist(tmpdir) as dist:
- assert dist.package_dir == {'': 'src', 'b': 'c'}
- assert dist.packages == ['pack_a', 'pack_b.subpack']
- assert dist.namespace_packages == ['pack1', 'pack2']
- assert dist.use_2to3_fixers == ['your.fixers', 'or.here']
- assert dist.use_2to3_exclude_fixers == ['one.here', 'two.there']
- assert dist.convert_2to3_doctests == (
- ['src/tests/one.txt', 'src/two.txt'])
- assert dist.scripts == ['bin/one.py', 'bin/two.py']
- assert dist.dependency_links == ([
- 'http://some.com/here/1',
- 'http://some.com/there/2'
- ])
- assert dist.install_requires == ([
- 'docutils>=0.3',
- 'pack==1.1,==1.3',
- 'hey'
- ])
- assert dist.setup_requires == ([
- 'docutils>=0.3',
- 'spack ==1.1, ==1.3',
- 'there'
- ])
- assert dist.tests_require == ['mock==0.7.2', 'pytest']
-
- def test_package_dir_fail(self, tmpdir):
- fake_env(
- tmpdir,
- '[options]\n'
- 'package_dir = a b\n'
- )
- with get_dist(tmpdir, parse=False) as dist:
- with pytest.raises(DistutilsOptionError):
- dist.parse_config_files()
-
- def test_package_data(self, tmpdir):
- fake_env(
- tmpdir,
- '[options.package_data]\n'
- '* = *.txt, *.rst\n'
- 'hello = *.msg\n'
- '\n'
- '[options.exclude_package_data]\n'
- '* = fake1.txt, fake2.txt\n'
- 'hello = *.dat\n'
- )
-
- with get_dist(tmpdir) as dist:
- assert dist.package_data == {
- '': ['*.txt', '*.rst'],
- 'hello': ['*.msg'],
- }
- assert dist.exclude_package_data == {
- '': ['fake1.txt', 'fake2.txt'],
- 'hello': ['*.dat'],
- }
-
- def test_packages(self, tmpdir):
- fake_env(
- tmpdir,
- '[options]\n'
- 'packages = find:\n'
- )
-
- with get_dist(tmpdir) as dist:
- assert dist.packages == ['fake_package']
-
- def test_find_directive(self, tmpdir):
- dir_package, config = fake_env(
- tmpdir,
- '[options]\n'
- 'packages = find:\n'
- )
-
- dir_sub_one, _ = make_package_dir('sub_one', dir_package)
- dir_sub_two, _ = make_package_dir('sub_two', dir_package)
-
- with get_dist(tmpdir) as dist:
- assert set(dist.packages) == set([
- 'fake_package', 'fake_package.sub_two', 'fake_package.sub_one'
- ])
-
- config.write(
- '[options]\n'
- 'packages = find:\n'
- '\n'
- '[options.packages.find]\n'
- 'where = .\n'
- 'include =\n'
- ' fake_package.sub_one\n'
- ' two\n'
- )
- with get_dist(tmpdir) as dist:
- assert dist.packages == ['fake_package.sub_one']
-
- config.write(
- '[options]\n'
- 'packages = find:\n'
- '\n'
- '[options.packages.find]\n'
- 'exclude =\n'
- ' fake_package.sub_one\n'
- )
- with get_dist(tmpdir) as dist:
- assert set(dist.packages) == set(
- ['fake_package', 'fake_package.sub_two'])
-
- def test_extras_require(self, tmpdir):
- fake_env(
- tmpdir,
- '[options.extras_require]\n'
- 'pdf = ReportLab>=1.2; RXP\n'
- 'rest = \n'
- ' docutils>=0.3\n'
- ' pack ==1.1, ==1.3\n'
- )
-
- with get_dist(tmpdir) as dist:
- assert dist.extras_require == {
- 'pdf': ['ReportLab>=1.2', 'RXP'],
- 'rest': ['docutils>=0.3', 'pack==1.1,==1.3']
- }
- assert dist.metadata.provides_extras == set(['pdf', 'rest'])
-
- def test_entry_points(self, tmpdir):
- _, config = fake_env(
- tmpdir,
- '[options.entry_points]\n'
- 'group1 = point1 = pack.module:func, '
- '.point2 = pack.module2:func_rest [rest]\n'
- 'group2 = point3 = pack.module:func2\n'
- )
-
- with get_dist(tmpdir) as dist:
- assert dist.entry_points == {
- 'group1': [
- 'point1 = pack.module:func',
- '.point2 = pack.module2:func_rest [rest]',
- ],
- 'group2': ['point3 = pack.module:func2']
- }
-
- expected = (
- '[blogtool.parsers]\n'
- '.rst = some.nested.module:SomeClass.some_classmethod[reST]\n'
- )
-
- tmpdir.join('entry_points').write(expected)
-
- # From file.
- config.write(
- '[options]\n'
- 'entry_points = file: entry_points\n'
- )
-
- with get_dist(tmpdir) as dist:
- assert dist.entry_points == expected
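
The deleted module above covered `setup.cfg` parsing through `setuptools.config.read_configuration`. For reference, a minimal sketch of that API as the removed tests used it, assuming a `setup.cfg` exists in the current directory:

    from setuptools.config import read_configuration

    cfg = read_configuration("setup.cfg", ignore_option_errors=True)
    print(cfg["metadata"].get("version"))
    print(cfg.get("options", {}).get("packages"))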
diff --git a/setuptools/tests/test_config_discovery.py b/setuptools/tests/test_config_discovery.py
new file mode 100644
index 0000000..fac365f
--- /dev/null
+++ b/setuptools/tests/test_config_discovery.py
@@ -0,0 +1,581 @@
+import os
+import sys
+from configparser import ConfigParser
+from itertools import product
+
+from setuptools.command.sdist import sdist
+from setuptools.dist import Distribution
+from setuptools.discovery import find_package_path, find_parent_package
+from setuptools.errors import PackageDiscoveryError
+
+import setuptools # noqa -- force distutils.core to be patched
+import distutils.core
+
+import pytest
+import jaraco.path
+from path import Path as _Path
+
+from .contexts import quiet
+from .integration.helpers import get_sdist_members, get_wheel_members, run
+from .textwrap import DALS
+
+
+class TestFindParentPackage:
+ def test_single_package(self, tmp_path):
+ # find_parent_package should find a non-namespace parent package
+ (tmp_path / "src/namespace/pkg/nested").mkdir(exist_ok=True, parents=True)
+ (tmp_path / "src/namespace/pkg/nested/__init__.py").touch()
+ (tmp_path / "src/namespace/pkg/__init__.py").touch()
+ packages = ["namespace", "namespace.pkg", "namespace.pkg.nested"]
+ assert find_parent_package(packages, {"": "src"}, tmp_path) == "namespace.pkg"
+
+ def test_multiple_toplevel(self, tmp_path):
+        # find_parent_package should return None if the given list of packages does not
+ # have a single parent package
+ multiple = ["pkg", "pkg1", "pkg2"]
+ for name in multiple:
+ (tmp_path / f"src/{name}").mkdir(exist_ok=True, parents=True)
+ (tmp_path / f"src/{name}/__init__.py").touch()
+ assert find_parent_package(multiple, {"": "src"}, tmp_path) is None
+
+
+class TestDiscoverPackagesAndPyModules:
+ """Make sure discovered values for ``packages`` and ``py_modules`` work
+ similarly to explicit configuration for the simple scenarios.
+ """
+ OPTIONS = {
+ # Different options according to the circumstance being tested
+ "explicit-src": {
+ "package_dir": {"": "src"},
+ "packages": ["pkg"]
+ },
+ "variation-lib": {
+ "package_dir": {"": "lib"}, # variation of the source-layout
+ },
+ "explicit-flat": {
+ "packages": ["pkg"]
+ },
+ "explicit-single_module": {
+ "py_modules": ["pkg"]
+ },
+ "explicit-namespace": {
+ "packages": ["ns", "ns.pkg"]
+ },
+ "automatic-src": {},
+ "automatic-flat": {},
+ "automatic-single_module": {},
+ "automatic-namespace": {}
+ }
+ FILES = {
+ "src": ["src/pkg/__init__.py", "src/pkg/main.py"],
+ "lib": ["lib/pkg/__init__.py", "lib/pkg/main.py"],
+ "flat": ["pkg/__init__.py", "pkg/main.py"],
+ "single_module": ["pkg.py"],
+ "namespace": ["ns/pkg/__init__.py"]
+ }
+
+ def _get_info(self, circumstance):
+ _, _, layout = circumstance.partition("-")
+ files = self.FILES[layout]
+ options = self.OPTIONS[circumstance]
+ return files, options
+
+ @pytest.mark.parametrize("circumstance", OPTIONS.keys())
+ def test_sdist_filelist(self, tmp_path, circumstance):
+ files, options = self._get_info(circumstance)
+ _populate_project_dir(tmp_path, files, options)
+
+ _, cmd = _run_sdist_programatically(tmp_path, options)
+
+ manifest = [f.replace(os.sep, "/") for f in cmd.filelist.files]
+ for file in files:
+ assert any(f.endswith(file) for f in manifest)
+
+ @pytest.mark.parametrize("circumstance", OPTIONS.keys())
+ def test_project(self, tmp_path, circumstance):
+ files, options = self._get_info(circumstance)
+ _populate_project_dir(tmp_path, files, options)
+
+ # Simulate a pre-existing `build` directory
+ (tmp_path / "build").mkdir()
+ (tmp_path / "build/lib").mkdir()
+ (tmp_path / "build/bdist.linux-x86_64").mkdir()
+ (tmp_path / "build/bdist.linux-x86_64/file.py").touch()
+ (tmp_path / "build/lib/__init__.py").touch()
+ (tmp_path / "build/lib/file.py").touch()
+ (tmp_path / "dist").mkdir()
+ (tmp_path / "dist/file.py").touch()
+
+ _run_build(tmp_path)
+
+ sdist_files = get_sdist_members(next(tmp_path.glob("dist/*.tar.gz")))
+ print("~~~~~ sdist_members ~~~~~")
+ print('\n'.join(sdist_files))
+ assert sdist_files >= set(files)
+
+ wheel_files = get_wheel_members(next(tmp_path.glob("dist/*.whl")))
+ print("~~~~~ wheel_members ~~~~~")
+ print('\n'.join(wheel_files))
+ orig_files = {f.replace("src/", "").replace("lib/", "") for f in files}
+ assert wheel_files >= orig_files
+
+ # Make sure build files are not included by mistake
+ for file in wheel_files:
+            assert "build" not in file
+            assert "dist" not in file
+
+ PURPOSEFULLY_EMPY = {
+ "setup.cfg": DALS(
+ """
+ [metadata]
+ name = myproj
+ version = 0.0.0
+
+ [options]
+ {param} =
+ """
+ ),
+ "setup.py": DALS(
+ """
+ __import__('setuptools').setup(
+ name="myproj",
+ version="0.0.0",
+ {param}=[]
+ )
+ """
+ ),
+ "pyproject.toml": DALS(
+ """
+ [build-system]
+ requires = []
+ build-backend = 'setuptools.build_meta'
+
+ [project]
+ name = "myproj"
+ version = "0.0.0"
+
+ [tool.setuptools]
+ {param} = []
+ """
+ ),
+ "template-pyproject.toml": DALS(
+ """
+ [build-system]
+ requires = []
+ build-backend = 'setuptools.build_meta'
+ """
+ )
+ }
+
+ @pytest.mark.parametrize(
+ "config_file, param, circumstance",
+ product(
+ ["setup.cfg", "setup.py", "pyproject.toml"],
+ ["packages", "py_modules"],
+ FILES.keys()
+ )
+ )
+ def test_purposefully_empty(self, tmp_path, config_file, param, circumstance):
+ files = self.FILES[circumstance] + ["mod.py", "other.py", "src/pkg/__init__.py"]
+ _populate_project_dir(tmp_path, files, {})
+
+ if config_file == "pyproject.toml":
+ template_param = param.replace("_", "-")
+ else:
+ # Make sure build works with or without setup.cfg
+ pyproject = self.PURPOSEFULLY_EMPY["template-pyproject.toml"]
+ (tmp_path / "pyproject.toml").write_text(pyproject)
+ template_param = param
+
+ config = self.PURPOSEFULLY_EMPY[config_file].format(param=template_param)
+ (tmp_path / config_file).write_text(config)
+
+ dist = _get_dist(tmp_path, {})
+        # When either the packages or py_modules parameter is an empty list,
+ # then there should be no discovery
+ assert getattr(dist, param) == []
+ other = {"py_modules": "packages", "packages": "py_modules"}[param]
+ assert getattr(dist, other) is None
+
+ @pytest.mark.parametrize(
+ "extra_files, pkgs",
+ [
+ (["venv/bin/simulate_venv"], {"pkg"}),
+ (["pkg-stubs/__init__.pyi"], {"pkg", "pkg-stubs"}),
+ (["other-stubs/__init__.pyi"], {"pkg", "other-stubs"}),
+ (
+ # Type stubs can also be namespaced
+ ["namespace-stubs/pkg/__init__.pyi"],
+ {"pkg", "namespace-stubs", "namespace-stubs.pkg"},
+ ),
+ (
+ # Just the top-level package can have `-stubs`, ignore nested ones
+ ["namespace-stubs/pkg-stubs/__init__.pyi"],
+ {"pkg", "namespace-stubs"}
+ ),
+ (["_hidden/file.py"], {"pkg"}),
+ (["news/finalize.py"], {"pkg"}),
+ ]
+ )
+ def test_flat_layout_with_extra_files(self, tmp_path, extra_files, pkgs):
+ files = self.FILES["flat"] + extra_files
+ _populate_project_dir(tmp_path, files, {})
+ dist = _get_dist(tmp_path, {})
+ assert set(dist.packages) == pkgs
+
+ @pytest.mark.parametrize(
+ "extra_files",
+ [
+ ["other/__init__.py"],
+ ["other/finalize.py"],
+ ]
+ )
+ def test_flat_layout_with_dangerous_extra_files(self, tmp_path, extra_files):
+ files = self.FILES["flat"] + extra_files
+ _populate_project_dir(tmp_path, files, {})
+ with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"):
+ _get_dist(tmp_path, {})
+
+ def test_flat_layout_with_single_module(self, tmp_path):
+ files = self.FILES["single_module"] + ["invalid-module-name.py"]
+ _populate_project_dir(tmp_path, files, {})
+ dist = _get_dist(tmp_path, {})
+ assert set(dist.py_modules) == {"pkg"}
+
+ def test_flat_layout_with_multiple_modules(self, tmp_path):
+ files = self.FILES["single_module"] + ["valid_module_name.py"]
+ _populate_project_dir(tmp_path, files, {})
+ with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"):
+ _get_dist(tmp_path, {})
+
+
+class TestNoConfig:
+ DEFAULT_VERSION = "0.0.0" # Default version given by setuptools
+
+ EXAMPLES = {
+ "pkg1": ["src/pkg1.py"],
+ "pkg2": ["src/pkg2/__init__.py"],
+ "pkg3": ["src/pkg3/__init__.py", "src/pkg3-stubs/__init__.py"],
+ "pkg4": ["pkg4/__init__.py", "pkg4-stubs/__init__.py"],
+ "ns.nested.pkg1": ["src/ns/nested/pkg1/__init__.py"],
+ "ns.nested.pkg2": ["ns/nested/pkg2/__init__.py"],
+ }
+
+ @pytest.mark.parametrize("example", EXAMPLES.keys())
+ def test_discover_name(self, tmp_path, example):
+ _populate_project_dir(tmp_path, self.EXAMPLES[example], {})
+ dist = _get_dist(tmp_path, {})
+ assert dist.get_name() == example
+
+ def test_build_with_discovered_name(self, tmp_path):
+ files = ["src/ns/nested/pkg/__init__.py"]
+ _populate_project_dir(tmp_path, files, {})
+ _run_build(tmp_path, "--sdist")
+ # Expected distribution file
+ dist_file = tmp_path / f"dist/ns.nested.pkg-{self.DEFAULT_VERSION}.tar.gz"
+ assert dist_file.is_file()
+
+
+class TestWithAttrDirective:
+ @pytest.mark.parametrize(
+ "folder, opts",
+ [
+ ("src", {}),
+ ("lib", {"packages": "find:", "packages.find": {"where": "lib"}}),
+ ]
+ )
+ def test_setupcfg_metadata(self, tmp_path, folder, opts):
+ files = [f"{folder}/pkg/__init__.py", "setup.cfg"]
+ _populate_project_dir(tmp_path, files, opts)
+ (tmp_path / folder / "pkg/__init__.py").write_text("version = 42")
+ (tmp_path / "setup.cfg").write_text(
+ "[metadata]\nversion = attr: pkg.version\n"
+ + (tmp_path / "setup.cfg").read_text()
+ )
+
+ dist = _get_dist(tmp_path, {})
+ assert dist.get_name() == "pkg"
+ assert dist.get_version() == "42"
+ assert dist.package_dir
+ package_path = find_package_path("pkg", dist.package_dir, tmp_path)
+ assert os.path.exists(package_path)
+ assert folder in _Path(package_path).parts()
+
+ _run_build(tmp_path, "--sdist")
+ dist_file = tmp_path / "dist/pkg-42.tar.gz"
+ assert dist_file.is_file()
+
+ def test_pyproject_metadata(self, tmp_path):
+ _populate_project_dir(tmp_path, ["src/pkg/__init__.py"], {})
+ (tmp_path / "src/pkg/__init__.py").write_text("version = 42")
+ (tmp_path / "pyproject.toml").write_text(
+ "[project]\nname = 'pkg'\ndynamic = ['version']\n"
+ "[tool.setuptools.dynamic]\nversion = {attr = 'pkg.version'}\n"
+ )
+ dist = _get_dist(tmp_path, {})
+ assert dist.get_version() == "42"
+ assert dist.package_dir == {"": "src"}
+
+
+class TestWithCExtension:
+ def _simulate_package_with_extension(self, tmp_path):
+ # This example is based on: https://github.com/nucleic/kiwi/tree/1.4.0
+ files = [
+ "benchmarks/file.py",
+ "docs/Makefile",
+ "docs/requirements.txt",
+ "docs/source/conf.py",
+ "proj/header.h",
+ "proj/file.py",
+ "py/proj.cpp",
+ "py/other.cpp",
+ "py/file.py",
+ "py/py.typed",
+ "py/tests/test_proj.py",
+ "README.rst",
+ ]
+ _populate_project_dir(tmp_path, files, {})
+
+ setup_script = """
+ from setuptools import Extension, setup
+
+ ext_modules = [
+ Extension(
+ "proj",
+ ["py/proj.cpp", "py/other.cpp"],
+ include_dirs=["."],
+ language="c++",
+ ),
+ ]
+ setup(ext_modules=ext_modules)
+ """
+ (tmp_path / "setup.py").write_text(DALS(setup_script))
+
+ def test_skip_discovery_with_setupcfg_metadata(self, tmp_path):
+ """Ensure that auto-discovery is not triggered when the project is based on
+ C-extensions only, for backward compatibility.
+ """
+ self._simulate_package_with_extension(tmp_path)
+
+ pyproject = """
+ [build-system]
+ requires = []
+ build-backend = 'setuptools.build_meta'
+ """
+ (tmp_path / "pyproject.toml").write_text(DALS(pyproject))
+
+ setupcfg = """
+ [metadata]
+ name = proj
+ version = 42
+ """
+ (tmp_path / "setup.cfg").write_text(DALS(setupcfg))
+
+ dist = _get_dist(tmp_path, {})
+ assert dist.get_name() == "proj"
+ assert dist.get_version() == "42"
+ assert dist.py_modules is None
+ assert dist.packages is None
+ assert len(dist.ext_modules) == 1
+ assert dist.ext_modules[0].name == "proj"
+
+ def test_dont_skip_discovery_with_pyproject_metadata(self, tmp_path):
+        """When opting in to pyproject.toml metadata, auto-discovery will be active if
+ the package lists C-extensions, but does not configure py-modules or packages.
+
+ This way we ensure users with complex package layouts that would lead to the
+ discovery of multiple top-level modules/packages see errors and are forced to
+ explicitly set ``packages`` or ``py-modules``.
+ """
+ self._simulate_package_with_extension(tmp_path)
+
+ pyproject = """
+ [project]
+ name = 'proj'
+ version = '42'
+ """
+ (tmp_path / "pyproject.toml").write_text(DALS(pyproject))
+ with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"):
+ _get_dist(tmp_path, {})
+
+
+class TestWithPackageData:
+ def _simulate_package_with_data_files(self, tmp_path, src_root):
+ files = [
+ f"{src_root}/proj/__init__.py",
+ f"{src_root}/proj/file1.txt",
+ f"{src_root}/proj/nested/file2.txt",
+ ]
+ _populate_project_dir(tmp_path, files, {})
+
+ manifest = """
+ global-include *.py *.txt
+ """
+ (tmp_path / "MANIFEST.in").write_text(DALS(manifest))
+
+ EXAMPLE_SETUPCFG = """
+ [metadata]
+ name = proj
+ version = 42
+
+ [options]
+ include_package_data = True
+ """
+ EXAMPLE_PYPROJECT = """
+ [project]
+ name = "proj"
+ version = "42"
+ """
+
+ PYPROJECT_PACKAGE_DIR = """
+ [tool.setuptools]
+ package-dir = {"" = "src"}
+ """
+
+ @pytest.mark.parametrize(
+ "src_root, files",
+ [
+ (".", {"setup.cfg": DALS(EXAMPLE_SETUPCFG)}),
+ (".", {"pyproject.toml": DALS(EXAMPLE_PYPROJECT)}),
+ ("src", {"setup.cfg": DALS(EXAMPLE_SETUPCFG)}),
+ ("src", {"pyproject.toml": DALS(EXAMPLE_PYPROJECT)}),
+ (
+ "src",
+ {
+ "setup.cfg": DALS(EXAMPLE_SETUPCFG) + DALS(
+ """
+ packages = find:
+ package_dir =
+ =src
+
+ [options.packages.find]
+ where = src
+ """
+ )
+ }
+ ),
+ (
+ "src",
+ {
+ "pyproject.toml": DALS(EXAMPLE_PYPROJECT) + DALS(
+ """
+ [tool.setuptools]
+ package-dir = {"" = "src"}
+ """
+ )
+ },
+ ),
+ ]
+ )
+ def test_include_package_data(self, tmp_path, src_root, files):
+ """
+ Make sure auto-discovery does not affect package include_package_data.
+ See issue #3196.
+ """
+ jaraco.path.build(files, prefix=str(tmp_path))
+ self._simulate_package_with_data_files(tmp_path, src_root)
+
+ expected = {
+ os.path.normpath(f"{src_root}/proj/file1.txt").replace(os.sep, "/"),
+ os.path.normpath(f"{src_root}/proj/nested/file2.txt").replace(os.sep, "/"),
+ }
+
+ _run_build(tmp_path)
+
+ sdist_files = get_sdist_members(next(tmp_path.glob("dist/*.tar.gz")))
+ print("~~~~~ sdist_members ~~~~~")
+ print('\n'.join(sdist_files))
+ assert sdist_files >= expected
+
+ wheel_files = get_wheel_members(next(tmp_path.glob("dist/*.whl")))
+ print("~~~~~ wheel_members ~~~~~")
+ print('\n'.join(wheel_files))
+ orig_files = {f.replace("src/", "").replace("lib/", "") for f in expected}
+ assert wheel_files >= orig_files
+
+
+def test_compatible_with_numpy_configuration(tmp_path):
+ files = [
+ "dir1/__init__.py",
+ "dir2/__init__.py",
+ "file.py",
+ ]
+ _populate_project_dir(tmp_path, files, {})
+ dist = Distribution({})
+ dist.configuration = object()
+ dist.set_defaults()
+ assert dist.py_modules is None
+ assert dist.packages is None
+
+
+def _populate_project_dir(root, files, options):
+ # NOTE: Currently pypa/build will refuse to build the project if no
+ # `pyproject.toml` or `setup.py` is found. So it is impossible to do
+ # completely "config-less" projects.
+ (root / "setup.py").write_text("import setuptools\nsetuptools.setup()")
+ (root / "README.md").write_text("# Example Package")
+ (root / "LICENSE").write_text("Copyright (c) 2018")
+ _write_setupcfg(root, options)
+ paths = (root / f for f in files)
+ for path in paths:
+ path.parent.mkdir(exist_ok=True, parents=True)
+ path.touch()
+
+
+def _write_setupcfg(root, options):
+ if not options:
+ print("~~~~~ **NO** setup.cfg ~~~~~")
+ return
+ setupcfg = ConfigParser()
+ setupcfg.add_section("options")
+ for key, value in options.items():
+ if key == "packages.find":
+ setupcfg.add_section(f"options.{key}")
+ setupcfg[f"options.{key}"].update(value)
+ elif isinstance(value, list):
+ setupcfg["options"][key] = ", ".join(value)
+ elif isinstance(value, dict):
+ str_value = "\n".join(f"\t{k} = {v}" for k, v in value.items())
+ setupcfg["options"][key] = "\n" + str_value
+ else:
+ setupcfg["options"][key] = str(value)
+ with open(root / "setup.cfg", "w") as f:
+ setupcfg.write(f)
+ print("~~~~~ setup.cfg ~~~~~")
+ print((root / "setup.cfg").read_text())
+
+
+def _run_build(path, *flags):
+ cmd = [sys.executable, "-m", "build", "--no-isolation", *flags, str(path)]
+ return run(cmd, env={'DISTUTILS_DEBUG': ''})
+
+
+def _get_dist(dist_path, attrs):
+ root = "/".join(os.path.split(dist_path)) # POSIX-style
+
+ script = dist_path / 'setup.py'
+ if script.exists():
+ with _Path(dist_path):
+ dist = distutils.core.run_setup("setup.py", {}, stop_after="init")
+ else:
+ dist = Distribution(attrs)
+
+ dist.src_root = root
+ dist.script_name = "setup.py"
+ with _Path(dist_path):
+ dist.parse_config_files()
+
+ dist.set_defaults()
+ return dist
+
+
+def _run_sdist_programatically(dist_path, attrs):
+ dist = _get_dist(dist_path, attrs)
+ cmd = sdist(dist)
+ cmd.ensure_finalized()
+ assert cmd.distribution.packages or cmd.distribution.py_modules
+
+ with quiet(), _Path(dist_path):
+ cmd.run()
+
+ return dist, cmd
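
The new module above drives setuptools' automatic discovery both end to end (via `Distribution.set_defaults` and `python -m build`) and through the helpers in `setuptools.discovery`. A minimal sketch of the helper usage the tests rely on, with an illustrative src-layout project root:

    from pathlib import Path
    from setuptools.discovery import find_package_path, find_parent_package

    root = Path("/path/to/project")  # placeholder project root
    packages = ["namespace", "namespace.pkg", "namespace.pkg.nested"]

    # Returns "namespace.pkg" here, or None when there is no single parent.
    parent = find_parent_package(packages, {"": "src"}, root)
    if parent:
        print(find_package_path(parent, {"": "src"}, root))  # .../src/namespace/pkg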
diff --git a/setuptools/tests/test_depends.py b/setuptools/tests/test_depends.py
index e0cfa88..bff1dfb 100644
--- a/setuptools/tests/test_depends.py
+++ b/setuptools/tests/test_depends.py
@@ -5,12 +5,12 @@ from setuptools import depends
class TestGetModuleConstant:
- def test_basic(self):
- """
- Invoke get_module_constant on a module in
- the test package.
- """
- mod_name = 'setuptools.tests.mod_with_constant'
- val = depends.get_module_constant(mod_name, 'value')
- assert val == 'three, sir!'
- assert 'setuptools.tests.mod_with_constant' not in sys.modules
+ def test_basic(self):
+ """
+ Invoke get_module_constant on a module in
+ the test package.
+ """
+ mod_name = 'setuptools.tests.mod_with_constant'
+ val = depends.get_module_constant(mod_name, 'value')
+ assert val == 'three, sir!'
+ assert 'setuptools.tests.mod_with_constant' not in sys.modules
diff --git a/setuptools/tests/test_develop.py b/setuptools/tests/test_develop.py
index 00d4bd9..c52072a 100644
--- a/setuptools/tests/test_develop.py
+++ b/setuptools/tests/test_develop.py
@@ -1,19 +1,16 @@
"""develop tests
"""
-from __future__ import absolute_import, unicode_literals
-
import os
-import site
import sys
-import io
import subprocess
import platform
+import pathlib
-from setuptools.extern import six
from setuptools.command import test
import pytest
+import pip_run.launch
from setuptools.command.develop import develop
from setuptools.dist import Distribution
@@ -25,7 +22,6 @@ from setuptools import setup
setup(name='foo',
packages=['foo'],
- use_2to3=True,
)
"""
@@ -33,7 +29,7 @@ INIT_PY = """print "foo"
"""
-@pytest.yield_fixture
+@pytest.fixture
def temp_user(monkeypatch):
with contexts.tempdir() as user_base:
with contexts.tempdir() as user_site:
@@ -42,7 +38,7 @@ def temp_user(monkeypatch):
yield
-@pytest.yield_fixture
+@pytest.fixture
def test_env(tmpdir, temp_user):
target = tmpdir
foo = target.mkdir('foo')
@@ -62,42 +58,6 @@ class TestDevelop:
in_virtualenv = hasattr(sys, 'real_prefix')
in_venv = hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix
- @pytest.mark.skipif(
- in_virtualenv or in_venv,
- reason="Cannot run when invoked in a virtualenv or venv")
- def test_2to3_user_mode(self, test_env):
- settings = dict(
- name='foo',
- packages=['foo'],
- use_2to3=True,
- version='0.0',
- )
- dist = Distribution(settings)
- dist.script_name = 'setup.py'
- cmd = develop(dist)
- cmd.user = 1
- cmd.ensure_finalized()
- cmd.install_dir = site.USER_SITE
- cmd.user = 1
- with contexts.quiet():
- cmd.run()
-
- # let's see if we got our egg link at the right place
- content = os.listdir(site.USER_SITE)
- content.sort()
- assert content == ['easy-install.pth', 'foo.egg-link']
-
- # Check that we are using the right code.
- fn = os.path.join(site.USER_SITE, 'foo.egg-link')
- with io.open(fn) as egg_link_file:
- path = egg_link_file.read().split()[0].strip()
- fn = os.path.join(path, 'foo', '__init__.py')
- with io.open(fn) as init_file:
- init = init_file.read().strip()
-
- expected = 'print("foo")' if six.PY3 else 'print "foo"'
- assert init == expected
-
def test_console_scripts(self, tmpdir):
"""
Test that console scripts are installed and that they reference
@@ -105,7 +65,8 @@ class TestDevelop:
"""
pytest.skip(
"TODO: needs a fixture to cause 'develop' "
- "to be invoked without mutating environment.")
+ "to be invoked without mutating environment."
+ )
settings = dict(
name='foo',
packages=['foo'],
@@ -131,6 +92,7 @@ class TestResolver:
of what _resolve_setup_path is intending to do. Come up with
more meaningful cases that look like real-world scenarios.
"""
+
def test_resolve_setup_path_cwd(self):
assert develop._resolve_setup_path('.', '.', '.') == '.'
@@ -142,7 +104,6 @@ class TestResolver:
class TestNamespaces:
-
@staticmethod
def install_develop(src_dir, target):
@@ -150,7 +111,8 @@ class TestNamespaces:
sys.executable,
'setup.py',
'develop',
- '--install-dir', str(target),
+ '--install-dir',
+ str(target),
]
with src_dir.as_cwd():
with test.test.paths_on_pythonpath([str(target)]):
@@ -161,7 +123,7 @@ class TestNamespaces:
reason="https://github.com/pypa/setuptools/issues/851",
)
@pytest.mark.skipif(
- platform.python_implementation() == 'PyPy' and six.PY3,
+ platform.python_implementation() == 'PyPy',
reason="https://github.com/pypa/setuptools/issues/1202",
)
def test_namespace_package_importable(self, tmpdir):
@@ -181,14 +143,16 @@ class TestNamespaces:
'pip',
'install',
str(pkg_A),
- '-t', str(target),
+ '-t',
+ str(target),
]
subprocess.check_call(install_cmd)
self.install_develop(pkg_B, target)
namespaces.make_site_dir(target)
try_import = [
sys.executable,
- '-c', 'import myns.pkgA; import myns.pkgB',
+ '-c',
+ 'import myns.pkgA; import myns.pkgB',
]
with test.test.paths_on_pythonpath([str(target)]):
subprocess.check_call(try_import)
@@ -196,7 +160,50 @@ class TestNamespaces:
# additionally ensure that pkg_resources import works
pkg_resources_imp = [
sys.executable,
- '-c', 'import pkg_resources',
+ '-c',
+ 'import pkg_resources',
]
with test.test.paths_on_pythonpath([str(target)]):
subprocess.check_call(pkg_resources_imp)
+
+ @pytest.mark.xfail(
+ platform.python_implementation() == 'PyPy',
+ reason="Workaround fails on PyPy (why?)",
+ )
+ def test_editable_prefix(self, tmp_path, sample_project):
+ """
+ Editable install to a prefix should be discoverable.
+ """
+ prefix = tmp_path / 'prefix'
+
+ # figure out where pip will likely install the package
+ site_packages = prefix / next(
+ pathlib.Path(path).relative_to(sys.prefix)
+ for path in sys.path
+ if 'site-packages' in path and path.startswith(sys.prefix)
+ )
+ site_packages.mkdir(parents=True)
+
+ # install workaround
+ pip_run.launch.inject_sitecustomize(str(site_packages))
+
+ env = dict(os.environ, PYTHONPATH=str(site_packages))
+ cmd = [
+ sys.executable,
+ '-m',
+ 'pip',
+ 'install',
+ '--editable',
+ str(sample_project),
+ '--prefix',
+ str(prefix),
+ '--no-build-isolation',
+ ]
+ subprocess.check_call(cmd, env=env)
+
+ # now run 'sample' with the prefix on the PYTHONPATH
+ bin = 'Scripts' if platform.system() == 'Windows' else 'bin'
+ exe = prefix / bin / 'sample'
+ if sys.version_info < (3, 8) and platform.system() == 'Windows':
+ exe = str(exe)
+ subprocess.check_call([exe], env=env)
diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py
index 5162e1c..e7d2f5c 100644
--- a/setuptools/tests/test_dist.py
+++ b/setuptools/tests/test_dist.py
@@ -1,15 +1,25 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import unicode_literals
-
import io
-
+import collections
+import re
+import functools
+import os
+import urllib.request
+import urllib.parse
+from distutils.errors import DistutilsSetupError
+from setuptools.dist import (
+ _get_unpatched,
+ check_package_data,
+ DistDeprecationWarning,
+ check_specifier,
+ rfc822_escape,
+ rfc822_unescape,
+)
+from setuptools import sic
from setuptools import Distribution
-from setuptools.extern.six.moves.urllib.request import pathname2url
-from setuptools.extern.six.moves.urllib_parse import urljoin
from .textwrap import DALS
from .test_easy_install import make_nspkg_sdist
+from .test_find_packages import ensure_files
import pytest
@@ -19,7 +29,8 @@ def test_dist_fetch_build_egg(tmpdir):
Check multiple calls to `Distribution.fetch_build_egg` work as expected.
"""
index = tmpdir.mkdir('index')
- index_url = urljoin('file://', pathname2url(str(index)))
+ index_url = urllib.parse.urljoin(
+ 'file://', urllib.request.pathname2url(str(index)))
def sdist_with_index(distname, version):
dist_dir = index.mkdir(distname)
@@ -56,6 +67,124 @@ def test_dist_fetch_build_egg(tmpdir):
assert [dist.key for dist in resolved_dists if dist] == reqs
+def test_dist__get_unpatched_deprecated():
+ pytest.warns(DistDeprecationWarning, _get_unpatched, [""])
+
+
+EXAMPLE_BASE_INFO = dict(
+ name="package",
+ version="0.0.1",
+ author="Foo Bar",
+ author_email="foo@bar.net",
+ long_description="Long\ndescription",
+ description="Short description",
+ keywords=["one", "two"],
+)
+
+
+def __read_test_cases():
+ base = EXAMPLE_BASE_INFO
+
+ params = functools.partial(dict, base)
+
+ test_cases = [
+ ('Metadata version 1.0', params()),
+ ('Metadata Version 1.0: Short long description', params(
+ long_description='Short long description',
+ )),
+ ('Metadata version 1.1: Classifiers', params(
+ classifiers=[
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.7',
+ 'License :: OSI Approved :: MIT License',
+ ],
+ )),
+ ('Metadata version 1.1: Download URL', params(
+ download_url='https://example.com',
+ )),
+ ('Metadata Version 1.2: Requires-Python', params(
+ python_requires='>=3.7',
+ )),
+ pytest.param(
+ 'Metadata Version 1.2: Project-Url',
+ params(project_urls=dict(Foo='https://example.bar')),
+ marks=pytest.mark.xfail(
+ reason="Issue #1578: project_urls not read",
+ ),
+ ),
+ ('Metadata Version 2.1: Long Description Content Type', params(
+ long_description_content_type='text/x-rst; charset=UTF-8',
+ )),
+ ('License', params(license='MIT', )),
+ ('License multiline', params(
+ license='This is a long license \nover multiple lines',
+ )),
+ pytest.param(
+ 'Metadata Version 2.1: Provides Extra',
+ params(provides_extras=['foo', 'bar']),
+ marks=pytest.mark.xfail(reason="provides_extras not read"),
+ ),
+ ('Missing author', dict(
+ name='foo',
+ version='1.0.0',
+ author_email='snorri@sturluson.name',
+ )),
+ ('Missing author e-mail', dict(
+ name='foo',
+ version='1.0.0',
+ author='Snorri Sturluson',
+ )),
+ ('Missing author and e-mail', dict(
+ name='foo',
+ version='1.0.0',
+ )),
+ ('Bypass normalized version', dict(
+ name='foo',
+ version=sic('1.0.0a'),
+ )),
+ ]
+
+ return test_cases
+
+
+@pytest.mark.parametrize('name,attrs', __read_test_cases())
+def test_read_metadata(name, attrs):
+ dist = Distribution(attrs)
+ metadata_out = dist.metadata
+ dist_class = metadata_out.__class__
+
+ # Write to PKG_INFO and then load into a new metadata object
+ PKG_INFO = io.StringIO()
+
+ metadata_out.write_pkg_file(PKG_INFO)
+
+ PKG_INFO.seek(0)
+ metadata_in = dist_class()
+ metadata_in.read_pkg_file(PKG_INFO)
+
+ tested_attrs = [
+ ('name', dist_class.get_name),
+ ('version', dist_class.get_version),
+ ('author', dist_class.get_contact),
+ ('author_email', dist_class.get_contact_email),
+ ('metadata_version', dist_class.get_metadata_version),
+ ('provides', dist_class.get_provides),
+ ('description', dist_class.get_description),
+ ('long_description', dist_class.get_long_description),
+ ('download_url', dist_class.get_download_url),
+ ('keywords', dist_class.get_keywords),
+ ('platforms', dist_class.get_platforms),
+ ('obsoletes', dist_class.get_obsoletes),
+ ('requires', dist_class.get_requires),
+ ('classifiers', dist_class.get_classifiers),
+ ('project_urls', lambda s: getattr(s, 'project_urls', {})),
+ ('provides_extras', lambda s: getattr(s, 'provides_extras', set())),
+ ]
+
+ for attr, getter in tested_attrs:
+ assert getter(metadata_in) == getter(metadata_out)
+
+
def __maintainer_test_cases():
attrs = {"name": "package",
"version": "1.0",
@@ -127,8 +256,8 @@ def test_maintainer_author(name, attrs, tmpdir):
with io.open(str(fn.join('PKG-INFO')), 'r', encoding='utf-8') as f:
raw_pkg_lines = f.readlines()
- # Drop blank lines
- pkg_lines = list(filter(None, raw_pkg_lines))
+ # Drop blank lines and strip lines from default description
+ pkg_lines = list(filter(None, raw_pkg_lines[:-2]))
pkg_lines_set = set(pkg_lines)
@@ -143,3 +272,238 @@ def test_maintainer_author(name, attrs, tmpdir):
else:
line = '%s: %s' % (fkey, val)
assert line in pkg_lines_set
+
+
+def test_provides_extras_deterministic_order():
+ extras = collections.OrderedDict()
+ extras['a'] = ['foo']
+ extras['b'] = ['bar']
+ attrs = dict(extras_require=extras)
+ dist = Distribution(attrs)
+ assert dist.metadata.provides_extras == ['a', 'b']
+ attrs['extras_require'] = collections.OrderedDict(
+ reversed(list(attrs['extras_require'].items())))
+ dist = Distribution(attrs)
+ assert dist.metadata.provides_extras == ['b', 'a']
+
+
+CHECK_PACKAGE_DATA_TESTS = (
+ # Valid.
+ ({
+ '': ['*.txt', '*.rst'],
+ 'hello': ['*.msg'],
+ }, None),
+ # Not a dictionary.
+ ((
+ ('', ['*.txt', '*.rst']),
+ ('hello', ['*.msg']),
+ ), (
+ "'package_data' must be a dictionary mapping package"
+ " names to lists of string wildcard patterns"
+ )),
+ # Invalid key type.
+ ({
+ 400: ['*.txt', '*.rst'],
+ }, (
+ "keys of 'package_data' dict must be strings (got 400)"
+ )),
+ # Invalid value type.
+ ({
+ 'hello': str('*.msg'),
+ }, (
+ "\"values of 'package_data' dict\" "
+ "must be a list of strings (got '*.msg')"
+ )),
+ # Invalid value type (generators are single use)
+ ({
+ 'hello': (x for x in "generator"),
+ }, (
+ "\"values of 'package_data' dict\" must be a list of strings "
+ "(got <generator object"
+ )),
+)
+
+
+@pytest.mark.parametrize(
+ 'package_data, expected_message', CHECK_PACKAGE_DATA_TESTS)
+def test_check_package_data(package_data, expected_message):
+ if expected_message is None:
+ assert check_package_data(None, 'package_data', package_data) is None
+ else:
+ with pytest.raises(
+ DistutilsSetupError, match=re.escape(expected_message)):
+ check_package_data(None, str('package_data'), package_data)
+
+
+def test_check_specifier():
+ # valid specifier value
+ attrs = {'name': 'foo', 'python_requires': '>=3.0, !=3.1'}
+ dist = Distribution(attrs)
+ check_specifier(dist, attrs, attrs['python_requires'])
+
+ # invalid specifier value
+ attrs = {'name': 'foo', 'python_requires': ['>=3.0', '!=3.1']}
+ with pytest.raises(DistutilsSetupError):
+ dist = Distribution(attrs)
+
+
+@pytest.mark.parametrize(
+ 'content, result',
+ (
+ pytest.param(
+ "Just a single line",
+ None,
+ id="single_line",
+ ),
+ pytest.param(
+ "Multiline\nText\nwithout\nextra indents\n",
+ None,
+ id="multiline",
+ ),
+ pytest.param(
+ "Multiline\n With\n\nadditional\n indentation",
+ None,
+ id="multiline_with_indentation",
+ ),
+ pytest.param(
+ " Leading whitespace",
+ "Leading whitespace",
+ id="remove_leading_whitespace",
+ ),
+ pytest.param(
+ " Leading whitespace\nIn\n Multiline comment",
+ "Leading whitespace\nIn\n Multiline comment",
+ id="remove_leading_whitespace_multiline",
+ ),
+ )
+)
+def test_rfc822_unescape(content, result):
+ assert (result or content) == rfc822_unescape(rfc822_escape(content))
+
+
+def test_metadata_name():
+ with pytest.raises(DistutilsSetupError, match='missing.*name'):
+ Distribution()._validate_metadata()
+
+
+@pytest.mark.parametrize(
+ "dist_name, py_module",
+ [
+ ("my.pkg", "my_pkg"),
+ ("my-pkg", "my_pkg"),
+ ("my_pkg", "my_pkg"),
+ ("pkg", "pkg"),
+ ]
+)
+def test_dist_default_py_modules(tmp_path, dist_name, py_module):
+ (tmp_path / f"{py_module}.py").touch()
+
+ (tmp_path / "setup.py").touch()
+ (tmp_path / "noxfile.py").touch()
+ # ^-- make sure common tool files are ignored
+
+ attrs = {
+ **EXAMPLE_BASE_INFO,
+ "name": dist_name,
+ "src_root": str(tmp_path)
+ }
+ # Find `py_modules` corresponding to dist_name if not given
+ dist = Distribution(attrs)
+ dist.set_defaults()
+ assert dist.py_modules == [py_module]
+ # When `py_modules` is given, don't do anything
+ dist = Distribution({**attrs, "py_modules": ["explicity_py_module"]})
+ dist.set_defaults()
+ assert dist.py_modules == ["explicity_py_module"]
+ # When `packages` is given, don't do anything
+ dist = Distribution({**attrs, "packages": ["explicity_package"]})
+ dist.set_defaults()
+ assert not dist.py_modules
+
+
+@pytest.mark.parametrize(
+ "dist_name, package_dir, package_files, packages",
+ [
+ ("my.pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
+ ("my-pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
+ ("my_pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
+ ("my.pkg", None, ["my/pkg/__init__.py"], ["my", "my.pkg"]),
+ (
+ "my_pkg",
+ None,
+ ["src/my_pkg/__init__.py", "src/my_pkg2/__init__.py"],
+ ["my_pkg", "my_pkg2"]
+ ),
+ (
+ "my_pkg",
+ {"pkg": "lib", "pkg2": "lib2"},
+ ["lib/__init__.py", "lib/nested/__init__.pyt", "lib2/__init__.py"],
+ ["pkg", "pkg.nested", "pkg2"]
+ ),
+ ]
+)
+def test_dist_default_packages(
+ tmp_path, dist_name, package_dir, package_files, packages
+):
+ ensure_files(tmp_path, package_files)
+
+ (tmp_path / "setup.py").touch()
+ (tmp_path / "noxfile.py").touch()
+ # ^-- should not be included by default
+
+ attrs = {
+ **EXAMPLE_BASE_INFO,
+ "name": dist_name,
+ "src_root": str(tmp_path),
+ "package_dir": package_dir
+ }
+ # Find `packages` either corresponding to dist_name or inside src
+ dist = Distribution(attrs)
+ dist.set_defaults()
+    assert not dist.py_modules
+ assert set(dist.packages) == set(packages)
+ # When `py_modules` is given, don't do anything
+ dist = Distribution({**attrs, "py_modules": ["explicit_py_module"]})
+ dist.set_defaults()
+ assert not dist.packages
+ assert set(dist.py_modules) == {"explicit_py_module"}
+ # When `packages` is given, don't do anything
+ dist = Distribution({**attrs, "packages": ["explicit_package"]})
+ dist.set_defaults()
+ assert not dist.py_modules
+ assert set(dist.packages) == {"explicit_package"}
+
+
+@pytest.mark.parametrize(
+ "dist_name, package_dir, package_files",
+ [
+ ("my.pkg.nested", None, ["my/pkg/nested/__init__.py"]),
+ ("my.pkg", None, ["my/pkg/__init__.py", "my/pkg/file.py"]),
+ ("my_pkg", None, ["my_pkg.py"]),
+ ("my_pkg", None, ["my_pkg/__init__.py", "my_pkg/nested/__init__.py"]),
+ ("my_pkg", None, ["src/my_pkg/__init__.py", "src/my_pkg/nested/__init__.py"]),
+ (
+ "my_pkg",
+ {"my_pkg": "lib", "my_pkg.lib2": "lib2"},
+ ["lib/__init__.py", "lib/nested/__init__.pyt", "lib2/__init__.py"],
+ ),
+ # Should not try to guess a name from multiple py_modules/packages
+ ("UNKNOWN", None, ["src/mod1.py", "src/mod2.py"]),
+ ("UNKNOWN", None, ["src/pkg1/__ini__.py", "src/pkg2/__init__.py"]),
+ ]
+)
+def test_dist_default_name(tmp_path, dist_name, package_dir, package_files):
+ """Make sure dist.name is discovered from packages/py_modules"""
+ ensure_files(tmp_path, package_files)
+ attrs = {
+ **EXAMPLE_BASE_INFO,
+ "src_root": "/".join(os.path.split(tmp_path)), # POSIX-style
+ "package_dir": package_dir
+ }
+ del attrs["name"]
+
+ dist = Distribution(attrs)
+ dist.set_defaults()
+ assert dist.py_modules or dist.packages
+ assert dist.get_name() == dist_name
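
The parametrized tests above all follow the same pattern: build a `Distribution` from keyword attributes, call `set_defaults()`, and check which `packages`, `py_modules`, and `name` were inferred. Roughly, with a placeholder project path:

    from setuptools import Distribution

    attrs = {"version": "0.0.1", "src_root": "/path/to/project"}  # placeholder path
    dist = Distribution(attrs)
    dist.set_defaults()  # fills in packages / py_modules / name when omitted

    print(dist.get_name(), dist.packages, dist.py_modules)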
diff --git a/setuptools/tests/test_dist_info.py b/setuptools/tests/test_dist_info.py
index f7e7d2b..29fbd09 100644
--- a/setuptools/tests/test_dist_info.py
+++ b/setuptools/tests/test_dist_info.py
@@ -1,10 +1,6 @@
"""Test .dist-info style distributions.
"""
-from __future__ import unicode_literals
-
-from setuptools.extern.six.moves import map
-
import pytest
import pkg_resources
diff --git a/setuptools/tests/test_distutils_adoption.py b/setuptools/tests/test_distutils_adoption.py
new file mode 100644
index 0000000..df8f354
--- /dev/null
+++ b/setuptools/tests/test_distutils_adoption.py
@@ -0,0 +1,158 @@
+import os
+import sys
+import functools
+import platform
+import textwrap
+
+import pytest
+
+
+IS_PYPY = '__pypy__' in sys.builtin_module_names
+
+
+def popen_text(call):
+ """
+ Augment the Popen call with the parameters to ensure unicode text.
+ """
+ return functools.partial(call, universal_newlines=True) \
+ if sys.version_info < (3, 7) else functools.partial(call, text=True)
+
+
+def win_sr(env):
+ """
+ On Windows, SYSTEMROOT must be present to avoid
+
+ > Fatal Python error: _Py_HashRandomization_Init: failed to
+ > get random numbers to initialize Python
+ """
+ if env is None:
+ return
+ if platform.system() == 'Windows':
+ env['SYSTEMROOT'] = os.environ['SYSTEMROOT']
+ return env
+
+
+def find_distutils(venv, imports='distutils', env=None, **kwargs):
+ py_cmd = 'import {imports}; print(distutils.__file__)'.format(**locals())
+ cmd = ['python', '-c', py_cmd]
+ return popen_text(venv.run)(cmd, env=win_sr(env), **kwargs)
+
+
+def count_meta_path(venv, env=None):
+ py_cmd = textwrap.dedent(
+ """
+ import sys
+ is_distutils = lambda finder: finder.__class__.__name__ == "DistutilsMetaFinder"
+ print(len(list(filter(is_distutils, sys.meta_path))))
+ """)
+ cmd = ['python', '-c', py_cmd]
+ return int(popen_text(venv.run)(cmd, env=win_sr(env)))
+
+
+def test_distutils_stdlib(venv):
+ """
+ Ensure stdlib distutils is used when appropriate.
+ """
+ env = dict(SETUPTOOLS_USE_DISTUTILS='stdlib')
+ assert venv.name not in find_distutils(venv, env=env).split(os.sep)
+ assert count_meta_path(venv, env=env) == 0
+
+
+def test_distutils_local_with_setuptools(venv):
+ """
+ Ensure local distutils is used when appropriate.
+ """
+ env = dict(SETUPTOOLS_USE_DISTUTILS='local')
+ loc = find_distutils(venv, imports='setuptools, distutils', env=env)
+ assert venv.name in loc.split(os.sep)
+ assert count_meta_path(venv, env=env) <= 1
+
+
+@pytest.mark.xfail('IS_PYPY', reason='pypy imports distutils on startup')
+def test_distutils_local(venv):
+ """
+ Even without importing, the setuptools-local copy of distutils is
+ preferred.
+ """
+ env = dict(SETUPTOOLS_USE_DISTUTILS='local')
+ assert venv.name in find_distutils(venv, env=env).split(os.sep)
+ assert count_meta_path(venv, env=env) <= 1
+
+
+def test_pip_import(venv):
+ """
+ Ensure pip can be imported.
+ Regression test for #3002.
+ """
+ cmd = ['python', '-c', 'import pip']
+ popen_text(venv.run)(cmd)
+
+
+def test_distutils_has_origin():
+ """
+ Distutils module spec should have an origin. #2990.
+ """
+ assert __import__('distutils').__spec__.origin
+
+
+ENSURE_IMPORTS_ARE_NOT_DUPLICATED = r"""
+# Depending on the importlib machinery and _distutils_hack, some imports are
+# duplicated resulting in different module objects being loaded, which prevents
+# patches as shown in #3042.
+# This script provides a way of verifying if this duplication is happening.
+
+from distutils import cmd
+import distutils.command.sdist as sdist
+
+# import last to prevent caching
+from distutils import {imported_module}
+
+for mod in (cmd, sdist):
+ assert mod.{imported_module} == {imported_module}, (
+        f"\n{{mod.{imported_module}}}\n!=\n{{{imported_module}}}"
+ )
+
+print("success")
+"""
+
+
+@pytest.mark.parametrize(
+ "distutils_version, imported_module",
+ [
+ ("stdlib", "dir_util"),
+ ("stdlib", "file_util"),
+ ("stdlib", "archive_util"),
+ ("local", "dir_util"),
+ ("local", "file_util"),
+ ("local", "archive_util"),
+ ]
+)
+def test_modules_are_not_duplicated_on_import(
+ distutils_version, imported_module, tmpdir_cwd, venv
+):
+ env = dict(SETUPTOOLS_USE_DISTUTILS=distutils_version)
+ script = ENSURE_IMPORTS_ARE_NOT_DUPLICATED.format(imported_module=imported_module)
+ cmd = ['python', '-c', script]
+ output = popen_text(venv.run)(cmd, env=win_sr(env)).strip()
+ assert output == "success"
+
+
+ENSURE_LOG_IMPORT_IS_NOT_DUPLICATED = r"""
+# Similar to ENSURE_IMPORTS_ARE_NOT_DUPLICATED
+import distutils.dist as dist
+from distutils import log
+
+assert dist.log == log, (
+ f"\n{dist.log}\n!=\n{log}"
+)
+
+print("success")
+"""
+
+
+@pytest.mark.parametrize("distutils_version", "local stdlib".split())
+def test_log_module_is_not_duplicated_on_import(distutils_version, tmpdir_cwd, venv):
+ env = dict(SETUPTOOLS_USE_DISTUTILS=distutils_version)
+ cmd = ['python', '-c', ENSURE_LOG_IMPORT_IS_NOT_DUPLICATED]
+ output = popen_text(venv.run)(cmd, env=win_sr(env)).strip()
+ assert output == "success"
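
Outside of the managed virtualenv fixtures, the same probe can be reproduced per `SETUPTOOLS_USE_DISTUTILS` value with a plain subprocess; a rough sketch (the printed paths will vary by environment):

    import os
    import subprocess
    import sys

    probe = "import setuptools, distutils; print(distutils.__file__)"
    for flavor in ("stdlib", "local"):
        env = dict(os.environ, SETUPTOOLS_USE_DISTUTILS=flavor)
        out = subprocess.check_output([sys.executable, "-c", probe], env=env, text=True)
        print(flavor, "->", out.strip())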
diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index 57339c8..5831b26 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -1,7 +1,5 @@
-# -*- coding: utf-8 -*-
"""Easy install Tests
"""
-from __future__ import absolute_import
import sys
import os
@@ -15,21 +13,27 @@ import distutils.errors
import io
import zipfile
import mock
-
import time
-from setuptools.extern.six.moves import urllib
+import re
+import subprocess
+import pathlib
+import warnings
+from collections import namedtuple
import pytest
+from jaraco import path
from setuptools import sandbox
from setuptools.sandbox import run_setup
import setuptools.command.easy_install as ei
-from setuptools.command.easy_install import PthDistributions
-from setuptools.command import easy_install as easy_install_pkg
+from setuptools.command.easy_install import (
+ EasyInstallDeprecationWarning, ScriptWriter, PthDistributions,
+ WindowsScriptWriter,
+)
from setuptools.dist import Distribution
from pkg_resources import normalize_path, working_set
from pkg_resources import Distribution as PRDistribution
-import setuptools.tests.server
+from setuptools.tests.server import MockServer, path_to_url
from setuptools.tests import fail_on_ascii
import pkg_resources
@@ -37,11 +41,21 @@ from . import contexts
from .textwrap import DALS
-class FakeDist(object):
+@pytest.fixture(autouse=True)
+def pip_disable_index(monkeypatch):
+ """
+ Important: Disable the default index for pip to avoid
+ querying packages in the index and potentially resolving
+ and installing packages there.
+ """
+ monkeypatch.setenv('PIP_NO_INDEX', 'true')
+
+
+class FakeDist:
def get_entry_map(self, group):
if group != 'console_scripts':
return {}
- return {'name': 'ep'}
+ return {str('name'): 'ep'}
def as_requirement(self):
return 'spec'
@@ -50,40 +64,22 @@ class FakeDist(object):
SETUP_PY = DALS("""
from setuptools import setup
- setup(name='foo')
+ setup()
""")
class TestEasyInstallTest:
- def test_install_site_py(self, tmpdir):
- dist = Distribution()
- cmd = ei.easy_install(dist)
- cmd.sitepy_installed = False
- cmd.install_dir = str(tmpdir)
- cmd.install_site_py()
- assert (tmpdir / 'site.py').exists()
-
def test_get_script_args(self):
header = ei.CommandSpec.best().from_environment().as_header()
- expected = header + DALS(r"""
- # EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name'
- __requires__ = 'spec'
- import re
- import sys
- from pkg_resources import load_entry_point
-
- if __name__ == '__main__':
- sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
- sys.exit(
- load_entry_point('spec', 'console_scripts', 'name')()
- )
- """)
dist = FakeDist()
-
args = next(ei.ScriptWriter.get_args(dist))
name, script = itertools.islice(args, 2)
-
- assert script == expected
+ assert script.startswith(header)
+ assert "'spec'" in script
+ assert "'console_scripts'" in script
+ assert "'name'" in script
+ assert re.search(
+ '^# EASY-INSTALL-ENTRY-SCRIPT', script, flags=re.MULTILINE)
def test_no_find_links(self):
# new option '--no-find-links', that blocks find-links added at
@@ -124,7 +120,9 @@ class TestEasyInstallTest:
site.getsitepackages.
"""
path = normalize_path('/setuptools/test/site-packages')
- mock_gsp = lambda: [path]
+
+ def mock_gsp():
+ return [path]
monkeypatch.setattr(site, 'getsitepackages', mock_gsp, raising=False)
assert path in ei.get_site_dirs()
@@ -152,7 +150,7 @@ class TestEasyInstallTest:
"",
),
(
- u'mypkg/\u2603.txt',
+ 'mypkg/☃.txt',
"",
),
]
@@ -167,7 +165,8 @@ class TestEasyInstallTest:
return str(sdist)
@fail_on_ascii
- def test_unicode_filename_in_sdist(self, sdist_unicode, tmpdir, monkeypatch):
+ def test_unicode_filename_in_sdist(
+ self, sdist_unicode, tmpdir, monkeypatch):
"""
The install command should execute correctly even if
the package has unicode filenames.
@@ -184,6 +183,59 @@ class TestEasyInstallTest:
cmd.easy_install(sdist_unicode)
@pytest.fixture
+ def sdist_unicode_in_script(self, tmpdir):
+ files = [
+ (
+ "setup.py",
+ DALS("""
+ import setuptools
+ setuptools.setup(
+ name="setuptools-test-unicode",
+ version="1.0",
+ packages=["mypkg"],
+ include_package_data=True,
+ scripts=['mypkg/unicode_in_script'],
+ )
+ """),
+ ),
+ ("mypkg/__init__.py", ""),
+ (
+ "mypkg/unicode_in_script",
+ DALS(
+ """
+ #!/bin/sh
+ # á
+
+ non_python_fn() {
+ }
+ """),
+ ),
+ ]
+ sdist_name = "setuptools-test-unicode-script-1.0.zip"
+ sdist = tmpdir / sdist_name
+ # can't use make_sdist, because the issue only occurs
+ # with zip sdists.
+ sdist_zip = zipfile.ZipFile(str(sdist), "w")
+ for filename, content in files:
+ sdist_zip.writestr(filename, content.encode('utf-8'))
+ sdist_zip.close()
+ return str(sdist)
+
+ @fail_on_ascii
+ def test_unicode_content_in_sdist(
+ self, sdist_unicode_in_script, tmpdir, monkeypatch):
+ """
+ The install command should execute correctly even if
+ the package has unicode in scripts.
+ """
+ dist = Distribution({"script_args": ["easy_install"]})
+ target = (tmpdir / "target").ensure_dir()
+ cmd = ei.easy_install(dist, install_dir=str(target), args=["x"])
+ monkeypatch.setitem(os.environ, "PYTHONPATH", str(target))
+ cmd.ensure_finalized()
+ cmd.easy_install(sdist_unicode_in_script)
+
+ @pytest.fixture
def sdist_script(self, tmpdir):
files = [
(
@@ -198,7 +250,7 @@ class TestEasyInstallTest:
"""),
),
(
- u'mypkg_script',
+ 'mypkg_script',
DALS("""
#/usr/bin/python
print('mypkg_script')
@@ -228,7 +280,24 @@ class TestEasyInstallTest:
cmd.easy_install(sdist_script)
assert (target / 'mypkg_script').exists()
+ def test_dist_get_script_args_deprecated(self):
+ with pytest.warns(EasyInstallDeprecationWarning):
+ ScriptWriter.get_script_args(None, None)
+
+ def test_dist_get_script_header_deprecated(self):
+ with pytest.warns(EasyInstallDeprecationWarning):
+ ScriptWriter.get_script_header("")
+ def test_dist_get_writer_deprecated(self):
+ with pytest.warns(EasyInstallDeprecationWarning):
+ ScriptWriter.get_writer(None)
+
+ def test_dist_WindowsScriptWriter_get_writer_deprecated(self):
+ with pytest.warns(EasyInstallDeprecationWarning):
+ WindowsScriptWriter.get_writer()
+
+
+@pytest.mark.filterwarnings('ignore:Unbuilt egg')
class TestPTHFileWriter:
def test_add_from_cwd_site_sets_dirty(self):
'''a pth file manager should set dirty
@@ -249,7 +318,7 @@ class TestPTHFileWriter:
assert not pth.dirty
-@pytest.yield_fixture
+@pytest.fixture
def setup_context(tmpdir):
with (tmpdir / 'setup.py').open('w') as f:
f.write(SETUP_PY)
@@ -305,7 +374,7 @@ class TestUserInstallTest:
f.write('Name: foo\n')
return str(tmpdir)
- @pytest.yield_fixture()
+ @pytest.fixture()
def install_target(self, tmpdir):
target = str(tmpdir)
with mock.patch('sys.path', sys.path + [target]):
@@ -350,7 +419,7 @@ class TestUserInstallTest:
)
-@pytest.yield_fixture
+@pytest.fixture
def distutils_package():
distutils_setup_py = SETUP_PY.replace(
'from setuptools import setup',
@@ -362,48 +431,51 @@ def distutils_package():
yield
+@pytest.fixture
+def mock_index():
+ # set up a server which will simulate an alternate package index.
+ p_index = MockServer()
+ if p_index.server_port == 0:
+ # Some platforms (Jython) don't find a port to which to bind,
+ # so skip the test for them.
+ pytest.skip("could not find a valid port")
+ p_index.start()
+ return p_index
+
+
class TestDistutilsPackage:
def test_bdist_egg_available_on_distutils_pkg(self, distutils_package):
run_setup('setup.py', ['bdist_egg'])
class TestSetupRequires:
- def test_setup_requires_honors_fetch_params(self):
+
+ def test_setup_requires_honors_fetch_params(self, mock_index, monkeypatch):
"""
When easy_install installs a source distribution which specifies
setup_requires, it should honor the fetch parameters (such as
- allow-hosts, index-url, and find-links).
+ index-url, and find-links).
"""
- # set up a server which will simulate an alternate package index.
- p_index = setuptools.tests.server.MockServer()
- p_index.start()
- netloc = 1
- p_index_loc = urllib.parse.urlparse(p_index.url)[netloc]
- if p_index_loc.endswith(':0'):
- # Some platforms (Jython) don't find a port to which to bind,
- # so skip this test for them.
- return
+ monkeypatch.setenv(str('PIP_RETRIES'), str('0'))
+ monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
+ monkeypatch.setenv('PIP_NO_INDEX', 'false')
with contexts.quiet():
# create an sdist that has a build-time dependency.
with TestSetupRequires.create_sdist() as dist_file:
with contexts.tempdir() as temp_install_dir:
with contexts.environment(PYTHONPATH=temp_install_dir):
- ei_params = [
- '--index-url', p_index.url,
- '--allow-hosts', p_index_loc,
+ cmd = [
+ sys.executable,
+ '-m', 'setup',
+ 'easy_install',
+ '--index-url', mock_index.url,
'--exclude-scripts',
'--install-dir', temp_install_dir,
dist_file,
]
- with sandbox.save_argv(['easy_install']):
- # attempt to install the dist. It should fail because
- # it doesn't exist.
- with pytest.raises(SystemExit):
- easy_install_pkg.main(ei_params)
- # there should have been two or three requests to the server
- # (three happens on Python 3.3a)
- assert 2 <= len(p_index.requests) <= 3
- assert p_index.requests[0].path == '/does-not-exist/'
+ subprocess.Popen(cmd).wait()
+ # there should have been one request to the server
+ assert [r.path for r in mock_index.requests] == ['/does-not-exist/']
@staticmethod
@contextlib.contextmanager
@@ -422,7 +494,9 @@ class TestSetupRequires:
version="1.0",
setup_requires = ['does-not-exist'],
)
- """))])
+ """)),
+ ('setup.cfg', ''),
+ ])
yield dist_path
use_setup_cfg = (
@@ -449,12 +523,13 @@ class TestSetupRequires:
with contexts.save_pkg_resources_state():
with contexts.tempdir() as temp_dir:
- test_pkg = create_setup_requires_package(temp_dir, use_setup_cfg=use_setup_cfg)
+ test_pkg = create_setup_requires_package(
+ temp_dir, use_setup_cfg=use_setup_cfg)
test_setup_py = os.path.join(test_pkg, 'setup.py')
with contexts.quiet() as (stdout, stderr):
# Don't even need to install the package, just
# running the setup.py at all is sufficient
- run_setup(test_setup_py, ['--name'])
+ run_setup(test_setup_py, [str('--name')])
lines = stdout.readlines()
assert len(lines) > 0
@@ -508,9 +583,10 @@ class TestSetupRequires:
try:
# Don't even need to install the package, just
# running the setup.py at all is sufficient
- run_setup(test_setup_py, ['--name'])
+ run_setup(test_setup_py, [str('--name')])
except pkg_resources.VersionConflict:
- self.fail('Installing setup.py requirements '
+ self.fail(
+ 'Installing setup.py requirements '
'caused a VersionConflict')
assert 'FAIL' not in stdout.getvalue()
@@ -521,35 +597,236 @@ class TestSetupRequires:
@pytest.mark.parametrize('use_setup_cfg', use_setup_cfg)
def test_setup_requires_with_attr_version(self, use_setup_cfg):
def make_dependency_sdist(dist_path, distname, version):
- make_sdist(dist_path, [
- ('setup.py',
- DALS("""
- import setuptools
- setuptools.setup(
- name={name!r},
- version={version!r},
- py_modules=[{name!r}],
- )
- """.format(name=distname, version=version))),
- (distname + '.py',
- DALS("""
- version = 42
- """
- ))])
+ files = [(
+ 'setup.py',
+ DALS("""
+ import setuptools
+ setuptools.setup(
+ name={name!r},
+ version={version!r},
+ py_modules=[{name!r}],
+ )
+ """.format(name=distname, version=version)),
+ ), (
+ distname + '.py',
+ DALS("""
+ version = 42
+ """),
+ )]
+ make_sdist(dist_path, files)
with contexts.save_pkg_resources_state():
with contexts.tempdir() as temp_dir:
test_pkg = create_setup_requires_package(
temp_dir, setup_attrs=dict(version='attr: foobar.version'),
make_package=make_dependency_sdist,
- use_setup_cfg=use_setup_cfg+('version',),
+ use_setup_cfg=use_setup_cfg + ('version',),
)
test_setup_py = os.path.join(test_pkg, 'setup.py')
with contexts.quiet() as (stdout, stderr):
- run_setup(test_setup_py, ['--version'])
+ run_setup(test_setup_py, [str('--version')])
lines = stdout.readlines()
assert len(lines) > 0
assert lines[-1].strip() == '42'
+ def test_setup_requires_honors_pip_env(self, mock_index, monkeypatch):
+ monkeypatch.setenv(str('PIP_RETRIES'), str('0'))
+ monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
+ monkeypatch.setenv('PIP_NO_INDEX', 'false')
+ monkeypatch.setenv(str('PIP_INDEX_URL'), mock_index.url)
+ with contexts.save_pkg_resources_state():
+ with contexts.tempdir() as temp_dir:
+ test_pkg = create_setup_requires_package(
+ temp_dir, 'python-xlib', '0.19',
+ setup_attrs=dict(dependency_links=[]))
+ test_setup_cfg = os.path.join(test_pkg, 'setup.cfg')
+ with open(test_setup_cfg, 'w') as fp:
+ fp.write(DALS(
+ '''
+ [easy_install]
+ index_url = https://pypi.org/legacy/
+ '''))
+ test_setup_py = os.path.join(test_pkg, 'setup.py')
+ with pytest.raises(distutils.errors.DistutilsError):
+ run_setup(test_setup_py, [str('--version')])
+ assert len(mock_index.requests) == 1
+ assert mock_index.requests[0].path == '/python-xlib/'
+
+ def test_setup_requires_with_pep508_url(self, mock_index, monkeypatch):
+ monkeypatch.setenv(str('PIP_RETRIES'), str('0'))
+ monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
+ monkeypatch.setenv(str('PIP_INDEX_URL'), mock_index.url)
+ with contexts.save_pkg_resources_state():
+ with contexts.tempdir() as temp_dir:
+ dep_sdist = os.path.join(temp_dir, 'dep.tar.gz')
+ make_trivial_sdist(dep_sdist, 'dependency', '42')
+ dep_url = path_to_url(dep_sdist, authority='localhost')
+ test_pkg = create_setup_requires_package(
+ temp_dir,
+ # Ignored (overridden by setup_attrs)
+ 'python-xlib', '0.19',
+ setup_attrs=dict(
+ setup_requires='dependency @ %s' % dep_url))
+ test_setup_py = os.path.join(test_pkg, 'setup.py')
+ run_setup(test_setup_py, [str('--version')])
+ assert len(mock_index.requests) == 0
+
+ def test_setup_requires_with_allow_hosts(self, mock_index):
+ ''' The `allow-hosts` option is not supported anymore. '''
+ files = {
+ 'test_pkg': {
+ 'setup.py': DALS('''
+ from setuptools import setup
+ setup(setup_requires='python-xlib')
+ '''),
+ 'setup.cfg': DALS('''
+ [easy_install]
+ allow_hosts = *
+ '''),
+ }
+ }
+ with contexts.save_pkg_resources_state():
+ with contexts.tempdir() as temp_dir:
+ path.build(files, prefix=temp_dir)
+ setup_py = str(pathlib.Path(temp_dir, 'test_pkg', 'setup.py'))
+ with pytest.raises(distutils.errors.DistutilsError):
+ run_setup(setup_py, [str('--version')])
+ assert len(mock_index.requests) == 0
+
+ def test_setup_requires_with_python_requires(self, monkeypatch, tmpdir):
+ ''' Check `python_requires` is honored. '''
+ monkeypatch.setenv(str('PIP_RETRIES'), str('0'))
+ monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
+ monkeypatch.setenv(str('PIP_NO_INDEX'), str('1'))
+ monkeypatch.setenv(str('PIP_VERBOSE'), str('1'))
+ dep_1_0_sdist = 'dep-1.0.tar.gz'
+ dep_1_0_url = path_to_url(str(tmpdir / dep_1_0_sdist))
+ dep_1_0_python_requires = '>=2.7'
+ make_python_requires_sdist(
+ str(tmpdir / dep_1_0_sdist), 'dep', '1.0', dep_1_0_python_requires)
+ dep_2_0_sdist = 'dep-2.0.tar.gz'
+ dep_2_0_url = path_to_url(str(tmpdir / dep_2_0_sdist))
+ dep_2_0_python_requires = '!=' + '.'.join(
+ map(str, sys.version_info[:2])) + '.*'
+ make_python_requires_sdist(
+ str(tmpdir / dep_2_0_sdist), 'dep', '2.0', dep_2_0_python_requires)
+ index = tmpdir / 'index.html'
+ index.write_text(DALS(
+ '''
+ <!DOCTYPE html>
+ <html><head><title>Links for dep</title></head>
+ <body>
+ <h1>Links for dep</h1>
+ <a href="{dep_1_0_url}" data-requires-python="{dep_1_0_python_requires}">{dep_1_0_sdist}</a><br/>
+ <a href="{dep_2_0_url}" data-requires-python="{dep_2_0_python_requires}">{dep_2_0_sdist}</a><br/>
+ </body>
+ </html>
+ ''').format( # noqa
+ dep_1_0_url=dep_1_0_url,
+ dep_1_0_sdist=dep_1_0_sdist,
+ dep_1_0_python_requires=dep_1_0_python_requires,
+ dep_2_0_url=dep_2_0_url,
+ dep_2_0_sdist=dep_2_0_sdist,
+ dep_2_0_python_requires=dep_2_0_python_requires,
+ ), 'utf-8')
+ index_url = path_to_url(str(index))
+ with contexts.save_pkg_resources_state():
+ test_pkg = create_setup_requires_package(
+ str(tmpdir),
+ 'python-xlib', '0.19', # Ignored (overridden by setup_attrs).
+ setup_attrs=dict(
+ setup_requires='dep', dependency_links=[index_url]))
+ test_setup_py = os.path.join(test_pkg, 'setup.py')
+ run_setup(test_setup_py, [str('--version')])
+ eggs = list(map(str, pkg_resources.find_distributions(
+ os.path.join(test_pkg, '.eggs'))))
+ assert eggs == ['dep 1.0']
+
+ @pytest.mark.parametrize(
+ 'with_dependency_links_in_setup_py',
+ (False, True))
+ def test_setup_requires_with_find_links_in_setup_cfg(
+ self, monkeypatch,
+ with_dependency_links_in_setup_py):
+ monkeypatch.setenv(str('PIP_RETRIES'), str('0'))
+ monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
+ with contexts.save_pkg_resources_state():
+ with contexts.tempdir() as temp_dir:
+ make_trivial_sdist(
+ os.path.join(temp_dir, 'python-xlib-42.tar.gz'),
+ 'python-xlib',
+ '42')
+ test_pkg = os.path.join(temp_dir, 'test_pkg')
+ test_setup_py = os.path.join(test_pkg, 'setup.py')
+ test_setup_cfg = os.path.join(test_pkg, 'setup.cfg')
+ os.mkdir(test_pkg)
+ with open(test_setup_py, 'w') as fp:
+ if with_dependency_links_in_setup_py:
+ dependency_links = [os.path.join(temp_dir, 'links')]
+ else:
+ dependency_links = []
+ fp.write(DALS(
+ '''
+ from setuptools import installer, setup
+ setup(setup_requires='python-xlib==42',
+ dependency_links={dependency_links!r})
+ ''').format(
+ dependency_links=dependency_links))
+ with open(test_setup_cfg, 'w') as fp:
+ fp.write(DALS(
+ '''
+ [easy_install]
+ index_url = {index_url}
+ find_links = {find_links}
+ ''').format(index_url=os.path.join(temp_dir, 'index'),
+ find_links=temp_dir))
+ run_setup(test_setup_py, [str('--version')])
+
+ def test_setup_requires_with_transitive_extra_dependency(
+ self, monkeypatch):
+ # Use case: installing a package with a build dependency on
+ # an already installed `dep[extra]`, which in turn depends
+ # on `extra_dep` (which is not already installed).
+ with contexts.save_pkg_resources_state():
+ with contexts.tempdir() as temp_dir:
+ # Create source distribution for `extra_dep`.
+ make_trivial_sdist(
+ os.path.join(temp_dir, 'extra_dep-1.0.tar.gz'),
+ 'extra_dep', '1.0')
+ # Create source tree for `dep`.
+ dep_pkg = os.path.join(temp_dir, 'dep')
+ os.mkdir(dep_pkg)
+ path.build({
+ 'setup.py':
+ DALS("""
+ import setuptools
+ setuptools.setup(
+ name='dep', version='2.0',
+ extras_require={'extra': ['extra_dep']},
+ )
+ """),
+ 'setup.cfg': '',
+ }, prefix=dep_pkg)
+ # "Install" dep.
+ run_setup(
+ os.path.join(dep_pkg, 'setup.py'), [str('dist_info')])
+ working_set.add_entry(dep_pkg)
+ # Create source tree for test package.
+ test_pkg = os.path.join(temp_dir, 'test_pkg')
+ test_setup_py = os.path.join(test_pkg, 'setup.py')
+ os.mkdir(test_pkg)
+ with open(test_setup_py, 'w') as fp:
+ fp.write(DALS(
+ '''
+ from setuptools import installer, setup
+ setup(setup_requires='dep[extra]')
+ '''))
+ # Check...
+ monkeypatch.setenv(str('PIP_FIND_LINKS'), str(temp_dir))
+ monkeypatch.setenv(str('PIP_NO_INDEX'), str('1'))
+ monkeypatch.setenv(str('PIP_RETRIES'), str('0'))
+ monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
+ run_setup(test_setup_py, [str('--version')])
+
def make_trivial_sdist(dist_path, distname, version):
"""
@@ -565,7 +842,9 @@ def make_trivial_sdist(dist_path, distname, version):
name=%r,
version=%r
)
- """ % (distname, version)))])
+ """ % (distname, version))),
+ ('setup.cfg', ''),
+ ])
def make_nspkg_sdist(dist_path, distname, version):
@@ -601,12 +880,32 @@ def make_nspkg_sdist(dist_path, distname, version):
make_sdist(dist_path, files)
+def make_python_requires_sdist(dist_path, distname, version, python_requires):
+ make_sdist(dist_path, [
+ (
+ 'setup.py',
+ DALS("""\
+ import setuptools
+ setuptools.setup(
+ name={name!r},
+ version={version!r},
+ python_requires={python_requires!r},
+ )
+ """).format(
+ name=distname, version=version,
+ python_requires=python_requires)),
+ ('setup.cfg', ''),
+ ])
+
+
def make_sdist(dist_path, files):
"""
Create a simple sdist tarball at dist_path, containing the files
listed in ``files`` as ``(filename, content)`` tuples.
"""
+ # Distributions with only one file don't play well with pip.
+ assert len(files) > 1
with tarfile.open(dist_path, 'w:gz') as dist:
for filename, content in files:
file_bytes = io.BytesIO(content.encode('utf-8'))
@@ -639,8 +938,8 @@ def create_setup_requires_package(path, distname='foobar', version='0.1',
test_pkg = os.path.join(path, 'test_pkg')
os.mkdir(test_pkg)
+ # setup.cfg
if use_setup_cfg:
- test_setup_cfg = os.path.join(test_pkg, 'setup.cfg')
options = []
metadata = []
for name in use_setup_cfg:
@@ -652,28 +951,29 @@ def create_setup_requires_package(path, distname='foobar', version='0.1',
if isinstance(value, (tuple, list)):
value = ';'.join(value)
section.append('%s: %s' % (name, value))
- with open(test_setup_cfg, 'w') as f:
- f.write(DALS(
- """
- [metadata]
- {metadata}
- [options]
- {options}
- """
- ).format(
- options='\n'.join(options),
- metadata='\n'.join(metadata),
- ))
-
- test_setup_py = os.path.join(test_pkg, 'setup.py')
+ test_setup_cfg_contents = DALS(
+ """
+ [metadata]
+ {metadata}
+ [options]
+ {options}
+ """
+ ).format(
+ options='\n'.join(options),
+ metadata='\n'.join(metadata),
+ )
+ else:
+ test_setup_cfg_contents = ''
+ with open(os.path.join(test_pkg, 'setup.cfg'), 'w') as f:
+ f.write(test_setup_cfg_contents)
+ # setup.py
if setup_py_template is None:
setup_py_template = DALS("""\
import setuptools
setuptools.setup(**%r)
""")
-
- with open(test_setup_py, 'w') as f:
+ with open(os.path.join(test_pkg, 'setup.py'), 'w') as f:
f.write(setup_py_template % test_setup_attrs)
foobar_path = os.path.join(path, '%s-%s.tar.gz' % (distname, version))
@@ -688,27 +988,29 @@ def create_setup_requires_package(path, distname='foobar', version='0.1',
)
class TestScriptHeader:
non_ascii_exe = '/Users/José/bin/python'
- exe_with_spaces = r'C:\Program Files\Python33\python.exe'
+ exe_with_spaces = r'C:\Program Files\Python36\python.exe'
def test_get_script_header(self):
expected = '#!%s\n' % ei.nt_quote_arg(os.path.normpath(sys.executable))
- actual = ei.ScriptWriter.get_script_header('#!/usr/local/bin/python')
+ actual = ei.ScriptWriter.get_header('#!/usr/local/bin/python')
assert actual == expected
def test_get_script_header_args(self):
- expected = '#!%s -x\n' % ei.nt_quote_arg(os.path.normpath
- (sys.executable))
- actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python -x')
+ expected = '#!%s -x\n' % ei.nt_quote_arg(
+ os.path.normpath(sys.executable))
+ actual = ei.ScriptWriter.get_header('#!/usr/bin/python -x')
assert actual == expected
def test_get_script_header_non_ascii_exe(self):
- actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python',
+ actual = ei.ScriptWriter.get_header(
+ '#!/usr/bin/python',
executable=self.non_ascii_exe)
- expected = '#!%s -x\n' % self.non_ascii_exe
+ expected = str('#!%s -x\n') % self.non_ascii_exe
assert actual == expected
def test_get_script_header_exe_with_spaces(self):
- actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python',
+ actual = ei.ScriptWriter.get_header(
+ '#!/usr/bin/python',
executable='"' + self.exe_with_spaces + '"')
expected = '#!"%s"\n' % self.exe_with_spaces
assert actual == expected
@@ -751,10 +1053,59 @@ class TestCommandSpec:
class TestWindowsScriptWriter:
def test_header(self):
- hdr = ei.WindowsScriptWriter.get_script_header('')
+ hdr = ei.WindowsScriptWriter.get_header('')
assert hdr.startswith('#!')
assert hdr.endswith('\n')
hdr = hdr.lstrip('#!')
hdr = hdr.rstrip('\n')
# header should not start with an escaped quote
assert not hdr.startswith('\\"')
+
+
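+# Stub with the same fields as sys.version_info, used below to emulate a Python 3.10 interpreter.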
+VersionStub = namedtuple("VersionStub", "major, minor, micro, releaselevel, serial")
+
+
+def test_use_correct_python_version_string(tmpdir, tmpdir_cwd, monkeypatch):
+ # In issue #3001, easy_install wrongly uses the `python3.1` directory
+ # when the interpreter is `python3.10` and the `--user` option is given.
+ # See pypa/setuptools#3001.
+ dist = Distribution()
+ cmd = dist.get_command_obj('easy_install')
+ cmd.args = ['ok']
+ cmd.optimize = 0
+ cmd.user = True
+ cmd.install_userbase = str(tmpdir)
+ cmd.install_usersite = None
+ install_cmd = dist.get_command_obj('install')
+ install_cmd.install_userbase = str(tmpdir)
+ install_cmd.install_usersite = None
+
+ with monkeypatch.context() as patch, warnings.catch_warnings():
+ warnings.simplefilter("ignore")
+ version = '3.10.1 (main, Dec 21 2021, 09:17:12) [GCC 10.2.1 20210110]'
+ info = VersionStub(3, 10, 1, "final", 0)
+ patch.setattr('site.ENABLE_USER_SITE', True)
+ patch.setattr('sys.version', version)
+ patch.setattr('sys.version_info', info)
+ patch.setattr(cmd, 'create_home_path', mock.Mock())
+ cmd.finalize_options()
+
+ name = "pypy" if hasattr(sys, 'pypy_version_info') else "python"
+ install_dir = cmd.install_dir.lower()
+
+ # On some platforms (e.g. Windows), install_dir is mostly determined
+ # via `sysconfig`, which defines constants eagerly at module creation.
+ # This means that monkeypatching `sys.version` to emulate 3.10 for testing
+ # may have no effect.
+ # The safest test here is to rely on the fact that 3.1 is no longer
+ # supported/tested, and make sure that if 'python3.1' ever appears in the string
+ # it is followed by another digit (e.g. 'python3.10').
+ if re.search(name + r'3\.?1', install_dir):
+ assert re.search(name + r'3\.?1\d', install_dir)
+
+ # The following "variables" are used for interpolation in distutils
+ # installation schemes, so it should be fair to treat them as "semi-public",
+ # or at least public enough so we can have a test to make sure they are correct
+ assert cmd.config_vars['py_version'] == '3.10.1'
+ assert cmd.config_vars['py_version_short'] == '3.10'
+ assert cmd.config_vars['py_version_nodot'] == '310'
diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py
new file mode 100644
index 0000000..aac4f5e
--- /dev/null
+++ b/setuptools/tests/test_editable_install.py
@@ -0,0 +1,113 @@
+import subprocess
+from textwrap import dedent
+
+import pytest
+import jaraco.envs
+import jaraco.path
+import path
+
+
+@pytest.fixture
+def venv(tmp_path, setuptools_wheel):
+ env = jaraco.envs.VirtualEnv()
+ vars(env).update(
+ root=path.Path(tmp_path), # workaround for error on windows
+ name=".venv",
+ create_opts=["--no-setuptools"],
+ req=str(setuptools_wheel),
+ )
+ return env.create()
+
+
+EXAMPLE = {
+ 'pyproject.toml': dedent("""\
+ [build-system]
+ requires = ["setuptools", "wheel"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "mypkg"
+ version = "3.14159"
+ license = {text = "MIT"}
+ description = "This is a Python package"
+ dynamic = ["readme"]
+ classifiers = [
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers"
+ ]
+ urls = {Homepage = "http://github.com"}
+ dependencies = ['importlib-metadata; python_version<"3.8"']
+
+ [tool.setuptools]
+ package-dir = {"" = "src"}
+ packages = {find = {where = ["src"]}}
+ license-files = ["LICENSE*"]
+
+ [tool.setuptools.dynamic]
+ readme = {file = "README.rst"}
+
+ [tool.distutils.egg_info]
+ tag-build = ".post0"
+ """),
+ "MANIFEST.in": dedent("""\
+ global-include *.py *.txt
+ global-exclude *.py[cod]
+ """).strip(),
+ "README.rst": "This is a ``README``",
+ "LICENSE.txt": "---- placeholder MIT license ----",
+ "src": {
+ "mypkg": {
+ "__init__.py": dedent("""\
+ import sys
+
+ if sys.version_info[:2] >= (3, 8):
+ from importlib.metadata import PackageNotFoundError, version
+ else:
+ from importlib_metadata import PackageNotFoundError, version
+
+ try:
+ __version__ = version(__name__)
+ except PackageNotFoundError:
+ __version__ = "unknown"
+ """),
+ "__main__.py": dedent("""\
+ from importlib.resources import read_text
+ from . import __version__, __name__ as parent
+ from .mod import x
+
+ data = read_text(parent, "data.txt")
+ print(__version__, data, x)
+ """),
+ "mod.py": "x = ''",
+ "data.txt": "Hello World",
+ }
+ }
+}
+
+
+SETUP_SCRIPT_STUB = "__import__('setuptools').setup()"
+MISSING_SETUP_SCRIPT = pytest.param(
+ None,
+ marks=pytest.mark.xfail(
+ reason="Editable install is currently only supported with `setup.py`"
+ )
+)
+
+
+@pytest.mark.parametrize("setup_script", [SETUP_SCRIPT_STUB, MISSING_SETUP_SCRIPT])
+def test_editable_with_pyproject(tmp_path, venv, setup_script):
+ project = tmp_path / "mypkg"
+ files = {**EXAMPLE, "setup.py": setup_script}
+ project.mkdir()
+ jaraco.path.build(files, prefix=project)
+
+ cmd = [venv.exe(), "-m", "pip", "install",
+ "--no-build-isolation", # required to force current version of setuptools
+ "-e", str(project)]
+ print(str(subprocess.check_output(cmd), "utf-8"))
+
+ cmd = [venv.exe(), "-m", "mypkg"]
+ assert subprocess.check_output(cmd).strip() == b"3.14159.post0 Hello World"
+
+ (project / "src/mypkg/data.txt").write_text("foobar")
+ (project / "src/mypkg/mod.py").write_text("x = 42")
+ assert subprocess.check_output(cmd).strip() == b"3.14159.post0 foobar 42"
diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py
index 2a070de..ee07b5a 100644
--- a/setuptools/tests/test_egg_info.py
+++ b/setuptools/tests/test_egg_info.py
@@ -4,15 +4,18 @@ import os
import glob
import re
import stat
-
-from setuptools.command.egg_info import egg_info, manifest_maker
-from setuptools.dist import Distribution
-from setuptools.extern.six.moves import map
+import time
+from typing import List, Tuple
import pytest
+from jaraco import path
+
+from setuptools.command.egg_info import (
+ egg_info, manifest_maker, EggInfoDeprecationWarning, get_pkg_info_revision,
+)
+from setuptools.dist import Distribution
from . import environment
-from .files import build_files
from .textwrap import DALS
from . import contexts
@@ -21,7 +24,7 @@ class Environment(str):
pass
-class TestEggInfo(object):
+class TestEggInfo:
setup_script = DALS("""
from setuptools import setup
@@ -35,7 +38,7 @@ class TestEggInfo(object):
""")
def _create_project(self):
- build_files({
+ path.build({
'setup.py': self.setup_script,
'hello.py': DALS("""
def run():
@@ -43,7 +46,12 @@ class TestEggInfo(object):
""")
})
- @pytest.yield_fixture
+ @staticmethod
+ def _extract_mv_version(pkg_info_lines: List[str]) -> Tuple[int, int]:
+ version_str = pkg_info_lines[0].split(' ')[1]
+ return tuple(map(int, version_str.split('.')[:2]))
+
+ @pytest.fixture
def env(self):
with contexts.tempdir(prefix='setuptools-test.') as env_dir:
env = Environment(env_dir)
@@ -54,7 +62,7 @@ class TestEggInfo(object):
for dirname in subs
)
list(map(os.mkdir, env.paths.values()))
- build_files({
+ path.build({
env.paths['home']: {
'.pydistutils.cfg': DALS("""
[egg_info]
@@ -68,8 +76,7 @@ class TestEggInfo(object):
"""
When the egg_info section is empty or not present, running
save_version_info should add the settings to the setup.cfg
- in a deterministic order, consistent with the ordering found
- on Python 2.7 with PYTHONHASHSEED=0.
+ in a deterministic order.
"""
setup_cfg = os.path.join(env.paths['home'], 'setup.cfg')
dist = Distribution()
@@ -105,7 +112,7 @@ class TestEggInfo(object):
the file should remain unchanged.
"""
setup_cfg = os.path.join(env.paths['home'], 'setup.cfg')
- build_files({
+ path.build({
setup_cfg: DALS("""
[egg_info]
tag_build =
@@ -128,11 +135,11 @@ class TestEggInfo(object):
self._validate_content_order(content, expected_order)
- def test_egg_base_installed_egg_info(self, tmpdir_cwd, env):
+ def test_expected_files_produced(self, tmpdir_cwd, env):
self._create_project()
- self._run_install_command(tmpdir_cwd, env)
- actual = self._find_egg_info_files(env.paths['lib'])
+ self._run_egg_info_command(tmpdir_cwd, env)
+ actual = os.listdir('foo.egg-info')
expected = [
'PKG-INFO',
@@ -144,9 +151,57 @@ class TestEggInfo(object):
]
assert sorted(actual) == expected
+ def test_license_is_a_string(self, tmpdir_cwd, env):
+ setup_config = DALS("""
+ [metadata]
+ name=foo
+ version=0.0.1
+ license=file:MIT
+ """)
+
+ setup_script = DALS("""
+ from setuptools import setup
+
+ setup()
+ """)
+
+ path.build({
+ 'setup.py': setup_script,
+ 'setup.cfg': setup_config,
+ })
+
+ # This command should fail with a ValueError, but because it's
+ # currently configured to use a subprocess, the actual traceback
+ # object is lost and we need to parse it from stderr
+ with pytest.raises(AssertionError) as exc:
+ self._run_egg_info_command(tmpdir_cwd, env)
+
+ # Hopefully this is not too fragile: the only argument to the
+ # assertion error should be a traceback, ending with:
+ # ValueError: ....
+ #
+ # assert not 1
+ tb = exc.value.args[0].split('\n')
+ assert tb[-3].lstrip().startswith('ValueError')
+
+ def test_rebuilt(self, tmpdir_cwd, env):
+ """Ensure timestamps are updated when the command is re-run."""
+ self._create_project()
+
+ self._run_egg_info_command(tmpdir_cwd, env)
+ timestamp_a = os.path.getmtime('foo.egg-info')
+
+ # arbitrary sleep just to handle *really* fast systems
+ time.sleep(.001)
+
+ self._run_egg_info_command(tmpdir_cwd, env)
+ timestamp_b = os.path.getmtime('foo.egg-info')
+
+ assert timestamp_a != timestamp_b
+
def test_manifest_template_is_read(self, tmpdir_cwd, env):
self._create_project()
- build_files({
+ path.build({
'MANIFEST.in': DALS("""
recursive-include docs *.rst
"""),
@@ -154,8 +209,8 @@ class TestEggInfo(object):
'usage.rst': "Run 'hi'",
}
})
- self._run_install_command(tmpdir_cwd, env)
- egg_info_dir = self._find_egg_info_files(env.paths['lib']).base
+ self._run_egg_info_command(tmpdir_cwd, env)
+ egg_info_dir = os.path.join('.', 'foo.egg-info')
sources_txt = os.path.join(egg_info_dir, 'SOURCES.txt')
with open(sources_txt) as f:
assert 'docs/usage.rst' in f.read().split('\n')
@@ -169,8 +224,10 @@ class TestEggInfo(object):
'''
) % ('' if use_setup_cfg else requires)
setup_config = requires if use_setup_cfg else ''
- build_files({'setup.py': setup_script,
- 'setup.cfg': setup_config})
+ path.build({
+ 'setup.py': setup_script,
+ 'setup.cfg': setup_config,
+ })
mismatch_marker = "python_version<'{this_ver}'".format(
this_ver=sys.version_info[0],
@@ -181,7 +238,7 @@ class TestEggInfo(object):
)
invalid_marker = "<=>++"
- class RequiresTestHelper(object):
+ class RequiresTestHelper:
@staticmethod
def parametrize(*test_list, **format_dict):
@@ -233,27 +290,27 @@ class TestEggInfo(object):
'''
install_requires_deterministic
- install_requires=["fake-factory==0.5.2", "pytz"]
+ install_requires=["wheel>=0.5", "pytest"]
[options]
install_requires =
- fake-factory==0.5.2
- pytz
+ wheel>=0.5
+ pytest
- fake-factory==0.5.2
- pytz
+ wheel>=0.5
+ pytest
''',
'''
install_requires_ordered
- install_requires=["fake-factory>=1.12.3,!=2.0"]
+ install_requires=["pytest>=3.0.2,!=10.9999"]
[options]
install_requires =
- fake-factory>=1.12.3,!=2.0
+ pytest>=3.0.2,!=10.9999
- fake-factory!=2.0,>=1.12.3
+ pytest!=10.9999,>=3.0.2
''',
'''
@@ -394,7 +451,7 @@ class TestEggInfo(object):
self, tmpdir_cwd, env, requires, use_setup_cfg,
expected_requires, install_cmd_kwargs):
self._setup_script_with_requires(requires, use_setup_cfg)
- self._run_install_command(tmpdir_cwd, env, **install_cmd_kwargs)
+ self._run_egg_info_command(tmpdir_cwd, env, **install_cmd_kwargs)
egg_info_dir = os.path.join('.', 'foo.egg-info')
requires_txt = os.path.join(egg_info_dir, 'requires.txt')
if os.path.exists(requires_txt):
@@ -414,14 +471,14 @@ class TestEggInfo(object):
req = 'install_requires={"fake-factory==0.5.2", "pytz"}'
self._setup_script_with_requires(req)
with pytest.raises(AssertionError):
- self._run_install_command(tmpdir_cwd, env)
+ self._run_egg_info_command(tmpdir_cwd, env)
def test_extras_require_with_invalid_marker(self, tmpdir_cwd, env):
tmpl = 'extras_require={{":{marker}": ["barbazquux"]}},'
req = tmpl.format(marker=self.invalid_marker)
self._setup_script_with_requires(req)
with pytest.raises(AssertionError):
- self._run_install_command(tmpdir_cwd, env)
+ self._run_egg_info_command(tmpdir_cwd, env)
assert glob.glob(os.path.join(env.paths['lib'], 'barbazquux*')) == []
def test_extras_require_with_invalid_marker_in_req(self, tmpdir_cwd, env):
@@ -429,7 +486,7 @@ class TestEggInfo(object):
req = tmpl.format(marker=self.invalid_marker)
self._setup_script_with_requires(req)
with pytest.raises(AssertionError):
- self._run_install_command(tmpdir_cwd, env)
+ self._run_egg_info_command(tmpdir_cwd, env)
assert glob.glob(os.path.join(env.paths['lib'], 'barbazquux*')) == []
def test_provides_extra(self, tmpdir_cwd, env):
@@ -452,7 +509,7 @@ class TestEggInfo(object):
def test_doesnt_provides_extra(self, tmpdir_cwd, env):
self._setup_script_with_requires(
- '''install_requires=["spam ; python_version<'3.3'"]''')
+ '''install_requires=["spam ; python_version<'3.6'"]''')
environ = os.environ.copy().update(
HOME=env.paths['home'],
)
@@ -467,6 +524,375 @@ class TestEggInfo(object):
pkg_info_text = pkginfo_file.read()
assert 'Provides-Extra:' not in pkg_info_text
+ @pytest.mark.parametrize("files, license_in_sources", [
+ ({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_file = LICENSE
+ """),
+ 'LICENSE': "Test license"
+ }, True), # with license
+ ({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_file = INVALID_LICENSE
+ """),
+ 'LICENSE': "Test license"
+ }, False), # with an invalid license
+ ({
+ 'setup.cfg': DALS("""
+ """),
+ 'LICENSE': "Test license"
+ }, True), # no license_file attribute, LICENSE auto-included
+ ({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_file = LICENSE
+ """),
+ 'MANIFEST.in': "exclude LICENSE",
+ 'LICENSE': "Test license"
+ }, True), # manifest is overwritten by license_file
+ pytest.param({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_file = LICEN[CS]E*
+ """),
+ 'LICENSE': "Test license",
+ }, True,
+ id="glob_pattern"),
+ ])
+ def test_setup_cfg_license_file(
+ self, tmpdir_cwd, env, files, license_in_sources):
+ self._create_project()
+ path.build(files)
+
+ environment.run_setup_py(
+ cmd=['egg_info'],
+ pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)])
+ )
+ egg_info_dir = os.path.join('.', 'foo.egg-info')
+
+ with open(os.path.join(egg_info_dir, 'SOURCES.txt')) as sources_file:
+ sources_text = sources_file.read()
+
+ if license_in_sources:
+ assert 'LICENSE' in sources_text
+ else:
+ assert 'LICENSE' not in sources_text
+ # for invalid license test
+ assert 'INVALID_LICENSE' not in sources_text
+
+ @pytest.mark.parametrize("files, incl_licenses, excl_licenses", [
+ ({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_files =
+ LICENSE-ABC
+ LICENSE-XYZ
+ """),
+ 'LICENSE-ABC': "ABC license",
+ 'LICENSE-XYZ': "XYZ license"
+ }, ['LICENSE-ABC', 'LICENSE-XYZ'], []), # with licenses
+ ({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_files = LICENSE-ABC, LICENSE-XYZ
+ """),
+ 'LICENSE-ABC': "ABC license",
+ 'LICENSE-XYZ': "XYZ license"
+ }, ['LICENSE-ABC', 'LICENSE-XYZ'], []), # with commas
+ ({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_files =
+ LICENSE-ABC
+ """),
+ 'LICENSE-ABC': "ABC license",
+ 'LICENSE-XYZ': "XYZ license"
+ }, ['LICENSE-ABC'], ['LICENSE-XYZ']), # with one license
+ ({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_files =
+ """),
+ 'LICENSE-ABC': "ABC license",
+ 'LICENSE-XYZ': "XYZ license"
+ }, [], ['LICENSE-ABC', 'LICENSE-XYZ']), # empty
+ ({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_files = LICENSE-XYZ
+ """),
+ 'LICENSE-ABC': "ABC license",
+ 'LICENSE-XYZ': "XYZ license"
+ }, ['LICENSE-XYZ'], ['LICENSE-ABC']), # on same line
+ ({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_files =
+ LICENSE-ABC
+ INVALID_LICENSE
+ """),
+ 'LICENSE-ABC': "Test license"
+ }, ['LICENSE-ABC'], ['INVALID_LICENSE']), # with an invalid license
+ ({
+ 'setup.cfg': DALS("""
+ """),
+ 'LICENSE': "Test license"
+ }, ['LICENSE'], []), # no license_files attribute, LICENSE auto-included
+ ({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_files = LICENSE
+ """),
+ 'MANIFEST.in': "exclude LICENSE",
+ 'LICENSE': "Test license"
+ }, ['LICENSE'], []), # manifest is overwritten by license_files
+ ({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_files =
+ LICENSE-ABC
+ LICENSE-XYZ
+ """),
+ 'MANIFEST.in': "exclude LICENSE-XYZ",
+ 'LICENSE-ABC': "ABC license",
+ 'LICENSE-XYZ': "XYZ license"
+ # manifest is overwritten by license_files
+ }, ['LICENSE-ABC', 'LICENSE-XYZ'], []),
+ pytest.param({
+ 'setup.cfg': "",
+ 'LICENSE-ABC': "ABC license",
+ 'COPYING-ABC': "ABC copying",
+ 'NOTICE-ABC': "ABC notice",
+ 'AUTHORS-ABC': "ABC authors",
+ 'LICENCE-XYZ': "XYZ license",
+ 'LICENSE': "License",
+ 'INVALID-LICENSE': "Invalid license",
+ }, [
+ 'LICENSE-ABC',
+ 'COPYING-ABC',
+ 'NOTICE-ABC',
+ 'AUTHORS-ABC',
+ 'LICENCE-XYZ',
+ 'LICENSE',
+ ], ['INVALID-LICENSE'],
+ # ('LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*')
+ id="default_glob_patterns"),
+ pytest.param({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_files =
+ LICENSE*
+ """),
+ 'LICENSE-ABC': "ABC license",
+ 'NOTICE-XYZ': "XYZ notice",
+ }, ['LICENSE-ABC'], ['NOTICE-XYZ'],
+ id="no_default_glob_patterns"),
+ pytest.param({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_files =
+ LICENSE-ABC
+ LICENSE*
+ """),
+ 'LICENSE-ABC': "ABC license",
+ }, ['LICENSE-ABC'], [],
+ id="files_only_added_once",
+ ),
+ ])
+ def test_setup_cfg_license_files(
+ self, tmpdir_cwd, env, files, incl_licenses, excl_licenses):
+ self._create_project()
+ path.build(files)
+
+ environment.run_setup_py(
+ cmd=['egg_info'],
+ pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)])
+ )
+ egg_info_dir = os.path.join('.', 'foo.egg-info')
+
+ with open(os.path.join(egg_info_dir, 'SOURCES.txt')) as sources_file:
+ sources_lines = list(line.strip() for line in sources_file)
+
+ for lf in incl_licenses:
+ assert sources_lines.count(lf) == 1
+
+ for lf in excl_licenses:
+ assert sources_lines.count(lf) == 0
+
+ @pytest.mark.parametrize("files, incl_licenses, excl_licenses", [
+ ({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_file =
+ license_files =
+ """),
+ 'LICENSE-ABC': "ABC license",
+ 'LICENSE-XYZ': "XYZ license"
+ }, [], ['LICENSE-ABC', 'LICENSE-XYZ']), # both empty
+ ({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_file =
+ LICENSE-ABC
+ LICENSE-XYZ
+ """),
+ 'LICENSE-ABC': "ABC license",
+ 'LICENSE-XYZ': "XYZ license"
+ # license_file is still singular
+ }, [], ['LICENSE-ABC', 'LICENSE-XYZ']),
+ ({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_file = LICENSE-ABC
+ license_files =
+ LICENSE-XYZ
+ LICENSE-PQR
+ """),
+ 'LICENSE-ABC': "ABC license",
+ 'LICENSE-PQR': "PQR license",
+ 'LICENSE-XYZ': "XYZ license"
+ }, ['LICENSE-ABC', 'LICENSE-PQR', 'LICENSE-XYZ'], []), # combined
+ ({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_file = LICENSE-ABC
+ license_files =
+ LICENSE-ABC
+ LICENSE-XYZ
+ LICENSE-PQR
+ """),
+ 'LICENSE-ABC': "ABC license",
+ 'LICENSE-PQR': "PQR license",
+ 'LICENSE-XYZ': "XYZ license"
+ # duplicate license
+ }, ['LICENSE-ABC', 'LICENSE-PQR', 'LICENSE-XYZ'], []),
+ ({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_file = LICENSE-ABC
+ license_files =
+ LICENSE-XYZ
+ """),
+ 'LICENSE-ABC': "ABC license",
+ 'LICENSE-PQR': "PQR license",
+ 'LICENSE-XYZ': "XYZ license"
+ # combined subset
+ }, ['LICENSE-ABC', 'LICENSE-XYZ'], ['LICENSE-PQR']),
+ ({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_file = LICENSE-ABC
+ license_files =
+ LICENSE-XYZ
+ LICENSE-PQR
+ """),
+ 'LICENSE-PQR': "Test license"
+ # with invalid licenses
+ }, ['LICENSE-PQR'], ['LICENSE-ABC', 'LICENSE-XYZ']),
+ ({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_file = LICENSE-ABC
+ license_files =
+ LICENSE-PQR
+ LICENSE-XYZ
+ """),
+ 'MANIFEST.in': "exclude LICENSE-ABC\nexclude LICENSE-PQR",
+ 'LICENSE-ABC': "ABC license",
+ 'LICENSE-PQR': "PQR license",
+ 'LICENSE-XYZ': "XYZ license"
+ # manifest is overwritten
+ }, ['LICENSE-ABC', 'LICENSE-PQR', 'LICENSE-XYZ'], []),
+ pytest.param({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_file = LICENSE*
+ """),
+ 'LICENSE-ABC': "ABC license",
+ 'NOTICE-XYZ': "XYZ notice",
+ }, ['LICENSE-ABC'], ['NOTICE-XYZ'],
+ id="no_default_glob_patterns"),
+ pytest.param({
+ 'setup.cfg': DALS("""
+ [metadata]
+ license_file = LICENSE*
+ license_files =
+ NOTICE*
+ """),
+ 'LICENSE-ABC': "ABC license",
+ 'NOTICE-ABC': "ABC notice",
+ 'AUTHORS-ABC': "ABC authors",
+ }, ['LICENSE-ABC', 'NOTICE-ABC'], ['AUTHORS-ABC'],
+ id="combined_glob_patterrns"),
+ ])
+ def test_setup_cfg_license_file_license_files(
+ self, tmpdir_cwd, env, files, incl_licenses, excl_licenses):
+ self._create_project()
+ path.build(files)
+
+ environment.run_setup_py(
+ cmd=['egg_info'],
+ pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)])
+ )
+ egg_info_dir = os.path.join('.', 'foo.egg-info')
+
+ with open(os.path.join(egg_info_dir, 'SOURCES.txt')) as sources_file:
+ sources_lines = list(line.strip() for line in sources_file)
+
+ for lf in incl_licenses:
+ assert sources_lines.count(lf) == 1
+
+ for lf in excl_licenses:
+ assert sources_lines.count(lf) == 0
+
+ def test_license_file_attr_pkg_info(self, tmpdir_cwd, env):
+ """All matched license files should have a corresponding License-File."""
+ self._create_project()
+ path.build({
+ "setup.cfg": DALS("""
+ [metadata]
+ license_files =
+ NOTICE*
+ LICENSE*
+ """),
+ "LICENSE-ABC": "ABC license",
+ "LICENSE-XYZ": "XYZ license",
+ "NOTICE": "included",
+ "IGNORE": "not include",
+ })
+
+ environment.run_setup_py(
+ cmd=['egg_info'],
+ pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)])
+ )
+ egg_info_dir = os.path.join('.', 'foo.egg-info')
+ with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file:
+ pkg_info_lines = pkginfo_file.read().split('\n')
+ license_file_lines = [
+ line for line in pkg_info_lines if line.startswith('License-File:')]
+
+ # Only 'NOTICE', LICENSE-ABC', and 'LICENSE-XYZ' should have been matched
+ # Also assert that order from license_files is keeped
+ assert "License-File: NOTICE" == license_file_lines[0]
+ assert "License-File: LICENSE-ABC" in license_file_lines[1:]
+ assert "License-File: LICENSE-XYZ" in license_file_lines[1:]
+
+ def test_metadata_version(self, tmpdir_cwd, env):
+ """Make sure latest metadata version is used by default."""
+ self._setup_script_with_requires("")
+ code, data = environment.run_setup_py(
+ cmd=['egg_info'],
+ pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
+ data_stream=1,
+ )
+ egg_info_dir = os.path.join('.', 'foo.egg-info')
+ with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file:
+ pkg_info_lines = pkginfo_file.read().split('\n')
+ # Update metadata version if changed
+ assert self._extract_mv_version(pkg_info_lines) == (2, 1)
+
def test_long_description_content_type(self, tmpdir_cwd, env):
# Test that specifying a `long_description_content_type` keyword arg to
# the `setup` function results in writing a `Description-Content-Type`
@@ -493,6 +919,29 @@ class TestEggInfo(object):
assert expected_line in pkg_info_lines
assert 'Metadata-Version: 2.1' in pkg_info_lines
+ def test_long_description(self, tmpdir_cwd, env):
+ # Test that specifying `long_description` and `long_description_content_type`
+ # keyword args to the `setup` function results in writing
+ # the description in the message payload of the `PKG-INFO` file
+ # in the `<distribution>.egg-info` directory.
+ self._setup_script_with_requires(
+ "long_description='This is a long description\\nover multiple lines',"
+ "long_description_content_type='text/markdown',"
+ )
+ code, data = environment.run_setup_py(
+ cmd=['egg_info'],
+ pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
+ data_stream=1,
+ )
+ egg_info_dir = os.path.join('.', 'foo.egg-info')
+ with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file:
+ pkg_info_lines = pkginfo_file.read().split('\n')
+ assert 'Metadata-Version: 2.1' in pkg_info_lines
+ assert '' == pkg_info_lines[-1] # last line should be empty
+ long_desc_lines = pkg_info_lines[pkg_info_lines.index(''):]
+ assert 'This is a long description' in long_desc_lines
+ assert 'over multiple lines' in long_desc_lines
+
def test_project_urls(self, tmpdir_cwd, env):
# Test that specifying a `project_urls` dict to the `setup`
# function results in writing multiple `Project-URL` lines to
@@ -522,6 +971,40 @@ class TestEggInfo(object):
assert expected_line in pkg_info_lines
expected_line = 'Project-URL: Link Two, https://example.com/two/'
assert expected_line in pkg_info_lines
+ assert self._extract_mv_version(pkg_info_lines) >= (1, 2)
+
+ def test_license(self, tmpdir_cwd, env):
+ """Test single line license."""
+ self._setup_script_with_requires(
+ "license='MIT',"
+ )
+ code, data = environment.run_setup_py(
+ cmd=['egg_info'],
+ pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
+ data_stream=1,
+ )
+ egg_info_dir = os.path.join('.', 'foo.egg-info')
+ with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file:
+ pkg_info_lines = pkginfo_file.read().split('\n')
+ assert 'License: MIT' in pkg_info_lines
+
+ def test_license_escape(self, tmpdir_cwd, env):
+ """Test license is escaped correctly if longer than one line."""
+ self._setup_script_with_requires(
+ "license='This is a long license text \\nover multiple lines',"
+ )
+ code, data = environment.run_setup_py(
+ cmd=['egg_info'],
+ pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
+ data_stream=1,
+ )
+ egg_info_dir = os.path.join('.', 'foo.egg-info')
+ with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file:
+ pkg_info_lines = pkginfo_file.read().split('\n')
+
+ assert 'License: This is a long license text ' in pkg_info_lines
+ assert ' over multiple lines' in pkg_info_lines
+ assert 'text \n over multiple' in '\n'.join(pkg_info_lines)
def test_python_requires_egg_info(self, tmpdir_cwd, env):
self._setup_script_with_requires(
@@ -539,16 +1022,7 @@ class TestEggInfo(object):
with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file:
pkg_info_lines = pkginfo_file.read().split('\n')
assert 'Requires-Python: >=2.7.12' in pkg_info_lines
- assert 'Metadata-Version: 1.2' in pkg_info_lines
-
- def test_python_requires_install(self, tmpdir_cwd, env):
- self._setup_script_with_requires(
- """python_requires='>=1.2.3',""")
- self._run_install_command(tmpdir_cwd, env)
- egg_info_dir = self._find_egg_info_files(env.paths['lib']).base
- pkginfo = os.path.join(egg_info_dir, 'PKG-INFO')
- with open(pkginfo) as f:
- assert 'Requires-Python: >=1.2.3' in f.read().split('\n')
+ assert self._extract_mv_version(pkg_info_lines) >= (1, 2)
def test_manifest_maker_warning_suppression(self):
fixtures = [
@@ -559,17 +1033,27 @@ class TestEggInfo(object):
for msg in fixtures:
assert manifest_maker._should_suppress_warning(msg)
- def _run_install_command(self, tmpdir_cwd, env, cmd=None, output=None):
+ def test_egg_info_includes_setup_py(self, tmpdir_cwd):
+ self._create_project()
+ dist = Distribution({"name": "foo", "version": "0.0.1"})
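+ # use a non-default script name to check that "setup.py" is still recorded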
+ dist.script_name = "non_setup.py"
+ egg_info_instance = egg_info(dist)
+ egg_info_instance.finalize_options()
+ egg_info_instance.run()
+
+ assert 'setup.py' in egg_info_instance.filelist.files
+
+ with open(egg_info_instance.egg_info + "/SOURCES.txt") as f:
+ sources = f.read().split('\n')
+ assert 'setup.py' in sources
+
+ def _run_egg_info_command(self, tmpdir_cwd, env, cmd=None, output=None):
environ = os.environ.copy().update(
HOME=env.paths['home'],
)
if cmd is None:
cmd = [
- 'install',
- '--home', env.paths['home'],
- '--install-lib', env.paths['lib'],
- '--install-scripts', env.paths['scripts'],
- '--install-data', env.paths['data'],
+ 'egg_info',
]
code, data = environment.run_setup_py(
cmd=cmd,
@@ -577,22 +1061,26 @@ class TestEggInfo(object):
data_stream=1,
env=environ,
)
- if code:
- raise AssertionError(data)
+ assert not code, data
+
if output:
assert output in data
- def _find_egg_info_files(self, root):
- class DirList(list):
- def __init__(self, files, base):
- super(DirList, self).__init__(files)
- self.base = base
+ def test_egg_info_tag_only_once(self, tmpdir_cwd, env):
+ self._create_project()
+ path.build({
+ 'setup.cfg': DALS("""
+ [egg_info]
+ tag_build = dev
+ tag_date = 0
+ tag_svn_revision = 0
+ """),
+ })
+ self._run_egg_info_command(tmpdir_cwd, env)
+ egg_info_dir = os.path.join('.', 'foo.egg-info')
+ with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file:
+ pkg_info_lines = pkginfo_file.read().split('\n')
+ assert 'Version: 0.0.0.dev0' in pkg_info_lines
- results = (
- DirList(filenames, dirpath)
- for dirpath, dirnames, filenames in os.walk(root)
- if os.path.basename(dirpath) == 'EGG-INFO'
- )
- # expect exactly one result
- result, = results
- return result
+ def test_get_pkg_info_revision_deprecated(self):
+ pytest.warns(EggInfoDeprecationWarning, get_pkg_info_revision)
diff --git a/setuptools/tests/test_extern.py b/setuptools/tests/test_extern.py
new file mode 100644
index 0000000..0d6b164
--- /dev/null
+++ b/setuptools/tests/test_extern.py
@@ -0,0 +1,20 @@
+import importlib
+import pickle
+
+from setuptools import Distribution
+from setuptools.extern import ordered_set
+
+
+def test_reimport_extern():
+ ordered_set2 = importlib.import_module(ordered_set.__name__)
+ assert ordered_set is ordered_set2
+
+
+def test_orderedset_pickle_roundtrip():
+ o1 = ordered_set.OrderedSet([1, 2, 5])
+ o2 = pickle.loads(pickle.dumps(o1))
+ assert o1 == o2
+
+
+def test_distribution_picklable():
+ pickle.loads(pickle.dumps(Distribution()))
diff --git a/setuptools/tests/test_find_packages.py b/setuptools/tests/test_find_packages.py
index a6023de..efcce92 100644
--- a/setuptools/tests/test_find_packages.py
+++ b/setuptools/tests/test_find_packages.py
@@ -1,4 +1,4 @@
-"""Tests for setuptools.find_packages()."""
+"""Tests for automatic package discovery"""
import os
import sys
import shutil
@@ -7,14 +7,12 @@ import platform
import pytest
-import setuptools
from setuptools import find_packages
+from setuptools import find_namespace_packages
+from setuptools.discovery import FlatLayoutPackageFinder
-find_420_packages = setuptools.PEP420PackageFinder.find
# modeled after CPython's test.support.can_symlink
-
-
def can_symlink():
TESTFN = tempfile.mktemp()
symlink_path = TESTFN + "can_symlink"
@@ -154,29 +152,94 @@ class TestFindPackages:
assert set(actual) == set(expected)
def test_pep420_ns_package(self):
- packages = find_420_packages(
+ packages = find_namespace_packages(
self.dist_dir, include=['pkg*'], exclude=['pkg.subpkg.assets'])
self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])
def test_pep420_ns_package_no_includes(self):
- packages = find_420_packages(
+ packages = find_namespace_packages(
self.dist_dir, exclude=['pkg.subpkg.assets'])
- self._assert_packages(packages, ['docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg'])
+ self._assert_packages(
+ packages, ['docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg'])
def test_pep420_ns_package_no_includes_or_excludes(self):
- packages = find_420_packages(self.dist_dir)
+ packages = find_namespace_packages(self.dist_dir)
expected = [
'docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg', 'pkg.subpkg.assets']
self._assert_packages(packages, expected)
def test_regular_package_with_nested_pep420_ns_packages(self):
self._touch('__init__.py', self.pkg_dir)
- packages = find_420_packages(
+ packages = find_namespace_packages(
self.dist_dir, exclude=['docs', 'pkg.subpkg.assets'])
self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])
def test_pep420_ns_package_no_non_package_dirs(self):
shutil.rmtree(self.docs_dir)
shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
- packages = find_420_packages(self.dist_dir)
+ packages = find_namespace_packages(self.dist_dir)
self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])
+
+
+class TestFlatLayoutPackageFinder:
+ EXAMPLES = {
+ "hidden-folders": (
+ [".pkg/__init__.py", "pkg/__init__.py", "pkg/nested/file.txt"],
+ ["pkg", "pkg.nested"]
+ ),
+ "private-packages": (
+ ["_pkg/__init__.py", "pkg/_private/__init__.py"],
+ ["pkg", "pkg._private"]
+ ),
+ "invalid-name": (
+ ["invalid-pkg/__init__.py", "other.pkg/__init__.py", "yet,another/file.py"],
+ []
+ ),
+ "docs": (
+ ["pkg/__init__.py", "docs/conf.py", "docs/readme.rst"],
+ ["pkg"]
+ ),
+ "tests": (
+ ["pkg/__init__.py", "tests/test_pkg.py", "tests/__init__.py"],
+ ["pkg"]
+ ),
+ "examples": (
+ [
+ "pkg/__init__.py",
+ "examples/__init__.py",
+ "examples/file.py"
+ "example/other_file.py",
+ # Sub-packages should always be fine
+ "pkg/example/__init__.py",
+ "pkg/examples/__init__.py",
+ ],
+ ["pkg", "pkg.examples", "pkg.example"]
+ ),
+ "tool-specific": (
+ [
+ "pkg/__init__.py",
+ "tasks/__init__.py",
+ "tasks/subpackage/__init__.py",
+ "fabfile/__init__.py",
+ "fabfile/subpackage/__init__.py",
+ # Sub-packages should always be fine
+ "pkg/tasks/__init__.py",
+ "pkg/fabfile/__init__.py",
+ ],
+ ["pkg", "pkg.tasks", "pkg.fabfile"]
+ )
+ }
+
+ @pytest.mark.parametrize("example", EXAMPLES.keys())
+ def test_unwanted_directories_not_included(self, tmp_path, example):
+ files, expected_packages = self.EXAMPLES[example]
+ ensure_files(tmp_path, files)
+ found_packages = FlatLayoutPackageFinder.find(str(tmp_path))
+ assert set(found_packages) == set(expected_packages)
+
+
+def ensure_files(root_path, files):
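+ """Create empty files (and any parent directories) for each relative path in ``files``."""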
+ for file in files:
+ path = root_path / file
+ path.parent.mkdir(parents=True, exist_ok=True)
+ path.touch()
diff --git a/setuptools/tests/test_find_py_modules.py b/setuptools/tests/test_find_py_modules.py
new file mode 100644
index 0000000..4ef6880
--- /dev/null
+++ b/setuptools/tests/test_find_py_modules.py
@@ -0,0 +1,81 @@
+"""Tests for automatic discovery of modules"""
+import os
+
+import pytest
+
+from setuptools.discovery import FlatLayoutModuleFinder, ModuleFinder
+
+from .test_find_packages import ensure_files, has_symlink
+
+
+class TestModuleFinder:
+ def find(self, path, *args, **kwargs):
+ return set(ModuleFinder.find(str(path), *args, **kwargs))
+
+ EXAMPLES = {
+ # circumstance: (files, kwargs, expected_modules)
+ "simple_folder": (
+ ["file.py", "other.py"],
+ {}, # kwargs
+ ["file", "other"],
+ ),
+ "exclude": (
+ ["file.py", "other.py"],
+ {"exclude": ["f*"]},
+ ["other"],
+ ),
+ "include": (
+ ["file.py", "fole.py", "other.py"],
+ {"include": ["f*"], "exclude": ["fo*"]},
+ ["file"],
+ ),
+ "invalid-name": (
+ ["my-file.py", "other.file.py"],
+ {},
+ []
+ )
+ }
+
+ @pytest.mark.parametrize("example", EXAMPLES.keys())
+ def test_finder(self, tmp_path, example):
+ files, kwargs, expected_modules = self.EXAMPLES[example]
+ ensure_files(tmp_path, files)
+ assert self.find(tmp_path, **kwargs) == set(expected_modules)
+
+ @pytest.mark.skipif(not has_symlink(), reason='Symlink support required')
+ def test_symlinked_packages_are_included(self, tmp_path):
+ src = "_myfiles/file.py"
+ ensure_files(tmp_path, [src])
+ os.symlink(tmp_path / src, tmp_path / "link.py")
+ assert self.find(tmp_path) == {"link"}
+
+
+class TestFlatLayoutModuleFinder:
+ def find(self, path, *args, **kwargs):
+ return set(FlatLayoutModuleFinder.find(str(path)))
+
+ EXAMPLES = {
+ # circumstance: (files, expected_modules)
+ "hidden-files": (
+ [".module.py"],
+ []
+ ),
+ "private-modules": (
+ ["_module.py"],
+ []
+ ),
+ "common-names": (
+ ["setup.py", "conftest.py", "test.py", "tests.py", "example.py", "mod.py"],
+ ["mod"]
+ ),
+ "tool-specific": (
+ ["tasks.py", "fabfile.py", "noxfile.py", "dodo.py", "manage.py", "mod.py"],
+ ["mod"]
+ )
+ }
+
+ @pytest.mark.parametrize("example", EXAMPLES.keys())
+ def test_unwanted_files_not_included(self, tmp_path, example):
+ files, expected_modules = self.EXAMPLES[example]
+ ensure_files(tmp_path, files)
+ assert self.find(tmp_path) == set(expected_modules)
diff --git a/setuptools/tests/test_glob.py b/setuptools/tests/test_glob.py
new file mode 100644
index 0000000..e99587f
--- /dev/null
+++ b/setuptools/tests/test_glob.py
@@ -0,0 +1,34 @@
+import pytest
+from jaraco import path
+
+from setuptools.glob import glob
+
+
+@pytest.mark.parametrize('tree, pattern, matches', (
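+ # each case: (whitespace-separated files to create, glob pattern, expected matches)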
+ ('', b'', []),
+ ('', '', []),
+ ('''
+ appveyor.yml
+ CHANGES.rst
+ LICENSE
+ MANIFEST.in
+ pyproject.toml
+ README.rst
+ setup.cfg
+ setup.py
+ ''', '*.rst', ('CHANGES.rst', 'README.rst')),
+ ('''
+ appveyor.yml
+ CHANGES.rst
+ LICENSE
+ MANIFEST.in
+ pyproject.toml
+ README.rst
+ setup.cfg
+ setup.py
+ ''', b'*.rst', (b'CHANGES.rst', b'README.rst')),
+))
+def test_glob(monkeypatch, tmpdir, tree, pattern, matches):
+ monkeypatch.chdir(tmpdir)
+ path.build({name: '' for name in tree.split()})
+ assert list(sorted(glob(pattern))) == list(sorted(matches))
diff --git a/setuptools/tests/test_install_scripts.py b/setuptools/tests/test_install_scripts.py
index 7393241..4338c79 100644
--- a/setuptools/tests/test_install_scripts.py
+++ b/setuptools/tests/test_install_scripts.py
@@ -19,7 +19,7 @@ class TestInstallScripts:
)
unix_exe = '/usr/dummy-test-path/local/bin/python'
unix_spaces_exe = '/usr/bin/env dummy-test-python'
- win32_exe = 'C:\\Dummy Test Path\\Program Files\\Python 3.3\\python.exe'
+ win32_exe = 'C:\\Dummy Test Path\\Program Files\\Python 3.6\\python.exe'
def _run_install_scripts(self, install_dir, executable=None):
dist = Distribution(self.settings)
@@ -64,7 +64,8 @@ class TestInstallScripts:
@pytest.mark.skipif(sys.platform == 'win32', reason='non-Windows only')
def test_executable_with_spaces_escaping_unix(self, tmpdir):
"""
- Ensure that shebang on Unix is not quoted, even when a value with spaces
+ Ensure that shebang on Unix is not quoted, even when
+ a value with spaces
is specified using --executable.
"""
expected = '#!%s\n' % self.unix_spaces_exe
@@ -77,7 +78,8 @@ class TestInstallScripts:
@pytest.mark.skipif(sys.platform != 'win32', reason='Windows only')
def test_executable_arg_escaping_win32(self, tmpdir):
"""
- Ensure that shebang on Windows is quoted when getting a path with spaces
+ Ensure that shebang on Windows is quoted when
+ getting a path with spaces
from --executable, that is itself properly quoted.
"""
expected = '#!"%s"\n' % self.win32_exe
diff --git a/setuptools/tests/test_integration.py b/setuptools/tests/test_integration.py
index 3a9a6c5..b557831 100644
--- a/setuptools/tests/test_integration.py
+++ b/setuptools/tests/test_integration.py
@@ -6,8 +6,8 @@ Try to install a few packages.
import glob
import os
import sys
+import urllib.request
-from setuptools.extern.six.moves import urllib
import pytest
from setuptools.command.easy_install import easy_install
@@ -15,6 +15,13 @@ from setuptools.command import easy_install as easy_install_pkg
from setuptools.dist import Distribution
+pytestmark = pytest.mark.skipif(
+ 'platform.python_implementation() == "PyPy" and '
+ 'platform.system() == "Windows"',
+ reason="pypa/setuptools#2496",
+)
+
+
def setup_module(module):
packages = 'stevedore', 'virtualenvwrapper', 'pbr', 'novaclient'
for pkg in packages:
@@ -59,7 +66,7 @@ def install_context(request, tmpdir, monkeypatch):
monkeypatch.setattr('site.USER_BASE', user_base.strpath)
monkeypatch.setattr('site.USER_SITE', user_site.strpath)
monkeypatch.setattr('sys.path', sys.path + [install_dir.strpath])
- monkeypatch.setenv('PYTHONPATH', os.path.pathsep.join(sys.path))
+ monkeypatch.setenv(str('PYTHONPATH'), str(os.path.pathsep.join(sys.path)))
# Set up the command for performing the installation.
dist = Distribution()
@@ -112,54 +119,3 @@ def test_pyuri(install_context):
# The package data should be installed.
assert os.path.exists(os.path.join(pyuri.location, 'pyuri', 'uri.regex'))
-
-
-import re
-import subprocess
-import functools
-import tarfile, zipfile
-
-
-build_deps = ['appdirs', 'packaging', 'pyparsing', 'six']
-@pytest.mark.parametrize("build_dep", build_deps)
-@pytest.mark.skipif(sys.version_info < (3, 6), reason='run only on late versions')
-def test_build_deps_on_distutils(request, tmpdir_factory, build_dep):
- """
- All setuptools build dependencies must build without
- setuptools.
- """
- if 'pyparsing' in build_dep:
- pytest.xfail(reason="Project imports setuptools unconditionally")
- build_target = tmpdir_factory.mktemp('source')
- build_dir = download_and_extract(request, build_dep, build_target)
- install_target = tmpdir_factory.mktemp('target')
- output = install(build_dir, install_target)
- for line in output.splitlines():
- match = re.search('Unknown distribution option: (.*)', line)
- allowed_unknowns = [
- 'test_suite',
- 'tests_require',
- 'install_requires',
- ]
- assert not match or match.group(1).strip('"\'') in allowed_unknowns
-
-
-def install(pkg_dir, install_dir):
- with open(os.path.join(pkg_dir, 'setuptools.py'), 'w') as breaker:
- breaker.write('raise ImportError()')
- cmd = [sys.executable, 'setup.py', 'install', '--prefix', install_dir]
- env = dict(os.environ, PYTHONPATH=pkg_dir)
- output = subprocess.check_output(cmd, cwd=pkg_dir, env=env, stderr=subprocess.STDOUT)
- return output.decode('utf-8')
-
-
-def download_and_extract(request, req, target):
- cmd = [sys.executable, '-m', 'pip', 'download', '--no-deps',
- '--no-binary', ':all:', req]
- output = subprocess.check_output(cmd, encoding='utf-8')
- filename = re.search('Saved (.*)', output).group(1)
- request.addfinalizer(functools.partial(os.remove, filename))
- opener = zipfile.ZipFile if filename.endswith('.zip') else tarfile.open
- with opener(filename) as archive:
- archive.extractall(target)
- return os.path.join(target, os.listdir(target)[0])
diff --git a/setuptools/tests/test_logging.py b/setuptools/tests/test_logging.py
new file mode 100644
index 0000000..a5ddd56
--- /dev/null
+++ b/setuptools/tests/test_logging.py
@@ -0,0 +1,36 @@
+import logging
+
+import pytest
+
+
+setup_py = """\
+from setuptools import setup
+
+setup(
+ name="test_logging",
+ version="0.0"
+)
+"""
+
+
+@pytest.mark.parametrize(
+ "flag, expected_level", [("--dry-run", "INFO"), ("--verbose", "DEBUG")]
+)
+def test_verbosity_level(tmp_path, monkeypatch, flag, expected_level):
+ """Make sure the correct verbosity level is set (issue #3038)"""
+ import setuptools # noqa: Import setuptools to monkeypatch distutils
+ import distutils # <- load distutils after all the patches take place
+
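+ # Use a fresh logger as the root so a level set by other tests cannot leak in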
+ logger = logging.Logger(__name__)
+ monkeypatch.setattr(logging, "root", logger)
+ unset_log_level = logger.getEffectiveLevel()
+ assert logging.getLevelName(unset_log_level) == "NOTSET"
+
+ setup_script = tmp_path / "setup.py"
+ setup_script.write_text(setup_py)
+ dist = distutils.core.run_setup(setup_script, stop_after="init")
+ dist.script_args = [flag, "sdist"]
+ dist.parse_command_line() # <- where the log level is set
+ log_level = logger.getEffectiveLevel()
+ log_level_name = logging.getLevelName(log_level)
+ assert log_level_name == expected_level
diff --git a/setuptools/tests/test_manifest.py b/setuptools/tests/test_manifest.py
index 65eec7d..82bdb9c 100644
--- a/setuptools/tests/test_manifest.py
+++ b/setuptools/tests/test_manifest.py
@@ -7,19 +7,16 @@ import shutil
import sys
import tempfile
import itertools
+import io
from distutils import log
from distutils.errors import DistutilsTemplateError
-import pkg_resources.py31compat
from setuptools.command.egg_info import FileList, egg_info, translate_pattern
from setuptools.dist import Distribution
-from setuptools.extern import six
from setuptools.tests.textwrap import DALS
import pytest
-py3_only = pytest.mark.xfail(six.PY2, reason="Test runs on Python 3 only")
-
def make_local_path(s):
"""Converts '/' in a string to os.sep"""
@@ -42,7 +39,7 @@ setup(**%r)
@contextlib.contextmanager
def quiet():
old_stdout, old_stderr = sys.stdout, sys.stderr
- sys.stdout, sys.stderr = six.StringIO(), six.StringIO()
+ sys.stdout, sys.stderr = io.StringIO(), io.StringIO()
try:
yield
finally:
@@ -73,7 +70,9 @@ translate_specs = [
# Glob matching
('*.txt', ['foo.txt', 'bar.txt'], ['foo/foo.txt']),
- ('dir/*.txt', ['dir/foo.txt', 'dir/bar.txt', 'dir/.txt'], ['notdir/foo.txt']),
+ (
+ 'dir/*.txt',
+ ['dir/foo.txt', 'dir/bar.txt', 'dir/.txt'], ['notdir/foo.txt']),
('*/*.py', ['bin/start.py'], []),
('docs/page-?.txt', ['docs/page-9.txt'], ['docs/page-10.txt']),
@@ -157,7 +156,7 @@ def test_translated_pattern_mismatch(pattern_mismatch):
assert not translate_pattern(pattern).match(target)
-class TempDirTestCase(object):
+class TempDirTestCase:
def setup_method(self, method):
self.temp_dir = tempfile.mkdtemp()
self.old_cwd = os.getcwd()
@@ -242,77 +241,77 @@ class TestManifestTest(TempDirTestCase):
def test_exclude(self):
"""Include everything in app/ except the text files"""
- l = make_local_path
+ ml = make_local_path
self.make_manifest(
"""
include app/*
exclude app/*.txt
""")
- files = default_files | set([l('app/c.rst')])
+ files = default_files | set([ml('app/c.rst')])
assert files == self.get_files()
def test_include_multiple(self):
"""Include with multiple patterns."""
- l = make_local_path
+ ml = make_local_path
self.make_manifest("include app/*.txt app/static/*")
files = default_files | set([
- l('app/a.txt'), l('app/b.txt'),
- l('app/static/app.js'), l('app/static/app.js.map'),
- l('app/static/app.css'), l('app/static/app.css.map')])
+ ml('app/a.txt'), ml('app/b.txt'),
+ ml('app/static/app.js'), ml('app/static/app.js.map'),
+ ml('app/static/app.css'), ml('app/static/app.css.map')])
assert files == self.get_files()
def test_graft(self):
"""Include the whole app/static/ directory."""
- l = make_local_path
+ ml = make_local_path
self.make_manifest("graft app/static")
files = default_files | set([
- l('app/static/app.js'), l('app/static/app.js.map'),
- l('app/static/app.css'), l('app/static/app.css.map')])
+ ml('app/static/app.js'), ml('app/static/app.js.map'),
+ ml('app/static/app.css'), ml('app/static/app.css.map')])
assert files == self.get_files()
def test_graft_glob_syntax(self):
"""Include the whole app/static/ directory."""
- l = make_local_path
+ ml = make_local_path
self.make_manifest("graft */static")
files = default_files | set([
- l('app/static/app.js'), l('app/static/app.js.map'),
- l('app/static/app.css'), l('app/static/app.css.map')])
+ ml('app/static/app.js'), ml('app/static/app.js.map'),
+ ml('app/static/app.css'), ml('app/static/app.css.map')])
assert files == self.get_files()
def test_graft_global_exclude(self):
"""Exclude all *.map files in the project."""
- l = make_local_path
+ ml = make_local_path
self.make_manifest(
"""
graft app/static
global-exclude *.map
""")
files = default_files | set([
- l('app/static/app.js'), l('app/static/app.css')])
+ ml('app/static/app.js'), ml('app/static/app.css')])
assert files == self.get_files()
def test_global_include(self):
"""Include all *.rst, *.js, and *.css files in the whole tree."""
- l = make_local_path
+ ml = make_local_path
self.make_manifest(
"""
global-include *.rst *.js *.css
""")
files = default_files | set([
- '.hidden.rst', 'testing.rst', l('app/c.rst'),
- l('app/static/app.js'), l('app/static/app.css')])
+ '.hidden.rst', 'testing.rst', ml('app/c.rst'),
+ ml('app/static/app.js'), ml('app/static/app.css')])
assert files == self.get_files()
def test_graft_prune(self):
"""Include all files in app/, except for the whole app/static/ dir."""
- l = make_local_path
+ ml = make_local_path
self.make_manifest(
"""
graft app
prune app/static
""")
files = default_files | set([
- l('app/a.txt'), l('app/b.txt'), l('app/c.rst')])
+ ml('app/a.txt'), ml('app/b.txt'), ml('app/c.rst')])
assert files == self.get_files()
@@ -362,13 +361,13 @@ class TestFileListTest(TempDirTestCase):
for file in files:
file = os.path.join(self.temp_dir, file)
dirname, basename = os.path.split(file)
- pkg_resources.py31compat.makedirs(dirname, exist_ok=True)
+ os.makedirs(dirname, exist_ok=True)
open(file, 'w').close()
def test_process_template_line(self):
# testing all MANIFEST.in template patterns
file_list = FileList()
- l = make_local_path
+ ml = make_local_path
# simulated file list
self.make_files([
@@ -376,16 +375,16 @@ class TestFileListTest(TempDirTestCase):
'buildout.cfg',
# filelist does not filter out VCS directories,
# it's sdist that does
- l('.hg/last-message.txt'),
- l('global/one.txt'),
- l('global/two.txt'),
- l('global/files.x'),
- l('global/here.tmp'),
- l('f/o/f.oo'),
- l('dir/graft-one'),
- l('dir/dir2/graft2'),
- l('dir3/ok'),
- l('dir3/sub/ok.txt'),
+ ml('.hg/last-message.txt'),
+ ml('global/one.txt'),
+ ml('global/two.txt'),
+ ml('global/files.x'),
+ ml('global/here.tmp'),
+ ml('f/o/f.oo'),
+ ml('dir/graft-one'),
+ ml('dir/dir2/graft2'),
+ ml('dir3/ok'),
+ ml('dir3/sub/ok.txt'),
])
MANIFEST_IN = DALS("""\
@@ -412,12 +411,12 @@ class TestFileListTest(TempDirTestCase):
'buildout.cfg',
'four.txt',
'ok',
- l('.hg/last-message.txt'),
- l('dir/graft-one'),
- l('dir/dir2/graft2'),
- l('f/o/f.oo'),
- l('global/one.txt'),
- l('global/two.txt'),
+ ml('.hg/last-message.txt'),
+ ml('dir/graft-one'),
+ ml('dir/dir2/graft2'),
+ ml('f/o/f.oo'),
+ ml('global/one.txt'),
+ ml('global/two.txt'),
]
file_list.sort()
@@ -474,10 +473,10 @@ class TestFileListTest(TempDirTestCase):
assert False, "Should have thrown an error"
def test_include(self):
- l = make_local_path
+ ml = make_local_path
# include
file_list = FileList()
- self.make_files(['a.py', 'b.txt', l('d/c.py')])
+ self.make_files(['a.py', 'b.txt', ml('d/c.py')])
file_list.process_template_line('include *.py')
file_list.sort()
@@ -490,42 +489,42 @@ class TestFileListTest(TempDirTestCase):
self.assertWarnings()
def test_exclude(self):
- l = make_local_path
+ ml = make_local_path
# exclude
file_list = FileList()
- file_list.files = ['a.py', 'b.txt', l('d/c.py')]
+ file_list.files = ['a.py', 'b.txt', ml('d/c.py')]
file_list.process_template_line('exclude *.py')
file_list.sort()
- assert file_list.files == ['b.txt', l('d/c.py')]
+ assert file_list.files == ['b.txt', ml('d/c.py')]
self.assertNoWarnings()
file_list.process_template_line('exclude *.rb')
file_list.sort()
- assert file_list.files == ['b.txt', l('d/c.py')]
+ assert file_list.files == ['b.txt', ml('d/c.py')]
self.assertWarnings()
def test_global_include(self):
- l = make_local_path
+ ml = make_local_path
# global-include
file_list = FileList()
- self.make_files(['a.py', 'b.txt', l('d/c.py')])
+ self.make_files(['a.py', 'b.txt', ml('d/c.py')])
file_list.process_template_line('global-include *.py')
file_list.sort()
- assert file_list.files == ['a.py', l('d/c.py')]
+ assert file_list.files == ['a.py', ml('d/c.py')]
self.assertNoWarnings()
file_list.process_template_line('global-include *.rb')
file_list.sort()
- assert file_list.files == ['a.py', l('d/c.py')]
+ assert file_list.files == ['a.py', ml('d/c.py')]
self.assertWarnings()
def test_global_exclude(self):
- l = make_local_path
+ ml = make_local_path
# global-exclude
file_list = FileList()
- file_list.files = ['a.py', 'b.txt', l('d/c.py')]
+ file_list.files = ['a.py', 'b.txt', ml('d/c.py')]
file_list.process_template_line('global-exclude *.py')
file_list.sort()
@@ -538,65 +537,65 @@ class TestFileListTest(TempDirTestCase):
self.assertWarnings()
def test_recursive_include(self):
- l = make_local_path
+ ml = make_local_path
# recursive-include
file_list = FileList()
- self.make_files(['a.py', l('d/b.py'), l('d/c.txt'), l('d/d/e.py')])
+ self.make_files(['a.py', ml('d/b.py'), ml('d/c.txt'), ml('d/d/e.py')])
file_list.process_template_line('recursive-include d *.py')
file_list.sort()
- assert file_list.files == [l('d/b.py'), l('d/d/e.py')]
+ assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')]
self.assertNoWarnings()
file_list.process_template_line('recursive-include e *.py')
file_list.sort()
- assert file_list.files == [l('d/b.py'), l('d/d/e.py')]
+ assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')]
self.assertWarnings()
def test_recursive_exclude(self):
- l = make_local_path
+ ml = make_local_path
# recursive-exclude
file_list = FileList()
- file_list.files = ['a.py', l('d/b.py'), l('d/c.txt'), l('d/d/e.py')]
+ file_list.files = ['a.py', ml('d/b.py'), ml('d/c.txt'), ml('d/d/e.py')]
file_list.process_template_line('recursive-exclude d *.py')
file_list.sort()
- assert file_list.files == ['a.py', l('d/c.txt')]
+ assert file_list.files == ['a.py', ml('d/c.txt')]
self.assertNoWarnings()
file_list.process_template_line('recursive-exclude e *.py')
file_list.sort()
- assert file_list.files == ['a.py', l('d/c.txt')]
+ assert file_list.files == ['a.py', ml('d/c.txt')]
self.assertWarnings()
def test_graft(self):
- l = make_local_path
+ ml = make_local_path
# graft
file_list = FileList()
- self.make_files(['a.py', l('d/b.py'), l('d/d/e.py'), l('f/f.py')])
+ self.make_files(['a.py', ml('d/b.py'), ml('d/d/e.py'), ml('f/f.py')])
file_list.process_template_line('graft d')
file_list.sort()
- assert file_list.files == [l('d/b.py'), l('d/d/e.py')]
+ assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')]
self.assertNoWarnings()
file_list.process_template_line('graft e')
file_list.sort()
- assert file_list.files == [l('d/b.py'), l('d/d/e.py')]
+ assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')]
self.assertWarnings()
def test_prune(self):
- l = make_local_path
+ ml = make_local_path
# prune
file_list = FileList()
- file_list.files = ['a.py', l('d/b.py'), l('d/d/e.py'), l('f/f.py')]
+ file_list.files = ['a.py', ml('d/b.py'), ml('d/d/e.py'), ml('f/f.py')]
file_list.process_template_line('prune d')
file_list.sort()
- assert file_list.files == ['a.py', l('f/f.py')]
+ assert file_list.files == ['a.py', ml('f/f.py')]
self.assertNoWarnings()
file_list.process_template_line('prune e')
file_list.sort()
- assert file_list.files == ['a.py', l('f/f.py')]
+ assert file_list.files == ['a.py', ml('f/f.py')]
self.assertWarnings()
diff --git a/setuptools/tests/test_msvc.py b/setuptools/tests/test_msvc.py
index 32d7a90..d1527bf 100644
--- a/setuptools/tests/test_msvc.py
+++ b/setuptools/tests/test_msvc.py
@@ -49,7 +49,8 @@ def mock_reg(hkcu=None, hklm=None):
for k in hive if k.startswith(key.lower())
)
- return mock.patch.multiple(distutils.msvc9compiler.Reg,
+ return mock.patch.multiple(
+ distutils.msvc9compiler.Reg,
read_keys=read_keys, read_values=read_values)
@@ -61,7 +62,7 @@ class TestModulePatch:
"""
key_32 = r'software\microsoft\devdiv\vcforpython\9.0\installdir'
- key_64 = r'software\wow6432node\microsoft\devdiv\vcforpython\9.0\installdir'
+ key_64 = key_32.replace(r'\microsoft', r'\wow6432node\microsoft')
def test_patched(self):
"Test the module is actually patched"
@@ -87,7 +88,7 @@ class TestModulePatch:
assert isinstance(exc, expected)
assert 'aka.ms/vcpython27' in str(exc)
- @pytest.yield_fixture
+ @pytest.fixture
def user_preferred_setting(self):
"""
Set up environment with different install dirs for user vs. system
@@ -115,7 +116,7 @@ class TestModulePatch:
expected = os.path.join(user_preferred_setting, 'vcvarsall.bat')
assert expected == result
- @pytest.yield_fixture
+ @pytest.fixture
def local_machine_setting(self):
"""
Set up environment with only the system environment configured.
@@ -137,7 +138,7 @@ class TestModulePatch:
expected = os.path.join(local_machine_setting, 'vcvarsall.bat')
assert expected == result
- @pytest.yield_fixture
+ @pytest.fixture
def x64_preferred_setting(self):
"""
Set up environment with 64-bit and 32-bit system settings configured
diff --git a/setuptools/tests/test_msvc14.py b/setuptools/tests/test_msvc14.py
new file mode 100644
index 0000000..1aca12d
--- /dev/null
+++ b/setuptools/tests/test_msvc14.py
@@ -0,0 +1,82 @@
+# -*- coding: utf-8 -*-
+"""
+Tests for msvc support module (msvc14 unit tests).
+"""
+
+import os
+from distutils.errors import DistutilsPlatformError
+import pytest
+import sys
+
+
+@pytest.mark.skipif(sys.platform != "win32",
+ reason="These tests are only for win32")
+class TestMSVC14:
+ """Python 3.8 "distutils/tests/test_msvccompiler.py" backport"""
+ def test_no_compiler(self):
+ import setuptools.msvc as _msvccompiler
+ # makes sure query_vcvarsall raises
+ # a DistutilsPlatformError if the compiler
+ # is not found
+
+ def _find_vcvarsall(plat_spec):
+ return None, None
+
+ old_find_vcvarsall = _msvccompiler._msvc14_find_vcvarsall
+ _msvccompiler._msvc14_find_vcvarsall = _find_vcvarsall
+ try:
+ pytest.raises(DistutilsPlatformError,
+ _msvccompiler._msvc14_get_vc_env,
+ 'wont find this version')
+ finally:
+ _msvccompiler._msvc14_find_vcvarsall = old_find_vcvarsall
+
+ def test_get_vc_env_unicode(self):
+ import setuptools.msvc as _msvccompiler
+
+ test_var = 'ṰḖṤṪ┅ṼẨṜ'
+ test_value = '₃⁴₅'
+
+ # Ensure we don't exit early from _msvc14_get_vc_env
+ old_distutils_use_sdk = os.environ.pop('DISTUTILS_USE_SDK', None)
+ os.environ[test_var] = test_value
+ try:
+ env = _msvccompiler._msvc14_get_vc_env('x86')
+ assert test_var.lower() in env
+ assert test_value == env[test_var.lower()]
+ finally:
+ os.environ.pop(test_var)
+ if old_distutils_use_sdk:
+ os.environ['DISTUTILS_USE_SDK'] = old_distutils_use_sdk
+
+ def test_get_vc2017(self):
+ import setuptools.msvc as _msvccompiler
+
+ # This function cannot be mocked, so the test passes if VS 2017 is
+ # found and is skipped if it is not.
+ version, path = _msvccompiler._msvc14_find_vc2017()
+ if os.environ.get('APPVEYOR_BUILD_WORKER_IMAGE', '') in [
+ 'Visual Studio 2017'
+ ]:
+ assert version
+ if version:
+ assert version >= 15
+ assert os.path.isdir(path)
+ else:
+ pytest.skip("VS 2017 is not installed")
+
+ def test_get_vc2015(self):
+ import setuptools.msvc as _msvccompiler
+
+ # This function cannot be mocked, so the test passes if VS 2015 is
+ # found and is skipped if it is not.
+ version, path = _msvccompiler._msvc14_find_vc2015()
+ if os.environ.get('APPVEYOR_BUILD_WORKER_IMAGE', '') in [
+ 'Visual Studio 2015', 'Visual Studio 2017'
+ ]:
+ assert version
+ if version:
+ assert version >= 14
+ assert os.path.isdir(path)
+ else:
+ pytest.skip("VS 2015 is not installed")
diff --git a/setuptools/tests/test_namespaces.py b/setuptools/tests/test_namespaces.py
index 1ac1b35..270f90c 100644
--- a/setuptools/tests/test_namespaces.py
+++ b/setuptools/tests/test_namespaces.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import, unicode_literals
-
-import os
import sys
import subprocess
@@ -12,10 +9,10 @@ from setuptools.command import test
class TestNamespaces:
- @pytest.mark.xfail(sys.version_info < (3, 5),
- reason="Requires importlib.util.module_from_spec")
- @pytest.mark.skipif(bool(os.environ.get("APPVEYOR")),
- reason="https://github.com/pypa/setuptools/issues/851")
+ @pytest.mark.skipif(
+ sys.version_info < (3, 5),
+ reason="Requires importlib.util.module_from_spec",
+ )
def test_mixed_site_and_non_site(self, tmpdir):
"""
Installing two packages sharing the same namespace, one installed
@@ -55,8 +52,6 @@ class TestNamespaces:
with test.test.paths_on_pythonpath(map(str, targets)):
subprocess.check_call(try_import)
- @pytest.mark.skipif(bool(os.environ.get("APPVEYOR")),
- reason="https://github.com/pypa/setuptools/issues/851")
def test_pkg_resources_import(self, tmpdir):
"""
Ensure that a namespace package doesn't break on import
@@ -67,8 +62,9 @@ class TestNamespaces:
target.mkdir()
install_cmd = [
sys.executable,
- '-m', 'easy_install',
- '-d', str(target),
+ '-m', 'pip',
+ 'install',
+ '-t', str(target),
str(pkg),
]
with test.test.paths_on_pythonpath([str(target)]):
@@ -81,8 +77,6 @@ class TestNamespaces:
with test.test.paths_on_pythonpath([str(target)]):
subprocess.check_call(try_import)
- @pytest.mark.skipif(bool(os.environ.get("APPVEYOR")),
- reason="https://github.com/pypa/setuptools/issues/851")
def test_namespace_package_installed_and_cwd(self, tmpdir):
"""
Installing a namespace packages but also having it in the current
@@ -109,3 +103,32 @@ class TestNamespaces:
]
with test.test.paths_on_pythonpath([str(target)]):
subprocess.check_call(pkg_resources_imp, cwd=str(pkg_A))
+
+ def test_packages_in_the_same_namespace_installed_and_cwd(self, tmpdir):
+ """
+ When one namespace package is installed and another package in the
+ same namespace is in the current working directory, both of them
+ must be importable.
+ """
+ pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
+ pkg_B = namespaces.build_namespace_package(tmpdir, 'myns.pkgB')
+ target = tmpdir / 'packages'
+ # use pip to install to the target directory
+ install_cmd = [
+ sys.executable,
+ '-m',
+ 'pip.__main__',
+ 'install',
+ str(pkg_A),
+ '-t', str(target),
+ ]
+ subprocess.check_call(install_cmd)
+ namespaces.make_site_dir(target)
+
+ # ensure that all packages import and pkg_resources imports
+ pkg_resources_imp = [
+ sys.executable,
+ '-c', 'import pkg_resources; import myns.pkgA; import myns.pkgB',
+ ]
+ with test.test.paths_on_pythonpath([str(target)]):
+ subprocess.check_call(pkg_resources_imp, cwd=str(pkg_B))
diff --git a/setuptools/tests/test_packageindex.py b/setuptools/tests/test_packageindex.py
index 63b9294..8e9435e 100644
--- a/setuptools/tests/test_packageindex.py
+++ b/setuptools/tests/test_packageindex.py
@@ -1,15 +1,15 @@
-from __future__ import absolute_import
-
import sys
import os
import distutils.errors
+import platform
+import urllib.request
+import urllib.error
+import http.client
-from setuptools.extern import six
-from setuptools.extern.six.moves import urllib, http_client
+import mock
+import pytest
-import pkg_resources
import setuptools.package_index
-from setuptools.tests.server import IndexServer
from .textwrap import DALS
@@ -42,7 +42,10 @@ class TestPackageIndex:
hosts=('www.example.com',)
)
- url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk'
+ url = (
+ 'url:%20https://svn.plone.org/svn'
+ '/collective/inquant.contentmirror.plone/trunk'
+ )
try:
v = index.open_url(url)
except Exception as v:
@@ -56,14 +59,14 @@ class TestPackageIndex:
)
def _urlopen(*args):
- raise http_client.BadStatusLine('line')
+ raise http.client.BadStatusLine('line')
index.opener = _urlopen
url = 'http://example.com'
try:
- v = index.open_url(url)
- except Exception as v:
- assert 'line' in str(v)
+ index.open_url(url)
+ except Exception as exc:
+ assert 'line' in str(exc)
else:
raise AssertionError('Should have raise here!')
@@ -80,8 +83,12 @@ class TestPackageIndex:
try:
index.open_url(url)
except distutils.errors.DistutilsError as error:
- msg = six.text_type(error)
- assert 'nonnumeric port' in msg or 'getaddrinfo failed' in msg or 'Name or service not known' in msg
+ msg = str(error)
+ assert (
+ 'nonnumeric port' in msg
+ or 'getaddrinfo failed' in msg
+ or 'Name or service not known' in msg
+ )
return
raise RuntimeError("Did not raise")
@@ -105,43 +112,6 @@ class TestPackageIndex:
url = 'file:///tmp/test_package_index'
assert index.url_ok(url, True)
- def test_links_priority(self):
- """
- Download links from the pypi simple index should be used before
- external download links.
- https://bitbucket.org/tarek/distribute/issue/163
-
- Usecase :
- - someone uploads a package on pypi, a md5 is generated
- - someone manually copies this link (with the md5 in the url) onto an
- external page accessible from the package page.
- - someone reuploads the package (with a different md5)
- - while easy_installing, an MD5 error occurs because the external link
- is used
- -> Setuptools should use the link from pypi, not the external one.
- """
- if sys.platform.startswith('java'):
- # Skip this test on jython because binding to :0 fails
- return
-
- # start an index server
- server = IndexServer()
- server.start()
- index_url = server.base_url() + 'test_links_priority/simple/'
-
- # scan a test index
- pi = setuptools.package_index.PackageIndex(index_url)
- requirement = pkg_resources.Requirement.parse('foobar')
- pi.find_packages(requirement)
- server.stop()
-
- # the distribution has been found
- assert 'foobar' in pi
- # we have only one link, because links are compared without md5
- assert len(pi['foobar']) == 1
- # the link should be from the index
- assert 'correct_md5' in pi['foobar'][0].location
-
def test_parse_bdist_wininst(self):
parse = setuptools.package_index.parse_bdist_wininst
@@ -212,17 +182,72 @@ class TestPackageIndex:
('+ubuntu_0', '+ubuntu.0'),
]
versions = [
- [''.join([e, r, p, l]) for l in ll]
+ [''.join([e, r, p, loc]) for loc in locs]
for e in epoch
for r in releases
for p in sum([pre, post, dev], [''])
- for ll in local]
+ for locs in local]
for v, vc in versions:
dists = list(setuptools.package_index.distros_for_url(
'http://example.com/example.zip#egg=example-' + v))
assert dists[0].version == ''
assert dists[1].version == vc
+ def test_download_git_with_rev(self, tmpdir):
+ url = 'git+https://github.example/group/project@master#egg=foo'
+ index = setuptools.package_index.PackageIndex()
+
+ with mock.patch("os.system") as os_system_mock:
+ result = index.download(url, str(tmpdir))
+
+ os_system_mock.assert_called()
+
+ expected_dir = str(tmpdir / 'project@master')
+ expected = (
+ 'git clone --quiet '
+ 'https://github.example/group/project {expected_dir}'
+ ).format(**locals())
+ first_call_args = os_system_mock.call_args_list[0][0]
+ assert first_call_args == (expected,)
+
+ tmpl = 'git -C {expected_dir} checkout --quiet master'
+ expected = tmpl.format(**locals())
+ assert os_system_mock.call_args_list[1][0] == (expected,)
+ assert result == expected_dir
+
+ def test_download_git_no_rev(self, tmpdir):
+ url = 'git+https://github.example/group/project#egg=foo'
+ index = setuptools.package_index.PackageIndex()
+
+ with mock.patch("os.system") as os_system_mock:
+ result = index.download(url, str(tmpdir))
+
+ os_system_mock.assert_called()
+
+ expected_dir = str(tmpdir / 'project')
+ expected = (
+ 'git clone --quiet '
+ 'https://github.example/group/project {expected_dir}'
+ ).format(**locals())
+ os_system_mock.assert_called_once_with(expected)
+
+ def test_download_svn(self, tmpdir):
+ url = 'svn+https://svn.example/project#egg=foo'
+ index = setuptools.package_index.PackageIndex()
+
+ with pytest.warns(UserWarning):
+ with mock.patch("os.system") as os_system_mock:
+ result = index.download(url, str(tmpdir))
+
+ os_system_mock.assert_called()
+
+ expected_dir = str(tmpdir / 'project')
+ expected = (
+ 'svn checkout -q '
+ 'svn+https://svn.example/project {expected_dir}'
+ ).format(**locals())
+ os_system_mock.assert_called_once_with(expected)
+
class TestContentCheckers:
def test_md5(self):
@@ -258,17 +283,27 @@ class TestContentCheckers:
assert rep == 'My message about md5'
+@pytest.fixture
+def temp_home(tmpdir, monkeypatch):
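+ # On Windows, newer Pythons resolve "~" via USERPROFILE rather than HOME,
+ # so patch whichever variable expanduser will actually consult.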
+ key = (
+ 'USERPROFILE'
+ if platform.system() == 'Windows' and sys.version_info > (3, 8) else
+ 'HOME'
+ )
+
+ monkeypatch.setitem(os.environ, key, str(tmpdir))
+ return tmpdir
+
+
class TestPyPIConfig:
- def test_percent_in_password(self, tmpdir, monkeypatch):
- monkeypatch.setitem(os.environ, 'HOME', str(tmpdir))
- pypirc = tmpdir / '.pypirc'
- with pypirc.open('w') as strm:
- strm.write(DALS("""
- [pypi]
- repository=https://pypi.org
- username=jaraco
- password=pity%
- """))
+ def test_percent_in_password(self, temp_home):
+ pypirc = temp_home / '.pypirc'
+ pypirc.write(DALS("""
+ [pypi]
+ repository=https://pypi.org
+ username=jaraco
+ password=pity%
+ """))
cfg = setuptools.package_index.PyPIConfig()
cred = cfg.creds_by_repository['https://pypi.org']
assert cred.username == 'jaraco'
diff --git a/setuptools/tests/test_register.py b/setuptools/tests/test_register.py
new file mode 100644
index 0000000..9860580
--- /dev/null
+++ b/setuptools/tests/test_register.py
@@ -0,0 +1,22 @@
+from setuptools.command.register import register
+from setuptools.dist import Distribution
+from setuptools.errors import RemovedCommandError
+
+try:
+ from unittest import mock
+except ImportError:
+ import mock
+
+import pytest
+
+
+class TestRegister:
+ def test_register_exception(self):
+ """Ensure that the register command has been properly removed."""
+ dist = Distribution()
+ dist.dist_files = [(mock.Mock(), mock.Mock(), mock.Mock())]
+
+ cmd = register(dist)
+
+ with pytest.raises(RemovedCommandError):
+ cmd.run()
diff --git a/setuptools/tests/test_sandbox.py b/setuptools/tests/test_sandbox.py
index d867542..99398cd 100644
--- a/setuptools/tests/test_sandbox.py
+++ b/setuptools/tests/test_sandbox.py
@@ -26,7 +26,8 @@ class TestSandbox:
"""
It should be possible to execute a setup.py with a Byte Order Mark
"""
- target = pkg_resources.resource_filename(__name__,
+ target = pkg_resources.resource_filename(
+ __name__,
'script-with-bom.py')
namespace = types.ModuleType('namespace')
setuptools.sandbox._execfile(target, vars(namespace))
diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py
index 02222da..302cff7 100644
--- a/setuptools/tests/test_sdist.py
+++ b/setuptools/tests/test_sdist.py
@@ -1,27 +1,23 @@
-# -*- coding: utf-8 -*-
"""sdist tests"""
import os
-import shutil
import sys
import tempfile
import unicodedata
import contextlib
import io
-
-from setuptools.extern import six
-from setuptools.extern.six.moves import map
+from unittest import mock
import pytest
-import pkg_resources
+from setuptools._importlib import metadata
+from setuptools import SetuptoolsDeprecationWarning
from setuptools.command.sdist import sdist
from setuptools.command.egg_info import manifest_maker
from setuptools.dist import Distribution
from setuptools.tests import fail_on_ascii
from .text import Filenames
-py3_only = pytest.mark.xfail(six.PY2, reason="Test runs on Python 3 only")
SETUP_ATTRS = {
'name': 'sdist_test',
@@ -41,7 +37,7 @@ setup(**%r)
@contextlib.contextmanager
def quiet():
old_stdout, old_stderr = sys.stdout, sys.stderr
- sys.stdout, sys.stderr = six.StringIO(), six.StringIO()
+ sys.stdout, sys.stderr = io.StringIO(), io.StringIO()
try:
yield
finally:
@@ -50,7 +46,7 @@ def quiet():
# Convert to POSIX path
def posix(path):
- if six.PY3 and not isinstance(path, str):
+ if not isinstance(path, str):
return path.replace(os.sep.encode('ascii'), b'/')
else:
return path.replace(os.sep, '/')
@@ -58,7 +54,7 @@ def posix(path):
# HFS Plus uses decomposed UTF-8
def decompose(path):
- if isinstance(path, six.text_type):
+ if isinstance(path, str):
return unicodedata.normalize('NFD', path)
try:
path = path.decode('utf-8')
@@ -89,31 +85,35 @@ fail_on_latin1_encoded_filenames = pytest.mark.xfail(
)
+def touch(path):
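+ """Create an empty file at ``path`` (its contents are irrelevant for these tests)."""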
+ path.write_text('', encoding='utf-8')
+
+
class TestSdistTest:
- def setup_method(self, method):
- self.temp_dir = tempfile.mkdtemp()
- f = open(os.path.join(self.temp_dir, 'setup.py'), 'w')
- f.write(SETUP_PY)
- f.close()
+ @pytest.fixture(autouse=True)
+ def source_dir(self, tmpdir):
+ (tmpdir / 'setup.py').write_text(SETUP_PY, encoding='utf-8')
# Set up the rest of the test package
- test_pkg = os.path.join(self.temp_dir, 'sdist_test')
- os.mkdir(test_pkg)
- data_folder = os.path.join(self.temp_dir, "d")
- os.mkdir(data_folder)
+ test_pkg = tmpdir / 'sdist_test'
+ test_pkg.mkdir()
+ data_folder = tmpdir / 'd'
+ data_folder.mkdir()
# *.rst was not included in package_data, so c.rst should not be
# automatically added to the manifest when not under version control
- for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst',
- os.path.join(data_folder, "e.dat")]:
- # Just touch the files; their contents are irrelevant
- open(os.path.join(test_pkg, fname), 'w').close()
+ for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst']:
+ touch(test_pkg / fname)
+ touch(data_folder / 'e.dat')
- self.old_cwd = os.getcwd()
- os.chdir(self.temp_dir)
+ with tmpdir.as_cwd():
+ yield
- def teardown_method(self, method):
- os.chdir(self.old_cwd)
- shutil.rmtree(self.temp_dir)
+ def assert_package_data_in_manifest(self, cmd):
+ manifest = cmd.filelist.files
+ assert os.path.join('sdist_test', 'a.txt') in manifest
+ assert os.path.join('sdist_test', 'b.txt') in manifest
+ assert os.path.join('sdist_test', 'c.rst') not in manifest
+ assert os.path.join('d', 'e.dat') in manifest
def test_package_data_in_sdist(self):
"""Regression test for pull request #4: ensures that files listed in
@@ -129,20 +129,113 @@ class TestSdistTest:
with quiet():
cmd.run()
+ self.assert_package_data_in_manifest(cmd)
+
+ def test_package_data_and_include_package_data_in_sdist(self):
+ """
+ Ensure package_data and include_package_data work
+ together.
+ """
+ setup_attrs = {**SETUP_ATTRS, 'include_package_data': True}
+ assert setup_attrs['package_data']
+
+ dist = Distribution(setup_attrs)
+ dist.script_name = 'setup.py'
+ cmd = sdist(dist)
+ cmd.ensure_finalized()
+
+ with quiet():
+ cmd.run()
+
+ self.assert_package_data_in_manifest(cmd)
+
+ def test_custom_build_py(self):
+ """
+ Ensure projects defining custom build_py don't break
+ when creating sdists (issue #2849)
+ """
+ from distutils.command.build_py import build_py as OrigBuildPy
+
+ using_custom_command_guard = mock.Mock()
+
+ class CustomBuildPy(OrigBuildPy):
+ """
+ Some projects have custom commands inheriting from `distutils`
+ """
+
+ def get_data_files(self):
+ using_custom_command_guard()
+ return super().get_data_files()
+
+ setup_attrs = {**SETUP_ATTRS, 'include_package_data': True}
+ assert setup_attrs['package_data']
+
+ dist = Distribution(setup_attrs)
+ dist.script_name = 'setup.py'
+ cmd = sdist(dist)
+ cmd.ensure_finalized()
+
+ # Make sure we use the custom command
+ cmd.cmdclass = {'build_py': CustomBuildPy}
+ cmd.distribution.cmdclass = {'build_py': CustomBuildPy}
+ assert cmd.distribution.get_command_class('build_py') == CustomBuildPy
+
+ msg = "setuptools instead of distutils"
+ with quiet(), pytest.warns(SetuptoolsDeprecationWarning, match=msg):
+ cmd.run()
+
+ using_custom_command_guard.assert_called()
+ self.assert_package_data_in_manifest(cmd)
+
+ def test_setup_py_exists(self):
+ dist = Distribution(SETUP_ATTRS)
+ dist.script_name = 'foo.py'
+ cmd = sdist(dist)
+ cmd.ensure_finalized()
+
+ with quiet():
+ cmd.run()
+
manifest = cmd.filelist.files
- assert os.path.join('sdist_test', 'a.txt') in manifest
- assert os.path.join('sdist_test', 'b.txt') in manifest
- assert os.path.join('sdist_test', 'c.rst') not in manifest
- assert os.path.join('d', 'e.dat') in manifest
+ assert 'setup.py' in manifest
+
+ def test_setup_py_missing(self):
+ dist = Distribution(SETUP_ATTRS)
+ dist.script_name = 'foo.py'
+ cmd = sdist(dist)
+ cmd.ensure_finalized()
- def test_defaults_case_sensitivity(self):
+ if os.path.exists("setup.py"):
+ os.remove("setup.py")
+ with quiet():
+ cmd.run()
+
+ manifest = cmd.filelist.files
+ assert 'setup.py' not in manifest
+
+ def test_setup_py_excluded(self):
+ with open("MANIFEST.in", "w") as manifest_file:
+ manifest_file.write("exclude setup.py")
+
+ dist = Distribution(SETUP_ATTRS)
+ dist.script_name = 'foo.py'
+ cmd = sdist(dist)
+ cmd.ensure_finalized()
+
+ with quiet():
+ cmd.run()
+
+ manifest = cmd.filelist.files
+ assert 'setup.py' not in manifest
+
+ def test_defaults_case_sensitivity(self, tmpdir):
"""
Make sure default files (README.*, etc.) are added in a case-sensitive
way to avoid problems with packages built on Windows.
"""
- open(os.path.join(self.temp_dir, 'readme.rst'), 'w').close()
- open(os.path.join(self.temp_dir, 'SETUP.cfg'), 'w').close()
+ touch(tmpdir / 'readme.rst')
+ touch(tmpdir / 'SETUP.cfg')
dist = Distribution(SETUP_ATTRS)
# the extension deliberately capitalized for this test
@@ -190,13 +283,8 @@ class TestSdistTest:
u_contents = contents.decode('UTF-8')
# The manifest should contain the UTF-8 filename
- if six.PY2:
- fs_enc = sys.getfilesystemencoding()
- filename = filename.decode(fs_enc)
-
assert posix(filename) in u_contents
- @py3_only
@fail_on_ascii
def test_write_manifest_allows_utf8_filenames(self):
# Test for #303.
@@ -230,7 +318,6 @@ class TestSdistTest:
# The filelist should have been updated as well
assert u_filename in mm.filelist.files
- @py3_only
def test_write_manifest_skips_non_utf8_filenames(self):
"""
Files that cannot be encoded to UTF-8 (specifically, those that
@@ -294,11 +381,9 @@ class TestSdistTest:
cmd.read_manifest()
# The filelist should contain the UTF-8 filename
- if six.PY3:
- filename = filename.decode('utf-8')
+ filename = filename.decode('utf-8')
assert filename in cmd.filelist.files
- @py3_only
@fail_on_latin1_encoded_filenames
def test_read_manifest_skips_non_utf8_filenames(self):
# Test for #303.
@@ -334,7 +419,7 @@ class TestSdistTest:
@fail_on_latin1_encoded_filenames
def test_sdist_with_utf8_encoded_filename(self):
# Test for #303.
- dist = Distribution(SETUP_ATTRS)
+ dist = Distribution(self.make_strings(SETUP_ATTRS))
dist.script_name = 'setup.py'
cmd = sdist(dist)
cmd.ensure_finalized()
@@ -348,27 +433,33 @@ class TestSdistTest:
if sys.platform == 'darwin':
filename = decompose(filename)
- if six.PY3:
- fs_enc = sys.getfilesystemencoding()
+ fs_enc = sys.getfilesystemencoding()
- if sys.platform == 'win32':
- if fs_enc == 'cp1252':
- # Python 3 mangles the UTF-8 filename
- filename = filename.decode('cp1252')
- assert filename in cmd.filelist.files
- else:
- filename = filename.decode('mbcs')
- assert filename in cmd.filelist.files
+ if sys.platform == 'win32':
+ if fs_enc == 'cp1252':
+ # Python mangles the UTF-8 filename
+ filename = filename.decode('cp1252')
+ assert filename in cmd.filelist.files
else:
- filename = filename.decode('utf-8')
+ filename = filename.decode('mbcs')
assert filename in cmd.filelist.files
else:
+ filename = filename.decode('utf-8')
assert filename in cmd.filelist.files
+ @classmethod
+ def make_strings(cls, item):
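+ """Recursively coerce leaf values of dicts/lists to str."""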
+ if isinstance(item, dict):
+ return {
+ key: cls.make_strings(value) for key, value in item.items()}
+ if isinstance(item, list):
+ return list(map(cls.make_strings, item))
+ return str(item)
+
@fail_on_latin1_encoded_filenames
def test_sdist_with_latin1_encoded_filename(self):
# Test for #303.
- dist = Distribution(SETUP_ATTRS)
+ dist = Distribution(self.make_strings(SETUP_ATTRS))
dist.script_name = 'setup.py'
cmd = sdist(dist)
cmd.ensure_finalized()
@@ -381,33 +472,50 @@ class TestSdistTest:
with quiet():
cmd.run()
- if six.PY3:
- # not all windows systems have a default FS encoding of cp1252
- if sys.platform == 'win32':
- # Latin-1 is similar to Windows-1252 however
- # on mbcs filesys it is not in latin-1 encoding
- fs_enc = sys.getfilesystemencoding()
- if fs_enc != 'mbcs':
- fs_enc = 'latin-1'
- filename = filename.decode(fs_enc)
+ # not all windows systems have a default FS encoding of cp1252
+ if sys.platform == 'win32':
+ # Latin-1 is similar to Windows-1252 however
+ # on mbcs filesys it is not in latin-1 encoding
+ fs_enc = sys.getfilesystemencoding()
+ if fs_enc != 'mbcs':
+ fs_enc = 'latin-1'
+ filename = filename.decode(fs_enc)
- assert filename in cmd.filelist.files
- else:
- # The Latin-1 filename should have been skipped
- filename = filename.decode('latin-1')
- filename not in cmd.filelist.files
+ assert filename in cmd.filelist.files
else:
- # Under Python 2 there seems to be no decoded string in the
- # filelist. However, due to decode and encoding of the
- # file name to get utf-8 Manifest the latin1 maybe excluded
- try:
- # fs_enc should match how one is expect the decoding to
- # be proformed for the manifest output.
- fs_enc = sys.getfilesystemencoding()
- filename.decode(fs_enc)
- assert filename in cmd.filelist.files
- except UnicodeDecodeError:
- filename not in cmd.filelist.files
+ # The Latin-1 filename should have been skipped
+ filename = filename.decode('latin-1')
+ filename not in cmd.filelist.files
+
+ def test_pyproject_toml_in_sdist(self, tmpdir):
+ """
+ Check that pyproject.toml is included in the source distribution when present
+ """
+ touch(tmpdir / 'pyproject.toml')
+ dist = Distribution(SETUP_ATTRS)
+ dist.script_name = 'setup.py'
+ cmd = sdist(dist)
+ cmd.ensure_finalized()
+ with quiet():
+ cmd.run()
+ manifest = cmd.filelist.files
+ assert 'pyproject.toml' in manifest
+
+ def test_pyproject_toml_excluded(self, tmpdir):
+ """
+ Check that pyproject.toml can be excluded even if present
+ """
+ touch(tmpdir / 'pyproject.toml')
+ with open('MANIFEST.in', 'w') as mts:
+ print('exclude pyproject.toml', file=mts)
+ dist = Distribution(SETUP_ATTRS)
+ dist.script_name = 'setup.py'
+ cmd = sdist(dist)
+ cmd.ensure_finalized()
+ with quiet():
+ cmd.run()
+ manifest = cmd.filelist.files
+ assert 'pyproject.toml' not in manifest
def test_default_revctrl():
@@ -421,7 +529,9 @@ def test_default_revctrl():
This interface must be maintained until Ubuntu 12.04 is no longer
supported (by Setuptools).
"""
- ep_def = 'svn_cvs = setuptools.command.sdist:_default_revctrl'
- ep = pkg_resources.EntryPoint.parse(ep_def)
- res = ep.resolve()
+ ep, = metadata.EntryPoints._from_text("""
+ [setuptools.file_finders]
+ svn_cvs = setuptools.command.sdist:_default_revctrl
+ """)
+ res = ep.load()
assert hasattr(res, '__iter__')
diff --git a/setuptools/tests/test_setopt.py b/setuptools/tests/test_setopt.py
new file mode 100644
index 0000000..3600863
--- /dev/null
+++ b/setuptools/tests/test_setopt.py
@@ -0,0 +1,41 @@
+import io
+import configparser
+
+from setuptools.command import setopt
+
+
+class TestEdit:
+ @staticmethod
+ def parse_config(filename):
+ parser = configparser.ConfigParser()
+ with io.open(filename, encoding='utf-8') as reader:
+ parser.read_file(reader)
+ return parser
+
+ @staticmethod
+ def write_text(file, content):
+ with io.open(file, 'wb') as strm:
+ strm.write(content.encode('utf-8'))
+
+ def test_utf8_encoding_retained(self, tmpdir):
+ """
+ When editing a file, non-ASCII characters encoded in
+ UTF-8 should be retained.
+ """
+ config = tmpdir.join('setup.cfg')
+ self.write_text(str(config), '[names]\njaraco=джарако')
+ setopt.edit_config(str(config), dict(names=dict(other='yes')))
+ parser = self.parse_config(str(config))
+ assert parser.get('names', 'jaraco') == 'джарако'
+ assert parser.get('names', 'other') == 'yes'
+
+ def test_case_retained(self, tmpdir):
+ """
+ When editing a file, case of keys should be retained.
+ """
+ config = tmpdir.join('setup.cfg')
+ self.write_text(str(config), '[names]\nFoO=bAr')
+ setopt.edit_config(str(config), dict(names=dict(oTher='yes')))
+ actual = config.read_text(encoding='ascii')
+ assert 'FoO' in actual
+ assert 'oTher' in actual
diff --git a/setuptools/tests/test_setuptools.py b/setuptools/tests/test_setuptools.py
index 26e37a6..0640f49 100644
--- a/setuptools/tests/test_setuptools.py
+++ b/setuptools/tests/test_setuptools.py
@@ -4,19 +4,24 @@ import sys
import os
import distutils.core
import distutils.cmd
-from distutils.errors import DistutilsOptionError, DistutilsPlatformError
+from distutils.errors import DistutilsOptionError
from distutils.errors import DistutilsSetupError
from distutils.core import Extension
-from distutils.version import LooseVersion
+from zipfile import ZipFile
import pytest
+from setuptools.extern.packaging import version
+
import setuptools
import setuptools.dist
import setuptools.depends as dep
-from setuptools import Feature
from setuptools.depends import Require
-from setuptools.extern import six
+
+
+@pytest.fixture(autouse=True)
+def isolated_dir(tmpdir_cwd):
+ yield
def makeSetup(**args):
@@ -50,7 +55,7 @@ class TestDepends:
x = "test"
y = z
- fc = six.get_function_code(f1)
+ fc = f1.__code__
# unrecognized name
assert dep.extract_constant(fc, 'q', -1) is None
@@ -77,7 +82,8 @@ class TestDepends:
from json import __version__
assert dep.get_module_constant('json', '__version__') == __version__
assert dep.get_module_constant('sys', 'version') == sys.version
- assert dep.get_module_constant('setuptools.tests.test_setuptools', '__doc__') == __doc__
+ assert dep.get_module_constant(
+ 'setuptools.tests.test_setuptools', '__doc__') == __doc__
@needs_bytecode
def testRequire(self):
@@ -85,12 +91,12 @@ class TestDepends:
assert req.name == 'Json'
assert req.module == 'json'
- assert req.requested_version == '1.0.3'
+ assert req.requested_version == version.Version('1.0.3')
assert req.attribute == '__version__'
assert req.full_name() == 'Json-1.0.3'
from json import __version__
- assert req.get_version() == __version__
+ assert str(req.get_version()) == __version__
assert req.version_ok('1.0.9')
assert not req.version_ok('0.9.1')
assert not req.version_ok('unknown')
@@ -98,15 +104,15 @@ class TestDepends:
assert req.is_present()
assert req.is_current()
- req = Require('Json 3000', '03000', 'json', format=LooseVersion)
- assert req.is_present()
- assert not req.is_current()
- assert not req.version_ok('unknown')
-
req = Require('Do-what-I-mean', '1.0', 'd-w-i-m')
assert not req.is_present()
assert not req.is_current()
+ @needs_bytecode
+ def test_require_present(self):
+ # In #1896, this test was failing for months with the only
+ # complaint coming from test runners (not end users).
+ # TODO: Evaluate if this code is needed at all.
req = Require('Tests', None, 'tests', homepage="http://example.com")
assert req.format is None
assert req.attribute is None
@@ -210,83 +216,6 @@ class TestDistro:
self.dist.exclude(package_dir=['q'])
-class TestFeatures:
- def setup_method(self, method):
- self.req = Require('Distutils', '1.0.3', 'distutils')
- self.dist = makeSetup(
- features={
- 'foo': Feature("foo", standard=True, require_features=['baz', self.req]),
- 'bar': Feature("bar", standard=True, packages=['pkg.bar'],
- py_modules=['bar_et'], remove=['bar.ext'],
- ),
- 'baz': Feature(
- "baz", optional=False, packages=['pkg.baz'],
- scripts=['scripts/baz_it'],
- libraries=[('libfoo', 'foo/foofoo.c')]
- ),
- 'dwim': Feature("DWIM", available=False, remove='bazish'),
- },
- script_args=['--without-bar', 'install'],
- packages=['pkg.bar', 'pkg.foo'],
- py_modules=['bar_et', 'bazish'],
- ext_modules=[Extension('bar.ext', ['bar.c'])]
- )
-
- def testDefaults(self):
- assert not Feature(
- "test", standard=True, remove='x', available=False
- ).include_by_default()
- assert Feature("test", standard=True, remove='x').include_by_default()
- # Feature must have either kwargs, removes, or require_features
- with pytest.raises(DistutilsSetupError):
- Feature("test")
-
- def testAvailability(self):
- with pytest.raises(DistutilsPlatformError):
- self.dist.features['dwim'].include_in(self.dist)
-
- def testFeatureOptions(self):
- dist = self.dist
- assert (
- ('with-dwim', None, 'include DWIM') in dist.feature_options
- )
- assert (
- ('without-dwim', None, 'exclude DWIM (default)') in dist.feature_options
- )
- assert (
- ('with-bar', None, 'include bar (default)') in dist.feature_options
- )
- assert (
- ('without-bar', None, 'exclude bar') in dist.feature_options
- )
- assert dist.feature_negopt['without-foo'] == 'with-foo'
- assert dist.feature_negopt['without-bar'] == 'with-bar'
- assert dist.feature_negopt['without-dwim'] == 'with-dwim'
- assert ('without-baz' not in dist.feature_negopt)
-
- def testUseFeatures(self):
- dist = self.dist
- assert dist.with_foo == 1
- assert dist.with_bar == 0
- assert dist.with_baz == 1
- assert ('bar_et' not in dist.py_modules)
- assert ('pkg.bar' not in dist.packages)
- assert ('pkg.baz' in dist.packages)
- assert ('scripts/baz_it' in dist.scripts)
- assert (('libfoo', 'foo/foofoo.c') in dist.libraries)
- assert dist.ext_modules == []
- assert dist.require_features == [self.req]
-
- # If we ask for bar, it should fail because we explicitly disabled
- # it on the command line
- with pytest.raises(DistutilsOptionError):
- dist.include_feature('bar')
-
- def testFeatureWithInvalidRemove(self):
- with pytest.raises(SystemExit):
- makeSetup(features={'x': Feature('x', remove='y')})
-
-
class TestCommandTests:
def testTestIsCommand(self):
test_cmd = makeSetup().get_command_obj('test')
@@ -366,3 +295,16 @@ def test_findall_missing_symlink(tmpdir, can_symlink):
os.symlink('foo', 'bar')
found = list(setuptools.findall())
assert found == []
+
+
+def test_its_own_wheel_does_not_contain_tests(setuptools_wheel):
+ with ZipFile(setuptools_wheel) as zipfile:
+ contents = [f.replace(os.sep, '/') for f in zipfile.namelist()]
+
+ for member in contents:
+ assert '/tests/' not in member
+
+
+def test_convert_path_deprecated():
+ with pytest.warns(setuptools.SetuptoolsDeprecationWarning):
+ setuptools.convert_path('setuptools/tests')
diff --git a/setuptools/tests/test_test.py b/setuptools/tests/test_test.py
index 960527b..530474d 100644
--- a/setuptools/tests/test_test.py
+++ b/setuptools/tests/test_test.py
@@ -1,131 +1,40 @@
-# -*- coding: UTF-8 -*-
-
-from __future__ import unicode_literals
-
-from distutils import log
-import os
-import sys
-
import pytest
+from jaraco import path
from setuptools.command.test import test
from setuptools.dist import Distribution
from .textwrap import DALS
-from . import contexts
-
-SETUP_PY = DALS("""
- from setuptools import setup
-
- setup(name='foo',
- packages=['name', 'name.space', 'name.space.tests'],
- namespace_packages=['name'],
- test_suite='name.space.tests.test_suite',
- )
- """)
-
-NS_INIT = DALS("""
- # -*- coding: Latin-1 -*-
- # Söme Arbiträry Ünicode to test Distribute Issüé 310
- try:
- __import__('pkg_resources').declare_namespace(__name__)
- except ImportError:
- from pkgutil import extend_path
- __path__ = extend_path(__path__, __name__)
- """)
-
-TEST_PY = DALS("""
- import unittest
-
- class TestTest(unittest.TestCase):
- def test_test(self):
- print "Foo" # Should fail under Python 3 unless 2to3 is used
-
- test_suite = unittest.makeSuite(TestTest)
- """)
-
-
-@pytest.fixture
-def sample_test(tmpdir_cwd):
- os.makedirs('name/space/tests')
-
- # setup.py
- with open('setup.py', 'wt') as f:
- f.write(SETUP_PY)
-
- # name/__init__.py
- with open('name/__init__.py', 'wb') as f:
- f.write(NS_INIT.encode('Latin-1'))
-
- # name/space/__init__.py
- with open('name/space/__init__.py', 'wt') as f:
- f.write('#empty\n')
-
- # name/space/tests/__init__.py
- with open('name/space/tests/__init__.py', 'wt') as f:
- f.write(TEST_PY)
-
-
-@pytest.fixture
-def quiet_log():
- # Running some of the other tests will automatically
- # change the log level to info, messing our output.
- log.set_verbosity(0)
-
-
-@pytest.mark.usefixtures('sample_test', 'quiet_log')
-def test_test(capfd):
- params = dict(
- name='foo',
- packages=['name', 'name.space', 'name.space.tests'],
- namespace_packages=['name'],
- test_suite='name.space.tests.test_suite',
- use_2to3=True,
- )
- dist = Distribution(params)
- dist.script_name = 'setup.py'
- cmd = test(dist)
- cmd.ensure_finalized()
- # The test runner calls sys.exit
- with contexts.suppress_exceptions(SystemExit):
- cmd.run()
- out, err = capfd.readouterr()
- assert out == 'Foo\n'
-@pytest.mark.xfail(
- sys.version_info < (2, 7),
- reason="No discover support for unittest on Python 2.6",
-)
-@pytest.mark.usefixtures('tmpdir_cwd', 'quiet_log')
+@pytest.mark.usefixtures('tmpdir_cwd')
def test_tests_are_run_once(capfd):
params = dict(
- name='foo',
packages=['dummy'],
)
- with open('setup.py', 'wt') as f:
- f.write('from setuptools import setup; setup(\n')
- for k, v in sorted(params.items()):
- f.write(' %s=%r,\n' % (k, v))
- f.write(')\n')
- os.makedirs('dummy')
- with open('dummy/__init__.py', 'wt'):
- pass
- with open('dummy/test_dummy.py', 'wt') as f:
- f.write(DALS(
- """
- from __future__ import print_function
- import unittest
- class TestTest(unittest.TestCase):
- def test_test(self):
- print('Foo')
- """))
+ files = {
+ 'setup.py':
+ 'from setuptools import setup; setup('
+ + ','.join(f'{name}={params[name]!r}' for name in params)
+ + ')',
+ 'dummy': {
+ '__init__.py': '',
+ 'test_dummy.py': DALS(
+ """
+ import unittest
+ class TestTest(unittest.TestCase):
+ def test_test(self):
+ print('Foo')
+ """
+ ),
+ },
+ }
+ path.build(files)
dist = Distribution(params)
dist.script_name = 'setup.py'
cmd = test(dist)
cmd.ensure_finalized()
- # The test runner calls sys.exit
- with contexts.suppress_exceptions(SystemExit):
- cmd.run()
+ cmd.run()
out, err = capfd.readouterr()
- assert out == 'Foo\n'
+ assert out.endswith('Foo\n')
+ assert len(out.split('Foo')) == 2
diff --git a/setuptools/tests/test_upload.py b/setuptools/tests/test_upload.py
new file mode 100644
index 0000000..7586cb2
--- /dev/null
+++ b/setuptools/tests/test_upload.py
@@ -0,0 +1,22 @@
+from setuptools.command.upload import upload
+from setuptools.dist import Distribution
+from setuptools.errors import RemovedCommandError
+
+try:
+ from unittest import mock
+except ImportError:
+ import mock
+
+import pytest
+
+
+class TestUpload:
+ def test_upload_exception(self):
+ """Ensure that the register command has been properly removed."""
+ dist = Distribution()
+ dist.dist_files = [(mock.Mock(), mock.Mock(), mock.Mock())]
+
+ cmd = upload(dist)
+
+ with pytest.raises(RemovedCommandError):
+ cmd.run()
diff --git a/setuptools/tests/test_upload_docs.py b/setuptools/tests/test_upload_docs.py
deleted file mode 100644
index a26e32a..0000000
--- a/setuptools/tests/test_upload_docs.py
+++ /dev/null
@@ -1,71 +0,0 @@
-import os
-import zipfile
-import contextlib
-
-import pytest
-
-from setuptools.command.upload_docs import upload_docs
-from setuptools.dist import Distribution
-
-from .textwrap import DALS
-from . import contexts
-
-SETUP_PY = DALS(
- """
- from setuptools import setup
-
- setup(name='foo')
- """)
-
-
-@pytest.fixture
-def sample_project(tmpdir_cwd):
- # setup.py
- with open('setup.py', 'wt') as f:
- f.write(SETUP_PY)
-
- os.mkdir('build')
-
- # A test document.
- with open('build/index.html', 'w') as f:
- f.write("Hello world.")
-
- # An empty folder.
- os.mkdir('build/empty')
-
-
-@pytest.mark.usefixtures('sample_project')
-@pytest.mark.usefixtures('user_override')
-class TestUploadDocsTest:
- def test_create_zipfile(self):
- """
- Ensure zipfile creation handles common cases, including a folder
- containing an empty folder.
- """
-
- dist = Distribution()
-
- cmd = upload_docs(dist)
- cmd.target_dir = cmd.upload_dir = 'build'
- with contexts.tempdir() as tmp_dir:
- tmp_file = os.path.join(tmp_dir, 'foo.zip')
- zip_file = cmd.create_zipfile(tmp_file)
-
- assert zipfile.is_zipfile(tmp_file)
-
- with contextlib.closing(zipfile.ZipFile(tmp_file)) as zip_file:
- assert zip_file.namelist() == ['index.html']
-
- def test_build_multipart(self):
- data = dict(
- a="foo",
- b="bar",
- file=('file.txt', b'content'),
- )
- body, content_type = upload_docs._build_multipart(data)
- assert 'form-data' in content_type
- assert "b'" not in content_type
- assert 'b"' not in content_type
- assert isinstance(body, bytes)
- assert b'foo' in body
- assert b'content' in body
diff --git a/setuptools/tests/test_virtualenv.py b/setuptools/tests/test_virtualenv.py
index b66a311..6535854 100644
--- a/setuptools/tests/test_virtualenv.py
+++ b/setuptools/tests/test_virtualenv.py
@@ -1,94 +1,101 @@
-import glob
import os
import sys
+import subprocess
+from urllib.request import urlopen
+from urllib.error import URLError
-import pytest
-from pytest import yield_fixture
-from pytest_fixture_config import yield_requires_config
+import pathlib
-import pytest_virtualenv
+import pytest
+from . import contexts
from .textwrap import DALS
from .test_easy_install import make_nspkg_sdist
@pytest.fixture(autouse=True)
-def pytest_virtualenv_works(virtualenv):
+def pytest_virtualenv_works(venv):
"""
    pytest_virtualenv may not work. If it doesn't, skip these
tests. See #1284.
"""
- venv_prefix = virtualenv.run(
- 'python -c "import sys; print(sys.prefix)"',
- capture=True,
- ).strip()
+    venv_prefix = venv.run(["python", "-c", "import sys; print(sys.prefix)"]).strip()
if venv_prefix == sys.prefix:
pytest.skip("virtualenv is broken (see pypa/setuptools#1284)")
-@yield_requires_config(pytest_virtualenv.CONFIG, ['virtualenv_executable'])
-@yield_fixture(scope='function')
-def bare_virtualenv():
- """ Bare virtualenv (no pip/setuptools/wheel).
- """
- with pytest_virtualenv.VirtualEnv(args=(
- '--no-wheel',
- '--no-pip',
- '--no-setuptools',
- )) as venv:
- yield venv
-
-
-SOURCE_DIR = os.path.join(os.path.dirname(__file__), '../..')
-
-
-def test_clean_env_install(bare_virtualenv):
+def test_clean_env_install(venv_without_setuptools, setuptools_wheel):
"""
Check setuptools can be installed in a clean environment.
"""
- bare_virtualenv.run(' && '.join((
- 'cd {source}',
- 'python setup.py install',
- )).format(source=SOURCE_DIR))
-
-
-def test_pip_upgrade_from_source(virtualenv):
+ cmd = ["python", "-m", "pip", "install", str(setuptools_wheel)]
+ venv_without_setuptools.run(cmd)
+
+
+def access_pypi():
+ # Detect if tests are being run without connectivity
+ if not os.environ.get('NETWORK_REQUIRED', False): # pragma: nocover
+ try:
+ urlopen('https://pypi.org', timeout=1)
+ except URLError:
+ # No network, disable most of these tests
+ return False
+
+ return True
+
+
+@pytest.mark.skipif(
+ 'platform.python_implementation() == "PyPy"',
+ reason="https://github.com/pypa/setuptools/pull/2865#issuecomment-965834995",
+)
+@pytest.mark.skipif(not access_pypi(), reason="no network")
+# ^-- Even when it is not necessary to install a different version of `pip`,
+# the build process will still try to download `wheel`; see #3147 and #2986.
+@pytest.mark.parametrize(
+ 'pip_version',
+ [
+ None,
+ pytest.param('pip<20', marks=pytest.mark.xfail(reason='pypa/pip#6599')),
+ 'pip<20.1',
+ 'pip<21',
+ 'pip<22',
+ pytest.param(
+ 'https://github.com/pypa/pip/archive/main.zip',
+ marks=pytest.mark.xfail(reason='#2975'),
+ ),
+ ]
+)
+def test_pip_upgrade_from_source(pip_version, venv_without_setuptools,
+ setuptools_wheel, setuptools_sdist):
"""
Check pip can upgrade setuptools from source.
"""
- dist_dir = virtualenv.workspace
- if sys.version_info < (2, 7):
- # Python 2.6 support was dropped in wheel 0.30.0.
- virtualenv.run('pip install -U "wheel<0.30.0"')
- # Generate source distribution / wheel.
- virtualenv.run(' && '.join((
- 'cd {source}',
- 'python setup.py -q sdist -d {dist}',
- 'python setup.py -q bdist_wheel -d {dist}',
- )).format(source=SOURCE_DIR, dist=dist_dir))
- sdist = glob.glob(os.path.join(dist_dir, '*.zip'))[0]
- wheel = glob.glob(os.path.join(dist_dir, '*.whl'))[0]
- # Then update from wheel.
- virtualenv.run('pip install ' + wheel)
+    # Install pip/wheel in a venv without setuptools (as it
+    # should not be needed for bootstrapping from source)
+ venv = venv_without_setuptools
+ venv.run(["pip", "install", "-U", "wheel"])
+ if pip_version is not None:
+ venv.run(["python", "-m", "pip", "install", "-U", pip_version, "--retries=1"])
+ with pytest.raises(subprocess.CalledProcessError):
+ # Meta-test to make sure setuptools is not installed
+ venv.run(["python", "-c", "import setuptools"])
+
+ # Then install from wheel.
+ venv.run(["pip", "install", str(setuptools_wheel)])
# And finally try to upgrade from source.
- virtualenv.run('pip install --no-cache-dir --upgrade ' + sdist)
+ venv.run(["pip", "install", "--no-cache-dir", "--upgrade", str(setuptools_sdist)])
-def test_test_command_install_requirements(bare_virtualenv, tmpdir):
+def _check_test_command_install_requirements(venv, tmpdir):
"""
Check the test command will install all required dependencies.
"""
- bare_virtualenv.run(' && '.join((
- 'cd {source}',
- 'python setup.py develop',
- )).format(source=SOURCE_DIR))
-
def sdist(distname, version):
dist_path = tmpdir.join('%s-%s.tar.gz' % (distname, version))
make_nspkg_sdist(str(dist_path), distname, version)
return dist_path
dependency_links = [
- str(dist_path)
+ pathlib.Path(str(dist_path)).as_uri()
for dist_path in (
sdist('foobar', '2.4'),
sdist('bits', '4.2'),
@@ -131,9 +138,24 @@ def test_test_command_install_requirements(bare_virtualenv, tmpdir):
open('success', 'w').close()
'''))
- # Run test command for test package.
- bare_virtualenv.run(' && '.join((
- 'cd {tmpdir}',
- 'python setup.py test -s test',
- )).format(tmpdir=tmpdir))
+
+ cmd = ["python", 'setup.py', 'test', '-s', 'test']
+ venv.run(cmd, cwd=str(tmpdir))
assert tmpdir.join('success').check()
+
+
+def test_test_command_install_requirements(venv, tmpdir, tmpdir_cwd):
+ # Ensure pip/wheel packages are installed.
+ venv.run(["python", "-c", "__import__('pkg_resources').require(['pip', 'wheel'])"])
+ # disable index URL so bits and bobs aren't requested from PyPI
+ with contexts.environment(PYTHONPATH=None, PIP_NO_INDEX="1"):
+ _check_test_command_install_requirements(venv, tmpdir)
+
+
+def test_no_missing_dependencies(bare_venv, request):
+ """
+ Quick and dirty test to ensure all external dependencies are vendored.
+ """
+ setuptools_dir = request.config.rootdir
+ for command in ('upload',): # sorted(distutils.command.__all__):
+ bare_venv.run(['python', 'setup.py', command, '-h'], cwd=setuptools_dir)
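# Illustrative note (not part of the upstream patch): the dependency_links entries
# in _check_test_command_install_requirements above are now built as file:// URIs
# rather than bare filesystem paths; for example (absolute paths only):
import pathlib
pathlib.Path('/tmp/foobar-2.4.tar.gz').as_uri()  # -> 'file:///tmp/foobar-2.4.tar.gz'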
diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py
index 150ac4c..a15c3a4 100644
--- a/setuptools/tests/test_wheel.py
+++ b/setuptools/tests/test_wheel.py
@@ -9,16 +9,20 @@ import contextlib
import glob
import inspect
import os
+import shutil
import subprocess
import sys
+import zipfile
import pytest
+from jaraco import path
from pkg_resources import Distribution, PathMetadata, PY_MAJOR
+from setuptools.extern.packaging.utils import canonicalize_name
+from setuptools.extern.packaging.tags import parse_tag
from setuptools.wheel import Wheel
from .contexts import tempdir
-from .files import build_files
from .textwrap import DALS
@@ -58,6 +62,7 @@ WHEEL_INFO_TESTS = (
}),
)
+
@pytest.mark.parametrize(
('filename', 'info'), WHEEL_INFO_TESTS,
ids=[t[0] for t in WHEEL_INFO_TESTS]
@@ -86,7 +91,7 @@ def build_wheel(extra_file_defs=None, **kwargs):
if extra_file_defs:
file_defs.update(extra_file_defs)
with tempdir() as source_dir:
- build_files(file_defs, source_dir)
+ path.build(file_defs, source_dir)
subprocess.check_call((sys.executable, 'setup.py',
'-q', 'bdist_wheel'), cwd=source_dir)
yield glob.glob(os.path.join(source_dir, 'dist', '*.whl'))[0]
@@ -118,11 +123,12 @@ def flatten_tree(tree):
def format_install_tree(tree):
- return {x.format(
- py_version=PY_MAJOR,
- platform=get_platform(),
- shlib_ext=get_config_var('EXT_SUFFIX') or get_config_var('SO'))
- for x in tree}
+ return {
+ x.format(
+ py_version=PY_MAJOR,
+ platform=get_platform(),
+ shlib_ext=get_config_var('EXT_SUFFIX') or get_config_var('SO'))
+ for x in tree}
def _check_wheel_install(filename, install_dir, install_tree_includes,
@@ -142,10 +148,11 @@ def _check_wheel_install(filename, install_dir, install_tree_includes,
if requires_txt is None:
assert not dist.has_metadata('requires.txt')
else:
+ # Order must match to ensure reproducibility.
assert requires_txt == dist.get_metadata('requires.txt').lstrip()
-class Record(object):
+class Record:
def __init__(self, id, **kwargs):
self._id = id
@@ -414,6 +421,38 @@ WHEEL_INSTALL_TESTS = (
),
dict(
+ id='requires_ensure_order',
+ install_requires='''
+ foo
+ bar
+ baz
+ qux
+ ''',
+ extras_require={
+ 'extra': '''
+ foobar>3
+ barbaz>4
+ bazqux>5
+ quxzap>6
+ ''',
+ },
+ requires_txt=DALS(
+ '''
+ foo
+ bar
+ baz
+ qux
+
+ [extra]
+ foobar>3
+ barbaz>4
+ bazqux>5
+ quxzap>6
+ '''
+ ),
+ ),
+
+ dict(
id='namespace_package',
file_defs={
'foo': {
@@ -445,6 +484,35 @@ WHEEL_INSTALL_TESTS = (
),
dict(
+ id='empty_namespace_package',
+ file_defs={
+ 'foobar': {
+ '__init__.py':
+ "__import__('pkg_resources').declare_namespace(__name__)",
+ },
+ },
+ setup_kwargs=dict(
+ namespace_packages=['foobar'],
+ packages=['foobar'],
+ ),
+ install_tree=flatten_tree({
+ 'foo-1.0-py{py_version}.egg': [
+ 'foo-1.0-py{py_version}-nspkg.pth',
+ {'EGG-INFO': [
+ 'PKG-INFO',
+ 'RECORD',
+ 'WHEEL',
+ 'namespace_packages.txt',
+ 'top_level.txt',
+ ]},
+ {'foobar': [
+ '__init__.py',
+ ]},
+ ]
+ }),
+ ),
+
+ dict(
id='data_in_package',
file_defs={
'foo': {
@@ -482,6 +550,7 @@ WHEEL_INSTALL_TESTS = (
)
+
@pytest.mark.parametrize(
'params', WHEEL_INSTALL_TESTS,
ids=list(params['id'] for params in WHEEL_INSTALL_TESTS),
@@ -506,3 +575,42 @@ def test_wheel_install(params):
_check_wheel_install(filename, install_dir,
install_tree, project_name,
version, requires_txt)
+
+
+def test_wheel_install_pep_503():
+ project_name = 'Foo_Bar' # PEP 503 canonicalized name is "foo-bar"
+ version = '1.0'
+ with build_wheel(
+ name=project_name,
+ version=version,
+ ) as filename, tempdir() as install_dir:
+ new_filename = filename.replace(project_name,
+ canonicalize_name(project_name))
+ shutil.move(filename, new_filename)
+ _check_wheel_install(new_filename, install_dir, None,
+ canonicalize_name(project_name),
+ version, None)
+
+
+def test_wheel_no_dist_dir():
+ project_name = 'nodistinfo'
+ version = '1.0'
+ wheel_name = '{0}-{1}-py2.py3-none-any.whl'.format(project_name, version)
+ with tempdir() as source_dir:
+ wheel_path = os.path.join(source_dir, wheel_name)
+ # create an empty zip file
+ zipfile.ZipFile(wheel_path, 'w').close()
+ with tempdir() as install_dir:
+ with pytest.raises(ValueError):
+ _check_wheel_install(wheel_path, install_dir, None,
+ project_name,
+ version, None)
+
+
+def test_wheel_is_compatible(monkeypatch):
+ def sys_tags():
+ for t in parse_tag('cp36-cp36m-manylinux1_x86_64'):
+ yield t
+ monkeypatch.setattr('setuptools.wheel.sys_tags', sys_tags)
+ assert Wheel(
+ 'onnxruntime-0.1.2-cp36-cp36m-manylinux1_x86_64.whl').is_compatible()
diff --git a/setuptools/tests/test_windows_wrappers.py b/setuptools/tests/test_windows_wrappers.py
index d2871c0..8ac9bd0 100644
--- a/setuptools/tests/test_windows_wrappers.py
+++ b/setuptools/tests/test_windows_wrappers.py
@@ -12,9 +12,8 @@ the script they are to wrap and with the same name as the script they
are to wrap.
"""
-from __future__ import absolute_import
-
import sys
+import platform
import textwrap
import subprocess
@@ -53,10 +52,20 @@ class WrapperTester:
f.write(w)
+def win_launcher_exe(prefix):
+ """ A simple routine to select launcher script based on platform."""
+ assert prefix in ('cli', 'gui')
+ if platform.machine() == "ARM64":
+ return "{}-arm64.exe".format(prefix)
+ else:
+ return "{}-32.exe".format(prefix)
+
+
class TestCLI(WrapperTester):
script_name = 'foo-script.py'
- wrapper_source = 'cli-32.exe'
wrapper_name = 'foo.exe'
+ wrapper_source = win_launcher_exe('cli')
+
script_tmpl = textwrap.dedent("""
#!%(python_exe)s
import sys
@@ -97,7 +106,8 @@ class TestCLI(WrapperTester):
'arg 4\\',
'arg5 a\\\\b',
]
- proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
+ proc = subprocess.Popen(
+ cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
stdout, stderr = proc.communicate('hello\nworld\n'.encode('ascii'))
actual = stdout.decode('ascii').replace('\r\n', '\n')
expected = textwrap.dedent(r"""
@@ -134,7 +144,11 @@ class TestCLI(WrapperTester):
with (tmpdir / 'foo-script.py').open('w') as f:
f.write(self.prep_script(tmpl))
cmd = [str(tmpdir / 'foo.exe')]
- proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
+ proc = subprocess.Popen(
+ cmd,
+ stdout=subprocess.PIPE,
+ stdin=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
stdout, stderr = proc.communicate()
actual = stdout.decode('ascii').replace('\r\n', '\n')
expected = textwrap.dedent(r"""
@@ -152,7 +166,7 @@ class TestGUI(WrapperTester):
-----------------------
"""
script_name = 'bar-script.pyw'
- wrapper_source = 'gui-32.exe'
+ wrapper_source = win_launcher_exe('gui')
wrapper_name = 'bar.exe'
script_tmpl = textwrap.dedent("""
@@ -164,7 +178,7 @@ class TestGUI(WrapperTester):
""").strip()
def test_basic(self, tmpdir):
- """Test the GUI version with the simple scipt, bar-script.py"""
+ """Test the GUI version with the simple script, bar-script.py"""
self.create_script(tmpdir)
cmd = [
@@ -172,7 +186,9 @@ class TestGUI(WrapperTester):
str(tmpdir / 'test_output.txt'),
'Test Argument',
]
- proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
+ proc = subprocess.Popen(
+ cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
stdout, stderr = proc.communicate()
assert not stdout
assert not stderr
diff --git a/setuptools/tests/text.py b/setuptools/tests/text.py
index ad2c624..e05cc63 100644
--- a/setuptools/tests/text.py
+++ b/setuptools/tests/text.py
@@ -1,8 +1,3 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import unicode_literals
-
-
class Filenames:
unicode = 'smörbröd.py'
latin_1 = unicode.encode('latin-1')
diff --git a/setuptools/tests/textwrap.py b/setuptools/tests/textwrap.py
index 5cd9e5b..5e39618 100644
--- a/setuptools/tests/textwrap.py
+++ b/setuptools/tests/textwrap.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import textwrap
diff --git a/setuptools/unicode_utils.py b/setuptools/unicode_utils.py
index 7c63efd..e84e65e 100644
--- a/setuptools/unicode_utils.py
+++ b/setuptools/unicode_utils.py
@@ -1,12 +1,10 @@
import unicodedata
import sys
-from setuptools.extern import six
-
# HFS Plus uses decomposed UTF-8
def decompose(path):
- if isinstance(path, six.text_type):
+ if isinstance(path, str):
return unicodedata.normalize('NFD', path)
try:
path = path.decode('utf-8')
@@ -23,7 +21,7 @@ def filesys_decode(path):
NONE when no expected encoding works
"""
- if isinstance(path, six.text_type):
+ if isinstance(path, str):
return path
fs_enc = sys.getfilesystemencoding() or 'utf-8'
diff --git a/setuptools/wheel.py b/setuptools/wheel.py
index 37dfa53..9819e8b 100644
--- a/setuptools/wheel.py
+++ b/setuptools/wheel.py
@@ -1,16 +1,19 @@
-'''Wheels support.'''
+"""Wheels support."""
from distutils.util import get_platform
+from distutils import log
import email
import itertools
import os
+import posixpath
import re
import zipfile
-from pkg_resources import Distribution, PathMetadata, parse_version
-from setuptools.extern.six import PY3
-from setuptools import Distribution as SetuptoolsDistribution
-from setuptools import pep425tags
+import pkg_resources
+import setuptools
+from pkg_resources import parse_version
+from setuptools.extern.packaging.tags import sys_tags
+from setuptools.extern.packaging.utils import canonicalize_name
from setuptools.command.egg_info import write_requirements
@@ -18,14 +21,10 @@ WHEEL_NAME = re.compile(
r"""^(?P<project_name>.+?)-(?P<version>\d.*?)
((-(?P<build>\d.*?))?-(?P<py_version>.+?)-(?P<abi>.+?)-(?P<platform>.+?)
)\.whl$""",
-re.VERBOSE).match
+ re.VERBOSE).match
-NAMESPACE_PACKAGE_INIT = '''\
-try:
- __import__('pkg_resources').declare_namespace(__name__)
-except ImportError:
- __path__ = __import__('pkgutil').extend_path(__path__, __name__)
-'''
+NAMESPACE_PACKAGE_INIT = \
+ "__import__('pkg_resources').declare_namespace(__name__)\n"
def unpack(src_dir, dst_dir):
@@ -50,7 +49,7 @@ def unpack(src_dir, dst_dir):
os.rmdir(dirpath)
-class Wheel(object):
+class Wheel:
def __init__(self, filename):
match = WHEEL_NAME(os.path.basename(filename))
@@ -62,102 +61,153 @@ class Wheel(object):
def tags(self):
'''List tags (py_version, abi, platform) supported by this wheel.'''
- return itertools.product(self.py_version.split('.'),
- self.abi.split('.'),
- self.platform.split('.'))
+ return itertools.product(
+ self.py_version.split('.'),
+ self.abi.split('.'),
+ self.platform.split('.'),
+ )
def is_compatible(self):
        '''Is the wheel compatible with the current platform?'''
- supported_tags = pep425tags.get_supported()
+ supported_tags = set(
+ (t.interpreter, t.abi, t.platform) for t in sys_tags())
return next((True for t in self.tags() if t in supported_tags), False)
def egg_name(self):
- return Distribution(
+ return pkg_resources.Distribution(
project_name=self.project_name, version=self.version,
platform=(None if self.platform == 'any' else get_platform()),
).egg_name() + '.egg'
+ def get_dist_info(self, zf):
+ # find the correct name of the .dist-info dir in the wheel file
+ for member in zf.namelist():
+ dirname = posixpath.dirname(member)
+ if (dirname.endswith('.dist-info') and
+ canonicalize_name(dirname).startswith(
+ canonicalize_name(self.project_name))):
+ return dirname
+ raise ValueError("unsupported wheel format. .dist-info not found")
+
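# Illustrative sketch (not part of the upstream patch): get_dist_info() above
# matches the wheel's .dist-info directory through PEP 503 name canonicalization,
# so a wheel renamed to its normalized form (see test_wheel_install_pep_503)
# still resolves:
from setuptools.extern.packaging.utils import canonicalize_name
canonicalize_name('Foo_Bar')  # -> 'foo-bar'
canonicalize_name('foo_bar-1.0.dist-info').startswith(canonicalize_name('Foo_Bar'))  # -> True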
def install_as_egg(self, destination_eggdir):
'''Install wheel as an egg directory.'''
with zipfile.ZipFile(self.filename) as zf:
- dist_basename = '%s-%s' % (self.project_name, self.version)
- dist_info = '%s.dist-info' % dist_basename
- dist_data = '%s.data' % dist_basename
- def get_metadata(name):
- with zf.open('%s/%s' % (dist_info, name)) as fp:
- value = fp.read().decode('utf-8') if PY3 else fp.read()
- return email.parser.Parser().parsestr(value)
- wheel_metadata = get_metadata('WHEEL')
- dist_metadata = get_metadata('METADATA')
- # Check wheel format version is supported.
- wheel_version = parse_version(wheel_metadata.get('Wheel-Version'))
- if not parse_version('1.0') <= wheel_version < parse_version('2.0dev0'):
- raise ValueError('unsupported wheel format version: %s' % wheel_version)
- # Extract to target directory.
- os.mkdir(destination_eggdir)
- zf.extractall(destination_eggdir)
- # Convert metadata.
- dist_info = os.path.join(destination_eggdir, dist_info)
- dist = Distribution.from_location(
- destination_eggdir, dist_info,
- metadata=PathMetadata(destination_eggdir, dist_info)
- )
- # Note: we need to evaluate and strip markers now,
- # as we can't easily convert back from the syntax:
- # foobar; "linux" in sys_platform and extra == 'test'
- def raw_req(req):
- req.marker = None
- return str(req)
- install_requires = list(sorted(map(raw_req, dist.requires())))
- extras_require = {
- extra: list(sorted(
- req
- for req in map(raw_req, dist.requires((extra,)))
- if req not in install_requires
- ))
- for extra in dist.extras
- }
- egg_info = os.path.join(destination_eggdir, 'EGG-INFO')
- os.rename(dist_info, egg_info)
- os.rename(os.path.join(egg_info, 'METADATA'),
- os.path.join(egg_info, 'PKG-INFO'))
- setup_dist = SetuptoolsDistribution(attrs=dict(
+ self._install_as_egg(destination_eggdir, zf)
+
+ def _install_as_egg(self, destination_eggdir, zf):
+ dist_basename = '%s-%s' % (self.project_name, self.version)
+ dist_info = self.get_dist_info(zf)
+ dist_data = '%s.data' % dist_basename
+ egg_info = os.path.join(destination_eggdir, 'EGG-INFO')
+
+ self._convert_metadata(zf, destination_eggdir, dist_info, egg_info)
+ self._move_data_entries(destination_eggdir, dist_data)
+ self._fix_namespace_packages(egg_info, destination_eggdir)
+
+ @staticmethod
+ def _convert_metadata(zf, destination_eggdir, dist_info, egg_info):
+ def get_metadata(name):
+ with zf.open(posixpath.join(dist_info, name)) as fp:
+ value = fp.read().decode('utf-8')
+ return email.parser.Parser().parsestr(value)
+
+ wheel_metadata = get_metadata('WHEEL')
+ # Check wheel format version is supported.
+ wheel_version = parse_version(wheel_metadata.get('Wheel-Version'))
+ wheel_v1 = (
+ parse_version('1.0') <= wheel_version < parse_version('2.0dev0')
+ )
+ if not wheel_v1:
+ raise ValueError(
+ 'unsupported wheel format version: %s' % wheel_version)
+ # Extract to target directory.
+ os.mkdir(destination_eggdir)
+ zf.extractall(destination_eggdir)
+ # Convert metadata.
+ dist_info = os.path.join(destination_eggdir, dist_info)
+ dist = pkg_resources.Distribution.from_location(
+ destination_eggdir, dist_info,
+ metadata=pkg_resources.PathMetadata(destination_eggdir, dist_info),
+ )
+
+ # Note: Evaluate and strip markers now,
+ # as it's difficult to convert back from the syntax:
+ # foobar; "linux" in sys_platform and extra == 'test'
+ def raw_req(req):
+ req.marker = None
+ return str(req)
+ install_requires = list(map(raw_req, dist.requires()))
+ extras_require = {
+ extra: [
+ req
+ for req in map(raw_req, dist.requires((extra,)))
+ if req not in install_requires
+ ]
+ for extra in dist.extras
+ }
+ os.rename(dist_info, egg_info)
+ os.rename(
+ os.path.join(egg_info, 'METADATA'),
+ os.path.join(egg_info, 'PKG-INFO'),
+ )
+ setup_dist = setuptools.Distribution(
+ attrs=dict(
install_requires=install_requires,
extras_require=extras_require,
- ))
- write_requirements(setup_dist.get_command_obj('egg_info'),
- None, os.path.join(egg_info, 'requires.txt'))
- # Move data entries to their correct location.
- dist_data = os.path.join(destination_eggdir, dist_data)
- dist_data_scripts = os.path.join(dist_data, 'scripts')
- if os.path.exists(dist_data_scripts):
- egg_info_scripts = os.path.join(destination_eggdir,
- 'EGG-INFO', 'scripts')
- os.mkdir(egg_info_scripts)
- for entry in os.listdir(dist_data_scripts):
- # Remove bytecode, as it's not properly handled
- # during easy_install scripts install phase.
- if entry.endswith('.pyc'):
- os.unlink(os.path.join(dist_data_scripts, entry))
- else:
- os.rename(os.path.join(dist_data_scripts, entry),
- os.path.join(egg_info_scripts, entry))
- os.rmdir(dist_data_scripts)
- for subdir in filter(os.path.exists, (
- os.path.join(dist_data, d)
- for d in ('data', 'headers', 'purelib', 'platlib')
- )):
- unpack(subdir, destination_eggdir)
- if os.path.exists(dist_data):
- os.rmdir(dist_data)
- # Fix namespace packages.
- namespace_packages = os.path.join(egg_info, 'namespace_packages.txt')
- if os.path.exists(namespace_packages):
- with open(namespace_packages) as fp:
- namespace_packages = fp.read().split()
- for mod in namespace_packages:
- mod_dir = os.path.join(destination_eggdir, *mod.split('.'))
- mod_init = os.path.join(mod_dir, '__init__.py')
- if os.path.exists(mod_dir) and not os.path.exists(mod_init):
- with open(mod_init, 'w') as fp:
- fp.write(NAMESPACE_PACKAGE_INIT)
+ ),
+ )
+ # Temporarily disable info traces.
+ log_threshold = log._global_log.threshold
+ log.set_threshold(log.WARN)
+ try:
+ write_requirements(
+ setup_dist.get_command_obj('egg_info'),
+ None,
+ os.path.join(egg_info, 'requires.txt'),
+ )
+ finally:
+ log.set_threshold(log_threshold)
+
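# Illustrative sketch (not part of the upstream patch; assumes the vendored
# packaging API at setuptools.extern.packaging.requirements): raw_req() above
# drops environment markers before requirements are written to requires.txt:
from setuptools.extern.packaging.requirements import Requirement
req = Requirement('foobar; sys_platform == "linux"')
req.marker = None
str(req)  # -> 'foobar'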
+ @staticmethod
+ def _move_data_entries(destination_eggdir, dist_data):
+ """Move data entries to their correct location."""
+ dist_data = os.path.join(destination_eggdir, dist_data)
+ dist_data_scripts = os.path.join(dist_data, 'scripts')
+ if os.path.exists(dist_data_scripts):
+ egg_info_scripts = os.path.join(
+ destination_eggdir, 'EGG-INFO', 'scripts')
+ os.mkdir(egg_info_scripts)
+ for entry in os.listdir(dist_data_scripts):
+ # Remove bytecode, as it's not properly handled
+ # during easy_install scripts install phase.
+ if entry.endswith('.pyc'):
+ os.unlink(os.path.join(dist_data_scripts, entry))
+ else:
+ os.rename(
+ os.path.join(dist_data_scripts, entry),
+ os.path.join(egg_info_scripts, entry),
+ )
+ os.rmdir(dist_data_scripts)
+ for subdir in filter(os.path.exists, (
+ os.path.join(dist_data, d)
+ for d in ('data', 'headers', 'purelib', 'platlib')
+ )):
+ unpack(subdir, destination_eggdir)
+ if os.path.exists(dist_data):
+ os.rmdir(dist_data)
+
+ @staticmethod
+ def _fix_namespace_packages(egg_info, destination_eggdir):
+ namespace_packages = os.path.join(
+ egg_info, 'namespace_packages.txt')
+ if os.path.exists(namespace_packages):
+ with open(namespace_packages) as fp:
+ namespace_packages = fp.read().split()
+ for mod in namespace_packages:
+ mod_dir = os.path.join(destination_eggdir, *mod.split('.'))
+ mod_init = os.path.join(mod_dir, '__init__.py')
+ if not os.path.exists(mod_dir):
+ os.mkdir(mod_dir)
+ if not os.path.exists(mod_init):
+ with open(mod_init, 'w') as fp:
+ fp.write(NAMESPACE_PACKAGE_INIT)
diff --git a/setuptools/windows_support.py b/setuptools/windows_support.py
index cb977cf..1ca64fb 100644
--- a/setuptools/windows_support.py
+++ b/setuptools/windows_support.py
@@ -1,5 +1,4 @@
import platform
-import ctypes
def windows_only(func):
@@ -17,6 +16,7 @@ def hide_file(path):
`path` must be text.
"""
+ import ctypes
__import__('ctypes.wintypes')
SetFileAttributes = ctypes.windll.kernel32.SetFileAttributesW
SetFileAttributes.argtypes = ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD
diff --git a/tests/manual_test.py b/tests/manual_test.py
deleted file mode 100644
index e5aaf17..0000000
--- a/tests/manual_test.py
+++ /dev/null
@@ -1,98 +0,0 @@
-#!/usr/bin/env python
-
-import sys
-import os
-import shutil
-import tempfile
-import subprocess
-from distutils.command.install import INSTALL_SCHEMES
-from string import Template
-
-from six.moves import urllib
-
-
-def _system_call(*args):
- assert subprocess.call(args) == 0
-
-
-def tempdir(func):
- def _tempdir(*args, **kwargs):
- test_dir = tempfile.mkdtemp()
- old_dir = os.getcwd()
- os.chdir(test_dir)
- try:
- return func(*args, **kwargs)
- finally:
- os.chdir(old_dir)
- shutil.rmtree(test_dir)
-
- return _tempdir
-
-
-SIMPLE_BUILDOUT = """\
-[buildout]
-
-parts = eggs
-
-[eggs]
-recipe = zc.recipe.egg
-
-eggs =
- extensions
-"""
-
-BOOTSTRAP = 'http://downloads.buildout.org/1/bootstrap.py'
-PYVER = sys.version.split()[0][:3]
-
-_VARS = {'base': '.',
- 'py_version_short': PYVER}
-
-scheme = 'nt' if sys.platform == 'win32' else 'unix_prefix'
-PURELIB = INSTALL_SCHEMES[scheme]['purelib']
-
-
-@tempdir
-def test_virtualenv():
- """virtualenv with setuptools"""
- purelib = os.path.abspath(Template(PURELIB).substitute(**_VARS))
- _system_call('virtualenv', '--no-site-packages', '.')
- _system_call('bin/easy_install', 'setuptools==dev')
- # linux specific
- site_pkg = os.listdir(purelib)
- site_pkg.sort()
- assert 'setuptools' in site_pkg[0]
- easy_install = os.path.join(purelib, 'easy-install.pth')
- with open(easy_install) as f:
- res = f.read()
- assert 'setuptools' in res
-
-
-@tempdir
-def test_full():
- """virtualenv + pip + buildout"""
- _system_call('virtualenv', '--no-site-packages', '.')
- _system_call('bin/easy_install', '-q', 'setuptools==dev')
- _system_call('bin/easy_install', '-qU', 'setuptools==dev')
- _system_call('bin/easy_install', '-q', 'pip')
- _system_call('bin/pip', 'install', '-q', 'zc.buildout')
-
- with open('buildout.cfg', 'w') as f:
- f.write(SIMPLE_BUILDOUT)
-
- with open('bootstrap.py', 'w') as f:
- f.write(urllib.request.urlopen(BOOTSTRAP).read())
-
- _system_call('bin/python', 'bootstrap.py')
- _system_call('bin/buildout', '-q')
- eggs = os.listdir('eggs')
- eggs.sort()
- assert len(eggs) == 3
- assert eggs[1].startswith('setuptools')
- del eggs[1]
- assert eggs == ['extensions-0.3-py2.6.egg',
- 'zc.recipe.egg-1.2.2-py2.6.egg']
-
-
-if __name__ == '__main__':
- test_virtualenv()
- test_full()
diff --git a/tests/test_pypi.py b/tests/test_pypi.py
deleted file mode 100644
index b3425e5..0000000
--- a/tests/test_pypi.py
+++ /dev/null
@@ -1,82 +0,0 @@
-import os
-import subprocess
-
-import virtualenv
-from setuptools.extern.six.moves import http_client
-from setuptools.extern.six.moves import xmlrpc_client
-
-TOP = 200
-PYPI_HOSTNAME = 'pypi.python.org'
-
-
-def rpc_pypi(method, *args):
- """Call an XML-RPC method on the Pypi server."""
- conn = http_client.HTTPSConnection(PYPI_HOSTNAME)
- headers = {'Content-Type': 'text/xml'}
- payload = xmlrpc_client.dumps(args, method)
-
- conn.request("POST", "/pypi", payload, headers)
- response = conn.getresponse()
- if response.status == 200:
- result = xmlrpc_client.loads(response.read())[0][0]
- return result
- else:
- raise RuntimeError("Unable to download the list of top "
- "packages from Pypi.")
-
-
-def get_top_packages(limit):
- """Collect the name of the top packages on Pypi."""
- packages = rpc_pypi('top_packages')
- return packages[:limit]
-
-
-def _package_install(package_name, tmp_dir=None, local_setuptools=True):
- """Try to install a package and return the exit status.
-
- This function creates a virtual environment, install setuptools using pip
- and then install the required package. If local_setuptools is True, it
- will install the local version of setuptools.
- """
- package_dir = os.path.join(tmp_dir, "test_%s" % package_name)
- if not local_setuptools:
- package_dir = package_dir + "_baseline"
-
- virtualenv.create_environment(package_dir)
-
- pip_path = os.path.join(package_dir, "bin", "pip")
- if local_setuptools:
- subprocess.check_call([pip_path, "install", "."])
- returncode = subprocess.call([pip_path, "install", package_name])
- return returncode
-
-
-def test_package_install(package_name, tmpdir):
- """Test to verify the outcome of installing a package.
-
- This test compare that the return code when installing a package is the
- same as with the current stable version of setuptools.
- """
- new_exit_status = _package_install(package_name, tmp_dir=str(tmpdir))
- if new_exit_status:
- print("Installation failed, testing against stable setuptools",
- package_name)
- old_exit_status = _package_install(package_name, tmp_dir=str(tmpdir),
- local_setuptools=False)
- assert new_exit_status == old_exit_status
-
-
-def pytest_generate_tests(metafunc):
- """Generator function for test_package_install.
-
- This function will generate calls to test_package_install. If a package
- list has been specified on the command line, it will be used. Otherwise,
- Pypi will be queried to get the current list of top packages.
- """
- if "package_name" in metafunc.fixturenames:
- if not metafunc.config.option.package_name:
- packages = get_top_packages(TOP)
- packages = [name for name, downloads in packages]
- else:
- packages = metafunc.config.option.package_name
- metafunc.parametrize("package_name", packages)
diff --git a/tools/finalize.py b/tools/finalize.py
new file mode 100644
index 0000000..5a4df5d
--- /dev/null
+++ b/tools/finalize.py
@@ -0,0 +1,102 @@
+"""
+Finalize the repo for a release. Invokes towncrier and bumpversion.
+"""
+
+__requires__ = ['bump2version', 'towncrier']
+
+
+import subprocess
+import pathlib
+import re
+import sys
+
+
+def release_kind():
+ """
+ Determine which release to make based on the files in the
+ changelog.
+ """
+ # use min here as 'major' < 'minor' < 'patch'
+ return min(
+ 'major' if 'breaking' in file.name else
+ 'minor' if 'change' in file.name else
+ 'patch'
+ for file in pathlib.Path('changelog.d').iterdir()
+ )
+
+
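# Illustrative example (not part of the upstream patch; fragment names are
# hypothetical): because 'major' < 'minor' < 'patch' sorts alphabetically, min()
# lets a single "breaking" fragment force a major release:
names = ['3093.change.rst', '3098.breaking.rst']
min(
    'major' if 'breaking' in name else
    'minor' if 'change' in name else
    'patch'
    for name in names
)  # -> 'major'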
+bump_version_command = [
+ sys.executable,
+ '-m', 'bumpversion',
+ release_kind(),
+]
+
+
+def get_version():
+ cmd = bump_version_command + ['--dry-run', '--verbose']
+ out = subprocess.check_output(cmd, text=True)
+ return re.search('^new_version=(.*)', out, re.MULTILINE).group(1)
+
+
+def update_changelog():
+ cmd = [
+ sys.executable, '-m',
+ 'towncrier',
+ 'build',
+ '--version', get_version(),
+ '--yes',
+ ]
+ subprocess.check_call(cmd)
+ _repair_changelog()
+
+
+def _repair_changelog():
+ """
+ Workaround for #2666
+ """
+ changelog_fn = pathlib.Path('CHANGES.rst')
+ changelog = changelog_fn.read_text()
+ fixed = re.sub(r'^(v[0-9.]+)v[0-9.]+$', r'\1', changelog, flags=re.M)
+ changelog_fn.write_text(fixed)
+ subprocess.check_output(['git', 'add', changelog_fn])
+
+
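# Illustrative example (not part of the upstream patch): the substitution above
# collapses a doubled version heading on a single line (the symptom tracked in #2666):
import re
re.sub(r'^(v[0-9.]+)v[0-9.]+$', r'\1', 'v61.1.0v61.1.0', flags=re.M)  # -> 'v61.1.0'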
+def bump_version():
+ cmd = bump_version_command + ['--allow-dirty']
+ subprocess.check_call(cmd)
+
+
+def ensure_config():
+ """
+ Double-check that Git has an e-mail configured.
+ """
+ subprocess.check_output(['git', 'config', 'user.email'])
+
+
+def check_changes():
+ """
+ Verify that all of the files in changelog.d have the appropriate
+ names.
+ """
+ allowed = 'deprecation', 'breaking', 'change', 'doc', 'misc'
+ except_ = 'README.rst', '.gitignore'
+ news_fragments = (
+ file
+ for file in pathlib.Path('changelog.d').iterdir()
+ if file.name not in except_
+ )
+ unrecognized = [
+ str(file)
+ for file in news_fragments
+ if not any(f".{key}" in file.suffixes for key in allowed)
+ ]
+ if unrecognized:
+ raise ValueError(f"Some news fragments have invalid names: {unrecognized}")
+
+
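# Illustrative example (not part of the upstream patch; fragment names are
# hypothetical): a name is accepted when one of the allowed categories appears
# among its suffixes:
from pathlib import Path
allowed = ('deprecation', 'breaking', 'change', 'doc', 'misc')
any(f".{key}" in Path('3093.change.rst').suffixes for key in allowed)  # -> True
any(f".{key}" in Path('3093.bugfix.rst').suffixes for key in allowed)  # -> False (rejected)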
+if __name__ == '__main__':
+ print("Cutting release at", get_version())
+ ensure_config()
+ check_changes()
+ update_changelog()
+ bump_version()
diff --git a/tools/msvc-build-launcher-arm64.cmd b/tools/msvc-build-launcher-arm64.cmd
new file mode 100644
index 0000000..8e63506
--- /dev/null
+++ b/tools/msvc-build-launcher-arm64.cmd
@@ -0,0 +1,19 @@
+@echo off
+
+REM Build with jaraco/windows Docker image
+
+set PATH_OLD=%PATH%
+set PATH=C:\BuildTools\VC\Auxiliary\Build;%PATH_OLD%
+
+REM now for arm 64-bit
+REM Cross compile for arm64
+call VCVARSx86_arm64
+if "%ERRORLEVEL%"=="0" (
+ cl /D "GUI=0" /D "WIN32_LEAN_AND_MEAN" launcher.c /O2 /link /MACHINE:arm64 /SUBSYSTEM:CONSOLE /out:setuptools/cli-arm64.exe
+ cl /D "GUI=1" /D "WIN32_LEAN_AND_MEAN" launcher.c /O2 /link /MACHINE:arm64 /SUBSYSTEM:WINDOWS /out:setuptools/gui-arm64.exe
+) else (
+ echo Visual Studio 2019 with arm64 toolchain not installed
+)
+
+set PATH=%PATH_OLD%
+
diff --git a/msvc-build-launcher.cmd b/tools/msvc-build-launcher.cmd
index 92da290..92da290 100644
--- a/msvc-build-launcher.cmd
+++ b/tools/msvc-build-launcher.cmd
diff --git a/tools/ppc64le-patch.py b/tools/ppc64le-patch.py
new file mode 100644
index 0000000..2a8ff8e
--- /dev/null
+++ b/tools/ppc64le-patch.py
@@ -0,0 +1,28 @@
+"""
+Except on bionic, the Travis Linux base image for the PPC64LE
+platform lacks the proper permissions on the directory
+~/.cache/pip/wheels that would allow the user running the Travis
+build to install pip packages.
+TODO: is someone tracking this issue? Maybe just move to bionic?
+"""
+
+import subprocess
+import collections
+import os
+
+
+def patch():
+ env = collections.defaultdict(str, os.environ)
+ if env['TRAVIS_CPU_ARCH'] != 'ppc64le':
+ return
+ cmd = [
+ 'sudo',
+ 'chown',
+ '-Rfv',
+ '{USER}:{GROUP}'.format_map(env),
+ os.path.expanduser('~/.cache/pip/wheels'),
+ ]
+ subprocess.Popen(cmd)
+
+
+__name__ == '__main__' and patch()
diff --git a/tools/towncrier_template.rst b/tools/towncrier_template.rst
new file mode 100644
index 0000000..7f50734
--- /dev/null
+++ b/tools/towncrier_template.rst
@@ -0,0 +1,35 @@
+{% if top_line %}
+{{ top_line }}
+{{ top_underline * ((top_line)|length)}}
+{% endif %}
+{% for section, _ in sections.items() %}
+{% set underline = underlines[0] %}{% if section %}{{section}}
+{{ underline * section|length }}
+{% set underline = underlines[1] %}
+{% endif %}
+
+{% if sections[section] %}
+{% for category, val in definitions.items() if category in sections[section]%}
+
+{{ definitions[category]['name'] }}
+{{ underline * definitions[category]['name']|length }}
+{% if definitions[category]['showcontent'] %}
+{% for text, values in sections[section][category].items() %}
+* {{ values|join(', ') }}: {{ text }}
+{% endfor %}
+{% else %}
+* {{ sections[section][category]['']|join(', ') }}
+
+{% endif %}
+{% if sections[section][category]|length == 0 %}
+No significant changes.
+{% else %}
+{% endif %}
+{% endfor %}
+
+{% else %}
+No significant changes.
+
+
+{% endif %}
+{% endfor %}
diff --git a/tools/vendored.py b/tools/vendored.py
new file mode 100644
index 0000000..dc1b0c0
--- /dev/null
+++ b/tools/vendored.py
@@ -0,0 +1,187 @@
+import re
+import sys
+import string
+import subprocess
+import venv
+from tempfile import TemporaryDirectory
+
+from path import Path
+
+
+def remove_all(paths):
+ for path in paths:
+ path.rmtree() if path.isdir() else path.remove()
+
+
+def update_vendored():
+ update_pkg_resources()
+ update_setuptools()
+
+
+def rewrite_packaging(pkg_files, new_root):
+ """
+ Rewrite imports in packaging to redirect to vendored copies.
+ """
+ for file in pkg_files.glob('*.py'):
+ text = file.text()
+ text = re.sub(r' (pyparsing)', rf' {new_root}.\1', text)
+ text = text.replace(
+ 'from six.moves.urllib import parse',
+ 'from urllib import parse',
+ )
+ file.write_text(text)
+
+
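# Illustrative example (not part of the upstream patch): new_root is
# 'pkg_resources.extern' or 'setuptools.extern' depending on the caller (see
# update_pkg_resources()/update_setuptools() below), so the substitution turns a
# bare import into a vendored one:
import re
re.sub(r' (pyparsing)', r' setuptools.extern.\1', 'import pyparsing')
# -> 'import setuptools.extern.pyparsing'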
+def rewrite_jaraco_text(pkg_files, new_root):
+ """
+ Rewrite imports in jaraco.text to redirect to vendored copies.
+ """
+ for file in pkg_files.glob('*.py'):
+ text = file.read_text()
+ text = re.sub(r' (jaraco\.)', rf' {new_root}.\1', text)
+ text = re.sub(r' (importlib_resources)', rf' {new_root}.\1', text)
+ # suppress loading of lorem_ipsum; ref #3072
+ text = re.sub(r'^lorem_ipsum.*\n$', '', text, flags=re.M)
+ file.write_text(text)
+
+
+def rewrite_jaraco(pkg_files, new_root):
+ """
+ Rewrite imports in jaraco.functools to redirect to vendored copies.
+ """
+ for file in pkg_files.glob('*.py'):
+ text = file.read_text()
+ text = re.sub(r' (more_itertools)', rf' {new_root}.\1', text)
+ file.write_text(text)
+ # required for zip-packaged setuptools #3084
+ pkg_files.joinpath('__init__.py').write_text('')
+
+
+def rewrite_importlib_resources(pkg_files, new_root):
+ """
+ Rewrite imports in importlib_resources to redirect to vendored copies.
+ """
+ for file in pkg_files.glob('*.py'):
+ text = file.read_text().replace('importlib_resources.abc', '.abc')
+ text = text.replace('zipp', '..zipp')
+ file.write_text(text)
+
+
+def rewrite_importlib_metadata(pkg_files, new_root):
+ """
+ Rewrite imports in importlib_metadata to redirect to vendored copies.
+ """
+ for file in pkg_files.glob('*.py'):
+ text = file.read_text().replace('typing_extensions', '..typing_extensions')
+ text = text.replace('import zipp', 'from .. import zipp')
+ file.write_text(text)
+
+
+def rewrite_more_itertools(pkg_files: Path):
+ """
+ Defer import of concurrent.futures. Workaround for #3090.
+ """
+ more_file = pkg_files.joinpath('more.py')
+ text = more_file.read_text()
+ text = re.sub(r'^.*concurrent.futures.*?\n', '', text, flags=re.MULTILINE)
+ text = re.sub(
+ 'ThreadPoolExecutor',
+ '__import__("concurrent.futures").futures.ThreadPoolExecutor',
+ text,
+ )
+ more_file.write_text(text)
+
+
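# Illustrative note (not part of the upstream patch): the replacement text works
# because __import__ of a dotted name returns the top-level package, so the
# attribute chain below resolves the executor lazily, at call time:
__import__("concurrent.futures").futures.ThreadPoolExecutor  # is concurrent.futures.ThreadPoolExecutor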
+def rewrite_nspektr(pkg_files: Path, new_root):
+ for file in pkg_files.glob('*.py'):
+ text = file.read_text()
+ text = re.sub(r' (more_itertools)', rf' {new_root}.\1', text)
+ text = re.sub(r' (jaraco\.\w+)', rf' {new_root}.\1', text)
+ text = re.sub(r' (packaging)', rf' {new_root}.\1', text)
+ text = re.sub(r' (importlib_metadata)', rf' {new_root}.\1', text)
+ file.write_text(text)
+
+
+def clean(vendor):
+ """
+    Remove all files out of the vendor directory except the
+    metadata (as pip uninstall doesn't support -t).
+ """
+ remove_all(
+ path
+ for path in vendor.glob('*')
+ if path.basename() != 'vendored.txt'
+ )
+
+
+def install(vendor):
+ clean(vendor)
+ install_args = [
+ sys.executable,
+ '-m', 'pip',
+ 'install',
+ '-r', str(vendor / 'vendored.txt'),
+ '-t', str(vendor),
+ ]
+ subprocess.check_call(install_args)
+ (vendor / '__init__.py').write_text('')
+
+
+def update_pkg_resources():
+ vendor = Path('pkg_resources/_vendor')
+ install(vendor)
+ rewrite_packaging(vendor / 'packaging', 'pkg_resources.extern')
+ rewrite_jaraco_text(vendor / 'jaraco/text', 'pkg_resources.extern')
+ rewrite_jaraco(vendor / 'jaraco', 'pkg_resources.extern')
+ rewrite_importlib_resources(vendor / 'importlib_resources', 'pkg_resources.extern')
+ rewrite_more_itertools(vendor / "more_itertools")
+
+
+def update_setuptools():
+ vendor = Path('setuptools/_vendor')
+ install(vendor)
+ install_validate_pyproject(vendor)
+ rewrite_packaging(vendor / 'packaging', 'setuptools.extern')
+ rewrite_jaraco_text(vendor / 'jaraco/text', 'setuptools.extern')
+ rewrite_jaraco(vendor / 'jaraco', 'setuptools.extern')
+ rewrite_importlib_resources(vendor / 'importlib_resources', 'setuptools.extern')
+ rewrite_importlib_metadata(vendor / 'importlib_metadata', 'setuptools.extern')
+ rewrite_more_itertools(vendor / "more_itertools")
+ rewrite_nspektr(vendor / "nspektr", 'setuptools.extern')
+
+
+def install_validate_pyproject(vendor):
+ """``validate-pyproject`` can be vendorized to remove all dependencies"""
+ req = next(
+ (x for x in (vendor / "vendored.txt").lines() if 'validate-pyproject' in x),
+ "validate-pyproject[all]"
+ )
+
+ pkg, _, _ = req.strip(string.whitespace + "#").partition("#")
+ pkg = pkg.strip()
+
+ opts = {}
+ if sys.version_info[:2] >= (3, 10):
+ opts["ignore_cleanup_errors"] = True
+
+ with TemporaryDirectory(**opts) as tmp:
+ env_builder = venv.EnvBuilder(with_pip=True)
+ env_builder.create(tmp)
+ context = env_builder.ensure_directories(tmp)
+ venv_python = getattr(context, 'env_exec_cmd', context.env_exe)
+
+ subprocess.check_call([venv_python, "-m", "pip", "install", pkg])
+ cmd = [
+ venv_python,
+ "-m",
+ "validate_pyproject.vendoring",
+ f"--output-dir={vendor / '_validate_pyproject' !s}",
+ "--enable-plugins",
+ "setuptools",
+ "distutils",
+ "--very-verbose"
+ ]
+ subprocess.check_call(cmd)
+
+
+__name__ == '__main__' and update_vendored()
diff --git a/tox.ini b/tox.ini
index a0c4cdf..22c796f 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,41 +1,85 @@
-# Note: Run "python bootstrap.py" before running Tox, to generate metadata.
-#
-# To run Tox against all supported Python interpreters, you can set:
-#
-# export TOXENV='py27,py3{3,4,5,6},pypy,pypy3'
-
[tox]
-envlist=python
+envlist = python
+minversion = 3.2
+# https://github.com/jaraco/skeleton/issues/6
+tox_pip_extensions_ext_venv_update = true
+toxworkdir={env:TOX_WORK_DIR:.tox}
[testenv]
-deps=-rtests/requirements.txt
-setenv=COVERAGE_FILE={toxworkdir}/.coverage.{envname}
-# TODO: The passed environment variables came from copying other tox.ini files
-# These should probably be individually annotated to explain what needs them.
-passenv=APPDATA HOMEDRIVE HOMEPATH windir APPVEYOR APPVEYOR_* CI CODECOV_* TRAVIS TRAVIS_*
-commands=pytest --cov-config={toxinidir}/tox.ini --cov-report= {posargs}
-usedevelop=True
+deps =
+ # Ideally all the dependencies should be set as "extras"
+commands =
+ pytest {posargs}
+usedevelop = True
+extras = testing
+passenv =
+ SETUPTOOLS_USE_DISTUTILS
+ PRE_BUILT_SETUPTOOLS_WHEEL
+ PRE_BUILT_SETUPTOOLS_SDIST
+ TIMEOUT_BACKEND_TEST # timeout (in seconds) for test_build_meta
+ windir # required for test_pkg_resources
+ # honor git config in pytest-perf
+ HOME
+ # workaround for tox-dev/tox#2382
+ PROGRAMDATA
+ PROGRAMFILES
+ PROGRAMFILES(x86)
+
+[testenv:integration]
+deps = {[testenv]deps}
+extras = testing-integration
+passenv =
+ {[testenv]passenv}
+ DOWNLOAD_PATH
+ # workaround for tox-dev/tox#2382
+ PROGRAMDATA
+ PROGRAMFILES
+ PROGRAMFILES(x86)
+setenv =
+ PROJECT_ROOT = {toxinidir}
+commands =
+ pytest --integration {posargs:-vv --durations=10 setuptools/tests/integration}
+ # use verbose mode by default to facilitate debugging from CI logs
+[testenv:docs]
+extras =
+ docs
+ testing
+changedir = docs
+commands =
+ python -m sphinx -W --keep-going . {toxinidir}/build/html
-[testenv:coverage]
-description=Combine coverage data and create report
-deps=coverage
-skip_install=True
-changedir={toxworkdir}
-setenv=COVERAGE_FILE=.coverage
-commands=coverage erase
- coverage combine
- coverage {posargs:xml}
+[testenv:finalize]
+skip_install = True
+deps =
+ towncrier
+ bump2version
+passenv = *
+commands =
+ python tools/finalize.py
-[testenv:codecov]
-description=[Only run on CI]: Upload coverage data to codecov
-deps=codecov
-skip_install=True
-commands=codecov --file {toxworkdir}/coverage.xml
+[testenv:vendor]
+skip_install = True
+deps =
+ path
+commands =
+ python -m tools.vendored
-[coverage:run]
-source=
- pkg_resources
- setuptools
-omit=
- */_vendor/*
+[testenv:release]
+skip_install = True
+deps =
+ build
+ twine>=3
+ jaraco.develop>=7.1
+passenv =
+ TWINE_PASSWORD
+ GITHUB_TOKEN
+setenv =
+ TWINE_USERNAME = {env:TWINE_USERNAME:__token__}
+commands =
+ python -c "import shutil; shutil.rmtree('dist', ignore_errors=True)"
+ # unset tag_build and tag_date pypa/setuptools#2500
+ python setup.py egg_info -Db "" saveopts
+ python -m build
+ python -m twine upload dist/*
+ python -m jaraco.develop.create-github-release