From 5516c82c9030cbe282d1d77b7e50f3a8d1e1c9a1 Mon Sep 17 00:00:00 2001 From: Ivan Maslov Date: Mon, 3 Aug 2020 12:49:39 +0300 Subject: [PATCH] # minor fix in SessionHex if no RDP session (time lag with WEB GUI) --- .../site-packages/pip-19.0.3.dist-info/RECORD | 620 ---- .../pip-19.0.3.dist-info/entry_points.txt | 5 - .../INSTALLER | 0 .../LICENSE.txt | 0 .../pip-20.2.dist-info}/METADATA | 35 +- .../site-packages/pip-20.2.dist-info/RECORD | 751 +++++ .../WHEEL | 2 +- .../pip-20.2.dist-info/entry_points.txt | 5 + .../top_level.txt | 0 .../Lib/site-packages/pip/__init__.py | 19 +- .../Lib/site-packages/pip/__main__.py | 9 +- .../site-packages/pip/_internal/__init__.py | 83 +- .../site-packages/pip/_internal/build_env.py | 70 +- .../Lib/site-packages/pip/_internal/cache.py | 236 +- .../pip/_internal/cli/autocompletion.py | 46 +- .../pip/_internal/cli/base_command.py | 326 +- .../pip/_internal/cli/cmdoptions.py | 322 +- .../pip/_internal/cli/command_context.py | 36 + .../site-packages/pip/_internal/cli/main.py | 75 + .../pip/_internal/cli/main_parser.py | 29 +- .../site-packages/pip/_internal/cli/parser.py | 21 +- .../pip/_internal/cli/progress_bars.py | 280 ++ .../pip/_internal/cli/req_command.py | 402 +++ .../pip/_internal/cli/spinners.py | 173 + .../pip/_internal/commands/__init__.py | 155 +- .../pip/_internal/commands/cache.py | 182 ++ .../pip/_internal/commands/check.py | 24 +- .../pip/_internal/commands/completion.py | 58 +- .../pip/_internal/commands/configuration.py | 151 +- .../pip/_internal/commands/debug.py | 229 ++ .../pip/_internal/commands/download.py | 203 +- .../pip/_internal/commands/freeze.py | 31 +- .../pip/_internal/commands/hash.py | 28 +- .../pip/_internal/commands/help.py | 19 +- .../pip/_internal/commands/install.py | 826 +++-- .../pip/_internal/commands/list.py | 152 +- .../pip/_internal/commands/search.py | 66 +- .../pip/_internal/commands/show.py | 90 +- .../pip/_internal/commands/uninstall.py | 79 +- .../pip/_internal/commands/wheel.py | 212 
+- .../pip/_internal/configuration.py | 121 +- .../pip/_internal/distributions/__init__.py | 24 + .../pip/_internal/distributions/base.py | 45 + .../pip/_internal/distributions/installed.py | 24 + .../pip/_internal/distributions/sdist.py | 104 + .../pip/_internal/distributions/wheel.py | 36 + .../site-packages/pip/_internal/download.py | 971 ------ .../site-packages/pip/_internal/exceptions.py | 141 +- .../Lib/site-packages/pip/_internal/index.py | 990 ------ .../pip/_internal/index/__init__.py | 2 + .../pip/_internal/index/collector.py | 692 ++++ .../pip/_internal/index/package_finder.py | 1014 ++++++ .../site-packages/pip/_internal/locations.py | 193 +- .../Lib/site-packages/pip/_internal/main.py | 16 + .../pip/_internal/models/candidate.py | 25 +- .../pip/_internal/models/direct_url.py | 245 ++ .../pip/_internal/models/format_control.py | 29 +- .../pip/_internal/models/index.py | 3 + .../pip/_internal/models/link.py | 170 +- .../pip/_internal/models/scheme.py | 31 + .../pip/_internal/models/search_scope.py | 135 + .../pip/_internal/models/selection_prefs.py | 49 + .../pip/_internal/models/target_python.py | 120 + .../pip/_internal/models/wheel.py | 78 + .../pip/_internal/network/__init__.py | 2 + .../pip/_internal/network/auth.py | 308 ++ .../pip/_internal/network/cache.py | 79 + .../pip/_internal/network/download.py | 182 ++ .../pip/_internal/network/lazy_wheel.py | 231 ++ .../pip/_internal/network/session.py | 421 +++ .../pip/_internal/network/utils.py | 97 + .../pip/_internal/network/xmlrpc.py | 52 + .../_internal/operations/build/__init__.py | 0 .../_internal/operations/build/metadata.py | 37 + .../operations/build/metadata_legacy.py | 77 + .../pip/_internal/operations/build/wheel.py | 46 + .../operations/build/wheel_legacy.py | 115 + .../pip/_internal/operations/check.py | 27 +- .../pip/_internal/operations/freeze.py | 121 +- .../_internal/operations/install/__init__.py | 2 + .../operations/install/editable_legacy.py | 52 + 
.../_internal/operations/install/legacy.py | 130 + .../pip/_internal/operations/install/wheel.py | 830 +++++ .../pip/_internal/operations/prepare.py | 709 ++-- .../site-packages/pip/_internal/pep425tags.py | 381 --- .../site-packages/pip/_internal/pyproject.py | 39 +- .../pip/_internal/req/__init__.py | 84 +- .../pip/_internal/req/constructors.py | 347 +- .../pip/_internal/req/req_file.py | 538 ++- .../pip/_internal/req/req_install.py | 1009 +++--- .../pip/_internal/req/req_set.py | 148 +- .../pip/_internal/req/req_tracker.py | 130 +- .../pip/_internal/req/req_uninstall.py | 116 +- .../pip/_internal/resolution/__init__.py | 0 .../pip/_internal/resolution/base.py | 20 + .../_internal/resolution/legacy/__init__.py | 0 .../legacy/resolver.py} | 268 +- .../resolution/resolvelib/__init__.py | 0 .../_internal/resolution/resolvelib/base.py | 82 + .../resolution/resolvelib/candidates.py | 594 ++++ .../resolution/resolvelib/factory.py | 459 +++ .../resolution/resolvelib/provider.py | 150 + .../resolution/resolvelib/requirements.py | 137 + .../resolution/resolvelib/resolver.py | 258 ++ .../pip/_internal/self_outdated_check.py} | 153 +- .../pip/_internal/utils/appdirs.py | 264 +- .../pip/_internal/utils/compat.py | 123 +- .../pip/_internal/utils/compatibility_tags.py | 166 + .../pip/_internal/utils/datetime.py | 14 + .../pip/_internal/utils/deprecation.py | 32 +- .../pip/_internal/utils/direct_url_helpers.py | 130 + .../pip/_internal/utils/distutils_args.py | 48 + .../pip/_internal/utils/encoding.py | 20 +- .../pip/_internal/utils/entrypoints.py | 31 + .../pip/_internal/utils/filesystem.py | 196 +- .../pip/_internal/utils/filetypes.py | 16 + .../pip/_internal/utils/glibc.py | 65 +- .../pip/_internal/utils/hashes.py | 40 +- .../_internal/utils/inject_securetransport.py | 36 + .../pip/_internal/utils/logging.py | 107 +- .../site-packages/pip/_internal/utils/misc.py | 894 +++-- .../pip/_internal/utils/models.py | 6 +- .../pip/_internal/utils/packaging.py | 73 +- 
.../pip/_internal/utils/parallel.py | 107 + .../pip/_internal/utils/pkg_resources.py | 44 + .../pip/_internal/utils/setuptools_build.py | 177 +- .../pip/_internal/utils/subprocess.py | 280 ++ .../pip/_internal/utils/temp_dir.py | 193 +- .../pip/_internal/utils/typing.py | 11 +- .../site-packages/pip/_internal/utils/ui.py | 441 --- .../pip/_internal/utils/unpacking.py | 281 ++ .../site-packages/pip/_internal/utils/urls.py | 55 + .../pip/_internal/utils/virtualenv.py | 116 + .../pip/_internal/utils/wheel.py | 225 ++ .../pip/_internal/vcs/__init__.py | 547 +--- .../site-packages/pip/_internal/vcs/bazaar.py | 73 +- .../site-packages/pip/_internal/vcs/git.py | 240 +- .../pip/_internal/vcs/mercurial.py | 111 +- .../pip/_internal/vcs/subversion.py | 246 +- .../pip/_internal/vcs/versioncontrol.py | 811 +++++ .../Lib/site-packages/pip/_internal/wheel.py | 1095 ------- .../pip/_internal/wheel_builder.py | 308 ++ .../Lib/site-packages/pip/_vendor/__init__.py | 37 +- .../Lib/site-packages/pip/_vendor/appdirs.py | 45 +- .../pip/_vendor/cachecontrol/__init__.py | 2 +- .../pip/_vendor/cachecontrol/adapter.py | 2 +- .../_vendor/cachecontrol/caches/file_cache.py | 4 +- .../pip/_vendor/cachecontrol/controller.py | 11 +- .../pip/_vendor/cachecontrol/serialize.py | 4 +- .../pip/_vendor/cachecontrol/wrapper.py | 2 +- .../pip/_vendor/certifi/__init__.py | 4 +- .../pip/_vendor/certifi/__main__.py | 14 +- .../pip/_vendor/certifi/cacert.pem | 602 ++-- .../site-packages/pip/_vendor/certifi/core.py | 54 +- .../pip/_vendor/colorama/__init__.py | 2 +- .../site-packages/pip/_vendor/contextlib2.py | 518 +++ .../pip/_vendor/distlib/__init__.py | 4 +- .../pip/_vendor/distlib/_backport/shutil.py | 9 +- .../_vendor/distlib/_backport/sysconfig.py | 8 +- .../pip/_vendor/distlib/compat.py | 2 +- .../pip/_vendor/distlib/database.py | 4 +- .../pip/_vendor/distlib/index.py | 2 +- .../pip/_vendor/distlib/locators.py | 27 +- .../pip/_vendor/distlib/metadata.py | 130 +- .../pip/_vendor/distlib/scripts.py | 
60 +- .../site-packages/pip/_vendor/distlib/t32.exe | Bin 92672 -> 96768 bytes .../site-packages/pip/_vendor/distlib/t64.exe | Bin 102400 -> 105984 bytes .../site-packages/pip/_vendor/distlib/util.py | 11 +- .../site-packages/pip/_vendor/distlib/w32.exe | Bin 89088 -> 90112 bytes .../site-packages/pip/_vendor/distlib/w64.exe | Bin 99328 -> 99840 bytes .../pip/_vendor/distlib/wheel.py | 70 +- .../Lib/site-packages/pip/_vendor/distro.py | 101 +- .../pip/_vendor/html5lib/__init__.py | 2 +- .../pip/_vendor/html5lib/_ihatexml.py | 5 +- .../pip/_vendor/html5lib/_inputstream.py | 55 +- .../pip/_vendor/html5lib/_tokenizer.py | 16 +- .../pip/_vendor/html5lib/_trie/__init__.py | 13 +- .../pip/_vendor/html5lib/_trie/_base.py | 5 +- .../pip/_vendor/html5lib/_trie/datrie.py | 44 - .../pip/_vendor/html5lib/_utils.py | 49 +- .../pip/_vendor/html5lib/constants.py | 9 +- .../pip/_vendor/html5lib/filters/sanitizer.py | 20 + .../pip/_vendor/html5lib/html5parser.py | 734 ++--- .../pip/_vendor/html5lib/serializer.py | 2 +- .../pip/_vendor/html5lib/treebuilders/base.py | 8 +- .../pip/_vendor/html5lib/treebuilders/dom.py | 5 +- .../_vendor/html5lib/treebuilders/etree.py | 27 +- .../html5lib/treebuilders/etree_lxml.py | 64 +- .../_vendor/html5lib/treewalkers/__init__.py | 6 +- .../pip/_vendor/html5lib/treewalkers/etree.py | 1 + .../html5lib/treewalkers/etree_lxml.py | 4 +- .../site-packages/pip/_vendor/idna/core.py | 6 +- .../pip/_vendor/idna/idnadata.py | 155 +- .../pip/_vendor/idna/package_data.py | 2 +- .../pip/_vendor/idna/uts46data.py | 846 +++-- .../site-packages/pip/_vendor/ipaddress.py | 5 +- .../pip/_vendor/lockfile/__init__.py | 347 -- .../pip/_vendor/lockfile/linklockfile.py | 73 - .../pip/_vendor/lockfile/mkdirlockfile.py | 84 - .../pip/_vendor/lockfile/pidlockfile.py | 190 -- .../pip/_vendor/lockfile/sqlitelockfile.py | 156 - .../pip/_vendor/lockfile/symlinklockfile.py | 70 - .../pip/_vendor/msgpack/__init__.py | 30 +- .../pip/_vendor/msgpack/_version.py | 2 +- 
.../pip/_vendor/msgpack/exceptions.py | 45 +- .../site-packages/pip/_vendor/msgpack/ext.py | 191 ++ .../pip/_vendor/msgpack/fallback.py | 786 +++-- .../pip/_vendor/packaging/__about__.py | 4 +- .../pip/_vendor/packaging/_compat.py | 9 +- .../pip/_vendor/packaging/_structures.py | 26 +- .../pip/_vendor/packaging/_typing.py | 48 + .../pip/_vendor/packaging/markers.py | 56 +- .../pip/_vendor/packaging/requirements.py | 9 +- .../pip/_vendor/packaging/specifiers.py | 190 +- .../pip/_vendor/packaging/tags.py | 751 +++++ .../pip/_vendor/packaging/utils.py | 18 +- .../pip/_vendor/packaging/version.py | 151 +- .../pip/_vendor/pep517/__init__.py | 2 +- .../pip/_vendor/pep517/_in_process.py | 95 +- .../site-packages/pip/_vendor/pep517/build.py | 80 +- .../site-packages/pip/_vendor/pep517/check.py | 7 +- .../pip/_vendor/pep517/compat.py | 13 +- .../pip/_vendor/pep517/dirtools.py | 44 + .../pip/_vendor/pep517/envbuild.py | 29 +- .../site-packages/pip/_vendor/pep517/meta.py | 92 + .../pip/_vendor/pep517/wrappers.py | 183 +- .../pip/_vendor/pkg_resources/__init__.py | 147 +- .../pip/_vendor/progress/__init__.py | 80 +- .../site-packages/pip/_vendor/progress/bar.py | 5 +- .../pip/_vendor/progress/counter.py | 13 +- .../pip/_vendor/progress/helpers.py | 91 - .../pip/_vendor/progress/spinner.py | 7 +- .../site-packages/pip/_vendor/pyparsing.py | 2889 ++++++++++------- .../pip/_vendor/pytoml/__init__.py | 4 - .../site-packages/pip/_vendor/pytoml/core.py | 13 - .../pip/_vendor/pytoml/parser.py | 341 -- .../site-packages/pip/_vendor/pytoml/test.py | 30 - .../site-packages/pip/_vendor/pytoml/utils.py | 67 - .../pip/_vendor/pytoml/writer.py | 106 - .../pip/_vendor/requests/__init__.py | 37 +- .../pip/_vendor/requests/__version__.py | 8 +- .../site-packages/pip/_vendor/requests/api.py | 11 +- .../pip/_vendor/requests/auth.py | 4 +- .../pip/_vendor/requests/compat.py | 2 + .../pip/_vendor/requests/exceptions.py | 9 +- .../pip/_vendor/requests/models.py | 19 +- 
.../pip/_vendor/requests/sessions.py | 33 +- .../pip/_vendor/requests/status_codes.py | 15 +- .../pip/_vendor/requests/structures.py | 4 +- .../pip/_vendor/requests/utils.py | 11 +- .../pip/_vendor/resolvelib/__init__.py | 26 + .../pip/_vendor/resolvelib/compat/__init__.py | 0 .../resolvelib/compat/collections_abc.py | 6 + .../pip/_vendor/resolvelib/providers.py | 109 + .../pip/_vendor/resolvelib/reporters.py | 42 + .../pip/_vendor/resolvelib/resolvers.py | 428 +++ .../pip/_vendor/resolvelib/structs.py | 68 + .../Lib/site-packages/pip/_vendor/six.py | 88 +- .../pip/_vendor/toml/__init__.py | 25 + .../site-packages/pip/_vendor/toml/common.py | 6 + .../site-packages/pip/_vendor/toml/decoder.py | 1052 ++++++ .../site-packages/pip/_vendor/toml/encoder.py | 304 ++ .../site-packages/pip/_vendor/toml/ordered.py | 15 + .../Lib/site-packages/pip/_vendor/toml/tz.py | 21 + .../pip/_vendor/urllib3/__init__.py | 56 +- .../pip/_vendor/urllib3/_collections.py | 37 +- .../pip/_vendor/urllib3/connection.py | 270 +- .../pip/_vendor/urllib3/connectionpool.py | 461 ++- .../urllib3/contrib/_appengine_environ.py | 26 +- .../contrib/_securetransport/bindings.py | 276 +- .../contrib/_securetransport/low_level.py | 52 +- .../pip/_vendor/urllib3/contrib/appengine.py | 121 +- .../pip/_vendor/urllib3/contrib/ntlmpool.py | 96 +- .../pip/_vendor/urllib3/contrib/pyopenssl.py | 167 +- .../urllib3/contrib/securetransport.py | 216 +- .../pip/_vendor/urllib3/contrib/socks.py | 136 +- .../pip/_vendor/urllib3/exceptions.py | 48 +- .../pip/_vendor/urllib3/fields.py | 165 +- .../pip/_vendor/urllib3/filepost.py | 14 +- .../pip/_vendor/urllib3/packages/__init__.py | 2 +- .../urllib3/packages/backports/makefile.py | 9 +- .../pip/_vendor/urllib3/packages/six.py | 321 +- .../packages/ssl_match_hostname/__init__.py | 2 +- .../ssl_match_hostname/_implementation.py | 56 +- .../pip/_vendor/urllib3/poolmanager.py | 208 +- .../pip/_vendor/urllib3/request.py | 79 +- .../pip/_vendor/urllib3/response.py | 262 +- 
.../pip/_vendor/urllib3/util/__init__.py | 60 +- .../pip/_vendor/urllib3/util/connection.py | 18 +- .../pip/_vendor/urllib3/util/request.py | 57 +- .../pip/_vendor/urllib3/util/response.py | 9 +- .../pip/_vendor/urllib3/util/retry.py | 102 +- .../pip/_vendor/urllib3/util/ssl_.py | 255 +- .../pip/_vendor/urllib3/util/timeout.py | 87 +- .../pip/_vendor/urllib3/util/url.py | 382 ++- .../pip/_vendor/urllib3/util/wait.py | 3 + .../Lib/site-packages/pip/_vendor/vendor.txt | 24 + .../INSTALLER | 0 .../METADATA | 2 +- .../pyOpenRPA-1.1.15.dist-info}/RECORD | 19 +- .../pyOpenRPA-1.1.15.dist-info/REQUESTED | 0 .../WHEEL | 0 .../top_level.txt | 0 .../RobotRDPActive/RobotRDPActive.py | 6 + .../pyOpenRPA/Orchestrator/Web/Index.js | 8 + .../pyOpenRPA/Orchestrator/Web/Index.xhtml | 4 +- .../Lib/site-packages/pyOpenRPA/__init__.py | 2 +- .../WPy32-3720/python-3.7.2/Scripts/pip.exe | Bin 93003 -> 97108 bytes .../python-3.7.2/Scripts/pip3.7.exe | Bin 93003 -> 97108 bytes .../WPy32-3720/python-3.7.2/Scripts/pip3.exe | Bin 93003 -> 97108 bytes .../site-packages/pip-19.0.3.dist-info/RECORD | 620 ---- .../pip-19.0.3.dist-info/entry_points.txt | 5 - .../INSTALLER | 0 .../LICENSE.txt | 0 .../pip-20.2.dist-info}/METADATA | 35 +- .../site-packages/pip-20.2.dist-info/RECORD | 751 +++++ .../WHEEL | 2 +- .../pip-20.2.dist-info/entry_points.txt | 5 + .../top_level.txt | 0 .../Lib/site-packages/pip/__init__.py | 19 +- .../Lib/site-packages/pip/__main__.py | 9 +- .../site-packages/pip/_internal/__init__.py | 83 +- .../site-packages/pip/_internal/build_env.py | 70 +- .../Lib/site-packages/pip/_internal/cache.py | 236 +- .../pip/_internal/cli/autocompletion.py | 46 +- .../pip/_internal/cli/base_command.py | 326 +- .../pip/_internal/cli/cmdoptions.py | 322 +- .../pip/_internal/cli/command_context.py | 36 + .../site-packages/pip/_internal/cli/main.py | 75 + .../pip/_internal/cli/main_parser.py | 29 +- .../site-packages/pip/_internal/cli/parser.py | 21 +- .../pip/_internal/cli/progress_bars.py | 280 
++ .../pip/_internal/cli/req_command.py | 402 +++ .../pip/_internal/cli/spinners.py | 173 + .../pip/_internal/commands/__init__.py | 155 +- .../pip/_internal/commands/cache.py | 182 ++ .../pip/_internal/commands/check.py | 24 +- .../pip/_internal/commands/completion.py | 58 +- .../pip/_internal/commands/configuration.py | 151 +- .../pip/_internal/commands/debug.py | 229 ++ .../pip/_internal/commands/download.py | 203 +- .../pip/_internal/commands/freeze.py | 31 +- .../pip/_internal/commands/hash.py | 28 +- .../pip/_internal/commands/help.py | 19 +- .../pip/_internal/commands/install.py | 826 +++-- .../pip/_internal/commands/list.py | 152 +- .../pip/_internal/commands/search.py | 66 +- .../pip/_internal/commands/show.py | 90 +- .../pip/_internal/commands/uninstall.py | 79 +- .../pip/_internal/commands/wheel.py | 212 +- .../pip/_internal/configuration.py | 121 +- .../pip/_internal/distributions/__init__.py | 24 + .../pip/_internal/distributions/base.py | 45 + .../pip/_internal/distributions/installed.py | 24 + .../pip/_internal/distributions/sdist.py | 104 + .../pip/_internal/distributions/wheel.py | 36 + .../site-packages/pip/_internal/download.py | 971 ------ .../site-packages/pip/_internal/exceptions.py | 141 +- .../Lib/site-packages/pip/_internal/index.py | 990 ------ .../pip/_internal/index/__init__.py | 2 + .../pip/_internal/index/collector.py | 692 ++++ .../pip/_internal/index/package_finder.py | 1014 ++++++ .../site-packages/pip/_internal/locations.py | 193 +- .../Lib/site-packages/pip/_internal/main.py | 16 + .../pip/_internal/models/candidate.py | 25 +- .../pip/_internal/models/direct_url.py | 245 ++ .../pip/_internal/models/format_control.py | 29 +- .../pip/_internal/models/index.py | 3 + .../pip/_internal/models/link.py | 170 +- .../pip/_internal/models/scheme.py | 31 + .../pip/_internal/models/search_scope.py | 135 + .../pip/_internal/models/selection_prefs.py | 49 + .../pip/_internal/models/target_python.py | 120 + .../pip/_internal/models/wheel.py | 78 
+ .../pip/_internal/network/__init__.py | 2 + .../pip/_internal/network/auth.py | 308 ++ .../pip/_internal/network/cache.py | 79 + .../pip/_internal/network/download.py | 182 ++ .../pip/_internal/network/lazy_wheel.py | 231 ++ .../pip/_internal/network/session.py | 421 +++ .../pip/_internal/network/utils.py | 97 + .../pip/_internal/network/xmlrpc.py | 52 + .../_internal/operations/build/__init__.py | 0 .../_internal/operations/build/metadata.py | 37 + .../operations/build/metadata_legacy.py | 77 + .../pip/_internal/operations/build/wheel.py | 46 + .../operations/build/wheel_legacy.py | 115 + .../pip/_internal/operations/check.py | 27 +- .../pip/_internal/operations/freeze.py | 121 +- .../_internal/operations/install/__init__.py | 2 + .../operations/install/editable_legacy.py | 52 + .../_internal/operations/install/legacy.py | 130 + .../pip/_internal/operations/install/wheel.py | 830 +++++ .../pip/_internal/operations/prepare.py | 709 ++-- .../site-packages/pip/_internal/pep425tags.py | 381 --- .../site-packages/pip/_internal/pyproject.py | 39 +- .../pip/_internal/req/__init__.py | 84 +- .../pip/_internal/req/constructors.py | 347 +- .../pip/_internal/req/req_file.py | 538 ++- .../pip/_internal/req/req_install.py | 1009 +++--- .../pip/_internal/req/req_set.py | 148 +- .../pip/_internal/req/req_tracker.py | 130 +- .../pip/_internal/req/req_uninstall.py | 116 +- .../pip/_internal/resolution/__init__.py | 0 .../pip/_internal/resolution/base.py | 20 + .../_internal/resolution/legacy/__init__.py | 0 .../legacy/resolver.py} | 268 +- .../resolution/resolvelib/__init__.py | 0 .../_internal/resolution/resolvelib/base.py | 82 + .../resolution/resolvelib/candidates.py | 594 ++++ .../resolution/resolvelib/factory.py | 459 +++ .../resolution/resolvelib/provider.py | 150 + .../resolution/resolvelib/requirements.py | 137 + .../resolution/resolvelib/resolver.py | 258 ++ .../pip/_internal/self_outdated_check.py} | 153 +- .../pip/_internal/utils/appdirs.py | 264 +- 
.../pip/_internal/utils/compat.py | 123 +- .../pip/_internal/utils/compatibility_tags.py | 166 + .../pip/_internal/utils/datetime.py | 14 + .../pip/_internal/utils/deprecation.py | 32 +- .../pip/_internal/utils/direct_url_helpers.py | 130 + .../pip/_internal/utils/distutils_args.py | 48 + .../pip/_internal/utils/encoding.py | 20 +- .../pip/_internal/utils/entrypoints.py | 31 + .../pip/_internal/utils/filesystem.py | 196 +- .../pip/_internal/utils/filetypes.py | 16 + .../pip/_internal/utils/glibc.py | 65 +- .../pip/_internal/utils/hashes.py | 40 +- .../_internal/utils/inject_securetransport.py | 36 + .../pip/_internal/utils/logging.py | 107 +- .../site-packages/pip/_internal/utils/misc.py | 894 +++-- .../pip/_internal/utils/models.py | 6 +- .../pip/_internal/utils/packaging.py | 73 +- .../pip/_internal/utils/parallel.py | 107 + .../pip/_internal/utils/pkg_resources.py | 44 + .../pip/_internal/utils/setuptools_build.py | 177 +- .../pip/_internal/utils/subprocess.py | 280 ++ .../pip/_internal/utils/temp_dir.py | 193 +- .../pip/_internal/utils/typing.py | 11 +- .../site-packages/pip/_internal/utils/ui.py | 441 --- .../pip/_internal/utils/unpacking.py | 281 ++ .../site-packages/pip/_internal/utils/urls.py | 55 + .../pip/_internal/utils/virtualenv.py | 116 + .../pip/_internal/utils/wheel.py | 225 ++ .../pip/_internal/vcs/__init__.py | 547 +--- .../site-packages/pip/_internal/vcs/bazaar.py | 73 +- .../site-packages/pip/_internal/vcs/git.py | 240 +- .../pip/_internal/vcs/mercurial.py | 111 +- .../pip/_internal/vcs/subversion.py | 246 +- .../pip/_internal/vcs/versioncontrol.py | 811 +++++ .../Lib/site-packages/pip/_internal/wheel.py | 1095 ------- .../pip/_internal/wheel_builder.py | 308 ++ .../Lib/site-packages/pip/_vendor/__init__.py | 37 +- .../Lib/site-packages/pip/_vendor/appdirs.py | 45 +- .../pip/_vendor/cachecontrol/__init__.py | 2 +- .../pip/_vendor/cachecontrol/adapter.py | 2 +- .../_vendor/cachecontrol/caches/file_cache.py | 4 +- 
.../pip/_vendor/cachecontrol/controller.py | 11 +- .../pip/_vendor/cachecontrol/serialize.py | 4 +- .../pip/_vendor/cachecontrol/wrapper.py | 2 +- .../pip/_vendor/certifi/__init__.py | 4 +- .../pip/_vendor/certifi/__main__.py | 14 +- .../pip/_vendor/certifi/cacert.pem | 602 ++-- .../site-packages/pip/_vendor/certifi/core.py | 54 +- .../pip/_vendor/colorama/__init__.py | 2 +- .../site-packages/pip/_vendor/contextlib2.py | 518 +++ .../pip/_vendor/distlib/__init__.py | 4 +- .../pip/_vendor/distlib/_backport/shutil.py | 9 +- .../_vendor/distlib/_backport/sysconfig.py | 8 +- .../pip/_vendor/distlib/compat.py | 2 +- .../pip/_vendor/distlib/database.py | 4 +- .../pip/_vendor/distlib/index.py | 2 +- .../pip/_vendor/distlib/locators.py | 27 +- .../pip/_vendor/distlib/metadata.py | 130 +- .../pip/_vendor/distlib/scripts.py | 60 +- .../site-packages/pip/_vendor/distlib/t32.exe | Bin 92672 -> 96768 bytes .../site-packages/pip/_vendor/distlib/t64.exe | Bin 102400 -> 105984 bytes .../site-packages/pip/_vendor/distlib/util.py | 11 +- .../site-packages/pip/_vendor/distlib/w32.exe | Bin 89088 -> 90112 bytes .../site-packages/pip/_vendor/distlib/w64.exe | Bin 99328 -> 99840 bytes .../pip/_vendor/distlib/wheel.py | 70 +- .../Lib/site-packages/pip/_vendor/distro.py | 101 +- .../pip/_vendor/html5lib/__init__.py | 2 +- .../pip/_vendor/html5lib/_ihatexml.py | 5 +- .../pip/_vendor/html5lib/_inputstream.py | 55 +- .../pip/_vendor/html5lib/_tokenizer.py | 16 +- .../pip/_vendor/html5lib/_trie/__init__.py | 13 +- .../pip/_vendor/html5lib/_trie/_base.py | 5 +- .../pip/_vendor/html5lib/_trie/datrie.py | 44 - .../pip/_vendor/html5lib/_utils.py | 49 +- .../pip/_vendor/html5lib/constants.py | 9 +- .../pip/_vendor/html5lib/filters/sanitizer.py | 20 + .../pip/_vendor/html5lib/html5parser.py | 734 ++--- .../pip/_vendor/html5lib/serializer.py | 2 +- .../pip/_vendor/html5lib/treebuilders/base.py | 8 +- .../pip/_vendor/html5lib/treebuilders/dom.py | 5 +- .../_vendor/html5lib/treebuilders/etree.py | 27 
+- .../html5lib/treebuilders/etree_lxml.py | 64 +- .../_vendor/html5lib/treewalkers/__init__.py | 6 +- .../pip/_vendor/html5lib/treewalkers/etree.py | 1 + .../html5lib/treewalkers/etree_lxml.py | 4 +- .../site-packages/pip/_vendor/idna/core.py | 6 +- .../pip/_vendor/idna/idnadata.py | 155 +- .../pip/_vendor/idna/package_data.py | 2 +- .../pip/_vendor/idna/uts46data.py | 846 +++-- .../site-packages/pip/_vendor/ipaddress.py | 5 +- .../pip/_vendor/lockfile/__init__.py | 347 -- .../pip/_vendor/lockfile/linklockfile.py | 73 - .../pip/_vendor/lockfile/mkdirlockfile.py | 84 - .../pip/_vendor/lockfile/pidlockfile.py | 190 -- .../pip/_vendor/lockfile/sqlitelockfile.py | 156 - .../pip/_vendor/lockfile/symlinklockfile.py | 70 - .../pip/_vendor/msgpack/__init__.py | 30 +- .../pip/_vendor/msgpack/_version.py | 2 +- .../pip/_vendor/msgpack/exceptions.py | 45 +- .../site-packages/pip/_vendor/msgpack/ext.py | 191 ++ .../pip/_vendor/msgpack/fallback.py | 786 +++-- .../pip/_vendor/packaging/__about__.py | 4 +- .../pip/_vendor/packaging/_compat.py | 9 +- .../pip/_vendor/packaging/_structures.py | 26 +- .../pip/_vendor/packaging/_typing.py | 48 + .../pip/_vendor/packaging/markers.py | 56 +- .../pip/_vendor/packaging/requirements.py | 9 +- .../pip/_vendor/packaging/specifiers.py | 190 +- .../pip/_vendor/packaging/tags.py | 751 +++++ .../pip/_vendor/packaging/utils.py | 18 +- .../pip/_vendor/packaging/version.py | 151 +- .../pip/_vendor/pep517/__init__.py | 2 +- .../pip/_vendor/pep517/_in_process.py | 95 +- .../site-packages/pip/_vendor/pep517/build.py | 80 +- .../site-packages/pip/_vendor/pep517/check.py | 7 +- .../pip/_vendor/pep517/compat.py | 13 +- .../pip/_vendor/pep517/dirtools.py | 44 + .../pip/_vendor/pep517/envbuild.py | 29 +- .../site-packages/pip/_vendor/pep517/meta.py | 92 + .../pip/_vendor/pep517/wrappers.py | 183 +- .../pip/_vendor/pkg_resources/__init__.py | 147 +- .../pip/_vendor/progress/__init__.py | 80 +- .../site-packages/pip/_vendor/progress/bar.py | 5 +- 
.../pip/_vendor/progress/counter.py | 13 +- .../pip/_vendor/progress/helpers.py | 91 - .../pip/_vendor/progress/spinner.py | 7 +- .../site-packages/pip/_vendor/pyparsing.py | 2889 ++++++++++------- .../pip/_vendor/pytoml/__init__.py | 4 - .../site-packages/pip/_vendor/pytoml/core.py | 13 - .../pip/_vendor/pytoml/parser.py | 341 -- .../site-packages/pip/_vendor/pytoml/test.py | 30 - .../site-packages/pip/_vendor/pytoml/utils.py | 67 - .../pip/_vendor/pytoml/writer.py | 106 - .../pip/_vendor/requests/__init__.py | 37 +- .../pip/_vendor/requests/__version__.py | 8 +- .../site-packages/pip/_vendor/requests/api.py | 11 +- .../pip/_vendor/requests/auth.py | 4 +- .../pip/_vendor/requests/compat.py | 2 + .../pip/_vendor/requests/exceptions.py | 9 +- .../pip/_vendor/requests/models.py | 19 +- .../pip/_vendor/requests/sessions.py | 33 +- .../pip/_vendor/requests/status_codes.py | 15 +- .../pip/_vendor/requests/structures.py | 4 +- .../pip/_vendor/requests/utils.py | 11 +- .../pip/_vendor/resolvelib/__init__.py | 26 + .../pip/_vendor/resolvelib/compat/__init__.py | 0 .../resolvelib/compat/collections_abc.py | 6 + .../pip/_vendor/resolvelib/providers.py | 109 + .../pip/_vendor/resolvelib/reporters.py | 42 + .../pip/_vendor/resolvelib/resolvers.py | 428 +++ .../pip/_vendor/resolvelib/structs.py | 68 + .../Lib/site-packages/pip/_vendor/six.py | 88 +- .../pip/_vendor/toml/__init__.py | 25 + .../site-packages/pip/_vendor/toml/common.py | 6 + .../site-packages/pip/_vendor/toml/decoder.py | 1052 ++++++ .../site-packages/pip/_vendor/toml/encoder.py | 304 ++ .../site-packages/pip/_vendor/toml/ordered.py | 15 + .../Lib/site-packages/pip/_vendor/toml/tz.py | 21 + .../pip/_vendor/urllib3/__init__.py | 56 +- .../pip/_vendor/urllib3/_collections.py | 37 +- .../pip/_vendor/urllib3/connection.py | 270 +- .../pip/_vendor/urllib3/connectionpool.py | 461 ++- .../urllib3/contrib/_appengine_environ.py | 26 +- .../contrib/_securetransport/bindings.py | 276 +- 
.../contrib/_securetransport/low_level.py | 52 +- .../pip/_vendor/urllib3/contrib/appengine.py | 121 +- .../pip/_vendor/urllib3/contrib/ntlmpool.py | 96 +- .../pip/_vendor/urllib3/contrib/pyopenssl.py | 167 +- .../urllib3/contrib/securetransport.py | 216 +- .../pip/_vendor/urllib3/contrib/socks.py | 136 +- .../pip/_vendor/urllib3/exceptions.py | 48 +- .../pip/_vendor/urllib3/fields.py | 165 +- .../pip/_vendor/urllib3/filepost.py | 14 +- .../pip/_vendor/urllib3/packages/__init__.py | 2 +- .../urllib3/packages/backports/makefile.py | 9 +- .../pip/_vendor/urllib3/packages/six.py | 321 +- .../packages/ssl_match_hostname/__init__.py | 2 +- .../ssl_match_hostname/_implementation.py | 56 +- .../pip/_vendor/urllib3/poolmanager.py | 208 +- .../pip/_vendor/urllib3/request.py | 79 +- .../pip/_vendor/urllib3/response.py | 262 +- .../pip/_vendor/urllib3/util/__init__.py | 60 +- .../pip/_vendor/urllib3/util/connection.py | 18 +- .../pip/_vendor/urllib3/util/request.py | 57 +- .../pip/_vendor/urllib3/util/response.py | 9 +- .../pip/_vendor/urllib3/util/retry.py | 102 +- .../pip/_vendor/urllib3/util/ssl_.py | 255 +- .../pip/_vendor/urllib3/util/timeout.py | 87 +- .../pip/_vendor/urllib3/util/url.py | 382 ++- .../pip/_vendor/urllib3/util/wait.py | 3 + .../Lib/site-packages/pip/_vendor/vendor.txt | 24 + .../INSTALLER | 0 .../METADATA | 2 +- .../pyOpenRPA-1.1.15.dist-info}/RECORD | 19 +- .../pyOpenRPA-1.1.15.dist-info/REQUESTED | 0 .../WHEEL | 0 .../top_level.txt | 0 .../RobotRDPActive/RobotRDPActive.py | 6 + .../pyOpenRPA/Orchestrator/Web/Index.js | 8 + .../pyOpenRPA/Orchestrator/Web/Index.xhtml | 4 +- .../Lib/site-packages/pyOpenRPA/__init__.py | 2 +- .../python-3.7.2.amd64/Scripts/pip.exe | Bin 102731 -> 106324 bytes .../python-3.7.2.amd64/Scripts/pip3.7.exe | Bin 102731 -> 106324 bytes .../python-3.7.2.amd64/Scripts/pip3.exe | Bin 102731 -> 106324 bytes .../RobotRDPActive/RobotRDPActive.py | 6 + Sources/pyOpenRPA/__init__.py | 2 +- v1.1.15 | 0 623 files changed, 59269 
insertions(+), 31155 deletions(-) delete mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-19.0.3.dist-info/RECORD delete mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-19.0.3.dist-info/entry_points.txt rename Resources/WPy32-3720/python-3.7.2/Lib/site-packages/{pip-19.0.3.dist-info => pip-20.2.dist-info}/INSTALLER (100%) rename Resources/WPy32-3720/python-3.7.2/Lib/site-packages/{pip-19.0.3.dist-info => pip-20.2.dist-info}/LICENSE.txt (100%) rename Resources/{WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-19.0.3.dist-info => WPy32-3720/python-3.7.2/Lib/site-packages/pip-20.2.dist-info}/METADATA (62%) create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-20.2.dist-info/RECORD rename Resources/WPy32-3720/python-3.7.2/Lib/site-packages/{pip-19.0.3.dist-info => pip-20.2.dist-info}/WHEEL (70%) create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-20.2.dist-info/entry_points.txt rename Resources/WPy32-3720/python-3.7.2/Lib/site-packages/{pip-19.0.3.dist-info => pip-20.2.dist-info}/top_level.txt (100%) create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/command_context.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/main.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/progress_bars.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/req_command.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/spinners.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/cache.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/debug.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/distributions/__init__.py create mode 100644 
Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/distributions/base.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/distributions/installed.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/distributions/sdist.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/distributions/wheel.py delete mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/download.py delete mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/index.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/index/__init__.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/index/collector.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/index/package_finder.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/main.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/direct_url.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/scheme.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/search_scope.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/selection_prefs.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/target_python.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/wheel.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/__init__.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/auth.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/cache.py create mode 100644 
Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/download.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/lazy_wheel.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/session.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/utils.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/xmlrpc.py rename v1.1.14 => Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/build/__init__.py (100%) create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/build/metadata.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/build/metadata_legacy.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/build/wheel.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/build/wheel_legacy.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/install/__init__.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/install/editable_legacy.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/install/legacy.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/install/wheel.py delete mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/pep425tags.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/__init__.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/base.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/legacy/__init__.py rename 
Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/{resolve.py => resolution/legacy/resolver.py} (60%) create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/__init__.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/base.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/candidates.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/factory.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/provider.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/requirements.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/resolver.py rename Resources/{WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/outdated.py => WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/self_outdated_check.py} (50%) create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/compatibility_tags.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/datetime.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/direct_url_helpers.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/distutils_args.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/entrypoints.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/filetypes.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/inject_securetransport.py create mode 100644 
Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/parallel.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/pkg_resources.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/subprocess.py delete mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/ui.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/unpacking.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/urls.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/virtualenv.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/wheel.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/versioncontrol.py delete mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/wheel.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/wheel_builder.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/contextlib2.py delete mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_trie/datrie.py delete mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/__init__.py delete mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/linklockfile.py delete mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/mkdirlockfile.py delete mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/pidlockfile.py delete mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/sqlitelockfile.py delete mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/symlinklockfile.py create mode 100644 
Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/msgpack/ext.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/_typing.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/tags.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/dirtools.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/meta.py delete mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/progress/helpers.py delete mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/__init__.py delete mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/core.py delete mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/parser.py delete mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/test.py delete mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/utils.py delete mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/writer.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/__init__.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/compat/__init__.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/providers.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/reporters.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/resolvers.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/structs.py create mode 100644 
Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/toml/__init__.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/toml/common.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/toml/decoder.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/toml/encoder.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/toml/ordered.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/toml/tz.py create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/vendor.txt rename Resources/WPy32-3720/python-3.7.2/Lib/site-packages/{pyOpenRPA-1.1.14.dist-info => pyOpenRPA-1.1.15.dist-info}/INSTALLER (100%) rename Resources/WPy32-3720/python-3.7.2/Lib/site-packages/{pyOpenRPA-1.1.14.dist-info => pyOpenRPA-1.1.15.dist-info}/METADATA (99%) rename Resources/{WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.14.dist-info => WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.15.dist-info}/RECORD (97%) create mode 100644 Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/REQUESTED rename Resources/WPy32-3720/python-3.7.2/Lib/site-packages/{pyOpenRPA-1.1.14.dist-info => pyOpenRPA-1.1.15.dist-info}/WHEEL (100%) rename Resources/WPy32-3720/python-3.7.2/Lib/site-packages/{pyOpenRPA-1.1.14.dist-info => pyOpenRPA-1.1.15.dist-info}/top_level.txt (100%) delete mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-19.0.3.dist-info/RECORD delete mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-19.0.3.dist-info/entry_points.txt rename Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/{pip-19.0.3.dist-info => pip-20.2.dist-info}/INSTALLER (100%) rename Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/{pip-19.0.3.dist-info => pip-20.2.dist-info}/LICENSE.txt (100%) rename 
Resources/{WPy32-3720/python-3.7.2/Lib/site-packages/pip-19.0.3.dist-info => WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-20.2.dist-info}/METADATA (62%) create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-20.2.dist-info/RECORD rename Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/{pip-19.0.3.dist-info => pip-20.2.dist-info}/WHEEL (70%) create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-20.2.dist-info/entry_points.txt rename Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/{pip-19.0.3.dist-info => pip-20.2.dist-info}/top_level.txt (100%) create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/command_context.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/main.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/progress_bars.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/req_command.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/spinners.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/cache.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/debug.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/distributions/__init__.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/distributions/base.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/distributions/installed.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/distributions/sdist.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/distributions/wheel.py delete mode 100644 
Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/download.py delete mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/index.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/index/__init__.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/index/collector.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/index/package_finder.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/main.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/direct_url.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/scheme.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/search_scope.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/selection_prefs.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/target_python.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/wheel.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/__init__.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/auth.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/cache.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/download.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/lazy_wheel.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/session.py create mode 100644 
Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/utils.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/xmlrpc.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/build/__init__.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/build/metadata.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/build/metadata_legacy.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/build/wheel.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/build/wheel_legacy.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/install/__init__.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/install/editable_legacy.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/install/legacy.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/install/wheel.py delete mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/pep425tags.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/__init__.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/base.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/legacy/__init__.py rename Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/{resolve.py => resolution/legacy/resolver.py} (60%) create mode 100644 
Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/__init__.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/base.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/candidates.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/factory.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/provider.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/requirements.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/resolver.py rename Resources/{WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/outdated.py => WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/self_outdated_check.py} (50%) create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/compatibility_tags.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/datetime.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/direct_url_helpers.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/distutils_args.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/entrypoints.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/filetypes.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/inject_securetransport.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/parallel.py create mode 100644 
Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/pkg_resources.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/subprocess.py delete mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/ui.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/unpacking.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/urls.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/virtualenv.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/wheel.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/versioncontrol.py delete mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/wheel.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/wheel_builder.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/contextlib2.py delete mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_trie/datrie.py delete mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/__init__.py delete mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/linklockfile.py delete mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/mkdirlockfile.py delete mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/pidlockfile.py delete mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/sqlitelockfile.py delete mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/symlinklockfile.py create mode 100644 
Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/msgpack/ext.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/_typing.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/tags.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/dirtools.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/meta.py delete mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/progress/helpers.py delete mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/__init__.py delete mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/core.py delete mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/parser.py delete mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/test.py delete mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/utils.py delete mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/writer.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/__init__.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/compat/__init__.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/providers.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/reporters.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/resolvers.py create mode 100644 
Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/structs.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/__init__.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/common.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/decoder.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/encoder.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/ordered.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/tz.py create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/vendor.txt rename Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/{pyOpenRPA-1.1.14.dist-info => pyOpenRPA-1.1.15.dist-info}/INSTALLER (100%) rename Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/{pyOpenRPA-1.1.14.dist-info => pyOpenRPA-1.1.15.dist-info}/METADATA (99%) rename Resources/{WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.14.dist-info => WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.15.dist-info}/RECORD (97%) create mode 100644 Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/REQUESTED rename Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/{pyOpenRPA-1.1.14.dist-info => pyOpenRPA-1.1.15.dist-info}/WHEEL (100%) rename Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/{pyOpenRPA-1.1.14.dist-info => pyOpenRPA-1.1.15.dist-info}/top_level.txt (100%) create mode 100644 v1.1.15 diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-19.0.3.dist-info/RECORD b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-19.0.3.dist-info/RECORD deleted file mode 100644 index 1b4666f2..00000000 --- 
a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-19.0.3.dist-info/RECORD +++ /dev/null @@ -1,620 +0,0 @@ -../../Scripts/pip.exe,sha256=IaoMm_4MA_y9tJM3bpm9dKiQB4eeiFUbJ7VFDuwhMF8,93003 -../../Scripts/pip3.7.exe,sha256=IaoMm_4MA_y9tJM3bpm9dKiQB4eeiFUbJ7VFDuwhMF8,93003 -../../Scripts/pip3.exe,sha256=IaoMm_4MA_y9tJM3bpm9dKiQB4eeiFUbJ7VFDuwhMF8,93003 -pip-19.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pip-19.0.3.dist-info/LICENSE.txt,sha256=W6Ifuwlk-TatfRU2LR7W1JMcyMj5_y1NkRkOEJvnRDE,1090 -pip-19.0.3.dist-info/METADATA,sha256=UFkQ3jmvF9jPeZVMc6IScYpjOYGZx-05u0kqWTl8MnY,2892 -pip-19.0.3.dist-info/RECORD,, -pip-19.0.3.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pip-19.0.3.dist-info/entry_points.txt,sha256=S_zfxY25QtQDVY1BiLAmOKSkkI5llzCKPLiYOSEupsY,98 -pip-19.0.3.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pip/__init__.py,sha256=_0sfqHzmBOW_4x3-R2jxH4luuX5Ffe32B7BaisaO2XI,23 -pip/__main__.py,sha256=L3IHqBeasELUHvwy5CT_izVEMhM12tve289qut49DvU,623 -pip/__pycache__/__init__.cpython-37.pyc,, -pip/__pycache__/__main__.cpython-37.pyc,, -pip/_internal/__init__.py,sha256=b0jSFCCViGhB1RWni35_NMkH3Y-mbZrV648DGMagDjs,2869 -pip/_internal/__pycache__/__init__.cpython-37.pyc,, -pip/_internal/__pycache__/build_env.cpython-37.pyc,, -pip/_internal/__pycache__/cache.cpython-37.pyc,, -pip/_internal/__pycache__/configuration.cpython-37.pyc,, -pip/_internal/__pycache__/download.cpython-37.pyc,, -pip/_internal/__pycache__/exceptions.cpython-37.pyc,, -pip/_internal/__pycache__/index.cpython-37.pyc,, -pip/_internal/__pycache__/locations.cpython-37.pyc,, -pip/_internal/__pycache__/pep425tags.cpython-37.pyc,, -pip/_internal/__pycache__/pyproject.cpython-37.pyc,, -pip/_internal/__pycache__/resolve.cpython-37.pyc,, -pip/_internal/__pycache__/wheel.cpython-37.pyc,, -pip/_internal/build_env.py,sha256=M6gja0toc36njmTGewkXMx0A-ZiaG6kL3yIw-N8Eu9M,7439 
-pip/_internal/cache.py,sha256=WfnnzjrXNi5-i1Ahy6UZWLs2LxSKAY5Sswri3z-pf68,7684 -pip/_internal/cli/__init__.py,sha256=FkHBgpxxb-_gd6r1FjnNhfMOzAUYyXoXKJ6abijfcFU,132 -pip/_internal/cli/__pycache__/__init__.cpython-37.pyc,, -pip/_internal/cli/__pycache__/autocompletion.cpython-37.pyc,, -pip/_internal/cli/__pycache__/base_command.cpython-37.pyc,, -pip/_internal/cli/__pycache__/cmdoptions.cpython-37.pyc,, -pip/_internal/cli/__pycache__/main_parser.cpython-37.pyc,, -pip/_internal/cli/__pycache__/parser.cpython-37.pyc,, -pip/_internal/cli/__pycache__/status_codes.cpython-37.pyc,, -pip/_internal/cli/autocompletion.py,sha256=ptvsMdGjq42pzoY4skABVF43u2xAtLJlXAulPi-A10Y,6083 -pip/_internal/cli/base_command.py,sha256=YymFGRVq-Z0RcOyV5YzcRDANLeV19Em4XkipwBFqkEU,12725 -pip/_internal/cli/cmdoptions.py,sha256=pZQFNv-E7S0k4IYm6MW127FfLO0RP4yKkfyfb3V_x90,23885 -pip/_internal/cli/main_parser.py,sha256=ReG-nZ95-7WxZJLY1wrwknPGbECOd-zkUnHiShKr5ZY,3016 -pip/_internal/cli/parser.py,sha256=VZKUKJPbU6I2cHPLDOikin-aCx7OvLcZ3fzYp3xytd8,9378 -pip/_internal/cli/status_codes.py,sha256=F6uDG6Gj7RNKQJUDnd87QKqI16Us-t-B0wPF_4QMpWc,156 -pip/_internal/commands/__init__.py,sha256=CQAzhVx9ViPtqLNUvAeqnKj5iWfFEcqMx5RlZWjJ30c,2251 -pip/_internal/commands/__pycache__/__init__.cpython-37.pyc,, -pip/_internal/commands/__pycache__/check.cpython-37.pyc,, -pip/_internal/commands/__pycache__/completion.cpython-37.pyc,, -pip/_internal/commands/__pycache__/configuration.cpython-37.pyc,, -pip/_internal/commands/__pycache__/download.cpython-37.pyc,, -pip/_internal/commands/__pycache__/freeze.cpython-37.pyc,, -pip/_internal/commands/__pycache__/hash.cpython-37.pyc,, -pip/_internal/commands/__pycache__/help.cpython-37.pyc,, -pip/_internal/commands/__pycache__/install.cpython-37.pyc,, -pip/_internal/commands/__pycache__/list.cpython-37.pyc,, -pip/_internal/commands/__pycache__/search.cpython-37.pyc,, -pip/_internal/commands/__pycache__/show.cpython-37.pyc,, 
-pip/_internal/commands/__pycache__/uninstall.cpython-37.pyc,, -pip/_internal/commands/__pycache__/wheel.cpython-37.pyc,, -pip/_internal/commands/check.py,sha256=liigNVif0iz2mBfhvsajrLZT5zM5KIvgmKvhAW91EzA,1430 -pip/_internal/commands/completion.py,sha256=hqvCvoxsIHjysiD7olHKTqK2lzE1_lS6LWn69kN5qyI,2929 -pip/_internal/commands/configuration.py,sha256=265HWuUxPggCNcIeWHA3p-LDDiRVnexwFgwmHGgWOHY,7125 -pip/_internal/commands/download.py,sha256=XPe3Kuj9iZfXwOiJq70mYVYNZD5lJCLnGT_C61cOsKw,6623 -pip/_internal/commands/freeze.py,sha256=VvS3G0wrm_9BH3B7Ex5msLL_1UQTtCq5G8dDI63Iemo,3259 -pip/_internal/commands/hash.py,sha256=K1JycsD-rpjqrRcL_ijacY9UKmI82pQcLYq4kCM4Pv0,1681 -pip/_internal/commands/help.py,sha256=MwBhPJpW1Dt3GfJV3V8V6kgAy_pXT0jGrZJB1wCTW-E,1090 -pip/_internal/commands/install.py,sha256=OqLybBwThV0IRq0xwnlsENWBB9-hw8Dcv5pUPg5QtKw,22580 -pip/_internal/commands/list.py,sha256=cbJEvxkBlFfSjBalQrbTqb_KFR6eLMo7Mp_JXttPyQI,10150 -pip/_internal/commands/search.py,sha256=sLZ9icKMEEGekHvzRRZMiTd1zCFIZeDptyyU1mQCYzk,4728 -pip/_internal/commands/show.py,sha256=9EVh86vY0NZdlhT-wsuV-zq_MAV6qqV4S1Akn3wkUuw,6289 -pip/_internal/commands/uninstall.py,sha256=h0gfPF5jylDESx_IHgF6bZME7QAEOHzQHdn65GP-jrE,2963 -pip/_internal/commands/wheel.py,sha256=7MNPZqK9WWxZC3TgzvMBH-RPRlOFLpwq927lkzUiUjI,7167 -pip/_internal/configuration.py,sha256=KMgG3ufFrUKX_QESi2cMVvFi47tl845Bg1ZkNthlWik,13243 -pip/_internal/download.py,sha256=KoQvMd0OfeMUn-Wi_v2e99jxkue_zKkxFBHiiQDS3Z0,34696 -pip/_internal/exceptions.py,sha256=bRSURPPUs2wMBb2TillETj6EBTDgpp4fWp5CcKZe3K0,9145 -pip/_internal/index.py,sha256=sYBuVbKkl11YqinxBIxro8_tx0GQ_5n4gbx9EpS3xN0,37840 -pip/_internal/locations.py,sha256=1JWExRYwqJq6slzprUVq0u2nxTzmGem-8L1CxU0tdVs,6944 -pip/_internal/models/__init__.py,sha256=3DHUd_qxpPozfzouoqa9g9ts1Czr5qaHfFxbnxriepM,63 -pip/_internal/models/__pycache__/__init__.cpython-37.pyc,, -pip/_internal/models/__pycache__/candidate.cpython-37.pyc,, -pip/_internal/models/__pycache__/format_control.cpython-37.pyc,, 
-pip/_internal/models/__pycache__/index.cpython-37.pyc,, -pip/_internal/models/__pycache__/link.cpython-37.pyc,, -pip/_internal/models/candidate.py,sha256=avICbDUtLA5zIwX_Xy4z3-Qg6tf_ysZzz30sdFbVnys,1094 -pip/_internal/models/format_control.py,sha256=p0L8487xgkUrGyvULKCBQcJW0uZbWeP3ZXE_eGBGfe8,2264 -pip/_internal/models/index.py,sha256=K59A8-hVhBM20Xkahr4dTwP7OjkJyEqXH11UwHFVgqM,1060 -pip/_internal/models/link.py,sha256=mQu9rcPjaRGSqsboFLAdgMRT6B6iatiiCoToNHv4zS4,4817 -pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_internal/operations/__pycache__/__init__.cpython-37.pyc,, -pip/_internal/operations/__pycache__/check.cpython-37.pyc,, -pip/_internal/operations/__pycache__/freeze.cpython-37.pyc,, -pip/_internal/operations/__pycache__/prepare.cpython-37.pyc,, -pip/_internal/operations/check.py,sha256=KND1M5Bh_zMBP9hvvSovZPwolTxH3sWGiHD6hABegs8,5137 -pip/_internal/operations/freeze.py,sha256=To8UFKGiZIOfA87Y1S-7HVn_-cKjRYXJ4X45maMWA-c,9321 -pip/_internal/operations/prepare.py,sha256=l2SemS5Z_KYB0PQ7y2E12Yl28-rfegcmSRbqvElsQpI,16740 -pip/_internal/pep425tags.py,sha256=t0VKiMvgd1VYcTdJe4H_6_VYeCB6PMDwnqZrE69FYH0,13142 -pip/_internal/pyproject.py,sha256=nBCBkD95mF2BoYTOjyfS0zccTjYPgIUWM-NB9pOBgbM,6478 -pip/_internal/req/__init__.py,sha256=gneiGyc-U5QXYi2XN0q9QzcQ2TK3R7vwQ4AzTmk9rIk,2343 -pip/_internal/req/__pycache__/__init__.cpython-37.pyc,, -pip/_internal/req/__pycache__/constructors.cpython-37.pyc,, -pip/_internal/req/__pycache__/req_file.cpython-37.pyc,, -pip/_internal/req/__pycache__/req_install.cpython-37.pyc,, -pip/_internal/req/__pycache__/req_set.cpython-37.pyc,, -pip/_internal/req/__pycache__/req_tracker.cpython-37.pyc,, -pip/_internal/req/__pycache__/req_uninstall.cpython-37.pyc,, -pip/_internal/req/constructors.py,sha256=bMUEADysJNU7rnXK_k4OqpNXGMWFkE6b3JjqFULf0GU,11378 -pip/_internal/req/req_file.py,sha256=Onq9xqes1R1qptlkAUOhlvGO2JZLvVvOBA7aO72YIfc,13730 
-pip/_internal/req/req_install.py,sha256=BXrpciRx6_Ah1OfeYHQt_8BKUgpRpgRPqOT0LobMsD8,39955 -pip/_internal/req/req_set.py,sha256=dwaxSEGvtFV4G6uW1dNLbfpV9xbPBBdDbHslR1FN7jc,8064 -pip/_internal/req/req_tracker.py,sha256=aAvF76NrFVc0SmOtj3Ee570i9g5yJbxv0uJsBxumbG8,2905 -pip/_internal/req/req_uninstall.py,sha256=OaIJ6Hdo-LJ27LU2cAPWzHfbl_3iobsGzay9wDlfLpk,21458 -pip/_internal/resolve.py,sha256=whoi0DJIk0B-j_W6wLkosFwcMKCImanHnpZKeYd-X9U,15226 -pip/_internal/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_internal/utils/__pycache__/__init__.cpython-37.pyc,, -pip/_internal/utils/__pycache__/appdirs.cpython-37.pyc,, -pip/_internal/utils/__pycache__/compat.cpython-37.pyc,, -pip/_internal/utils/__pycache__/deprecation.cpython-37.pyc,, -pip/_internal/utils/__pycache__/encoding.cpython-37.pyc,, -pip/_internal/utils/__pycache__/filesystem.cpython-37.pyc,, -pip/_internal/utils/__pycache__/glibc.cpython-37.pyc,, -pip/_internal/utils/__pycache__/hashes.cpython-37.pyc,, -pip/_internal/utils/__pycache__/logging.cpython-37.pyc,, -pip/_internal/utils/__pycache__/misc.cpython-37.pyc,, -pip/_internal/utils/__pycache__/models.cpython-37.pyc,, -pip/_internal/utils/__pycache__/outdated.cpython-37.pyc,, -pip/_internal/utils/__pycache__/packaging.cpython-37.pyc,, -pip/_internal/utils/__pycache__/setuptools_build.cpython-37.pyc,, -pip/_internal/utils/__pycache__/temp_dir.cpython-37.pyc,, -pip/_internal/utils/__pycache__/typing.cpython-37.pyc,, -pip/_internal/utils/__pycache__/ui.cpython-37.pyc,, -pip/_internal/utils/appdirs.py,sha256=d_iHU6K5MlL2Dq82QVtZgFXpyfrrDuDwuCmiI5H9tQ0,9435 -pip/_internal/utils/compat.py,sha256=AEVxz_VJCAVl2HoW0s3H07QXPG_xj_HfMGVws-mB9n8,8565 -pip/_internal/utils/deprecation.py,sha256=MF43y-bB3N2foClmZUdfBtUsfAOz8Bdck1EzRy5RVe4,3044 -pip/_internal/utils/encoding.py,sha256=jsXgq7MlYmX_fB9yqzC54H2SpPfQbzYfMXrx8PT15R4,1225 -pip/_internal/utils/filesystem.py,sha256=ojaIDvOFOtkpKme5se6X2N8ARmQxu8cxvaaI-NFqVtk,990 
-pip/_internal/utils/glibc.py,sha256=lxM6vJc-nUhUX3Dc1UOFlNBdjCylo-9Ta6c536uyvSA,3296 -pip/_internal/utils/hashes.py,sha256=FMYKr_y6NAalGcjOkN5dgM91vVhm3J-hCAc70SCQPO8,3569 -pip/_internal/utils/logging.py,sha256=VjAGhQKvmuN3tUplwamHGVMQfZoBefGI7GtvlQDLW2g,9719 -pip/_internal/utils/misc.py,sha256=-4KcZiJ8ErnLzOZDYm6bCj-KwB-MbxJZbnTDxqT3eF4,33547 -pip/_internal/utils/models.py,sha256=DQYZSRhjvSdDTAaJLLCpDtxAn1S_-v_8nlNjv4T2jwY,1042 -pip/_internal/utils/outdated.py,sha256=vnSpakXMU3lFiFxyX3stWzTyu2OnWGG8KA2rdOlcrBY,5974 -pip/_internal/utils/packaging.py,sha256=cDVTZVp3eR2MQX45DYlkzGyHP6zcF45ujm5oCAoA230,2785 -pip/_internal/utils/setuptools_build.py,sha256=0blfscmNJW_iZ5DcswJeDB_PbtTEjfK9RL1R1WEDW2E,278 -pip/_internal/utils/temp_dir.py,sha256=0Xq5ZlOd2OOeHwKM6hGy66gnMGAbyhio7DtjLHd7DFg,5339 -pip/_internal/utils/typing.py,sha256=ztYtZAcqjCYDwP-WlF6EiAAskAsZBMMXtuqvfgZIlgQ,1139 -pip/_internal/utils/ui.py,sha256=l4CEswlh8fWvISW4-RUtlXtw2hFvko08OZBYCWBTxSw,14256 -pip/_internal/vcs/__init__.py,sha256=O1rQ5XoDr4r38kKktwuCL3SNU2h0WGjB-lVHjPuY-pw,17278 -pip/_internal/vcs/__pycache__/__init__.cpython-37.pyc,, -pip/_internal/vcs/__pycache__/bazaar.cpython-37.pyc,, -pip/_internal/vcs/__pycache__/git.cpython-37.pyc,, -pip/_internal/vcs/__pycache__/mercurial.cpython-37.pyc,, -pip/_internal/vcs/__pycache__/subversion.cpython-37.pyc,, -pip/_internal/vcs/bazaar.py,sha256=AqsBYeXjl5Zw8IaoIVI8WStDE6_UqZ1RTfvVH5qZkG4,3670 -pip/_internal/vcs/git.py,sha256=zO-_jOa7baD_Y6y_zDFQVhYSvc1jgnDEA307y9LATAA,13407 -pip/_internal/vcs/mercurial.py,sha256=aAxoCGfLjHcxZtN7FSvFL28MwLOUL0dZzUssZ0IU__g,3447 -pip/_internal/vcs/subversion.py,sha256=hxFLX0Ncdth7dY7excIdFo6UGQrjuZ6KIeIL3jqr-3o,7081 -pip/_internal/wheel.py,sha256=110d-8C4sg_RmZHw-bVAPvHiAiF9TAhZJXo9tvN1PIk,41001 -pip/_vendor/__init__.py,sha256=vsMCQHIwFuzqN63uGhBNE0zimx6rlZl3SC-m7YHmjG0,4779 -pip/_vendor/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/__pycache__/appdirs.cpython-37.pyc,, -pip/_vendor/__pycache__/distro.cpython-37.pyc,, 
-pip/_vendor/__pycache__/ipaddress.cpython-37.pyc,, -pip/_vendor/__pycache__/pyparsing.cpython-37.pyc,, -pip/_vendor/__pycache__/retrying.cpython-37.pyc,, -pip/_vendor/__pycache__/six.cpython-37.pyc,, -pip/_vendor/appdirs.py,sha256=BENKsvcA08IpccD9345-rMrg3aXWFA1q6BFEglnHg6I,24547 -pip/_vendor/cachecontrol/__init__.py,sha256=6cRPchVqkAkeUtYTSW8qCetjSqJo-GxP-n4VMVDbvmc,302 -pip/_vendor/cachecontrol/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-37.pyc,, -pip/_vendor/cachecontrol/__pycache__/adapter.cpython-37.pyc,, -pip/_vendor/cachecontrol/__pycache__/cache.cpython-37.pyc,, -pip/_vendor/cachecontrol/__pycache__/compat.cpython-37.pyc,, -pip/_vendor/cachecontrol/__pycache__/controller.cpython-37.pyc,, -pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-37.pyc,, -pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-37.pyc,, -pip/_vendor/cachecontrol/__pycache__/serialize.cpython-37.pyc,, -pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-37.pyc,, -pip/_vendor/cachecontrol/_cmd.py,sha256=URGE0KrA87QekCG3SGPatlSPT571dZTDjNa-ZXX3pDc,1295 -pip/_vendor/cachecontrol/adapter.py,sha256=eBGAtVNRZgtl_Kj5JV54miqL9YND-D0JZPahwY8kFtY,4863 -pip/_vendor/cachecontrol/cache.py,sha256=1fc4wJP8HYt1ycnJXeEw5pCpeBL2Cqxx6g9Fb0AYDWQ,805 -pip/_vendor/cachecontrol/caches/__init__.py,sha256=-gHNKYvaeD0kOk5M74eOrsSgIKUtC6i6GfbmugGweEo,86 -pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-37.pyc,, -pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-37.pyc,, -pip/_vendor/cachecontrol/caches/file_cache.py,sha256=8vrSzzGcdfEfICago1uSFbkumNJMGLbCdEkXsmUIExw,4177 -pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=HxelMpNCo-dYr2fiJDwM3hhhRmxUYtB5tXm1GpAAT4Y,856 -pip/_vendor/cachecontrol/compat.py,sha256=kHNvMRdt6s_Xwqq_9qJmr9ou3wYMOMUMxPPcwNxT8Mc,695 -pip/_vendor/cachecontrol/controller.py,sha256=U7g-YwizQ2O5NRgK_MZreF1ntM4E49C3PuF3od-Vwz4,13698 
-pip/_vendor/cachecontrol/filewrapper.py,sha256=vACKO8Llzu_ZWyjV1Fxn1MA4TGU60N5N3GSrAFdAY2Q,2533 -pip/_vendor/cachecontrol/heuristics.py,sha256=BFGHJ3yQcxvZizfo90LLZ04T_Z5XSCXvFotrp7Us0sc,4070 -pip/_vendor/cachecontrol/serialize.py,sha256=GebE34fgToyWwAsRPguh8hEPN6CqoG-5hRMXRsjVABQ,6954 -pip/_vendor/cachecontrol/wrapper.py,sha256=sfr9YHWx-5TwNz1H5rT6QOo8ggII6v3vbEDjQFwR6wc,671 -pip/_vendor/certifi/__init__.py,sha256=timLpLv3BNcGLLEz2s6gyA34hOhMb4AIPBz0zxOxna8,52 -pip/_vendor/certifi/__main__.py,sha256=NaCn6WtWME-zzVWQ2j4zFyl8cY4knDa9CwtHNIeFPhM,53 -pip/_vendor/certifi/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/certifi/__pycache__/__main__.cpython-37.pyc,, -pip/_vendor/certifi/__pycache__/core.cpython-37.pyc,, -pip/_vendor/certifi/cacert.pem,sha256=zGy4Y1gu9Zy-6CGvg3apdC7kXMm3f1ELolJwNDnBRv0,275834 -pip/_vendor/certifi/core.py,sha256=K_VfM6GwSemTFisUgFWyk__w1m9pCGFRF5zTzO5bGv0,288 -pip/_vendor/chardet/__init__.py,sha256=YsP5wQlsHJ2auF1RZJfypiSrCA7_bQiRm3ES_NI76-Y,1559 -pip/_vendor/chardet/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/big5freq.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/big5prober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/chardistribution.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/charsetprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/compat.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/cp949prober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/enums.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/escprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/escsm.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/eucjpprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/euckrfreq.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/euckrprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/euctwfreq.cpython-37.pyc,, 
-pip/_vendor/chardet/__pycache__/euctwprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/gb2312freq.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/gb2312prober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/hebrewprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/jisfreq.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/jpcntx.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/langcyrillicmodel.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/langthaimodel.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/latin1prober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/mbcssm.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/sjisprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/universaldetector.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/utf8prober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/version.cpython-37.pyc,, -pip/_vendor/chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254 -pip/_vendor/chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757 -pip/_vendor/chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411 -pip/_vendor/chardet/charsetgroupprober.py,sha256=6bDu8YIiRuScX4ca9Igb0U69TA2PGXXDej6Cc4_9kO4,3787 -pip/_vendor/chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110 
-pip/_vendor/chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 -pip/_vendor/chardet/cli/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-37.pyc,, -pip/_vendor/chardet/cli/chardetect.py,sha256=DI8dlV3FBD0c0XA_y3sQ78z754DUv1J8n34RtDjOXNw,2774 -pip/_vendor/chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590 -pip/_vendor/chardet/compat.py,sha256=PKTzHkSbtbHDqS9PyujMbX74q1a8mMpeQTDVsQhZMRw,1134 -pip/_vendor/chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855 -pip/_vendor/chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661 -pip/_vendor/chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950 -pip/_vendor/chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510 -pip/_vendor/chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749 -pip/_vendor/chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546 -pip/_vendor/chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748 -pip/_vendor/chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621 -pip/_vendor/chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747 -pip/_vendor/chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715 -pip/_vendor/chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754 -pip/_vendor/chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838 -pip/_vendor/chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777 -pip/_vendor/chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643 -pip/_vendor/chardet/langbulgarianmodel.py,sha256=1HqQS9Pbtnj1xQgxitJMvw8X6kKr5OockNCZWfEQrPE,12839 -pip/_vendor/chardet/langcyrillicmodel.py,sha256=LODajvsetH87yYDDQKA2CULXUH87tI223dhfjh9Zx9c,17948 
-pip/_vendor/chardet/langgreekmodel.py,sha256=8YAW7bU8YwSJap0kIJSbPMw1BEqzGjWzqcqf0WgUKAA,12688 -pip/_vendor/chardet/langhebrewmodel.py,sha256=JSnqmE5E62tDLTPTvLpQsg5gOMO4PbdWRvV7Avkc0HA,11345 -pip/_vendor/chardet/langhungarianmodel.py,sha256=RhapYSG5l0ZaO-VV4Fan5sW0WRGQqhwBM61yx3yxyOA,12592 -pip/_vendor/chardet/langthaimodel.py,sha256=8l0173Gu_W6G8mxmQOTEF4ls2YdE7FxWf3QkSxEGXJQ,11290 -pip/_vendor/chardet/langturkishmodel.py,sha256=W22eRNJsqI6uWAfwXSKVWWnCerYqrI8dZQTm_M0lRFk,11102 -pip/_vendor/chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370 -pip/_vendor/chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413 -pip/_vendor/chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012 -pip/_vendor/chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481 -pip/_vendor/chardet/sbcharsetprober.py,sha256=LDSpCldDCFlYwUkGkwD2oFxLlPWIWXT09akH_2PiY74,5657 -pip/_vendor/chardet/sbcsgroupprober.py,sha256=1IprcCB_k1qfmnxGC6MBbxELlKqD3scW6S8YIwdeyXA,3546 -pip/_vendor/chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774 -pip/_vendor/chardet/universaldetector.py,sha256=qL0174lSZE442eB21nnktT9_VcAye07laFWUeUrjttY,12485 -pip/_vendor/chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766 -pip/_vendor/chardet/version.py,sha256=sp3B08mrDXB-pf3K9fqJ_zeDHOCLC8RrngQyDFap_7g,242 -pip/_vendor/colorama/__init__.py,sha256=lJdY6COz9uM_pXwuk9oLr0fp8H8q2RrUqN16GKabvq4,239 -pip/_vendor/colorama/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/colorama/__pycache__/ansi.cpython-37.pyc,, -pip/_vendor/colorama/__pycache__/ansitowin32.cpython-37.pyc,, -pip/_vendor/colorama/__pycache__/initialise.cpython-37.pyc,, -pip/_vendor/colorama/__pycache__/win32.cpython-37.pyc,, -pip/_vendor/colorama/__pycache__/winterm.cpython-37.pyc,, -pip/_vendor/colorama/ansi.py,sha256=Fi0un-QLqRm-v7o_nKiOqyC8PapBJK7DLV_q9LKtTO0,2524 
-pip/_vendor/colorama/ansitowin32.py,sha256=u8QaqdqS_xYSfNkPM1eRJLHz6JMWPodaJaP0mxgHCDc,10462 -pip/_vendor/colorama/initialise.py,sha256=PprovDNxMTrvoNHFcL2NZjpH2XzDc8BLxLxiErfUl4k,1915 -pip/_vendor/colorama/win32.py,sha256=bJ8Il9jwaBN5BJ8bmN6FoYZ1QYuMKv2j8fGrXh7TJjw,5404 -pip/_vendor/colorama/winterm.py,sha256=2y_2b7Zsv34feAsP67mLOVc-Bgq51mdYGo571VprlrM,6438 -pip/_vendor/distlib/__init__.py,sha256=7uthK6m96pTekk8hjlT-MybcwYmmxwP8gEOxXVg1f2s,581 -pip/_vendor/distlib/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/compat.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/database.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/index.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/locators.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/manifest.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/markers.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/metadata.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/resources.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/scripts.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/util.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/version.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/wheel.cpython-37.pyc,, -pip/_vendor/distlib/_backport/__init__.py,sha256=bqS_dTOH6uW9iGgd0uzfpPjo6vZ4xpPZ7kyfZJ2vNaw,274 -pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/distlib/_backport/__pycache__/misc.cpython-37.pyc,, -pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-37.pyc,, -pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-37.pyc,, -pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-37.pyc,, -pip/_vendor/distlib/_backport/misc.py,sha256=KWecINdbFNOxSOP1fGF680CJnaC6S4fBRgEtaYTw0ig,971 -pip/_vendor/distlib/_backport/shutil.py,sha256=VW1t3uYqUjWZH7jV-6QiimLhnldoV5uIpH4EuiT1jfw,25647 -pip/_vendor/distlib/_backport/sysconfig.cfg,sha256=swZKxq9RY5e9r3PXCrlvQPMsvOdiWZBTHLEbqS8LJLU,2617 
-pip/_vendor/distlib/_backport/sysconfig.py,sha256=JdJ9ztRy4Hc-b5-VS74x3nUtdEIVr_OBvMsIb8O2sjc,26964 -pip/_vendor/distlib/_backport/tarfile.py,sha256=Ihp7rXRcjbIKw8COm9wSePV9ARGXbSF9gGXAMn2Q-KU,92628 -pip/_vendor/distlib/compat.py,sha256=xdNZmqFN5HwF30HjRn5M415pcC2kgXRBXn767xS8v-M,41404 -pip/_vendor/distlib/database.py,sha256=-KJH63AJ7hqjLtGCwOTrionhKr2Vsytdwkjyo8UdEco,51029 -pip/_vendor/distlib/index.py,sha256=Dd1kIV06XIdynNpKxHMMRRIKsXuoUsG7QIzntfVtZCI,21073 -pip/_vendor/distlib/locators.py,sha256=S9G2IsZp0RnMMbXGrT-gu7892pNpy1XMlUEuUHX3OI8,51828 -pip/_vendor/distlib/manifest.py,sha256=nQEhYmgoreaBZzyFzwYsXxJARu3fo4EkunU163U16iE,14811 -pip/_vendor/distlib/markers.py,sha256=6Ac3cCfFBERexiESWIOXmg-apIP8l2esafNSX3KMy-8,4387 -pip/_vendor/distlib/metadata.py,sha256=BNCnpRfFVslyZcosr4vnE_YbkRb3TNxXtk7TrDszJdc,40172 -pip/_vendor/distlib/resources.py,sha256=2FGv0ZHF14KXjLIlL0R991lyQQGcewOS4mJ-5n-JVnc,10766 -pip/_vendor/distlib/scripts.py,sha256=NYqRJ2uuEuJwr_NNLzWH0m_s_YsobDFQb6HqxuQ2Sew,16638 -pip/_vendor/distlib/t32.exe,sha256=ftub1bsSPUCOnBn-eCtcarKTk0N0CBEP53BumkIxWJE,92672 -pip/_vendor/distlib/t64.exe,sha256=iChOG627LWTHY8-jzSwlo9SYU5a-0JHwQu4AqDz8I68,102400 -pip/_vendor/distlib/util.py,sha256=gwKL5geJKmtR4GeIUnoMAWjsPPG3tVP_mFxw_Sx-isc,59681 -pip/_vendor/distlib/version.py,sha256=_n7F6juvQGAcn769E_SHa7fOcf5ERlEVymJ_EjPRwGw,23391 -pip/_vendor/distlib/w32.exe,sha256=NPYPpt7PIjVqABEu1CzabbDyHHkJpuw-_qZq_48H0j0,89088 -pip/_vendor/distlib/w64.exe,sha256=Yb-qr1OQEzL8KRGTk-XHUZDwMSljfQeZnVoTk-K4e7E,99328 -pip/_vendor/distlib/wheel.py,sha256=gV53KDG7BgbxsdeKjnATbP47gTEJRNylcIeE1TFin1o,39880 -pip/_vendor/distro.py,sha256=dOMrjIXv-3GmEbtP-NJc057Sv19P7ZAdke-v0TBeNio,42455 -pip/_vendor/html5lib/__init__.py,sha256=Ztrn7UvF-wIFAgRBBa0ML-Gu5AffH3BPX_INJx4SaBI,1162 -pip/_vendor/html5lib/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-37.pyc,, -pip/_vendor/html5lib/__pycache__/_inputstream.cpython-37.pyc,, 
-pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-37.pyc,, -pip/_vendor/html5lib/__pycache__/_utils.cpython-37.pyc,, -pip/_vendor/html5lib/__pycache__/constants.cpython-37.pyc,, -pip/_vendor/html5lib/__pycache__/html5parser.cpython-37.pyc,, -pip/_vendor/html5lib/__pycache__/serializer.cpython-37.pyc,, -pip/_vendor/html5lib/_ihatexml.py,sha256=3LBtJMlzgwM8vpQiU1TvGmEEmNH72sV0yD8yS53y07A,16705 -pip/_vendor/html5lib/_inputstream.py,sha256=bPUWcAfJScK4xkjQQaG_HsI2BvEVbFvI0AsodDYPQj0,32552 -pip/_vendor/html5lib/_tokenizer.py,sha256=YAaOEBD6qc5ISq9Xt9Nif1OFgcybTTfMdwqBkZhpAq4,76580 -pip/_vendor/html5lib/_trie/__init__.py,sha256=8VR1bcgD2OpeS2XExpu5yBhP_Q1K-lwKbBKICBPf1kU,289 -pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-37.pyc,, -pip/_vendor/html5lib/_trie/__pycache__/datrie.cpython-37.pyc,, -pip/_vendor/html5lib/_trie/__pycache__/py.cpython-37.pyc,, -pip/_vendor/html5lib/_trie/_base.py,sha256=uJHVhzif9S0MJXgy9F98iEev5evi_rgUk5BmEbUSp8c,930 -pip/_vendor/html5lib/_trie/datrie.py,sha256=EQpqSfkZRuTbE-DuhW7xMdVDxdZNZ0CfmnYfHA_3zxM,1178 -pip/_vendor/html5lib/_trie/py.py,sha256=wXmQLrZRf4MyWNyg0m3h81m9InhLR7GJ002mIIZh-8o,1775 -pip/_vendor/html5lib/_utils.py,sha256=ismpASeqa2jqEPQjHUj8vReAf7yIoKnvLN5fuOw6nv0,4015 -pip/_vendor/html5lib/constants.py,sha256=4lmZWLtEPRLnl8NzftOoYTJdo6jpeMtP6dqQC0g_bWQ,83518 -pip/_vendor/html5lib/filters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-37.pyc,, -pip/_vendor/html5lib/filters/__pycache__/base.cpython-37.pyc,, -pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-37.pyc,, -pip/_vendor/html5lib/filters/__pycache__/lint.cpython-37.pyc,, -pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-37.pyc,, -pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-37.pyc,, 
-pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-37.pyc,, -pip/_vendor/html5lib/filters/alphabeticalattributes.py,sha256=lViZc2JMCclXi_5gduvmdzrRxtO5Xo9ONnbHBVCsykU,919 -pip/_vendor/html5lib/filters/base.py,sha256=z-IU9ZAYjpsVsqmVt7kuWC63jR11hDMr6CVrvuao8W0,286 -pip/_vendor/html5lib/filters/inject_meta_charset.py,sha256=egDXUEHXmAG9504xz0K6ALDgYkvUrC2q15YUVeNlVQg,2945 -pip/_vendor/html5lib/filters/lint.py,sha256=jk6q56xY0ojiYfvpdP-OZSm9eTqcAdRqhCoPItemPYA,3643 -pip/_vendor/html5lib/filters/optionaltags.py,sha256=8lWT75J0aBOHmPgfmqTHSfPpPMp01T84NKu0CRedxcE,10588 -pip/_vendor/html5lib/filters/sanitizer.py,sha256=4ON02KNjuqda1lCw5_JCUZxb0BzWR5M7ON84dtJ7dm0,26248 -pip/_vendor/html5lib/filters/whitespace.py,sha256=8eWqZxd4UC4zlFGW6iyY6f-2uuT8pOCSALc3IZt7_t4,1214 -pip/_vendor/html5lib/html5parser.py,sha256=g5g2ezkusHxhi7b23vK_-d6K6BfIJRbqIQmvQ9z4EgI,118963 -pip/_vendor/html5lib/serializer.py,sha256=yfcfBHse2wDs6ojxn-kieJjLT5s1ipilQJ0gL3-rJis,15758 -pip/_vendor/html5lib/treeadapters/__init__.py,sha256=A0rY5gXIe4bJOiSGRO_j_tFhngRBO8QZPzPtPw5dFzo,679 -pip/_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/html5lib/treeadapters/__pycache__/genshi.cpython-37.pyc,, -pip/_vendor/html5lib/treeadapters/__pycache__/sax.cpython-37.pyc,, -pip/_vendor/html5lib/treeadapters/genshi.py,sha256=CH27pAsDKmu4ZGkAUrwty7u0KauGLCZRLPMzaO3M5vo,1715 -pip/_vendor/html5lib/treeadapters/sax.py,sha256=BKS8woQTnKiqeffHsxChUqL4q2ZR_wb5fc9MJ3zQC8s,1776 -pip/_vendor/html5lib/treebuilders/__init__.py,sha256=AysSJyvPfikCMMsTVvaxwkgDieELD5dfR8FJIAuq7hY,3592 -pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/html5lib/treebuilders/__pycache__/base.cpython-37.pyc,, -pip/_vendor/html5lib/treebuilders/__pycache__/dom.cpython-37.pyc,, -pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-37.pyc,, -pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-37.pyc,, 
-pip/_vendor/html5lib/treebuilders/base.py,sha256=wQGp5yy22TNG8tJ6aREe4UUeTR7A99dEz0BXVaedWb4,14579 -pip/_vendor/html5lib/treebuilders/dom.py,sha256=SY3MsijXyzdNPc8aK5IQsupBoM8J67y56DgNtGvsb9g,8835 -pip/_vendor/html5lib/treebuilders/etree.py,sha256=aqIBOGj_dFYqBURIcTegGNBhAIJOw5iFDHb4jrkYH-8,12764 -pip/_vendor/html5lib/treebuilders/etree_lxml.py,sha256=9V0dXxbJYYq-Skgb5-_OL2NkVYpjioEb4CHajo0e9yI,14122 -pip/_vendor/html5lib/treewalkers/__init__.py,sha256=yhXxHpjlSqfQyUag3v8-vWjMPriFBU8YRAPNpDgBTn8,5714 -pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/html5lib/treewalkers/__pycache__/base.cpython-37.pyc,, -pip/_vendor/html5lib/treewalkers/__pycache__/dom.cpython-37.pyc,, -pip/_vendor/html5lib/treewalkers/__pycache__/etree.cpython-37.pyc,, -pip/_vendor/html5lib/treewalkers/__pycache__/etree_lxml.cpython-37.pyc,, -pip/_vendor/html5lib/treewalkers/__pycache__/genshi.cpython-37.pyc,, -pip/_vendor/html5lib/treewalkers/base.py,sha256=ouiOsuSzvI0KgzdWP8PlxIaSNs9falhbiinAEc_UIJY,7476 -pip/_vendor/html5lib/treewalkers/dom.py,sha256=EHyFR8D8lYNnyDU9lx_IKigVJRyecUGua0mOi7HBukc,1413 -pip/_vendor/html5lib/treewalkers/etree.py,sha256=sz1o6mmE93NQ53qJFDO7HKyDtuwgK-Ay3qSFZPC6u00,4550 -pip/_vendor/html5lib/treewalkers/etree_lxml.py,sha256=sY6wfRshWTllu6n48TPWpKsQRPp-0CQrT0hj_AdzHSU,6309 -pip/_vendor/html5lib/treewalkers/genshi.py,sha256=4D2PECZ5n3ZN3qu3jMl9yY7B81jnQApBQSVlfaIuYbA,2309 -pip/_vendor/idna/__init__.py,sha256=9Nt7xpyet3DmOrPUGooDdAwmHZZu1qUAy2EaJ93kGiQ,58 -pip/_vendor/idna/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/idna/__pycache__/codec.cpython-37.pyc,, -pip/_vendor/idna/__pycache__/compat.cpython-37.pyc,, -pip/_vendor/idna/__pycache__/core.cpython-37.pyc,, -pip/_vendor/idna/__pycache__/idnadata.cpython-37.pyc,, -pip/_vendor/idna/__pycache__/intranges.cpython-37.pyc,, -pip/_vendor/idna/__pycache__/package_data.cpython-37.pyc,, -pip/_vendor/idna/__pycache__/uts46data.cpython-37.pyc,, 
-pip/_vendor/idna/codec.py,sha256=lvYb7yu7PhAqFaAIAdWcwgaWI2UmgseUua-1c0AsG0A,3299 -pip/_vendor/idna/compat.py,sha256=R-h29D-6mrnJzbXxymrWUW7iZUvy-26TQwZ0ij57i4U,232 -pip/_vendor/idna/core.py,sha256=JDCZZ_PLESqIgEbU8mPyoEufWwoOiIqygA17-QZIe3s,11733 -pip/_vendor/idna/idnadata.py,sha256=HXaPFw6_YAJ0qppACPu0YLAULtRs3QovRM_CCZHGdY0,40899 -pip/_vendor/idna/intranges.py,sha256=TY1lpxZIQWEP6tNqjZkFA5hgoMWOj1OBmnUG8ihT87E,1749 -pip/_vendor/idna/package_data.py,sha256=kIzeKKXEouXLR4srqwf9Q3zv-NffKSOz5aSDOJARPB0,21 -pip/_vendor/idna/uts46data.py,sha256=oLyNZ1pBaiBlj9zFzLFRd_P7J8MkRcgDisjExZR_4MY,198292 -pip/_vendor/ipaddress.py,sha256=2OgbkeAD2rLkcXqbcvof3J5R7lRwjNLoBySyTkBtKnc,79852 -pip/_vendor/lockfile/__init__.py,sha256=Tqpz90DwKYfhPsfzVOJl84TL87pdFE5ePNHdXAxs4Tk,9371 -pip/_vendor/lockfile/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/lockfile/__pycache__/linklockfile.cpython-37.pyc,, -pip/_vendor/lockfile/__pycache__/mkdirlockfile.cpython-37.pyc,, -pip/_vendor/lockfile/__pycache__/pidlockfile.cpython-37.pyc,, -pip/_vendor/lockfile/__pycache__/sqlitelockfile.cpython-37.pyc,, -pip/_vendor/lockfile/__pycache__/symlinklockfile.cpython-37.pyc,, -pip/_vendor/lockfile/linklockfile.py,sha256=C7OH3H4GdK68u4FQgp8fkP2kO4fyUTSyj3X6blgfobc,2652 -pip/_vendor/lockfile/mkdirlockfile.py,sha256=e3qgIL-etZMLsS-3ft19iW_8IQ360HNkGOqE3yBKsUw,3096 -pip/_vendor/lockfile/pidlockfile.py,sha256=ukH9uk6NFuxyVmG5QiWw4iKq3fT7MjqUguX95avYPIY,6090 -pip/_vendor/lockfile/sqlitelockfile.py,sha256=o2TMkMRY0iwn-iL1XMRRIFStMUkS4i3ajceeYNntKFg,5506 -pip/_vendor/lockfile/symlinklockfile.py,sha256=ABwXXmvTHvCl5viPblShL3PG-gGsLiT1roAMfDRwhi8,2616 -pip/_vendor/msgpack/__init__.py,sha256=y0bk2YbzK6J2e0J_dyreN6nD7yM2IezT6m_tU2h-Mdg,1677 -pip/_vendor/msgpack/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/msgpack/__pycache__/_version.cpython-37.pyc,, -pip/_vendor/msgpack/__pycache__/exceptions.cpython-37.pyc,, -pip/_vendor/msgpack/__pycache__/fallback.cpython-37.pyc,, 
-pip/_vendor/msgpack/_version.py,sha256=dN7wVIjbyuQIJ35B2o6gymQNDLPlj_7-uTfgCv7KErM,20 -pip/_vendor/msgpack/exceptions.py,sha256=lPkAi_u12NlFajDz4FELSHEdfU8hrR3zeTvKX8aQuz4,1056 -pip/_vendor/msgpack/fallback.py,sha256=h0ll8xnq12mI9PuQ9Qd_Ihtt08Sp8L0JqhG9KY8Vyjk,36411 -pip/_vendor/packaging/__about__.py,sha256=Wg0-hNgTU2_lBZcGBh5pm1R9yroQ3rv-X0rig8KjA6o,744 -pip/_vendor/packaging/__init__.py,sha256=6enbp5XgRfjBjsI9-bn00HjHf5TH21PDMOKkJW8xw-w,562 -pip/_vendor/packaging/__pycache__/__about__.cpython-37.pyc,, -pip/_vendor/packaging/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/packaging/__pycache__/_compat.cpython-37.pyc,, -pip/_vendor/packaging/__pycache__/_structures.cpython-37.pyc,, -pip/_vendor/packaging/__pycache__/markers.cpython-37.pyc,, -pip/_vendor/packaging/__pycache__/requirements.cpython-37.pyc,, -pip/_vendor/packaging/__pycache__/specifiers.cpython-37.pyc,, -pip/_vendor/packaging/__pycache__/utils.cpython-37.pyc,, -pip/_vendor/packaging/__pycache__/version.cpython-37.pyc,, -pip/_vendor/packaging/_compat.py,sha256=Ugdm-qcneSchW25JrtMIKgUxfEEBcCAz6WrEeXeqz9o,865 -pip/_vendor/packaging/_structures.py,sha256=pVd90XcXRGwpZRB_qdFuVEibhCHpX_bL5zYr9-N0mc8,1416 -pip/_vendor/packaging/markers.py,sha256=-QjvJkhSJBxBogO9J_EpPQudHaaLV3rgVYsBDqn-ZLc,8234 -pip/_vendor/packaging/requirements.py,sha256=grcnFU8x7KD230JaFLXtWl3VClLuOmsOy4c-m55tOWs,4700 -pip/_vendor/packaging/specifiers.py,sha256=0ZzQpcUnvrQ6LjR-mQRLzMr8G6hdRv-mY0VSf_amFtI,27778 -pip/_vendor/packaging/utils.py,sha256=VaTC0Ei7zO2xl9ARiWmz2YFLFt89PuuhLbAlXMyAGms,1520 -pip/_vendor/packaging/version.py,sha256=Npdwnb8OHedj_2L86yiUqscujb7w_i5gmSK1PhOAFzg,11978 -pip/_vendor/pep517/__init__.py,sha256=nOY747zTld3oTdEetBG6DWxEcZXTeOQk0aHvbR-sa5w,84 -pip/_vendor/pep517/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/pep517/__pycache__/_in_process.cpython-37.pyc,, -pip/_vendor/pep517/__pycache__/build.cpython-37.pyc,, -pip/_vendor/pep517/__pycache__/check.cpython-37.pyc,, 
-pip/_vendor/pep517/__pycache__/colorlog.cpython-37.pyc,, -pip/_vendor/pep517/__pycache__/compat.cpython-37.pyc,, -pip/_vendor/pep517/__pycache__/envbuild.cpython-37.pyc,, -pip/_vendor/pep517/__pycache__/wrappers.cpython-37.pyc,, -pip/_vendor/pep517/_in_process.py,sha256=xMY2kLutkjCti5WqTmKOLRRL3o8Ds_k-fObFyuMv1tk,6061 -pip/_vendor/pep517/build.py,sha256=-n8PT-ugS1TdqoTUY1vatDQjrLtx48K_-Quu2MuQBiA,2699 -pip/_vendor/pep517/check.py,sha256=Lu7nMdYu1JVV58fE3hv-d_avTy5h0yO9LsIzAt82Clk,5885 -pip/_vendor/pep517/colorlog.py,sha256=Tk9AuYm_cLF3BKTBoSTJt9bRryn0aFojIQOwbfVUTxQ,4098 -pip/_vendor/pep517/compat.py,sha256=4SFG4QN-cNj8ebSa0wV0HUtEEQWwmbok2a0uk1gYEOM,631 -pip/_vendor/pep517/envbuild.py,sha256=9-u4KffexPMEm52rTaIjEOxsCAd2DMByxzv5H566QLw,5763 -pip/_vendor/pep517/wrappers.py,sha256=9dZn-q7F5KyQKUJMie2uKwur2FG0CLXz_kLZzkJOhZc,5912 -pip/_vendor/pkg_resources/__init__.py,sha256=JGk92Be39-a8sQIltjZF-Dk9ZOIAR0lpCZ9rYrGHfVM,104648 -pip/_vendor/pkg_resources/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-37.pyc,, -pip/_vendor/pkg_resources/py31compat.py,sha256=CRk8fkiPRDLsbi5pZcKsHI__Pbmh_94L8mr9Qy9Ab2U,562 -pip/_vendor/progress/__init__.py,sha256=Hv3Y8Hr6RyM34NdZkrZQWMURjS2h5sONRHJSvZXWZgQ,3188 -pip/_vendor/progress/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/progress/__pycache__/bar.cpython-37.pyc,, -pip/_vendor/progress/__pycache__/counter.cpython-37.pyc,, -pip/_vendor/progress/__pycache__/helpers.cpython-37.pyc,, -pip/_vendor/progress/__pycache__/spinner.cpython-37.pyc,, -pip/_vendor/progress/bar.py,sha256=hlkDAEv9pRRiWqR5XL6vIAgMG4u_dBGEW_8klQhBRq0,2942 -pip/_vendor/progress/counter.py,sha256=XtBuZY4yYmr50E2A_fAzjWhm0IkwaVwxNsNVYDE7nsw,1528 -pip/_vendor/progress/helpers.py,sha256=6FsBLh_xUlKiVua-zZIutCjxth-IO8FtyUj6I2tx9fg,2952 -pip/_vendor/progress/spinner.py,sha256=m7bASI2GUbLFG-PbAefdHtrrWWlJLFhhSBbw70gp2TY,1439 -pip/_vendor/pyparsing.py,sha256=jh8A5pZOiogg5mR2riJEb2vlfKQ4grylOcYSmW2SU0s,243692 
-pip/_vendor/pytoml/__init__.py,sha256=W_SKx36Hsew-Fty36BOpreLm4uF4V_Tgkm_z9rIoOE8,127 -pip/_vendor/pytoml/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/pytoml/__pycache__/core.cpython-37.pyc,, -pip/_vendor/pytoml/__pycache__/parser.cpython-37.pyc,, -pip/_vendor/pytoml/__pycache__/test.cpython-37.pyc,, -pip/_vendor/pytoml/__pycache__/utils.cpython-37.pyc,, -pip/_vendor/pytoml/__pycache__/writer.cpython-37.pyc,, -pip/_vendor/pytoml/core.py,sha256=9CrLLTs1PdWjEwRnYzt_i4dhHcZvGxs_GsMlYAX3iY4,509 -pip/_vendor/pytoml/parser.py,sha256=2tDXkldqPQJhyadXzL2rGhVbjUyBNeXXhaEfncHl2iQ,10326 -pip/_vendor/pytoml/test.py,sha256=2nQs4aX3XQEaaQCx6x_OJTS2Hb0_IiTZRqNOeDmLCzo,1021 -pip/_vendor/pytoml/utils.py,sha256=JCLHx77Hu1R3F-bRgiROIiKyCzLwyebnp5P35cRJxWs,1665 -pip/_vendor/pytoml/writer.py,sha256=WbNNQg3sh_V-s3kt88LkNNbxEq6pPDdhRE-daJzArcI,3198 -pip/_vendor/requests/__init__.py,sha256=ZI8kbaEzLAxsqex3MmMPr-v24d1RfZbNAOY8fUxg2Xw,4074 -pip/_vendor/requests/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/__version__.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/_internal_utils.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/adapters.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/api.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/auth.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/certs.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/compat.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/cookies.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/exceptions.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/help.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/hooks.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/models.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/packages.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/sessions.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/status_codes.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/structures.cpython-37.pyc,, 
-pip/_vendor/requests/__pycache__/utils.cpython-37.pyc,, -pip/_vendor/requests/__version__.py,sha256=8KG3anaNCi-PEclPPOHJ_cv1udY_L1_njVr84gRZ9HM,436 -pip/_vendor/requests/_internal_utils.py,sha256=Zx3PnEUccyfsB-ie11nZVAW8qClJy0gx1qNME7rgT18,1096 -pip/_vendor/requests/adapters.py,sha256=e-bmKEApNVqFdylxuMJJfiaHdlmS_zhWhIMEzlHvGuc,21548 -pip/_vendor/requests/api.py,sha256=hWZgfD7OriCZFOnpeq0bv2pbXDl8YXfxDwAcU036qDs,6253 -pip/_vendor/requests/auth.py,sha256=QB2-cSUj1jrvWZfPXttsZpyAacQgtKLVk14vQW9TpSE,10206 -pip/_vendor/requests/certs.py,sha256=nXRVq9DtGmv_1AYbwjTu9UrgAcdJv05ZvkNeaoLOZxY,465 -pip/_vendor/requests/compat.py,sha256=FZX4Q_EMKiMnhZpZ3g_gOsT-j2ca9ij2gehDx1cwYeo,1941 -pip/_vendor/requests/cookies.py,sha256=Y-bKX6TvW3FnYlE6Au0SXtVVWcaNdFvuAwQxw-G0iTI,18430 -pip/_vendor/requests/exceptions.py,sha256=-mLam3TAx80V09EaH3H-ZxR61eAVuLRZ8zgBBSLjK44,3197 -pip/_vendor/requests/help.py,sha256=SJPVcoXeo7KfK4AxJN5eFVQCjr0im87tU2n7ubLsksU,3578 -pip/_vendor/requests/hooks.py,sha256=QReGyy0bRcr5rkwCuObNakbYsc7EkiKeBwG4qHekr2Q,757 -pip/_vendor/requests/models.py,sha256=6s-37iAqXVptq8z7U_LoH_pbIPrCQUm_Z8QuIGE29Q0,34275 -pip/_vendor/requests/packages.py,sha256=njJmVifY4aSctuW3PP5EFRCxjEwMRDO6J_feG2dKWsI,695 -pip/_vendor/requests/sessions.py,sha256=DjbCotDW6xSAaBsjbW-L8l4N0UcwmrxVNgSrZgIjGWM,29332 -pip/_vendor/requests/status_codes.py,sha256=XWlcpBjbCtq9sSqpH9_KKxgnLTf9Z__wCWolq21ySlg,4129 -pip/_vendor/requests/structures.py,sha256=zoP8qly2Jak5e89HwpqjN1z2diztI-_gaqts1raJJBc,2981 -pip/_vendor/requests/utils.py,sha256=LtPJ1db6mJff2TJSJWKi7rBpzjPS3mSOrjC9zRhoD3A,30049 -pip/_vendor/retrying.py,sha256=k3fflf5_Mm0XcIJYhB7Tj34bqCCPhUDkYbx1NvW2FPE,9972 -pip/_vendor/six.py,sha256=h9jch2pS86y4R36pKRS3LOYUCVFNIJMRwjZ4fJDtJ44,32452 -pip/_vendor/urllib3/__init__.py,sha256=EZviRQA_iuL_94EeJHY4JAArRXbRCkAzA0HH9iXZ15s,2722 -pip/_vendor/urllib3/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/urllib3/__pycache__/_collections.cpython-37.pyc,, 
-pip/_vendor/urllib3/__pycache__/connection.cpython-37.pyc,, -pip/_vendor/urllib3/__pycache__/connectionpool.cpython-37.pyc,, -pip/_vendor/urllib3/__pycache__/exceptions.cpython-37.pyc,, -pip/_vendor/urllib3/__pycache__/fields.cpython-37.pyc,, -pip/_vendor/urllib3/__pycache__/filepost.cpython-37.pyc,, -pip/_vendor/urllib3/__pycache__/poolmanager.cpython-37.pyc,, -pip/_vendor/urllib3/__pycache__/request.cpython-37.pyc,, -pip/_vendor/urllib3/__pycache__/response.cpython-37.pyc,, -pip/_vendor/urllib3/_collections.py,sha256=-CAKsDE-WdubAjlBSZLx7b0e7WKenaNGwWvGLDEF1TM,10746 -pip/_vendor/urllib3/connection.py,sha256=KLFvknLgllcMkgJ-zUsFjCzOt9P03fDoIpTPz_vqXCw,13839 -pip/_vendor/urllib3/connectionpool.py,sha256=rgc_3D0VsD5VDxr4KzzA8Plee0Rmerm5WKb71FcxWu8,35097 -pip/_vendor/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-37.pyc,, -pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-37.pyc,, -pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-37.pyc,, -pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-37.pyc,, -pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-37.pyc,, -pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-37.pyc,, -pip/_vendor/urllib3/contrib/_appengine_environ.py,sha256=lhYXvB5_oGKSeurX7za3XhcGyERvNjXRQ3eJp2GmQ3M,717 -pip/_vendor/urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-37.pyc,, -pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-37.pyc,, -pip/_vendor/urllib3/contrib/_securetransport/bindings.py,sha256=x2kLSh-ASZKsun0FxtraBuLVe3oHuth4YW6yZ5Vof-w,17560 
-pip/_vendor/urllib3/contrib/_securetransport/low_level.py,sha256=Umy5u-3Z957GirdapnicXVOpHaM4xdOZABJuJxfaeJA,12162 -pip/_vendor/urllib3/contrib/appengine.py,sha256=VvDpkc5gf9dTXNxXmyG1mPdON_3DrYG_eW4uOqN98oQ,10938 -pip/_vendor/urllib3/contrib/ntlmpool.py,sha256=5ZpMF7N9B6NEjVU-r-xjDOV_-hkNvsDoNc84J2yqauI,4459 -pip/_vendor/urllib3/contrib/pyopenssl.py,sha256=-kI_9y99Iwybv6Wy8IF8PugVl61BeMBEEqGwrDYNCuc,15823 -pip/_vendor/urllib3/contrib/securetransport.py,sha256=BqXSlChN9_hjCWgyN6JdcgvBUdc37QCCX4u3_8zE_9o,30309 -pip/_vendor/urllib3/contrib/socks.py,sha256=Iom0snbHkCuZbZ7Sle2Kueha1W0jYAJ0SyCOtePLaio,6391 -pip/_vendor/urllib3/exceptions.py,sha256=rFeIfBNKC8KJ61ux-MtJyJlEC9G9ggkmCeF751JwVR4,6604 -pip/_vendor/urllib3/fields.py,sha256=D_TE_SK15YatdbhWDMN0OE3X6UCJn1RTkANINCYOobE,5943 -pip/_vendor/urllib3/filepost.py,sha256=40CROlpRKVBpFUkD0R6wJf_PpvbcRQRFUu0OOQlFkKM,2436 -pip/_vendor/urllib3/packages/__init__.py,sha256=nlChrGzkjCkmhCX9HrF_qHPUgosfsPQkVIJxiiLhk9g,109 -pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/urllib3/packages/__pycache__/six.cpython-37.pyc,, -pip/_vendor/urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-37.pyc,, -pip/_vendor/urllib3/packages/backports/makefile.py,sha256=so2z9BiNM8kh38Ve5tomQP_mp2_ubEqzdlCpLZKzzCI,1456 -pip/_vendor/urllib3/packages/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098 -pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py,sha256=WBVbxQBojNAxfZwNavkox3BgJiMA9BJmm-_fwd0jD_o,688 -pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-37.pyc,, -pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py,sha256=E-9J-kAaUn76WMZ4PpzKUxM4C3yjY7mopOpbPIy3Dso,5700 
-pip/_vendor/urllib3/poolmanager.py,sha256=csE6Bh6L0FJ3iNOHk2z8KhMT8Eiq976b6pk8I6vrOC8,16853 -pip/_vendor/urllib3/request.py,sha256=OfelFYzPnxGlU3amEz9uBLjCBOriwgJh4QC_aW9SF3U,5991 -pip/_vendor/urllib3/response.py,sha256=ta1jp4B5PGBWzoAV1s48WLuHCRICQnK7F9m_kyK4Z8g,25609 -pip/_vendor/urllib3/util/__init__.py,sha256=6Ran4oAVIy40Cu_oEPWnNV9bwF5rXx6G1DUZ7oehjPY,1044 -pip/_vendor/urllib3/util/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/urllib3/util/__pycache__/connection.cpython-37.pyc,, -pip/_vendor/urllib3/util/__pycache__/queue.cpython-37.pyc,, -pip/_vendor/urllib3/util/__pycache__/request.cpython-37.pyc,, -pip/_vendor/urllib3/util/__pycache__/response.cpython-37.pyc,, -pip/_vendor/urllib3/util/__pycache__/retry.cpython-37.pyc,, -pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-37.pyc,, -pip/_vendor/urllib3/util/__pycache__/timeout.cpython-37.pyc,, -pip/_vendor/urllib3/util/__pycache__/url.cpython-37.pyc,, -pip/_vendor/urllib3/util/__pycache__/wait.cpython-37.pyc,, -pip/_vendor/urllib3/util/connection.py,sha256=-AyqcRTuNUHuo5ndtsU0Og_nMyCGATC-kYqOUdBHwIQ,4639 -pip/_vendor/urllib3/util/queue.py,sha256=myTX3JDHntglKQNBf3b6dasHH-uF-W59vzGSQiFdAfI,497 -pip/_vendor/urllib3/util/request.py,sha256=H5_lrHvtwl2U2BbT1UYN9HpruNc1gsNFlz2njQmhPrQ,3705 -pip/_vendor/urllib3/util/response.py,sha256=028PNXDZhwBtnm2uXvnAHi_l9_AAGrAMH2Igh2AbgWg,2586 -pip/_vendor/urllib3/util/retry.py,sha256=kFQTesNiwPp6ZeQo9VHeUO7b8qA-_l3BnErCAOEPo4Q,15105 -pip/_vendor/urllib3/util/ssl_.py,sha256=4qqBDM82bufhqqEd0b-99sObz95XmEVEXDVi5iAyCeE,13172 -pip/_vendor/urllib3/util/timeout.py,sha256=7lHNrgL5YH2cI1j-yZnzV_J8jBlRVdmFhQaNyM1_2b8,9757 -pip/_vendor/urllib3/util/url.py,sha256=qCY_HHUXvo05wAsEERALgExtlgxLnAHSQ7ce1b-g3SM,6487 -pip/_vendor/urllib3/util/wait.py,sha256=p4BZo_Ukp5JF0Dn6jro7cUfqIjnU6WFtuoA6poaV5Jk,5403 -pip/_vendor/webencodings/__init__.py,sha256=qOBJIuPy_4ByYH6W_bNgJF-qYQ2DoU-dKsDu5yRWCXg,10579 -pip/_vendor/webencodings/__pycache__/__init__.cpython-37.pyc,, 
-pip/_vendor/webencodings/__pycache__/labels.cpython-37.pyc,, -pip/_vendor/webencodings/__pycache__/mklabels.cpython-37.pyc,, -pip/_vendor/webencodings/__pycache__/tests.cpython-37.pyc,, -pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-37.pyc,, -pip/_vendor/webencodings/labels.py,sha256=4AO_KxTddqGtrL9ns7kAPjb0CcN6xsCIxbK37HY9r3E,8979 -pip/_vendor/webencodings/mklabels.py,sha256=GYIeywnpaLnP0GSic8LFWgd0UVvO_l1Nc6YoF-87R_4,1305 -pip/_vendor/webencodings/tests.py,sha256=OtGLyjhNY1fvkW1GvLJ_FV9ZoqC9Anyjr7q3kxTbzNs,6563 -pip/_vendor/webencodings/x_user_defined.py,sha256=yOqWSdmpytGfUgh_Z6JYgDNhoc-BAHyyeeT15Fr42tM,4307 diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-19.0.3.dist-info/entry_points.txt b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-19.0.3.dist-info/entry_points.txt deleted file mode 100644 index f5809cb4..00000000 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-19.0.3.dist-info/entry_points.txt +++ /dev/null @@ -1,5 +0,0 @@ -[console_scripts] -pip = pip._internal:main -pip3 = pip._internal:main -pip3.7 = pip._internal:main - diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-19.0.3.dist-info/INSTALLER b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-20.2.dist-info/INSTALLER similarity index 100% rename from Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-19.0.3.dist-info/INSTALLER rename to Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-20.2.dist-info/INSTALLER diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-19.0.3.dist-info/LICENSE.txt b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-20.2.dist-info/LICENSE.txt similarity index 100% rename from Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-19.0.3.dist-info/LICENSE.txt rename to Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-20.2.dist-info/LICENSE.txt diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-19.0.3.dist-info/METADATA 
b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-20.2.dist-info/METADATA similarity index 62% rename from Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-19.0.3.dist-info/METADATA rename to Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-20.2.dist-info/METADATA index a91529c0..e2266f5a 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-19.0.3.dist-info/METADATA +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-20.2.dist-info/METADATA @@ -1,11 +1,14 @@ Metadata-Version: 2.1 Name: pip -Version: 19.0.3 +Version: 20.2 Summary: The PyPA recommended tool for installing Python packages. Home-page: https://pip.pypa.io/ Author: The pip developers -Author-email: pypa-dev@groups.google.com +Author-email: distutils-sig@python.org License: MIT +Project-URL: Documentation, https://pip.pypa.io +Project-URL: Source, https://github.com/pypa/pip +Project-URL: Changelog, https://pip.pypa.io/en/stable/news/ Keywords: distutils easy_install egg setuptools wheel virtualenv Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable @@ -16,13 +19,13 @@ Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: Implementation :: PyPy -Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.* +Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.* pip - The Python Package Installer ================================== @@ -39,19 +42,26 @@ Please take a look at our documentation for how to install and use pip: * 
`Installation`_ * `Usage`_ + +We release updates regularly, with a new version every 3 months. Find more details in our documentation: + * `Release notes`_ +* `Release process`_ + +In 2020, we're working on improvements to the heart of pip. Please `learn more and take our survey`_ to help us do it right. -If you find bugs, need help, or want to talk to the developers please use our mailing lists or chat rooms: +If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms: * `Issue tracking`_ * `Discourse channel`_ * `User IRC`_ -If you want to get involved head over to GitHub to get the source code and feel free to jump on the developer mailing lists and chat rooms: +If you want to get involved head over to GitHub to get the source code, look at our development documentation and feel free to jump on the developer mailing lists and chat rooms: * `GitHub page`_ -* `Dev mailing list`_ -* `Dev IRC`_ +* `Development documentation`_ +* `Development mailing list`_ +* `Development IRC`_ Code of Conduct --------------- @@ -59,17 +69,20 @@ Code of Conduct Everyone interacting in the pip project's codebases, issue trackers, chat rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_. -.. _package installer: https://packaging.python.org/en/latest/current/ +.. _package installer: https://packaging.python.org/guides/tool-recommendations/ .. _Python Package Index: https://pypi.org .. _Installation: https://pip.pypa.io/en/stable/installing.html .. _Usage: https://pip.pypa.io/en/stable/ .. _Release notes: https://pip.pypa.io/en/stable/news.html +.. _Release process: https://pip.pypa.io/en/latest/development/release-process/ .. _GitHub page: https://github.com/pypa/pip +.. _Development documentation: https://pip.pypa.io/en/latest/development +.. _learn more and take our survey: https://pyfound.blogspot.com/2020/03/new-pip-resolver-to-roll-out-this-year.html .. _Issue tracking: https://github.com/pypa/pip/issues .. 
_Discourse channel: https://discuss.python.org/c/packaging -.. _Dev mailing list: https://groups.google.com/forum/#!forum/pypa-dev +.. _Development mailing list: https://mail.python.org/mailman3/lists/distutils-sig.python.org/ .. _User IRC: https://webchat.freenode.net/?channels=%23pypa -.. _Dev IRC: https://webchat.freenode.net/?channels=%23pypa-dev +.. _Development IRC: https://webchat.freenode.net/?channels=%23pypa-dev .. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-20.2.dist-info/RECORD b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-20.2.dist-info/RECORD new file mode 100644 index 00000000..0b63aca1 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-20.2.dist-info/RECORD @@ -0,0 +1,751 @@ +../../Scripts/pip.exe,sha256=VhAGG7jvv2q4NkDULQEruEtdZvX5Ed1qWNmafcz4e9s,97108 +../../Scripts/pip3.7.exe,sha256=VhAGG7jvv2q4NkDULQEruEtdZvX5Ed1qWNmafcz4e9s,97108 +../../Scripts/pip3.exe,sha256=VhAGG7jvv2q4NkDULQEruEtdZvX5Ed1qWNmafcz4e9s,97108 +pip-20.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pip-20.2.dist-info/LICENSE.txt,sha256=W6Ifuwlk-TatfRU2LR7W1JMcyMj5_y1NkRkOEJvnRDE,1090 +pip-20.2.dist-info/METADATA,sha256=MmWpr-dzJVIUdbKOi-_LpiuTlvQ8ObgQi2WsXhnNXik,3706 +pip-20.2.dist-info/RECORD,, +pip-20.2.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 +pip-20.2.dist-info/entry_points.txt,sha256=HtfDOwpUlr9s73jqLQ6wF9V0_0qvUXJwCBz7Vwx0Ue0,125 +pip-20.2.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pip/__init__.py,sha256=uX9-VTiqu9kizIhD0eO5ozQ0kqAMlxo92GmK6wt25Iw,453 +pip/__main__.py,sha256=bqCAM1cj1HwHCDx3WJa-LJxOBXimGxE8OjBqAvnhVg0,911 +pip/__pycache__/__init__.cpython-37.pyc,, +pip/__pycache__/__main__.cpython-37.pyc,, +pip/_internal/__init__.py,sha256=2si23JBW1erg19xIJ8CD6tfGknz0ijtXmzuXjGfGMGE,495 +pip/_internal/__pycache__/__init__.cpython-37.pyc,, 
+pip/_internal/__pycache__/build_env.cpython-37.pyc,, +pip/_internal/__pycache__/cache.cpython-37.pyc,, +pip/_internal/__pycache__/configuration.cpython-37.pyc,, +pip/_internal/__pycache__/exceptions.cpython-37.pyc,, +pip/_internal/__pycache__/locations.cpython-37.pyc,, +pip/_internal/__pycache__/main.cpython-37.pyc,, +pip/_internal/__pycache__/pyproject.cpython-37.pyc,, +pip/_internal/__pycache__/self_outdated_check.cpython-37.pyc,, +pip/_internal/__pycache__/wheel_builder.cpython-37.pyc,, +pip/_internal/build_env.py,sha256=9_UaQ2fpsBvpKAji27f7bPAi2v3mb0cBvDYcejwFKNM,8088 +pip/_internal/cache.py,sha256=pT17VVxgzZK32aqY5FRS8GyAI73LKzNMF8ZelQ7Ojm0,12249 +pip/_internal/cli/__init__.py,sha256=FkHBgpxxb-_gd6r1FjnNhfMOzAUYyXoXKJ6abijfcFU,132 +pip/_internal/cli/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/cli/__pycache__/autocompletion.cpython-37.pyc,, +pip/_internal/cli/__pycache__/base_command.cpython-37.pyc,, +pip/_internal/cli/__pycache__/cmdoptions.cpython-37.pyc,, +pip/_internal/cli/__pycache__/command_context.cpython-37.pyc,, +pip/_internal/cli/__pycache__/main.cpython-37.pyc,, +pip/_internal/cli/__pycache__/main_parser.cpython-37.pyc,, +pip/_internal/cli/__pycache__/parser.cpython-37.pyc,, +pip/_internal/cli/__pycache__/progress_bars.cpython-37.pyc,, +pip/_internal/cli/__pycache__/req_command.cpython-37.pyc,, +pip/_internal/cli/__pycache__/spinners.cpython-37.pyc,, +pip/_internal/cli/__pycache__/status_codes.cpython-37.pyc,, +pip/_internal/cli/autocompletion.py,sha256=ekGNtcDI0p7rFVc-7s4T9Tbss4Jgb7vsB649XJIblRg,6547 +pip/_internal/cli/base_command.py,sha256=BWTztM4b6h8hodDHDKjgJ82jaSeru2AILAJxi1d_IP8,8810 +pip/_internal/cli/cmdoptions.py,sha256=M_BtuqeyRpZAUUYytts3pguBCF2RaGukVpDPE0niroI,28782 +pip/_internal/cli/command_context.py,sha256=ygMVoTy2jpNilKT-6416gFSQpaBtrKRBbVbi2fy__EU,975 +pip/_internal/cli/main.py,sha256=Hxc9dZyW3xiDsYZX-_J2cGXT5DWNLNn_Y7o9oUme-Ec,2616 +pip/_internal/cli/main_parser.py,sha256=voAtjo4WVPIYeu7Fqabva9SXaB3BjG0gH93GBfe6jHQ,2843 
+pip/_internal/cli/parser.py,sha256=4FfwW8xB84CrkLs35ud90ZkhCcWyVkx17XD6j3XCW7c,9480 +pip/_internal/cli/progress_bars.py,sha256=J1zykt2LI4gbBeXorfYRmYV5FgXhcW4x3r6xE_a7Z7c,9121 +pip/_internal/cli/req_command.py,sha256=Eiz8TVzeqzG-40t7qLC1vO-vzjCRvX9C-qXMyfw9D1I,15132 +pip/_internal/cli/spinners.py,sha256=PS9s53LB5aDPelIn8FhKerK3bOdgeefFH5wSWJ2PCzI,5509 +pip/_internal/cli/status_codes.py,sha256=F6uDG6Gj7RNKQJUDnd87QKqI16Us-t-B0wPF_4QMpWc,156 +pip/_internal/commands/__init__.py,sha256=yoLAnmEXjoQgYfDuwsuWG3RzzD19oeHobGEhmpIYsB4,4100 +pip/_internal/commands/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/commands/__pycache__/cache.cpython-37.pyc,, +pip/_internal/commands/__pycache__/check.cpython-37.pyc,, +pip/_internal/commands/__pycache__/completion.cpython-37.pyc,, +pip/_internal/commands/__pycache__/configuration.cpython-37.pyc,, +pip/_internal/commands/__pycache__/debug.cpython-37.pyc,, +pip/_internal/commands/__pycache__/download.cpython-37.pyc,, +pip/_internal/commands/__pycache__/freeze.cpython-37.pyc,, +pip/_internal/commands/__pycache__/hash.cpython-37.pyc,, +pip/_internal/commands/__pycache__/help.cpython-37.pyc,, +pip/_internal/commands/__pycache__/install.cpython-37.pyc,, +pip/_internal/commands/__pycache__/list.cpython-37.pyc,, +pip/_internal/commands/__pycache__/search.cpython-37.pyc,, +pip/_internal/commands/__pycache__/show.cpython-37.pyc,, +pip/_internal/commands/__pycache__/uninstall.cpython-37.pyc,, +pip/_internal/commands/__pycache__/wheel.cpython-37.pyc,, +pip/_internal/commands/cache.py,sha256=U3rLjls0AMMO8PxnhXVwIp7Biyvns8-gBThKTH3tX7Y,5676 +pip/_internal/commands/check.py,sha256=fqRrz2uKPC8Qsx2rgLygAD2Rbr-qxp1Q55zUoyZzB9Q,1677 +pip/_internal/commands/completion.py,sha256=ObssM77quf61qvbuSE6XLwUBdm_WcWIvXFI-Hy1RBsI,3081 +pip/_internal/commands/configuration.py,sha256=IN2QBF653sRiRU7-pHTpnZ6_gyiXNKUQkLiLaNRLKNw,9344 +pip/_internal/commands/debug.py,sha256=otBZnpnostX2kmYyOl6g6CeCLmk6H00Tsj2CDsCtFXw,7314 
+pip/_internal/commands/download.py,sha256=EKFlj_ceGUEJj6yCDw7P6w7yUoB16IcNHhT2qnCFDNQ,4918 +pip/_internal/commands/freeze.py,sha256=vLBBP1d8wgEXrmlh06hbz_x_Q1mWHUdiWDa9NP2eKLE,3452 +pip/_internal/commands/hash.py,sha256=v2nYCiEsEI9nEam1p6GwdG8xyj5gFv-4WrqvNexKmeY,1843 +pip/_internal/commands/help.py,sha256=ryuMDt2tc7ic3NJYMjjoNRH5r6LrB2yQVZvehAm8bLs,1270 +pip/_internal/commands/install.py,sha256=h2L8vS6t2DbGAdttkdZmMucK2eJG2CYvcwhDa7AdKrQ,28683 +pip/_internal/commands/list.py,sha256=jXkHHvScGVlenAjlOndIoqLxwKXwDs2RUcQeQS8X_eg,11281 +pip/_internal/commands/search.py,sha256=VnOvN6KjlUDe9cQ0MNFlgu5M1Sg-W54JU1KW5DvnmrA,5716 +pip/_internal/commands/show.py,sha256=r69-G8HIepDKm4SeyeHj0Ez1P9xoihrpVUyXm6NmXYY,6996 +pip/_internal/commands/uninstall.py,sha256=Ys8hwFsg0kvvGtLGYG3ibL5BKvURhlSlCX50ZQ-hsHk,3311 +pip/_internal/commands/wheel.py,sha256=-HSISE5AV29I752Aqw4DdmulrGd8rB_ZTOdpbJ6T8iM,6419 +pip/_internal/configuration.py,sha256=-Gxz2J-KuvxiqWIJ9F-XnYVZ5lKhNk7VO6ondEbH4EM,14115 +pip/_internal/distributions/__init__.py,sha256=ECBUW5Gtu9TjJwyFLvim-i6kUMYVuikNh9I5asL6tbA,959 +pip/_internal/distributions/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/distributions/__pycache__/base.cpython-37.pyc,, +pip/_internal/distributions/__pycache__/installed.cpython-37.pyc,, +pip/_internal/distributions/__pycache__/sdist.cpython-37.pyc,, +pip/_internal/distributions/__pycache__/wheel.cpython-37.pyc,, +pip/_internal/distributions/base.py,sha256=ruprpM_L2T2HNi3KLUHlbHimZ1sWVw-3Q0Lb8O7TDAI,1425 +pip/_internal/distributions/installed.py,sha256=YqlkBKr6TVP1MAYS6SG8ojud21wVOYLMZ8jMLJe9MSU,760 +pip/_internal/distributions/sdist.py,sha256=D4XTMlCwgPlK69l62GLYkNSVTVe99fR5iAcVt2EbGok,4086 +pip/_internal/distributions/wheel.py,sha256=95uD-TfaYoq3KiKBdzk9YMN4RRqJ28LNoSTS2K46gek,1294 +pip/_internal/exceptions.py,sha256=ZVpArxQrSlm4qAMtHaY3nHvG_t5eSi3WCnMowdm_m8I,12637 +pip/_internal/index/__init__.py,sha256=vpt-JeTZefh8a-FC22ZeBSXFVbuBcXSGiILhQZJaNpQ,30 
+pip/_internal/index/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/index/__pycache__/collector.cpython-37.pyc,, +pip/_internal/index/__pycache__/package_finder.cpython-37.pyc,, +pip/_internal/index/collector.py,sha256=rMdGdAABOrvIl0DYlCMWXr7mIoqrU2VGeQpCuWiPu1Q,22838 +pip/_internal/index/package_finder.py,sha256=ISieDd20dOSndMNybafCu3pO2JR3BKOfHv92Bes0j0Q,37364 +pip/_internal/locations.py,sha256=7YjzJy2CroQD8GBMemnHWRl9448BSIt0lfH98B-Dkd8,6732 +pip/_internal/main.py,sha256=IVBnUQ-FG7DK6617uEXRB5_QJqspAsBFmTmTesYkbdQ,437 +pip/_internal/models/__init__.py,sha256=3DHUd_qxpPozfzouoqa9g9ts1Czr5qaHfFxbnxriepM,63 +pip/_internal/models/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/models/__pycache__/candidate.cpython-37.pyc,, +pip/_internal/models/__pycache__/direct_url.cpython-37.pyc,, +pip/_internal/models/__pycache__/format_control.cpython-37.pyc,, +pip/_internal/models/__pycache__/index.cpython-37.pyc,, +pip/_internal/models/__pycache__/link.cpython-37.pyc,, +pip/_internal/models/__pycache__/scheme.cpython-37.pyc,, +pip/_internal/models/__pycache__/search_scope.cpython-37.pyc,, +pip/_internal/models/__pycache__/selection_prefs.cpython-37.pyc,, +pip/_internal/models/__pycache__/target_python.cpython-37.pyc,, +pip/_internal/models/__pycache__/wheel.cpython-37.pyc,, +pip/_internal/models/candidate.py,sha256=gACeCSHTIaWuB6RAeLmGJnbFFbKfp_47UERDoC_ldOU,1195 +pip/_internal/models/direct_url.py,sha256=MnBLPci1hE9Ndh6d3m0LAqB7hX3ci80CCJTE5eerFaQ,6900 +pip/_internal/models/format_control.py,sha256=RdnnmXxVJppCZWzWEmFTr-zD_m3G0izPLqJi6Iop75M,2823 +pip/_internal/models/index.py,sha256=carvxxaT7mJyoEkptaECHUZiNaA6R5NrsGF55zawNn8,1161 +pip/_internal/models/link.py,sha256=FMlxvqKmLoj7xTQSgKqfO2ehE1WcgD4C5DmEBuC_Qos,7470 +pip/_internal/models/scheme.py,sha256=EhPkT_6G0Md84JTLSVopYsp5H_K6BREYmFvU8H6wMK8,778 +pip/_internal/models/search_scope.py,sha256=Lum0mY4_pdR9DDBy6HV5xHGIMPp_kU8vMsqYKFHZip4,4751 
+pip/_internal/models/selection_prefs.py,sha256=pgNjTfgePPiX1R5S2S8Yc6odOfU9NzG7YP_m_gnS0kw,2044 +pip/_internal/models/target_python.py,sha256=R7tAXI15B_cgw7Fgnq5cI9F-44goUZncH9JMtE8pXRw,4034 +pip/_internal/models/wheel.py,sha256=FTfzVb4WIbfIehxhdlAVvCil_MQ0-W44oyN56cE6NHc,2772 +pip/_internal/network/__init__.py,sha256=jf6Tt5nV_7zkARBrKojIXItgejvoegVJVKUbhAa5Ioc,50 +pip/_internal/network/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/network/__pycache__/auth.cpython-37.pyc,, +pip/_internal/network/__pycache__/cache.cpython-37.pyc,, +pip/_internal/network/__pycache__/download.cpython-37.pyc,, +pip/_internal/network/__pycache__/lazy_wheel.cpython-37.pyc,, +pip/_internal/network/__pycache__/session.cpython-37.pyc,, +pip/_internal/network/__pycache__/utils.cpython-37.pyc,, +pip/_internal/network/__pycache__/xmlrpc.cpython-37.pyc,, +pip/_internal/network/auth.py,sha256=W9WhWL2EUr6OJQ2SecDmcCvHg3_hIT-csbsne8Lk58k,11610 +pip/_internal/network/cache.py,sha256=6cCD7XNrqh1d1lOSY5U-0ZXOG1YwEgMYs-VhRZVyzMA,2329 +pip/_internal/network/download.py,sha256=VTGDO01_nX-5MCdatd4Icv0F88_M8N3WnW6BevA6a0o,5151 +pip/_internal/network/lazy_wheel.py,sha256=fdXGboeP1eEZ0nLVZHqGpDDFGjiBYdSgGnkUgu92SyA,7937 +pip/_internal/network/session.py,sha256=Zs0uiyPxTpfpgSv-ZI9hK9TjasmTplBuBivOTcUiJME,15208 +pip/_internal/network/utils.py,sha256=ZPHg7u6DEcg2EvILIdPECnvPLp21OPHxNVmeXfMy-n0,4172 +pip/_internal/network/xmlrpc.py,sha256=PFCiX_nnwYxC8SFIf7J3trP40ECGjA6fl2-IVNhbkPM,1882 +pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/operations/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/operations/__pycache__/check.cpython-37.pyc,, +pip/_internal/operations/__pycache__/freeze.cpython-37.pyc,, +pip/_internal/operations/__pycache__/prepare.cpython-37.pyc,, +pip/_internal/operations/build/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/operations/build/__pycache__/__init__.cpython-37.pyc,, 
+pip/_internal/operations/build/__pycache__/metadata.cpython-37.pyc,, +pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-37.pyc,, +pip/_internal/operations/build/__pycache__/wheel.cpython-37.pyc,, +pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-37.pyc,, +pip/_internal/operations/build/metadata.py,sha256=2aILgWCQTF1aIhWuCH8TTSjv_kYmA3x1262fT2FQ6pQ,1254 +pip/_internal/operations/build/metadata_legacy.py,sha256=VgzBTk8naIO8-8N_ifEYF7ZAxWUDhphWVIaVlZ2FqYM,2011 +pip/_internal/operations/build/wheel.py,sha256=33vdkxTO-gNqrtWH1eNL_uZo4Irax85moDx2o9zae3M,1465 +pip/_internal/operations/build/wheel_legacy.py,sha256=N1aqNZyGURBX0Bj6wPmB0t4866oMbxoHUpC9pz6FyT0,3356 +pip/_internal/operations/check.py,sha256=JYDsVLvpFyJuJq0ttStgg8TRKbc0myYFAMnfnnQOREM,5215 +pip/_internal/operations/freeze.py,sha256=_vJSZwHBNzBV0GpRUSXhUJz3BrGFdcT2aTcWxH1L4P0,10373 +pip/_internal/operations/install/__init__.py,sha256=mX7hyD2GNBO2mFGokDQ30r_GXv7Y_PLdtxcUv144e-s,51 +pip/_internal/operations/install/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/operations/install/__pycache__/editable_legacy.cpython-37.pyc,, +pip/_internal/operations/install/__pycache__/legacy.cpython-37.pyc,, +pip/_internal/operations/install/__pycache__/wheel.cpython-37.pyc,, +pip/_internal/operations/install/editable_legacy.py,sha256=rJ_xs2qtDUjpY2-n6eYlVyZiNoKbOtZXZrYrcnIELt4,1488 +pip/_internal/operations/install/legacy.py,sha256=zu3Gw54dgHtluyW5n8j5qKcAScidQXJvqB8fb0oLB-4,4281 +pip/_internal/operations/install/wheel.py,sha256=ea2nlXQbmnaVvsEtCW0hTwUEwGQJw1xg0bb2ZCw2_Yo,30057 +pip/_internal/operations/prepare.py,sha256=Rt7Yh7w10_Q-vI3b7R1wkt2R6XPX8YVUdODk-TaGI9c,19903 +pip/_internal/pyproject.py,sha256=VJKsrXORGiGoDPVKCQhuu4tWlQSTOhoiRlVLRNu4rx4,7400 +pip/_internal/req/__init__.py,sha256=s-E5Vxxqqpcs7xfY5gY69oHogsWJ4sLbnUiDoWmkHOU,3133 +pip/_internal/req/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/req/__pycache__/constructors.cpython-37.pyc,, 
+pip/_internal/req/__pycache__/req_file.cpython-37.pyc,, +pip/_internal/req/__pycache__/req_install.cpython-37.pyc,, +pip/_internal/req/__pycache__/req_set.cpython-37.pyc,, +pip/_internal/req/__pycache__/req_tracker.cpython-37.pyc,, +pip/_internal/req/__pycache__/req_uninstall.cpython-37.pyc,, +pip/_internal/req/constructors.py,sha256=LrSHbRHu52-h6HM1qJKG68o1Jw5q8MvJGfr4As6j2uU,16387 +pip/_internal/req/req_file.py,sha256=p7n3Y0q275Eisqfxd0vtfnxYvlT6TCCY0tj75p-yiOY,19448 +pip/_internal/req/req_install.py,sha256=HZselo7A2jCV6sYIBe-coNF85rb6QTgBz1-ZW_AMIzU,33645 +pip/_internal/req/req_set.py,sha256=dxcfbieWYfYkTJNE07U8xaO40zLxl8BhWOcIHVFTmoo,7886 +pip/_internal/req/req_tracker.py,sha256=qWaiejNK6o6cqeyTOIGKIU1CoyrXCcqgMHYi3cqelOA,4690 +pip/_internal/req/req_uninstall.py,sha256=opMGDGb7ZaFippRbaarJaljtzl2CNZmBGEUSnTubE-A,23706 +pip/_internal/resolution/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/resolution/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/resolution/__pycache__/base.cpython-37.pyc,, +pip/_internal/resolution/base.py,sha256=xi72YmIS-lEjyK13PN_3qkGGthA4yGoK0C6qWynyHrE,682 +pip/_internal/resolution/legacy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/resolution/legacy/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/resolution/legacy/__pycache__/resolver.cpython-37.pyc,, +pip/_internal/resolution/legacy/resolver.py,sha256=d-qW6UUxbZqKyXmX2bqnW5C8UtnO0ZcsQuKw_QXualc,18755 +pip/_internal/resolution/resolvelib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/base.cpython-37.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-37.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-37.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-37.pyc,, 
+pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-37.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-37.pyc,, +pip/_internal/resolution/resolvelib/base.py,sha256=dDdffs_0baMLJjQjR7aKlbfuvtbja-lC6f6zZveFzKM,2323 +pip/_internal/resolution/resolvelib/candidates.py,sha256=mJ5xy1932fFQMrrqg_OoQmCY9TWoM3C4JI_yAI2cjiU,20116 +pip/_internal/resolution/resolvelib/factory.py,sha256=DueukAQ1LRAwv-_JyjgIGIrw_a44RRBlXUoeFstFLQM,17169 +pip/_internal/resolution/resolvelib/provider.py,sha256=n7vfjL1UotxYYkQXNh8orjMOR1G9NNSwoGb2yK9d9VE,6081 +pip/_internal/resolution/resolvelib/requirements.py,sha256=lGvoHRhkusRfaz4cFxYBoQNqxS6TeuO3K68qlui6g-0,4511 +pip/_internal/resolution/resolvelib/resolver.py,sha256=b05AK409WdUkCQKhNp0Uh61iMw4a2hFR8fg27dQnZ-g,10077 +pip/_internal/self_outdated_check.py,sha256=q6_nqUHPpt-DScwD97h7FCSqd4nI1s-xkpOI4I5Za3Y,6779 +pip/_internal/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/utils/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/utils/__pycache__/appdirs.cpython-37.pyc,, +pip/_internal/utils/__pycache__/compat.cpython-37.pyc,, +pip/_internal/utils/__pycache__/compatibility_tags.cpython-37.pyc,, +pip/_internal/utils/__pycache__/datetime.cpython-37.pyc,, +pip/_internal/utils/__pycache__/deprecation.cpython-37.pyc,, +pip/_internal/utils/__pycache__/direct_url_helpers.cpython-37.pyc,, +pip/_internal/utils/__pycache__/distutils_args.cpython-37.pyc,, +pip/_internal/utils/__pycache__/encoding.cpython-37.pyc,, +pip/_internal/utils/__pycache__/entrypoints.cpython-37.pyc,, +pip/_internal/utils/__pycache__/filesystem.cpython-37.pyc,, +pip/_internal/utils/__pycache__/filetypes.cpython-37.pyc,, +pip/_internal/utils/__pycache__/glibc.cpython-37.pyc,, +pip/_internal/utils/__pycache__/hashes.cpython-37.pyc,, +pip/_internal/utils/__pycache__/inject_securetransport.cpython-37.pyc,, +pip/_internal/utils/__pycache__/logging.cpython-37.pyc,, 
+pip/_internal/utils/__pycache__/misc.cpython-37.pyc,, +pip/_internal/utils/__pycache__/models.cpython-37.pyc,, +pip/_internal/utils/__pycache__/packaging.cpython-37.pyc,, +pip/_internal/utils/__pycache__/parallel.cpython-37.pyc,, +pip/_internal/utils/__pycache__/pkg_resources.cpython-37.pyc,, +pip/_internal/utils/__pycache__/setuptools_build.cpython-37.pyc,, +pip/_internal/utils/__pycache__/subprocess.cpython-37.pyc,, +pip/_internal/utils/__pycache__/temp_dir.cpython-37.pyc,, +pip/_internal/utils/__pycache__/typing.cpython-37.pyc,, +pip/_internal/utils/__pycache__/unpacking.cpython-37.pyc,, +pip/_internal/utils/__pycache__/urls.cpython-37.pyc,, +pip/_internal/utils/__pycache__/virtualenv.cpython-37.pyc,, +pip/_internal/utils/__pycache__/wheel.cpython-37.pyc,, +pip/_internal/utils/appdirs.py,sha256=RZzUG-Bkh2b-miX0DSZ3v703_-bgK-v0PfWCCjwVE9g,1349 +pip/_internal/utils/compat.py,sha256=GoCSUMoUmTGeg5irQGLDZ7v12As87yHrMzBXEke-njg,8865 +pip/_internal/utils/compatibility_tags.py,sha256=EtBJj-pstj_U0STUZ8FjlG7YDTjuRZUy6GY1cM86yv8,5439 +pip/_internal/utils/datetime.py,sha256=KL-vIdGU9JIpGB5NYkmwXWkH-G_2mvvABlmRtoSZsao,295 +pip/_internal/utils/deprecation.py,sha256=pBnNogoA4UGTxa_JDnPXBRRYpKMbExAhXpBwAwklOBs,3318 +pip/_internal/utils/direct_url_helpers.py,sha256=bZCBNwPQVyZpYGjX_VcomvVvRHvKw-9JzEV-Ft09LQc,4359 +pip/_internal/utils/distutils_args.py,sha256=a56mblNxk9BGifbpEETG61mmBrqhjtjRkJ4HYn-oOEE,1350 +pip/_internal/utils/encoding.py,sha256=wHDJ25yCT_T4ySscCL3P978OpLrfDCpitg8D64IEXMY,1284 +pip/_internal/utils/entrypoints.py,sha256=vHcNpnksCv6mllihU6hfifdsKPEjwcaJ1aLIXEaynaU,1152 +pip/_internal/utils/filesystem.py,sha256=-fU3XteCAIJwf_9FvCZU7vhywvt3nuf_cqkCdwgy1Y8,6943 +pip/_internal/utils/filetypes.py,sha256=R2FwzoeX7b-rZALOXx5cuO8VPPMhUQ4ne7wm3n3IcWA,571 +pip/_internal/utils/glibc.py,sha256=LOeNGgawCKS-4ke9fii78fwXD73dtNav3uxz1Bf-Ab8,3297 +pip/_internal/utils/hashes.py,sha256=xHmrqNwC1eBN0oY0R_RXLJLXGvFdo5gwmbz_pas94k8,4358 
+pip/_internal/utils/inject_securetransport.py,sha256=M17ZlFVY66ApgeASVjKKLKNz0LAfk-SyU0HZ4ZB6MmI,810 +pip/_internal/utils/logging.py,sha256=YIfuDUEkmdn9cIRQ_Ec8rgXs1m5nOwDECtZqM4CBH5U,13093 +pip/_internal/utils/misc.py,sha256=17HkwcfxBA8Y8-rD-rM39gB1vQ2U9-EkigdclDe61eo,27394 +pip/_internal/utils/models.py,sha256=HqiBVtTbW_b_Umvj2fjhDWOHo2RKhPwSz4iAYkQZ688,1201 +pip/_internal/utils/packaging.py,sha256=VtiwcAAL7LBi7tGL2je7LeW4bE11KMHGCsJ1NZY5XtM,3035 +pip/_internal/utils/parallel.py,sha256=7az3aaTMCkqpaLFbpYYOvk0rj7Hu5YH1NPXXomqjgf4,3404 +pip/_internal/utils/pkg_resources.py,sha256=ZX-k7V5q_aNWyDse92nN7orN1aCpRLsaxzpkBZ1XKzU,1254 +pip/_internal/utils/setuptools_build.py,sha256=E1KswI7wfNnCDE5R6G8c9ZbByENpu7NqocjY26PCQDw,5058 +pip/_internal/utils/subprocess.py,sha256=UkPe89gcjxBMx73uutoeJXgD3kwdlL6YO16BkjDdVSI,9924 +pip/_internal/utils/temp_dir.py,sha256=blmG0jEvEgdxbYUt_V15bgcTIJIrxZwAw8QZlCTJYDE,8378 +pip/_internal/utils/typing.py,sha256=xkYwOeHlf4zsHXBDC4310HtEqwhQcYXFPq2h35Tcrl0,1401 +pip/_internal/utils/unpacking.py,sha256=YFAckhqqvmehA8Kan5vd3b1kN_9TafqmOk4b-yz4fho,9488 +pip/_internal/utils/urls.py,sha256=q2rw1kMiiig_XZcoyJSsWMJQqYw-2wUmrMoST4mCW_I,1527 +pip/_internal/utils/virtualenv.py,sha256=iVJ8ZlbNtGon6I4uZFsY2SidrUf1vt3YHrgS5CuU98w,3553 +pip/_internal/utils/wheel.py,sha256=wFzn3h8GqYvgsyWPZtUyn0Rb3MJzmtyP3owMOhKnmL0,7303 +pip/_internal/vcs/__init__.py,sha256=viJxJRqRE_mVScum85bgQIXAd6o0ozFt18VpC-qIJrM,617 +pip/_internal/vcs/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/vcs/__pycache__/bazaar.cpython-37.pyc,, +pip/_internal/vcs/__pycache__/git.cpython-37.pyc,, +pip/_internal/vcs/__pycache__/mercurial.cpython-37.pyc,, +pip/_internal/vcs/__pycache__/subversion.cpython-37.pyc,, +pip/_internal/vcs/__pycache__/versioncontrol.cpython-37.pyc,, +pip/_internal/vcs/bazaar.py,sha256=5rRR02uDZTLaxQT-R5Obd8FZDOMlShqYds-pwVSJJs8,3887 +pip/_internal/vcs/git.py,sha256=kvB729wrKY0OWMSgOS1pUly4LosZp8utrd3kOQsWalA,13985 
+pip/_internal/vcs/mercurial.py,sha256=FzCGmYzVZvB-vyM73fKcQk2B4jMNXGnXlQ2bJ7nmglM,5162 +pip/_internal/vcs/subversion.py,sha256=JZs3JnAX4flHOAXnoavs5viA8vZr00_xt_yXYf-9L2A,12247 +pip/_internal/vcs/versioncontrol.py,sha256=WpxeTRC0NoGB2uXJdmfq4pPxY-p7sk1rV_WkxMxgzQA,25966 +pip/_internal/wheel_builder.py,sha256=6w1VPXrpUvCCPlV0cI1wNaCqNz4laF6B6whvaxl9cns,9522 +pip/_vendor/__init__.py,sha256=CsxnpYPbi_2agrDI79iQrCmQeZRcwwIF0C6cm_1RynU,4588 +pip/_vendor/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/__pycache__/appdirs.cpython-37.pyc,, +pip/_vendor/__pycache__/contextlib2.cpython-37.pyc,, +pip/_vendor/__pycache__/distro.cpython-37.pyc,, +pip/_vendor/__pycache__/ipaddress.cpython-37.pyc,, +pip/_vendor/__pycache__/pyparsing.cpython-37.pyc,, +pip/_vendor/__pycache__/retrying.cpython-37.pyc,, +pip/_vendor/__pycache__/six.cpython-37.pyc,, +pip/_vendor/appdirs.py,sha256=M6IYRJtdZgmSPCXCSMBRB0VT3P8MdFbWCDbSLrB2Ebg,25907 +pip/_vendor/cachecontrol/__init__.py,sha256=pJtAaUxOsMPnytI1A3juAJkXYDr8krdSnsg4Yg3OBEg,302 +pip/_vendor/cachecontrol/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-37.pyc,, +pip/_vendor/cachecontrol/__pycache__/adapter.cpython-37.pyc,, +pip/_vendor/cachecontrol/__pycache__/cache.cpython-37.pyc,, +pip/_vendor/cachecontrol/__pycache__/compat.cpython-37.pyc,, +pip/_vendor/cachecontrol/__pycache__/controller.cpython-37.pyc,, +pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-37.pyc,, +pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-37.pyc,, +pip/_vendor/cachecontrol/__pycache__/serialize.cpython-37.pyc,, +pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-37.pyc,, +pip/_vendor/cachecontrol/_cmd.py,sha256=URGE0KrA87QekCG3SGPatlSPT571dZTDjNa-ZXX3pDc,1295 +pip/_vendor/cachecontrol/adapter.py,sha256=sSwaSYd93IIfCFU4tOMgSo6b2LCt_gBSaQUj8ktJFOA,4882 +pip/_vendor/cachecontrol/cache.py,sha256=1fc4wJP8HYt1ycnJXeEw5pCpeBL2Cqxx6g9Fb0AYDWQ,805 
+pip/_vendor/cachecontrol/caches/__init__.py,sha256=-gHNKYvaeD0kOk5M74eOrsSgIKUtC6i6GfbmugGweEo,86 +pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-37.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-37.pyc,, +pip/_vendor/cachecontrol/caches/file_cache.py,sha256=nYVKsJtXh6gJXvdn1iWyrhxvkwpQrK-eKoMRzuiwkKk,4153 +pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=HxelMpNCo-dYr2fiJDwM3hhhRmxUYtB5tXm1GpAAT4Y,856 +pip/_vendor/cachecontrol/compat.py,sha256=kHNvMRdt6s_Xwqq_9qJmr9ou3wYMOMUMxPPcwNxT8Mc,695 +pip/_vendor/cachecontrol/controller.py,sha256=CWEX3pedIM9s60suf4zZPtm_JvVgnvogMGK_OiBG5F8,14149 +pip/_vendor/cachecontrol/filewrapper.py,sha256=vACKO8Llzu_ZWyjV1Fxn1MA4TGU60N5N3GSrAFdAY2Q,2533 +pip/_vendor/cachecontrol/heuristics.py,sha256=BFGHJ3yQcxvZizfo90LLZ04T_Z5XSCXvFotrp7Us0sc,4070 +pip/_vendor/cachecontrol/serialize.py,sha256=vIa4jvq4x_KSOLdEIedoknX2aXYHQujLDFV4-F21Dno,7091 +pip/_vendor/cachecontrol/wrapper.py,sha256=5LX0uJwkNQUtYSEw3aGmGu9WY8wGipd81mJ8lG0d0M4,690 +pip/_vendor/certifi/__init__.py,sha256=u1E_DrSGj_nnEkK5VglvEqP8D80KpghLVWL0A_pq41A,62 +pip/_vendor/certifi/__main__.py,sha256=1k3Cr95vCxxGRGDljrW3wMdpZdL3Nhf0u1n-k2qdsCY,255 +pip/_vendor/certifi/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/certifi/__pycache__/__main__.cpython-37.pyc,, +pip/_vendor/certifi/__pycache__/core.cpython-37.pyc,, +pip/_vendor/certifi/cacert.pem,sha256=GhT24f0R7_9y4YY_hkXwkO7BthZhRGDCEMO348E9S14,282394 +pip/_vendor/certifi/core.py,sha256=jBrwKEWpG0IKcuozK0BQ2HHGp8adXAOyBPC7ddgR6vM,2315 +pip/_vendor/chardet/__init__.py,sha256=YsP5wQlsHJ2auF1RZJfypiSrCA7_bQiRm3ES_NI76-Y,1559 +pip/_vendor/chardet/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/big5freq.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/big5prober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/chardistribution.cpython-37.pyc,, 
+pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/charsetprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/compat.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/cp949prober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/enums.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/escprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/escsm.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/eucjpprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/euckrfreq.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/euckrprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/euctwfreq.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/euctwprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/gb2312freq.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/gb2312prober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/hebrewprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/jisfreq.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/jpcntx.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/langcyrillicmodel.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/langthaimodel.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/latin1prober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/mbcssm.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-37.pyc,, 
+pip/_vendor/chardet/__pycache__/sjisprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/universaldetector.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/utf8prober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/version.cpython-37.pyc,, +pip/_vendor/chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254 +pip/_vendor/chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757 +pip/_vendor/chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411 +pip/_vendor/chardet/charsetgroupprober.py,sha256=6bDu8YIiRuScX4ca9Igb0U69TA2PGXXDej6Cc4_9kO4,3787 +pip/_vendor/chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110 +pip/_vendor/chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +pip/_vendor/chardet/cli/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-37.pyc,, +pip/_vendor/chardet/cli/chardetect.py,sha256=DI8dlV3FBD0c0XA_y3sQ78z754DUv1J8n34RtDjOXNw,2774 +pip/_vendor/chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590 +pip/_vendor/chardet/compat.py,sha256=PKTzHkSbtbHDqS9PyujMbX74q1a8mMpeQTDVsQhZMRw,1134 +pip/_vendor/chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855 +pip/_vendor/chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661 +pip/_vendor/chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950 +pip/_vendor/chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510 +pip/_vendor/chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749 +pip/_vendor/chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546 +pip/_vendor/chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748 +pip/_vendor/chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621 
+pip/_vendor/chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747 +pip/_vendor/chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715 +pip/_vendor/chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754 +pip/_vendor/chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838 +pip/_vendor/chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777 +pip/_vendor/chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643 +pip/_vendor/chardet/langbulgarianmodel.py,sha256=1HqQS9Pbtnj1xQgxitJMvw8X6kKr5OockNCZWfEQrPE,12839 +pip/_vendor/chardet/langcyrillicmodel.py,sha256=LODajvsetH87yYDDQKA2CULXUH87tI223dhfjh9Zx9c,17948 +pip/_vendor/chardet/langgreekmodel.py,sha256=8YAW7bU8YwSJap0kIJSbPMw1BEqzGjWzqcqf0WgUKAA,12688 +pip/_vendor/chardet/langhebrewmodel.py,sha256=JSnqmE5E62tDLTPTvLpQsg5gOMO4PbdWRvV7Avkc0HA,11345 +pip/_vendor/chardet/langhungarianmodel.py,sha256=RhapYSG5l0ZaO-VV4Fan5sW0WRGQqhwBM61yx3yxyOA,12592 +pip/_vendor/chardet/langthaimodel.py,sha256=8l0173Gu_W6G8mxmQOTEF4ls2YdE7FxWf3QkSxEGXJQ,11290 +pip/_vendor/chardet/langturkishmodel.py,sha256=W22eRNJsqI6uWAfwXSKVWWnCerYqrI8dZQTm_M0lRFk,11102 +pip/_vendor/chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370 +pip/_vendor/chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413 +pip/_vendor/chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012 +pip/_vendor/chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481 +pip/_vendor/chardet/sbcharsetprober.py,sha256=LDSpCldDCFlYwUkGkwD2oFxLlPWIWXT09akH_2PiY74,5657 +pip/_vendor/chardet/sbcsgroupprober.py,sha256=1IprcCB_k1qfmnxGC6MBbxELlKqD3scW6S8YIwdeyXA,3546 +pip/_vendor/chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774 +pip/_vendor/chardet/universaldetector.py,sha256=qL0174lSZE442eB21nnktT9_VcAye07laFWUeUrjttY,12485 
+pip/_vendor/chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766 +pip/_vendor/chardet/version.py,sha256=sp3B08mrDXB-pf3K9fqJ_zeDHOCLC8RrngQyDFap_7g,242 +pip/_vendor/colorama/__init__.py,sha256=DqjXH9URVP3IJwmMt7peYw50ns1RNAymIB9-XdPEFV8,239 +pip/_vendor/colorama/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/colorama/__pycache__/ansi.cpython-37.pyc,, +pip/_vendor/colorama/__pycache__/ansitowin32.cpython-37.pyc,, +pip/_vendor/colorama/__pycache__/initialise.cpython-37.pyc,, +pip/_vendor/colorama/__pycache__/win32.cpython-37.pyc,, +pip/_vendor/colorama/__pycache__/winterm.cpython-37.pyc,, +pip/_vendor/colorama/ansi.py,sha256=Fi0un-QLqRm-v7o_nKiOqyC8PapBJK7DLV_q9LKtTO0,2524 +pip/_vendor/colorama/ansitowin32.py,sha256=u8QaqdqS_xYSfNkPM1eRJLHz6JMWPodaJaP0mxgHCDc,10462 +pip/_vendor/colorama/initialise.py,sha256=PprovDNxMTrvoNHFcL2NZjpH2XzDc8BLxLxiErfUl4k,1915 +pip/_vendor/colorama/win32.py,sha256=bJ8Il9jwaBN5BJ8bmN6FoYZ1QYuMKv2j8fGrXh7TJjw,5404 +pip/_vendor/colorama/winterm.py,sha256=2y_2b7Zsv34feAsP67mLOVc-Bgq51mdYGo571VprlrM,6438 +pip/_vendor/contextlib2.py,sha256=5HjGflUzwWAUfcILhSmC2GqvoYdZZzFzVfIDztHigUs,16915 +pip/_vendor/distlib/__init__.py,sha256=3veAk2rPznOB2gsK6tjbbh0TQMmGE5P82eE9wXq6NIk,581 +pip/_vendor/distlib/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/compat.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/database.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/index.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/locators.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/manifest.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/markers.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/metadata.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/resources.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/scripts.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/util.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/version.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/wheel.cpython-37.pyc,, 
+pip/_vendor/distlib/_backport/__init__.py,sha256=bqS_dTOH6uW9iGgd0uzfpPjo6vZ4xpPZ7kyfZJ2vNaw,274 +pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/distlib/_backport/__pycache__/misc.cpython-37.pyc,, +pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-37.pyc,, +pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-37.pyc,, +pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-37.pyc,, +pip/_vendor/distlib/_backport/misc.py,sha256=KWecINdbFNOxSOP1fGF680CJnaC6S4fBRgEtaYTw0ig,971 +pip/_vendor/distlib/_backport/shutil.py,sha256=IX_G2NPqwecJibkIDje04bqu0xpHkfSQ2GaGdEVqM5Y,25707 +pip/_vendor/distlib/_backport/sysconfig.cfg,sha256=swZKxq9RY5e9r3PXCrlvQPMsvOdiWZBTHLEbqS8LJLU,2617 +pip/_vendor/distlib/_backport/sysconfig.py,sha256=BQHFlb6pubCl_dvT1NjtzIthylofjKisox239stDg0U,26854 +pip/_vendor/distlib/_backport/tarfile.py,sha256=Ihp7rXRcjbIKw8COm9wSePV9ARGXbSF9gGXAMn2Q-KU,92628 +pip/_vendor/distlib/compat.py,sha256=ADA56xiAxar3mU6qemlBhNbsrFPosXRhO44RzsbJPqk,41408 +pip/_vendor/distlib/database.py,sha256=Kl0YvPQKc4OcpVi7k5cFziydM1xOK8iqdxLGXgbZHV4,51059 +pip/_vendor/distlib/index.py,sha256=SXKzpQCERctxYDMp_OLee2f0J0e19ZhGdCIoMlUfUQM,21066 +pip/_vendor/distlib/locators.py,sha256=c9E4cDEacJ_uKbuE5BqAVocoWp6rsuBGTkiNDQq3zV4,52100 +pip/_vendor/distlib/manifest.py,sha256=nQEhYmgoreaBZzyFzwYsXxJARu3fo4EkunU163U16iE,14811 +pip/_vendor/distlib/markers.py,sha256=6Ac3cCfFBERexiESWIOXmg-apIP8l2esafNSX3KMy-8,4387 +pip/_vendor/distlib/metadata.py,sha256=z2KPy3h3tcDnb9Xs7nAqQ5Oz0bqjWAUFmKWcFKRoodg,38962 +pip/_vendor/distlib/resources.py,sha256=2FGv0ZHF14KXjLIlL0R991lyQQGcewOS4mJ-5n-JVnc,10766 +pip/_vendor/distlib/scripts.py,sha256=_MAj3sMuv56kuM8FsiIWXqbT0gmumPGaOR_atOzn4a4,17180 +pip/_vendor/distlib/t32.exe,sha256=NS3xBCVAld35JVFNmb-1QRyVtThukMrwZVeXn4LhaEQ,96768 +pip/_vendor/distlib/t64.exe,sha256=oAqHes78rUWVM0OtVqIhUvequl_PKhAhXYQWnUf7zR0,105984 +pip/_vendor/distlib/util.py,sha256=f2jZCPrcLCt6LcnC0gUy-Fur60tXD8reA7k4rDpHMDw,59845 
+pip/_vendor/distlib/version.py,sha256=_n7F6juvQGAcn769E_SHa7fOcf5ERlEVymJ_EjPRwGw,23391 +pip/_vendor/distlib/w32.exe,sha256=lJtnZdeUxTZWya_EW5DZos_K5rswRECGspIl8ZJCIXs,90112 +pip/_vendor/distlib/w64.exe,sha256=0aRzoN2BO9NWW4ENy4_4vHkHR4qZTFZNVSAJJYlODTI,99840 +pip/_vendor/distlib/wheel.py,sha256=v6DnwTqhNHwrEVFr8_YeiTW6G4ftP_evsywNgrmdb2o,41144 +pip/_vendor/distro.py,sha256=xxMIh2a3KmippeWEHzynTdHT3_jZM0o-pos0dAWJROM,43628 +pip/_vendor/html5lib/__init__.py,sha256=BYzcKCqeEii52xDrqBFruhnmtmkiuHXFyFh-cglQ8mk,1160 +pip/_vendor/html5lib/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-37.pyc,, +pip/_vendor/html5lib/__pycache__/_inputstream.cpython-37.pyc,, +pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-37.pyc,, +pip/_vendor/html5lib/__pycache__/_utils.cpython-37.pyc,, +pip/_vendor/html5lib/__pycache__/constants.cpython-37.pyc,, +pip/_vendor/html5lib/__pycache__/html5parser.cpython-37.pyc,, +pip/_vendor/html5lib/__pycache__/serializer.cpython-37.pyc,, +pip/_vendor/html5lib/_ihatexml.py,sha256=ifOwF7pXqmyThIXc3boWc96s4MDezqRrRVp7FwDYUFs,16728 +pip/_vendor/html5lib/_inputstream.py,sha256=jErNASMlkgs7MpOM9Ve_VdLDJyFFweAjLuhVutZz33U,32353 +pip/_vendor/html5lib/_tokenizer.py,sha256=04mgA2sNTniutl2fxFv-ei5bns4iRaPxVXXHh_HrV_4,77040 +pip/_vendor/html5lib/_trie/__init__.py,sha256=nqfgO910329BEVJ5T4psVwQtjd2iJyEXQ2-X8c1YxwU,109 +pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-37.pyc,, +pip/_vendor/html5lib/_trie/__pycache__/py.cpython-37.pyc,, +pip/_vendor/html5lib/_trie/_base.py,sha256=CaybYyMro8uERQYjby2tTeSUatnWDfWroUN9N7ety5w,1013 +pip/_vendor/html5lib/_trie/py.py,sha256=wXmQLrZRf4MyWNyg0m3h81m9InhLR7GJ002mIIZh-8o,1775 +pip/_vendor/html5lib/_utils.py,sha256=Dx9AKntksRjFT1veBj7I362pf5OgIaT0zglwq43RnfU,4931 +pip/_vendor/html5lib/constants.py,sha256=Ll-yzLU_jcjyAI_h57zkqZ7aQWE5t5xA4y_jQgoUUhw,83464 
+pip/_vendor/html5lib/filters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-37.pyc,, +pip/_vendor/html5lib/filters/__pycache__/base.cpython-37.pyc,, +pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-37.pyc,, +pip/_vendor/html5lib/filters/__pycache__/lint.cpython-37.pyc,, +pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-37.pyc,, +pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-37.pyc,, +pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-37.pyc,, +pip/_vendor/html5lib/filters/alphabeticalattributes.py,sha256=lViZc2JMCclXi_5gduvmdzrRxtO5Xo9ONnbHBVCsykU,919 +pip/_vendor/html5lib/filters/base.py,sha256=z-IU9ZAYjpsVsqmVt7kuWC63jR11hDMr6CVrvuao8W0,286 +pip/_vendor/html5lib/filters/inject_meta_charset.py,sha256=egDXUEHXmAG9504xz0K6ALDgYkvUrC2q15YUVeNlVQg,2945 +pip/_vendor/html5lib/filters/lint.py,sha256=jk6q56xY0ojiYfvpdP-OZSm9eTqcAdRqhCoPItemPYA,3643 +pip/_vendor/html5lib/filters/optionaltags.py,sha256=8lWT75J0aBOHmPgfmqTHSfPpPMp01T84NKu0CRedxcE,10588 +pip/_vendor/html5lib/filters/sanitizer.py,sha256=m6oGmkBhkGAnn2nV6D4hE78SCZ6WEnK9rKdZB3uXBIc,26897 +pip/_vendor/html5lib/filters/whitespace.py,sha256=8eWqZxd4UC4zlFGW6iyY6f-2uuT8pOCSALc3IZt7_t4,1214 +pip/_vendor/html5lib/html5parser.py,sha256=anr-aXre_ImfrkQ35c_rftKXxC80vJCREKe06Tq15HA,117186 +pip/_vendor/html5lib/serializer.py,sha256=_PpvcZF07cwE7xr9uKkZqh5f4UEaI8ltCU2xPJzaTpk,15759 +pip/_vendor/html5lib/treeadapters/__init__.py,sha256=A0rY5gXIe4bJOiSGRO_j_tFhngRBO8QZPzPtPw5dFzo,679 +pip/_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/html5lib/treeadapters/__pycache__/genshi.cpython-37.pyc,, +pip/_vendor/html5lib/treeadapters/__pycache__/sax.cpython-37.pyc,, +pip/_vendor/html5lib/treeadapters/genshi.py,sha256=CH27pAsDKmu4ZGkAUrwty7u0KauGLCZRLPMzaO3M5vo,1715 
+pip/_vendor/html5lib/treeadapters/sax.py,sha256=BKS8woQTnKiqeffHsxChUqL4q2ZR_wb5fc9MJ3zQC8s,1776 +pip/_vendor/html5lib/treebuilders/__init__.py,sha256=AysSJyvPfikCMMsTVvaxwkgDieELD5dfR8FJIAuq7hY,3592 +pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/base.cpython-37.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/dom.cpython-37.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-37.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-37.pyc,, +pip/_vendor/html5lib/treebuilders/base.py,sha256=z-o51vt9r_l2IDG5IioTOKGzZne4Fy3_Fc-7ztrOh4I,14565 +pip/_vendor/html5lib/treebuilders/dom.py,sha256=22whb0C71zXIsai5mamg6qzBEiigcBIvaDy4Asw3at0,8925 +pip/_vendor/html5lib/treebuilders/etree.py,sha256=w5ZFpKk6bAxnrwD2_BrF5EVC7vzz0L3LMi9Sxrbc_8w,12836 +pip/_vendor/html5lib/treebuilders/etree_lxml.py,sha256=9gqDjs-IxsPhBYa5cpvv2FZ1KZlG83Giusy2lFmvIkE,14766 +pip/_vendor/html5lib/treewalkers/__init__.py,sha256=OBPtc1TU5mGyy18QDMxKEyYEz0wxFUUNj5v0-XgmYhY,5719 +pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/base.cpython-37.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/dom.cpython-37.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/etree.cpython-37.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/etree_lxml.cpython-37.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/genshi.cpython-37.pyc,, +pip/_vendor/html5lib/treewalkers/base.py,sha256=ouiOsuSzvI0KgzdWP8PlxIaSNs9falhbiinAEc_UIJY,7476 +pip/_vendor/html5lib/treewalkers/dom.py,sha256=EHyFR8D8lYNnyDU9lx_IKigVJRyecUGua0mOi7HBukc,1413 +pip/_vendor/html5lib/treewalkers/etree.py,sha256=xo1L5m9VtkfpFJK0pFmkLVajhqYYVisVZn3k9kYpPkI,4551 +pip/_vendor/html5lib/treewalkers/etree_lxml.py,sha256=_b0LAVWLcVu9WaU_-w3D8f0IRSpCbjf667V-3NRdhTw,6357 +pip/_vendor/html5lib/treewalkers/genshi.py,sha256=4D2PECZ5n3ZN3qu3jMl9yY7B81jnQApBQSVlfaIuYbA,2309 
+pip/_vendor/idna/__init__.py,sha256=9Nt7xpyet3DmOrPUGooDdAwmHZZu1qUAy2EaJ93kGiQ,58 +pip/_vendor/idna/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/idna/__pycache__/codec.cpython-37.pyc,, +pip/_vendor/idna/__pycache__/compat.cpython-37.pyc,, +pip/_vendor/idna/__pycache__/core.cpython-37.pyc,, +pip/_vendor/idna/__pycache__/idnadata.cpython-37.pyc,, +pip/_vendor/idna/__pycache__/intranges.cpython-37.pyc,, +pip/_vendor/idna/__pycache__/package_data.cpython-37.pyc,, +pip/_vendor/idna/__pycache__/uts46data.cpython-37.pyc,, +pip/_vendor/idna/codec.py,sha256=lvYb7yu7PhAqFaAIAdWcwgaWI2UmgseUua-1c0AsG0A,3299 +pip/_vendor/idna/compat.py,sha256=R-h29D-6mrnJzbXxymrWUW7iZUvy-26TQwZ0ij57i4U,232 +pip/_vendor/idna/core.py,sha256=jCoaLb3bA2tS_DDx9PpGuNTEZZN2jAzB369aP-IHYRE,11951 +pip/_vendor/idna/idnadata.py,sha256=gmzFwZWjdms3kKZ_M_vwz7-LP_SCgYfSeE03B21Qpsk,42350 +pip/_vendor/idna/intranges.py,sha256=TY1lpxZIQWEP6tNqjZkFA5hgoMWOj1OBmnUG8ihT87E,1749 +pip/_vendor/idna/package_data.py,sha256=bxBjpLnE06_1jSYKEy5svOMu1zM3OMztXVUb1tPlcp0,22 +pip/_vendor/idna/uts46data.py,sha256=lMdw2zdjkH1JUWXPPEfFUSYT3Fyj60bBmfLvvy5m7ko,202084 +pip/_vendor/ipaddress.py,sha256=-0RmurI31XgAaN20WCi0zrcuoat90nNA70_6yGlx2PU,79875 +pip/_vendor/msgpack/__init__.py,sha256=2gJwcsTIaAtCM0GMi2rU-_Y6kILeeQuqRkrQ22jSANc,1118 +pip/_vendor/msgpack/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/msgpack/__pycache__/_version.cpython-37.pyc,, +pip/_vendor/msgpack/__pycache__/exceptions.cpython-37.pyc,, +pip/_vendor/msgpack/__pycache__/ext.cpython-37.pyc,, +pip/_vendor/msgpack/__pycache__/fallback.cpython-37.pyc,, +pip/_vendor/msgpack/_version.py,sha256=hu7lzmZ_ClOaOOmRsWb4xomhzQ4UIsLsvv8KY6UysHE,20 +pip/_vendor/msgpack/exceptions.py,sha256=dCTWei8dpkrMsQDcjQk74ATl9HsIBH0ybt8zOPNqMYc,1081 +pip/_vendor/msgpack/ext.py,sha256=nV19BzE9Be8SJHrxxYJHFbvEHJaXcP3avRkHVp5wovM,6034 +pip/_vendor/msgpack/fallback.py,sha256=Z8V3iYUUPqKVy4WWTk64Vq3G0PylQIOmlWvgnMhmkdU,37133 
+pip/_vendor/packaging/__about__.py,sha256=PNMsaZn4UcCHyubgROH1bl6CluduPjI5kFrSp_Zgklo,736 +pip/_vendor/packaging/__init__.py,sha256=6enbp5XgRfjBjsI9-bn00HjHf5TH21PDMOKkJW8xw-w,562 +pip/_vendor/packaging/__pycache__/__about__.cpython-37.pyc,, +pip/_vendor/packaging/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/packaging/__pycache__/_compat.cpython-37.pyc,, +pip/_vendor/packaging/__pycache__/_structures.cpython-37.pyc,, +pip/_vendor/packaging/__pycache__/_typing.cpython-37.pyc,, +pip/_vendor/packaging/__pycache__/markers.cpython-37.pyc,, +pip/_vendor/packaging/__pycache__/requirements.cpython-37.pyc,, +pip/_vendor/packaging/__pycache__/specifiers.cpython-37.pyc,, +pip/_vendor/packaging/__pycache__/tags.cpython-37.pyc,, +pip/_vendor/packaging/__pycache__/utils.cpython-37.pyc,, +pip/_vendor/packaging/__pycache__/version.cpython-37.pyc,, +pip/_vendor/packaging/_compat.py,sha256=MXdsGpSE_W-ZrHoC87andI4LV2FAwU7HLL-eHe_CjhU,1128 +pip/_vendor/packaging/_structures.py,sha256=ozkCX8Q8f2qE1Eic3YiQ4buDVfgz2iYevY9e7R2y3iY,2022 +pip/_vendor/packaging/_typing.py,sha256=VgA0AAvsc97KB5nF89zoudOyCMEsV7FlaXzZbYqEkzA,1824 +pip/_vendor/packaging/markers.py,sha256=V_RdoQqOUbSfy7y9o2vRk7BkzAh3yneC82cuWpKrqOg,9491 +pip/_vendor/packaging/requirements.py,sha256=F93hkn7i8NKRZP-FtdTIlhz1PUsRjhe6eRbsBXX0Uh4,4903 +pip/_vendor/packaging/specifiers.py,sha256=uYp9l13F0LcknS6d4N60ytiBgFmIhKideOq9AnsxTco,31944 +pip/_vendor/packaging/tags.py,sha256=NKMS37Zo_nWrZxgsD6zbXsXgc9edn9m160cBiLmHJdE,24067 +pip/_vendor/packaging/utils.py,sha256=RShlvnjO2CtYSD8uri32frMMFMTmB-3ihsq1-ghzLEw,1811 +pip/_vendor/packaging/version.py,sha256=Cnbm-OO9D_qd8ZTFxzFcjSavexSYFZmyeaoPvMsjgPc,15470 +pip/_vendor/pep517/__init__.py,sha256=r5uA106NGJa3slspaD2m32aFpFUiZX-mZ9vIlzAEOp4,84 +pip/_vendor/pep517/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/pep517/__pycache__/_in_process.cpython-37.pyc,, +pip/_vendor/pep517/__pycache__/build.cpython-37.pyc,, +pip/_vendor/pep517/__pycache__/check.cpython-37.pyc,, 
+pip/_vendor/pep517/__pycache__/colorlog.cpython-37.pyc,, +pip/_vendor/pep517/__pycache__/compat.cpython-37.pyc,, +pip/_vendor/pep517/__pycache__/dirtools.cpython-37.pyc,, +pip/_vendor/pep517/__pycache__/envbuild.cpython-37.pyc,, +pip/_vendor/pep517/__pycache__/meta.cpython-37.pyc,, +pip/_vendor/pep517/__pycache__/wrappers.cpython-37.pyc,, +pip/_vendor/pep517/_in_process.py,sha256=XrKOTURJdia5R7i3i_OQmS89LASFXE3HQXfX63qZBIE,8438 +pip/_vendor/pep517/build.py,sha256=DN4ouyj_bd00knOKqv0KHRtN0-JezJoNNZQmcDi4juk,3335 +pip/_vendor/pep517/check.py,sha256=YoaNE3poJGpz96biVCYwtcDshwEGE2HRU5KKya9yfpY,5961 +pip/_vendor/pep517/colorlog.py,sha256=Tk9AuYm_cLF3BKTBoSTJt9bRryn0aFojIQOwbfVUTxQ,4098 +pip/_vendor/pep517/compat.py,sha256=M-5s4VNp8rjyT76ZZ_ibnPD44DYVzSQlyCEHayjtDPw,780 +pip/_vendor/pep517/dirtools.py,sha256=2mkAkAL0mRz_elYFjRKuekTJVipH1zTn4tbf1EDev84,1129 +pip/_vendor/pep517/envbuild.py,sha256=szKUFlO50X1ahQfXwz4hD9V2VE_bz9MLVPIeidsFo4w,6041 +pip/_vendor/pep517/meta.py,sha256=8mnM5lDnT4zXQpBTliJbRGfesH7iioHwozbDxALPS9Y,2463 +pip/_vendor/pep517/wrappers.py,sha256=yFU4Lp7TIYbmuVOTY-pXnlyGZ3F_grIi-JlLkpGN8Gk,10783 +pip/_vendor/pkg_resources/__init__.py,sha256=XpGBfvS9fafA6bm5rx7vnxdxs7yqyoc_NnpzKApkJ64,108277 +pip/_vendor/pkg_resources/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-37.pyc,, +pip/_vendor/pkg_resources/py31compat.py,sha256=CRk8fkiPRDLsbi5pZcKsHI__Pbmh_94L8mr9Qy9Ab2U,562 +pip/_vendor/progress/__init__.py,sha256=fcbQQXo5np2CoQyhSH5XprkicwLZNLePR3uIahznSO0,4857 +pip/_vendor/progress/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/progress/__pycache__/bar.cpython-37.pyc,, +pip/_vendor/progress/__pycache__/counter.cpython-37.pyc,, +pip/_vendor/progress/__pycache__/spinner.cpython-37.pyc,, +pip/_vendor/progress/bar.py,sha256=QuDuVNcmXgpxtNtxO0Fq72xKigxABaVmxYGBw4J3Z_E,2854 +pip/_vendor/progress/counter.py,sha256=MznyBrvPWrOlGe4MZAlGUb9q3aODe6_aNYeAE_VNoYA,1372 
+pip/_vendor/progress/spinner.py,sha256=k8JbDW94T0-WXuXfxZIFhdoNPYp3jfnpXqBnfRv5fGs,1380 +pip/_vendor/pyparsing.py,sha256=J1b4z3S_KwyJW7hKGnoN-hXW9pgMIzIP6QThyY5yJq4,273394 +pip/_vendor/requests/__init__.py,sha256=orzv4-1uejMDc2v3LnTVneINGXiwqXSfrASoFBsYblE,4465 +pip/_vendor/requests/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/__version__.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/_internal_utils.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/adapters.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/api.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/auth.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/certs.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/compat.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/cookies.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/exceptions.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/help.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/hooks.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/models.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/packages.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/sessions.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/status_codes.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/structures.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/utils.cpython-37.pyc,, +pip/_vendor/requests/__version__.py,sha256=Xwky1FMlMkJJGidBM50JC7FKcosWzkjIW-WhQGrBdFM,441 +pip/_vendor/requests/_internal_utils.py,sha256=Zx3PnEUccyfsB-ie11nZVAW8qClJy0gx1qNME7rgT18,1096 +pip/_vendor/requests/adapters.py,sha256=e-bmKEApNVqFdylxuMJJfiaHdlmS_zhWhIMEzlHvGuc,21548 +pip/_vendor/requests/api.py,sha256=PlHM-HT3PQ5lyufoeGmV-nJxRi7UnUyGVh7OV7B9XV4,6496 +pip/_vendor/requests/auth.py,sha256=OMoJIVKyRLy9THr91y8rxysZuclwPB-K1Xg1zBomUhQ,10207 +pip/_vendor/requests/certs.py,sha256=nXRVq9DtGmv_1AYbwjTu9UrgAcdJv05ZvkNeaoLOZxY,465 +pip/_vendor/requests/compat.py,sha256=LQWuCR4qXk6w7-qQopXyz0WNHUdAD40k0mKnaAEf1-g,2045 
+pip/_vendor/requests/cookies.py,sha256=Y-bKX6TvW3FnYlE6Au0SXtVVWcaNdFvuAwQxw-G0iTI,18430 +pip/_vendor/requests/exceptions.py,sha256=d9fJJw8YFBB9VzG9qhvxLuOx6be3c_Dwbck-dVUEAcs,3173 +pip/_vendor/requests/help.py,sha256=SJPVcoXeo7KfK4AxJN5eFVQCjr0im87tU2n7ubLsksU,3578 +pip/_vendor/requests/hooks.py,sha256=QReGyy0bRcr5rkwCuObNakbYsc7EkiKeBwG4qHekr2Q,757 +pip/_vendor/requests/models.py,sha256=_tKIbrscbGvaTdX1UHCwRaiYmPF9VBIuBeydr4Qx1Tg,34287 +pip/_vendor/requests/packages.py,sha256=njJmVifY4aSctuW3PP5EFRCxjEwMRDO6J_feG2dKWsI,695 +pip/_vendor/requests/sessions.py,sha256=OBtwQs1vjkB1xamFdi_p5y8BVeX16BJoQcwSwx_Y3fI,29316 +pip/_vendor/requests/status_codes.py,sha256=gT79Pbs_cQjBgp-fvrUgg1dn2DQO32bDj4TInjnMPSc,4188 +pip/_vendor/requests/structures.py,sha256=msAtr9mq1JxHd-JRyiILfdFlpbJwvvFuP3rfUQT_QxE,3005 +pip/_vendor/requests/utils.py,sha256=VBs99cvV8Z29WGXeWZqHzZ80_nu1AwwjYzJfe0wQIvs,30176 +pip/_vendor/resolvelib/__init__.py,sha256=sqMOy4CbVJQiaG9bCPj0oAntGAVy-RWdPfVaC9XDIEQ,537 +pip/_vendor/resolvelib/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/resolvelib/__pycache__/providers.cpython-37.pyc,, +pip/_vendor/resolvelib/__pycache__/reporters.cpython-37.pyc,, +pip/_vendor/resolvelib/__pycache__/resolvers.cpython-37.pyc,, +pip/_vendor/resolvelib/__pycache__/structs.cpython-37.pyc,, +pip/_vendor/resolvelib/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-37.pyc,, +pip/_vendor/resolvelib/compat/collections_abc.py,sha256=mtTkpr3Gf3OGvU1PD8YuvrJRhVbioxV82T-niFPoX3o,127 +pip/_vendor/resolvelib/providers.py,sha256=TZDCmL-Ic-R5JRIZY8G4FLG5xB2343B0DfuK7aw2Yqw,4547 +pip/_vendor/resolvelib/reporters.py,sha256=ZPSJnVfK8WvXTbX8jE0Nren0-_Hg9ym4epCUPtU8Y0U,1405 +pip/_vendor/resolvelib/resolvers.py,sha256=lQTGcc-2fgHbmdiLzeNDUxVmGc5ZFjkAL6JrVqnqJIw,15018 
+pip/_vendor/resolvelib/structs.py,sha256=yrdhd-n7DercimPGclXe20rgqhlxw8PnxC0wmcXO19Y,2016 +pip/_vendor/retrying.py,sha256=k3fflf5_Mm0XcIJYhB7Tj34bqCCPhUDkYbx1NvW2FPE,9972 +pip/_vendor/six.py,sha256=U4Z_yv534W5CNyjY9i8V1OXY2SjAny8y2L5vDLhhThM,34159 +pip/_vendor/toml/__init__.py,sha256=rJ1pu933HgUtyeeNiusoPd5jJOPNhaKHhSSld3o8AQo,747 +pip/_vendor/toml/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/toml/__pycache__/common.cpython-37.pyc,, +pip/_vendor/toml/__pycache__/decoder.cpython-37.pyc,, +pip/_vendor/toml/__pycache__/encoder.cpython-37.pyc,, +pip/_vendor/toml/__pycache__/ordered.cpython-37.pyc,, +pip/_vendor/toml/__pycache__/tz.cpython-37.pyc,, +pip/_vendor/toml/common.py,sha256=ViBccAduP6eZNJAb1POhRhjOAi56TDsNgWJ1TjgXAug,242 +pip/_vendor/toml/decoder.py,sha256=atpXmyFCzNGiqhkcYLySBuJQkPeSHDzBz47sEaX1amw,38696 +pip/_vendor/toml/encoder.py,sha256=fPqLyFdPAam17X9SELz2TMp9affkfHCmgWZxRKcmzhY,9955 +pip/_vendor/toml/ordered.py,sha256=UWt5Eka90IWVBYdvLgY5PXnkBcVYpHjnw9T67rM85T8,378 +pip/_vendor/toml/tz.py,sha256=DrAgI3wZxZiGcLuV_l8ueA_nPrYoxQ3hZA9tJSjWRsQ,618 +pip/_vendor/urllib3/__init__.py,sha256=rdFZCO1L7e8861ZTvo8AiSKwxCe9SnWQUQwJ599YV9c,2683 +pip/_vendor/urllib3/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/urllib3/__pycache__/_collections.cpython-37.pyc,, +pip/_vendor/urllib3/__pycache__/connection.cpython-37.pyc,, +pip/_vendor/urllib3/__pycache__/connectionpool.cpython-37.pyc,, +pip/_vendor/urllib3/__pycache__/exceptions.cpython-37.pyc,, +pip/_vendor/urllib3/__pycache__/fields.cpython-37.pyc,, +pip/_vendor/urllib3/__pycache__/filepost.cpython-37.pyc,, +pip/_vendor/urllib3/__pycache__/poolmanager.cpython-37.pyc,, +pip/_vendor/urllib3/__pycache__/request.cpython-37.pyc,, +pip/_vendor/urllib3/__pycache__/response.cpython-37.pyc,, +pip/_vendor/urllib3/_collections.py,sha256=GouVsNzwg6jADZTmimMI6oqmwKSswnMo9dh5tGNVWO4,10792 +pip/_vendor/urllib3/connection.py,sha256=Fln8a_bkegdNMkFoSOwyI0PJvL1OqzVUO6ifihKOTpc,14461 
+pip/_vendor/urllib3/connectionpool.py,sha256=egdaX-Db_LVXifDxv3JY0dHIpQqDv0wC0_9Eeh8FkPM,35725 +pip/_vendor/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-37.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-37.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-37.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-37.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-37.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-37.pyc,, +pip/_vendor/urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957 +pip/_vendor/urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-37.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-37.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/bindings.py,sha256=mullWYFaghBdRWla6HYU-TBgFRTPLBEfxj3jplbeJmQ,16886 +pip/_vendor/urllib3/contrib/_securetransport/low_level.py,sha256=V7GnujxnWZh2N2sMsV5N4d9Imymokkm3zBwgt77_bSE,11956 +pip/_vendor/urllib3/contrib/appengine.py,sha256=gfdK4T7CRin7v9HRhHDbDh-Hbk66hHDWeoz7nV3PJo8,11034 +pip/_vendor/urllib3/contrib/ntlmpool.py,sha256=a402AwGN_Ll3N-4ur_AS6UrU-ycUtlnYqoBF76lORg8,4160 +pip/_vendor/urllib3/contrib/pyopenssl.py,sha256=9gm5kpC0ScbDCWobeCrh5LDqS8HgU8FNhmk5v8qQ5Bs,16582 +pip/_vendor/urllib3/contrib/securetransport.py,sha256=vBDFjSnH2gWa-ztMKVaiwW46K1mlDZKqvo_VAonfdcY,32401 +pip/_vendor/urllib3/contrib/socks.py,sha256=nzDMgDIFJWVubKHqvIn2-SKCO91hhJInP92WgHChGzA,7036 +pip/_vendor/urllib3/exceptions.py,sha256=D2Jvab7M7m_n0rnmBmq481paoVT32VvVeB6VeQM0y-w,7172 
+pip/_vendor/urllib3/fields.py,sha256=kroD76QK-GdHHW7f_AUN4XxDC3OQPI2FFrS9eSL4BCs,8553 +pip/_vendor/urllib3/filepost.py,sha256=vj0qbrpT1AFzvvW4SuC8M5kJiw7wftHcSr-7b8UpPpw,2440 +pip/_vendor/urllib3/packages/__init__.py,sha256=h4BLhD4tLaBx1adaDtKXfupsgqY0wWLXb_f1_yVlV6A,108 +pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/urllib3/packages/__pycache__/six.cpython-37.pyc,, +pip/_vendor/urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-37.pyc,, +pip/_vendor/urllib3/packages/backports/makefile.py,sha256=005wrvH-_pWSnTFqQ2sdzzh4zVCtQUUQ4mR2Yyxwc0A,1418 +pip/_vendor/urllib3/packages/six.py,sha256=adx4z-eM_D0Vvu0IIqVzFACQ_ux9l64y7DkSEfbxCDs,32536 +pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py,sha256=ywgKMtfHi1-DrXlzPfVAhzsLzzqcK7GT6eLgdode1Fg,688 +pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-37.pyc,, +pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py,sha256=rvQDQviqQLtPJB6MfEgABnBFj3nXft7ZJ3Dx-BC0AQY,5696 +pip/_vendor/urllib3/poolmanager.py,sha256=iWEAIGrVNGoOmQyfiFwCqG-IyYy6GIQ-jJ9QCsX9li4,17861 +pip/_vendor/urllib3/request.py,sha256=hhoHvEEatyd9Tn5EbGjQ0emn-ENMCyY591yNWTneINA,6018 +pip/_vendor/urllib3/response.py,sha256=eo1Sfkn2x44FtjgP3qwwDsG9ak84spQAxEGy7Ovd4Pc,28221 +pip/_vendor/urllib3/util/__init__.py,sha256=bWNaav_OT-1L7-sxm59cGb59rDORlbhb_4noduM5m0U,1038 +pip/_vendor/urllib3/util/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/urllib3/util/__pycache__/connection.cpython-37.pyc,, +pip/_vendor/urllib3/util/__pycache__/queue.cpython-37.pyc,, +pip/_vendor/urllib3/util/__pycache__/request.cpython-37.pyc,, +pip/_vendor/urllib3/util/__pycache__/response.cpython-37.pyc,, 
+pip/_vendor/urllib3/util/__pycache__/retry.cpython-37.pyc,, +pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-37.pyc,, +pip/_vendor/urllib3/util/__pycache__/timeout.cpython-37.pyc,, +pip/_vendor/urllib3/util/__pycache__/url.cpython-37.pyc,, +pip/_vendor/urllib3/util/__pycache__/wait.cpython-37.pyc,, +pip/_vendor/urllib3/util/connection.py,sha256=NsxUAKQ98GKywta--zg57CdVpeTCI6N-GElCq78Dl8U,4637 +pip/_vendor/urllib3/util/queue.py,sha256=myTX3JDHntglKQNBf3b6dasHH-uF-W59vzGSQiFdAfI,497 +pip/_vendor/urllib3/util/request.py,sha256=C-6-AWffxZG03AdRGoY59uqsn4CVItKU6gjxz7Hc3Mc,3815 +pip/_vendor/urllib3/util/response.py,sha256=_WbTQr8xRQuJuY2rTIZxVdJD6mnEOtQupjaK_bF_Vj8,2573 +pip/_vendor/urllib3/util/retry.py,sha256=3wbv7SdzYNOxPcBiFkPCubTbK1_6vWSepznOXirhUfA,15543 +pip/_vendor/urllib3/util/ssl_.py,sha256=N7gqt2iqzKBsWGmc61YeKNSPri6Ns2iZ_MD5hV2y8tU,14523 +pip/_vendor/urllib3/util/timeout.py,sha256=3qawUo-TZq4q7tyeRToMIOdNGEOBjOOQVq7nHnLryP4,9947 +pip/_vendor/urllib3/util/url.py,sha256=S4YyAwWKJPjFFECC7l9Vp9EKqRH1XAb-uQFANn1Tak0,13981 +pip/_vendor/urllib3/util/wait.py,sha256=k46KzqIYu3Vnzla5YW3EvtInNlU_QycFqQAghIOxoAg,5406 +pip/_vendor/vendor.txt,sha256=bWUiaRjMJhuUsqFZHEJkBH_6lJ_Avl9cOyszcI74IHs,437 +pip/_vendor/webencodings/__init__.py,sha256=qOBJIuPy_4ByYH6W_bNgJF-qYQ2DoU-dKsDu5yRWCXg,10579 +pip/_vendor/webencodings/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/webencodings/__pycache__/labels.cpython-37.pyc,, +pip/_vendor/webencodings/__pycache__/mklabels.cpython-37.pyc,, +pip/_vendor/webencodings/__pycache__/tests.cpython-37.pyc,, +pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-37.pyc,, +pip/_vendor/webencodings/labels.py,sha256=4AO_KxTddqGtrL9ns7kAPjb0CcN6xsCIxbK37HY9r3E,8979 +pip/_vendor/webencodings/mklabels.py,sha256=GYIeywnpaLnP0GSic8LFWgd0UVvO_l1Nc6YoF-87R_4,1305 +pip/_vendor/webencodings/tests.py,sha256=OtGLyjhNY1fvkW1GvLJ_FV9ZoqC9Anyjr7q3kxTbzNs,6563 
+pip/_vendor/webencodings/x_user_defined.py,sha256=yOqWSdmpytGfUgh_Z6JYgDNhoc-BAHyyeeT15Fr42tM,4307 diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-19.0.3.dist-info/WHEEL b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-20.2.dist-info/WHEEL similarity index 70% rename from Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-19.0.3.dist-info/WHEEL rename to Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-20.2.dist-info/WHEEL index c8240f03..ef99c6cf 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-19.0.3.dist-info/WHEEL +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-20.2.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) +Generator: bdist_wheel (0.34.2) Root-Is-Purelib: true Tag: py2-none-any Tag: py3-none-any diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-20.2.dist-info/entry_points.txt b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-20.2.dist-info/entry_points.txt new file mode 100644 index 00000000..d48bd8a8 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-20.2.dist-info/entry_points.txt @@ -0,0 +1,5 @@ +[console_scripts] +pip = pip._internal.cli.main:main +pip3 = pip._internal.cli.main:main +pip3.8 = pip._internal.cli.main:main + diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-19.0.3.dist-info/top_level.txt b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-20.2.dist-info/top_level.txt similarity index 100% rename from Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-19.0.3.dist-info/top_level.txt rename to Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-20.2.dist-info/top_level.txt diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/__init__.py index f48c1ca6..b67e61d0 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/__init__.py +++ 
b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/__init__.py @@ -1 +1,18 @@ -__version__ = "19.0.3" +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional + + +__version__ = "20.2" + + +def main(args=None): + # type: (Optional[List[str]]) -> int + """This is an internal API only meant for use by pip's own console scripts. + + For additional details, see https://github.com/pypa/pip/issues/7498. + """ + from pip._internal.utils.entrypoints import _wrapper + + return _wrapper(args) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/__main__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/__main__.py index 0c223f8c..7c2505fa 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/__main__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/__main__.py @@ -3,6 +3,13 @@ from __future__ import absolute_import import os import sys +# Remove '' and current working directory from the first entry +# of sys.path, if present to avoid using current directory +# in pip commands check, freeze, install, list and show, +# when invoked as python -m pip +if sys.path[0] in ('', os.getcwd()): + sys.path.pop(0) + # If we are running from a wheel, add the wheel to sys.path # This allows the usage python pip-*.whl/pip install pip-*.whl if __package__ == '': @@ -13,7 +20,7 @@ if __package__ == '': path = os.path.dirname(os.path.dirname(__file__)) sys.path.insert(0, path) -from pip._internal import main as _main # isort:skip # noqa +from pip._internal.cli.main import main as _main # isort:skip # noqa if __name__ == '__main__': sys.exit(_main()) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/__init__.py index 276124df..264c2cab 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/__init__.py +++ 
b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/__init__.py @@ -1,78 +1,17 @@ -#!/usr/bin/env python -from __future__ import absolute_import +import pip._internal.utils.inject_securetransport # noqa +from pip._internal.utils.typing import MYPY_CHECK_RUNNING -import locale -import logging -import os -import warnings - -import sys - -# 2016-06-17 barry@debian.org: urllib3 1.14 added optional support for socks, -# but if invoked (i.e. imported), it will issue a warning to stderr if socks -# isn't available. requests unconditionally imports urllib3's socks contrib -# module, triggering this warning. The warning breaks DEP-8 tests (because of -# the stderr output) and is just plain annoying in normal usage. I don't want -# to add socks as yet another dependency for pip, nor do I want to allow-stder -# in the DEP-8 tests, so just suppress the warning. pdb tells me this has to -# be done before the import of pip.vcs. -from pip._vendor.urllib3.exceptions import DependencyWarning -warnings.filterwarnings("ignore", category=DependencyWarning) # noqa - -# We want to inject the use of SecureTransport as early as possible so that any -# references or sessions or what have you are ensured to have it, however we -# only want to do this in the case that we're running on macOS and the linked -# OpenSSL is too old to handle TLSv1.2 -try: - import ssl -except ImportError: - pass -else: - # Checks for OpenSSL 1.0.1 on MacOS - if sys.platform == "darwin" and ssl.OPENSSL_VERSION_NUMBER < 0x1000100f: - try: - from pip._vendor.urllib3.contrib import securetransport - except (ImportError, OSError): - pass - else: - securetransport.inject_into_urllib3() - -from pip._internal.cli.autocompletion import autocomplete -from pip._internal.cli.main_parser import parse_command -from pip._internal.commands import commands_dict -from pip._internal.exceptions import PipError -from pip._internal.utils import deprecation -from pip._internal.vcs import git, mercurial, subversion, 
bazaar # noqa -from pip._vendor.urllib3.exceptions import InsecureRequestWarning - -logger = logging.getLogger(__name__) - -# Hide the InsecureRequestWarning from urllib3 -warnings.filterwarnings("ignore", category=InsecureRequestWarning) +if MYPY_CHECK_RUNNING: + from typing import Optional, List def main(args=None): - if args is None: - args = sys.argv[1:] - - # Configure our deprecation warnings to be sent through loggers - deprecation.install_warning_logger() - - autocomplete() + # type: (Optional[List[str]]) -> int + """This is preserved for old console scripts that may still be referencing + it. - try: - cmd_name, cmd_args = parse_command(args) - except PipError as exc: - sys.stderr.write("ERROR: %s" % exc) - sys.stderr.write(os.linesep) - sys.exit(1) + For additional details, see https://github.com/pypa/pip/issues/7498. + """ + from pip._internal.utils.entrypoints import _wrapper - # Needed for locale.getpreferredencoding(False) to work - # in pip._internal.utils.encoding.auto_decode - try: - locale.setlocale(locale.LC_ALL, '') - except locale.Error as e: - # setlocale can apparently crash if locale are uninitialized - logger.debug("Ignoring error %s when setting locale", e) - command = commands_dict[cmd_name](isolated=("--isolated" in cmd_args)) - return command.main(cmd_args) + return _wrapper(args) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/build_env.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/build_env.py index d744cc78..28d1ad68 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/build_env.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/build_env.py @@ -12,14 +12,15 @@ from sysconfig import get_paths from pip._vendor.pkg_resources import Requirement, VersionConflict, WorkingSet from pip import __file__ as pip_location -from pip._internal.utils.misc import call_subprocess -from pip._internal.utils.temp_dir import TempDirectory +from 
pip._internal.cli.spinners import open_spinner +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.utils.ui import open_spinner if MYPY_CHECK_RUNNING: - from typing import Tuple, Set, Iterable, Optional, List # noqa: F401 - from pip._internal.index import PackageFinder # noqa: F401 + from types import TracebackType + from typing import Tuple, Set, Iterable, Optional, List, Type + from pip._internal.index.package_finder import PackageFinder logger = logging.getLogger(__name__) @@ -50,11 +51,12 @@ class BuildEnvironment(object): def __init__(self): # type: () -> None - self._temp_dir = TempDirectory(kind="build-env") - self._temp_dir.create() + temp_dir = TempDirectory( + kind=tempdir_kinds.BUILD_ENV, globally_managed=True + ) self._prefixes = OrderedDict(( - (name, _Prefix(os.path.join(self._temp_dir.path, name))) + (name, _Prefix(os.path.join(temp_dir.path, name))) for name in ('normal', 'overlay') )) @@ -73,7 +75,7 @@ class BuildEnvironment(object): get_python_lib(plat_specific=True), ) } - self._site_dir = os.path.join(self._temp_dir.path, 'site') + self._site_dir = os.path.join(temp_dir.path, 'site') if not os.path.exists(self._site_dir): os.mkdir(self._site_dir) with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp: @@ -105,6 +107,7 @@ class BuildEnvironment(object): ).format(system_sites=system_sites, lib_dirs=self._lib_dirs)) def __enter__(self): + # type: () -> None self._save_env = { name: os.environ.get(name, None) for name in ('PATH', 'PYTHONNOUSERSITE', 'PYTHONPATH') @@ -123,17 +126,19 @@ class BuildEnvironment(object): 'PYTHONPATH': os.pathsep.join(pythonpath), }) - def __exit__(self, exc_type, exc_val, exc_tb): + def __exit__( + self, + exc_type, # type: Optional[Type[BaseException]] + exc_val, # type: Optional[BaseException] + exc_tb # type: Optional[TracebackType] + ): + # type: 
(...) -> None for varname, old_value in self._save_env.items(): if old_value is None: os.environ.pop(varname, None) else: os.environ[varname] = old_value - def cleanup(self): - # type: () -> None - self._temp_dir.cleanup() - def check_requirements(self, reqs): # type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]] """Return 2 sets: @@ -158,7 +163,7 @@ class BuildEnvironment(object): finder, # type: PackageFinder requirements, # type: Iterable[str] prefix_as_string, # type: str - message # type: Optional[str] + message # type: str ): # type: (...) -> None prefix = self._prefixes[prefix_as_string] @@ -177,22 +182,27 @@ class BuildEnvironment(object): formats = getattr(finder.format_control, format_control) args.extend(('--' + format_control.replace('_', '-'), ','.join(sorted(formats or {':none:'})))) - if finder.index_urls: - args.extend(['-i', finder.index_urls[0]]) - for extra_index in finder.index_urls[1:]: + + index_urls = finder.index_urls + if index_urls: + args.extend(['-i', index_urls[0]]) + for extra_index in index_urls[1:]: args.extend(['--extra-index-url', extra_index]) else: args.append('--no-index') for link in finder.find_links: args.extend(['--find-links', link]) - for _, host, _ in finder.secure_origins: + + for host in finder.trusted_hosts: args.extend(['--trusted-host', host]) if finder.allow_all_prereleases: args.append('--pre') + if finder.prefer_binary: + args.append('--prefer-binary') args.append('--') args.extend(requirements) with open_spinner(message) as spinner: - call_subprocess(args, show_stdout=False, spinner=spinner) + call_subprocess(args, spinner=spinner) class NoOpBuildEnvironment(BuildEnvironment): @@ -200,16 +210,32 @@ class NoOpBuildEnvironment(BuildEnvironment): """ def __init__(self): + # type: () -> None pass def __enter__(self): + # type: () -> None pass - def __exit__(self, exc_type, exc_val, exc_tb): + def __exit__( + self, + exc_type, # type: Optional[Type[BaseException]] + exc_val, # type: Optional[BaseException] 
+ exc_tb # type: Optional[TracebackType] + ): + # type: (...) -> None pass def cleanup(self): + # type: () -> None pass - def install_requirements(self, finder, requirements, prefix, message): + def install_requirements( + self, + finder, # type: PackageFinder + requirements, # type: Iterable[str] + prefix_as_string, # type: str + message # type: str + ): + # type: (...) -> None raise NotImplementedError() diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cache.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cache.py index eb295c4e..07db948b 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cache.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cache.py @@ -1,27 +1,38 @@ """Cache Management """ -import errno import hashlib +import json import logging import os +from pip._vendor.packaging.tags import interpreter_name, interpreter_version from pip._vendor.packaging.utils import canonicalize_name -from pip._internal.download import path_to_url +from pip._internal.exceptions import InvalidWheelFilename from pip._internal.models.link import Link -from pip._internal.utils.compat import expanduser -from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.models.wheel import Wheel +from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.wheel import InvalidWheelFilename, Wheel +from pip._internal.utils.urls import path_to_url if MYPY_CHECK_RUNNING: - from typing import Optional, Set, List, Any # noqa: F401 - from pip._internal.index import FormatControl # noqa: F401 + from typing import Optional, Set, List, Any, Dict + + from pip._vendor.packaging.tags import Tag + + from pip._internal.models.format_control import FormatControl logger = logging.getLogger(__name__) +def _hash_dict(d): + # type: (Dict[str, str]) -> str + """Return a stable sha224 of a 
dictionary.""" + s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True) + return hashlib.sha224(s.encode("ascii")).hexdigest() + + class Cache(object): """An abstract class - provides cache directories for data from links @@ -36,16 +47,19 @@ class Cache(object): def __init__(self, cache_dir, format_control, allowed_formats): # type: (str, FormatControl, Set[str]) -> None super(Cache, self).__init__() - self.cache_dir = expanduser(cache_dir) if cache_dir else None + assert not cache_dir or os.path.isabs(cache_dir) + self.cache_dir = cache_dir or None self.format_control = format_control self.allowed_formats = allowed_formats _valid_formats = {"source", "binary"} assert self.allowed_formats.union(_valid_formats) == _valid_formats - def _get_cache_path_parts(self, link): + def _get_cache_path_parts_legacy(self, link): # type: (Link) -> List[str] """Get parts of part that must be os.path.joined with cache_dir + + Legacy cache key (pip < 20) for compatibility with older caches. """ # We want to generate an url to use as our cache key, we don't want to @@ -69,30 +83,72 @@ class Cache(object): return parts - def _get_candidates(self, link, package_name): - # type: (Link, Optional[str]) -> List[Any] + def _get_cache_path_parts(self, link): + # type: (Link) -> List[str] + """Get parts of part that must be os.path.joined with cache_dir + """ + + # We want to generate an url to use as our cache key, we don't want to + # just re-use the URL because it might have other items in the fragment + # and we don't care about those. 
+ key_parts = {"url": link.url_without_fragment} + if link.hash_name is not None and link.hash is not None: + key_parts[link.hash_name] = link.hash + if link.subdirectory_fragment: + key_parts["subdirectory"] = link.subdirectory_fragment + + # Include interpreter name, major and minor version in cache key + # to cope with ill-behaved sdists that build a different wheel + # depending on the python version their setup.py is being run on, + # and don't encode the difference in compatibility tags. + # https://github.com/pypa/pip/issues/7296 + key_parts["interpreter_name"] = interpreter_name() + key_parts["interpreter_version"] = interpreter_version() + + # Encode our key url with sha224, we'll use this because it has similar + # security properties to sha256, but with a shorter total output (and + # thus less secure). However the differences don't make a lot of + # difference for our use case here. + hashed = _hash_dict(key_parts) + + # We want to nest the directories some to prevent having a ton of top + # level directories where we might run out of sub directories on some + # FS. 
+ parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]] + + return parts + + def _get_candidates(self, link, canonical_package_name): + # type: (Link, str) -> List[Any] can_not_cache = ( not self.cache_dir or - not package_name or + not canonical_package_name or not link ) if can_not_cache: return [] - canonical_name = canonicalize_name(package_name) formats = self.format_control.get_allowed_formats( - canonical_name + canonical_package_name ) if not self.allowed_formats.intersection(formats): return [] - root = self.get_path_for_link(link) - try: - return os.listdir(root) - except OSError as err: - if err.errno in {errno.ENOENT, errno.ENOTDIR}: - return [] - raise + candidates = [] + path = self.get_path_for_link(link) + if os.path.isdir(path): + for candidate in os.listdir(path): + candidates.append((candidate, path)) + # TODO remove legacy path lookup in pip>=21 + legacy_path = self.get_path_for_link_legacy(link) + if os.path.isdir(legacy_path): + for candidate in os.listdir(legacy_path): + candidates.append((candidate, legacy_path)) + return candidates + + def get_path_for_link_legacy(self, link): + # type: (Link) -> str + raise NotImplementedError() def get_path_for_link(self, link): # type: (Link) -> str @@ -100,24 +156,18 @@ class Cache(object): """ raise NotImplementedError() - def get(self, link, package_name): - # type: (Link, Optional[str]) -> Link + def get( + self, + link, # type: Link + package_name, # type: Optional[str] + supported_tags, # type: List[Tag] + ): + # type: (...) -> Link """Returns a link to a cached item if it exists, otherwise returns the passed link. """ raise NotImplementedError() - def _link_for_candidate(self, link, candidate): - # type: (Link, str) -> Link - root = self.get_path_for_link(link) - path = os.path.join(root, candidate) - - return Link(path_to_url(path)) - - def cleanup(self): - # type: () -> None - pass - class SimpleWheelCache(Cache): """A cache of wheels for future installs. 
@@ -129,6 +179,12 @@ class SimpleWheelCache(Cache): cache_dir, format_control, {"binary"} ) + def get_path_for_link_legacy(self, link): + # type: (Link) -> str + parts = self._get_cache_path_parts_legacy(link) + assert self.cache_dir + return os.path.join(self.cache_dir, "wheels", *parts) + def get_path_for_link(self, link): # type: (Link) -> str """Return a directory to store cached wheels for link @@ -146,28 +202,53 @@ class SimpleWheelCache(Cache): :param link: The link of the sdist for which this will cache wheels. """ parts = self._get_cache_path_parts(link) - + assert self.cache_dir # Store wheels within the root cache_dir return os.path.join(self.cache_dir, "wheels", *parts) - def get(self, link, package_name): - # type: (Link, Optional[str]) -> Link + def get( + self, + link, # type: Link + package_name, # type: Optional[str] + supported_tags, # type: List[Tag] + ): + # type: (...) -> Link candidates = [] - for wheel_name in self._get_candidates(link, package_name): + if not package_name: + return link + + canonical_package_name = canonicalize_name(package_name) + for wheel_name, wheel_dir in self._get_candidates( + link, canonical_package_name + ): try: wheel = Wheel(wheel_name) except InvalidWheelFilename: continue - if not wheel.supported(): + if canonicalize_name(wheel.name) != canonical_package_name: + logger.debug( + "Ignoring cached wheel %s for %s as it " + "does not match the expected distribution name %s.", + wheel_name, link, package_name, + ) + continue + if not wheel.supported(supported_tags): # Built for a different python/arch/etc continue - candidates.append((wheel.support_index_min(), wheel_name)) + candidates.append( + ( + wheel.support_index_min(supported_tags), + wheel_name, + wheel_dir, + ) + ) if not candidates: return link - return self._link_for_candidate(link, min(candidates)[1]) + _, wheel_name, wheel_dir = min(candidates) + return Link(path_to_url(os.path.join(wheel_dir, wheel_name))) class EphemWheelCache(SimpleWheelCache): @@ 
-176,16 +257,24 @@ class EphemWheelCache(SimpleWheelCache): def __init__(self, format_control): # type: (FormatControl) -> None - self._temp_dir = TempDirectory(kind="ephem-wheel-cache") - self._temp_dir.create() + self._temp_dir = TempDirectory( + kind=tempdir_kinds.EPHEM_WHEEL_CACHE, + globally_managed=True, + ) super(EphemWheelCache, self).__init__( self._temp_dir.path, format_control ) - def cleanup(self): - # type: () -> None - self._temp_dir.cleanup() + +class CacheEntry(object): + def __init__( + self, + link, # type: Link + persistent, # type: bool + ): + self.link = link + self.persistent = persistent class WheelCache(Cache): @@ -203,6 +292,10 @@ class WheelCache(Cache): self._wheel_cache = SimpleWheelCache(cache_dir, format_control) self._ephem_cache = EphemWheelCache(format_control) + def get_path_for_link_legacy(self, link): + # type: (Link) -> str + return self._wheel_cache.get_path_for_link_legacy(link) + def get_path_for_link(self, link): # type: (Link) -> str return self._wheel_cache.get_path_for_link(link) @@ -211,14 +304,43 @@ class WheelCache(Cache): # type: (Link) -> str return self._ephem_cache.get_path_for_link(link) - def get(self, link, package_name): - # type: (Link, Optional[str]) -> Link - retval = self._wheel_cache.get(link, package_name) - if retval is link: - retval = self._ephem_cache.get(link, package_name) - return retval - - def cleanup(self): - # type: () -> None - self._wheel_cache.cleanup() - self._ephem_cache.cleanup() + def get( + self, + link, # type: Link + package_name, # type: Optional[str] + supported_tags, # type: List[Tag] + ): + # type: (...) -> Link + cache_entry = self.get_cache_entry(link, package_name, supported_tags) + if cache_entry is None: + return link + return cache_entry.link + + def get_cache_entry( + self, + link, # type: Link + package_name, # type: Optional[str] + supported_tags, # type: List[Tag] + ): + # type: (...) 
-> Optional[CacheEntry] + """Returns a CacheEntry with a link to a cached item if it exists or + None. The cache entry indicates if the item was found in the persistent + or ephemeral cache. + """ + retval = self._wheel_cache.get( + link=link, + package_name=package_name, + supported_tags=supported_tags, + ) + if retval is not link: + return CacheEntry(retval, persistent=True) + + retval = self._ephem_cache.get( + link=link, + package_name=package_name, + supported_tags=supported_tags, + ) + if retval is not link: + return CacheEntry(retval, persistent=False) + + return None diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/autocompletion.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/autocompletion.py index 0a04199e..329de602 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/autocompletion.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/autocompletion.py @@ -4,13 +4,19 @@ import optparse import os import sys +from itertools import chain from pip._internal.cli.main_parser import create_main_parser -from pip._internal.commands import commands_dict, get_summaries +from pip._internal.commands import commands_dict, create_command from pip._internal.utils.misc import get_installed_distributions +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Any, Iterable, List, Optional def autocomplete(): + # type: () -> None """Entry Point for completion of main and subcommand options. """ # Don't complete if user hasn't sourced bash_completion file. 
@@ -23,17 +29,18 @@ def autocomplete(): except IndexError: current = '' - subcommands = [cmd for cmd, summary in get_summaries()] + parser = create_main_parser() + subcommands = list(commands_dict) options = [] - # subcommand - try: - subcommand_name = [w for w in cwords if w in subcommands][0] - except IndexError: - subcommand_name = None - parser = create_main_parser() + # subcommand + subcommand_name = None # type: Optional[str] + for word in cwords: + if word in subcommands: + subcommand_name = word + break # subcommand options - if subcommand_name: + if subcommand_name is not None: # special case: 'help' subcommand has no options if subcommand_name == 'help': sys.exit(1) @@ -54,7 +61,7 @@ def autocomplete(): print(dist) sys.exit(1) - subcommand = commands_dict[subcommand_name]() + subcommand = create_command(subcommand_name) for opt in subcommand.parser.option_list_all: if opt.help != optparse.SUPPRESS_HELP: @@ -73,8 +80,8 @@ def autocomplete(): # get completion files and directories if ``completion_type`` is # ````, ```` or ```` if completion_type: - options = auto_complete_paths(current, completion_type) - options = ((opt, 0) for opt in options) + paths = auto_complete_paths(current, completion_type) + options = [(path, 0) for path in paths] for option in options: opt_label = option[0] # append '=' to options which require args @@ -86,22 +93,25 @@ def autocomplete(): opts = [i.option_list for i in parser.option_groups] opts.append(parser.option_list) - opts = (o for it in opts for o in it) + flattened_opts = chain.from_iterable(opts) if current.startswith('-'): - for opt in opts: + for opt in flattened_opts: if opt.help != optparse.SUPPRESS_HELP: subcommands += opt._long_opts + opt._short_opts else: # get completion type given cwords and all available options - completion_type = get_path_completion_type(cwords, cword, opts) + completion_type = get_path_completion_type(cwords, cword, + flattened_opts) if completion_type: - subcommands = 
auto_complete_paths(current, completion_type) + subcommands = list(auto_complete_paths(current, + completion_type)) print(' '.join([x for x in subcommands if x.startswith(current)])) sys.exit(1) def get_path_completion_type(cwords, cword, opts): + # type: (List[str], int, Iterable[Any]) -> Optional[str] """Get the type of path completion (``file``, ``dir``, ``path`` or None) :param cwords: same as the environmental variable ``COMP_WORDS`` @@ -110,7 +120,7 @@ def get_path_completion_type(cwords, cword, opts): :return: path completion type (``file``, ``dir``, ``path`` or None) """ if cword < 2 or not cwords[cword - 2].startswith('-'): - return + return None for opt in opts: if opt.help == optparse.SUPPRESS_HELP: continue @@ -120,9 +130,11 @@ def get_path_completion_type(cwords, cword, opts): x in ('path', 'file', 'dir') for x in opt.metavar.split('/')): return opt.metavar + return None def auto_complete_paths(current, completion_type): + # type: (str, str) -> Iterable[str] """If ``completion_type`` is ``file`` or ``path``, list all regular files and directories starting with ``current``; otherwise only list directories starting with ``current``. 
diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/base_command.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/base_command.py index 3ceea499..c3b6a856 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/base_command.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/base_command.py @@ -1,4 +1,5 @@ """Base Command class, and related routines""" + from __future__ import absolute_import, print_function import logging @@ -10,65 +11,75 @@ import sys import traceback from pip._internal.cli import cmdoptions +from pip._internal.cli.command_context import CommandContextMixIn from pip._internal.cli.parser import ( - ConfigOptionParser, UpdatingDefaultsHelpFormatter, + ConfigOptionParser, + UpdatingDefaultsHelpFormatter, ) from pip._internal.cli.status_codes import ( - ERROR, PREVIOUS_BUILD_DIR_ERROR, SUCCESS, UNKNOWN_ERROR, + ERROR, + PREVIOUS_BUILD_DIR_ERROR, + UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND, ) -from pip._internal.download import PipSession from pip._internal.exceptions import ( - BadCommand, CommandError, InstallationError, PreviousBuildDirError, + BadCommand, + CommandError, + InstallationError, + NetworkConnectionError, + PreviousBuildDirError, + SubProcessError, UninstallationError, ) -from pip._internal.index import PackageFinder -from pip._internal.locations import running_under_virtualenv -from pip._internal.req.constructors import ( - install_req_from_editable, install_req_from_line, -) -from pip._internal.req.req_file import parse_requirements from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.filesystem import check_path_owner from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging -from pip._internal.utils.misc import ( - get_prog, normalize_path, redact_password_from_url, +from pip._internal.utils.misc import get_prog, normalize_path +from pip._internal.utils.temp_dir import ( + 
global_tempdir_manager, + tempdir_registry, ) -from pip._internal.utils.outdated import pip_version_check from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.virtualenv import running_under_virtualenv if MYPY_CHECK_RUNNING: - from typing import Optional, List, Tuple, Any # noqa: F401 - from optparse import Values # noqa: F401 - from pip._internal.cache import WheelCache # noqa: F401 - from pip._internal.req.req_set import RequirementSet # noqa: F401 + from typing import List, Optional, Tuple, Any + from optparse import Values + + from pip._internal.utils.temp_dir import ( + TempDirectoryTypeRegistry as TempDirRegistry + ) __all__ = ['Command'] logger = logging.getLogger(__name__) -class Command(object): - name = None # type: Optional[str] - usage = None # type: Optional[str] - hidden = False # type: bool +class Command(CommandContextMixIn): + usage = None # type: str ignore_require_venv = False # type: bool - def __init__(self, isolated=False): - # type: (bool) -> None + def __init__(self, name, summary, isolated=False): + # type: (str, str, bool) -> None + super(Command, self).__init__() parser_kw = { 'usage': self.usage, - 'prog': '%s %s' % (get_prog(), self.name), + 'prog': '{} {}'.format(get_prog(), name), 'formatter': UpdatingDefaultsHelpFormatter(), 'add_help_option': False, - 'name': self.name, + 'name': name, 'description': self.__doc__, 'isolated': isolated, } + self.name = name + self.summary = summary self.parser = ConfigOptionParser(**parser_kw) + self.tempdir_registry = None # type: Optional[TempDirRegistry] + # Commands should add options to this option group - optgroup_name = '%s Options' % self.name.capitalize() + optgroup_name = '{} Options'.format(self.name.capitalize()) self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name) # Add the general options @@ -78,54 +89,49 @@ class Command(object): ) self.parser.add_option_group(gen_opts) - def run(self, options, args): - # type: (Values, List[Any]) -> Any - 
raise NotImplementedError - - def _build_session(self, options, retries=None, timeout=None): - # type: (Values, Optional[int], Optional[int]) -> PipSession - session = PipSession( - cache=( - normalize_path(os.path.join(options.cache_dir, "http")) - if options.cache_dir else None - ), - retries=retries if retries is not None else options.retries, - insecure_hosts=options.trusted_hosts, - ) - - # Handle custom ca-bundles from the user - if options.cert: - session.verify = options.cert - - # Handle SSL client certificate - if options.client_cert: - session.cert = options.client_cert - - # Handle timeouts - if options.timeout or timeout: - session.timeout = ( - timeout if timeout is not None else options.timeout - ) + self.add_options() - # Handle configured proxies - if options.proxy: - session.proxies = { - "http": options.proxy, - "https": options.proxy, - } + def add_options(self): + # type: () -> None + pass - # Determine if we can prompt the user for authentication or not - session.auth.prompting = not options.no_input + def handle_pip_version_check(self, options): + # type: (Values) -> None + """ + This is a no-op so that commands by default do not do the pip version + check. + """ + # Make sure we do the pip version check if the index_group options + # are present. + assert not hasattr(options, 'no_index') - return session + def run(self, options, args): + # type: (Values, List[Any]) -> int + raise NotImplementedError def parse_args(self, args): - # type: (List[str]) -> Tuple + # type: (List[str]) -> Tuple[Any, Any] # factored out for testability return self.parser.parse_args(args) def main(self, args): # type: (List[str]) -> int + try: + with self.main_context(): + return self._main(args) + finally: + logging.shutdown() + + def _main(self, args): + # type: (List[str]) -> int + # We must initialize this before the tempdir manager, otherwise the + # configuration would not be accessible by the time we clean up the + # tempdir manager. 
+ self.tempdir_registry = self.enter_context(tempdir_registry()) + # Intentionally set as early as possible so globally-managed temporary + # directories are available to the rest of the code. + self.enter_context(global_tempdir_manager()) + options, args = self.parse_args(args) # Set verbosity so that it can be used elsewhere. @@ -137,23 +143,20 @@ class Command(object): user_log_file=options.log, ) - if sys.version_info[:2] == (3, 4): - deprecated( - "Python 3.4 support has been deprecated. pip 19.1 will be the " - "last one supporting it. Please upgrade your Python as Python " - "3.4 won't be maintained after March 2019 (cf PEP 429).", - replacement=None, - gone_in='19.2', - ) - elif sys.version_info[:2] == (2, 7): + if ( + sys.version_info[:2] == (2, 7) and + not options.no_python_version_warning + ): message = ( - "A future version of pip will drop support for Python 2.7." + "pip 21.0 will drop support for Python 2.7 in January 2021. " + "More details about Python 2 support in pip can be found at " + "https://pip.pypa.io/en/latest/development/release-process/#python-2-support" # noqa ) if platform.python_implementation() == "CPython": message = ( - "Python 2.7 will reach the end of its life on January " + "Python 2.7 reached the end of its life on January " "1st, 2020. Please upgrade your Python as Python 2.7 " - "won't be maintained after that date. " + "is no longer maintained. " ) + message deprecated(message, replacement=None, gone_in=None) @@ -175,24 +178,57 @@ class Command(object): ) sys.exit(VIRTUALENV_NOT_FOUND) + if options.cache_dir: + options.cache_dir = normalize_path(options.cache_dir) + if not check_path_owner(options.cache_dir): + logger.warning( + "The directory '%s' or its parent directory is not owned " + "or is not writable by the current user. The cache " + "has been disabled. Check the permissions and owner of " + "that directory. 
If executing pip with sudo, you may want " + "sudo's -H flag.", + options.cache_dir, + ) + options.cache_dir = None + + if getattr(options, "build_dir", None): + deprecated( + reason=( + "The -b/--build/--build-dir/--build-directory " + "option is deprecated." + ), + replacement=( + "use the TMPDIR/TEMP/TMP environment variable, " + "possibly combined with --no-clean" + ), + gone_in="20.3", + issue=8333, + ) + + if 'resolver' in options.unstable_features: + logger.critical( + "--unstable-feature=resolver is no longer supported, and " + "has been replaced with --use-feature=2020-resolver instead." + ) + sys.exit(ERROR) + try: status = self.run(options, args) - # FIXME: all commands should return an exit status - # and when it is done, isinstance is not needed anymore - if isinstance(status, int): - return status + assert isinstance(status, int) + return status except PreviousBuildDirError as exc: logger.critical(str(exc)) logger.debug('Exception information:', exc_info=True) return PREVIOUS_BUILD_DIR_ERROR - except (InstallationError, UninstallationError, BadCommand) as exc: + except (InstallationError, UninstallationError, BadCommand, + SubProcessError, NetworkConnectionError) as exc: logger.critical(str(exc)) logger.debug('Exception information:', exc_info=True) return ERROR except CommandError as exc: - logger.critical('ERROR: %s', exc) + logger.critical('%s', exc) logger.debug('Exception information:', exc_info=True) return ERROR @@ -214,128 +250,4 @@ class Command(object): return UNKNOWN_ERROR finally: - allow_version_check = ( - # Does this command have the index_group options? - hasattr(options, "no_index") and - # Is this command allowed to perform this check? 
- not (options.disable_pip_version_check or options.no_index) - ) - # Check if we're using the latest version of pip available - if allow_version_check: - session = self._build_session( - options, - retries=0, - timeout=min(5, options.timeout) - ) - with session: - pip_version_check(session, options) - - # Shutdown the logging module - logging.shutdown() - - return SUCCESS - - -class RequirementCommand(Command): - - @staticmethod - def populate_requirement_set(requirement_set, # type: RequirementSet - args, # type: List[str] - options, # type: Values - finder, # type: PackageFinder - session, # type: PipSession - name, # type: str - wheel_cache # type: Optional[WheelCache] - ): - # type: (...) -> None - """ - Marshal cmd line args into a requirement set. - """ - # NOTE: As a side-effect, options.require_hashes and - # requirement_set.require_hashes may be updated - - for filename in options.constraints: - for req_to_add in parse_requirements( - filename, - constraint=True, finder=finder, options=options, - session=session, wheel_cache=wheel_cache): - req_to_add.is_direct = True - requirement_set.add_requirement(req_to_add) - - for req in args: - req_to_add = install_req_from_line( - req, None, isolated=options.isolated_mode, - use_pep517=options.use_pep517, - wheel_cache=wheel_cache - ) - req_to_add.is_direct = True - requirement_set.add_requirement(req_to_add) - - for req in options.editables: - req_to_add = install_req_from_editable( - req, - isolated=options.isolated_mode, - use_pep517=options.use_pep517, - wheel_cache=wheel_cache - ) - req_to_add.is_direct = True - requirement_set.add_requirement(req_to_add) - - for filename in options.requirements: - for req_to_add in parse_requirements( - filename, - finder=finder, options=options, session=session, - wheel_cache=wheel_cache, - use_pep517=options.use_pep517): - req_to_add.is_direct = True - requirement_set.add_requirement(req_to_add) - # If --require-hashes was a line in a requirements file, tell - # 
RequirementSet about it: - requirement_set.require_hashes = options.require_hashes - - if not (args or options.editables or options.requirements): - opts = {'name': name} - if options.find_links: - raise CommandError( - 'You must give at least one requirement to %(name)s ' - '(maybe you meant "pip %(name)s %(links)s"?)' % - dict(opts, links=' '.join(options.find_links))) - else: - raise CommandError( - 'You must give at least one requirement to %(name)s ' - '(see "pip help %(name)s")' % opts) - - def _build_package_finder( - self, - options, # type: Values - session, # type: PipSession - platform=None, # type: Optional[str] - python_versions=None, # type: Optional[List[str]] - abi=None, # type: Optional[str] - implementation=None # type: Optional[str] - ): - # type: (...) -> PackageFinder - """ - Create a package finder appropriate to this requirement command. - """ - index_urls = [options.index_url] + options.extra_index_urls - if options.no_index: - logger.debug( - 'Ignoring indexes: %s', - ','.join(redact_password_from_url(url) for url in index_urls), - ) - index_urls = [] - - return PackageFinder( - find_links=options.find_links, - format_control=options.format_control, - index_urls=index_urls, - trusted_hosts=options.trusted_hosts, - allow_all_prereleases=options.pre, - session=session, - platform=platform, - versions=python_versions, - abi=abi, - implementation=implementation, - prefer_binary=options.prefer_binary, - ) + self.handle_pip_version_check(options) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/cmdoptions.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/cmdoptions.py index 5cf5ee97..ed42c5f5 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/cmdoptions.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/cmdoptions.py @@ -5,31 +5,38 @@ The principle here is to define options once, but *not* instantiate them globally. 
One reason being that options with action='append' can carry state between parses. pip parses general options twice internally, and shouldn't pass on state. To be consistent, all options will follow this design. - """ + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + from __future__ import absolute_import +import os import textwrap import warnings from distutils.util import strtobool from functools import partial from optparse import SUPPRESS_HELP, Option, OptionGroup +from textwrap import dedent +from pip._internal.cli.progress_bars import BAR_TYPES from pip._internal.exceptions import CommandError -from pip._internal.locations import USER_CACHE_DIR, src_prefix +from pip._internal.locations import USER_CACHE_DIR, get_src_prefix from pip._internal.models.format_control import FormatControl from pip._internal.models.index import PyPI +from pip._internal.models.target_python import TargetPython from pip._internal.utils.hashes import STRONG_HASHES from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.utils.ui import BAR_TYPES if MYPY_CHECK_RUNNING: - from typing import Any, Callable, Dict, List, Optional, Union # noqa: F401 - from optparse import OptionParser, Values # noqa: F401 - from pip._internal.cli.parser import ConfigOptionParser # noqa: F401 + from typing import Any, Callable, Dict, Optional, Tuple + from optparse import OptionParser, Values + from pip._internal.cli.parser import ConfigOptionParser def raise_option_error(parser, option, msg): + # type: (OptionParser, Option, str) -> None """ Raise an option parsing error using parser.error(). 
@@ -68,14 +75,15 @@ def check_install_build_global(options, check_options=None): check_options = options def getname(n): + # type: (str) -> Optional[Any] return getattr(check_options, n, None) names = ["build_options", "global_options", "install_options"] if any(map(getname, names)): control = options.format_control control.disallow_binaries() warnings.warn( - 'Disabling all use of wheels due to the use of --build-options ' - '/ --global-options / --install-options.', stacklevel=2, + 'Disabling all use of wheels due to the use of --build-option ' + '/ --global-option / --install-option.', stacklevel=2, ) @@ -101,7 +109,7 @@ def check_dist_restriction(options, check_target=False): # Installations or downloads using dist restrictions must not combine # source distributions and dist-specific wheels, as they are not - # gauranteed to be locally compatible. + # guaranteed to be locally compatible. if dist_restriction_set and sdist_dependencies_allowed: raise CommandError( "When restricting platform and interpreter constraints using " @@ -119,6 +127,17 @@ def check_dist_restriction(options, check_target=False): ) +def _path_option_check(option, opt, value): + # type: (Option, str, str) -> str + return os.path.expanduser(value) + + +class PipOption(Option): + TYPES = Option.TYPES + ("path",) + TYPE_CHECKER = Option.TYPE_CHECKER.copy() + TYPE_CHECKER["path"] = _path_option_check + + ########### # options # ########### @@ -206,10 +225,11 @@ progress_bar = partial( ) # type: Callable[..., Option] log = partial( - Option, + PipOption, "--log", "--log-file", "--local-log", dest="log", metavar="path", + type="path", help="Path to a verbose appending log." ) # type: Callable[..., Option] @@ -220,7 +240,7 @@ no_input = partial( dest='no_input', action='store_true', default=False, - help=SUPPRESS_HELP + help="Disable prompting for input." 
) # type: Callable[..., Option] proxy = partial( @@ -252,16 +272,6 @@ timeout = partial( help='Set the socket timeout (default %default seconds).', ) # type: Callable[..., Option] -skip_requirements_regex = partial( - Option, - # A regex to be used to skip requirements - '--skip-requirements-regex', - dest='skip_requirements_regex', - type='str', - default='', - help=SUPPRESS_HELP, -) # type: Callable[..., Option] - def exists_action(): # type: () -> Option @@ -275,24 +285,24 @@ def exists_action(): action='append', metavar='action', help="Default action when a path already exists: " - "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort).", + "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.", ) cert = partial( - Option, + PipOption, '--cert', dest='cert', - type='str', + type='path', metavar='path', help="Path to alternate CA bundle.", ) # type: Callable[..., Option] client_cert = partial( - Option, + PipOption, '--client-cert', dest='client_cert', - type='str', + type='path', default=None, metavar='path', help="Path to SSL client certificate, a single file containing the " @@ -305,7 +315,7 @@ index_url = partial( dest='index_url', metavar='URL', default=PyPI.simple_url, - help="Base URL of Python Package Index (default %default). " + help="Base URL of the Python Package Index (default %default). " "This should point to a repository compliant with PEP 503 " "(the simple repository API) or a local directory laid out " "in the same format.", @@ -313,6 +323,7 @@ index_url = partial( def extra_index_url(): + # type: () -> Option return Option( '--extra-index-url', dest='extra_index_urls', @@ -343,9 +354,11 @@ def find_links(): action='append', default=[], metavar='url', - help="If a url or path to an html file, then parse for links to " - "archives. 
If a local path or file:// url that's a directory, " - "then look for archives in the directory listing.", + help="If a URL or path to an html file, then parse for links to " + "archives such as sdist (.tar.gz) or wheel (.whl) files. " + "If a local path or file:// URL that's a directory, " + "then look for archives in the directory listing. " + "Links to VCS project URLs are not supported.", ) @@ -357,8 +370,8 @@ def trusted_host(): action="append", metavar="HOSTNAME", default=[], - help="Mark this host as trusted, even though it does not have valid " - "or any HTTPS.", + help="Mark this host or host:port pair as trusted, even though it " + "does not have valid or any HTTPS.", ) @@ -401,12 +414,21 @@ def editable(): ) +def _handle_src(option, opt_str, value, parser): + # type: (Option, str, str, OptionParser) -> None + value = os.path.abspath(value) + setattr(parser.values, option.dest, value) + + src = partial( - Option, + PipOption, '--src', '--source', '--source-dir', '--source-directory', dest='src_dir', + type='path', metavar='dir', - default=src_prefix, + default=get_src_prefix(), + action='callback', + callback=_handle_src, help='Directory to check out editable projects into. ' 'The default in a virtualenv is "/src". ' 'The default for global installs is "/src".' @@ -442,12 +464,12 @@ def no_binary(): "--no-binary", dest="format_control", action="callback", callback=_handle_no_binary, type="str", default=format_control, - help="Do not use binary packages. Can be supplied multiple times, and " - "each time adds to the existing value. Accepts either :all: to " - "disable all binary packages, :none: to empty the set, or one or " - "more package names with commas between them. Note that some " - "packages are tricky to compile and may fail to install when " - "this option is used on them.", + help='Do not use binary packages. Can be supplied multiple times, and ' + 'each time adds to the existing value. 
Accepts either ":all:" to ' + 'disable all binary packages, ":none:" to empty the set (notice ' + 'the colons), or one or more package names with commas between ' + 'them (no colons). Note that some packages are tricky to compile ' + 'and may fail to install when this option is used on them.', ) @@ -458,12 +480,12 @@ def only_binary(): "--only-binary", dest="format_control", action="callback", callback=_handle_only_binary, type="str", default=format_control, - help="Do not use source packages. Can be supplied multiple times, and " - "each time adds to the existing value. Accepts either :all: to " - "disable all source packages, :none: to empty the set, or one or " - "more package names with commas between them. Packages without " - "binary distributions will fail to install when this option is " - "used on them.", + help='Do not use source packages. Can be supplied multiple times, and ' + 'each time adds to the existing value. Accepts either ":all:" to ' + 'disable all source packages, ":none:" to empty the set, or one ' + 'or more package names with commas between them. Packages ' + 'without binary distributions will fail to install when this ' + 'option is used on them.', ) @@ -478,18 +500,69 @@ platform = partial( ) # type: Callable[..., Option] +# This was made a separate function for unit-testing purposes. +def _convert_python_version(value): + # type: (str) -> Tuple[Tuple[int, ...], Optional[str]] + """ + Convert a version string like "3", "37", or "3.7.3" into a tuple of ints. + + :return: A 2-tuple (version_info, error_msg), where `error_msg` is + non-None if and only if there was a parsing error. + """ + if not value: + # The empty string is the same as not providing a value. + return (None, None) + + parts = value.split('.') + if len(parts) > 3: + return ((), 'at most three version parts are allowed') + + if len(parts) == 1: + # Then we are in the case of "3" or "37". 
+ value = parts[0] + if len(value) > 1: + parts = [value[0], value[1:]] + + try: + version_info = tuple(int(part) for part in parts) + except ValueError: + return ((), 'each version part must be an integer') + + return (version_info, None) + + +def _handle_python_version(option, opt_str, value, parser): + # type: (Option, str, str, OptionParser) -> None + """ + Handle a provided --python-version value. + """ + version_info, error_msg = _convert_python_version(value) + if error_msg is not None: + msg = ( + 'invalid --python-version value: {!r}: {}'.format( + value, error_msg, + ) + ) + raise_option_error(parser, option=option, msg=msg) + + parser.values.python_version = version_info + + python_version = partial( Option, '--python-version', dest='python_version', metavar='python_version', + action='callback', + callback=_handle_python_version, type='str', default=None, - help=("Only use wheels compatible with Python " - "interpreter version . If not specified, then the " - "current system interpreter minor version is used. A major " - "version (e.g. '2') can be specified to match all " - "minor revs of that major version. A minor version " - "(e.g. '34') can also be specified."), + help=dedent("""\ + The Python interpreter version to use for wheel and "Requires-Python" + compatibility checks. Defaults to a version derived from the running + interpreter. The version can be specified using up to three dot-separated + integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor + version can also be given as a string without dots (e.g. "37" for 3.7.0). 
+ """), ) # type: Callable[..., Option] @@ -522,6 +595,26 @@ abi = partial( ) # type: Callable[..., Option] +def add_target_python_options(cmd_opts): + # type: (OptionGroup) -> None + cmd_opts.add_option(platform()) + cmd_opts.add_option(python_version()) + cmd_opts.add_option(implementation()) + cmd_opts.add_option(abi()) + + +def make_target_python(options): + # type: (Values) -> TargetPython + target_python = TargetPython( + platform=options.platform, + py_version_info=options.python_version, + abi=options.abi, + implementation=options.implementation, + ) + + return target_python + + def prefer_binary(): # type: () -> Option return Option( @@ -534,16 +627,18 @@ def prefer_binary(): cache_dir = partial( - Option, + PipOption, "--cache-dir", dest="cache_dir", default=USER_CACHE_DIR, metavar="dir", + type='path', help="Store the cache data in ." ) # type: Callable[..., Option] -def no_cache_dir_callback(option, opt, value, parser): +def _handle_no_cache_dir(option, opt, value, parser): + # type: (Option, str, str, OptionParser) -> None """ Process a value provided for the --no-cache-dir option. @@ -575,7 +670,7 @@ no_cache = partial( "--no-cache-dir", dest="cache_dir", action="callback", - callback=no_cache_dir_callback, + callback=_handle_no_cache_dir, help="Disable the cache.", ) # type: Callable[..., Option] @@ -588,12 +683,24 @@ no_deps = partial( help="Don't install package dependencies.", ) # type: Callable[..., Option] + +def _handle_build_dir(option, opt, value, parser): + # type: (Option, str, str, OptionParser) -> None + if value: + value = os.path.abspath(value) + setattr(parser.values, option.dest, value) + + build_dir = partial( - Option, + PipOption, '-b', '--build', '--build-dir', '--build-directory', dest='build_dir', + type='path', metavar='dir', - help='Directory to unpack packages into and build in. Note that ' + action='callback', + callback=_handle_build_dir, + help='(DEPRECATED) ' + 'Directory to unpack packages into and build in. 
Note that ' 'an initial build still takes place in a temporary directory. ' 'The location of temporary directories can be controlled by setting ' 'the TMPDIR environment variable (TEMP on Windows) appropriately. ' @@ -620,7 +727,8 @@ no_build_isolation = partial( ) # type: Callable[..., Option] -def no_use_pep517_callback(option, opt, value, parser): +def _handle_no_use_pep517(option, opt, value, parser): + # type: (Option, str, str, OptionParser) -> None """ Process a value provided for the --no-use-pep517 option. @@ -658,7 +766,7 @@ no_use_pep517 = partial( '--no-use-pep517', dest='use_pep517', action='callback', - callback=no_use_pep517_callback, + callback=_handle_no_use_pep517, default=None, help=SUPPRESS_HELP ) # type: Any @@ -714,31 +822,21 @@ disable_pip_version_check = partial( ) # type: Callable[..., Option] -# Deprecated, Remove later -always_unzip = partial( - Option, - '-Z', '--always-unzip', - dest='always_unzip', - action='store_true', - help=SUPPRESS_HELP, -) # type: Callable[..., Option] - - -def _merge_hash(option, opt_str, value, parser): +def _handle_merge_hash(option, opt_str, value, parser): # type: (Option, str, str, OptionParser) -> None """Given a value spelled "algo:digest", append the digest to a list pointed to in a dict by the algo name.""" if not parser.values.hashes: - parser.values.hashes = {} # type: ignore + parser.values.hashes = {} try: algo, digest = value.split(':', 1) except ValueError: - parser.error('Arguments to %s must be a hash name ' - 'followed by a value, like --hash=sha256:abcde...' % - opt_str) + parser.error('Arguments to {} must be a hash name ' # noqa + 'followed by a value, like --hash=sha256:' + 'abcde...'.format(opt_str)) if algo not in STRONG_HASHES: - parser.error('Allowed hash algorithms for %s are %s.' 
% - (opt_str, ', '.join(STRONG_HASHES))) + parser.error('Allowed hash algorithms for {} are {}.'.format( # noqa + opt_str, ', '.join(STRONG_HASHES))) parser.values.hashes.setdefault(algo, []).append(digest) @@ -749,7 +847,7 @@ hash = partial( # __dict__ copying in process_line(). dest='hashes', action='callback', - callback=_merge_hash, + callback=_handle_merge_hash, type='string', help="Verify that the package's archive matches this " 'hash before installing. Example: --hash=sha256:abcdef...', @@ -768,6 +866,71 @@ require_hashes = partial( ) # type: Callable[..., Option] +list_path = partial( + PipOption, + '--path', + dest='path', + type='path', + action='append', + help='Restrict to the specified installation path for listing ' + 'packages (can be used multiple times).' +) # type: Callable[..., Option] + + +def check_list_path_option(options): + # type: (Values) -> None + if options.path and (options.user or options.local): + raise CommandError( + "Cannot combine '--path' with '--user' or '--local'" + ) + + +no_python_version_warning = partial( + Option, + '--no-python-version-warning', + dest='no_python_version_warning', + action='store_true', + default=False, + help='Silence deprecation warnings for upcoming unsupported Pythons.', +) # type: Callable[..., Option] + + +unstable_feature = partial( + Option, + '--unstable-feature', + dest='unstable_features', + metavar='feature', + action='append', + default=[], + choices=['resolver'], + help=SUPPRESS_HELP, # TODO: drop this in pip 20.3 +) # type: Callable[..., Option] + +use_new_feature = partial( + Option, + '--use-feature', + dest='features_enabled', + metavar='feature', + action='append', + default=[], + choices=['2020-resolver', 'fast-deps'], + help='Enable new functionality, that may be backward incompatible.', +) # type: Callable[..., Option] + +use_deprecated_feature = partial( + Option, + '--use-deprecated', + dest='deprecated_features_enabled', + metavar='feature', + action='append', + default=[], + 
choices=[], + help=( + 'Enable deprecated functionality, that will be removed in the future.' + ), +) # type: Callable[..., Option] + + ########## # groups # ########## @@ -786,7 +949,6 @@ general_group = { proxy, retries, timeout, - skip_requirements_regex, exists_action, trusted_host, cert, @@ -795,6 +957,10 @@ general_group = { no_cache, disable_pip_version_check, no_color, + no_python_version_warning, + unstable_feature, + use_new_feature, + use_deprecated_feature, ] } # type: Dict[str, Any] diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/command_context.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/command_context.py new file mode 100644 index 00000000..d1a64a77 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/command_context.py @@ -0,0 +1,36 @@ +from contextlib import contextmanager + +from pip._vendor.contextlib2 import ExitStack + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Iterator, ContextManager, TypeVar + + _T = TypeVar('_T', covariant=True) + + +class CommandContextMixIn(object): + def __init__(self): + # type: () -> None + super(CommandContextMixIn, self).__init__() + self._in_main_context = False + self._main_context = ExitStack() + + @contextmanager + def main_context(self): + # type: () -> Iterator[None] + assert not self._in_main_context + + self._in_main_context = True + try: + with self._main_context: + yield + finally: + self._in_main_context = False + + def enter_context(self, context_provider): + # type: (ContextManager[_T]) -> _T + assert self._in_main_context + + return self._main_context.enter_context(context_provider) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/main.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/main.py new file mode 100644 index 00000000..172f30dd --- /dev/null +++ 
b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/main.py @@ -0,0 +1,75 @@ +"""Primary application entrypoint. +""" +from __future__ import absolute_import + +import locale +import logging +import os +import sys + +from pip._internal.cli.autocompletion import autocomplete +from pip._internal.cli.main_parser import parse_command +from pip._internal.commands import create_command +from pip._internal.exceptions import PipError +from pip._internal.utils import deprecation +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional + +logger = logging.getLogger(__name__) + + +# Do not import and use main() directly! Using it directly is actively +# discouraged by pip's maintainers. The name, location and behavior of +# this function is subject to change, so calling it directly is not +# portable across different pip versions. + +# In addition, running pip in-process is unsupported and unsafe. This is +# elaborated in detail at +# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program. +# That document also provides suggestions that should work for nearly +# all users that are considering importing and using main() directly. + +# However, we know that certain users will still want to invoke pip +# in-process. If you understand and accept the implications of using pip +# in an unsupported manner, the best approach is to use runpy to avoid +# depending on the exact location of this entry point. + +# The following example shows how to use runpy to invoke pip in that +# case: +# +# sys.argv = ["pip", your, args, here] +# runpy.run_module("pip", run_name="__main__") +# +# Note that this will exit the process after running, unlike a direct +# call to main. As it is not safe to do any processing after calling +# main, this should not be an issue in practice. 
+ +def main(args=None): + # type: (Optional[List[str]]) -> int + if args is None: + args = sys.argv[1:] + + # Configure our deprecation warnings to be sent through loggers + deprecation.install_warning_logger() + + autocomplete() + + try: + cmd_name, cmd_args = parse_command(args) + except PipError as exc: + sys.stderr.write("ERROR: {}".format(exc)) + sys.stderr.write(os.linesep) + sys.exit(1) + + # Needed for locale.getpreferredencoding(False) to work + # in pip._internal.utils.encoding.auto_decode + try: + locale.setlocale(locale.LC_ALL, '') + except locale.Error as e: + # setlocale can apparently crash if locale are uninitialized + logger.debug("Ignoring error %s when setting locale", e) + command = create_command(cmd_name, isolated=("--isolated" in cmd_args)) + + return command.main(cmd_args) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/main_parser.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/main_parser.py index b17c7492..08c82c1f 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/main_parser.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/main_parser.py @@ -4,20 +4,18 @@ import os import sys -from pip import __version__ from pip._internal.cli import cmdoptions from pip._internal.cli.parser import ( - ConfigOptionParser, UpdatingDefaultsHelpFormatter, -) -from pip._internal.commands import ( - commands_dict, get_similar_commands, get_summaries, + ConfigOptionParser, + UpdatingDefaultsHelpFormatter, ) +from pip._internal.commands import commands_dict, get_similar_commands from pip._internal.exceptions import CommandError -from pip._internal.utils.misc import get_prog +from pip._internal.utils.misc import get_pip_version, get_prog from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Tuple, List # noqa: F401 + from typing import Tuple, List __all__ = ["create_main_parser", 
"parse_command"] @@ -39,12 +37,7 @@ def create_main_parser(): parser = ConfigOptionParser(**parser_kw) parser.disable_interspersed_args() - pip_pkg_dir = os.path.abspath(os.path.join( - os.path.dirname(__file__), "..", "..", - )) - parser.version = 'pip %s from %s (python %s)' % ( - __version__, pip_pkg_dir, sys.version[:3], - ) + parser.version = get_pip_version() # add the general options gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser) @@ -54,8 +47,10 @@ def create_main_parser(): parser.main = True # type: ignore # create command listing for description - command_summaries = get_summaries() - description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries] + description = [''] + [ + '{name:27} {command_info.summary}'.format(**locals()) + for name, command_info in commands_dict.items() + ] parser.description = '\n'.join(description) return parser @@ -91,9 +86,9 @@ def parse_command(args): if cmd_name not in commands_dict: guess = get_similar_commands(cmd_name) - msg = ['unknown command "%s"' % cmd_name] + msg = ['unknown command "{}"'.format(cmd_name)] if guess: - msg.append('maybe you meant "%s"' % guess) + msg.append('maybe you meant "{}"'.format(guess)) raise CommandError(' - '.join(msg)) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/parser.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/parser.py index e1eaac42..04e00b72 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/parser.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/parser.py @@ -1,4 +1,8 @@ """Base option parser setup""" + +# The following comment should be removed at some point in the future. 
+# mypy: disallow-untyped-defs=False + from __future__ import absolute_import import logging @@ -27,14 +31,14 @@ class PrettyHelpFormatter(optparse.IndentedHelpFormatter): optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs) def format_option_strings(self, option): - return self._format_option_strings(option, ' <%s>', ', ') + return self._format_option_strings(option) - def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '): + def _format_option_strings(self, option, mvarfmt=' <{}>', optsep=', '): """ Return a comma-separated list of option strings and metavars. :param option: tuple of (short opt, long opt), e.g: ('-f', '--format') - :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar + :param mvarfmt: metavar format string :param optsep: separator """ opts = [] @@ -48,7 +52,7 @@ class PrettyHelpFormatter(optparse.IndentedHelpFormatter): if option.takes_value(): metavar = option.metavar or option.dest.lower() - opts.append(mvarfmt % metavar.lower()) + opts.append(mvarfmt.format(metavar.lower())) return ''.join(opts) @@ -62,7 +66,8 @@ class PrettyHelpFormatter(optparse.IndentedHelpFormatter): Ensure there is only one newline between usage and the first heading if there is no description. 
""" - msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), " ") + msg = '\nUsage: {}\n'.format( + self.indent_lines(textwrap.dedent(usage), " ")) return msg def format_description(self, description): @@ -78,7 +83,7 @@ class PrettyHelpFormatter(optparse.IndentedHelpFormatter): description = description.rstrip() # dedent, then reindent description = self.indent_lines(textwrap.dedent(description), " ") - description = '%s:\n%s\n' % (label, description) + description = '{}:\n{}\n'.format(label, description) return description else: return '' @@ -146,7 +151,7 @@ class ConfigOptionParser(CustomOptionParser): try: return option.check_value(key, val) except optparse.OptionValueError as exc: - print("An error occurred during configuration: %s" % exc) + print("An error occurred during configuration: {}".format(exc)) sys.exit(3) def _get_ordered_configuration_items(self): @@ -245,7 +250,7 @@ class ConfigOptionParser(CustomOptionParser): def error(self, msg): self.print_usage(sys.stderr) - self.exit(UNKNOWN_ERROR, "%s\n" % msg) + self.exit(UNKNOWN_ERROR, "{}\n".format(msg)) def invalid_config_error_message(action, key, val): diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/progress_bars.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/progress_bars.py new file mode 100644 index 00000000..69338552 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/progress_bars.py @@ -0,0 +1,280 @@ +from __future__ import division + +import itertools +import sys +from signal import SIGINT, default_int_handler, signal + +from pip._vendor import six +from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar +from pip._vendor.progress.spinner import Spinner + +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.logging import get_indentation +from pip._internal.utils.misc import format_size +from pip._internal.utils.typing import MYPY_CHECK_RUNNING 
+ +if MYPY_CHECK_RUNNING: + from typing import Any, Dict, List + +try: + from pip._vendor import colorama +# Lots of different errors can come from this, including SystemError and +# ImportError. +except Exception: + colorama = None + + +def _select_progress_class(preferred, fallback): + # type: (Bar, Bar) -> Bar + encoding = getattr(preferred.file, "encoding", None) + + # If we don't know what encoding this file is in, then we'll just assume + # that it doesn't support unicode and use the ASCII bar. + if not encoding: + return fallback + + # Collect all of the possible characters we want to use with the preferred + # bar. + characters = [ + getattr(preferred, "empty_fill", six.text_type()), + getattr(preferred, "fill", six.text_type()), + ] + characters += list(getattr(preferred, "phases", [])) + + # Try to decode the characters we're using for the bar using the encoding + # of the given file, if this works then we'll assume that we can use the + # fancier bar and if not we'll fall back to the plaintext bar. + try: + six.text_type().join(characters).encode(encoding) + except UnicodeEncodeError: + return fallback + else: + return preferred + + +_BaseBar = _select_progress_class(IncrementalBar, Bar) # type: Any + + +class InterruptibleMixin(object): + """ + Helper to ensure that self.finish() gets called on keyboard interrupt. + + This allows downloads to be interrupted without leaving temporary state + (like hidden cursors) behind. + + This class is similar to the progress library's existing SigIntMixin + helper, but as of version 1.2, that helper has the following problems: + + 1. It calls sys.exit(). + 2. It discards the existing SIGINT handler completely. + 3. It leaves its own handler in place even after an uninterrupted finish, + which will have unexpected delayed effects if the user triggers an + unrelated keyboard interrupt some time after a progress-displaying + download has already completed, for example. 
+ """ + + def __init__(self, *args, **kwargs): + # type: (List[Any], Dict[Any, Any]) -> None + """ + Save the original SIGINT handler for later. + """ + # https://github.com/python/mypy/issues/5887 + super(InterruptibleMixin, self).__init__( # type: ignore + *args, + **kwargs + ) + + self.original_handler = signal(SIGINT, self.handle_sigint) + + # If signal() returns None, the previous handler was not installed from + # Python, and we cannot restore it. This probably should not happen, + # but if it does, we must restore something sensible instead, at least. + # The least bad option should be Python's default SIGINT handler, which + # just raises KeyboardInterrupt. + if self.original_handler is None: + self.original_handler = default_int_handler + + def finish(self): + # type: () -> None + """ + Restore the original SIGINT handler after finishing. + + This should happen regardless of whether the progress display finishes + normally, or gets interrupted. + """ + super(InterruptibleMixin, self).finish() # type: ignore + signal(SIGINT, self.original_handler) + + def handle_sigint(self, signum, frame): # type: ignore + """ + Call self.finish() before delegating to the original SIGINT handler. + + This handler should only be in place while the progress display is + active. 
+ """ + self.finish() + self.original_handler(signum, frame) + + +class SilentBar(Bar): + + def update(self): + # type: () -> None + pass + + +class BlueEmojiBar(IncrementalBar): + + suffix = "%(percent)d%%" + bar_prefix = " " + bar_suffix = " " + phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535") # type: Any + + +class DownloadProgressMixin(object): + + def __init__(self, *args, **kwargs): + # type: (List[Any], Dict[Any, Any]) -> None + # https://github.com/python/mypy/issues/5887 + super(DownloadProgressMixin, self).__init__( # type: ignore + *args, + **kwargs + ) + self.message = (" " * ( + get_indentation() + 2 + )) + self.message # type: str + + @property + def downloaded(self): + # type: () -> str + return format_size(self.index) # type: ignore + + @property + def download_speed(self): + # type: () -> str + # Avoid zero division errors... + if self.avg == 0.0: # type: ignore + return "..." + return format_size(1 / self.avg) + "/s" # type: ignore + + @property + def pretty_eta(self): + # type: () -> str + if self.eta: # type: ignore + return "eta {}".format(self.eta_td) # type: ignore + return "" + + def iter(self, it): # type: ignore + for x in it: + yield x + # B305 is incorrectly raised here + # https://github.com/PyCQA/flake8-bugbear/issues/59 + self.next(len(x)) # noqa: B305 + self.finish() + + +class WindowsMixin(object): + + def __init__(self, *args, **kwargs): + # type: (List[Any], Dict[Any, Any]) -> None + # The Windows terminal does not support the hide/show cursor ANSI codes + # even with colorama. So we'll ensure that hide_cursor is False on + # Windows. + # This call needs to go before the super() call, so that hide_cursor + # is set in time. The base progress bar class writes the "hide cursor" + # code to the terminal in its init, so if we don't set this soon + # enough, we get a "hide" with no corresponding "show"... 
+ if WINDOWS and self.hide_cursor: # type: ignore + self.hide_cursor = False + + # https://github.com/python/mypy/issues/5887 + super(WindowsMixin, self).__init__(*args, **kwargs) # type: ignore + + # Check if we are running on Windows and we have the colorama module, + # if we do then wrap our file with it. + if WINDOWS and colorama: + self.file = colorama.AnsiToWin32(self.file) # type: ignore + # The progress code expects to be able to call self.file.isatty() + # but the colorama.AnsiToWin32() object doesn't have that, so we'll + # add it. + self.file.isatty = lambda: self.file.wrapped.isatty() + # The progress code expects to be able to call self.file.flush() + # but the colorama.AnsiToWin32() object doesn't have that, so we'll + # add it. + self.file.flush = lambda: self.file.wrapped.flush() + + +class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, + DownloadProgressMixin): + + file = sys.stdout + message = "%(percent)d%%" + suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s" + + +class DefaultDownloadProgressBar(BaseDownloadProgressBar, + _BaseBar): + pass + + +class DownloadSilentBar(BaseDownloadProgressBar, SilentBar): + pass + + +class DownloadBar(BaseDownloadProgressBar, + Bar): + pass + + +class DownloadFillingCirclesBar(BaseDownloadProgressBar, + FillingCirclesBar): + pass + + +class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, + BlueEmojiBar): + pass + + +class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin, + DownloadProgressMixin, Spinner): + + file = sys.stdout + suffix = "%(downloaded)s %(download_speed)s" + + def next_phase(self): + # type: () -> str + if not hasattr(self, "_phaser"): + self._phaser = itertools.cycle(self.phases) + return next(self._phaser) + + def update(self): + # type: () -> None + message = self.message % self + phase = self.next_phase() + suffix = self.suffix % self + line = ''.join([ + message, + " " if message else "", + phase, + " " if suffix else "", + suffix, + ]) + + 
self.writeln(line) + + +BAR_TYPES = { + "off": (DownloadSilentBar, DownloadSilentBar), + "on": (DefaultDownloadProgressBar, DownloadProgressSpinner), + "ascii": (DownloadBar, DownloadProgressSpinner), + "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner), + "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner) +} + + +def DownloadProgressProvider(progress_bar, max=None): # type: ignore + if max is None or max == 0: + return BAR_TYPES[progress_bar][1]().iter + else: + return BAR_TYPES[progress_bar][0](max=max).iter diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/req_command.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/req_command.py new file mode 100644 index 00000000..78b5ce6a --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/req_command.py @@ -0,0 +1,402 @@ +"""Contains the Command base classes that depend on PipSession. + +The classes in this module are in a separate module so the commands not +needing download / PackageFinder capability don't unnecessarily import the +PackageFinder machinery and all its vendored dependencies, etc. 
+""" + +import logging +import os +from functools import partial + +from pip._internal.cli import cmdoptions +from pip._internal.cli.base_command import Command +from pip._internal.cli.command_context import CommandContextMixIn +from pip._internal.exceptions import CommandError, PreviousBuildDirError +from pip._internal.index.collector import LinkCollector +from pip._internal.index.package_finder import PackageFinder +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.network.download import Downloader +from pip._internal.network.session import PipSession +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.constructors import ( + install_req_from_editable, + install_req_from_line, + install_req_from_parsed_requirement, + install_req_from_req_string, +) +from pip._internal.req.req_file import parse_requirements +from pip._internal.self_outdated_check import pip_self_version_check +from pip._internal.utils.temp_dir import tempdir_kinds +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import Any, List, Optional, Tuple + + from pip._internal.cache import WheelCache + from pip._internal.models.target_python import TargetPython + from pip._internal.req.req_install import InstallRequirement + from pip._internal.req.req_tracker import RequirementTracker + from pip._internal.resolution.base import BaseResolver + from pip._internal.utils.temp_dir import ( + TempDirectory, + TempDirectoryTypeRegistry, + ) + + +logger = logging.getLogger(__name__) + + +class SessionCommandMixin(CommandContextMixIn): + + """ + A class mixin for command classes needing _build_session(). 
+ """ + def __init__(self): + # type: () -> None + super(SessionCommandMixin, self).__init__() + self._session = None # Optional[PipSession] + + @classmethod + def _get_index_urls(cls, options): + # type: (Values) -> Optional[List[str]] + """Return a list of index urls from user-provided options.""" + index_urls = [] + if not getattr(options, "no_index", False): + url = getattr(options, "index_url", None) + if url: + index_urls.append(url) + urls = getattr(options, "extra_index_urls", None) + if urls: + index_urls.extend(urls) + # Return None rather than an empty list + return index_urls or None + + def get_default_session(self, options): + # type: (Values) -> PipSession + """Get a default-managed session.""" + if self._session is None: + self._session = self.enter_context(self._build_session(options)) + # there's no type annotation on requests.Session, so it's + # automatically ContextManager[Any] and self._session becomes Any, + # then https://github.com/python/mypy/issues/7696 kicks in + assert self._session is not None + return self._session + + def _build_session(self, options, retries=None, timeout=None): + # type: (Values, Optional[int], Optional[int]) -> PipSession + assert not options.cache_dir or os.path.isabs(options.cache_dir) + session = PipSession( + cache=( + os.path.join(options.cache_dir, "http") + if options.cache_dir else None + ), + retries=retries if retries is not None else options.retries, + trusted_hosts=options.trusted_hosts, + index_urls=self._get_index_urls(options), + ) + + # Handle custom ca-bundles from the user + if options.cert: + session.verify = options.cert + + # Handle SSL client certificate + if options.client_cert: + session.cert = options.client_cert + + # Handle timeouts + if options.timeout or timeout: + session.timeout = ( + timeout if timeout is not None else options.timeout + ) + + # Handle configured proxies + if options.proxy: + session.proxies = { + "http": options.proxy, + "https": options.proxy, + } + + # Determine 
if we can prompt the user for authentication or not + session.auth.prompting = not options.no_input + + return session + + +class IndexGroupCommand(Command, SessionCommandMixin): + + """ + Abstract base class for commands with the index_group options. + + This also corresponds to the commands that permit the pip version check. + """ + + def handle_pip_version_check(self, options): + # type: (Values) -> None + """ + Do the pip version check if not disabled. + + This overrides the default behavior of not doing the check. + """ + # Make sure the index_group options are present. + assert hasattr(options, 'no_index') + + if options.disable_pip_version_check or options.no_index: + return + + # Otherwise, check if we're using the latest version of pip available. + session = self._build_session( + options, + retries=0, + timeout=min(5, options.timeout) + ) + with session: + pip_self_version_check(session, options) + + +KEEPABLE_TEMPDIR_TYPES = [ + tempdir_kinds.BUILD_ENV, + tempdir_kinds.EPHEM_WHEEL_CACHE, + tempdir_kinds.REQ_BUILD, +] + + +def with_cleanup(func): + # type: (Any) -> Any + """Decorator for common logic related to managing temporary + directories. + """ + def configure_tempdir_registry(registry): + # type: (TempDirectoryTypeRegistry) -> None + for t in KEEPABLE_TEMPDIR_TYPES: + registry.set_delete(t, False) + + def wrapper(self, options, args): + # type: (RequirementCommand, Values, List[Any]) -> Optional[int] + assert self.tempdir_registry is not None + if options.no_clean: + configure_tempdir_registry(self.tempdir_registry) + + try: + return func(self, options, args) + except PreviousBuildDirError: + # This kind of conflict can occur when the user passes an explicit + # build directory with a pre-existing folder. In that case we do + # not want to accidentally remove it. 
+ configure_tempdir_registry(self.tempdir_registry) + raise + + return wrapper + + +class RequirementCommand(IndexGroupCommand): + + def __init__(self, *args, **kw): + # type: (Any, Any) -> None + super(RequirementCommand, self).__init__(*args, **kw) + + self.cmd_opts.add_option(cmdoptions.no_clean()) + + @staticmethod + def make_requirement_preparer( + temp_build_dir, # type: TempDirectory + options, # type: Values + req_tracker, # type: RequirementTracker + session, # type: PipSession + finder, # type: PackageFinder + use_user_site, # type: bool + download_dir=None, # type: str + wheel_download_dir=None, # type: str + ): + # type: (...) -> RequirementPreparer + """ + Create a RequirementPreparer instance for the given parameters. + """ + downloader = Downloader(session, progress_bar=options.progress_bar) + + temp_build_dir_path = temp_build_dir.path + assert temp_build_dir_path is not None + + return RequirementPreparer( + build_dir=temp_build_dir_path, + src_dir=options.src_dir, + download_dir=download_dir, + wheel_download_dir=wheel_download_dir, + build_isolation=options.build_isolation, + req_tracker=req_tracker, + downloader=downloader, + finder=finder, + require_hashes=options.require_hashes, + use_user_site=use_user_site, + ) + + @staticmethod + def make_resolver( + preparer, # type: RequirementPreparer + finder, # type: PackageFinder + options, # type: Values + wheel_cache=None, # type: Optional[WheelCache] + use_user_site=False, # type: bool + ignore_installed=True, # type: bool + ignore_requires_python=False, # type: bool + force_reinstall=False, # type: bool + upgrade_strategy="to-satisfy-only", # type: str + use_pep517=None, # type: Optional[bool] + py_version_info=None # type: Optional[Tuple[int, ...]] + ): + # type: (...) -> BaseResolver + """ + Create a Resolver instance for the given parameters. 
+ """ + make_install_req = partial( + install_req_from_req_string, + isolated=options.isolated_mode, + use_pep517=use_pep517, + ) + # The long import name and duplicated invocation is needed to convince + # Mypy into correctly typechecking. Otherwise it would complain the + # "Resolver" class being redefined. + if '2020-resolver' in options.features_enabled: + import pip._internal.resolution.resolvelib.resolver + return pip._internal.resolution.resolvelib.resolver.Resolver( + preparer=preparer, + finder=finder, + wheel_cache=wheel_cache, + make_install_req=make_install_req, + use_user_site=use_user_site, + ignore_dependencies=options.ignore_dependencies, + ignore_installed=ignore_installed, + ignore_requires_python=ignore_requires_python, + force_reinstall=force_reinstall, + upgrade_strategy=upgrade_strategy, + py_version_info=py_version_info, + lazy_wheel='fast-deps' in options.features_enabled, + ) + import pip._internal.resolution.legacy.resolver + return pip._internal.resolution.legacy.resolver.Resolver( + preparer=preparer, + finder=finder, + wheel_cache=wheel_cache, + make_install_req=make_install_req, + use_user_site=use_user_site, + ignore_dependencies=options.ignore_dependencies, + ignore_installed=ignore_installed, + ignore_requires_python=ignore_requires_python, + force_reinstall=force_reinstall, + upgrade_strategy=upgrade_strategy, + py_version_info=py_version_info, + ) + + def get_requirements( + self, + args, # type: List[str] + options, # type: Values + finder, # type: PackageFinder + session, # type: PipSession + ): + # type: (...) -> List[InstallRequirement] + """ + Parse command-line arguments into the corresponding requirements. 
+ """ + requirements = [] # type: List[InstallRequirement] + for filename in options.constraints: + for parsed_req in parse_requirements( + filename, + constraint=True, finder=finder, options=options, + session=session): + req_to_add = install_req_from_parsed_requirement( + parsed_req, + isolated=options.isolated_mode, + user_supplied=False, + ) + requirements.append(req_to_add) + + for req in args: + req_to_add = install_req_from_line( + req, None, isolated=options.isolated_mode, + use_pep517=options.use_pep517, + user_supplied=True, + ) + requirements.append(req_to_add) + + for req in options.editables: + req_to_add = install_req_from_editable( + req, + user_supplied=True, + isolated=options.isolated_mode, + use_pep517=options.use_pep517, + ) + requirements.append(req_to_add) + + # NOTE: options.require_hashes may be set if --require-hashes is True + for filename in options.requirements: + for parsed_req in parse_requirements( + filename, + finder=finder, options=options, session=session): + req_to_add = install_req_from_parsed_requirement( + parsed_req, + isolated=options.isolated_mode, + use_pep517=options.use_pep517, + user_supplied=True, + ) + requirements.append(req_to_add) + + # If any requirement has hash options, enable hash checking. + if any(req.has_hash_options for req in requirements): + options.require_hashes = True + + if not (args or options.editables or options.requirements): + opts = {'name': self.name} + if options.find_links: + raise CommandError( + 'You must give at least one requirement to {name} ' + '(maybe you meant "pip {name} {links}"?)'.format( + **dict(opts, links=' '.join(options.find_links)))) + else: + raise CommandError( + 'You must give at least one requirement to {name} ' + '(see "pip help {name}")'.format(**opts)) + + return requirements + + @staticmethod + def trace_basic_info(finder): + # type: (PackageFinder) -> None + """ + Trace basic information about the provided objects. 
+ """ + # Display where finder is looking for packages + search_scope = finder.search_scope + locations = search_scope.get_formatted_locations() + if locations: + logger.info(locations) + + def _build_package_finder( + self, + options, # type: Values + session, # type: PipSession + target_python=None, # type: Optional[TargetPython] + ignore_requires_python=None, # type: Optional[bool] + ): + # type: (...) -> PackageFinder + """ + Create a package finder appropriate to this requirement command. + + :param ignore_requires_python: Whether to ignore incompatible + "Requires-Python" values in links. Defaults to False. + """ + link_collector = LinkCollector.create(session, options=options) + selection_prefs = SelectionPreferences( + allow_yanked=True, + format_control=options.format_control, + allow_all_prereleases=options.pre, + prefer_binary=options.prefer_binary, + ignore_requires_python=ignore_requires_python, + ) + + return PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, + target_python=target_python, + ) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/spinners.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/spinners.py new file mode 100644 index 00000000..c6c4c5cd --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/cli/spinners.py @@ -0,0 +1,173 @@ +from __future__ import absolute_import, division + +import contextlib +import itertools +import logging +import sys +import time + +from pip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR + +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.logging import get_indentation +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Iterator, IO + +logger = logging.getLogger(__name__) + + +class SpinnerInterface(object): + def spin(self): + # type: () -> None + raise NotImplementedError() + + def finish(self, 
final_status): + # type: (str) -> None + raise NotImplementedError() + + +class InteractiveSpinner(SpinnerInterface): + def __init__(self, message, file=None, spin_chars="-\\|/", + # Empirically, 8 updates/second looks nice + min_update_interval_seconds=0.125): + # type: (str, IO[str], str, float) -> None + self._message = message + if file is None: + file = sys.stdout + self._file = file + self._rate_limiter = RateLimiter(min_update_interval_seconds) + self._finished = False + + self._spin_cycle = itertools.cycle(spin_chars) + + self._file.write(" " * get_indentation() + self._message + " ... ") + self._width = 0 + + def _write(self, status): + # type: (str) -> None + assert not self._finished + # Erase what we wrote before by backspacing to the beginning, writing + # spaces to overwrite the old text, and then backspacing again + backup = "\b" * self._width + self._file.write(backup + " " * self._width + backup) + # Now we have a blank slate to add our status + self._file.write(status) + self._width = len(status) + self._file.flush() + self._rate_limiter.reset() + + def spin(self): + # type: () -> None + if self._finished: + return + if not self._rate_limiter.ready(): + return + self._write(next(self._spin_cycle)) + + def finish(self, final_status): + # type: (str) -> None + if self._finished: + return + self._write(final_status) + self._file.write("\n") + self._file.flush() + self._finished = True + + +# Used for dumb terminals, non-interactive installs (no tty), etc. +# We still print updates occasionally (once every 60 seconds by default) to +# act as a keep-alive for systems like Travis-CI that take lack-of-output as +# an indication that a task has frozen. 
+class NonInteractiveSpinner(SpinnerInterface): + def __init__(self, message, min_update_interval_seconds=60): + # type: (str, float) -> None + self._message = message + self._finished = False + self._rate_limiter = RateLimiter(min_update_interval_seconds) + self._update("started") + + def _update(self, status): + # type: (str) -> None + assert not self._finished + self._rate_limiter.reset() + logger.info("%s: %s", self._message, status) + + def spin(self): + # type: () -> None + if self._finished: + return + if not self._rate_limiter.ready(): + return + self._update("still running...") + + def finish(self, final_status): + # type: (str) -> None + if self._finished: + return + self._update( + "finished with status '{final_status}'".format(**locals())) + self._finished = True + + +class RateLimiter(object): + def __init__(self, min_update_interval_seconds): + # type: (float) -> None + self._min_update_interval_seconds = min_update_interval_seconds + self._last_update = 0 # type: float + + def ready(self): + # type: () -> bool + now = time.time() + delta = now - self._last_update + return delta >= self._min_update_interval_seconds + + def reset(self): + # type: () -> None + self._last_update = time.time() + + +@contextlib.contextmanager +def open_spinner(message): + # type: (str) -> Iterator[SpinnerInterface] + # Interactive spinner goes directly to sys.stdout rather than being routed + # through the logging system, but it acts like it has level INFO, + # i.e. it's only displayed if we're at level INFO or better. + # Non-interactive spinner goes through the logging system, so it is always + # in sync with logging configuration. 
+ if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO: + spinner = InteractiveSpinner(message) # type: SpinnerInterface + else: + spinner = NonInteractiveSpinner(message) + try: + with hidden_cursor(sys.stdout): + yield spinner + except KeyboardInterrupt: + spinner.finish("canceled") + raise + except Exception: + spinner.finish("error") + raise + else: + spinner.finish("done") + + +@contextlib.contextmanager +def hidden_cursor(file): + # type: (IO[str]) -> Iterator[None] + # The Windows terminal does not support the hide/show cursor ANSI codes, + # even via colorama. So don't even try. + if WINDOWS: + yield + # We don't want to clutter the output with control characters if we're + # writing to a file, or if the user is running with --quiet. + # See https://github.com/pypa/pip/issues/3418 + elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO: + yield + else: + file.write(HIDE_CURSOR) + try: + yield + finally: + file.write(SHOW_CURSOR) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/__init__.py index c7d1da3d..6825fa6e 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/__init__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/__init__.py @@ -1,57 +1,111 @@ """ Package containing all pip commands """ + +# The following comment should be removed at some point in the future. 
+# mypy: disallow-untyped-defs=False +# There is currently a bug in python/typeshed mentioned at +# https://github.com/python/typeshed/issues/3906 which causes the +# return type of difflib.get_close_matches to be reported +# as List[Sequence[str]] whereas it should have been List[str] + from __future__ import absolute_import -from pip._internal.commands.completion import CompletionCommand -from pip._internal.commands.configuration import ConfigurationCommand -from pip._internal.commands.download import DownloadCommand -from pip._internal.commands.freeze import FreezeCommand -from pip._internal.commands.hash import HashCommand -from pip._internal.commands.help import HelpCommand -from pip._internal.commands.list import ListCommand -from pip._internal.commands.check import CheckCommand -from pip._internal.commands.search import SearchCommand -from pip._internal.commands.show import ShowCommand -from pip._internal.commands.install import InstallCommand -from pip._internal.commands.uninstall import UninstallCommand -from pip._internal.commands.wheel import WheelCommand +import importlib +from collections import OrderedDict, namedtuple from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import List, Type # noqa: F401 - from pip._internal.cli.base_command import Command # noqa: F401 - -commands_order = [ - InstallCommand, - DownloadCommand, - UninstallCommand, - FreezeCommand, - ListCommand, - ShowCommand, - CheckCommand, - ConfigurationCommand, - SearchCommand, - WheelCommand, - HashCommand, - CompletionCommand, - HelpCommand, -] # type: List[Type[Command]] - -commands_dict = {c.name: c for c in commands_order} - - -def get_summaries(ordered=True): - """Yields sorted (command name, command summary) tuples.""" - - if ordered: - cmditems = _sort_commands(commands_dict, commands_order) - else: - cmditems = commands_dict.items() - - for name, command_class in cmditems: - yield (name, command_class.summary) + from typing import Any 
+ from pip._internal.cli.base_command import Command + + +CommandInfo = namedtuple('CommandInfo', 'module_path, class_name, summary') + +# The ordering matters for help display. +# Also, even though the module path starts with the same +# "pip._internal.commands" prefix in each case, we include the full path +# because it makes testing easier (specifically when modifying commands_dict +# in test setup / teardown by adding info for a FakeCommand class defined +# in a test-related module). +# Finally, we need to pass an iterable of pairs here rather than a dict +# so that the ordering won't be lost when using Python 2.7. +commands_dict = OrderedDict([ + ('install', CommandInfo( + 'pip._internal.commands.install', 'InstallCommand', + 'Install packages.', + )), + ('download', CommandInfo( + 'pip._internal.commands.download', 'DownloadCommand', + 'Download packages.', + )), + ('uninstall', CommandInfo( + 'pip._internal.commands.uninstall', 'UninstallCommand', + 'Uninstall packages.', + )), + ('freeze', CommandInfo( + 'pip._internal.commands.freeze', 'FreezeCommand', + 'Output installed packages in requirements format.', + )), + ('list', CommandInfo( + 'pip._internal.commands.list', 'ListCommand', + 'List installed packages.', + )), + ('show', CommandInfo( + 'pip._internal.commands.show', 'ShowCommand', + 'Show information about installed packages.', + )), + ('check', CommandInfo( + 'pip._internal.commands.check', 'CheckCommand', + 'Verify installed packages have compatible dependencies.', + )), + ('config', CommandInfo( + 'pip._internal.commands.configuration', 'ConfigurationCommand', + 'Manage local and global configuration.', + )), + ('search', CommandInfo( + 'pip._internal.commands.search', 'SearchCommand', + 'Search PyPI for packages.', + )), + ('cache', CommandInfo( + 'pip._internal.commands.cache', 'CacheCommand', + "Inspect and manage pip's wheel cache.", + )), + ('wheel', CommandInfo( + 'pip._internal.commands.wheel', 'WheelCommand', + 'Build wheels from your 
requirements.', + )), + ('hash', CommandInfo( + 'pip._internal.commands.hash', 'HashCommand', + 'Compute hashes of package archives.', + )), + ('completion', CommandInfo( + 'pip._internal.commands.completion', 'CompletionCommand', + 'A helper command used for command completion.', + )), + ('debug', CommandInfo( + 'pip._internal.commands.debug', 'DebugCommand', + 'Show information useful for debugging.', + )), + ('help', CommandInfo( + 'pip._internal.commands.help', 'HelpCommand', + 'Show help for commands.', + )), +]) # type: OrderedDict[str, CommandInfo] + + +def create_command(name, **kwargs): + # type: (str, **Any) -> Command + """ + Create an instance of the Command class with the given name. + """ + module_path, class_name, summary = commands_dict[name] + module = importlib.import_module(module_path) + command_class = getattr(module, class_name) + command = command_class(name=name, summary=summary, **kwargs) + + return command def get_similar_commands(name): @@ -66,14 +120,3 @@ def get_similar_commands(name): return close_commands[0] else: return False - - -def _sort_commands(cmddict, order): - def keyfn(key): - try: - return order.index(key[1]) - except ValueError: - # unordered items should come last - return 0xff - - return sorted(cmddict.items(), key=keyfn) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/cache.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/cache.py new file mode 100644 index 00000000..747277f6 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/cache.py @@ -0,0 +1,182 @@ +from __future__ import absolute_import + +import logging +import os +import textwrap + +import pip._internal.utils.filesystem as filesystem +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.exceptions import CommandError, PipError +from pip._internal.utils.typing import 
MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import Any, List + + +logger = logging.getLogger(__name__) + + +class CacheCommand(Command): + """ + Inspect and manage pip's wheel cache. + + Subcommands: + + - dir: Show the cache directory. + - info: Show information about the cache. + - list: List filenames of packages stored in the cache. + - remove: Remove one or more package from the cache. + - purge: Remove all items from the cache. + + ```` can be a glob expression or a package name. + """ + + ignore_require_venv = True + usage = """ + %prog dir + %prog info + %prog list [] + %prog remove + %prog purge + """ + + def run(self, options, args): + # type: (Values, List[Any]) -> int + handlers = { + "dir": self.get_cache_dir, + "info": self.get_cache_info, + "list": self.list_cache_items, + "remove": self.remove_cache_items, + "purge": self.purge_cache, + } + + if not options.cache_dir: + logger.error("pip cache commands can not " + "function since cache is disabled.") + return ERROR + + # Determine action + if not args or args[0] not in handlers: + logger.error( + "Need an action (%s) to perform.", + ", ".join(sorted(handlers)), + ) + return ERROR + + action = args[0] + + # Error handling happens here, not in the action-handlers. 
+ try: + handlers[action](options, args[1:]) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + return SUCCESS + + def get_cache_dir(self, options, args): + # type: (Values, List[Any]) -> None + if args: + raise CommandError('Too many arguments') + + logger.info(options.cache_dir) + + def get_cache_info(self, options, args): + # type: (Values, List[Any]) -> None + if args: + raise CommandError('Too many arguments') + + num_packages = len(self._find_wheels(options, '*')) + + cache_location = self._wheels_cache_dir(options) + cache_size = filesystem.format_directory_size(cache_location) + + message = textwrap.dedent(""" + Location: {location} + Size: {size} + Number of wheels: {package_count} + """).format( + location=cache_location, + package_count=num_packages, + size=cache_size, + ).strip() + + logger.info(message) + + def list_cache_items(self, options, args): + # type: (Values, List[Any]) -> None + if len(args) > 1: + raise CommandError('Too many arguments') + + if args: + pattern = args[0] + else: + pattern = '*' + + files = self._find_wheels(options, pattern) + + if not files: + logger.info('Nothing cached.') + return + + results = [] + for filename in files: + wheel = os.path.basename(filename) + size = filesystem.format_file_size(filename) + results.append(' - {} ({})'.format(wheel, size)) + logger.info('Cache contents:\n') + logger.info('\n'.join(sorted(results))) + + def remove_cache_items(self, options, args): + # type: (Values, List[Any]) -> None + if len(args) > 1: + raise CommandError('Too many arguments') + + if not args: + raise CommandError('Please provide a pattern') + + files = self._find_wheels(options, args[0]) + if not files: + raise CommandError('No matching packages') + + for filename in files: + os.unlink(filename) + logger.debug('Removed %s', filename) + logger.info('Files removed: %s', len(files)) + + def purge_cache(self, options, args): + # type: (Values, List[Any]) -> None + if args: + raise CommandError('Too many 
arguments') + + return self.remove_cache_items(options, ['*']) + + def _wheels_cache_dir(self, options): + # type: (Values) -> str + return os.path.join(options.cache_dir, 'wheels') + + def _find_wheels(self, options, pattern): + # type: (Values, str) -> List[str] + wheel_dir = self._wheels_cache_dir(options) + + # The wheel filename format, as specified in PEP 427, is: + # {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl + # + # Additionally, non-alphanumeric values in the distribution are + # normalized to underscores (_), meaning hyphens can never occur + # before `-{version}`. + # + # Given that information: + # - If the pattern we're given contains a hyphen (-), the user is + # providing at least the version. Thus, we can just append `*.whl` + # to match the rest of it. + # - If the pattern we're given doesn't contain a hyphen (-), the + # user is only providing the name. Thus, we append `-*.whl` to + # match the hyphen before the version, followed by anything else. 
+ # + # PEP 427: https://www.python.org/dev/peps/pep-0427/ + pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl") + + return filesystem.find_files(wheel_dir, pattern) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/check.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/check.py index 801cecc0..b557ca64 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/check.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/check.py @@ -1,28 +1,37 @@ import logging from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS from pip._internal.operations.check import ( - check_package_set, create_package_set_from_installed, + check_package_set, + create_package_set_from_installed, ) +from pip._internal.utils.misc import write_output +from pip._internal.utils.typing import MYPY_CHECK_RUNNING logger = logging.getLogger(__name__) +if MYPY_CHECK_RUNNING: + from typing import List, Any + from optparse import Values + class CheckCommand(Command): """Verify installed packages have compatible dependencies.""" - name = 'check' + usage = """ %prog [options]""" - summary = 'Verify installed packages have compatible dependencies.' 
def run(self, options, args): + # type: (Values, List[Any]) -> int + package_set, parsing_probs = create_package_set_from_installed() missing, conflicting = check_package_set(package_set) for project_name in missing: version = package_set[project_name].version for dependency in missing[project_name]: - logger.info( + write_output( "%s %s requires %s, which is not installed.", project_name, version, dependency[0], ) @@ -30,12 +39,13 @@ class CheckCommand(Command): for project_name in conflicting: version = package_set[project_name].version for dep_name, dep_version, req in conflicting[project_name]: - logger.info( + write_output( "%s %s has requirement %s, but you have %s %s.", project_name, version, req, dep_name, dep_version, ) if missing or conflicting or parsing_probs: - return 1 + return ERROR else: - logger.info("No broken requirements found.") + write_output("No broken requirements found.") + return SUCCESS diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/completion.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/completion.py index 2fcdd393..9b99f51f 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/completion.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/completion.py @@ -4,32 +4,38 @@ import sys import textwrap from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import SUCCESS from pip._internal.utils.misc import get_prog +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List + from optparse import Values BASE_COMPLETION = """ -# pip %(shell)s completion start%(script)s# pip %(shell)s completion end +# pip {shell} completion start{script}# pip {shell} completion end """ COMPLETION_SCRIPTS = { 'bash': """ _pip_completion() - { - COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\ + {{ + COMPREPLY=( $( 
COMP_WORDS="${{COMP_WORDS[*]}}" \\ COMP_CWORD=$COMP_CWORD \\ - PIP_AUTO_COMPLETE=1 $1 ) ) - } - complete -o default -F _pip_completion %(prog)s + PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) ) + }} + complete -o default -F _pip_completion {prog} """, 'zsh': """ - function _pip_completion { + function _pip_completion {{ local words cword read -Ac words read -cn cword reply=( $( COMP_WORDS="$words[*]" \\ COMP_CWORD=$(( cword-1 )) \\ - PIP_AUTO_COMPLETE=1 $words[1] ) ) - } - compctl -K _pip_completion %(prog)s + PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )) + }} + compctl -K _pip_completion {prog} """, 'fish': """ function __fish_complete_pip @@ -40,55 +46,53 @@ COMPLETION_SCRIPTS = { set -lx PIP_AUTO_COMPLETE 1 string split \\ -- (eval $COMP_WORDS[1]) end - complete -fa "(__fish_complete_pip)" -c %(prog)s + complete -fa "(__fish_complete_pip)" -c {prog} """, } class CompletionCommand(Command): """A helper command to be used for command completion.""" - name = 'completion' - summary = 'A helper command used for command completion.' 
- ignore_require_venv = True - def __init__(self, *args, **kw): - super(CompletionCommand, self).__init__(*args, **kw) - - cmd_opts = self.cmd_opts + ignore_require_venv = True - cmd_opts.add_option( + def add_options(self): + # type: () -> None + self.cmd_opts.add_option( '--bash', '-b', action='store_const', const='bash', dest='shell', help='Emit completion code for bash') - cmd_opts.add_option( + self.cmd_opts.add_option( '--zsh', '-z', action='store_const', const='zsh', dest='shell', help='Emit completion code for zsh') - cmd_opts.add_option( + self.cmd_opts.add_option( '--fish', '-f', action='store_const', const='fish', dest='shell', help='Emit completion code for fish') - self.parser.insert_option_group(0, cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) def run(self, options, args): + # type: (Values, List[str]) -> int """Prints the completion code of the given shell""" shells = COMPLETION_SCRIPTS.keys() shell_options = ['--' + shell for shell in sorted(shells)] if options.shell in shells: script = textwrap.dedent( - COMPLETION_SCRIPTS.get(options.shell, '') % { - 'prog': get_prog(), - } + COMPLETION_SCRIPTS.get(options.shell, '').format( + prog=get_prog()) ) - print(BASE_COMPLETION % {'script': script, 'shell': options.shell}) + print(BASE_COMPLETION.format(script=script, shell=options.shell)) + return SUCCESS else: sys.stderr.write( - 'ERROR: You must pass %s\n' % ' or '.join(shell_options) + 'ERROR: You must pass {}\n' .format(' or '.join(shell_options)) ) + return SUCCESS diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/configuration.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/configuration.py index 826c08dc..f9b3ab79 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/configuration.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/configuration.py @@ -4,32 +4,45 @@ import subprocess from 
pip._internal.cli.base_command import Command from pip._internal.cli.status_codes import ERROR, SUCCESS -from pip._internal.configuration import Configuration, kinds +from pip._internal.configuration import ( + Configuration, + get_configuration_files, + kinds, +) from pip._internal.exceptions import PipError -from pip._internal.locations import venv_config_file -from pip._internal.utils.misc import get_prog +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import get_prog, write_output +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Any, Optional + from optparse import Values + + from pip._internal.configuration import Kind logger = logging.getLogger(__name__) class ConfigurationCommand(Command): - """Manage local and global configuration. + """ + Manage local and global configuration. - Subcommands: + Subcommands: - list: List the active configuration (or from the file specified) - edit: Edit the configuration file in an editor - get: Get the value associated with name - set: Set the name=value - unset: Unset the value associated with name + - list: List the active configuration (or from the file specified) + - edit: Edit the configuration file in an editor + - get: Get the value associated with name + - set: Set the name=value + - unset: Unset the value associated with name + - debug: List the configuration files and values defined under them - If none of --user, --global and --venv are passed, a virtual - environment configuration file is used if one is active and the file - exists. Otherwise, all modifications happen on the to the user file by - default. + If none of --user, --global and --site are passed, a virtual + environment configuration file is used if one is active and the file + exists. Otherwise, all modifications happen on the to the user file by + default. 
""" - name = 'config' + ignore_require_venv = True usage = """ %prog [] list %prog [] [--editor ] edit @@ -37,15 +50,11 @@ class ConfigurationCommand(Command): %prog [] get name %prog [] set name value %prog [] unset name + %prog [] debug """ - summary = "Manage local and global configuration." - - def __init__(self, *args, **kwargs): - super(ConfigurationCommand, self).__init__(*args, **kwargs) - - self.configuration = None - + def add_options(self): + # type: () -> None self.cmd_opts.add_option( '--editor', dest='editor', @@ -74,28 +83,31 @@ class ConfigurationCommand(Command): ) self.cmd_opts.add_option( - '--venv', - dest='venv_file', + '--site', + dest='site_file', action='store_true', default=False, - help='Use the virtualenv configuration file only' + help='Use the current environment configuration file only' ) self.parser.insert_option_group(0, self.cmd_opts) def run(self, options, args): + # type: (Values, List[str]) -> int handlers = { "list": self.list_values, "edit": self.open_in_editor, "get": self.get_name, "set": self.set_name_value, - "unset": self.unset_name + "unset": self.unset_name, + "debug": self.list_config_values, } # Determine action if not args or args[0] not in handlers: - logger.error("Need an action ({}) to perform.".format( - ", ".join(sorted(handlers))) + logger.error( + "Need an action (%s) to perform.", + ", ".join(sorted(handlers)), ) return ERROR @@ -127,54 +139,97 @@ class ConfigurationCommand(Command): return SUCCESS def _determine_file(self, options, need_value): - file_options = { - kinds.USER: options.user_file, - kinds.GLOBAL: options.global_file, - kinds.VENV: options.venv_file - } - - if sum(file_options.values()) == 0: + # type: (Values, bool) -> Optional[Kind] + file_options = [key for key, value in ( + (kinds.USER, options.user_file), + (kinds.GLOBAL, options.global_file), + (kinds.SITE, options.site_file), + ) if value] + + if not file_options: if not need_value: return None - # Default to user, unless there's a 
virtualenv file. - elif os.path.exists(venv_config_file): - return kinds.VENV + # Default to user, unless there's a site file. + elif any( + os.path.exists(site_config_file) + for site_config_file in get_configuration_files()[kinds.SITE] + ): + return kinds.SITE else: return kinds.USER - elif sum(file_options.values()) == 1: - # There's probably a better expression for this. - return [key for key in file_options if file_options[key]][0] + elif len(file_options) == 1: + return file_options[0] raise PipError( "Need exactly one file to operate upon " - "(--user, --venv, --global) to perform." + "(--user, --site, --global) to perform." ) def list_values(self, options, args): + # type: (Values, List[str]) -> None self._get_n_args(args, "list", n=0) for key, value in sorted(self.configuration.items()): - logger.info("%s=%r", key, value) + write_output("%s=%r", key, value) def get_name(self, options, args): + # type: (Values, List[str]) -> None key = self._get_n_args(args, "get [name]", n=1) value = self.configuration.get_value(key) - logger.info("%s", value) + write_output("%s", value) def set_name_value(self, options, args): + # type: (Values, List[str]) -> None key, value = self._get_n_args(args, "set [name] [value]", n=2) self.configuration.set_value(key, value) self._save_configuration() def unset_name(self, options, args): + # type: (Values, List[str]) -> None key = self._get_n_args(args, "unset [name]", n=1) self.configuration.unset_value(key) self._save_configuration() + def list_config_values(self, options, args): + # type: (Values, List[str]) -> None + """List config key-value pairs across different config files""" + self._get_n_args(args, "debug", n=0) + + self.print_env_var_values() + # Iterate over config files and print if they exist, and the + # key-value pairs present in them if they do + for variant, files in sorted(self.configuration.iter_config_files()): + write_output("%s:", variant) + for fname in files: + with indent_log(): + file_exists = 
os.path.exists(fname) + write_output("%s, exists: %r", + fname, file_exists) + if file_exists: + self.print_config_file_values(variant) + + def print_config_file_values(self, variant): + # type: (Kind) -> None + """Get key-value pairs from the file of a variant""" + for name, value in self.configuration.\ + get_values_in_config(variant).items(): + with indent_log(): + write_output("%s: %s", name, value) + + def print_env_var_values(self): + # type: () -> None + """Get key-values pairs present as environment variables""" + write_output("%s:", 'env_var') + with indent_log(): + for key, value in sorted(self.configuration.get_environ_vars()): + env_var = 'PIP_{}'.format(key.upper()) + write_output("%s=%r", env_var, value) + def open_in_editor(self, options, args): + # type: (Values, List[str]) -> None editor = self._determine_editor(options) fname = self.configuration.get_file_to_edit() @@ -190,6 +245,7 @@ class ConfigurationCommand(Command): ) def _get_n_args(self, args, example, n): + # type: (List[str], str, int) -> Any """Helper to make sure the command got the right number of arguments """ if len(args) != n: @@ -205,18 +261,19 @@ class ConfigurationCommand(Command): return args def _save_configuration(self): + # type: () -> None # We successfully ran a modifying command. Need to save the # configuration. try: self.configuration.save() except Exception: - logger.error( - "Unable to save configuration. Please report this as a bug.", - exc_info=1 + logger.exception( + "Unable to save configuration. Please report this as a bug." 
) raise PipError("Internal Error.") def _determine_editor(self, options): + # type: (Values) -> str if options.editor is not None: return options.editor elif "VISUAL" in os.environ: diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/debug.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/debug.py new file mode 100644 index 00000000..ff369d7d --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/debug.py @@ -0,0 +1,229 @@ +from __future__ import absolute_import + +import locale +import logging +import os +import sys + +import pip._vendor +from pip._vendor import pkg_resources +from pip._vendor.certifi import where + +from pip import __file__ as pip_location +from pip._internal.cli import cmdoptions +from pip._internal.cli.base_command import Command +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import get_pip_version +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from types import ModuleType + from typing import List, Optional, Dict + from optparse import Values + from pip._internal.configuration import Configuration + +logger = logging.getLogger(__name__) + + +def show_value(name, value): + # type: (str, Optional[str]) -> None + logger.info('%s: %s', name, value) + + +def show_sys_implementation(): + # type: () -> None + logger.info('sys.implementation:') + if hasattr(sys, 'implementation'): + implementation = sys.implementation # type: ignore + implementation_name = implementation.name + else: + implementation_name = '' + + with indent_log(): + show_value('name', implementation_name) + + +def create_vendor_txt_map(): + # type: () -> Dict[str, str] + vendor_txt_path = os.path.join( + os.path.dirname(pip_location), + '_vendor', + 'vendor.txt' + ) + + with 
open(vendor_txt_path) as f: + # Purge non version specifying lines. + # Also, remove any space prefix or suffixes (including comments). + lines = [line.strip().split(' ', 1)[0] + for line in f.readlines() if '==' in line] + + # Transform into "module" -> version dict. + return dict(line.split('==', 1) for line in lines) # type: ignore + + +def get_module_from_module_name(module_name): + # type: (str) -> ModuleType + # Module name can be uppercase in vendor.txt for some reason... + module_name = module_name.lower() + # PATCH: setuptools is actually only pkg_resources. + if module_name == 'setuptools': + module_name = 'pkg_resources' + + __import__( + 'pip._vendor.{}'.format(module_name), + globals(), + locals(), + level=0 + ) + return getattr(pip._vendor, module_name) + + +def get_vendor_version_from_module(module_name): + # type: (str) -> Optional[str] + module = get_module_from_module_name(module_name) + version = getattr(module, '__version__', None) + + if not version: + # Try to find version in debundled module info + # The type for module.__file__ is Optional[str] in + # Python 2, and str in Python 3. The type: ignore is + # added to account for Python 2, instead of a cast + # and should be removed once we drop Python 2 support + pkg_set = pkg_resources.WorkingSet( + [os.path.dirname(module.__file__)] # type: ignore + ) + package = pkg_set.find(pkg_resources.Requirement.parse(module_name)) + version = getattr(package, 'version', None) + + return version + + +def show_actual_vendor_versions(vendor_txt_versions): + # type: (Dict[str, str]) -> None + """Log the actual version and print extra info if there is + a conflict or if the actual version could not be imported. 
+ """ + for module_name, expected_version in vendor_txt_versions.items(): + extra_message = '' + actual_version = get_vendor_version_from_module(module_name) + if not actual_version: + extra_message = ' (Unable to locate actual module version, using'\ + ' vendor.txt specified version)' + actual_version = expected_version + elif actual_version != expected_version: + extra_message = ' (CONFLICT: vendor.txt suggests version should'\ + ' be {})'.format(expected_version) + logger.info('%s==%s%s', module_name, actual_version, extra_message) + + +def show_vendor_versions(): + # type: () -> None + logger.info('vendored library versions:') + + vendor_txt_versions = create_vendor_txt_map() + with indent_log(): + show_actual_vendor_versions(vendor_txt_versions) + + +def show_tags(options): + # type: (Values) -> None + tag_limit = 10 + + target_python = make_target_python(options) + tags = target_python.get_tags() + + # Display the target options that were explicitly provided. + formatted_target = target_python.format_given() + suffix = '' + if formatted_target: + suffix = ' (target: {})'.format(formatted_target) + + msg = 'Compatible tags: {}{}'.format(len(tags), suffix) + logger.info(msg) + + if options.verbose < 1 and len(tags) > tag_limit: + tags_limited = True + tags = tags[:tag_limit] + else: + tags_limited = False + + with indent_log(): + for tag in tags: + logger.info(str(tag)) + + if tags_limited: + msg = ( + '...\n' + '[First {tag_limit} tags shown. 
Pass --verbose to show all.]' + ).format(tag_limit=tag_limit) + logger.info(msg) + + +def ca_bundle_info(config): + # type: (Configuration) -> str + levels = set() + for key, _ in config.items(): + levels.add(key.split('.')[0]) + + if not levels: + return "Not specified" + + levels_that_override_global = ['install', 'wheel', 'download'] + global_overriding_level = [ + level for level in levels if level in levels_that_override_global + ] + if not global_overriding_level: + return 'global' + + if 'global' in levels: + levels.remove('global') + return ", ".join(levels) + + +class DebugCommand(Command): + """ + Display debug information. + """ + + usage = """ + %prog """ + ignore_require_venv = True + + def add_options(self): + # type: () -> None + cmdoptions.add_target_python_options(self.cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) + self.parser.config.load() + + def run(self, options, args): + # type: (Values, List[str]) -> int + logger.warning( + "This command is only meant for debugging. " + "Do not use this with automation for parsing and getting these " + "details, since the output and options of this command may " + "change without notice." 
+ ) + show_value('pip version', get_pip_version()) + show_value('sys.version', sys.version) + show_value('sys.executable', sys.executable) + show_value('sys.getdefaultencoding', sys.getdefaultencoding()) + show_value('sys.getfilesystemencoding', sys.getfilesystemencoding()) + show_value( + 'locale.getpreferredencoding', locale.getpreferredencoding(), + ) + show_value('sys.platform', sys.platform) + show_sys_implementation() + + show_value("'cert' config value", ca_bundle_info(self.parser.config)) + show_value("REQUESTS_CA_BUNDLE", os.environ.get('REQUESTS_CA_BUNDLE')) + show_value("CURL_CA_BUNDLE", os.environ.get('CURL_CA_BUNDLE')) + show_value("pip._vendor.certifi.where()", where()) + show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED) + + show_vendor_versions() + + show_tags(options) + + return SUCCESS diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/download.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/download.py index a57e4bc4..46e83712 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/download.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/download.py @@ -4,14 +4,17 @@ import logging import os from pip._internal.cli import cmdoptions -from pip._internal.cli.base_command import RequirementCommand -from pip._internal.operations.prepare import RequirementPreparer -from pip._internal.req import RequirementSet -from pip._internal.req.req_tracker import RequirementTracker -from pip._internal.resolve import Resolver -from pip._internal.utils.filesystem import check_path_owner -from pip._internal.utils.misc import ensure_dir, normalize_path +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.req_command import RequirementCommand, with_cleanup +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.req.req_tracker import get_requirement_tracker +from 
pip._internal.utils.misc import ensure_dir, normalize_path, write_output from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import List logger = logging.getLogger(__name__) @@ -28,7 +31,6 @@ class DownloadCommand(RequirementCommand): pip also supports downloading from "requirements files", which provide an easy way to specify a whole environment to be downloaded. """ - name = 'download' usage = """ %prog [options] [package-index-options] ... @@ -37,31 +39,25 @@ class DownloadCommand(RequirementCommand): %prog [options] ... %prog [options] ...""" - summary = 'Download packages.' - - def __init__(self, *args, **kw): - super(DownloadCommand, self).__init__(*args, **kw) - - cmd_opts = self.cmd_opts - - cmd_opts.add_option(cmdoptions.constraints()) - cmd_opts.add_option(cmdoptions.requirements()) - cmd_opts.add_option(cmdoptions.build_dir()) - cmd_opts.add_option(cmdoptions.no_deps()) - cmd_opts.add_option(cmdoptions.global_options()) - cmd_opts.add_option(cmdoptions.no_binary()) - cmd_opts.add_option(cmdoptions.only_binary()) - cmd_opts.add_option(cmdoptions.prefer_binary()) - cmd_opts.add_option(cmdoptions.src()) - cmd_opts.add_option(cmdoptions.pre()) - cmd_opts.add_option(cmdoptions.no_clean()) - cmd_opts.add_option(cmdoptions.require_hashes()) - cmd_opts.add_option(cmdoptions.progress_bar()) - cmd_opts.add_option(cmdoptions.no_build_isolation()) - cmd_opts.add_option(cmdoptions.use_pep517()) - cmd_opts.add_option(cmdoptions.no_use_pep517()) - - cmd_opts.add_option( + def add_options(self): + # type: () -> None + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.build_dir()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.global_options()) + self.cmd_opts.add_option(cmdoptions.no_binary()) + 
self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option(cmdoptions.src()) + self.cmd_opts.add_option(cmdoptions.pre()) + self.cmd_opts.add_option(cmdoptions.require_hashes()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.no_use_pep517()) + + self.cmd_opts.add_option( '-d', '--dest', '--destination-dir', '--destination-directory', dest='download_dir', metavar='dir', @@ -69,10 +65,7 @@ class DownloadCommand(RequirementCommand): help=("Download packages into ."), ) - cmd_opts.add_option(cmdoptions.platform()) - cmd_opts.add_option(cmdoptions.python_version()) - cmd_opts.add_option(cmdoptions.implementation()) - cmd_opts.add_option(cmdoptions.abi()) + cmdoptions.add_target_python_options(self.cmd_opts) index_opts = cmdoptions.make_option_group( cmdoptions.index_group, @@ -80,97 +73,71 @@ class DownloadCommand(RequirementCommand): ) self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) + @with_cleanup def run(self, options, args): + # type: (Values, List[str]) -> int + options.ignore_installed = True # editable doesn't really make sense for `pip download`, but the bowels # of the RequirementSet code require that property. 
options.editables = [] - if options.python_version: - python_versions = [options.python_version] - else: - python_versions = None - cmdoptions.check_dist_restriction(options) - options.src_dir = os.path.abspath(options.src_dir) options.download_dir = normalize_path(options.download_dir) ensure_dir(options.download_dir) - with self._build_session(options) as session: - finder = self._build_package_finder( - options=options, - session=session, - platform=options.platform, - python_versions=python_versions, - abi=options.abi, - implementation=options.implementation, - ) - build_delete = (not (options.no_clean or options.build_dir)) - if options.cache_dir and not check_path_owner(options.cache_dir): - logger.warning( - "The directory '%s' or its parent directory is not owned " - "by the current user and caching wheels has been " - "disabled. check the permissions and owner of that " - "directory. If executing pip with sudo, you may want " - "sudo's -H flag.", - options.cache_dir, - ) - options.cache_dir = None - - with RequirementTracker() as req_tracker, TempDirectory( - options.build_dir, delete=build_delete, kind="download" - ) as directory: - - requirement_set = RequirementSet( - require_hashes=options.require_hashes, - ) - self.populate_requirement_set( - requirement_set, - args, - options, - finder, - session, - self.name, - None - ) - - preparer = RequirementPreparer( - build_dir=directory.path, - src_dir=options.src_dir, - download_dir=options.download_dir, - wheel_download_dir=None, - progress_bar=options.progress_bar, - build_isolation=options.build_isolation, - req_tracker=req_tracker, - ) - - resolver = Resolver( - preparer=preparer, - finder=finder, - session=session, - wheel_cache=None, - use_user_site=False, - upgrade_strategy="to-satisfy-only", - force_reinstall=False, - ignore_dependencies=options.ignore_dependencies, - ignore_requires_python=False, - ignore_installed=True, - isolated=options.isolated_mode, - ) - resolver.resolve(requirement_set) - - 
downloaded = ' '.join([ - req.name for req in requirement_set.successfully_downloaded - ]) - if downloaded: - logger.info('Successfully downloaded %s', downloaded) - - # Clean up - if not options.no_clean: - requirement_set.cleanup_files() - - return requirement_set + session = self.get_default_session(options) + + target_python = make_target_python(options) + finder = self._build_package_finder( + options=options, + session=session, + target_python=target_python, + ) + build_delete = (not (options.no_clean or options.build_dir)) + + req_tracker = self.enter_context(get_requirement_tracker()) + + directory = TempDirectory( + options.build_dir, + delete=build_delete, + kind="download", + globally_managed=True, + ) + + reqs = self.get_requirements(args, options, finder, session) + + preparer = self.make_requirement_preparer( + temp_build_dir=directory, + options=options, + req_tracker=req_tracker, + session=session, + finder=finder, + download_dir=options.download_dir, + use_user_site=False, + ) + + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + py_version_info=options.python_version, + ) + + self.trace_basic_info(finder) + + requirement_set = resolver.resolve( + reqs, check_supported_wheels=True + ) + + downloaded = ' '.join([req.name # type: ignore + for req in requirement_set.requirements.values() + if req.successfully_downloaded]) + if downloaded: + write_output('Successfully downloaded %s', downloaded) + + return SUCCESS diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/freeze.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/freeze.py index dc9c53a6..2071fbab 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/freeze.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/freeze.py @@ -3,13 +3,20 @@ from __future__ import absolute_import import sys from pip._internal.cache import 
WheelCache +from pip._internal.cli import cmdoptions from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import SUCCESS from pip._internal.models.format_control import FormatControl from pip._internal.operations.freeze import freeze from pip._internal.utils.compat import stdlib_pkgs +from pip._internal.utils.typing import MYPY_CHECK_RUNNING DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'} +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import List + class FreezeCommand(Command): """ @@ -17,15 +24,13 @@ class FreezeCommand(Command): packages are listed in a case-insensitive sorted order. """ - name = 'freeze' + usage = """ %prog [options]""" - summary = 'Output installed packages in requirements format.' log_streams = ("ext://sys.stderr", "ext://sys.stderr") - def __init__(self, *args, **kw): - super(FreezeCommand, self).__init__(*args, **kw) - + def add_options(self): + # type: () -> None self.cmd_opts.add_option( '-r', '--requirement', dest='requirements', @@ -56,12 +61,13 @@ class FreezeCommand(Command): action='store_true', default=False, help='Only output packages installed in user-site.') + self.cmd_opts.add_option(cmdoptions.list_path()) self.cmd_opts.add_option( '--all', dest='freeze_all', action='store_true', help='Do not skip these packages in the output:' - ' %s' % ', '.join(DEV_PKGS)) + ' {}'.format(', '.join(DEV_PKGS))) self.cmd_opts.add_option( '--exclude-editable', dest='exclude_editable', @@ -71,26 +77,27 @@ class FreezeCommand(Command): self.parser.insert_option_group(0, self.cmd_opts) def run(self, options, args): + # type: (Values, List[str]) -> int format_control = FormatControl(set(), set()) wheel_cache = WheelCache(options.cache_dir, format_control) skip = set(stdlib_pkgs) if not options.freeze_all: skip.update(DEV_PKGS) + cmdoptions.check_list_path_option(options) + freeze_kwargs = dict( requirement=options.requirements, find_links=options.find_links, local_only=options.local, 
user_only=options.user, - skip_regex=options.skip_requirements_regex, + paths=options.path, isolated=options.isolated_mode, wheel_cache=wheel_cache, skip=skip, exclude_editable=options.exclude_editable, ) - try: - for line in freeze(**freeze_kwargs): - sys.stdout.write(line + '\n') - finally: - wheel_cache.cleanup() + for line in freeze(**freeze_kwargs): + sys.stdout.write(line + '\n') + return SUCCESS diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/hash.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/hash.py index 423440e9..37831c39 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/hash.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/hash.py @@ -5,9 +5,14 @@ import logging import sys from pip._internal.cli.base_command import Command -from pip._internal.cli.status_codes import ERROR +from pip._internal.cli.status_codes import ERROR, SUCCESS from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES -from pip._internal.utils.misc import read_chunks +from pip._internal.utils.misc import read_chunks, write_output +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import List logger = logging.getLogger(__name__) @@ -18,37 +23,38 @@ class HashCommand(Command): These can be used with --hash in a requirements file to do repeatable installs. - """ - name = 'hash' + usage = '%prog [options] ...' - summary = 'Compute hashes of package archives.' 
ignore_require_venv = True - def __init__(self, *args, **kw): - super(HashCommand, self).__init__(*args, **kw) + def add_options(self): + # type: () -> None self.cmd_opts.add_option( '-a', '--algorithm', dest='algorithm', choices=STRONG_HASHES, action='store', default=FAVORITE_HASH, - help='The hash algorithm to use: one of %s' % - ', '.join(STRONG_HASHES)) + help='The hash algorithm to use: one of {}'.format( + ', '.join(STRONG_HASHES))) self.parser.insert_option_group(0, self.cmd_opts) def run(self, options, args): + # type: (Values, List[str]) -> int if not args: self.parser.print_usage(sys.stderr) return ERROR algorithm = options.algorithm for path in args: - logger.info('%s:\n--hash=%s:%s', - path, algorithm, _hash_of_file(path, algorithm)) + write_output('%s:\n--hash=%s:%s', + path, algorithm, _hash_of_file(path, algorithm)) + return SUCCESS def _hash_of_file(path, algorithm): + # type: (str, str) -> str """Return the hash digest of a file.""" with open(path, 'rb') as archive: hash = hashlib.new(algorithm) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/help.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/help.py index 49a81cbb..a2edc298 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/help.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/help.py @@ -3,18 +3,25 @@ from __future__ import absolute_import from pip._internal.cli.base_command import Command from pip._internal.cli.status_codes import SUCCESS from pip._internal.exceptions import CommandError +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List + from optparse import Values class HelpCommand(Command): """Show help for commands""" - name = 'help' + usage = """ %prog """ - summary = 'Show help for commands.' 
ignore_require_venv = True def run(self, options, args): - from pip._internal.commands import commands_dict, get_similar_commands + # type: (Values, List[str]) -> int + from pip._internal.commands import ( + commands_dict, create_command, get_similar_commands, + ) try: # 'pip help' with no args is handled by pip.__init__.parseopt() @@ -25,13 +32,13 @@ class HelpCommand(Command): if cmd_name not in commands_dict: guess = get_similar_commands(cmd_name) - msg = ['unknown command "%s"' % cmd_name] + msg = ['unknown command "{}"'.format(cmd_name)] if guess: - msg.append('maybe you meant "%s"' % guess) + msg.append('maybe you meant "{}"'.format(guess)) raise CommandError(' - '.join(msg)) - command = commands_dict[cmd_name]() + command = create_command(cmd_name) command.parser.print_help() return SUCCESS diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/install.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/install.py index 1c244d23..8c2c32fd 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/install.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/install.py @@ -5,34 +5,64 @@ import logging import operator import os import shutil +import site from optparse import SUPPRESS_HELP from pip._vendor import pkg_resources +from pip._vendor.packaging.utils import canonicalize_name from pip._internal.cache import WheelCache from pip._internal.cli import cmdoptions -from pip._internal.cli.base_command import RequirementCommand -from pip._internal.cli.status_codes import ERROR -from pip._internal.exceptions import ( - CommandError, InstallationError, PreviousBuildDirError, -) -from pip._internal.locations import distutils_scheme, virtualenv_no_global +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.req_command import RequirementCommand, with_cleanup +from pip._internal.cli.status_codes import ERROR, SUCCESS 
+from pip._internal.exceptions import CommandError, InstallationError +from pip._internal.locations import distutils_scheme from pip._internal.operations.check import check_install_conflicts -from pip._internal.operations.prepare import RequirementPreparer -from pip._internal.req import RequirementSet, install_given_reqs -from pip._internal.req.req_tracker import RequirementTracker -from pip._internal.resolve import Resolver -from pip._internal.utils.filesystem import check_path_owner +from pip._internal.req import install_given_reqs +from pip._internal.req.req_tracker import get_requirement_tracker +from pip._internal.utils.datetime import today_is_later_than +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.distutils_args import parse_distutils_args +from pip._internal.utils.filesystem import test_writable_dir from pip._internal.utils.misc import ( - ensure_dir, get_installed_version, + ensure_dir, + get_installed_version, + get_pip_version, protect_pip_from_modification_on_windows, + write_output, ) from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.wheel import WheelBuilder +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.virtualenv import virtualenv_no_global +from pip._internal.wheel_builder import build, should_build_for_install_command + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import Iterable, List, Optional + + from pip._internal.models.format_control import FormatControl + from pip._internal.operations.check import ConflictDetails + from pip._internal.req.req_install import InstallRequirement + from pip._internal.wheel_builder import BinaryAllowedPredicate + logger = logging.getLogger(__name__) +def get_check_binary_allowed(format_control): + # type: (FormatControl) -> BinaryAllowedPredicate + def check_binary_allowed(req): + # type: (InstallRequirement) -> bool + if req.use_pep517: + return True + canonical_name = 
canonicalize_name(req.name) + allowed_formats = format_control.get_allowed_formats(canonical_name) + return "binary" in allowed_formats + + return check_binary_allowed + + class InstallCommand(RequirementCommand): """ Install packages from: @@ -45,7 +75,6 @@ class InstallCommand(RequirementCommand): pip also supports installing from "requirements files", which provide an easy way to specify a whole environment to be installed. """ - name = 'install' usage = """ %prog [options] [package-index-options] ... @@ -54,20 +83,15 @@ class InstallCommand(RequirementCommand): %prog [options] [-e] ... %prog [options] ...""" - summary = 'Install packages.' + def add_options(self): + # type: () -> None + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.pre()) - def __init__(self, *args, **kw): - super(InstallCommand, self).__init__(*args, **kw) - - cmd_opts = self.cmd_opts - - cmd_opts.add_option(cmdoptions.requirements()) - cmd_opts.add_option(cmdoptions.constraints()) - cmd_opts.add_option(cmdoptions.no_deps()) - cmd_opts.add_option(cmdoptions.pre()) - - cmd_opts.add_option(cmdoptions.editable()) - cmd_opts.add_option( + self.cmd_opts.add_option(cmdoptions.editable()) + self.cmd_opts.add_option( '-t', '--target', dest='target_dir', metavar='dir', @@ -77,12 +101,9 @@ class InstallCommand(RequirementCommand): '. Use --upgrade to replace existing packages in ' 'with new versions.' ) - cmd_opts.add_option(cmdoptions.platform()) - cmd_opts.add_option(cmdoptions.python_version()) - cmd_opts.add_option(cmdoptions.implementation()) - cmd_opts.add_option(cmdoptions.abi()) + cmdoptions.add_target_python_options(self.cmd_opts) - cmd_opts.add_option( + self.cmd_opts.add_option( '--user', dest='use_user_site', action='store_true', @@ -90,19 +111,19 @@ class InstallCommand(RequirementCommand): "platform. 
Typically ~/.local/, or %APPDATA%\\Python on " "Windows. (See the Python documentation for site.USER_BASE " "for full details.)") - cmd_opts.add_option( + self.cmd_opts.add_option( '--no-user', dest='use_user_site', action='store_false', help=SUPPRESS_HELP) - cmd_opts.add_option( + self.cmd_opts.add_option( '--root', dest='root_path', metavar='dir', default=None, help="Install everything relative to this alternate root " "directory.") - cmd_opts.add_option( + self.cmd_opts.add_option( '--prefix', dest='prefix_path', metavar='dir', @@ -110,11 +131,11 @@ class InstallCommand(RequirementCommand): help="Installation prefix where lib, bin and other top-level " "folders are placed") - cmd_opts.add_option(cmdoptions.build_dir()) + self.cmd_opts.add_option(cmdoptions.build_dir()) - cmd_opts.add_option(cmdoptions.src()) + self.cmd_opts.add_option(cmdoptions.src()) - cmd_opts.add_option( + self.cmd_opts.add_option( '-U', '--upgrade', dest='upgrade', action='store_true', @@ -123,7 +144,7 @@ class InstallCommand(RequirementCommand): 'upgrade-strategy used.' ) - cmd_opts.add_option( + self.cmd_opts.add_option( '--upgrade-strategy', dest='upgrade_strategy', default='only-if-needed', @@ -137,28 +158,32 @@ class InstallCommand(RequirementCommand): 'satisfy the requirements of the upgraded package(s).' ) - cmd_opts.add_option( + self.cmd_opts.add_option( '--force-reinstall', dest='force_reinstall', action='store_true', help='Reinstall all packages even if they are already ' 'up-to-date.') - cmd_opts.add_option( + self.cmd_opts.add_option( '-I', '--ignore-installed', dest='ignore_installed', action='store_true', - help='Ignore the installed packages (reinstalling instead).') + help='Ignore the installed packages, overwriting them. ' + 'This can break your system if the existing package ' + 'is of a different version or was installed ' + 'with a different package manager!' 
+ ) - cmd_opts.add_option(cmdoptions.ignore_requires_python()) - cmd_opts.add_option(cmdoptions.no_build_isolation()) - cmd_opts.add_option(cmdoptions.use_pep517()) - cmd_opts.add_option(cmdoptions.no_use_pep517()) + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.no_use_pep517()) - cmd_opts.add_option(cmdoptions.install_options()) - cmd_opts.add_option(cmdoptions.global_options()) + self.cmd_opts.add_option(cmdoptions.install_options()) + self.cmd_opts.add_option(cmdoptions.global_options()) - cmd_opts.add_option( + self.cmd_opts.add_option( "--compile", action="store_true", dest="compile", @@ -166,21 +191,21 @@ class InstallCommand(RequirementCommand): help="Compile Python source files to bytecode", ) - cmd_opts.add_option( + self.cmd_opts.add_option( "--no-compile", action="store_false", dest="compile", help="Do not compile Python source files to bytecode", ) - cmd_opts.add_option( + self.cmd_opts.add_option( "--no-warn-script-location", action="store_false", dest="warn_script_location", default=True, help="Do not warn when installing scripts outside PATH", ) - cmd_opts.add_option( + self.cmd_opts.add_option( "--no-warn-conflicts", action="store_false", dest="warn_about_conflicts", @@ -188,12 +213,11 @@ class InstallCommand(RequirementCommand): help="Do not warn about broken dependencies", ) - cmd_opts.add_option(cmdoptions.no_binary()) - cmd_opts.add_option(cmdoptions.only_binary()) - cmd_opts.add_option(cmdoptions.prefer_binary()) - cmd_opts.add_option(cmdoptions.no_clean()) - cmd_opts.add_option(cmdoptions.require_hashes()) - cmd_opts.add_option(cmdoptions.progress_bar()) + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option(cmdoptions.require_hashes()) + 
self.cmd_opts.add_option(cmdoptions.progress_bar()) index_opts = cmdoptions.make_option_group( cmdoptions.index_group, @@ -201,41 +225,34 @@ class InstallCommand(RequirementCommand): ) self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) + @with_cleanup def run(self, options, args): + # type: (Values, List[str]) -> int + if options.use_user_site and options.target_dir is not None: + raise CommandError("Can not combine '--user' and '--target'") + cmdoptions.check_install_build_global(options) upgrade_strategy = "to-satisfy-only" if options.upgrade: upgrade_strategy = options.upgrade_strategy - if options.build_dir: - options.build_dir = os.path.abspath(options.build_dir) - cmdoptions.check_dist_restriction(options, check_target=True) - if options.python_version: - python_versions = [options.python_version] - else: - python_versions = None - - options.src_dir = os.path.abspath(options.src_dir) install_options = options.install_options or [] - if options.use_user_site: - if options.prefix_path: - raise CommandError( - "Can not combine '--user' and '--prefix' as they imply " - "different installation locations" - ) - if virtualenv_no_global(): - raise InstallationError( - "Can not perform a '--user' install. User site-packages " - "are not visible in this virtualenv." 
- ) - install_options.append('--user') - install_options.append('--prefix=') - target_temp_dir = TempDirectory(kind="target") + logger.debug("Using %s", get_pip_version()) + options.use_user_site = decide_user_install( + options.use_user_site, + prefix_path=options.prefix_path, + target_dir=options.target_dir, + root_path=options.root_path, + isolated_mode=options.isolated_mode, + ) + + target_temp_dir = None # type: Optional[TempDirectory] + target_temp_dir_path = None # type: Optional[str] if options.target_dir: options.ignore_installed = True options.target_dir = os.path.abspath(options.target_dir) @@ -247,290 +264,471 @@ class InstallCommand(RequirementCommand): ) # Create a target directory for using with the target option - target_temp_dir.create() - install_options.append('--home=' + target_temp_dir.path) + target_temp_dir = TempDirectory(kind="target") + target_temp_dir_path = target_temp_dir.path + self.enter_context(target_temp_dir) global_options = options.global_options or [] - with self._build_session(options) as session: - finder = self._build_package_finder( + session = self.get_default_session(options) + + target_python = make_target_python(options) + finder = self._build_package_finder( + options=options, + session=session, + target_python=target_python, + ignore_requires_python=options.ignore_requires_python, + ) + build_delete = (not (options.no_clean or options.build_dir)) + wheel_cache = WheelCache(options.cache_dir, options.format_control) + + req_tracker = self.enter_context(get_requirement_tracker()) + + directory = TempDirectory( + options.build_dir, + delete=build_delete, + kind="install", + globally_managed=True, + ) + + try: + reqs = self.get_requirements(args, options, finder, session) + + reject_location_related_install_options( + reqs, options.install_options + ) + + preparer = self.make_requirement_preparer( + temp_build_dir=directory, options=options, + req_tracker=req_tracker, session=session, - platform=options.platform, - 
python_versions=python_versions, - abi=options.abi, - implementation=options.implementation, + finder=finder, + use_user_site=options.use_user_site, + ) + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + wheel_cache=wheel_cache, + use_user_site=options.use_user_site, + ignore_installed=options.ignore_installed, + ignore_requires_python=options.ignore_requires_python, + force_reinstall=options.force_reinstall, + upgrade_strategy=upgrade_strategy, + use_pep517=options.use_pep517, ) - build_delete = (not (options.no_clean or options.build_dir)) - wheel_cache = WheelCache(options.cache_dir, options.format_control) - - if options.cache_dir and not check_path_owner(options.cache_dir): - logger.warning( - "The directory '%s' or its parent directory is not owned " - "by the current user and caching wheels has been " - "disabled. check the permissions and owner of that " - "directory. If executing pip with sudo, you may want " - "sudo's -H flag.", - options.cache_dir, - ) - options.cache_dir = None - - with RequirementTracker() as req_tracker, TempDirectory( - options.build_dir, delete=build_delete, kind="install" - ) as directory: - requirement_set = RequirementSet( - require_hashes=options.require_hashes, - check_supported_wheels=not options.target_dir, - ) - try: - self.populate_requirement_set( - requirement_set, args, options, finder, session, - self.name, wheel_cache - ) - preparer = RequirementPreparer( - build_dir=directory.path, - src_dir=options.src_dir, - download_dir=None, - wheel_download_dir=None, - progress_bar=options.progress_bar, - build_isolation=options.build_isolation, - req_tracker=req_tracker, - ) + self.trace_basic_info(finder) - resolver = Resolver( - preparer=preparer, - finder=finder, - session=session, - wheel_cache=wheel_cache, - use_user_site=options.use_user_site, - upgrade_strategy=upgrade_strategy, - force_reinstall=options.force_reinstall, - ignore_dependencies=options.ignore_dependencies, - 
ignore_requires_python=options.ignore_requires_python, - ignore_installed=options.ignore_installed, - isolated=options.isolated_mode, - use_pep517=options.use_pep517 - ) - resolver.resolve(requirement_set) + requirement_set = resolver.resolve( + reqs, check_supported_wheels=not options.target_dir + ) - protect_pip_from_modification_on_windows( - modifying_pip=requirement_set.has_requirement("pip") - ) + try: + pip_req = requirement_set.get_requirement("pip") + except KeyError: + modifying_pip = False + else: + # If we're not replacing an already installed pip, + # we're not modifying it. + modifying_pip = pip_req.satisfied_by is None + protect_pip_from_modification_on_windows( + modifying_pip=modifying_pip + ) - # Consider legacy and PEP517-using requirements separately - legacy_requirements = [] - pep517_requirements = [] - for req in requirement_set.requirements.values(): - if req.use_pep517: - pep517_requirements.append(req) - else: - legacy_requirements.append(req) - - # We don't build wheels for legacy requirements if we - # don't have wheel installed or we don't have a cache dir - try: - import wheel # noqa: F401 - build_legacy = bool(options.cache_dir) - except ImportError: - build_legacy = False - - wb = WheelBuilder( - finder, preparer, wheel_cache, - build_options=[], global_options=[], - ) + check_binary_allowed = get_check_binary_allowed( + finder.format_control + ) + + reqs_to_build = [ + r for r in requirement_set.requirements.values() + if should_build_for_install_command( + r, check_binary_allowed + ) + ] + + _, build_failures = build( + reqs_to_build, + wheel_cache=wheel_cache, + build_options=[], + global_options=[], + ) - # Always build PEP 517 requirements - build_failures = wb.build( - pep517_requirements, - session=session, autobuilding=True + # If we're using PEP 517, we cannot do a direct install + # so we fail here. 
+ pep517_build_failure_names = [ + r.name # type: ignore + for r in build_failures if r.use_pep517 + ] # type: List[str] + if pep517_build_failure_names: + raise InstallationError( + "Could not build wheels for {} which use" + " PEP 517 and cannot be installed directly".format( + ", ".join(pep517_build_failure_names) ) + ) - if build_legacy: - # We don't care about failures building legacy - # requirements, as we'll fall through to a direct - # install for those. - wb.build( - legacy_requirements, - session=session, autobuilding=True + # For now, we just warn about failures building legacy + # requirements, as we'll fall through to a direct + # install for those. + legacy_build_failure_names = [ + r.name # type: ignore + for r in build_failures if not r.use_pep517 + ] # type: List[str] + if legacy_build_failure_names: + deprecated( + reason=( + "Could not build wheels for {} which do not use " + "PEP 517. pip will fall back to legacy 'setup.py " + "install' for these.".format( + ", ".join(legacy_build_failure_names) ) + ), + replacement="to fix the wheel build issue reported above", + gone_in="21.0", + issue=8368, + ) - # If we're using PEP 517, we cannot do a direct install - # so we fail here. - if build_failures: - raise InstallationError( - "Could not build wheels for {} which use" - " PEP 517 and cannot be installed directly".format( - ", ".join(r.name for r in build_failures))) + to_install = resolver.get_installation_order( + requirement_set + ) - to_install = resolver.get_installation_order( - requirement_set - ) + # Check for conflicts in the package set we're installing. 
+ conflicts = None # type: Optional[ConflictDetails] + should_warn_about_conflicts = ( + not options.ignore_dependencies and + options.warn_about_conflicts + ) + if should_warn_about_conflicts: + conflicts = self._determine_conflicts(to_install) + + # Don't warn about script install locations if + # --target has been specified + warn_script_location = options.warn_script_location + if options.target_dir: + warn_script_location = False + + installed = install_given_reqs( + to_install, + install_options, + global_options, + root=options.root_path, + home=target_temp_dir_path, + prefix=options.prefix_path, + warn_script_location=warn_script_location, + use_user_site=options.use_user_site, + pycompile=options.compile, + ) - # Consistency Checking of the package set we're installing. - should_warn_about_conflicts = ( - not options.ignore_dependencies and - options.warn_about_conflicts - ) - if should_warn_about_conflicts: - self._warn_about_conflicts(to_install) - - # Don't warn about script install locations if - # --target has been specified - warn_script_location = options.warn_script_location - if options.target_dir: - warn_script_location = False - - installed = install_given_reqs( - to_install, - install_options, - global_options, - root=options.root_path, - home=target_temp_dir.path, - prefix=options.prefix_path, - pycompile=options.compile, - warn_script_location=warn_script_location, - use_user_site=options.use_user_site, - ) + lib_locations = get_lib_location_guesses( + user=options.use_user_site, + home=target_temp_dir_path, + root=options.root_path, + prefix=options.prefix_path, + isolated=options.isolated_mode, + ) + working_set = pkg_resources.WorkingSet(lib_locations) - lib_locations = get_lib_location_guesses( - user=options.use_user_site, - home=target_temp_dir.path, - root=options.root_path, - prefix=options.prefix_path, - isolated=options.isolated_mode, - ) - working_set = pkg_resources.WorkingSet(lib_locations) - - reqs = sorted(installed, 
key=operator.attrgetter('name')) - items = [] - for req in reqs: - item = req.name - try: - installed_version = get_installed_version( - req.name, working_set=working_set - ) - if installed_version: - item += '-' + installed_version - except Exception: - pass - items.append(item) - installed = ' '.join(items) - if installed: - logger.info('Successfully installed %s', installed) - except EnvironmentError as error: - show_traceback = (self.verbosity >= 1) - - message = create_env_error_message( - error, show_traceback, options.use_user_site, + installed.sort(key=operator.attrgetter('name')) + items = [] + for result in installed: + item = result.name + try: + installed_version = get_installed_version( + result.name, working_set=working_set ) - logger.error(message, exc_info=show_traceback) - - return ERROR - except PreviousBuildDirError: - options.no_clean = True - raise - finally: - # Clean up - if not options.no_clean: - requirement_set.cleanup_files() - wheel_cache.cleanup() + if installed_version: + item += '-' + installed_version + except Exception: + pass + items.append(item) + + if conflicts is not None: + self._warn_about_conflicts( + conflicts, + new_resolver='2020-resolver' in options.features_enabled, + ) + + installed_desc = ' '.join(items) + if installed_desc: + write_output( + 'Successfully installed %s', installed_desc, + ) + except EnvironmentError as error: + show_traceback = (self.verbosity >= 1) + + message = create_env_error_message( + error, show_traceback, options.use_user_site, + ) + logger.error(message, exc_info=show_traceback) # noqa + + return ERROR if options.target_dir: + assert target_temp_dir self._handle_target_dir( options.target_dir, target_temp_dir, options.upgrade ) - return requirement_set + + return SUCCESS def _handle_target_dir(self, target_dir, target_temp_dir, upgrade): + # type: (str, TempDirectory, bool) -> None ensure_dir(target_dir) # Checking both purelib and platlib directories for installed # packages to be moved to 
target directory lib_dir_list = [] - with target_temp_dir: - # Checking both purelib and platlib directories for installed - # packages to be moved to target directory - scheme = distutils_scheme('', home=target_temp_dir.path) - purelib_dir = scheme['purelib'] - platlib_dir = scheme['platlib'] - data_dir = scheme['data'] - - if os.path.exists(purelib_dir): - lib_dir_list.append(purelib_dir) - if os.path.exists(platlib_dir) and platlib_dir != purelib_dir: - lib_dir_list.append(platlib_dir) - if os.path.exists(data_dir): - lib_dir_list.append(data_dir) - - for lib_dir in lib_dir_list: - for item in os.listdir(lib_dir): - if lib_dir == data_dir: - ddir = os.path.join(data_dir, item) - if any(s.startswith(ddir) for s in lib_dir_list[:-1]): - continue - target_item_dir = os.path.join(target_dir, item) - if os.path.exists(target_item_dir): - if not upgrade: - logger.warning( - 'Target directory %s already exists. Specify ' - '--upgrade to force replacement.', - target_item_dir - ) - continue - if os.path.islink(target_item_dir): - logger.warning( - 'Target directory %s already exists and is ' - 'a link. 
Pip will not automatically replace ' - 'links, please remove if replacement is ' - 'desired.', - target_item_dir - ) - continue - if os.path.isdir(target_item_dir): - shutil.rmtree(target_item_dir) - else: - os.remove(target_item_dir) - - shutil.move( - os.path.join(lib_dir, item), - target_item_dir - ) + # Checking both purelib and platlib directories for installed + # packages to be moved to target directory + scheme = distutils_scheme('', home=target_temp_dir.path) + purelib_dir = scheme['purelib'] + platlib_dir = scheme['platlib'] + data_dir = scheme['data'] + + if os.path.exists(purelib_dir): + lib_dir_list.append(purelib_dir) + if os.path.exists(platlib_dir) and platlib_dir != purelib_dir: + lib_dir_list.append(platlib_dir) + if os.path.exists(data_dir): + lib_dir_list.append(data_dir) + + for lib_dir in lib_dir_list: + for item in os.listdir(lib_dir): + if lib_dir == data_dir: + ddir = os.path.join(data_dir, item) + if any(s.startswith(ddir) for s in lib_dir_list[:-1]): + continue + target_item_dir = os.path.join(target_dir, item) + if os.path.exists(target_item_dir): + if not upgrade: + logger.warning( + 'Target directory %s already exists. Specify ' + '--upgrade to force replacement.', + target_item_dir + ) + continue + if os.path.islink(target_item_dir): + logger.warning( + 'Target directory %s already exists and is ' + 'a link. 
pip will not automatically replace ' + 'links, please remove if replacement is ' + 'desired.', + target_item_dir + ) + continue + if os.path.isdir(target_item_dir): + shutil.rmtree(target_item_dir) + else: + os.remove(target_item_dir) + + shutil.move( + os.path.join(lib_dir, item), + target_item_dir + ) - def _warn_about_conflicts(self, to_install): + def _determine_conflicts(self, to_install): + # type: (List[InstallRequirement]) -> Optional[ConflictDetails] try: - package_set, _dep_info = check_install_conflicts(to_install) + return check_install_conflicts(to_install) except Exception: - logger.error("Error checking for conflicts.", exc_info=True) + logger.exception( + "Error while checking for conflicts. Please file an issue on " + "pip's issue tracker: https://github.com/pypa/pip/issues/new" + ) + return None + + def _warn_about_conflicts(self, conflict_details, new_resolver): + # type: (ConflictDetails, bool) -> None + package_set, (missing, conflicting) = conflict_details + if not missing and not conflicting: return - missing, conflicting = _dep_info - # NOTE: There is some duplication here from pip check + parts = [] # type: List[str] + if not new_resolver: + parts.append( + "After October 2020 you may experience errors when installing " + "or updating packages. This is because pip will change the " + "way that it resolves dependency conflicts.\n" + ) + parts.append( + "We recommend you use --use-feature=2020-resolver to test " + "your packages with the new resolver before it becomes the " + "default.\n" + ) + elif not today_is_later_than(year=2020, month=7, day=31): + # NOTE: trailing newlines here are intentional + parts.append( + "Pip will install or upgrade your package(s) and its " + "dependencies without taking into account other packages you " + "already have installed. 
This may cause an uncaught " + "dependency conflict.\n" + ) + form_link = "https://forms.gle/cWKMoDs8sUVE29hz9" + parts.append( + "If you would like pip to take your other packages into " + "account, please tell us here: {}\n".format(form_link) + ) + + # NOTE: There is some duplication here, with commands/check.py for project_name in missing: version = package_set[project_name][0] for dependency in missing[project_name]: - logger.critical( - "%s %s requires %s, which is not installed.", - project_name, version, dependency[1], + message = ( + "{name} {version} requires {requirement}, " + "which is not installed." + ).format( + name=project_name, + version=version, + requirement=dependency[1], ) + parts.append(message) for project_name in conflicting: version = package_set[project_name][0] for dep_name, dep_version, req in conflicting[project_name]: - logger.critical( - "%s %s has requirement %s, but you'll have %s %s which is " - "incompatible.", - project_name, version, req, dep_name, dep_version, + message = ( + "{name} {version} requires {requirement}, but you'll have " + "{dep_name} {dep_version} which is incompatible." + ).format( + name=project_name, + version=version, + requirement=req, + dep_name=dep_name, + dep_version=dep_version, ) + parts.append(message) + + logger.critical("\n".join(parts)) -def get_lib_location_guesses(*args, **kwargs): - scheme = distutils_scheme('', *args, **kwargs) +def get_lib_location_guesses( + user=False, # type: bool + home=None, # type: Optional[str] + root=None, # type: Optional[str] + isolated=False, # type: bool + prefix=None # type: Optional[str] +): + # type:(...) 
-> List[str] + scheme = distutils_scheme('', user=user, home=home, root=root, + isolated=isolated, prefix=prefix) return [scheme['purelib'], scheme['platlib']] +def site_packages_writable(root, isolated): + # type: (Optional[str], bool) -> bool + return all( + test_writable_dir(d) for d in set( + get_lib_location_guesses(root=root, isolated=isolated)) + ) + + +def decide_user_install( + use_user_site, # type: Optional[bool] + prefix_path=None, # type: Optional[str] + target_dir=None, # type: Optional[str] + root_path=None, # type: Optional[str] + isolated_mode=False, # type: bool +): + # type: (...) -> bool + """Determine whether to do a user install based on the input options. + + If use_user_site is False, no additional checks are done. + If use_user_site is True, it is checked for compatibility with other + options. + If use_user_site is None, the default behaviour depends on the environment, + which is provided by the other arguments. + """ + # In some cases (config from tox), use_user_site can be set to an integer + # rather than a bool, which 'use_user_site is False' wouldn't catch. + if (use_user_site is not None) and (not use_user_site): + logger.debug("Non-user install by explicit request") + return False + + if use_user_site: + if prefix_path: + raise CommandError( + "Can not combine '--user' and '--prefix' as they imply " + "different installation locations" + ) + if virtualenv_no_global(): + raise InstallationError( + "Can not perform a '--user' install. User site-packages " + "are not visible in this virtualenv." 
+ ) + logger.debug("User install by explicit request") + return True + + # If we are here, user installs have not been explicitly requested/avoided + assert use_user_site is None + + # user install incompatible with --prefix/--target + if prefix_path or target_dir: + logger.debug("Non-user install due to --prefix or --target option") + return False + + # If user installs are not enabled, choose a non-user install + if not site.ENABLE_USER_SITE: + logger.debug("Non-user install because user site-packages disabled") + return False + + # If we have permission for a non-user install, do that, + # otherwise do a user install. + if site_packages_writable(root=root_path, isolated=isolated_mode): + logger.debug("Non-user install because site-packages writeable") + return False + + logger.info("Defaulting to user installation because normal site-packages " + "is not writeable") + return True + + +def reject_location_related_install_options(requirements, options): + # type: (List[InstallRequirement], Optional[List[str]]) -> None + """If any location-changing --install-option arguments were passed for + requirements or on the command-line, then show a deprecation warning. + """ + def format_options(option_names): + # type: (Iterable[str]) -> List[str] + return ["--{}".format(name.replace("_", "-")) for name in option_names] + + offenders = [] + + for requirement in requirements: + install_options = requirement.install_options + location_options = parse_distutils_args(install_options) + if location_options: + offenders.append( + "{!r} from {}".format( + format_options(location_options.keys()), requirement + ) + ) + + if options: + location_options = parse_distutils_args(options) + if location_options: + offenders.append( + "{!r} from command line".format( + format_options(location_options.keys()) + ) + ) + + if not offenders: + return + + raise CommandError( + "Location-changing options found in --install-option: {}." 
+ " This is unsupported, use pip-level options like --user," + " --prefix, --root, and --target instead.".format( + "; ".join(offenders) + ) + ) + + def create_env_error_message(error, show_traceback, using_user_site): + # type: (EnvironmentError, bool, bool) -> str """Format an error message for an EnvironmentError It may occur anytime during the execution of the install command. diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/list.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/list.py index a6402749..a67d0f8d 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/list.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/list.py @@ -4,52 +4,62 @@ import json import logging from pip._vendor import six -from pip._vendor.six.moves import zip_longest from pip._internal.cli import cmdoptions -from pip._internal.cli.base_command import Command +from pip._internal.cli.req_command import IndexGroupCommand +from pip._internal.cli.status_codes import SUCCESS from pip._internal.exceptions import CommandError -from pip._internal.index import PackageFinder +from pip._internal.index.collector import LinkCollector +from pip._internal.index.package_finder import PackageFinder +from pip._internal.models.selection_prefs import SelectionPreferences from pip._internal.utils.misc import ( - dist_is_editable, get_installed_distributions, + dist_is_editable, + get_installed_distributions, + tabulate, + write_output, ) from pip._internal.utils.packaging import get_installer +from pip._internal.utils.parallel import map_multithread +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import List, Set, Tuple, Iterator + + from pip._internal.network.session import PipSession + from pip._vendor.pkg_resources import Distribution logger = logging.getLogger(__name__) -class 
ListCommand(Command): +class ListCommand(IndexGroupCommand): """ List installed packages, including editables. Packages are listed in a case-insensitive sorted order. """ - name = 'list' + usage = """ %prog [options]""" - summary = 'List installed packages.' - - def __init__(self, *args, **kw): - super(ListCommand, self).__init__(*args, **kw) - cmd_opts = self.cmd_opts - - cmd_opts.add_option( + def add_options(self): + # type: () -> None + self.cmd_opts.add_option( '-o', '--outdated', action='store_true', default=False, help='List outdated packages') - cmd_opts.add_option( + self.cmd_opts.add_option( '-u', '--uptodate', action='store_true', default=False, help='List uptodate packages') - cmd_opts.add_option( + self.cmd_opts.add_option( '-e', '--editable', action='store_true', default=False, help='List editable projects.') - cmd_opts.add_option( + self.cmd_opts.add_option( '-l', '--local', action='store_true', default=False, @@ -62,8 +72,8 @@ class ListCommand(Command): action='store_true', default=False, help='Only output packages installed in user-site.') - - cmd_opts.add_option( + self.cmd_opts.add_option(cmdoptions.list_path()) + self.cmd_opts.add_option( '--pre', action='store_true', default=False, @@ -71,7 +81,7 @@ class ListCommand(Command): "pip only finds stable versions."), ) - cmd_opts.add_option( + self.cmd_opts.add_option( '--format', action='store', dest='list_format', @@ -81,7 +91,7 @@ class ListCommand(Command): "or json", ) - cmd_opts.add_option( + self.cmd_opts.add_option( '--not-required', action='store_true', dest='not_required', @@ -89,13 +99,13 @@ class ListCommand(Command): "installed packages.", ) - cmd_opts.add_option( + self.cmd_opts.add_option( '--exclude-editable', action='store_false', dest='include_editable', help='Exclude editable package from output.', ) - cmd_opts.add_option( + self.cmd_opts.add_option( '--include-editable', action='store_true', dest='include_editable', @@ -107,30 +117,40 @@ class ListCommand(Command): ) 
self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) - def _build_package_finder(self, options, index_urls, session): + def _build_package_finder(self, options, session): + # type: (Values, PipSession) -> PackageFinder """ Create a package finder appropriate to this list command. """ - return PackageFinder( - find_links=options.find_links, - index_urls=index_urls, + link_collector = LinkCollector.create(session, options=options) + + # Pass allow_yanked=False to ignore yanked versions. + selection_prefs = SelectionPreferences( + allow_yanked=False, allow_all_prereleases=options.pre, - trusted_hosts=options.trusted_hosts, - session=session, + ) + + return PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, ) def run(self, options, args): + # type: (Values, List[str]) -> int if options.outdated and options.uptodate: raise CommandError( "Options --outdated and --uptodate cannot be combined.") + cmdoptions.check_list_path_option(options) + packages = get_installed_distributions( local_only=options.local, user_only=options.user, editables_only=options.editable, include_editables=options.include_editable, + paths=options.path, ) # get_not_required must be called firstly in order to find and @@ -146,35 +166,40 @@ class ListCommand(Command): packages = self.get_uptodate(packages, options) self.output_package_listing(packages, options) + return SUCCESS def get_outdated(self, packages, options): + # type: (List[Distribution], Values) -> List[Distribution] return [ dist for dist in self.iter_packages_latest_infos(packages, options) if dist.latest_version > dist.parsed_version ] def get_uptodate(self, packages, options): + # type: (List[Distribution], Values) -> List[Distribution] return [ dist for dist in self.iter_packages_latest_infos(packages, options) if dist.latest_version == dist.parsed_version ] def get_not_required(self, packages, options): 
- dep_keys = set() + # type: (List[Distribution], Values) -> List[Distribution] + dep_keys = set() # type: Set[Distribution] for dist in packages: dep_keys.update(requirement.key for requirement in dist.requires()) - return {pkg for pkg in packages if pkg.key not in dep_keys} - def iter_packages_latest_infos(self, packages, options): - index_urls = [options.index_url] + options.extra_index_urls - if options.no_index: - logger.debug('Ignoring indexes: %s', ','.join(index_urls)) - index_urls = [] + # Create a set to remove duplicate packages, and cast it to a list + # to keep the return type consistent with get_outdated and + # get_uptodate + return list({pkg for pkg in packages if pkg.key not in dep_keys}) + def iter_packages_latest_infos(self, packages, options): + # type: (List[Distribution], Values) -> Iterator[Distribution] with self._build_session(options) as session: - finder = self._build_package_finder(options, index_urls, session) + finder = self._build_package_finder(options, session) - for dist in packages: + def latest_info(dist): + # type: (Distribution) -> Distribution typ = 'unknown' all_candidates = finder.find_all_candidates(dist.key) if not options.pre: @@ -182,21 +207,29 @@ class ListCommand(Command): all_candidates = [candidate for candidate in all_candidates if not candidate.version.is_prerelease] - if not all_candidates: - continue - best_candidate = max(all_candidates, - key=finder._candidate_sort_key) + evaluator = finder.make_candidate_evaluator( + project_name=dist.project_name, + ) + best_candidate = evaluator.sort_best_candidate(all_candidates) + if best_candidate is None: + return None + remote_version = best_candidate.version - if best_candidate.location.is_wheel: + if best_candidate.link.is_wheel: typ = 'wheel' else: typ = 'sdist' # This is dirty but makes the rest of the code much cleaner dist.latest_version = remote_version dist.latest_filetype = typ - yield dist + return dist + + for dist in map_multithread(latest_info, packages): + 
if dist is not None: + yield dist def output_package_listing(self, packages, options): + # type: (List[Distribution], Values) -> None packages = sorted( packages, key=lambda dist: dist.project_name.lower(), @@ -207,14 +240,15 @@ class ListCommand(Command): elif options.list_format == 'freeze': for dist in packages: if options.verbose >= 1: - logger.info("%s==%s (%s)", dist.project_name, - dist.version, dist.location) + write_output("%s==%s (%s)", dist.project_name, + dist.version, dist.location) else: - logger.info("%s==%s", dist.project_name, dist.version) + write_output("%s==%s", dist.project_name, dist.version) elif options.list_format == 'json': - logger.info(format_for_json(packages, options)) + write_output(format_for_json(packages, options)) def output_package_listing_columns(self, data, header): + # type: (List[List[str]], List[str]) -> None # insert the header first: we need to know the size of column names if len(data) > 0: data.insert(0, header) @@ -226,28 +260,11 @@ class ListCommand(Command): pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes))) for val in pkg_strings: - logger.info(val) - - -def tabulate(vals): - # From pfmoore on GitHub: - # https://github.com/pypa/pip/issues/3651#issuecomment-216932564 - assert len(vals) > 0 - - sizes = [0] * max(len(x) for x in vals) - for row in vals: - sizes = [max(s, len(str(c))) for s, c in zip_longest(sizes, row)] - - result = [] - for row in vals: - display = " ".join([str(c).ljust(s) if c is not None else '' - for s, c in zip_longest(sizes, row)]) - result.append(display) - - return result, sizes + write_output(val) def format_for_columns(pkgs, options): + # type: (List[Distribution], Values) -> Tuple[List[List[str]], List[str]] """ Convert the package data into something usable by output_package_listing_columns. 
@@ -285,6 +302,7 @@ def format_for_columns(pkgs, options): def format_for_json(packages, options): + # type: (List[Distribution], Values) -> str data = [] for dist in packages: info = { diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/search.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/search.py index c157a312..e906ce76 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/search.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/search.py @@ -12,26 +12,37 @@ from pip._vendor.packaging.version import parse as parse_version from pip._vendor.six.moves import xmlrpc_client # type: ignore from pip._internal.cli.base_command import Command +from pip._internal.cli.req_command import SessionCommandMixin from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS -from pip._internal.download import PipXmlrpcTransport from pip._internal.exceptions import CommandError from pip._internal.models.index import PyPI +from pip._internal.network.xmlrpc import PipXmlrpcTransport from pip._internal.utils.compat import get_terminal_size from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import get_distribution, write_output +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import List, Dict, Optional + from typing_extensions import TypedDict + TransformedHit = TypedDict( + 'TransformedHit', + {'name': str, 'summary': str, 'versions': List[str]}, + ) logger = logging.getLogger(__name__) -class SearchCommand(Command): +class SearchCommand(Command, SessionCommandMixin): """Search for PyPI packages whose name or summary contains .""" - name = 'search' + usage = """ %prog [options] """ - summary = 'Search PyPI for packages.' 
ignore_require_venv = True - def __init__(self, *args, **kw): - super(SearchCommand, self).__init__(*args, **kw) + def add_options(self): + # type: () -> None self.cmd_opts.add_option( '-i', '--index', dest='index', @@ -42,6 +53,7 @@ class SearchCommand(Command): self.parser.insert_option_group(0, self.cmd_opts) def run(self, options, args): + # type: (Values, List[str]) -> int if not args: raise CommandError('Missing required argument (search query).') query = args @@ -58,21 +70,25 @@ class SearchCommand(Command): return NO_MATCHES_FOUND def search(self, query, options): + # type: (List[str], Values) -> List[Dict[str, str]] index_url = options.index - with self._build_session(options) as session: - transport = PipXmlrpcTransport(index_url, session) - pypi = xmlrpc_client.ServerProxy(index_url, transport) - hits = pypi.search({'name': query, 'summary': query}, 'or') - return hits + + session = self.get_default_session(options) + + transport = PipXmlrpcTransport(index_url, session) + pypi = xmlrpc_client.ServerProxy(index_url, transport) + hits = pypi.search({'name': query, 'summary': query}, 'or') + return hits def transform_hits(hits): + # type: (List[Dict[str, str]]) -> List[TransformedHit] """ The list from pypi is really a list of versions. We want a list of packages with the list of versions stored inline. This converts the list from pypi into one we can use. 
""" - packages = OrderedDict() + packages = OrderedDict() # type: OrderedDict[str, TransformedHit] for hit in hits: name = hit['name'] summary = hit['summary'] @@ -95,6 +111,7 @@ def transform_hits(hits): def print_results(hits, name_column_width=None, terminal_width=None): + # type: (List[TransformedHit], Optional[int], Optional[int]) -> None if not hits: return if name_column_width is None: @@ -112,24 +129,31 @@ def print_results(hits, name_column_width=None, terminal_width=None): target_width = terminal_width - name_column_width - 5 if target_width > 10: # wrap and indent summary to fit terminal - summary = textwrap.wrap(summary, target_width) - summary = ('\n' + ' ' * (name_column_width + 3)).join(summary) + summary_lines = textwrap.wrap(summary, target_width) + summary = ('\n' + ' ' * (name_column_width + 3)).join( + summary_lines) - line = '%-*s - %s' % (name_column_width, - '%s (%s)' % (name, latest), summary) + line = '{name_latest:{name_column_width}} - {summary}'.format( + name_latest='{name} ({latest})'.format(**locals()), + **locals()) try: - logger.info(line) + write_output(line) if name in installed_packages: - dist = pkg_resources.get_distribution(name) + dist = get_distribution(name) with indent_log(): if dist.version == latest: - logger.info('INSTALLED: %s (latest)', dist.version) + write_output('INSTALLED: %s (latest)', dist.version) else: - logger.info('INSTALLED: %s', dist.version) - logger.info('LATEST: %s', latest) + write_output('INSTALLED: %s', dist.version) + if parse_version(latest).pre: + write_output('LATEST: %s (pre-release; install' + ' with "pip install --pre")', latest) + else: + write_output('LATEST: %s', latest) except UnicodeEncodeError: pass def highest_version(versions): + # type: (List[str]) -> str return max(versions, key=parse_version) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/show.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/show.py index 
f92c9bc6..3892c595 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/show.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/show.py @@ -2,13 +2,19 @@ from __future__ import absolute_import import logging import os -from email.parser import FeedParser # type: ignore +from email.parser import FeedParser from pip._vendor import pkg_resources from pip._vendor.packaging.utils import canonicalize_name from pip._internal.cli.base_command import Command from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.utils.misc import write_output +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import List, Dict, Iterator logger = logging.getLogger(__name__) @@ -19,14 +25,13 @@ class ShowCommand(Command): The output is in RFC-compliant mail header format. """ - name = 'show' + usage = """ %prog [options] ...""" - summary = 'Show information about installed packages.' ignore_require_venv = True - def __init__(self, *args, **kw): - super(ShowCommand, self).__init__(*args, **kw) + def add_options(self): + # type: () -> None self.cmd_opts.add_option( '-f', '--files', dest='files', @@ -37,6 +42,7 @@ class ShowCommand(Command): self.parser.insert_option_group(0, self.cmd_opts) def run(self, options, args): + # type: (Values, List[str]) -> int if not args: logger.warning('ERROR: Please provide a package name or names.') return ERROR @@ -50,6 +56,7 @@ class ShowCommand(Command): def search_packages_info(query): + # type: (List[str]) -> Iterator[Dict[str, str]] """ Gather details from installed distributions. Print distribution name, version, location, and installed files. 
Installed files requires a @@ -61,6 +68,21 @@ def search_packages_info(query): installed[canonicalize_name(p.project_name)] = p query_names = [canonicalize_name(name) for name in query] + missing = sorted( + [name for name, pkg in zip(query, query_names) if pkg not in installed] + ) + if missing: + logger.warning('Package(s) not found: %s', ', '.join(missing)) + + def get_requiring_packages(package_name): + # type: (str) -> List[str] + canonical_name = canonicalize_name(package_name) + return [ + pkg.project_name for pkg in pkg_resources.working_set + if canonical_name in + [canonicalize_name(required.name) for required in + pkg.requires()] + ] for dist in [installed[pkg] for pkg in query_names if pkg in installed]: package = { @@ -68,14 +90,15 @@ def search_packages_info(query): 'version': dist.version, 'location': dist.location, 'requires': [dep.project_name for dep in dist.requires()], + 'required_by': get_requiring_packages(dist.project_name) } file_list = None - metadata = None + metadata = '' if isinstance(dist, pkg_resources.DistInfoDistribution): # RECORDs should be part of .dist-info metadatas if dist.has_metadata('RECORD'): lines = dist.get_metadata_lines('RECORD') - paths = [l.split(',')[0] for l in lines] + paths = [line.split(',')[0] for line in lines] paths = [os.path.join(dist.location, p) for p in paths] file_list = [os.path.relpath(p, dist.location) for p in paths] @@ -123,46 +146,41 @@ def search_packages_info(query): def print_results(distributions, list_files=False, verbose=False): + # type: (Iterator[Dict[str, str]], bool, bool) -> bool """ - Print the informations from installed distributions found. + Print the information from installed distributions found. 
""" results_printed = False for i, dist in enumerate(distributions): results_printed = True if i > 0: - logger.info("---") - - name = dist.get('name', '') - required_by = [ - pkg.project_name for pkg in pkg_resources.working_set - if name in [required.name for required in pkg.requires()] - ] - - logger.info("Name: %s", name) - logger.info("Version: %s", dist.get('version', '')) - logger.info("Summary: %s", dist.get('summary', '')) - logger.info("Home-page: %s", dist.get('home-page', '')) - logger.info("Author: %s", dist.get('author', '')) - logger.info("Author-email: %s", dist.get('author-email', '')) - logger.info("License: %s", dist.get('license', '')) - logger.info("Location: %s", dist.get('location', '')) - logger.info("Requires: %s", ', '.join(dist.get('requires', []))) - logger.info("Required-by: %s", ', '.join(required_by)) + write_output("---") + + write_output("Name: %s", dist.get('name', '')) + write_output("Version: %s", dist.get('version', '')) + write_output("Summary: %s", dist.get('summary', '')) + write_output("Home-page: %s", dist.get('home-page', '')) + write_output("Author: %s", dist.get('author', '')) + write_output("Author-email: %s", dist.get('author-email', '')) + write_output("License: %s", dist.get('license', '')) + write_output("Location: %s", dist.get('location', '')) + write_output("Requires: %s", ', '.join(dist.get('requires', []))) + write_output("Required-by: %s", ', '.join(dist.get('required_by', []))) if verbose: - logger.info("Metadata-Version: %s", - dist.get('metadata-version', '')) - logger.info("Installer: %s", dist.get('installer', '')) - logger.info("Classifiers:") + write_output("Metadata-Version: %s", + dist.get('metadata-version', '')) + write_output("Installer: %s", dist.get('installer', '')) + write_output("Classifiers:") for classifier in dist.get('classifiers', []): - logger.info(" %s", classifier) - logger.info("Entry-points:") + write_output(" %s", classifier) + write_output("Entry-points:") for entry in 
dist.get('entry_points', []): - logger.info(" %s", entry.strip()) + write_output(" %s", entry.strip()) if list_files: - logger.info("Files:") + write_output("Files:") for line in dist.get('files', []): - logger.info(" %s", line.strip()) + write_output(" %s", line.strip()) if "files" not in dist: - logger.info("Cannot locate installed-files.txt") + write_output("Cannot locate installed-files.txt") return results_printed diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/uninstall.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/uninstall.py index 0cd6f54b..3371fe47 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/uninstall.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/uninstall.py @@ -3,13 +3,23 @@ from __future__ import absolute_import from pip._vendor.packaging.utils import canonicalize_name from pip._internal.cli.base_command import Command +from pip._internal.cli.req_command import SessionCommandMixin +from pip._internal.cli.status_codes import SUCCESS from pip._internal.exceptions import InstallationError from pip._internal.req import parse_requirements -from pip._internal.req.constructors import install_req_from_line +from pip._internal.req.constructors import ( + install_req_from_line, + install_req_from_parsed_requirement, +) from pip._internal.utils.misc import protect_pip_from_modification_on_windows +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import List -class UninstallCommand(Command): + +class UninstallCommand(Command, SessionCommandMixin): """ Uninstall packages. @@ -19,14 +29,13 @@ class UninstallCommand(Command): leave behind no metadata to determine what files were installed. - Script wrappers installed by ``python setup.py develop``. """ - name = 'uninstall' + usage = """ %prog [options] ... 
%prog [options] -r ...""" - summary = 'Uninstall packages.' - def __init__(self, *args, **kw): - super(UninstallCommand, self).__init__(*args, **kw) + def add_options(self): + # type: () -> None self.cmd_opts.add_option( '-r', '--requirement', dest='requirements', @@ -45,34 +54,42 @@ class UninstallCommand(Command): self.parser.insert_option_group(0, self.cmd_opts) def run(self, options, args): - with self._build_session(options) as session: - reqs_to_uninstall = {} - for name in args: - req = install_req_from_line( - name, isolated=options.isolated_mode, + # type: (Values, List[str]) -> int + session = self.get_default_session(options) + + reqs_to_uninstall = {} + for name in args: + req = install_req_from_line( + name, isolated=options.isolated_mode, + ) + if req.name: + reqs_to_uninstall[canonicalize_name(req.name)] = req + for filename in options.requirements: + for parsed_req in parse_requirements( + filename, + options=options, + session=session): + req = install_req_from_parsed_requirement( + parsed_req, + isolated=options.isolated_mode ) if req.name: reqs_to_uninstall[canonicalize_name(req.name)] = req - for filename in options.requirements: - for req in parse_requirements( - filename, - options=options, - session=session): - if req.name: - reqs_to_uninstall[canonicalize_name(req.name)] = req - if not reqs_to_uninstall: - raise InstallationError( - 'You must give at least one requirement to %(name)s (see ' - '"pip help %(name)s")' % dict(name=self.name) - ) + if not reqs_to_uninstall: + raise InstallationError( + 'You must give at least one requirement to {self.name} (see ' + '"pip help {self.name}")'.format(**locals()) + ) + + protect_pip_from_modification_on_windows( + modifying_pip="pip" in reqs_to_uninstall + ) - protect_pip_from_modification_on_windows( - modifying_pip="pip" in reqs_to_uninstall + for req in reqs_to_uninstall.values(): + uninstall_pathset = req.uninstall( + auto_confirm=options.yes, verbose=self.verbosity > 0, ) + if uninstall_pathset: 
+ uninstall_pathset.commit() - for req in reqs_to_uninstall.values(): - uninstall_pathset = req.uninstall( - auto_confirm=options.yes, verbose=self.verbosity > 0, - ) - if uninstall_pathset: - uninstall_pathset.commit() + return SUCCESS diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/wheel.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/wheel.py index cd72a3df..0f718566 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/wheel.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/commands/wheel.py @@ -1,19 +1,26 @@ # -*- coding: utf-8 -*- + from __future__ import absolute_import import logging import os +import shutil from pip._internal.cache import WheelCache from pip._internal.cli import cmdoptions -from pip._internal.cli.base_command import RequirementCommand -from pip._internal.exceptions import CommandError, PreviousBuildDirError -from pip._internal.operations.prepare import RequirementPreparer -from pip._internal.req import RequirementSet -from pip._internal.req.req_tracker import RequirementTracker -from pip._internal.resolve import Resolver +from pip._internal.cli.req_command import RequirementCommand, with_cleanup +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.exceptions import CommandError +from pip._internal.req.req_tracker import get_requirement_tracker +from pip._internal.utils.misc import ensure_dir, normalize_path from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.wheel import WheelBuilder +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.wheel_builder import build, should_build_for_wheel_command + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import List + logger = logging.getLogger(__name__) @@ -33,7 +40,6 @@ class WheelCommand(RequirementCommand): """ - name = 'wheel' usage = """ %prog [options] ... %prog [options] -r ... 
@@ -41,14 +47,10 @@ class WheelCommand(RequirementCommand): %prog [options] [-e] ... %prog [options] ...""" - summary = 'Build wheels from your requirements.' - - def __init__(self, *args, **kw): - super(WheelCommand, self).__init__(*args, **kw) + def add_options(self): + # type: () -> None - cmd_opts = self.cmd_opts - - cmd_opts.add_option( + self.cmd_opts.add_option( '-w', '--wheel-dir', dest='wheel_dir', metavar='dir', @@ -56,29 +58,29 @@ class WheelCommand(RequirementCommand): help=("Build wheels into , where the default is the " "current working directory."), ) - cmd_opts.add_option(cmdoptions.no_binary()) - cmd_opts.add_option(cmdoptions.only_binary()) - cmd_opts.add_option(cmdoptions.prefer_binary()) - cmd_opts.add_option( + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option( '--build-option', dest='build_options', metavar='options', action='append', help="Extra arguments to be supplied to 'setup.py bdist_wheel'.", ) - cmd_opts.add_option(cmdoptions.no_build_isolation()) - cmd_opts.add_option(cmdoptions.use_pep517()) - cmd_opts.add_option(cmdoptions.no_use_pep517()) - cmd_opts.add_option(cmdoptions.constraints()) - cmd_opts.add_option(cmdoptions.editable()) - cmd_opts.add_option(cmdoptions.requirements()) - cmd_opts.add_option(cmdoptions.src()) - cmd_opts.add_option(cmdoptions.ignore_requires_python()) - cmd_opts.add_option(cmdoptions.no_deps()) - cmd_opts.add_option(cmdoptions.build_dir()) - cmd_opts.add_option(cmdoptions.progress_bar()) - - cmd_opts.add_option( + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.no_use_pep517()) + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.editable()) + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.src()) + 
self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.build_dir()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) + + self.cmd_opts.add_option( '--global-option', dest='global_options', action='append', @@ -86,7 +88,7 @@ class WheelCommand(RequirementCommand): help="Extra global options to be supplied to the setup.py " "call before the 'bdist_wheel' command.") - cmd_opts.add_option( + self.cmd_opts.add_option( '--pre', action='store_true', default=False, @@ -94,8 +96,7 @@ class WheelCommand(RequirementCommand): "pip only finds stable versions."), ) - cmd_opts.add_option(cmdoptions.no_clean()) - cmd_opts.add_option(cmdoptions.require_hashes()) + self.cmd_opts.add_option(cmdoptions.require_hashes()) index_opts = cmdoptions.make_option_group( cmdoptions.index_group, @@ -103,84 +104,85 @@ class WheelCommand(RequirementCommand): ) self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) + @with_cleanup def run(self, options, args): + # type: (Values, List[str]) -> int cmdoptions.check_install_build_global(options) - index_urls = [options.index_url] + options.extra_index_urls - if options.no_index: - logger.debug('Ignoring indexes: %s', ','.join(index_urls)) - index_urls = [] + session = self.get_default_session(options) + + finder = self._build_package_finder(options, session) + build_delete = (not (options.no_clean or options.build_dir)) + wheel_cache = WheelCache(options.cache_dir, options.format_control) - if options.build_dir: - options.build_dir = os.path.abspath(options.build_dir) + options.wheel_dir = normalize_path(options.wheel_dir) + ensure_dir(options.wheel_dir) - options.src_dir = os.path.abspath(options.src_dir) + req_tracker = self.enter_context(get_requirement_tracker()) - with self._build_session(options) as session: - finder = self._build_package_finder(options, 
session) - build_delete = (not (options.no_clean or options.build_dir)) - wheel_cache = WheelCache(options.cache_dir, options.format_control) + directory = TempDirectory( + options.build_dir, + delete=build_delete, + kind="wheel", + globally_managed=True, + ) - with RequirementTracker() as req_tracker, TempDirectory( - options.build_dir, delete=build_delete, kind="wheel" - ) as directory: + reqs = self.get_requirements(args, options, finder, session) - requirement_set = RequirementSet( - require_hashes=options.require_hashes, + preparer = self.make_requirement_preparer( + temp_build_dir=directory, + options=options, + req_tracker=req_tracker, + session=session, + finder=finder, + wheel_download_dir=options.wheel_dir, + use_user_site=False, + ) + + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + wheel_cache=wheel_cache, + ignore_requires_python=options.ignore_requires_python, + use_pep517=options.use_pep517, + ) + + self.trace_basic_info(finder) + + requirement_set = resolver.resolve( + reqs, check_supported_wheels=True + ) + + reqs_to_build = [ + r for r in requirement_set.requirements.values() + if should_build_for_wheel_command(r) + ] + + # build wheels + build_successes, build_failures = build( + reqs_to_build, + wheel_cache=wheel_cache, + build_options=options.build_options or [], + global_options=options.global_options or [], + ) + for req in build_successes: + assert req.link and req.link.is_wheel + assert req.local_file_path + # copy from cache to target directory + try: + shutil.copy(req.local_file_path, options.wheel_dir) + except OSError as e: + logger.warning( + "Building wheel for %s failed: %s", + req.name, e, ) + build_failures.append(req) + if len(build_failures) != 0: + raise CommandError( + "Failed to build one or more wheels" + ) - try: - self.populate_requirement_set( - requirement_set, args, options, finder, session, - self.name, wheel_cache - ) - - preparer = RequirementPreparer( - 
build_dir=directory.path, - src_dir=options.src_dir, - download_dir=None, - wheel_download_dir=options.wheel_dir, - progress_bar=options.progress_bar, - build_isolation=options.build_isolation, - req_tracker=req_tracker, - ) - - resolver = Resolver( - preparer=preparer, - finder=finder, - session=session, - wheel_cache=wheel_cache, - use_user_site=False, - upgrade_strategy="to-satisfy-only", - force_reinstall=False, - ignore_dependencies=options.ignore_dependencies, - ignore_requires_python=options.ignore_requires_python, - ignore_installed=True, - isolated=options.isolated_mode, - use_pep517=options.use_pep517 - ) - resolver.resolve(requirement_set) - - # build wheels - wb = WheelBuilder( - finder, preparer, wheel_cache, - build_options=options.build_options or [], - global_options=options.global_options or [], - no_clean=options.no_clean, - ) - build_failures = wb.build( - requirement_set.requirements.values(), session=session, - ) - if len(build_failures) != 0: - raise CommandError( - "Failed to build one or more wheels" - ) - except PreviousBuildDirError: - options.no_clean = True - raise - finally: - if not options.no_clean: - requirement_set.cleanup_files() - wheel_cache.cleanup() + return SUCCESS diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/configuration.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/configuration.py index fe6df9b7..e49a5f4f 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/configuration.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/configuration.py @@ -14,22 +14,21 @@ Some terminology: import locale import logging import os +import sys -from pip._vendor import six from pip._vendor.six.moves import configparser from pip._internal.exceptions import ( - ConfigurationError, ConfigurationFileCouldNotBeLoaded, -) -from pip._internal.locations import ( - legacy_config_file, new_config_file, running_under_virtualenv, - 
site_config_files, venv_config_file, + ConfigurationError, + ConfigurationFileCouldNotBeLoaded, ) +from pip._internal.utils import appdirs +from pip._internal.utils.compat import WINDOWS, expanduser from pip._internal.utils.misc import ensure_dir, enum from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 + from typing import ( Any, Dict, Iterable, List, NewType, Optional, Tuple ) @@ -52,6 +51,12 @@ def _normalize_name(name): def _disassemble_key(name): # type: (str) -> List[str] + if "." not in name: + error_message = ( + "Key does not contain dot separated section and key. " + "Perhaps you wanted to use 'global.{}' instead?" + ).format(name) + raise ConfigurationError(error_message) return name.split(".", 1) @@ -59,12 +64,38 @@ def _disassemble_key(name): kinds = enum( USER="user", # User Specific GLOBAL="global", # System Wide - VENV="venv", # Virtual Environment Specific + SITE="site", # [Virtual] Environment Specific ENV="env", # from PIP_CONFIG_FILE ENV_VAR="env-var", # from Environment Variables ) +CONFIG_BASENAME = 'pip.ini' if WINDOWS else 'pip.conf' + + +def get_configuration_files(): + # type: () -> Dict[Kind, List[str]] + global_config_files = [ + os.path.join(path, CONFIG_BASENAME) + for path in appdirs.site_config_dirs('pip') + ] + + site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME) + legacy_config_file = os.path.join( + expanduser('~'), + 'pip' if WINDOWS else '.pip', + CONFIG_BASENAME, + ) + new_config_file = os.path.join( + appdirs.user_config_dir("pip"), CONFIG_BASENAME + ) + return { + kinds.GLOBAL: global_config_files, + kinds.SITE: [site_config_file], + kinds.USER: [legacy_config_file, new_config_file], + } + + class Configuration(object): """Handles management of configuration. 
@@ -80,22 +111,22 @@ class Configuration(object): """ def __init__(self, isolated, load_only=None): - # type: (bool, Kind) -> None + # type: (bool, Optional[Kind]) -> None super(Configuration, self).__init__() - _valid_load_only = [kinds.USER, kinds.GLOBAL, kinds.VENV, None] + _valid_load_only = [kinds.USER, kinds.GLOBAL, kinds.SITE, None] if load_only not in _valid_load_only: raise ConfigurationError( "Got invalid value for load_only - should be one of {}".format( ", ".join(map(repr, _valid_load_only[:-1])) ) ) - self.isolated = isolated # type: bool - self.load_only = load_only # type: Optional[Kind] + self.isolated = isolated + self.load_only = load_only # The order here determines the override order. self._override_order = [ - kinds.GLOBAL, kinds.USER, kinds.VENV, kinds.ENV, kinds.ENV_VAR + kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR ] self._ignore_env_names = ["version", "help"] @@ -151,6 +182,7 @@ class Configuration(object): """ self._ensure_have_load_only() + assert self.load_only fname, parser = self._get_parser_to_modify() if parser is not None: @@ -166,10 +198,10 @@ class Configuration(object): def unset_value(self, key): # type: (str) -> None - """Unset a value in the configuration. 
- """ + """Unset a value in the configuration.""" self._ensure_have_load_only() + assert self.load_only if key not in self._config[self.load_only]: raise ConfigurationError("No such key - {}".format(key)) @@ -177,35 +209,23 @@ class Configuration(object): if parser is not None: section, name = _disassemble_key(key) - - # Remove the key in the parser - modified_something = False - if parser.has_section(section): - # Returns whether the option was removed or not - modified_something = parser.remove_option(section, name) - - if modified_something: - # name removed from parser, section may now be empty - section_iter = iter(parser.items(section)) - try: - val = six.next(section_iter) - except StopIteration: - val = None - - if val is None: - parser.remove_section(section) - - self._mark_as_modified(fname, parser) - else: + if not (parser.has_section(section) + and parser.remove_option(section, name)): + # The option was not removed. raise ConfigurationError( "Fatal Internal error [id=1]. Please report as a bug." ) + # The section may be empty after the option was removed. + if not parser.items(section): + parser.remove_section(section) + self._mark_as_modified(fname, parser) + del self._config[self.load_only][key] def save(self): # type: () -> None - """Save the currentin-memory state. + """Save the current in-memory state. 
""" self._ensure_have_load_only() @@ -216,7 +236,7 @@ class Configuration(object): ensure_dir(os.path.dirname(fname)) with open(fname, "w") as f: - parser.write(f) # type: ignore + parser.write(f) # # Private routines @@ -246,7 +266,7 @@ class Configuration(object): # type: () -> None """Loads configuration from configuration files """ - config_files = dict(self._iter_config_files()) + config_files = dict(self.iter_config_files()) if config_files[kinds.ENV][0:1] == [os.devnull]: logger.debug( "Skipping loading configuration files due to " @@ -308,7 +328,7 @@ class Configuration(object): """Loads configuration from environment variables """ self._config[kinds.ENV_VAR].update( - self._normalized_keys(":env:", self._get_environ_vars()) + self._normalized_keys(":env:", self.get_environ_vars()) ) def _normalized_keys(self, section, items): @@ -324,7 +344,7 @@ class Configuration(object): normalized[key] = val return normalized - def _get_environ_vars(self): + def get_environ_vars(self): # type: () -> Iterable[Tuple[str, str]] """Returns a generator with all environmental vars with prefix PIP_""" for key, val in os.environ.items(): @@ -336,7 +356,7 @@ class Configuration(object): yield key[4:].lower(), val # XXX: This is patched in the tests. - def _iter_config_files(self): + def iter_config_files(self): # type: () -> Iterable[Tuple[Kind, List[str]]] """Yields variant and configuration files associated with it. 
@@ -351,8 +371,10 @@ class Configuration(object): else: yield kinds.ENV, [] + config_files = get_configuration_files() + # at the base we have any global configuration - yield kinds.GLOBAL, list(site_config_files) + yield kinds.GLOBAL, config_files[kinds.GLOBAL] # per-user configuration next should_load_user_config = not self.isolated and not ( @@ -360,15 +382,20 @@ class Configuration(object): ) if should_load_user_config: # The legacy config file is overridden by the new config file - yield kinds.USER, [legacy_config_file, new_config_file] + yield kinds.USER, config_files[kinds.USER] # finally virtualenv configuration first trumping others - if running_under_virtualenv(): - yield kinds.VENV, [venv_config_file] + yield kinds.SITE, config_files[kinds.SITE] + + def get_values_in_config(self, variant): + # type: (Kind) -> Dict[str, Any] + """Get values present in a config file""" + return self._config[variant] def _get_parser_to_modify(self): # type: () -> Tuple[str, RawConfigParser] # Determine which parser to modify + assert self.load_only parsers = self._parsers[self.load_only] if not parsers: # This should not happen if everything works correctly. 
@@ -385,3 +412,7 @@ class Configuration(object): file_parser_tuple = (fname, parser) if file_parser_tuple not in self._modified_parsers: self._modified_parsers.append(file_parser_tuple) + + def __repr__(self): + # type: () -> str + return "{}({!r})".format(self.__class__.__name__, self._dictionary) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/distributions/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/distributions/__init__.py new file mode 100644 index 00000000..d5c1afc5 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/distributions/__init__.py @@ -0,0 +1,24 @@ +from pip._internal.distributions.sdist import SourceDistribution +from pip._internal.distributions.wheel import WheelDistribution +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from pip._internal.distributions.base import AbstractDistribution + from pip._internal.req.req_install import InstallRequirement + + +def make_distribution_for_install_requirement(install_req): + # type: (InstallRequirement) -> AbstractDistribution + """Returns a Distribution for the given InstallRequirement + """ + # Editable requirements will always be source distributions. They use the + # legacy logic until we create a modern standard for them. 
+ if install_req.editable: + return SourceDistribution(install_req) + + # If it's a wheel, it's a WheelDistribution + if install_req.is_wheel: + return WheelDistribution(install_req) + + # Otherwise, a SourceDistribution + return SourceDistribution(install_req) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/distributions/base.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/distributions/base.py new file mode 100644 index 00000000..b836b98d --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/distributions/base.py @@ -0,0 +1,45 @@ +import abc + +from pip._vendor.six import add_metaclass + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional + + from pip._vendor.pkg_resources import Distribution + from pip._internal.req import InstallRequirement + from pip._internal.index.package_finder import PackageFinder + + +@add_metaclass(abc.ABCMeta) +class AbstractDistribution(object): + """A base class for handling installable artifacts. + + The requirements for anything installable are as follows: + + - we must be able to determine the requirement name + (or we can't correctly handle the non-upgrade case). + + - for packages with setup requirements, we must also be able + to determine their requirements without installing additional + packages (for the same reason as run-time dependencies) + + - we must be able to create a Distribution object exposing the + above metadata. 
+ """ + + def __init__(self, req): + # type: (InstallRequirement) -> None + super(AbstractDistribution, self).__init__() + self.req = req + + @abc.abstractmethod + def get_pkg_resources_distribution(self): + # type: () -> Optional[Distribution] + raise NotImplementedError() + + @abc.abstractmethod + def prepare_distribution_metadata(self, finder, build_isolation): + # type: (PackageFinder, bool) -> None + raise NotImplementedError() diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/distributions/installed.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/distributions/installed.py new file mode 100644 index 00000000..0d15bf42 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/distributions/installed.py @@ -0,0 +1,24 @@ +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional + + from pip._vendor.pkg_resources import Distribution + from pip._internal.index.package_finder import PackageFinder + + +class InstalledDistribution(AbstractDistribution): + """Represents an installed package. + + This does not need any preparation as the required information has already + been computed. 
+ """ + + def get_pkg_resources_distribution(self): + # type: () -> Optional[Distribution] + return self.req.satisfied_by + + def prepare_distribution_metadata(self, finder, build_isolation): + # type: (PackageFinder, bool) -> None + pass diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/distributions/sdist.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/distributions/sdist.py new file mode 100644 index 00000000..be3d7d97 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/distributions/sdist.py @@ -0,0 +1,104 @@ +import logging + +from pip._internal.build_env import BuildEnvironment +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.exceptions import InstallationError +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Set, Tuple + + from pip._vendor.pkg_resources import Distribution + from pip._internal.index.package_finder import PackageFinder + + +logger = logging.getLogger(__name__) + + +class SourceDistribution(AbstractDistribution): + """Represents a source distribution. + + The preparation step for these needs metadata for the packages to be + generated, either using PEP 517 or using the legacy `setup.py egg_info`. 
+ """ + + def get_pkg_resources_distribution(self): + # type: () -> Distribution + return self.req.get_dist() + + def prepare_distribution_metadata(self, finder, build_isolation): + # type: (PackageFinder, bool) -> None + # Load pyproject.toml, to determine whether PEP 517 is to be used + self.req.load_pyproject_toml() + + # Set up the build isolation, if this requirement should be isolated + should_isolate = self.req.use_pep517 and build_isolation + if should_isolate: + self._setup_isolation(finder) + + self.req.prepare_metadata() + + def _setup_isolation(self, finder): + # type: (PackageFinder) -> None + def _raise_conflicts(conflicting_with, conflicting_reqs): + # type: (str, Set[Tuple[str, str]]) -> None + format_string = ( + "Some build dependencies for {requirement} " + "conflict with {conflicting_with}: {description}." + ) + error_message = format_string.format( + requirement=self.req, + conflicting_with=conflicting_with, + description=', '.join( + '{} is incompatible with {}'.format(installed, wanted) + for installed, wanted in sorted(conflicting) + ) + ) + raise InstallationError(error_message) + + # Isolate in a BuildEnvironment and install the build-time + # requirements. + pyproject_requires = self.req.pyproject_requires + assert pyproject_requires is not None + + self.req.build_env = BuildEnvironment() + self.req.build_env.install_requirements( + finder, pyproject_requires, 'overlay', + "Installing build dependencies" + ) + conflicting, missing = self.req.build_env.check_requirements( + self.req.requirements_to_check + ) + if conflicting: + _raise_conflicts("PEP 517/518 supported requirements", + conflicting) + if missing: + logger.warning( + "Missing build requirements in pyproject.toml for %s.", + self.req, + ) + logger.warning( + "The project does not specify a build backend, and " + "pip cannot fall back to setuptools without %s.", + " and ".join(map(repr, sorted(missing))) + ) + # Install any extra build dependencies that the backend requests. 
+ # This must be done in a second pass, as the pyproject.toml + # dependencies must be installed before we can call the backend. + with self.req.build_env: + runner = runner_with_spinner_message( + "Getting requirements to build wheel" + ) + backend = self.req.pep517_backend + assert backend is not None + with backend.subprocess_runner(runner): + reqs = backend.get_requires_for_build_wheel() + + conflicting, missing = self.req.build_env.check_requirements(reqs) + if conflicting: + _raise_conflicts("the backend dependencies", conflicting) + self.req.build_env.install_requirements( + finder, missing, 'normal', + "Installing backend dependencies" + ) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/distributions/wheel.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/distributions/wheel.py new file mode 100644 index 00000000..bf3482b1 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/distributions/wheel.py @@ -0,0 +1,36 @@ +from zipfile import ZipFile + +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel + +if MYPY_CHECK_RUNNING: + from pip._vendor.pkg_resources import Distribution + from pip._internal.index.package_finder import PackageFinder + + +class WheelDistribution(AbstractDistribution): + """Represents a wheel distribution. + + This does not need any preparation as wheels can be directly unpacked. + """ + + def get_pkg_resources_distribution(self): + # type: () -> Distribution + """Loads the metadata from the wheel file into memory and returns a + Distribution that uses it, not relying on the wheel file or + requirement. + """ + # Set as part of preparation during download. + assert self.req.local_file_path + # Wheels are never unnamed. 
+ assert self.req.name + + with ZipFile(self.req.local_file_path, allowZip64=True) as z: + return pkg_resources_distribution_for_wheel( + z, self.req.name, self.req.local_file_path + ) + + def prepare_distribution_metadata(self, finder, build_isolation): + # type: (PackageFinder, bool) -> None + pass diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/download.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/download.py deleted file mode 100644 index 2bbe1762..00000000 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/download.py +++ /dev/null @@ -1,971 +0,0 @@ -from __future__ import absolute_import - -import cgi -import email.utils -import getpass -import json -import logging -import mimetypes -import os -import platform -import re -import shutil -import sys - -from pip._vendor import requests, six, urllib3 -from pip._vendor.cachecontrol import CacheControlAdapter -from pip._vendor.cachecontrol.caches import FileCache -from pip._vendor.lockfile import LockError -from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter -from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth -from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response -from pip._vendor.requests.structures import CaseInsensitiveDict -from pip._vendor.requests.utils import get_netrc_auth -# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is -# why we ignore the type on this import -from pip._vendor.six.moves import xmlrpc_client # type: ignore -from pip._vendor.six.moves.urllib import parse as urllib_parse -from pip._vendor.six.moves.urllib import request as urllib_request -from pip._vendor.urllib3.util import IS_PYOPENSSL - -import pip -from pip._internal.exceptions import HashMismatch, InstallationError -from pip._internal.locations import write_delete_marker_file -from pip._internal.models.index import PyPI -from pip._internal.utils.encoding import auto_decode -from 
pip._internal.utils.filesystem import check_path_owner -from pip._internal.utils.glibc import libc_ver -from pip._internal.utils.logging import indent_log -from pip._internal.utils.misc import ( - ARCHIVE_EXTENSIONS, ask_path_exists, backup_dir, call_subprocess, consume, - display_path, format_size, get_installed_version, rmtree, - split_auth_from_netloc, splitext, unpack_file, -) -from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM -from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.utils.ui import DownloadProgressProvider -from pip._internal.vcs import vcs - -if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 - Optional, Tuple, Dict, IO, Text, Union - ) - from pip._internal.models.link import Link # noqa: F401 - from pip._internal.utils.hashes import Hashes # noqa: F401 - from pip._internal.vcs import AuthInfo # noqa: F401 - -try: - import ssl # noqa -except ImportError: - ssl = None - -HAS_TLS = (ssl is not None) or IS_PYOPENSSL - -__all__ = ['get_file_content', - 'is_url', 'url_to_path', 'path_to_url', - 'is_archive_file', 'unpack_vcs_link', - 'unpack_file_url', 'is_vcs_url', 'is_file_url', - 'unpack_http_url', 'unpack_url'] - - -logger = logging.getLogger(__name__) - - -def user_agent(): - """ - Return a string representing the user agent. 
- """ - data = { - "installer": {"name": "pip", "version": pip.__version__}, - "python": platform.python_version(), - "implementation": { - "name": platform.python_implementation(), - }, - } - - if data["implementation"]["name"] == 'CPython': - data["implementation"]["version"] = platform.python_version() - elif data["implementation"]["name"] == 'PyPy': - if sys.pypy_version_info.releaselevel == 'final': - pypy_version_info = sys.pypy_version_info[:3] - else: - pypy_version_info = sys.pypy_version_info - data["implementation"]["version"] = ".".join( - [str(x) for x in pypy_version_info] - ) - elif data["implementation"]["name"] == 'Jython': - # Complete Guess - data["implementation"]["version"] = platform.python_version() - elif data["implementation"]["name"] == 'IronPython': - # Complete Guess - data["implementation"]["version"] = platform.python_version() - - if sys.platform.startswith("linux"): - from pip._vendor import distro - distro_infos = dict(filter( - lambda x: x[1], - zip(["name", "version", "id"], distro.linux_distribution()), - )) - libc = dict(filter( - lambda x: x[1], - zip(["lib", "version"], libc_ver()), - )) - if libc: - distro_infos["libc"] = libc - if distro_infos: - data["distro"] = distro_infos - - if sys.platform.startswith("darwin") and platform.mac_ver()[0]: - data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]} - - if platform.system(): - data.setdefault("system", {})["name"] = platform.system() - - if platform.release(): - data.setdefault("system", {})["release"] = platform.release() - - if platform.machine(): - data["cpu"] = platform.machine() - - if HAS_TLS: - data["openssl_version"] = ssl.OPENSSL_VERSION - - setuptools_version = get_installed_version("setuptools") - if setuptools_version is not None: - data["setuptools_version"] = setuptools_version - - return "{data[installer][name]}/{data[installer][version]} {json}".format( - data=data, - json=json.dumps(data, separators=(",", ":"), sort_keys=True), - ) - - -class 
MultiDomainBasicAuth(AuthBase): - - def __init__(self, prompting=True): - # type: (bool) -> None - self.prompting = prompting - self.passwords = {} # type: Dict[str, AuthInfo] - - def __call__(self, req): - parsed = urllib_parse.urlparse(req.url) - - # Split the credentials from the netloc. - netloc, url_user_password = split_auth_from_netloc(parsed.netloc) - - # Set the url of the request to the url without any credentials - req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:]) - - # Use any stored credentials that we have for this netloc - username, password = self.passwords.get(netloc, (None, None)) - - # Use the credentials embedded in the url if we have none stored - if username is None: - username, password = url_user_password - - # Get creds from netrc if we still don't have them - if username is None and password is None: - netrc_auth = get_netrc_auth(req.url) - username, password = netrc_auth if netrc_auth else (None, None) - - if username or password: - # Store the username and password - self.passwords[netloc] = (username, password) - - # Send the basic auth with this request - req = HTTPBasicAuth(username or "", password or "")(req) - - # Attach a hook to handle 401 responses - req.register_hook("response", self.handle_401) - - return req - - def handle_401(self, resp, **kwargs): - # We only care about 401 responses, anything else we want to just - # pass through the actual response - if resp.status_code != 401: - return resp - - # We are not able to prompt the user so simply return the response - if not self.prompting: - return resp - - parsed = urllib_parse.urlparse(resp.url) - - # Prompt the user for a new username and password - username = six.moves.input("User for %s: " % parsed.netloc) - password = getpass.getpass("Password: ") - - # Store the new username and password to use for future requests - if username or password: - self.passwords[parsed.netloc] = (username, password) - - # Consume content and release the original 
connection to allow our new - # request to reuse the same one. - resp.content - resp.raw.release_conn() - - # Add our new username and password to the request - req = HTTPBasicAuth(username or "", password or "")(resp.request) - req.register_hook("response", self.warn_on_401) - - # Send our new request - new_resp = resp.connection.send(req, **kwargs) - new_resp.history.append(resp) - - return new_resp - - def warn_on_401(self, resp, **kwargs): - # warn user that they provided incorrect credentials - if resp.status_code == 401: - logger.warning('401 Error, Credentials not correct for %s', - resp.request.url) - - -class LocalFSAdapter(BaseAdapter): - - def send(self, request, stream=None, timeout=None, verify=None, cert=None, - proxies=None): - pathname = url_to_path(request.url) - - resp = Response() - resp.status_code = 200 - resp.url = request.url - - try: - stats = os.stat(pathname) - except OSError as exc: - resp.status_code = 404 - resp.raw = exc - else: - modified = email.utils.formatdate(stats.st_mtime, usegmt=True) - content_type = mimetypes.guess_type(pathname)[0] or "text/plain" - resp.headers = CaseInsensitiveDict({ - "Content-Type": content_type, - "Content-Length": stats.st_size, - "Last-Modified": modified, - }) - - resp.raw = open(pathname, "rb") - resp.close = resp.raw.close - - return resp - - def close(self): - pass - - -class SafeFileCache(FileCache): - """ - A file based cache which is safe to use even when the target directory may - not be accessible or writable. - """ - - def __init__(self, *args, **kwargs): - super(SafeFileCache, self).__init__(*args, **kwargs) - - # Check to ensure that the directory containing our cache directory - # is owned by the user current executing pip. If it does not exist - # we will check the parent directory until we find one that does exist. - # If it is not owned by the user executing pip then we will disable - # the cache and log a warning. 
- if not check_path_owner(self.directory): - logger.warning( - "The directory '%s' or its parent directory is not owned by " - "the current user and the cache has been disabled. Please " - "check the permissions and owner of that directory. If " - "executing pip with sudo, you may want sudo's -H flag.", - self.directory, - ) - - # Set our directory to None to disable the Cache - self.directory = None - - def get(self, *args, **kwargs): - # If we don't have a directory, then the cache should be a no-op. - if self.directory is None: - return - - try: - return super(SafeFileCache, self).get(*args, **kwargs) - except (LockError, OSError, IOError): - # We intentionally silence this error, if we can't access the cache - # then we can just skip caching and process the request as if - # caching wasn't enabled. - pass - - def set(self, *args, **kwargs): - # If we don't have a directory, then the cache should be a no-op. - if self.directory is None: - return - - try: - return super(SafeFileCache, self).set(*args, **kwargs) - except (LockError, OSError, IOError): - # We intentionally silence this error, if we can't access the cache - # then we can just skip caching and process the request as if - # caching wasn't enabled. - pass - - def delete(self, *args, **kwargs): - # If we don't have a directory, then the cache should be a no-op. - if self.directory is None: - return - - try: - return super(SafeFileCache, self).delete(*args, **kwargs) - except (LockError, OSError, IOError): - # We intentionally silence this error, if we can't access the cache - # then we can just skip caching and process the request as if - # caching wasn't enabled. 
- pass - - -class InsecureHTTPAdapter(HTTPAdapter): - - def cert_verify(self, conn, url, verify, cert): - conn.cert_reqs = 'CERT_NONE' - conn.ca_certs = None - - -class PipSession(requests.Session): - - timeout = None # type: Optional[int] - - def __init__(self, *args, **kwargs): - retries = kwargs.pop("retries", 0) - cache = kwargs.pop("cache", None) - insecure_hosts = kwargs.pop("insecure_hosts", []) - - super(PipSession, self).__init__(*args, **kwargs) - - # Attach our User Agent to the request - self.headers["User-Agent"] = user_agent() - - # Attach our Authentication handler to the session - self.auth = MultiDomainBasicAuth() - - # Create our urllib3.Retry instance which will allow us to customize - # how we handle retries. - retries = urllib3.Retry( - # Set the total number of retries that a particular request can - # have. - total=retries, - - # A 503 error from PyPI typically means that the Fastly -> Origin - # connection got interrupted in some way. A 503 error in general - # is typically considered a transient error so we'll go ahead and - # retry it. - # A 500 may indicate transient error in Amazon S3 - # A 520 or 527 - may indicate transient error in CloudFlare - status_forcelist=[500, 503, 520, 527], - - # Add a small amount of back off between failed requests in - # order to prevent hammering the service. - backoff_factor=0.25, - ) - - # We want to _only_ cache responses on securely fetched origins. We do - # this because we can't validate the response of an insecurely fetched - # origin, and we don't want someone to be able to poison the cache and - # require manual eviction from the cache to fix it. - if cache: - secure_adapter = CacheControlAdapter( - cache=SafeFileCache(cache, use_dir_lock=True), - max_retries=retries, - ) - else: - secure_adapter = HTTPAdapter(max_retries=retries) - - # Our Insecure HTTPAdapter disables HTTPS validation. 
It does not - # support caching (see above) so we'll use it for all http:// URLs as - # well as any https:// host that we've marked as ignoring TLS errors - # for. - insecure_adapter = InsecureHTTPAdapter(max_retries=retries) - - self.mount("https://", secure_adapter) - self.mount("http://", insecure_adapter) - - # Enable file:// urls - self.mount("file://", LocalFSAdapter()) - - # We want to use a non-validating adapter for any requests which are - # deemed insecure. - for host in insecure_hosts: - self.mount("https://{}/".format(host), insecure_adapter) - - def request(self, method, url, *args, **kwargs): - # Allow setting a default timeout on a session - kwargs.setdefault("timeout", self.timeout) - - # Dispatch the actual request - return super(PipSession, self).request(method, url, *args, **kwargs) - - -def get_file_content(url, comes_from=None, session=None): - # type: (str, Optional[str], Optional[PipSession]) -> Tuple[str, Text] - """Gets the content of a file; it may be a filename, file: URL, or - http: URL. Returns (location, content). Content is unicode. - - :param url: File path or url. - :param comes_from: Origin description of requirements. - :param session: Instance of pip.download.PipSession. 
- """ - if session is None: - raise TypeError( - "get_file_content() missing 1 required keyword argument: 'session'" - ) - - match = _scheme_re.search(url) - if match: - scheme = match.group(1).lower() - if (scheme == 'file' and comes_from and - comes_from.startswith('http')): - raise InstallationError( - 'Requirements file %s references URL %s, which is local' - % (comes_from, url)) - if scheme == 'file': - path = url.split(':', 1)[1] - path = path.replace('\\', '/') - match = _url_slash_drive_re.match(path) - if match: - path = match.group(1) + ':' + path.split('|', 1)[1] - path = urllib_parse.unquote(path) - if path.startswith('/'): - path = '/' + path.lstrip('/') - url = path - else: - # FIXME: catch some errors - resp = session.get(url) - resp.raise_for_status() - return resp.url, resp.text - try: - with open(url, 'rb') as f: - content = auto_decode(f.read()) - except IOError as exc: - raise InstallationError( - 'Could not open requirements file: %s' % str(exc) - ) - return url, content - - -_scheme_re = re.compile(r'^(http|https|file):', re.I) -_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I) - - -def is_url(name): - # type: (Union[str, Text]) -> bool - """Returns true if the name looks like a URL""" - if ':' not in name: - return False - scheme = name.split(':', 1)[0].lower() - return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes - - -def url_to_path(url): - # type: (str) -> str - """ - Convert a file: URL to a path. - """ - assert url.startswith('file:'), ( - "You can only turn file: urls into filenames (not %r)" % url) - - _, netloc, path, _, _ = urllib_parse.urlsplit(url) - - # if we have a UNC path, prepend UNC share notation - if netloc: - netloc = '\\\\' + netloc - - path = urllib_request.url2pathname(netloc + path) - return path - - -def path_to_url(path): - # type: (Union[str, Text]) -> str - """ - Convert a path to a file: URL. The path will be made absolute and have - quoted path parts. 
- """ - path = os.path.normpath(os.path.abspath(path)) - url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path)) - return url - - -def is_archive_file(name): - # type: (str) -> bool - """Return True if `name` is a considered as an archive file.""" - ext = splitext(name)[1].lower() - if ext in ARCHIVE_EXTENSIONS: - return True - return False - - -def unpack_vcs_link(link, location): - vcs_backend = _get_used_vcs_backend(link) - vcs_backend.unpack(location) - - -def _get_used_vcs_backend(link): - for backend in vcs.backends: - if link.scheme in backend.schemes: - vcs_backend = backend(link.url) - return vcs_backend - - -def is_vcs_url(link): - # type: (Link) -> bool - return bool(_get_used_vcs_backend(link)) - - -def is_file_url(link): - # type: (Link) -> bool - return link.url.lower().startswith('file:') - - -def is_dir_url(link): - # type: (Link) -> bool - """Return whether a file:// Link points to a directory. - - ``link`` must not have any other scheme but file://. Call is_file_url() - first. - - """ - link_path = url_to_path(link.url_without_fragment) - return os.path.isdir(link_path) - - -def _progress_indicator(iterable, *args, **kwargs): - return iterable - - -def _download_url( - resp, # type: Response - link, # type: Link - content_file, # type: IO - hashes, # type: Hashes - progress_bar # type: str -): - # type: (...) -> None - try: - total_length = int(resp.headers['content-length']) - except (ValueError, KeyError, TypeError): - total_length = 0 - - cached_resp = getattr(resp, "from_cache", False) - if logger.getEffectiveLevel() > logging.INFO: - show_progress = False - elif cached_resp: - show_progress = False - elif total_length > (40 * 1000): - show_progress = True - elif not total_length: - show_progress = True - else: - show_progress = False - - show_url = link.show_url - - def resp_read(chunk_size): - try: - # Special case for urllib3. 
- for chunk in resp.raw.stream( - chunk_size, - # We use decode_content=False here because we don't - # want urllib3 to mess with the raw bytes we get - # from the server. If we decompress inside of - # urllib3 then we cannot verify the checksum - # because the checksum will be of the compressed - # file. This breakage will only occur if the - # server adds a Content-Encoding header, which - # depends on how the server was configured: - # - Some servers will notice that the file isn't a - # compressible file and will leave the file alone - # and with an empty Content-Encoding - # - Some servers will notice that the file is - # already compressed and will leave the file - # alone and will add a Content-Encoding: gzip - # header - # - Some servers won't notice anything at all and - # will take a file that's already been compressed - # and compress it again and set the - # Content-Encoding: gzip header - # - # By setting this not to decode automatically we - # hope to eliminate problems with the second case. - decode_content=False): - yield chunk - except AttributeError: - # Standard file-like object. 
- while True: - chunk = resp.raw.read(chunk_size) - if not chunk: - break - yield chunk - - def written_chunks(chunks): - for chunk in chunks: - content_file.write(chunk) - yield chunk - - progress_indicator = _progress_indicator - - if link.netloc == PyPI.netloc: - url = show_url - else: - url = link.url_without_fragment - - if show_progress: # We don't show progress on cached responses - progress_indicator = DownloadProgressProvider(progress_bar, - max=total_length) - if total_length: - logger.info("Downloading %s (%s)", url, format_size(total_length)) - else: - logger.info("Downloading %s", url) - elif cached_resp: - logger.info("Using cached %s", url) - else: - logger.info("Downloading %s", url) - - logger.debug('Downloading from URL %s', link) - - downloaded_chunks = written_chunks( - progress_indicator( - resp_read(CONTENT_CHUNK_SIZE), - CONTENT_CHUNK_SIZE - ) - ) - if hashes: - hashes.check_against_chunks(downloaded_chunks) - else: - consume(downloaded_chunks) - - -def _copy_file(filename, location, link): - copy = True - download_location = os.path.join(location, link.filename) - if os.path.exists(download_location): - response = ask_path_exists( - 'The file %s exists. 
(i)gnore, (w)ipe, (b)ackup, (a)abort' % - display_path(download_location), ('i', 'w', 'b', 'a')) - if response == 'i': - copy = False - elif response == 'w': - logger.warning('Deleting %s', display_path(download_location)) - os.remove(download_location) - elif response == 'b': - dest_file = backup_dir(download_location) - logger.warning( - 'Backing up %s to %s', - display_path(download_location), - display_path(dest_file), - ) - shutil.move(download_location, dest_file) - elif response == 'a': - sys.exit(-1) - if copy: - shutil.copy(filename, download_location) - logger.info('Saved %s', display_path(download_location)) - - -def unpack_http_url( - link, # type: Link - location, # type: str - download_dir=None, # type: Optional[str] - session=None, # type: Optional[PipSession] - hashes=None, # type: Optional[Hashes] - progress_bar="on" # type: str -): - # type: (...) -> None - if session is None: - raise TypeError( - "unpack_http_url() missing 1 required keyword argument: 'session'" - ) - - with TempDirectory(kind="unpack") as temp_dir: - # If a download dir is specified, is the file already downloaded there? - already_downloaded_path = None - if download_dir: - already_downloaded_path = _check_download_dir(link, - download_dir, - hashes) - - if already_downloaded_path: - from_path = already_downloaded_path - content_type = mimetypes.guess_type(from_path)[0] - else: - # let's download to a tmp dir - from_path, content_type = _download_http_url(link, - session, - temp_dir.path, - hashes, - progress_bar) - - # unpack the archive to the build dir location. 
even when only - # downloading archives, they have to be unpacked to parse dependencies - unpack_file(from_path, location, content_type, link) - - # a download dir is specified; let's copy the archive there - if download_dir and not already_downloaded_path: - _copy_file(from_path, download_dir, link) - - if not already_downloaded_path: - os.unlink(from_path) - - -def unpack_file_url( - link, # type: Link - location, # type: str - download_dir=None, # type: Optional[str] - hashes=None # type: Optional[Hashes] -): - # type: (...) -> None - """Unpack link into location. - - If download_dir is provided and link points to a file, make a copy - of the link file inside download_dir. - """ - link_path = url_to_path(link.url_without_fragment) - - # If it's a url to a local directory - if is_dir_url(link): - if os.path.isdir(location): - rmtree(location) - shutil.copytree(link_path, location, symlinks=True) - if download_dir: - logger.info('Link is a directory, ignoring download_dir') - return - - # If --require-hashes is off, `hashes` is either empty, the - # link's embedded hash, or MissingHashes; it is required to - # match. If --require-hashes is on, we are satisfied by any - # hash in `hashes` matching: a URL-based or an option-based - # one; no internet-sourced hash will be in `hashes`. - if hashes: - hashes.check_against_path(link_path) - - # If a download dir is specified, is the file already there and valid? - already_downloaded_path = None - if download_dir: - already_downloaded_path = _check_download_dir(link, - download_dir, - hashes) - - if already_downloaded_path: - from_path = already_downloaded_path - else: - from_path = link_path - - content_type = mimetypes.guess_type(from_path)[0] - - # unpack the archive to the build dir location. 
even when only downloading - # archives, they have to be unpacked to parse dependencies - unpack_file(from_path, location, content_type, link) - - # a download dir is specified and not already downloaded - if download_dir and not already_downloaded_path: - _copy_file(from_path, download_dir, link) - - -def _copy_dist_from_dir(link_path, location): - """Copy distribution files in `link_path` to `location`. - - Invoked when user requests to install a local directory. E.g.: - - pip install . - pip install ~/dev/git-repos/python-prompt-toolkit - - """ - - # Note: This is currently VERY SLOW if you have a lot of data in the - # directory, because it copies everything with `shutil.copytree`. - # What it should really do is build an sdist and install that. - # See https://github.com/pypa/pip/issues/2195 - - if os.path.isdir(location): - rmtree(location) - - # build an sdist - setup_py = 'setup.py' - sdist_args = [sys.executable] - sdist_args.append('-c') - sdist_args.append(SETUPTOOLS_SHIM % setup_py) - sdist_args.append('sdist') - sdist_args += ['--dist-dir', location] - logger.info('Running setup.py sdist for %s', link_path) - - with indent_log(): - call_subprocess(sdist_args, cwd=link_path, show_stdout=False) - - # unpack sdist into `location` - sdist = os.path.join(location, os.listdir(location)[0]) - logger.info('Unpacking sdist %s into %s', sdist, location) - unpack_file(sdist, location, content_type=None, link=None) - - -class PipXmlrpcTransport(xmlrpc_client.Transport): - """Provide a `xmlrpclib.Transport` implementation via a `PipSession` - object. 
- """ - - def __init__(self, index_url, session, use_datetime=False): - xmlrpc_client.Transport.__init__(self, use_datetime) - index_parts = urllib_parse.urlparse(index_url) - self._scheme = index_parts.scheme - self._session = session - - def request(self, host, handler, request_body, verbose=False): - parts = (self._scheme, host, handler, None, None, None) - url = urllib_parse.urlunparse(parts) - try: - headers = {'Content-Type': 'text/xml'} - response = self._session.post(url, data=request_body, - headers=headers, stream=True) - response.raise_for_status() - self.verbose = verbose - return self.parse_response(response.raw) - except requests.HTTPError as exc: - logger.critical( - "HTTP error %s while getting %s", - exc.response.status_code, url, - ) - raise - - -def unpack_url( - link, # type: Optional[Link] - location, # type: Optional[str] - download_dir=None, # type: Optional[str] - only_download=False, # type: bool - session=None, # type: Optional[PipSession] - hashes=None, # type: Optional[Hashes] - progress_bar="on" # type: str -): - # type: (...) -> None - """Unpack link. - If link is a VCS link: - if only_download, export into download_dir and ignore location - else unpack into location - for other types of link: - - unpack into location - - if download_dir, copy the file into download_dir - - if only_download, mark location for deletion - - :param hashes: A Hashes object, one of whose embedded hashes must match, - or HashMismatch will be raised. If the Hashes is empty, no matches are - required, and unhashable types of requirements (like VCS ones, which - would ordinarily raise HashUnsupported) are allowed. 
- """ - # non-editable vcs urls - if is_vcs_url(link): - unpack_vcs_link(link, location) - - # file urls - elif is_file_url(link): - unpack_file_url(link, location, download_dir, hashes=hashes) - - # http urls - else: - if session is None: - session = PipSession() - - unpack_http_url( - link, - location, - download_dir, - session, - hashes=hashes, - progress_bar=progress_bar - ) - if only_download: - write_delete_marker_file(location) - - -def _download_http_url( - link, # type: Link - session, # type: PipSession - temp_dir, # type: str - hashes, # type: Hashes - progress_bar # type: str -): - # type: (...) -> Tuple[str, str] - """Download link url into temp_dir using provided session""" - target_url = link.url.split('#', 1)[0] - try: - resp = session.get( - target_url, - # We use Accept-Encoding: identity here because requests - # defaults to accepting compressed responses. This breaks in - # a variety of ways depending on how the server is configured. - # - Some servers will notice that the file isn't a compressible - # file and will leave the file alone and with an empty - # Content-Encoding - # - Some servers will notice that the file is already - # compressed and will leave the file alone and will add a - # Content-Encoding: gzip header - # - Some servers won't notice anything at all and will take - # a file that's already been compressed and compress it again - # and set the Content-Encoding: gzip header - # By setting this to request only the identity encoding We're - # hoping to eliminate the third case. Hopefully there does not - # exist a server which when given a file will notice it is - # already compressed and that you're not asking for a - # compressed file and will then decompress it before sending - # because if that's the case I don't think it'll ever be - # possible to make this work. 
- headers={"Accept-Encoding": "identity"}, - stream=True, - ) - resp.raise_for_status() - except requests.HTTPError as exc: - logger.critical( - "HTTP error %s while getting %s", exc.response.status_code, link, - ) - raise - - content_type = resp.headers.get('content-type', '') - filename = link.filename # fallback - # Have a look at the Content-Disposition header for a better guess - content_disposition = resp.headers.get('content-disposition') - if content_disposition: - type, params = cgi.parse_header(content_disposition) - # We use ``or`` here because we don't want to use an "empty" value - # from the filename param. - filename = params.get('filename') or filename - ext = splitext(filename)[1] - if not ext: - ext = mimetypes.guess_extension(content_type) - if ext: - filename += ext - if not ext and link.url != resp.url: - ext = os.path.splitext(resp.url)[1] - if ext: - filename += ext - file_path = os.path.join(temp_dir, filename) - with open(file_path, 'wb') as content_file: - _download_url(resp, link, content_file, hashes, progress_bar) - return file_path, content_type - - -def _check_download_dir(link, download_dir, hashes): - # type: (Link, str, Hashes) -> Optional[str] - """ Check download_dir for previously downloaded file with correct hash - If a correct file is found return its path else None - """ - download_path = os.path.join(download_dir, link.filename) - if os.path.exists(download_path): - # If already downloaded, does its hash match? - logger.info('File was already downloaded %s', download_path) - if hashes: - try: - hashes.check_against_path(download_path) - except HashMismatch: - logger.warning( - 'Previously-downloaded file %s has bad hash. 
' - 'Re-downloading.', - download_path - ) - os.unlink(download_path) - return None - return download_path - return None diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/exceptions.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/exceptions.py index 38ceeea9..3f26215d 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/exceptions.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/exceptions.py @@ -1,4 +1,5 @@ """Exceptions used throughout package""" + from __future__ import absolute_import from itertools import chain, groupby, repeat @@ -8,8 +9,19 @@ from pip._vendor.six import iteritems from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Optional # noqa: F401 - from pip._internal.req.req_install import InstallRequirement # noqa: F401 + from typing import Any, Optional, List, Dict, Text + + from pip._vendor.pkg_resources import Distribution + from pip._vendor.requests.models import Response, Request + from pip._vendor.six import PY3 + from pip._vendor.six.moves import configparser + + from pip._internal.req.req_install import InstallRequirement + + if PY3: + from hashlib import _Hash + else: + from hashlib import _hash as _Hash class PipError(Exception): @@ -28,6 +40,36 @@ class UninstallationError(PipError): """General exception during uninstallation""" +class NoneMetadataError(PipError): + """ + Raised when accessing "METADATA" or "PKG-INFO" metadata for a + pip._vendor.pkg_resources.Distribution object and + `dist.has_metadata('METADATA')` returns True but + `dist.get_metadata('METADATA')` returns None (and similarly for + "PKG-INFO"). + """ + + def __init__(self, dist, metadata_name): + # type: (Distribution, str) -> None + """ + :param dist: A Distribution object. + :param metadata_name: The name of the metadata being accessed + (can be "METADATA" or "PKG-INFO"). 
+ """ + self.dist = dist + self.metadata_name = metadata_name + + def __str__(self): + # type: () -> str + # Use `dist` in the error message because its stringification + # includes more information, like the version and location. + return ( + 'None {} metadata found for distribution: {}'.format( + self.metadata_name, self.dist, + ) + ) + + class DistributionNotFound(InstallationError): """Raised when a distribution cannot be found to satisfy a requirement""" @@ -49,10 +91,38 @@ class CommandError(PipError): """Raised when there is an error in command-line arguments""" +class SubProcessError(PipError): + """Raised when there is an error raised while executing a + command in subprocess""" + + class PreviousBuildDirError(PipError): """Raised when there's a previous conflicting build directory""" +class NetworkConnectionError(PipError): + """HTTP connection error""" + + def __init__(self, error_msg, response=None, request=None): + # type: (Text, Response, Request) -> None + """ + Initialize NetworkConnectionError with `request` and `response` + objects. + """ + self.response = response + self.request = request + self.error_msg = error_msg + if (self.response is not None and not self.request and + hasattr(response, 'request')): + self.request = self.response.request + super(NetworkConnectionError, self).__init__( + error_msg, response, request) + + def __str__(self): + # type: () -> str + return str(self.error_msg) + + class InvalidWheelFilename(InstallationError): """Invalid wheel filename.""" @@ -61,16 +131,39 @@ class UnsupportedWheel(InstallationError): """Unsupported wheel.""" +class MetadataInconsistent(InstallationError): + """Built metadata contains inconsistent information. + + This is raised when the metadata contains values (e.g. name and version) + that do not match the information previously obtained from sdist filename + or user-supplied ``#egg=`` value. 
+ """ + def __init__(self, ireq, field, built): + # type: (InstallRequirement, str, Any) -> None + self.ireq = ireq + self.field = field + self.built = built + + def __str__(self): + # type: () -> str + return "Requested {} has different {} in metadata: {!r}".format( + self.ireq, self.field, self.built, + ) + + class HashErrors(InstallationError): """Multiple HashError instances rolled into one for reporting""" def __init__(self): - self.errors = [] + # type: () -> None + self.errors = [] # type: List[HashError] def append(self, error): + # type: (HashError) -> None self.errors.append(error) def __str__(self): + # type: () -> str lines = [] self.errors.sort(key=lambda e: e.order) for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__): @@ -78,11 +171,14 @@ class HashErrors(InstallationError): lines.extend(e.body() for e in errors_of_cls) if lines: return '\n'.join(lines) + return '' def __nonzero__(self): + # type: () -> bool return bool(self.errors) def __bool__(self): + # type: () -> bool return self.__nonzero__() @@ -104,23 +200,27 @@ class HashError(InstallationError): """ req = None # type: Optional[InstallRequirement] head = '' + order = None # type: Optional[int] def body(self): + # type: () -> str """Return a summary of me for display under the heading. This default implementation simply prints a description of the triggering requirement. :param req: The InstallRequirement that provoked this error, with - populate_link() having already been called + its link already populated by the resolver's _populate_link(). """ - return ' %s' % self._requirement_name() + return ' {}'.format(self._requirement_name()) def __str__(self): - return '%s\n%s' % (self.head, self.body()) + # type: () -> str + return '{}\n{}'.format(self.head, self.body()) def _requirement_name(self): + # type: () -> str """Return a description of the requirement that triggered me. 
This default implementation returns long description of the req, with @@ -161,6 +261,7 @@ class HashMissing(HashError): 'has a hash.)') def __init__(self, gotten_hash): + # type: (str) -> None """ :param gotten_hash: The hash of the (possibly malicious) archive we just downloaded @@ -168,6 +269,7 @@ class HashMissing(HashError): self.gotten_hash = gotten_hash def body(self): + # type: () -> str # Dodge circular import. from pip._internal.utils.hashes import FAVORITE_HASH @@ -180,9 +282,9 @@ class HashMissing(HashError): # In case someone feeds something downright stupid # to InstallRequirement's constructor. else getattr(self.req, 'req', None)) - return ' %s --hash=%s:%s' % (package or 'unknown package', - FAVORITE_HASH, - self.gotten_hash) + return ' {} --hash={}:{}'.format(package or 'unknown package', + FAVORITE_HASH, + self.gotten_hash) class HashUnpinned(HashError): @@ -210,6 +312,7 @@ class HashMismatch(HashError): 'someone may have tampered with them.') def __init__(self, allowed, gots): + # type: (Dict[str, List[str]], Dict[str, _Hash]) -> None """ :param allowed: A dict of algorithm names pointing to lists of allowed hex digests @@ -220,10 +323,12 @@ class HashMismatch(HashError): self.gots = gots def body(self): - return ' %s:\n%s' % (self._requirement_name(), - self._hash_comparison()) + # type: () -> str + return ' {}:\n{}'.format(self._requirement_name(), + self._hash_comparison()) def _hash_comparison(self): + # type: () -> str """ Return a comparison of actual and expected hash values. @@ -235,18 +340,18 @@ class HashMismatch(HashError): """ def hash_then_or(hash_name): + # type: (str) -> chain[str] # For now, all the decent hashes have 6-char names, so we can get # away with hard-coding space literals. 
return chain([hash_name], repeat(' or')) - lines = [] + lines = [] # type: List[str] for hash_name, expecteds in iteritems(self.allowed): prefix = hash_then_or(hash_name) - lines.extend((' Expected %s %s' % (next(prefix), e)) + lines.extend((' Expected {} {}'.format(next(prefix), e)) for e in expecteds) - lines.append(' Got %s\n' % - self.gots[hash_name].hexdigest()) - prefix = ' or' + lines.append(' Got {}\n'.format( + self.gots[hash_name].hexdigest())) return '\n'.join(lines) @@ -260,15 +365,17 @@ class ConfigurationFileCouldNotBeLoaded(ConfigurationError): """ def __init__(self, reason="could not be loaded", fname=None, error=None): + # type: (str, Optional[str], Optional[configparser.Error]) -> None super(ConfigurationFileCouldNotBeLoaded, self).__init__(error) self.reason = reason self.fname = fname self.error = error def __str__(self): + # type: () -> str if self.fname is not None: message_part = " in {}.".format(self.fname) else: assert self.error is not None - message_part = ".\n{}\n".format(self.error.message) + message_part = ".\n{}\n".format(self.error) return "Configuration file {}{}".format(self.reason, message_part) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/index.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/index.py deleted file mode 100644 index 9eda3a35..00000000 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/index.py +++ /dev/null @@ -1,990 +0,0 @@ -"""Routines related to PyPI, indexes""" -from __future__ import absolute_import - -import cgi -import itertools -import logging -import mimetypes -import os -import posixpath -import re -import sys -from collections import namedtuple - -from pip._vendor import html5lib, requests, six -from pip._vendor.distlib.compat import unescape -from pip._vendor.packaging import specifiers -from pip._vendor.packaging.utils import canonicalize_name -from pip._vendor.packaging.version import parse as parse_version -from 
pip._vendor.requests.exceptions import RetryError, SSLError -from pip._vendor.six.moves.urllib import parse as urllib_parse -from pip._vendor.six.moves.urllib import request as urllib_request - -from pip._internal.download import HAS_TLS, is_url, path_to_url, url_to_path -from pip._internal.exceptions import ( - BestVersionAlreadyInstalled, DistributionNotFound, InvalidWheelFilename, - UnsupportedWheel, -) -from pip._internal.models.candidate import InstallationCandidate -from pip._internal.models.format_control import FormatControl -from pip._internal.models.index import PyPI -from pip._internal.models.link import Link -from pip._internal.pep425tags import get_supported -from pip._internal.utils.compat import ipaddress -from pip._internal.utils.logging import indent_log -from pip._internal.utils.misc import ( - ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, WHEEL_EXTENSION, normalize_path, - redact_password_from_url, -) -from pip._internal.utils.packaging import check_requires_python -from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.wheel import Wheel - -if MYPY_CHECK_RUNNING: - from logging import Logger # noqa: F401 - from typing import ( # noqa: F401 - Tuple, Optional, Any, List, Union, Callable, Set, Sequence, - Iterable, MutableMapping - ) - from pip._vendor.packaging.version import _BaseVersion # noqa: F401 - from pip._vendor.requests import Response # noqa: F401 - from pip._internal.req import InstallRequirement # noqa: F401 - from pip._internal.download import PipSession # noqa: F401 - - SecureOrigin = Tuple[str, str, Optional[str]] - BuildTag = Tuple[Any, ...] 
# either emply tuple or Tuple[int, str] - CandidateSortingKey = Tuple[int, _BaseVersion, BuildTag, Optional[int]] - -__all__ = ['FormatControl', 'PackageFinder'] - - -SECURE_ORIGINS = [ - # protocol, hostname, port - # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC) - ("https", "*", "*"), - ("*", "localhost", "*"), - ("*", "127.0.0.0/8", "*"), - ("*", "::1/128", "*"), - ("file", "*", None), - # ssh is always secure. - ("ssh", "*", "*"), -] # type: List[SecureOrigin] - - -logger = logging.getLogger(__name__) - - -def _match_vcs_scheme(url): - # type: (str) -> Optional[str] - """Look for VCS schemes in the URL. - - Returns the matched VCS scheme, or None if there's no match. - """ - from pip._internal.vcs import VcsSupport - for scheme in VcsSupport.schemes: - if url.lower().startswith(scheme) and url[len(scheme)] in '+:': - return scheme - return None - - -def _is_url_like_archive(url): - # type: (str) -> bool - """Return whether the URL looks like an archive. - """ - filename = Link(url).filename - for bad_ext in ARCHIVE_EXTENSIONS: - if filename.endswith(bad_ext): - return True - return False - - -class _NotHTML(Exception): - def __init__(self, content_type, request_desc): - # type: (str, str) -> None - super(_NotHTML, self).__init__(content_type, request_desc) - self.content_type = content_type - self.request_desc = request_desc - - -def _ensure_html_header(response): - # type: (Response) -> None - """Check the Content-Type header to ensure the response contains HTML. - - Raises `_NotHTML` if the content type is not text/html. - """ - content_type = response.headers.get("Content-Type", "") - if not content_type.lower().startswith("text/html"): - raise _NotHTML(content_type, response.request.method) - - -class _NotHTTP(Exception): - pass - - -def _ensure_html_response(url, session): - # type: (str, PipSession) -> None - """Send a HEAD request to the URL, and ensure the response contains HTML. 
- - Raises `_NotHTTP` if the URL is not available for a HEAD request, or - `_NotHTML` if the content type is not text/html. - """ - scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url) - if scheme not in {'http', 'https'}: - raise _NotHTTP() - - resp = session.head(url, allow_redirects=True) - resp.raise_for_status() - - _ensure_html_header(resp) - - -def _get_html_response(url, session): - # type: (str, PipSession) -> Response - """Access an HTML page with GET, and return the response. - - This consists of three parts: - - 1. If the URL looks suspiciously like an archive, send a HEAD first to - check the Content-Type is HTML, to avoid downloading a large file. - Raise `_NotHTTP` if the content type cannot be determined, or - `_NotHTML` if it is not HTML. - 2. Actually perform the request. Raise HTTP exceptions on network failures. - 3. Check the Content-Type header to make sure we got HTML, and raise - `_NotHTML` otherwise. - """ - if _is_url_like_archive(url): - _ensure_html_response(url, session=session) - - logger.debug('Getting page %s', url) - - resp = session.get( - url, - headers={ - "Accept": "text/html", - # We don't want to blindly returned cached data for - # /simple/, because authors generally expecting that - # twine upload && pip install will function, but if - # they've done a pip install in the last ~10 minutes - # it won't. Thus by setting this to zero we will not - # blindly use any cached data, however the benefit of - # using max-age=0 instead of no-cache, is that we will - # still support conditional requests, so we will still - # minimize traffic sent in cases where the page hasn't - # changed at all, we will just always incur the round - # trip for the conditional GET now instead of only - # once per 10 minutes. - # For more information, please see pypa/pip#5670. 
- "Cache-Control": "max-age=0", - }, - ) - resp.raise_for_status() - - # The check for archives above only works if the url ends with - # something that looks like an archive. However that is not a - # requirement of an url. Unless we issue a HEAD request on every - # url we cannot know ahead of time for sure if something is HTML - # or not. However we can check after we've downloaded it. - _ensure_html_header(resp) - - return resp - - -def _handle_get_page_fail( - link, # type: Link - reason, # type: Union[str, Exception] - meth=None # type: Optional[Callable[..., None]] -): - # type: (...) -> None - if meth is None: - meth = logger.debug - meth("Could not fetch URL %s: %s - skipping", link, reason) - - -def _get_html_page(link, session=None): - # type: (Link, Optional[PipSession]) -> Optional[HTMLPage] - if session is None: - raise TypeError( - "_get_html_page() missing 1 required keyword argument: 'session'" - ) - - url = link.url.split('#', 1)[0] - - # Check for VCS schemes that do not support lookup as web pages. 
- vcs_scheme = _match_vcs_scheme(url) - if vcs_scheme: - logger.debug('Cannot look at %s URL %s', vcs_scheme, link) - return None - - # Tack index.html onto file:// URLs that point to directories - scheme, _, path, _, _, _ = urllib_parse.urlparse(url) - if (scheme == 'file' and os.path.isdir(urllib_request.url2pathname(path))): - # add trailing slash if not present so urljoin doesn't trim - # final segment - if not url.endswith('/'): - url += '/' - url = urllib_parse.urljoin(url, 'index.html') - logger.debug(' file: URL is directory, getting %s', url) - - try: - resp = _get_html_response(url, session=session) - except _NotHTTP as exc: - logger.debug( - 'Skipping page %s because it looks like an archive, and cannot ' - 'be checked by HEAD.', link, - ) - except _NotHTML as exc: - logger.debug( - 'Skipping page %s because the %s request got Content-Type: %s', - link, exc.request_desc, exc.content_type, - ) - except requests.HTTPError as exc: - _handle_get_page_fail(link, exc) - except RetryError as exc: - _handle_get_page_fail(link, exc) - except SSLError as exc: - reason = "There was a problem confirming the ssl certificate: " - reason += str(exc) - _handle_get_page_fail(link, reason, meth=logger.info) - except requests.ConnectionError as exc: - _handle_get_page_fail(link, "connection error: %s" % exc) - except requests.Timeout: - _handle_get_page_fail(link, "timed out") - else: - return HTMLPage(resp.content, resp.url, resp.headers) - return None - - -class PackageFinder(object): - """This finds packages. - - This is meant to match easy_install's technique for looking for - packages, by reading pages and looking for appropriate links. 
- """ - - def __init__( - self, - find_links, # type: List[str] - index_urls, # type: List[str] - allow_all_prereleases=False, # type: bool - trusted_hosts=None, # type: Optional[Iterable[str]] - session=None, # type: Optional[PipSession] - format_control=None, # type: Optional[FormatControl] - platform=None, # type: Optional[str] - versions=None, # type: Optional[List[str]] - abi=None, # type: Optional[str] - implementation=None, # type: Optional[str] - prefer_binary=False # type: bool - ): - # type: (...) -> None - """Create a PackageFinder. - - :param format_control: A FormatControl object or None. Used to control - the selection of source packages / binary packages when consulting - the index and links. - :param platform: A string or None. If None, searches for packages - that are supported by the current system. Otherwise, will find - packages that can be built on the platform passed in. These - packages will only be downloaded for distribution: they will - not be built locally. - :param versions: A list of strings or None. This is passed directly - to pep425tags.py in the get_supported() method. - :param abi: A string or None. This is passed directly - to pep425tags.py in the get_supported() method. - :param implementation: A string or None. This is passed directly - to pep425tags.py in the get_supported() method. - """ - if session is None: - raise TypeError( - "PackageFinder() missing 1 required keyword argument: " - "'session'" - ) - - # Build find_links. If an argument starts with ~, it may be - # a local file relative to a home directory. So try normalizing - # it and if it exists, use the normalized version. - # This is deliberately conservative - it might be fine just to - # blindly normalize anything starting with a ~... 
- self.find_links = [] # type: List[str] - for link in find_links: - if link.startswith('~'): - new_link = normalize_path(link) - if os.path.exists(new_link): - link = new_link - self.find_links.append(link) - - self.index_urls = index_urls - - # These are boring links that have already been logged somehow: - self.logged_links = set() # type: Set[Link] - - self.format_control = format_control or FormatControl(set(), set()) - - # Domains that we won't emit warnings for when not using HTTPS - self.secure_origins = [ - ("*", host, "*") - for host in (trusted_hosts if trusted_hosts else []) - ] # type: List[SecureOrigin] - - # Do we want to allow _all_ pre-releases? - self.allow_all_prereleases = allow_all_prereleases - - # The Session we'll use to make requests - self.session = session - - # The valid tags to check potential found wheel candidates against - self.valid_tags = get_supported( - versions=versions, - platform=platform, - abi=abi, - impl=implementation, - ) - - # Do we prefer old, but valid, binary dist over new source dist - self.prefer_binary = prefer_binary - - # If we don't have TLS enabled, then WARN if anyplace we're looking - # relies on TLS. - if not HAS_TLS: - for link in itertools.chain(self.index_urls, self.find_links): - parsed = urllib_parse.urlparse(link) - if parsed.scheme == "https": - logger.warning( - "pip is configured with locations that require " - "TLS/SSL, however the ssl module in Python is not " - "available." 
- ) - break - - def get_formatted_locations(self): - # type: () -> str - lines = [] - if self.index_urls and self.index_urls != [PyPI.simple_url]: - lines.append( - "Looking in indexes: {}".format(", ".join( - redact_password_from_url(url) for url in self.index_urls)) - ) - if self.find_links: - lines.append( - "Looking in links: {}".format(", ".join(self.find_links)) - ) - return "\n".join(lines) - - @staticmethod - def _sort_locations(locations, expand_dir=False): - # type: (Sequence[str], bool) -> Tuple[List[str], List[str]] - """ - Sort locations into "files" (archives) and "urls", and return - a pair of lists (files,urls) - """ - files = [] - urls = [] - - # puts the url for the given file path into the appropriate list - def sort_path(path): - url = path_to_url(path) - if mimetypes.guess_type(url, strict=False)[0] == 'text/html': - urls.append(url) - else: - files.append(url) - - for url in locations: - - is_local_path = os.path.exists(url) - is_file_url = url.startswith('file:') - - if is_local_path or is_file_url: - if is_local_path: - path = url - else: - path = url_to_path(url) - if os.path.isdir(path): - if expand_dir: - path = os.path.realpath(path) - for item in os.listdir(path): - sort_path(os.path.join(path, item)) - elif is_file_url: - urls.append(url) - else: - logger.warning( - "Path '{0}' is ignored: " - "it is a directory.".format(path), - ) - elif os.path.isfile(path): - sort_path(path) - else: - logger.warning( - "Url '%s' is ignored: it is neither a file " - "nor a directory.", url, - ) - elif is_url(url): - # Only add url with clear scheme - urls.append(url) - else: - logger.warning( - "Url '%s' is ignored. It is either a non-existing " - "path or lacks a specific scheme.", url, - ) - - return files, urls - - def _candidate_sort_key(self, candidate): - # type: (InstallationCandidate) -> CandidateSortingKey - """ - Function used to generate link sort key for link tuples. - The greater the return value, the more preferred it is. 
- If not finding wheels, then sorted by version only. - If finding wheels, then the sort order is by version, then: - 1. existing installs - 2. wheels ordered via Wheel.support_index_min(self.valid_tags) - 3. source archives - If prefer_binary was set, then all wheels are sorted above sources. - Note: it was considered to embed this logic into the Link - comparison operators, but then different sdist links - with the same version, would have to be considered equal - """ - support_num = len(self.valid_tags) - build_tag = tuple() # type: BuildTag - binary_preference = 0 - if candidate.location.is_wheel: - # can raise InvalidWheelFilename - wheel = Wheel(candidate.location.filename) - if not wheel.supported(self.valid_tags): - raise UnsupportedWheel( - "%s is not a supported wheel for this platform. It " - "can't be sorted." % wheel.filename - ) - if self.prefer_binary: - binary_preference = 1 - pri = -(wheel.support_index_min(self.valid_tags)) - if wheel.build_tag is not None: - match = re.match(r'^(\d+)(.*)$', wheel.build_tag) - build_tag_groups = match.groups() - build_tag = (int(build_tag_groups[0]), build_tag_groups[1]) - else: # sdist - pri = -(support_num) - return (binary_preference, candidate.version, build_tag, pri) - - def _validate_secure_origin(self, logger, location): - # type: (Logger, Link) -> bool - # Determine if this url used a secure transport mechanism - parsed = urllib_parse.urlparse(str(location)) - origin = (parsed.scheme, parsed.hostname, parsed.port) - - # The protocol to use to see if the protocol matches. - # Don't count the repository type as part of the protocol: in - # cases such as "git+ssh", only use "ssh". (I.e., Only verify against - # the last scheme.) - protocol = origin[0].rsplit('+', 1)[-1] - - # Determine if our origin is a secure origin by looking through our - # hardcoded list of secure origins, as well as any additional ones - # configured on this PackageFinder instance. 
- for secure_origin in (SECURE_ORIGINS + self.secure_origins): - if protocol != secure_origin[0] and secure_origin[0] != "*": - continue - - try: - # We need to do this decode dance to ensure that we have a - # unicode object, even on Python 2.x. - addr = ipaddress.ip_address( - origin[1] - if ( - isinstance(origin[1], six.text_type) or - origin[1] is None - ) - else origin[1].decode("utf8") - ) - network = ipaddress.ip_network( - secure_origin[1] - if isinstance(secure_origin[1], six.text_type) - # setting secure_origin[1] to proper Union[bytes, str] - # creates problems in other places - else secure_origin[1].decode("utf8") # type: ignore - ) - except ValueError: - # We don't have both a valid address or a valid network, so - # we'll check this origin against hostnames. - if (origin[1] and - origin[1].lower() != secure_origin[1].lower() and - secure_origin[1] != "*"): - continue - else: - # We have a valid address and network, so see if the address - # is contained within the network. - if addr not in network: - continue - - # Check to see if the port patches - if (origin[2] != secure_origin[2] and - secure_origin[2] != "*" and - secure_origin[2] is not None): - continue - - # If we've gotten here, then this origin matches the current - # secure origin and we should return True - return True - - # If we've gotten to this point, then the origin isn't secure and we - # will not accept it as a valid location to search. We will however - # log a warning that we are ignoring it. - logger.warning( - "The repository located at %s is not a trusted or secure host and " - "is being ignored. 
If this repository is available via HTTPS we " - "recommend you use HTTPS instead, otherwise you may silence " - "this warning and allow it anyway with '--trusted-host %s'.", - parsed.hostname, - parsed.hostname, - ) - - return False - - def _get_index_urls_locations(self, project_name): - # type: (str) -> List[str] - """Returns the locations found via self.index_urls - - Checks the url_name on the main (first in the list) index and - use this url_name to produce all locations - """ - - def mkurl_pypi_url(url): - loc = posixpath.join( - url, - urllib_parse.quote(canonicalize_name(project_name))) - # For maximum compatibility with easy_install, ensure the path - # ends in a trailing slash. Although this isn't in the spec - # (and PyPI can handle it without the slash) some other index - # implementations might break if they relied on easy_install's - # behavior. - if not loc.endswith('/'): - loc = loc + '/' - return loc - - return [mkurl_pypi_url(url) for url in self.index_urls] - - def find_all_candidates(self, project_name): - # type: (str) -> List[Optional[InstallationCandidate]] - """Find all available InstallationCandidate for project_name - - This checks index_urls and find_links. - All versions found are returned as an InstallationCandidate list. - - See _link_package_versions for details on which files are accepted - """ - index_locations = self._get_index_urls_locations(project_name) - index_file_loc, index_url_loc = self._sort_locations(index_locations) - fl_file_loc, fl_url_loc = self._sort_locations( - self.find_links, expand_dir=True, - ) - - file_locations = (Link(url) for url in itertools.chain( - index_file_loc, fl_file_loc, - )) - - # We trust every url that the user has given us whether it was given - # via --index-url or --find-links. - # We want to filter out any thing which does not have a secure origin. 
- url_locations = [ - link for link in itertools.chain( - (Link(url) for url in index_url_loc), - (Link(url) for url in fl_url_loc), - ) - if self._validate_secure_origin(logger, link) - ] - - logger.debug('%d location(s) to search for versions of %s:', - len(url_locations), project_name) - - for location in url_locations: - logger.debug('* %s', location) - - canonical_name = canonicalize_name(project_name) - formats = self.format_control.get_allowed_formats(canonical_name) - search = Search(project_name, canonical_name, formats) - find_links_versions = self._package_versions( - # We trust every directly linked archive in find_links - (Link(url, '-f') for url in self.find_links), - search - ) - - page_versions = [] - for page in self._get_pages(url_locations, project_name): - logger.debug('Analyzing links from page %s', page.url) - with indent_log(): - page_versions.extend( - self._package_versions(page.iter_links(), search) - ) - - file_versions = self._package_versions(file_locations, search) - if file_versions: - file_versions.sort(reverse=True) - logger.debug( - 'Local files found: %s', - ', '.join([ - url_to_path(candidate.location.url) - for candidate in file_versions - ]) - ) - - # This is an intentional priority ordering - return file_versions + find_links_versions + page_versions - - def find_requirement(self, req, upgrade): - # type: (InstallRequirement, bool) -> Optional[Link] - """Try to find a Link matching req - - Expects req, an InstallRequirement and upgrade, a boolean - Returns a Link if found, - Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise - """ - all_candidates = self.find_all_candidates(req.name) - - # Filter out anything which doesn't match our specifier - compatible_versions = set( - req.specifier.filter( - # We turn the version object into a str here because otherwise - # when we're debundled but setuptools isn't, Python will see - # packaging.version.Version and - # pkg_resources._vendor.packaging.version.Version as 
different - # types. This way we'll use a str as a common data interchange - # format. If we stop using the pkg_resources provided specifier - # and start using our own, we can drop the cast to str(). - [str(c.version) for c in all_candidates], - prereleases=( - self.allow_all_prereleases - if self.allow_all_prereleases else None - ), - ) - ) - applicable_candidates = [ - # Again, converting to str to deal with debundling. - c for c in all_candidates if str(c.version) in compatible_versions - ] - - if applicable_candidates: - best_candidate = max(applicable_candidates, - key=self._candidate_sort_key) - else: - best_candidate = None - - if req.satisfied_by is not None: - installed_version = parse_version(req.satisfied_by.version) - else: - installed_version = None - - if installed_version is None and best_candidate is None: - logger.critical( - 'Could not find a version that satisfies the requirement %s ' - '(from versions: %s)', - req, - ', '.join( - sorted( - {str(c.version) for c in all_candidates}, - key=parse_version, - ) - ) - ) - - raise DistributionNotFound( - 'No matching distribution found for %s' % req - ) - - best_installed = False - if installed_version and ( - best_candidate is None or - best_candidate.version <= installed_version): - best_installed = True - - if not upgrade and installed_version is not None: - if best_installed: - logger.debug( - 'Existing installed version (%s) is most up-to-date and ' - 'satisfies requirement', - installed_version, - ) - else: - logger.debug( - 'Existing installed version (%s) satisfies requirement ' - '(most up-to-date version is %s)', - installed_version, - best_candidate.version, - ) - return None - - if best_installed: - # We have an existing version, and its the best version - logger.debug( - 'Installed version (%s) is most up-to-date (past versions: ' - '%s)', - installed_version, - ', '.join(sorted(compatible_versions, key=parse_version)) or - "none", - ) - raise BestVersionAlreadyInstalled - - logger.debug( 
- 'Using version %s (newest of versions: %s)', - best_candidate.version, - ', '.join(sorted(compatible_versions, key=parse_version)) - ) - return best_candidate.location - - def _get_pages(self, locations, project_name): - # type: (Iterable[Link], str) -> Iterable[HTMLPage] - """ - Yields (page, page_url) from the given locations, skipping - locations that have errors. - """ - seen = set() # type: Set[Link] - for location in locations: - if location in seen: - continue - seen.add(location) - - page = _get_html_page(location, session=self.session) - if page is None: - continue - - yield page - - _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$') - - def _sort_links(self, links): - # type: (Iterable[Link]) -> List[Link] - """ - Returns elements of links in order, non-egg links first, egg links - second, while eliminating duplicates - """ - eggs, no_eggs = [], [] - seen = set() # type: Set[Link] - for link in links: - if link not in seen: - seen.add(link) - if link.egg_fragment: - eggs.append(link) - else: - no_eggs.append(link) - return no_eggs + eggs - - def _package_versions( - self, - links, # type: Iterable[Link] - search # type: Search - ): - # type: (...) 
-> List[Optional[InstallationCandidate]] - result = [] - for link in self._sort_links(links): - v = self._link_package_versions(link, search) - if v is not None: - result.append(v) - return result - - def _log_skipped_link(self, link, reason): - # type: (Link, str) -> None - if link not in self.logged_links: - logger.debug('Skipping link %s; %s', link, reason) - self.logged_links.add(link) - - def _link_package_versions(self, link, search): - # type: (Link, Search) -> Optional[InstallationCandidate] - """Return an InstallationCandidate or None""" - version = None - if link.egg_fragment: - egg_info = link.egg_fragment - ext = link.ext - else: - egg_info, ext = link.splitext() - if not ext: - self._log_skipped_link(link, 'not a file') - return None - if ext not in SUPPORTED_EXTENSIONS: - self._log_skipped_link( - link, 'unsupported archive format: %s' % ext, - ) - return None - if "binary" not in search.formats and ext == WHEEL_EXTENSION: - self._log_skipped_link( - link, 'No binaries permitted for %s' % search.supplied, - ) - return None - if "macosx10" in link.path and ext == '.zip': - self._log_skipped_link(link, 'macosx10 one') - return None - if ext == WHEEL_EXTENSION: - try: - wheel = Wheel(link.filename) - except InvalidWheelFilename: - self._log_skipped_link(link, 'invalid wheel filename') - return None - if canonicalize_name(wheel.name) != search.canonical: - self._log_skipped_link( - link, 'wrong project name (not %s)' % search.supplied) - return None - - if not wheel.supported(self.valid_tags): - self._log_skipped_link( - link, 'it is not compatible with this Python') - return None - - version = wheel.version - - # This should be up by the search.ok_binary check, but see issue 2700. 
- if "source" not in search.formats and ext != WHEEL_EXTENSION: - self._log_skipped_link( - link, 'No sources permitted for %s' % search.supplied, - ) - return None - - if not version: - version = _egg_info_matches(egg_info, search.canonical) - if not version: - self._log_skipped_link( - link, 'Missing project version for %s' % search.supplied) - return None - - match = self._py_version_re.search(version) - if match: - version = version[:match.start()] - py_version = match.group(1) - if py_version != sys.version[:3]: - self._log_skipped_link( - link, 'Python version is incorrect') - return None - try: - support_this_python = check_requires_python(link.requires_python) - except specifiers.InvalidSpecifier: - logger.debug("Package %s has an invalid Requires-Python entry: %s", - link.filename, link.requires_python) - support_this_python = True - - if not support_this_python: - logger.debug("The package %s is incompatible with the python " - "version in use. Acceptable python versions are: %s", - link, link.requires_python) - return None - logger.debug('Found link %s, version: %s', link, version) - - return InstallationCandidate(search.supplied, version, link) - - -def _find_name_version_sep(egg_info, canonical_name): - # type: (str, str) -> int - """Find the separator's index based on the package's canonical name. - - `egg_info` must be an egg info string for the given package, and - `canonical_name` must be the package's canonical name. - - This function is needed since the canonicalized name does not necessarily - have the same length as the egg info's name part. An example:: - - >>> egg_info = 'foo__bar-1.0' - >>> canonical_name = 'foo-bar' - >>> _find_name_version_sep(egg_info, canonical_name) - 8 - """ - # Project name and version must be separated by one single dash. Find all - # occurrences of dashes; if the string in front of it matches the canonical - # name, this is the one separating the name and version parts. 
- for i, c in enumerate(egg_info): - if c != "-": - continue - if canonicalize_name(egg_info[:i]) == canonical_name: - return i - raise ValueError("{} does not match {}".format(egg_info, canonical_name)) - - -def _egg_info_matches(egg_info, canonical_name): - # type: (str, str) -> Optional[str] - """Pull the version part out of a string. - - :param egg_info: The string to parse. E.g. foo-2.1 - :param canonical_name: The canonicalized name of the package this - belongs to. - """ - try: - version_start = _find_name_version_sep(egg_info, canonical_name) + 1 - except ValueError: - return None - version = egg_info[version_start:] - if not version: - return None - return version - - -def _determine_base_url(document, page_url): - """Determine the HTML document's base URL. - - This looks for a ```` tag in the HTML document. If present, its href - attribute denotes the base URL of anchor tags in the document. If there is - no such tag (or if it does not have a valid href attribute), the HTML - file's URL is used as the base URL. - - :param document: An HTML document representation. The current - implementation expects the result of ``html5lib.parse()``. - :param page_url: The URL of the HTML document. - """ - for base in document.findall(".//base"): - href = base.get("href") - if href is not None: - return href - return page_url - - -def _get_encoding_from_headers(headers): - """Determine if we have any encoding information in our headers. - """ - if headers and "Content-Type" in headers: - content_type, params = cgi.parse_header(headers["Content-Type"]) - if "charset" in params: - return params['charset'] - return None - - -_CLEAN_LINK_RE = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) - - -def _clean_link(url): - # type: (str) -> str - """Makes sure a link is fully encoded. 
That is, if a ' ' shows up in - the link, it will be rewritten to %20 (while not over-quoting - % or other characters).""" - return _CLEAN_LINK_RE.sub(lambda match: '%%%2x' % ord(match.group(0)), url) - - -class HTMLPage(object): - """Represents one page, along with its URL""" - - def __init__(self, content, url, headers=None): - # type: (bytes, str, MutableMapping[str, str]) -> None - self.content = content - self.url = url - self.headers = headers - - def __str__(self): - return redact_password_from_url(self.url) - - def iter_links(self): - # type: () -> Iterable[Link] - """Yields all links in the page""" - document = html5lib.parse( - self.content, - transport_encoding=_get_encoding_from_headers(self.headers), - namespaceHTMLElements=False, - ) - base_url = _determine_base_url(document, self.url) - for anchor in document.findall(".//a"): - if anchor.get("href"): - href = anchor.get("href") - url = _clean_link(urllib_parse.urljoin(base_url, href)) - pyrequire = anchor.get('data-requires-python') - pyrequire = unescape(pyrequire) if pyrequire else None - yield Link(url, self.url, requires_python=pyrequire) - - -Search = namedtuple('Search', 'supplied canonical formats') -"""Capture key aspects of a search. - -:attribute supplied: The user supplied package. -:attribute canonical: The canonical package name. -:attribute formats: The formats allowed for this package. Should be a set - with 'binary' or 'source' or both in it. 
-""" diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/index/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/index/__init__.py new file mode 100644 index 00000000..7a17b7b3 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/index/__init__.py @@ -0,0 +1,2 @@ +"""Index interaction code +""" diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/index/collector.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/index/collector.py new file mode 100644 index 00000000..6c35fc66 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/index/collector.py @@ -0,0 +1,692 @@ +""" +The main purpose of this module is to expose LinkCollector.collect_links(). +""" + +import cgi +import functools +import itertools +import logging +import mimetypes +import os +import re +from collections import OrderedDict + +from pip._vendor import html5lib, requests +from pip._vendor.distlib.compat import unescape +from pip._vendor.requests.exceptions import RetryError, SSLError +from pip._vendor.six.moves.urllib import parse as urllib_parse +from pip._vendor.six.moves.urllib import request as urllib_request + +from pip._internal.exceptions import NetworkConnectionError +from pip._internal.models.link import Link +from pip._internal.models.search_scope import SearchScope +from pip._internal.network.utils import raise_for_status +from pip._internal.utils.filetypes import ARCHIVE_EXTENSIONS +from pip._internal.utils.misc import pairwise, redact_auth_from_url +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import path_to_url, url_to_path +from pip._internal.vcs import is_url, vcs + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import ( + Callable, Iterable, List, MutableMapping, Optional, + Protocol, Sequence, Tuple, TypeVar, Union, + ) + import xml.etree.ElementTree + + 
from pip._vendor.requests import Response + + from pip._internal.network.session import PipSession + + HTMLElement = xml.etree.ElementTree.Element + ResponseHeaders = MutableMapping[str, str] + + # Used in the @lru_cache polyfill. + F = TypeVar('F') + + class LruCache(Protocol): + def __call__(self, maxsize=None): + # type: (Optional[int]) -> Callable[[F], F] + raise NotImplementedError + + +logger = logging.getLogger(__name__) + + +# Fallback to noop_lru_cache in Python 2 +# TODO: this can be removed when python 2 support is dropped! +def noop_lru_cache(maxsize=None): + # type: (Optional[int]) -> Callable[[F], F] + def _wrapper(f): + # type: (F) -> F + return f + return _wrapper + + +_lru_cache = getattr(functools, "lru_cache", noop_lru_cache) # type: LruCache + + +def _match_vcs_scheme(url): + # type: (str) -> Optional[str] + """Look for VCS schemes in the URL. + + Returns the matched VCS scheme, or None if there's no match. + """ + for scheme in vcs.schemes: + if url.lower().startswith(scheme) and url[len(scheme)] in '+:': + return scheme + return None + + +def _is_url_like_archive(url): + # type: (str) -> bool + """Return whether the URL looks like an archive. + """ + filename = Link(url).filename + for bad_ext in ARCHIVE_EXTENSIONS: + if filename.endswith(bad_ext): + return True + return False + + +class _NotHTML(Exception): + def __init__(self, content_type, request_desc): + # type: (str, str) -> None + super(_NotHTML, self).__init__(content_type, request_desc) + self.content_type = content_type + self.request_desc = request_desc + + +def _ensure_html_header(response): + # type: (Response) -> None + """Check the Content-Type header to ensure the response contains HTML. + + Raises `_NotHTML` if the content type is not text/html. 
+ """ + content_type = response.headers.get("Content-Type", "") + if not content_type.lower().startswith("text/html"): + raise _NotHTML(content_type, response.request.method) + + +class _NotHTTP(Exception): + pass + + +def _ensure_html_response(url, session): + # type: (str, PipSession) -> None + """Send a HEAD request to the URL, and ensure the response contains HTML. + + Raises `_NotHTTP` if the URL is not available for a HEAD request, or + `_NotHTML` if the content type is not text/html. + """ + scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url) + if scheme not in {'http', 'https'}: + raise _NotHTTP() + + resp = session.head(url, allow_redirects=True) + raise_for_status(resp) + + _ensure_html_header(resp) + + +def _get_html_response(url, session): + # type: (str, PipSession) -> Response + """Access an HTML page with GET, and return the response. + + This consists of three parts: + + 1. If the URL looks suspiciously like an archive, send a HEAD first to + check the Content-Type is HTML, to avoid downloading a large file. + Raise `_NotHTTP` if the content type cannot be determined, or + `_NotHTML` if it is not HTML. + 2. Actually perform the request. Raise HTTP exceptions on network failures. + 3. Check the Content-Type header to make sure we got HTML, and raise + `_NotHTML` otherwise. + """ + if _is_url_like_archive(url): + _ensure_html_response(url, session=session) + + logger.debug('Getting page %s', redact_auth_from_url(url)) + + resp = session.get( + url, + headers={ + "Accept": "text/html", + # We don't want to blindly returned cached data for + # /simple/, because authors generally expecting that + # twine upload && pip install will function, but if + # they've done a pip install in the last ~10 minutes + # it won't. 
Thus by setting this to zero we will not + # blindly use any cached data, however the benefit of + # using max-age=0 instead of no-cache, is that we will + # still support conditional requests, so we will still + # minimize traffic sent in cases where the page hasn't + # changed at all, we will just always incur the round + # trip for the conditional GET now instead of only + # once per 10 minutes. + # For more information, please see pypa/pip#5670. + "Cache-Control": "max-age=0", + }, + ) + raise_for_status(resp) + + # The check for archives above only works if the url ends with + # something that looks like an archive. However that is not a + # requirement of an url. Unless we issue a HEAD request on every + # url we cannot know ahead of time for sure if something is HTML + # or not. However we can check after we've downloaded it. + _ensure_html_header(resp) + + return resp + + +def _get_encoding_from_headers(headers): + # type: (ResponseHeaders) -> Optional[str] + """Determine if we have any encoding information in our headers. + """ + if headers and "Content-Type" in headers: + content_type, params = cgi.parse_header(headers["Content-Type"]) + if "charset" in params: + return params['charset'] + return None + + +def _determine_base_url(document, page_url): + # type: (HTMLElement, str) -> str + """Determine the HTML document's base URL. + + This looks for a ```` tag in the HTML document. If present, its href + attribute denotes the base URL of anchor tags in the document. If there is + no such tag (or if it does not have a valid href attribute), the HTML + file's URL is used as the base URL. + + :param document: An HTML document representation. The current + implementation expects the result of ``html5lib.parse()``. + :param page_url: The URL of the HTML document. 
+ """ + for base in document.findall(".//base"): + href = base.get("href") + if href is not None: + return href + return page_url + + +def _clean_url_path_part(part): + # type: (str) -> str + """ + Clean a "part" of a URL path (i.e. after splitting on "@" characters). + """ + # We unquote prior to quoting to make sure nothing is double quoted. + return urllib_parse.quote(urllib_parse.unquote(part)) + + +def _clean_file_url_path(part): + # type: (str) -> str + """ + Clean the first part of a URL path that corresponds to a local + filesystem path (i.e. the first part after splitting on "@" characters). + """ + # We unquote prior to quoting to make sure nothing is double quoted. + # Also, on Windows the path part might contain a drive letter which + # should not be quoted. On Linux where drive letters do not + # exist, the colon should be quoted. We rely on urllib.request + # to do the right thing here. + return urllib_request.pathname2url(urllib_request.url2pathname(part)) + + +# percent-encoded: / +_reserved_chars_re = re.compile('(@|%2F)', re.IGNORECASE) + + +def _clean_url_path(path, is_local_path): + # type: (str, bool) -> str + """ + Clean the path portion of a URL. + """ + if is_local_path: + clean_func = _clean_file_url_path + else: + clean_func = _clean_url_path_part + + # Split on the reserved characters prior to cleaning so that + # revision strings in VCS URLs are properly preserved. + parts = _reserved_chars_re.split(path) + + cleaned_parts = [] + for to_clean, reserved in pairwise(itertools.chain(parts, [''])): + cleaned_parts.append(clean_func(to_clean)) + # Normalize %xx escapes (e.g. %2f -> %2F) + cleaned_parts.append(reserved.upper()) + + return ''.join(cleaned_parts) + + +def _clean_link(url): + # type: (str) -> str + """ + Make sure a link is fully quoted. + For example, if ' ' occurs in the URL, it will be replaced with "%20", + and without double-quoting other characters. 
+ """ + # Split the URL into parts according to the general structure + # `scheme://netloc/path;parameters?query#fragment`. + result = urllib_parse.urlparse(url) + # If the netloc is empty, then the URL refers to a local filesystem path. + is_local_path = not result.netloc + path = _clean_url_path(result.path, is_local_path=is_local_path) + return urllib_parse.urlunparse(result._replace(path=path)) + + +def _create_link_from_element( + anchor, # type: HTMLElement + page_url, # type: str + base_url, # type: str +): + # type: (...) -> Optional[Link] + """ + Convert an anchor element in a simple repository page to a Link. + """ + href = anchor.get("href") + if not href: + return None + + url = _clean_link(urllib_parse.urljoin(base_url, href)) + pyrequire = anchor.get('data-requires-python') + pyrequire = unescape(pyrequire) if pyrequire else None + + yanked_reason = anchor.get('data-yanked') + if yanked_reason: + # This is a unicode string in Python 2 (and 3). + yanked_reason = unescape(yanked_reason) + + link = Link( + url, + comes_from=page_url, + requires_python=pyrequire, + yanked_reason=yanked_reason, + ) + + return link + + +class CacheablePageContent(object): + def __init__(self, page): + # type: (HTMLPage) -> None + assert page.cache_link_parsing + self.page = page + + def __eq__(self, other): + # type: (object) -> bool + return (isinstance(other, type(self)) and + self.page.url == other.page.url) + + def __hash__(self): + # type: () -> int + return hash(self.page.url) + + +def with_cached_html_pages( + fn, # type: Callable[[HTMLPage], Iterable[Link]] +): + # type: (...) -> Callable[[HTMLPage], List[Link]] + """ + Given a function that parses an Iterable[Link] from an HTMLPage, cache the + function's result (keyed by CacheablePageContent), unless the HTMLPage + `page` has `page.cache_link_parsing == False`. 
+ """ + + @_lru_cache(maxsize=None) + def wrapper(cacheable_page): + # type: (CacheablePageContent) -> List[Link] + return list(fn(cacheable_page.page)) + + @functools.wraps(fn) + def wrapper_wrapper(page): + # type: (HTMLPage) -> List[Link] + if page.cache_link_parsing: + return wrapper(CacheablePageContent(page)) + return list(fn(page)) + + return wrapper_wrapper + + +@with_cached_html_pages +def parse_links(page): + # type: (HTMLPage) -> Iterable[Link] + """ + Parse an HTML document, and yield its anchor elements as Link objects. + """ + document = html5lib.parse( + page.content, + transport_encoding=page.encoding, + namespaceHTMLElements=False, + ) + + url = page.url + base_url = _determine_base_url(document, url) + for anchor in document.findall(".//a"): + link = _create_link_from_element( + anchor, + page_url=url, + base_url=base_url, + ) + if link is None: + continue + yield link + + +class HTMLPage(object): + """Represents one page, along with its URL""" + + def __init__( + self, + content, # type: bytes + encoding, # type: Optional[str] + url, # type: str + cache_link_parsing=True, # type: bool + ): + # type: (...) -> None + """ + :param encoding: the encoding to decode the given content. + :param url: the URL from which the HTML was downloaded. + :param cache_link_parsing: whether links parsed from this page's url + should be cached. PyPI index urls should + have this set to False, for example. + """ + self.content = content + self.encoding = encoding + self.url = url + self.cache_link_parsing = cache_link_parsing + + def __str__(self): + # type: () -> str + return redact_auth_from_url(self.url) + + +def _handle_get_page_fail( + link, # type: Link + reason, # type: Union[str, Exception] + meth=None # type: Optional[Callable[..., None]] +): + # type: (...) 
-> None + if meth is None: + meth = logger.debug + meth("Could not fetch URL %s: %s - skipping", link, reason) + + +def _make_html_page(response, cache_link_parsing=True): + # type: (Response, bool) -> HTMLPage + encoding = _get_encoding_from_headers(response.headers) + return HTMLPage( + response.content, + encoding=encoding, + url=response.url, + cache_link_parsing=cache_link_parsing) + + +def _get_html_page(link, session=None): + # type: (Link, Optional[PipSession]) -> Optional[HTMLPage] + if session is None: + raise TypeError( + "_get_html_page() missing 1 required keyword argument: 'session'" + ) + + url = link.url.split('#', 1)[0] + + # Check for VCS schemes that do not support lookup as web pages. + vcs_scheme = _match_vcs_scheme(url) + if vcs_scheme: + logger.warning('Cannot look at %s URL %s because it does not support ' + 'lookup as web pages.', vcs_scheme, link) + return None + + # Tack index.html onto file:// URLs that point to directories + scheme, _, path, _, _, _ = urllib_parse.urlparse(url) + if (scheme == 'file' and os.path.isdir(urllib_request.url2pathname(path))): + # add trailing slash if not present so urljoin doesn't trim + # final segment + if not url.endswith('/'): + url += '/' + url = urllib_parse.urljoin(url, 'index.html') + logger.debug(' file: URL is directory, getting %s', url) + + try: + resp = _get_html_response(url, session=session) + except _NotHTTP: + logger.warning( + 'Skipping page %s because it looks like an archive, and cannot ' + 'be checked by a HTTP HEAD request.', link, + ) + except _NotHTML as exc: + logger.warning( + 'Skipping page %s because the %s request got Content-Type: %s.' 
+ 'The only supported Content-Type is text/html', + link, exc.request_desc, exc.content_type, + ) + except NetworkConnectionError as exc: + _handle_get_page_fail(link, exc) + except RetryError as exc: + _handle_get_page_fail(link, exc) + except SSLError as exc: + reason = "There was a problem confirming the ssl certificate: " + reason += str(exc) + _handle_get_page_fail(link, reason, meth=logger.info) + except requests.ConnectionError as exc: + _handle_get_page_fail(link, "connection error: {}".format(exc)) + except requests.Timeout: + _handle_get_page_fail(link, "timed out") + else: + return _make_html_page(resp, + cache_link_parsing=link.cache_link_parsing) + return None + + +def _remove_duplicate_links(links): + # type: (Iterable[Link]) -> List[Link] + """ + Return a list of links, with duplicates removed and ordering preserved. + """ + # We preserve the ordering when removing duplicates because we can. + return list(OrderedDict.fromkeys(links)) + + +def group_locations(locations, expand_dir=False): + # type: (Sequence[str], bool) -> Tuple[List[str], List[str]] + """ + Divide a list of locations into two groups: "files" (archives) and "urls." + + :return: A pair of lists (files, urls). 
+ """ + files = [] + urls = [] + + # puts the url for the given file path into the appropriate list + def sort_path(path): + # type: (str) -> None + url = path_to_url(path) + if mimetypes.guess_type(url, strict=False)[0] == 'text/html': + urls.append(url) + else: + files.append(url) + + for url in locations: + + is_local_path = os.path.exists(url) + is_file_url = url.startswith('file:') + + if is_local_path or is_file_url: + if is_local_path: + path = url + else: + path = url_to_path(url) + if os.path.isdir(path): + if expand_dir: + path = os.path.realpath(path) + for item in os.listdir(path): + sort_path(os.path.join(path, item)) + elif is_file_url: + urls.append(url) + else: + logger.warning( + "Path '%s' is ignored: it is a directory.", path, + ) + elif os.path.isfile(path): + sort_path(path) + else: + logger.warning( + "Url '%s' is ignored: it is neither a file " + "nor a directory.", url, + ) + elif is_url(url): + # Only add url with clear scheme + urls.append(url) + else: + logger.warning( + "Url '%s' is ignored. It is either a non-existing " + "path or lacks a specific scheme.", url, + ) + + return files, urls + + +class CollectedLinks(object): + + """ + Encapsulates the return value of a call to LinkCollector.collect_links(). + + The return value includes both URLs to project pages containing package + links, as well as individual package Link objects collected from other + sources. + + This info is stored separately as: + + (1) links from the configured file locations, + (2) links from the configured find_links, and + (3) urls to HTML project pages, as described by the PEP 503 simple + repository API. + """ + + def __init__( + self, + files, # type: List[Link] + find_links, # type: List[Link] + project_urls, # type: List[Link] + ): + # type: (...) -> None + """ + :param files: Links from file locations. + :param find_links: Links from find_links. + :param project_urls: URLs to HTML project pages, as described by + the PEP 503 simple repository API. 
+ """ + self.files = files + self.find_links = find_links + self.project_urls = project_urls + + +class LinkCollector(object): + + """ + Responsible for collecting Link objects from all configured locations, + making network requests as needed. + + The class's main method is its collect_links() method. + """ + + def __init__( + self, + session, # type: PipSession + search_scope, # type: SearchScope + ): + # type: (...) -> None + self.search_scope = search_scope + self.session = session + + @classmethod + def create(cls, session, options, suppress_no_index=False): + # type: (PipSession, Values, bool) -> LinkCollector + """ + :param session: The Session to use to make requests. + :param suppress_no_index: Whether to ignore the --no-index option + when constructing the SearchScope object. + """ + index_urls = [options.index_url] + options.extra_index_urls + if options.no_index and not suppress_no_index: + logger.debug( + 'Ignoring indexes: %s', + ','.join(redact_auth_from_url(url) for url in index_urls), + ) + index_urls = [] + + # Make sure find_links is a list before passing to create(). + find_links = options.find_links or [] + + search_scope = SearchScope.create( + find_links=find_links, index_urls=index_urls, + ) + link_collector = LinkCollector( + session=session, search_scope=search_scope, + ) + return link_collector + + @property + def find_links(self): + # type: () -> List[str] + return self.search_scope.find_links + + def fetch_page(self, location): + # type: (Link) -> Optional[HTMLPage] + """ + Fetch an HTML page containing package links. + """ + return _get_html_page(location, session=self.session) + + def collect_links(self, project_name): + # type: (str) -> CollectedLinks + """Find all available links for the given project name. + + :return: All the Link objects (unfiltered), as a CollectedLinks object. 
+ """ + search_scope = self.search_scope + index_locations = search_scope.get_index_urls_locations(project_name) + index_file_loc, index_url_loc = group_locations(index_locations) + fl_file_loc, fl_url_loc = group_locations( + self.find_links, expand_dir=True, + ) + + file_links = [ + Link(url) for url in itertools.chain(index_file_loc, fl_file_loc) + ] + + # We trust every directly linked archive in find_links + find_link_links = [Link(url, '-f') for url in self.find_links] + + # We trust every url that the user has given us whether it was given + # via --index-url or --find-links. + # We want to filter out anything that does not have a secure origin. + url_locations = [ + link for link in itertools.chain( + # Mark PyPI indices as "cache_link_parsing == False" -- this + # will avoid caching the result of parsing the page for links. + (Link(url, cache_link_parsing=False) for url in index_url_loc), + (Link(url) for url in fl_url_loc), + ) + if self.session.is_secure_origin(link) + ] + + url_locations = _remove_duplicate_links(url_locations) + lines = [ + '{} location(s) to search for versions of {}:'.format( + len(url_locations), project_name, + ), + ] + for link in url_locations: + lines.append('* {}'.format(link)) + logger.debug('\n'.join(lines)) + + return CollectedLinks( + files=file_links, + find_links=find_link_links, + project_urls=url_locations, + ) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/index/package_finder.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/index/package_finder.py new file mode 100644 index 00000000..84115783 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/index/package_finder.py @@ -0,0 +1,1014 @@ +"""Routines related to PyPI, indexes""" + +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False + +from __future__ import absolute_import + +import logging +import re + +from pip._vendor.packaging import specifiers +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.exceptions import ( + BestVersionAlreadyInstalled, + DistributionNotFound, + InvalidWheelFilename, + UnsupportedWheel, +) +from pip._internal.index.collector import parse_links +from pip._internal.models.candidate import InstallationCandidate +from pip._internal.models.format_control import FormatControl +from pip._internal.models.link import Link +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.models.target_python import TargetPython +from pip._internal.models.wheel import Wheel +from pip._internal.utils.filetypes import WHEEL_EXTENSION +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import build_netloc +from pip._internal.utils.packaging import check_requires_python +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS +from pip._internal.utils.urls import url_to_path + +if MYPY_CHECK_RUNNING: + from typing import ( + FrozenSet, Iterable, List, Optional, Set, Text, Tuple, Union, + ) + + from pip._vendor.packaging.tags import Tag + from pip._vendor.packaging.version import _BaseVersion + + from pip._internal.index.collector import LinkCollector + from pip._internal.models.search_scope import SearchScope + from pip._internal.req import InstallRequirement + from pip._internal.utils.hashes import Hashes + + BuildTag = Union[Tuple[()], Tuple[int, str]] + CandidateSortingKey = ( + Tuple[int, int, int, _BaseVersion, BuildTag, Optional[int]] + ) + + +__all__ = ['FormatControl', 'BestCandidateResult', 'PackageFinder'] + + +logger = logging.getLogger(__name__) + + +def _check_link_requires_python( + link, # type: Link + version_info, # 
type: Tuple[int, int, int] + ignore_requires_python=False, # type: bool +): + # type: (...) -> bool + """ + Return whether the given Python version is compatible with a link's + "Requires-Python" value. + + :param version_info: A 3-tuple of ints representing the Python + major-minor-micro version to check. + :param ignore_requires_python: Whether to ignore the "Requires-Python" + value if the given Python version isn't compatible. + """ + try: + is_compatible = check_requires_python( + link.requires_python, version_info=version_info, + ) + except specifiers.InvalidSpecifier: + logger.debug( + "Ignoring invalid Requires-Python (%r) for link: %s", + link.requires_python, link, + ) + else: + if not is_compatible: + version = '.'.join(map(str, version_info)) + if not ignore_requires_python: + logger.debug( + 'Link requires a different Python (%s not in: %r): %s', + version, link.requires_python, link, + ) + return False + + logger.debug( + 'Ignoring failed Requires-Python check (%s not in: %r) ' + 'for link: %s', + version, link.requires_python, link, + ) + + return True + + +class LinkEvaluator(object): + + """ + Responsible for evaluating links for a particular project. + """ + + _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$') + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. + def __init__( + self, + project_name, # type: str + canonical_name, # type: str + formats, # type: FrozenSet[str] + target_python, # type: TargetPython + allow_yanked, # type: bool + ignore_requires_python=None, # type: Optional[bool] + ): + # type: (...) -> None + """ + :param project_name: The user supplied package name. + :param canonical_name: The canonical package name. + :param formats: The formats allowed for this package. Should be a set + with 'binary' or 'source' or both in it. 
+ :param target_python: The target Python interpreter to use when + evaluating link compatibility. This is used, for example, to + check wheel compatibility, as well as when checking the Python + version, e.g. the Python version embedded in a link filename + (or egg fragment) and against an HTML link's optional PEP 503 + "data-requires-python" attribute. + :param allow_yanked: Whether files marked as yanked (in the sense + of PEP 592) are permitted to be candidates for install. + :param ignore_requires_python: Whether to ignore incompatible + PEP 503 "data-requires-python" values in HTML links. Defaults + to False. + """ + if ignore_requires_python is None: + ignore_requires_python = False + + self._allow_yanked = allow_yanked + self._canonical_name = canonical_name + self._ignore_requires_python = ignore_requires_python + self._formats = formats + self._target_python = target_python + + self.project_name = project_name + + def evaluate_link(self, link): + # type: (Link) -> Tuple[bool, Optional[Text]] + """ + Determine whether a link is a candidate for installation. + + :return: A tuple (is_candidate, result), where `result` is (1) a + version string if `is_candidate` is True, and (2) if + `is_candidate` is False, an optional string to log the reason + the link fails to qualify. + """ + version = None + if link.is_yanked and not self._allow_yanked: + reason = link.yanked_reason or '' + # Mark this as a unicode string to prevent "UnicodeEncodeError: + # 'ascii' codec can't encode character" in Python 2 when + # the reason contains non-ascii characters. 
+ return (False, u'yanked for reason: {}'.format(reason)) + + if link.egg_fragment: + egg_info = link.egg_fragment + ext = link.ext + else: + egg_info, ext = link.splitext() + if not ext: + return (False, 'not a file') + if ext not in SUPPORTED_EXTENSIONS: + return (False, 'unsupported archive format: {}'.format(ext)) + if "binary" not in self._formats and ext == WHEEL_EXTENSION: + reason = 'No binaries permitted for {}'.format( + self.project_name) + return (False, reason) + if "macosx10" in link.path and ext == '.zip': + return (False, 'macosx10 one') + if ext == WHEEL_EXTENSION: + try: + wheel = Wheel(link.filename) + except InvalidWheelFilename: + return (False, 'invalid wheel filename') + if canonicalize_name(wheel.name) != self._canonical_name: + reason = 'wrong project name (not {})'.format( + self.project_name) + return (False, reason) + + supported_tags = self._target_python.get_tags() + if not wheel.supported(supported_tags): + # Include the wheel's tags in the reason string to + # simplify troubleshooting compatibility issues. + file_tags = wheel.get_formatted_file_tags() + reason = ( + "none of the wheel's tags match: {}".format( + ', '.join(file_tags) + ) + ) + return (False, reason) + + version = wheel.version + + # This should be up by the self.ok_binary check, but see issue 2700. 
+ if "source" not in self._formats and ext != WHEEL_EXTENSION: + reason = 'No sources permitted for {}'.format(self.project_name) + return (False, reason) + + if not version: + version = _extract_version_from_fragment( + egg_info, self._canonical_name, + ) + if not version: + reason = 'Missing project version for {}'.format(self.project_name) + return (False, reason) + + match = self._py_version_re.search(version) + if match: + version = version[:match.start()] + py_version = match.group(1) + if py_version != self._target_python.py_version: + return (False, 'Python version is incorrect') + + supports_python = _check_link_requires_python( + link, version_info=self._target_python.py_version_info, + ignore_requires_python=self._ignore_requires_python, + ) + if not supports_python: + # Return None for the reason text to suppress calling + # _log_skipped_link(). + return (False, None) + + logger.debug('Found link %s, version: %s', link, version) + + return (True, version) + + +def filter_unallowed_hashes( + candidates, # type: List[InstallationCandidate] + hashes, # type: Hashes + project_name, # type: str +): + # type: (...) -> List[InstallationCandidate] + """ + Filter out candidates whose hashes aren't allowed, and return a new + list of candidates. + + If at least one candidate has an allowed hash, then all candidates with + either an allowed hash or no hash specified are returned. Otherwise, + the given candidates are returned. + + Including the candidates with no hash specified when there is a match + allows a warning to be logged if there is a more preferred candidate + with no hash specified. Returning all candidates in the case of no + matches lets pip report the hash of the candidate that would otherwise + have been installed (e.g. permitting the user to more easily update + their requirements file with the desired hash). 
+ """ + if not hashes: + logger.debug( + 'Given no hashes to check %s links for project %r: ' + 'discarding no candidates', + len(candidates), + project_name, + ) + # Make sure we're not returning back the given value. + return list(candidates) + + matches_or_no_digest = [] + # Collect the non-matches for logging purposes. + non_matches = [] + match_count = 0 + for candidate in candidates: + link = candidate.link + if not link.has_hash: + pass + elif link.is_hash_allowed(hashes=hashes): + match_count += 1 + else: + non_matches.append(candidate) + continue + + matches_or_no_digest.append(candidate) + + if match_count: + filtered = matches_or_no_digest + else: + # Make sure we're not returning back the given value. + filtered = list(candidates) + + if len(filtered) == len(candidates): + discard_message = 'discarding no candidates' + else: + discard_message = 'discarding {} non-matches:\n {}'.format( + len(non_matches), + '\n '.join(str(candidate.link) for candidate in non_matches) + ) + + logger.debug( + 'Checked %s links for project %r against %s hashes ' + '(%s matches, %s no digest): %s', + len(candidates), + project_name, + hashes.digest_count, + match_count, + len(matches_or_no_digest) - match_count, + discard_message + ) + + return filtered + + +class CandidatePreferences(object): + + """ + Encapsulates some of the preferences for filtering and sorting + InstallationCandidate objects. + """ + + def __init__( + self, + prefer_binary=False, # type: bool + allow_all_prereleases=False, # type: bool + ): + # type: (...) -> None + """ + :param allow_all_prereleases: Whether to allow all pre-releases. + """ + self.allow_all_prereleases = allow_all_prereleases + self.prefer_binary = prefer_binary + + +class BestCandidateResult(object): + """A collection of candidates, returned by `PackageFinder.find_best_candidate`. + + This class is only intended to be instantiated by CandidateEvaluator's + `compute_best_candidate()` method. 
+ """ + + def __init__( + self, + candidates, # type: List[InstallationCandidate] + applicable_candidates, # type: List[InstallationCandidate] + best_candidate, # type: Optional[InstallationCandidate] + ): + # type: (...) -> None + """ + :param candidates: A sequence of all available candidates found. + :param applicable_candidates: The applicable candidates. + :param best_candidate: The most preferred candidate found, or None + if no applicable candidates were found. + """ + assert set(applicable_candidates) <= set(candidates) + + if best_candidate is None: + assert not applicable_candidates + else: + assert best_candidate in applicable_candidates + + self._applicable_candidates = applicable_candidates + self._candidates = candidates + + self.best_candidate = best_candidate + + def iter_all(self): + # type: () -> Iterable[InstallationCandidate] + """Iterate through all candidates. + """ + return iter(self._candidates) + + def iter_applicable(self): + # type: () -> Iterable[InstallationCandidate] + """Iterate through the applicable candidates. + """ + return iter(self._applicable_candidates) + + +class CandidateEvaluator(object): + + """ + Responsible for filtering and sorting candidates for installation based + on what tags are valid. + """ + + @classmethod + def create( + cls, + project_name, # type: str + target_python=None, # type: Optional[TargetPython] + prefer_binary=False, # type: bool + allow_all_prereleases=False, # type: bool + specifier=None, # type: Optional[specifiers.BaseSpecifier] + hashes=None, # type: Optional[Hashes] + ): + # type: (...) -> CandidateEvaluator + """Create a CandidateEvaluator object. + + :param target_python: The target Python interpreter to use when + checking compatibility. If None (the default), a TargetPython + object will be constructed from the running Python. + :param specifier: An optional object implementing `filter` + (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable + versions. 
+ :param hashes: An optional collection of allowed hashes. + """ + if target_python is None: + target_python = TargetPython() + if specifier is None: + specifier = specifiers.SpecifierSet() + + supported_tags = target_python.get_tags() + + return cls( + project_name=project_name, + supported_tags=supported_tags, + specifier=specifier, + prefer_binary=prefer_binary, + allow_all_prereleases=allow_all_prereleases, + hashes=hashes, + ) + + def __init__( + self, + project_name, # type: str + supported_tags, # type: List[Tag] + specifier, # type: specifiers.BaseSpecifier + prefer_binary=False, # type: bool + allow_all_prereleases=False, # type: bool + hashes=None, # type: Optional[Hashes] + ): + # type: (...) -> None + """ + :param supported_tags: The PEP 425 tags supported by the target + Python in order of preference (most preferred first). + """ + self._allow_all_prereleases = allow_all_prereleases + self._hashes = hashes + self._prefer_binary = prefer_binary + self._project_name = project_name + self._specifier = specifier + self._supported_tags = supported_tags + + def get_applicable_candidates( + self, + candidates, # type: List[InstallationCandidate] + ): + # type: (...) -> List[InstallationCandidate] + """ + Return the applicable candidates from a list of candidates. + """ + # Using None infers from the specifier instead. + allow_prereleases = self._allow_all_prereleases or None + specifier = self._specifier + versions = { + str(v) for v in specifier.filter( + # We turn the version object into a str here because otherwise + # when we're debundled but setuptools isn't, Python will see + # packaging.version.Version and + # pkg_resources._vendor.packaging.version.Version as different + # types. This way we'll use a str as a common data interchange + # format. If we stop using the pkg_resources provided specifier + # and start using our own, we can drop the cast to str(). 
+ (str(c.version) for c in candidates), + prereleases=allow_prereleases, + ) + } + + # Again, converting version to str to deal with debundling. + applicable_candidates = [ + c for c in candidates if str(c.version) in versions + ] + + filtered_applicable_candidates = filter_unallowed_hashes( + candidates=applicable_candidates, + hashes=self._hashes, + project_name=self._project_name, + ) + + return sorted(filtered_applicable_candidates, key=self._sort_key) + + def _sort_key(self, candidate): + # type: (InstallationCandidate) -> CandidateSortingKey + """ + Function to pass as the `key` argument to a call to sorted() to sort + InstallationCandidates by preference. + + Returns a tuple such that tuples sorting as greater using Python's + default comparison operator are more preferred. + + The preference is as follows: + + First and foremost, candidates with allowed (matching) hashes are + always preferred over candidates without matching hashes. This is + because e.g. if the only candidate with an allowed hash is yanked, + we still want to use that candidate. + + Second, excepting hash considerations, candidates that have been + yanked (in the sense of PEP 592) are always less preferred than + candidates that haven't been yanked. Then: + + If not finding wheels, they are sorted by version only. + If finding wheels, then the sort order is by version, then: + 1. existing installs + 2. wheels ordered via Wheel.support_index_min(self._supported_tags) + 3. source archives + If prefer_binary was set, then all wheels are sorted above sources. 
+ + Note: it was considered to embed this logic into the Link + comparison operators, but then different sdist links + with the same version, would have to be considered equal + """ + valid_tags = self._supported_tags + support_num = len(valid_tags) + build_tag = () # type: BuildTag + binary_preference = 0 + link = candidate.link + if link.is_wheel: + # can raise InvalidWheelFilename + wheel = Wheel(link.filename) + if not wheel.supported(valid_tags): + raise UnsupportedWheel( + "{} is not a supported wheel for this platform. It " + "can't be sorted.".format(wheel.filename) + ) + if self._prefer_binary: + binary_preference = 1 + pri = -(wheel.support_index_min(valid_tags)) + if wheel.build_tag is not None: + match = re.match(r'^(\d+)(.*)$', wheel.build_tag) + build_tag_groups = match.groups() + build_tag = (int(build_tag_groups[0]), build_tag_groups[1]) + else: # sdist + pri = -(support_num) + has_allowed_hash = int(link.is_hash_allowed(self._hashes)) + yank_value = -1 * int(link.is_yanked) # -1 for yanked. + return ( + has_allowed_hash, yank_value, binary_preference, candidate.version, + build_tag, pri, + ) + + def sort_best_candidate( + self, + candidates, # type: List[InstallationCandidate] + ): + # type: (...) -> Optional[InstallationCandidate] + """ + Return the best candidate per the instance's sort order, or None if + no candidate is acceptable. + """ + if not candidates: + return None + best_candidate = max(candidates, key=self._sort_key) + return best_candidate + + def compute_best_candidate( + self, + candidates, # type: List[InstallationCandidate] + ): + # type: (...) -> BestCandidateResult + """ + Compute and return a `BestCandidateResult` instance. 
+ """ + applicable_candidates = self.get_applicable_candidates(candidates) + + best_candidate = self.sort_best_candidate(applicable_candidates) + + return BestCandidateResult( + candidates, + applicable_candidates=applicable_candidates, + best_candidate=best_candidate, + ) + + +class PackageFinder(object): + """This finds packages. + + This is meant to match easy_install's technique for looking for + packages, by reading pages and looking for appropriate links. + """ + + def __init__( + self, + link_collector, # type: LinkCollector + target_python, # type: TargetPython + allow_yanked, # type: bool + format_control=None, # type: Optional[FormatControl] + candidate_prefs=None, # type: CandidatePreferences + ignore_requires_python=None, # type: Optional[bool] + ): + # type: (...) -> None + """ + This constructor is primarily meant to be used by the create() class + method and from tests. + + :param format_control: A FormatControl object, used to control + the selection of source packages / binary packages when consulting + the index and links. + :param candidate_prefs: Options to use when creating a + CandidateEvaluator object. + """ + if candidate_prefs is None: + candidate_prefs = CandidatePreferences() + + format_control = format_control or FormatControl(set(), set()) + + self._allow_yanked = allow_yanked + self._candidate_prefs = candidate_prefs + self._ignore_requires_python = ignore_requires_python + self._link_collector = link_collector + self._target_python = target_python + + self.format_control = format_control + + # These are boring links that have already been logged somehow. + self._logged_links = set() # type: Set[Link] + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. 
+ @classmethod + def create( + cls, + link_collector, # type: LinkCollector + selection_prefs, # type: SelectionPreferences + target_python=None, # type: Optional[TargetPython] + ): + # type: (...) -> PackageFinder + """Create a PackageFinder. + + :param selection_prefs: The candidate selection preferences, as a + SelectionPreferences object. + :param target_python: The target Python interpreter to use when + checking compatibility. If None (the default), a TargetPython + object will be constructed from the running Python. + """ + if target_python is None: + target_python = TargetPython() + + candidate_prefs = CandidatePreferences( + prefer_binary=selection_prefs.prefer_binary, + allow_all_prereleases=selection_prefs.allow_all_prereleases, + ) + + return cls( + candidate_prefs=candidate_prefs, + link_collector=link_collector, + target_python=target_python, + allow_yanked=selection_prefs.allow_yanked, + format_control=selection_prefs.format_control, + ignore_requires_python=selection_prefs.ignore_requires_python, + ) + + @property + def target_python(self): + # type: () -> TargetPython + return self._target_python + + @property + def search_scope(self): + # type: () -> SearchScope + return self._link_collector.search_scope + + @search_scope.setter + def search_scope(self, search_scope): + # type: (SearchScope) -> None + self._link_collector.search_scope = search_scope + + @property + def find_links(self): + # type: () -> List[str] + return self._link_collector.find_links + + @property + def index_urls(self): + # type: () -> List[str] + return self.search_scope.index_urls + + @property + def trusted_hosts(self): + # type: () -> Iterable[str] + for host_port in self._link_collector.session.pip_trusted_origins: + yield build_netloc(*host_port) + + @property + def allow_all_prereleases(self): + # type: () -> bool + return self._candidate_prefs.allow_all_prereleases + + def set_allow_all_prereleases(self): + # type: () -> None + 
self._candidate_prefs.allow_all_prereleases = True + + @property + def prefer_binary(self): + # type: () -> bool + return self._candidate_prefs.prefer_binary + + def set_prefer_binary(self): + # type: () -> None + self._candidate_prefs.prefer_binary = True + + def make_link_evaluator(self, project_name): + # type: (str) -> LinkEvaluator + canonical_name = canonicalize_name(project_name) + formats = self.format_control.get_allowed_formats(canonical_name) + + return LinkEvaluator( + project_name=project_name, + canonical_name=canonical_name, + formats=formats, + target_python=self._target_python, + allow_yanked=self._allow_yanked, + ignore_requires_python=self._ignore_requires_python, + ) + + def _sort_links(self, links): + # type: (Iterable[Link]) -> List[Link] + """ + Returns elements of links in order, non-egg links first, egg links + second, while eliminating duplicates + """ + eggs, no_eggs = [], [] + seen = set() # type: Set[Link] + for link in links: + if link not in seen: + seen.add(link) + if link.egg_fragment: + eggs.append(link) + else: + no_eggs.append(link) + return no_eggs + eggs + + def _log_skipped_link(self, link, reason): + # type: (Link, Text) -> None + if link not in self._logged_links: + # Mark this as a unicode string to prevent "UnicodeEncodeError: + # 'ascii' codec can't encode character" in Python 2 when + # the reason contains non-ascii characters. + # Also, put the link at the end so the reason is more visible + # and because the link string is usually very long. + logger.debug(u'Skipping link: %s: %s', reason, link) + self._logged_links.add(link) + + def get_install_candidate(self, link_evaluator, link): + # type: (LinkEvaluator, Link) -> Optional[InstallationCandidate] + """ + If the link is a candidate for install, convert it to an + InstallationCandidate and return it. Otherwise, return None. 
+ """ + is_candidate, result = link_evaluator.evaluate_link(link) + if not is_candidate: + if result: + self._log_skipped_link(link, reason=result) + return None + + return InstallationCandidate( + name=link_evaluator.project_name, + link=link, + # Convert the Text result to str since InstallationCandidate + # accepts str. + version=str(result), + ) + + def evaluate_links(self, link_evaluator, links): + # type: (LinkEvaluator, Iterable[Link]) -> List[InstallationCandidate] + """ + Convert links that are candidates to InstallationCandidate objects. + """ + candidates = [] + for link in self._sort_links(links): + candidate = self.get_install_candidate(link_evaluator, link) + if candidate is not None: + candidates.append(candidate) + + return candidates + + def process_project_url(self, project_url, link_evaluator): + # type: (Link, LinkEvaluator) -> List[InstallationCandidate] + logger.debug( + 'Fetching project page and analyzing links: %s', project_url, + ) + html_page = self._link_collector.fetch_page(project_url) + if html_page is None: + return [] + + page_links = list(parse_links(html_page)) + + with indent_log(): + package_links = self.evaluate_links( + link_evaluator, + links=page_links, + ) + + return package_links + + def find_all_candidates(self, project_name): + # type: (str) -> List[InstallationCandidate] + """Find all available InstallationCandidate for project_name + + This checks index_urls and find_links. + All versions found are returned as an InstallationCandidate list. + + See LinkEvaluator.evaluate_link() for details on which files + are accepted. 
+ """ + collected_links = self._link_collector.collect_links(project_name) + + link_evaluator = self.make_link_evaluator(project_name) + + find_links_versions = self.evaluate_links( + link_evaluator, + links=collected_links.find_links, + ) + + page_versions = [] + for project_url in collected_links.project_urls: + package_links = self.process_project_url( + project_url, link_evaluator=link_evaluator, + ) + page_versions.extend(package_links) + + file_versions = self.evaluate_links( + link_evaluator, + links=collected_links.files, + ) + if file_versions: + file_versions.sort(reverse=True) + logger.debug( + 'Local files found: %s', + ', '.join([ + url_to_path(candidate.link.url) + for candidate in file_versions + ]) + ) + + # This is an intentional priority ordering + return file_versions + find_links_versions + page_versions + + def make_candidate_evaluator( + self, + project_name, # type: str + specifier=None, # type: Optional[specifiers.BaseSpecifier] + hashes=None, # type: Optional[Hashes] + ): + # type: (...) -> CandidateEvaluator + """Create a CandidateEvaluator object to use. + """ + candidate_prefs = self._candidate_prefs + return CandidateEvaluator.create( + project_name=project_name, + target_python=self._target_python, + prefer_binary=candidate_prefs.prefer_binary, + allow_all_prereleases=candidate_prefs.allow_all_prereleases, + specifier=specifier, + hashes=hashes, + ) + + def find_best_candidate( + self, + project_name, # type: str + specifier=None, # type: Optional[specifiers.BaseSpecifier] + hashes=None, # type: Optional[Hashes] + ): + # type: (...) -> BestCandidateResult + """Find matches for the given project and specifier. + + :param specifier: An optional object implementing `filter` + (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable + versions. + + :return: A `BestCandidateResult` instance. 
+ """ + candidates = self.find_all_candidates(project_name) + candidate_evaluator = self.make_candidate_evaluator( + project_name=project_name, + specifier=specifier, + hashes=hashes, + ) + return candidate_evaluator.compute_best_candidate(candidates) + + def find_requirement(self, req, upgrade): + # type: (InstallRequirement, bool) -> Optional[InstallationCandidate] + """Try to find a Link matching req + + Expects req, an InstallRequirement and upgrade, a boolean + Returns a InstallationCandidate if found, + Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise + """ + hashes = req.hashes(trust_internet=False) + best_candidate_result = self.find_best_candidate( + req.name, specifier=req.specifier, hashes=hashes, + ) + best_candidate = best_candidate_result.best_candidate + + installed_version = None # type: Optional[_BaseVersion] + if req.satisfied_by is not None: + installed_version = parse_version(req.satisfied_by.version) + + def _format_versions(cand_iter): + # type: (Iterable[InstallationCandidate]) -> str + # This repeated parse_version and str() conversion is needed to + # handle different vendoring sources from pip and pkg_resources. + # If we stop using the pkg_resources provided specifier and start + # using our own, we can drop the cast to str(). 
+ return ", ".join(sorted( + {str(c.version) for c in cand_iter}, + key=parse_version, + )) or "none" + + if installed_version is None and best_candidate is None: + logger.critical( + 'Could not find a version that satisfies the requirement %s ' + '(from versions: %s)', + req, + _format_versions(best_candidate_result.iter_all()), + ) + + raise DistributionNotFound( + 'No matching distribution found for {}'.format( + req) + ) + + best_installed = False + if installed_version and ( + best_candidate is None or + best_candidate.version <= installed_version): + best_installed = True + + if not upgrade and installed_version is not None: + if best_installed: + logger.debug( + 'Existing installed version (%s) is most up-to-date and ' + 'satisfies requirement', + installed_version, + ) + else: + logger.debug( + 'Existing installed version (%s) satisfies requirement ' + '(most up-to-date version is %s)', + installed_version, + best_candidate.version, + ) + return None + + if best_installed: + # We have an existing version, and its the best version + logger.debug( + 'Installed version (%s) is most up-to-date (past versions: ' + '%s)', + installed_version, + _format_versions(best_candidate_result.iter_applicable()), + ) + raise BestVersionAlreadyInstalled + + logger.debug( + 'Using version %s (newest of versions: %s)', + best_candidate.version, + _format_versions(best_candidate_result.iter_applicable()), + ) + return best_candidate + + +def _find_name_version_sep(fragment, canonical_name): + # type: (str, str) -> int + """Find the separator's index based on the package's canonical name. + + :param fragment: A + filename "fragment" (stem) or + egg fragment. + :param canonical_name: The package's canonical name. + + This function is needed since the canonicalized name does not necessarily + have the same length as the egg info's name part. 
An example:: + + >>> fragment = 'foo__bar-1.0' + >>> canonical_name = 'foo-bar' + >>> _find_name_version_sep(fragment, canonical_name) + 8 + """ + # Project name and version must be separated by one single dash. Find all + # occurrences of dashes; if the string in front of it matches the canonical + # name, this is the one separating the name and version parts. + for i, c in enumerate(fragment): + if c != "-": + continue + if canonicalize_name(fragment[:i]) == canonical_name: + return i + raise ValueError("{} does not match {}".format(fragment, canonical_name)) + + +def _extract_version_from_fragment(fragment, canonical_name): + # type: (str, str) -> Optional[str] + """Parse the version string from a + filename + "fragment" (stem) or egg fragment. + + :param fragment: The string to parse. E.g. foo-2.1 + :param canonical_name: The canonicalized name of the package this + belongs to. + """ + try: + version_start = _find_name_version_sep(fragment, canonical_name) + 1 + except ValueError: + return None + version = fragment[version_start:] + if not version: + return None + return version diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/locations.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/locations.py index c6e2a3e4..0c123548 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/locations.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/locations.py @@ -1,4 +1,8 @@ """Locations where we look for configs, install stuff, etc""" + +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False + from __future__ import absolute_import import os @@ -9,84 +13,51 @@ import sys import sysconfig from distutils import sysconfig as distutils_sysconfig from distutils.command.install import SCHEME_KEYS # type: ignore +from distutils.command.install import install as distutils_install_command +from pip._internal.models.scheme import Scheme from pip._internal.utils import appdirs -from pip._internal.utils.compat import WINDOWS, expanduser -from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.typing import MYPY_CHECK_RUNNING, cast +from pip._internal.utils.virtualenv import running_under_virtualenv if MYPY_CHECK_RUNNING: - from typing import Any, Union, Dict, List, Optional # noqa: F401 + from typing import Dict, List, Optional, Union + + from distutils.cmd import Command as DistutilsCommand # Application Directories USER_CACHE_DIR = appdirs.user_cache_dir("pip") -DELETE_MARKER_MESSAGE = '''\ -This file is placed here by pip to indicate the source was put -here by pip. - -Once this package is successfully installed this source code will be -deleted (unless you remove this file). -''' -PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt' - - -def write_delete_marker_file(directory): - # type: (str) -> None +def get_major_minor_version(): + # type: () -> str """ - Write the pip delete marker file into this directory. - """ - filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME) - with open(filepath, 'w') as marker_fp: - marker_fp.write(DELETE_MARKER_MESSAGE) - - -def running_under_virtualenv(): - # type: () -> bool - """ - Return True if we're running inside a virtualenv, False otherwise. - + Return the major-minor version of the current Python as a string, e.g. + "3.7" or "3.10". 
""" - if hasattr(sys, 'real_prefix'): - return True - elif sys.prefix != getattr(sys, "base_prefix", sys.prefix): - return True + return '{}.{}'.format(*sys.version_info) - return False - -def virtualenv_no_global(): - # type: () -> bool - """ - Return True if in a venv and no system site packages. - """ - # this mirrors the logic in virtualenv.py for locating the - # no-global-site-packages.txt file - site_mod_dir = os.path.dirname(os.path.abspath(site.__file__)) - no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt') - if running_under_virtualenv() and os.path.isfile(no_global_file): - return True +def get_src_prefix(): + # type: () -> str + if running_under_virtualenv(): + src_prefix = os.path.join(sys.prefix, 'src') else: - return False - + # FIXME: keep src in cwd for now (it is not a temporary folder) + try: + src_prefix = os.path.join(os.getcwd(), 'src') + except OSError: + # In case the current working directory has been renamed or deleted + sys.exit( + "The folder you are executing pip from can no longer be found." + ) -if running_under_virtualenv(): - src_prefix = os.path.join(sys.prefix, 'src') -else: - # FIXME: keep src in cwd for now (it is not a temporary folder) - try: - src_prefix = os.path.join(os.getcwd(), 'src') - except OSError: - # In case the current working directory has been renamed or deleted - sys.exit( - "The folder you are executing pip from can no longer be found." - ) + # under macOS + virtualenv sys.prefix is not properly resolved + # it is something like /path/to/python/bin/.. + return os.path.abspath(src_prefix) -# under macOS + virtualenv sys.prefix is not properly resolved -# it is something like /path/to/python/bin/.. 
-# Note: using realpath due to tmp dirs on OSX being symlinks -src_prefix = os.path.abspath(src_prefix) # FIXME doesn't account for venv linked to global site-packages @@ -103,7 +74,7 @@ try: user_site = site.getusersitepackages() except AttributeError: user_site = site.USER_SITE -user_dir = expanduser('~') + if WINDOWS: bin_py = os.path.join(sys.prefix, 'Scripts') bin_user = os.path.join(user_site, 'Scripts') @@ -111,73 +82,49 @@ if WINDOWS: if not os.path.exists(bin_py): bin_py = os.path.join(sys.prefix, 'bin') bin_user = os.path.join(user_site, 'bin') - - config_basename = 'pip.ini' - - legacy_storage_dir = os.path.join(user_dir, 'pip') - legacy_config_file = os.path.join( - legacy_storage_dir, - config_basename, - ) else: bin_py = os.path.join(sys.prefix, 'bin') bin_user = os.path.join(user_site, 'bin') - config_basename = 'pip.conf' - - legacy_storage_dir = os.path.join(user_dir, '.pip') - legacy_config_file = os.path.join( - legacy_storage_dir, - config_basename, - ) # Forcing to use /usr/local/bin for standard macOS framework installs # Also log to ~/Library/Logs/ for use with the Console.app log viewer if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/': bin_py = '/usr/local/bin' -site_config_files = [ - os.path.join(path, config_basename) - for path in appdirs.site_config_dirs('pip') -] - -venv_config_file = os.path.join(sys.prefix, config_basename) -new_config_file = os.path.join(appdirs.user_config_dir("pip"), config_basename) - -def distutils_scheme(dist_name, user=False, home=None, root=None, - isolated=False, prefix=None): - # type:(str, bool, str, str, bool, str) -> dict +def distutils_scheme( + dist_name, user=False, home=None, root=None, isolated=False, prefix=None +): + # type:(str, bool, str, str, bool, str) -> Dict[str, str] """ Return a distutils install scheme """ from distutils.dist import Distribution - scheme = {} - - if isolated: - extra_dist_args = {"script_args": ["--no-user-cfg"]} - else: - extra_dist_args = {} 
dist_args = {'name': dist_name} # type: Dict[str, Union[str, List[str]]] - dist_args.update(extra_dist_args) + if isolated: + dist_args["script_args"] = ["--no-user-cfg"] d = Distribution(dist_args) - # Ignoring, typeshed issue reported python/typeshed/issues/2567 d.parse_config_files() - # NOTE: Ignoring type since mypy can't find attributes on 'Command' - i = d.get_command_obj('install', create=True) # type: Any - assert i is not None + obj = None # type: Optional[DistutilsCommand] + obj = d.get_command_obj('install', create=True) + assert obj is not None + i = cast(distutils_install_command, obj) # NOTE: setting user or home has the side-effect of creating the home dir # or user base for installations during finalize_options() # ideally, we'd prefer a scheme class that has no side-effects. assert not (user and prefix), "user={} prefix={}".format(user, prefix) + assert not (home and prefix), "home={} prefix={}".format(home, prefix) i.user = user or i.user - if user: + if user or home: i.prefix = "" i.prefix = prefix or i.prefix i.home = home or i.home i.root = root or i.root i.finalize_options() + + scheme = {} for key in SCHEME_KEYS: scheme[key] = getattr(i, 'install_' + key) @@ -186,17 +133,15 @@ def distutils_scheme(dist_name, user=False, home=None, root=None, # platlib). 
Note, i.install_lib is *always* set after # finalize_options(); we only want to override here if the user # has explicitly requested it hence going back to the config - - # Ignoring, typeshed issue reported python/typeshed/issues/2567 - if 'install_lib' in d.get_option_dict('install'): # type: ignore + if 'install_lib' in d.get_option_dict('install'): scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib)) if running_under_virtualenv(): scheme['headers'] = os.path.join( - sys.prefix, + i.prefix, 'include', 'site', - 'python' + sys.version[:3], + 'python{}'.format(get_major_minor_version()), dist_name, ) @@ -209,3 +154,41 @@ def distutils_scheme(dist_name, user=False, home=None, root=None, ) return scheme + + +def get_scheme( + dist_name, # type: str + user=False, # type: bool + home=None, # type: Optional[str] + root=None, # type: Optional[str] + isolated=False, # type: bool + prefix=None, # type: Optional[str] +): + # type: (...) -> Scheme + """ + Get the "scheme" corresponding to the input parameters. The distutils + documentation provides the context for the available schemes: + https://docs.python.org/3/install/index.html#alternate-installation + + :param dist_name: the name of the package to retrieve the scheme for, used + in the headers scheme path + :param user: indicates to use the "user" scheme + :param home: indicates to use the "home" scheme and provides the base + directory for the same + :param root: root under which other directories are re-based + :param isolated: equivalent to --no-user-cfg, i.e. 
do not consider + ~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for + scheme paths + :param prefix: indicates to use the "prefix" scheme and provides the + base directory for the same + """ + scheme = distutils_scheme( + dist_name, user, home, root, isolated, prefix + ) + return Scheme( + platlib=scheme["platlib"], + purelib=scheme["purelib"], + headers=scheme["headers"], + scripts=scheme["scripts"], + data=scheme["data"], + ) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/main.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/main.py new file mode 100644 index 00000000..3208d5b8 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/main.py @@ -0,0 +1,16 @@ +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional, List + + +def main(args=None): + # type: (Optional[List[str]]) -> int + """This is preserved for old console scripts that may still be referencing + it. + + For additional details, see https://github.com/pypa/pip/issues/7498. 
+ """ + from pip._internal.utils.entrypoints import _wrapper + + return _wrapper(args) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/candidate.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/candidate.py index 4475458a..9149e0fc 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/candidate.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/candidate.py @@ -4,28 +4,35 @@ from pip._internal.utils.models import KeyBasedCompareMixin from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from pip._vendor.packaging.version import _BaseVersion # noqa: F401 - from pip._internal.models.link import Link # noqa: F401 - from typing import Any, Union # noqa: F401 + from pip._vendor.packaging.version import _BaseVersion + from pip._internal.models.link import Link class InstallationCandidate(KeyBasedCompareMixin): """Represents a potential "candidate" for installation. 
""" - def __init__(self, project, version, location): - # type: (Any, str, Link) -> None - self.project = project + __slots__ = ["name", "version", "link"] + + def __init__(self, name, version, link): + # type: (str, str, Link) -> None + self.name = name self.version = parse_version(version) # type: _BaseVersion - self.location = location + self.link = link super(InstallationCandidate, self).__init__( - key=(self.project, self.version, self.location), + key=(self.name, self.version, self.link), defining_class=InstallationCandidate ) def __repr__(self): # type: () -> str return "".format( - self.project, self.version, self.location, + self.name, self.version, self.link, + ) + + def __str__(self): + # type: () -> str + return '{!r} candidate (version {} at {})'.format( + self.name, self.version, self.link, ) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/direct_url.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/direct_url.py new file mode 100644 index 00000000..87bd9fe4 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/direct_url.py @@ -0,0 +1,245 @@ +""" PEP 610 """ +import json +import re + +from pip._vendor import six +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import ( + Any, Dict, Iterable, Optional, Type, TypeVar, Union + ) + + T = TypeVar("T") + + +DIRECT_URL_METADATA_NAME = "direct_url.json" +ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$") + +__all__ = [ + "DirectUrl", + "DirectUrlValidationError", + "DirInfo", + "ArchiveInfo", + "VcsInfo", +] + + +class DirectUrlValidationError(Exception): + pass + + +def _get(d, expected_type, key, default=None): + # type: (Dict[str, Any], Type[T], str, Optional[T]) -> Optional[T] + """Get value from dictionary and verify expected type.""" + if key not in d: + return 
default + value = d[key] + if six.PY2 and expected_type is str: + expected_type = six.string_types # type: ignore + if not isinstance(value, expected_type): + raise DirectUrlValidationError( + "{!r} has unexpected type for {} (expected {})".format( + value, key, expected_type + ) + ) + return value + + +def _get_required(d, expected_type, key, default=None): + # type: (Dict[str, Any], Type[T], str, Optional[T]) -> T + value = _get(d, expected_type, key, default) + if value is None: + raise DirectUrlValidationError("{} must have a value".format(key)) + return value + + +def _exactly_one_of(infos): + # type: (Iterable[Optional[InfoType]]) -> InfoType + infos = [info for info in infos if info is not None] + if not infos: + raise DirectUrlValidationError( + "missing one of archive_info, dir_info, vcs_info" + ) + if len(infos) > 1: + raise DirectUrlValidationError( + "more than one of archive_info, dir_info, vcs_info" + ) + assert infos[0] is not None + return infos[0] + + +def _filter_none(**kwargs): + # type: (Any) -> Dict[str, Any] + """Make dict excluding None values.""" + return {k: v for k, v in kwargs.items() if v is not None} + + +class VcsInfo(object): + name = "vcs_info" + + def __init__( + self, + vcs, # type: str + commit_id, # type: str + requested_revision=None, # type: Optional[str] + resolved_revision=None, # type: Optional[str] + resolved_revision_type=None, # type: Optional[str] + ): + self.vcs = vcs + self.requested_revision = requested_revision + self.commit_id = commit_id + self.resolved_revision = resolved_revision + self.resolved_revision_type = resolved_revision_type + + @classmethod + def _from_dict(cls, d): + # type: (Optional[Dict[str, Any]]) -> Optional[VcsInfo] + if d is None: + return None + return cls( + vcs=_get_required(d, str, "vcs"), + commit_id=_get_required(d, str, "commit_id"), + requested_revision=_get(d, str, "requested_revision"), + resolved_revision=_get(d, str, "resolved_revision"), + resolved_revision_type=_get(d, str, 
"resolved_revision_type"), + ) + + def _to_dict(self): + # type: () -> Dict[str, Any] + return _filter_none( + vcs=self.vcs, + requested_revision=self.requested_revision, + commit_id=self.commit_id, + resolved_revision=self.resolved_revision, + resolved_revision_type=self.resolved_revision_type, + ) + + +class ArchiveInfo(object): + name = "archive_info" + + def __init__( + self, + hash=None, # type: Optional[str] + ): + self.hash = hash + + @classmethod + def _from_dict(cls, d): + # type: (Optional[Dict[str, Any]]) -> Optional[ArchiveInfo] + if d is None: + return None + return cls(hash=_get(d, str, "hash")) + + def _to_dict(self): + # type: () -> Dict[str, Any] + return _filter_none(hash=self.hash) + + +class DirInfo(object): + name = "dir_info" + + def __init__( + self, + editable=False, # type: bool + ): + self.editable = editable + + @classmethod + def _from_dict(cls, d): + # type: (Optional[Dict[str, Any]]) -> Optional[DirInfo] + if d is None: + return None + return cls( + editable=_get_required(d, bool, "editable", default=False) + ) + + def _to_dict(self): + # type: () -> Dict[str, Any] + return _filter_none(editable=self.editable or None) + + +if MYPY_CHECK_RUNNING: + InfoType = Union[ArchiveInfo, DirInfo, VcsInfo] + + +class DirectUrl(object): + + def __init__( + self, + url, # type: str + info, # type: InfoType + subdirectory=None, # type: Optional[str] + ): + self.url = url + self.info = info + self.subdirectory = subdirectory + + def _remove_auth_from_netloc(self, netloc): + # type: (str) -> str + if "@" not in netloc: + return netloc + user_pass, netloc_no_user_pass = netloc.split("@", 1) + if ( + isinstance(self.info, VcsInfo) and + self.info.vcs == "git" and + user_pass == "git" + ): + return netloc + if ENV_VAR_RE.match(user_pass): + return netloc + return netloc_no_user_pass + + @property + def redacted_url(self): + # type: () -> str + """url with user:password part removed unless it is formed with + environment variables as specified in PEP 610, 
or it is ``git`` + in the case of a git URL. + """ + purl = urllib_parse.urlsplit(self.url) + netloc = self._remove_auth_from_netloc(purl.netloc) + surl = urllib_parse.urlunsplit( + (purl.scheme, netloc, purl.path, purl.query, purl.fragment) + ) + return surl + + def validate(self): + # type: () -> None + self.from_dict(self.to_dict()) + + @classmethod + def from_dict(cls, d): + # type: (Dict[str, Any]) -> DirectUrl + return DirectUrl( + url=_get_required(d, str, "url"), + subdirectory=_get(d, str, "subdirectory"), + info=_exactly_one_of( + [ + ArchiveInfo._from_dict(_get(d, dict, "archive_info")), + DirInfo._from_dict(_get(d, dict, "dir_info")), + VcsInfo._from_dict(_get(d, dict, "vcs_info")), + ] + ), + ) + + def to_dict(self): + # type: () -> Dict[str, Any] + res = _filter_none( + url=self.redacted_url, + subdirectory=self.subdirectory, + ) + res[self.info.name] = self.info._to_dict() + return res + + @classmethod + def from_json(cls, s): + # type: (str) -> DirectUrl + return cls.from_dict(json.loads(s)) + + def to_json(self): + # type: () -> str + return json.dumps(self.to_dict(), sort_keys=True) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/format_control.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/format_control.py index 971a3914..c6275e72 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/format_control.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/format_control.py @@ -1,17 +1,20 @@ from pip._vendor.packaging.utils import canonicalize_name +from pip._internal.exceptions import CommandError from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Optional, Set, FrozenSet # noqa: F401 + from typing import Optional, Set, FrozenSet class FormatControl(object): """Helper for managing formats from which a package can be installed. 
""" + __slots__ = ["no_binary", "only_binary"] + def __init__(self, no_binary=None, only_binary=None): - # type: (Optional[Set], Optional[Set]) -> None + # type: (Optional[Set[str]], Optional[Set[str]]) -> None if no_binary is None: no_binary = set() if only_binary is None: @@ -21,12 +24,24 @@ class FormatControl(object): self.only_binary = only_binary def __eq__(self, other): - return self.__dict__ == other.__dict__ + # type: (object) -> bool + if not isinstance(other, self.__class__): + return NotImplemented + + if self.__slots__ != other.__slots__: + return False + + return all( + getattr(self, k) == getattr(other, k) + for k in self.__slots__ + ) def __ne__(self, other): + # type: (object) -> bool return not self.__eq__(other) def __repr__(self): + # type: () -> str return "{}({}, {})".format( self.__class__.__name__, self.no_binary, @@ -35,7 +50,11 @@ class FormatControl(object): @staticmethod def handle_mutual_excludes(value, target, other): - # type: (str, Optional[Set], Optional[Set]) -> None + # type: (str, Set[str], Set[str]) -> None + if value.startswith('-'): + raise CommandError( + "--no-binary / --only-binary option requires 1 argument." 
+ ) new = value.split(',') while ':all:' in new: other.clear() @@ -54,7 +73,7 @@ class FormatControl(object): target.add(name) def get_allowed_formats(self, canonical_name): - # type: (str) -> FrozenSet + # type: (str) -> FrozenSet[str] result = {"binary", "source"} if canonical_name in self.only_binary: result.discard('source') diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/index.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/index.py index ead1efbd..5b4a1fe2 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/index.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/index.py @@ -5,6 +5,9 @@ class PackageIndex(object): """Represents a Package Index and provides easier access to endpoints """ + __slots__ = ['url', 'netloc', 'simple_url', 'pypi_url', + 'file_storage_domain'] + def __init__(self, url, file_storage_domain): # type: (str, str) -> None super(PackageIndex, self).__init__() diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/link.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/link.py index ad2f93e1..c0d278ad 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/link.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/link.py @@ -1,87 +1,144 @@ +import os import posixpath import re from pip._vendor.six.moves.urllib import parse as urllib_parse -from pip._internal.download import path_to_url +from pip._internal.utils.filetypes import WHEEL_EXTENSION from pip._internal.utils.misc import ( - WHEEL_EXTENSION, redact_password_from_url, splitext, + redact_auth_from_url, + split_auth_from_netloc, + splitext, ) from pip._internal.utils.models import KeyBasedCompareMixin from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import path_to_url, url_to_path if 
MYPY_CHECK_RUNNING: - from typing import Optional, Tuple, Union, Text # noqa: F401 - from pip._internal.index import HTMLPage # noqa: F401 + from typing import Optional, Text, Tuple, Union + from pip._internal.index.collector import HTMLPage + from pip._internal.utils.hashes import Hashes class Link(KeyBasedCompareMixin): """Represents a parsed link from a Package Index's simple URL """ - def __init__(self, url, comes_from=None, requires_python=None): - # type: (str, Optional[Union[str, HTMLPage]], Optional[str]) -> None + __slots__ = [ + "_parsed_url", + "_url", + "comes_from", + "requires_python", + "yanked_reason", + "cache_link_parsing", + ] + + def __init__( + self, + url, # type: str + comes_from=None, # type: Optional[Union[str, HTMLPage]] + requires_python=None, # type: Optional[str] + yanked_reason=None, # type: Optional[Text] + cache_link_parsing=True, # type: bool + ): + # type: (...) -> None """ - url: - url of the resource pointed to (href of the link) - comes_from: - instance of HTMLPage where the link was found, or string. - requires_python: - String containing the `Requires-Python` metadata field, specified - in PEP 345. This may be specified by a data-requires-python - attribute in the HTML link tag, as described in PEP 503. + :param url: url of the resource pointed to (href of the link) + :param comes_from: instance of HTMLPage where the link was found, + or string. + :param requires_python: String containing the `Requires-Python` + metadata field, specified in PEP 345. This may be specified by + a data-requires-python attribute in the HTML link tag, as + described in PEP 503. + :param yanked_reason: the reason the file has been yanked, if the + file has been yanked, or None if the file hasn't been yanked. + This is the value of the "data-yanked" attribute, if present, in + a simple repository HTML link. If the file has been yanked but + no reason was provided, this should be the empty string. 
See + PEP 592 for more information and the specification. + :param cache_link_parsing: A flag that is used elsewhere to determine + whether resources retrieved from this link + should be cached. PyPI index urls should + generally have this set to False, for + example. """ # url can be a UNC windows share if url.startswith('\\\\'): url = path_to_url(url) - self.url = url + self._parsed_url = urllib_parse.urlsplit(url) + # Store the url as a private attribute to prevent accidentally + # trying to set a new value. + self._url = url + self.comes_from = comes_from self.requires_python = requires_python if requires_python else None + self.yanked_reason = yanked_reason + + super(Link, self).__init__(key=url, defining_class=Link) - super(Link, self).__init__( - key=(self.url), - defining_class=Link - ) + self.cache_link_parsing = cache_link_parsing def __str__(self): + # type: () -> str if self.requires_python: - rp = ' (requires-python:%s)' % self.requires_python + rp = ' (requires-python:{})'.format(self.requires_python) else: rp = '' if self.comes_from: - return '%s (from %s)%s' % (redact_password_from_url(self.url), - self.comes_from, rp) + return '{} (from {}){}'.format( + redact_auth_from_url(self._url), self.comes_from, rp) else: - return redact_password_from_url(str(self.url)) + return redact_auth_from_url(str(self._url)) def __repr__(self): - return '' % self + # type: () -> str + return ''.format(self) + + @property + def url(self): + # type: () -> str + return self._url @property def filename(self): # type: () -> str - _, netloc, path, _, _ = urllib_parse.urlsplit(self.url) - name = posixpath.basename(path.rstrip('/')) or netloc + path = self.path.rstrip('/') + name = posixpath.basename(path) + if not name: + # Make sure we don't leak auth information if the netloc + # includes a username and password. 
+ netloc, user_pass = split_auth_from_netloc(self.netloc) + return netloc + name = urllib_parse.unquote(name) - assert name, ('URL %r produced no filename' % self.url) + assert name, ( + 'URL {self._url!r} produced no filename'.format(**locals())) return name + @property + def file_path(self): + # type: () -> str + return url_to_path(self.url) + @property def scheme(self): # type: () -> str - return urllib_parse.urlsplit(self.url)[0] + return self._parsed_url.scheme @property def netloc(self): # type: () -> str - return urllib_parse.urlsplit(self.url)[1] + """ + This can contain auth information. + """ + return self._parsed_url.netloc @property def path(self): # type: () -> str - return urllib_parse.unquote(urllib_parse.urlsplit(self.url)[2]) + return urllib_parse.unquote(self._parsed_url.path) def splitext(self): # type: () -> Tuple[str, str] @@ -95,7 +152,7 @@ class Link(KeyBasedCompareMixin): @property def url_without_fragment(self): # type: () -> str - scheme, netloc, path, query, fragment = urllib_parse.urlsplit(self.url) + scheme, netloc, path, query, fragment = self._parsed_url return urllib_parse.urlunsplit((scheme, netloc, path, query, None)) _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)') @@ -103,7 +160,7 @@ class Link(KeyBasedCompareMixin): @property def egg_fragment(self): # type: () -> Optional[str] - match = self._egg_fragment_re.search(self.url) + match = self._egg_fragment_re.search(self._url) if not match: return None return match.group(1) @@ -113,7 +170,7 @@ class Link(KeyBasedCompareMixin): @property def subdirectory_fragment(self): # type: () -> Optional[str] - match = self._subdirectory_fragment_re.search(self.url) + match = self._subdirectory_fragment_re.search(self._url) if not match: return None return match.group(1) @@ -125,7 +182,7 @@ class Link(KeyBasedCompareMixin): @property def hash(self): # type: () -> Optional[str] - match = self._hash_re.search(self.url) + match = self._hash_re.search(self._url) if match: return match.group(2) 
return None @@ -133,15 +190,24 @@ class Link(KeyBasedCompareMixin): @property def hash_name(self): # type: () -> Optional[str] - match = self._hash_re.search(self.url) + match = self._hash_re.search(self._url) if match: return match.group(1) return None @property def show_url(self): - # type: () -> Optional[str] - return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0]) + # type: () -> str + return posixpath.basename(self._url.split('#', 1)[0].split('?', 1)[0]) + + @property + def is_file(self): + # type: () -> bool + return self.scheme == 'file' + + def is_existing_dir(self): + # type: () -> bool + return self.is_file and os.path.isdir(self.file_path) @property def is_wheel(self): @@ -149,15 +215,31 @@ class Link(KeyBasedCompareMixin): return self.ext == WHEEL_EXTENSION @property - def is_artifact(self): + def is_vcs(self): # type: () -> bool - """ - Determines if this points to an actual artifact (e.g. a tarball) or if - it points to an "abstract" thing like a path or a VCS location. - """ from pip._internal.vcs import vcs - if self.scheme in vcs.all_schemes: + return self.scheme in vcs.all_schemes + + @property + def is_yanked(self): + # type: () -> bool + return self.yanked_reason is not None + + @property + def has_hash(self): + # type: () -> bool + return self.hash_name is not None + + def is_hash_allowed(self, hashes): + # type: (Optional[Hashes]) -> bool + """ + Return True if the link has a hash and it is allowed. + """ + if hashes is None or not self.has_hash: return False + # Assert non-None so mypy knows self.hash_name and self.hash are str. 
+ assert self.hash_name is not None + assert self.hash is not None - return True + return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/scheme.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/scheme.py new file mode 100644 index 00000000..5040551e --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/scheme.py @@ -0,0 +1,31 @@ +""" +For types associated with installation schemes. + +For a general overview of available schemes and their context, see +https://docs.python.org/3/install/index.html#alternate-installation. +""" + + +SCHEME_KEYS = ['platlib', 'purelib', 'headers', 'scripts', 'data'] + + +class Scheme(object): + """A Scheme holds paths which are used as the base directories for + artifacts associated with a Python package. + """ + + __slots__ = SCHEME_KEYS + + def __init__( + self, + platlib, # type: str + purelib, # type: str + headers, # type: str + scripts, # type: str + data, # type: str + ): + self.platlib = platlib + self.purelib = purelib + self.headers = headers + self.scripts = scripts + self.data = data diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/search_scope.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/search_scope.py new file mode 100644 index 00000000..d732504e --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/search_scope.py @@ -0,0 +1,135 @@ +import itertools +import logging +import os +import posixpath + +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip._internal.models.index import PyPI +from pip._internal.utils.compat import has_tls +from pip._internal.utils.misc import normalize_path, redact_auth_from_url +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if 
MYPY_CHECK_RUNNING: + from typing import List + + +logger = logging.getLogger(__name__) + + +class SearchScope(object): + + """ + Encapsulates the locations that pip is configured to search. + """ + + __slots__ = ["find_links", "index_urls"] + + @classmethod + def create( + cls, + find_links, # type: List[str] + index_urls, # type: List[str] + ): + # type: (...) -> SearchScope + """ + Create a SearchScope object after normalizing the `find_links`. + """ + # Build find_links. If an argument starts with ~, it may be + # a local file relative to a home directory. So try normalizing + # it and if it exists, use the normalized version. + # This is deliberately conservative - it might be fine just to + # blindly normalize anything starting with a ~... + built_find_links = [] # type: List[str] + for link in find_links: + if link.startswith('~'): + new_link = normalize_path(link) + if os.path.exists(new_link): + link = new_link + built_find_links.append(link) + + # If we don't have TLS enabled, then WARN if anyplace we're looking + # relies on TLS. + if not has_tls(): + for link in itertools.chain(index_urls, built_find_links): + parsed = urllib_parse.urlparse(link) + if parsed.scheme == 'https': + logger.warning( + 'pip is configured with locations that require ' + 'TLS/SSL, however the ssl module in Python is not ' + 'available.' + ) + break + + return cls( + find_links=built_find_links, + index_urls=index_urls, + ) + + def __init__( + self, + find_links, # type: List[str] + index_urls, # type: List[str] + ): + # type: (...) 
-> None + self.find_links = find_links + self.index_urls = index_urls + + def get_formatted_locations(self): + # type: () -> str + lines = [] + redacted_index_urls = [] + if self.index_urls and self.index_urls != [PyPI.simple_url]: + for url in self.index_urls: + + redacted_index_url = redact_auth_from_url(url) + + # Parse the URL + purl = urllib_parse.urlsplit(redacted_index_url) + + # URL is generally invalid if scheme and netloc is missing + # there are issues with Python and URL parsing, so this test + # is a bit crude. See bpo-20271, bpo-23505. Python doesn't + # always parse invalid URLs correctly - it should raise + # exceptions for malformed URLs + if not purl.scheme and not purl.netloc: + logger.warning( + 'The index url "%s" seems invalid, ' + 'please provide a scheme.', redacted_index_url) + + redacted_index_urls.append(redacted_index_url) + + lines.append('Looking in indexes: {}'.format( + ', '.join(redacted_index_urls))) + + if self.find_links: + lines.append( + 'Looking in links: {}'.format(', '.join( + redact_auth_from_url(url) for url in self.find_links)) + ) + return '\n'.join(lines) + + def get_index_urls_locations(self, project_name): + # type: (str) -> List[str] + """Returns the locations found via self.index_urls + + Checks the url_name on the main (first in the list) index and + use this url_name to produce all locations + """ + + def mkurl_pypi_url(url): + # type: (str) -> str + loc = posixpath.join( + url, + urllib_parse.quote(canonicalize_name(project_name))) + # For maximum compatibility with easy_install, ensure the path + # ends in a trailing slash. Although this isn't in the spec + # (and PyPI can handle it without the slash) some other index + # implementations might break if they relied on easy_install's + # behavior. 
+ if not loc.endswith('/'): + loc = loc + '/' + return loc + + return [mkurl_pypi_url(url) for url in self.index_urls] diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/selection_prefs.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/selection_prefs.py new file mode 100644 index 00000000..5db3ca91 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/selection_prefs.py @@ -0,0 +1,49 @@ +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional + from pip._internal.models.format_control import FormatControl + + +class SelectionPreferences(object): + """ + Encapsulates the candidate selection preferences for downloading + and installing files. + """ + + __slots__ = ['allow_yanked', 'allow_all_prereleases', 'format_control', + 'prefer_binary', 'ignore_requires_python'] + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. + def __init__( + self, + allow_yanked, # type: bool + allow_all_prereleases=False, # type: bool + format_control=None, # type: Optional[FormatControl] + prefer_binary=False, # type: bool + ignore_requires_python=None, # type: Optional[bool] + ): + # type: (...) -> None + """Create a SelectionPreferences object. + + :param allow_yanked: Whether files marked as yanked (in the sense + of PEP 592) are permitted to be candidates for install. + :param format_control: A FormatControl object or None. Used to control + the selection of source packages / binary packages when consulting + the index and links. + :param prefer_binary: Whether to prefer an old, but valid, binary + dist over a new source dist. + :param ignore_requires_python: Whether to ignore incompatible + "Requires-Python" values in links. 
Defaults to False. + """ + if ignore_requires_python is None: + ignore_requires_python = False + + self.allow_yanked = allow_yanked + self.allow_all_prereleases = allow_all_prereleases + self.format_control = format_control + self.prefer_binary = prefer_binary + self.ignore_requires_python = ignore_requires_python diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/target_python.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/target_python.py new file mode 100644 index 00000000..6d1ca796 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/target_python.py @@ -0,0 +1,120 @@ +import sys + +from pip._internal.utils.compatibility_tags import ( + get_supported, + version_info_to_nodot, +) +from pip._internal.utils.misc import normalize_version_info +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional, Tuple + + from pip._vendor.packaging.tags import Tag + + +class TargetPython(object): + + """ + Encapsulates the properties of a Python interpreter one is targeting + for a package install, download, etc. + """ + + __slots__ = [ + "_given_py_version_info", + "abi", + "implementation", + "platform", + "py_version", + "py_version_info", + "_valid_tags", + ] + + def __init__( + self, + platform=None, # type: Optional[str] + py_version_info=None, # type: Optional[Tuple[int, ...]] + abi=None, # type: Optional[str] + implementation=None, # type: Optional[str] + ): + # type: (...) -> None + """ + :param platform: A string or None. If None, searches for packages + that are supported by the current system. Otherwise, will find + packages that can be built on the platform passed in. These + packages will only be downloaded for distribution: they will + not be built locally. + :param py_version_info: An optional tuple of ints representing the + Python version information to use (e.g. `sys.version_info[:3]`). 
+ This can have length 1, 2, or 3 when provided. + :param abi: A string or None. This is passed to compatibility_tags.py's + get_supported() function as is. + :param implementation: A string or None. This is passed to + compatibility_tags.py's get_supported() function as is. + """ + # Store the given py_version_info for when we call get_supported(). + self._given_py_version_info = py_version_info + + if py_version_info is None: + py_version_info = sys.version_info[:3] + else: + py_version_info = normalize_version_info(py_version_info) + + py_version = '.'.join(map(str, py_version_info[:2])) + + self.abi = abi + self.implementation = implementation + self.platform = platform + self.py_version = py_version + self.py_version_info = py_version_info + + # This is used to cache the return value of get_tags(). + self._valid_tags = None # type: Optional[List[Tag]] + + def format_given(self): + # type: () -> str + """ + Format the given, non-None attributes for display. + """ + display_version = None + if self._given_py_version_info is not None: + display_version = '.'.join( + str(part) for part in self._given_py_version_info + ) + + key_values = [ + ('platform', self.platform), + ('version_info', display_version), + ('abi', self.abi), + ('implementation', self.implementation), + ] + return ' '.join( + '{}={!r}'.format(key, value) for key, value in key_values + if value is not None + ) + + def get_tags(self): + # type: () -> List[Tag] + """ + Return the supported PEP 425 tags to check wheel candidates against. + + The tags are returned in order of preference (most preferred first). + """ + if self._valid_tags is None: + # Pass versions=None if no py_version_info was given since + # versions=None uses special default logic. 
+ py_version_info = self._given_py_version_info + if py_version_info is None: + version = None + else: + version = version_info_to_nodot(py_version_info) + + tags = get_supported( + version=version, + platform=self.platform, + abi=self.abi, + impl=self.implementation, + ) + self._valid_tags = tags + + return self._valid_tags diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/wheel.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/wheel.py new file mode 100644 index 00000000..4d4068f3 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/models/wheel.py @@ -0,0 +1,78 @@ +"""Represents a wheel file and provides access to the various parts of the +name that have meaning. +""" +import re + +from pip._vendor.packaging.tags import Tag + +from pip._internal.exceptions import InvalidWheelFilename +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List + + +class Wheel(object): + """A wheel file""" + + wheel_file_re = re.compile( + r"""^(?P(?P.+?)-(?P.*?)) + ((-(?P\d[^-]*?))?-(?P.+?)-(?P.+?)-(?P.+?) 
+ \.whl|\.dist-info)$""", + re.VERBOSE + ) + + def __init__(self, filename): + # type: (str) -> None + """ + :raises InvalidWheelFilename: when the filename is invalid for a wheel + """ + wheel_info = self.wheel_file_re.match(filename) + if not wheel_info: + raise InvalidWheelFilename( + "{} is not a valid wheel filename.".format(filename) + ) + self.filename = filename + self.name = wheel_info.group('name').replace('_', '-') + # we'll assume "_" means "-" due to wheel naming scheme + # (https://github.com/pypa/pip/issues/1150) + self.version = wheel_info.group('ver').replace('_', '-') + self.build_tag = wheel_info.group('build') + self.pyversions = wheel_info.group('pyver').split('.') + self.abis = wheel_info.group('abi').split('.') + self.plats = wheel_info.group('plat').split('.') + + # All the tag combinations from this file + self.file_tags = { + Tag(x, y, z) for x in self.pyversions + for y in self.abis for z in self.plats + } + + def get_formatted_file_tags(self): + # type: () -> List[str] + """Return the wheel's tags as a sorted list of strings.""" + return sorted(str(tag) for tag in self.file_tags) + + def support_index_min(self, tags): + # type: (List[Tag]) -> int + """Return the lowest index that one of the wheel's file_tag combinations + achieves in the given list of supported tags. + + For example, if there are 8 supported tags and one of the file tags + is first in the list, then return 0. + + :param tags: the PEP 425 tags to check the wheel against, in order + with most preferred first. + + :raises ValueError: If none of the wheel's file tags match one of + the supported tags. + """ + return min(tags.index(tag) for tag in self.file_tags if tag in tags) + + def supported(self, tags): + # type: (List[Tag]) -> bool + """Return whether the wheel is compatible with one of the given tags. + + :param tags: the PEP 425 tags to check the wheel against. 
+ """ + return not self.file_tags.isdisjoint(tags) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/__init__.py new file mode 100644 index 00000000..b51bde91 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/__init__.py @@ -0,0 +1,2 @@ +"""Contains purely network-related utilities. +""" diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/auth.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/auth.py new file mode 100644 index 00000000..ca729fcd --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/auth.py @@ -0,0 +1,308 @@ +"""Network Authentication Helpers + +Contains interface (MultiDomainBasicAuth) and associated glue code for +providing credentials in the context of network requests. +""" + +import logging + +from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth +from pip._vendor.requests.utils import get_netrc_auth +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip._internal.utils.misc import ( + ask, + ask_input, + ask_password, + remove_auth_from_url, + split_auth_netloc_from_url, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Dict, Optional, Tuple, List, Any + + from pip._internal.vcs.versioncontrol import AuthInfo + + from pip._vendor.requests.models import Response, Request + + Credentials = Tuple[str, str, str] + +logger = logging.getLogger(__name__) + +try: + import keyring # noqa +except ImportError: + keyring = None +except Exception as exc: + logger.warning( + "Keyring is skipped due to an exception: %s", str(exc), + ) + keyring = None + + +def get_keyring_auth(url, username): + # type: (str, str) -> Optional[AuthInfo] + """Return the tuple auth for a given url from keyring.""" + if not 
url or not keyring: + return None + + try: + try: + get_credential = keyring.get_credential + except AttributeError: + pass + else: + logger.debug("Getting credentials from keyring for %s", url) + cred = get_credential(url, username) + if cred is not None: + return cred.username, cred.password + return None + + if username: + logger.debug("Getting password from keyring for %s", url) + password = keyring.get_password(url, username) + if password: + return username, password + + except Exception as exc: + logger.warning( + "Keyring is skipped due to an exception: %s", str(exc), + ) + return None + + +class MultiDomainBasicAuth(AuthBase): + + def __init__(self, prompting=True, index_urls=None): + # type: (bool, Optional[List[str]]) -> None + self.prompting = prompting + self.index_urls = index_urls + self.passwords = {} # type: Dict[str, AuthInfo] + # When the user is prompted to enter credentials and keyring is + # available, we will offer to save them. If the user accepts, + # this value is set to the credentials they entered. After the + # request authenticates, the caller should call + # ``save_credentials`` to save these. + self._credentials_to_save = None # type: Optional[Credentials] + + def _get_index_url(self, url): + # type: (str) -> Optional[str] + """Return the original index URL matching the requested URL. + + Cached or dynamically generated credentials may work against + the original index URL rather than just the netloc. + + The provided url should have had its username and password + removed already. If the original index url had credentials then + they will be included in the return value. + + Returns None if no matching index was found, or if --no-index + was specified by the user. 
+ """ + if not url or not self.index_urls: + return None + + for u in self.index_urls: + prefix = remove_auth_from_url(u).rstrip("/") + "/" + if url.startswith(prefix): + return u + return None + + def _get_new_credentials(self, original_url, allow_netrc=True, + allow_keyring=True): + # type: (str, bool, bool) -> AuthInfo + """Find and return credentials for the specified URL.""" + # Split the credentials and netloc from the url. + url, netloc, url_user_password = split_auth_netloc_from_url( + original_url, + ) + + # Start with the credentials embedded in the url + username, password = url_user_password + if username is not None and password is not None: + logger.debug("Found credentials in url for %s", netloc) + return url_user_password + + # Find a matching index url for this request + index_url = self._get_index_url(url) + if index_url: + # Split the credentials from the url. + index_info = split_auth_netloc_from_url(index_url) + if index_info: + index_url, _, index_url_user_password = index_info + logger.debug("Found index url %s", index_url) + + # If an index URL was found, try its embedded credentials + if index_url and index_url_user_password[0] is not None: + username, password = index_url_user_password + if username is not None and password is not None: + logger.debug("Found credentials in index url for %s", netloc) + return index_url_user_password + + # Get creds from netrc if we still don't have them + if allow_netrc: + netrc_auth = get_netrc_auth(original_url) + if netrc_auth: + logger.debug("Found credentials in netrc for %s", netloc) + return netrc_auth + + # If we don't have a password and keyring is available, use it. 
+ if allow_keyring: + # The index url is more specific than the netloc, so try it first + kr_auth = ( + get_keyring_auth(index_url, username) or + get_keyring_auth(netloc, username) + ) + if kr_auth: + logger.debug("Found credentials in keyring for %s", netloc) + return kr_auth + + return username, password + + def _get_url_and_credentials(self, original_url): + # type: (str) -> Tuple[str, Optional[str], Optional[str]] + """Return the credentials to use for the provided URL. + + If allowed, netrc and keyring may be used to obtain the + correct credentials. + + Returns (url_without_credentials, username, password). Note + that even if the original URL contains credentials, this + function may return a different username and password. + """ + url, netloc, _ = split_auth_netloc_from_url(original_url) + + # Use any stored credentials that we have for this netloc + username, password = self.passwords.get(netloc, (None, None)) + + if username is None and password is None: + # No stored credentials. Acquire new credentials without prompting + # the user. (e.g. from netrc, keyring, or the URL itself) + username, password = self._get_new_credentials(original_url) + + if username is not None or password is not None: + # Convert the username and password if they're None, so that + # this netloc will show up as "cached" in the conditional above. + # Further, HTTPBasicAuth doesn't accept None, so it makes sense to + # cache the value that is going to be used. + username = username or "" + password = password or "" + + # Store any acquired credentials. 
+ self.passwords[netloc] = (username, password) + + assert ( + # Credentials were found + (username is not None and password is not None) or + # Credentials were not found + (username is None and password is None) + ), "Could not load credentials from url: {}".format(original_url) + + return url, username, password + + def __call__(self, req): + # type: (Request) -> Request + # Get credentials for this request + url, username, password = self._get_url_and_credentials(req.url) + + # Set the url of the request to the url without any credentials + req.url = url + + if username is not None and password is not None: + # Send the basic auth with this request + req = HTTPBasicAuth(username, password)(req) + + # Attach a hook to handle 401 responses + req.register_hook("response", self.handle_401) + + return req + + # Factored out to allow for easy patching in tests + def _prompt_for_password(self, netloc): + # type: (str) -> Tuple[Optional[str], Optional[str], bool] + username = ask_input("User for {}: ".format(netloc)) + if not username: + return None, None, False + auth = get_keyring_auth(netloc, username) + if auth and auth[0] is not None and auth[1] is not None: + return auth[0], auth[1], False + password = ask_password("Password: ") + return username, password, True + + # Factored out to allow for easy patching in tests + def _should_save_password_to_keyring(self): + # type: () -> bool + if not keyring: + return False + return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y" + + def handle_401(self, resp, **kwargs): + # type: (Response, **Any) -> Response + # We only care about 401 responses, anything else we want to just + # pass through the actual response + if resp.status_code != 401: + return resp + + # We are not able to prompt the user so simply return the response + if not self.prompting: + return resp + + parsed = urllib_parse.urlparse(resp.url) + + # Prompt the user for a new username and password + username, password, save = 
self._prompt_for_password(parsed.netloc) + + # Store the new username and password to use for future requests + self._credentials_to_save = None + if username is not None and password is not None: + self.passwords[parsed.netloc] = (username, password) + + # Prompt to save the password to keyring + if save and self._should_save_password_to_keyring(): + self._credentials_to_save = (parsed.netloc, username, password) + + # Consume content and release the original connection to allow our new + # request to reuse the same one. + resp.content + resp.raw.release_conn() + + # Add our new username and password to the request + req = HTTPBasicAuth(username or "", password or "")(resp.request) + req.register_hook("response", self.warn_on_401) + + # On successful request, save the credentials that were used to + # keyring. (Note that if the user responded "no" above, this member + # is not set and nothing will be saved.) + if self._credentials_to_save: + req.register_hook("response", self.save_credentials) + + # Send our new request + new_resp = resp.connection.send(req, **kwargs) + new_resp.history.append(resp) + + return new_resp + + def warn_on_401(self, resp, **kwargs): + # type: (Response, **Any) -> None + """Response callback to warn about incorrect credentials.""" + if resp.status_code == 401: + logger.warning( + '401 Error, Credentials not correct for %s', resp.request.url, + ) + + def save_credentials(self, resp, **kwargs): + # type: (Response, **Any) -> None + """Response callback to save credentials on success.""" + assert keyring is not None, "should never reach here without keyring" + if not keyring: + return + + creds = self._credentials_to_save + self._credentials_to_save = None + if creds and resp.status_code < 400: + try: + logger.info('Saving credentials to keyring') + keyring.set_password(*creds) + except Exception: + logger.exception('Failed to save credentials') diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/cache.py 
b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/cache.py new file mode 100644 index 00000000..a0d55b5e --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/cache.py @@ -0,0 +1,79 @@ +"""HTTP cache implementation. +""" + +import os +from contextlib import contextmanager + +from pip._vendor.cachecontrol.cache import BaseCache +from pip._vendor.cachecontrol.caches import FileCache +from pip._vendor.requests.models import Response + +from pip._internal.utils.filesystem import adjacent_tmp_file, replace +from pip._internal.utils.misc import ensure_dir +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional, Iterator + + +def is_from_cache(response): + # type: (Response) -> bool + return getattr(response, "from_cache", False) + + +@contextmanager +def suppressed_cache_errors(): + # type: () -> Iterator[None] + """If we can't access the cache then we can just skip caching and process + requests as if caching wasn't enabled. + """ + try: + yield + except (OSError, IOError): + pass + + +class SafeFileCache(BaseCache): + """ + A file based cache which is safe to use even when the target directory may + not be accessible or writable. + """ + + def __init__(self, directory): + # type: (str) -> None + assert directory is not None, "Cache directory must not be None." + super(SafeFileCache, self).__init__() + self.directory = directory + + def _get_cache_path(self, name): + # type: (str) -> str + # From cachecontrol.caches.file_cache.FileCache._fn, brought into our + # class for backwards-compatibility and to avoid using a non-public + # method. 
+ hashed = FileCache.encode(name) + parts = list(hashed[:5]) + [hashed] + return os.path.join(self.directory, *parts) + + def get(self, key): + # type: (str) -> Optional[bytes] + path = self._get_cache_path(key) + with suppressed_cache_errors(): + with open(path, 'rb') as f: + return f.read() + + def set(self, key, value): + # type: (str, bytes) -> None + path = self._get_cache_path(key) + with suppressed_cache_errors(): + ensure_dir(os.path.dirname(path)) + + with adjacent_tmp_file(path) as f: + f.write(value) + + replace(f.name, path) + + def delete(self, key): + # type: (str) -> None + path = self._get_cache_path(key) + with suppressed_cache_errors(): + os.remove(path) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/download.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/download.py new file mode 100644 index 00000000..44f9985a --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/download.py @@ -0,0 +1,182 @@ +"""Download files with progress indicators. 
+""" +import cgi +import logging +import mimetypes +import os + +from pip._vendor.requests.models import CONTENT_CHUNK_SIZE + +from pip._internal.cli.progress_bars import DownloadProgressProvider +from pip._internal.exceptions import NetworkConnectionError +from pip._internal.models.index import PyPI +from pip._internal.network.cache import is_from_cache +from pip._internal.network.utils import ( + HEADERS, + raise_for_status, + response_chunks, +) +from pip._internal.utils.misc import ( + format_size, + redact_auth_from_url, + splitext, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Iterable, Optional + + from pip._vendor.requests.models import Response + + from pip._internal.models.link import Link + from pip._internal.network.session import PipSession + +logger = logging.getLogger(__name__) + + +def _get_http_response_size(resp): + # type: (Response) -> Optional[int] + try: + return int(resp.headers['content-length']) + except (ValueError, KeyError, TypeError): + return None + + +def _prepare_download( + resp, # type: Response + link, # type: Link + progress_bar # type: str +): + # type: (...) 
-> Iterable[bytes] + total_length = _get_http_response_size(resp) + + if link.netloc == PyPI.file_storage_domain: + url = link.show_url + else: + url = link.url_without_fragment + + logged_url = redact_auth_from_url(url) + + if total_length: + logged_url = '{} ({})'.format(logged_url, format_size(total_length)) + + if is_from_cache(resp): + logger.info("Using cached %s", logged_url) + else: + logger.info("Downloading %s", logged_url) + + if logger.getEffectiveLevel() > logging.INFO: + show_progress = False + elif is_from_cache(resp): + show_progress = False + elif not total_length: + show_progress = True + elif total_length > (40 * 1000): + show_progress = True + else: + show_progress = False + + chunks = response_chunks(resp, CONTENT_CHUNK_SIZE) + + if not show_progress: + return chunks + + return DownloadProgressProvider( + progress_bar, max=total_length + )(chunks) + + +def sanitize_content_filename(filename): + # type: (str) -> str + """ + Sanitize the "filename" value from a Content-Disposition header. + """ + return os.path.basename(filename) + + +def parse_content_disposition(content_disposition, default_filename): + # type: (str, str) -> str + """ + Parse the "filename" value from a Content-Disposition header, and + return the default filename if the result is empty. + """ + _type, params = cgi.parse_header(content_disposition) + filename = params.get('filename') + if filename: + # We need to sanitize the filename to prevent directory traversal + # in case the filename contains ".." path parts. + filename = sanitize_content_filename(filename) + return filename or default_filename + + +def _get_http_response_filename(resp, link): + # type: (Response, Link) -> str + """Get an ideal filename from the given HTTP response, falling back to + the link filename if not provided. 
+ """ + filename = link.filename # fallback + # Have a look at the Content-Disposition header for a better guess + content_disposition = resp.headers.get('content-disposition') + if content_disposition: + filename = parse_content_disposition(content_disposition, filename) + ext = splitext(filename)[1] # type: Optional[str] + if not ext: + ext = mimetypes.guess_extension( + resp.headers.get('content-type', '') + ) + if ext: + filename += ext + if not ext and link.url != resp.url: + ext = os.path.splitext(resp.url)[1] + if ext: + filename += ext + return filename + + +def _http_get_download(session, link): + # type: (PipSession, Link) -> Response + target_url = link.url.split('#', 1)[0] + resp = session.get(target_url, headers=HEADERS, stream=True) + raise_for_status(resp) + return resp + + +class Download(object): + def __init__( + self, + response, # type: Response + filename, # type: str + chunks, # type: Iterable[bytes] + ): + # type: (...) -> None + self.response = response + self.filename = filename + self.chunks = chunks + + +class Downloader(object): + def __init__( + self, + session, # type: PipSession + progress_bar, # type: str + ): + # type: (...) 
-> None + self._session = session + self._progress_bar = progress_bar + + def __call__(self, link): + # type: (Link) -> Download + try: + resp = _http_get_download(self._session, link) + except NetworkConnectionError as e: + assert e.response is not None + logger.critical( + "HTTP error %s while getting %s", e.response.status_code, link + ) + raise + + return Download( + resp, + _get_http_response_filename(resp, link), + _prepare_download(resp, link, self._progress_bar), + ) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/lazy_wheel.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/lazy_wheel.py new file mode 100644 index 00000000..c2371bf5 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/lazy_wheel.py @@ -0,0 +1,231 @@ +"""Lazy ZIP over HTTP""" + +__all__ = ['HTTPRangeRequestUnsupported', 'dist_from_wheel_url'] + +from bisect import bisect_left, bisect_right +from contextlib import contextmanager +from tempfile import NamedTemporaryFile +from zipfile import BadZipfile, ZipFile + +from pip._vendor.requests.models import CONTENT_CHUNK_SIZE +from pip._vendor.six.moves import range + +from pip._internal.network.utils import ( + HEADERS, + raise_for_status, + response_chunks, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel + +if MYPY_CHECK_RUNNING: + from typing import Any, Dict, Iterator, List, Optional, Tuple + + from pip._vendor.pkg_resources import Distribution + from pip._vendor.requests.models import Response + + from pip._internal.network.session import PipSession + + +class HTTPRangeRequestUnsupported(Exception): + pass + + +def dist_from_wheel_url(name, url, session): + # type: (str, str, PipSession) -> Distribution + """Return a pkg_resources.Distribution from the given wheel URL. 
+ + This uses HTTP range requests to only fetch the portion of the wheel + containing metadata, just enough for the object to be constructed. + If such requests are not supported, HTTPRangeRequestUnsupported + is raised. + """ + with LazyZipOverHTTP(url, session) as wheel: + # For read-only ZIP files, ZipFile only needs methods read, + # seek, seekable and tell, not the whole IO protocol. + zip_file = ZipFile(wheel) # type: ignore + # After context manager exit, wheel.name + # is an invalid file by intention. + return pkg_resources_distribution_for_wheel(zip_file, name, wheel.name) + + +class LazyZipOverHTTP(object): + """File-like object mapped to a ZIP file over HTTP. + + This uses HTTP range requests to lazily fetch the file's content, + which is supposed to be fed to ZipFile. If such requests are not + supported by the server, raise HTTPRangeRequestUnsupported + during initialization. + """ + + def __init__(self, url, session, chunk_size=CONTENT_CHUNK_SIZE): + # type: (str, PipSession, int) -> None + head = session.head(url, headers=HEADERS) + raise_for_status(head) + assert head.status_code == 200 + self._session, self._url, self._chunk_size = session, url, chunk_size + self._length = int(head.headers['Content-Length']) + self._file = NamedTemporaryFile() + self.truncate(self._length) + self._left = [] # type: List[int] + self._right = [] # type: List[int] + if 'bytes' not in head.headers.get('Accept-Ranges', 'none'): + raise HTTPRangeRequestUnsupported('range request is not supported') + self._check_zip() + + @property + def mode(self): + # type: () -> str + """Opening mode, which is always rb.""" + return 'rb' + + @property + def name(self): + # type: () -> str + """Path to the underlying file.""" + return self._file.name + + def seekable(self): + # type: () -> bool + """Return whether random access is supported, which is True.""" + return True + + def close(self): + # type: () -> None + """Close the file.""" + self._file.close() + + @property + def 
closed(self): + # type: () -> bool + """Whether the file is closed.""" + return self._file.closed + + def read(self, size=-1): + # type: (int) -> bytes + """Read up to size bytes from the object and return them. + + As a convenience, if size is unspecified or -1, + all bytes until EOF are returned. Fewer than + size bytes may be returned if EOF is reached. + """ + start, length = self.tell(), self._length + stop = start + size if 0 <= size <= length-start else length + self._download(start, stop-1) + return self._file.read(size) + + def readable(self): + # type: () -> bool + """Return whether the file is readable, which is True.""" + return True + + def seek(self, offset, whence=0): + # type: (int, int) -> int + """Change stream position and return the new absolute position. + + Seek to offset relative to the position indicated by whence: + * 0: Start of stream (the default). pos should be >= 0; + * 1: Current position - pos may be negative; + * 2: End of stream - pos usually negative. + """ + return self._file.seek(offset, whence) + + def tell(self): + # type: () -> int + """Return the current position.""" + return self._file.tell() + + def truncate(self, size=None): + # type: (Optional[int]) -> int + """Resize the stream to the given size in bytes. + + If size is unspecified resize to the current position. + The current stream position isn't changed. + + Return the new file size. + """ + return self._file.truncate(size) + + def writable(self): + # type: () -> bool + """Return False.""" + return False + + def __enter__(self): + # type: () -> LazyZipOverHTTP + self._file.__enter__() + return self + + def __exit__(self, *exc): + # type: (*Any) -> Optional[bool] + return self._file.__exit__(*exc) + + @contextmanager + def _stay(self): + # type: ()-> Iterator[None] + """Return a context manager keeping the position. + + At the end of the block, seek back to original position. 
+ """ + pos = self.tell() + try: + yield + finally: + self.seek(pos) + + def _check_zip(self): + # type: () -> None + """Check and download until the file is a valid ZIP.""" + end = self._length - 1 + for start in reversed(range(0, end, self._chunk_size)): + self._download(start, end) + with self._stay(): + try: + # For read-only ZIP files, ZipFile only needs + # methods read, seek, seekable and tell. + ZipFile(self) # type: ignore + except BadZipfile: + pass + else: + break + + def _stream_response(self, start, end, base_headers=HEADERS): + # type: (int, int, Dict[str, str]) -> Response + """Return HTTP response to a range request from start to end.""" + headers = {'Range': 'bytes={}-{}'.format(start, end)} + headers.update(base_headers) + return self._session.get(self._url, headers=headers, stream=True) + + def _merge(self, start, end, left, right): + # type: (int, int, int, int) -> Iterator[Tuple[int, int]] + """Return an iterator of intervals to be fetched. + + Args: + start (int): Start of needed interval + end (int): End of needed interval + left (int): Index of first overlapping downloaded data + right (int): Index after last overlapping downloaded data + """ + lslice, rslice = self._left[left:right], self._right[left:right] + i = start = min([start]+lslice[:1]) + end = max([end]+rslice[-1:]) + for j, k in zip(lslice, rslice): + if j > i: + yield i, j-1 + i = k + 1 + if i <= end: + yield i, end + self._left[left:right], self._right[left:right] = [start], [end] + + def _download(self, start, end): + # type: (int, int) -> None + """Download bytes from start to end inclusively.""" + with self._stay(): + left = bisect_left(self._right, start) + right = bisect_right(self._left, end) + for start, end in self._merge(start, end, left, right): + response = self._stream_response(start, end) + response.raise_for_status() + self.seek(start) + for chunk in response_chunks(response, self._chunk_size): + self._file.write(chunk) diff --git 
a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/session.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/session.py new file mode 100644 index 00000000..39a4a546 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/session.py @@ -0,0 +1,421 @@ +"""PipSession and supporting code, containing all pip-specific +network request configuration and behavior. +""" + +# The following comment should be removed at some point in the future. +# mypy: disallow-untyped-defs=False + +import email.utils +import json +import logging +import mimetypes +import os +import platform +import sys +import warnings + +from pip._vendor import requests, six, urllib3 +from pip._vendor.cachecontrol import CacheControlAdapter +from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter +from pip._vendor.requests.models import Response +from pip._vendor.requests.structures import CaseInsensitiveDict +from pip._vendor.six.moves.urllib import parse as urllib_parse +from pip._vendor.urllib3.exceptions import InsecureRequestWarning + +from pip import __version__ +from pip._internal.network.auth import MultiDomainBasicAuth +from pip._internal.network.cache import SafeFileCache +# Import ssl from compat so the initial import occurs in only one place. +from pip._internal.utils.compat import has_tls, ipaddress +from pip._internal.utils.glibc import libc_ver +from pip._internal.utils.misc import ( + build_url_from_netloc, + get_installed_version, + parse_netloc, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import url_to_path + +if MYPY_CHECK_RUNNING: + from typing import ( + Iterator, List, Optional, Tuple, Union, + ) + + from pip._internal.models.link import Link + + SecureOrigin = Tuple[str, str, Optional[Union[int, str]]] + + +logger = logging.getLogger(__name__) + + +# Ignore warning raised when using --trusted-host. 
+warnings.filterwarnings("ignore", category=InsecureRequestWarning) + + +SECURE_ORIGINS = [ + # protocol, hostname, port + # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC) + ("https", "*", "*"), + ("*", "localhost", "*"), + ("*", "127.0.0.0/8", "*"), + ("*", "::1/128", "*"), + ("file", "*", None), + # ssh is always secure. + ("ssh", "*", "*"), +] # type: List[SecureOrigin] + + +# These are environment variables present when running under various +# CI systems. For each variable, some CI systems that use the variable +# are indicated. The collection was chosen so that for each of a number +# of popular systems, at least one of the environment variables is used. +# This list is used to provide some indication of and lower bound for +# CI traffic to PyPI. Thus, it is okay if the list is not comprehensive. +# For more background, see: https://github.com/pypa/pip/issues/5499 +CI_ENVIRONMENT_VARIABLES = ( + # Azure Pipelines + 'BUILD_BUILDID', + # Jenkins + 'BUILD_ID', + # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI + 'CI', + # Explicit environment variable. + 'PIP_IS_CI', +) + + +def looks_like_ci(): + # type: () -> bool + """ + Return whether it looks like pip is running under CI. + """ + # We don't use the method of checking for a tty (e.g. using isatty()) + # because some CI systems mimic a tty (e.g. Travis CI). Thus that + # method doesn't provide definitive information in either direction. + return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES) + + +def user_agent(): + """ + Return a string representing the user agent. 
+ """ + data = { + "installer": {"name": "pip", "version": __version__}, + "python": platform.python_version(), + "implementation": { + "name": platform.python_implementation(), + }, + } + + if data["implementation"]["name"] == 'CPython': + data["implementation"]["version"] = platform.python_version() + elif data["implementation"]["name"] == 'PyPy': + if sys.pypy_version_info.releaselevel == 'final': + pypy_version_info = sys.pypy_version_info[:3] + else: + pypy_version_info = sys.pypy_version_info + data["implementation"]["version"] = ".".join( + [str(x) for x in pypy_version_info] + ) + elif data["implementation"]["name"] == 'Jython': + # Complete Guess + data["implementation"]["version"] = platform.python_version() + elif data["implementation"]["name"] == 'IronPython': + # Complete Guess + data["implementation"]["version"] = platform.python_version() + + if sys.platform.startswith("linux"): + from pip._vendor import distro + distro_infos = dict(filter( + lambda x: x[1], + zip(["name", "version", "id"], distro.linux_distribution()), + )) + libc = dict(filter( + lambda x: x[1], + zip(["lib", "version"], libc_ver()), + )) + if libc: + distro_infos["libc"] = libc + if distro_infos: + data["distro"] = distro_infos + + if sys.platform.startswith("darwin") and platform.mac_ver()[0]: + data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]} + + if platform.system(): + data.setdefault("system", {})["name"] = platform.system() + + if platform.release(): + data.setdefault("system", {})["release"] = platform.release() + + if platform.machine(): + data["cpu"] = platform.machine() + + if has_tls(): + import _ssl as ssl + data["openssl_version"] = ssl.OPENSSL_VERSION + + setuptools_version = get_installed_version("setuptools") + if setuptools_version is not None: + data["setuptools_version"] = setuptools_version + + # Use None rather than False so as not to give the impression that + # pip knows it is not being run under CI. 
Rather, it is a null or + # inconclusive result. Also, we include some value rather than no + # value to make it easier to know that the check has been run. + data["ci"] = True if looks_like_ci() else None + + user_data = os.environ.get("PIP_USER_AGENT_USER_DATA") + if user_data is not None: + data["user_data"] = user_data + + return "{data[installer][name]}/{data[installer][version]} {json}".format( + data=data, + json=json.dumps(data, separators=(",", ":"), sort_keys=True), + ) + + +class LocalFSAdapter(BaseAdapter): + + def send(self, request, stream=None, timeout=None, verify=None, cert=None, + proxies=None): + pathname = url_to_path(request.url) + + resp = Response() + resp.status_code = 200 + resp.url = request.url + + try: + stats = os.stat(pathname) + except OSError as exc: + resp.status_code = 404 + resp.raw = exc + else: + modified = email.utils.formatdate(stats.st_mtime, usegmt=True) + content_type = mimetypes.guess_type(pathname)[0] or "text/plain" + resp.headers = CaseInsensitiveDict({ + "Content-Type": content_type, + "Content-Length": stats.st_size, + "Last-Modified": modified, + }) + + resp.raw = open(pathname, "rb") + resp.close = resp.raw.close + + return resp + + def close(self): + pass + + +class InsecureHTTPAdapter(HTTPAdapter): + + def cert_verify(self, conn, url, verify, cert): + super(InsecureHTTPAdapter, self).cert_verify( + conn=conn, url=url, verify=False, cert=cert + ) + + +class InsecureCacheControlAdapter(CacheControlAdapter): + + def cert_verify(self, conn, url, verify, cert): + super(InsecureCacheControlAdapter, self).cert_verify( + conn=conn, url=url, verify=False, cert=cert + ) + + +class PipSession(requests.Session): + + timeout = None # type: Optional[int] + + def __init__(self, *args, **kwargs): + """ + :param trusted_hosts: Domains not to emit warnings for when not using + HTTPS. 
+ """ + retries = kwargs.pop("retries", 0) + cache = kwargs.pop("cache", None) + trusted_hosts = kwargs.pop("trusted_hosts", []) # type: List[str] + index_urls = kwargs.pop("index_urls", None) + + super(PipSession, self).__init__(*args, **kwargs) + + # Namespace the attribute with "pip_" just in case to prevent + # possible conflicts with the base class. + self.pip_trusted_origins = [] # type: List[Tuple[str, Optional[int]]] + + # Attach our User Agent to the request + self.headers["User-Agent"] = user_agent() + + # Attach our Authentication handler to the session + self.auth = MultiDomainBasicAuth(index_urls=index_urls) + + # Create our urllib3.Retry instance which will allow us to customize + # how we handle retries. + retries = urllib3.Retry( + # Set the total number of retries that a particular request can + # have. + total=retries, + + # A 503 error from PyPI typically means that the Fastly -> Origin + # connection got interrupted in some way. A 503 error in general + # is typically considered a transient error so we'll go ahead and + # retry it. + # A 500 may indicate transient error in Amazon S3 + # A 520 or 527 - may indicate transient error in CloudFlare + status_forcelist=[500, 503, 520, 527], + + # Add a small amount of back off between failed requests in + # order to prevent hammering the service. + backoff_factor=0.25, + ) + + # Our Insecure HTTPAdapter disables HTTPS validation. It does not + # support caching so we'll use it for all http:// URLs. + # If caching is disabled, we will also use it for + # https:// hosts that we've marked as ignoring + # TLS errors for (trusted-hosts). + insecure_adapter = InsecureHTTPAdapter(max_retries=retries) + + # We want to _only_ cache responses on securely fetched origins or when + # the host is specified as trusted. 
We do this because + # we can't validate the response of an insecurely/untrusted fetched + # origin, and we don't want someone to be able to poison the cache and + # require manual eviction from the cache to fix it. + if cache: + secure_adapter = CacheControlAdapter( + cache=SafeFileCache(cache), + max_retries=retries, + ) + self._trusted_host_adapter = InsecureCacheControlAdapter( + cache=SafeFileCache(cache), + max_retries=retries, + ) + else: + secure_adapter = HTTPAdapter(max_retries=retries) + self._trusted_host_adapter = insecure_adapter + + self.mount("https://", secure_adapter) + self.mount("http://", insecure_adapter) + + # Enable file:// urls + self.mount("file://", LocalFSAdapter()) + + for host in trusted_hosts: + self.add_trusted_host(host, suppress_logging=True) + + def add_trusted_host(self, host, source=None, suppress_logging=False): + # type: (str, Optional[str], bool) -> None + """ + :param host: It is okay to provide a host that has previously been + added. + :param source: An optional source string, for logging where the host + string came from. + """ + if not suppress_logging: + msg = 'adding trusted host: {!r}'.format(host) + if source is not None: + msg += ' (from {})'.format(source) + logger.info(msg) + + host_port = parse_netloc(host) + if host_port not in self.pip_trusted_origins: + self.pip_trusted_origins.append(host_port) + + self.mount( + build_url_from_netloc(host) + '/', + self._trusted_host_adapter + ) + if not host_port[1]: + # Mount wildcard ports for the same host. 
+ self.mount( + build_url_from_netloc(host) + ':', + self._trusted_host_adapter + ) + + def iter_secure_origins(self): + # type: () -> Iterator[SecureOrigin] + for secure_origin in SECURE_ORIGINS: + yield secure_origin + for host, port in self.pip_trusted_origins: + yield ('*', host, '*' if port is None else port) + + def is_secure_origin(self, location): + # type: (Link) -> bool + # Determine if this url used a secure transport mechanism + parsed = urllib_parse.urlparse(str(location)) + origin_protocol, origin_host, origin_port = ( + parsed.scheme, parsed.hostname, parsed.port, + ) + + # The protocol to use to see if the protocol matches. + # Don't count the repository type as part of the protocol: in + # cases such as "git+ssh", only use "ssh". (I.e., Only verify against + # the last scheme.) + origin_protocol = origin_protocol.rsplit('+', 1)[-1] + + # Determine if our origin is a secure origin by looking through our + # hardcoded list of secure origins, as well as any additional ones + # configured on this PackageFinder instance. + for secure_origin in self.iter_secure_origins(): + secure_protocol, secure_host, secure_port = secure_origin + if origin_protocol != secure_protocol and secure_protocol != "*": + continue + + try: + addr = ipaddress.ip_address( + None + if origin_host is None + else six.ensure_text(origin_host) + ) + network = ipaddress.ip_network( + six.ensure_text(secure_host) + ) + except ValueError: + # We don't have both a valid address or a valid network, so + # we'll check this origin against hostnames. + if ( + origin_host and + origin_host.lower() != secure_host.lower() and + secure_host != "*" + ): + continue + else: + # We have a valid address and network, so see if the address + # is contained within the network. + if addr not in network: + continue + + # Check to see if the port matches. 
+ if ( + origin_port != secure_port and + secure_port != "*" and + secure_port is not None + ): + continue + + # If we've gotten here, then this origin matches the current + # secure origin and we should return True + return True + + # If we've gotten to this point, then the origin isn't secure and we + # will not accept it as a valid location to search. We will however + # log a warning that we are ignoring it. + logger.warning( + "The repository located at %s is not a trusted or secure host and " + "is being ignored. If this repository is available via HTTPS we " + "recommend you use HTTPS instead, otherwise you may silence " + "this warning and allow it anyway with '--trusted-host %s'.", + origin_host, + origin_host, + ) + + return False + + def request(self, method, url, *args, **kwargs): + # Allow setting a default timeout on a session + kwargs.setdefault("timeout", self.timeout) + + # Dispatch the actual request + return super(PipSession, self).request(method, url, *args, **kwargs) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/utils.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/utils.py new file mode 100644 index 00000000..907b3fed --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/utils.py @@ -0,0 +1,97 @@ +from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response + +from pip._internal.exceptions import NetworkConnectionError +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Dict, Iterator + +# The following comments and HTTP headers were originally added by +# Donald Stufft in git commit 22c562429a61bb77172039e480873fb239dd8c03. +# +# We use Accept-Encoding: identity here because requests defaults to +# accepting compressed responses. This breaks in a variety of ways +# depending on how the server is configured. 
+# - Some servers will notice that the file isn't a compressible file +# and will leave the file alone and with an empty Content-Encoding +# - Some servers will notice that the file is already compressed and +# will leave the file alone, adding a Content-Encoding: gzip header +# - Some servers won't notice anything at all and will take a file +# that's already been compressed and compress it again, and set +# the Content-Encoding: gzip header +# By setting this to request only the identity encoding we're hoping +# to eliminate the third case. Hopefully there does not exist a server +# which when given a file will notice it is already compressed and that +# you're not asking for a compressed file and will then decompress it +# before sending because if that's the case I don't think it'll ever be +# possible to make this work. +HEADERS = {'Accept-Encoding': 'identity'} # type: Dict[str, str] + + +def raise_for_status(resp): + # type: (Response) -> None + http_error_msg = u'' + if isinstance(resp.reason, bytes): + # We attempt to decode utf-8 first because some servers + # choose to localize their reason strings. If the string + # isn't utf-8, we fall back to iso-8859-1 for all other + # encodings. + try: + reason = resp.reason.decode('utf-8') + except UnicodeDecodeError: + reason = resp.reason.decode('iso-8859-1') + else: + reason = resp.reason + + if 400 <= resp.status_code < 500: + http_error_msg = u'%s Client Error: %s for url: %s' % ( + resp.status_code, reason, resp.url) + + elif 500 <= resp.status_code < 600: + http_error_msg = u'%s Server Error: %s for url: %s' % ( + resp.status_code, reason, resp.url) + + if http_error_msg: + raise NetworkConnectionError(http_error_msg, response=resp) + + +def response_chunks(response, chunk_size=CONTENT_CHUNK_SIZE): + # type: (Response, int) -> Iterator[bytes] + """Given a requests Response, provide the data chunks. + """ + try: + # Special case for urllib3. 
+ for chunk in response.raw.stream( + chunk_size, + # We use decode_content=False here because we don't + # want urllib3 to mess with the raw bytes we get + # from the server. If we decompress inside of + # urllib3 then we cannot verify the checksum + # because the checksum will be of the compressed + # file. This breakage will only occur if the + # server adds a Content-Encoding header, which + # depends on how the server was configured: + # - Some servers will notice that the file isn't a + # compressible file and will leave the file alone + # and with an empty Content-Encoding + # - Some servers will notice that the file is + # already compressed and will leave the file + # alone and will add a Content-Encoding: gzip + # header + # - Some servers won't notice anything at all and + # will take a file that's already been compressed + # and compress it again and set the + # Content-Encoding: gzip header + # + # By setting this not to decode automatically we + # hope to eliminate problems with the second case. + decode_content=False, + ): + yield chunk + except AttributeError: + # Standard file-like object. 
+ while True: + chunk = response.raw.read(chunk_size) + if not chunk: + break + yield chunk diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/xmlrpc.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/xmlrpc.py new file mode 100644 index 00000000..e6112624 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/network/xmlrpc.py @@ -0,0 +1,52 @@ +"""xmlrpclib.Transport implementation +""" + +import logging + +# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is +# why we ignore the type on this import +from pip._vendor.six.moves import xmlrpc_client # type: ignore +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip._internal.exceptions import NetworkConnectionError +from pip._internal.network.utils import raise_for_status +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Dict + from pip._internal.network.session import PipSession + + +logger = logging.getLogger(__name__) + + +class PipXmlrpcTransport(xmlrpc_client.Transport): + """Provide a `xmlrpclib.Transport` implementation via a `PipSession` + object. 
+ """ + + def __init__(self, index_url, session, use_datetime=False): + # type: (str, PipSession, bool) -> None + xmlrpc_client.Transport.__init__(self, use_datetime) + index_parts = urllib_parse.urlparse(index_url) + self._scheme = index_parts.scheme + self._session = session + + def request(self, host, handler, request_body, verbose=False): + # type: (str, str, Dict[str, str], bool) -> None + parts = (self._scheme, host, handler, None, None, None) + url = urllib_parse.urlunparse(parts) + try: + headers = {'Content-Type': 'text/xml'} + response = self._session.post(url, data=request_body, + headers=headers, stream=True) + raise_for_status(response) + self.verbose = verbose + return self.parse_response(response.raw) + except NetworkConnectionError as exc: + assert exc.response + logger.critical( + "HTTP error %s while getting %s", + exc.response.status_code, url, + ) + raise diff --git a/v1.1.14 b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/build/__init__.py similarity index 100% rename from v1.1.14 rename to Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/build/__init__.py diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/build/metadata.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/build/metadata.py new file mode 100644 index 00000000..cf52f8d8 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/build/metadata.py @@ -0,0 +1,37 @@ +"""Metadata generation logic for source distributions. 
+""" + +import os + +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from pip._internal.build_env import BuildEnvironment + from pip._vendor.pep517.wrappers import Pep517HookCaller + + +def generate_metadata(build_env, backend): + # type: (BuildEnvironment, Pep517HookCaller) -> str + """Generate metadata using mechanisms described in PEP 517. + + Returns the generated metadata directory. + """ + metadata_tmpdir = TempDirectory( + kind="modern-metadata", globally_managed=True + ) + + metadata_dir = metadata_tmpdir.path + + with build_env: + # Note that Pep517HookCaller implements a fallback for + # prepare_metadata_for_build_wheel, so we don't have to + # consider the possibility that this hook doesn't exist. + runner = runner_with_spinner_message("Preparing wheel metadata") + with backend.subprocess_runner(runner): + distinfo_dir = backend.prepare_metadata_for_build_wheel( + metadata_dir + ) + + return os.path.join(metadata_dir, distinfo_dir) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/build/metadata_legacy.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/build/metadata_legacy.py new file mode 100644 index 00000000..14762aef --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/build/metadata_legacy.py @@ -0,0 +1,77 @@ +"""Metadata generation logic for legacy source distributions. 
+""" + +import logging +import os + +from pip._internal.exceptions import InstallationError +from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from pip._internal.build_env import BuildEnvironment + +logger = logging.getLogger(__name__) + + +def _find_egg_info(directory): + # type: (str) -> str + """Find an .egg-info subdirectory in `directory`. + """ + filenames = [ + f for f in os.listdir(directory) if f.endswith(".egg-info") + ] + + if not filenames: + raise InstallationError( + "No .egg-info directory found in {}".format(directory) + ) + + if len(filenames) > 1: + raise InstallationError( + "More than one .egg-info directory found in {}".format( + directory + ) + ) + + return os.path.join(directory, filenames[0]) + + +def generate_metadata( + build_env, # type: BuildEnvironment + setup_py_path, # type: str + source_dir, # type: str + isolated, # type: bool + details, # type: str +): + # type: (...) -> str + """Generate metadata using setup.py-based defacto mechanisms. + + Returns the generated metadata directory. + """ + logger.debug( + 'Running setup.py (path:%s) egg_info for package %s', + setup_py_path, details, + ) + + egg_info_dir = TempDirectory( + kind="pip-egg-info", globally_managed=True + ).path + + args = make_setuptools_egg_info_args( + setup_py_path, + egg_info_dir=egg_info_dir, + no_user_config=isolated, + ) + + with build_env: + call_subprocess( + args, + cwd=source_dir, + command_desc='python setup.py egg_info', + ) + + # Return the .egg-info directory. 
+ return _find_egg_info(egg_info_dir) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/build/wheel.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/build/wheel.py new file mode 100644 index 00000000..0c28c498 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/build/wheel.py @@ -0,0 +1,46 @@ +import logging +import os + +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional + from pip._vendor.pep517.wrappers import Pep517HookCaller + +logger = logging.getLogger(__name__) + + +def build_wheel_pep517( + name, # type: str + backend, # type: Pep517HookCaller + metadata_directory, # type: str + build_options, # type: List[str] + tempd, # type: str +): + # type: (...) -> Optional[str] + """Build one InstallRequirement using the PEP 517 build process. + + Returns path to wheel if successfully built. Otherwise, returns None. 
+ """ + assert metadata_directory is not None + if build_options: + # PEP 517 does not support --build-options + logger.error('Cannot build wheel for %s using PEP 517 when ' + '--build-option is present', name) + return None + try: + logger.debug('Destination directory: %s', tempd) + + runner = runner_with_spinner_message( + 'Building wheel for {} (PEP 517)'.format(name) + ) + with backend.subprocess_runner(runner): + wheel_name = backend.build_wheel( + tempd, + metadata_directory=metadata_directory, + ) + except Exception: + logger.error('Failed building wheel for %s', name) + return None + return os.path.join(tempd, wheel_name) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/build/wheel_legacy.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/build/wheel_legacy.py new file mode 100644 index 00000000..37dc876a --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/build/wheel_legacy.py @@ -0,0 +1,115 @@ +import logging +import os.path + +from pip._internal.cli.spinners import open_spinner +from pip._internal.utils.setuptools_build import ( + make_setuptools_bdist_wheel_args, +) +from pip._internal.utils.subprocess import ( + LOG_DIVIDER, + call_subprocess, + format_command_args, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional, Text + +logger = logging.getLogger(__name__) + + +def format_command_result( + command_args, # type: List[str] + command_output, # type: Text +): + # type: (...) 
-> str + """Format command information for logging.""" + command_desc = format_command_args(command_args) + text = 'Command arguments: {}\n'.format(command_desc) + + if not command_output: + text += 'Command output: None' + elif logger.getEffectiveLevel() > logging.DEBUG: + text += 'Command output: [use --verbose to show]' + else: + if not command_output.endswith('\n'): + command_output += '\n' + text += 'Command output:\n{}{}'.format(command_output, LOG_DIVIDER) + + return text + + +def get_legacy_build_wheel_path( + names, # type: List[str] + temp_dir, # type: str + name, # type: str + command_args, # type: List[str] + command_output, # type: Text +): + # type: (...) -> Optional[str] + """Return the path to the wheel in the temporary build directory.""" + # Sort for determinism. + names = sorted(names) + if not names: + msg = ( + 'Legacy build of wheel for {!r} created no files.\n' + ).format(name) + msg += format_command_result(command_args, command_output) + logger.warning(msg) + return None + + if len(names) > 1: + msg = ( + 'Legacy build of wheel for {!r} created more than one file.\n' + 'Filenames (choosing first): {}\n' + ).format(name, names) + msg += format_command_result(command_args, command_output) + logger.warning(msg) + + return os.path.join(temp_dir, names[0]) + + +def build_wheel_legacy( + name, # type: str + setup_py_path, # type: str + source_dir, # type: str + global_options, # type: List[str] + build_options, # type: List[str] + tempd, # type: str +): + # type: (...) -> Optional[str] + """Build one unpacked package using the "legacy" build process. + + Returns path to wheel if successfully built. Otherwise, returns None. 
+ """ + wheel_args = make_setuptools_bdist_wheel_args( + setup_py_path, + global_options=global_options, + build_options=build_options, + destination_dir=tempd, + ) + + spin_message = 'Building wheel for {} (setup.py)'.format(name) + with open_spinner(spin_message) as spinner: + logger.debug('Destination directory: %s', tempd) + + try: + output = call_subprocess( + wheel_args, + cwd=source_dir, + spinner=spinner, + ) + except Exception: + spinner.finish("error") + logger.error('Failed building wheel for %s', name) + return None + + names = os.listdir(tempd) + wheel_path = get_legacy_build_wheel_path( + names=names, + temp_dir=tempd, + name=name, + command_args=wheel_args, + command_output=output, + ) + return wheel_path diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/check.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/check.py index 0b56eda4..5714915b 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/check.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/check.py @@ -7,15 +7,17 @@ from collections import namedtuple from pip._vendor.packaging.utils import canonicalize_name from pip._vendor.pkg_resources import RequirementParseError -from pip._internal.operations.prepare import make_abstract_dist +from pip._internal.distributions import ( + make_distribution_for_install_requirement, +) from pip._internal.utils.misc import get_installed_distributions from pip._internal.utils.typing import MYPY_CHECK_RUNNING logger = logging.getLogger(__name__) if MYPY_CHECK_RUNNING: - from pip._internal.req.req_install import InstallRequirement # noqa: F401 - from typing import ( # noqa: F401 + from pip._internal.req.req_install import InstallRequirement + from typing import ( Any, Callable, Dict, Optional, Set, Tuple, List ) @@ -27,6 +29,7 @@ if MYPY_CHECK_RUNNING: MissingDict = Dict[str, List[Missing]] ConflictingDict = Dict[str, 
List[Conflicting]] CheckResult = Tuple[MissingDict, ConflictingDict] + ConflictDetails = Tuple[PackageSet, CheckResult] PackageDetails = namedtuple('PackageDetails', ['version', 'requires']) @@ -47,7 +50,7 @@ def create_package_set_from_installed(**kwargs): package_set[name] = PackageDetails(dist.version, dist.requires()) except RequirementParseError as e: # Don't crash on broken metadata - logging.warning("Error parsing requirements for %s: %s", name, e) + logger.warning("Error parsing requirements for %s: %s", name, e) problems = True return package_set, problems @@ -59,19 +62,16 @@ def check_package_set(package_set, should_ignore=None): If should_ignore is passed, it should be a callable that takes a package name and returns a boolean. """ - if should_ignore is None: - def should_ignore(name): - return False - missing = dict() - conflicting = dict() + missing = {} + conflicting = {} for package_name in package_set: # Info about dependencies of package_name missing_deps = set() # type: Set[Missing] conflicting_deps = set() # type: Set[Conflicting] - if should_ignore(package_name): + if should_ignore and should_ignore(package_name): continue for req in package_set[package_name].requires: @@ -100,7 +100,7 @@ def check_package_set(package_set, should_ignore=None): def check_install_conflicts(to_install): - # type: (List[InstallRequirement]) -> Tuple[PackageSet, CheckResult] + # type: (List[InstallRequirement]) -> ConflictDetails """For checking if the dependency graph would be consistent after \ installing given requirements """ @@ -130,7 +130,10 @@ def _simulate_installation_of(to_install, package_set): # Modify it as installing requirement_set would (assuming no errors) for inst_req in to_install: - dist = make_abstract_dist(inst_req).dist() + abstract_dist = make_distribution_for_install_requirement(inst_req) + dist = abstract_dist.get_pkg_resources_distribution() + + assert dist is not None name = canonicalize_name(dist.key) package_set[name] = 
PackageDetails(dist.version, dist.requires()) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/freeze.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/freeze.py index 388bb73a..ddb9cb23 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/freeze.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/freeze.py @@ -3,7 +3,6 @@ from __future__ import absolute_import import collections import logging import os -import re from pip._vendor import six from pip._vendor.packaging.utils import canonicalize_name @@ -11,20 +10,26 @@ from pip._vendor.pkg_resources import RequirementParseError from pip._internal.exceptions import BadCommand, InstallationError from pip._internal.req.constructors import ( - install_req_from_editable, install_req_from_line, + install_req_from_editable, + install_req_from_line, ) from pip._internal.req.req_file import COMMENT_RE +from pip._internal.utils.direct_url_helpers import ( + direct_url_as_pep440_direct_reference, + dist_get_direct_url, +) from pip._internal.utils.misc import ( - dist_is_editable, get_installed_distributions, + dist_is_editable, + get_installed_distributions, ) from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 + from typing import ( Iterator, Optional, List, Container, Set, Dict, Tuple, Iterable, Union ) - from pip._internal.cache import WheelCache # noqa: F401 - from pip._vendor.pkg_resources import ( # noqa: F401 + from pip._internal.cache import WheelCache + from pip._vendor.pkg_resources import ( Distribution, Requirement ) @@ -37,9 +42,9 @@ logger = logging.getLogger(__name__) def freeze( requirement=None, # type: Optional[List[str]] find_links=None, # type: Optional[List[str]] - local_only=None, # type: Optional[bool] - user_only=None, # type: Optional[bool] - skip_regex=None, # type: Optional[str] + 
local_only=False, # type: bool + user_only=False, # type: bool + paths=None, # type: Optional[List[str]] isolated=False, # type: bool wheel_cache=None, # type: Optional[WheelCache] exclude_editable=False, # type: bool @@ -47,28 +52,32 @@ def freeze( ): # type: (...) -> Iterator[str] find_links = find_links or [] - skip_match = None - - if skip_regex: - skip_match = re.compile(skip_regex).search for link in find_links: - yield '-f %s' % link + yield '-f {}'.format(link) installations = {} # type: Dict[str, FrozenRequirement] - for dist in get_installed_distributions(local_only=local_only, - skip=(), - user_only=user_only): + + for dist in get_installed_distributions( + local_only=local_only, + skip=(), + user_only=user_only, + paths=paths + ): try: req = FrozenRequirement.from_dist(dist) - except RequirementParseError: + except RequirementParseError as exc: + # We include dist rather than dist.project_name because the + # dist string includes more information, like the version and + # location. We also include the exception message to aid + # troubleshooting. 
logger.warning( - "Could not parse requirement: %s", - dist.project_name + 'Could not generate requirement for distribution %r: %s', + dist, exc ) continue if exclude_editable and req.editable: continue - installations[req.name] = req + installations[req.canonical_name] = req if requirement: # the options that don't get turned into an InstallRequirement @@ -84,16 +93,15 @@ def freeze( for line in req_file: if (not line.strip() or line.strip().startswith('#') or - (skip_match and skip_match(line)) or line.startswith(( '-r', '--requirement', - '-Z', '--always-unzip', '-f', '--find-links', '-i', '--index-url', '--pre', '--trusted-host', '--process-dependency-links', - '--extra-index-url'))): + '--extra-index-url', + '--use-feature'))): line = line.rstrip() if line not in emitted_options: emitted_options.add(line) @@ -108,13 +116,11 @@ def freeze( line_req = install_req_from_editable( line, isolated=isolated, - wheel_cache=wheel_cache, ) else: line_req = install_req_from_line( COMMENT_RE.sub('', line).strip(), isolated=isolated, - wheel_cache=wheel_cache, ) if not line_req.name: @@ -127,22 +133,27 @@ def freeze( " (add #egg=PackageName to the URL to avoid" " this warning)" ) - elif line_req.name not in installations: - # either it's not installed, or it is installed - # but has been processed already - if not req_files[line_req.name]: - logger.warning( - "Requirement file [%s] contains %s, but " - "package %r is not installed", - req_file_path, - COMMENT_RE.sub('', line).strip(), line_req.name - ) + else: + line_req_canonical_name = canonicalize_name( + line_req.name) + if line_req_canonical_name not in installations: + # either it's not installed, or it is installed + # but has been processed already + if not req_files[line_req.name]: + logger.warning( + "Requirement file [%s] contains %s, but " + "package %r is not installed", + req_file_path, + COMMENT_RE.sub('', line).strip(), + line_req.name + ) + else: + req_files[line_req.name].append(req_file_path) else: + 
yield str(installations[ + line_req_canonical_name]).rstrip() + del installations[line_req_canonical_name] req_files[line_req.name].append(req_file_path) - else: - yield str(installations[line_req.name]).rstrip() - del installations[line_req.name] - req_files[line_req.name].append(req_file_path) # Warn about requirements that were included multiple times (in a # single requirements file or in different requirements files). @@ -157,7 +168,7 @@ def freeze( ) for installation in sorted( installations.values(), key=lambda x: x.name.lower()): - if canonicalize_name(installation.name) not in skip: + if installation.canonical_name not in skip: yield str(installation).rstrip() @@ -173,12 +184,12 @@ def get_requirement_info(dist): location = os.path.normcase(os.path.abspath(dist.location)) from pip._internal.vcs import vcs, RemoteNotFoundError - vc_type = vcs.get_backend_type(location) + vcs_backend = vcs.get_backend_for_dir(location) - if not vc_type: + if vcs_backend is None: req = dist.as_requirement() logger.debug( - 'No VCS found for editable requirement {!r} in: {!r}', req, + 'No VCS found for editable requirement "%s" in: %r', req, location, ) comments = [ @@ -187,12 +198,12 @@ def get_requirement_info(dist): return (location, True, comments) try: - req = vc_type.get_src_requirement(location, dist.project_name) + req = vcs_backend.get_src_requirement(location, dist.project_name) except RemoteNotFoundError: req = dist.as_requirement() comments = [ '# Editable {} install with no remote ({})'.format( - vc_type.__name__, req, + type(vcs_backend).__name__, req, ) ] return (location, True, comments) @@ -202,7 +213,7 @@ def get_requirement_info(dist): 'cannot determine version of editable source in %s ' '(%s command not found in path)', location, - vc_type.name, + vcs_backend.name, ) return (None, True, []) @@ -227,6 +238,7 @@ class FrozenRequirement(object): def __init__(self, name, req, editable, comments=()): # type: (str, Union[str, Requirement], bool, Iterable[str]) -> 
None self.name = name + self.canonical_name = canonicalize_name(name) self.req = req self.editable = editable self.comments = comments @@ -234,14 +246,27 @@ class FrozenRequirement(object): @classmethod def from_dist(cls, dist): # type: (Distribution) -> FrozenRequirement + # TODO `get_requirement_info` is taking care of editable requirements. + # TODO This should be refactored when we will add detection of + # editable that provide .dist-info metadata. req, editable, comments = get_requirement_info(dist) + if req is None and not editable: + # if PEP 610 metadata is present, attempt to use it + direct_url = dist_get_direct_url(dist) + if direct_url: + req = direct_url_as_pep440_direct_reference( + direct_url, dist.project_name + ) + comments = [] if req is None: + # name==version requirement req = dist.as_requirement() return cls(dist.project_name, req, editable, comments=comments) def __str__(self): + # type: () -> str req = self.req if self.editable: - req = '-e %s' % req + req = '-e {}'.format(req) return '\n'.join(list(self.comments) + [str(req)]) + '\n' diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/install/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/install/__init__.py new file mode 100644 index 00000000..24d6a5dd --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/install/__init__.py @@ -0,0 +1,2 @@ +"""For modules related to installing packages. +""" diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/install/editable_legacy.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/install/editable_legacy.py new file mode 100644 index 00000000..a668a61d --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/install/editable_legacy.py @@ -0,0 +1,52 @@ +"""Legacy editable installation process, i.e. `setup.py develop`. 
+""" +import logging + +from pip._internal.utils.logging import indent_log +from pip._internal.utils.setuptools_build import make_setuptools_develop_args +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional, Sequence + + from pip._internal.build_env import BuildEnvironment + + +logger = logging.getLogger(__name__) + + +def install_editable( + install_options, # type: List[str] + global_options, # type: Sequence[str] + prefix, # type: Optional[str] + home, # type: Optional[str] + use_user_site, # type: bool + name, # type: str + setup_py_path, # type: str + isolated, # type: bool + build_env, # type: BuildEnvironment + unpacked_source_directory, # type: str +): + # type: (...) -> None + """Install a package in editable mode. Most arguments are pass-through + to setuptools. + """ + logger.info('Running setup.py develop for %s', name) + + args = make_setuptools_develop_args( + setup_py_path, + global_options=global_options, + install_options=install_options, + no_user_config=isolated, + prefix=prefix, + home=home, + use_user_site=use_user_site, + ) + + with indent_log(): + with build_env: + call_subprocess( + args, + cwd=unpacked_source_directory, + ) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/install/legacy.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/install/legacy.py new file mode 100644 index 00000000..87227d5f --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/install/legacy.py @@ -0,0 +1,130 @@ +"""Legacy installation process, i.e. `setup.py install`. 
+""" + +import logging +import os +import sys +from distutils.util import change_root + +from pip._internal.exceptions import InstallationError +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ensure_dir +from pip._internal.utils.setuptools_build import make_setuptools_install_args +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional, Sequence + + from pip._internal.build_env import BuildEnvironment + from pip._internal.models.scheme import Scheme + + +logger = logging.getLogger(__name__) + + +class LegacyInstallFailure(Exception): + def __init__(self): + # type: () -> None + self.parent = sys.exc_info() + + +def install( + install_options, # type: List[str] + global_options, # type: Sequence[str] + root, # type: Optional[str] + home, # type: Optional[str] + prefix, # type: Optional[str] + use_user_site, # type: bool + pycompile, # type: bool + scheme, # type: Scheme + setup_py_path, # type: str + isolated, # type: bool + req_name, # type: str + build_env, # type: BuildEnvironment + unpacked_source_directory, # type: str + req_description, # type: str +): + # type: (...) 
-> bool + + header_dir = scheme.headers + + with TempDirectory(kind="record") as temp_dir: + try: + record_filename = os.path.join(temp_dir.path, 'install-record.txt') + install_args = make_setuptools_install_args( + setup_py_path, + global_options=global_options, + install_options=install_options, + record_filename=record_filename, + root=root, + prefix=prefix, + header_dir=header_dir, + home=home, + use_user_site=use_user_site, + no_user_config=isolated, + pycompile=pycompile, + ) + + runner = runner_with_spinner_message( + "Running setup.py install for {}".format(req_name) + ) + with indent_log(), build_env: + runner( + cmd=install_args, + cwd=unpacked_source_directory, + ) + + if not os.path.exists(record_filename): + logger.debug('Record file %s not found', record_filename) + # Signal to the caller that we didn't install the new package + return False + + except Exception: + # Signal to the caller that we didn't install the new package + raise LegacyInstallFailure + + # At this point, we have successfully installed the requirement. + + # We intentionally do not use any encoding to read the file because + # setuptools writes the file using distutils.file_util.write_file, + # which does not specify an encoding. + with open(record_filename) as f: + record_lines = f.read().splitlines() + + def prepend_root(path): + # type: (str) -> str + if root is None or not os.path.isabs(path): + return path + else: + return change_root(root, path) + + for line in record_lines: + directory = os.path.dirname(line) + if directory.endswith('.egg-info'): + egg_info_dir = prepend_root(directory) + break + else: + message = ( + "{} did not indicate that it installed an " + ".egg-info directory. Only setup.py projects " + "generating .egg-info directories are supported." 
+ ).format(req_description) + raise InstallationError(message) + + new_lines = [] + for line in record_lines: + filename = line.strip() + if os.path.isdir(filename): + filename += os.path.sep + new_lines.append( + os.path.relpath(prepend_root(filename), egg_info_dir) + ) + new_lines.sort() + ensure_dir(egg_info_dir) + inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt') + with open(inst_files_path, 'w') as f: + f.write('\n'.join(new_lines) + '\n') + + return True diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/install/wheel.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/install/wheel.py new file mode 100644 index 00000000..8f73a88b --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/install/wheel.py @@ -0,0 +1,830 @@ +"""Support for installing and building the "wheel" binary package format. +""" + +from __future__ import absolute_import + +import collections +import compileall +import contextlib +import csv +import importlib +import logging +import os.path +import re +import shutil +import sys +import warnings +from base64 import urlsafe_b64encode +from itertools import chain, starmap +from zipfile import ZipFile + +from pip._vendor import pkg_resources +from pip._vendor.distlib.scripts import ScriptMaker +from pip._vendor.distlib.util import get_export_entry +from pip._vendor.six import ( + PY2, + ensure_str, + ensure_text, + itervalues, + reraise, + text_type, +) +from pip._vendor.six.moves import filterfalse, map + +from pip._internal.exceptions import InstallationError +from pip._internal.locations import get_major_minor_version +from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl +from pip._internal.models.scheme import SCHEME_KEYS +from pip._internal.utils.filesystem import adjacent_tmp_file, replace +from pip._internal.utils.misc import ( + captured_stdout, + ensure_dir, + hash_file, + partition, 
+) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.unpacking import ( + current_umask, + is_within_directory, + set_extracted_file_to_default_mode_plus_executable, + zip_item_is_executable, +) +from pip._internal.utils.wheel import ( + parse_wheel, + pkg_resources_distribution_for_wheel, +) + +# Use the custom cast function at runtime to make cast work, +# and import typing.cast when performing pre-commit and type +# checks +if not MYPY_CHECK_RUNNING: + from pip._internal.utils.typing import cast +else: + from email.message import Message + from typing import ( + Any, + Callable, + Dict, + IO, + Iterable, + Iterator, + List, + NewType, + Optional, + Protocol, + Sequence, + Set, + Tuple, + Union, + cast, + ) + + from pip._vendor.pkg_resources import Distribution + + from pip._internal.models.scheme import Scheme + from pip._internal.utils.filesystem import NamedTemporaryFileResult + + RecordPath = NewType('RecordPath', text_type) + InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]] + + class File(Protocol): + src_record_path = None # type: RecordPath + dest_path = None # type: text_type + changed = None # type: bool + + def save(self): + # type: () -> None + pass + + +logger = logging.getLogger(__name__) + + +def rehash(path, blocksize=1 << 20): + # type: (text_type, int) -> Tuple[str, str] + """Return (encoded_digest, length) for path using hashlib.sha256()""" + h, length = hash_file(path, blocksize) + digest = 'sha256=' + urlsafe_b64encode( + h.digest() + ).decode('latin1').rstrip('=') + # unicode/str python2 issues + return (digest, str(length)) # type: ignore + + +def csv_io_kwargs(mode): + # type: (str) -> Dict[str, Any] + """Return keyword arguments to properly open a CSV file + in the given mode. 
+ """ + if PY2: + return {'mode': '{}b'.format(mode)} + else: + return {'mode': mode, 'newline': '', 'encoding': 'utf-8'} + + +def fix_script(path): + # type: (text_type) -> bool + """Replace #!python with #!/path/to/python + Return True if file was changed. + """ + # XXX RECORD hashes will need to be updated + assert os.path.isfile(path) + + with open(path, 'rb') as script: + firstline = script.readline() + if not firstline.startswith(b'#!python'): + return False + exename = sys.executable.encode(sys.getfilesystemencoding()) + firstline = b'#!' + exename + os.linesep.encode("ascii") + rest = script.read() + with open(path, 'wb') as script: + script.write(firstline) + script.write(rest) + return True + + +def wheel_root_is_purelib(metadata): + # type: (Message) -> bool + return metadata.get("Root-Is-Purelib", "").lower() == "true" + + +def get_entrypoints(distribution): + # type: (Distribution) -> Tuple[Dict[str, str], Dict[str, str]] + # get the entry points and then the script names + try: + console = distribution.get_entry_map('console_scripts') + gui = distribution.get_entry_map('gui_scripts') + except KeyError: + # Our dict-based Distribution raises KeyError if entry_points.txt + # doesn't exist. + return {}, {} + + def _split_ep(s): + # type: (pkg_resources.EntryPoint) -> Tuple[str, str] + """get the string representation of EntryPoint, + remove space and split on '=' + """ + split_parts = str(s).replace(" ", "").split("=") + return split_parts[0], split_parts[1] + + # convert the EntryPoint objects into strings with module:function + console = dict(_split_ep(v) for v in console.values()) + gui = dict(_split_ep(v) for v in gui.values()) + return console, gui + + +def message_about_scripts_not_on_PATH(scripts): + # type: (Sequence[str]) -> Optional[str] + """Determine if any scripts are not on PATH and format a warning. + Returns a warning message if one or more scripts are not on PATH, + otherwise None. 
+ """ + if not scripts: + return None + + # Group scripts by the path they were installed in + grouped_by_dir = collections.defaultdict(set) # type: Dict[str, Set[str]] + for destfile in scripts: + parent_dir = os.path.dirname(destfile) + script_name = os.path.basename(destfile) + grouped_by_dir[parent_dir].add(script_name) + + # We don't want to warn for directories that are on PATH. + not_warn_dirs = [ + os.path.normcase(i).rstrip(os.sep) for i in + os.environ.get("PATH", "").split(os.pathsep) + ] + # If an executable sits with sys.executable, we don't warn for it. + # This covers the case of venv invocations without activating the venv. + not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable))) + warn_for = { + parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items() + if os.path.normcase(parent_dir) not in not_warn_dirs + } # type: Dict[str, Set[str]] + if not warn_for: + return None + + # Format a message + msg_lines = [] + for parent_dir, dir_scripts in warn_for.items(): + sorted_scripts = sorted(dir_scripts) # type: List[str] + if len(sorted_scripts) == 1: + start_text = "script {} is".format(sorted_scripts[0]) + else: + start_text = "scripts {} are".format( + ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1] + ) + + msg_lines.append( + "The {} installed in '{}' which is not on PATH." + .format(start_text, parent_dir) + ) + + last_line_fmt = ( + "Consider adding {} to PATH or, if you prefer " + "to suppress this warning, use --no-warn-script-location." + ) + if len(msg_lines) == 1: + msg_lines.append(last_line_fmt.format("this directory")) + else: + msg_lines.append(last_line_fmt.format("these directories")) + + # Add a note if any directory starts with ~ + warn_for_tilde = any( + i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i + ) + if warn_for_tilde: + tilde_warning_msg = ( + "NOTE: The current PATH contains path(s) starting with `~`, " + "which may not be expanded by all applications." 
+ ) + msg_lines.append(tilde_warning_msg) + + # Returns the formatted multiline message + return "\n".join(msg_lines) + + +def _normalized_outrows(outrows): + # type: (Iterable[InstalledCSVRow]) -> List[Tuple[str, str, str]] + """Normalize the given rows of a RECORD file. + + Items in each row are converted into str. Rows are then sorted to make + the value more predictable for tests. + + Each row is a 3-tuple (path, hash, size) and corresponds to a record of + a RECORD file (see PEP 376 and PEP 427 for details). For the rows + passed to this function, the size can be an integer as an int or string, + or the empty string. + """ + # Normally, there should only be one row per path, in which case the + # second and third elements don't come into play when sorting. + # However, in cases in the wild where a path might happen to occur twice, + # we don't want the sort operation to trigger an error (but still want + # determinism). Since the third element can be an int or string, we + # coerce each element to a string to avoid a TypeError in this case. 
+ # For additional background, see-- + # https://github.com/pypa/pip/issues/5868 + return sorted( + (ensure_str(record_path, encoding='utf-8'), hash_, str(size)) + for record_path, hash_, size in outrows + ) + + +def _record_to_fs_path(record_path): + # type: (RecordPath) -> text_type + return record_path + + +def _fs_to_record_path(path, relative_to=None): + # type: (text_type, Optional[text_type]) -> RecordPath + if relative_to is not None: + # On Windows, do not handle relative paths if they belong to different + # logical disks + if os.path.splitdrive(path)[0].lower() == \ + os.path.splitdrive(relative_to)[0].lower(): + path = os.path.relpath(path, relative_to) + path = path.replace(os.path.sep, '/') + return cast('RecordPath', path) + + +def _parse_record_path(record_column): + # type: (str) -> RecordPath + p = ensure_text(record_column, encoding='utf-8') + return cast('RecordPath', p) + + +def get_csv_rows_for_installed( + old_csv_rows, # type: List[List[str]] + installed, # type: Dict[RecordPath, RecordPath] + changed, # type: Set[RecordPath] + generated, # type: List[str] + lib_dir, # type: str +): + # type: (...) -> List[InstalledCSVRow] + """ + :param installed: A map from archive RECORD path to installation RECORD + path. 
+ """ + installed_rows = [] # type: List[InstalledCSVRow] + for row in old_csv_rows: + if len(row) > 3: + logger.warning('RECORD line has more than three elements: %s', row) + old_record_path = _parse_record_path(row[0]) + new_record_path = installed.pop(old_record_path, old_record_path) + if new_record_path in changed: + digest, length = rehash(_record_to_fs_path(new_record_path)) + else: + digest = row[1] if len(row) > 1 else '' + length = row[2] if len(row) > 2 else '' + installed_rows.append((new_record_path, digest, length)) + for f in generated: + path = _fs_to_record_path(f, lib_dir) + digest, length = rehash(f) + installed_rows.append((path, digest, length)) + for installed_record_path in itervalues(installed): + installed_rows.append((installed_record_path, '', '')) + return installed_rows + + +def get_console_script_specs(console): + # type: (Dict[str, str]) -> List[str] + """ + Given the mapping from entrypoint name to callable, return the relevant + console script specs. + """ + # Don't mutate caller's version + console = console.copy() + + scripts_to_generate = [] + + # Special case pip and setuptools to generate versioned wrappers + # + # The issue is that some projects (specifically, pip and setuptools) use + # code in setup.py to create "versioned" entry points - pip2.7 on Python + # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into + # the wheel metadata at build time, and so if the wheel is installed with + # a *different* version of Python the entry points will be wrong. The + # correct fix for this is to enhance the metadata to be able to describe + # such versioned entry points, but that won't happen till Metadata 2.0 is + # available. + # In the meantime, projects using versioned entry points will either have + # incorrect versioned entry points, or they will not be able to distribute + # "universal" wheels (i.e., they will need a wheel per Python version). 
+ # + # Because setuptools and pip are bundled with _ensurepip and virtualenv, + # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we + # override the versioned entry points in the wheel and generate the + # correct ones. This code is purely a short-term measure until Metadata 2.0 + # is available. + # + # To add the level of hack in this section of code, in order to support + # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment + # variable which will control which version scripts get installed. + # + # ENSUREPIP_OPTIONS=altinstall + # - Only pipX.Y and easy_install-X.Y will be generated and installed + # ENSUREPIP_OPTIONS=install + # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note + # that this option is technically if ENSUREPIP_OPTIONS is set and is + # not altinstall + # DEFAULT + # - The default behavior is to install pip, pipX, pipX.Y, easy_install + # and easy_install-X.Y. + pip_script = console.pop('pip', None) + if pip_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + scripts_to_generate.append('pip = ' + pip_script) + + if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": + scripts_to_generate.append( + 'pip{} = {}'.format(sys.version_info[0], pip_script) + ) + + scripts_to_generate.append( + 'pip{} = {}'.format(get_major_minor_version(), pip_script) + ) + # Delete any other versioned pip entry points + pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)] + for k in pip_ep: + del console[k] + easy_install_script = console.pop('easy_install', None) + if easy_install_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + scripts_to_generate.append( + 'easy_install = ' + easy_install_script + ) + + scripts_to_generate.append( + 'easy_install-{} = {}'.format( + get_major_minor_version(), easy_install_script + ) + ) + # Delete any other versioned easy_install entry points + easy_install_ep = [ + k for k in console if re.match(r'easy_install(-\d\.\d)?$', k) + ] + for k in 
easy_install_ep: + del console[k] + + # Generate the console entry points specified in the wheel + scripts_to_generate.extend(starmap('{} = {}'.format, console.items())) + + return scripts_to_generate + + +class ZipBackedFile(object): + def __init__(self, src_record_path, dest_path, zip_file): + # type: (RecordPath, text_type, ZipFile) -> None + self.src_record_path = src_record_path + self.dest_path = dest_path + self._zip_file = zip_file + self.changed = False + + def save(self): + # type: () -> None + # directory creation is lazy and after file filtering + # to ensure we don't install empty dirs; empty dirs can't be + # uninstalled. + parent_dir = os.path.dirname(self.dest_path) + ensure_dir(parent_dir) + + # When we open the output file below, any existing file is truncated + # before we start writing the new contents. This is fine in most + # cases, but can cause a segfault if pip has loaded a shared + # object (e.g. from pyopenssl through its vendored urllib3) + # Since the shared object is mmap'd an attempt to call a + # symbol in it will then cause a segfault. Unlinking the file + # allows writing of new contents while allowing the process to + # continue to use the old copy. 
+ if os.path.exists(self.dest_path): + os.unlink(self.dest_path) + + with self._zip_file.open(self.src_record_path) as f: + with open(self.dest_path, "wb") as dest: + shutil.copyfileobj(f, dest) + + zipinfo = self._zip_file.getinfo(self.src_record_path) + if zip_item_is_executable(zipinfo): + set_extracted_file_to_default_mode_plus_executable(self.dest_path) + + +class ScriptFile(object): + def __init__(self, file): + # type: (File) -> None + self._file = file + self.src_record_path = self._file.src_record_path + self.dest_path = self._file.dest_path + self.changed = False + + def save(self): + # type: () -> None + self._file.save() + self.changed = fix_script(self.dest_path) + + +class MissingCallableSuffix(InstallationError): + def __init__(self, entry_point): + # type: (str) -> None + super(MissingCallableSuffix, self).__init__( + "Invalid script entry point: {} - A callable " + "suffix is required. Cf https://packaging.python.org/" + "specifications/entry-points/#use-for-scripts for more " + "information.".format(entry_point) + ) + + +def _raise_for_invalid_entrypoint(specification): + # type: (str) -> None + entry = get_export_entry(specification) + if entry is not None and entry.suffix is None: + raise MissingCallableSuffix(str(entry)) + + +class PipScriptMaker(ScriptMaker): + def make(self, specification, options=None): + # type: (str, Dict[str, Any]) -> List[str] + _raise_for_invalid_entrypoint(specification) + return super(PipScriptMaker, self).make(specification, options) + + +def _install_wheel( + name, # type: str + wheel_zip, # type: ZipFile + wheel_path, # type: str + scheme, # type: Scheme + pycompile=True, # type: bool + warn_script_location=True, # type: bool + direct_url=None, # type: Optional[DirectUrl] + requested=False, # type: bool +): + # type: (...) -> None + """Install a wheel. 
+ + :param name: Name of the project to install + :param wheel_zip: open ZipFile for wheel being installed + :param scheme: Distutils scheme dictating the install directories + :param req_description: String used in place of the requirement, for + logging + :param pycompile: Whether to byte-compile installed Python files + :param warn_script_location: Whether to check that scripts are installed + into a directory on PATH + :raises UnsupportedWheel: + * when the directory holds an unpacked wheel with incompatible + Wheel-Version + * when the .dist-info dir does not match the wheel + """ + info_dir, metadata = parse_wheel(wheel_zip, name) + + if wheel_root_is_purelib(metadata): + lib_dir = scheme.purelib + else: + lib_dir = scheme.platlib + + # Record details of the files moved + # installed = files copied from the wheel to the destination + # changed = files changed while installing (scripts #! line typically) + # generated = files newly generated during the install (script wrappers) + installed = {} # type: Dict[RecordPath, RecordPath] + changed = set() # type: Set[RecordPath] + generated = [] # type: List[str] + + def record_installed(srcfile, destfile, modified=False): + # type: (RecordPath, text_type, bool) -> None + """Map archive RECORD paths to installation RECORD paths.""" + newpath = _fs_to_record_path(destfile, lib_dir) + installed[srcfile] = newpath + if modified: + changed.add(_fs_to_record_path(destfile)) + + def all_paths(): + # type: () -> Iterable[RecordPath] + names = wheel_zip.namelist() + # If a flag is set, names may be unicode in Python 2. We convert to + # text explicitly so these are valid for lookup in RECORD. 
+ decoded_names = map(ensure_text, names) + for name in decoded_names: + yield cast("RecordPath", name) + + def is_dir_path(path): + # type: (RecordPath) -> bool + return path.endswith("/") + + def assert_no_path_traversal(dest_dir_path, target_path): + # type: (text_type, text_type) -> None + if not is_within_directory(dest_dir_path, target_path): + message = ( + "The wheel {!r} has a file {!r} trying to install" + " outside the target directory {!r}" + ) + raise InstallationError( + message.format(wheel_path, target_path, dest_dir_path) + ) + + def root_scheme_file_maker(zip_file, dest): + # type: (ZipFile, text_type) -> Callable[[RecordPath], File] + def make_root_scheme_file(record_path): + # type: (RecordPath) -> File + normed_path = os.path.normpath(record_path) + dest_path = os.path.join(dest, normed_path) + assert_no_path_traversal(dest, dest_path) + return ZipBackedFile(record_path, dest_path, zip_file) + + return make_root_scheme_file + + def data_scheme_file_maker(zip_file, scheme): + # type: (ZipFile, Scheme) -> Callable[[RecordPath], File] + scheme_paths = {} + for key in SCHEME_KEYS: + encoded_key = ensure_text(key) + scheme_paths[encoded_key] = ensure_text( + getattr(scheme, key), encoding=sys.getfilesystemencoding() + ) + + def make_data_scheme_file(record_path): + # type: (RecordPath) -> File + normed_path = os.path.normpath(record_path) + _, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2) + scheme_path = scheme_paths[scheme_key] + dest_path = os.path.join(scheme_path, dest_subpath) + assert_no_path_traversal(scheme_path, dest_path) + return ZipBackedFile(record_path, dest_path, zip_file) + + return make_data_scheme_file + + def is_data_scheme_path(path): + # type: (RecordPath) -> bool + return path.split("/", 1)[0].endswith(".data") + + paths = all_paths() + file_paths = filterfalse(is_dir_path, paths) + root_scheme_paths, data_scheme_paths = partition( + is_data_scheme_path, file_paths + ) + + make_root_scheme_file = 
root_scheme_file_maker( + wheel_zip, + ensure_text(lib_dir, encoding=sys.getfilesystemencoding()), + ) + files = map(make_root_scheme_file, root_scheme_paths) + + def is_script_scheme_path(path): + # type: (RecordPath) -> bool + parts = path.split("/", 2) + return ( + len(parts) > 2 and + parts[0].endswith(".data") and + parts[1] == "scripts" + ) + + other_scheme_paths, script_scheme_paths = partition( + is_script_scheme_path, data_scheme_paths + ) + + make_data_scheme_file = data_scheme_file_maker(wheel_zip, scheme) + other_scheme_files = map(make_data_scheme_file, other_scheme_paths) + files = chain(files, other_scheme_files) + + # Get the defined entry points + distribution = pkg_resources_distribution_for_wheel( + wheel_zip, name, wheel_path + ) + console, gui = get_entrypoints(distribution) + + def is_entrypoint_wrapper(file): + # type: (File) -> bool + # EP, EP.exe and EP-script.py are scripts generated for + # entry point EP by setuptools + path = file.dest_path + name = os.path.basename(path) + if name.lower().endswith('.exe'): + matchname = name[:-4] + elif name.lower().endswith('-script.py'): + matchname = name[:-10] + elif name.lower().endswith(".pya"): + matchname = name[:-4] + else: + matchname = name + # Ignore setuptools-generated scripts + return (matchname in console or matchname in gui) + + script_scheme_files = map(make_data_scheme_file, script_scheme_paths) + script_scheme_files = filterfalse( + is_entrypoint_wrapper, script_scheme_files + ) + script_scheme_files = map(ScriptFile, script_scheme_files) + files = chain(files, script_scheme_files) + + for file in files: + file.save() + record_installed(file.src_record_path, file.dest_path, file.changed) + + def pyc_source_file_paths(): + # type: () -> Iterator[text_type] + # We de-duplicate installation paths, since there can be overlap (e.g. + # file in .data maps to same location as file in wheel root). 
+ # Sorting installation paths makes it easier to reproduce and debug + # issues related to permissions on existing files. + for installed_path in sorted(set(installed.values())): + full_installed_path = os.path.join(lib_dir, installed_path) + if not os.path.isfile(full_installed_path): + continue + if not full_installed_path.endswith('.py'): + continue + yield full_installed_path + + def pyc_output_path(path): + # type: (text_type) -> text_type + """Return the path the pyc file would have been written to. + """ + if PY2: + if sys.flags.optimize: + return path + 'o' + else: + return path + 'c' + else: + return importlib.util.cache_from_source(path) + + # Compile all of the pyc files for the installed files + if pycompile: + with captured_stdout() as stdout: + with warnings.catch_warnings(): + warnings.filterwarnings('ignore') + for path in pyc_source_file_paths(): + # Python 2's `compileall.compile_file` requires a str in + # error cases, so we must convert to the native type. + path_arg = ensure_str( + path, encoding=sys.getfilesystemencoding() + ) + success = compileall.compile_file( + path_arg, force=True, quiet=True + ) + if success: + pyc_path = pyc_output_path(path) + assert os.path.exists(pyc_path) + pyc_record_path = cast( + "RecordPath", pyc_path.replace(os.path.sep, "/") + ) + record_installed(pyc_record_path, pyc_path) + logger.debug(stdout.getvalue()) + + maker = PipScriptMaker(None, scheme.scripts) + + # Ensure old scripts are overwritten. + # See https://github.com/pypa/pip/issues/1800 + maker.clobber = True + + # Ensure we don't generate any variants for scripts because this is almost + # never what somebody wants. + # See https://bitbucket.org/pypa/distlib/issue/35/ + maker.variants = {''} + + # This is required because otherwise distlib creates scripts that are not + # executable. 
+ # See https://bitbucket.org/pypa/distlib/issue/32/ + maker.set_mode = True + + # Generate the console and GUI entry points specified in the wheel + scripts_to_generate = get_console_script_specs(console) + + gui_scripts_to_generate = list(starmap('{} = {}'.format, gui.items())) + + generated_console_scripts = maker.make_multiple(scripts_to_generate) + generated.extend(generated_console_scripts) + + generated.extend( + maker.make_multiple(gui_scripts_to_generate, {'gui': True}) + ) + + if warn_script_location: + msg = message_about_scripts_not_on_PATH(generated_console_scripts) + if msg is not None: + logger.warning(msg) + + generated_file_mode = 0o666 & ~current_umask() + + @contextlib.contextmanager + def _generate_file(path, **kwargs): + # type: (str, **Any) -> Iterator[NamedTemporaryFileResult] + with adjacent_tmp_file(path, **kwargs) as f: + yield f + os.chmod(f.name, generated_file_mode) + replace(f.name, path) + + dest_info_dir = os.path.join(lib_dir, info_dir) + + # Record pip as the installer + installer_path = os.path.join(dest_info_dir, 'INSTALLER') + with _generate_file(installer_path) as installer_file: + installer_file.write(b'pip\n') + generated.append(installer_path) + + # Record the PEP 610 direct URL reference + if direct_url is not None: + direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME) + with _generate_file(direct_url_path) as direct_url_file: + direct_url_file.write(direct_url.to_json().encode("utf-8")) + generated.append(direct_url_path) + + # Record the REQUESTED file + if requested: + requested_path = os.path.join(dest_info_dir, 'REQUESTED') + with open(requested_path, "w"): + pass + generated.append(requested_path) + + record_text = distribution.get_metadata('RECORD') + record_rows = list(csv.reader(record_text.splitlines())) + + rows = get_csv_rows_for_installed( + record_rows, + installed=installed, + changed=changed, + generated=generated, + lib_dir=lib_dir) + + # Record details of all files installed + 
record_path = os.path.join(dest_info_dir, 'RECORD') + + with _generate_file(record_path, **csv_io_kwargs('w')) as record_file: + # The type mypy infers for record_file is different for Python 3 + # (typing.IO[Any]) and Python 2 (typing.BinaryIO). We explicitly + # cast to typing.IO[str] as a workaround. + writer = csv.writer(cast('IO[str]', record_file)) + writer.writerows(_normalized_outrows(rows)) + + +@contextlib.contextmanager +def req_error_context(req_description): + # type: (str) -> Iterator[None] + try: + yield + except InstallationError as e: + message = "For req: {}. {}".format(req_description, e.args[0]) + reraise( + InstallationError, InstallationError(message), sys.exc_info()[2] + ) + + +def install_wheel( + name, # type: str + wheel_path, # type: str + scheme, # type: Scheme + req_description, # type: str + pycompile=True, # type: bool + warn_script_location=True, # type: bool + direct_url=None, # type: Optional[DirectUrl] + requested=False, # type: bool +): + # type: (...) -> None + with ZipFile(wheel_path, allowZip64=True) as z: + with req_error_context(req_description): + _install_wheel( + name=name, + wheel_zip=z, + wheel_path=wheel_path, + scheme=scheme, + pycompile=pycompile, + warn_script_location=warn_script_location, + direct_url=direct_url, + requested=requested, + ) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/prepare.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/prepare.py index 4f31dd5a..a5455fcc 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/prepare.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/operations/prepare.py @@ -1,173 +1,317 @@ """Prepares a distribution for installation """ +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False + import logging +import mimetypes import os +import shutil -from pip._vendor import pkg_resources, requests +from pip._vendor.six import PY2 -from pip._internal.build_env import BuildEnvironment -from pip._internal.download import ( - is_dir_url, is_file_url, is_vcs_url, unpack_url, url_to_path, +from pip._internal.distributions import ( + make_distribution_for_install_requirement, ) +from pip._internal.distributions.installed import InstalledDistribution from pip._internal.exceptions import ( - DirectoryUrlHashUnsupported, HashUnpinned, InstallationError, - PreviousBuildDirError, VcsHashUnsupported, + DirectoryUrlHashUnsupported, + HashMismatch, + HashUnpinned, + InstallationError, + NetworkConnectionError, + PreviousBuildDirError, + VcsHashUnsupported, ) -from pip._internal.utils.compat import expanduser +from pip._internal.utils.filesystem import copy2_fixed from pip._internal.utils.hashes import MissingHashes from pip._internal.utils.logging import indent_log -from pip._internal.utils.misc import display_path, normalize_path +from pip._internal.utils.misc import ( + display_path, + hide_url, + path_to_display, + rmtree, +) +from pip._internal.utils.temp_dir import TempDirectory from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.unpacking import unpack_file from pip._internal.vcs import vcs if MYPY_CHECK_RUNNING: - from typing import Any, Optional # noqa: F401 - from pip._internal.req.req_install import InstallRequirement # noqa: F401 - from pip._internal.index import PackageFinder # noqa: F401 - from pip._internal.download import PipSession # noqa: F401 - from pip._internal.req.req_tracker import RequirementTracker # noqa: F401 + from typing import ( + Callable, List, Optional, Tuple, + ) + + from mypy_extensions import TypedDict + + from pip._internal.distributions import AbstractDistribution + from pip._internal.index.package_finder import PackageFinder + from pip._internal.models.link import 
Link + from pip._internal.network.download import Downloader + from pip._internal.req.req_install import InstallRequirement + from pip._internal.req.req_tracker import RequirementTracker + from pip._internal.utils.hashes import Hashes + + if PY2: + CopytreeKwargs = TypedDict( + 'CopytreeKwargs', + { + 'ignore': Callable[[str, List[str]], List[str]], + 'symlinks': bool, + }, + total=False, + ) + else: + CopytreeKwargs = TypedDict( + 'CopytreeKwargs', + { + 'copy_function': Callable[[str, str], None], + 'ignore': Callable[[str, List[str]], List[str]], + 'ignore_dangling_symlinks': bool, + 'symlinks': bool, + }, + total=False, + ) logger = logging.getLogger(__name__) -def make_abstract_dist(req): - # type: (InstallRequirement) -> DistAbstraction - """Factory to make an abstract dist object. - - Preconditions: Either an editable req with a source_dir, or satisfied_by or - a wheel link, or a non-editable req with a source_dir. - - :return: A concrete DistAbstraction. - """ - if req.editable: - return IsSDist(req) - elif req.link and req.link.is_wheel: - return IsWheel(req) - else: - return IsSDist(req) - - -class DistAbstraction(object): - """Abstracts out the wheel vs non-wheel Resolver.resolve() logic. - - The requirements for anything installable are as follows: - - we must be able to determine the requirement name - (or we can't correctly handle the non-upgrade case). - - we must be able to generate a list of run-time dependencies - without installing any additional packages (or we would - have to either burn time by doing temporary isolated installs - or alternatively violate pips 'don't start installing unless - all requirements are available' rule - neither of which are - desirable). - - for packages with setup requirements, we must also be able - to determine their requirements without installing additional - packages (for the same reason as run-time dependencies) - - we must be able to create a Distribution object exposing the - above metadata. 
+def _get_prepared_distribution( + req, # type: InstallRequirement + req_tracker, # type: RequirementTracker + finder, # type: PackageFinder + build_isolation # type: bool +): + # type: (...) -> AbstractDistribution + """Prepare a distribution for installation. """ + abstract_dist = make_distribution_for_install_requirement(req) + with req_tracker.track(req): + abstract_dist.prepare_distribution_metadata(finder, build_isolation) + return abstract_dist + + +def unpack_vcs_link(link, location): + # type: (Link, str) -> None + vcs_backend = vcs.get_backend_for_scheme(link.scheme) + assert vcs_backend is not None + vcs_backend.unpack(location, url=hide_url(link.url)) + + +class File(object): + def __init__(self, path, content_type): + # type: (str, str) -> None + self.path = path + self.content_type = content_type + + +def get_http_url( + link, # type: Link + downloader, # type: Downloader + download_dir=None, # type: Optional[str] + hashes=None, # type: Optional[Hashes] +): + # type: (...) -> File + temp_dir = TempDirectory(kind="unpack", globally_managed=True) + # If a download dir is specified, is the file already downloaded there? 
+ already_downloaded_path = None + if download_dir: + already_downloaded_path = _check_download_dir( + link, download_dir, hashes + ) - def __init__(self, req): - # type: (InstallRequirement) -> None - self.req = req # type: InstallRequirement - - def dist(self): - # type: () -> Any - """Return a setuptools Dist object.""" - raise NotImplementedError - - def prep_for_dist(self, finder, build_isolation): - # type: (PackageFinder, bool) -> Any - """Ensure that we can get a Dist for this requirement.""" - raise NotImplementedError - + if already_downloaded_path: + from_path = already_downloaded_path + content_type = mimetypes.guess_type(from_path)[0] + else: + # let's download to a tmp dir + from_path, content_type = _download_http_url( + link, downloader, temp_dir.path, hashes + ) -class IsWheel(DistAbstraction): + return File(from_path, content_type) - def dist(self): - # type: () -> pkg_resources.Distribution - return list(pkg_resources.find_distributions( - self.req.source_dir))[0] - def prep_for_dist(self, finder, build_isolation): - # type: (PackageFinder, bool) -> Any - # FIXME:https://github.com/pypa/pip/issues/1112 - pass +def _copy2_ignoring_special_files(src, dest): + # type: (str, str) -> None + """Copying special files is not supported, but as a convenience to users + we skip errors copying them. This supports tools that may create e.g. + socket files in the project source directory. + """ + try: + copy2_fixed(src, dest) + except shutil.SpecialFileError as e: + # SpecialFileError may be raised due to either the source or + # destination. If the destination was the cause then we would actually + # care, but since the destination directory is deleted prior to + # copy we ignore all of them assuming it is caused by the source. 
+ logger.warning( + "Ignoring special file error '%s' encountered copying %s to %s.", + str(e), + path_to_display(src), + path_to_display(dest), + ) -class IsSDist(DistAbstraction): +def _copy_source_tree(source, target): + # type: (str, str) -> None + target_abspath = os.path.abspath(target) + target_basename = os.path.basename(target_abspath) + target_dirname = os.path.dirname(target_abspath) + + def ignore(d, names): + # type: (str, List[str]) -> List[str] + skipped = [] # type: List[str] + if d == source: + # Pulling in those directories can potentially be very slow, + # exclude the following directories if they appear in the top + # level dir (and only it). + # See discussion at https://github.com/pypa/pip/pull/6770 + skipped += ['.tox', '.nox'] + if os.path.abspath(d) == target_dirname: + # Prevent an infinite recursion if the target is in source. + # This can happen when TMPDIR is set to ${PWD}/... + # and we copy PWD to TMPDIR. + skipped += [target_basename] + return skipped + + kwargs = dict(ignore=ignore, symlinks=True) # type: CopytreeKwargs + + if not PY2: + # Python 2 does not support copy_function, so we only ignore + # errors on special file copy in Python 3. + kwargs['copy_function'] = _copy2_ignoring_special_files + + shutil.copytree(source, target, **kwargs) + + +def get_file_url( + link, # type: Link + download_dir=None, # type: Optional[str] + hashes=None # type: Optional[Hashes] +): + # type: (...) -> File + """Get file and optionally check its hash. + """ + # If a download dir is specified, is the file already there and valid? + already_downloaded_path = None + if download_dir: + already_downloaded_path = _check_download_dir( + link, download_dir, hashes + ) - def dist(self): - return self.req.get_dist() + if already_downloaded_path: + from_path = already_downloaded_path + else: + from_path = link.file_path + + # If --require-hashes is off, `hashes` is either empty, the + # link's embedded hash, or MissingHashes; it is required to + # match. 
If --require-hashes is on, we are satisfied by any + # hash in `hashes` matching: a URL-based or an option-based + # one; no internet-sourced hash will be in `hashes`. + if hashes: + hashes.check_against_path(from_path) + + content_type = mimetypes.guess_type(from_path)[0] + + return File(from_path, content_type) + + +def unpack_url( + link, # type: Link + location, # type: str + downloader, # type: Downloader + download_dir=None, # type: Optional[str] + hashes=None, # type: Optional[Hashes] +): + # type: (...) -> Optional[File] + """Unpack link into location, downloading if required. + + :param hashes: A Hashes object, one of whose embedded hashes must match, + or HashMismatch will be raised. If the Hashes is empty, no matches are + required, and unhashable types of requirements (like VCS ones, which + would ordinarily raise HashUnsupported) are allowed. + """ + # non-editable vcs urls + if link.is_vcs: + unpack_vcs_link(link, location) + return None + + # If it's a url to a local directory + if link.is_existing_dir(): + if os.path.isdir(location): + rmtree(location) + _copy_source_tree(link.file_path, location) + return None + + # file urls + if link.is_file: + file = get_file_url(link, download_dir, hashes=hashes) + + # http urls + else: + file = get_http_url( + link, + downloader, + download_dir, + hashes=hashes, + ) - def prep_for_dist(self, finder, build_isolation): - # type: (PackageFinder, bool) -> None - # Prepare for building. We need to: - # 1. Load pyproject.toml (if it exists) - # 2. Set up the build environment + # unpack the archive to the build dir location. 
even when only downloading + # archives, they have to be unpacked to parse dependencies, except wheels + if not link.is_wheel: + unpack_file(file.path, location, file.content_type) - self.req.load_pyproject_toml() - should_isolate = self.req.use_pep517 and build_isolation + return file - def _raise_conflicts(conflicting_with, conflicting_reqs): - raise InstallationError( - "Some build dependencies for %s conflict with %s: %s." % ( - self.req, conflicting_with, ', '.join( - '%s is incompatible with %s' % (installed, wanted) - for installed, wanted in sorted(conflicting)))) - if should_isolate: - # Isolate in a BuildEnvironment and install the build-time - # requirements. - self.req.build_env = BuildEnvironment() - self.req.build_env.install_requirements( - finder, self.req.pyproject_requires, 'overlay', - "Installing build dependencies" - ) - conflicting, missing = self.req.build_env.check_requirements( - self.req.requirements_to_check - ) - if conflicting: - _raise_conflicts("PEP 517/518 supported requirements", - conflicting) - if missing: - logger.warning( - "Missing build requirements in pyproject.toml for %s.", - self.req, - ) - logger.warning( - "The project does not specify a build backend, and " - "pip cannot fall back to setuptools without %s.", - " and ".join(map(repr, sorted(missing))) - ) - # Install any extra build dependencies that the backend requests. - # This must be done in a second pass, as the pyproject.toml - # dependencies must be installed before we can call the backend. - with self.req.build_env: - # We need to have the env active when calling the hook. 
- self.req.spin_message = "Getting requirements to build wheel" - reqs = self.req.pep517_backend.get_requires_for_build_wheel() - conflicting, missing = self.req.build_env.check_requirements(reqs) - if conflicting: - _raise_conflicts("the backend dependencies", conflicting) - self.req.build_env.install_requirements( - finder, missing, 'normal', - "Installing backend dependencies" - ) +def _download_http_url( + link, # type: Link + downloader, # type: Downloader + temp_dir, # type: str + hashes, # type: Optional[Hashes] +): + # type: (...) -> Tuple[str, str] + """Download link url into temp_dir using provided session""" + download = downloader(link) - self.req.prepare_metadata() - self.req.assert_source_matches_version() + file_path = os.path.join(temp_dir, download.filename) + with open(file_path, 'wb') as content_file: + for chunk in download.chunks: + content_file.write(chunk) + if hashes: + hashes.check_against_path(file_path) -class Installed(DistAbstraction): + return file_path, download.response.headers.get('content-type', '') - def dist(self): - # type: () -> pkg_resources.Distribution - return self.req.satisfied_by - def prep_for_dist(self, finder, build_isolation): - # type: (PackageFinder, bool) -> Any - pass +def _check_download_dir(link, download_dir, hashes): + # type: (Link, str, Optional[Hashes]) -> Optional[str] + """ Check download_dir for previously downloaded file with correct hash + If a correct file is found return its path else None + """ + download_path = os.path.join(download_dir, link.filename) + + if not os.path.exists(download_path): + return None + + # If already downloaded, does its hash match? + logger.info('File was already downloaded %s', download_path) + if hashes: + try: + hashes.check_against_path(download_path) + except HashMismatch: + logger.warning( + 'Previously-downloaded file %s has bad hash. 
' + 'Re-downloading.', + download_path + ) + os.unlink(download_path) + return None + return download_path class RequirementPreparer(object): @@ -180,9 +324,12 @@ class RequirementPreparer(object): download_dir, # type: Optional[str] src_dir, # type: str wheel_download_dir, # type: Optional[str] - progress_bar, # type: str build_isolation, # type: bool - req_tracker # type: RequirementTracker + req_tracker, # type: RequirementTracker + downloader, # type: Downloader + finder, # type: PackageFinder + require_hashes, # type: bool + use_user_site, # type: bool ): # type: (...) -> None super(RequirementPreparer, self).__init__() @@ -190,16 +337,16 @@ class RequirementPreparer(object): self.src_dir = src_dir self.build_dir = build_dir self.req_tracker = req_tracker + self.downloader = downloader + self.finder = finder - # Where still packed archives should be written to. If None, they are + # Where still-packed archives should be written to. If None, they are # not saved, and are deleted immediately after unpacking. self.download_dir = download_dir # Where still-packed .whl files should be written to. If None, they are # written to the download_dir parameter. Separate to download_dir to # permit only keeping wheel archives for pip wheel. - if wheel_download_dir: - wheel_download_dir = normalize_path(wheel_download_dir) self.wheel_download_dir = wheel_download_dir # NOTE @@ -207,160 +354,158 @@ class RequirementPreparer(object): # be combined if we're willing to have non-wheel archives present in # the wheelhouse output by 'pip wheel'. - self.progress_bar = progress_bar - # Is build isolation allowed? self.build_isolation = build_isolation + # Should hash-checking be required? + self.require_hashes = require_hashes + + # Should install in user site-packages? 
+ self.use_user_site = use_user_site + @property def _download_should_save(self): # type: () -> bool - # TODO: Modify to reduce indentation needed - if self.download_dir: - self.download_dir = expanduser(self.download_dir) - if os.path.exists(self.download_dir): - return True - else: - logger.critical('Could not find download directory') - raise InstallationError( - "Could not find or access download directory '%s'" - % display_path(self.download_dir)) - return False + if not self.download_dir: + return False - def prepare_linked_requirement( - self, - req, # type: InstallRequirement - session, # type: PipSession - finder, # type: PackageFinder - upgrade_allowed, # type: bool - require_hashes # type: bool - ): - # type: (...) -> DistAbstraction - """Prepare a requirement that would be obtained from req.link - """ - # TODO: Breakup into smaller functions - if req.link and req.link.scheme == 'file': - path = url_to_path(req.link.url) + if os.path.exists(self.download_dir): + return True + + logger.critical('Could not find download directory') + raise InstallationError( + "Could not find or access download directory '{}'" + .format(self.download_dir)) + + def _log_preparing_link(self, req): + # type: (InstallRequirement) -> None + """Log the way the link prepared.""" + if req.link.is_file: + path = req.link.file_path logger.info('Processing %s', display_path(path)) else: - logger.info('Collecting %s', req) + logger.info('Collecting %s', req.req or req) + + def _ensure_link_req_src_dir(self, req, download_dir, parallel_builds): + # type: (InstallRequirement, Optional[str], bool) -> None + """Ensure source_dir of a linked InstallRequirement.""" + # Since source_dir is only set for editable requirements. + if req.link.is_wheel: + # We don't need to unpack wheels, so no need for a source + # directory. + return + assert req.source_dir is None + # We always delete unpacked sdists after pip runs. 
+ req.ensure_has_source_dir( + self.build_dir, + autodelete=True, + parallel_builds=parallel_builds, + ) - with indent_log(): - # @@ if filesystem packages are not marked - # editable in a req, a non deterministic error - # occurs when the script attempts to unpack the - # build directory - req.ensure_has_source_dir(self.build_dir) - # If a checkout exists, it's unwise to keep going. version - # inconsistencies are logged later, but do not fail the - # installation. - # FIXME: this won't upgrade when there's an existing - # package unpacked in `req.source_dir` - # package unpacked in `req.source_dir` - if os.path.exists(os.path.join(req.source_dir, 'setup.py')): - raise PreviousBuildDirError( - "pip can't proceed with requirements '%s' due to a" - " pre-existing build directory (%s). This is " - "likely due to a previous installation that failed" - ". pip is being responsible and not assuming it " - "can delete this. Please delete it and try again." - % (req, req.source_dir) - ) - req.populate_link(finder, upgrade_allowed, require_hashes) - - # We can't hit this spot and have populate_link return None. - # req.satisfied_by is None here (because we're - # guarded) and upgrade has no impact except when satisfied_by - # is not None. - # Then inside find_requirement existing_applicable -> False - # If no new versions are found, DistributionNotFound is raised, - # otherwise a result is guaranteed. - assert req.link - link = req.link - - # Now that we have the real link, we can tell what kind of - # requirements we have and raise some more informative errors - # than otherwise. (For example, we can raise VcsHashUnsupported - # for a VCS URL rather than HashMissing.) - if require_hashes: - # We could check these first 2 conditions inside - # unpack_url and save repetition of conditions, but then - # we would report less-useful error messages for - # unhashable requirements, complaining that there's no - # hash provided. 
- if is_vcs_url(link): - raise VcsHashUnsupported() - elif is_file_url(link) and is_dir_url(link): - raise DirectoryUrlHashUnsupported() - if not req.original_link and not req.is_pinned: - # Unpinned packages are asking for trouble when a new - # version is uploaded. This isn't a security check, but - # it saves users a surprising hash mismatch in the - # future. - # - # file:/// URLs aren't pinnable, so don't complain - # about them not being pinned. - raise HashUnpinned() - - hashes = req.hashes(trust_internet=not require_hashes) - if require_hashes and not hashes: - # Known-good hashes are missing for this requirement, so - # shim it with a facade object that will provoke hash - # computation and then raise a HashMissing exception - # showing the user what the hash should be. - hashes = MissingHashes() + # If a checkout exists, it's unwise to keep going. version + # inconsistencies are logged later, but do not fail the + # installation. + # FIXME: this won't upgrade when there's an existing + # package unpacked in `req.source_dir` + if os.path.exists(os.path.join(req.source_dir, 'setup.py')): + raise PreviousBuildDirError( + "pip can't proceed with requirements '{}' due to a" + "pre-existing build directory ({}). This is likely " + "due to a previous installation that failed . pip is " + "being responsible and not assuming it can delete this. " + "Please delete it and try again.".format(req, req.source_dir) + ) + + def _get_linked_req_hashes(self, req): + # type: (InstallRequirement) -> Hashes + # By the time this is called, the requirement's link should have + # been checked so we can tell what kind of requirements req is + # and raise some more informative errors than otherwise. + # (For example, we can raise VcsHashUnsupported for a VCS URL + # rather than HashMissing.) 
+ if not self.require_hashes: + return req.hashes(trust_internet=True) + + # We could check these first 2 conditions inside unpack_url + # and save repetition of conditions, but then we would + # report less-useful error messages for unhashable + # requirements, complaining that there's no hash provided. + if req.link.is_vcs: + raise VcsHashUnsupported() + if req.link.is_existing_dir(): + raise DirectoryUrlHashUnsupported() + + # Unpinned packages are asking for trouble when a new version + # is uploaded. This isn't a security check, but it saves users + # a surprising hash mismatch in the future. + # file:/// URLs aren't pinnable, so don't complain about them + # not being pinned. + if req.original_link is None and not req.is_pinned: + raise HashUnpinned() + + # If known-good hashes are missing for this requirement, + # shim it with a facade object that will provoke hash + # computation and then raise a HashMissing exception + # showing the user what the hash should be. + return req.hashes(trust_internet=False) or MissingHashes() + + def prepare_linked_requirement(self, req, parallel_builds=False): + # type: (InstallRequirement, bool) -> AbstractDistribution + """Prepare a requirement to be obtained from req.link.""" + assert req.link + link = req.link + self._log_preparing_link(req) + if link.is_wheel and self.wheel_download_dir: + # Download wheels to a dedicated dir when doing `pip wheel`. + download_dir = self.wheel_download_dir + else: + download_dir = self.download_dir + with indent_log(): + self._ensure_link_req_src_dir(req, download_dir, parallel_builds) try: - download_dir = self.download_dir - # We always delete unpacked sdists after pip ran. - autodelete_unpacked = True - if req.link.is_wheel and self.wheel_download_dir: - # when doing 'pip wheel` we download wheels to a - # dedicated dir. - download_dir = self.wheel_download_dir - if req.link.is_wheel: - if download_dir: - # When downloading, we only unpack wheels to get - # metadata. 
- autodelete_unpacked = True - else: - # When installing a wheel, we use the unpacked - # wheel. - autodelete_unpacked = False - unpack_url( - req.link, req.source_dir, - download_dir, autodelete_unpacked, - session=session, hashes=hashes, - progress_bar=self.progress_bar - ) - except requests.HTTPError as exc: - logger.critical( - 'Could not install requirement %s because of error %s', - req, - exc, + local_file = unpack_url( + link, req.source_dir, self.downloader, download_dir, + hashes=self._get_linked_req_hashes(req) ) + except NetworkConnectionError as exc: raise InstallationError( - 'Could not install requirement %s because of HTTP ' - 'error %s for URL %s' % - (req, exc, req.link) + 'Could not install requirement {} because of HTTP ' + 'error {} for URL {}'.format(req, exc, link) ) - abstract_dist = make_abstract_dist(req) - with self.req_tracker.track(req): - abstract_dist.prep_for_dist(finder, self.build_isolation) + + # For use in later processing, preserve the file path on the + # requirement. + if local_file: + req.local_file_path = local_file.path + + abstract_dist = _get_prepared_distribution( + req, self.req_tracker, self.finder, self.build_isolation, + ) + + if download_dir: + if link.is_existing_dir(): + logger.info('Link is a directory, ignoring download_dir') + elif local_file: + download_location = os.path.join( + download_dir, link.filename + ) + if not os.path.exists(download_location): + shutil.copy(local_file.path, download_location) + download_path = display_path(download_location) + logger.info('Saved %s', download_path) + if self._download_should_save: # Make a .zip of the source_dir we already created. - if req.link.scheme in vcs.all_schemes: + if link.is_vcs: req.archive(self.download_dir) return abstract_dist def prepare_editable_requirement( self, req, # type: InstallRequirement - require_hashes, # type: bool - use_user_site, # type: bool - finder # type: PackageFinder ): - # type: (...) -> DistAbstraction + # type: (...) 
-> AbstractDistribution """Prepare an editable requirement """ assert req.editable, "cannot prepare a non-editable req as editable" @@ -368,46 +513,50 @@ class RequirementPreparer(object): logger.info('Obtaining %s', req) with indent_log(): - if require_hashes: + if self.require_hashes: raise InstallationError( - 'The editable requirement %s cannot be installed when ' + 'The editable requirement {} cannot be installed when ' 'requiring hashes, because there is no single file to ' - 'hash.' % req + 'hash.'.format(req) ) req.ensure_has_source_dir(self.src_dir) req.update_editable(not self._download_should_save) - abstract_dist = make_abstract_dist(req) - with self.req_tracker.track(req): - abstract_dist.prep_for_dist(finder, self.build_isolation) + abstract_dist = _get_prepared_distribution( + req, self.req_tracker, self.finder, self.build_isolation, + ) if self._download_should_save: req.archive(self.download_dir) - req.check_if_exists(use_user_site) + req.check_if_exists(self.use_user_site) return abstract_dist - def prepare_installed_requirement(self, req, require_hashes, skip_reason): - # type: (InstallRequirement, bool, Optional[str]) -> DistAbstraction + def prepare_installed_requirement( + self, + req, # type: InstallRequirement + skip_reason # type: str + ): + # type: (...) -> AbstractDistribution """Prepare an already-installed requirement """ assert req.satisfied_by, "req should have been satisfied but isn't" assert skip_reason is not None, ( "did not get skip reason skipped but req.satisfied_by " - "is set to %r" % (req.satisfied_by,) + "is set to {}".format(req.satisfied_by) ) logger.info( 'Requirement %s: %s (%s)', skip_reason, req, req.satisfied_by.version ) with indent_log(): - if require_hashes: + if self.require_hashes: logger.debug( 'Since it is already installed, we are trusting this ' 'package without checking its hash. To ensure a ' 'completely repeatable environment, install into an ' 'empty virtualenv.' 
) - abstract_dist = Installed(req) + abstract_dist = InstalledDistribution(req) return abstract_dist diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/pep425tags.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/pep425tags.py deleted file mode 100644 index 1e782d1a..00000000 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/pep425tags.py +++ /dev/null @@ -1,381 +0,0 @@ -"""Generate and work with PEP 425 Compatibility Tags.""" -from __future__ import absolute_import - -import distutils.util -import logging -import platform -import re -import sys -import sysconfig -import warnings -from collections import OrderedDict - -import pip._internal.utils.glibc -from pip._internal.utils.compat import get_extension_suffixes -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 - Tuple, Callable, List, Optional, Union, Dict - ) - - Pep425Tag = Tuple[str, str, str] - -logger = logging.getLogger(__name__) - -_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)') - - -def get_config_var(var): - # type: (str) -> Optional[str] - try: - return sysconfig.get_config_var(var) - except IOError as e: # Issue #1074 - warnings.warn("{}".format(e), RuntimeWarning) - return None - - -def get_abbr_impl(): - # type: () -> str - """Return abbreviated implementation name.""" - if hasattr(sys, 'pypy_version_info'): - pyimpl = 'pp' - elif sys.platform.startswith('java'): - pyimpl = 'jy' - elif sys.platform == 'cli': - pyimpl = 'ip' - else: - pyimpl = 'cp' - return pyimpl - - -def get_impl_ver(): - # type: () -> str - """Return implementation version.""" - impl_ver = get_config_var("py_version_nodot") - if not impl_ver or get_abbr_impl() == 'pp': - impl_ver = ''.join(map(str, get_impl_version_info())) - return impl_ver - - -def get_impl_version_info(): - # type: () -> Tuple[int, ...] 
- """Return sys.version_info-like tuple for use in decrementing the minor - version.""" - if get_abbr_impl() == 'pp': - # as per https://github.com/pypa/pip/issues/2882 - # attrs exist only on pypy - return (sys.version_info[0], - sys.pypy_version_info.major, # type: ignore - sys.pypy_version_info.minor) # type: ignore - else: - return sys.version_info[0], sys.version_info[1] - - -def get_impl_tag(): - # type: () -> str - """ - Returns the Tag for this specific implementation. - """ - return "{}{}".format(get_abbr_impl(), get_impl_ver()) - - -def get_flag(var, fallback, expected=True, warn=True): - # type: (str, Callable[..., bool], Union[bool, int], bool) -> bool - """Use a fallback method for determining SOABI flags if the needed config - var is unset or unavailable.""" - val = get_config_var(var) - if val is None: - if warn: - logger.debug("Config variable '%s' is unset, Python ABI tag may " - "be incorrect", var) - return fallback() - return val == expected - - -def get_abi_tag(): - # type: () -> Optional[str] - """Return the ABI tag based on SOABI (if available) or emulate SOABI - (CPython 2, PyPy).""" - soabi = get_config_var('SOABI') - impl = get_abbr_impl() - if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'): - d = '' - m = '' - u = '' - if get_flag('Py_DEBUG', - lambda: hasattr(sys, 'gettotalrefcount'), - warn=(impl == 'cp')): - d = 'd' - if get_flag('WITH_PYMALLOC', - lambda: impl == 'cp', - warn=(impl == 'cp')): - m = 'm' - if get_flag('Py_UNICODE_SIZE', - lambda: sys.maxunicode == 0x10ffff, - expected=4, - warn=(impl == 'cp' and - sys.version_info < (3, 3))) \ - and sys.version_info < (3, 3): - u = 'u' - abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u) - elif soabi and soabi.startswith('cpython-'): - abi = 'cp' + soabi.split('-')[1] - elif soabi: - abi = soabi.replace('.', '_').replace('-', '_') - else: - abi = None - return abi - - -def _is_running_32bit(): - # type: () -> bool - return sys.maxsize == 2147483647 - - -def 
get_platform(): - # type: () -> str - """Return our platform name 'win32', 'linux_x86_64'""" - if sys.platform == 'darwin': - # distutils.util.get_platform() returns the release based on the value - # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may - # be significantly older than the user's current machine. - release, _, machine = platform.mac_ver() - split_ver = release.split('.') - - if machine == "x86_64" and _is_running_32bit(): - machine = "i386" - elif machine == "ppc64" and _is_running_32bit(): - machine = "ppc" - - return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine) - - # XXX remove distutils dependency - result = distutils.util.get_platform().replace('.', '_').replace('-', '_') - if result == "linux_x86_64" and _is_running_32bit(): - # 32 bit Python program (running on a 64 bit Linux): pip should only - # install and run 32 bit compiled extensions in that case. - result = "linux_i686" - - return result - - -def is_manylinux1_compatible(): - # type: () -> bool - # Only Linux, and only x86-64 / i686 - if get_platform() not in {"linux_x86_64", "linux_i686"}: - return False - - # Check for presence of _manylinux module - try: - import _manylinux - return bool(_manylinux.manylinux1_compatible) - except (ImportError, AttributeError): - # Fall through to heuristic check below - pass - - # Check glibc version. CentOS 5 uses glibc 2.5. - return pip._internal.utils.glibc.have_compatible_glibc(2, 5) - - -def is_manylinux2010_compatible(): - # type: () -> bool - # Only Linux, and only x86-64 / i686 - if get_platform() not in {"linux_x86_64", "linux_i686"}: - return False - - # Check for presence of _manylinux module - try: - import _manylinux - return bool(_manylinux.manylinux2010_compatible) - except (ImportError, AttributeError): - # Fall through to heuristic check below - pass - - # Check glibc version. CentOS 6 uses glibc 2.12. 
- return pip._internal.utils.glibc.have_compatible_glibc(2, 12) - - -def get_darwin_arches(major, minor, machine): - # type: (int, int, str) -> List[str] - """Return a list of supported arches (including group arches) for - the given major, minor and machine architecture of an macOS machine. - """ - arches = [] - - def _supports_arch(major, minor, arch): - # type: (int, int, str) -> bool - # Looking at the application support for macOS versions in the chart - # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears - # our timeline looks roughly like: - # - # 10.0 - Introduces ppc support. - # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64 - # and x86_64 support is CLI only, and cannot be used for GUI - # applications. - # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications. - # 10.6 - Drops support for ppc64 - # 10.7 - Drops support for ppc - # - # Given that we do not know if we're installing a CLI or a GUI - # application, we must be conservative and assume it might be a GUI - # application and behave as if ppc64 and x86_64 support did not occur - # until 10.5. - # - # Note: The above information is taken from the "Application support" - # column in the chart not the "Processor support" since I believe - # that we care about what instruction sets an application can use - # not which processors the OS supports. 
- if arch == 'ppc': - return (major, minor) <= (10, 5) - if arch == 'ppc64': - return (major, minor) == (10, 5) - if arch == 'i386': - return (major, minor) >= (10, 4) - if arch == 'x86_64': - return (major, minor) >= (10, 5) - if arch in groups: - for garch in groups[arch]: - if _supports_arch(major, minor, garch): - return True - return False - - groups = OrderedDict([ - ("fat", ("i386", "ppc")), - ("intel", ("x86_64", "i386")), - ("fat64", ("x86_64", "ppc64")), - ("fat32", ("x86_64", "i386", "ppc")), - ]) # type: Dict[str, Tuple[str, ...]] - - if _supports_arch(major, minor, machine): - arches.append(machine) - - for garch in groups: - if machine in groups[garch] and _supports_arch(major, minor, garch): - arches.append(garch) - - arches.append('universal') - - return arches - - -def get_all_minor_versions_as_strings(version_info): - # type: (Tuple[int, ...]) -> List[str] - versions = [] - major = version_info[:-1] - # Support all previous minor Python versions. - for minor in range(version_info[-1], -1, -1): - versions.append(''.join(map(str, major + (minor,)))) - return versions - - -def get_supported( - versions=None, # type: Optional[List[str]] - noarch=False, # type: bool - platform=None, # type: Optional[str] - impl=None, # type: Optional[str] - abi=None # type: Optional[str] -): - # type: (...) -> List[Pep425Tag] - """Return a list of supported tags for each version specified in - `versions`. - - :param versions: a list of string versions, of the form ["33", "32"], - or None. The first version will be assumed to support our ABI. - :param platform: specify the exact platform you want valid - tags for, or None. If None, use the local system platform. - :param impl: specify the exact implementation you want valid - tags for, or None. If None, use the local interpreter impl. - :param abi: specify the exact abi you want valid - tags for, or None. If None, use the local interpreter abi. 
- """ - supported = [] - - # Versions must be given with respect to the preference - if versions is None: - version_info = get_impl_version_info() - versions = get_all_minor_versions_as_strings(version_info) - - impl = impl or get_abbr_impl() - - abis = [] # type: List[str] - - abi = abi or get_abi_tag() - if abi: - abis[0:0] = [abi] - - abi3s = set() - for suffix in get_extension_suffixes(): - if suffix.startswith('.abi'): - abi3s.add(suffix.split('.', 2)[1]) - - abis.extend(sorted(list(abi3s))) - - abis.append('none') - - if not noarch: - arch = platform or get_platform() - arch_prefix, arch_sep, arch_suffix = arch.partition('_') - if arch.startswith('macosx'): - # support macosx-10.6-intel on macosx-10.9-x86_64 - match = _osx_arch_pat.match(arch) - if match: - name, major, minor, actual_arch = match.groups() - tpl = '{}_{}_%i_%s'.format(name, major) - arches = [] - for m in reversed(range(int(minor) + 1)): - for a in get_darwin_arches(int(major), m, actual_arch): - arches.append(tpl % (m, a)) - else: - # arch pattern didn't match (?!) - arches = [arch] - elif arch_prefix == 'manylinux2010': - # manylinux1 wheels run on most manylinux2010 systems with the - # exception of wheels depending on ncurses. 
PEP 571 states - # manylinux1 wheels should be considered manylinux2010 wheels: - # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels - arches = [arch, 'manylinux1' + arch_sep + arch_suffix] - elif platform is None: - arches = [] - if is_manylinux2010_compatible(): - arches.append('manylinux2010' + arch_sep + arch_suffix) - if is_manylinux1_compatible(): - arches.append('manylinux1' + arch_sep + arch_suffix) - arches.append(arch) - else: - arches = [arch] - - # Current version, current API (built specifically for our Python): - for abi in abis: - for arch in arches: - supported.append(('%s%s' % (impl, versions[0]), abi, arch)) - - # abi3 modules compatible with older version of Python - for version in versions[1:]: - # abi3 was introduced in Python 3.2 - if version in {'31', '30'}: - break - for abi in abi3s: # empty set if not Python 3 - for arch in arches: - supported.append(("%s%s" % (impl, version), abi, arch)) - - # Has binaries, does not use the Python API: - for arch in arches: - supported.append(('py%s' % (versions[0][0]), 'none', arch)) - - # No abi / arch, but requires our implementation: - supported.append(('%s%s' % (impl, versions[0]), 'none', 'any')) - # Tagged specifically as being cross-version compatible - # (with just the major version specified) - supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any')) - - # No abi / arch, generic Python - for i, version in enumerate(versions): - supported.append(('py%s' % (version,), 'none', 'any')) - if i == 0: - supported.append(('py%s' % (version[0]), 'none', 'any')) - - return supported - - -implementation_tag = get_impl_tag() diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/pyproject.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/pyproject.py index 8d739a6c..6b4faf7a 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/pyproject.py +++ 
b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/pyproject.py @@ -3,14 +3,16 @@ from __future__ import absolute_import import io import os import sys +from collections import namedtuple -from pip._vendor import pytoml, six +from pip._vendor import six, toml +from pip._vendor.packaging.requirements import InvalidRequirement, Requirement from pip._internal.exceptions import InstallationError from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Any, Tuple, Optional, List # noqa: F401 + from typing import Any, Optional, List def _is_list_of_str(obj): @@ -21,9 +23,9 @@ def _is_list_of_str(obj): ) -def make_pyproject_path(setup_py_dir): +def make_pyproject_path(unpacked_source_directory): # type: (str) -> str - path = os.path.join(setup_py_dir, 'pyproject.toml') + path = os.path.join(unpacked_source_directory, 'pyproject.toml') # Python2 __file__ should not be unicode if six.PY2 and isinstance(path, six.text_type): @@ -32,13 +34,18 @@ def make_pyproject_path(setup_py_dir): return path +BuildSystemDetails = namedtuple('BuildSystemDetails', [ + 'requires', 'backend', 'check', 'backend_path' +]) + + def load_pyproject_toml( use_pep517, # type: Optional[bool] pyproject_toml, # type: str setup_py, # type: str req_name # type: str ): - # type: (...) -> Optional[Tuple[List[str], str, List[str]]] + # type: (...) -> Optional[BuildSystemDetails] """Load the pyproject.toml file. Parameters: @@ -56,6 +63,8 @@ def load_pyproject_toml( name of PEP 517 backend, requirements we should check are installed after setting up the build environment + directory paths to import the backend from (backend-path), + relative to the project root. 
) """ has_pyproject = os.path.isfile(pyproject_toml) @@ -63,7 +72,7 @@ def load_pyproject_toml( if has_pyproject: with io.open(pyproject_toml, encoding="utf-8") as f: - pp_toml = pytoml.load(f) + pp_toml = toml.load(f) build_system = pp_toml.get("build-system") else: build_system = None @@ -150,7 +159,23 @@ def load_pyproject_toml( reason="'build-system.requires' is not a list of strings.", )) + # Each requirement must be valid as per PEP 508 + for requirement in requires: + try: + Requirement(requirement) + except InvalidRequirement: + raise InstallationError( + error_template.format( + package=req_name, + reason=( + "'build-system.requires' contains an invalid " + "requirement: {!r}".format(requirement) + ), + ) + ) + backend = build_system.get("build-backend") + backend_path = build_system.get("backend-path", []) check = [] # type: List[str] if backend is None: # If the user didn't specify a backend, we assume they want to use @@ -168,4 +193,4 @@ def load_pyproject_toml( backend = "setuptools.build_meta:__legacy__" check = ["setuptools>=40.8.0", "wheel"] - return (requires, backend, check) + return BuildSystemDetails(requires, backend, check, backend_path) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/__init__.py index 5e4eb92f..8568d3f8 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/__init__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/__init__.py @@ -1,15 +1,17 @@ from __future__ import absolute_import +import collections import logging -from .req_install import InstallRequirement -from .req_set import RequirementSet -from .req_file import parse_requirements from pip._internal.utils.logging import indent_log from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from .req_file import parse_requirements +from .req_install import InstallRequirement +from .req_set import 
RequirementSet + if MYPY_CHECK_RUNNING: - from typing import List, Sequence # noqa: F401 + from typing import Iterator, List, Optional, Sequence, Tuple __all__ = [ "RequirementSet", "InstallRequirement", @@ -19,59 +21,83 @@ __all__ = [ logger = logging.getLogger(__name__) +class InstallationResult(object): + def __init__(self, name): + # type: (str) -> None + self.name = name + + def __repr__(self): + # type: () -> str + return "InstallationResult(name={!r})".format(self.name) + + +def _validate_requirements( + requirements, # type: List[InstallRequirement] +): + # type: (...) -> Iterator[Tuple[str, InstallRequirement]] + for req in requirements: + assert req.name, "invalid to-be-installed requirement: {}".format(req) + yield req.name, req + + def install_given_reqs( - to_install, # type: List[InstallRequirement] + requirements, # type: List[InstallRequirement] install_options, # type: List[str] - global_options=(), # type: Sequence[str] - *args, **kwargs + global_options, # type: Sequence[str] + root, # type: Optional[str] + home, # type: Optional[str] + prefix, # type: Optional[str] + warn_script_location, # type: bool + use_user_site, # type: bool + pycompile, # type: bool ): - # type: (...) -> List[InstallRequirement] + # type: (...) -> List[InstallationResult] """ Install everything in the given list. 
(to be called after having downloaded and unpacked the packages) """ + to_install = collections.OrderedDict(_validate_requirements(requirements)) if to_install: logger.info( 'Installing collected packages: %s', - ', '.join([req.name for req in to_install]), + ', '.join(to_install.keys()), ) + installed = [] + with indent_log(): - for requirement in to_install: - if requirement.conflicts_with: - logger.info( - 'Found existing installation: %s', - requirement.conflicts_with, - ) + for req_name, requirement in to_install.items(): + if requirement.should_reinstall: + logger.info('Attempting uninstall: %s', req_name) with indent_log(): uninstalled_pathset = requirement.uninstall( auto_confirm=True ) + else: + uninstalled_pathset = None + try: requirement.install( install_options, global_options, - *args, - **kwargs + root=root, + home=home, + prefix=prefix, + warn_script_location=warn_script_location, + use_user_site=use_user_site, + pycompile=pycompile, ) except Exception: - should_rollback = ( - requirement.conflicts_with and - not requirement.install_succeeded - ) # if install did not succeed, rollback previous uninstall - if should_rollback: + if uninstalled_pathset and not requirement.install_succeeded: uninstalled_pathset.rollback() raise else: - should_commit = ( - requirement.conflicts_with and - requirement.install_succeeded - ) - if should_commit: + if uninstalled_pathset and requirement.install_succeeded: uninstalled_pathset.commit() - requirement.remove_temporary_source() - return to_install + installed.append(InstallationResult(req_name)) + + return installed diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/constructors.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/constructors.py index 1eed1dd3..7a4641ef 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/constructors.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/constructors.py @@ 
-17,24 +17,24 @@ from pip._vendor.packaging.requirements import InvalidRequirement, Requirement from pip._vendor.packaging.specifiers import Specifier from pip._vendor.pkg_resources import RequirementParseError, parse_requirements -from pip._internal.download import ( - is_archive_file, is_url, path_to_url, url_to_path, -) from pip._internal.exceptions import InstallationError from pip._internal.models.index import PyPI, TestPyPI from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel from pip._internal.pyproject import make_pyproject_path from pip._internal.req.req_install import InstallRequirement -from pip._internal.utils.misc import is_installable_dir +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.filetypes import ARCHIVE_EXTENSIONS +from pip._internal.utils.misc import is_installable_dir, splitext from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.vcs import vcs -from pip._internal.wheel import Wheel +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs import is_url, vcs if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 - Optional, Tuple, Set, Any, Union, Text, Dict, + from typing import ( + Any, Dict, Optional, Set, Tuple, Union, ) - from pip._internal.cache import WheelCache # noqa: F401 + from pip._internal.req.req_file import ParsedRequirement __all__ = [ @@ -46,6 +46,15 @@ logger = logging.getLogger(__name__) operators = Specifier._operators.keys() +def is_archive_file(name): + # type: (str) -> bool + """Return True if `name` is a considered as an archive file.""" + ext = splitext(name)[1].lower() + if ext in ARCHIVE_EXTENSIONS: + return True + return False + + def _strip_extras(path): # type: (str) -> Tuple[str, Optional[str]] m = re.match(r'^(.+)(\[[^\]]+\])$', path) @@ -59,8 +68,15 @@ def _strip_extras(path): return path_no_extras, extras +def convert_extras(extras): + # type: (Optional[str]) -> Set[str] + if not extras: + return 
set() + return Requirement("placeholder" + extras.lower()).extras + + def parse_editable(editable_req): - # type: (str) -> Tuple[Optional[str], str, Optional[Set[str]]] + # type: (str) -> Tuple[Optional[str], str, Set[str]] """Parses an editable requirement into: - a requirement name - an URL @@ -102,35 +118,36 @@ def parse_editable(editable_req): Requirement("placeholder" + extras.lower()).extras, ) else: - return package_name, url_no_extras, None + return package_name, url_no_extras, set() for version_control in vcs: - if url.lower().startswith('%s:' % version_control): - url = '%s+%s' % (version_control, url) + if url.lower().startswith('{}:'.format(version_control)): + url = '{}+{}'.format(version_control, url) break if '+' not in url: raise InstallationError( - '%s should either be a path to a local project or a VCS url ' - 'beginning with svn+, git+, hg+, or bzr+' % - editable_req + '{} is not a valid editable requirement. ' + 'It should either be a path to a local project or a VCS URL ' + '(beginning with svn+, git+, hg+, or bzr+).'.format(editable_req) ) vc_type = url.split('+', 1)[0].lower() if not vcs.get_backend(vc_type): - error_message = 'For --editable=%s only ' % editable_req + \ - ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \ - ' is currently supported' + backends = ", ".join([bends.name + '+URL' for bends in vcs.backends]) + error_message = "For --editable={}, " \ + "only {} are currently supported".format( + editable_req, backends) raise InstallationError(error_message) package_name = Link(url).egg_fragment if not package_name: raise InstallationError( - "Could not detect requirement name for '%s', please specify one " - "with #egg=your_package_name" % editable_req + "Could not detect requirement name for '{}', please specify one " + "with #egg=your_package_name".format(editable_req) ) - return package_name, url, None + return package_name, url, set() def deduce_helpful_msg(req): @@ -148,71 +165,141 @@ def 
deduce_helpful_msg(req): with open(req, 'r') as fp: # parse first line only next(parse_requirements(fp.read())) - msg += " The argument you provided " + \ - "(%s) appears to be a" % (req) + \ - " requirements file. If that is the" + \ - " case, use the '-r' flag to install" + \ + msg += ( + "The argument you provided " + "({}) appears to be a" + " requirements file. If that is the" + " case, use the '-r' flag to install" " the packages specified within it." + ).format(req) except RequirementParseError: - logger.debug("Cannot parse '%s' as requirements \ - file" % (req), exc_info=True) + logger.debug( + "Cannot parse '%s' as requirements file", req, exc_info=True + ) else: - msg += " File '%s' does not exist." % (req) + msg += " File '{}' does not exist.".format(req) return msg +class RequirementParts(object): + def __init__( + self, + requirement, # type: Optional[Requirement] + link, # type: Optional[Link] + markers, # type: Optional[Marker] + extras, # type: Set[str] + ): + self.requirement = requirement + self.link = link + self.markers = markers + self.extras = extras + + +def parse_req_from_editable(editable_req): + # type: (str) -> RequirementParts + name, url, extras_override = parse_editable(editable_req) + + if name is not None: + try: + req = Requirement(name) + except InvalidRequirement: + raise InstallationError("Invalid requirement: '{}'".format(name)) + else: + req = None + + link = Link(url) + + return RequirementParts(req, link, None, extras_override) + + # ---- The actual constructors follow ---- def install_req_from_editable( editable_req, # type: str - comes_from=None, # type: Optional[str] + comes_from=None, # type: Optional[Union[InstallRequirement, str]] use_pep517=None, # type: Optional[bool] isolated=False, # type: bool options=None, # type: Optional[Dict[str, Any]] - wheel_cache=None, # type: Optional[WheelCache] - constraint=False # type: bool + constraint=False, # type: bool + user_supplied=False, # type: bool ): # type: (...) 
-> InstallRequirement - name, url, extras_override = parse_editable(editable_req) - if url.startswith('file:'): - source_dir = url_to_path(url) - else: - source_dir = None - if name is not None: - try: - req = Requirement(name) - except InvalidRequirement: - raise InstallationError("Invalid requirement: '%s'" % name) - else: - req = None + parts = parse_req_from_editable(editable_req) + return InstallRequirement( - req, comes_from, source_dir=source_dir, + parts.requirement, + comes_from=comes_from, + user_supplied=user_supplied, editable=True, - link=Link(url), + link=parts.link, constraint=constraint, use_pep517=use_pep517, isolated=isolated, - options=options if options else {}, - wheel_cache=wheel_cache, - extras=extras_override or (), + install_options=options.get("install_options", []) if options else [], + global_options=options.get("global_options", []) if options else [], + hash_options=options.get("hashes", {}) if options else {}, + extras=parts.extras, ) -def install_req_from_line( - name, # type: str - comes_from=None, # type: Optional[Union[str, InstallRequirement]] - use_pep517=None, # type: Optional[bool] - isolated=False, # type: bool - options=None, # type: Optional[Dict[str, Any]] - wheel_cache=None, # type: Optional[WheelCache] - constraint=False # type: bool -): - # type: (...) -> InstallRequirement - """Creates an InstallRequirement from a name, which might be a - requirement, directory containing 'setup.py', filename, or URL. +def _looks_like_path(name): + # type: (str) -> bool + """Checks whether the string "looks like" a path on the filesystem. + + This does not check whether the target actually exists, only judge from the + appearance. + + Returns true if any of the following conditions is true: + * a path separator is found (either os.path.sep or os.path.altsep); + * a dot is found (which represents the current directory). 
+ """ + if os.path.sep in name: + return True + if os.path.altsep is not None and os.path.altsep in name: + return True + if name.startswith("."): + return True + return False + + +def _get_url_from_path(path, name): + # type: (str, str) -> Optional[str] """ + First, it checks whether a provided path is an installable directory + (e.g. it has a setup.py). If it is, returns the path. + + If false, check if the path is an archive file (such as a .whl). + The function checks if the path is a file. If false, if the path has + an @, it will treat it as a PEP 440 URL requirement and return the path. + """ + if _looks_like_path(name) and os.path.isdir(path): + if is_installable_dir(path): + return path_to_url(path) + raise InstallationError( + "Directory {name!r} is not installable. Neither 'setup.py' " + "nor 'pyproject.toml' found.".format(**locals()) + ) + if not is_archive_file(path): + return None + if os.path.isfile(path): + return path_to_url(path) + urlreq_parts = name.split('@', 1) + if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]): + # If the path contains '@' and the part before it does not look + # like a path, try to treat it as a PEP 440 URL req instead. + return None + logger.warning( + 'Requirement %r looks like a filename, but the ' + 'file does not exist', + name + ) + return path_to_url(path) + + +def parse_req_from_line(name, line_source): + # type: (str, Optional[str]) -> RequirementParts if is_url(name): marker_sep = '; ' else: @@ -236,26 +323,9 @@ def install_req_from_line( link = Link(name) else: p, extras_as_string = _strip_extras(path) - looks_like_dir = os.path.isdir(p) and ( - os.path.sep in name or - (os.path.altsep is not None and os.path.altsep in name) or - name.startswith('.') - ) - if looks_like_dir: - if not is_installable_dir(p): - raise InstallationError( - "Directory %r is not installable. Neither 'setup.py' " - "nor 'pyproject.toml' found." 
% name - ) - link = Link(path_to_url(p)) - elif is_archive_file(p): - if not os.path.isfile(p): - logger.warning( - 'Requirement %r looks like a filename, but the ' - 'file does not exist', - name - ) - link = Link(path_to_url(p)) + url = _get_url_from_path(p, name) + if url is not None: + link = Link(url) # it's a local file, dir, or url if link: @@ -266,7 +336,7 @@ def install_req_from_line( # wheel file if link.is_wheel: wheel = Wheel(link.filename) # can raise InvalidWheelFilename - req_as_string = "%s==%s" % (wheel.name, wheel.version) + req_as_string = "{wheel.name}=={wheel.version}".format(**locals()) else: # set the req to the egg fragment. when it's not there, this # will become an 'unnamed' requirement @@ -276,10 +346,14 @@ def install_req_from_line( else: req_as_string = name - if extras_as_string: - extras = Requirement("placeholder" + extras_as_string.lower()).extras - else: - extras = () + extras = convert_extras(extras_as_string) + + def with_source(text): + # type: (str) -> str + if not line_source: + return text + return '{} (from {})'.format(text, line_source) + if req_as_string is not None: try: req = Requirement(req_as_string) @@ -291,20 +365,58 @@ def install_req_from_line( not any(op in req_as_string for op in operators)): add_msg = "= is not a valid operator. Did you mean == ?" 
else: - add_msg = "" - raise InstallationError( - "Invalid requirement: '%s'\n%s" % (req_as_string, add_msg) + add_msg = '' + msg = with_source( + 'Invalid requirement: {!r}'.format(req_as_string) ) + if add_msg: + msg += '\nHint: {}'.format(add_msg) + raise InstallationError(msg) + else: + # Deprecate extras after specifiers: "name>=1.0[extras]" + # This currently works by accident because _strip_extras() parses + # any extras in the end of the string and those are saved in + # RequirementParts + for spec in req.specifier: + spec_str = str(spec) + if spec_str.endswith(']'): + msg = "Extras after version '{}'.".format(spec_str) + replace = "moving the extras before version specifiers" + deprecated(msg, replacement=replace, gone_in="21.0") else: req = None + return RequirementParts(req, link, markers, extras) + + +def install_req_from_line( + name, # type: str + comes_from=None, # type: Optional[Union[str, InstallRequirement]] + use_pep517=None, # type: Optional[bool] + isolated=False, # type: bool + options=None, # type: Optional[Dict[str, Any]] + constraint=False, # type: bool + line_source=None, # type: Optional[str] + user_supplied=False, # type: bool +): + # type: (...) -> InstallRequirement + """Creates an InstallRequirement from a name, which might be a + requirement, directory containing 'setup.py', filename, or URL. + + :param line_source: An optional string describing where the line is from, + for logging purposes in case of an error. 
+ """ + parts = parse_req_from_line(name, line_source) + return InstallRequirement( - req, comes_from, link=link, markers=markers, + parts.requirement, comes_from, link=parts.link, markers=parts.markers, use_pep517=use_pep517, isolated=isolated, - options=options if options else {}, - wheel_cache=wheel_cache, + install_options=options.get("install_options", []) if options else [], + global_options=options.get("global_options", []) if options else [], + hash_options=options.get("hashes", {}) if options else {}, constraint=constraint, - extras=extras, + extras=parts.extras, + user_supplied=user_supplied, ) @@ -312,28 +424,63 @@ def install_req_from_req_string( req_string, # type: str comes_from=None, # type: Optional[InstallRequirement] isolated=False, # type: bool - wheel_cache=None, # type: Optional[WheelCache] - use_pep517=None # type: Optional[bool] + use_pep517=None, # type: Optional[bool] + user_supplied=False, # type: bool ): # type: (...) -> InstallRequirement try: req = Requirement(req_string) except InvalidRequirement: - raise InstallationError("Invalid requirement: '%s'" % req) + raise InstallationError("Invalid requirement: '{}'".format(req_string)) domains_not_allowed = [ PyPI.file_storage_domain, TestPyPI.file_storage_domain, ] - if req.url and comes_from.link.netloc in domains_not_allowed: + if (req.url and comes_from and comes_from.link and + comes_from.link.netloc in domains_not_allowed): # Explicitly disallow pypi packages that depend on external urls raise InstallationError( "Packages installed from PyPI cannot depend on packages " "which are not also hosted on PyPI.\n" - "%s depends on %s " % (comes_from.name, req) + "{} depends on {} ".format(comes_from.name, req) ) return InstallRequirement( - req, comes_from, isolated=isolated, wheel_cache=wheel_cache, - use_pep517=use_pep517 + req, + comes_from, + isolated=isolated, + use_pep517=use_pep517, + user_supplied=user_supplied, ) + + +def install_req_from_parsed_requirement( + parsed_req, # type: 
ParsedRequirement + isolated=False, # type: bool + use_pep517=None, # type: Optional[bool] + user_supplied=False, # type: bool +): + # type: (...) -> InstallRequirement + if parsed_req.is_editable: + req = install_req_from_editable( + parsed_req.requirement, + comes_from=parsed_req.comes_from, + use_pep517=use_pep517, + constraint=parsed_req.constraint, + isolated=isolated, + user_supplied=user_supplied, + ) + + else: + req = install_req_from_line( + parsed_req.requirement, + comes_from=parsed_req.comes_from, + use_pep517=use_pep517, + isolated=isolated, + options=parsed_req.options, + constraint=parsed_req.constraint, + line_source=parsed_req.line_source, + user_supplied=user_supplied, + ) + return req diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/req_file.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/req_file.py index 726f2f6a..10505822 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/req_file.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/req_file.py @@ -10,32 +10,37 @@ import re import shlex import sys -from pip._vendor.six.moves import filterfalse from pip._vendor.six.moves.urllib import parse as urllib_parse from pip._internal.cli import cmdoptions -from pip._internal.download import get_file_content -from pip._internal.exceptions import RequirementsFileParseError -from pip._internal.req.constructors import ( - install_req_from_editable, install_req_from_line, +from pip._internal.exceptions import ( + InstallationError, + RequirementsFileParseError, ) +from pip._internal.models.search_scope import SearchScope +from pip._internal.network.utils import raise_for_status +from pip._internal.utils.encoding import auto_decode from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import get_url_scheme if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 - Iterator, Tuple, Optional, List, Callable, 
Text + from optparse import Values + from typing import ( + Any, Callable, Dict, Iterator, List, NoReturn, Optional, Text, Tuple, ) - from pip._internal.req import InstallRequirement # noqa: F401 - from pip._internal.cache import WheelCache # noqa: F401 - from pip._internal.index import PackageFinder # noqa: F401 - from pip._internal.download import PipSession # noqa: F401 + + from pip._internal.index.package_finder import PackageFinder + from pip._internal.network.session import PipSession ReqFileLines = Iterator[Tuple[int, Text]] + LineParser = Callable[[Text], Tuple[str, Values]] + + __all__ = ['parse_requirements'] SCHEME_RE = re.compile(r'^(http|https|file):', re.I) -COMMENT_RE = re.compile(r'(^|\s)+#.*$') +COMMENT_RE = re.compile(r'(^|\s+)#.*$') # Matches environment variable-style values in '${MY_VARIABLE_1}' with the # variable name consisting of only uppercase letters, digits or the '_' @@ -44,19 +49,20 @@ COMMENT_RE = re.compile(r'(^|\s)+#.*$') ENV_VAR_RE = re.compile(r'(?P\$\{(?P[A-Z0-9_]+)\})') SUPPORTED_OPTIONS = [ + cmdoptions.index_url, + cmdoptions.extra_index_url, + cmdoptions.no_index, cmdoptions.constraints, - cmdoptions.editable, cmdoptions.requirements, - cmdoptions.no_index, - cmdoptions.index_url, + cmdoptions.editable, cmdoptions.find_links, - cmdoptions.extra_index_url, - cmdoptions.always_unzip, cmdoptions.no_binary, cmdoptions.only_binary, + cmdoptions.prefer_binary, + cmdoptions.require_hashes, cmdoptions.pre, cmdoptions.trusted_host, - cmdoptions.require_hashes, + cmdoptions.use_new_feature, ] # type: List[Callable[..., optparse.Option]] # options to be passed to requirements @@ -70,174 +76,175 @@ SUPPORTED_OPTIONS_REQ = [ SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ] +class ParsedRequirement(object): + def __init__( + self, + requirement, # type:str + is_editable, # type: bool + comes_from, # type: str + constraint, # type: bool + options=None, # type: Optional[Dict[str, Any]] + line_source=None, # type: 
Optional[str] + ): + # type: (...) -> None + self.requirement = requirement + self.is_editable = is_editable + self.comes_from = comes_from + self.options = options + self.constraint = constraint + self.line_source = line_source + + +class ParsedLine(object): + def __init__( + self, + filename, # type: str + lineno, # type: int + comes_from, # type: Optional[str] + args, # type: str + opts, # type: Values + constraint, # type: bool + ): + # type: (...) -> None + self.filename = filename + self.lineno = lineno + self.comes_from = comes_from + self.opts = opts + self.constraint = constraint + + if args: + self.is_requirement = True + self.is_editable = False + self.requirement = args + elif opts.editables: + self.is_requirement = True + self.is_editable = True + # We don't support multiple -e on one line + self.requirement = opts.editables[0] + else: + self.is_requirement = False + + def parse_requirements( filename, # type: str + session, # type: PipSession finder=None, # type: Optional[PackageFinder] comes_from=None, # type: Optional[str] options=None, # type: Optional[optparse.Values] - session=None, # type: Optional[PipSession] constraint=False, # type: bool - wheel_cache=None, # type: Optional[WheelCache] - use_pep517=None # type: Optional[bool] ): - # type: (...) -> Iterator[InstallRequirement] - """Parse a requirements file and yield InstallRequirement instances. + # type: (...) -> Iterator[ParsedRequirement] + """Parse a requirements file and yield ParsedRequirement instances. :param filename: Path or url of requirements file. + :param session: PipSession instance. :param finder: Instance of pip.index.PackageFinder. :param comes_from: Origin description of requirements. :param options: cli options. - :param session: Instance of pip.download.PipSession. :param constraint: If true, parsing a constraint file rather than requirements file. - :param wheel_cache: Instance of pip.wheel.WheelCache - :param use_pep517: Value of the --use-pep517 option. 
""" - if session is None: - raise TypeError( - "parse_requirements() missing 1 required keyword argument: " - "'session'" + line_parser = get_line_parser(finder) + parser = RequirementsFileParser(session, line_parser, comes_from) + + for parsed_line in parser.parse(filename, constraint): + parsed_req = handle_line( + parsed_line, + options=options, + finder=finder, + session=session ) + if parsed_req is not None: + yield parsed_req - _, content = get_file_content( - filename, comes_from=comes_from, session=session - ) - - lines_enum = preprocess(content, options) - - for line_number, line in lines_enum: - req_iter = process_line(line, filename, line_number, finder, - comes_from, options, session, wheel_cache, - use_pep517=use_pep517, constraint=constraint) - for req in req_iter: - yield req - -def preprocess(content, options): - # type: (Text, Optional[optparse.Values]) -> ReqFileLines +def preprocess(content): + # type: (Text) -> ReqFileLines """Split, filter, and join lines, and return a line iterator :param content: the content of the requirements file - :param options: cli options """ lines_enum = enumerate(content.splitlines(), start=1) # type: ReqFileLines lines_enum = join_lines(lines_enum) lines_enum = ignore_comments(lines_enum) - lines_enum = skip_regex(lines_enum, options) lines_enum = expand_env_variables(lines_enum) return lines_enum -def process_line( - line, # type: Text - filename, # type: str - line_number, # type: int - finder=None, # type: Optional[PackageFinder] - comes_from=None, # type: Optional[str] +def handle_requirement_line( + line, # type: ParsedLine options=None, # type: Optional[optparse.Values] - session=None, # type: Optional[PipSession] - wheel_cache=None, # type: Optional[WheelCache] - use_pep517=None, # type: Optional[bool] - constraint=False # type: bool ): - # type: (...) -> Iterator[InstallRequirement] - """Process a single requirements line; This can result in creating/yielding - requirements, or updating the finder. 
- - For lines that contain requirements, the only options that have an effect - are from SUPPORTED_OPTIONS_REQ, and they are scoped to the - requirement. Other options from SUPPORTED_OPTIONS may be present, but are - ignored. - - For lines that do not contain requirements, the only options that have an - effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may - be present, but are ignored. These lines may contain multiple options - (although our docs imply only one is supported), and all our parsed and - affect the finder. - - :param constraint: If True, parsing a constraints file. - :param options: OptionParser options that we may update - """ - parser = build_parser(line) - defaults = parser.get_default_values() - defaults.index_url = None - if finder: - defaults.format_control = finder.format_control - args_str, options_str = break_args_options(line) - # Prior to 2.7.3, shlex cannot deal with unicode entries - if sys.version_info < (2, 7, 3): - # https://github.com/python/mypy/issues/1174 - options_str = options_str.encode('utf8') # type: ignore - # https://github.com/python/mypy/issues/1174 - opts, _ = parser.parse_args( - shlex.split(options_str), defaults) # type: ignore + # type: (...) -> ParsedRequirement # preserve for the nested code path - line_comes_from = '%s %s (line %s)' % ( - '-c' if constraint else '-r', filename, line_number, + line_comes_from = '{} {} (line {})'.format( + '-c' if line.constraint else '-r', line.filename, line.lineno, ) - # yield a line requirement - if args_str: - isolated = options.isolated_mode if options else False + assert line.is_requirement + + if line.is_editable: + # For editable requirements, we don't support per-requirement + # options, so just return the parsed requirement. 
+ return ParsedRequirement( + requirement=line.requirement, + is_editable=line.is_editable, + comes_from=line_comes_from, + constraint=line.constraint, + ) + else: if options: - cmdoptions.check_install_build_global(options, opts) + # Disable wheels if the user has specified build options + cmdoptions.check_install_build_global(options, line.opts) + # get the options that apply to requirements req_options = {} for dest in SUPPORTED_OPTIONS_REQ_DEST: - if dest in opts.__dict__ and opts.__dict__[dest]: - req_options[dest] = opts.__dict__[dest] - yield install_req_from_line( - args_str, line_comes_from, constraint=constraint, - use_pep517=use_pep517, - isolated=isolated, options=req_options, wheel_cache=wheel_cache + if dest in line.opts.__dict__ and line.opts.__dict__[dest]: + req_options[dest] = line.opts.__dict__[dest] + + line_source = 'line {} of {}'.format(line.lineno, line.filename) + return ParsedRequirement( + requirement=line.requirement, + is_editable=line.is_editable, + comes_from=line_comes_from, + constraint=line.constraint, + options=req_options, + line_source=line_source, ) - # yield an editable requirement - elif opts.editables: - isolated = options.isolated_mode if options else False - yield install_req_from_editable( - opts.editables[0], comes_from=line_comes_from, - use_pep517=use_pep517, - constraint=constraint, isolated=isolated, wheel_cache=wheel_cache - ) - # parse a nested requirements file - elif opts.requirements or opts.constraints: - if opts.requirements: - req_path = opts.requirements[0] - nested_constraint = False - else: - req_path = opts.constraints[0] - nested_constraint = True - # original file is over http - if SCHEME_RE.search(filename): - # do a url join so relative paths work - req_path = urllib_parse.urljoin(filename, req_path) - # original file and nested file are paths - elif not SCHEME_RE.search(req_path): - # do a join so relative paths work - req_path = os.path.join(os.path.dirname(filename), req_path) - # TODO: Why not use 
`comes_from='-r {} (line {})'` here as well? - parsed_reqs = parse_requirements( - req_path, finder, comes_from, options, session, - constraint=nested_constraint, wheel_cache=wheel_cache - ) - for req in parsed_reqs: - yield req - - # percolate hash-checking option upward - elif opts.require_hashes: - options.require_hashes = opts.require_hashes +def handle_option_line( + opts, # type: Values + filename, # type: str + lineno, # type: int + finder=None, # type: Optional[PackageFinder] + options=None, # type: Optional[optparse.Values] + session=None, # type: Optional[PipSession] +): + # type: (...) -> None + + if options: + # percolate options upward + if opts.require_hashes: + options.require_hashes = opts.require_hashes + if opts.features_enabled: + options.features_enabled.extend( + f for f in opts.features_enabled + if f not in options.features_enabled + ) # set finder options - elif finder: + if finder: + find_links = finder.find_links + index_urls = finder.index_urls if opts.index_url: - finder.index_urls = [opts.index_url] + index_urls = [opts.index_url] if opts.no_index is True: - finder.index_urls = [] + index_urls = [] if opts.extra_index_urls: - finder.index_urls.extend(opts.extra_index_urls) + index_urls.extend(opts.extra_index_urls) if opts.find_links: # FIXME: it would be nice to keep track of the source # of the find_links: support a find-links local path @@ -247,12 +254,174 @@ def process_line( relative_to_reqs_file = os.path.join(req_dir, value) if os.path.exists(relative_to_reqs_file): value = relative_to_reqs_file - finder.find_links.append(value) + find_links.append(value) + + search_scope = SearchScope( + find_links=find_links, + index_urls=index_urls, + ) + finder.search_scope = search_scope + if opts.pre: - finder.allow_all_prereleases = True - if opts.trusted_hosts: - finder.secure_origins.extend( - ("*", host, "*") for host in opts.trusted_hosts) + finder.set_allow_all_prereleases() + + if opts.prefer_binary: + finder.set_prefer_binary() + + 
if session: + for host in opts.trusted_hosts or []: + source = 'line {} of {}'.format(lineno, filename) + session.add_trusted_host(host, source=source) + + +def handle_line( + line, # type: ParsedLine + options=None, # type: Optional[optparse.Values] + finder=None, # type: Optional[PackageFinder] + session=None, # type: Optional[PipSession] +): + # type: (...) -> Optional[ParsedRequirement] + """Handle a single parsed requirements line; This can result in + creating/yielding requirements, or updating the finder. + + :param line: The parsed line to be processed. + :param options: CLI options. + :param finder: The finder - updated by non-requirement lines. + :param session: The session - updated by non-requirement lines. + + Returns a ParsedRequirement object if the line is a requirement line, + otherwise returns None. + + For lines that contain requirements, the only options that have an effect + are from SUPPORTED_OPTIONS_REQ, and they are scoped to the + requirement. Other options from SUPPORTED_OPTIONS may be present, but are + ignored. + + For lines that do not contain requirements, the only options that have an + effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may + be present, but are ignored. These lines may contain multiple options + (although our docs imply only one is supported), and all our parsed and + affect the finder. + """ + + if line.is_requirement: + parsed_req = handle_requirement_line(line, options) + return parsed_req + else: + handle_option_line( + line.opts, + line.filename, + line.lineno, + finder, + options, + session, + ) + return None + + +class RequirementsFileParser(object): + def __init__( + self, + session, # type: PipSession + line_parser, # type: LineParser + comes_from, # type: Optional[str] + ): + # type: (...) 
-> None + self._session = session + self._line_parser = line_parser + self._comes_from = comes_from + + def parse(self, filename, constraint): + # type: (str, bool) -> Iterator[ParsedLine] + """Parse a given file, yielding parsed lines. + """ + for line in self._parse_and_recurse(filename, constraint): + yield line + + def _parse_and_recurse(self, filename, constraint): + # type: (str, bool) -> Iterator[ParsedLine] + for line in self._parse_file(filename, constraint): + if ( + not line.is_requirement and + (line.opts.requirements or line.opts.constraints) + ): + # parse a nested requirements file + if line.opts.requirements: + req_path = line.opts.requirements[0] + nested_constraint = False + else: + req_path = line.opts.constraints[0] + nested_constraint = True + + # original file is over http + if SCHEME_RE.search(filename): + # do a url join so relative paths work + req_path = urllib_parse.urljoin(filename, req_path) + # original file and nested file are paths + elif not SCHEME_RE.search(req_path): + # do a join so relative paths work + req_path = os.path.join( + os.path.dirname(filename), req_path, + ) + + for inner_line in self._parse_and_recurse( + req_path, nested_constraint, + ): + yield inner_line + else: + yield line + + def _parse_file(self, filename, constraint): + # type: (str, bool) -> Iterator[ParsedLine] + _, content = get_file_content( + filename, self._session, comes_from=self._comes_from + ) + + lines_enum = preprocess(content) + + for line_number, line in lines_enum: + try: + args_str, opts = self._line_parser(line) + except OptionParsingError as e: + # add offending line + msg = 'Invalid requirement: {}\n{}'.format(line, e.msg) + raise RequirementsFileParseError(msg) + + yield ParsedLine( + filename, + line_number, + self._comes_from, + args_str, + opts, + constraint, + ) + + +def get_line_parser(finder): + # type: (Optional[PackageFinder]) -> LineParser + def parse_line(line): + # type: (Text) -> Tuple[str, Values] + # Build new parser for 
each line since it accumulates appendable + # options. + parser = build_parser() + defaults = parser.get_default_values() + defaults.index_url = None + if finder: + defaults.format_control = finder.format_control + + args_str, options_str = break_args_options(line) + # Prior to 2.7.3, shlex cannot deal with unicode entries + if sys.version_info < (2, 7, 3): + # https://github.com/python/mypy/issues/1174 + options_str = options_str.encode('utf8') # type: ignore + + # https://github.com/python/mypy/issues/1174 + opts, _ = parser.parse_args( + shlex.split(options_str), defaults) # type: ignore + + return args_str, opts + + return parse_line def break_args_options(line): @@ -273,8 +442,14 @@ def break_args_options(line): return ' '.join(args), ' '.join(options) # type: ignore -def build_parser(line): - # type: (Text) -> optparse.OptionParser +class OptionParsingError(Exception): + def __init__(self, msg): + # type: (str) -> None + self.msg = msg + + +def build_parser(): + # type: () -> optparse.OptionParser """ Return a parser for parsing requirement lines """ @@ -288,9 +463,8 @@ def build_parser(line): # By default optparse sys.exits on parsing errors. We want to wrap # that in our own exception. def parser_exit(self, msg): - # add offending line - msg = 'Invalid requirement: %s\n%s' % (line, msg) - raise RequirementsFileParseError(msg) + # type: (Any, str) -> NoReturn + raise OptionParsingError(msg) # NOTE: mypy disallows assigning to a method # https://github.com/python/mypy/issues/2427 parser.exit = parser_exit # type: ignore @@ -312,6 +486,7 @@ def join_lines(lines_enum): line = ' ' + line if new_line: new_line.append(line) + assert primary_line_number is not None yield primary_line_number, ''.join(new_line) new_line = [] else: @@ -323,6 +498,7 @@ def join_lines(lines_enum): # last line contains \ if new_line: + assert primary_line_number is not None yield primary_line_number, ''.join(new_line) # TODO: handle space after '\'. 
@@ -340,20 +516,6 @@ def ignore_comments(lines_enum): yield line_number, line -def skip_regex(lines_enum, options): - # type: (ReqFileLines, Optional[optparse.Values]) -> ReqFileLines - """ - Skip lines that match '--skip-requirements-regex' pattern - - Note: the regex pattern is only built once - """ - skip_regex = options.skip_requirements_regex if options else None - if skip_regex: - pattern = re.compile(skip_regex) - lines_enum = filterfalse(lambda e: pattern.search(e[1]), lines_enum) - return lines_enum - - def expand_env_variables(lines_enum): # type: (ReqFileLines) -> ReqFileLines """Replace all environment variables that can be retrieved via `os.getenv`. @@ -364,7 +526,7 @@ def expand_env_variables(lines_enum): 1. Strings that contain a `$` aren't accidentally (partially) expanded. 2. Ensure consistency across platforms for requirement files. - These points are the result of a discusssion on the `github pull + These points are the result of a discussion on the `github pull request #3514 `_. Valid characters in variable names follow the `POSIX standard @@ -380,3 +542,51 @@ def expand_env_variables(lines_enum): line = line.replace(env_var, value) yield line_number, line + + +def get_file_content(url, session, comes_from=None): + # type: (str, PipSession, Optional[str]) -> Tuple[str, Text] + """Gets the content of a file; it may be a filename, file: URL, or + http: URL. Returns (location, content). Content is unicode. + Respects # -*- coding: declarations on the retrieved files. + + :param url: File path or url. + :param session: PipSession instance. + :param comes_from: Origin description of requirements. 
+ """ + scheme = get_url_scheme(url) + + if scheme in ['http', 'https']: + # FIXME: catch some errors + resp = session.get(url) + raise_for_status(resp) + return resp.url, resp.text + + elif scheme == 'file': + if comes_from and comes_from.startswith('http'): + raise InstallationError( + 'Requirements file {} references URL {}, ' + 'which is local'.format(comes_from, url) + ) + + path = url.split(':', 1)[1] + path = path.replace('\\', '/') + match = _url_slash_drive_re.match(path) + if match: + path = match.group(1) + ':' + path.split('|', 1)[1] + path = urllib_parse.unquote(path) + if path.startswith('/'): + path = '/' + path.lstrip('/') + url = path + + try: + with open(url, 'rb') as f: + content = auto_decode(f.read()) + except IOError as exc: + raise InstallationError( + 'Could not open requirements file: {}'.format(exc) + ) + return url, content + + +_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/req_install.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/req_install.py index a4834b00..644930a1 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/req_install.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/req_install.py @@ -1,12 +1,14 @@ +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False + from __future__ import absolute_import import logging import os import shutil import sys -import sysconfig +import uuid import zipfile -from distutils.util import change_root from pip._vendor import pkg_resources, six from pip._vendor.packaging.requirements import Requirement @@ -15,50 +17,85 @@ from pip._vendor.packaging.version import Version from pip._vendor.packaging.version import parse as parse_version from pip._vendor.pep517.wrappers import Pep517HookCaller -from pip._internal import wheel from pip._internal.build_env import NoOpBuildEnvironment from pip._internal.exceptions import InstallationError -from pip._internal.locations import ( - PIP_DELETE_MARKER_FILENAME, running_under_virtualenv, -) +from pip._internal.locations import get_scheme from pip._internal.models.link import Link +from pip._internal.operations.build.metadata import generate_metadata +from pip._internal.operations.build.metadata_legacy import \ + generate_metadata as generate_metadata_legacy +from pip._internal.operations.install.editable_legacy import \ + install_editable as install_editable_legacy +from pip._internal.operations.install.legacy import LegacyInstallFailure +from pip._internal.operations.install.legacy import install as install_legacy +from pip._internal.operations.install.wheel import install_wheel from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path from pip._internal.req.req_uninstall import UninstallPathSet -from pip._internal.utils.compat import native_str +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.direct_url_helpers import direct_url_from_link from pip._internal.utils.hashes import Hashes from pip._internal.utils.logging import indent_log from pip._internal.utils.misc import ( - _make_build_dir, ask_path_exists, backup_dir, call_subprocess, - display_path, dist_in_site_packages, dist_in_usersite, ensure_dir, - get_installed_version, redact_password_from_url, rmtree, + 
ask_path_exists, + backup_dir, + display_path, + dist_in_site_packages, + dist_in_usersite, + get_distribution, + get_installed_version, + hide_url, + redact_auth_from_url, ) from pip._internal.utils.packaging import get_metadata -from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM -from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.utils.ui import open_spinner +from pip._internal.utils.virtualenv import running_under_virtualenv from pip._internal.vcs import vcs -from pip._internal.wheel import move_wheel_files if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 - Optional, Iterable, List, Union, Any, Text, Sequence, Dict + from typing import ( + Any, Dict, Iterable, List, Optional, Sequence, Union, ) - from pip._internal.build_env import BuildEnvironment # noqa: F401 - from pip._internal.cache import WheelCache # noqa: F401 - from pip._internal.index import PackageFinder # noqa: F401 - from pip._vendor.pkg_resources import Distribution # noqa: F401 - from pip._vendor.packaging.specifiers import SpecifierSet # noqa: F401 - from pip._vendor.packaging.markers import Marker # noqa: F401 + from pip._internal.build_env import BuildEnvironment + from pip._vendor.pkg_resources import Distribution + from pip._vendor.packaging.specifiers import SpecifierSet + from pip._vendor.packaging.markers import Marker logger = logging.getLogger(__name__) +def _get_dist(metadata_directory): + # type: (str) -> Distribution + """Return a pkg_resources.Distribution for the provided + metadata directory. + """ + dist_dir = metadata_directory.rstrip(os.sep) + + # Build a PathMetadata object, from path to metadata. :wink: + base_dir, dist_dir_name = os.path.split(dist_dir) + metadata = pkg_resources.PathMetadata(base_dir, dist_dir) + + # Determine the correct Distribution object type. 
+ if dist_dir.endswith(".egg-info"): + dist_cls = pkg_resources.Distribution + dist_name = os.path.splitext(dist_dir_name)[0] + else: + assert dist_dir.endswith(".dist-info") + dist_cls = pkg_resources.DistInfoDistribution + dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0] + + return dist_cls( + base_dir, + project_name=dist_name, + metadata=metadata, + ) + + class InstallRequirement(object): """ Represents something that may be installed later on, may have information - about where to fetch the relavant requirement and also contains logic for + about where to fetch the relevant requirement and also contains logic for installing the said requirement. """ @@ -66,34 +103,48 @@ class InstallRequirement(object): self, req, # type: Optional[Requirement] comes_from, # type: Optional[Union[str, InstallRequirement]] - source_dir=None, # type: Optional[str] editable=False, # type: bool link=None, # type: Optional[Link] - update=True, # type: bool markers=None, # type: Optional[Marker] use_pep517=None, # type: Optional[bool] isolated=False, # type: bool - options=None, # type: Optional[Dict[str, Any]] - wheel_cache=None, # type: Optional[WheelCache] + install_options=None, # type: Optional[List[str]] + global_options=None, # type: Optional[List[str]] + hash_options=None, # type: Optional[Dict[str, List[str]]] constraint=False, # type: bool - extras=() # type: Iterable[str] + extras=(), # type: Iterable[str] + user_supplied=False, # type: bool ): # type: (...) -> None assert req is None or isinstance(req, Requirement), req self.req = req self.comes_from = comes_from self.constraint = constraint - if source_dir is not None: - self.source_dir = os.path.normpath(os.path.abspath(source_dir)) - else: - self.source_dir = None self.editable = editable - self._wheel_cache = wheel_cache + # source_dir is the local directory where the linked requirement is + # located, or unpacked. 
In case unpacking is needed, creating and + # populating source_dir is done by the RequirementPreparer. Note this + # is not necessarily the directory where pyproject.toml or setup.py is + # located - that one is obtained via unpacked_source_directory. + self.source_dir = None # type: Optional[str] + if self.editable: + assert link + if link.is_file: + self.source_dir = os.path.normpath( + os.path.abspath(link.file_path) + ) + if link is None and req and req.url: # PEP 508 URL requirement link = Link(req.url) self.link = self.original_link = link + self.original_link_is_in_wheel_cache = False + + # Path to any downloaded or already-existing package. + self.local_file_path = None # type: Optional[str] + if self.link and self.link.is_file: + self.local_file_path = self.link.file_path if extras: self.extras = extras @@ -107,28 +158,35 @@ class InstallRequirement(object): markers = req.marker self.markers = markers - self._egg_info_path = None # type: Optional[str] # This holds the pkg_resources.Distribution object if this requirement # is already available: - self.satisfied_by = None - # This hold the pkg_resources.Distribution object if this requirement - # conflicts with another installed distribution: - self.conflicts_with = None + self.satisfied_by = None # type: Optional[Distribution] + # Whether the installation process should try to uninstall an existing + # distribution before installing this requirement. + self.should_reinstall = False # Temporary build location - self._temp_build_dir = TempDirectory(kind="req-build") - # Used to store the global directory where the _temp_build_dir should - # have been created. Cf _correct_build_location method. 
- self._ideal_build_dir = None # type: Optional[str] - # True if the editable should be updated: - self.update = update + self._temp_build_dir = None # type: Optional[TempDirectory] # Set to True after successful installation self.install_succeeded = None # type: Optional[bool] - # UninstallPathSet of uninstalled distribution (for possible rollback) - self.uninstalled_pathset = None - self.options = options if options else {} + # Supplied options + self.install_options = install_options if install_options else [] + self.global_options = global_options if global_options else [] + self.hash_options = hash_options if hash_options else {} # Set to True after successful preparation of this requirement self.prepared = False - self.is_direct = False + # User supplied requirement are explicitly requested for installation + # by the user via CLI arguments or requirements files, as opposed to, + # e.g. dependencies, extras or constraints. + self.user_supplied = user_supplied + + # Set by the legacy resolver when the requirement has been downloaded + # TODO: This introduces a strong coupling between the resolver and the + # requirement (the coupling was previously between the resolver + # and the requirement set). This should be refactored to allow + # the requirement to decide for itself when it has been + # successfully downloaded - but that is more tricky to get right, + # se we are making the change in stages. 
+ self.successfully_downloaded = False self.isolated = isolated self.build_env = NoOpBuildEnvironment() # type: BuildEnvironment @@ -156,49 +214,45 @@ class InstallRequirement(object): self.use_pep517 = use_pep517 def __str__(self): + # type: () -> str if self.req: s = str(self.req) if self.link: - s += ' from %s' % redact_password_from_url(self.link.url) + s += ' from {}'.format(redact_auth_from_url(self.link.url)) elif self.link: - s = redact_password_from_url(self.link.url) + s = redact_auth_from_url(self.link.url) else: s = '' if self.satisfied_by is not None: - s += ' in %s' % display_path(self.satisfied_by.location) + s += ' in {}'.format(display_path(self.satisfied_by.location)) if self.comes_from: if isinstance(self.comes_from, six.string_types): - comes_from = self.comes_from + comes_from = self.comes_from # type: Optional[str] else: comes_from = self.comes_from.from_path() if comes_from: - s += ' (from %s)' % comes_from + s += ' (from {})'.format(comes_from) return s def __repr__(self): - return '<%s object: %s editable=%r>' % ( + # type: () -> str + return '<{} object: {} editable={!r}>'.format( self.__class__.__name__, str(self), self.editable) - def populate_link(self, finder, upgrade, require_hashes): - # type: (PackageFinder, bool, bool) -> None - """Ensure that if a link can be found for this, that it is found. - - Note that self.link may still be None - if Upgrade is False and the - requirement is already installed. - - If require_hashes is True, don't use the wheel cache, because cached - wheels, always built locally, have different hashes than the files - downloaded from the index server and thus throw false hash mismatches. - Furthermore, cached wheels at present have undeterministic contents due - to file modification times. + def format_debug(self): + # type: () -> str + """An un-tested helper for getting state, for debugging. 
""" - if self.link is None: - self.link = finder.find_requirement(self, upgrade) - if self._wheel_cache is not None and not require_hashes: - old_link = self.link - self.link = self._wheel_cache.get(self.link, self.name) - if old_link != self.link: - logger.debug('Using cached wheel link: %s', self.link) + attributes = vars(self) + names = sorted(attributes) + + state = ( + "{}={!r}".format(attr, attributes[attr]) for attr in sorted(names) + ) + return '<{name} object: {{{state}}}>'.format( + name=self.__class__.__name__, + state=", ".join(state), + ) # Things that are valid for all kinds of requirements? @property @@ -206,7 +260,7 @@ class InstallRequirement(object): # type: () -> Optional[str] if self.req is None: return None - return native_str(pkg_resources.safe_name(self.req.name)) + return six.ensure_str(pkg_resources.safe_name(self.req.name)) @property def specifier(self): @@ -226,6 +280,7 @@ class InstallRequirement(object): @property def installed_version(self): + # type: () -> Optional[str] return get_installed_version(self.name) def match_markers(self, extras_requested=None): @@ -250,7 +305,7 @@ class InstallRequirement(object): URL do not. 
""" - return bool(self.options.get('hashes', {})) + return bool(self.hash_options) def hashes(self, trust_internet=True): # type: (bool) -> Hashes @@ -268,7 +323,7 @@ class InstallRequirement(object): downloaded from the internet, as by populate_link() """ - good_hashes = self.options.get('hashes', {}).copy() + good_hashes = self.hash_options.copy() link = self.link if trust_internet else self.original_link if link and link.hash: good_hashes.setdefault(link.hash_name, []).append(link.hash) @@ -290,130 +345,127 @@ class InstallRequirement(object): s += '->' + comes_from return s - def build_location(self, build_dir): - # type: (str) -> Optional[str] + def ensure_build_location(self, build_dir, autodelete, parallel_builds): + # type: (str, bool, bool) -> str assert build_dir is not None - if self._temp_build_dir.path is not None: + if self._temp_build_dir is not None: + assert self._temp_build_dir.path return self._temp_build_dir.path if self.req is None: - # for requirement via a path to a directory: the name of the - # package is not available yet so we create a temp directory - # Once run_egg_info will have run, we'll be able - # to fix it via _correct_build_location # Some systems have /tmp as a symlink which confuses custom # builds (such as numpy). Thus, we ensure that the real path # is returned. - self._temp_build_dir.create() - self._ideal_build_dir = build_dir + self._temp_build_dir = TempDirectory( + kind=tempdir_kinds.REQ_BUILD, globally_managed=True + ) return self._temp_build_dir.path - if self.editable: - name = self.name.lower() - else: - name = self.name + + # When parallel builds are enabled, add a UUID to the build directory + # name so multiple builds do not interfere with each other. + dir_name = canonicalize_name(self.name) + if parallel_builds: + dir_name = "{}_{}".format(dir_name, uuid.uuid4().hex) + # FIXME: Is there a better place to create the build_dir? 
(hg and bzr # need this) if not os.path.exists(build_dir): logger.debug('Creating directory %s', build_dir) - _make_build_dir(build_dir) - return os.path.join(build_dir, name) - - def _correct_build_location(self): + os.makedirs(build_dir) + actual_build_dir = os.path.join(build_dir, dir_name) + # `None` indicates that we respect the globally-configured deletion + # settings, which is what we actually want when auto-deleting. + delete_arg = None if autodelete else False + return TempDirectory( + path=actual_build_dir, + delete=delete_arg, + kind=tempdir_kinds.REQ_BUILD, + globally_managed=True, + ).path + + def _set_requirement(self): # type: () -> None - """Move self._temp_build_dir to self._ideal_build_dir/self.req.name + """Set requirement after generating metadata. + """ + assert self.req is None + assert self.metadata is not None + assert self.source_dir is not None - For some requirements (e.g. a path to a directory), the name of the - package is not available until we run egg_info, so the build_location - will return a temporary directory and store the _ideal_build_dir. + # Construct a Requirement object from the generated metadata + if isinstance(parse_version(self.metadata["Version"]), Version): + op = "==" + else: + op = "===" + + self.req = Requirement( + "".join([ + self.metadata["Name"], + op, + self.metadata["Version"], + ]) + ) - This is only called by self.run_egg_info to fix the temporary build - directory. - """ - if self.source_dir is not None: + def warn_on_mismatching_name(self): + # type: () -> None + metadata_name = canonicalize_name(self.metadata["Name"]) + if canonicalize_name(self.req.name) == metadata_name: + # Everything is fine. 
return - assert self.req is not None - assert self._temp_build_dir.path - assert (self._ideal_build_dir is not None and - self._ideal_build_dir.path) # type: ignore - old_location = self._temp_build_dir.path - self._temp_build_dir.path = None - - new_location = self.build_location(self._ideal_build_dir) - if os.path.exists(new_location): - raise InstallationError( - 'A package already exists in %s; please remove it to continue' - % display_path(new_location)) - logger.debug( - 'Moving package %s from %s to new location %s', - self, display_path(old_location), display_path(new_location), + + # If we're here, there's a mismatch. Log a warning about it. + logger.warning( + 'Generating metadata for package %s ' + 'produced metadata for project name %s. Fix your ' + '#egg=%s fragments.', + self.name, metadata_name, self.name ) - shutil.move(old_location, new_location) - self._temp_build_dir.path = new_location - self._ideal_build_dir = None - self.source_dir = os.path.normpath(os.path.abspath(new_location)) - self._egg_info_path = None - - # Correct the metadata directory, if it exists - if self.metadata_directory: - old_meta = self.metadata_directory - rel = os.path.relpath(old_meta, start=old_location) - new_meta = os.path.join(new_location, rel) - new_meta = os.path.normpath(os.path.abspath(new_meta)) - self.metadata_directory = new_meta - - def remove_temporary_source(self): - # type: () -> None - """Remove the source files from this requirement, if they are marked - for deletion""" - if self.source_dir and os.path.exists( - os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)): - logger.debug('Removing source in %s', self.source_dir) - rmtree(self.source_dir) - self.source_dir = None - self._temp_build_dir.cleanup() - self.build_env.cleanup() + self.req = Requirement(metadata_name) def check_if_exists(self, use_user_site): - # type: (bool) -> bool + # type: (bool) -> None """Find an installed distribution that satisfies or conflicts with this requirement, and 
set self.satisfied_by or - self.conflicts_with appropriately. + self.should_reinstall appropriately. """ if self.req is None: - return False + return + # get_distribution() will resolve the entire list of requirements + # anyway, and we've already determined that we need the requirement + # in question, so strip the marker so that we don't try to + # evaluate it. + no_marker = Requirement(str(self.req)) + no_marker.marker = None + + # pkg_resources uses the canonical name to look up packages, but + # the name passed passed to get_distribution is not canonicalized + # so we have to explicitly convert it to a canonical name + no_marker.name = canonicalize_name(no_marker.name) try: - # get_distribution() will resolve the entire list of requirements - # anyway, and we've already determined that we need the requirement - # in question, so strip the marker so that we don't try to - # evaluate it. - no_marker = Requirement(str(self.req)) - no_marker.marker = None self.satisfied_by = pkg_resources.get_distribution(str(no_marker)) - if self.editable and self.satisfied_by: - self.conflicts_with = self.satisfied_by - # when installing editables, nothing pre-existing should ever - # satisfy - self.satisfied_by = None - return True except pkg_resources.DistributionNotFound: - return False + return except pkg_resources.VersionConflict: - existing_dist = pkg_resources.get_distribution( + existing_dist = get_distribution( self.req.name ) if use_user_site: if dist_in_usersite(existing_dist): - self.conflicts_with = existing_dist + self.should_reinstall = True elif (running_under_virtualenv() and dist_in_site_packages(existing_dist)): raise InstallationError( "Will not install to the user site because it will " - "lack sys.path precedence to %s in %s" % - (existing_dist.project_name, existing_dist.location) + "lack sys.path precedence to {} in {}".format( + existing_dist.project_name, existing_dist.location) ) else: - self.conflicts_with = existing_dist - return True + 
self.should_reinstall = True + else: + if self.editable and self.satisfied_by: + self.should_reinstall = True + # when installing editables, nothing pre-existing should ever + # satisfy + self.satisfied_by = None # Things valid for wheels @property @@ -423,42 +475,19 @@ class InstallRequirement(object): return False return self.link.is_wheel - def move_wheel_files( - self, - wheeldir, # type: str - root=None, # type: Optional[str] - home=None, # type: Optional[str] - prefix=None, # type: Optional[str] - warn_script_location=True, # type: bool - use_user_site=False, # type: bool - pycompile=True # type: bool - ): - # type: (...) -> None - move_wheel_files( - self.name, self.req, wheeldir, - user=use_user_site, - home=home, - root=root, - prefix=prefix, - pycompile=pycompile, - isolated=self.isolated, - warn_script_location=warn_script_location, - ) - # Things valid for sdists @property - def setup_py_dir(self): + def unpacked_source_directory(self): # type: () -> str return os.path.join( self.source_dir, self.link and self.link.subdirectory_fragment or '') @property - def setup_py(self): + def setup_py_path(self): # type: () -> str - assert self.source_dir, "No source dir for %s" % self - - setup_py = os.path.join(self.setup_py_dir, 'setup.py') + assert self.source_dir, "No source dir for {}".format(self) + setup_py = os.path.join(self.unpacked_source_directory, 'setup.py') # Python2 __file__ should not be unicode if six.PY2 and isinstance(setup_py, six.text_type): @@ -467,11 +496,10 @@ class InstallRequirement(object): return setup_py @property - def pyproject_toml(self): + def pyproject_toml_path(self): # type: () -> str - assert self.source_dir, "No source dir for %s" % self - - return make_pyproject_path(self.setup_py_dir) + assert self.source_dir, "No source dir for {}".format(self) + return make_pyproject_path(self.unpacked_source_directory) def load_pyproject_toml(self): # type: () -> None @@ -482,37 +510,46 @@ class InstallRequirement(object): use_pep517 
attribute can be used to determine whether we should follow the PEP 517 or legacy (setup.py) code path. """ - pep517_data = load_pyproject_toml( + pyproject_toml_data = load_pyproject_toml( self.use_pep517, - self.pyproject_toml, - self.setup_py, + self.pyproject_toml_path, + self.setup_py_path, str(self) ) - if pep517_data is None: + if pyproject_toml_data is None: self.use_pep517 = False - else: - self.use_pep517 = True - requires, backend, check = pep517_data - self.requirements_to_check = check - self.pyproject_requires = requires - self.pep517_backend = Pep517HookCaller(self.setup_py_dir, backend) - - # Use a custom function to call subprocesses - self.spin_message = "" - - def runner(cmd, cwd=None, extra_environ=None): - with open_spinner(self.spin_message) as spinner: - call_subprocess( - cmd, - cwd=cwd, - extra_environ=extra_environ, - show_stdout=False, - spinner=spinner - ) - self.spin_message = "" + return + + self.use_pep517 = True + requires, backend, check, backend_path = pyproject_toml_data + self.requirements_to_check = check + self.pyproject_requires = requires + self.pep517_backend = Pep517HookCaller( + self.unpacked_source_directory, backend, backend_path=backend_path, + ) + + def _generate_metadata(self): + # type: () -> str + """Invokes metadata generator functions, with the required arguments. 
+ """ + if not self.use_pep517: + assert self.unpacked_source_directory + + return generate_metadata_legacy( + build_env=self.build_env, + setup_py_path=self.setup_py_path, + source_dir=self.unpacked_source_directory, + isolated=self.isolated, + details=self.name or "from {}".format(self.link) + ) + + assert self.pep517_backend is not None - self.pep517_backend._subprocess_runner = runner + return generate_metadata( + build_env=self.build_env, + backend=self.pep517_backend, + ) def prepare_metadata(self): # type: () -> None @@ -524,145 +561,19 @@ class InstallRequirement(object): assert self.source_dir with indent_log(): - if self.use_pep517: - self.prepare_pep517_metadata() - else: - self.run_egg_info() + self.metadata_directory = self._generate_metadata() - if not self.req: - if isinstance(parse_version(self.metadata["Version"]), Version): - op = "==" - else: - op = "===" - self.req = Requirement( - "".join([ - self.metadata["Name"], - op, - self.metadata["Version"], - ]) - ) - self._correct_build_location() + # Act on the newly generated metadata, based on the name and version. + if not self.name: + self._set_requirement() else: - metadata_name = canonicalize_name(self.metadata["Name"]) - if canonicalize_name(self.req.name) != metadata_name: - logger.warning( - 'Generating metadata for package %s ' - 'produced metadata for project name %s. Fix your ' - '#egg=%s fragments.', - self.name, metadata_name, self.name - ) - self.req = Requirement(metadata_name) - - def prepare_pep517_metadata(self): - # type: () -> None - assert self.pep517_backend is not None + self.warn_on_mismatching_name() - metadata_dir = os.path.join( - self.setup_py_dir, - 'pip-wheel-metadata' - ) - ensure_dir(metadata_dir) - - with self.build_env: - # Note that Pep517HookCaller implements a fallback for - # prepare_metadata_for_build_wheel, so we don't have to - # consider the possibility that this hook doesn't exist. 
- backend = self.pep517_backend - self.spin_message = "Preparing wheel metadata" - distinfo_dir = backend.prepare_metadata_for_build_wheel( - metadata_dir - ) - - self.metadata_directory = os.path.join(metadata_dir, distinfo_dir) - - def run_egg_info(self): - # type: () -> None - if self.name: - logger.debug( - 'Running setup.py (path:%s) egg_info for package %s', - self.setup_py, self.name, - ) - else: - logger.debug( - 'Running setup.py (path:%s) egg_info for package from %s', - self.setup_py, self.link, - ) - script = SETUPTOOLS_SHIM % self.setup_py - base_cmd = [sys.executable, '-c', script] - if self.isolated: - base_cmd += ["--no-user-cfg"] - egg_info_cmd = base_cmd + ['egg_info'] - # We can't put the .egg-info files at the root, because then the - # source code will be mistaken for an installed egg, causing - # problems - if self.editable: - egg_base_option = [] # type: List[str] - else: - egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info') - ensure_dir(egg_info_dir) - egg_base_option = ['--egg-base', 'pip-egg-info'] - with self.build_env: - call_subprocess( - egg_info_cmd + egg_base_option, - cwd=self.setup_py_dir, - show_stdout=False, - command_desc='python setup.py egg_info') - - @property - def egg_info_path(self): - # type: () -> str - if self._egg_info_path is None: - if self.editable: - base = self.source_dir - else: - base = os.path.join(self.setup_py_dir, 'pip-egg-info') - filenames = os.listdir(base) - if self.editable: - filenames = [] - for root, dirs, files in os.walk(base): - for dir in vcs.dirnames: - if dir in dirs: - dirs.remove(dir) - # Iterate over a copy of ``dirs``, since mutating - # a list while iterating over it can cause trouble. - # (See https://github.com/pypa/pip/pull/462.) 
- for dir in list(dirs): - # Don't search in anything that looks like a virtualenv - # environment - if ( - os.path.lexists( - os.path.join(root, dir, 'bin', 'python') - ) or - os.path.exists( - os.path.join( - root, dir, 'Scripts', 'Python.exe' - ) - )): - dirs.remove(dir) - # Also don't search through tests - elif dir == 'test' or dir == 'tests': - dirs.remove(dir) - filenames.extend([os.path.join(root, dir) - for dir in dirs]) - filenames = [f for f in filenames if f.endswith('.egg-info')] - - if not filenames: - raise InstallationError( - "Files/directories not found in %s" % base - ) - # if we have more than one match, we pick the toplevel one. This - # can easily be the case if there is a dist folder which contains - # an extracted tarball for testing purposes. - if len(filenames) > 1: - filenames.sort( - key=lambda x: x.count(os.path.sep) + - (os.path.altsep and x.count(os.path.altsep) or 0) - ) - self._egg_info_path = os.path.join(base, filenames[0]) - return self._egg_info_path + self.assert_source_matches_version() @property def metadata(self): + # type: () -> Any if not hasattr(self, '_metadata'): self._metadata = get_metadata(self.get_dist()) @@ -670,27 +581,7 @@ class InstallRequirement(object): def get_dist(self): # type: () -> Distribution - """Return a pkg_resources.Distribution for this requirement""" - if self.metadata_directory: - base_dir, distinfo = os.path.split(self.metadata_directory) - metadata = pkg_resources.PathMetadata( - base_dir, self.metadata_directory - ) - dist_name = os.path.splitext(distinfo)[0] - typ = pkg_resources.DistInfoDistribution - else: - egg_info = self.egg_info_path.rstrip(os.path.sep) - base_dir = os.path.dirname(egg_info) - metadata = pkg_resources.PathMetadata(base_dir, egg_info) - dist_name = os.path.splitext(os.path.basename(egg_info))[0] - # https://github.com/python/mypy/issues/1174 - typ = pkg_resources.Distribution # type: ignore - - return typ( - base_dir, - project_name=dist_name, - metadata=metadata, - ) + 
return _get_dist(self.metadata_directory) def assert_source_matches_version(self): # type: () -> None @@ -711,8 +602,13 @@ class InstallRequirement(object): ) # For both source distributions and editables - def ensure_has_source_dir(self, parent_dir): - # type: (str) -> str + def ensure_has_source_dir( + self, + parent_dir, + autodelete=False, + parallel_builds=False, + ): + # type: (str, bool, bool) -> None """Ensure that a source_dir is set. This will create a temporary build dir if the name of the requirement @@ -723,45 +619,13 @@ class InstallRequirement(object): :return: self.source_dir """ if self.source_dir is None: - self.source_dir = self.build_location(parent_dir) - return self.source_dir + self.source_dir = self.ensure_build_location( + parent_dir, + autodelete=autodelete, + parallel_builds=parallel_builds, + ) # For editable installations - def install_editable( - self, - install_options, # type: List[str] - global_options=(), # type: Sequence[str] - prefix=None # type: Optional[str] - ): - # type: (...) -> None - logger.info('Running setup.py develop for %s', self.name) - - if self.isolated: - global_options = list(global_options) + ["--no-user-cfg"] - - if prefix: - prefix_param = ['--prefix={}'.format(prefix)] - install_options = list(install_options) + prefix_param - - with indent_log(): - # FIXME: should we do --install-headers here too? 
- with self.build_env: - call_subprocess( - [ - sys.executable, - '-c', - SETUPTOOLS_SHIM % self.setup_py - ] + - list(global_options) + - ['develop', '--no-deps'] + - list(install_options), - - cwd=self.setup_py_dir, - show_stdout=False, - ) - - self.install_succeeded = True - def update_editable(self, obtain=True): # type: (bool) -> None if not self.link: @@ -776,26 +640,38 @@ class InstallRequirement(object): if self.link.scheme == 'file': # Static paths don't get updated return - assert '+' in self.link.url, "bad url: %r" % self.link.url - if not self.update: - return + assert '+' in self.link.url, \ + "bad url: {self.link.url!r}".format(**locals()) vc_type, url = self.link.url.split('+', 1) - backend = vcs.get_backend(vc_type) - if backend: - vcs_backend = backend(self.link.url) + vcs_backend = vcs.get_backend(vc_type) + if vcs_backend: + if not self.link.is_vcs: + reason = ( + "This form of VCS requirement is being deprecated: {}." + ).format( + self.link.url + ) + replacement = None + if self.link.url.startswith("git+git@"): + replacement = ( + "git+https://git@example.com/..., " + "git+ssh://git@example.com/..., " + "or the insecure git+git://git@example.com/..." 
+ ) + deprecated(reason, replacement, gone_in="21.0", issue=7554) + hidden_url = hide_url(self.link.url) if obtain: - vcs_backend.obtain(self.source_dir) + vcs_backend.obtain(self.source_dir, url=hidden_url) else: - vcs_backend.export(self.source_dir) + vcs_backend.export(self.source_dir, url=hidden_url) else: assert 0, ( - 'Unexpected version control type (in %s): %s' - % (self.link, vc_type)) + 'Unexpected version control type (in {}): {}'.format( + self.link, vc_type)) # Top-level Actions - def uninstall(self, auto_confirm=False, verbose=False, - use_user_site=False): - # type: (bool, bool, bool) -> Optional[UninstallPathSet] + def uninstall(self, auto_confirm=False, verbose=False): + # type: (bool, bool) -> Optional[UninstallPathSet] """ Uninstall the distribution currently satisfying this requirement. @@ -808,41 +684,52 @@ class InstallRequirement(object): linked to global site-packages. """ - if not self.check_if_exists(use_user_site): + assert self.req + dist = get_distribution(self.req.name) + if not dist: logger.warning("Skipping %s as it is not installed.", self.name) return None - dist = self.satisfied_by or self.conflicts_with + logger.info('Found existing installation: %s', dist) uninstalled_pathset = UninstallPathSet.from_dist(dist) uninstalled_pathset.remove(auto_confirm, verbose) return uninstalled_pathset - def _clean_zip_name(self, name, prefix): # only used by archive. 
- assert name.startswith(prefix + os.path.sep), ( - "name %r doesn't start with prefix %r" % (name, prefix) - ) - name = name[len(prefix) + 1:] - name = name.replace(os.path.sep, '/') - return name - def _get_archive_name(self, path, parentdir, rootdir): # type: (str, str, str) -> str + + def _clean_zip_name(name, prefix): + # type: (str, str) -> str + assert name.startswith(prefix + os.path.sep), ( + "name {name!r} doesn't start with prefix {prefix!r}" + .format(**locals()) + ) + name = name[len(prefix) + 1:] + name = name.replace(os.path.sep, '/') + return name + path = os.path.join(parentdir, path) - name = self._clean_zip_name(path, rootdir) + name = _clean_zip_name(path, rootdir) return self.name + '/' + name - # TODO: Investigate if this should be kept in InstallRequirement - # Seems to be used only when VCS + downloads def archive(self, build_dir): # type: (str) -> None + """Saves archive to provided build_dir. + + Used for saving downloaded VCS requirements as part of `pip download`. + """ assert self.source_dir + create_archive = True - archive_name = '%s-%s.zip' % (self.name, self.metadata["version"]) + archive_name = '{}-{}.zip'.format(self.name, self.metadata["version"]) archive_path = os.path.join(build_dir, archive_name) + if os.path.exists(archive_path): response = ask_path_exists( - 'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' % - display_path(archive_path), ('i', 'w', 'b', 'a')) + 'The file {} exists. 
(i)gnore, (w)ipe, ' + '(b)ackup, (a)bort '.format( + display_path(archive_path)), + ('i', 'w', 'b', 'a')) if response == 'i': create_archive = False elif response == 'w': @@ -858,32 +745,33 @@ class InstallRequirement(object): shutil.move(archive_path, dest_file) elif response == 'a': sys.exit(-1) - if create_archive: - zip = zipfile.ZipFile( - archive_path, 'w', zipfile.ZIP_DEFLATED, - allowZip64=True + + if not create_archive: + return + + zip_output = zipfile.ZipFile( + archive_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True, + ) + with zip_output: + dir = os.path.normcase( + os.path.abspath(self.unpacked_source_directory) ) - dir = os.path.normcase(os.path.abspath(self.setup_py_dir)) for dirpath, dirnames, filenames in os.walk(dir): - if 'pip-egg-info' in dirnames: - dirnames.remove('pip-egg-info') for dirname in dirnames: - dir_arcname = self._get_archive_name(dirname, - parentdir=dirpath, - rootdir=dir) + dir_arcname = self._get_archive_name( + dirname, parentdir=dirpath, rootdir=dir, + ) zipdir = zipfile.ZipInfo(dir_arcname + '/') zipdir.external_attr = 0x1ED << 16 # 0o755 - zip.writestr(zipdir, '') + zip_output.writestr(zipdir, '') for filename in filenames: - if filename == PIP_DELETE_MARKER_FILENAME: - continue - file_arcname = self._get_archive_name(filename, - parentdir=dirpath, - rootdir=dir) + file_arcname = self._get_archive_name( + filename, parentdir=dirpath, rootdir=dir, + ) filename = os.path.join(dirpath, filename) - zip.write(filename, file_arcname) - zip.close() - logger.info('Saved %s', display_path(archive_path)) + zip_output.write(filename, file_arcname) + + logger.info('Saved %s', display_path(archive_path)) def install( self, @@ -897,125 +785,118 @@ class InstallRequirement(object): pycompile=True # type: bool ): # type: (...) 
-> None + scheme = get_scheme( + self.name, + user=use_user_site, + home=home, + root=root, + isolated=self.isolated, + prefix=prefix, + ) + global_options = global_options if global_options is not None else [] if self.editable: - self.install_editable( - install_options, global_options, prefix=prefix, + install_editable_legacy( + install_options, + global_options, + prefix=prefix, + home=home, + use_user_site=use_user_site, + name=self.name, + setup_py_path=self.setup_py_path, + isolated=self.isolated, + build_env=self.build_env, + unpacked_source_directory=self.unpacked_source_directory, ) + self.install_succeeded = True return - if self.is_wheel: - version = wheel.wheel_version(self.source_dir) - wheel.check_compatibility(version, self.name) - self.move_wheel_files( - self.source_dir, root=root, prefix=prefix, home=home, + if self.is_wheel: + assert self.local_file_path + direct_url = None + if self.original_link: + direct_url = direct_url_from_link( + self.original_link, + self.source_dir, + self.original_link_is_in_wheel_cache, + ) + install_wheel( + self.name, + self.local_file_path, + scheme=scheme, + req_description=str(self.req), + pycompile=pycompile, warn_script_location=warn_script_location, - use_user_site=use_user_site, pycompile=pycompile, + direct_url=direct_url, + requested=self.user_supplied, ) self.install_succeeded = True return + # TODO: Why don't we do this for editable installs? + # Extend the list of global and install options passed on to # the setup.py call with the ones from the requirements file. # Options specified in requirements file override those # specified on the command line, since the last option given # to setup.py is the one that is used. 
- global_options = list(global_options) + \ - self.options.get('global_options', []) - install_options = list(install_options) + \ - self.options.get('install_options', []) - - if self.isolated: - # https://github.com/python/mypy/issues/1174 - global_options = global_options + ["--no-user-cfg"] # type: ignore - - with TempDirectory(kind="record") as temp_dir: - record_filename = os.path.join(temp_dir.path, 'install-record.txt') - install_args = self.get_install_args( - global_options, record_filename, root, prefix, pycompile, + global_options = list(global_options) + self.global_options + install_options = list(install_options) + self.install_options + + try: + success = install_legacy( + install_options=install_options, + global_options=global_options, + root=root, + home=home, + prefix=prefix, + use_user_site=use_user_site, + pycompile=pycompile, + scheme=scheme, + setup_py_path=self.setup_py_path, + isolated=self.isolated, + req_name=self.name, + build_env=self.build_env, + unpacked_source_directory=self.unpacked_source_directory, + req_description=str(self.req), ) - msg = 'Running setup.py install for %s' % (self.name,) - with open_spinner(msg) as spinner: - with indent_log(): - with self.build_env: - call_subprocess( - install_args + install_options, - cwd=self.setup_py_dir, - show_stdout=False, - spinner=spinner, - ) - - if not os.path.exists(record_filename): - logger.debug('Record file %s not found', record_filename) - return + except LegacyInstallFailure as exc: + self.install_succeeded = False + six.reraise(*exc.parent) + except Exception: self.install_succeeded = True + raise + + self.install_succeeded = success + + +def check_invalid_constraint_type(req): + # type: (InstallRequirement) -> str + + # Check for unsupported forms + problem = "" + if not req.name: + problem = "Unnamed requirements are not allowed as constraints" + elif req.link: + problem = "Links are not allowed as constraints" + elif req.extras: + problem = "Constraints cannot have extras" 
+ + if problem: + deprecated( + reason=( + "Constraints are only allowed to take the form of a package " + "name and a version specifier. Other forms were originally " + "permitted as an accident of the implementation, but were " + "undocumented. The new implementation of the resolver no " + "longer supports these forms." + ), + replacement=( + "replacing the constraint with a requirement." + ), + # No plan yet for when the new resolver becomes default + gone_in=None, + issue=8210 + ) - def prepend_root(path): - if root is None or not os.path.isabs(path): - return path - else: - return change_root(root, path) - - with open(record_filename) as f: - for line in f: - directory = os.path.dirname(line) - if directory.endswith('.egg-info'): - egg_info_dir = prepend_root(directory) - break - else: - logger.warning( - 'Could not find .egg-info directory in install record' - ' for %s', - self, - ) - # FIXME: put the record somewhere - # FIXME: should this be an error? - return - new_lines = [] - with open(record_filename) as f: - for line in f: - filename = line.strip() - if os.path.isdir(filename): - filename += os.path.sep - new_lines.append( - os.path.relpath(prepend_root(filename), egg_info_dir) - ) - new_lines.sort() - ensure_dir(egg_info_dir) - inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt') - with open(inst_files_path, 'w') as f: - f.write('\n'.join(new_lines) + '\n') - - def get_install_args( - self, - global_options, # type: Sequence[str] - record_filename, # type: str - root, # type: Optional[str] - prefix, # type: Optional[str] - pycompile # type: bool - ): - # type: (...) 
-> List[str] - install_args = [sys.executable, "-u"] - install_args.append('-c') - install_args.append(SETUPTOOLS_SHIM % self.setup_py) - install_args += list(global_options) + \ - ['install', '--record', record_filename] - install_args += ['--single-version-externally-managed'] - - if root is not None: - install_args += ['--root', root] - if prefix is not None: - install_args += ['--prefix', prefix] - - if pycompile: - install_args += ["--compile"] - else: - install_args += ["--no-compile"] - - if running_under_virtualenv(): - py_ver_str = 'python' + sysconfig.get_python_version() - install_args += ['--install-headers', - os.path.join(sys.prefix, 'include', 'site', - py_ver_str, self.name)] - - return install_args + return problem diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/req_set.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/req_set.py index d1410e93..ab4b6f84 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/req_set.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/req_set.py @@ -3,14 +3,16 @@ from __future__ import absolute_import import logging from collections import OrderedDict +from pip._vendor.packaging.utils import canonicalize_name + from pip._internal.exceptions import InstallationError -from pip._internal.utils.logging import indent_log +from pip._internal.models.wheel import Wheel +from pip._internal.utils import compatibility_tags from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.wheel import Wheel if MYPY_CHECK_RUNNING: - from typing import Optional, List, Tuple, Dict, Iterable # noqa: F401 - from pip._internal.req.req_install import InstallRequirement # noqa: F401 + from typing import Dict, Iterable, List, Optional, Tuple + from pip._internal.req.req_install import InstallRequirement logger = logging.getLogger(__name__) @@ -18,33 +20,49 @@ logger = logging.getLogger(__name__) class 
RequirementSet(object): - def __init__(self, require_hashes=False, check_supported_wheels=True): - # type: (bool, bool) -> None + def __init__(self, check_supported_wheels=True): + # type: (bool) -> None """Create a RequirementSet. """ self.requirements = OrderedDict() # type: Dict[str, InstallRequirement] # noqa: E501 - self.require_hashes = require_hashes self.check_supported_wheels = check_supported_wheels - # Mapping of alias: real_name - self.requirement_aliases = {} # type: Dict[str, str] self.unnamed_requirements = [] # type: List[InstallRequirement] - self.successfully_downloaded = [] # type: List[InstallRequirement] - self.reqs_to_cleanup = [] # type: List[InstallRequirement] def __str__(self): - reqs = [req for req in self.requirements.values() - if not req.comes_from] - reqs.sort(key=lambda req: req.name.lower()) - return ' '.join([str(req.req) for req in reqs]) + # type: () -> str + requirements = sorted( + (req for req in self.requirements.values() if not req.comes_from), + key=lambda req: canonicalize_name(req.name), + ) + return ' '.join(str(req.req) for req in requirements) def __repr__(self): - reqs = [req for req in self.requirements.values()] - reqs.sort(key=lambda req: req.name.lower()) - reqs_str = ', '.join([str(req.req) for req in reqs]) - return ('<%s object; %d requirement(s): %s>' - % (self.__class__.__name__, len(reqs), reqs_str)) + # type: () -> str + requirements = sorted( + self.requirements.values(), + key=lambda req: canonicalize_name(req.name), + ) + + format_string = '<{classname} object; {count} requirement(s): {reqs}>' + return format_string.format( + classname=self.__class__.__name__, + count=len(requirements), + reqs=', '.join(str(req.req) for req in requirements), + ) + + def add_unnamed_requirement(self, install_req): + # type: (InstallRequirement) -> None + assert not install_req.name + self.unnamed_requirements.append(install_req) + + def add_named_requirement(self, install_req): + # type: (InstallRequirement) -> None + 
assert install_req.name + + project_name = canonicalize_name(install_req.name) + self.requirements[project_name] = install_req def add_requirement( self, @@ -67,13 +85,11 @@ class RequirementSet(object): the requirement is not applicable, or [install_req] if the requirement is applicable and has just been added. """ - name = install_req.name - # If the markers do not match, ignore this requirement. if not install_req.match_markers(extras_requested): logger.info( "Ignoring %s: markers '%s' don't match your environment", - name, install_req.markers, + install_req.name, install_req.markers, ) return [], None @@ -83,27 +99,27 @@ class RequirementSet(object): # single requirements file. if install_req.link and install_req.link.is_wheel: wheel = Wheel(install_req.link.filename) - if self.check_supported_wheels and not wheel.supported(): + tags = compatibility_tags.get_supported() + if (self.check_supported_wheels and not wheel.supported(tags)): raise InstallationError( - "%s is not a supported wheel on this platform." % - wheel.filename + "{} is not a supported wheel on this platform.".format( + wheel.filename) ) # This next bit is really a sanity check. - assert install_req.is_direct == (parent_req_name is None), ( - "a direct req shouldn't have a parent and also, " - "a non direct req should have a parent" + assert not install_req.user_supplied or parent_req_name is None, ( + "a user supplied req shouldn't have a parent" ) # Unnamed requirements are scanned again and the requirement won't be # added as a dependency until after scanning. 
- if not name: - # url or path requirement w/o an egg fragment - self.unnamed_requirements.append(install_req) + if not install_req.name: + self.add_unnamed_requirement(install_req) return [install_req], None try: - existing_req = self.get_requirement(name) + existing_req = self.get_requirement( + install_req.name) # type: Optional[InstallRequirement] except KeyError: existing_req = None @@ -116,18 +132,15 @@ class RequirementSet(object): ) if has_conflicting_requirement: raise InstallationError( - "Double requirement given: %s (already in %s, name=%r)" - % (install_req, existing_req, name) + "Double requirement given: {} (already in {}, name={!r})" + .format(install_req, existing_req, install_req.name) ) # When no existing requirement exists, add the requirement as a # dependency and it will be scanned again after. if not existing_req: - self.requirements[name] = install_req - # FIXME: what about other normalizations? E.g., _ vs. -? - if name.lower() != name: - self.requirement_aliases[name.lower()] = name - # We'd want to rescan this requirements later + self.add_named_requirement(install_req) + # We'd want to rescan this requirement later return [install_req], install_req # Assume there's no need to scan, and that we've already @@ -143,15 +156,18 @@ class RequirementSet(object): ) ) if does_not_satisfy_constraint: - self.reqs_to_cleanup.append(install_req) raise InstallationError( - "Could not satisfy constraints for '%s': " + "Could not satisfy constraints for '{}': " "installation from path or url cannot be " - "constrained to a version" % name, + "constrained to a version".format(install_req.name) ) # If we're now installing a constraint, mark the existing # object for real installation. existing_req.constraint = False + # If we're now installing a user supplied requirement, + # mark the existing object as such. 
+ if install_req.user_supplied: + existing_req.user_supplied = True existing_req.extras = tuple(sorted( set(existing_req.extras) | set(install_req.extras) )) @@ -163,35 +179,25 @@ class RequirementSet(object): # scanning again. return [existing_req], existing_req - def has_requirement(self, project_name): + def has_requirement(self, name): # type: (str) -> bool - name = project_name.lower() - if (name in self.requirements and - not self.requirements[name].constraint or - name in self.requirement_aliases and - not self.requirements[self.requirement_aliases[name]].constraint): - return True - return False + project_name = canonicalize_name(name) - @property - def has_requirements(self): - # type: () -> List[InstallRequirement] - return list(req for req in self.requirements.values() if not - req.constraint) or self.unnamed_requirements + return ( + project_name in self.requirements and + not self.requirements[project_name].constraint + ) - def get_requirement(self, project_name): + def get_requirement(self, name): # type: (str) -> InstallRequirement - for name in project_name, project_name.lower(): - if name in self.requirements: - return self.requirements[name] - if name in self.requirement_aliases: - return self.requirements[self.requirement_aliases[name]] - raise KeyError("No project with the name %r" % project_name) - - def cleanup_files(self): - # type: () -> None - """Clean up files, remove builds.""" - logger.debug('Cleaning up...') - with indent_log(): - for req in self.reqs_to_cleanup: - req.remove_temporary_source() + project_name = canonicalize_name(name) + + if project_name in self.requirements: + return self.requirements[project_name] + + raise KeyError("No project with the name {name!r}".format(**locals())) + + @property + def all_requirements(self): + # type: () -> List[InstallRequirement] + return self.unnamed_requirements + list(self.requirements.values()) diff --git 
a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/req_tracker.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/req_tracker.py index 82e084a4..13fb2456 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/req_tracker.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/req_tracker.py @@ -6,36 +6,83 @@ import hashlib import logging import os +from pip._vendor import contextlib2 + from pip._internal.utils.temp_dir import TempDirectory from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Set, Iterator # noqa: F401 - from pip._internal.req.req_install import InstallRequirement # noqa: F401 - from pip._internal.models.link import Link # noqa: F401 + from types import TracebackType + from typing import Dict, Iterator, Optional, Set, Type, Union + from pip._internal.req.req_install import InstallRequirement + from pip._internal.models.link import Link logger = logging.getLogger(__name__) +@contextlib.contextmanager +def update_env_context_manager(**changes): + # type: (str) -> Iterator[None] + target = os.environ + + # Save values from the target and change them. + non_existent_marker = object() + saved_values = {} # type: Dict[str, Union[object, str]] + for name, new_value in changes.items(): + try: + saved_values[name] = target[name] + except KeyError: + saved_values[name] = non_existent_marker + target[name] = new_value + + try: + yield + finally: + # Restore original values in the target. 
+ for name, original_value in saved_values.items(): + if original_value is non_existent_marker: + del target[name] + else: + assert isinstance(original_value, str) # for mypy + target[name] = original_value + + +@contextlib.contextmanager +def get_requirement_tracker(): + # type: () -> Iterator[RequirementTracker] + root = os.environ.get('PIP_REQ_TRACKER') + with contextlib2.ExitStack() as ctx: + if root is None: + root = ctx.enter_context( + TempDirectory(kind='req-tracker') + ).path + ctx.enter_context(update_env_context_manager(PIP_REQ_TRACKER=root)) + logger.debug("Initialized build tracking at %s", root) + + with RequirementTracker(root) as tracker: + yield tracker + + class RequirementTracker(object): - def __init__(self): - # type: () -> None - self._root = os.environ.get('PIP_REQ_TRACKER') - if self._root is None: - self._temp_dir = TempDirectory(delete=False, kind='req-tracker') - self._temp_dir.create() - self._root = os.environ['PIP_REQ_TRACKER'] = self._temp_dir.path - logger.debug('Created requirements tracker %r', self._root) - else: - self._temp_dir = None - logger.debug('Re-using requirements tracker %r', self._root) + def __init__(self, root): + # type: (str) -> None + self._root = root self._entries = set() # type: Set[InstallRequirement] + logger.debug("Created build tracker: %s", self._root) def __enter__(self): + # type: () -> RequirementTracker + logger.debug("Entered build tracker: %s", self._root) return self - def __exit__(self, exc_type, exc_val, exc_tb): + def __exit__( + self, + exc_type, # type: Optional[Type[BaseException]] + exc_val, # type: Optional[BaseException] + exc_tb # type: Optional[TracebackType] + ): + # type: (...) -> None self.cleanup() def _entry_path(self, link): @@ -45,40 +92,55 @@ class RequirementTracker(object): def add(self, req): # type: (InstallRequirement) -> None - link = req.link - info = str(req) - entry_path = self._entry_path(link) + """Add an InstallRequirement to build tracking. 
+ """ + + assert req.link + # Get the file to write information about this requirement. + entry_path = self._entry_path(req.link) + + # Try reading from the file. If it exists and can be read from, a build + # is already in progress, so a LookupError is raised. try: with open(entry_path) as fp: - # Error, these's already a build in progress. - raise LookupError('%s is already being built: %s' - % (link, fp.read())) + contents = fp.read() except IOError as e: + # if the error is anything other than "file does not exist", raise. if e.errno != errno.ENOENT: raise - assert req not in self._entries - with open(entry_path, 'w') as fp: - fp.write(info) - self._entries.add(req) - logger.debug('Added %s to build tracker %r', req, self._root) + else: + message = '{} is already being built: {}'.format( + req.link, contents) + raise LookupError(message) + + # If we're here, req should really not be building already. + assert req not in self._entries + + # Start tracking this requirement. + with open(entry_path, 'w') as fp: + fp.write(str(req)) + self._entries.add(req) + + logger.debug('Added %s to build tracker %r', req, self._root) def remove(self, req): # type: (InstallRequirement) -> None - link = req.link + """Remove an InstallRequirement from build tracking. + """ + + assert req.link + # Delete the created file and the corresponding entries. 
+ os.unlink(self._entry_path(req.link)) self._entries.remove(req) - os.unlink(self._entry_path(link)) + logger.debug('Removed %s from build tracker %r', req, self._root) def cleanup(self): # type: () -> None for req in set(self._entries): self.remove(req) - remove = self._temp_dir is not None - if remove: - self._temp_dir.cleanup() - logger.debug('%s build tracker %r', - 'Removed' if remove else 'Cleaned', - self._root) + + logger.debug("Removed build tracker: %r", self._root) @contextlib.contextmanager def track(self, req): diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/req_uninstall.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/req_uninstall.py index c80959e4..69719d33 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/req_uninstall.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/req/req_uninstall.py @@ -14,15 +14,30 @@ from pip._internal.locations import bin_py, bin_user from pip._internal.utils.compat import WINDOWS, cache_from_source, uses_pycache from pip._internal.utils.logging import indent_log from pip._internal.utils.misc import ( - FakeFile, ask, dist_in_usersite, dist_is_local, egg_link_path, is_local, - normalize_path, renames, rmtree, + FakeFile, + ask, + dist_in_usersite, + dist_is_local, + egg_link_path, + is_local, + normalize_path, + renames, + rmtree, ) from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import ( + Any, Callable, Dict, Iterable, Iterator, List, Optional, Set, Tuple, + ) + from pip._vendor.pkg_resources import Distribution logger = logging.getLogger(__name__) def _script_names(dist, script_name, is_gui): + # type: (Distribution, str, bool) -> List[str] """Create the fully qualified name of the files created by {console,gui}_scripts for the given ``dist``. 
Returns the list of file names @@ -44,9 +59,11 @@ def _script_names(dist, script_name, is_gui): def _unique(fn): + # type: (Callable[..., Iterator[Any]]) -> Callable[..., Iterator[Any]] @functools.wraps(fn) def unique(*args, **kw): - seen = set() + # type: (Any, Any) -> Iterator[Any] + seen = set() # type: Set[Any] for item in fn(*args, **kw): if item not in seen: seen.add(item) @@ -56,6 +73,7 @@ def _unique(fn): @_unique def uninstallation_paths(dist): + # type: (Distribution) -> Iterator[str] """ Yield all the uninstallation paths for dist based on RECORD-without-.py[co] @@ -78,13 +96,14 @@ def uninstallation_paths(dist): def compact(paths): + # type: (Iterable[str]) -> Set[str] """Compact a path set to contain the minimal number of paths necessary to contain all paths in the set. If /a/path/ and /a/path/to/a/file.txt are both in the set, leave only the shorter path.""" sep = os.path.sep - short_paths = set() + short_paths = set() # type: Set[str] for path in sorted(paths, key=len): should_skip = any( path.startswith(shortpath.rstrip("*")) and @@ -97,6 +116,7 @@ def compact(paths): def compress_for_rename(paths): + # type: (Iterable[str]) -> Set[str] """Returns a set containing the paths that need to be renamed. This set may include directories when the original sequence of paths @@ -106,9 +126,10 @@ def compress_for_rename(paths): remaining = set(case_map) unchecked = sorted(set(os.path.split(p)[0] for p in case_map.values()), key=len) - wildcards = set() + wildcards = set() # type: Set[str] def norm_join(*a): + # type: (str) -> str return os.path.normcase(os.path.join(*a)) for root in unchecked: @@ -117,8 +138,8 @@ def compress_for_rename(paths): # This directory has already been handled. 
continue - all_files = set() - all_subdirs = set() + all_files = set() # type: Set[str] + all_subdirs = set() # type: Set[str] for dirname, subdirs, files in os.walk(root): all_subdirs.update(norm_join(root, dirname, d) for d in subdirs) @@ -135,6 +156,7 @@ def compress_for_rename(paths): def compress_for_output_listing(paths): + # type: (Iterable[str]) -> Tuple[Set[str], Set[str]] """Returns a tuple of 2 sets of which paths to display to user The first set contains paths that would be deleted. Files of a package @@ -145,7 +167,7 @@ def compress_for_output_listing(paths): folders. """ - will_remove = list(paths) + will_remove = set(paths) will_skip = set() # Determine folders and files @@ -158,7 +180,8 @@ def compress_for_output_listing(paths): folders.add(os.path.dirname(path)) files.add(path) - _normcased_files = set(map(os.path.normcase, files)) + # probably this one https://github.com/python/mypy/issues/390 + _normcased_files = set(map(os.path.normcase, files)) # type: ignore folders = compact(folders) @@ -187,30 +210,31 @@ class StashedUninstallPathSet(object): """A set of file rename operations to stash files while tentatively uninstalling them.""" def __init__(self): + # type: () -> None # Mapping from source file root to [Adjacent]TempDirectory # for files under that directory. - self._save_dirs = {} + self._save_dirs = {} # type: Dict[str, TempDirectory] # (old path, new path) tuples for each move that may need # to be undone. - self._moves = [] + self._moves = [] # type: List[Tuple[str, str]] def _get_directory_stash(self, path): + # type: (str) -> str """Stashes a directory. 
Directories are stashed adjacent to their original location if possible, or else moved/copied into the user's temp dir.""" try: - save_dir = AdjacentTempDirectory(path) - save_dir.create() + save_dir = AdjacentTempDirectory(path) # type: TempDirectory except OSError: save_dir = TempDirectory(kind="uninstall") - save_dir.create() self._save_dirs[os.path.normcase(path)] = save_dir return save_dir.path def _get_file_stash(self, path): + # type: (str) -> str """Stashes a file. If no root has been provided, one will be created for the directory @@ -230,7 +254,6 @@ class StashedUninstallPathSet(object): # Did not find any suitable root head = os.path.dirname(path) save_dir = TempDirectory(kind='uninstall') - save_dir.create() self._save_dirs[head] = save_dir relpath = os.path.relpath(path, head) @@ -239,15 +262,18 @@ class StashedUninstallPathSet(object): return save_dir.path def stash(self, path): + # type: (str) -> str """Stashes the directory or file and returns its new location. + Handle symlinks as files to avoid modifying the symlink targets. """ - if os.path.isdir(path): + path_is_dir = os.path.isdir(path) and not os.path.islink(path) + if path_is_dir: new_path = self._get_directory_stash(path) else: new_path = self._get_file_stash(path) self._moves.append((path, new_path)) - if os.path.isdir(path) and os.path.isdir(new_path): + if (path_is_dir and os.path.isdir(new_path)): # If we're moving a directory, we need to # remove the destination first or else it will be # moved to inside the existing directory. 
@@ -258,6 +284,7 @@ class StashedUninstallPathSet(object): return new_path def commit(self): + # type: () -> None """Commits the uninstall by removing stashed files.""" for _, save_dir in self._save_dirs.items(): save_dir.cleanup() @@ -265,14 +292,15 @@ class StashedUninstallPathSet(object): self._save_dirs = {} def rollback(self): + # type: () -> None """Undoes the uninstall by moving stashed files back.""" for p in self._moves: - logging.info("Moving to %s\n from %s", *p) + logger.info("Moving to %s\n from %s", *p) for new_path, path in self._moves: try: logger.debug('Replacing %s from %s', new_path, path) - if os.path.isfile(new_path): + if os.path.isfile(new_path) or os.path.islink(new_path): os.unlink(new_path) elif os.path.isdir(new_path): rmtree(new_path) @@ -285,6 +313,7 @@ class StashedUninstallPathSet(object): @property def can_rollback(self): + # type: () -> bool return bool(self._moves) @@ -292,13 +321,15 @@ class UninstallPathSet(object): """A set of file paths to be removed in the uninstallation of a requirement.""" def __init__(self, dist): - self.paths = set() - self._refuse = set() - self.pth = {} + # type: (Distribution) -> None + self.paths = set() # type: Set[str] + self._refuse = set() # type: Set[str] + self.pth = {} # type: Dict[str, UninstallPthEntries] self.dist = dist self._moved_paths = StashedUninstallPathSet() def _permitted(self, path): + # type: (str) -> bool """ Return True if the given path is one we are permitted to remove/modify, False otherwise. 
@@ -307,6 +338,7 @@ class UninstallPathSet(object): return is_local(path) def add(self, path): + # type: (str) -> None head, tail = os.path.split(path) # we normalize the head to resolve parent directory symlinks, but not @@ -326,6 +358,7 @@ class UninstallPathSet(object): self.add(cache_from_source(path)) def add_pth(self, pth_file, entry): + # type: (str, str) -> None pth_file = normalize_path(pth_file) if self._permitted(pth_file): if pth_file not in self.pth: @@ -335,6 +368,7 @@ class UninstallPathSet(object): self._refuse.add(pth_file) def remove(self, auto_confirm=False, verbose=False): + # type: (bool, bool) -> None """Remove paths in ``self.paths`` with confirmation (unless ``auto_confirm`` is True).""" @@ -366,10 +400,12 @@ class UninstallPathSet(object): logger.info('Successfully uninstalled %s', dist_name_version) def _allowed_to_proceed(self, verbose): + # type: (bool) -> bool """Display which files would be deleted and prompt for confirmation """ def _display(msg, paths): + # type: (str, Iterable[str]) -> None if not paths: return @@ -383,7 +419,7 @@ class UninstallPathSet(object): else: # In verbose mode, display all the files that are going to be # deleted. - will_remove = list(self.paths) + will_remove = set(self.paths) will_skip = set() _display('Would remove:', will_remove) @@ -395,24 +431,27 @@ class UninstallPathSet(object): return ask('Proceed (y/n)? 
', ('y', 'n')) == 'y' def rollback(self): + # type: () -> None """Rollback the changes previously made by remove().""" if not self._moved_paths.can_rollback: logger.error( "Can't roll back %s; was not uninstalled", self.dist.project_name, ) - return False + return logger.info('Rolling back uninstall of %s', self.dist.project_name) self._moved_paths.rollback() for pth in self.pth.values(): pth.rollback() def commit(self): + # type: () -> None """Remove temporary save dir: rollback will no longer be possible.""" self._moved_paths.commit() @classmethod def from_dist(cls, dist): + # type: (Distribution) -> UninstallPathSet dist_path = normalize_path(dist.location) if not dist_is_local(dist): logger.info( @@ -501,8 +540,9 @@ class UninstallPathSet(object): with open(develop_egg_link, 'r') as fh: link_pointer = os.path.normcase(fh.readline().strip()) assert (link_pointer == dist.location), ( - 'Egg-link %s does not match installed location of %s ' - '(at %s)' % (link_pointer, dist.project_name, dist.location) + 'Egg-link {} does not match installed location of {} ' + '(at {})'.format( + link_pointer, dist.project_name, dist.location) ) paths_to_remove.add(develop_egg_link) easy_install_pth = os.path.join(os.path.dirname(develop_egg_link), @@ -544,26 +584,37 @@ class UninstallPathSet(object): class UninstallPthEntries(object): def __init__(self, pth_file): - if not os.path.isfile(pth_file): - raise UninstallationError( - "Cannot remove entries from nonexistent file %s" % pth_file - ) + # type: (str) -> None self.file = pth_file - self.entries = set() - self._saved_lines = None + self.entries = set() # type: Set[str] + self._saved_lines = None # type: Optional[List[bytes]] def add(self, entry): + # type: (str) -> None entry = os.path.normcase(entry) # On Windows, os.path.normcase converts the entry to use # backslashes. This is correct for entries that describe absolute # paths outside of site-packages, but all the others use forward # slashes. 
+ # os.path.splitdrive is used instead of os.path.isabs because isabs + # treats non-absolute paths with drive letter markings like c:foo\bar + # as absolute paths. It also does not recognize UNC paths if they don't + # have more than "\\sever\share". Valid examples: "\\server\share\" or + # "\\server\share\folder". Python 2.7.8+ support UNC in splitdrive. if WINDOWS and not os.path.splitdrive(entry)[0]: entry = entry.replace('\\', '/') self.entries.add(entry) def remove(self): + # type: () -> None logger.debug('Removing pth entries from %s:', self.file) + + # If the file doesn't exist, log a warning and return + if not os.path.isfile(self.file): + logger.warning( + "Cannot remove entries from nonexistent file %s", self.file + ) + return with open(self.file, 'rb') as fh: # windows uses '\r\n' with py3k, but uses '\n' with py2.x lines = fh.readlines() @@ -585,6 +636,7 @@ class UninstallPthEntries(object): fh.writelines(lines) def rollback(self): + # type: () -> bool if self._saved_lines is None: logger.error( 'Cannot roll back changes to %s, none were made', self.file diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/base.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/base.py new file mode 100644 index 00000000..2fa118bd --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/base.py @@ -0,0 +1,20 @@ +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Callable, List + from pip._internal.req.req_install import InstallRequirement + from pip._internal.req.req_set import RequirementSet + + InstallRequirementProvider = Callable[ + [str, InstallRequirement], 
InstallRequirement + ] + + +class BaseResolver(object): + def resolve(self, root_reqs, check_supported_wheels): + # type: (List[InstallRequirement], bool) -> RequirementSet + raise NotImplementedError() + + def get_installation_order(self, req_set): + # type: (RequirementSet) -> List[InstallRequirement] + raise NotImplementedError() diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/legacy/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/legacy/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolve.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/legacy/resolver.py similarity index 60% rename from Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolve.py rename to Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/legacy/resolver.py index 33f572f1..c9b4c661 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolve.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/legacy/resolver.py @@ -10,35 +10,102 @@ for sub-dependencies a. "first found, wins" (where the order is breadth first) """ +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False +# mypy: disallow-untyped-defs=False + import logging +import sys from collections import defaultdict from itertools import chain +from pip._vendor.packaging import specifiers + from pip._internal.exceptions import ( - BestVersionAlreadyInstalled, DistributionNotFound, HashError, HashErrors, + BestVersionAlreadyInstalled, + DistributionNotFound, + HashError, + HashErrors, UnsupportedPythonVersion, ) -from pip._internal.req.constructors import install_req_from_req_string +from pip._internal.req.req_install import check_invalid_constraint_type +from pip._internal.req.req_set import RequirementSet +from pip._internal.resolution.base import BaseResolver +from pip._internal.utils.compatibility_tags import get_supported from pip._internal.utils.logging import indent_log -from pip._internal.utils.misc import dist_in_usersite, ensure_dir -from pip._internal.utils.packaging import check_dist_requires_python +from pip._internal.utils.misc import dist_in_usersite, normalize_version_info +from pip._internal.utils.packaging import ( + check_requires_python, + get_requires_python, +) from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Optional, DefaultDict, List, Set # noqa: F401 - from pip._internal.download import PipSession # noqa: F401 - from pip._internal.req.req_install import InstallRequirement # noqa: F401 - from pip._internal.index import PackageFinder # noqa: F401 - from pip._internal.req.req_set import RequirementSet # noqa: F401 - from pip._internal.operations.prepare import ( # noqa: F401 - DistAbstraction, RequirementPreparer - ) - from pip._internal.cache import WheelCache # noqa: F401 + from typing import DefaultDict, List, Optional, Set, Tuple + from pip._vendor import pkg_resources + + from pip._internal.cache import WheelCache + from pip._internal.distributions import AbstractDistribution + from pip._internal.index.package_finder import PackageFinder + from 
pip._internal.models.link import Link + from pip._internal.operations.prepare import RequirementPreparer + from pip._internal.req.req_install import InstallRequirement + from pip._internal.resolution.base import InstallRequirementProvider + + DiscoveredDependencies = DefaultDict[str, List[InstallRequirement]] logger = logging.getLogger(__name__) -class Resolver(object): +def _check_dist_requires_python( + dist, # type: pkg_resources.Distribution + version_info, # type: Tuple[int, int, int] + ignore_requires_python=False, # type: bool +): + # type: (...) -> None + """ + Check whether the given Python version is compatible with a distribution's + "Requires-Python" value. + + :param version_info: A 3-tuple of ints representing the Python + major-minor-micro version to check. + :param ignore_requires_python: Whether to ignore the "Requires-Python" + value if the given Python version isn't compatible. + + :raises UnsupportedPythonVersion: When the given Python version isn't + compatible. + """ + requires_python = get_requires_python(dist) + try: + is_compatible = check_requires_python( + requires_python, version_info=version_info, + ) + except specifiers.InvalidSpecifier as exc: + logger.warning( + "Package %r has an invalid Requires-Python: %s", + dist.project_name, exc, + ) + return + + if is_compatible: + return + + version = '.'.join(map(str, version_info)) + if ignore_requires_python: + logger.debug( + 'Ignoring failed Requires-Python check for package %r: ' + '%s not in %r', + dist.project_name, version, requires_python, + ) + return + + raise UnsupportedPythonVersion( + 'Package {!r} requires a different Python: {} not in {!r}'.format( + dist.project_name, version, requires_python, + )) + + +class Resolver(BaseResolver): """Resolves which packages need to be installed/uninstalled to perform \ the requested operation without breaking the requirements of any package. 
""" @@ -48,47 +115,45 @@ class Resolver(object): def __init__( self, preparer, # type: RequirementPreparer - session, # type: PipSession finder, # type: PackageFinder wheel_cache, # type: Optional[WheelCache] + make_install_req, # type: InstallRequirementProvider use_user_site, # type: bool ignore_dependencies, # type: bool ignore_installed, # type: bool ignore_requires_python, # type: bool force_reinstall, # type: bool - isolated, # type: bool upgrade_strategy, # type: str - use_pep517=None # type: Optional[bool] + py_version_info=None, # type: Optional[Tuple[int, ...]] ): # type: (...) -> None super(Resolver, self).__init__() assert upgrade_strategy in self._allowed_strategies + if py_version_info is None: + py_version_info = sys.version_info[:3] + else: + py_version_info = normalize_version_info(py_version_info) + + self._py_version_info = py_version_info + self.preparer = preparer self.finder = finder - self.session = session - - # NOTE: This would eventually be replaced with a cache that can give - # information about both sdist and wheels transparently. 
self.wheel_cache = wheel_cache - # This is set in resolve - self.require_hashes = None # type: Optional[bool] - self.upgrade_strategy = upgrade_strategy self.force_reinstall = force_reinstall - self.isolated = isolated self.ignore_dependencies = ignore_dependencies self.ignore_installed = ignore_installed self.ignore_requires_python = ignore_requires_python self.use_user_site = use_user_site - self.use_pep517 = use_pep517 + self._make_install_req = make_install_req self._discovered_dependencies = \ - defaultdict(list) # type: DefaultDict[str, List] + defaultdict(list) # type: DiscoveredDependencies - def resolve(self, requirement_set): - # type: (RequirementSet) -> None + def resolve(self, root_reqs, check_supported_wheels): + # type: (List[InstallRequirement], bool) -> RequirementSet """Resolve what operations need to be done As a side-effect of this method, the packages (and their dependencies) @@ -99,37 +164,23 @@ class Resolver(object): possible to move the preparation to become a step separated from dependency resolution. """ - # make the wheelhouse - if self.preparer.wheel_download_dir: - ensure_dir(self.preparer.wheel_download_dir) - - # If any top-level requirement has a hash specified, enter - # hash-checking mode, which requires hashes from all. - root_reqs = ( - requirement_set.unnamed_requirements + - list(requirement_set.requirements.values()) - ) - self.require_hashes = ( - requirement_set.require_hashes or - any(req.has_hash_options for req in root_reqs) + requirement_set = RequirementSet( + check_supported_wheels=check_supported_wheels ) - - # Display where finder is looking for packages - locations = self.finder.get_formatted_locations() - if locations: - logger.info(locations) + for req in root_reqs: + if req.constraint: + check_invalid_constraint_type(req) + requirement_set.add_requirement(req) # Actually prepare the files, and collect any exceptions. 
Most hash # exceptions cannot be checked ahead of time, because - # req.populate_link() needs to be called before we can make decisions + # _populate_link() needs to be called before we can make decisions # based on link type. discovered_reqs = [] # type: List[InstallRequirement] hash_errors = HashErrors() - for req in chain(root_reqs, discovered_reqs): + for req in chain(requirement_set.all_requirements, discovered_reqs): try: - discovered_reqs.extend( - self._resolve_one(requirement_set, req) - ) + discovered_reqs.extend(self._resolve_one(requirement_set, req)) except HashError as exc: exc.req = req hash_errors.append(exc) @@ -137,6 +188,8 @@ class Resolver(object): if hash_errors: raise hash_errors + return requirement_set + def _is_upgrade_allowed(self, req): # type: (InstallRequirement) -> bool if self.upgrade_strategy == "to-satisfy-only": @@ -145,7 +198,7 @@ class Resolver(object): return True else: assert self.upgrade_strategy == "only-if-needed" - return req.is_direct + return req.user_supplied or req.constraint def _set_req_to_reinstall(self, req): # type: (InstallRequirement) -> None @@ -155,10 +208,9 @@ class Resolver(object): # Don't uninstall the conflict if doing a user install and the # conflict is not a user install. if not self.use_user_site or dist_in_usersite(req.satisfied_by): - req.conflicts_with = req.satisfied_by + req.should_reinstall = True req.satisfied_by = None - # XXX: Stop passing requirement_set for options def _check_skip_installed(self, req_to_install): # type: (InstallRequirement) -> Optional[str] """Check if req_to_install should be skipped. 
@@ -212,19 +264,66 @@ class Resolver(object): self._set_req_to_reinstall(req_to_install) return None + def _find_requirement_link(self, req): + # type: (InstallRequirement) -> Optional[Link] + upgrade = self._is_upgrade_allowed(req) + best_candidate = self.finder.find_requirement(req, upgrade) + if not best_candidate: + return None + + # Log a warning per PEP 592 if necessary before returning. + link = best_candidate.link + if link.is_yanked: + reason = link.yanked_reason or '' + msg = ( + # Mark this as a unicode string to prevent + # "UnicodeEncodeError: 'ascii' codec can't encode character" + # in Python 2 when the reason contains non-ascii characters. + u'The candidate selected for download or install is a ' + 'yanked version: {candidate}\n' + 'Reason for being yanked: {reason}' + ).format(candidate=best_candidate, reason=reason) + logger.warning(msg) + + return link + + def _populate_link(self, req): + # type: (InstallRequirement) -> None + """Ensure that if a link can be found for this, that it is found. + + Note that req.link may still be None - if the requirement is already + installed and not needed to be upgraded based on the return value of + _is_upgrade_allowed(). + + If preparer.require_hashes is True, don't use the wheel cache, because + cached wheels, always built locally, have different hashes than the + files downloaded from the index server and thus throw false hash + mismatches. Furthermore, cached wheels at present have undeterministic + contents due to file modification times. 
+ """ + if req.link is None: + req.link = self._find_requirement_link(req) + + if self.wheel_cache is None or self.preparer.require_hashes: + return + cache_entry = self.wheel_cache.get_cache_entry( + link=req.link, + package_name=req.name, + supported_tags=get_supported(), + ) + if cache_entry is not None: + logger.debug('Using cached wheel link: %s', cache_entry.link) + if req.link is req.original_link and cache_entry.persistent: + req.original_link_is_in_wheel_cache = True + req.link = cache_entry.link + def _get_abstract_dist_for(self, req): - # type: (InstallRequirement) -> DistAbstraction + # type: (InstallRequirement) -> AbstractDistribution """Takes a InstallRequirement and returns a single AbstractDist \ representing a prepared variant of the same. """ - assert self.require_hashes is not None, ( - "require_hashes should have been set in Resolver.resolve()" - ) - if req.editable: - return self.preparer.prepare_editable_requirement( - req, self.require_hashes, self.use_user_site, self.finder, - ) + return self.preparer.prepare_editable_requirement(req) # satisfied_by is only evaluated by calling _check_skip_installed, # so it must be None here. @@ -233,14 +332,12 @@ class Resolver(object): if req.satisfied_by: return self.preparer.prepare_installed_requirement( - req, self.require_hashes, skip_reason + req, skip_reason ) - upgrade_allowed = self._is_upgrade_allowed(req) - abstract_dist = self.preparer.prepare_linked_requirement( - req, self.session, self.finder, upgrade_allowed, - self.require_hashes - ) + # We eagerly populate the link, since that's our "legacy" behavior. + self._populate_link(req) + abstract_dist = self.preparer.prepare_linked_requirement(req) # NOTE # The following portion is for determining if a certain package is @@ -273,7 +370,7 @@ class Resolver(object): def _resolve_one( self, requirement_set, # type: RequirementSet - req_to_install # type: InstallRequirement + req_to_install, # type: InstallRequirement ): # type: (...) 
-> List[InstallRequirement] """Prepare a single requirements file. @@ -288,30 +385,23 @@ class Resolver(object): req_to_install.prepared = True - # register tmp src for cleanup in case something goes wrong - requirement_set.reqs_to_cleanup.append(req_to_install) - abstract_dist = self._get_abstract_dist_for(req_to_install) # Parse and return dependencies - dist = abstract_dist.dist() - try: - check_dist_requires_python(dist) - except UnsupportedPythonVersion as err: - if self.ignore_requires_python: - logger.warning(err.args[0]) - else: - raise + dist = abstract_dist.get_pkg_resources_distribution() + # This will raise UnsupportedPythonVersion if the given Python + # version isn't compatible with the distribution's Requires-Python. + _check_dist_requires_python( + dist, version_info=self._py_version_info, + ignore_requires_python=self.ignore_requires_python, + ) more_reqs = [] # type: List[InstallRequirement] def add_req(subreq, extras_requested): - sub_install_req = install_req_from_req_string( + sub_install_req = self._make_install_req( str(subreq), req_to_install, - isolated=self.isolated, - wheel_cache=self.wheel_cache, - use_pep517=self.use_pep517 ) parent_req_name = req_to_install.name to_scan_again, add_to_parent = requirement_set.add_requirement( @@ -330,7 +420,9 @@ class Resolver(object): # can refer to it when adding dependencies. if not requirement_set.has_requirement(req_to_install.name): # 'unnamed' requirements will get added here - req_to_install.is_direct = True + # 'unnamed' requirements can only come from being directly + # provided by the user. 
+ assert req_to_install.user_supplied requirement_set.add_requirement( req_to_install, parent_req_name=None, ) @@ -346,7 +438,7 @@ class Resolver(object): ) for missing in missing_requested: logger.warning( - '%s does not provide the extra \'%s\'', + "%s does not provide the extra '%s'", dist, missing ) @@ -360,7 +452,7 @@ class Resolver(object): # XXX: --no-install leads this to report 'Successfully # downloaded' for only non-editable reqs, even though we took # action on them. - requirement_set.successfully_downloaded.append(req_to_install) + req_to_install.successfully_downloaded = True return more_reqs diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/base.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/base.py new file mode 100644 index 00000000..a155a110 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/base.py @@ -0,0 +1,82 @@ +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import FrozenSet, Iterable, Optional, Tuple + + from pip._vendor.packaging.version import _BaseVersion + + from pip._internal.models.link import Link + from pip._internal.req.req_install import InstallRequirement + + CandidateLookup = Tuple[ + Optional["Candidate"], + Optional[InstallRequirement], + ] + + +def format_name(project, extras): + # type: (str, FrozenSet[str]) -> str + if not extras: + return project + canonical_extras = sorted(canonicalize_name(e) for e in extras) + return "{}[{}]".format(project, ",".join(canonical_extras)) + + +class Requirement(object): + 
@property + def name(self): + # type: () -> str + raise NotImplementedError("Subclass should override") + + def is_satisfied_by(self, candidate): + # type: (Candidate) -> bool + return False + + def get_candidate_lookup(self): + # type: () -> CandidateLookup + raise NotImplementedError("Subclass should override") + + def format_for_error(self): + # type: () -> str + raise NotImplementedError("Subclass should override") + + +class Candidate(object): + @property + def name(self): + # type: () -> str + raise NotImplementedError("Override in subclass") + + @property + def version(self): + # type: () -> _BaseVersion + raise NotImplementedError("Override in subclass") + + @property + def is_installed(self): + # type: () -> bool + raise NotImplementedError("Override in subclass") + + @property + def is_editable(self): + # type: () -> bool + raise NotImplementedError("Override in subclass") + + @property + def source_link(self): + # type: () -> Optional[Link] + raise NotImplementedError("Override in subclass") + + def iter_dependencies(self): + # type: () -> Iterable[Optional[Requirement]] + raise NotImplementedError("Override in subclass") + + def get_install_requirement(self): + # type: () -> Optional[InstallRequirement] + raise NotImplementedError("Override in subclass") + + def format_for_error(self): + # type: () -> str + raise NotImplementedError("Subclass should override") diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/candidates.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/candidates.py new file mode 100644 index 00000000..c289bb58 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/candidates.py @@ -0,0 +1,594 @@ +import logging +import sys + +from pip._vendor.contextlib2 import suppress +from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet +from pip._vendor.packaging.utils import 
canonicalize_name +from pip._vendor.packaging.version import Version + +from pip._internal.exceptions import HashError, MetadataInconsistent +from pip._internal.network.lazy_wheel import ( + HTTPRangeRequestUnsupported, + dist_from_wheel_url, +) +from pip._internal.req.constructors import ( + install_req_from_editable, + install_req_from_line, +) +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import dist_is_editable, normalize_version_info +from pip._internal.utils.packaging import get_requires_python +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +from .base import Candidate, format_name + +if MYPY_CHECK_RUNNING: + from typing import Any, FrozenSet, Iterable, Optional, Tuple, Union + + from pip._vendor.packaging.version import _BaseVersion + from pip._vendor.pkg_resources import Distribution + + from pip._internal.distributions import AbstractDistribution + from pip._internal.models.link import Link + + from .base import Requirement + from .factory import Factory + + BaseCandidate = Union[ + "AlreadyInstalledCandidate", + "EditableCandidate", + "LinkCandidate", + ] + + +logger = logging.getLogger(__name__) + + +def make_install_req_from_link(link, template): + # type: (Link, InstallRequirement) -> InstallRequirement + assert not template.editable, "template is editable" + if template.req: + line = str(template.req) + else: + line = link.url + ireq = install_req_from_line( + line, + user_supplied=template.user_supplied, + comes_from=template.comes_from, + use_pep517=template.use_pep517, + isolated=template.isolated, + constraint=template.constraint, + options=dict( + install_options=template.install_options, + global_options=template.global_options, + hashes=template.hash_options + ), + ) + ireq.original_link = template.original_link + ireq.link = link + return ireq + + +def make_install_req_from_editable(link, template): + # type: (Link, 
InstallRequirement) -> InstallRequirement + assert template.editable, "template not editable" + return install_req_from_editable( + link.url, + user_supplied=template.user_supplied, + comes_from=template.comes_from, + use_pep517=template.use_pep517, + isolated=template.isolated, + constraint=template.constraint, + options=dict( + install_options=template.install_options, + global_options=template.global_options, + hashes=template.hash_options + ), + ) + + +def make_install_req_from_dist(dist, template): + # type: (Distribution, InstallRequirement) -> InstallRequirement + project_name = canonicalize_name(dist.project_name) + if template.req: + line = str(template.req) + elif template.link: + line = "{} @ {}".format(project_name, template.link.url) + else: + line = "{}=={}".format(project_name, dist.parsed_version) + ireq = install_req_from_line( + line, + user_supplied=template.user_supplied, + comes_from=template.comes_from, + use_pep517=template.use_pep517, + isolated=template.isolated, + constraint=template.constraint, + options=dict( + install_options=template.install_options, + global_options=template.global_options, + hashes=template.hash_options + ), + ) + ireq.satisfied_by = dist + return ireq + + +class _InstallRequirementBackedCandidate(Candidate): + """A candidate backed by an ``InstallRequirement``. + + This represents a package request with the target not being already + in the environment, and needs to be fetched and installed. The backing + ``InstallRequirement`` is responsible for most of the leg work; this + class exposes appropriate information to the resolver. + + :param link: The link passed to the ``InstallRequirement``. The backing + ``InstallRequirement`` will use this link to fetch the distribution. + :param source_link: The link this candidate "originates" from. This is + different from ``link`` when the link is found in the wheel cache. + ``link`` would point to the wheel cache, while this points to the + found remote link (e.g. 
from pypi.org). + """ + is_installed = False + + def __init__( + self, + link, # type: Link + source_link, # type: Link + ireq, # type: InstallRequirement + factory, # type: Factory + name=None, # type: Optional[str] + version=None, # type: Optional[_BaseVersion] + ): + # type: (...) -> None + self._link = link + self._source_link = source_link + self._factory = factory + self._ireq = ireq + self._name = name + self._version = version + self._dist = None # type: Optional[Distribution] + self._prepared = False + + def __repr__(self): + # type: () -> str + return "{class_name}({link!r})".format( + class_name=self.__class__.__name__, + link=str(self._link), + ) + + def __hash__(self): + # type: () -> int + return hash((self.__class__, self._link)) + + def __eq__(self, other): + # type: (Any) -> bool + if isinstance(other, self.__class__): + return self._link == other._link + return False + + # Needed for Python 2, which does not implement this by default + def __ne__(self, other): + # type: (Any) -> bool + return not self.__eq__(other) + + @property + def source_link(self): + # type: () -> Optional[Link] + return self._source_link + + @property + def name(self): + # type: () -> str + """The normalised name of the project the candidate refers to""" + if self._name is None: + self._name = canonicalize_name(self.dist.project_name) + return self._name + + @property + def version(self): + # type: () -> _BaseVersion + if self._version is None: + self._version = self.dist.parsed_version + return self._version + + def format_for_error(self): + # type: () -> str + return "{} {} (from {})".format( + self.name, + self.version, + self._link.file_path if self._link.is_file else self._link + ) + + def _prepare_abstract_distribution(self): + # type: () -> AbstractDistribution + raise NotImplementedError("Override in subclass") + + def _check_metadata_consistency(self): + # type: () -> None + """Check for consistency of project name and version of dist.""" + # TODO: (Longer term) 
Rather than abort, reject this candidate + # and backtrack. This would need resolvelib support. + dist = self._dist # type: Distribution + name = canonicalize_name(dist.project_name) + if self._name is not None and self._name != name: + raise MetadataInconsistent(self._ireq, "name", dist.project_name) + version = dist.parsed_version + if self._version is not None and self._version != version: + raise MetadataInconsistent(self._ireq, "version", dist.version) + + def _prepare(self): + # type: () -> None + if self._prepared: + return + try: + abstract_dist = self._prepare_abstract_distribution() + except HashError as e: + e.req = self._ireq + raise + + self._dist = abstract_dist.get_pkg_resources_distribution() + assert self._dist is not None, "Distribution already installed" + self._check_metadata_consistency() + self._prepared = True + + def _fetch_metadata(self): + # type: () -> None + """Fetch metadata, using lazy wheel if possible.""" + preparer = self._factory.preparer + use_lazy_wheel = self._factory.use_lazy_wheel + remote_wheel = self._link.is_wheel and not self._link.is_file + if use_lazy_wheel and remote_wheel and not preparer.require_hashes: + assert self._name is not None + logger.info('Collecting %s', self._ireq.req or self._ireq) + # If HTTPRangeRequestUnsupported is raised, fallback silently. 
+ with indent_log(), suppress(HTTPRangeRequestUnsupported): + logger.info( + 'Obtaining dependency information from %s %s', + self._name, self._version, + ) + url = self._link.url.split('#', 1)[0] + session = preparer.downloader._session + self._dist = dist_from_wheel_url(self._name, url, session) + self._check_metadata_consistency() + if self._dist is None: + self._prepare() + + @property + def dist(self): + # type: () -> Distribution + if self._dist is None: + self._fetch_metadata() + return self._dist + + def _get_requires_python_specifier(self): + # type: () -> Optional[SpecifierSet] + requires_python = get_requires_python(self.dist) + if requires_python is None: + return None + try: + spec = SpecifierSet(requires_python) + except InvalidSpecifier as e: + logger.warning( + "Package %r has an invalid Requires-Python: %s", self.name, e, + ) + return None + return spec + + def iter_dependencies(self): + # type: () -> Iterable[Optional[Requirement]] + for r in self.dist.requires(): + yield self._factory.make_requirement_from_spec(str(r), self._ireq) + python_dep = self._factory.make_requires_python_requirement( + self._get_requires_python_specifier(), + ) + if python_dep: + yield python_dep + + def get_install_requirement(self): + # type: () -> Optional[InstallRequirement] + self._prepare() + return self._ireq + + +class LinkCandidate(_InstallRequirementBackedCandidate): + is_editable = False + + def __init__( + self, + link, # type: Link + template, # type: InstallRequirement + factory, # type: Factory + name=None, # type: Optional[str] + version=None, # type: Optional[_BaseVersion] + ): + # type: (...) 
-> None + source_link = link + cache_entry = factory.get_wheel_cache_entry(link, name) + if cache_entry is not None: + logger.debug("Using cached wheel link: %s", cache_entry.link) + link = cache_entry.link + ireq = make_install_req_from_link(link, template) + + if (cache_entry is not None and + cache_entry.persistent and + template.link is template.original_link): + ireq.original_link_is_in_wheel_cache = True + + super(LinkCandidate, self).__init__( + link=link, + source_link=source_link, + ireq=ireq, + factory=factory, + name=name, + version=version, + ) + + def _prepare_abstract_distribution(self): + # type: () -> AbstractDistribution + return self._factory.preparer.prepare_linked_requirement( + self._ireq, parallel_builds=True, + ) + + +class EditableCandidate(_InstallRequirementBackedCandidate): + is_editable = True + + def __init__( + self, + link, # type: Link + template, # type: InstallRequirement + factory, # type: Factory + name=None, # type: Optional[str] + version=None, # type: Optional[_BaseVersion] + ): + # type: (...) -> None + super(EditableCandidate, self).__init__( + link=link, + source_link=link, + ireq=make_install_req_from_editable(link, template), + factory=factory, + name=name, + version=version, + ) + + def _prepare_abstract_distribution(self): + # type: () -> AbstractDistribution + return self._factory.preparer.prepare_editable_requirement(self._ireq) + + +class AlreadyInstalledCandidate(Candidate): + is_installed = True + source_link = None + + def __init__( + self, + dist, # type: Distribution + template, # type: InstallRequirement + factory, # type: Factory + ): + # type: (...) -> None + self.dist = dist + self._ireq = make_install_req_from_dist(dist, template) + self._factory = factory + + # This is just logging some messages, so we can do it eagerly. + # The returned dist would be exactly the same as self.dist because we + # set satisfied_by in make_install_req_from_dist. 
+ # TODO: Supply reason based on force_reinstall and upgrade_strategy. + skip_reason = "already satisfied" + factory.preparer.prepare_installed_requirement(self._ireq, skip_reason) + + def __repr__(self): + # type: () -> str + return "{class_name}({distribution!r})".format( + class_name=self.__class__.__name__, + distribution=self.dist, + ) + + def __hash__(self): + # type: () -> int + return hash((self.__class__, self.name, self.version)) + + def __eq__(self, other): + # type: (Any) -> bool + if isinstance(other, self.__class__): + return self.name == other.name and self.version == other.version + return False + + # Needed for Python 2, which does not implement this by default + def __ne__(self, other): + # type: (Any) -> bool + return not self.__eq__(other) + + @property + def name(self): + # type: () -> str + return canonicalize_name(self.dist.project_name) + + @property + def version(self): + # type: () -> _BaseVersion + return self.dist.parsed_version + + @property + def is_editable(self): + # type: () -> bool + return dist_is_editable(self.dist) + + def format_for_error(self): + # type: () -> str + return "{} {} (Installed)".format(self.name, self.version) + + def iter_dependencies(self): + # type: () -> Iterable[Optional[Requirement]] + for r in self.dist.requires(): + yield self._factory.make_requirement_from_spec(str(r), self._ireq) + + def get_install_requirement(self): + # type: () -> Optional[InstallRequirement] + return None + + +class ExtrasCandidate(Candidate): + """A candidate that has 'extras', indicating additional dependencies. + + Requirements can be for a project with dependencies, something like + foo[extra]. The extras don't affect the project/version being installed + directly, but indicate that we need additional dependencies. We model that + by having an artificial ExtrasCandidate that wraps the "base" candidate. + + The ExtrasCandidate differs from the base in the following ways: + + 1. It has a unique name, of the form foo[extra]. 
This causes the resolver + to treat it as a separate node in the dependency graph. + 2. When we're getting the candidate's dependencies, + a) We specify that we want the extra dependencies as well. + b) We add a dependency on the base candidate. + See below for why this is needed. + 3. We return None for the underlying InstallRequirement, as the base + candidate will provide it, and we don't want to end up with duplicates. + + The dependency on the base candidate is needed so that the resolver can't + decide that it should recommend foo[extra1] version 1.0 and foo[extra2] + version 2.0. Having those candidates depend on foo=1.0 and foo=2.0 + respectively forces the resolver to recognise that this is a conflict. + """ + def __init__( + self, + base, # type: BaseCandidate + extras, # type: FrozenSet[str] + ): + # type: (...) -> None + self.base = base + self.extras = extras + + def __repr__(self): + # type: () -> str + return "{class_name}(base={base!r}, extras={extras!r})".format( + class_name=self.__class__.__name__, + base=self.base, + extras=self.extras, + ) + + def __hash__(self): + # type: () -> int + return hash((self.base, self.extras)) + + def __eq__(self, other): + # type: (Any) -> bool + if isinstance(other, self.__class__): + return self.base == other.base and self.extras == other.extras + return False + + # Needed for Python 2, which does not implement this by default + def __ne__(self, other): + # type: (Any) -> bool + return not self.__eq__(other) + + @property + def name(self): + # type: () -> str + """The normalised name of the project the candidate refers to""" + return format_name(self.base.name, self.extras) + + @property + def version(self): + # type: () -> _BaseVersion + return self.base.version + + def format_for_error(self): + # type: () -> str + return "{} [{}]".format( + self.base.format_for_error(), + ", ".join(sorted(self.extras)) + ) + + @property + def is_installed(self): + # type: () -> bool + return self.base.is_installed + + @property 
+ def is_editable(self): + # type: () -> bool + return self.base.is_editable + + @property + def source_link(self): + # type: () -> Optional[Link] + return self.base.source_link + + def iter_dependencies(self): + # type: () -> Iterable[Optional[Requirement]] + factory = self.base._factory + + # The user may have specified extras that the candidate doesn't + # support. We ignore any unsupported extras here. + valid_extras = self.extras.intersection(self.base.dist.extras) + invalid_extras = self.extras.difference(self.base.dist.extras) + for extra in sorted(invalid_extras): + logger.warning( + "%s %s does not provide the extra '%s'", + self.base.name, + self.version, + extra + ) + + # Add a dependency on the exact base + # (See note 2b in the class docstring) + yield factory.make_requirement_from_candidate(self.base) + + for r in self.base.dist.requires(valid_extras): + requirement = factory.make_requirement_from_spec( + str(r), self.base._ireq, valid_extras, + ) + if requirement: + yield requirement + + def get_install_requirement(self): + # type: () -> Optional[InstallRequirement] + # We don't return anything here, because we always + # depend on the base candidate, and we'll get the + # install requirement from that. + return None + + +class RequiresPythonCandidate(Candidate): + is_installed = False + source_link = None + + def __init__(self, py_version_info): + # type: (Optional[Tuple[int, ...]]) -> None + if py_version_info is not None: + version_info = normalize_version_info(py_version_info) + else: + version_info = sys.version_info[:3] + self._version = Version(".".join(str(c) for c in version_info)) + + # We don't need to implement __eq__() and __ne__() since there is always + # only one RequiresPythonCandidate in a resolution, i.e. the host Python. + # The built-in object.__eq__() and object.__ne__() do exactly what we want. + + @property + def name(self): + # type: () -> str + # Avoid conflicting with the PyPI package "Python". 
+ return "" + + @property + def version(self): + # type: () -> _BaseVersion + return self._version + + def format_for_error(self): + # type: () -> str + return "Python {}".format(self.version) + + def iter_dependencies(self): + # type: () -> Iterable[Optional[Requirement]] + return () + + def get_install_requirement(self): + # type: () -> Optional[InstallRequirement] + return None diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/factory.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/factory.py new file mode 100644 index 00000000..bd7e3efd --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/factory.py @@ -0,0 +1,459 @@ +import collections +import logging + +from pip._vendor import six +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.exceptions import ( + DistributionNotFound, + InstallationError, + UnsupportedPythonVersion, + UnsupportedWheel, +) +from pip._internal.models.wheel import Wheel +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.compatibility_tags import get_supported +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.misc import ( + dist_in_site_packages, + dist_in_usersite, + get_installed_distributions, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.virtualenv import running_under_virtualenv + +from .candidates import ( + AlreadyInstalledCandidate, + EditableCandidate, + ExtrasCandidate, + LinkCandidate, + RequiresPythonCandidate, +) +from .requirements import ( + ExplicitRequirement, + RequiresPythonRequirement, + SpecifierRequirement, +) + +if MYPY_CHECK_RUNNING: + from typing import ( + FrozenSet, + Dict, + Iterable, + List, + Optional, + Sequence, + Set, + Tuple, + TypeVar, + ) + + from pip._vendor.packaging.specifiers import SpecifierSet + from 
pip._vendor.packaging.version import _BaseVersion + from pip._vendor.pkg_resources import Distribution + from pip._vendor.resolvelib import ResolutionImpossible + + from pip._internal.cache import CacheEntry, WheelCache + from pip._internal.index.package_finder import PackageFinder + from pip._internal.models.link import Link + from pip._internal.operations.prepare import RequirementPreparer + from pip._internal.resolution.base import InstallRequirementProvider + + from .base import Candidate, Requirement + from .candidates import BaseCandidate + + C = TypeVar("C") + Cache = Dict[Link, C] + VersionCandidates = Dict[_BaseVersion, Candidate] + + +logger = logging.getLogger(__name__) + + +class Factory(object): + def __init__( + self, + finder, # type: PackageFinder + preparer, # type: RequirementPreparer + make_install_req, # type: InstallRequirementProvider + wheel_cache, # type: Optional[WheelCache] + use_user_site, # type: bool + force_reinstall, # type: bool + ignore_installed, # type: bool + ignore_requires_python, # type: bool + py_version_info=None, # type: Optional[Tuple[int, ...]] + lazy_wheel=False, # type: bool + ): + # type: (...) 
-> None + self._finder = finder + self.preparer = preparer + self._wheel_cache = wheel_cache + self._python_candidate = RequiresPythonCandidate(py_version_info) + self._make_install_req_from_spec = make_install_req + self._use_user_site = use_user_site + self._force_reinstall = force_reinstall + self._ignore_requires_python = ignore_requires_python + self.use_lazy_wheel = lazy_wheel + + self._link_candidate_cache = {} # type: Cache[LinkCandidate] + self._editable_candidate_cache = {} # type: Cache[EditableCandidate] + + if not ignore_installed: + self._installed_dists = { + canonicalize_name(dist.project_name): dist + for dist in get_installed_distributions() + } + else: + self._installed_dists = {} + + @property + def force_reinstall(self): + # type: () -> bool + return self._force_reinstall + + def _make_candidate_from_dist( + self, + dist, # type: Distribution + extras, # type: FrozenSet[str] + template, # type: InstallRequirement + ): + # type: (...) -> Candidate + base = AlreadyInstalledCandidate(dist, template, factory=self) + if extras: + return ExtrasCandidate(base, extras) + return base + + def _make_candidate_from_link( + self, + link, # type: Link + extras, # type: FrozenSet[str] + template, # type: InstallRequirement + name, # type: Optional[str] + version, # type: Optional[_BaseVersion] + ): + # type: (...) -> Candidate + # TODO: Check already installed candidate, and use it if the link and + # editable flag match. 
+ if template.editable: + if link not in self._editable_candidate_cache: + self._editable_candidate_cache[link] = EditableCandidate( + link, template, factory=self, name=name, version=version, + ) + base = self._editable_candidate_cache[link] # type: BaseCandidate + else: + if link not in self._link_candidate_cache: + self._link_candidate_cache[link] = LinkCandidate( + link, template, factory=self, name=name, version=version, + ) + base = self._link_candidate_cache[link] + if extras: + return ExtrasCandidate(base, extras) + return base + + def _iter_found_candidates( + self, + ireqs, # type: Sequence[InstallRequirement] + specifier, # type: SpecifierSet + ): + # type: (...) -> Iterable[Candidate] + if not ireqs: + return () + + # The InstallRequirement implementation requires us to give it a + # "template". Here we just choose the first requirement to represent + # all of them. + # Hopefully the Project model can correct this mismatch in the future. + template = ireqs[0] + name = canonicalize_name(template.req.name) + + hashes = Hashes() + extras = frozenset() # type: FrozenSet[str] + for ireq in ireqs: + specifier &= ireq.req.specifier + hashes |= ireq.hashes(trust_internet=False) + extras |= frozenset(ireq.extras) + + # We use this to ensure that we only yield a single candidate for + # each version (the finder's preferred one for that version). The + # requirement needs to return only one candidate per version, so we + # implement that logic here so that requirements using this helper + # don't all have to do the same thing later. + candidates = collections.OrderedDict() # type: VersionCandidates + + # Get the installed version, if it matches, unless the user + # specified `--force-reinstall`, when we want the version from + # the index instead. 
+ installed_version = None + installed_candidate = None + if not self._force_reinstall and name in self._installed_dists: + installed_dist = self._installed_dists[name] + installed_version = installed_dist.parsed_version + if specifier.contains(installed_version, prereleases=True): + installed_candidate = self._make_candidate_from_dist( + dist=installed_dist, + extras=extras, + template=template, + ) + + found = self._finder.find_best_candidate( + project_name=name, + specifier=specifier, + hashes=hashes, + ) + for ican in found.iter_applicable(): + if ican.version == installed_version and installed_candidate: + candidate = installed_candidate + else: + candidate = self._make_candidate_from_link( + link=ican.link, + extras=extras, + template=template, + name=name, + version=ican.version, + ) + candidates[ican.version] = candidate + + # Yield the installed version even if it is not found on the index. + if installed_version and installed_candidate: + candidates[installed_version] = installed_candidate + + return six.itervalues(candidates) + + def find_candidates(self, requirements, constraint): + # type: (Sequence[Requirement], SpecifierSet) -> Iterable[Candidate] + explicit_candidates = set() # type: Set[Candidate] + ireqs = [] # type: List[InstallRequirement] + for req in requirements: + cand, ireq = req.get_candidate_lookup() + if cand is not None: + explicit_candidates.add(cand) + if ireq is not None: + ireqs.append(ireq) + + # If none of the requirements want an explicit candidate, we can ask + # the finder for candidates. 
+ if not explicit_candidates: + return self._iter_found_candidates(ireqs, constraint) + + if constraint: + name = explicit_candidates.pop().name + raise InstallationError( + "Could not satisfy constraints for {!r}: installation from " + "path or url cannot be constrained to a version".format(name) + ) + + return ( + c for c in explicit_candidates + if all(req.is_satisfied_by(c) for req in requirements) + ) + + def make_requirement_from_install_req(self, ireq, requested_extras): + # type: (InstallRequirement, Iterable[str]) -> Optional[Requirement] + if not ireq.match_markers(requested_extras): + logger.info( + "Ignoring %s: markers '%s' don't match your environment", + ireq.name, ireq.markers, + ) + return None + if not ireq.link: + return SpecifierRequirement(ireq) + if ireq.link.is_wheel: + wheel = Wheel(ireq.link.filename) + if not wheel.supported(self._finder.target_python.get_tags()): + msg = "{} is not a supported wheel on this platform.".format( + wheel.filename, + ) + raise UnsupportedWheel(msg) + cand = self._make_candidate_from_link( + ireq.link, + extras=frozenset(ireq.extras), + template=ireq, + name=canonicalize_name(ireq.name) if ireq.name else None, + version=None, + ) + return self.make_requirement_from_candidate(cand) + + def make_requirement_from_candidate(self, candidate): + # type: (Candidate) -> ExplicitRequirement + return ExplicitRequirement(candidate) + + def make_requirement_from_spec( + self, + specifier, # type: str + comes_from, # type: InstallRequirement + requested_extras=(), # type: Iterable[str] + ): + # type: (...) 
-> Optional[Requirement] + ireq = self._make_install_req_from_spec(specifier, comes_from) + return self.make_requirement_from_install_req(ireq, requested_extras) + + def make_requires_python_requirement(self, specifier): + # type: (Optional[SpecifierSet]) -> Optional[Requirement] + if self._ignore_requires_python or specifier is None: + return None + return RequiresPythonRequirement(specifier, self._python_candidate) + + def get_wheel_cache_entry(self, link, name): + # type: (Link, Optional[str]) -> Optional[CacheEntry] + """Look up the link in the wheel cache. + + If ``preparer.require_hashes`` is True, don't use the wheel cache, + because cached wheels, always built locally, have different hashes + than the files downloaded from the index server and thus throw false + hash mismatches. Furthermore, cached wheels at present have + nondeterministic contents due to file modification times. + """ + if self._wheel_cache is None or self.preparer.require_hashes: + return None + return self._wheel_cache.get_cache_entry( + link=link, + package_name=name, + supported_tags=get_supported(), + ) + + def get_dist_to_uninstall(self, candidate): + # type: (Candidate) -> Optional[Distribution] + # TODO: Are there more cases this needs to return True? Editable? + dist = self._installed_dists.get(candidate.name) + if dist is None: # Not installed, no uninstallation required. + return None + + # We're installing into global site. The current installation must + # be uninstalled, no matter it's in global or user site, because the + # user site installation has precedence over global. + if not self._use_user_site: + return dist + + # We're installing into user site. Remove the user site installation. + if dist_in_usersite(dist): + return dist + + # We're installing into user site, but the installed incompatible + # package is in global site. We can't uninstall that, and would let + # the new user installation to "shadow" it. 
But shadowing won't work + # in virtual environments, so we error out. + if running_under_virtualenv() and dist_in_site_packages(dist): + raise InstallationError( + "Will not install to the user site because it will " + "lack sys.path precedence to {} in {}".format( + dist.project_name, dist.location, + ) + ) + return None + + def _report_requires_python_error( + self, + requirement, # type: RequiresPythonRequirement + template, # type: Candidate + ): + # type: (...) -> UnsupportedPythonVersion + message_format = ( + "Package {package!r} requires a different Python: " + "{version} not in {specifier!r}" + ) + message = message_format.format( + package=template.name, + version=self._python_candidate.version, + specifier=str(requirement.specifier), + ) + return UnsupportedPythonVersion(message) + + def get_installation_error(self, e): + # type: (ResolutionImpossible) -> InstallationError + + assert e.causes, "Installation error reported with no cause" + + # If one of the things we can't solve is "we need Python X.Y", + # that is what we report. + for cause in e.causes: + if isinstance(cause.requirement, RequiresPythonRequirement): + return self._report_requires_python_error( + cause.requirement, + cause.parent, + ) + + # Otherwise, we have a set of causes which can't all be satisfied + # at once. + + # The simplest case is when we have *one* cause that can't be + # satisfied. We just report that case. + if len(e.causes) == 1: + req, parent = e.causes[0] + if parent is None: + req_disp = str(req) + else: + req_disp = '{} (from {})'.format(req, parent.name) + logger.critical( + "Could not find a version that satisfies the requirement %s", + req_disp, + ) + return DistributionNotFound( + 'No matching distribution found for {}'.format(req) + ) + + # OK, we now have a list of requirements that can't all be + # satisfied at once. 
+ + # A couple of formatting helpers + def text_join(parts): + # type: (List[str]) -> str + if len(parts) == 1: + return parts[0] + + return ", ".join(parts[:-1]) + " and " + parts[-1] + + def readable_form(cand): + # type: (Candidate) -> str + return "{} {}".format(cand.name, cand.version) + + def describe_trigger(parent): + # type: (Candidate) -> str + ireq = parent.get_install_requirement() + if not ireq or not ireq.comes_from: + return "{} {}".format(parent.name, parent.version) + if isinstance(ireq.comes_from, InstallRequirement): + return str(ireq.comes_from.name) + return str(ireq.comes_from) + + triggers = [] + for req, parent in e.causes: + if parent is None: + # This is a root requirement, so we can report it directly + trigger = req.format_for_error() + else: + trigger = describe_trigger(parent) + triggers.append(trigger) + + if triggers: + info = text_join(triggers) + else: + info = "the requested packages" + + msg = "Cannot install {} because these package versions " \ + "have conflicting dependencies.".format(info) + logger.critical(msg) + msg = "\nThe conflict is caused by:" + for req, parent in e.causes: + msg = msg + "\n " + if parent: + msg = msg + "{} {} depends on ".format( + parent.name, + parent.version + ) + else: + msg = msg + "The user requested " + msg = msg + req.format_for_error() + + msg = msg + "\n\n" + \ + "To fix this you could try to:\n" + \ + "1. loosen the range of package versions you've specified\n" + \ + "2. 
remove package versions to allow pip attempt to solve " + \ + "the dependency conflict\n" + + logger.info(msg) + + return DistributionNotFound( + "ResolutionImpossible For help visit: " + "https://pip.pypa.io/en/stable/user_guide/" + "#fixing-conflicting-dependencies" + ) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/provider.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/provider.py new file mode 100644 index 00000000..72f16205 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/provider.py @@ -0,0 +1,150 @@ +from pip._vendor.packaging.specifiers import SpecifierSet +from pip._vendor.resolvelib.providers import AbstractProvider + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import ( + Any, + Dict, + Iterable, + Optional, + Sequence, + Set, + Tuple, + Union, + ) + + from .base import Requirement, Candidate + from .factory import Factory + +# Notes on the relationship between the provider, the factory, and the +# candidate and requirement classes. +# +# The provider is a direct implementation of the resolvelib class. Its role +# is to deliver the API that resolvelib expects. +# +# Rather than work with completely abstract "requirement" and "candidate" +# concepts as resolvelib does, pip has concrete classes implementing these two +# ideas. The API of Requirement and Candidate objects are defined in the base +# classes, but essentially map fairly directly to the equivalent provider +# methods. In particular, `find_matches` and `is_satisfied_by` are +# requirement methods, and `get_dependencies` is a candidate method. +# +# The factory is the interface to pip's internal mechanisms. It is stateless, +# and is created by the resolver and held as a property of the provider. 
It is +# responsible for creating Requirement and Candidate objects, and provides +# services to those objects (access to pip's finder and preparer). + + +class PipProvider(AbstractProvider): + def __init__( + self, + factory, # type: Factory + constraints, # type: Dict[str, SpecifierSet] + ignore_dependencies, # type: bool + upgrade_strategy, # type: str + user_requested, # type: Set[str] + ): + # type: (...) -> None + self._factory = factory + self._constraints = constraints + self._ignore_dependencies = ignore_dependencies + self._upgrade_strategy = upgrade_strategy + self.user_requested = user_requested + + def _sort_matches(self, matches): + # type: (Iterable[Candidate]) -> Sequence[Candidate] + + # The requirement is responsible for returning a sequence of potential + # candidates, one per version. The provider handles the logic of + # deciding the order in which these candidates should be passed to + # the resolver. + + # The `matches` argument is a sequence of candidates, one per version, + # which are potential options to be installed. The requirement will + # have already sorted out whether to give us an already-installed + # candidate or a version from PyPI (i.e., it will deal with options + # like --force-reinstall and --ignore-installed). + + # We now work out the correct order. + # + # 1. If no other considerations apply, later versions take priority. + # 2. An already installed distribution is preferred over any other, + # unless the user has requested an upgrade. + # Upgrades are allowed when: + # * The --upgrade flag is set, and + # - The project was specified on the command line, or + # - The project is a dependency and the "eager" upgrade strategy + # was requested. + def _eligible_for_upgrade(name): + # type: (str) -> bool + """Are upgrades allowed for this project? 
+ + This checks the upgrade strategy, and whether the project was one + that the user specified in the command line, in order to decide + whether we should upgrade if there's a newer version available. + + (Note that we don't need access to the `--upgrade` flag, because + an upgrade strategy of "to-satisfy-only" means that `--upgrade` + was not specified). + """ + if self._upgrade_strategy == "eager": + return True + elif self._upgrade_strategy == "only-if-needed": + return (name in self.user_requested) + return False + + def sort_key(c): + # type: (Candidate) -> int + """Return a sort key for the matches. + + The highest priority should be given to installed candidates that + are not eligible for upgrade. We use the integer value in the first + part of the key to sort these before other candidates. + + We only pull the installed candidate to the bottom (i.e. most + preferred), but otherwise keep the ordering returned by the + requirement. The requirement is responsible for returning a list + otherwise sorted for the resolver, taking account for versions + and binary preferences as specified by the user. + """ + if c.is_installed and not _eligible_for_upgrade(c.name): + return 1 + return 0 + + return sorted(matches, key=sort_key) + + def identify(self, dependency): + # type: (Union[Requirement, Candidate]) -> str + return dependency.name + + def get_preference( + self, + resolution, # type: Optional[Candidate] + candidates, # type: Sequence[Candidate] + information # type: Sequence[Tuple[Requirement, Candidate]] + ): + # type: (...) 
-> Any + # Use the "usual" value for now + return len(candidates) + + def find_matches(self, requirements): + # type: (Sequence[Requirement]) -> Iterable[Candidate] + if not requirements: + return [] + constraint = self._constraints.get( + requirements[0].name, SpecifierSet(), + ) + candidates = self._factory.find_candidates(requirements, constraint) + return reversed(self._sort_matches(candidates)) + + def is_satisfied_by(self, requirement, candidate): + # type: (Requirement, Candidate) -> bool + return requirement.is_satisfied_by(candidate) + + def get_dependencies(self, candidate): + # type: (Candidate) -> Sequence[Requirement] + if self._ignore_dependencies: + return [] + return [r for r in candidate.iter_dependencies() if r is not None] diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/requirements.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/requirements.py new file mode 100644 index 00000000..bc1061f4 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/requirements.py @@ -0,0 +1,137 @@ +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +from .base import Requirement, format_name + +if MYPY_CHECK_RUNNING: + from pip._vendor.packaging.specifiers import SpecifierSet + + from pip._internal.req.req_install import InstallRequirement + + from .base import Candidate, CandidateLookup + + +class ExplicitRequirement(Requirement): + def __init__(self, candidate): + # type: (Candidate) -> None + self.candidate = candidate + + def __repr__(self): + # type: () -> str + return "{class_name}({candidate!r})".format( + class_name=self.__class__.__name__, + candidate=self.candidate, + ) + + @property + def name(self): + # type: () -> str + # No need to canonicalise - the candidate did this + return self.candidate.name + + def format_for_error(self): + # 
type: () -> str + return self.candidate.format_for_error() + + def get_candidate_lookup(self): + # type: () -> CandidateLookup + return self.candidate, None + + def is_satisfied_by(self, candidate): + # type: (Candidate) -> bool + return candidate == self.candidate + + +class SpecifierRequirement(Requirement): + def __init__(self, ireq): + # type: (InstallRequirement) -> None + assert ireq.link is None, "This is a link, not a specifier" + self._ireq = ireq + self._extras = frozenset(ireq.extras) + + def __str__(self): + # type: () -> str + return str(self._ireq.req) + + def __repr__(self): + # type: () -> str + return "{class_name}({requirement!r})".format( + class_name=self.__class__.__name__, + requirement=str(self._ireq.req), + ) + + @property + def name(self): + # type: () -> str + canonical_name = canonicalize_name(self._ireq.req.name) + return format_name(canonical_name, self._extras) + + def format_for_error(self): + # type: () -> str + + # Convert comma-separated specifiers into "A, B, ..., F and G" + # This makes the specifier a bit more "human readable", without + # risking a change in meaning. (Hopefully! Not all edge cases have + # been checked) + parts = [s.strip() for s in str(self).split(",")] + if len(parts) == 0: + return "" + elif len(parts) == 1: + return parts[0] + + return ", ".join(parts[:-1]) + " and " + parts[-1] + + def get_candidate_lookup(self): + # type: () -> CandidateLookup + return None, self._ireq + + def is_satisfied_by(self, candidate): + # type: (Candidate) -> bool + assert candidate.name == self.name, \ + "Internal issue: Candidate is not for this requirement " \ + " {} vs {}".format(candidate.name, self.name) + # We can safely always allow prereleases here since PackageFinder + # already implements the prerelease logic, and would have filtered out + # prerelease candidates if the user does not expect them. 
+ spec = self._ireq.req.specifier + return spec.contains(candidate.version, prereleases=True) + + +class RequiresPythonRequirement(Requirement): + """A requirement representing Requires-Python metadata. + """ + def __init__(self, specifier, match): + # type: (SpecifierSet, Candidate) -> None + self.specifier = specifier + self._candidate = match + + def __repr__(self): + # type: () -> str + return "{class_name}({specifier!r})".format( + class_name=self.__class__.__name__, + specifier=str(self.specifier), + ) + + @property + def name(self): + # type: () -> str + return self._candidate.name + + def format_for_error(self): + # type: () -> str + return "Python " + str(self.specifier) + + def get_candidate_lookup(self): + # type: () -> CandidateLookup + if self.specifier.contains(self._candidate.version, prereleases=True): + return self._candidate, None + return None, None + + def is_satisfied_by(self, candidate): + # type: (Candidate) -> bool + assert candidate.name == self._candidate.name, "Not Python candidate" + # We can safely always allow prereleases here since PackageFinder + # already implements the prerelease logic, and would have filtered out + # prerelease candidates if the user does not expect them. 
+ return self.specifier.contains(candidate.version, prereleases=True) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/resolver.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/resolver.py new file mode 100644 index 00000000..43ea2486 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/resolution/resolvelib/resolver.py @@ -0,0 +1,258 @@ +import functools +import logging + +from pip._vendor import six +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.resolvelib import BaseReporter, ResolutionImpossible +from pip._vendor.resolvelib import Resolver as RLResolver + +from pip._internal.exceptions import InstallationError +from pip._internal.req.req_install import check_invalid_constraint_type +from pip._internal.req.req_set import RequirementSet +from pip._internal.resolution.base import BaseResolver +from pip._internal.resolution.resolvelib.provider import PipProvider +from pip._internal.utils.misc import dist_is_editable +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +from .factory import Factory + +if MYPY_CHECK_RUNNING: + from typing import Dict, List, Optional, Set, Tuple + + from pip._vendor.packaging.specifiers import SpecifierSet + from pip._vendor.resolvelib.resolvers import Result + from pip._vendor.resolvelib.structs import Graph + + from pip._internal.cache import WheelCache + from pip._internal.index.package_finder import PackageFinder + from pip._internal.operations.prepare import RequirementPreparer + from pip._internal.req.req_install import InstallRequirement + from pip._internal.resolution.base import InstallRequirementProvider + + +logger = logging.getLogger(__name__) + + +class Resolver(BaseResolver): + _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"} + + def __init__( + self, + preparer, # type: RequirementPreparer + finder, # type: PackageFinder + wheel_cache, 
# type: Optional[WheelCache] + make_install_req, # type: InstallRequirementProvider + use_user_site, # type: bool + ignore_dependencies, # type: bool + ignore_installed, # type: bool + ignore_requires_python, # type: bool + force_reinstall, # type: bool + upgrade_strategy, # type: str + py_version_info=None, # type: Optional[Tuple[int, ...]] + lazy_wheel=False, # type: bool + ): + super(Resolver, self).__init__() + if lazy_wheel: + logger.warning( + 'pip is using lazily downloaded wheels using HTTP ' + 'range requests to obtain dependency information. ' + 'This experimental feature is enabled through ' + '--use-feature=fast-deps and it is not ready for production.' + ) + + assert upgrade_strategy in self._allowed_strategies + + self.factory = Factory( + finder=finder, + preparer=preparer, + make_install_req=make_install_req, + wheel_cache=wheel_cache, + use_user_site=use_user_site, + force_reinstall=force_reinstall, + ignore_installed=ignore_installed, + ignore_requires_python=ignore_requires_python, + py_version_info=py_version_info, + lazy_wheel=lazy_wheel, + ) + self.ignore_dependencies = ignore_dependencies + self.upgrade_strategy = upgrade_strategy + self._result = None # type: Optional[Result] + + def resolve(self, root_reqs, check_supported_wheels): + # type: (List[InstallRequirement], bool) -> RequirementSet + + constraints = {} # type: Dict[str, SpecifierSet] + user_requested = set() # type: Set[str] + requirements = [] + for req in root_reqs: + if req.constraint: + # Ensure we only accept valid constraints + problem = check_invalid_constraint_type(req) + if problem: + raise InstallationError(problem) + + name = canonicalize_name(req.name) + if name in constraints: + constraints[name] = constraints[name] & req.specifier + else: + constraints[name] = req.specifier + else: + if req.user_supplied and req.name: + user_requested.add(canonicalize_name(req.name)) + r = self.factory.make_requirement_from_install_req( + req, requested_extras=(), + ) + if r is not 
None: + requirements.append(r) + + provider = PipProvider( + factory=self.factory, + constraints=constraints, + ignore_dependencies=self.ignore_dependencies, + upgrade_strategy=self.upgrade_strategy, + user_requested=user_requested, + ) + reporter = BaseReporter() + resolver = RLResolver(provider, reporter) + + try: + try_to_avoid_resolution_too_deep = 2000000 + self._result = resolver.resolve( + requirements, max_rounds=try_to_avoid_resolution_too_deep, + ) + + except ResolutionImpossible as e: + error = self.factory.get_installation_error(e) + six.raise_from(error, e) + + req_set = RequirementSet(check_supported_wheels=check_supported_wheels) + for candidate in self._result.mapping.values(): + ireq = candidate.get_install_requirement() + if ireq is None: + continue + + # Check if there is already an installation under the same name, + # and set a flag for later stages to uninstall it, if needed. + # * There isn't, good -- no uninstalltion needed. + # * The --force-reinstall flag is set. Always reinstall. + # * The installation is different in version or editable-ness, so + # we need to uninstall it to install the new distribution. + # * The installed version is the same as the pending distribution. + # Skip this distrubiton altogether to save work. + installed_dist = self.factory.get_dist_to_uninstall(candidate) + if installed_dist is None: + ireq.should_reinstall = False + elif self.factory.force_reinstall: + ireq.should_reinstall = True + elif installed_dist.parsed_version != candidate.version: + ireq.should_reinstall = True + elif dist_is_editable(installed_dist) != candidate.is_editable: + ireq.should_reinstall = True + else: + continue + + link = candidate.source_link + if link and link.is_yanked: + # The reason can contain non-ASCII characters, Unicode + # is required for Python 2. 
+ msg = ( + u'The candidate selected for download or install is a ' + u'yanked version: {name!r} candidate (version {version} ' + u'at {link})\nReason for being yanked: {reason}' + ).format( + name=candidate.name, + version=candidate.version, + link=link, + reason=link.yanked_reason or u'', + ) + logger.warning(msg) + + req_set.add_named_requirement(ireq) + + return req_set + + def get_installation_order(self, req_set): + # type: (RequirementSet) -> List[InstallRequirement] + """Get order for installation of requirements in RequirementSet. + + The returned list contains a requirement before another that depends on + it. This helps ensure that the environment is kept consistent as they + get installed one-by-one. + + The current implementation creates a topological ordering of the + dependency graph, while breaking any cycles in the graph at arbitrary + points. We make no guarantees about where the cycle would be broken, + other than they would be broken. + """ + assert self._result is not None, "must call resolve() first" + + graph = self._result.graph + weights = get_topological_weights(graph) + + sorted_items = sorted( + req_set.requirements.items(), + key=functools.partial(_req_set_item_sorter, weights=weights), + reverse=True, + ) + return [ireq for _, ireq in sorted_items] + + +def get_topological_weights(graph): + # type: (Graph) -> Dict[Optional[str], int] + """Assign weights to each node based on how "deep" they are. + + This implementation may change at any point in the future without prior + notice. + + We take the length for the longest path to any node from root, ignoring any + paths that contain a single node twice (i.e. cycles). This is done through + a depth-first search through the graph, while keeping track of the path to + the node. + + Cycles in the graph result would result in node being revisited while also + being it's own path. In this case, take no action. This helps ensure we + don't get stuck in a cycle. 
+ + When assigning weight, the longer path (i.e. larger length) is preferred. + """ + path = set() # type: Set[Optional[str]] + weights = {} # type: Dict[Optional[str], int] + + def visit(node): + # type: (Optional[str]) -> None + if node in path: + # We hit a cycle, so we'll break it here. + return + + # Time to visit the children! + path.add(node) + for child in graph.iter_children(node): + visit(child) + path.remove(node) + + last_known_parent_count = weights.get(node, 0) + weights[node] = max(last_known_parent_count, len(path)) + + # `None` is guaranteed to be the root node by resolvelib. + visit(None) + + # Sanity checks + assert weights[None] == 0 + assert len(weights) == len(graph) + + return weights + + +def _req_set_item_sorter( + item, # type: Tuple[str, InstallRequirement] + weights, # type: Dict[Optional[str], int] +): + # type: (...) -> Tuple[int, str] + """Key function used to sort install requirements for installation. + + Based on the "weight" mapping calculated in ``get_installation_order()``. + The canonical package name is returned as the second member as a tie- + breaker to ensure the result is predictable, which is useful in tests. 
+ """ + name = canonicalize_name(item[0]) + return weights[name], name diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/outdated.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/self_outdated_check.py similarity index 50% rename from Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/outdated.py rename to Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/self_outdated_check.py index 37c47a4a..fbd9dfd4 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/outdated.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/self_outdated_check.py @@ -1,24 +1,36 @@ from __future__ import absolute_import import datetime +import hashlib import json import logging import os.path import sys -from pip._vendor import lockfile, pkg_resources from pip._vendor.packaging import version as packaging_version - -from pip._internal.index import PackageFinder -from pip._internal.utils.compat import WINDOWS -from pip._internal.utils.filesystem import check_path_owner -from pip._internal.utils.misc import ensure_dir, get_installed_version +from pip._vendor.six import ensure_binary + +from pip._internal.index.collector import LinkCollector +from pip._internal.index.package_finder import PackageFinder +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.utils.filesystem import ( + adjacent_tmp_file, + check_path_owner, + replace, +) +from pip._internal.utils.misc import ( + ensure_dir, + get_distribution, + get_installed_version, +) +from pip._internal.utils.packaging import get_installer from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - import optparse # noqa: F401 - from typing import Any, Dict # noqa: F401 - from pip._internal.download import PipSession # noqa: F401 + import optparse + from typing import Any, Dict, Text, Union + + from 
pip._internal.network.session import PipSession SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" @@ -27,6 +39,13 @@ SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" logger = logging.getLogger(__name__) +def _get_statefile_name(key): + # type: (Union[str, Text]) -> str + key_bytes = ensure_binary(key) + name = hashlib.sha224(key_bytes).hexdigest() + return name + + class SelfCheckState(object): def __init__(self, cache_dir): # type: (str) -> None @@ -35,15 +54,22 @@ class SelfCheckState(object): # Try to load the existing state if cache_dir: - self.statefile_path = os.path.join(cache_dir, "selfcheck.json") + self.statefile_path = os.path.join( + cache_dir, "selfcheck", _get_statefile_name(self.key) + ) try: with open(self.statefile_path) as statefile: - self.state = json.load(statefile)[sys.prefix] + self.state = json.load(statefile) except (IOError, ValueError, KeyError): # Explicitly suppressing exceptions, since we don't want to # error out if the cache file is invalid. pass + @property + def key(self): + # type: () -> str + return sys.prefix + def save(self, pypi_version, current_time): # type: (str, datetime.datetime) -> None # If we do not have a path to cache in, don't bother saving. @@ -58,22 +84,26 @@ class SelfCheckState(object): # ahead and make sure that all our directories are created. ensure_dir(os.path.dirname(self.statefile_path)) - # Attempt to write out our version check file - with lockfile.LockFile(self.statefile_path): - if os.path.exists(self.statefile_path): - with open(self.statefile_path) as statefile: - state = json.load(statefile) - else: - state = {} + state = { + # Include the key so it's easy to tell which pip wrote the + # file. 
+ "key": self.key, + "last_check": current_time.strftime(SELFCHECK_DATE_FMT), + "pypi_version": pypi_version, + } + + text = json.dumps(state, sort_keys=True, separators=(",", ":")) - state[sys.prefix] = { - "last_check": current_time.strftime(SELFCHECK_DATE_FMT), - "pypi_version": pypi_version, - } + with adjacent_tmp_file(self.statefile_path) as f: + f.write(ensure_binary(text)) - with open(self.statefile_path, "w") as statefile: - json.dump(state, statefile, sort_keys=True, - separators=(",", ":")) + try: + # Since we have a prefix-specific state file, we can just + # overwrite whatever is there, no need to check. + replace(f.name, self.statefile_path) + except OSError: + # Best effort. + pass def was_installed_by_pip(pkg): @@ -83,15 +113,13 @@ def was_installed_by_pip(pkg): This is used not to display the upgrade message when pip is in fact installed by system package manager, such as dnf on Fedora. """ - try: - dist = pkg_resources.get_distribution(pkg) - return (dist.has_metadata('INSTALLER') and - 'pip' in dist.get_metadata_lines('INSTALLER')) - except pkg_resources.DistributionNotFound: + dist = get_distribution(pkg) + if not dist: return False + return "pip" == get_installer(dist) -def pip_version_check(session, options): +def pip_self_version_check(session, options): # type: (PipSession, optparse.Values) -> None """Check for an update for pip. @@ -122,41 +150,54 @@ def pip_version_check(session, options): # Refresh the version if we need to or just see if we need to warn if pypi_version is None: # Lets use PackageFinder to see what the latest pip version is - finder = PackageFinder( - find_links=options.find_links, - index_urls=[options.index_url] + options.extra_index_urls, + link_collector = LinkCollector.create( + session, + options=options, + suppress_no_index=True, + ) + + # Pass allow_yanked=False so we don't suggest upgrading to a + # yanked version. 
+ selection_prefs = SelectionPreferences( + allow_yanked=False, allow_all_prereleases=False, # Explicitly set to False - trusted_hosts=options.trusted_hosts, - session=session, ) - all_candidates = finder.find_all_candidates("pip") - if not all_candidates: - return - pypi_version = str( - max(all_candidates, key=lambda c: c.version).version + + finder = PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, ) + best_candidate = finder.find_best_candidate("pip").best_candidate + if best_candidate is None: + return + pypi_version = str(best_candidate.version) # save that we've performed a check state.save(pypi_version, current_time) remote_version = packaging_version.parse(pypi_version) + local_version_is_older = ( + pip_version < remote_version and + pip_version.base_version != remote_version.base_version and + was_installed_by_pip('pip') + ) + # Determine if our pypi_version is older - if (pip_version < remote_version and - pip_version.base_version != remote_version.base_version and - was_installed_by_pip('pip')): - # Advise "python -m pip" on Windows to avoid issues - # with overwriting pip.exe. - if WINDOWS: - pip_cmd = "python -m pip" - else: - pip_cmd = "pip" - logger.warning( - "You are using pip version %s, however version %s is " - "available.\nYou should consider upgrading via the " - "'%s install --upgrade pip' command.", - pip_version, pypi_version, pip_cmd - ) + if not local_version_is_older: + return + + # We cannot tell how the current pip is available in the current + # command context, so be pragmatic here and suggest the command + # that's always available. This does not accommodate spaces in + # `sys.executable`. 
+ pip_cmd = "{} -m pip".format(sys.executable) + logger.warning( + "You are using pip version %s; however, version %s is " + "available.\nYou should consider upgrading via the " + "'%s install --upgrade pip' command.", + pip_version, pypi_version, pip_cmd + ) except Exception: logger.debug( "There was an error checking the latest version of pip", diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/appdirs.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/appdirs.py index 9af9fa7b..3989ed31 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/appdirs.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/appdirs.py @@ -1,270 +1,44 @@ """ -This code was taken from https://github.com/ActiveState/appdirs and modified -to suit our purposes. +This code wraps the vendored appdirs module to so the return values are +compatible for the current pip code base. + +The intention is to rewrite current usages gradually, keeping the tests pass, +and eventually drop this after all usages are changed. """ + from __future__ import absolute_import import os -import sys -from pip._vendor.six import PY2, text_type +from pip._vendor import appdirs as _appdirs -from pip._internal.utils.compat import WINDOWS, expanduser from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 - List, Union - ) + from typing import List def user_cache_dir(appname): # type: (str) -> str - r""" - Return full path to the user-specific cache dir for this application. - - "appname" is the name of application. - - Typical user cache directories are: - macOS: ~/Library/Caches/ - Unix: ~/.cache/ (XDG default) - Windows: C:\Users\\AppData\Local\\Cache - - On Windows the only suggestion in the MSDN docs is that local settings go - in the `CSIDL_LOCAL_APPDATA` directory. 
This is identical to the - non-roaming app data dir (the default returned by `user_data_dir`). Apps - typically put cache data somewhere *under* the given dir here. Some - examples: - ...\Mozilla\Firefox\Profiles\\Cache - ...\Acme\SuperApp\Cache\1.0 - - OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. - """ - if WINDOWS: - # Get the base path - path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) - - # When using Python 2, return paths as bytes on Windows like we do on - # other operating systems. See helper function docs for more details. - if PY2 and isinstance(path, text_type): - path = _win_path_to_bytes(path) - - # Add our app name and Cache directory to it - path = os.path.join(path, appname, "Cache") - elif sys.platform == "darwin": - # Get the base path - path = expanduser("~/Library/Caches") - - # Add our app name to it - path = os.path.join(path, appname) - else: - # Get the base path - path = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache")) - - # Add our app name to it - path = os.path.join(path, appname) - - return path - - -def user_data_dir(appname, roaming=False): - # type: (str, bool) -> str - r""" - Return full path to the user-specific data dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "roaming" (boolean, default False) can be set True to use the Windows - roaming appdata directory. That means that for users on a Windows - network setup for roaming profiles, this user data will be - sync'd on login. See - - for a discussion of issues. - - Typical user data directories are: - macOS: ~/Library/Application Support/ - if it exists, else ~/.config/ - Unix: ~/.local/share/ # or in - $XDG_DATA_HOME, if defined - Win XP (not roaming): C:\Documents and Settings\\ ... - ...Application Data\ - Win XP (roaming): C:\Documents and Settings\\Local ... 
- ...Settings\Application Data\ - Win 7 (not roaming): C:\\Users\\AppData\Local\ - Win 7 (roaming): C:\\Users\\AppData\Roaming\ - - For Unix, we follow the XDG spec and support $XDG_DATA_HOME. - That means, by default "~/.local/share/". - """ - if WINDOWS: - const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" - path = os.path.join(os.path.normpath(_get_win_folder(const)), appname) - elif sys.platform == "darwin": - path = os.path.join( - expanduser('~/Library/Application Support/'), - appname, - ) if os.path.isdir(os.path.join( - expanduser('~/Library/Application Support/'), - appname, - ) - ) else os.path.join( - expanduser('~/.config/'), - appname, - ) - else: - path = os.path.join( - os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")), - appname, - ) - - return path + return _appdirs.user_cache_dir(appname, appauthor=False) def user_config_dir(appname, roaming=True): # type: (str, bool) -> str - """Return full path to the user-specific config dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "roaming" (boolean, default True) can be set False to not use the - Windows roaming appdata directory. That means that for users on a - Windows network setup for roaming profiles, this user data will be - sync'd on login. See - - for a discussion of issues. - - Typical user data directories are: - macOS: same as user_data_dir - Unix: ~/.config/ - Win *: same as user_data_dir - - For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. - That means, by default "~/.config/". 
- """ - if WINDOWS: - path = user_data_dir(appname, roaming=roaming) - elif sys.platform == "darwin": - path = user_data_dir(appname) - else: - path = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config")) - path = os.path.join(path, appname) - + path = _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming) + if _appdirs.system == "darwin" and not os.path.isdir(path): + path = os.path.expanduser('~/.config/') + if appname: + path = os.path.join(path, appname) return path -# for the discussion regarding site_config_dirs locations +# for the discussion regarding site_config_dir locations # see def site_config_dirs(appname): # type: (str) -> List[str] - r"""Return a list of potential user-shared config dirs for this application. - - "appname" is the name of application. - - Typical user config directories are: - macOS: /Library/Application Support// - Unix: /etc or $XDG_CONFIG_DIRS[i]// for each value in - $XDG_CONFIG_DIRS - Win XP: C:\Documents and Settings\All Users\Application ... - ...Data\\ - Vista: (Fail! "C:\ProgramData" is a hidden *system* directory - on Vista.) 
- Win 7: Hidden, but writeable on Win 7: - C:\ProgramData\\ - """ - if WINDOWS: - path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) - pathlist = [os.path.join(path, appname)] - elif sys.platform == 'darwin': - pathlist = [os.path.join('/Library/Application Support', appname)] - else: - # try looking in $XDG_CONFIG_DIRS - xdg_config_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') - if xdg_config_dirs: - pathlist = [ - os.path.join(expanduser(x), appname) - for x in xdg_config_dirs.split(os.pathsep) - ] - else: - pathlist = [] - + dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True) + if _appdirs.system not in ["win32", "darwin"]: # always look in /etc directly as well - pathlist.append('/etc') - - return pathlist - - -# -- Windows support functions -- - -def _get_win_folder_from_registry(csidl_name): - # type: (str) -> str - """ - This is a fallback technique at best. I'm not sure if using the - registry for this guarantees us the correct answer for all CSIDL_* - names. - """ - import _winreg - - shell_folder_name = { - "CSIDL_APPDATA": "AppData", - "CSIDL_COMMON_APPDATA": "Common AppData", - "CSIDL_LOCAL_APPDATA": "Local AppData", - }[csidl_name] - - key = _winreg.OpenKey( - _winreg.HKEY_CURRENT_USER, - r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" - ) - directory, _type = _winreg.QueryValueEx(key, shell_folder_name) - return directory - - -def _get_win_folder_with_ctypes(csidl_name): - # type: (str) -> str - csidl_const = { - "CSIDL_APPDATA": 26, - "CSIDL_COMMON_APPDATA": 35, - "CSIDL_LOCAL_APPDATA": 28, - }[csidl_name] - - buf = ctypes.create_unicode_buffer(1024) - ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) - - # Downgrade to short path name if have highbit chars. See - # . 
- has_high_char = False - for c in buf: - if ord(c) > 255: - has_high_char = True - break - if has_high_char: - buf2 = ctypes.create_unicode_buffer(1024) - if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): - buf = buf2 - - return buf.value - - -if WINDOWS: - try: - import ctypes - _get_win_folder = _get_win_folder_with_ctypes - except ImportError: - _get_win_folder = _get_win_folder_from_registry - - -def _win_path_to_bytes(path): - """Encode Windows paths to bytes. Only used on Python 2. - - Motivation is to be consistent with other operating systems where paths - are also returned as bytes. This avoids problems mixing bytes and Unicode - elsewhere in the codebase. For more details and discussion see - . - - If encoding using ASCII and MBCS fails, return the original Unicode path. - """ - for encoding in ('ASCII', 'MBCS'): - try: - return path.encode(encoding) - except (UnicodeEncodeError, LookupError): - pass - return path + return dirval.split(os.pathsep) + ['/etc'] + return [dirval] diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/compat.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/compat.py index 2d8b3bf0..89c5169a 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/compat.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/compat.py @@ -1,5 +1,9 @@ """Stuff that differs in different Python versions and platform distributions.""" + +# The following comment should be removed at some point in the future. 
+# mypy: disallow-untyped-defs=False + from __future__ import absolute_import, division import codecs @@ -9,12 +13,12 @@ import os import shutil import sys -from pip._vendor.six import text_type +from pip._vendor.six import PY2, text_type from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Tuple, Text # noqa: F401 + from typing import Optional, Text, Tuple, Union try: import ipaddress @@ -28,18 +32,14 @@ except ImportError: __all__ = [ - "ipaddress", "uses_pycache", "console_to_str", "native_str", + "ipaddress", "uses_pycache", "console_to_str", "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile", "get_terminal_size", - "get_extension_suffixes", ] logger = logging.getLogger(__name__) -if sys.version_info >= (3, 4): - uses_pycache = True - from importlib.util import cache_from_source -else: +if PY2: import imp try: @@ -49,41 +49,66 @@ else: cache_from_source = None uses_pycache = cache_from_source is not None +else: + uses_pycache = True + from importlib.util import cache_from_source -if sys.version_info >= (3, 5): - backslashreplace_decode = "backslashreplace" -else: - # In version 3.4 and older, backslashreplace exists +if PY2: + # In Python 2.7, backslashreplace exists # but does not support use for decoding. # We implement our own replace handler for this # situation, so that we can consistently use # backslash replacement for all versions. 
def backslashreplace_decode_fn(err): raw_bytes = (err.object[i] for i in range(err.start, err.end)) - if sys.version_info[0] == 2: - # Python 2 gave us characters - convert to numeric bytes - raw_bytes = (ord(b) for b in raw_bytes) - return u"".join(u"\\x%x" % c for c in raw_bytes), err.end + # Python 2 gave us characters - convert to numeric bytes + raw_bytes = (ord(b) for b in raw_bytes) + return u"".join(map(u"\\x{:x}".format, raw_bytes)), err.end codecs.register_error( "backslashreplace_decode", backslashreplace_decode_fn, ) backslashreplace_decode = "backslashreplace_decode" +else: + backslashreplace_decode = "backslashreplace" -def console_to_str(data): - # type: (bytes) -> Text - """Return a string, safe for output, of subprocess output. +def has_tls(): + # type: () -> bool + try: + import _ssl # noqa: F401 # ignore unused + return True + except ImportError: + pass + + from pip._vendor.urllib3.util import IS_PYOPENSSL + return IS_PYOPENSSL - We assume the data is in the locale preferred encoding. - If it won't decode properly, we warn the user but decode as - best we can. - We also ensure that the output can be safely written to - standard output without encoding errors. +def str_to_display(data, desc=None): + # type: (Union[bytes, Text], Optional[str]) -> Text """ + For display or logging purposes, convert a bytes object (or text) to + text (e.g. unicode in Python 2) safe for output. + :param desc: An optional phrase describing the input data, for use in + the log message if a warning is logged. Defaults to "Bytes object". + + This function should never error out and so can take a best effort + approach. It is okay to be lossy if needed since the return value is + just for display. + + We assume the data is in the locale preferred encoding. If it won't + decode properly, we warn the user but decode as best we can. + + We also ensure that the output can be safely written to standard output + without encoding errors. 
+ """ + if isinstance(data, text_type): + return data + + # Otherwise, data is a bytes object (str in Python 2). # First, get the encoding we assume. This is the preferred # encoding for the locale, unless that is not found, or # it is ASCII, in which case assume UTF-8 @@ -97,7 +122,8 @@ def console_to_str(data): decoded_data = data.decode(encoding) except UnicodeDecodeError: logger.warning( - "Subprocess output does not appear to be encoded as %s", + '%s does not appear to be encoded as %s', + desc or 'Bytes object', encoding, ) decoded_data = data.decode(encoding, errors=backslashreplace_decode) @@ -127,20 +153,11 @@ def console_to_str(data): return decoded_data -if sys.version_info >= (3,): - def native_str(s, replace=False): - # type: (str, bool) -> str - if isinstance(s, bytes): - return s.decode('utf-8', 'replace' if replace else 'strict') - return s - -else: - def native_str(s, replace=False): - # type: (str, bool) -> str - # Replace is ignored -- unicode to UTF-8 can't fail - if isinstance(s, text_type): - return s.encode('utf-8') - return s +def console_to_str(data): + # type: (bytes) -> Text + """Return a string, safe for output, of subprocess output. 
+ """ + return str_to_display(data, desc='Subprocess output') def get_path_uid(path): @@ -168,23 +185,12 @@ def get_path_uid(path): else: # raise OSError for parity with os.O_NOFOLLOW above raise OSError( - "%s is a symlink; Will not return uid for symlinks" % path + "{} is a symlink; Will not return uid for symlinks".format( + path) ) return file_uid -if sys.version_info >= (3, 4): - from importlib.machinery import EXTENSION_SUFFIXES - - def get_extension_suffixes(): - return EXTENSION_SUFFIXES -else: - from imp import get_suffixes - - def get_extension_suffixes(): - return [suffix[0] for suffix in get_suffixes()] - - def expanduser(path): # type: (str) -> str """ @@ -253,12 +259,13 @@ else: return cr cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2) if not cr: - try: - fd = os.open(os.ctermid(), os.O_RDONLY) - cr = ioctl_GWINSZ(fd) - os.close(fd) - except Exception: - pass + if sys.platform != "win32": + try: + fd = os.open(os.ctermid(), os.O_RDONLY) + cr = ioctl_GWINSZ(fd) + os.close(fd) + except Exception: + pass if not cr: cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80)) return int(cr[1]), int(cr[0]) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/compatibility_tags.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/compatibility_tags.py new file mode 100644 index 00000000..4f21874e --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/compatibility_tags.py @@ -0,0 +1,166 @@ +"""Generate and work with PEP 425 Compatibility Tags. 
+""" + +from __future__ import absolute_import + +import re + +from pip._vendor.packaging.tags import ( + Tag, + compatible_tags, + cpython_tags, + generic_tags, + interpreter_name, + interpreter_version, + mac_platforms, +) + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional, Tuple + + from pip._vendor.packaging.tags import PythonVersion + +_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)') + + +def version_info_to_nodot(version_info): + # type: (Tuple[int, ...]) -> str + # Only use up to the first two numbers. + return ''.join(map(str, version_info[:2])) + + +def _mac_platforms(arch): + # type: (str) -> List[str] + match = _osx_arch_pat.match(arch) + if match: + name, major, minor, actual_arch = match.groups() + mac_version = (int(major), int(minor)) + arches = [ + # Since we have always only checked that the platform starts + # with "macosx", for backwards-compatibility we extract the + # actual prefix provided by the user in case they provided + # something like "macosxcustom_". It may be good to remove + # this as undocumented or deprecate it in the future. + '{}_{}'.format(name, arch[len('macosx_'):]) + for arch in mac_platforms(mac_version, actual_arch) + ] + else: + # arch pattern didn't match (?!) + arches = [arch] + return arches + + +def _custom_manylinux_platforms(arch): + # type: (str) -> List[str] + arches = [arch] + arch_prefix, arch_sep, arch_suffix = arch.partition('_') + if arch_prefix == 'manylinux2014': + # manylinux1/manylinux2010 wheels run on most manylinux2014 systems + # with the exception of wheels depending on ncurses. 
PEP 599 states + # manylinux1/manylinux2010 wheels should be considered + # manylinux2014 wheels: + # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels + if arch_suffix in {'i686', 'x86_64'}: + arches.append('manylinux2010' + arch_sep + arch_suffix) + arches.append('manylinux1' + arch_sep + arch_suffix) + elif arch_prefix == 'manylinux2010': + # manylinux1 wheels run on most manylinux2010 systems with the + # exception of wheels depending on ncurses. PEP 571 states + # manylinux1 wheels should be considered manylinux2010 wheels: + # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels + arches.append('manylinux1' + arch_sep + arch_suffix) + return arches + + +def _get_custom_platforms(arch): + # type: (str) -> List[str] + arch_prefix, arch_sep, arch_suffix = arch.partition('_') + if arch.startswith('macosx'): + arches = _mac_platforms(arch) + elif arch_prefix in ['manylinux2014', 'manylinux2010']: + arches = _custom_manylinux_platforms(arch) + else: + arches = [arch] + return arches + + +def _get_python_version(version): + # type: (str) -> PythonVersion + if len(version) > 1: + return int(version[0]), int(version[1:]) + else: + return (int(version[0]),) + + +def _get_custom_interpreter(implementation=None, version=None): + # type: (Optional[str], Optional[str]) -> str + if implementation is None: + implementation = interpreter_name() + if version is None: + version = interpreter_version() + return "{}{}".format(implementation, version) + + +def get_supported( + version=None, # type: Optional[str] + platform=None, # type: Optional[str] + impl=None, # type: Optional[str] + abi=None # type: Optional[str] +): + # type: (...) -> List[Tag] + """Return a list of supported tags for each version specified in + `versions`. + + :param version: a string version, of the form "33" or "32", + or None. The version will be assumed to support our ABI. 
+ :param platform: specify the exact platform you want valid + tags for, or None. If None, use the local system platform. + :param impl: specify the exact implementation you want valid + tags for, or None. If None, use the local interpreter impl. + :param abi: specify the exact abi you want valid + tags for, or None. If None, use the local interpreter abi. + """ + supported = [] # type: List[Tag] + + python_version = None # type: Optional[PythonVersion] + if version is not None: + python_version = _get_python_version(version) + + interpreter = _get_custom_interpreter(impl, version) + + abis = None # type: Optional[List[str]] + if abi is not None: + abis = [abi] + + platforms = None # type: Optional[List[str]] + if platform is not None: + platforms = _get_custom_platforms(platform) + + is_cpython = (impl or interpreter_name()) == "cp" + if is_cpython: + supported.extend( + cpython_tags( + python_version=python_version, + abis=abis, + platforms=platforms, + ) + ) + else: + supported.extend( + generic_tags( + interpreter=interpreter, + abis=abis, + platforms=platforms, + ) + ) + supported.extend( + compatible_tags( + python_version=python_version, + interpreter=interpreter, + platforms=platforms, + ) + ) + + return supported diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/datetime.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/datetime.py new file mode 100644 index 00000000..4d0503c2 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/datetime.py @@ -0,0 +1,14 @@ +"""For when pip wants to check the date or time. 
+""" + +from __future__ import absolute_import + +import datetime + + +def today_is_later_than(year, month, day): + # type: (int, int, int) -> bool + today = datetime.date.today() + given = datetime.date(year, month, day) + + return today > given diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/deprecation.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/deprecation.py index 0beaf74b..2f20cfd4 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/deprecation.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/deprecation.py @@ -1,6 +1,10 @@ """ A module that implements tooling to enable easy warnings about deprecations. """ + +# The following comment should be removed at some point in the future. +# mypy: disallow-untyped-defs=False + from __future__ import absolute_import import logging @@ -12,7 +16,10 @@ from pip import __version__ as current_version from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Any, Optional # noqa: F401 + from typing import Any, Optional + + +DEPRECATION_MSG_PREFIX = "DEPRECATION: " class PipDeprecationWarning(Warning): @@ -75,16 +82,23 @@ def deprecated(reason, replacement, gone_in, issue=None): """ # Construct a nice message. - # This is purposely eagerly formatted as we want it to appear as if someone - # typed this entire message out. - message = "DEPRECATION: " + reason - if replacement is not None: - message += " A possible replacement is {}.".format(replacement) - if issue is not None: - url = "https://github.com/pypa/pip/issues/" + str(issue) - message += " You can find discussion regarding this at {}.".format(url) + # This is eagerly formatted as we want it to get logged as if someone + # typed this entire message out. 
+ sentences = [ + (reason, DEPRECATION_MSG_PREFIX + "{}"), + (gone_in, "pip {} will remove support for this functionality."), + (replacement, "A possible replacement is {}."), + (issue, ( + "You can find discussion regarding this at " + "https://github.com/pypa/pip/issues/{}." + )), + ] + message = " ".join( + template.format(val) for val, template in sentences if val is not None + ) # Raise as an error if it has to be removed. if gone_in is not None and parse(current_version) >= parse(gone_in): raise PipDeprecationWarning(message) + warnings.warn(message, category=PipDeprecationWarning, stacklevel=2) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/direct_url_helpers.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/direct_url_helpers.py new file mode 100644 index 00000000..f1fe209e --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/direct_url_helpers.py @@ -0,0 +1,130 @@ +import logging + +from pip._internal.models.direct_url import ( + DIRECT_URL_METADATA_NAME, + ArchiveInfo, + DirectUrl, + DirectUrlValidationError, + DirInfo, + VcsInfo, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.vcs import vcs + +try: + from json import JSONDecodeError +except ImportError: + # PY2 + JSONDecodeError = ValueError # type: ignore + +if MYPY_CHECK_RUNNING: + from typing import Optional + + from pip._internal.models.link import Link + + from pip._vendor.pkg_resources import Distribution + +logger = logging.getLogger(__name__) + + +def direct_url_as_pep440_direct_reference(direct_url, name): + # type: (DirectUrl, str) -> str + """Convert a DirectUrl to a pip requirement string.""" + direct_url.validate() # if invalid, this is a pip bug + requirement = name + " @ " + fragments = [] + if isinstance(direct_url.info, VcsInfo): + requirement += "{}+{}@{}".format( + direct_url.info.vcs, direct_url.url, direct_url.info.commit_id + ) + elif 
isinstance(direct_url.info, ArchiveInfo): + requirement += direct_url.url + if direct_url.info.hash: + fragments.append(direct_url.info.hash) + else: + assert isinstance(direct_url.info, DirInfo) + # pip should never reach this point for editables, since + # pip freeze inspects the editable project location to produce + # the requirement string + assert not direct_url.info.editable + requirement += direct_url.url + if direct_url.subdirectory: + fragments.append("subdirectory=" + direct_url.subdirectory) + if fragments: + requirement += "#" + "&".join(fragments) + return requirement + + +def direct_url_from_link(link, source_dir=None, link_is_in_wheel_cache=False): + # type: (Link, Optional[str], bool) -> DirectUrl + if link.is_vcs: + vcs_backend = vcs.get_backend_for_scheme(link.scheme) + assert vcs_backend + url, requested_revision, _ = ( + vcs_backend.get_url_rev_and_auth(link.url_without_fragment) + ) + # For VCS links, we need to find out and add commit_id. + if link_is_in_wheel_cache: + # If the requested VCS link corresponds to a cached + # wheel, it means the requested revision was an + # immutable commit hash, otherwise it would not have + # been cached. In that case we don't have a source_dir + # with the VCS checkout. + assert requested_revision + commit_id = requested_revision + else: + # If the wheel was not in cache, it means we have + # had to checkout from VCS to build and we have a source_dir + # which we can inspect to find out the commit id. 
+ assert source_dir + commit_id = vcs_backend.get_revision(source_dir) + return DirectUrl( + url=url, + info=VcsInfo( + vcs=vcs_backend.name, + commit_id=commit_id, + requested_revision=requested_revision, + ), + subdirectory=link.subdirectory_fragment, + ) + elif link.is_existing_dir(): + return DirectUrl( + url=link.url_without_fragment, + info=DirInfo(), + subdirectory=link.subdirectory_fragment, + ) + else: + hash = None + hash_name = link.hash_name + if hash_name: + hash = "{}={}".format(hash_name, link.hash) + return DirectUrl( + url=link.url_without_fragment, + info=ArchiveInfo(hash=hash), + subdirectory=link.subdirectory_fragment, + ) + + +def dist_get_direct_url(dist): + # type: (Distribution) -> Optional[DirectUrl] + """Obtain a DirectUrl from a pkg_resource.Distribution. + + Returns None if the distribution has no `direct_url.json` metadata, + or if `direct_url.json` is invalid. + """ + if not dist.has_metadata(DIRECT_URL_METADATA_NAME): + return None + try: + return DirectUrl.from_json(dist.get_metadata(DIRECT_URL_METADATA_NAME)) + except ( + DirectUrlValidationError, + JSONDecodeError, + UnicodeDecodeError + ) as e: + logger.warning( + "Error parsing %s for %s: %s", + DIRECT_URL_METADATA_NAME, + dist.project_name, + e, + ) + return None diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/distutils_args.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/distutils_args.py new file mode 100644 index 00000000..e38e402d --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/distutils_args.py @@ -0,0 +1,48 @@ +from distutils.errors import DistutilsArgError +from distutils.fancy_getopt import FancyGetopt + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Dict, List + + +_options = [ + ("exec-prefix=", None, ""), + ("home=", None, ""), + ("install-base=", None, ""), + ("install-data=", None, ""), + 
("install-headers=", None, ""), + ("install-lib=", None, ""), + ("install-platlib=", None, ""), + ("install-purelib=", None, ""), + ("install-scripts=", None, ""), + ("prefix=", None, ""), + ("root=", None, ""), + ("user", None, ""), +] + + +# typeshed doesn't permit Tuple[str, None, str], see python/typeshed#3469. +_distutils_getopt = FancyGetopt(_options) # type: ignore + + +def parse_distutils_args(args): + # type: (List[str]) -> Dict[str, str] + """Parse provided arguments, returning an object that has the + matched arguments. + + Any unknown arguments are ignored. + """ + result = {} + for arg in args: + try: + _, match = _distutils_getopt.getopt(args=[arg]) + except DistutilsArgError: + # We don't care about any other options, which here may be + # considered unrecognized since our option list is not + # exhaustive. + pass + else: + result.update(match.__dict__) + return result diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/encoding.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/encoding.py index d36defad..5b83d61b 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/encoding.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/encoding.py @@ -6,16 +6,16 @@ import sys from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import List, Tuple, Text # noqa: F401 + from typing import List, Tuple, Text BOMS = [ - (codecs.BOM_UTF8, 'utf8'), - (codecs.BOM_UTF16, 'utf16'), - (codecs.BOM_UTF16_BE, 'utf16-be'), - (codecs.BOM_UTF16_LE, 'utf16-le'), - (codecs.BOM_UTF32, 'utf32'), - (codecs.BOM_UTF32_BE, 'utf32-be'), - (codecs.BOM_UTF32_LE, 'utf32-le'), + (codecs.BOM_UTF8, 'utf-8'), + (codecs.BOM_UTF16, 'utf-16'), + (codecs.BOM_UTF16_BE, 'utf-16-be'), + (codecs.BOM_UTF16_LE, 'utf-16-le'), + (codecs.BOM_UTF32, 'utf-32'), + (codecs.BOM_UTF32_BE, 'utf-32-be'), + (codecs.BOM_UTF32_LE, 'utf-32-le'), ] # type: 
List[Tuple[bytes, Text]] ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)') @@ -32,7 +32,9 @@ def auto_decode(data): # Lets check the first two lines as in PEP263 for line in data.split(b'\n')[:2]: if line[0:1] == b'#' and ENCODING_RE.search(line): - encoding = ENCODING_RE.search(line).groups()[0].decode('ascii') + result = ENCODING_RE.search(line) + assert result is not None + encoding = result.groups()[0].decode('ascii') return data.decode(encoding) return data.decode( locale.getpreferredencoding(False) or sys.getdefaultencoding(), diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/entrypoints.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/entrypoints.py new file mode 100644 index 00000000..befd01c8 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/entrypoints.py @@ -0,0 +1,31 @@ +import sys + +from pip._internal.cli.main import main +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional, List + + +def _wrapper(args=None): + # type: (Optional[List[str]]) -> int + """Central wrapper for all old entrypoints. + + Historically pip has had several entrypoints defined. Because of issues + arising from PATH, sys.path, multiple Pythons, their interactions, and most + of them having a pip installed, users suffer every time an entrypoint gets + moved. + + To alleviate this pain, and provide a mechanism for warning users and + directing them to an appropriate place for help, we now define all of + our old entrypoints as wrappers for the current one. + """ + sys.stderr.write( + "WARNING: pip is being invoked by an old script wrapper. 
This will " + "fail in a future version of pip.\n" + "Please see https://github.com/pypa/pip/issues/5599 for advice on " + "fixing the underlying issue.\n" + "To avoid this problem you can invoke Python with '-m pip' instead of " + "running pip directly.\n" + ) + return main(args) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/filesystem.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/filesystem.py index 1e6b0338..303243fd 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/filesystem.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/filesystem.py @@ -1,16 +1,42 @@ +import errno +import fnmatch import os import os.path +import random +import shutil +import stat +import sys +from contextlib import contextmanager +from tempfile import NamedTemporaryFile + +# NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is +# why we ignore the type on this import. +from pip._vendor.retrying import retry # type: ignore +from pip._vendor.six import PY2 from pip._internal.utils.compat import get_path_uid +from pip._internal.utils.misc import format_size +from pip._internal.utils.typing import MYPY_CHECK_RUNNING, cast + +if MYPY_CHECK_RUNNING: + from typing import Any, BinaryIO, Iterator, List, Union + + class NamedTemporaryFileResult(BinaryIO): + @property + def file(self): + # type: () -> BinaryIO + pass def check_path_owner(path): # type: (str) -> bool # If we don't have a way to check the effective uid of this process, then # we'll just assume that we own the directory. 
- if not hasattr(os, "geteuid"): + if sys.platform == "win32" or not hasattr(os, "geteuid"): return True + assert os.path.isabs(path) + previous = None while path != previous: if os.path.lexists(path): @@ -28,3 +54,171 @@ def check_path_owner(path): else: previous, path = path, os.path.dirname(path) return False # assume we don't own the path + + +def copy2_fixed(src, dest): + # type: (str, str) -> None + """Wrap shutil.copy2() but map errors copying socket files to + SpecialFileError as expected. + + See also https://bugs.python.org/issue37700. + """ + try: + shutil.copy2(src, dest) + except (OSError, IOError): + for f in [src, dest]: + try: + is_socket_file = is_socket(f) + except OSError: + # An error has already occurred. Another error here is not + # a problem and we can ignore it. + pass + else: + if is_socket_file: + raise shutil.SpecialFileError( + "`{f}` is a socket".format(**locals())) + + raise + + +def is_socket(path): + # type: (str) -> bool + return stat.S_ISSOCK(os.lstat(path).st_mode) + + +@contextmanager +def adjacent_tmp_file(path, **kwargs): + # type: (str, **Any) -> Iterator[NamedTemporaryFileResult] + """Return a file-like object pointing to a tmp file next to path. + + The file is created securely and is ensured to be written to disk + after the context reaches its end. + + kwargs will be passed to tempfile.NamedTemporaryFile to control + the way the temporary file will be opened. 
+ """ + with NamedTemporaryFile( + delete=False, + dir=os.path.dirname(path), + prefix=os.path.basename(path), + suffix='.tmp', + **kwargs + ) as f: + result = cast('NamedTemporaryFileResult', f) + try: + yield result + finally: + result.file.flush() + os.fsync(result.file.fileno()) + + +_replace_retry = retry(stop_max_delay=1000, wait_fixed=250) + +if PY2: + @_replace_retry + def replace(src, dest): + # type: (str, str) -> None + try: + os.rename(src, dest) + except OSError: + os.remove(dest) + os.rename(src, dest) + +else: + replace = _replace_retry(os.replace) + + +# test_writable_dir and _test_writable_dir_win are copied from Flit, +# with the author's agreement to also place them under pip's license. +def test_writable_dir(path): + # type: (str) -> bool + """Check if a directory is writable. + + Uses os.access() on POSIX, tries creating files on Windows. + """ + # If the directory doesn't exist, find the closest parent that does. + while not os.path.isdir(path): + parent = os.path.dirname(path) + if parent == path: + break # Should never get here, but infinite loops are bad + path = parent + + if os.name == 'posix': + return os.access(path, os.W_OK) + + return _test_writable_dir_win(path) + + +def _test_writable_dir_win(path): + # type: (str) -> bool + # os.access doesn't work on Windows: http://bugs.python.org/issue2528 + # and we can't use tempfile: http://bugs.python.org/issue22107 + basename = 'accesstest_deleteme_fishfingers_custard_' + alphabet = 'abcdefghijklmnopqrstuvwxyz0123456789' + for _ in range(10): + name = basename + ''.join(random.choice(alphabet) for _ in range(6)) + file = os.path.join(path, name) + try: + fd = os.open(file, os.O_RDWR | os.O_CREAT | os.O_EXCL) + # Python 2 doesn't support FileExistsError and PermissionError. 
+ except OSError as e: + # exception FileExistsError + if e.errno == errno.EEXIST: + continue + # exception PermissionError + if e.errno == errno.EPERM or e.errno == errno.EACCES: + # This could be because there's a directory with the same name. + # But it's highly unlikely there's a directory called that, + # so we'll assume it's because the parent dir is not writable. + # This could as well be because the parent dir is not readable, + # due to non-privileged user access. + return False + raise + else: + os.close(fd) + os.unlink(file) + return True + + # This should never be reached + raise EnvironmentError( + 'Unexpected condition testing for writable directory' + ) + + +def find_files(path, pattern): + # type: (str, str) -> List[str] + """Returns a list of absolute paths of files beneath path, recursively, + with filenames which match the UNIX-style shell glob pattern.""" + result = [] # type: List[str] + for root, _, files in os.walk(path): + matches = fnmatch.filter(files, pattern) + result.extend(os.path.join(root, f) for f in matches) + return result + + +def file_size(path): + # type: (str) -> Union[int, float] + # If it's a symlink, return 0. 
+ if os.path.islink(path): + return 0 + return os.path.getsize(path) + + +def format_file_size(path): + # type: (str) -> str + return format_size(file_size(path)) + + +def directory_size(path): + # type: (str) -> Union[int, float] + size = 0.0 + for root, _dirs, files in os.walk(path): + for filename in files: + file_path = os.path.join(root, filename) + size += file_size(file_path) + return size + + +def format_directory_size(path): + # type: (str) -> str + return format_size(directory_size(path)) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/filetypes.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/filetypes.py new file mode 100644 index 00000000..daa0ca77 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/filetypes.py @@ -0,0 +1,16 @@ +"""Filetype information. +""" +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Tuple + +WHEEL_EXTENSION = '.whl' +BZ2_EXTENSIONS = ('.tar.bz2', '.tbz') # type: Tuple[str, ...] +XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', + '.tar.lz', '.tar.lzma') # type: Tuple[str, ...] +ZIP_EXTENSIONS = ('.zip', WHEEL_EXTENSION) # type: Tuple[str, ...] +TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar') # type: Tuple[str, ...] +ARCHIVE_EXTENSIONS = ( + ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS +) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/glibc.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/glibc.py index 8a51f695..36104244 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/glibc.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/glibc.py @@ -1,18 +1,49 @@ +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False + from __future__ import absolute_import -import ctypes -import re -import warnings +import os +import sys from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Optional, Tuple # noqa: F401 + from typing import Optional, Tuple def glibc_version_string(): # type: () -> Optional[str] "Returns glibc version string, or None if not using glibc." + return glibc_version_string_confstr() or glibc_version_string_ctypes() + + +def glibc_version_string_confstr(): + # type: () -> Optional[str] + "Primary implementation of glibc_version_string using os.confstr." + # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely + # to be broken or missing. This strategy is used in the standard library + # platform module: + # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183 + if sys.platform == "win32": + return None + try: + # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17": + _, version = os.confstr("CS_GNU_LIBC_VERSION").split() + except (AttributeError, OSError, ValueError): + # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... + return None + return version + + +def glibc_version_string_ctypes(): + # type: () -> Optional[str] + "Fallback implementation of glibc_version_string using ctypes." + + try: + import ctypes + except ImportError: + return None # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen # manpage says, "If filename is NULL, then the returned handle is for the @@ -36,32 +67,6 @@ def glibc_version_string(): return version_str -# Separated out from have_compatible_glibc for easier unit testing -def check_glibc_version(version_str, required_major, minimum_minor): - # type: (str, int, int) -> bool - # Parse string and check against requested version. 
- # - # We use a regexp instead of str.split because we want to discard any - # random junk that might come after the minor version -- this might happen - # in patched/forked versions of glibc (e.g. Linaro's version of glibc - # uses version strings like "2.20-2014.11"). See gh-3588. - m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str) - if not m: - warnings.warn("Expected glibc version with 2 components major.minor," - " got: %s" % version_str, RuntimeWarning) - return False - return (int(m.group("major")) == required_major and - int(m.group("minor")) >= minimum_minor) - - -def have_compatible_glibc(required_major, minimum_minor): - # type: (int, int) -> bool - version_str = glibc_version_string() # type: Optional[str] - if version_str is None: - return False - return check_glibc_version(version_str, required_major, minimum_minor) - - # platform.libc_ver regularly returns completely nonsensical glibc # versions. E.g. on my computer, platform says: # diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/hashes.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/hashes.py index c6df7a18..d1b062fe 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/hashes.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/hashes.py @@ -5,20 +5,22 @@ import hashlib from pip._vendor.six import iteritems, iterkeys, itervalues from pip._internal.exceptions import ( - HashMismatch, HashMissing, InstallationError, + HashMismatch, + HashMissing, + InstallationError, ) from pip._internal.utils.misc import read_chunks from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 + from typing import ( Dict, List, BinaryIO, NoReturn, Iterator ) from pip._vendor.six import PY3 if PY3: - from hashlib import _Hash # noqa: F401 + from hashlib import _Hash else: - from hashlib import _hash as _Hash # noqa: F401 + from hashlib 
import _hash as _Hash # The recommended hash algo of the moment. Change this whenever the state of @@ -44,6 +46,32 @@ class Hashes(object): """ self._allowed = {} if hashes is None else hashes + def __or__(self, other): + # type: (Hashes) -> Hashes + if not isinstance(other, Hashes): + return NotImplemented + new = self._allowed.copy() + for alg, values in iteritems(other._allowed): + try: + new[alg] += values + except KeyError: + new[alg] = values + return Hashes(new) + + @property + def digest_count(self): + # type: () -> int + return sum(len(digests) for digests in self._allowed.values()) + + def is_hash_allowed( + self, + hash_name, # type: str + hex_digest, # type: str + ): + # type: (...) -> bool + """Return whether the given hex digest is allowed.""" + return hex_digest in self._allowed.get(hash_name, []) + def check_against_chunks(self, chunks): # type: (Iterator[bytes]) -> None """Check good hashes against ones built from iterable of chunks of @@ -57,7 +85,9 @@ class Hashes(object): try: gots[hash_name] = hashlib.new(hash_name) except (ValueError, TypeError): - raise InstallationError('Unknown hash name: %s' % hash_name) + raise InstallationError( + 'Unknown hash name: {}'.format(hash_name) + ) for chunk in chunks: for hash in itervalues(gots): diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/inject_securetransport.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/inject_securetransport.py new file mode 100644 index 00000000..5b93b1d6 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/inject_securetransport.py @@ -0,0 +1,36 @@ +"""A helper module that injects SecureTransport, on import. + +The import should be done as early as possible, to ensure all requests and +sessions (or whatever) are created after injecting SecureTransport. + +Note that we only do the injection on macOS, when the linked OpenSSL is too +old to handle TLSv1.2. 
+""" + +import sys + + +def inject_securetransport(): + # type: () -> None + # Only relevant on macOS + if sys.platform != "darwin": + return + + try: + import ssl + except ImportError: + return + + # Checks for OpenSSL 1.0.1 + if ssl.OPENSSL_VERSION_NUMBER >= 0x1000100f: + return + + try: + from pip._vendor.urllib3.contrib import securetransport + except (ImportError, OSError): + return + + securetransport.inject_into_urllib3() + + +inject_securetransport() diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/logging.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/logging.py index 579d6962..9a017cf7 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/logging.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/logging.py @@ -1,3 +1,6 @@ +# The following comment should be removed at some point in the future. +# mypy: disallow-untyped-defs=False + from __future__ import absolute_import import contextlib @@ -6,10 +9,12 @@ import logging import logging.handlers import os import sys +from logging import Filter, getLogger from pip._vendor.six import PY2 from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX from pip._internal.utils.misc import ensure_dir try: @@ -19,15 +24,35 @@ except ImportError: try: - from pip._vendor import colorama + # Use "import as" and set colorama in the else clause to avoid mypy + # errors and get the following correct revealed type for colorama: + # `Union[_importlib_modulespec.ModuleType, None]` + # Otherwise, we get an error like the following in the except block: + # > Incompatible types in assignment (expression has type "None", + # variable has type Module) + # TODO: eliminate the need to use "import as" once mypy addresses some + # of its issues with conditional imports. 
Here is an umbrella issue: + # https://github.com/python/mypy/issues/1297 + from pip._vendor import colorama as _colorama # Lots of different errors can come from this, including SystemError and # ImportError. except Exception: colorama = None +else: + # Import Fore explicitly rather than accessing below as colorama.Fore + # to avoid the following error running mypy: + # > Module has no attribute "Fore" + # TODO: eliminate the need to import Fore once mypy addresses some of its + # issues with conditional imports. This particular case could be an + # instance of the following issue (but also see the umbrella issue above): + # https://github.com/python/mypy/issues/3500 + from pip._vendor.colorama import Fore + + colorama = _colorama _log_state = threading.local() -_log_state.indentation = 0 +subprocess_logger = getLogger('pip.subprocessor') class BrokenStdoutLoggingError(Exception): @@ -78,6 +103,8 @@ def indent_log(num=2): A context manager which will cause the log output to be indented for any log messages emitted inside it. """ + # For thread-safety + _log_state.indentation = get_indentation() _log_state.indentation += num try: yield @@ -90,9 +117,10 @@ def get_indentation(): class IndentingFormatter(logging.Formatter): + def __init__(self, *args, **kwargs): """ - A logging.Formatter obeying containing indent_log contexts. + A logging.Formatter that obeys the indent_log() context manager. :param add_timestamp: A bool indicating output lines should be prefixed with their record's timestamp. @@ -100,15 +128,36 @@ class IndentingFormatter(logging.Formatter): self.add_timestamp = kwargs.pop("add_timestamp", False) super(IndentingFormatter, self).__init__(*args, **kwargs) + def get_message_start(self, formatted, levelno): + """ + Return the start of the formatted log message (not counting the + prefix to add to each line). + """ + if levelno < logging.WARNING: + return '' + if formatted.startswith(DEPRECATION_MSG_PREFIX): + # Then the message already has a prefix. 
We don't want it to + # look like "WARNING: DEPRECATION: ...." + return '' + if levelno < logging.ERROR: + return 'WARNING: ' + + return 'ERROR: ' + def format(self, record): """ - Calls the standard formatter, but will indent all of the log messages - by our current indentation level. + Calls the standard formatter, but will indent all of the log message + lines by our current indentation level. """ formatted = super(IndentingFormatter, self).format(record) + message_start = self.get_message_start(formatted, record.levelno) + formatted = message_start + formatted + prefix = '' if self.add_timestamp: - prefix = self.formatTime(record, "%Y-%m-%dT%H:%M:%S ") + # TODO: Use Formatter.default_time_format after dropping PY2. + t = self.formatTime(record, "%Y-%m-%dT%H:%M:%S") + prefix = '{t},{record.msecs:03.0f} '.format(**locals()) prefix += " " * get_indentation() formatted = "".join([ prefix + line @@ -129,8 +178,8 @@ class ColorizedStreamHandler(logging.StreamHandler): if colorama: COLORS = [ # This needs to be in order from highest logging level to lowest. - (logging.ERROR, _color_wrap(colorama.Fore.RED)), - (logging.WARNING, _color_wrap(colorama.Fore.YELLOW)), + (logging.ERROR, _color_wrap(Fore.RED)), + (logging.WARNING, _color_wrap(Fore.YELLOW)), ] else: COLORS = [] @@ -205,7 +254,7 @@ class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler): return logging.handlers.RotatingFileHandler._open(self) -class MaxLevelFilter(logging.Filter): +class MaxLevelFilter(Filter): def __init__(self, level): self.level = level @@ -214,6 +263,18 @@ class MaxLevelFilter(logging.Filter): return record.levelno < self.level +class ExcludeLoggerFilter(Filter): + + """ + A logging Filter that excludes records from a logger (or its children). + """ + + def filter(self, record): + # The base Filter class allows only records from a logger (or its + # children). 
+ return not super(ExcludeLoggerFilter, self).filter(record) + + def setup_logging(verbosity, no_color, user_log_file): """Configures and sets up all of the logging @@ -257,6 +318,9 @@ def setup_logging(verbosity, no_color, user_log_file): "stream": "pip._internal.utils.logging.ColorizedStreamHandler", "file": "pip._internal.utils.logging.BetterRotatingFileHandler", } + handlers = ["console", "console_errors", "console_subprocess"] + ( + ["user_log"] if include_user_log else [] + ) logging.config.dictConfig({ "version": 1, @@ -266,6 +330,14 @@ def setup_logging(verbosity, no_color, user_log_file): "()": "pip._internal.utils.logging.MaxLevelFilter", "level": logging.WARNING, }, + "restrict_to_subprocess": { + "()": "logging.Filter", + "name": subprocess_logger.name, + }, + "exclude_subprocess": { + "()": "pip._internal.utils.logging.ExcludeLoggerFilter", + "name": subprocess_logger.name, + }, }, "formatters": { "indent": { @@ -284,7 +356,7 @@ def setup_logging(verbosity, no_color, user_log_file): "class": handler_classes["stream"], "no_color": no_color, "stream": log_streams["stdout"], - "filters": ["exclude_warnings"], + "filters": ["exclude_subprocess", "exclude_warnings"], "formatter": "indent", }, "console_errors": { @@ -292,6 +364,17 @@ def setup_logging(verbosity, no_color, user_log_file): "class": handler_classes["stream"], "no_color": no_color, "stream": log_streams["stderr"], + "filters": ["exclude_subprocess"], + "formatter": "indent", + }, + # A handler responsible for logging to the console messages + # from the "subprocessor" logger. 
+ "console_subprocess": { + "level": level, + "class": handler_classes["stream"], + "no_color": no_color, + "stream": log_streams["stderr"], + "filters": ["restrict_to_subprocess"], "formatter": "indent", }, "user_log": { @@ -304,9 +387,7 @@ def setup_logging(verbosity, no_color, user_log_file): }, "root": { "level": root_level, - "handlers": ["console", "console_errors"] + ( - ["user_log"] if include_user_log else [] - ), + "handlers": handlers, }, "loggers": { "pip._vendor": { diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/misc.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/misc.py index 84605ee3..24a74556 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/misc.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/misc.py @@ -1,41 +1,51 @@ +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False +# mypy: disallow-untyped-defs=False + from __future__ import absolute_import import contextlib import errno +import getpass +import hashlib import io -import locale -# we have a submodule named 'logging' which would shadow this if we used the -# regular name: -import logging as std_logging +import logging import os import posixpath -import re import shutil import stat -import subprocess import sys -import tarfile -import zipfile from collections import deque +from itertools import tee from pip._vendor import pkg_resources +from pip._vendor.packaging.utils import canonicalize_name # NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is # why we ignore the type on this import. 
from pip._vendor.retrying import retry # type: ignore -from pip._vendor.six import PY2 -from pip._vendor.six.moves import input +from pip._vendor.six import PY2, text_type +from pip._vendor.six.moves import filter, filterfalse, input, map, zip_longest from pip._vendor.six.moves.urllib import parse as urllib_parse from pip._vendor.six.moves.urllib.parse import unquote as urllib_unquote -from pip._internal.exceptions import CommandError, InstallationError +from pip import __version__ +from pip._internal.exceptions import CommandError from pip._internal.locations import ( - running_under_virtualenv, site_packages, user_site, virtualenv_no_global, - write_delete_marker_file, + get_major_minor_version, + site_packages, + user_site, ) from pip._internal.utils.compat import ( - WINDOWS, console_to_str, expanduser, stdlib_pkgs, + WINDOWS, + expanduser, + stdlib_pkgs, + str_to_display, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING, cast +from pip._internal.utils.virtualenv import ( + running_under_virtualenv, + virtualenv_no_global, ) -from pip._internal.utils.typing import MYPY_CHECK_RUNNING if PY2: from io import BytesIO as StringIO @@ -43,51 +53,58 @@ else: from io import StringIO if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 - Optional, Tuple, Iterable, List, Match, Union, Any, Mapping, Text, - AnyStr, Container + from typing import ( + Any, AnyStr, Callable, Container, Iterable, Iterator, List, Optional, + Text, Tuple, TypeVar, Union, ) - from pip._vendor.pkg_resources import Distribution # noqa: F401 - from pip._internal.models.link import Link # noqa: F401 - from pip._internal.utils.ui import SpinnerInterface # noqa: F401 + from pip._vendor.pkg_resources import Distribution + + VersionInfo = Tuple[int, int, int] + T = TypeVar("T") __all__ = ['rmtree', 'display_path', 'backup_dir', 'ask', 'splitext', 'format_size', 'is_installable_dir', - 'is_svn_page', 'file_contents', - 'split_leading_dir', 'has_leading_dir', 'normalize_path', 'renames', 
'get_prog', - 'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess', 'captured_stdout', 'ensure_dir', - 'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS', 'WHEEL_EXTENSION', 'get_installed_version', 'remove_auth_from_url'] -logger = std_logging.getLogger(__name__) +logger = logging.getLogger(__name__) + + +def get_pip_version(): + # type: () -> str + pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..") + pip_pkg_dir = os.path.abspath(pip_pkg_dir) + + return ( + 'pip {} from {} (python {})'.format( + __version__, pip_pkg_dir, get_major_minor_version(), + ) + ) + + +def normalize_version_info(py_version_info): + # type: (Tuple[int, ...]) -> Tuple[int, int, int] + """ + Convert a tuple of ints representing a Python version to one of length + three. -WHEEL_EXTENSION = '.whl' -BZ2_EXTENSIONS = ('.tar.bz2', '.tbz') -XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma') -ZIP_EXTENSIONS = ('.zip', WHEEL_EXTENSION) -TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar') -ARCHIVE_EXTENSIONS = ( - ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS) -SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS + :param py_version_info: a tuple of ints representing a Python version, + or None to specify no version. The tuple can have any length. -try: - import bz2 # noqa - SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS -except ImportError: - logger.debug('bz2 module is not available') + :return: a tuple of length three if `py_version_info` is non-None. + Otherwise, return `py_version_info` unchanged (i.e. None). 
+ """ + if len(py_version_info) < 3: + py_version_info += (3 - len(py_version_info)) * (0,) + elif len(py_version_info) > 3: + py_version_info = py_version_info[:3] -try: - # Only for Python 3.3+ - import lzma # noqa - SUPPORTED_EXTENSIONS += XZ_EXTENSIONS -except ImportError: - logger.debug('lzma module is not available') + return cast('VersionInfo', py_version_info) def ensure_dir(path): @@ -96,7 +113,8 @@ def ensure_dir(path): try: os.makedirs(path) except OSError as e: - if e.errno != errno.EEXIST: + # Windows can raise spurious ENOTEMPTY errors. See #6426. + if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY: raise @@ -105,7 +123,7 @@ def get_prog(): try: prog = os.path.basename(sys.argv[0]) if prog in ('__main__.py', '-c'): - return "%s -m pip" % sys.executable + return "{} -m pip".format(sys.executable) else: return prog except (AttributeError, TypeError, IndexError): @@ -116,7 +134,7 @@ def get_prog(): # Retry every half second for up to 3 seconds @retry(stop_max_delay=3000, wait_fixed=500) def rmtree(dir, ignore_errors=False): - # type: (str, bool) -> None + # type: (Text, bool) -> None shutil.rmtree(dir, ignore_errors=ignore_errors, onerror=rmtree_errorhandler) @@ -125,8 +143,13 @@ def rmtree_errorhandler(func, path, exc_info): """On Windows, the files in .svn are read-only, so when rmtree() tries to remove them, an exception is thrown. 
We catch that here, remove the read-only attribute, and hopefully continue without problems.""" - # if file type currently read only - if os.stat(path).st_mode & stat.S_IREAD: + try: + has_attr_readonly = not (os.stat(path).st_mode & stat.S_IWRITE) + except (IOError, OSError): + # it's equivalent to os.path.exists + return + + if has_attr_readonly: # convert to read/write os.chmod(path, stat.S_IWRITE) # use the original function to repeat the operation @@ -136,6 +159,40 @@ def rmtree_errorhandler(func, path, exc_info): raise +def path_to_display(path): + # type: (Optional[Union[str, Text]]) -> Optional[Text] + """ + Convert a bytes (or text) path to text (unicode in Python 2) for display + and logging purposes. + + This function should never error out. Also, this function is mainly needed + for Python 2 since in Python 3 str paths are already text. + """ + if path is None: + return None + if isinstance(path, text_type): + return path + # Otherwise, path is a bytes object (str in Python 2). + try: + display_path = path.decode(sys.getfilesystemencoding(), 'strict') + except UnicodeDecodeError: + # Include the full bytes to make troubleshooting easier, even though + # it may not be very human readable. + if PY2: + # Convert the bytes to a readable str representation using + # repr(), and then convert the str to unicode. + # Also, we add the prefix "b" to the repr() return value both + # to make the Python 2 output look like the Python 3 output, and + # to signal to the user that this is a bytes representation. + display_path = str_to_display('b{!r}'.format(path)) + else: + # Silence the "F821 undefined name 'ascii'" flake8 error since + # in Python 3 ascii() is a built-in. 
+ display_path = ascii(path) # noqa: F821 + + return display_path + + def display_path(path): # type: (Union[str, Text]) -> str """Gives the display value for a given path, making it relative to cwd @@ -169,36 +226,71 @@ def ask_path_exists(message, options): return ask(message, options) +def _check_no_input(message): + # type: (str) -> None + """Raise an error if no input is allowed.""" + if os.environ.get('PIP_NO_INPUT'): + raise Exception( + 'No input was expected ($PIP_NO_INPUT set); question: {}'.format( + message) + ) + + def ask(message, options): # type: (str, Iterable[str]) -> str """Ask the message interactively, with the given possible responses""" while 1: - if os.environ.get('PIP_NO_INPUT'): - raise Exception( - 'No input was expected ($PIP_NO_INPUT set); question: %s' % - message - ) + _check_no_input(message) response = input(message) response = response.strip().lower() if response not in options: print( - 'Your response (%r) was not one of the expected responses: ' - '%s' % (response, ', '.join(options)) + 'Your response ({!r}) was not one of the expected responses: ' + '{}'.format(response, ', '.join(options)) ) else: return response +def ask_input(message): + # type: (str) -> str + """Ask for input interactively.""" + _check_no_input(message) + return input(message) + + +def ask_password(message): + # type: (str) -> str + """Ask for a password interactively.""" + _check_no_input(message) + return getpass.getpass(message) + + def format_size(bytes): # type: (float) -> str if bytes > 1000 * 1000: - return '%.1fMB' % (bytes / 1000.0 / 1000) + return '{:.1f} MB'.format(bytes / 1000.0 / 1000) elif bytes > 10 * 1000: - return '%ikB' % (bytes / 1000) + return '{} kB'.format(int(bytes / 1000)) elif bytes > 1000: - return '%.1fkB' % (bytes / 1000.0) + return '{:.1f} kB'.format(bytes / 1000.0) else: - return '%ibytes' % bytes + return '{} bytes'.format(int(bytes)) + + +def tabulate(rows): + # type: (Iterable[Iterable[Any]]) -> Tuple[List[str], List[int]] + 
"""Return a list of formatted rows and a list of column sizes. + + For example:: + + >>> tabulate([['foobar', 2000], [0xdeadbeef]]) + (['foobar 2000', '3735928559'], [10, 4]) + """ + rows = [tuple(map(str, row)) for row in rows] + sizes = [max(map(len, col)) for col in zip_longest(*rows, fillvalue='')] + table = [" ".join(map(str.ljust, row, sizes)).rstrip() for row in rows] + return table, sizes def is_installable_dir(path): @@ -216,21 +308,6 @@ def is_installable_dir(path): return False -def is_svn_page(html): - # type: (Union[str, Text]) -> Optional[Match[Union[str, Text]]] - """ - Returns true if the page appears to be the index page of an svn repository - """ - return (re.search(r'[^<]*Revision \d+:', html) and - re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I)) - - -def file_contents(filename): - # type: (str) -> Text - with open(filename, 'rb') as fp: - return fp.read().decode('utf-8') - - def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE): """Yield pieces of data from a file-like object until EOF.""" while True: @@ -240,34 +317,6 @@ def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE): yield chunk -def split_leading_dir(path): - # type: (Union[str, Text]) -> List[Union[str, Text]] - path = path.lstrip('/').lstrip('\\') - if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or - '\\' not in path): - return path.split('/', 1) - elif '\\' in path: - return path.split('\\', 1) - else: - return [path, ''] - - -def has_leading_dir(paths): - # type: (Iterable[Union[str, Text]]) -> bool - """Returns true if all the paths have the same leading path name - (i.e., everything is in one subdirectory in an archive)""" - common_prefix = None - for path in paths: - prefix, rest = split_leading_dir(path) - if not prefix: - return False - elif common_prefix is None: - common_prefix = prefix - elif prefix != common_prefix: - return False - return True - - def normalize_path(path, resolve_symlinks=True): # type: (str, bool) -> str """ @@ -317,10 
+366,12 @@ def is_local(path): If we're not in a virtualenv, all paths are considered "local." + Caution: this function assumes the head of path has been normalized + with normalize_path. """ if not running_under_virtualenv(): return True - return normalize_path(path).startswith(normalize_path(sys.prefix)) + return path.startswith(normalize_path(sys.prefix)) def dist_is_local(dist): @@ -340,8 +391,7 @@ def dist_in_usersite(dist): """ Return True if given Distribution is installed in user site. """ - norm_path = normalize_path(dist_location(dist)) - return norm_path.startswith(normalize_path(user_site)) + return dist_location(dist).startswith(normalize_path(user_site)) def dist_in_site_packages(dist): @@ -350,9 +400,7 @@ def dist_in_site_packages(dist): Return True if given Distribution is installed in sysconfig.get_python_lib(). """ - return normalize_path( - dist_location(dist) - ).startswith(normalize_path(site_packages)) + return dist_location(dist).startswith(normalize_path(site_packages)) def dist_is_editable(dist): @@ -367,12 +415,15 @@ def dist_is_editable(dist): return False -def get_installed_distributions(local_only=True, - skip=stdlib_pkgs, - include_editables=True, - editables_only=False, - user_only=False): - # type: (bool, Container[str], bool, bool, bool) -> List[Distribution] +def get_installed_distributions( + local_only=True, # type: bool + skip=stdlib_pkgs, # type: Container[str] + include_editables=True, # type: bool + editables_only=False, # type: bool + user_only=False, # type: bool + paths=None # type: Optional[List[str]] +): + # type: (...) -> List[Distribution] """ Return a list of installed Distribution objects. @@ -389,7 +440,14 @@ def get_installed_distributions(local_only=True, If ``user_only`` is True , only report installations in the user site directory. + If ``paths`` is set, only report the distributions present at the + specified list of locations. 
""" + if paths: + working_set = pkg_resources.WorkingSet(paths) + else: + working_set = pkg_resources.working_set + if local_only: local_test = dist_is_local else: @@ -416,8 +474,7 @@ def get_installed_distributions(local_only=True, def user_test(d): return True - # because of pkg_resources vendoring, mypy cannot find stub in typeshed - return [d for d in pkg_resources.working_set # type: ignore + return [d for d in working_set if local_test(d) and d.key not in skip and editable_test(d) and @@ -426,6 +483,40 @@ def get_installed_distributions(local_only=True, ] +def search_distribution(req_name): + + # Canonicalize the name before searching in the list of + # installed distributions and also while creating the package + # dictionary to get the Distribution object + req_name = canonicalize_name(req_name) + packages = get_installed_distributions(skip=()) + pkg_dict = {canonicalize_name(p.key): p for p in packages} + return pkg_dict.get(req_name) + + +def get_distribution(req_name): + """Given a requirement name, return the installed Distribution object""" + + # Search the distribution by looking through the working set + dist = search_distribution(req_name) + + # If distribution could not be found, call working_set.require + # to update the working set, and try to find the distribution + # again. + # This might happen for e.g. when you install a package + # twice, once using setup.py develop and again using setup.py install. + # Now when run pip uninstall twice, the package gets removed + # from the working set in the first uninstall, so we have to populate + # the working set again so that pip knows about it and the packages + # gets picked up and is successfully uninstalled the second time too. 
+ if not dist: + try: + pkg_resources.working_set.require(req_name) + except pkg_resources.DistributionNotFound: + return None + return search_distribution(req_name) + + def egg_link_path(dist): # type: (Distribution) -> Optional[str] """ @@ -447,12 +538,9 @@ def egg_link_path(dist): """ sites = [] if running_under_virtualenv(): - if virtualenv_no_global(): - sites.append(site_packages) - else: - sites.append(site_packages) - if user_site: - sites.append(user_site) + sites.append(site_packages) + if not virtualenv_no_global() and user_site: + sites.append(user_site) else: if user_site: sites.append(user_site) @@ -473,350 +561,28 @@ def dist_location(dist): packages, where dist.location is the source code location, and we want to know where the egg-link file is. + The returned location is normalized (in particular, with symlinks removed). """ egg_link = egg_link_path(dist) if egg_link: - return egg_link - return dist.location - - -def current_umask(): - """Get the current umask which involves having to set it temporarily.""" - mask = os.umask(0) - os.umask(mask) - return mask - - -def unzip_file(filename, location, flatten=True): - # type: (str, str, bool) -> None - """ - Unzip the file (with path `filename`) to the destination `location`. All - files are written based on system defaults and umask (i.e. permissions are - not preserved), except that regular file members with any execute - permissions (user, group, or world) have "chmod +x" applied after being - written. Note that for windows, any execute changes using os.chmod are - no-ops per the python docs. 
- """ - ensure_dir(location) - zipfp = open(filename, 'rb') - try: - zip = zipfile.ZipFile(zipfp, allowZip64=True) - leading = has_leading_dir(zip.namelist()) and flatten - for info in zip.infolist(): - name = info.filename - fn = name - if leading: - fn = split_leading_dir(name)[1] - fn = os.path.join(location, fn) - dir = os.path.dirname(fn) - if fn.endswith('/') or fn.endswith('\\'): - # A directory - ensure_dir(fn) - else: - ensure_dir(dir) - # Don't use read() to avoid allocating an arbitrarily large - # chunk of memory for the file's content - fp = zip.open(name) - try: - with open(fn, 'wb') as destfp: - shutil.copyfileobj(fp, destfp) - finally: - fp.close() - mode = info.external_attr >> 16 - # if mode and regular file and any execute permissions for - # user/group/world? - if mode and stat.S_ISREG(mode) and mode & 0o111: - # make dest file have execute for user/group/world - # (chmod +x) no-op on windows per python docs - os.chmod(fn, (0o777 - current_umask() | 0o111)) - finally: - zipfp.close() - - -def untar_file(filename, location): - # type: (str, str) -> None - """ - Untar the file (with path `filename`) to the destination `location`. - All files are written based on system defaults and umask (i.e. permissions - are not preserved), except that regular file members with any execute - permissions (user, group, or world) have "chmod +x" applied after being - written. Note that for windows, any execute changes using os.chmod are - no-ops per the python docs. 
- """ - ensure_dir(location) - if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'): - mode = 'r:gz' - elif filename.lower().endswith(BZ2_EXTENSIONS): - mode = 'r:bz2' - elif filename.lower().endswith(XZ_EXTENSIONS): - mode = 'r:xz' - elif filename.lower().endswith('.tar'): - mode = 'r' - else: - logger.warning( - 'Cannot determine compression type for file %s', filename, - ) - mode = 'r:*' - tar = tarfile.open(filename, mode) - try: - leading = has_leading_dir([ - member.name for member in tar.getmembers() - ]) - for member in tar.getmembers(): - fn = member.name - if leading: - # https://github.com/python/mypy/issues/1174 - fn = split_leading_dir(fn)[1] # type: ignore - path = os.path.join(location, fn) - if member.isdir(): - ensure_dir(path) - elif member.issym(): - try: - # https://github.com/python/typeshed/issues/2673 - tar._extract_member(member, path) # type: ignore - except Exception as exc: - # Some corrupt tar files seem to produce this - # (specifically bad symlinks) - logger.warning( - 'In the tar file %s the member %s is invalid: %s', - filename, member.name, exc, - ) - continue - else: - try: - fp = tar.extractfile(member) - except (KeyError, AttributeError) as exc: - # Some corrupt tar files seem to produce this - # (specifically bad symlinks) - logger.warning( - 'In the tar file %s the member %s is invalid: %s', - filename, member.name, exc, - ) - continue - ensure_dir(os.path.dirname(path)) - with open(path, 'wb') as destfp: - shutil.copyfileobj(fp, destfp) - fp.close() - # Update the timestamp (useful for cython compiled files) - # https://github.com/python/typeshed/issues/2673 - tar.utime(member, path) # type: ignore - # member have any execute permissions for user/group/world? 
- if member.mode & 0o111: - # make dest file have execute for user/group/world - # no-op on windows per python docs - os.chmod(path, (0o777 - current_umask() | 0o111)) - finally: - tar.close() - - -def unpack_file( - filename, # type: str - location, # type: str - content_type, # type: Optional[str] - link # type: Optional[Link] -): - # type: (...) -> None - filename = os.path.realpath(filename) - if (content_type == 'application/zip' or - filename.lower().endswith(ZIP_EXTENSIONS) or - zipfile.is_zipfile(filename)): - unzip_file( - filename, - location, - flatten=not filename.endswith('.whl') - ) - elif (content_type == 'application/x-gzip' or - tarfile.is_tarfile(filename) or - filename.lower().endswith( - TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)): - untar_file(filename, location) - elif (content_type and content_type.startswith('text/html') and - is_svn_page(file_contents(filename))): - # We don't really care about this - from pip._internal.vcs.subversion import Subversion - Subversion('svn+' + link.url).unpack(location) - else: - # FIXME: handle? - # FIXME: magic signatures? - logger.critical( - 'Cannot unpack file %s (downloaded from %s, content-type: %s); ' - 'cannot detect archive format', - filename, location, content_type, - ) - raise InstallationError( - 'Cannot determine archive format of %s' % location - ) - - -def call_subprocess( - cmd, # type: List[str] - show_stdout=True, # type: bool - cwd=None, # type: Optional[str] - on_returncode='raise', # type: str - extra_ok_returncodes=None, # type: Optional[Iterable[int]] - command_desc=None, # type: Optional[str] - extra_environ=None, # type: Optional[Mapping[str, Any]] - unset_environ=None, # type: Optional[Iterable[str]] - spinner=None # type: Optional[SpinnerInterface] -): - # type: (...) -> Optional[Text] - """ - Args: - extra_ok_returncodes: an iterable of integer return codes that are - acceptable, in addition to 0. Defaults to None, which means []. 
- unset_environ: an iterable of environment variable names to unset - prior to calling subprocess.Popen(). - """ - if extra_ok_returncodes is None: - extra_ok_returncodes = [] - if unset_environ is None: - unset_environ = [] - # This function's handling of subprocess output is confusing and I - # previously broke it terribly, so as penance I will write a long comment - # explaining things. - # - # The obvious thing that affects output is the show_stdout= - # kwarg. show_stdout=True means, let the subprocess write directly to our - # stdout. Even though it is nominally the default, it is almost never used - # inside pip (and should not be used in new code without a very good - # reason); as of 2016-02-22 it is only used in a few places inside the VCS - # wrapper code. Ideally we should get rid of it entirely, because it - # creates a lot of complexity here for a rarely used feature. - # - # Most places in pip set show_stdout=False. What this means is: - # - We connect the child stdout to a pipe, which we read. - # - By default, we hide the output but show a spinner -- unless the - # subprocess exits with an error, in which case we show the output. - # - If the --verbose option was passed (= loglevel is DEBUG), then we show - # the output unconditionally. (But in this case we don't want to show - # the output a second time if it turns out that there was an error.) - # - # stderr is always merged with stdout (even if show_stdout=True). 
- if show_stdout: - stdout = None - else: - stdout = subprocess.PIPE - if command_desc is None: - cmd_parts = [] - for part in cmd: - if ' ' in part or '\n' in part or '"' in part or "'" in part: - part = '"%s"' % part.replace('"', '\\"') - cmd_parts.append(part) - command_desc = ' '.join(cmd_parts) - logger.debug("Running command %s", command_desc) - env = os.environ.copy() - if extra_environ: - env.update(extra_environ) - for name in unset_environ: - env.pop(name, None) - try: - proc = subprocess.Popen( - cmd, stderr=subprocess.STDOUT, stdin=subprocess.PIPE, - stdout=stdout, cwd=cwd, env=env, - ) - proc.stdin.close() - except Exception as exc: - logger.critical( - "Error %s while executing command %s", exc, command_desc, - ) - raise - all_output = [] - if stdout is not None: - while True: - line = console_to_str(proc.stdout.readline()) - if not line: - break - line = line.rstrip() - all_output.append(line + '\n') - if logger.getEffectiveLevel() <= std_logging.DEBUG: - # Show the line immediately - logger.debug(line) - else: - # Update the spinner - if spinner is not None: - spinner.spin() - try: - proc.wait() - finally: - if proc.stdout: - proc.stdout.close() - if spinner is not None: - if proc.returncode: - spinner.finish("error") - else: - spinner.finish("done") - if proc.returncode and proc.returncode not in extra_ok_returncodes: - if on_returncode == 'raise': - if (logger.getEffectiveLevel() > std_logging.DEBUG and - not show_stdout): - logger.info( - 'Complete output from command %s:', command_desc, - ) - logger.info( - ''.join(all_output) + - '\n----------------------------------------' - ) - raise InstallationError( - 'Command "%s" failed with error code %s in %s' - % (command_desc, proc.returncode, cwd)) - elif on_returncode == 'warn': - logger.warning( - 'Command "%s" had error code %s in %s', - command_desc, proc.returncode, cwd, - ) - elif on_returncode == 'ignore': - pass - else: - raise ValueError('Invalid value: on_returncode=%s' % - 
repr(on_returncode)) - if not show_stdout: - return ''.join(all_output) - return None - + return normalize_path(egg_link) + return normalize_path(dist.location) -def read_text_file(filename): - # type: (str) -> str - """Return the contents of *filename*. - - Try to decode the file contents with utf-8, the preferred system encoding - (e.g., cp1252 on some Windows machines), and latin1, in that order. - Decoding a byte string with latin1 will never raise an error. In the worst - case, the returned string will contain some garbage characters. - - """ - with open(filename, 'rb') as fp: - data = fp.read() - - encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1'] - for enc in encodings: - try: - # https://github.com/python/mypy/issues/1174 - data = data.decode(enc) # type: ignore - except UnicodeDecodeError: - continue - break - assert not isinstance(data, bytes) # Latin1 should have worked. - return data - - -def _make_build_dir(build_dir): - os.makedirs(build_dir) - write_delete_marker_file(build_dir) +def write_output(msg, *args): + # type: (Any, Any) -> None + logger.info(msg, *args) class FakeFile(object): """Wrap a list of lines in an object with readline() to make ConfigParser happy.""" def __init__(self, lines): - self._gen = (l for l in lines) + self._gen = iter(lines) def readline(self): try: - try: - return next(self._gen) - except NameError: - return self._gen.next() + return next(self._gen) except StopIteration: return '' @@ -871,26 +637,6 @@ def captured_stderr(): return captured_output('stderr') -class cached_property(object): - """A property that is only computed once per instance and then replaces - itself with an ordinary attribute. Deleting the attribute resets the - property. 
- - Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175 - """ - - def __init__(self, func): - self.__doc__ = getattr(func, '__doc__') - self.func = func - - def __get__(self, obj, cls): - if obj is None: - # We're being accessed from the class itself, not from an object - return self - value = obj.__dict__[self.func.__name__] = self.func(obj) - return value - - def get_installed_version(dist_name, working_set=None): """Get the installed version of dist_name avoiding pkg_resources cache""" # Create a requirement that we'll look for inside of setuptools. @@ -922,20 +668,38 @@ def enum(*sequential, **named): return type('Enum', (), enums) -def make_vcs_requirement_url(repo_url, rev, project_name, subdir=None): +def build_netloc(host, port): + # type: (str, Optional[int]) -> str """ - Return the URL for a VCS requirement. + Build a netloc from a host-port pair + """ + if port is None: + return host + if ':' in host: + # Only wrap host with square brackets when it is IPv6 + host = '[{}]'.format(host) + return '{}:{}'.format(host, port) + - Args: - repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+"). - project_name: the (unescaped) project name. +def build_url_from_netloc(netloc, scheme='https'): + # type: (str, str) -> str """ - egg_project_name = pkg_resources.to_filename(project_name) - req = '{}@{}#egg={}'.format(repo_url, rev, egg_project_name) - if subdir: - req += '&subdirectory={}'.format(subdir) + Build a full URL from a netloc. + """ + if netloc.count(':') >= 2 and '@' not in netloc and '[' not in netloc: + # It must be a bare IPv6 address, so wrap it with brackets. + netloc = '[{}]'.format(netloc) + return '{}://{}'.format(scheme, netloc) - return req + +def parse_netloc(netloc): + # type: (str) -> Tuple[str, Optional[int]] + """ + Return the host-port pair from a netloc. 
+ """ + url = build_url_from_netloc(netloc) + parsed = urllib_parse.urlparse(url) + return parsed.hostname, parsed.port def split_auth_from_netloc(netloc): @@ -969,49 +733,127 @@ def split_auth_from_netloc(netloc): def redact_netloc(netloc): # type: (str) -> str """ - Replace the password in a netloc with "****", if it exists. + Replace the sensitive data in a netloc with "****", if it exists. - For example, "user:pass@example.com" returns "user:****@example.com". + For example: + - "user:pass@example.com" returns "user:****@example.com" + - "accesstoken@example.com" returns "****@example.com" """ netloc, (user, password) = split_auth_from_netloc(netloc) if user is None: return netloc - password = '' if password is None else ':****' - return '{user}{password}@{netloc}'.format(user=urllib_parse.quote(user), + if password is None: + user = '****' + password = '' + else: + user = urllib_parse.quote(user) + password = ':****' + return '{user}{password}@{netloc}'.format(user=user, password=password, netloc=netloc) def _transform_url(url, transform_netloc): + """Transform and replace netloc in a url. + + transform_netloc is a function taking the netloc and returning a + tuple. The first element of this tuple is the new netloc. The + entire tuple is returned. + + Returns a tuple containing the transformed url as item 0 and the + original tuple returned by transform_netloc as item 1. 
+ """ purl = urllib_parse.urlsplit(url) - netloc = transform_netloc(purl.netloc) + netloc_tuple = transform_netloc(purl.netloc) # stripped url url_pieces = ( - purl.scheme, netloc, purl.path, purl.query, purl.fragment + purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment ) surl = urllib_parse.urlunsplit(url_pieces) - return surl + return surl, netloc_tuple def _get_netloc(netloc): - return split_auth_from_netloc(netloc)[0] + return split_auth_from_netloc(netloc) + + +def _redact_netloc(netloc): + return (redact_netloc(netloc),) + + +def split_auth_netloc_from_url(url): + # type: (str) -> Tuple[str, str, Tuple[str, str]] + """ + Parse a url into separate netloc, auth, and url with no auth. + + Returns: (url_without_auth, netloc, (username, password)) + """ + url_without_auth, (netloc, auth) = _transform_url(url, _get_netloc) + return url_without_auth, netloc, auth def remove_auth_from_url(url): # type: (str) -> str - # Return a copy of url with 'username:password@' removed. + """Return a copy of url with 'username:password@' removed.""" # username/pass params are passed to subversion through flags # and are not recognized in the url. - return _transform_url(url, _get_netloc) + return _transform_url(url, _get_netloc)[0] -def redact_password_from_url(url): +def redact_auth_from_url(url): # type: (str) -> str """Replace the password in a given url with ****.""" - return _transform_url(url, redact_netloc) + return _transform_url(url, _redact_netloc)[0] + + +class HiddenText(object): + def __init__( + self, + secret, # type: str + redacted, # type: str + ): + # type: (...) -> None + self.secret = secret + self.redacted = redacted + + def __repr__(self): + # type: (...) -> str + return '<HiddenText {!r}>'.format(str(self)) + + def __str__(self): + # type: (...) -> str + return self.redacted + + # This is useful for testing. 
+ def __eq__(self, other): + # type: (Any) -> bool + if type(self) != type(other): + return False + + # The string being used for redaction doesn't also have to match, + # just the raw, original string. + return (self.secret == other.secret) + + # We need to provide an explicit __ne__ implementation for Python 2. + # TODO: remove this when we drop PY2 support. + def __ne__(self, other): + # type: (Any) -> bool + return not self == other + + +def hide_value(value): + # type: (str) -> HiddenText + return HiddenText(value, redacted='****') + + +def hide_url(url): + # type: (str) -> HiddenText + redacted = redact_auth_from_url(url) + return HiddenText(url, redacted=redacted) def protect_pip_from_modification_on_windows(modifying_pip): + # type: (bool) -> None """Protection of pip.exe from modification on Windows On Windows, any operation modifying pip should be run as: @@ -1038,3 +880,63 @@ def protect_pip_from_modification_on_windows(modifying_pip): 'To modify pip, please run the following command:\n{}' .format(" ".join(new_command)) ) + + +def is_console_interactive(): + # type: () -> bool + """Is this console interactive? + """ + return sys.stdin is not None and sys.stdin.isatty() + + +def hash_file(path, blocksize=1 << 20): + # type: (Text, int) -> Tuple[Any, int] + """Return (hash, length) for path using hashlib.sha256() + """ + + h = hashlib.sha256() + length = 0 + with open(path, 'rb') as f: + for block in read_chunks(f, size=blocksize): + length += len(block) + h.update(block) + return h, length + + +def is_wheel_installed(): + """ + Return whether the wheel package is installed. + """ + try: + import wheel # noqa: F401 + except ImportError: + return False + + return True + + +def pairwise(iterable): + # type: (Iterable[Any]) -> Iterator[Tuple[Any, Any]] + """ + Return paired elements. + + For example: + s -> (s0, s1), (s2, s3), (s4, s5), ... 
+ """ + iterable = iter(iterable) + return zip_longest(iterable, iterable) + + +def partition( + pred, # type: Callable[[T], bool] + iterable, # type: Iterable[T] +): + # type: (...) -> Tuple[Iterable[T], Iterable[T]] + """ + Use a predicate to partition entries into false entries and true entries, + like + + partition(is_odd, range(10)) --> 0 2 4 6 8 and 1 3 5 7 9 + """ + t1, t2 = tee(iterable) + return filterfalse(pred, t1), filter(pred, t2) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/models.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/models.py index d5cb80a7..d1c2f226 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/models.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/models.py @@ -1,13 +1,17 @@ """Utilities for defining models """ +# The following comment should be removed at some point in the future. +# mypy: disallow-untyped-defs=False import operator class KeyBasedCompareMixin(object): - """Provides comparision capabilities that is based on a key + """Provides comparison capabilities that is based on a key """ + __slots__ = ['_compare_key', '_defining_class'] + def __init__(self, key, defining_class): self._compare_key = key self._defining_class = defining_class diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/packaging.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/packaging.py index 7aaf7b5e..68aa86ed 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/packaging.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/packaging.py @@ -1,79 +1,88 @@ from __future__ import absolute_import import logging -import sys from email.parser import FeedParser from pip._vendor import pkg_resources from pip._vendor.packaging import specifiers, version -from pip._internal import exceptions +from 
pip._internal.exceptions import NoneMetadataError from pip._internal.utils.misc import display_path from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Optional # noqa: F401 - from email.message import Message # noqa: F401 - from pip._vendor.pkg_resources import Distribution # noqa: F401 + from typing import Optional, Tuple + from email.message import Message + from pip._vendor.pkg_resources import Distribution logger = logging.getLogger(__name__) -def check_requires_python(requires_python): - # type: (Optional[str]) -> bool +def check_requires_python(requires_python, version_info): + # type: (Optional[str], Tuple[int, ...]) -> bool """ - Check if the python version in use match the `requires_python` specifier. + Check if the given Python version matches a "Requires-Python" specifier. - Returns `True` if the version of python in use matches the requirement. - Returns `False` if the version of python in use does not matches the - requirement. + :param version_info: A 3-tuple of ints representing a Python + major-minor-micro version to check (e.g. `sys.version_info[:3]`). - Raises an InvalidSpecifier if `requires_python` have an invalid format. + :return: `True` if the given Python version satisfies the requirement. + Otherwise, return `False`. + + :raises InvalidSpecifier: If `requires_python` has an invalid format. """ if requires_python is None: # The package provides no information return True requires_python_specifier = specifiers.SpecifierSet(requires_python) - # We only use major.minor.micro - python_version = version.parse('.'.join(map(str, sys.version_info[:3]))) + python_version = version.parse('.'.join(map(str, version_info))) return python_version in requires_python_specifier def get_metadata(dist): # type: (Distribution) -> Message + """ + :raises NoneMetadataError: if the distribution reports `has_metadata()` + True but `get_metadata()` returns None. 
+ """ + metadata_name = 'METADATA' if (isinstance(dist, pkg_resources.DistInfoDistribution) and - dist.has_metadata('METADATA')): - metadata = dist.get_metadata('METADATA') + dist.has_metadata(metadata_name)): + metadata = dist.get_metadata(metadata_name) elif dist.has_metadata('PKG-INFO'): - metadata = dist.get_metadata('PKG-INFO') + metadata_name = 'PKG-INFO' + metadata = dist.get_metadata(metadata_name) else: logger.warning("No metadata found in %s", display_path(dist.location)) metadata = '' + if metadata is None: + raise NoneMetadataError(dist, metadata_name) + feed_parser = FeedParser() + # The following line errors out if with a "NoneType" TypeError if + # passed metadata=None. feed_parser.feed(metadata) return feed_parser.close() -def check_dist_requires_python(dist): +def get_requires_python(dist): + # type: (pkg_resources.Distribution) -> Optional[str] + """ + Return the "Requires-Python" metadata for a distribution, or None + if not present. + """ pkg_info_dict = get_metadata(dist) requires_python = pkg_info_dict.get('Requires-Python') - try: - if not check_requires_python(requires_python): - raise exceptions.UnsupportedPythonVersion( - "%s requires Python '%s' but the running Python is %s" % ( - dist.project_name, - requires_python, - '.'.join(map(str, sys.version_info[:3])),) - ) - except specifiers.InvalidSpecifier as e: - logger.warning( - "Package %s has an invalid Requires-Python entry %s - %s", - dist.project_name, requires_python, e, - ) - return + + if requires_python is not None: + # Convert to a str to satisfy the type checker, since requires_python + # can be a Header object. 
+ requires_python = str(requires_python) + + return requires_python def get_installer(dist): diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/parallel.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/parallel.py new file mode 100644 index 00000000..9fe1fe8b --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/parallel.py @@ -0,0 +1,107 @@ +"""Convenient parallelization of higher order functions. + +This module provides two helper functions, with appropriate fallbacks on +Python 2 and on systems lacking support for synchronization mechanisms: + +- map_multiprocess +- map_multithread + +These helpers work like Python 3's map, with two differences: + +- They don't guarantee the order of processing of + the elements of the iterable. +- The underlying process/thread pools chop the iterable into + a number of chunks, so that for very long iterables using + a large value for chunksize can make the job complete much faster + than using the default value of 1. +""" + +__all__ = ['map_multiprocess', 'map_multithread'] + +from contextlib import contextmanager +from multiprocessing import Pool as ProcessPool +from multiprocessing.dummy import Pool as ThreadPool + +from pip._vendor.requests.adapters import DEFAULT_POOLSIZE +from pip._vendor.six import PY2 +from pip._vendor.six.moves import map + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Callable, Iterable, Iterator, Union, TypeVar + from multiprocessing import pool + + Pool = Union[pool.Pool, pool.ThreadPool] + S = TypeVar('S') + T = TypeVar('T') + +# On platforms without sem_open, multiprocessing[.dummy] Pool +# cannot be created. +try: + import multiprocessing.synchronize # noqa +except ImportError: + LACK_SEM_OPEN = True +else: + LACK_SEM_OPEN = False + +# Incredibly large timeout to work around bpo-8296 on Python 2. 
+TIMEOUT = 2000000 + + +@contextmanager +def closing(pool): + # type: (Pool) -> Iterator[Pool] + """Return a context manager making sure the pool closes properly.""" + try: + yield pool + finally: + # For Pool.imap*, close and join are needed + # for the returned iterator to begin yielding. + pool.close() + pool.join() + pool.terminate() + + +def _map_fallback(func, iterable, chunksize=1): + # type: (Callable[[S], T], Iterable[S], int) -> Iterator[T] + """Make an iterator applying func to each element in iterable. + + This function is the sequential fallback either on Python 2 + where Pool.imap* doesn't react to KeyboardInterrupt + or when sem_open is unavailable. + """ + return map(func, iterable) + + +def _map_multiprocess(func, iterable, chunksize=1): + # type: (Callable[[S], T], Iterable[S], int) -> Iterator[T] + """Chop iterable into chunks and submit them to a process pool. + + For very long iterables using a large value for chunksize can make + the job complete much faster than using the default value of 1. + + Return an unordered iterator of the results. + """ + with closing(ProcessPool()) as pool: + return pool.imap_unordered(func, iterable, chunksize) + + +def _map_multithread(func, iterable, chunksize=1): + # type: (Callable[[S], T], Iterable[S], int) -> Iterator[T] + """Chop iterable into chunks and submit them to a thread pool. + + For very long iterables using a large value for chunksize can make + the job complete much faster than using the default value of 1. + + Return an unordered iterator of the results. 
+ """ + with closing(ThreadPool(DEFAULT_POOLSIZE)) as pool: + return pool.imap_unordered(func, iterable, chunksize) + + +if LACK_SEM_OPEN or PY2: + map_multiprocess = map_multithread = _map_fallback +else: + map_multiprocess = _map_multiprocess + map_multithread = _map_multithread diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/pkg_resources.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/pkg_resources.py new file mode 100644 index 00000000..0bc129ac --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/pkg_resources.py @@ -0,0 +1,44 @@ +from pip._vendor.pkg_resources import yield_lines +from pip._vendor.six import ensure_str + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Dict, Iterable, List + + +class DictMetadata(object): + """IMetadataProvider that reads metadata files from a dictionary. + """ + def __init__(self, metadata): + # type: (Dict[str, bytes]) -> None + self._metadata = metadata + + def has_metadata(self, name): + # type: (str) -> bool + return name in self._metadata + + def get_metadata(self, name): + # type: (str) -> str + try: + return ensure_str(self._metadata[name]) + except UnicodeDecodeError as e: + # Mirrors handling done in pkg_resources.NullProvider. 
+ e.reason += " in {} file".format(name) + raise + + def get_metadata_lines(self, name): + # type: (str) -> Iterable[str] + return yield_lines(self.get_metadata(name)) + + def metadata_isdir(self, name): + # type: (str) -> bool + return False + + def metadata_listdir(self, name): + # type: (str) -> List[str] + return [] + + def run_script(self, script_name, namespace): + # type: (str, str) -> None + pass diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/setuptools_build.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/setuptools_build.py index 03973e97..2a664b00 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/setuptools_build.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/setuptools_build.py @@ -1,8 +1,181 @@ +import sys + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional, Sequence + # Shim to wrap setup.py invocation with setuptools -SETUPTOOLS_SHIM = ( - "import setuptools, tokenize;__file__=%r;" +# +# We set sys.argv[0] to the path to the underlying setup.py file so +# setuptools / distutils don't take the path to the setup.py to be "-c" when +# invoking via the shim. This avoids e.g. the following manifest_maker +# warning: "warning: manifest_maker: standard file '-c' not found". +_SETUPTOOLS_SHIM = ( + "import sys, setuptools, tokenize; sys.argv[0] = {0!r}; __file__={0!r};" "f=getattr(tokenize, 'open', open)(__file__);" "code=f.read().replace('\\r\\n', '\\n');" "f.close();" "exec(compile(code, __file__, 'exec'))" ) + + +def make_setuptools_shim_args( + setup_py_path, # type: str + global_options=None, # type: Sequence[str] + no_user_config=False, # type: bool + unbuffered_output=False # type: bool +): + # type: (...) -> List[str] + """ + Get setuptools command arguments with shim wrapped setup file invocation. 
+ + :param setup_py_path: The path to setup.py to be wrapped. + :param global_options: Additional global options. + :param no_user_config: If True, disables personal user configuration. + :param unbuffered_output: If True, adds the unbuffered switch to the + argument list. + """ + args = [sys.executable] + if unbuffered_output: + args += ["-u"] + args += ["-c", _SETUPTOOLS_SHIM.format(setup_py_path)] + if global_options: + args += global_options + if no_user_config: + args += ["--no-user-cfg"] + return args + + +def make_setuptools_bdist_wheel_args( + setup_py_path, # type: str + global_options, # type: Sequence[str] + build_options, # type: Sequence[str] + destination_dir, # type: str +): + # type: (...) -> List[str] + # NOTE: Eventually, we'd want to also -S to the flags here, when we're + # isolating. Currently, it breaks Python in virtualenvs, because it + # relies on site.py to find parts of the standard library outside the + # virtualenv. + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + unbuffered_output=True + ) + args += ["bdist_wheel", "-d", destination_dir] + args += build_options + return args + + +def make_setuptools_clean_args( + setup_py_path, # type: str + global_options, # type: Sequence[str] +): + # type: (...) -> List[str] + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + unbuffered_output=True + ) + args += ["clean", "--all"] + return args + + +def make_setuptools_develop_args( + setup_py_path, # type: str + global_options, # type: Sequence[str] + install_options, # type: Sequence[str] + no_user_config, # type: bool + prefix, # type: Optional[str] + home, # type: Optional[str] + use_user_site, # type: bool +): + # type: (...) 
-> List[str] + assert not (use_user_site and prefix) + + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + no_user_config=no_user_config, + ) + + args += ["develop", "--no-deps"] + + args += install_options + + if prefix: + args += ["--prefix", prefix] + if home is not None: + args += ["--home", home] + + if use_user_site: + args += ["--user", "--prefix="] + + return args + + +def make_setuptools_egg_info_args( + setup_py_path, # type: str + egg_info_dir, # type: Optional[str] + no_user_config, # type: bool +): + # type: (...) -> List[str] + args = make_setuptools_shim_args( + setup_py_path, no_user_config=no_user_config + ) + + args += ["egg_info"] + + if egg_info_dir: + args += ["--egg-base", egg_info_dir] + + return args + + +def make_setuptools_install_args( + setup_py_path, # type: str + global_options, # type: Sequence[str] + install_options, # type: Sequence[str] + record_filename, # type: str + root, # type: Optional[str] + prefix, # type: Optional[str] + header_dir, # type: Optional[str] + home, # type: Optional[str] + use_user_site, # type: bool + no_user_config, # type: bool + pycompile # type: bool +): + # type: (...) 
-> List[str] + assert not (use_user_site and prefix) + assert not (use_user_site and root) + + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + no_user_config=no_user_config, + unbuffered_output=True + ) + args += ["install", "--record", record_filename] + args += ["--single-version-externally-managed"] + + if root is not None: + args += ["--root", root] + if prefix is not None: + args += ["--prefix", prefix] + if home is not None: + args += ["--home", home] + if use_user_site: + args += ["--user", "--prefix="] + + if pycompile: + args += ["--compile"] + else: + args += ["--no-compile"] + + if header_dir: + args += ["--install-headers", header_dir] + + args += install_options + + return args diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/subprocess.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/subprocess.py new file mode 100644 index 00000000..d398e68d --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/subprocess.py @@ -0,0 +1,280 @@ +from __future__ import absolute_import + +import logging +import os +import subprocess + +from pip._vendor.six.moves import shlex_quote + +from pip._internal.cli.spinners import SpinnerInterface, open_spinner +from pip._internal.exceptions import InstallationError +from pip._internal.utils.compat import console_to_str, str_to_display +from pip._internal.utils.logging import subprocess_logger +from pip._internal.utils.misc import HiddenText, path_to_display +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import ( + Any, Callable, Iterable, List, Mapping, Optional, Text, Union, + ) + + CommandArgs = List[Union[str, HiddenText]] + + +LOG_DIVIDER = '----------------------------------------' + + +def make_command(*args): + # type: (Union[str, HiddenText, CommandArgs]) -> CommandArgs + """ + Create a CommandArgs object. 
+ """ + command_args = [] # type: CommandArgs + for arg in args: + # Check for list instead of CommandArgs since CommandArgs is + # only known during type-checking. + if isinstance(arg, list): + command_args.extend(arg) + else: + # Otherwise, arg is str or HiddenText. + command_args.append(arg) + + return command_args + + +def format_command_args(args): + # type: (Union[List[str], CommandArgs]) -> str + """ + Format command arguments for display. + """ + # For HiddenText arguments, display the redacted form by calling str(). + # Also, we don't apply str() to arguments that aren't HiddenText since + # this can trigger a UnicodeDecodeError in Python 2 if the argument + # has type unicode and includes a non-ascii character. (The type + # checker doesn't ensure the annotations are correct in all cases.) + return ' '.join( + shlex_quote(str(arg)) if isinstance(arg, HiddenText) + else shlex_quote(arg) for arg in args + ) + + +def reveal_command_args(args): + # type: (Union[List[str], CommandArgs]) -> List[str] + """ + Return the arguments in their raw, unredacted form. + """ + return [ + arg.secret if isinstance(arg, HiddenText) else arg for arg in args + ] + + +def make_subprocess_output_error( + cmd_args, # type: Union[List[str], CommandArgs] + cwd, # type: Optional[str] + lines, # type: List[Text] + exit_status, # type: int +): + # type: (...) -> Text + """ + Create and return the error message to use to log a subprocess error + with command output. + + :param lines: A list of lines, each ending with a newline. + """ + command = format_command_args(cmd_args) + # Convert `command` and `cwd` to text (unicode in Python 2) so we can use + # them as arguments in the unicode format string below. This avoids + # "UnicodeDecodeError: 'ascii' codec can't decode byte ..." in Python 2 + # if either contains a non-ascii character. 
+ command_display = str_to_display(command, desc='command bytes') + cwd_display = path_to_display(cwd) + + # We know the joined output value ends in a newline. + output = ''.join(lines) + msg = ( + # Use a unicode string to avoid "UnicodeEncodeError: 'ascii' + # codec can't encode character ..." in Python 2 when a format + # argument (e.g. `output`) has a non-ascii character. + u'Command errored out with exit status {exit_status}:\n' + ' command: {command_display}\n' + ' cwd: {cwd_display}\n' + 'Complete output ({line_count} lines):\n{output}{divider}' + ).format( + exit_status=exit_status, + command_display=command_display, + cwd_display=cwd_display, + line_count=len(lines), + output=output, + divider=LOG_DIVIDER, + ) + return msg + + +def call_subprocess( + cmd, # type: Union[List[str], CommandArgs] + show_stdout=False, # type: bool + cwd=None, # type: Optional[str] + on_returncode='raise', # type: str + extra_ok_returncodes=None, # type: Optional[Iterable[int]] + command_desc=None, # type: Optional[str] + extra_environ=None, # type: Optional[Mapping[str, Any]] + unset_environ=None, # type: Optional[Iterable[str]] + spinner=None, # type: Optional[SpinnerInterface] + log_failed_cmd=True # type: Optional[bool] +): + # type: (...) -> Text + """ + Args: + show_stdout: if true, use INFO to log the subprocess's stderr and + stdout streams. Otherwise, use DEBUG. Defaults to False. + extra_ok_returncodes: an iterable of integer return codes that are + acceptable, in addition to 0. Defaults to None, which means []. + unset_environ: an iterable of environment variable names to unset + prior to calling subprocess.Popen(). + log_failed_cmd: if false, failed commands are not logged, only raised. + """ + if extra_ok_returncodes is None: + extra_ok_returncodes = [] + if unset_environ is None: + unset_environ = [] + # Most places in pip use show_stdout=False. 
What this means is-- + # + # - We connect the child's output (combined stderr and stdout) to a + # single pipe, which we read. + # - We log this output to stderr at DEBUG level as it is received. + # - If DEBUG logging isn't enabled (e.g. if --verbose logging wasn't + # requested), then we show a spinner so the user can still see the + # subprocess is in progress. + # - If the subprocess exits with an error, we log the output to stderr + # at ERROR level if it hasn't already been displayed to the console + # (e.g. if --verbose logging wasn't enabled). This way we don't log + # the output to the console twice. + # + # If show_stdout=True, then the above is still done, but with DEBUG + # replaced by INFO. + if show_stdout: + # Then log the subprocess output at INFO level. + log_subprocess = subprocess_logger.info + used_level = logging.INFO + else: + # Then log the subprocess output using DEBUG. This also ensures + # it will be logged to the log file (aka user_log), if enabled. + log_subprocess = subprocess_logger.debug + used_level = logging.DEBUG + + # Whether the subprocess will be visible in the console. + showing_subprocess = subprocess_logger.getEffectiveLevel() <= used_level + + # Only use the spinner if we're not showing the subprocess output + # and we have a spinner. + use_spinner = not showing_subprocess and spinner is not None + + if command_desc is None: + command_desc = format_command_args(cmd) + + log_subprocess("Running command %s", command_desc) + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + for name in unset_environ: + env.pop(name, None) + try: + proc = subprocess.Popen( + # Convert HiddenText objects to the underlying str. 
+ reveal_command_args(cmd), + stderr=subprocess.STDOUT, stdin=subprocess.PIPE, + stdout=subprocess.PIPE, cwd=cwd, env=env, + ) + assert proc.stdin + assert proc.stdout + proc.stdin.close() + except Exception as exc: + if log_failed_cmd: + subprocess_logger.critical( + "Error %s while executing command %s", exc, command_desc, + ) + raise + all_output = [] + while True: + # The "line" value is a unicode string in Python 2. + line = console_to_str(proc.stdout.readline()) + if not line: + break + line = line.rstrip() + all_output.append(line + '\n') + + # Show the line immediately. + log_subprocess(line) + # Update the spinner. + if use_spinner: + assert spinner + spinner.spin() + try: + proc.wait() + finally: + if proc.stdout: + proc.stdout.close() + proc_had_error = ( + proc.returncode and proc.returncode not in extra_ok_returncodes + ) + if use_spinner: + assert spinner + if proc_had_error: + spinner.finish("error") + else: + spinner.finish("done") + if proc_had_error: + if on_returncode == 'raise': + if not showing_subprocess and log_failed_cmd: + # Then the subprocess streams haven't been logged to the + # console yet. + msg = make_subprocess_output_error( + cmd_args=cmd, + cwd=cwd, + lines=all_output, + exit_status=proc.returncode, + ) + subprocess_logger.error(msg) + exc_msg = ( + 'Command errored out with exit status {}: {} ' + 'Check the logs for full command output.' + ).format(proc.returncode, command_desc) + raise InstallationError(exc_msg) + elif on_returncode == 'warn': + subprocess_logger.warning( + 'Command "%s" had error code %s in %s', + command_desc, + proc.returncode, + cwd, + ) + elif on_returncode == 'ignore': + pass + else: + raise ValueError('Invalid value: on_returncode={!r}'.format( + on_returncode)) + return ''.join(all_output) + + +def runner_with_spinner_message(message): + # type: (str) -> Callable[..., None] + """Provide a subprocess_runner that shows a spinner message. + + Intended for use with for pep517's Pep517HookCaller. 
Thus, the runner has + an API that matches what's expected by Pep517HookCaller.subprocess_runner. + """ + + def runner( + cmd, # type: List[str] + cwd=None, # type: Optional[str] + extra_environ=None # type: Optional[Mapping[str, Any]] + ): + # type: (...) -> None + with open_spinner(message) as spinner: + call_subprocess( + cmd, + cwd=cwd, + extra_environ=extra_environ, + spinner=spinner, + ) + + return runner diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/temp_dir.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/temp_dir.py index 2c81ad55..03aa8286 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/temp_dir.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/temp_dir.py @@ -5,12 +5,95 @@ import itertools import logging import os.path import tempfile +from contextlib import contextmanager + +from pip._vendor.contextlib2 import ExitStack +from pip._vendor.six import ensure_text + +from pip._internal.utils.misc import enum, rmtree +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Any, Dict, Iterator, Optional, TypeVar, Union + + _T = TypeVar('_T', bound='TempDirectory') -from pip._internal.utils.misc import rmtree logger = logging.getLogger(__name__) +# Kinds of temporary directories. Only needed for ones that are +# globally-managed. 
+tempdir_kinds = enum( + BUILD_ENV="build-env", + EPHEM_WHEEL_CACHE="ephem-wheel-cache", + REQ_BUILD="req-build", +) + + +_tempdir_manager = None # type: Optional[ExitStack] + + +@contextmanager +def global_tempdir_manager(): + # type: () -> Iterator[None] + global _tempdir_manager + with ExitStack() as stack: + old_tempdir_manager, _tempdir_manager = _tempdir_manager, stack + try: + yield + finally: + _tempdir_manager = old_tempdir_manager + + +class TempDirectoryTypeRegistry(object): + """Manages temp directory behavior + """ + + def __init__(self): + # type: () -> None + self._should_delete = {} # type: Dict[str, bool] + + def set_delete(self, kind, value): + # type: (str, bool) -> None + """Indicate whether a TempDirectory of the given kind should be + auto-deleted. + """ + self._should_delete[kind] = value + + def get_delete(self, kind): + # type: (str) -> bool + """Get configured auto-delete flag for a given TempDirectory type, + default True. + """ + return self._should_delete.get(kind, True) + + +_tempdir_registry = None # type: Optional[TempDirectoryTypeRegistry] + + +@contextmanager +def tempdir_registry(): + # type: () -> Iterator[TempDirectoryTypeRegistry] + """Provides a scoped global tempdir registry that can be used to dictate + whether directories should be deleted. + """ + global _tempdir_registry + old_tempdir_registry = _tempdir_registry + _tempdir_registry = TempDirectoryTypeRegistry() + try: + yield _tempdir_registry + finally: + _tempdir_registry = old_tempdir_registry + + +class _Default(object): + pass + + +_default = _Default() + + class TempDirectory(object): """Helper class that owns and cleans up a temporary directory. 
@@ -19,69 +102,101 @@ class TempDirectory(object): Attributes: path - Location to the created temporary directory or None + Location to the created temporary directory delete Whether the directory should be deleted when exiting (when used as a contextmanager) Methods: - create() - Creates a temporary directory and stores its path in the path - attribute. cleanup() - Deletes the temporary directory and sets path attribute to None + Deletes the temporary directory - When used as a context manager, a temporary directory is created on - entering the context and, if the delete attribute is True, on exiting the - context the created directory is deleted. + When used as a context manager, if the delete attribute is True, on + exiting the context the temporary directory is deleted. """ - def __init__(self, path=None, delete=None, kind="temp"): + def __init__( + self, + path=None, # type: Optional[str] + delete=_default, # type: Union[bool, None, _Default] + kind="temp", # type: str + globally_managed=False, # type: bool + ): super(TempDirectory, self).__init__() - if path is None and delete is None: - # If we were not given an explicit directory, and we were not given - # an explicit delete option, then we'll default to deleting. - delete = True + if delete is _default: + if path is not None: + # If we were given an explicit directory, resolve delete option + # now. + delete = False + else: + # Otherwise, we wait until cleanup and see what + # tempdir_registry says. 
+ delete = None + + if path is None: + path = self._create(kind) - self.path = path + self._path = path + self._deleted = False self.delete = delete self.kind = kind + if globally_managed: + assert _tempdir_manager is not None + _tempdir_manager.enter_context(self) + + @property + def path(self): + # type: () -> str + assert not self._deleted, ( + "Attempted to access deleted path: {}".format(self._path) + ) + return self._path + def __repr__(self): + # type: () -> str return "<{} {!r}>".format(self.__class__.__name__, self.path) def __enter__(self): - self.create() + # type: (_T) -> _T return self def __exit__(self, exc, value, tb): - if self.delete: + # type: (Any, Any, Any) -> None + if self.delete is not None: + delete = self.delete + elif _tempdir_registry: + delete = _tempdir_registry.get_delete(self.kind) + else: + delete = True + + if delete: self.cleanup() - def create(self): + def _create(self, kind): + # type: (str) -> str """Create a temporary directory and store its path in self.path """ - if self.path is not None: - logger.debug( - "Skipped creation of temporary directory: {}".format(self.path) - ) - return # We realpath here because some systems have their default tmpdir # symlinked to another directory. This tends to confuse build # scripts, so we canonicalize the path by traversing potential # symlinks here. 
- self.path = os.path.realpath( - tempfile.mkdtemp(prefix="pip-{}-".format(self.kind)) + path = os.path.realpath( + tempfile.mkdtemp(prefix="pip-{}-".format(kind)) ) - logger.debug("Created temporary directory: {}".format(self.path)) + logger.debug("Created temporary directory: %s", path) + return path def cleanup(self): + # type: () -> None """Remove the temporary directory created and reset state """ - if self.path is not None and os.path.exists(self.path): - rmtree(self.path) - self.path = None + self._deleted = True + if os.path.exists(self._path): + # Make sure to pass unicode on Python 2 to make the contents also + # use unicode, ensuring non-ASCII names and can be represented. + rmtree(ensure_text(self._path)) class AdjacentTempDirectory(TempDirectory): @@ -106,11 +221,13 @@ class AdjacentTempDirectory(TempDirectory): LEADING_CHARS = "-~.=%0123456789" def __init__(self, original, delete=None): - super(AdjacentTempDirectory, self).__init__(delete=delete) + # type: (str, Optional[bool]) -> None self.original = original.rstrip('/\\') + super(AdjacentTempDirectory, self).__init__(delete=delete) @classmethod def _generate_names(cls, name): + # type: (str) -> Iterator[str] """Generates a series of temporary names. The algorithm replaces the leading characters in the name @@ -133,7 +250,8 @@ class AdjacentTempDirectory(TempDirectory): if new_name != name: yield new_name - def create(self): + def _create(self, kind): + # type: (str) -> str root, name = os.path.split(self.original) for candidate in self._generate_names(name): path = os.path.join(root, candidate) @@ -144,12 +262,13 @@ class AdjacentTempDirectory(TempDirectory): if ex.errno != errno.EEXIST: raise else: - self.path = os.path.realpath(path) + path = os.path.realpath(path) break - - if not self.path: + else: # Final fallback on the default behavior. 
- self.path = os.path.realpath( - tempfile.mkdtemp(prefix="pip-{}-".format(self.kind)) + path = os.path.realpath( + tempfile.mkdtemp(prefix="pip-{}-".format(kind)) ) - logger.debug("Created temporary directory: {}".format(self.path)) + + logger.debug("Created temporary directory: %s", path) + return path diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/typing.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/typing.py index e085cdfe..8505a29b 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/typing.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/typing.py @@ -21,9 +21,18 @@ In pip, all static-typing related imports should be guarded as follows: from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import ... # noqa: F401 + from typing import ... Ref: https://github.com/python/mypy/issues/3216 """ MYPY_CHECK_RUNNING = False + + +if MYPY_CHECK_RUNNING: + from typing import cast +else: + # typing's cast() is needed at runtime, but we don't want to import typing. + # Thus, we use a dummy no-op version, which we tell mypy to ignore. 
+ def cast(type_, value): # type: ignore + return value diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/ui.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/ui.py deleted file mode 100644 index 433675d7..00000000 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/ui.py +++ /dev/null @@ -1,441 +0,0 @@ -from __future__ import absolute_import, division - -import contextlib -import itertools -import logging -import sys -import time -from signal import SIGINT, default_int_handler, signal - -from pip._vendor import six -from pip._vendor.progress.bar import ( - Bar, ChargingBar, FillingCirclesBar, FillingSquaresBar, IncrementalBar, - ShadyBar, -) -from pip._vendor.progress.helpers import HIDE_CURSOR, SHOW_CURSOR, WritelnMixin -from pip._vendor.progress.spinner import Spinner - -from pip._internal.utils.compat import WINDOWS -from pip._internal.utils.logging import get_indentation -from pip._internal.utils.misc import format_size -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import Any, Iterator, IO # noqa: F401 - -try: - from pip._vendor import colorama -# Lots of different errors can come from this, including SystemError and -# ImportError. -except Exception: - colorama = None - -logger = logging.getLogger(__name__) - - -def _select_progress_class(preferred, fallback): - encoding = getattr(preferred.file, "encoding", None) - - # If we don't know what encoding this file is in, then we'll just assume - # that it doesn't support unicode and use the ASCII bar. - if not encoding: - return fallback - - # Collect all of the possible characters we want to use with the preferred - # bar. 
- characters = [ - getattr(preferred, "empty_fill", six.text_type()), - getattr(preferred, "fill", six.text_type()), - ] - characters += list(getattr(preferred, "phases", [])) - - # Try to decode the characters we're using for the bar using the encoding - # of the given file, if this works then we'll assume that we can use the - # fancier bar and if not we'll fall back to the plaintext bar. - try: - six.text_type().join(characters).encode(encoding) - except UnicodeEncodeError: - return fallback - else: - return preferred - - -_BaseBar = _select_progress_class(IncrementalBar, Bar) # type: Any - - -class InterruptibleMixin(object): - """ - Helper to ensure that self.finish() gets called on keyboard interrupt. - - This allows downloads to be interrupted without leaving temporary state - (like hidden cursors) behind. - - This class is similar to the progress library's existing SigIntMixin - helper, but as of version 1.2, that helper has the following problems: - - 1. It calls sys.exit(). - 2. It discards the existing SIGINT handler completely. - 3. It leaves its own handler in place even after an uninterrupted finish, - which will have unexpected delayed effects if the user triggers an - unrelated keyboard interrupt some time after a progress-displaying - download has already completed, for example. - """ - - def __init__(self, *args, **kwargs): - """ - Save the original SIGINT handler for later. - """ - super(InterruptibleMixin, self).__init__(*args, **kwargs) - - self.original_handler = signal(SIGINT, self.handle_sigint) - - # If signal() returns None, the previous handler was not installed from - # Python, and we cannot restore it. This probably should not happen, - # but if it does, we must restore something sensible instead, at least. - # The least bad option should be Python's default SIGINT handler, which - # just raises KeyboardInterrupt. 
- if self.original_handler is None: - self.original_handler = default_int_handler - - def finish(self): - """ - Restore the original SIGINT handler after finishing. - - This should happen regardless of whether the progress display finishes - normally, or gets interrupted. - """ - super(InterruptibleMixin, self).finish() - signal(SIGINT, self.original_handler) - - def handle_sigint(self, signum, frame): - """ - Call self.finish() before delegating to the original SIGINT handler. - - This handler should only be in place while the progress display is - active. - """ - self.finish() - self.original_handler(signum, frame) - - -class SilentBar(Bar): - - def update(self): - pass - - -class BlueEmojiBar(IncrementalBar): - - suffix = "%(percent)d%%" - bar_prefix = " " - bar_suffix = " " - phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535") # type: Any - - -class DownloadProgressMixin(object): - - def __init__(self, *args, **kwargs): - super(DownloadProgressMixin, self).__init__(*args, **kwargs) - self.message = (" " * (get_indentation() + 2)) + self.message - - @property - def downloaded(self): - return format_size(self.index) - - @property - def download_speed(self): - # Avoid zero division errors... - if self.avg == 0.0: - return "..." - return format_size(1 / self.avg) + "/s" - - @property - def pretty_eta(self): - if self.eta: - return "eta %s" % self.eta_td - return "" - - def iter(self, it, n=1): - for x in it: - yield x - self.next(n) - self.finish() - - -class WindowsMixin(object): - - def __init__(self, *args, **kwargs): - # The Windows terminal does not support the hide/show cursor ANSI codes - # even with colorama. So we'll ensure that hide_cursor is False on - # Windows. - # This call neds to go before the super() call, so that hide_cursor - # is set in time. The base progress bar class writes the "hide cursor" - # code to the terminal in its init, so if we don't set this soon - # enough, we get a "hide" with no corresponding "show"... 
- if WINDOWS and self.hide_cursor: - self.hide_cursor = False - - super(WindowsMixin, self).__init__(*args, **kwargs) - - # Check if we are running on Windows and we have the colorama module, - # if we do then wrap our file with it. - if WINDOWS and colorama: - self.file = colorama.AnsiToWin32(self.file) - # The progress code expects to be able to call self.file.isatty() - # but the colorama.AnsiToWin32() object doesn't have that, so we'll - # add it. - self.file.isatty = lambda: self.file.wrapped.isatty() - # The progress code expects to be able to call self.file.flush() - # but the colorama.AnsiToWin32() object doesn't have that, so we'll - # add it. - self.file.flush = lambda: self.file.wrapped.flush() - - -class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, - DownloadProgressMixin): - - file = sys.stdout - message = "%(percent)d%%" - suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s" - -# NOTE: The "type: ignore" comments on the following classes are there to -# work around https://github.com/python/typing/issues/241 - - -class DefaultDownloadProgressBar(BaseDownloadProgressBar, - _BaseBar): - pass - - -class DownloadSilentBar(BaseDownloadProgressBar, SilentBar): # type: ignore - pass - - -class DownloadIncrementalBar(BaseDownloadProgressBar, # type: ignore - IncrementalBar): - pass - - -class DownloadChargingBar(BaseDownloadProgressBar, # type: ignore - ChargingBar): - pass - - -class DownloadShadyBar(BaseDownloadProgressBar, ShadyBar): # type: ignore - pass - - -class DownloadFillingSquaresBar(BaseDownloadProgressBar, # type: ignore - FillingSquaresBar): - pass - - -class DownloadFillingCirclesBar(BaseDownloadProgressBar, # type: ignore - FillingCirclesBar): - pass - - -class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, # type: ignore - BlueEmojiBar): - pass - - -class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin, - DownloadProgressMixin, WritelnMixin, Spinner): - - file = sys.stdout - suffix = "%(downloaded)s 
%(download_speed)s" - - def next_phase(self): - if not hasattr(self, "_phaser"): - self._phaser = itertools.cycle(self.phases) - return next(self._phaser) - - def update(self): - message = self.message % self - phase = self.next_phase() - suffix = self.suffix % self - line = ''.join([ - message, - " " if message else "", - phase, - " " if suffix else "", - suffix, - ]) - - self.writeln(line) - - -BAR_TYPES = { - "off": (DownloadSilentBar, DownloadSilentBar), - "on": (DefaultDownloadProgressBar, DownloadProgressSpinner), - "ascii": (DownloadIncrementalBar, DownloadProgressSpinner), - "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner), - "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner) -} - - -def DownloadProgressProvider(progress_bar, max=None): - if max is None or max == 0: - return BAR_TYPES[progress_bar][1]().iter - else: - return BAR_TYPES[progress_bar][0](max=max).iter - - -################################################################ -# Generic "something is happening" spinners -# -# We don't even try using progress.spinner.Spinner here because it's actually -# simpler to reimplement from scratch than to coerce their code into doing -# what we need. -################################################################ - -@contextlib.contextmanager -def hidden_cursor(file): - # type: (IO) -> Iterator[None] - # The Windows terminal does not support the hide/show cursor ANSI codes, - # even via colorama. So don't even try. - if WINDOWS: - yield - # We don't want to clutter the output with control characters if we're - # writing to a file, or if the user is running with --quiet. 
- # See https://github.com/pypa/pip/issues/3418 - elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO: - yield - else: - file.write(HIDE_CURSOR) - try: - yield - finally: - file.write(SHOW_CURSOR) - - -class RateLimiter(object): - def __init__(self, min_update_interval_seconds): - # type: (float) -> None - self._min_update_interval_seconds = min_update_interval_seconds - self._last_update = 0 # type: float - - def ready(self): - # type: () -> bool - now = time.time() - delta = now - self._last_update - return delta >= self._min_update_interval_seconds - - def reset(self): - # type: () -> None - self._last_update = time.time() - - -class SpinnerInterface(object): - def spin(self): - # type: () -> None - raise NotImplementedError() - - def finish(self, final_status): - # type: (str) -> None - raise NotImplementedError() - - -class InteractiveSpinner(SpinnerInterface): - def __init__(self, message, file=None, spin_chars="-\\|/", - # Empirically, 8 updates/second looks nice - min_update_interval_seconds=0.125): - self._message = message - if file is None: - file = sys.stdout - self._file = file - self._rate_limiter = RateLimiter(min_update_interval_seconds) - self._finished = False - - self._spin_cycle = itertools.cycle(spin_chars) - - self._file.write(" " * get_indentation() + self._message + " ... 
") - self._width = 0 - - def _write(self, status): - assert not self._finished - # Erase what we wrote before by backspacing to the beginning, writing - # spaces to overwrite the old text, and then backspacing again - backup = "\b" * self._width - self._file.write(backup + " " * self._width + backup) - # Now we have a blank slate to add our status - self._file.write(status) - self._width = len(status) - self._file.flush() - self._rate_limiter.reset() - - def spin(self): - # type: () -> None - if self._finished: - return - if not self._rate_limiter.ready(): - return - self._write(next(self._spin_cycle)) - - def finish(self, final_status): - # type: (str) -> None - if self._finished: - return - self._write(final_status) - self._file.write("\n") - self._file.flush() - self._finished = True - - -# Used for dumb terminals, non-interactive installs (no tty), etc. -# We still print updates occasionally (once every 60 seconds by default) to -# act as a keep-alive for systems like Travis-CI that take lack-of-output as -# an indication that a task has frozen. 
-class NonInteractiveSpinner(SpinnerInterface): - def __init__(self, message, min_update_interval_seconds=60): - # type: (str, float) -> None - self._message = message - self._finished = False - self._rate_limiter = RateLimiter(min_update_interval_seconds) - self._update("started") - - def _update(self, status): - assert not self._finished - self._rate_limiter.reset() - logger.info("%s: %s", self._message, status) - - def spin(self): - # type: () -> None - if self._finished: - return - if not self._rate_limiter.ready(): - return - self._update("still running...") - - def finish(self, final_status): - # type: (str) -> None - if self._finished: - return - self._update("finished with status '%s'" % (final_status,)) - self._finished = True - - -@contextlib.contextmanager -def open_spinner(message): - # type: (str) -> Iterator[SpinnerInterface] - # Interactive spinner goes directly to sys.stdout rather than being routed - # through the logging system, but it acts like it has level INFO, - # i.e. it's only displayed if we're at level INFO or better. - # Non-interactive spinner goes through the logging system, so it is always - # in sync with logging configuration. - if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO: - spinner = InteractiveSpinner(message) # type: SpinnerInterface - else: - spinner = NonInteractiveSpinner(message) - try: - with hidden_cursor(sys.stdout): - yield spinner - except KeyboardInterrupt: - spinner.finish("canceled") - raise - except Exception: - spinner.finish("error") - raise - else: - spinner.finish("done") diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/unpacking.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/unpacking.py new file mode 100644 index 00000000..620f31eb --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/unpacking.py @@ -0,0 +1,281 @@ +"""Utilities related archives. 
+""" + +from __future__ import absolute_import + +import logging +import os +import shutil +import stat +import tarfile +import zipfile + +from pip._internal.exceptions import InstallationError +from pip._internal.utils.filetypes import ( + BZ2_EXTENSIONS, + TAR_EXTENSIONS, + XZ_EXTENSIONS, + ZIP_EXTENSIONS, +) +from pip._internal.utils.misc import ensure_dir +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Iterable, List, Optional, Text, Union + from zipfile import ZipInfo + + +logger = logging.getLogger(__name__) + + +SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS + +try: + import bz2 # noqa + SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS +except ImportError: + logger.debug('bz2 module is not available') + +try: + # Only for Python 3.3+ + import lzma # noqa + SUPPORTED_EXTENSIONS += XZ_EXTENSIONS +except ImportError: + logger.debug('lzma module is not available') + + +def current_umask(): + # type: () -> int + """Get the current umask which involves having to set it temporarily.""" + mask = os.umask(0) + os.umask(mask) + return mask + + +def split_leading_dir(path): + # type: (Union[str, Text]) -> List[Union[str, Text]] + path = path.lstrip('/').lstrip('\\') + if ( + '/' in path and ( + ('\\' in path and path.find('/') < path.find('\\')) or + '\\' not in path + ) + ): + return path.split('/', 1) + elif '\\' in path: + return path.split('\\', 1) + else: + return [path, ''] + + +def has_leading_dir(paths): + # type: (Iterable[Union[str, Text]]) -> bool + """Returns true if all the paths have the same leading path name + (i.e., everything is in one subdirectory in an archive)""" + common_prefix = None + for path in paths: + prefix, rest = split_leading_dir(path) + if not prefix: + return False + elif common_prefix is None: + common_prefix = prefix + elif prefix != common_prefix: + return False + return True + + +def is_within_directory(directory, target): + # type: ((Union[str, Text]), (Union[str, Text])) -> 
bool + """ + Return true if the absolute path of target is within the directory + """ + abs_directory = os.path.abspath(directory) + abs_target = os.path.abspath(target) + + prefix = os.path.commonprefix([abs_directory, abs_target]) + return prefix == abs_directory + + +def set_extracted_file_to_default_mode_plus_executable(path): + # type: (Union[str, Text]) -> None + """ + Make file present at path have execute for user/group/world + (chmod +x) is no-op on windows per python docs + """ + os.chmod(path, (0o777 & ~current_umask() | 0o111)) + + +def zip_item_is_executable(info): + # type: (ZipInfo) -> bool + mode = info.external_attr >> 16 + # if mode and regular file and any execute permissions for + # user/group/world? + return bool(mode and stat.S_ISREG(mode) and mode & 0o111) + + +def unzip_file(filename, location, flatten=True): + # type: (str, str, bool) -> None + """ + Unzip the file (with path `filename`) to the destination `location`. All + files are written based on system defaults and umask (i.e. permissions are + not preserved), except that regular file members with any execute + permissions (user, group, or world) have "chmod +x" applied after being + written. Note that for windows, any execute changes using os.chmod are + no-ops per the python docs. 
+ """ + ensure_dir(location) + zipfp = open(filename, 'rb') + try: + zip = zipfile.ZipFile(zipfp, allowZip64=True) + leading = has_leading_dir(zip.namelist()) and flatten + for info in zip.infolist(): + name = info.filename + fn = name + if leading: + fn = split_leading_dir(name)[1] + fn = os.path.join(location, fn) + dir = os.path.dirname(fn) + if not is_within_directory(location, fn): + message = ( + 'The zip file ({}) has a file ({}) trying to install ' + 'outside target directory ({})' + ) + raise InstallationError(message.format(filename, fn, location)) + if fn.endswith('/') or fn.endswith('\\'): + # A directory + ensure_dir(fn) + else: + ensure_dir(dir) + # Don't use read() to avoid allocating an arbitrarily large + # chunk of memory for the file's content + fp = zip.open(name) + try: + with open(fn, 'wb') as destfp: + shutil.copyfileobj(fp, destfp) + finally: + fp.close() + if zip_item_is_executable(info): + set_extracted_file_to_default_mode_plus_executable(fn) + finally: + zipfp.close() + + +def untar_file(filename, location): + # type: (str, str) -> None + """ + Untar the file (with path `filename`) to the destination `location`. + All files are written based on system defaults and umask (i.e. permissions + are not preserved), except that regular file members with any execute + permissions (user, group, or world) have "chmod +x" applied after being + written. Note that for windows, any execute changes using os.chmod are + no-ops per the python docs. 
+ """ + ensure_dir(location) + if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'): + mode = 'r:gz' + elif filename.lower().endswith(BZ2_EXTENSIONS): + mode = 'r:bz2' + elif filename.lower().endswith(XZ_EXTENSIONS): + mode = 'r:xz' + elif filename.lower().endswith('.tar'): + mode = 'r' + else: + logger.warning( + 'Cannot determine compression type for file %s', filename, + ) + mode = 'r:*' + tar = tarfile.open(filename, mode) + try: + leading = has_leading_dir([ + member.name for member in tar.getmembers() + ]) + for member in tar.getmembers(): + fn = member.name + if leading: + # https://github.com/python/mypy/issues/1174 + fn = split_leading_dir(fn)[1] # type: ignore + path = os.path.join(location, fn) + if not is_within_directory(location, path): + message = ( + 'The tar file ({}) has a file ({}) trying to install ' + 'outside target directory ({})' + ) + raise InstallationError( + message.format(filename, path, location) + ) + if member.isdir(): + ensure_dir(path) + elif member.issym(): + try: + # https://github.com/python/typeshed/issues/2673 + tar._extract_member(member, path) # type: ignore + except Exception as exc: + # Some corrupt tar files seem to produce this + # (specifically bad symlinks) + logger.warning( + 'In the tar file %s the member %s is invalid: %s', + filename, member.name, exc, + ) + continue + else: + try: + fp = tar.extractfile(member) + except (KeyError, AttributeError) as exc: + # Some corrupt tar files seem to produce this + # (specifically bad symlinks) + logger.warning( + 'In the tar file %s the member %s is invalid: %s', + filename, member.name, exc, + ) + continue + ensure_dir(os.path.dirname(path)) + assert fp is not None + with open(path, 'wb') as destfp: + shutil.copyfileobj(fp, destfp) + fp.close() + # Update the timestamp (useful for cython compiled files) + # https://github.com/python/typeshed/issues/2673 + tar.utime(member, path) # type: ignore + # member have any execute permissions for user/group/world? 
+ if member.mode & 0o111: + set_extracted_file_to_default_mode_plus_executable(path) + finally: + tar.close() + + +def unpack_file( + filename, # type: str + location, # type: str + content_type=None, # type: Optional[str] +): + # type: (...) -> None + filename = os.path.realpath(filename) + if ( + content_type == 'application/zip' or + filename.lower().endswith(ZIP_EXTENSIONS) or + zipfile.is_zipfile(filename) + ): + unzip_file( + filename, + location, + flatten=not filename.endswith('.whl') + ) + elif ( + content_type == 'application/x-gzip' or + tarfile.is_tarfile(filename) or + filename.lower().endswith( + TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS + ) + ): + untar_file(filename, location) + else: + # FIXME: handle? + # FIXME: magic signatures? + logger.critical( + 'Cannot unpack file %s (downloaded from %s, content-type: %s); ' + 'cannot detect archive format', + filename, location, content_type, + ) + raise InstallationError( + 'Cannot determine archive format of {}'.format(location) + ) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/urls.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/urls.py new file mode 100644 index 00000000..f37bc8f9 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/urls.py @@ -0,0 +1,55 @@ +import os +import sys + +from pip._vendor.six.moves.urllib import parse as urllib_parse +from pip._vendor.six.moves.urllib import request as urllib_request + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional, Text, Union + + +def get_url_scheme(url): + # type: (Union[str, Text]) -> Optional[Text] + if ':' not in url: + return None + return url.split(':', 1)[0].lower() + + +def path_to_url(path): + # type: (Union[str, Text]) -> str + """ + Convert a path to a file: URL. The path will be made absolute and have + quoted path parts. 
+ """ + path = os.path.normpath(os.path.abspath(path)) + url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path)) + return url + + +def url_to_path(url): + # type: (str) -> str + """ + Convert a file: URL to a path. + """ + assert url.startswith('file:'), ( + "You can only turn file: urls into filenames (not {url!r})" + .format(**locals())) + + _, netloc, path, _, _ = urllib_parse.urlsplit(url) + + if not netloc or netloc == 'localhost': + # According to RFC 8089, same as empty authority. + netloc = '' + elif sys.platform == 'win32': + # If we have a UNC path, prepend UNC share notation. + netloc = '\\\\' + netloc + else: + raise ValueError( + 'non-local file URIs are not supported on this platform: {url!r}' + .format(**locals()) + ) + + path = urllib_request.url2pathname(netloc + path) + return path diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/virtualenv.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/virtualenv.py new file mode 100644 index 00000000..596a69a7 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/virtualenv.py @@ -0,0 +1,116 @@ +from __future__ import absolute_import + +import logging +import os +import re +import site +import sys + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional + +logger = logging.getLogger(__name__) +_INCLUDE_SYSTEM_SITE_PACKAGES_REGEX = re.compile( + r"include-system-site-packages\s*=\s*(?P<value>true|false)" +) + + +def _running_under_venv(): + # type: () -> bool + """Checks if sys.base_prefix and sys.prefix match. + + This handles PEP 405 compliant virtual environments. + """ + return sys.prefix != getattr(sys, "base_prefix", sys.prefix) + + +def _running_under_regular_virtualenv(): + # type: () -> bool + """Checks if sys.real_prefix is set. + + This handles virtual environments created with pypa's virtualenv. 
+ """ + # pypa/virtualenv case + return hasattr(sys, 'real_prefix') + + +def running_under_virtualenv(): + # type: () -> bool + """Return True if we're running inside a virtualenv, False otherwise. + """ + return _running_under_venv() or _running_under_regular_virtualenv() + + +def _get_pyvenv_cfg_lines(): + # type: () -> Optional[List[str]] + """Reads {sys.prefix}/pyvenv.cfg and returns its contents as list of lines + + Returns None, if it could not read/access the file. + """ + pyvenv_cfg_file = os.path.join(sys.prefix, 'pyvenv.cfg') + try: + with open(pyvenv_cfg_file) as f: + return f.read().splitlines() # avoids trailing newlines + except IOError: + return None + + +def _no_global_under_venv(): + # type: () -> bool + """Check `{sys.prefix}/pyvenv.cfg` for system site-packages inclusion + + PEP 405 specifies that when system site-packages are not supposed to be + visible from a virtual environment, `pyvenv.cfg` must contain the following + line: + + include-system-site-packages = false + + Additionally, log a warning if accessing the file fails. + """ + cfg_lines = _get_pyvenv_cfg_lines() + if cfg_lines is None: + # We're not in a "sane" venv, so assume there is no system + # site-packages access (since that's PEP 405's default state). + logger.warning( + "Could not access 'pyvenv.cfg' despite a virtual environment " + "being active. Assuming global site-packages is not accessible " + "in this environment." + ) + return True + + for line in cfg_lines: + match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line) + if match is not None and match.group('value') == 'false': + return True + return False + + +def _no_global_under_regular_virtualenv(): + # type: () -> bool + """Check if "no-global-site-packages.txt" exists beside site.py + + This mirrors logic in pypa/virtualenv for determining whether system + site-packages are visible in the virtual environment. 
+ """ + site_mod_dir = os.path.dirname(os.path.abspath(site.__file__)) + no_global_site_packages_file = os.path.join( + site_mod_dir, 'no-global-site-packages.txt', + ) + return os.path.exists(no_global_site_packages_file) + + +def virtualenv_no_global(): + # type: () -> bool + """Returns a boolean, whether running in venv with no system site-packages. + """ + # PEP 405 compliance needs to be checked first since virtualenv >=20 would + # return True for both checks, but is only able to use the PEP 405 config. + if _running_under_venv(): + return _no_global_under_venv() + + if _running_under_regular_virtualenv(): + return _no_global_under_regular_virtualenv() + + return False diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/wheel.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/wheel.py new file mode 100644 index 00000000..9ce371c7 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/wheel.py @@ -0,0 +1,225 @@ +"""Support functions for working with wheel files. 
+""" + +from __future__ import absolute_import + +import logging +from email.parser import Parser +from zipfile import ZipFile + +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.pkg_resources import DistInfoDistribution +from pip._vendor.six import PY2, ensure_str + +from pip._internal.exceptions import UnsupportedWheel +from pip._internal.utils.pkg_resources import DictMetadata +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from email.message import Message + from typing import Dict, Tuple + + from pip._vendor.pkg_resources import Distribution + +if PY2: + from zipfile import BadZipfile as BadZipFile +else: + from zipfile import BadZipFile + + +VERSION_COMPATIBLE = (1, 0) + + +logger = logging.getLogger(__name__) + + +class WheelMetadata(DictMetadata): + """Metadata provider that maps metadata decoding exceptions to our + internal exception type. + """ + def __init__(self, metadata, wheel_name): + # type: (Dict[str, bytes], str) -> None + super(WheelMetadata, self).__init__(metadata) + self._wheel_name = wheel_name + + def get_metadata(self, name): + # type: (str) -> str + try: + return super(WheelMetadata, self).get_metadata(name) + except UnicodeDecodeError as e: + # Augment the default error with the origin of the file. + raise UnsupportedWheel( + "Error decoding metadata for {}: {}".format( + self._wheel_name, e + ) + ) + + +def pkg_resources_distribution_for_wheel(wheel_zip, name, location): + # type: (ZipFile, str, str) -> Distribution + """Get a pkg_resources distribution given a wheel. + + :raises UnsupportedWheel: on any errors + """ + info_dir, _ = parse_wheel(wheel_zip, name) + + metadata_files = [ + p for p in wheel_zip.namelist() if p.startswith("{}/".format(info_dir)) + ] + + metadata_text = {} # type: Dict[str, bytes] + for path in metadata_files: + # If a flag is set, namelist entries may be unicode in Python 2. 
+ # We coerce them to native str type to match the types used in the rest + # of the code. This cannot fail because unicode can always be encoded + # with UTF-8. + full_path = ensure_str(path) + _, metadata_name = full_path.split("/", 1) + + try: + metadata_text[metadata_name] = read_wheel_metadata_file( + wheel_zip, full_path + ) + except UnsupportedWheel as e: + raise UnsupportedWheel( + "{} has an invalid wheel, {}".format(name, str(e)) + ) + + metadata = WheelMetadata(metadata_text, location) + + return DistInfoDistribution( + location=location, metadata=metadata, project_name=name + ) + + +def parse_wheel(wheel_zip, name): + # type: (ZipFile, str) -> Tuple[str, Message] + """Extract information from the provided wheel, ensuring it meets basic + standards. + + Returns the name of the .dist-info directory and the parsed WHEEL metadata. + """ + try: + info_dir = wheel_dist_info_dir(wheel_zip, name) + metadata = wheel_metadata(wheel_zip, info_dir) + version = wheel_version(metadata) + except UnsupportedWheel as e: + raise UnsupportedWheel( + "{} has an invalid wheel, {}".format(name, str(e)) + ) + + check_compatibility(version, name) + + return info_dir, metadata + + +def wheel_dist_info_dir(source, name): + # type: (ZipFile, str) -> str + """Returns the name of the contained .dist-info directory. + + Raises AssertionError or UnsupportedWheel if not found, >1 found, or + it doesn't match the provided name. 
+ """ + # Zip file path separators must be / + subdirs = set(p.split("/", 1)[0] for p in source.namelist()) + + info_dirs = [s for s in subdirs if s.endswith('.dist-info')] + + if not info_dirs: + raise UnsupportedWheel(".dist-info directory not found") + + if len(info_dirs) > 1: + raise UnsupportedWheel( + "multiple .dist-info directories found: {}".format( + ", ".join(info_dirs) + ) + ) + + info_dir = info_dirs[0] + + info_dir_name = canonicalize_name(info_dir) + canonical_name = canonicalize_name(name) + if not info_dir_name.startswith(canonical_name): + raise UnsupportedWheel( + ".dist-info directory {!r} does not start with {!r}".format( + info_dir, canonical_name + ) + ) + + # Zip file paths can be unicode or str depending on the zip entry flags, + # so normalize it. + return ensure_str(info_dir) + + +def read_wheel_metadata_file(source, path): + # type: (ZipFile, str) -> bytes + try: + return source.read(path) + # BadZipFile for general corruption, KeyError for missing entry, + # and RuntimeError for password-protected files + except (BadZipFile, KeyError, RuntimeError) as e: + raise UnsupportedWheel( + "could not read {!r} file: {!r}".format(path, e) + ) + + +def wheel_metadata(source, dist_info_dir): + # type: (ZipFile, str) -> Message + """Return the WHEEL metadata of an extracted wheel, if possible. + Otherwise, raise UnsupportedWheel. + """ + path = "{}/WHEEL".format(dist_info_dir) + # Zip file path separators must be / + wheel_contents = read_wheel_metadata_file(source, path) + + try: + wheel_text = ensure_str(wheel_contents) + except UnicodeDecodeError as e: + raise UnsupportedWheel("error decoding {!r}: {!r}".format(path, e)) + + # FeedParser (used by Parser) does not raise any exceptions. The returned + # message may have .defects populated, but for backwards-compatibility we + # currently ignore them. + return Parser().parsestr(wheel_text) + + +def wheel_version(wheel_data): + # type: (Message) -> Tuple[int, ...] 
+ """Given WHEEL metadata, return the parsed Wheel-Version. + Otherwise, raise UnsupportedWheel. + """ + version_text = wheel_data["Wheel-Version"] + if version_text is None: + raise UnsupportedWheel("WHEEL is missing Wheel-Version") + + version = version_text.strip() + + try: + return tuple(map(int, version.split('.'))) + except ValueError: + raise UnsupportedWheel("invalid Wheel-Version: {!r}".format(version)) + + +def check_compatibility(version, name): + # type: (Tuple[int, ...], str) -> None + """Raises errors or warns if called with an incompatible Wheel-Version. + + pip should refuse to install a Wheel-Version that's a major series + ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when + installing a version only minor version ahead (e.g 1.2 > 1.1). + + version: a 2-tuple representing a Wheel-Version (Major, Minor) + name: name of wheel or package to raise exception about + + :raises UnsupportedWheel: when an incompatible Wheel-Version is given + """ + if version[0] > VERSION_COMPATIBLE[0]: + raise UnsupportedWheel( + "{}'s Wheel-Version ({}) is not compatible with this version " + "of pip".format(name, '.'.join(map(str, version))) + ) + elif version > VERSION_COMPATIBLE: + logger.warning( + 'Installing from a newer Wheel-Version (%s)', + '.'.join(map(str, version)), + ) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/__init__.py index 9cba7646..2a4eb137 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/__init__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/__init__.py @@ -1,534 +1,15 @@ -"""Handles all VCS (version control) support""" -from __future__ import absolute_import - -import errno -import logging -import os -import shutil -import sys - -from pip._vendor.six.moves.urllib import parse as urllib_parse - -from pip._internal.exceptions import BadCommand -from 
pip._internal.utils.misc import ( - display_path, backup_dir, call_subprocess, rmtree, ask_path_exists, +# Expose a limited set of classes and functions so callers outside of +# the vcs package don't need to import deeper than `pip._internal.vcs`. +# (The test directory and imports protected by MYPY_CHECK_RUNNING may +# still need to import from a vcs sub-package.) +# Import all vcs modules to register each VCS in the VcsSupport object. +import pip._internal.vcs.bazaar +import pip._internal.vcs.git +import pip._internal.vcs.mercurial +import pip._internal.vcs.subversion # noqa: F401 +from pip._internal.vcs.versioncontrol import ( # noqa: F401 + RemoteNotFoundError, + is_url, + make_vcs_requirement_url, + vcs, ) -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 - Any, Dict, Iterable, List, Mapping, Optional, Text, Tuple, Type - ) - from pip._internal.utils.ui import SpinnerInterface # noqa: F401 - - AuthInfo = Tuple[Optional[str], Optional[str]] - -__all__ = ['vcs'] - - -logger = logging.getLogger(__name__) - - -class RemoteNotFoundError(Exception): - pass - - -class RevOptions(object): - - """ - Encapsulates a VCS-specific revision to install, along with any VCS - install options. - - Instances of this class should be treated as if immutable. - """ - - def __init__(self, vcs, rev=None, extra_args=None): - # type: (VersionControl, Optional[str], Optional[List[str]]) -> None - """ - Args: - vcs: a VersionControl object. - rev: the name of the revision to install. - extra_args: a list of extra options. 
- """ - if extra_args is None: - extra_args = [] - - self.extra_args = extra_args - self.rev = rev - self.vcs = vcs - - def __repr__(self): - return '<RevOptions {}: rev={!r}>'.format(self.vcs.name, self.rev) - - @property - def arg_rev(self): - # type: () -> Optional[str] - if self.rev is None: - return self.vcs.default_arg_rev - - return self.rev - - def to_args(self): - # type: () -> List[str] - """ - Return the VCS-specific command arguments. - """ - args = [] # type: List[str] - rev = self.arg_rev - if rev is not None: - args += self.vcs.get_base_rev_args(rev) - args += self.extra_args - - return args - - def to_display(self): - # type: () -> str - if not self.rev: - return '' - - return ' (to revision {})'.format(self.rev) - - def make_new(self, rev): - # type: (str) -> RevOptions - """ - Make a copy of the current instance, but with a new rev. - - Args: - rev: the name of the revision for the new object. - """ - return self.vcs.make_rev_options(rev, extra_args=self.extra_args) - - -class VcsSupport(object): - _registry = {} # type: Dict[str, Type[VersionControl]] - schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn'] - - def __init__(self): - # type: () -> None - # Register more schemes with urlparse for various version control - # systems - urllib_parse.uses_netloc.extend(self.schemes) - # Python >= 2.7.4, 3.3 doesn't have uses_fragment - if getattr(urllib_parse, 'uses_fragment', None): - urllib_parse.uses_fragment.extend(self.schemes) - super(VcsSupport, self).__init__() - - def __iter__(self): - return self._registry.__iter__() - - @property - def backends(self): - # type: () -> List[Type[VersionControl]] - return list(self._registry.values()) - - @property - def dirnames(self): - # type: () -> List[str] - return [backend.dirname for backend in self.backends] - - @property - def all_schemes(self): - # type: () -> List[str] - schemes = [] # type: List[str] - for backend in self.backends: - schemes.extend(backend.schemes) - return schemes - - def 
register(self, cls): - # type: (Type[VersionControl]) -> None - if not hasattr(cls, 'name'): - logger.warning('Cannot register VCS %s', cls.__name__) - return - if cls.name not in self._registry: - self._registry[cls.name] = cls - logger.debug('Registered VCS backend: %s', cls.name) - - def unregister(self, cls=None, name=None): - # type: (Optional[Type[VersionControl]], Optional[str]) -> None - if name in self._registry: - del self._registry[name] - elif cls in self._registry.values(): - del self._registry[cls.name] - else: - logger.warning('Cannot unregister because no class or name given') - - def get_backend_type(self, location): - # type: (str) -> Optional[Type[VersionControl]] - """ - Return the type of the version control backend if found at given - location, e.g. vcs.get_backend_type('/path/to/vcs/checkout') - """ - for vc_type in self._registry.values(): - if vc_type.controls_location(location): - logger.debug('Determine that %s uses VCS: %s', - location, vc_type.name) - return vc_type - return None - - def get_backend(self, name): - # type: (str) -> Optional[Type[VersionControl]] - name = name.lower() - if name in self._registry: - return self._registry[name] - return None - - -vcs = VcsSupport() - - -class VersionControl(object): - name = '' - dirname = '' - repo_name = '' - # List of supported schemes for this Version Control - schemes = () # type: Tuple[str, ...] - # Iterable of environment variable names to pass to call_subprocess(). - unset_environ = () # type: Tuple[str, ...] - default_arg_rev = None # type: Optional[str] - - def __init__(self, url=None, *args, **kwargs): - self.url = url - super(VersionControl, self).__init__(*args, **kwargs) - - def get_base_rev_args(self, rev): - """ - Return the base revision arguments for a vcs command. - - Args: - rev: the name of a revision to install. Cannot be None. 
- """ - raise NotImplementedError - - def make_rev_options(self, rev=None, extra_args=None): - # type: (Optional[str], Optional[List[str]]) -> RevOptions - """ - Return a RevOptions object. - - Args: - rev: the name of a revision to install. - extra_args: a list of extra options. - """ - return RevOptions(self, rev, extra_args=extra_args) - - @classmethod - def _is_local_repository(cls, repo): - # type: (str) -> bool - """ - posix absolute paths start with os.path.sep, - win32 ones start with drive (like c:\\folder) - """ - drive, tail = os.path.splitdrive(repo) - return repo.startswith(os.path.sep) or bool(drive) - - def export(self, location): - """ - Export the repository at the url to the destination location - i.e. only download the files, without vcs informations - """ - raise NotImplementedError - - def get_netloc_and_auth(self, netloc, scheme): - """ - Parse the repository URL's netloc, and return the new netloc to use - along with auth information. - - Args: - netloc: the original repository URL netloc. - scheme: the repository URL's scheme without the vcs prefix. - - This is mainly for the Subversion class to override, so that auth - information can be provided via the --username and --password options - instead of through the URL. For other subclasses like Git without - such an option, auth information must stay in the URL. - - Returns: (netloc, (username, password)). - """ - return netloc, (None, None) - - def get_url_rev_and_auth(self, url): - # type: (str) -> Tuple[str, Optional[str], AuthInfo] - """ - Parse the repository URL to use, and return the URL, revision, - and auth info to use. - - Returns: (url, rev, (username, password)). - """ - scheme, netloc, path, query, frag = urllib_parse.urlsplit(url) - if '+' not in scheme: - raise ValueError( - "Sorry, {!r} is a malformed VCS url. " - "The format is <vcs>+<protocol>://<url>, " - "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url) - ) - # Remove the vcs prefix. 
- scheme = scheme.split('+', 1)[1] - netloc, user_pass = self.get_netloc_and_auth(netloc, scheme) - rev = None - if '@' in path: - path, rev = path.rsplit('@', 1) - url = urllib_parse.urlunsplit((scheme, netloc, path, query, '')) - return url, rev, user_pass - - def make_rev_args(self, username, password): - """ - Return the RevOptions "extra arguments" to use in obtain(). - """ - return [] - - def get_url_rev_options(self, url): - # type: (str) -> Tuple[str, RevOptions] - """ - Return the URL and RevOptions object to use in obtain() and in - some cases export(), as a tuple (url, rev_options). - """ - url, rev, user_pass = self.get_url_rev_and_auth(url) - username, password = user_pass - extra_args = self.make_rev_args(username, password) - rev_options = self.make_rev_options(rev, extra_args=extra_args) - - return url, rev_options - - def normalize_url(self, url): - # type: (str) -> str - """ - Normalize a URL for comparison by unquoting it and removing any - trailing slash. - """ - return urllib_parse.unquote(url).rstrip('/') - - def compare_urls(self, url1, url2): - # type: (str, str) -> bool - """ - Compare two repo URLs for identity, ignoring incidental differences. - """ - return (self.normalize_url(url1) == self.normalize_url(url2)) - - def fetch_new(self, dest, url, rev_options): - """ - Fetch a revision from a repository, in the case that this is the - first fetch from the repository. - - Args: - dest: the directory to fetch the repository to. - rev_options: a RevOptions object. - """ - raise NotImplementedError - - def switch(self, dest, url, rev_options): - """ - Switch the repo at ``dest`` to point to ``URL``. - - Args: - rev_options: a RevOptions object. - """ - raise NotImplementedError - - def update(self, dest, url, rev_options): - """ - Update an already-existing repo to the given ``rev_options``. - - Args: - rev_options: a RevOptions object. 
- """ - raise NotImplementedError - - def is_commit_id_equal(self, dest, name): - """ - Return whether the id of the current commit equals the given name. - - Args: - dest: the repository directory. - name: a string name. - """ - raise NotImplementedError - - def obtain(self, dest): - # type: (str) -> None - """ - Install or update in editable mode the package represented by this - VersionControl object. - - Args: - dest: the repository directory in which to install or update. - """ - url, rev_options = self.get_url_rev_options(self.url) - - if not os.path.exists(dest): - self.fetch_new(dest, url, rev_options) - return - - rev_display = rev_options.to_display() - if self.is_repository_directory(dest): - existing_url = self.get_remote_url(dest) - if self.compare_urls(existing_url, url): - logger.debug( - '%s in %s exists, and has correct URL (%s)', - self.repo_name.title(), - display_path(dest), - url, - ) - if not self.is_commit_id_equal(dest, rev_options.rev): - logger.info( - 'Updating %s %s%s', - display_path(dest), - self.repo_name, - rev_display, - ) - self.update(dest, url, rev_options) - else: - logger.info('Skipping because already up-to-date.') - return - - logger.warning( - '%s %s in %s exists with URL %s', - self.name, - self.repo_name, - display_path(dest), - existing_url, - ) - prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', - ('s', 'i', 'w', 'b')) - else: - logger.warning( - 'Directory %s already exists, and is not a %s %s.', - dest, - self.name, - self.repo_name, - ) - # https://github.com/python/mypy/issues/1174 - prompt = ('(i)gnore, (w)ipe, (b)ackup ', # type: ignore - ('i', 'w', 'b')) - - logger.warning( - 'The plan is to install the %s repository %s', - self.name, - url, - ) - response = ask_path_exists('What to do? 
%s' % prompt[0], prompt[1]) - - if response == 'a': - sys.exit(-1) - - if response == 'w': - logger.warning('Deleting %s', display_path(dest)) - rmtree(dest) - self.fetch_new(dest, url, rev_options) - return - - if response == 'b': - dest_dir = backup_dir(dest) - logger.warning( - 'Backing up %s to %s', display_path(dest), dest_dir, - ) - shutil.move(dest, dest_dir) - self.fetch_new(dest, url, rev_options) - return - - # Do nothing if the response is "i". - if response == 's': - logger.info( - 'Switching %s %s to %s%s', - self.repo_name, - display_path(dest), - url, - rev_display, - ) - self.switch(dest, url, rev_options) - - def unpack(self, location): - # type: (str) -> None - """ - Clean up current location and download the url repository - (and vcs infos) into location - """ - if os.path.exists(location): - rmtree(location) - self.obtain(location) - - @classmethod - def get_src_requirement(cls, location, project_name): - """ - Return a string representing the requirement needed to - redownload the files currently present in location, something - like: - {repository_url}@{revision}#egg={project_name}-{version_identifier} - """ - raise NotImplementedError - - @classmethod - def get_remote_url(cls, location): - """ - Return the url used at location - - Raises RemoteNotFoundError if the repository does not have a remote - url configured. - """ - raise NotImplementedError - - @classmethod - def get_revision(cls, location): - """ - Return the current commit id of the files at the given location. - """ - raise NotImplementedError - - @classmethod - def run_command( - cls, - cmd, # type: List[str] - show_stdout=True, # type: bool - cwd=None, # type: Optional[str] - on_returncode='raise', # type: str - extra_ok_returncodes=None, # type: Optional[Iterable[int]] - command_desc=None, # type: Optional[str] - extra_environ=None, # type: Optional[Mapping[str, Any]] - spinner=None # type: Optional[SpinnerInterface] - ): - # type: (...) 
-> Optional[Text] - """ - Run a VCS subcommand - This is simply a wrapper around call_subprocess that adds the VCS - command name, and checks that the VCS is available - """ - cmd = [cls.name] + cmd - try: - return call_subprocess(cmd, show_stdout, cwd, - on_returncode=on_returncode, - extra_ok_returncodes=extra_ok_returncodes, - command_desc=command_desc, - extra_environ=extra_environ, - unset_environ=cls.unset_environ, - spinner=spinner) - except OSError as e: - # errno.ENOENT = no such file or directory - # In other words, the VCS executable isn't available - if e.errno == errno.ENOENT: - raise BadCommand( - 'Cannot find command %r - do you have ' - '%r installed and in your ' - 'PATH?' % (cls.name, cls.name)) - else: - raise # re-raise exception if a different error occurred - - @classmethod - def is_repository_directory(cls, path): - # type: (str) -> bool - """ - Return whether a directory path is a repository directory. - """ - logger.debug('Checking in %s for %s (%s)...', - path, cls.dirname, cls.name) - return os.path.exists(os.path.join(path, cls.dirname)) - - @classmethod - def controls_location(cls, location): - # type: (str) -> bool - """ - Check if a location is controlled by the vcs. - It is meant to be overridden to implement smarter detection - mechanisms for specific vcs. - - This can do more than is_repository_directory() alone. For example, - the Git override checks that Git is actually available. - """ - return cls.is_repository_directory(location) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/bazaar.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/bazaar.py index 4c6ac79d..94408c52 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/bazaar.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/bazaar.py @@ -1,3 +1,6 @@ +# The following comment should be removed at some point in the future. 
+# mypy: disallow-untyped-defs=False + from __future__ import absolute_import import logging @@ -5,12 +8,17 @@ import os from pip._vendor.six.moves.urllib import parse as urllib_parse -from pip._internal.download import path_to_url -from pip._internal.utils.misc import ( - display_path, make_vcs_requirement_url, rmtree, -) -from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.vcs import VersionControl, vcs +from pip._internal.utils.misc import display_path, rmtree +from pip._internal.utils.subprocess import make_command +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs.versioncontrol import VersionControl, vcs + +if MYPY_CHECK_RUNNING: + from typing import Optional, Tuple + from pip._internal.utils.misc import HiddenText + from pip._internal.vcs.versioncontrol import AuthInfo, RevOptions + logger = logging.getLogger(__name__) @@ -24,17 +32,19 @@ class Bazaar(VersionControl): 'bzr+lp', ) - def __init__(self, url=None, *args, **kwargs): - super(Bazaar, self).__init__(url, *args, **kwargs) + def __init__(self, *args, **kwargs): + super(Bazaar, self).__init__(*args, **kwargs) # This is only needed for python <2.7.5 # Register lp but do not expose as a scheme to support bzr+lp. 
if getattr(urllib_parse, 'uses_fragment', None): urllib_parse.uses_fragment.extend(['lp']) - def get_base_rev_args(self, rev): + @staticmethod + def get_base_rev_args(rev): return ['-r', rev] - def export(self, location): + def export(self, location, url): + # type: (str, HiddenText) -> None """ Export the Bazaar repository at the url to the destination location """ @@ -42,15 +52,13 @@ class Bazaar(VersionControl): if os.path.exists(location): rmtree(location) - with TempDirectory(kind="export") as temp_dir: - self.unpack(temp_dir.path) - - self.run_command( - ['export', location], - cwd=temp_dir.path, show_stdout=False, - ) + url, rev_options = self.get_url_rev_options(url) + self.run_command( + make_command('export', location, url, rev_options.to_args()) + ) def fetch_new(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None rev_display = rev_options.to_display() logger.info( 'Checking out %s%s to %s', @@ -58,26 +66,32 @@ class Bazaar(VersionControl): rev_display, display_path(dest), ) - cmd_args = ['branch', '-q'] + rev_options.to_args() + [url, dest] + cmd_args = ( + make_command('branch', '-q', rev_options.to_args(), url, dest) + ) self.run_command(cmd_args) def switch(self, dest, url, rev_options): - self.run_command(['switch', url], cwd=dest) + # type: (str, HiddenText, RevOptions) -> None + self.run_command(make_command('switch', url), cwd=dest) def update(self, dest, url, rev_options): - cmd_args = ['pull', '-q'] + rev_options.to_args() + # type: (str, HiddenText, RevOptions) -> None + cmd_args = make_command('pull', '-q', rev_options.to_args()) self.run_command(cmd_args, cwd=dest) - def get_url_rev_and_auth(self, url): + @classmethod + def get_url_rev_and_auth(cls, url): + # type: (str) -> Tuple[str, Optional[str], AuthInfo] # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it - url, rev, user_pass = super(Bazaar, self).get_url_rev_and_auth(url) + url, rev, user_pass = super(Bazaar, cls).get_url_rev_and_auth(url) 
if url.startswith('ssh://'): url = 'bzr+' + url return url, rev, user_pass @classmethod def get_remote_url(cls, location): - urls = cls.run_command(['info'], show_stdout=False, cwd=location) + urls = cls.run_command(['info'], cwd=location) for line in urls.splitlines(): line = line.strip() for x in ('checkout of branch: ', @@ -92,21 +106,12 @@ class Bazaar(VersionControl): @classmethod def get_revision(cls, location): revision = cls.run_command( - ['revno'], show_stdout=False, cwd=location, + ['revno'], cwd=location, ) return revision.splitlines()[-1] @classmethod - def get_src_requirement(cls, location, project_name): - repo = cls.get_remote_url(location) - if not repo: - return None - if not repo.lower().startswith('bzr:'): - repo = 'bzr+' + repo - current_rev = cls.get_revision(location) - return make_vcs_requirement_url(repo, current_rev, project_name) - - def is_commit_id_equal(self, dest, name): + def is_commit_id_equal(cls, dest, name): """Always assume the versions don't match""" return False diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/git.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/git.py index dd2bd61e..a9c7fb66 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/git.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/git.py @@ -1,3 +1,6 @@ +# The following comment should be removed at some point in the future. 
+# mypy: disallow-untyped-defs=False + from __future__ import absolute_import import logging @@ -8,13 +11,23 @@ from pip._vendor.packaging.version import parse as parse_version from pip._vendor.six.moves.urllib import parse as urllib_parse from pip._vendor.six.moves.urllib import request as urllib_request -from pip._internal.exceptions import BadCommand -from pip._internal.utils.compat import samefile -from pip._internal.utils.misc import ( - display_path, make_vcs_requirement_url, redact_password_from_url, -) +from pip._internal.exceptions import BadCommand, SubProcessError +from pip._internal.utils.misc import display_path, hide_url +from pip._internal.utils.subprocess import make_command from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.vcs import RemoteNotFoundError, VersionControl, vcs +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.vcs.versioncontrol import ( + RemoteNotFoundError, + VersionControl, + find_path_to_setup_from_repo_root, + vcs, +) + +if MYPY_CHECK_RUNNING: + from typing import Optional, Tuple + from pip._internal.utils.misc import HiddenText + from pip._internal.vcs.versioncontrol import AuthInfo, RevOptions + urlsplit = urllib_parse.urlsplit urlunsplit = urllib_parse.urlunsplit @@ -23,7 +36,7 @@ urlunsplit = urllib_parse.urlunsplit logger = logging.getLogger(__name__) -HASH_REGEX = re.compile('[a-fA-F0-9]{40}') +HASH_REGEX = re.compile('^[a-fA-F0-9]{40}$') def looks_like_hash(sha): @@ -42,44 +55,42 @@ class Git(VersionControl): unset_environ = ('GIT_DIR', 'GIT_WORK_TREE') default_arg_rev = 'HEAD' - def __init__(self, url=None, *args, **kwargs): - - # Works around an apparent Git bug - # (see https://article.gmane.org/gmane.comp.version-control.git/146500) - if url: - scheme, netloc, path, query, fragment = urlsplit(url) - if scheme.endswith('file'): - initial_slashes = path[:-len(path.lstrip('/'))] - newpath = ( - initial_slashes + - urllib_request.url2pathname(path) - .replace('\\', 
'/').lstrip('/') - ) - url = urlunsplit((scheme, netloc, newpath, query, fragment)) - after_plus = scheme.find('+') + 1 - url = scheme[:after_plus] + urlunsplit( - (scheme[after_plus:], netloc, newpath, query, fragment), - ) - - super(Git, self).__init__(url, *args, **kwargs) - - def get_base_rev_args(self, rev): + @staticmethod + def get_base_rev_args(rev): return [rev] + def is_immutable_rev_checkout(self, url, dest): + # type: (str, str) -> bool + _, rev_options = self.get_url_rev_options(hide_url(url)) + if not rev_options.rev: + return False + if not self.is_commit_id_equal(dest, rev_options.rev): + # the current commit is different from rev, + # which means rev was something else than a commit hash + return False + # return False in the rare case rev is both a commit hash + # and a tag or a branch; we don't want to cache in that case + # because that branch/tag could point to something else in the future + is_tag_or_branch = bool( + self.get_revision_sha(dest, rev_options.rev)[0] + ) + return not is_tag_or_branch + def get_git_version(self): VERSION_PFX = 'git version ' - version = self.run_command(['version'], show_stdout=False) + version = self.run_command(['version']) if version.startswith(VERSION_PFX): version = version[len(VERSION_PFX):].split()[0] else: version = '' - # get first 3 positions of the git version becasue + # get first 3 positions of the git version because # on windows it is x.y.z.windows.t, and this parses as # LegacyVersion which always smaller than a Version. version = '.'.join(version.split('.')[:3]) return parse_version(version) - def get_current_branch(self, location): + @classmethod + def get_current_branch(cls, location): """ Return the current branch, or None if HEAD isn't at a branch (e.g. detached HEAD). @@ -89,8 +100,8 @@ class Git(VersionControl): # command to exit with status code 1 instead of 128 in this case # and to suppress the message to stderr. 
args = ['symbolic-ref', '-q', 'HEAD'] - output = self.run_command( - args, extra_ok_returncodes=(1, ), show_stdout=False, cwd=location, + output = cls.run_command( + args, extra_ok_returncodes=(1, ), cwd=location, ) ref = output.strip() @@ -99,19 +110,21 @@ class Git(VersionControl): return None - def export(self, location): + def export(self, location, url): + # type: (str, HiddenText) -> None """Export the Git repository at the url to the destination location""" if not location.endswith('/'): location = location + '/' with TempDirectory(kind="export") as temp_dir: - self.unpack(temp_dir.path) + self.unpack(temp_dir.path, url=url) self.run_command( ['checkout-index', '-a', '-f', '--prefix', location], - show_stdout=False, cwd=temp_dir.path + cwd=temp_dir.path ) - def get_revision_sha(self, dest, rev): + @classmethod + def get_revision_sha(cls, dest, rev): """ Return (sha_or_none, is_branch), where sha_or_none is a commit hash if the revision names a remote branch or tag, otherwise None. @@ -121,8 +134,13 @@ class Git(VersionControl): rev: the revision name. """ # Pass rev to pre-filter the list. - output = self.run_command(['show-ref', rev], cwd=dest, - show_stdout=False, on_returncode='ignore') + + output = '' + try: + output = cls.run_command(['show-ref', rev], cwd=dest) + except SubProcessError: + pass + refs = {} for line in output.strip().splitlines(): try: @@ -145,7 +163,9 @@ class Git(VersionControl): return (sha, False) - def resolve_revision(self, dest, url, rev_options): + @classmethod + def resolve_revision(cls, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> RevOptions """ Resolve a revision to a new RevOptions object with the SHA1 of the branch, tag, or ref if found. @@ -154,7 +174,11 @@ class Git(VersionControl): rev_options: a RevOptions object. 
""" rev = rev_options.arg_rev - sha, is_branch = self.get_revision_sha(dest, rev) + # The arg_rev property's implementation for Git ensures that the + # rev return value is always non-None. + assert rev is not None + + sha, is_branch = cls.get_revision_sha(dest, rev) if sha is not None: rev_options = rev_options.make_new(sha) @@ -174,17 +198,18 @@ class Git(VersionControl): return rev_options # If it looks like a ref, we have to fetch it explicitly. - self.run_command( - ['fetch', '-q', url] + rev_options.to_args(), + cls.run_command( + make_command('fetch', '-q', url, rev_options.to_args()), cwd=dest, ) # Change the revision to the SHA of the ref we fetched - sha = self.get_revision(dest, rev='FETCH_HEAD') + sha = cls.get_revision(dest, rev='FETCH_HEAD') rev_options = rev_options.make_new(sha) return rev_options - def is_commit_id_equal(self, dest, name): + @classmethod + def is_commit_id_equal(cls, dest, name): """ Return whether the current commit hash equals the given name. @@ -196,15 +221,13 @@ class Git(VersionControl): # Then avoid an unnecessary subprocess call. return False - return self.get_revision(dest) == name + return cls.get_revision(dest) == name def fetch_new(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None rev_display = rev_options.to_display() - logger.info( - 'Cloning %s%s to %s', redact_password_from_url(url), - rev_display, display_path(dest), - ) - self.run_command(['clone', '-q', url, dest]) + logger.info('Cloning %s%s to %s', url, rev_display, display_path(dest)) + self.run_command(make_command('clone', '-q', url, dest)) if rev_options.rev: # Then a specific revision was requested. @@ -214,7 +237,9 @@ class Git(VersionControl): # Only do a checkout if the current commit id doesn't match # the requested revision. 
if not self.is_commit_id_equal(dest, rev_options.rev): - cmd_args = ['checkout', '-q'] + rev_options.to_args() + cmd_args = make_command( + 'checkout', '-q', rev_options.to_args(), + ) self.run_command(cmd_args, cwd=dest) elif self.get_current_branch(dest) != branch_name: # Then a specific branch was requested, and that branch @@ -229,13 +254,18 @@ class Git(VersionControl): self.update_submodules(dest) def switch(self, dest, url, rev_options): - self.run_command(['config', 'remote.origin.url', url], cwd=dest) - cmd_args = ['checkout', '-q'] + rev_options.to_args() + # type: (str, HiddenText, RevOptions) -> None + self.run_command( + make_command('config', 'remote.origin.url', url), + cwd=dest, + ) + cmd_args = make_command('checkout', '-q', rev_options.to_args()) self.run_command(cmd_args, cwd=dest) self.update_submodules(dest) def update(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None # First fetch changes from the default remote if self.get_git_version() >= parse_version('1.9.0'): # fetch tags in addition to everything else @@ -244,7 +274,7 @@ class Git(VersionControl): self.run_command(['fetch', '-q'], cwd=dest) # Then reset to wanted revision (maybe even origin/master) rev_options = self.resolve_revision(dest, url, rev_options) - cmd_args = ['reset', '--hard', '-q'] + rev_options.to_args() + cmd_args = make_command('reset', '--hard', '-q', rev_options.to_args()) self.run_command(cmd_args, cwd=dest) #: update submodules self.update_submodules(dest) @@ -261,7 +291,7 @@ class Git(VersionControl): # exits with return code 1 if there are no matching lines. 
stdout = cls.run_command( ['config', '--get-regexp', r'remote\..*\.url'], - extra_ok_returncodes=(1, ), show_stdout=False, cwd=location, + extra_ok_returncodes=(1, ), cwd=location, ) remotes = stdout.splitlines() try: @@ -281,89 +311,87 @@ class Git(VersionControl): if rev is None: rev = 'HEAD' current_rev = cls.run_command( - ['rev-parse', rev], show_stdout=False, cwd=location, + ['rev-parse', rev], cwd=location, ) return current_rev.strip() @classmethod - def _get_subdirectory(cls, location): - """Return the relative path of setup.py to the git repo root.""" + def get_subdirectory(cls, location): + """ + Return the path to setup.py, relative to the repo root. + Return None if setup.py is in the repo root. + """ # find the repo root - git_dir = cls.run_command(['rev-parse', '--git-dir'], - show_stdout=False, cwd=location).strip() + git_dir = cls.run_command( + ['rev-parse', '--git-dir'], + cwd=location).strip() if not os.path.isabs(git_dir): git_dir = os.path.join(location, git_dir) - root_dir = os.path.join(git_dir, '..') - # find setup.py - orig_location = location - while not os.path.exists(os.path.join(location, 'setup.py')): - last_location = location - location = os.path.dirname(location) - if location == last_location: - # We've traversed up to the root of the filesystem without - # finding setup.py - logger.warning( - "Could not find setup.py for directory %s (tried all " - "parent directories)", - orig_location, - ) - return None - # relative path of setup.py to repo root - if samefile(root_dir, location): - return None - return os.path.relpath(location, root_dir) + repo_root = os.path.abspath(os.path.join(git_dir, '..')) + return find_path_to_setup_from_repo_root(location, repo_root) @classmethod - def get_src_requirement(cls, location, project_name): - repo = cls.get_remote_url(location) - if not repo.lower().startswith('git:'): - repo = 'git+' + repo - current_rev = cls.get_revision(location) - subdir = cls._get_subdirectory(location) - req = 
make_vcs_requirement_url(repo, current_rev, project_name, - subdir=subdir) - - return req - - def get_url_rev_and_auth(self, url): + def get_url_rev_and_auth(cls, url): + # type: (str) -> Tuple[str, Optional[str], AuthInfo] """ Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'. That's required because although they use SSH they sometimes don't work with a ssh:// scheme (e.g. GitHub). But we need a scheme for parsing. Hence we remove it again afterwards and return it as a stub. """ + # Works around an apparent Git bug + # (see https://article.gmane.org/gmane.comp.version-control.git/146500) + scheme, netloc, path, query, fragment = urlsplit(url) + if scheme.endswith('file'): + initial_slashes = path[:-len(path.lstrip('/'))] + newpath = ( + initial_slashes + + urllib_request.url2pathname(path) + .replace('\\', '/').lstrip('/') + ) + url = urlunsplit((scheme, netloc, newpath, query, fragment)) + after_plus = scheme.find('+') + 1 + url = scheme[:after_plus] + urlunsplit( + (scheme[after_plus:], netloc, newpath, query, fragment), + ) + if '://' not in url: assert 'file:' not in url url = url.replace('git+', 'git+ssh://') - url, rev, user_pass = super(Git, self).get_url_rev_and_auth(url) + url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url) url = url.replace('ssh://', '') else: - url, rev, user_pass = super(Git, self).get_url_rev_and_auth(url) + url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url) return url, rev, user_pass - def update_submodules(self, location): + @classmethod + def update_submodules(cls, location): if not os.path.exists(os.path.join(location, '.gitmodules')): return - self.run_command( + cls.run_command( ['submodule', 'update', '--init', '--recursive', '-q'], cwd=location, ) @classmethod - def controls_location(cls, location): - if super(Git, cls).controls_location(location): - return True + def get_repository_root(cls, location): + loc = super(Git, cls).get_repository_root(location) + if loc: + return loc try: 
- r = cls.run_command(['rev-parse'], - cwd=location, - show_stdout=False, - on_returncode='ignore') - return not r + r = cls.run_command( + ['rev-parse', '--show-toplevel'], + cwd=location, + log_failed_cmd=False, + ) except BadCommand: logger.debug("could not determine if %s is under git control " "because git is not available", location) - return False + return None + except SubProcessError: + return None + return os.path.normpath(r.rstrip('\r\n')) vcs.register(Git) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/mercurial.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/mercurial.py index 26e75dee..69763fea 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/mercurial.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/mercurial.py @@ -1,3 +1,6 @@ +# The following comment should be removed at some point in the future. +# mypy: disallow-untyped-defs=False + from __future__ import absolute_import import logging @@ -5,10 +8,22 @@ import os from pip._vendor.six.moves import configparser -from pip._internal.download import path_to_url -from pip._internal.utils.misc import display_path, make_vcs_requirement_url +from pip._internal.exceptions import BadCommand, SubProcessError +from pip._internal.utils.misc import display_path +from pip._internal.utils.subprocess import make_command from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.vcs import VersionControl, vcs +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs.versioncontrol import ( + VersionControl, + find_path_to_setup_from_repo_root, + vcs, +) + +if MYPY_CHECK_RUNNING: + from pip._internal.utils.misc import HiddenText + from pip._internal.vcs.versioncontrol import RevOptions + logger = logging.getLogger(__name__) @@ -17,21 +32,26 @@ class Mercurial(VersionControl): name = 'hg' dirname = 
'.hg' repo_name = 'clone' - schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http') + schemes = ( + 'hg', 'hg+file', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http', + ) - def get_base_rev_args(self, rev): + @staticmethod + def get_base_rev_args(rev): return [rev] - def export(self, location): + def export(self, location, url): + # type: (str, HiddenText) -> None """Export the Hg repository at the url to the destination location""" with TempDirectory(kind="export") as temp_dir: - self.unpack(temp_dir.path) + self.unpack(temp_dir.path, url=url) self.run_command( - ['archive', location], show_stdout=False, cwd=temp_dir.path + ['archive', location], cwd=temp_dir.path ) def fetch_new(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None rev_display = rev_options.to_display() logger.info( 'Cloning hg %s%s to %s', @@ -39,16 +59,19 @@ class Mercurial(VersionControl): rev_display, display_path(dest), ) - self.run_command(['clone', '--noupdate', '-q', url, dest]) - cmd_args = ['update', '-q'] + rev_options.to_args() - self.run_command(cmd_args, cwd=dest) + self.run_command(make_command('clone', '--noupdate', '-q', url, dest)) + self.run_command( + make_command('update', '-q', rev_options.to_args()), + cwd=dest, + ) def switch(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None repo_config = os.path.join(dest, self.dirname, 'hgrc') - config = configparser.SafeConfigParser() + config = configparser.RawConfigParser() try: config.read(repo_config) - config.set('paths', 'default', url) + config.set('paths', 'default', url.secret) with open(repo_config, 'w') as config_file: config.write(config_file) except (OSError, configparser.NoSectionError) as exc: @@ -56,48 +79,80 @@ class Mercurial(VersionControl): 'Could not switch Mercurial repository to %s: %s', url, exc, ) else: - cmd_args = ['update', '-q'] + rev_options.to_args() + cmd_args = make_command('update', '-q', rev_options.to_args()) self.run_command(cmd_args, 
cwd=dest) def update(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None self.run_command(['pull', '-q'], cwd=dest) - cmd_args = ['update', '-q'] + rev_options.to_args() + cmd_args = make_command('update', '-q', rev_options.to_args()) self.run_command(cmd_args, cwd=dest) @classmethod def get_remote_url(cls, location): url = cls.run_command( ['showconfig', 'paths.default'], - show_stdout=False, cwd=location).strip() + cwd=location).strip() if cls._is_local_repository(url): url = path_to_url(url) return url.strip() @classmethod def get_revision(cls, location): + """ + Return the repository-local changeset revision number, as an integer. + """ current_revision = cls.run_command( - ['parents', '--template={rev}'], - show_stdout=False, cwd=location).strip() + ['parents', '--template={rev}'], cwd=location).strip() return current_revision @classmethod - def get_revision_hash(cls, location): + def get_requirement_revision(cls, location): + """ + Return the changeset identification hash, as a 40-character + hexadecimal string + """ current_rev_hash = cls.run_command( ['parents', '--template={node}'], - show_stdout=False, cwd=location).strip() + cwd=location).strip() return current_rev_hash @classmethod - def get_src_requirement(cls, location, project_name): - repo = cls.get_remote_url(location) - if not repo.lower().startswith('hg:'): - repo = 'hg+' + repo - current_rev_hash = cls.get_revision_hash(location) - return make_vcs_requirement_url(repo, current_rev_hash, project_name) - - def is_commit_id_equal(self, dest, name): + def is_commit_id_equal(cls, dest, name): """Always assume the versions don't match""" return False + @classmethod + def get_subdirectory(cls, location): + """ + Return the path to setup.py, relative to the repo root. + Return None if setup.py is in the repo root. 
+ """ + # find the repo root + repo_root = cls.run_command( + ['root'], cwd=location).strip() + if not os.path.isabs(repo_root): + repo_root = os.path.abspath(os.path.join(location, repo_root)) + return find_path_to_setup_from_repo_root(location, repo_root) + + @classmethod + def get_repository_root(cls, location): + loc = super(Mercurial, cls).get_repository_root(location) + if loc: + return loc + try: + r = cls.run_command( + ['root'], + cwd=location, + log_failed_cmd=False, + ) + except BadCommand: + logger.debug("could not determine if %s is under hg control " + "because hg is not available", location) + return None + except SubProcessError: + return None + return os.path.normpath(r.rstrip('\r\n')) + vcs.register(Mercurial) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/subversion.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/subversion.py index 42ac5ac3..14825f79 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/subversion.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/subversion.py @@ -1,3 +1,6 @@ +# The following comment should be removed at some point in the future. 
+# mypy: disallow-untyped-defs=False + from __future__ import absolute_import import logging @@ -6,9 +9,14 @@ import re from pip._internal.utils.logging import indent_log from pip._internal.utils.misc import ( - display_path, make_vcs_requirement_url, rmtree, split_auth_from_netloc, + display_path, + is_console_interactive, + rmtree, + split_auth_from_netloc, ) -from pip._internal.vcs import VersionControl, vcs +from pip._internal.utils.subprocess import make_command +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.vcs.versioncontrol import VersionControl, vcs _svn_xml_url_re = re.compile('url="([^"]+)"') _svn_rev_re = re.compile(r'committed-rev="(\d+)"') @@ -16,6 +24,13 @@ _svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"') _svn_info_xml_url_re = re.compile(r'<url>(.*)</url>') +if MYPY_CHECK_RUNNING: + from typing import Optional, Tuple + from pip._internal.utils.subprocess import CommandArgs + from pip._internal.utils.misc import HiddenText + from pip._internal.vcs.versioncontrol import AuthInfo, RevOptions + + logger = logging.getLogger(__name__) @@ -25,40 +40,13 @@ class Subversion(VersionControl): repo_name = 'checkout' schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn') - def get_base_rev_args(self, rev): - return ['-r', rev] - - def export(self, location): - """Export the svn repository at the url to the destination location""" - url, rev_options = self.get_url_rev_options(self.url) - - logger.info('Exporting svn repository %s to %s', url, location) - with indent_log(): - if os.path.exists(location): - # Subversion doesn't like to check out over an existing - # directory --force fixes this, but was only added in svn 1.5 - rmtree(location) - cmd_args = ['export'] + rev_options.to_args() + [url, location] - self.run_command(cmd_args, show_stdout=False) - - def fetch_new(self, dest, url, rev_options): - rev_display = rev_options.to_display() - logger.info( - 'Checking out %s%s to %s', - url, - rev_display, - 
display_path(dest), - ) - cmd_args = ['checkout', '-q'] + rev_options.to_args() + [url, dest] - self.run_command(cmd_args) - - def switch(self, dest, url, rev_options): - cmd_args = ['switch'] + rev_options.to_args() + [url, dest] - self.run_command(cmd_args) + @classmethod + def should_add_vcs_url_prefix(cls, remote_url): + return True - def update(self, dest, url, rev_options): - cmd_args = ['update'] + rev_options.to_args() + [dest] - self.run_command(cmd_args) + @staticmethod + def get_base_rev_args(rev): + return ['-r', rev] @classmethod def get_revision(cls, location): @@ -68,7 +56,7 @@ class Subversion(VersionControl): # Note: taken from setuptools.command.egg_info revision = 0 - for base, dirs, files in os.walk(location): + for base, dirs, _ in os.walk(location): if cls.dirname not in dirs: dirs[:] = [] continue # no sense walking uncontrolled subdirs @@ -88,7 +76,8 @@ class Subversion(VersionControl): revision = max(revision, localrev) return revision - def get_netloc_and_auth(self, netloc, scheme): + @classmethod + def get_netloc_and_auth(cls, netloc, scheme): """ This override allows the auth information to be passed to svn via the --username and --password options instead of via the URL. @@ -96,20 +85,23 @@ class Subversion(VersionControl): if scheme == 'ssh': # The --username and --password options can't be used for # svn+ssh URLs, so keep the auth information in the URL. 
- return super(Subversion, self).get_netloc_and_auth( - netloc, scheme) + return super(Subversion, cls).get_netloc_and_auth(netloc, scheme) return split_auth_from_netloc(netloc) - def get_url_rev_and_auth(self, url): + @classmethod + def get_url_rev_and_auth(cls, url): + # type: (str) -> Tuple[str, Optional[str], AuthInfo] # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it - url, rev, user_pass = super(Subversion, self).get_url_rev_and_auth(url) + url, rev, user_pass = super(Subversion, cls).get_url_rev_and_auth(url) if url.startswith('ssh://'): url = 'svn+' + url return url, rev, user_pass - def make_rev_args(self, username, password): - extra_args = [] + @staticmethod + def make_rev_args(username, password): + # type: (Optional[str], Optional[HiddenText]) -> CommandArgs + extra_args = [] # type: CommandArgs if username: extra_args += ['--username', username] if password: @@ -140,7 +132,7 @@ class Subversion(VersionControl): @classmethod def _get_svn_url_rev(cls, location): - from pip._internal.exceptions import InstallationError + from pip._internal.exceptions import SubProcessError entries_path = os.path.join(location, cls.dirname, 'entries') if os.path.exists(entries_path): @@ -159,21 +151,26 @@ class Subversion(VersionControl): elif data.startswith('<?xml'): match = _svn_xml_url_re.search(data) if not match: - raise ValueError('Badly formatted data: %r' % data) + raise ValueError( + 'Badly formatted data: {data!r}'.format(**locals())) url = match.group(1) # get repository URL revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0] else: try: # subversion >= 1.7 + # Note that using get_remote_call_options is not necessary here + # because `svn info` is being run against a local directory. + # We don't need to worry about making sure interactive mode + # is being used to prompt for passwords, because passwords + # are only potentially needed for remote server requests. 
xml = cls.run_command( ['info', '--xml', location], - show_stdout=False, ) url = _svn_info_xml_url_re.search(xml).group(1) revs = [ int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml) ] - except InstallationError: + except SubProcessError: url, revs = None, [] if revs: @@ -184,17 +181,154 @@ class Subversion(VersionControl): return url, rev @classmethod - def get_src_requirement(cls, location, project_name): - repo = cls.get_remote_url(location) - if repo is None: - return None - repo = 'svn+' + repo - rev = cls.get_revision(location) - return make_vcs_requirement_url(repo, rev, project_name) - - def is_commit_id_equal(self, dest, name): + def is_commit_id_equal(cls, dest, name): """Always assume the versions don't match""" return False + def __init__(self, use_interactive=None): + # type: (bool) -> None + if use_interactive is None: + use_interactive = is_console_interactive() + self.use_interactive = use_interactive + + # This member is used to cache the fetched version of the current + # ``svn`` client. + # Special value definitions: + # None: Not evaluated yet. + # Empty tuple: Could not parse version. + self._vcs_version = None # type: Optional[Tuple[int, ...]] + + super(Subversion, self).__init__() + + def call_vcs_version(self): + # type: () -> Tuple[int, ...] + """Query the version of the currently installed Subversion client. + + :return: A tuple containing the parts of the version information or + ``()`` if the version returned from ``svn`` could not be parsed. + :raises: BadCommand: If ``svn`` is not installed. 
+ """ + # Example versions: + # svn, version 1.10.3 (r1842928) + # compiled Feb 25 2019, 14:20:39 on x86_64-apple-darwin17.0.0 + # svn, version 1.7.14 (r1542130) + # compiled Mar 28 2018, 08:49:13 on x86_64-pc-linux-gnu + version_prefix = 'svn, version ' + version = self.run_command(['--version']) + + if not version.startswith(version_prefix): + return () + + version = version[len(version_prefix):].split()[0] + version_list = version.split('.') + try: + parsed_version = tuple(map(int, version_list)) + except ValueError: + return () + + return parsed_version + + def get_vcs_version(self): + # type: () -> Tuple[int, ...] + """Return the version of the currently installed Subversion client. + + If the version of the Subversion client has already been queried, + a cached value will be used. + + :return: A tuple containing the parts of the version information or + ``()`` if the version returned from ``svn`` could not be parsed. + :raises: BadCommand: If ``svn`` is not installed. + """ + if self._vcs_version is not None: + # Use cached version, if available. + # If parsing the version failed previously (empty tuple), + # do not attempt to parse it again. + return self._vcs_version + + vcs_version = self.call_vcs_version() + self._vcs_version = vcs_version + return vcs_version + + def get_remote_call_options(self): + # type: () -> CommandArgs + """Return options to be used on calls to Subversion that contact the server. + + These options are applicable for the following ``svn`` subcommands used + in this class. + + - checkout + - export + - switch + - update + + :return: A list of command line arguments to pass to ``svn``. + """ + if not self.use_interactive: + # --non-interactive switch is available since Subversion 0.14.4. + # Subversion < 1.8 runs in interactive mode by default. + return ['--non-interactive'] + + svn_version = self.get_vcs_version() + # By default, Subversion >= 1.8 runs in non-interactive mode if + # stdin is not a TTY. 
Since that is how pip invokes SVN, in + # call_subprocess(), pip must pass --force-interactive to ensure + # the user can be prompted for a password, if required. + # SVN added the --force-interactive option in SVN 1.8. Since + # e.g. RHEL/CentOS 7, which is supported until 2024, ships with + # SVN 1.7, pip should continue to support SVN 1.7. Therefore, pip + # can't safely add the option if the SVN version is < 1.8 (or unknown). + if svn_version >= (1, 8): + return ['--force-interactive'] + + return [] + + def export(self, location, url): + # type: (str, HiddenText) -> None + """Export the svn repository at the url to the destination location""" + url, rev_options = self.get_url_rev_options(url) + + logger.info('Exporting svn repository %s to %s', url, location) + with indent_log(): + if os.path.exists(location): + # Subversion doesn't like to check out over an existing + # directory --force fixes this, but was only added in svn 1.5 + rmtree(location) + cmd_args = make_command( + 'export', self.get_remote_call_options(), + rev_options.to_args(), url, location, + ) + self.run_command(cmd_args) + + def fetch_new(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + rev_display = rev_options.to_display() + logger.info( + 'Checking out %s%s to %s', + url, + rev_display, + display_path(dest), + ) + cmd_args = make_command( + 'checkout', '-q', self.get_remote_call_options(), + rev_options.to_args(), url, dest, + ) + self.run_command(cmd_args) + + def switch(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + cmd_args = make_command( + 'switch', self.get_remote_call_options(), rev_options.to_args(), + url, dest, + ) + self.run_command(cmd_args) + + def update(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + cmd_args = make_command( + 'update', self.get_remote_call_options(), rev_options.to_args(), + dest, + ) + self.run_command(cmd_args) + vcs.register(Subversion) diff --git 
a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/versioncontrol.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/versioncontrol.py new file mode 100644 index 00000000..96f830f9 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/vcs/versioncontrol.py @@ -0,0 +1,811 @@ +"""Handles all VCS (version control) support""" + +from __future__ import absolute_import + +import errno +import logging +import os +import shutil +import subprocess +import sys + +from pip._vendor import pkg_resources +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip._internal.exceptions import ( + BadCommand, + InstallationError, + SubProcessError, +) +from pip._internal.utils.compat import console_to_str, samefile +from pip._internal.utils.logging import subprocess_logger +from pip._internal.utils.misc import ( + ask_path_exists, + backup_dir, + display_path, + hide_url, + hide_value, + rmtree, +) +from pip._internal.utils.subprocess import ( + format_command_args, + make_command, + make_subprocess_output_error, + reveal_command_args, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import get_url_scheme + +if MYPY_CHECK_RUNNING: + from typing import ( + Dict, Iterable, Iterator, List, Optional, Text, Tuple, + Type, Union, Mapping, Any + ) + from pip._internal.utils.misc import HiddenText + from pip._internal.utils.subprocess import CommandArgs + + AuthInfo = Tuple[Optional[str], Optional[str]] + + +__all__ = ['vcs'] + + +logger = logging.getLogger(__name__) + + +def is_url(name): + # type: (Union[str, Text]) -> bool + """ + Return true if the name looks like a URL. 
+ """ + scheme = get_url_scheme(name) + if scheme is None: + return False + return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes + + +def make_vcs_requirement_url(repo_url, rev, project_name, subdir=None): + # type: (str, str, str, Optional[str]) -> str + """ + Return the URL for a VCS requirement. + + Args: + repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+"). + project_name: the (unescaped) project name. + """ + egg_project_name = pkg_resources.to_filename(project_name) + req = '{}@{}#egg={}'.format(repo_url, rev, egg_project_name) + if subdir: + req += '&subdirectory={}'.format(subdir) + + return req + + +def call_subprocess( + cmd, # type: Union[List[str], CommandArgs] + cwd=None, # type: Optional[str] + extra_environ=None, # type: Optional[Mapping[str, Any]] + extra_ok_returncodes=None, # type: Optional[Iterable[int]] + log_failed_cmd=True # type: Optional[bool] +): + # type: (...) -> Text + """ + Args: + extra_ok_returncodes: an iterable of integer return codes that are + acceptable, in addition to 0. Defaults to None, which means []. + log_failed_cmd: if false, failed commands are not logged, + only raised. + """ + if extra_ok_returncodes is None: + extra_ok_returncodes = [] + + # log the subprocess output at DEBUG level. + log_subprocess = subprocess_logger.debug + + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + + # Whether the subprocess will be visible in the console. + showing_subprocess = True + + command_desc = format_command_args(cmd) + try: + proc = subprocess.Popen( + # Convert HiddenText objects to the underlying str. + reveal_command_args(cmd), + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=cwd + ) + if proc.stdin: + proc.stdin.close() + except Exception as exc: + if log_failed_cmd: + subprocess_logger.critical( + "Error %s while executing command %s", exc, command_desc, + ) + raise + all_output = [] + while True: + # The "line" value is a unicode string in Python 2. 
+ line = None + if proc.stdout: + line = console_to_str(proc.stdout.readline()) + if not line: + break + line = line.rstrip() + all_output.append(line + '\n') + + # Show the line immediately. + log_subprocess(line) + try: + proc.wait() + finally: + if proc.stdout: + proc.stdout.close() + + proc_had_error = ( + proc.returncode and proc.returncode not in extra_ok_returncodes + ) + if proc_had_error: + if not showing_subprocess and log_failed_cmd: + # Then the subprocess streams haven't been logged to the + # console yet. + msg = make_subprocess_output_error( + cmd_args=cmd, + cwd=cwd, + lines=all_output, + exit_status=proc.returncode, + ) + subprocess_logger.error(msg) + exc_msg = ( + 'Command errored out with exit status {}: {} ' + 'Check the logs for full command output.' + ).format(proc.returncode, command_desc) + raise SubProcessError(exc_msg) + return ''.join(all_output) + + +def find_path_to_setup_from_repo_root(location, repo_root): + # type: (str, str) -> Optional[str] + """ + Find the path to `setup.py` by searching up the filesystem from `location`. + Return the path to `setup.py` relative to `repo_root`. + Return None if `setup.py` is in `repo_root` or cannot be found. + """ + # find setup.py + orig_location = location + while not os.path.exists(os.path.join(location, 'setup.py')): + last_location = location + location = os.path.dirname(location) + if location == last_location: + # We've traversed up to the root of the filesystem without + # finding setup.py + logger.warning( + "Could not find setup.py for directory %s (tried all " + "parent directories)", + orig_location, + ) + return None + + if samefile(repo_root, location): + return None + + return os.path.relpath(location, repo_root) + + +class RemoteNotFoundError(Exception): + pass + + +class RevOptions(object): + + """ + Encapsulates a VCS-specific revision to install, along with any VCS + install options. + + Instances of this class should be treated as if immutable. 
+ """ + + def __init__( + self, + vc_class, # type: Type[VersionControl] + rev=None, # type: Optional[str] + extra_args=None, # type: Optional[CommandArgs] + ): + # type: (...) -> None + """ + Args: + vc_class: a VersionControl subclass. + rev: the name of the revision to install. + extra_args: a list of extra options. + """ + if extra_args is None: + extra_args = [] + + self.extra_args = extra_args + self.rev = rev + self.vc_class = vc_class + self.branch_name = None # type: Optional[str] + + def __repr__(self): + # type: () -> str + return '<RevOptions {}: rev={!r}>'.format(self.vc_class.name, self.rev) + + @property + def arg_rev(self): + # type: () -> Optional[str] + if self.rev is None: + return self.vc_class.default_arg_rev + + return self.rev + + def to_args(self): + # type: () -> CommandArgs + """ + Return the VCS-specific command arguments. + """ + args = [] # type: CommandArgs + rev = self.arg_rev + if rev is not None: + args += self.vc_class.get_base_rev_args(rev) + args += self.extra_args + + return args + + def to_display(self): + # type: () -> str + if not self.rev: + return '' + + return ' (to revision {})'.format(self.rev) + + def make_new(self, rev): + # type: (str) -> RevOptions + """ + Make a copy of the current instance, but with a new rev. + + Args: + rev: the name of the revision for the new object. 
+ """ + return self.vc_class.make_rev_options(rev, extra_args=self.extra_args) + + +class VcsSupport(object): + _registry = {} # type: Dict[str, VersionControl] + schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn'] + + def __init__(self): + # type: () -> None + # Register more schemes with urlparse for various version control + # systems + urllib_parse.uses_netloc.extend(self.schemes) + # Python >= 2.7.4, 3.3 doesn't have uses_fragment + if getattr(urllib_parse, 'uses_fragment', None): + urllib_parse.uses_fragment.extend(self.schemes) + super(VcsSupport, self).__init__() + + def __iter__(self): + # type: () -> Iterator[str] + return self._registry.__iter__() + + @property + def backends(self): + # type: () -> List[VersionControl] + return list(self._registry.values()) + + @property + def dirnames(self): + # type: () -> List[str] + return [backend.dirname for backend in self.backends] + + @property + def all_schemes(self): + # type: () -> List[str] + schemes = [] # type: List[str] + for backend in self.backends: + schemes.extend(backend.schemes) + return schemes + + def register(self, cls): + # type: (Type[VersionControl]) -> None + if not hasattr(cls, 'name'): + logger.warning('Cannot register VCS %s', cls.__name__) + return + if cls.name not in self._registry: + self._registry[cls.name] = cls() + logger.debug('Registered VCS backend: %s', cls.name) + + def unregister(self, name): + # type: (str) -> None + if name in self._registry: + del self._registry[name] + + def get_backend_for_dir(self, location): + # type: (str) -> Optional[VersionControl] + """ + Return a VersionControl object if a repository of that type is found + at the given directory. 
+ """ + vcs_backends = {} + for vcs_backend in self._registry.values(): + repo_path = vcs_backend.get_repository_root(location) + if not repo_path: + continue + logger.debug('Determine that %s uses VCS: %s', + location, vcs_backend.name) + vcs_backends[repo_path] = vcs_backend + + if not vcs_backends: + return None + + # Choose the VCS in the inner-most directory. Since all repository + # roots found here would be either `location` or one of its + # parents, the longest path should have the most path components, + # i.e. the backend representing the inner-most repository. + inner_most_repo_path = max(vcs_backends, key=len) + return vcs_backends[inner_most_repo_path] + + def get_backend_for_scheme(self, scheme): + # type: (str) -> Optional[VersionControl] + """ + Return a VersionControl object or None. + """ + for vcs_backend in self._registry.values(): + if scheme in vcs_backend.schemes: + return vcs_backend + return None + + def get_backend(self, name): + # type: (str) -> Optional[VersionControl] + """ + Return a VersionControl object or None. + """ + name = name.lower() + return self._registry.get(name) + + +vcs = VcsSupport() + + +class VersionControl(object): + name = '' + dirname = '' + repo_name = '' + # List of supported schemes for this Version Control + schemes = () # type: Tuple[str, ...] + # Iterable of environment variable names to pass to call_subprocess(). + unset_environ = () # type: Tuple[str, ...] + default_arg_rev = None # type: Optional[str] + + @classmethod + def should_add_vcs_url_prefix(cls, remote_url): + # type: (str) -> bool + """ + Return whether the vcs prefix (e.g. "git+") should be added to a + repository's remote url when used in a requirement. + """ + return not remote_url.lower().startswith('{}:'.format(cls.name)) + + @classmethod + def get_subdirectory(cls, location): + # type: (str) -> Optional[str] + """ + Return the path to setup.py, relative to the repo root. + Return None if setup.py is in the repo root. 
+ """ + return None + + @classmethod + def get_requirement_revision(cls, repo_dir): + # type: (str) -> str + """ + Return the revision string that should be used in a requirement. + """ + return cls.get_revision(repo_dir) + + @classmethod + def get_src_requirement(cls, repo_dir, project_name): + # type: (str, str) -> Optional[str] + """ + Return the requirement string to use to redownload the files + currently at the given repository directory. + + Args: + project_name: the (unescaped) project name. + + The return value has a form similar to the following: + + {repository_url}@{revision}#egg={project_name} + """ + repo_url = cls.get_remote_url(repo_dir) + if repo_url is None: + return None + + if cls.should_add_vcs_url_prefix(repo_url): + repo_url = '{}+{}'.format(cls.name, repo_url) + + revision = cls.get_requirement_revision(repo_dir) + subdir = cls.get_subdirectory(repo_dir) + req = make_vcs_requirement_url(repo_url, revision, project_name, + subdir=subdir) + + return req + + @staticmethod + def get_base_rev_args(rev): + # type: (str) -> List[str] + """ + Return the base revision arguments for a vcs command. + + Args: + rev: the name of a revision to install. Cannot be None. + """ + raise NotImplementedError + + def is_immutable_rev_checkout(self, url, dest): + # type: (str, str) -> bool + """ + Return true if the commit hash checked out at dest matches + the revision in url. + + Always return False, if the VCS does not support immutable commit + hashes. + + This method does not check if there are local uncommitted changes + in dest after checkout, as pip currently has no use case for that. + """ + return False + + @classmethod + def make_rev_options(cls, rev=None, extra_args=None): + # type: (Optional[str], Optional[CommandArgs]) -> RevOptions + """ + Return a RevOptions object. + + Args: + rev: the name of a revision to install. + extra_args: a list of extra options. 
+ """ + return RevOptions(cls, rev, extra_args=extra_args) + + @classmethod + def _is_local_repository(cls, repo): + # type: (str) -> bool + """ + posix absolute paths start with os.path.sep, + win32 ones start with drive (like c:\\folder) + """ + drive, tail = os.path.splitdrive(repo) + return repo.startswith(os.path.sep) or bool(drive) + + def export(self, location, url): + # type: (str, HiddenText) -> None + """ + Export the repository at the url to the destination location + i.e. only download the files, without vcs informations + + :param url: the repository URL starting with a vcs prefix. + """ + raise NotImplementedError + + @classmethod + def get_netloc_and_auth(cls, netloc, scheme): + # type: (str, str) -> Tuple[str, Tuple[Optional[str], Optional[str]]] + """ + Parse the repository URL's netloc, and return the new netloc to use + along with auth information. + + Args: + netloc: the original repository URL netloc. + scheme: the repository URL's scheme without the vcs prefix. + + This is mainly for the Subversion class to override, so that auth + information can be provided via the --username and --password options + instead of through the URL. For other subclasses like Git without + such an option, auth information must stay in the URL. + + Returns: (netloc, (username, password)). + """ + return netloc, (None, None) + + @classmethod + def get_url_rev_and_auth(cls, url): + # type: (str) -> Tuple[str, Optional[str], AuthInfo] + """ + Parse the repository URL to use, and return the URL, revision, + and auth info to use. + + Returns: (url, rev, (username, password)). + """ + scheme, netloc, path, query, frag = urllib_parse.urlsplit(url) + if '+' not in scheme: + raise ValueError( + "Sorry, {!r} is a malformed VCS url. " + "The format is <vcs>+<protocol>://<url>, " + "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url) + ) + # Remove the vcs prefix. 
+ scheme = scheme.split('+', 1)[1] + netloc, user_pass = cls.get_netloc_and_auth(netloc, scheme) + rev = None + if '@' in path: + path, rev = path.rsplit('@', 1) + if not rev: + raise InstallationError( + "The URL {!r} has an empty revision (after @) " + "which is not supported. Include a revision after @ " + "or remove @ from the URL.".format(url) + ) + url = urllib_parse.urlunsplit((scheme, netloc, path, query, '')) + return url, rev, user_pass + + @staticmethod + def make_rev_args(username, password): + # type: (Optional[str], Optional[HiddenText]) -> CommandArgs + """ + Return the RevOptions "extra arguments" to use in obtain(). + """ + return [] + + def get_url_rev_options(self, url): + # type: (HiddenText) -> Tuple[HiddenText, RevOptions] + """ + Return the URL and RevOptions object to use in obtain() and in + some cases export(), as a tuple (url, rev_options). + """ + secret_url, rev, user_pass = self.get_url_rev_and_auth(url.secret) + username, secret_password = user_pass + password = None # type: Optional[HiddenText] + if secret_password is not None: + password = hide_value(secret_password) + extra_args = self.make_rev_args(username, password) + rev_options = self.make_rev_options(rev, extra_args=extra_args) + + return hide_url(secret_url), rev_options + + @staticmethod + def normalize_url(url): + # type: (str) -> str + """ + Normalize a URL for comparison by unquoting it and removing any + trailing slash. + """ + return urllib_parse.unquote(url).rstrip('/') + + @classmethod + def compare_urls(cls, url1, url2): + # type: (str, str) -> bool + """ + Compare two repo URLs for identity, ignoring incidental differences. + """ + return (cls.normalize_url(url1) == cls.normalize_url(url2)) + + def fetch_new(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + """ + Fetch a revision from a repository, in the case that this is the + first fetch from the repository. + + Args: + dest: the directory to fetch the repository to. 
+ rev_options: a RevOptions object. + """ + raise NotImplementedError + + def switch(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + """ + Switch the repo at ``dest`` to point to ``URL``. + + Args: + rev_options: a RevOptions object. + """ + raise NotImplementedError + + def update(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + """ + Update an already-existing repo to the given ``rev_options``. + + Args: + rev_options: a RevOptions object. + """ + raise NotImplementedError + + @classmethod + def is_commit_id_equal(cls, dest, name): + # type: (str, Optional[str]) -> bool + """ + Return whether the id of the current commit equals the given name. + + Args: + dest: the repository directory. + name: a string name. + """ + raise NotImplementedError + + def obtain(self, dest, url): + # type: (str, HiddenText) -> None + """ + Install or update in editable mode the package represented by this + VersionControl object. + + :param dest: the repository directory in which to install or update. + :param url: the repository URL starting with a vcs prefix. 
+ """ + url, rev_options = self.get_url_rev_options(url) + + if not os.path.exists(dest): + self.fetch_new(dest, url, rev_options) + return + + rev_display = rev_options.to_display() + if self.is_repository_directory(dest): + existing_url = self.get_remote_url(dest) + if self.compare_urls(existing_url, url.secret): + logger.debug( + '%s in %s exists, and has correct URL (%s)', + self.repo_name.title(), + display_path(dest), + url, + ) + if not self.is_commit_id_equal(dest, rev_options.rev): + logger.info( + 'Updating %s %s%s', + display_path(dest), + self.repo_name, + rev_display, + ) + self.update(dest, url, rev_options) + else: + logger.info('Skipping because already up-to-date.') + return + + logger.warning( + '%s %s in %s exists with URL %s', + self.name, + self.repo_name, + display_path(dest), + existing_url, + ) + prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', + ('s', 'i', 'w', 'b')) + else: + logger.warning( + 'Directory %s already exists, and is not a %s %s.', + dest, + self.name, + self.repo_name, + ) + # https://github.com/python/mypy/issues/1174 + prompt = ('(i)gnore, (w)ipe, (b)ackup ', # type: ignore + ('i', 'w', 'b')) + + logger.warning( + 'The plan is to install the %s repository %s', + self.name, + url, + ) + response = ask_path_exists('What to do? {}'.format( + prompt[0]), prompt[1]) + + if response == 'a': + sys.exit(-1) + + if response == 'w': + logger.warning('Deleting %s', display_path(dest)) + rmtree(dest) + self.fetch_new(dest, url, rev_options) + return + + if response == 'b': + dest_dir = backup_dir(dest) + logger.warning( + 'Backing up %s to %s', display_path(dest), dest_dir, + ) + shutil.move(dest, dest_dir) + self.fetch_new(dest, url, rev_options) + return + + # Do nothing if the response is "i". 
+ if response == 's': + logger.info( + 'Switching %s %s to %s%s', + self.repo_name, + display_path(dest), + url, + rev_display, + ) + self.switch(dest, url, rev_options) + + def unpack(self, location, url): + # type: (str, HiddenText) -> None + """ + Clean up current location and download the url repository + (and vcs infos) into location + + :param url: the repository URL starting with a vcs prefix. + """ + if os.path.exists(location): + rmtree(location) + self.obtain(location, url=url) + + @classmethod + def get_remote_url(cls, location): + # type: (str) -> str + """ + Return the url used at location + + Raises RemoteNotFoundError if the repository does not have a remote + url configured. + """ + raise NotImplementedError + + @classmethod + def get_revision(cls, location): + # type: (str) -> str + """ + Return the current commit id of the files at the given location. + """ + raise NotImplementedError + + @classmethod + def run_command( + cls, + cmd, # type: Union[List[str], CommandArgs] + cwd=None, # type: Optional[str] + extra_environ=None, # type: Optional[Mapping[str, Any]] + extra_ok_returncodes=None, # type: Optional[Iterable[int]] + log_failed_cmd=True # type: bool + ): + # type: (...) 
-> Text + """ + Run a VCS subcommand + This is simply a wrapper around call_subprocess that adds the VCS + command name, and checks that the VCS is available + """ + cmd = make_command(cls.name, *cmd) + try: + return call_subprocess(cmd, cwd, + extra_environ=extra_environ, + extra_ok_returncodes=extra_ok_returncodes, + log_failed_cmd=log_failed_cmd) + except OSError as e: + # errno.ENOENT = no such file or directory + # In other words, the VCS executable isn't available + if e.errno == errno.ENOENT: + raise BadCommand( + 'Cannot find command {cls.name!r} - do you have ' + '{cls.name!r} installed and in your ' + 'PATH?'.format(**locals())) + else: + raise # re-raise exception if a different error occurred + + @classmethod + def is_repository_directory(cls, path): + # type: (str) -> bool + """ + Return whether a directory path is a repository directory. + """ + logger.debug('Checking in %s for %s (%s)...', + path, cls.dirname, cls.name) + return os.path.exists(os.path.join(path, cls.dirname)) + + @classmethod + def get_repository_root(cls, location): + # type: (str) -> Optional[str] + """ + Return the "root" (top-level) directory controlled by the vcs, + or `None` if the directory is not in any. + + It is meant to be overridden to implement smarter detection + mechanisms for specific vcs. + + This can do more than is_repository_directory() alone. For + example, the Git override checks that Git is actually available. + """ + if cls.is_repository_directory(location): + return location + return None diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/wheel.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/wheel.py deleted file mode 100644 index 67bcc7f7..00000000 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/wheel.py +++ /dev/null @@ -1,1095 +0,0 @@ -""" -Support for installing and building the "wheel" binary package format. 
-""" -from __future__ import absolute_import - -import collections -import compileall -import csv -import hashlib -import logging -import os.path -import re -import shutil -import stat -import sys -import warnings -from base64 import urlsafe_b64encode -from email.parser import Parser - -from pip._vendor import pkg_resources -from pip._vendor.distlib.scripts import ScriptMaker -from pip._vendor.packaging.utils import canonicalize_name -from pip._vendor.six import StringIO - -from pip._internal import pep425tags -from pip._internal.download import path_to_url, unpack_url -from pip._internal.exceptions import ( - InstallationError, InvalidWheelFilename, UnsupportedWheel, -) -from pip._internal.locations import ( - PIP_DELETE_MARKER_FILENAME, distutils_scheme, -) -from pip._internal.models.link import Link -from pip._internal.utils.logging import indent_log -from pip._internal.utils.misc import ( - call_subprocess, captured_stdout, ensure_dir, read_chunks, -) -from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM -from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.utils.ui import open_spinner - -if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 - Dict, List, Optional, Sequence, Mapping, Tuple, IO, Text, Any, - Union, Iterable - ) - from pip._vendor.packaging.requirements import Requirement # noqa: F401 - from pip._internal.req.req_install import InstallRequirement # noqa: F401 - from pip._internal.download import PipSession # noqa: F401 - from pip._internal.index import FormatControl, PackageFinder # noqa: F401 - from pip._internal.operations.prepare import ( # noqa: F401 - RequirementPreparer - ) - from pip._internal.cache import WheelCache # noqa: F401 - from pip._internal.pep425tags import Pep425Tag # noqa: F401 - - InstalledCSVRow = Tuple[str, ...] 
- - -VERSION_COMPATIBLE = (1, 0) - - -logger = logging.getLogger(__name__) - - -def normpath(src, p): - return os.path.relpath(src, p).replace(os.path.sep, '/') - - -def rehash(path, blocksize=1 << 20): - # type: (str, int) -> Tuple[str, str] - """Return (hash, length) for path using hashlib.sha256()""" - h = hashlib.sha256() - length = 0 - with open(path, 'rb') as f: - for block in read_chunks(f, size=blocksize): - length += len(block) - h.update(block) - digest = 'sha256=' + urlsafe_b64encode( - h.digest() - ).decode('latin1').rstrip('=') - # unicode/str python2 issues - return (digest, str(length)) # type: ignore - - -def open_for_csv(name, mode): - # type: (str, Text) -> IO - if sys.version_info[0] < 3: - nl = {} # type: Dict[str, Any] - bin = 'b' - else: - nl = {'newline': ''} # type: Dict[str, Any] - bin = '' - return open(name, mode + bin, **nl) - - -def replace_python_tag(wheelname, new_tag): - # type: (str, str) -> str - """Replace the Python tag in a wheel file name with a new value. - """ - parts = wheelname.split('-') - parts[-3] = new_tag - return '-'.join(parts) - - -def fix_script(path): - # type: (str) -> Optional[bool] - """Replace #!python with #!/path/to/python - Return True if file was changed.""" - # XXX RECORD hashes will need to be updated - if os.path.isfile(path): - with open(path, 'rb') as script: - firstline = script.readline() - if not firstline.startswith(b'#!python'): - return False - exename = sys.executable.encode(sys.getfilesystemencoding()) - firstline = b'#!' + exename + os.linesep.encode("ascii") - rest = script.read() - with open(path, 'wb') as script: - script.write(firstline) - script.write(rest) - return True - return None - - -dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>.+?))?) - \.dist-info$""", re.VERBOSE) - - -def root_is_purelib(name, wheeldir): - # type: (str, str) -> bool - """ - Return True if the extracted wheel in wheeldir should go into purelib. 
- """ - name_folded = name.replace("-", "_") - for item in os.listdir(wheeldir): - match = dist_info_re.match(item) - if match and match.group('name') == name_folded: - with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel: - for line in wheel: - line = line.lower().rstrip() - if line == "root-is-purelib: true": - return True - return False - - -def get_entrypoints(filename): - # type: (str) -> Tuple[Dict[str, str], Dict[str, str]] - if not os.path.exists(filename): - return {}, {} - - # This is done because you can pass a string to entry_points wrappers which - # means that they may or may not be valid INI files. The attempt here is to - # strip leading and trailing whitespace in order to make them valid INI - # files. - with open(filename) as fp: - data = StringIO() - for line in fp: - data.write(line.strip()) - data.write("\n") - data.seek(0) - - # get the entry points and then the script names - entry_points = pkg_resources.EntryPoint.parse_map(data) - console = entry_points.get('console_scripts', {}) - gui = entry_points.get('gui_scripts', {}) - - def _split_ep(s): - """get the string representation of EntryPoint, remove space and split - on '='""" - return str(s).replace(" ", "").split("=") - - # convert the EntryPoint objects into strings with module:function - console = dict(_split_ep(v) for v in console.values()) - gui = dict(_split_ep(v) for v in gui.values()) - return console, gui - - -def message_about_scripts_not_on_PATH(scripts): - # type: (Sequence[str]) -> Optional[str] - """Determine if any scripts are not on PATH and format a warning. - - Returns a warning message if one or more scripts are not on PATH, - otherwise None. 
- """ - if not scripts: - return None - - # Group scripts by the path they were installed in - grouped_by_dir = collections.defaultdict(set) # type: Dict[str, set] - for destfile in scripts: - parent_dir = os.path.dirname(destfile) - script_name = os.path.basename(destfile) - grouped_by_dir[parent_dir].add(script_name) - - # We don't want to warn for directories that are on PATH. - not_warn_dirs = [ - os.path.normcase(i).rstrip(os.sep) for i in - os.environ.get("PATH", "").split(os.pathsep) - ] - # If an executable sits with sys.executable, we don't warn for it. - # This covers the case of venv invocations without activating the venv. - not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable))) - warn_for = { - parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items() - if os.path.normcase(parent_dir) not in not_warn_dirs - } - if not warn_for: - return None - - # Format a message - msg_lines = [] - for parent_dir, scripts in warn_for.items(): - scripts = sorted(scripts) - if len(scripts) == 1: - start_text = "script {} is".format(scripts[0]) - else: - start_text = "scripts {} are".format( - ", ".join(scripts[:-1]) + " and " + scripts[-1] - ) - - msg_lines.append( - "The {} installed in '{}' which is not on PATH." - .format(start_text, parent_dir) - ) - - last_line_fmt = ( - "Consider adding {} to PATH or, if you prefer " - "to suppress this warning, use --no-warn-script-location." - ) - if len(msg_lines) == 1: - msg_lines.append(last_line_fmt.format("this directory")) - else: - msg_lines.append(last_line_fmt.format("these directories")) - - # Returns the formatted multiline message - return "\n".join(msg_lines) - - -def sorted_outrows(outrows): - # type: (Iterable[InstalledCSVRow]) -> List[InstalledCSVRow] - """ - Return the given rows of a RECORD file in sorted order. - - Each row is a 3-tuple (path, hash, size) and corresponds to a record of - a RECORD file (see PEP 376 and PEP 427 for details). 
For the rows - passed to this function, the size can be an integer as an int or string, - or the empty string. - """ - # Normally, there should only be one row per path, in which case the - # second and third elements don't come into play when sorting. - # However, in cases in the wild where a path might happen to occur twice, - # we don't want the sort operation to trigger an error (but still want - # determinism). Since the third element can be an int or string, we - # coerce each element to a string to avoid a TypeError in this case. - # For additional background, see-- - # https://github.com/pypa/pip/issues/5868 - return sorted(outrows, key=lambda row: tuple(str(x) for x in row)) - - -def get_csv_rows_for_installed( - old_csv_rows, # type: Iterable[List[str]] - installed, # type: Dict[str, str] - changed, # type: set - generated, # type: List[str] - lib_dir, # type: str -): - # type: (...) -> List[InstalledCSVRow] - """ - :param installed: A map from archive RECORD path to installation RECORD - path. - """ - installed_rows = [] # type: List[InstalledCSVRow] - for row in old_csv_rows: - if len(row) > 3: - logger.warning( - 'RECORD line has more than three elements: {}'.format(row) - ) - # Make a copy because we are mutating the row. 
- row = list(row) - old_path = row[0] - new_path = installed.pop(old_path, old_path) - row[0] = new_path - if new_path in changed: - digest, length = rehash(new_path) - row[1] = digest - row[2] = length - installed_rows.append(tuple(row)) - for f in generated: - digest, length = rehash(f) - installed_rows.append((normpath(f, lib_dir), digest, str(length))) - for f in installed: - installed_rows.append((installed[f], '', '')) - return installed_rows - - -def move_wheel_files( - name, # type: str - req, # type: Requirement - wheeldir, # type: str - user=False, # type: bool - home=None, # type: Optional[str] - root=None, # type: Optional[str] - pycompile=True, # type: bool - scheme=None, # type: Optional[Mapping[str, str]] - isolated=False, # type: bool - prefix=None, # type: Optional[str] - warn_script_location=True # type: bool -): - # type: (...) -> None - """Install a wheel""" - # TODO: Investigate and break this up. - # TODO: Look into moving this into a dedicated class for representing an - # installation. - - if not scheme: - scheme = distutils_scheme( - name, user=user, home=home, root=root, isolated=isolated, - prefix=prefix, - ) - - if root_is_purelib(name, wheeldir): - lib_dir = scheme['purelib'] - else: - lib_dir = scheme['platlib'] - - info_dir = [] # type: List[str] - data_dirs = [] - source = wheeldir.rstrip(os.path.sep) + os.path.sep - - # Record details of the files moved - # installed = files copied from the wheel to the destination - # changed = files changed while installing (scripts #! 
line typically) - # generated = files newly generated during the install (script wrappers) - installed = {} # type: Dict[str, str] - changed = set() - generated = [] # type: List[str] - - # Compile all of the pyc files that we're going to be installing - if pycompile: - with captured_stdout() as stdout: - with warnings.catch_warnings(): - warnings.filterwarnings('ignore') - compileall.compile_dir(source, force=True, quiet=True) - logger.debug(stdout.getvalue()) - - def record_installed(srcfile, destfile, modified=False): - """Map archive RECORD paths to installation RECORD paths.""" - oldpath = normpath(srcfile, wheeldir) - newpath = normpath(destfile, lib_dir) - installed[oldpath] = newpath - if modified: - changed.add(destfile) - - def clobber(source, dest, is_base, fixer=None, filter=None): - ensure_dir(dest) # common for the 'include' path - - for dir, subdirs, files in os.walk(source): - basedir = dir[len(source):].lstrip(os.path.sep) - destdir = os.path.join(dest, basedir) - if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'): - continue - for s in subdirs: - destsubdir = os.path.join(dest, basedir, s) - if is_base and basedir == '' and destsubdir.endswith('.data'): - data_dirs.append(s) - continue - elif (is_base and - s.endswith('.dist-info') and - canonicalize_name(s).startswith( - canonicalize_name(req.name))): - assert not info_dir, ('Multiple .dist-info directories: ' + - destsubdir + ', ' + - ', '.join(info_dir)) - info_dir.append(destsubdir) - for f in files: - # Skip unwanted files - if filter and filter(f): - continue - srcfile = os.path.join(dir, f) - destfile = os.path.join(dest, basedir, f) - # directory creation is lazy and after the file filtering above - # to ensure we don't install empty dirs; empty dirs can't be - # uninstalled. - ensure_dir(destdir) - - # copyfile (called below) truncates the destination if it - # exists and then writes the new contents. 
This is fine in most - # cases, but can cause a segfault if pip has loaded a shared - # object (e.g. from pyopenssl through its vendored urllib3) - # Since the shared object is mmap'd an attempt to call a - # symbol in it will then cause a segfault. Unlinking the file - # allows writing of new contents while allowing the process to - # continue to use the old copy. - if os.path.exists(destfile): - os.unlink(destfile) - - # We use copyfile (not move, copy, or copy2) to be extra sure - # that we are not moving directories over (copyfile fails for - # directories) as well as to ensure that we are not copying - # over any metadata because we want more control over what - # metadata we actually copy over. - shutil.copyfile(srcfile, destfile) - - # Copy over the metadata for the file, currently this only - # includes the atime and mtime. - st = os.stat(srcfile) - if hasattr(os, "utime"): - os.utime(destfile, (st.st_atime, st.st_mtime)) - - # If our file is executable, then make our destination file - # executable. 
- if os.access(srcfile, os.X_OK): - st = os.stat(srcfile) - permissions = ( - st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH - ) - os.chmod(destfile, permissions) - - changed = False - if fixer: - changed = fixer(destfile) - record_installed(srcfile, destfile, changed) - - clobber(source, lib_dir, True) - - assert info_dir, "%s .dist-info directory not found" % req - - # Get the defined entry points - ep_file = os.path.join(info_dir[0], 'entry_points.txt') - console, gui = get_entrypoints(ep_file) - - def is_entrypoint_wrapper(name): - # EP, EP.exe and EP-script.py are scripts generated for - # entry point EP by setuptools - if name.lower().endswith('.exe'): - matchname = name[:-4] - elif name.lower().endswith('-script.py'): - matchname = name[:-10] - elif name.lower().endswith(".pya"): - matchname = name[:-4] - else: - matchname = name - # Ignore setuptools-generated scripts - return (matchname in console or matchname in gui) - - for datadir in data_dirs: - fixer = None - filter = None - for subdir in os.listdir(os.path.join(wheeldir, datadir)): - fixer = None - if subdir == 'scripts': - fixer = fix_script - filter = is_entrypoint_wrapper - source = os.path.join(wheeldir, datadir, subdir) - dest = scheme[subdir] - clobber(source, dest, False, fixer=fixer, filter=filter) - - maker = ScriptMaker(None, scheme['scripts']) - - # Ensure old scripts are overwritten. - # See https://github.com/pypa/pip/issues/1800 - maker.clobber = True - - # Ensure we don't generate any variants for scripts because this is almost - # never what somebody wants. - # See https://bitbucket.org/pypa/distlib/issue/35/ - maker.variants = {''} - - # This is required because otherwise distlib creates scripts that are not - # executable. - # See https://bitbucket.org/pypa/distlib/issue/32/ - maker.set_mode = True - - # Simplify the script and fix the fact that the default script swallows - # every single stack trace. 
- # See https://bitbucket.org/pypa/distlib/issue/34/ - # See https://bitbucket.org/pypa/distlib/issue/33/ - def _get_script_text(entry): - if entry.suffix is None: - raise InstallationError( - "Invalid script entry point: %s for req: %s - A callable " - "suffix is required. Cf https://packaging.python.org/en/" - "latest/distributing.html#console-scripts for more " - "information." % (entry, req) - ) - return maker.script_template % { - "module": entry.prefix, - "import_name": entry.suffix.split(".")[0], - "func": entry.suffix, - } - # ignore type, because mypy disallows assigning to a method, - # see https://github.com/python/mypy/issues/2427 - maker._get_script_text = _get_script_text # type: ignore - maker.script_template = r"""# -*- coding: utf-8 -*- -import re -import sys - -from %(module)s import %(import_name)s - -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit(%(func)s()) -""" - - # Special case pip and setuptools to generate versioned wrappers - # - # The issue is that some projects (specifically, pip and setuptools) use - # code in setup.py to create "versioned" entry points - pip2.7 on Python - # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into - # the wheel metadata at build time, and so if the wheel is installed with - # a *different* version of Python the entry points will be wrong. The - # correct fix for this is to enhance the metadata to be able to describe - # such versioned entry points, but that won't happen till Metadata 2.0 is - # available. - # In the meantime, projects using versioned entry points will either have - # incorrect versioned entry points, or they will not be able to distribute - # "universal" wheels (i.e., they will need a wheel per Python version). - # - # Because setuptools and pip are bundled with _ensurepip and virtualenv, - # we need to use universal wheels. 
So, as a stopgap until Metadata 2.0, we - # override the versioned entry points in the wheel and generate the - # correct ones. This code is purely a short-term measure until Metadata 2.0 - # is available. - # - # To add the level of hack in this section of code, in order to support - # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment - # variable which will control which version scripts get installed. - # - # ENSUREPIP_OPTIONS=altinstall - # - Only pipX.Y and easy_install-X.Y will be generated and installed - # ENSUREPIP_OPTIONS=install - # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note - # that this option is technically if ENSUREPIP_OPTIONS is set and is - # not altinstall - # DEFAULT - # - The default behavior is to install pip, pipX, pipX.Y, easy_install - # and easy_install-X.Y. - pip_script = console.pop('pip', None) - if pip_script: - if "ENSUREPIP_OPTIONS" not in os.environ: - spec = 'pip = ' + pip_script - generated.extend(maker.make(spec)) - - if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": - spec = 'pip%s = %s' % (sys.version[:1], pip_script) - generated.extend(maker.make(spec)) - - spec = 'pip%s = %s' % (sys.version[:3], pip_script) - generated.extend(maker.make(spec)) - # Delete any other versioned pip entry points - pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)] - for k in pip_ep: - del console[k] - easy_install_script = console.pop('easy_install', None) - if easy_install_script: - if "ENSUREPIP_OPTIONS" not in os.environ: - spec = 'easy_install = ' + easy_install_script - generated.extend(maker.make(spec)) - - spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script) - generated.extend(maker.make(spec)) - # Delete any other versioned easy_install entry points - easy_install_ep = [ - k for k in console if re.match(r'easy_install(-\d\.\d)?$', k) - ] - for k in easy_install_ep: - del console[k] - - # Generate the console and GUI entry points specified in the wheel 
- if len(console) > 0: - generated_console_scripts = maker.make_multiple( - ['%s = %s' % kv for kv in console.items()] - ) - generated.extend(generated_console_scripts) - - if warn_script_location: - msg = message_about_scripts_not_on_PATH(generated_console_scripts) - if msg is not None: - logger.warning(msg) - - if len(gui) > 0: - generated.extend( - maker.make_multiple( - ['%s = %s' % kv for kv in gui.items()], - {'gui': True} - ) - ) - - # Record pip as the installer - installer = os.path.join(info_dir[0], 'INSTALLER') - temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip') - with open(temp_installer, 'wb') as installer_file: - installer_file.write(b'pip\n') - shutil.move(temp_installer, installer) - generated.append(installer) - - # Record details of all files installed - record = os.path.join(info_dir[0], 'RECORD') - temp_record = os.path.join(info_dir[0], 'RECORD.pip') - with open_for_csv(record, 'r') as record_in: - with open_for_csv(temp_record, 'w+') as record_out: - reader = csv.reader(record_in) - outrows = get_csv_rows_for_installed( - reader, installed=installed, changed=changed, - generated=generated, lib_dir=lib_dir, - ) - writer = csv.writer(record_out) - # Sort to simplify testing. - for row in sorted_outrows(outrows): - writer.writerow(row) - shutil.move(temp_record, record) - - -def wheel_version(source_dir): - # type: (Optional[str]) -> Optional[Tuple[int, ...]] - """ - Return the Wheel-Version of an extracted wheel, if possible. - - Otherwise, return None if we couldn't parse / extract it. 
- """ - try: - dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0] - - wheel_data = dist.get_metadata('WHEEL') - wheel_data = Parser().parsestr(wheel_data) - - version = wheel_data['Wheel-Version'].strip() - version = tuple(map(int, version.split('.'))) - return version - except Exception: - return None - - -def check_compatibility(version, name): - # type: (Optional[Tuple[int, ...]], str) -> None - """ - Raises errors or warns if called with an incompatible Wheel-Version. - - Pip should refuse to install a Wheel-Version that's a major series - ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when - installing a version only minor version ahead (e.g 1.2 > 1.1). - - version: a 2-tuple representing a Wheel-Version (Major, Minor) - name: name of wheel or package to raise exception about - - :raises UnsupportedWheel: when an incompatible Wheel-Version is given - """ - if not version: - raise UnsupportedWheel( - "%s is in an unsupported or invalid wheel" % name - ) - if version[0] > VERSION_COMPATIBLE[0]: - raise UnsupportedWheel( - "%s's Wheel-Version (%s) is not compatible with this version " - "of pip" % (name, '.'.join(map(str, version))) - ) - elif version > VERSION_COMPATIBLE: - logger.warning( - 'Installing from a newer Wheel-Version (%s)', - '.'.join(map(str, version)), - ) - - -class Wheel(object): - """A wheel file""" - - # TODO: Maybe move the class into the models sub-package - # TODO: Maybe move the install code into this class - - wheel_file_re = re.compile( - r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?)) - ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?) - \.whl|\.dist-info)$""", - re.VERBOSE - ) - - def __init__(self, filename): - # type: (str) -> None - """ - :raises InvalidWheelFilename: when the filename is invalid for a wheel - """ - wheel_info = self.wheel_file_re.match(filename) - if not wheel_info: - raise InvalidWheelFilename( - "%s is not a valid wheel filename." 
% filename - ) - self.filename = filename - self.name = wheel_info.group('name').replace('_', '-') - # we'll assume "_" means "-" due to wheel naming scheme - # (https://github.com/pypa/pip/issues/1150) - self.version = wheel_info.group('ver').replace('_', '-') - self.build_tag = wheel_info.group('build') - self.pyversions = wheel_info.group('pyver').split('.') - self.abis = wheel_info.group('abi').split('.') - self.plats = wheel_info.group('plat').split('.') - - # All the tag combinations from this file - self.file_tags = { - (x, y, z) for x in self.pyversions - for y in self.abis for z in self.plats - } - - def support_index_min(self, tags=None): - # type: (Optional[List[Pep425Tag]]) -> Optional[int] - """ - Return the lowest index that one of the wheel's file_tag combinations - achieves in the supported_tags list e.g. if there are 8 supported tags, - and one of the file tags is first in the list, then return 0. Returns - None is the wheel is not supported. - """ - if tags is None: # for mock - tags = pep425tags.get_supported() - indexes = [tags.index(c) for c in self.file_tags if c in tags] - return min(indexes) if indexes else None - - def supported(self, tags=None): - # type: (Optional[List[Pep425Tag]]) -> bool - """Is this wheel supported on this system?""" - if tags is None: # for mock - tags = pep425tags.get_supported() - return bool(set(tags).intersection(self.file_tags)) - - -def _contains_egg_info( - s, _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)): - """Determine whether the string looks like an egg_info. - - :param s: The string to parse. E.g. foo-2.1 - """ - return bool(_egg_info_re.search(s)) - - -def should_use_ephemeral_cache( - req, # type: InstallRequirement - format_control, # type: FormatControl - autobuilding, # type: bool - cache_available # type: bool -): - # type: (...) -> Optional[bool] - """ - Return whether to build an InstallRequirement object using the - ephemeral cache. 
- - :param cache_available: whether a cache directory is available for the - autobuilding=True case. - - :return: True or False to build the requirement with ephem_cache=True - or False, respectively; or None not to build the requirement. - """ - if req.constraint: - return None - if req.is_wheel: - if not autobuilding: - logger.info( - 'Skipping %s, due to already being wheel.', req.name, - ) - return None - if not autobuilding: - return False - - if req.editable or not req.source_dir: - return None - - if req.link and not req.link.is_artifact: - # VCS checkout. Build wheel just for this run. - return True - - if "binary" not in format_control.get_allowed_formats( - canonicalize_name(req.name)): - logger.info( - "Skipping bdist_wheel for %s, due to binaries " - "being disabled for it.", req.name, - ) - return None - - link = req.link - base, ext = link.splitext() - if cache_available and _contains_egg_info(base): - return False - - # Otherwise, build the wheel just for this run using the ephemeral - # cache since we are either in the case of e.g. a local directory, or - # no cache directory is available to use. - return True - - -def format_command( - command_args, # type: List[str] - command_output, # type: str -): - # type: (...) -> str - """ - Format command information for logging. - """ - text = 'Command arguments: {}\n'.format(command_args) - - if not command_output: - text += 'Command output: None' - elif logger.getEffectiveLevel() > logging.DEBUG: - text += 'Command output: [use --verbose to show]' - else: - if not command_output.endswith('\n'): - command_output += '\n' - text += ( - 'Command output:\n{}' - '-----------------------------------------' - ).format(command_output) - - return text - - -def get_legacy_build_wheel_path( - names, # type: List[str] - temp_dir, # type: str - req, # type: InstallRequirement - command_args, # type: List[str] - command_output, # type: str -): - # type: (...) 
-> Optional[str] - """ - Return the path to the wheel in the temporary build directory. - """ - # Sort for determinism. - names = sorted(names) - if not names: - msg = ( - 'Legacy build of wheel for {!r} created no files.\n' - ).format(req.name) - msg += format_command(command_args, command_output) - logger.warning(msg) - return None - - if len(names) > 1: - msg = ( - 'Legacy build of wheel for {!r} created more than one file.\n' - 'Filenames (choosing first): {}\n' - ).format(req.name, names) - msg += format_command(command_args, command_output) - logger.warning(msg) - - return os.path.join(temp_dir, names[0]) - - -class WheelBuilder(object): - """Build wheels from a RequirementSet.""" - - def __init__( - self, - finder, # type: PackageFinder - preparer, # type: RequirementPreparer - wheel_cache, # type: WheelCache - build_options=None, # type: Optional[List[str]] - global_options=None, # type: Optional[List[str]] - no_clean=False # type: bool - ): - # type: (...) -> None - self.finder = finder - self.preparer = preparer - self.wheel_cache = wheel_cache - - self._wheel_dir = preparer.wheel_download_dir - - self.build_options = build_options or [] - self.global_options = global_options or [] - self.no_clean = no_clean - - def _build_one(self, req, output_dir, python_tag=None): - """Build one wheel. - - :return: The filename of the built wheel, or None if the build failed. 
- """ - # Install build deps into temporary directory (PEP 518) - with req.build_env: - return self._build_one_inside_env(req, output_dir, - python_tag=python_tag) - - def _build_one_inside_env(self, req, output_dir, python_tag=None): - with TempDirectory(kind="wheel") as temp_dir: - if req.use_pep517: - builder = self._build_one_pep517 - else: - builder = self._build_one_legacy - wheel_path = builder(req, temp_dir.path, python_tag=python_tag) - if wheel_path is not None: - wheel_name = os.path.basename(wheel_path) - dest_path = os.path.join(output_dir, wheel_name) - try: - shutil.move(wheel_path, dest_path) - logger.info('Stored in directory: %s', output_dir) - return dest_path - except Exception: - pass - # Ignore return, we can't do anything else useful. - self._clean_one(req) - return None - - def _base_setup_args(self, req): - # NOTE: Eventually, we'd want to also -S to the flags here, when we're - # isolating. Currently, it breaks Python in virtualenvs, because it - # relies on site.py to find parts of the standard library outside the - # virtualenv. - return [ - sys.executable, '-u', '-c', - SETUPTOOLS_SHIM % req.setup_py - ] + list(self.global_options) - - def _build_one_pep517(self, req, tempd, python_tag=None): - """Build one InstallRequirement using the PEP 517 build process. - - Returns path to wheel if successfully built. Otherwise, returns None. - """ - assert req.metadata_directory is not None - try: - req.spin_message = 'Building wheel for %s (PEP 517)' % (req.name,) - logger.debug('Destination directory: %s', tempd) - wheel_name = req.pep517_backend.build_wheel( - tempd, - metadata_directory=req.metadata_directory - ) - if python_tag: - # General PEP 517 backends don't necessarily support - # a "--python-tag" option, so we rename the wheel - # file directly. 
- new_name = replace_python_tag(wheel_name, python_tag) - os.rename( - os.path.join(tempd, wheel_name), - os.path.join(tempd, new_name) - ) - # Reassign to simplify the return at the end of function - wheel_name = new_name - except Exception: - logger.error('Failed building wheel for %s', req.name) - return None - return os.path.join(tempd, wheel_name) - - def _build_one_legacy(self, req, tempd, python_tag=None): - """Build one InstallRequirement using the "legacy" build process. - - Returns path to wheel if successfully built. Otherwise, returns None. - """ - base_args = self._base_setup_args(req) - - spin_message = 'Building wheel for %s (setup.py)' % (req.name,) - with open_spinner(spin_message) as spinner: - logger.debug('Destination directory: %s', tempd) - wheel_args = base_args + ['bdist_wheel', '-d', tempd] \ - + self.build_options - - if python_tag is not None: - wheel_args += ["--python-tag", python_tag] - - try: - output = call_subprocess(wheel_args, cwd=req.setup_py_dir, - show_stdout=False, spinner=spinner) - except Exception: - spinner.finish("error") - logger.error('Failed building wheel for %s', req.name) - return None - names = os.listdir(tempd) - wheel_path = get_legacy_build_wheel_path( - names=names, - temp_dir=tempd, - req=req, - command_args=wheel_args, - command_output=output, - ) - return wheel_path - - def _clean_one(self, req): - base_args = self._base_setup_args(req) - - logger.info('Running setup.py clean for %s', req.name) - clean_args = base_args + ['clean', '--all'] - try: - call_subprocess(clean_args, cwd=req.source_dir, show_stdout=False) - return True - except Exception: - logger.error('Failed cleaning build dir for %s', req.name) - return False - - def build( - self, - requirements, # type: Iterable[InstallRequirement] - session, # type: PipSession - autobuilding=False # type: bool - ): - # type: (...) -> List[InstallRequirement] - """Build wheels. 
- - :param unpack: If True, replace the sdist we built from with the - newly built wheel, in preparation for installation. - :return: True if all the wheels built correctly. - """ - buildset = [] - format_control = self.finder.format_control - # Whether a cache directory is available for autobuilding=True. - cache_available = bool(self._wheel_dir or self.wheel_cache.cache_dir) - - for req in requirements: - ephem_cache = should_use_ephemeral_cache( - req, format_control=format_control, autobuilding=autobuilding, - cache_available=cache_available, - ) - if ephem_cache is None: - continue - - buildset.append((req, ephem_cache)) - - if not buildset: - return [] - - # Is any wheel build not using the ephemeral cache? - if any(not ephem_cache for _, ephem_cache in buildset): - have_directory_for_build = self._wheel_dir or ( - autobuilding and self.wheel_cache.cache_dir - ) - assert have_directory_for_build - - # TODO by @pradyunsg - # Should break up this method into 2 separate methods. - - # Build the wheels. - logger.info( - 'Building wheels for collected packages: %s', - ', '.join([req.name for (req, _) in buildset]), - ) - _cache = self.wheel_cache # shorter name - with indent_log(): - build_success, build_failure = [], [] - for req, ephem in buildset: - python_tag = None - if autobuilding: - python_tag = pep425tags.implementation_tag - if ephem: - output_dir = _cache.get_ephem_path_for_link(req.link) - else: - output_dir = _cache.get_path_for_link(req.link) - try: - ensure_dir(output_dir) - except OSError as e: - logger.warning("Building wheel for %s failed: %s", - req.name, e) - build_failure.append(req) - continue - else: - output_dir = self._wheel_dir - wheel_file = self._build_one( - req, output_dir, - python_tag=python_tag, - ) - if wheel_file: - build_success.append(req) - if autobuilding: - # XXX: This is mildly duplicative with prepare_files, - # but not close enough to pull out to a single common - # method. 
- # The code below assumes temporary source dirs - - # prevent it doing bad things. - if req.source_dir and not os.path.exists(os.path.join( - req.source_dir, PIP_DELETE_MARKER_FILENAME)): - raise AssertionError( - "bad source dir - missing marker") - # Delete the source we built the wheel from - req.remove_temporary_source() - # set the build directory again - name is known from - # the work prepare_files did. - req.source_dir = req.build_location( - self.preparer.build_dir - ) - # Update the link for this. - req.link = Link(path_to_url(wheel_file)) - assert req.link.is_wheel - # extract the wheel into the dir - unpack_url( - req.link, req.source_dir, None, False, - session=session, - ) - else: - build_failure.append(req) - - # notify success/failure - if build_success: - logger.info( - 'Successfully built %s', - ' '.join([req.name for req in build_success]), - ) - if build_failure: - logger.info( - 'Failed to build %s', - ' '.join([req.name for req in build_failure]), - ) - # Return a list of requirements that failed to build - return build_failure diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/wheel_builder.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/wheel_builder.py new file mode 100644 index 00000000..fa08016b --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/wheel_builder.py @@ -0,0 +1,308 @@ +"""Orchestrator for building wheels from InstallRequirements. 
+""" + +import logging +import os.path +import re +import shutil + +from pip._internal.models.link import Link +from pip._internal.operations.build.wheel import build_wheel_pep517 +from pip._internal.operations.build.wheel_legacy import build_wheel_legacy +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ensure_dir, hash_file, is_wheel_installed +from pip._internal.utils.setuptools_build import make_setuptools_clean_args +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs import vcs + +if MYPY_CHECK_RUNNING: + from typing import ( + Any, Callable, Iterable, List, Optional, Tuple, + ) + + from pip._internal.cache import WheelCache + from pip._internal.req.req_install import InstallRequirement + + BinaryAllowedPredicate = Callable[[InstallRequirement], bool] + BuildResult = Tuple[List[InstallRequirement], List[InstallRequirement]] + +logger = logging.getLogger(__name__) + +_egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.IGNORECASE) + + +def _contains_egg_info(s): + # type: (str) -> bool + """Determine whether the string looks like an egg_info. + + :param s: The string to parse. E.g. foo-2.1 + """ + return bool(_egg_info_re.search(s)) + + +def _should_build( + req, # type: InstallRequirement + need_wheel, # type: bool + check_binary_allowed, # type: BinaryAllowedPredicate +): + # type: (...) -> bool + """Return whether an InstallRequirement should be built into a wheel.""" + if req.constraint: + # never build requirements that are merely constraints + return False + if req.is_wheel: + if need_wheel: + logger.info( + 'Skipping %s, due to already being wheel.', req.name, + ) + return False + + if need_wheel: + # i.e. 
pip wheel, not pip install + return True + + # From this point, this concerns the pip install command only + # (need_wheel=False). + + if req.editable or not req.source_dir: + return False + + if not check_binary_allowed(req): + logger.info( + "Skipping wheel build for %s, due to binaries " + "being disabled for it.", req.name, + ) + return False + + if not req.use_pep517 and not is_wheel_installed(): + # we don't build legacy requirements if wheel is not installed + logger.info( + "Using legacy 'setup.py install' for %s, " + "since package 'wheel' is not installed.", req.name, + ) + return False + + return True + + +def should_build_for_wheel_command( + req, # type: InstallRequirement +): + # type: (...) -> bool + return _should_build( + req, need_wheel=True, check_binary_allowed=_always_true + ) + + +def should_build_for_install_command( + req, # type: InstallRequirement + check_binary_allowed, # type: BinaryAllowedPredicate +): + # type: (...) -> bool + return _should_build( + req, need_wheel=False, check_binary_allowed=check_binary_allowed + ) + + +def _should_cache( + req, # type: InstallRequirement +): + # type: (...) -> Optional[bool] + """ + Return whether a built InstallRequirement can be stored in the persistent + wheel cache, assuming the wheel cache is available, and _should_build() + has determined a wheel needs to be built. + """ + if req.editable or not req.source_dir: + # never cache editable requirements + return False + + if req.link and req.link.is_vcs: + # VCS checkout. Do not cache + # unless it points to an immutable commit hash. + assert not req.editable + assert req.source_dir + vcs_backend = vcs.get_backend_for_scheme(req.link.scheme) + assert vcs_backend + if vcs_backend.is_immutable_rev_checkout(req.link.url, req.source_dir): + return True + return False + + assert req.link + base, ext = req.link.splitext() + if _contains_egg_info(base): + return True + + # Otherwise, do not cache. 
+ return False + + +def _get_cache_dir( + req, # type: InstallRequirement + wheel_cache, # type: WheelCache +): + # type: (...) -> str + """Return the persistent or temporary cache directory where the built + wheel need to be stored. + """ + cache_available = bool(wheel_cache.cache_dir) + assert req.link + if cache_available and _should_cache(req): + cache_dir = wheel_cache.get_path_for_link(req.link) + else: + cache_dir = wheel_cache.get_ephem_path_for_link(req.link) + return cache_dir + + +def _always_true(_): + # type: (Any) -> bool + return True + + +def _build_one( + req, # type: InstallRequirement + output_dir, # type: str + build_options, # type: List[str] + global_options, # type: List[str] +): + # type: (...) -> Optional[str] + """Build one wheel. + + :return: The filename of the built wheel, or None if the build failed. + """ + try: + ensure_dir(output_dir) + except OSError as e: + logger.warning( + "Building wheel for %s failed: %s", + req.name, e, + ) + return None + + # Install build deps into temporary directory (PEP 518) + with req.build_env: + return _build_one_inside_env( + req, output_dir, build_options, global_options + ) + + +def _build_one_inside_env( + req, # type: InstallRequirement + output_dir, # type: str + build_options, # type: List[str] + global_options, # type: List[str] +): + # type: (...) 
-> Optional[str] + with TempDirectory(kind="wheel") as temp_dir: + assert req.name + if req.use_pep517: + assert req.metadata_directory + wheel_path = build_wheel_pep517( + name=req.name, + backend=req.pep517_backend, + metadata_directory=req.metadata_directory, + build_options=build_options, + tempd=temp_dir.path, + ) + else: + wheel_path = build_wheel_legacy( + name=req.name, + setup_py_path=req.setup_py_path, + source_dir=req.unpacked_source_directory, + global_options=global_options, + build_options=build_options, + tempd=temp_dir.path, + ) + + if wheel_path is not None: + wheel_name = os.path.basename(wheel_path) + dest_path = os.path.join(output_dir, wheel_name) + try: + wheel_hash, length = hash_file(wheel_path) + shutil.move(wheel_path, dest_path) + logger.info('Created wheel for %s: ' + 'filename=%s size=%d sha256=%s', + req.name, wheel_name, length, + wheel_hash.hexdigest()) + logger.info('Stored in directory: %s', output_dir) + return dest_path + except Exception as e: + logger.warning( + "Building wheel for %s failed: %s", + req.name, e, + ) + # Ignore return, we can't do anything else useful. + if not req.use_pep517: + _clean_one_legacy(req, global_options) + return None + + +def _clean_one_legacy(req, global_options): + # type: (InstallRequirement, List[str]) -> bool + clean_args = make_setuptools_clean_args( + req.setup_py_path, + global_options=global_options, + ) + + logger.info('Running setup.py clean for %s', req.name) + try: + call_subprocess(clean_args, cwd=req.source_dir) + return True + except Exception: + logger.error('Failed cleaning build dir for %s', req.name) + return False + + +def build( + requirements, # type: Iterable[InstallRequirement] + wheel_cache, # type: WheelCache + build_options, # type: List[str] + global_options, # type: List[str] +): + # type: (...) -> BuildResult + """Build wheels. + + :return: The list of InstallRequirement that succeeded to build and + the list of InstallRequirement that failed to build. 
+ """ + if not requirements: + return [], [] + + # Build the wheels. + logger.info( + 'Building wheels for collected packages: %s', + ', '.join(req.name for req in requirements), # type: ignore + ) + + with indent_log(): + build_successes, build_failures = [], [] + for req in requirements: + cache_dir = _get_cache_dir(req, wheel_cache) + wheel_file = _build_one( + req, cache_dir, build_options, global_options + ) + if wheel_file: + # Update the link for this. + req.link = Link(path_to_url(wheel_file)) + req.local_file_path = req.link.file_path + assert req.link.is_wheel + build_successes.append(req) + else: + build_failures.append(req) + + # notify success/failure + if build_successes: + logger.info( + 'Successfully built %s', + ' '.join([req.name for req in build_successes]), # type: ignore + ) + if build_failures: + logger.info( + 'Failed to build %s', + ' '.join([req.name for req in build_failures]), # type: ignore + ) + # Return a list of requirements that failed to build + return build_successes, build_failures diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/__init__.py index b919b540..581db54c 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/__init__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/__init__.py @@ -30,24 +30,17 @@ def vendored(modulename): vendored_name = "{0}.{1}".format(__name__, modulename) try: - __import__(vendored_name, globals(), locals(), level=0) + __import__(modulename, globals(), locals(), level=0) except ImportError: - try: - __import__(modulename, globals(), locals(), level=0) - except ImportError: - # We can just silently allow import failures to pass here. If we - # got to this point it means that ``import pip._vendor.whatever`` - # failed and so did ``import whatever``. 
Since we're importing this - # upfront in an attempt to alias imports, not erroring here will - # just mean we get a regular import error whenever pip *actually* - # tries to import one of these modules to use it, which actually - # gives us a better error message than we would have otherwise - # gotten. - pass - else: - sys.modules[vendored_name] = sys.modules[modulename] - base, head = vendored_name.rsplit(".", 1) - setattr(sys.modules[base], head, sys.modules[modulename]) + # This error used to be silenced in earlier variants of this file, to instead + # raise the error when pip actually tries to use the missing module. + # Based on inputs in #5354, this was changed to explicitly raise the error. + # Re-raising the exception without modifying it is an intentional choice. + raise + else: + sys.modules[vendored_name] = sys.modules[modulename] + base, head = vendored_name.rsplit(".", 1) + setattr(sys.modules[base], head, sys.modules[modulename]) # If we're operating in a debundled setup, then we want to go ahead and trigger @@ -61,12 +54,14 @@ if DEBUNDLED: sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path # Actually alias all of our vendored dependencies. 
+ vendored("appdirs") vendored("cachecontrol") + vendored("certifi") vendored("colorama") + vendored("contextlib2") vendored("distlib") vendored("distro") vendored("html5lib") - vendored("lockfile") vendored("six") vendored("six.moves") vendored("six.moves.urllib") @@ -77,9 +72,9 @@ if DEBUNDLED: vendored("pep517") vendored("pkg_resources") vendored("progress") - vendored("pytoml") vendored("retrying") vendored("requests") + vendored("requests.exceptions") vendored("requests.packages") vendored("requests.packages.urllib3") vendored("requests.packages.urllib3._collections") @@ -108,4 +103,8 @@ if DEBUNDLED: vendored("requests.packages.urllib3.util.ssl_") vendored("requests.packages.urllib3.util.timeout") vendored("requests.packages.urllib3.util.url") + vendored("resolvelib") + vendored("toml") + vendored("toml.encoder") + vendored("toml.decoder") vendored("urllib3") diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/appdirs.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/appdirs.py index 2bd39110..33a3b774 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/appdirs.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/appdirs.py @@ -13,8 +13,8 @@ See <http://github.com/ActiveState/appdirs> for details and usage. # - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html # - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html -__version_info__ = (1, 4, 3) -__version__ = '.'.join(map(str, __version_info__)) +__version__ = "1.4.4" +__version_info__ = tuple(int(segment) for segment in __version__.split(".")) import sys @@ -37,6 +37,10 @@ if sys.platform.startswith('java'): # are actually checked for and the rest of the module expects # *sys.platform* style strings. 
system = 'linux2' +elif sys.platform == 'cli' and os.name == 'nt': + # Detect Windows in IronPython to match pip._internal.utils.compat.WINDOWS + # Discussion: <https://github.com/pypa/pip/pull/7501> + system = 'win32' else: system = sys.platform @@ -64,7 +68,7 @@ def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): for a discussion of issues. Typical user data directories are: - Mac OS X: ~/Library/Application Support/<AppName> + Mac OS X: ~/Library/Application Support/<AppName> # or ~/.config/<AppName>, if the other does not exist Unix: ~/.local/share/<AppName> # or in $XDG_DATA_HOME, if defined Win XP (not roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName> Win XP (roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName> @@ -150,7 +154,7 @@ def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): if appname: if version: appname = os.path.join(appname, version) - pathlist = [os.sep.join([x, appname]) for x in pathlist] + pathlist = [os.path.join(x, appname) for x in pathlist] if multipath: path = os.pathsep.join(pathlist) @@ -203,6 +207,8 @@ def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): return path +# for the discussion regarding site_config_dir locations +# see <https://github.com/pypa/pip/issues/1733> def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): r"""Return full path to the user-shared data dir for this application. 
@@ -238,14 +244,15 @@ def site_config_dir(appname=None, appauthor=None, version=None, multipath=False) if appname and version: path = os.path.join(path, version) else: - # XDG default for $XDG_CONFIG_DIRS + # XDG default for $XDG_CONFIG_DIRS (missing or empty) + # see <https://github.com/pypa/pip/pull/7501#discussion_r360624829> # only first, if multipath is False - path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') - pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] + path = os.getenv('XDG_CONFIG_DIRS') or '/etc/xdg' + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep) if x] if appname: if version: appname = os.path.join(appname, version) - pathlist = [os.sep.join([x, appname]) for x in pathlist] + pathlist = [os.path.join(x, appname) for x in pathlist] if multipath: path = os.pathsep.join(pathlist) @@ -291,6 +298,10 @@ def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): if appauthor is None: appauthor = appname path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) + # When using Python 2, return paths as bytes on Windows like we do on + # other operating systems. See helper function docs for more details. + if not PY3 and isinstance(path, unicode): + path = _win_path_to_bytes(path) if appname: if appauthor is not False: path = os.path.join(path, appauthor, appname) @@ -567,6 +578,24 @@ if system == "win32": _get_win_folder = _get_win_folder_from_registry +def _win_path_to_bytes(path): + """Encode Windows paths to bytes. Only used on Python 2. + + Motivation is to be consistent with other operating systems where paths + are also returned as bytes. This avoids problems mixing bytes and Unicode + elsewhere in the codebase. For more details and discussion see + <https://github.com/pypa/pip/issues/3463>. + + If encoding using ASCII and MBCS fails, return the original Unicode path. 
+ """ + for encoding in ('ASCII', 'MBCS'): + try: + return path.encode(encoding) + except (UnicodeEncodeError, LookupError): + pass + return path + + #---- self test code if __name__ == "__main__": diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/__init__.py index 8fdee66f..a1bbbbe3 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/__init__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/__init__.py @@ -4,7 +4,7 @@ Make it easy to import from cachecontrol without long namespaces. """ __author__ = "Eric Larson" __email__ = "eric@ionrock.org" -__version__ = "0.12.5" +__version__ = "0.12.6" from .wrapper import CacheControl from .adapter import CacheControlAdapter diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/adapter.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/adapter.py index 780eb288..815650e8 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/adapter.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/adapter.py @@ -24,7 +24,7 @@ class CacheControlAdapter(HTTPAdapter): **kw ): super(CacheControlAdapter, self).__init__(*args, **kw) - self.cache = cache or DictCache() + self.cache = DictCache() if cache is None else cache self.heuristic = heuristic self.cacheable_methods = cacheable_methods or ("GET",) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py index 1ba00806..607b9452 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py +++ 
b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py @@ -69,8 +69,8 @@ class FileCache(BaseCache): raise ValueError("Cannot use use_dir_lock and lock_class together") try: - from pip._vendor.lockfile import LockFile - from pip._vendor.lockfile.mkdirlockfile import MkdirLockFile + from lockfile import LockFile + from lockfile.mkdirlockfile import MkdirLockFile except ImportError: notice = dedent( """ diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/controller.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/controller.py index 1b2b943c..dafe55ca 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/controller.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/controller.py @@ -34,7 +34,7 @@ class CacheController(object): def __init__( self, cache=None, cache_etags=True, serializer=None, status_codes=None ): - self.cache = cache or DictCache() + self.cache = DictCache() if cache is None else cache self.cache_etags = cache_etags self.serializer = serializer or Serializer() self.cacheable_status_codes = status_codes or (200, 203, 300, 301) @@ -293,6 +293,15 @@ class CacheController(object): if no_store: return + # https://tools.ietf.org/html/rfc7234#section-4.1: + # A Vary header field-value of "*" always fails to match. + # Storing such a response leads to a deserialization warning + # during cache lookup and is not allowed to ever be served, + # so storing it can be avoided. 
+ if "*" in response_headers.get("vary", ""): + logger.debug('Response header has "Vary: *"') + return + # If we've been given an etag, then keep the response if self.cache_etags and "etag" in response_headers: logger.debug("Caching due to etag") diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/serialize.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/serialize.py index ec43ff27..3b6ec2de 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/serialize.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/serialize.py @@ -107,6 +107,8 @@ class Serializer(object): """ # Special case the '*' Vary value as it means we cannot actually # determine if the cached response is suitable for this request. + # This case is also handled in the controller code when creating + # a cache entry, but is left here for backwards compatibility. if "*" in cached.get("vary", {}): return @@ -179,7 +181,7 @@ class Serializer(object): def _loads_v4(self, request, data): try: - cached = msgpack.loads(data, encoding="utf-8") + cached = msgpack.loads(data, raw=False) except ValueError: return diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/wrapper.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/wrapper.py index 265bfc8b..d8e6fc6a 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/wrapper.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/cachecontrol/wrapper.py @@ -13,7 +13,7 @@ def CacheControl( cacheable_methods=None, ): - cache = cache or DictCache() + cache = DictCache() if cache is None else cache adapter_class = adapter_class or CacheControlAdapter adapter = adapter_class( cache, diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/certifi/__init__.py 
b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/certifi/__init__.py index ef71f3af..5d52a62e 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/certifi/__init__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/certifi/__init__.py @@ -1,3 +1,3 @@ -from .core import where +from .core import contents, where -__version__ = "2018.11.29" +__version__ = "2020.06.20" diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/certifi/__main__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/certifi/__main__.py index ae2aff5c..00376349 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/certifi/__main__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/certifi/__main__.py @@ -1,2 +1,12 @@ -from pip._vendor.certifi import where -print(where()) +import argparse + +from pip._vendor.certifi import contents, where + +parser = argparse.ArgumentParser() +parser.add_argument("-c", "--contents", action="store_true") +args = parser.parse_args() + +if args.contents: + print(contents()) +else: + print(where()) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/certifi/cacert.pem b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/certifi/cacert.pem index db68797e..0fd855f4 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/certifi/cacert.pem +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/certifi/cacert.pem @@ -58,38 +58,6 @@ AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== -----END CERTIFICATE----- -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. 
OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Label: "Verisign Class 3 Public Primary Certification Authority - G3" -# Serial: 206684696279472310254277870180966723415 -# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 -# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 -# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 ------BEGIN CERTIFICATE----- -MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl -cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu -LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT -aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD -VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT -aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ -bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu -IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b -N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t -KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu -kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm -CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ -Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu -imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te -2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe -DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC -/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p -F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt -TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== ------END CERTIFICATE----- - # Issuer: 
CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited # Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited # Label: "Entrust.net Premium 2048 Secure Server CA" @@ -152,39 +120,6 @@ ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp -----END CERTIFICATE----- -# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Label: "AddTrust External Root" -# Serial: 1 -# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f -# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 -# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 ------BEGIN CERTIFICATE----- -MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs -IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 -MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux -FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h -bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v -dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt -H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 -uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX -mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX -a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN -E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 -WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD -VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 
-Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU -cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx -IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN -AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH -YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 -6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC -Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX -c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a -mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= ------END CERTIFICATE----- - # Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. # Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. # Label: "Entrust Root Certification Authority" @@ -771,36 +706,6 @@ vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep +OkuE6N36B9K -----END CERTIFICATE----- -# Issuer: CN=Class 2 Primary CA O=Certplus -# Subject: CN=Class 2 Primary CA O=Certplus -# Label: "Certplus Class 2 Primary CA" -# Serial: 177770208045934040241468760488327595043 -# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b -# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb -# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb ------BEGIN CERTIFICATE----- -MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw -PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz -cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9 -MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz -IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ -ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR -VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL 
-kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd -EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas -H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0 -HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud -DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4 -QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu -Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/ -AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8 -yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR -FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA -ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB -kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7 -l7+ijrRU ------END CERTIFICATE----- - # Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. # Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. # Label: "DST Root CA X3" @@ -1219,36 +1124,6 @@ t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== -----END CERTIFICATE----- -# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center -# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center -# Label: "Deutsche Telekom Root CA 2" -# Serial: 38 -# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08 -# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf -# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3 ------BEGIN CERTIFICATE----- -MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc -MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj -IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB -IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE -RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl 
-U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290 -IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU -ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC -QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr -rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S -NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc -QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH -txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP -BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC -AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp -tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa -IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl -6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+ -xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU -Cm26OWMohpLzGITY+9HPBVZkVw== ------END CERTIFICATE----- - # Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc # Subject: CN=Cybertrust Global Root O=Cybertrust, Inc # Label: "Cybertrust Global Root" @@ -1559,47 +1434,6 @@ uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= -----END CERTIFICATE----- -# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden -# Label: "Staat der Nederlanden Root CA - G2" -# Serial: 10000012 -# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a -# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16 -# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f ------BEGIN CERTIFICATE----- -MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh 
-dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX -DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl -ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv -b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291 -qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp -uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU -Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE -pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp -5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M -UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN -GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy -5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv -6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK -eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6 -B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/ -BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov -L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV -HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG -SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS -CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen -5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897 -IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK -gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL -+63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL -vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm -bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk -N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC -Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z -ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ== ------END CERTIFICATE----- - # Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post # 
Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post # Label: "Hongkong Post Root CA 1" @@ -2200,6 +2034,45 @@ t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 -----END CERTIFICATE----- +# Issuer: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes +# Subject: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes +# Label: "EC-ACC" +# Serial: -23701579247955709139626555126524820479 +# MD5 Fingerprint: eb:f5:9d:29:0d:61:f9:42:1f:7c:c2:ba:6d:e3:15:09 +# SHA1 Fingerprint: 28:90:3a:63:5b:52:80:fa:e6:77:4c:0b:6d:a7:d6:ba:a6:4a:f2:e8 +# SHA256 Fingerprint: 88:49:7f:01:60:2f:31:54:24:6a:e2:8c:4d:5a:ef:10:f1:d8:7e:bb:76:62:6f:4a:e0:b7:f9:5b:a7:96:87:99 +-----BEGIN CERTIFICATE----- +MIIFVjCCBD6gAwIBAgIQ7is969Qh3hSoYqwE893EATANBgkqhkiG9w0BAQUFADCB +8zELMAkGA1UEBhMCRVMxOzA5BgNVBAoTMkFnZW5jaWEgQ2F0YWxhbmEgZGUgQ2Vy +dGlmaWNhY2lvIChOSUYgUS0wODAxMTc2LUkpMSgwJgYDVQQLEx9TZXJ2ZWlzIFB1 +YmxpY3MgZGUgQ2VydGlmaWNhY2lvMTUwMwYDVQQLEyxWZWdldSBodHRwczovL3d3 +dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAoYykwMzE1MDMGA1UECxMsSmVyYXJxdWlh +IEVudGl0YXRzIGRlIENlcnRpZmljYWNpbyBDYXRhbGFuZXMxDzANBgNVBAMTBkVD +LUFDQzAeFw0wMzAxMDcyMzAwMDBaFw0zMTAxMDcyMjU5NTlaMIHzMQswCQYDVQQG +EwJFUzE7MDkGA1UEChMyQWdlbmNpYSBDYXRhbGFuYSBkZSBDZXJ0aWZpY2FjaW8g +KE5JRiBRLTA4MDExNzYtSSkxKDAmBgNVBAsTH1NlcnZlaXMgUHVibGljcyBkZSBD +ZXJ0aWZpY2FjaW8xNTAzBgNVBAsTLFZlZ2V1IGh0dHBzOi8vd3d3LmNhdGNlcnQu +bmV0L3ZlcmFycmVsIChjKTAzMTUwMwYDVQQLEyxKZXJhcnF1aWEgRW50aXRhdHMg +ZGUgQ2VydGlmaWNhY2lvIENhdGFsYW5lczEPMA0GA1UEAxMGRUMtQUNDMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsyLHT+KXQpWIR4NA9h0X84NzJB5R +85iKw5K4/0CQBXCHYMkAqbWUZRkiFRfCQ2xmRJoNBD45b6VLeqpjt4pEndljkYRm +4CgPukLjbo73FCeTae6RDqNfDrHrZqJyTxIThmV6PttPB/SnCWDaOkKZx7J/sxaV 
+HMf5NLWUhdWZXqBIoH7nF2W4onW4HvPlQn2v7fOKSGRdghST2MDk/7NQcvJ29rNd +QlB50JQ+awwAvthrDk4q7D7SzIKiGGUzE3eeml0aE9jD2z3Il3rucO2n5nzbcc8t +lGLfbdb1OL4/pYUKGbio2Al1QnDE6u/LDsg0qBIimAy4E5S2S+zw0JDnJwIDAQAB +o4HjMIHgMB0GA1UdEQQWMBSBEmVjX2FjY0BjYXRjZXJ0Lm5ldDAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUoMOLRKo3pUW/l4Ba0fF4 +opvpXY0wfwYDVR0gBHgwdjB0BgsrBgEEAfV4AQMBCjBlMCwGCCsGAQUFBwIBFiBo +dHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbDA1BggrBgEFBQcCAjApGidW +ZWdldSBodHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAwDQYJKoZIhvcN +AQEFBQADggEBAKBIW4IB9k1IuDlVNZyAelOZ1Vr/sXE7zDkJlF7W2u++AVtd0x7Y +/X1PzaBB4DSTv8vihpw3kpBWHNzrKQXlxJ7HNd+KDM3FIUPpqojlNcAZQmNaAl6k +SBg6hW/cnbw/nZzBh7h6YQjpdwt/cKt63dmXLGQehb+8dJahw3oS7AwaboMMPOhy +Rp/7SNVel+axofjk70YllJyJ22k4vuxcDlbHZVHlUIiIv0LVKz3l+bqeLrPK9HOS +Agu+TGbrIP65y7WZf+a2E/rKS03Z7lNGBjvGTq2TWoF+bCpLagVFjPIhpDGQh2xl +nJ2lYJU6Un/10asIbvPuW/mIPX64b24D5EI= +-----END CERTIFICATE----- + # Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority # Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. 
Authority # Label: "Hellenic Academic and Research Institutions RootCA 2011" @@ -3453,46 +3326,6 @@ AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ 5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su -----END CERTIFICATE----- -# Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 -# Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 -# Label: "Certinomis - Root CA" -# Serial: 1 -# MD5 Fingerprint: 14:0a:fd:8d:a8:28:b5:38:69:db:56:7e:61:22:03:3f -# SHA1 Fingerprint: 9d:70:bb:01:a5:a4:a0:18:11:2e:f7:1c:01:b9:32:c5:34:e7:88:a8 -# SHA256 Fingerprint: 2a:99:f5:bc:11:74:b7:3c:bb:1d:62:08:84:e0:1c:34:e5:1c:cb:39:78:da:12:5f:0e:33:26:88:83:bf:41:58 ------BEGIN CERTIFICATE----- -MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjET -MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAb -BgNVBAMTFENlcnRpbm9taXMgLSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMz -MTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMx -FzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRDZXJ0aW5vbWlzIC0g -Um9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQosP5L2 -fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJfl -LieY6pOod5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQV -WZUKxkd8aRi5pwP5ynapz8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDF -TKWrteoB4owuZH9kb/2jJZOLyKIOSY008B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb -5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09xRLWtwHkziOC/7aOgFLSc -CbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE6OXWk6Ri -wsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJ -wx3tFvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SG -m/lg0h9tkQPTYKbVPZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4 -F2iw4lNVYC2vPsKD2NkJK/DAZNuHi5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZng -WVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB -BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I6tNxIqSSaHh0 -2TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsF 
-AAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/ -0KGRHCwPT5iVWVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWw -F6YSjNRieOpWauwK0kDDPAUwPk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZS -g081lLMSVX3l4kLr5JyTCcBMWwerx20RoFAXlCOotQqSD7J6wWAsOMwaplv/8gzj -qh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJy29SWwNyhlCVCNSN -h4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9Iff/ -ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8V -btaw5BngDwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwj -Y/M50n92Uaf0yKHxDHYiI0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ -8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nMcyrDflOR1m749fPH0FFNjkulW+YZFzvW -gQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVrhkIGuUE= ------END CERTIFICATE----- - # Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed # Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed # Label: "OISTE WISeKey Global Root GB CA" @@ -3849,47 +3682,6 @@ CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW 1KyLa2tJElMzrdfkviT8tQp21KW8EA== -----END CERTIFICATE----- -# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A. -# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A. 
-# Label: "LuxTrust Global Root 2" -# Serial: 59914338225734147123941058376788110305822489521 -# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c -# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f -# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5 ------BEGIN CERTIFICATE----- -MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL -BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV -BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw -MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B -LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F -ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem -hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1 -EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn -Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4 -zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ -96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m -j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g -DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+ -8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j -X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH -hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB -KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0 -Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT -+Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL -BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9 -BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO -jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9 -loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c 
-qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+ -2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/ -JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre -zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf -LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+ -x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6 -oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr ------END CERTIFICATE----- - # Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM # Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM # Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" @@ -4510,3 +4302,319 @@ Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw 3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= -----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign Root CA - G1" +# Serial: 235931866688319308814040 +# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac +# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c +# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 +-----BEGIN CERTIFICATE----- +MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD +VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU +ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH +MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO +MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv 
+Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz +f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO +8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq +d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM +tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt +Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB +o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x +PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM +wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d +GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH +6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby +RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx +iN66zB+Afko= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign ECC Root CA - G3" +# Serial: 287880440101571086945156 +# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 +# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 +# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b +-----BEGIN CERTIFICATE----- +MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG +EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo +bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g +RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ +TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s +b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 
+WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS +fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB +zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq +hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB +CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD ++JbNR6iC8hZVdyR+EhCVBCyj +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Label: "emSign Root CA - C1" +# Serial: 825510296613316004955058 +# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 +# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 +# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f +-----BEGIN CERTIFICATE----- +MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG +A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg +SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v +dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ +BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ +HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH +3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH +GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c +xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 +aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq +TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 +/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 +kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG +YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT 
++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo +WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Label: "emSign ECC Root CA - C3" +# Serial: 582948710642506000014504 +# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 +# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 +# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 +-----BEGIN CERTIFICATE----- +MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG +EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx +IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND +IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci +MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti +sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O +BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB +Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c +3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J +0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Label: "Hongkong Post Root CA 3" +# Serial: 46170865288971385588281144162979347873371282084 +# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 +# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 +# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 +-----BEGIN CERTIFICATE----- +MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL 
+BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ +SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n +a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 +NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT +CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u +Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO +dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI +VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV +9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY +2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY +vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt +bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb +x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ +l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK +TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj +Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e +i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw +DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG +7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk +MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr +gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk +GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS +3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm +Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ +l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c +JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP +L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa +LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG +mpv0 +-----END 
CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G4" +# Serial: 289383649854506086828220374796556676440 +# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 +# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 +# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw +gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL +Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg +MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw +BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 +MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 +c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ +bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg +Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ +2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E +T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j +5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM +C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T +DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX +wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A +2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm +nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 
+dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl +N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj +c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS +5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS +Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr +hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ +B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI +AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw +H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ +b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk +2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol +IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk +5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY +n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD +VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV +UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq 
+hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm +gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 
+W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC +gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB +RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. 
+# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF 
+dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ +d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk +PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/certifi/core.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/certifi/core.py index 2d02ea44..8987449f 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/certifi/core.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/certifi/core.py @@ -1,20 +1,60 @@ -#!/usr/bin/env python # -*- coding: 
utf-8 -*- """ certifi.py ~~~~~~~~~~ -This module returns the installation location of cacert.pem. +This module returns the installation location of cacert.pem or its contents. """ import os +try: + from importlib.resources import path as get_path, read_text -def where(): - f = os.path.dirname(__file__) + _CACERT_CTX = None + _CACERT_PATH = None - return os.path.join(f, 'cacert.pem') + def where(): + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = get_path("pip._vendor.certifi", "cacert.pem") + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + return _CACERT_PATH -if __name__ == '__main__': - print(where()) + +except ImportError: + # This fallback will work for Python versions prior to 3.7 that lack the + # importlib.resources module but relies on the existing `where` function + # so won't address issues with environments like PyOxidizer that don't set + # __file__ on modules. 
+ def read_text(_module, _path, encoding="ascii"): + with open(where(), "r", encoding=encoding) as data: + return data.read() + + # If we don't have importlib.resources, then we will just do the old logic + # of assuming we're on the filesystem and munge the path directly. + def where(): + f = os.path.dirname(__file__) + + return os.path.join(f, "cacert.pem") + + +def contents(): + return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/colorama/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/colorama/__init__.py index 2a3bf471..34c263cc 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/colorama/__init__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/colorama/__init__.py @@ -3,4 +3,4 @@ from .initialise import init, deinit, reinit, colorama_text from .ansi import Fore, Back, Style, Cursor from .ansitowin32 import AnsiToWin32 -__version__ = '0.4.1' +__version__ = '0.4.3' diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/contextlib2.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/contextlib2.py new file mode 100644 index 00000000..3aae8f41 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/contextlib2.py @@ -0,0 +1,518 @@ +"""contextlib2 - backports and enhancements to the contextlib module""" + +import abc +import sys +import warnings +from collections import deque +from functools import wraps + +__all__ = ["contextmanager", "closing", "nullcontext", + "AbstractContextManager", + "ContextDecorator", "ExitStack", + "redirect_stdout", "redirect_stderr", "suppress"] + +# Backwards compatibility +__all__ += ["ContextStack"] + + +# Backport abc.ABC +if sys.version_info[:2] >= (3, 4): + _abc_ABC = abc.ABC +else: + _abc_ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()}) + + +# Backport classic class MRO +def _classic_mro(C, 
result): + if C in result: + return + result.append(C) + for B in C.__bases__: + _classic_mro(B, result) + return result + + +# Backport _collections_abc._check_methods +def _check_methods(C, *methods): + try: + mro = C.__mro__ + except AttributeError: + mro = tuple(_classic_mro(C, [])) + + for method in methods: + for B in mro: + if method in B.__dict__: + if B.__dict__[method] is None: + return NotImplemented + break + else: + return NotImplemented + return True + + +class AbstractContextManager(_abc_ABC): + """An abstract base class for context managers.""" + + def __enter__(self): + """Return `self` upon entering the runtime context.""" + return self + + @abc.abstractmethod + def __exit__(self, exc_type, exc_value, traceback): + """Raise any exception triggered within the runtime context.""" + return None + + @classmethod + def __subclasshook__(cls, C): + """Check whether subclass is considered a subclass of this ABC.""" + if cls is AbstractContextManager: + return _check_methods(C, "__enter__", "__exit__") + return NotImplemented + + +class ContextDecorator(object): + """A base class or mixin that enables context managers to work as decorators.""" + + def refresh_cm(self): + """Returns the context manager used to actually wrap the call to the + decorated function. + + The default implementation just returns *self*. + + Overriding this method allows otherwise one-shot context managers + like _GeneratorContextManager to support use as decorators via + implicit recreation. + + DEPRECATED: refresh_cm was never added to the standard library's + ContextDecorator API + """ + warnings.warn("refresh_cm was never added to the standard library", + DeprecationWarning) + return self._recreate_cm() + + def _recreate_cm(self): + """Return a recreated instance of self. + + Allows an otherwise one-shot context manager like + _GeneratorContextManager to support use as + a decorator via implicit recreation. + + This is a private interface just for _GeneratorContextManager. 
+ See issue #11647 for details. + """ + return self + + def __call__(self, func): + @wraps(func) + def inner(*args, **kwds): + with self._recreate_cm(): + return func(*args, **kwds) + return inner + + +class _GeneratorContextManager(ContextDecorator): + """Helper for @contextmanager decorator.""" + + def __init__(self, func, args, kwds): + self.gen = func(*args, **kwds) + self.func, self.args, self.kwds = func, args, kwds + # Issue 19330: ensure context manager instances have good docstrings + doc = getattr(func, "__doc__", None) + if doc is None: + doc = type(self).__doc__ + self.__doc__ = doc + # Unfortunately, this still doesn't provide good help output when + # inspecting the created context manager instances, since pydoc + # currently bypasses the instance docstring and shows the docstring + # for the class instead. + # See http://bugs.python.org/issue19404 for more details. + + def _recreate_cm(self): + # _GCM instances are one-shot context managers, so the + # CM must be recreated each time a decorated function is + # called + return self.__class__(self.func, self.args, self.kwds) + + def __enter__(self): + try: + return next(self.gen) + except StopIteration: + raise RuntimeError("generator didn't yield") + + def __exit__(self, type, value, traceback): + if type is None: + try: + next(self.gen) + except StopIteration: + return + else: + raise RuntimeError("generator didn't stop") + else: + if value is None: + # Need to force instantiation so we can reliably + # tell if we get the same exception back + value = type() + try: + self.gen.throw(type, value, traceback) + raise RuntimeError("generator didn't stop after throw()") + except StopIteration as exc: + # Suppress StopIteration *unless* it's the same exception that + # was passed to throw(). This prevents a StopIteration + # raised inside the "with" statement from being suppressed. 
+ return exc is not value + except RuntimeError as exc: + # Don't re-raise the passed in exception + if exc is value: + return False + # Likewise, avoid suppressing if a StopIteration exception + # was passed to throw() and later wrapped into a RuntimeError + # (see PEP 479). + if _HAVE_EXCEPTION_CHAINING and exc.__cause__ is value: + return False + raise + except: + # only re-raise if it's *not* the exception that was + # passed to throw(), because __exit__() must not raise + # an exception unless __exit__() itself failed. But throw() + # has to raise the exception to signal propagation, so this + # fixes the impedance mismatch between the throw() protocol + # and the __exit__() protocol. + # + if sys.exc_info()[1] is not value: + raise + + +def contextmanager(func): + """@contextmanager decorator. + + Typical usage: + + @contextmanager + def some_generator(<arguments>): + <setup> + try: + yield <value> + finally: + <cleanup> + + This makes this: + + with some_generator(<arguments>) as <variable>: + <body> + + equivalent to this: + + <setup> + try: + <variable> = <value> + <body> + finally: + <cleanup> + + """ + @wraps(func) + def helper(*args, **kwds): + return _GeneratorContextManager(func, args, kwds) + return helper + + +class closing(object): + """Context to automatically close something at the end of a block. 
+ + Code like this: + + with closing(<module>.open(<arguments>)) as f: + <block> + + is equivalent to this: + + f = <module>.open(<arguments>) + try: + <block> + finally: + f.close() + + """ + def __init__(self, thing): + self.thing = thing + + def __enter__(self): + return self.thing + + def __exit__(self, *exc_info): + self.thing.close() + + +class _RedirectStream(object): + + _stream = None + + def __init__(self, new_target): + self._new_target = new_target + # We use a list of old targets to make this CM re-entrant + self._old_targets = [] + + def __enter__(self): + self._old_targets.append(getattr(sys, self._stream)) + setattr(sys, self._stream, self._new_target) + return self._new_target + + def __exit__(self, exctype, excinst, exctb): + setattr(sys, self._stream, self._old_targets.pop()) + + +class redirect_stdout(_RedirectStream): + """Context manager for temporarily redirecting stdout to another file. + + # How to send help() to stderr + with redirect_stdout(sys.stderr): + help(dir) + + # How to write help() to a file + with open('help.txt', 'w') as f: + with redirect_stdout(f): + help(pow) + """ + + _stream = "stdout" + + +class redirect_stderr(_RedirectStream): + """Context manager for temporarily redirecting stderr to another file.""" + + _stream = "stderr" + + +class suppress(object): + """Context manager to suppress specified exceptions + + After the exception is suppressed, execution proceeds with the next + statement following the with statement. + + with suppress(FileNotFoundError): + os.remove(somefile) + # Execution still resumes here if the file was already removed + """ + + def __init__(self, *exceptions): + self._exceptions = exceptions + + def __enter__(self): + pass + + def __exit__(self, exctype, excinst, exctb): + # Unlike isinstance and issubclass, CPython exception handling + # currently only looks at the concrete type hierarchy (ignoring + # the instance and subclass checking hooks). 
While Guido considers + # that a bug rather than a feature, it's a fairly hard one to fix + # due to various internal implementation details. suppress provides + # the simpler issubclass based semantics, rather than trying to + # exactly reproduce the limitations of the CPython interpreter. + # + # See http://bugs.python.org/issue12029 for more details + return exctype is not None and issubclass(exctype, self._exceptions) + + +# Context manipulation is Python 3 only +_HAVE_EXCEPTION_CHAINING = sys.version_info[0] >= 3 +if _HAVE_EXCEPTION_CHAINING: + def _make_context_fixer(frame_exc): + def _fix_exception_context(new_exc, old_exc): + # Context may not be correct, so find the end of the chain + while 1: + exc_context = new_exc.__context__ + if exc_context is old_exc: + # Context is already set correctly (see issue 20317) + return + if exc_context is None or exc_context is frame_exc: + break + new_exc = exc_context + # Change the end of the chain to point to the exception + # we expect it to reference + new_exc.__context__ = old_exc + return _fix_exception_context + + def _reraise_with_existing_context(exc_details): + try: + # bare "raise exc_details[1]" replaces our carefully + # set-up context + fixed_ctx = exc_details[1].__context__ + raise exc_details[1] + except BaseException: + exc_details[1].__context__ = fixed_ctx + raise +else: + # No exception context in Python 2 + def _make_context_fixer(frame_exc): + return lambda new_exc, old_exc: None + + # Use 3 argument raise in Python 2, + # but use exec to avoid SyntaxError in Python 3 + def _reraise_with_existing_context(exc_details): + exc_type, exc_value, exc_tb = exc_details + exec("raise exc_type, exc_value, exc_tb") + +# Handle old-style classes if they exist +try: + from types import InstanceType +except ImportError: + # Python 3 doesn't have old-style classes + _get_type = type +else: + # Need to handle old-style context managers on Python 2 + def _get_type(obj): + obj_type = type(obj) + if obj_type is 
InstanceType: + return obj.__class__ # Old-style class + return obj_type # New-style class + + +# Inspired by discussions on http://bugs.python.org/issue13585 +class ExitStack(object): + """Context manager for dynamic management of a stack of exit callbacks + + For example: + + with ExitStack() as stack: + files = [stack.enter_context(open(fname)) for fname in filenames] + # All opened files will automatically be closed at the end of + # the with statement, even if attempts to open files later + # in the list raise an exception + + """ + def __init__(self): + self._exit_callbacks = deque() + + def pop_all(self): + """Preserve the context stack by transferring it to a new instance""" + new_stack = type(self)() + new_stack._exit_callbacks = self._exit_callbacks + self._exit_callbacks = deque() + return new_stack + + def _push_cm_exit(self, cm, cm_exit): + """Helper to correctly register callbacks to __exit__ methods""" + def _exit_wrapper(*exc_details): + return cm_exit(cm, *exc_details) + _exit_wrapper.__self__ = cm + self.push(_exit_wrapper) + + def push(self, exit): + """Registers a callback with the standard __exit__ method signature + + Can suppress exceptions the same way __exit__ methods can. + + Also accepts any object with an __exit__ method (registering a call + to the method instead of the object itself) + """ + # We use an unbound method rather than a bound method to follow + # the standard lookup behaviour for special methods + _cb_type = _get_type(exit) + try: + exit_method = _cb_type.__exit__ + except AttributeError: + # Not a context manager, so assume its a callable + self._exit_callbacks.append(exit) + else: + self._push_cm_exit(exit, exit_method) + return exit # Allow use as a decorator + + def callback(self, callback, *args, **kwds): + """Registers an arbitrary callback and arguments. + + Cannot suppress exceptions. 
+ """ + def _exit_wrapper(exc_type, exc, tb): + callback(*args, **kwds) + # We changed the signature, so using @wraps is not appropriate, but + # setting __wrapped__ may still help with introspection + _exit_wrapper.__wrapped__ = callback + self.push(_exit_wrapper) + return callback # Allow use as a decorator + + def enter_context(self, cm): + """Enters the supplied context manager + + If successful, also pushes its __exit__ method as a callback and + returns the result of the __enter__ method. + """ + # We look up the special methods on the type to match the with statement + _cm_type = _get_type(cm) + _exit = _cm_type.__exit__ + result = _cm_type.__enter__(cm) + self._push_cm_exit(cm, _exit) + return result + + def close(self): + """Immediately unwind the context stack""" + self.__exit__(None, None, None) + + def __enter__(self): + return self + + def __exit__(self, *exc_details): + received_exc = exc_details[0] is not None + + # We manipulate the exception state so it behaves as though + # we were actually nesting multiple with statements + frame_exc = sys.exc_info()[1] + _fix_exception_context = _make_context_fixer(frame_exc) + + # Callbacks are invoked in LIFO order to match the behaviour of + # nested context managers + suppressed_exc = False + pending_raise = False + while self._exit_callbacks: + cb = self._exit_callbacks.pop() + try: + if cb(*exc_details): + suppressed_exc = True + pending_raise = False + exc_details = (None, None, None) + except: + new_exc_details = sys.exc_info() + # simulate the stack of exceptions by setting the context + _fix_exception_context(new_exc_details[1], exc_details[1]) + pending_raise = True + exc_details = new_exc_details + if pending_raise: + _reraise_with_existing_context(exc_details) + return received_exc and suppressed_exc + + +# Preserve backwards compatibility +class ContextStack(ExitStack): + """Backwards compatibility alias for ExitStack""" + + def __init__(self): + warnings.warn("ContextStack has been renamed to 
ExitStack", + DeprecationWarning) + super(ContextStack, self).__init__() + + def register_exit(self, callback): + return self.push(callback) + + def register(self, callback, *args, **kwds): + return self.callback(callback, *args, **kwds) + + def preserve(self): + return self.pop_all() + + +class nullcontext(AbstractContextManager): + """Context manager that does no additional processing. + Used as a stand-in for a normal context manager, when a particular + block of code is only sometimes used with a normal context manager: + cm = optional_cm if condition else nullcontext() + with cm: + # Perform operation, using optional_cm if condition is True + """ + + def __init__(self, enter_result=None): + self.enter_result = enter_result + + def __enter__(self): + return self.enter_result + + def __exit__(self, *excinfo): + pass diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/__init__.py index a786b4d3..63d916e3 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/__init__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/__init__.py @@ -1,12 +1,12 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2012-2017 Vinay Sajip. +# Copyright (C) 2012-2019 Vinay Sajip. # Licensed to the Python Software Foundation under a contributor agreement. # See LICENSE.txt and CONTRIBUTORS.txt. 
# import logging -__version__ = '0.2.8' +__version__ = '0.3.1' class DistlibException(Exception): pass diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/_backport/shutil.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/_backport/shutil.py index 159e49ee..10ed3625 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/_backport/shutil.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/_backport/shutil.py @@ -14,7 +14,10 @@ import sys import stat from os.path import abspath import fnmatch -import collections +try: + from collections.abc import Callable +except ImportError: + from collections import Callable import errno from . import tarfile @@ -528,7 +531,7 @@ def register_archive_format(name, function, extra_args=None, description=''): """ if extra_args is None: extra_args = [] - if not isinstance(function, collections.Callable): + if not isinstance(function, Callable): raise TypeError('The %s object is not callable' % function) if not isinstance(extra_args, (tuple, list)): raise TypeError('extra_args needs to be a sequence') @@ -621,7 +624,7 @@ def _check_unpack_options(extensions, function, extra_args): raise RegistryError(msg % (extension, existing_extensions[extension])) - if not isinstance(function, collections.Callable): + if not isinstance(function, Callable): raise TypeError('The registered function must be a callable') diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.py index 1df3aba1..b470a373 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.py @@ -119,11 +119,9 @@ def _expand_globals(config): #_expand_globals(_SCHEMES) - # FIXME 
don't rely on sys.version here, its format is an implementation detail - # of CPython, use sys.version_info or sys.hexversion -_PY_VERSION = sys.version.split()[0] -_PY_VERSION_SHORT = sys.version[:3] -_PY_VERSION_SHORT_NO_DOT = _PY_VERSION[0] + _PY_VERSION[2] +_PY_VERSION = '%s.%s.%s' % sys.version_info[:3] +_PY_VERSION_SHORT = '%s.%s' % sys.version_info[:2] +_PY_VERSION_SHORT_NO_DOT = '%s%s' % sys.version_info[:2] _PREFIX = os.path.normpath(sys.prefix) _EXEC_PREFIX = os.path.normpath(sys.exec_prefix) _CONFIG_VARS = None diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/compat.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/compat.py index ff328c8e..c316fd97 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/compat.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/compat.py @@ -319,7 +319,7 @@ except ImportError: # pragma: no cover try: callable = callable except NameError: # pragma: no cover - from collections import Callable + from collections.abc import Callable def callable(obj): return isinstance(obj, Callable) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/database.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/database.py index b13cdac9..0a90c300 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/database.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/database.py @@ -550,7 +550,7 @@ class InstalledDistribution(BaseInstalledDistribution): r = finder.find(WHEEL_METADATA_FILENAME) # Temporary - for legacy support if r is None: - r = finder.find('METADATA') + r = finder.find(LEGACY_METADATA_FILENAME) if r is None: raise ValueError('no %s found in %s' % (METADATA_FILENAME, path)) @@ -567,7 +567,7 @@ class InstalledDistribution(BaseInstalledDistribution): p = os.path.join(path, 'top_level.txt') if 
os.path.exists(p): with open(p, 'rb') as f: - data = f.read() + data = f.read().decode('utf-8') self.modules = data.splitlines() def __repr__(self): diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/index.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/index.py index 2406be21..7a87cdcf 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/index.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/index.py @@ -22,7 +22,7 @@ from .util import cached_property, zip_dir, ServerProxy logger = logging.getLogger(__name__) -DEFAULT_INDEX = 'https://pypi.python.org/pypi' +DEFAULT_INDEX = 'https://pypi.org/pypi' DEFAULT_REALM = 'pypi' class PackageIndex(object): diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/locators.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/locators.py index 5c655c3e..12a1d063 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/locators.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/locators.py @@ -36,7 +36,7 @@ logger = logging.getLogger(__name__) HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)') CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I) HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml') -DEFAULT_INDEX = 'https://pypi.python.org/pypi' +DEFAULT_INDEX = 'https://pypi.org/pypi' def get_all_distribution_names(url=None): """ @@ -197,7 +197,7 @@ class Locator(object): is_downloadable = basename.endswith(self.downloadable_extensions) if is_wheel: compatible = is_compatible(Wheel(basename), self.wheel_tags) - return (t.scheme == 'https', 'pypi.python.org' in t.netloc, + return (t.scheme == 'https', 'pypi.org' in t.netloc, is_downloadable, is_wheel, compatible, basename) def prefer_url(self, url1, url2): @@ -304,18 +304,25 @@ class Locator(object): def _get_digest(self, info): """ 
- Get a digest from a dictionary by looking at keys of the form - 'algo_digest'. + Get a digest from a dictionary by looking at a "digests" dictionary + or keys of the form 'algo_digest'. Returns a 2-tuple (algo, digest) if found, else None. Currently looks only for SHA256, then MD5. """ result = None - for algo in ('sha256', 'md5'): - key = '%s_digest' % algo - if key in info: - result = (algo, info[key]) - break + if 'digests' in info: + digests = info['digests'] + for algo in ('sha256', 'md5'): + if algo in digests: + result = (algo, digests[algo]) + break + if not result: + for algo in ('sha256', 'md5'): + key = '%s_digest' % algo + if key in info: + result = (algo, info[key]) + break return result def _update_version_data(self, result, info): @@ -1049,7 +1056,7 @@ class AggregatingLocator(Locator): # versions which don't conform to PEP 426 / PEP 440. default_locator = AggregatingLocator( JSONLocator(), - SimpleScrapingLocator('https://pypi.python.org/simple/', + SimpleScrapingLocator('https://pypi.org/simple/', timeout=3.0), scheme='legacy') diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/metadata.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/metadata.py index 77eed7f9..6d5e2360 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/metadata.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/metadata.py @@ -5,7 +5,7 @@ # """Implementation of the Metadata for Python packages PEPs. -Supports all metadata formats (1.0, 1.1, 1.2, and 2.0 experimental). +Supports all metadata formats (1.0, 1.1, 1.2, 1.3/2.1 and withdrawn 2.0). """ from __future__ import unicode_literals @@ -91,9 +91,11 @@ _426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', _426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension') -# See issue #106: Sometimes 'Requires' occurs wrongly in the metadata. 
Include -# it in the tuple literal below to allow it (for now) -_566_FIELDS = _426_FIELDS + ('Description-Content-Type', 'Requires') +# See issue #106: Sometimes 'Requires' and 'Provides' occur wrongly in +# the metadata. Include them in the tuple literal below to allow them +# (for now). +_566_FIELDS = _426_FIELDS + ('Description-Content-Type', + 'Requires', 'Provides') _566_MARKERS = ('Description-Content-Type',) @@ -192,38 +194,12 @@ def _best_version(fields): return '2.0' +# This follows the rules about transforming keys as described in +# https://www.python.org/dev/peps/pep-0566/#id17 _ATTR2FIELD = { - 'metadata_version': 'Metadata-Version', - 'name': 'Name', - 'version': 'Version', - 'platform': 'Platform', - 'supported_platform': 'Supported-Platform', - 'summary': 'Summary', - 'description': 'Description', - 'keywords': 'Keywords', - 'home_page': 'Home-page', - 'author': 'Author', - 'author_email': 'Author-email', - 'maintainer': 'Maintainer', - 'maintainer_email': 'Maintainer-email', - 'license': 'License', - 'classifier': 'Classifier', - 'download_url': 'Download-URL', - 'obsoletes_dist': 'Obsoletes-Dist', - 'provides_dist': 'Provides-Dist', - 'requires_dist': 'Requires-Dist', - 'setup_requires_dist': 'Setup-Requires-Dist', - 'requires_python': 'Requires-Python', - 'requires_external': 'Requires-External', - 'requires': 'Requires', - 'provides': 'Provides', - 'obsoletes': 'Obsoletes', - 'project_url': 'Project-URL', - 'private_version': 'Private-Version', - 'obsoleted_by': 'Obsoleted-By', - 'extension': 'Extension', - 'provides_extra': 'Provides-Extra', + name.lower().replace("-", "_"): name for name in _ALL_FIELDS } +_FIELD2ATTR = {field: attr for attr, field in _ATTR2FIELD.items()} _PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist') _VERSIONS_FIELDS = ('Requires-Python',) @@ -260,7 +236,7 @@ def _get_name_and_version(name, version, for_filename=False): class LegacyMetadata(object): """The legacy metadata of a release. 
- Supports versions 1.0, 1.1 and 1.2 (auto-detected). You can + Supports versions 1.0, 1.1, 1.2, 2.0 and 1.3/2.1 (auto-detected). You can instantiate the class with one of these arguments (or none): - *path*, the path to a metadata file - *fileobj* give a file-like object with metadata as content @@ -379,6 +355,11 @@ class LegacyMetadata(object): value = msg[field] if value is not None and value != 'UNKNOWN': self.set(field, value) + + # PEP 566 specifies that the body be used for the description, if + # available + body = msg.get_payload() + self["Description"] = body if body else self["Description"] # logger.debug('Attempting to set metadata for %s', self) # self.set_metadata_version() @@ -565,57 +546,21 @@ class LegacyMetadata(object): Field names will be converted to use the underscore-lowercase style instead of hyphen-mixed case (i.e. home_page instead of Home-page). + This is as per https://www.python.org/dev/peps/pep-0566/#id17. """ self.set_metadata_version() - mapping_1_0 = ( - ('metadata_version', 'Metadata-Version'), - ('name', 'Name'), - ('version', 'Version'), - ('summary', 'Summary'), - ('home_page', 'Home-page'), - ('author', 'Author'), - ('author_email', 'Author-email'), - ('license', 'License'), - ('description', 'Description'), - ('keywords', 'Keywords'), - ('platform', 'Platform'), - ('classifiers', 'Classifier'), - ('download_url', 'Download-URL'), - ) + fields = _version2fieldlist(self['Metadata-Version']) data = {} - for key, field_name in mapping_1_0: + + for field_name in fields: if not skip_missing or field_name in self._fields: - data[key] = self[field_name] - - if self['Metadata-Version'] == '1.2': - mapping_1_2 = ( - ('requires_dist', 'Requires-Dist'), - ('requires_python', 'Requires-Python'), - ('requires_external', 'Requires-External'), - ('provides_dist', 'Provides-Dist'), - ('obsoletes_dist', 'Obsoletes-Dist'), - ('project_url', 'Project-URL'), - ('maintainer', 'Maintainer'), - ('maintainer_email', 'Maintainer-email'), - ) - for key, 
field_name in mapping_1_2: - if not skip_missing or field_name in self._fields: - if key != 'project_url': - data[key] = self[field_name] - else: - data[key] = [','.join(u) for u in self[field_name]] - - elif self['Metadata-Version'] == '1.1': - mapping_1_1 = ( - ('provides', 'Provides'), - ('requires', 'Requires'), - ('obsoletes', 'Obsoletes'), - ) - for key, field_name in mapping_1_1: - if not skip_missing or field_name in self._fields: + key = _FIELD2ATTR[field_name] + if key != 'project_url': data[key] = self[field_name] + else: + data[key] = [','.join(u) for u in self[field_name]] return data @@ -1001,10 +946,14 @@ class Metadata(object): LEGACY_MAPPING = { 'name': 'Name', 'version': 'Version', - 'license': 'License', + ('extensions', 'python.details', 'license'): 'License', 'summary': 'Summary', 'description': 'Description', - 'classifiers': 'Classifier', + ('extensions', 'python.project', 'project_urls', 'Home'): 'Home-page', + ('extensions', 'python.project', 'contacts', 0, 'name'): 'Author', + ('extensions', 'python.project', 'contacts', 0, 'email'): 'Author-email', + 'source_url': 'Download-URL', + ('extensions', 'python.details', 'classifiers'): 'Classifier', } def _to_legacy(self): @@ -1032,16 +981,29 @@ class Metadata(object): assert self._data and not self._legacy result = LegacyMetadata() nmd = self._data + # import pdb; pdb.set_trace() for nk, ok in self.LEGACY_MAPPING.items(): - if nk in nmd: - result[ok] = nmd[nk] + if not isinstance(nk, tuple): + if nk in nmd: + result[ok] = nmd[nk] + else: + d = nmd + found = True + for k in nk: + try: + d = d[k] + except (KeyError, IndexError): + found = False + break + if found: + result[ok] = d r1 = process_entries(self.run_requires + self.meta_requires) r2 = process_entries(self.build_requires + self.dev_requires) if self.extras: result['Provides-Extra'] = sorted(self.extras) result['Requires-Dist'] = sorted(r1) result['Setup-Requires-Dist'] = sorted(r2) - # TODO: other fields such as contacts + # TODO: any 
other fields wanted return result def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True): diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/scripts.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/scripts.py index f598a413..03f8f21e 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/scripts.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/scripts.py @@ -39,31 +39,16 @@ _DEFAULT_MANIFEST = ''' # check if Python is called on the first line with this expression FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$') SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*- +import re +import sys +from %(module)s import %(import_name)s if __name__ == '__main__': - import sys, re - - def _resolve(module, func): - __import__(module) - mod = sys.modules[module] - parts = func.split('.') - result = getattr(mod, parts.pop(0)) - for p in parts: - result = getattr(result, p) - return result - - try: - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - - func = _resolve('%(module)s', '%(func)s') - rc = func() # None interpreted as 0 - except Exception as e: # only supporting Python >= 2.6 - sys.stderr.write('%%s\n' %% e) - rc = 1 - sys.exit(rc) + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(%(func)s()) ''' -def _enquote_executable(executable): +def enquote_executable(executable): if ' ' in executable: # make sure we quote only the executable in case of env # for example /usr/bin/env "/dir with spaces/bin/jython" @@ -78,6 +63,8 @@ def _enquote_executable(executable): executable = '"%s"' % executable return executable +# Keep the old name around (for now), as there is at least one project using it! 
+_enquote_executable = enquote_executable class ScriptMaker(object): """ @@ -103,6 +90,7 @@ class ScriptMaker(object): self._is_nt = os.name == 'nt' or ( os.name == 'java' and os._name == 'nt') + self.version_info = sys.version_info def _get_alternate_executable(self, executable, options): if options.get('gui', False) and self._is_nt: # pragma: no cover @@ -173,7 +161,7 @@ class ScriptMaker(object): executable = self.executable enquote = False # assume this will be taken care of elif not sysconfig.is_python_build(): - executable = os.path.join(os.path.basename(get_executable())) + executable = get_executable() elif in_venv(): # pragma: no cover executable = os.path.join(sysconfig.get_path('scripts'), 'python%s' % sysconfig.get_config_var('EXE')) @@ -187,12 +175,20 @@ class ScriptMaker(object): if sys.platform.startswith('java'): # pragma: no cover executable = self._fix_jython_executable(executable) - # Normalise case for Windows - executable = os.path.normcase(executable) + + # Normalise case for Windows - COMMENTED OUT + # executable = os.path.normcase(executable) + # N.B. The normalising operation above has been commented out: See + # issue #124. Although paths in Windows are generally case-insensitive, + # they aren't always. For example, a path containing a ẞ (which is a + # LATIN CAPITAL LETTER SHARP S - U+1E9E) is normcased to ß (which is a + # LATIN SMALL LETTER SHARP S' - U+00DF). The two are not considered by + # Windows as equivalent in path names. + # If the user didn't specify an executable, it may be necessary to # cater for executable paths with spaces (not uncommon on Windows) if enquote: - executable = _enquote_executable(executable) + executable = enquote_executable(executable) # Issue #51: don't use fsencode, since we later try to # check that the shebang is decodable using utf-8. 
executable = executable.encode('utf-8') @@ -225,6 +221,7 @@ class ScriptMaker(object): def _get_script_text(self, entry): return self.script_template % dict(module=entry.prefix, + import_name=entry.suffix.split('.')[0], func=entry.suffix) manifest = _DEFAULT_MANIFEST @@ -299,9 +296,10 @@ class ScriptMaker(object): if '' in self.variants: scriptnames.add(name) if 'X' in self.variants: - scriptnames.add('%s%s' % (name, sys.version[0])) + scriptnames.add('%s%s' % (name, self.version_info[0])) if 'X.Y' in self.variants: - scriptnames.add('%s-%s' % (name, sys.version[:3])) + scriptnames.add('%s-%s.%s' % (name, self.version_info[0], + self.version_info[1])) if options and options.get('gui', False): ext = 'pyw' else: @@ -381,8 +379,12 @@ class ScriptMaker(object): # Issue 31: don't hardcode an absolute package name, but # determine it relative to the current package distlib_package = __name__.rsplit('.', 1)[0] - result = finder(distlib_package).find(name).bytes - return result + resource = finder(distlib_package).find(name) + if not resource: + msg = ('Unable to find resource %s in package %s' % (name, + distlib_package)) + raise ValueError(msg) + return resource.bytes # Public API follows diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/t32.exe b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/t32.exe index a09d926872d84ae22a617dfe9ebb560d420b37de..8932a18e4596952373a38c60b81b7116d4ef9ee8 100644 GIT binary patch delta 28766 zcmd?Sdtg(=_AkDZz5=AAKnsPIw$K*JJ8hCSY1%XhEd^T)6k4i01!{~ahzX*Hl5)~2 zaZ?4y2R={`P*jcwdHVnhMG6Xn$Wav3dgS2LaI^?Q)yn>T)+8-@&i&r|`{Vw1i)+uC zHEXYVtywd(W_GMO9Z_>7Vs)OXbxr;l@2WSCEje=XrSuNC?S<U*qlkY}+9kb3gn8*r zBGjb6iSWRd%=F_3kDT0?{*<6=L;U7YSRlg5cg}T^OzA)V9EFlpqKc5bgMXinq*RgI zEU8u9R9z+MQ$*Jbk`$JOFb2@dhN_agoK#6txFqGYv&pJK-Sx35DUbMOtE5eiH3NES z8&uNDE_nHpBqgpHkkH$qQ%QE>==f`uq-m-(1NyO6l_tC2X^=@cWeBl<{7Zq<k&EZe zanF&Y?!%FSH>Ht)FW5+Ra@Q0Tgx=JsScN*N3ko1jMl_x+SNH3Fp*v6%MR2%wysW-e 
zwMMUbJUr5?k)#c^14m1S_x*+KzG{u+DXcGyaVQ>beO>!CMR6)&lH{baIQSijlH@xS z8%%IG^SX|fxJw6GgWbXRb)!VBOQpm~t#(N+Z8sd}ccbl&Hcpi<uDBQu5&xYNO9<~X z<E8QrxxXoj!?~2?N*!_!#{DnkhIPo5OEnb+rM{@$q1zwPFmklyn8a708BmhC26w_+ z4I|lO;i>lW3g=o#tuJghD9+hdu=%Uo{nf4fP6#p_wKnOVIN)3lj!8UOQ52WDLh-co zO9_%h;gxuHDLmn3(Ap?^C`fjOqdi@!GJYaflFH;(Zo!+rLn+oKcOSWRSYudWn=9g( z%KGZlmpA(hx%^XL`F?ajcsZXQ&E`d<c;^J=l%vT!SNJ`Uf)H{(y{;&7tD(VqP4{Gj z^Ieq6@44vUQ;A%7aaiFMR|I5Ng87*n!nqSQ@f{j2m!1zRZFWgRO1a{}f9<}nBz~=* zsK|R%;tDFV3YmFP?;{Bf`<s!$wKL@o{`2pOQYClW+YW;7P+FB$<B9OkXbnt}3wcC4 za(_T=XmCl;jduQWP@)jFtCu9nZ76S3LW2;r6@nn3er&Q5=w2nqyhReLtUcUO#n#6A zeh7s53tJVBx1A^V$D4(%m9u6FCGlO<mb*qO9*yCu-_yqb1>LNyL^t*?cRKS&2eq&q z$>mC%1}cUWcTwQp3a8YKy7r0#-Rt0KqE)ZRqN|<}st{|v?ipWS*yeaI2AtHjfs}IR zot-jKqZ=|kvBX(X&S#>Hphniip3{La!_^|Hb|i*!<t+Jdu)O7Rs}dKBlK2KmF11VL zlFCVxLy0S<5ztz9)uUD7?5!+0Dt=nwXew|QYQMNdBpVv+GiYS^&bO_&c&>989`e_w zok2`4)RdJ&wJR$8W786qI5SB+Q$IE>#^G9oV5!5^rA&#_x3b5g;-cbFF9+|%UXSYU z1*6j};sFdN*VvimU^iAcc-9Q6P^{mbmLZ$+z~;KDeCj~rJbnee+Z3KQO}w@fuT@sM zehwx&`DejIw@7?Cn0O_axPZSMOk6DzA4FmtUR62&(aSD1jZK>_LF1ITv8`;nCV@(3 zZSv$(lh~z-x5%hWs(&Jl!!}Kx<Ul{7x0JXvB-YoaHG}F<+-dR7ZIE5TU;LGnDuyWX z*tE2=jZU<v5|;vQ(9l99z+6fYk0%+_R>5cnqR#NaT56<&kW*O+dQ8wz9YY^XLQ~86 z#h2{TI~m&1><3MndR-3|+hy2nBi<?FbI>7%qje3i5@k*YNRxOr#MJ*KUbiR8cPPFJ zJ=@Eraq!VSLMj`9?hRnLlsTUexkic7Ykf^>yg}^UiM?kJ_F|Vl6MjLxNzpwT-(hu? 
zCDQYqZcikoH-{438bd>sKN6`Gd{n!llwCXHJO;T5ic6Zd;yY4gm<k7f?D4LByizn( z^WI++CHT@IE#C7rs*$%#=PX8Q8NVw^=r+ocT;=;q(m;1Sczw2kWn^)jLVz9up#CTc z6?YI7$JS9C;`T=dL>-275W688aI?R3?d{b={taV~)>Y!d(FavN^FVjFD<u%&8t7{X zubkxIIZy&ocWqPM={7})MP~=n>KCa4Jt)h!F%1YC5R#6vc)s)RBF>;S+fUZK+yzY- zE;7ic__Tp;jeplUFa@HU>L|P&R3BH%Pg2Biw0>PP!YaS^ro+KmgMW#*e8$jFcb1x9 zuV{k)pjFoU$Z8s{R>~h*&#=3?CFh+Ki5D<AR#sNj*Ij|Na~zJ8hFvZ%b7`C{zmpOh zQ5dq}hO^eoB(Pn)v;7)7+%3)UH_XPOQo;UC@_Z*+Kh>Gh9Ub`uI?^SCyVSFSZM>{I z>#yzWoz{_@Ba(w@B^|=LhFzo1vNn2necc%p-BIpeM<FkR%8-GsQ>ZAbAjQG2TqXUm z2u9~#5=;v86{s%VZDkcgPxyD}92j!6`4Y^UURc?>ix}c!=5z4s;LIGP!K#&D{8ah8 zNdrB@Trs!Rk4te4ba1U2whRqXcNH06h>Lbuifmwsgcj9@&vJxB)YqMd(m|2~lIr(S zL~g2gsH<d+uR&eqtvl-ZdDy{^+vHYRl)JtdNimdEcM2&7K5p~ZouqygUH;#x@)23+ zm=GB+-a8wV7<MGAp5An+>)n}}az2(y-Kh;%M2j7|`DaY<LiH4(TgonK-3n)h4jNcq z#xpTF_zvmI_#hPgSj%f@b!#-Otvu?>jV(j^V6~ui#>8N8V^3l$qI*Ywg6XeG>J3B3 zm-J_wq6dtB3e&2;Py;I&&7Tpe@kk9E#cHL>V)dl(+Rj&8*Xs*g9Zr(ZcO;A99N2*A zCk7>Xst@blJ$}enU~a^O#OFjn;TmNHrmkG}XfQAJ;)9Xs>OIr1=N~4rTe_#IuO%{1 z_mN(dQBP%zMY^E@+y+G~L{O$U{u)+*p%RzUV(^F(xEWehtwrZz-Sf2{6Dc>-kVc8i zdCwGTMp{o|($$u8)c~sAd6e)8r1%acHWkvkLg8pR%y{Sd`9yE1$aj#CV3BE6>K=i5 zV3eCuvdHSs#_)nx#+UQHC<aZ#G^IHBQB3}o{>Yg#f^Sap&2N{=X<2~msUhybx>;_e z#o<*?yrr<+-HV^WD==_f`3WF$DT~u3PFQM2Ig2!M@dAH$x!CES7%q3WMmvX%mfSt% z=#(Lt%8M6yqWIu!@I}f+;ns+Z3?%lJ@lt2HhCeit{Y6(4-5-)et=7<weWY95b40Er z>Grfz4`M!W@cy}Ma*Qp*fyznco(IsRg)zF?71S!CyZUegG$SPA)4ptP%nC2Hgyf0m zt8D1VB>rh0*69k~;1?`tgvz?J?r^ygD;F%zZ#3LIvP*4sif52ZJJS-e(~MF_m+{q* zK@-rcfhb<u7nRcUA`!?<vt|Zj%lJZK;8~zGNm^uLXz<=Pqj9g4GFoapcI?=OhH|pP zu`?kfYOjRR6Nt1Pu8xuqZ=@7gIOZ}tm1^UrB3cJ|l3d~CeiNnfravV_xcW_-*=RWR z7Ksi-Y#@HWR_!~G;yW0=;+Gnz`?Bv{*Jm1foO&w;EEV#>18os<cwJ2)E9{wC1=6yQ zW*Hi0HuAY+B&jBU88wqTf{#L=m`EIt!N~l>a1gbywk{sH;`(fezrUaS(Xb;2eu?lM zh?&GUjFqGn6SYzeN+0OzJDh^Wr?ZuP)U(etH<IK+?TX@HDs-+;>uX4X{c@s}Ln?@M zoT!TRz~bb5AwGV^Y^~x|lwk)@a9=}=gHJ|5D`zwY-(Aqk@_VK9N&Pd8<e{C9u|>TO z+is^NOoxh3`r|w+UoxDnuwM4W@@kls;P4&*tYx?gwWq<EP{`b|lZ|Vs>_@P4@t3w) 
zKlJqBWQLlgq3BxvGwix7wY&k`QmVPHhcMIp>p1pnY<ku@dhw98(Vf^N9Xp18?&07q zv?c_pOOy2Fml)R#8o9JhK2vuVR%~S)n-teK`3kCo+%V93l!<ce4sI9a)_|5f;+Q`! zsrOLKYDYsu{$dO)S$%0MFK7GWCaa%~Vm*56qo2Z(JTwR_v20@R!KzZWr1wJg^b+=6 z?_0ZkaWmS{!M~iqCiLm2=3`h@AA6S)%2>;X_F{kUlM&@0)8^>KzUq^n3T{d{-m_Ea zBA|u_KDC!HLh*b)8x)_d+Q4SSrw?63Z*C@rv;G<9VHvA6*5AnNHDU~!Z^$^?gKdpZ z=-G%lpwd61{pvWa-LtTQkDb6ik5AHI%^HY!Yw0ki#1HU_$lj$eD_8AlHSSpcGM0~_ z`i%$bD?P7RW}Nq=R`68n6WEIw2bxb?!MmgIu%y_gY!<Wde_7I>@xHMAOQ|3sbGTQ5 zfvB<+YCPeFhNJAozP<X6z|6lVd~~2cSxJ|wzEFdw61S+8eb_fOx&@27P=Qv(u;2P7 zMmOA`%#C4b{oXJ{+@NfsP5Tk0Wj?g6981+=Ds(0Ga+X=))nWcw;n;hY^KYlJh=hDq zAsdsBqn@E-%Mx-E(G-~X<gf!5BCn3shu63tz=jGM+s1oOWv3Dh>Tjm7sKkk*R#PvW z!jgwfqz26oTN1|?fDkIC4k%4a8D#FGSZ!ij_oE<*rYOO(qmnHwNztipys#-+lGqC+ z@wzX7goIQR?fsHj1FB(ED=U3%ihH0^dVXn?T-dxaKG37Su-Q?@XCsjnmX6RR-FWi= zj3Zl7I&$(iK!Va*WExZ-Bd&N76;Hh4iM9T=WT<Q&Q&#Rwudz#f4dy@$4qqF#seB6< zu}t>N;4c94o3Z_luOLRIn%LjFIFve=J1I3jkW|<3K!3y0Dt~u+J%&#Lxk(yGSzhTT znqt;WSZXo%BXergSPZSueE4%P=TJJW4df#279Zqv5prM@L~GvRYyQz!9iw7jB}T=Z zz;IW=)Xk5PD$DsTCiZipzDq0FD+k{>iH+>vd-OKyrZ18IpStOO5JD~7FIcuff$WK? 
zyXx-hyRLFa_^zuxUr@iPF0;D+L%ift5*=s1+^NI9{2G{Wsa!TKfMF*+4%}aX*?gqM zG!F##9sHI#lC+VJL;>!cO;h^=UHIp-2=Uw;d<prD7HbMC<@FNMD&@97hJ!y%sI_TH zKRL3>m$#ISNa~_~e<m|0rRq;og$~2T;)>!t8*JpFXb)O}>tqEE!{$z+bg`uRnaN4| zoL^^3QvD*8OI^+*e-OJVe-u_zO)$5=VZ{@iNqH+!nFezpp$uLatHgdvO3#}DS&bF+ z$Gd?6*_N7+ZLlM$qflvglw7UFTpq_)5sS!K4~*S1AuGCsNCPYV8u?P-T{bFt#CTs8 zwObocLj#rbVzi=zPeBu5SB+MR@SRWsJ>{ZA*ywH!{_;f>xGGVq^i^pkS_RB##LBRh znjx8OPBwQL2KkNUJinD)NKTvhNb4VZ=pq_ljVaeQLQFdR;b9$1lczIK2TWbASZ7QR zi25sStgl*3>(Qs4Zsl-=VR~KK%BH21cr&o{MW<<8iF^juh?V?!dWaCiO9Wx+d<+42 zWt8EfpEa%ozGf&gP~zOJc4_1DG}k{n587~^wBa9fLtGku7jf~4Xa`hW<Lbh1C4x)E zGpT&m!)`3eyyJ+rfs|^a6Ay=CnvOs*O~aF?wgmgLu-W1{?}>Eq6GJfKuHq3G2`8Pb zk-PXy&%qhFo^%S`HlNFd=lq4uzUuQ5@$Vx3f8Y^X0>iTK`><<n<i}9rmM5=cg&+p) zii;Uxs6rdoFaGE&^o(nzXa9@zG|{@opMM_Ds#S3@dJ3Y$MbN)l5F^fsApML8Mw~|Q z{9_Rsu*ikd#&$&I!tjj};;{W2wM4IL#sbOP6j6*s75;6Y)Q#<E#HNT=JiCNT4NVbO zfT>^~#QMWucLwilSc&zMzwR7RC2l44fgK{yu1}&)m9RZ=wIDj*#r95#s{!iECk6?f ztm5|)kCS8zr&=62%9erW#*$z|ymiN}f$ylI4nY8a!^Q4T8&-4-csaj2*dNdrI10M1 z_>jEkfzjBGpApFu|Ck&@$!aMu&=uvpD3V9DFrGF}efmz8nchn^k`<;87^NruoQ9@1 zoHZ!^>hmb!eWOr9%;_}to2_E?s;mEinqX5Adoq2Hci~7dz)e*9M34w|F>Wj8tG|c( zR*I#p9KP+UUF&me9p(HxK`sick=r0YFxO?~y-DZfXtC%Ge-lfpiO%(yQ#)P_t`JwD zQR*55&C?4R@ZbLh`H1&H<0ozlBp{O@C#^$4XOjjQZ}VYM-YAQD_%JR*mITYwjj>SZ z&<qe%wm+;W<r$g^-*1XLs*LBsR8;yllWs_^671E1z?;cExvY86n-=PB7xosxK86PZ z$>>4qW%No6%#=1HXq>RPrR}o}$8Y27|H(cW+&=+aq3)g;nyXAyEG6!VR;JF#Qw?MV z83{Sit{IIHW1(HH?v>=cPCbCd=8Q&8n^EKvxwkD98xqhOd2R=(J(aD=7&Vwy&-in$ zw%Db43T!E?E<A+(1!*z$fMLz>Qla9kIin1@8HWt$sr?2+aH1A_y08-lt(mtsvpa^| z+tc41B$o?vE`OkzeLp010L@D76z5tpI-kHW`3{A{lIrBC(Cx`e+}a{Ga%ewD9zQg_ z=jE^eEaKa*S<TR7B(5Kt-hG?3vw&2dPllH0UaFF$w`i=%V>Enc6&pEhq&nhjHgniK zuY#<|&15)gIAu_j3)teLA}(pNvpmlZKjr>cBuT6QrpgP_G?OR6@fpi^iD_Ru<Df?B zbPAO77mPF`oP<E(oLa^QehVYlkYa5W)2xp76mdJ^v$3mGBAQ2zPL-||X|*VtXJT1J zeO_HWI?Y!gy+f{CPf8hoJr&c@^A%H!De2fpRcRHP2dm=AWA3ZcNR=Hsbqor-JJPol zJN<sxtZVor6odV^wV!vz#iijPBO}hJgkxOxXtB|et7$1yU%v$l5?CF2?rWy!x_8C1 
zO*~JE=N0ihBc6N3^E95^hMwTpv7hqSJ&(9$9ZB&F<pGeyQpGFrM}eEykyqT^Rj%8H z&^LS0@3hXn(p8eIm))myd%7D=`PXqI`PY$$%)jmsJ?qcoS!MlDSNkS7UFzGyF8lTo zm*n5ofIzqE8PMxTH&^-Errod8?Kz;^Uy~uB37d7~J=X27^&Tun+!b}8E&Nu0-Fm!a zJ*ay)1+OV-b$!R%(fPi5svK&t7J<J`)UHm{x=z%$u1(OXzAfK%A;-f3A5U>l@bNHr zwvTJv>BAb`afs>ajv(1rPyCvecLnaHNO;S(&IDD<Cg3XsQw>zI<w-=ltRuFTM}SuO zei`Z+?rRDU_J!<i_wS>^!1?HRbxH}pwvLD`(?F3$Vee|_s@rq4eyrvsoOqO!Pt2RW zR5z)Gd}~7Tpg&8dRFd37NEYqT5+P&~{zsV<Z}}eU4XS?Icz1=b&FL=kwJmUC;eEfZ z?gWy8wUU6fG~fY`_yVt|PuS%U9XcKCx0a1a4YpHE%hR2BN{b8}PFP=Cwg-;&<!YlR z-gh`W^g3EA^!hw{rOgoJ$qMpprWRDZ=s}?xEq1{(&#SA=5!ju&+K~cTsH;sEh(}kO zD3GPP+Fk;grmO8r$Y#<Lv@0p|r21W?aZPon@gy6<m%%c(e1o-yXo-2c$3|gl(A75J z>1!*~J(d72_i`T}?T#7N=yg{@J2b-@b#-4yfZ;$&lh8Ag6x8ovJJ@~usD-8ab?pd* z3&_8&6{ughjCJ*oLwr*m@kEl=TtxJm&`do_49x>EC@H1oUEm#Q%=xC)yN+JN?BR)c zhuR1#a8<mT@&_HGymYnKU@I`yblcF3jjg|qh`!A<A|-hjMd03lGzRr}B(fIMbR@8# z`Oxidz+proEkWqSQB_o=h;mzKIL}a)ooqP^$v%!2{uw1lbBH;9*g;+GRTxuhp0~k3 z$3acuI4tZa-!5wPkX5SN1l)bo%DN*6>+81Q>5%KTuu0pKyZ`leNEhpHoqyL}wtRb9 z6iLFH?A7fVUXnsB-f{^G65nQuN-f_3|3f_;e9;^fzvU#cv@Oy-)`mB9was|?+UDz4 z?HBYrbdR>s@od|j?t6W0_qa2BZ3}gEFCi^h<eH{BGFs>p$vr2exN0w!wNvGs&Y{PD zm`(Gou68aOFjQ5ywo0xeO{0?`O%U}*HVJbiihhL}P?!ySFb~);Nfv}_2c#M*>nmWC zqvVQkUz4Wg6GS|{t9<*;|2A4t?(obAY7fZ@OtJ_~S{~^a;R>O98<Da+N8Uxsvf}Uz zFqZRsunMU3*BxT2ow43Cfv!Or#EE=NPhi=Bto(JW@rpdI-Cy?vkWkksyVfG2+b`Nf z5mJL4=}>l&M%9bA%CIkp$4XfFR5>Z{qcIG(_KNRN9m(B4G$wGQOug+@;Jh)Vg_tWV zMb)Tg@zB^Hi{MD<<fyTZ@bn0E0;)46IHJm0BB64@novLapL`eD2GV@|K3(md7~G_t zt7wWRll~4~Y^o!UX@kN1r(T9ZL#r<Ut4lwv%3pU3T(aQxHB3VMBgAorglEe*Frl09 zhOVxZWMd2-NO8wgldu<Nr$*Amd-VX@wCfJl8K&JmAeGeUnaPfdGCq1Xdj5dkzit&s z@)`M8-!GQskzw`kGRN+WxwA0d;3%#!>)lh7dK#p%7`MRzcv#X0^2LZg-#Ma#={GD| zpzF;zS(MySIMRXB@uy+peL4-Ie)8?x%f8s1<E3hl^2lO38n%zphV3J&A1bCOTeP0E zKvK|Xx>ABx^A`RS4t;`iwo>U1uT#8V;n)Y<^1kQkDb@@1qTz6gMe5J<aCi`GWld$J zdsGk&_skh2i);e@T>7AJx$zeM#D9war*EGa-JZK-v684eyL^tkk6Pu1#Xv;awiQ*W z7bc~`aWjvYf#uSrJ#o?Jh*Ma6p7As5wRgBqP=ZCfD+gQ?_xAQuegC_z?_K%Ny88AV 
z0;lBerR*YvU;6|cn+2^>4lZH#U<{HtIwc+@KS?u>ELLIik(RqqJFbRnAmsJg{G+1O z?A!SpW;v;)2NEel^8c`|3R(_aADT6j_6P5MOitm|ui5R=6E?ea7?4_Emlgq*0U7}x zBF%-{A<#|QVm^;SEe`9SJg||@NaO>-9R;om)g5&wxS9>8n0|N{^&LMl>+rOm!2=~a z7r_Q1%a^~?%<dh2pL)Y!c42t3Dvd>s$cx9mL##cq#I5644LX!1;Y!^4R(9)%QuSAZ z*sCLksr$`hpN>dUAD_Xlj!0E+nZXi9Zc04+5tSZ}69?@kGyaX{c!pLdZ5vXCu(KnR z)%Jh0_L1=xIy}mTL#H3yA6tS&(XF^HrGXq(&PSc3{T-b!`NuhBlR!M@!MdrHm5v%- zc01Dj{nKdaef_#8sVRa`QgtP68-$r>w7U!8k@wA6^m`!Bcl~!y&p)zk1xw(p?|QqZ zC+_0N_HasM$3~@MKhioXTb02E>kV-`MX7YP<&axn$Z?=Y^<SuuQ=^R@(ig_Qor-#u zo`*;MuA_SFkL%yGXRxpJiFm(VUyNsA=G}v+V7j^~WIv85oqc@jL(xjH&PCKp$=|ax znfG`@&Co+Ga^?pc!oj1Cqapk=on><VCX`3{>}t^>wXW`aRP|~E9m}b7bzdUl!s#j8 z;+1s#Cyxo=Xe2clquFQ{p>|^)LPG;wZoamN>X<6IWK0b10u*L|Y!7geQjxDhO{=3H zDTz;9xiV!CrL>5Yd}`Y2m`7H{_Xo0aWyWAiyP$}E>FmB<qQ5#zKJgfpynjXpOEe6+ zMf7VKk0&CZ#5=}EAlx^<8Sa>7{uyYEzWi0Rsr2-8{m#qLWR*=3L_nq}d{C#$;0J{g ztTEiI8p6Ibl&ZJ1vf;*YB4S{#7?aiJk?e?ZVA1^_z-H{Gg%$5RofbLrLpu?HhoO%p z-qXv$%Wp6|b{jD$ySEXy?FQZO0n?k3y*)vZA3BD__iFGM*5u&IQ6ilT#V#SH>~27e zUjT|_jr`DQ#PT!HL+%92TFRO7e6j*6x|@>tJ}_2R)-QK76>4aGxWe0(M%+E*D3q}q z$_SU-T4GAI%xcFe?6~k7^c6(+HN_~U8c$O>Uwo8ZG8L$XvdrxGUaLu^UV(#z$E%^B ztu*kaXQ#!z4H5<>e~NDGke(<e=P>qU_DJt)XjY>Vw->>|aO{uE71JUd(I|26f+8+; zB`Bt`P!)1c5T%o$fue_JHf~q|emXShjm1GpC#Q1K7fmV329ihV-Qn{%gqM8LB8Qf3 z@b#3#X|;_C{rY*_v?%!*Y94ol1bZ}1Mbo!w@Fq6S24$Rod8~i=qQei-%6ZJ;hi-;m zjA<%z(pZ81KLu+Ttk)8!FcfQZ(pAIQ{+tB$ob&8#&HxRam@B3Q!&ta^h?R<ED*i)J zVa;(^P|n*(@!(NCNyVHPu3&N3rJP4Jfoix4BXF#hIn8O_GgRVv@DGUdYF%v`3MCie z<_Nh`_Z|kfTr@lwdo36<24l|$V|l^YqrsRx7^?}!#s*`HgR$aZY+f)nJ)c^{9f4fR z4zn0b-`Gy`sRXS?1s`;n<y%ILLKWSyE0onk5o+P1@I+Pcp_qLtJ1!w4q0Hs{qxV7L z8~(|jvkdV5q50|1ybCvC?QCe6)rfI-8ZF{niT<fjK6NgmaHE|X@Kfg^5fLi#snaPU zqG>;cyAvFeY$8qBNSfj9?aL=doWlF^iAlmDqrsE5fs}1yHYIH!4QYxHvdipn`4XFW zIjE!?L)xUPJ%9?3YO;HCV=IYYHjy%8X)5$JRJ>q6ZwwYMsGl2y#S=pLg>JOAch8mc zNxhv7Q%Oxk?w=_qH$q>&%1utbp)VkUJArg%IM$v)U15pV9QDdWY^v3$Zu*GH)^yeV z>`iM%e8yEs6Z(vo;+*k_Vho&aWtXf2RIRL6-r#<Ze5okqJaq<jMtAWHgF!0i7jI=# 
z@(d0#iSEA6)d>0HMNo1G>I#pM5=W#;K6wHT8<xVY>zs`T-vC=gTRP8_@(Sl0Pl0kA zGWSkH+WzTyRI-9aab92k6C`)mOm~8lFrE>TtaZ`NP8#Z<8X9nNk>Zqkg5k5TuxQrs zU$BW0!we2FOGGW&TvvMr-LSC+=p%(OFVXb_U$qvq-g?nHD<LPi@lZaM&-iDY#61mX zxMHd;Vhi)l-H=Lj5#OGl)@2!rxT5;}%1i9){P-@bM67xBrN?bU)U{b`m~D`{<x5s( zTROPmx*f|XyRWVH;vcA-a`^j)K<f1CwyT5_js0XB;w`|r6b?02y4omEU15AA5zt1^ z!5XY}knjTvyW)B~p1Rs>Fge>5v}kM?jQ&d0sDeLtiYz-WS?lWdf=gTp+y6F_%FC)Y z9yFpF;zHg#zrpH4a<cOn1mN&@?kU9^e9^5nG&tywPd-aMep)X{b`&db$<T7%gh+jO zAt|YRWY|Zpu#bOn9)g5npO-oK4$NS<J_fP-obQ6TpUQ>v=0SpvGI=&DyTW0(K<n8_ zTjPvLQAy54N+gCVf9zJ4Y>!jtwX$q`uKj)LZY<z6c*%i<)fs4h=kg?apIDxR@k5YB z?dFa{o9&?p)r{JVZL_DxZNp`nP<tyTiU{xZ_x23+`BoM?Iz5UGxlQguW*<ESr{Av8 zWBY?reb;-n2wZko3O|OH7XF7xz6kJrIJ&4Cl9s&`h?M&yo_N!ce$6NuG(KOx=DUzA zH{l8ij?BZ1bp2)4O^NDNt*rW{R7l@&)5zF7szsZRoDOx0+Xidh+{(VaX*hB97&DN{ zqr{zSWw~P#(pK+>?QW}tksEA@*o(q1Nk%(dgoOHsIGWcAB70zrsn7qE_Wqc(q1!59 z+}cnB#WUPesCEBuMD-dw*sHyj*hz~7f6V`eALR+7C1We5P0zm2d1Q<;r6-1>`!m>N zx|D$<nl&gY9nrYLX8Eg|t@(7rsCtTgO`Mik61QO?96Eu~IkI^SmgnMhKTN?|P>EfZ zA^1AaPdKVCD0Is5;M-uP)07{_#;XmVu`Y#qU0%dJ0-So!pUox~&K{7Bn1g>j8?<X@ zDia#Wg-hqAu}(XcGviZsqOkuclt@?a!<Z40>%+HM%<h>ou7KcJs2s;aE1_<cm4;I{ z`KPv1b_qojy<`^XBx#LEYwVb@dO`|LvQ@Y`=MKa9B7gNCw5m~Y+8kCAl1%0gYeifo zzxrMSy49EW^SdzKDo_9V@Spbr^P9yHlcM)E#Vc{avn98-$ag($*(We+vt~B>(!7Eb z_AhWrR?6i5)Gp=w>Fo;H2G#22TOO`*M^`AHRPc$oWCHJgrSGt2#q~48p}yw?=PWrA zdfSCKJz);zlN;$5X5JRsp%rQr<NG95iCcgy#F4WTP1l3hpGF^J%~O5eaMsd4C5tT` zmx|@g^W*xe1D~)x<5C9)OYZ4Y!K?S6&U`nvs=*U>Xhxu$-xk1pfBPP$D(;my2+|r| zUHOnu>)_A?#|q{3lsVHEC#iA6%Tb&bJzU73OA!3TDYl}xUv$I`%9c}XOL3Wcw~GB< zJTlq?dl=f~#-C!|(iAprd`wSsC$;5al9F+X9Vm?(_S+%ML;GNBLbpD~aRu94y7f`X ze+GtI9}aQr<F`ZX?ePiP4@0Tx{J_WToAJG(w}KG5<#Ef$Of_K?V-wQUt*z{>34^`C zgRbCnQhiC9X4nzmH@dI`2}ipfsBF3QPP%m;qDFO4$AT)3f+pc^0d8uhVwdVK#Mzg& za?4+FU*QC{m`xIhSOjWQ=<H(@Y}FRKJDTqlNdu7-__oY&b_*-J*^n^-eISmG+*)oE zI2{{B;}DRavOqUsII*$Kb+I>Y9;qIFEBoqZLp;n;AR=@fDm=v<>r$7MP2&G>vLPh{ z)%z-0X-P(Jy7_?H9&!AE6P@jyc!J$K(X39rlf6E14oTrX;N12#Cs!V5jtq;coF$~P 
zv|H}&dK(73F9jEHKVmQ5(ys)(i<5aKi2iuq2SKok{B7(rsg4uj-jol+##X@C;&iJ9 zFUV0axQ~?R;r>$L&c_w7;^i-nvk1qWqDO+UMF>L5&<kiFTR!+P_m{@t5@49Wkgm4z zOYpVGXJD#Y;jv2MHt~j*bfoAw+v@12etJGT;&@qorIjt3v{QQp!vSs9ReVmOpKMeA zIF~J&d|zLTFUidx>80EKfo{LQ8Btu4j^^Kfz}hF5sN?3c31vwcM!f7Vyn^?Q9T#O+ zViQ;CE4`>-%Ws9H#`-zt4Ys=M?LX&KKBY{(a}Ilb%J{ytU2sLr#6_1r{L3SqP4%xM zEP85s;z}}bzk!ScW8ZJrJqdg(BGlAtj<AVS`wXF`?#JswMAr=g(Le*I<VLizXkqr~ z)U<e8h%Z&}(EtqWDdYR*vJa-FNB@A$N2sH&HnH}peTUu-wnpS!#v|sD;S-vS5sezh z@z({r7{n8sSV4K3_CZjBw`G!=*uwI@wyp)VpwI@Ipr3R@O2acq)A8j*b&TYG$Kui6 z*PZL>t<Z&<?XKR_VA+FXj5U-G=z{uT8`5%h9{axB!MaWxs9yCm8#yfzPL!?B_G9y> zSqI$o8hOL8Z-JZncN|4r^$g>evD*k~%2V&N1Jlwq5ikptZ{vpGEzkAGJbw3c>3C*U z&K&;dbHM|h$AWNguYx%1fzD&Wsmph~TN$TT57M(+UPxd+-1bk^2=?*xdsL4y`;3R8 zg&(yUuQ>Q!!`UY@7OTG+#>UP3TAltK%b1m^#*zDNvqr1qN3s`Y4OOR&WFO2*R;P^I z5t!vtsjm%X6?5)~yc2UiRX2=duiWlYy~+B|9i{F&j!m39TpcK4%jVvqUiSh!K6g@q z?|I?A`_W^Uez?yrCAs0Y0~`T-0_Y3+_$Br~o<cu06+K9^JZ|~C&1TH&JGTv*Czdg^ z$R{<Vc@=Ys*1_{=PR2;Zod+7G;u<jKxC_Y`qxE!;Fy;5owZaxs(OkUHsiJ|mf+~9Q zE%wIR4i!BVBEE_+q-7;Q|D>Wntc_PCv8?%nqHnpuRq+<-qP7u~pe|NTW#(toSlZwE zq>jhD?JLx%eKw`iqw^IE^~AUe<VKzedL?ZSyeHY3XJey6N8Rj!zfB$R2%3&hz-$^k znilF}MKq)?U+rN({_VbmZCy~`;Aycee6)8pbkXgu)t~G#npM9sK>fbFWBnWb)ano3 zV()Idh<<zZO+)ya0q_Gizqva6crp<8_NBT`-@1dpgMBdjZTs!vd4$Z_@wob<&)LsA zgkEIq6nb&jPN5f1?@S@Rcx&f!Rk++A%+}pi$l;~ko7MR{*!Vru)n9LCuk0xbci{3m zywfS_`?kX+eTTa1cD8J9iu$HC?1jDahQvKaI#NGgn(nhp(^uG~M*%GW(_ih<Xu!jO z<w#?X&QF?qdTVDB+z%8DqIg>_V7Su6g+}Vfx8LBH+R4!fj*9n)+4}X8IR5Ajo=l)E zk0F^3VQ}O4jnzz7HMjeu60!ievFu6Y>aDD%%5-z^dcLl14VcBJvi<np*E>sd8_-~B zYLPCNYT<4E8mnr>Yw^S^iRJY-SoN<ze#fK3=Vf6_iFIqc99aQpmP5`bLtR$p!YNRh z@|nM|4b;&twS&KTioJcu0JT)YzPw{lciW>-nm_=HI@!cWvL4RNguz&6U^an=k#0p| zMv0(7F<+$5aK?MDV7-9UFpQef<rVmrbUpNuc5j~XzQ*u+<8mBzOP==?$ipCcp&qH} zgi5|txE|=nr3!U2G*sS(^&p%^$b>HyT!e}%qPP+r+|B*A1z*3A+bmAPb`{_L9JR75 z&U9Nhlp(moh(=mF4eF*@x6N$CbrmiRpvKk)E_7Z;!OdpR*q|kFJ2qf<Q*mdKcLFFb z6|OLjrX*a|bSIW`6X9?olt3aXB&J9*7_bu9RjwI>zW6WOP<a@}0SCoyT%|sT@(LRQ 
zt0JyB#yF4kMEx#<-f1c}A>u2<bmj*VS_ol*P>WWnJG)e4#r8~0aMi>Iw%a?|cm&Y@ zv>pRKL$QMH>8PtHips4m<NqMbPxnA|JZ1}vc{WkI3a<uj$*8gHSy!T3mf2p{Ed##7 zY73iuxF?;1){sM`7Ip9N377M&jXN^#8mo#yw@ID|{xOUUROS3gwroMXcSR56T<?xl z(Gj@kdsq0l3Vr~q58Ai+^GPSsEFTg(J`@ZX%DDqC)Y$SX>!+#Q1L@;o25#_1L7585 zu^pe9C^`>qKMtpYVV7&5N=FnSfEV~ll}MK48%(M+>OXafa^!sl8=-&JrRO)Xo9?bu z_ZrXE-TiI)$YK;7xkf({iQ%ObYFDa?lqjH&M2bitns+N^o9~%044I(fx4aYjR&9Sw zE;%Mv_K&sF-Xa`&xq)tnVmF?}`Yjx*KKucjy)aEfS3CW<=E+tq?5CD4u`LS+sWq3_ z$%XyZIUlee7p9ENdWibS(~eJXg$5V42k0|g!by8>qsxlrd!?As(tbQx{-Pv}h9Y<% zlE}P^G*}U=Tr@BnewM&|e4Eq;)s3ftg6qWizC11#xS=)ZaHTLt#OX^RB~E*ZH80AD z#jDpo^_%Fj<;+>g(wDvn&VhT!sLzaJbMC#VcPmD>Ki=0};hz8tu5|8S&)&NCSnuwg zNta&iNP6-`_T1tj>M2?5@Zy_IUs}-zAHvUz4;uz>H@KkB{v=)jtVxQ8!t)ZK_&{W~ zf&iEH^Aj(zqWfI*Il~`k-s>G}&97cSoAK><(lzI{ma1^Fwaw&>5F>W1oc-S;G&p;Q zWWdMH*OC<Rni%L;J%8h~Kx&erQKZ0nUmF|cj_-A^8N)qNK8e-;fnOu!*e78*ZgnSP z3H6|RM9h8UpeBcd^*2vn{?$6R$DOLqKf%tq<6}>e?jfboeGooBNy+B1-|o<Ox~N9{ zXrz>wZU^1Ra2*>7UxAQ%5F{%O+8@1+b_}a8Jnult+2hQ%WRm(`BU`himwLL9y}V?2 z?i<gAS{ffhw9HcBAVj&2ftgU@Qsb4OxMe8v)zu!vn(T1lb#=fdvWfYIY41-`-;vIS z-#-vFxaIx<c;0)z8PC`5Pgj?;vXAdi@+Lw-&{v&H`&XVtB_r{151QbY2w7WCyB6$( z>c^_WYf`Qw%Xr*=tZdov8~C#2k8Uspd(R)<T}CfW{T1IC(;r{;3=BG)7`!K0eMlGg zVd>kzczhYi?pYe8?FMR4&;I@lTd{Pu+VU*>c4?{(Ip4^3r-{WqFpt$du(q$J7e40~ zH!(M$_auk!B7N;n*Bvt-e7&oA18f==;pc8*7akm>_B_D4|HaVV4r=J)CNbXfmnG>$ z^2ErKwDM%FWfFd_U@ux%YAl=dPAe8Pfo=j}-~VNx7kWzHN9g$Cr#kF6c}eRUV7uXJ zgYE9gD>{?-uTR2efA!yOsN0W5^flW<Za&||7!;-BaF`Y#SBtdv3SAw!Ow0KM>?kYW zq3?Ur>9l8LIkyl61#u57=Och)%~~GXSMvTJu?4Rt4{ayw8e(q^;)4A=@ISJjKF)T5 zea9czU-<+3bN|6EA7|HJO;e9Ev9#B+)xS2f>938N_&mlsjsiQ*+BYttg_v*_DasxR zMjK9bj8Xr?gu}M`l#Im}umTX3_8n0fj#?rTVN&s3JWko!zgIrWcE4s)|MPzK@7Lz4 zjje3*>n3$K#{38nYd~;j3A^_CSZ3e)Y-F(YSkGnSDDk#s<9l~)fJ*+y$7SXBv$vO- z$+vN38P-DYvDwve>Sva)`>Sux*Fg1QQ7X9o0bv}j@Ejn}-pcy<=V6UeGsAIy_TFH% z;N?M<P%K*IMY)+$ZG$bgEgwMH&skp3yZiqC&i+;p+r2zV{Y4Y|booHzB5IAowldyz zX-7U6JM(#C(f=Z!FBY@m-m%_(i(xF6Xn0Q%YI#=?#&ER=WBIQrqp~EP|3qOn4~S?A 
z|5k))q6*5A4E~9T59c3>P|q7hXyosRFp=*Np_#uf!aTkaq3@7g@h-9RwSqR5uNGl3 zUn#;8UL!&WUn;^We31yJai<8U^VuSt&8LZQ9(Rb)$%{p}fZIj5h?_;|=6Vq><ryMe z##0b_sSvwsFgSTnBJZY!6OR(;N<!PwcFOKmgkBQpYC<mv^a(=G3Un=@Ck6UEq3;WH zJ)v(4bQ7U}7w8s3HwpAzTws&<GXifQ^ihExBGf0)BZMv$=rKa?1}e&Yl47$&0-ba4 zDFQu1=y-viBh)6)W<m`DJx^$cKsit^?=SF+MD8ij(}YF|w3X2Ih13F92)!iGHbO55 zw4Kni0+nD{_(_3=6Z*bDHH5wmRFqjuv8^H@hR_WQy(Bx9@TUYho={n!iG<b&G=)%) zK+_0y2{ePySppqS=oEqK2^}v`BcV2dnh7-sG>^~>f!bAQf8Jl<V~N~Tpv8nn0TqQV zq1Z3?P(dAp{vgmPgnlc~X@s5;=yXDl3v@Q22Z3U*29HeV{sEV1D&<IPgc+909p!cK z7r}!=Xnd-jCrA!nAEaOcu!ksr2~zad2xo|Lr=Tn`T1^WCrJPp~<<B|kYUu_Fe5KZi zv7z@egOXyb5yeFD!oQXrB*j`IN<vK0K}x(e!V#kUcDE>GqBUYlh|&_Iq*x=Sg(#l| zDQVV-=^@JdL5kTLu_#2@5v1f{ZWEL#4!$8svRfmThL|1;QpQ>%mW3$QK}xYTq9#PS zOHh`SSWUHpQqC)hB79%WQae{W3@i8G%zt7f+fqAJ{qst8y7n^zrc_)Tt0?z<r}JIV zS+CEW<%wu<>T$8RDGZI}i9OJw_Wd)=_lsunkjtBuv?rzQ?5n!oNwIHY?oX?a*>0Cs zBYXnU)oFM*0PZ(g&xZ!9!D)Rcje<E3C8kT@gzw-Q($53V0Zs#s0S*D)1?+s2z4TCV zEOE*TNsb4k0nFRk^@rv~Kfi+#u6>UDN_Mb?5BKf89*Kxi!s>0nPXpSwvlkvtRzJUk zz4vei1uYNj(}@GQI}l6*%mz3CivUXjH2}7Q*;W?E1~b_T!cM>;z?mKF*_Gw2#y>`z zf%OnSUkiuN)D7%}Kc(mxKGEU7gV5--??hR|JiIXjb8V+{bK`KtHtOkNq=y-g&;X!S zIX(a!%}kFBkE6ukRTVnCE~cE!wQR{FL;H8ZW`MpuI)n4KSn_w%XxmR>Hw?G3eUA*9 z@5bR_$luflXyC2=By9sd0=@TB-W%u<=(?YB-$0M1!t;<qr}}h#0AIY}1BL2l94UVa zs->{m6NSUm^PBOl1rDt+EFSwiE0!%I?C@jZi#a)ZERe8hidss~Rri%k&)s!YD_2bS zkL{sR3eN?;<cHU>H{_A(lsDK{@^IUCf2BP#{j>#Yz0r;m+-QW2V{b(98$ZEw_X@>C zhwyYO0*CI!s|Kj+hOxU=4fHCnLbK^)qIdlmEbSND@F7Y&ZwXP|qYfx)N4;@xs4s*J zd@q|Xu2_6Vzs?h0u6(w!7P3(i@=<o|2VMBd8$MOYq1!^izNGgBGkjJNuJ~y)2R7o^ zjDENP*9D!_ORHBvAGY8EgSeiA^>8~|lh{YI734<#@CH^|pXe29nzS2>P~=>OHped! 
zlvlu&z+15Q@Xf!75ABN46zC4_sdnx}<KmZ-E<x+@k-n~$CJ8zJG`h>V1xfHd(cSJ} z$)<@5Bo5qZSxX5UNr>_p#cM^ptfZ6R6a+^{eB(4=*sI4l9YK+JY2y^oHqb*Z<-yLd z9&nGdL=&&O037(b(#mE&YRM+K)<W(c0wSagR8IN}F9PYifpVY}T`)zXcrJ=WTx+qi z<Bz6h)XwQ#hS6!!pT3j52NPk?TJ`j@_{WOWiC#AQF^l$_<sJ08<!tj~KK10~Z0PDi zDX@7n8aL4Qto~?v;#0FeU=0n4x2xHr)xEuzdC;2~xDq7%k2vGPN5v>ZC5}r=D*}4- zud0ZCwhWuf(9f5oQPb0fqK=>7u$S|Hy@Xd#!~VYO%}YKk=l`HMUt3MNY0(uy=M;Y! z1Q~7t@`K~olMw5TA3O>ClP~;JLUJ`rSd%^=_YbcJM(h4w8iOCYx@4K5ad)F)ZXd+v ztVvBdyNjkzEqTg2J{8}49xhv+g-evQ*V?qkIBeH4I2FwNIRv5V+kleQkRzce!o_yI ztUi%$B7{9|Fjr7&ip2A7yzup3;A!m-?yF<XIN;OK%^JEl^h~VSV3+e%xRl54dAyIh zVJ&<3@&4-Jm)OgXC#f?ovBQtw9e#o%HHQrfFC%0QO9>xENF5tRPMY0pu=-3Z`x?J~ zh`{{BZ1vVu_UseG)ZSFq^u(7$eEG>U6x2NR5xwAedKAT0JRPgHx3Z_7E}+<%r~6av z+S7v)XJ9uJIsljt)UOW|D7(g@g#6AW%(6Buy1!t6^Go9COPFhIa(bYGRL>RTTq&$9 z(mNf9EuLU=eB}AVS;N5)@fC!jtR?{cPj8_*u?N#UlOA1qgKesZ4ja4&KnXes=k>r= zj<ViDS<eGKlyxtz?tG1UYXi#r2bX0}{;j(+^M`;2zPND~hk<%I=oFBnu=S0^)>q~J zsEkjGL1wT@Hk^iD1zF|JEaPW!d6E3EdMG!HXt$t_l0OsITF=8_ejfL>xF1>Iny_EP z7r2FwlY<bff*gbM5=*@EZ&(9jyKYlF-RY!6Cx5uDfKUD#^FKRKoTg13uoLaf-S>&3 zk>5inY4Z^cDW>K=Ht@N0&HZq=|JO+xd+zo>otf0X3jx716Ahi2#5|v>)_uIg^nAQJ zy!uP1*Q6Kvsu#Y>7QB$6E`60f`oey3me!Y5y*O3<ukLK$i+?&$JMcMktiPv=`E!&C zHgp1;ICCItS-(fU{0a8b262Y+j}4)-wSmLvhZ6t%O-RkYA3^K<B&35OWh4vCO4(8O zQn4z0@)?xs<Bdzi8Qc(d@1|wyi!ZSsH{GUw>?KyRdAzz?7xwh#RqEXvSo+J!>T5T% zqL=3m`EWd`CYCBsAHhxf!?;Ng7}#W&?g2aqXaM|#H1_q&Mcxf+jK~xCQI{r94?gvd z4h4~$<j(>V#5qa(B>XrBdYcaC=%k_vpCSvR1QXW9!N=j9%D{>L$*=c2_UQ6dT=B<f ziFF<!l^u&W$nQAgoPUNF6PWPVB=U<@!Ey63!r%~*8|6=eT71KK(~lnt@x9u~$4h_) zK9=dHeCqRr7Yx^j#L=Lv)9o+G4EKGbavyf^BvkfKzW79zx}|^gxEqpgsbZyDk_PSu zMf@Jct@K84(ILKq4SsyU*VnW9Ew+*66>uw62Hi^cO%nNwj5`XU!{MYaue;(giK}CC z-bzh0(u5G@PmDA)+!pA~r_b-GET1=@t$!<JIF{4*mC5vrLGmTk;g5F_g69X>zC69+ z)(eRj-%9cZCHl0HxUL$A<DX)V3R#%1fWkX`M7d$@L#-Xp4<e0>_A8z2;QN!CtRH_N z#N+Ga;e&t%{t>Kc(BD{0-p6MAmWu|);kX=*kF>F@;ndL^z>FWy5qH<bEw+veY@KUI zT=AKy@tANy=Y`em<F|X$eYKXihm(W3eymvwt#d-_==Z{Ihpa}!S%?Gk?8l&UolC+0 
zcHY4gN$-Yk7R5U?u_(>ixu`LIXNP>6=Um-*772m*(vYp3gM|@j%|!E8vnSr^(F<k3 zPubPJ1uKe=9OUqQnC|wzla%x{@fgmI!Ev5%IetYqLENgUFQOb#{44LI=1`%p7NI>7 zaAH8E@Ex`Xd~)>}+0%^T&wP}M@2fs1c`h6V8~pdfj3j>IyD9x#k023;L(LdobLzDq zV@aYCw**(dOTVRa_}xKW*WHfRKzX5uSKmDtWrk91Tu{i~+h<Q`g|j}m@Oc85d^msw zNj&+%dKR*N`!nJmhmRLa0KOTip`U?zb~dZnpQL?|-W*836PGlbt=L~G4n+1R(k%JI z{$zF2TBdu?(t8FKz7jvoSBT#aS>6pR=XgH;Q8x3vM0MUJw(Pz1?sQNXNUJ~!O#cq{ z_xA?%B+`L4^|dp!ojyD)VnG)B?!95LhaW--TTxq2JJwG0^Bhep*q{bmv^<jv*PABX zku%x74Sm)3jb^JF62_4rY<}_dbmiHYJ~j%<Vb4<W`yIHy;Wpq`Jx1{rJCP>NIG@`= z7Wx7Fx{5&D6M5K7_CrI;sJc4TWFs9#;f*i+dQ0$!W_r28xl1GkMZ4eg>t&p_vit+1 z)MH1pdkzfFux-byVT}%+3r0FmvtD=Kiai8vBL;@jD)^&iY~O()L#9Ikpl3Q?=@o^y zPw!~<3jU@YQY@Y}H!MUu-)LvLgCqM;rd$uP=p>q0Ou<UBDF;VHiL{HgY~{hEzJ#}Y zejNvX^v9pN4SbMtxq!WQFiBl%XI~%mBqdyd1t{d=2*W@1A~~BY>9Q4djdGbSZp=!V zg3>{A=OMuFLZh60xEZoSKUp*4Hg>4dpw@oHerX(;9O9h~Uc=dzhp2mw+=`N#BRt*s z>$kF^L+R@9%k1t$iKC{0=h_+inc(7Q@FT#f2mMp8&|QxqyaaCq&vN(_pur_B+j}TU z{mf77%%Krb@yPezw3#bARp9J2Qt4?>o#^S<6Zr8PsiB=;=nk7>4OO$_4qIQ|K8?NF zl%W248av!HHe+!Cir9c3H^2?GDuH+2Q2X@&dH{9C4Yk{UVp&H<smuP!<{mNh#i0A+ z4Yjy3TE9->zqPWz9~r7X`wcsLBu(Q|^K}nGz>ZJfk5GB(o;1d+#?_mz16p}CVm<Z| zYIhI(NG?{_c5H@UnIgIsSQfn3I69|I9sLq_9Iq_ztZw^MaFH?o(@w&H2JNV|FMk_E zU&p~Gxg0l8#K-0~_0!tLip>4bD%g4U`;or3eYlAf`eCA@7E)C@fcnjy4AD4@k1G-7 zK?8)#O#yB25J-*e#g29cjt-8x(}s7YJtUnyZKRAEgV{lov~;`?1>M2%)Rm(*g{^J1 z?-@N;^_|LlC{87v0-Oi@1km<YNlAb#Krvt*pa$?bU<+VB;0r)2AiR%CiUp(r@&FS7 zKjB05X240nZon4Mb^;mzX8^4LH9VcMfT8j5LE3=a3RntQ1=tBV1UL<72K)f{70|OU zUH}*YcEDu7Ouz!bgMeDVTEG^-e!xk<IlvEqc0l)jDrpd4B)|rk0GQs-tCE%gSq*p@ z@Gjs}zy&}%pf_re1~39{0!#+X11trs1Z)Ca2hd*)YUTp00{rLSzGS=fqQfrD-D8)Y z{|sCDHTED2zEqGJzQuws#m)|>`zM8d>0ZKGMk)oY)uC3u;AiF0oyDVeY(K7!_RhX; zr(Z*mGzjVP=eFPN63KBQ#2!G3L5Piqw5{DP^&~pNUPMP2k8u5OxZ{P;fe;%?srgsC zlz?y%LfSZ`AWS0qFLo&zVLZYVgxlKebR}e%)NmD!G!WJ!d;noB!UqvL5w;_ojZi^o zzq(`Q*%MvO(j3VRSO6*WrN#KS5Rf9>DcytT0;G6=&Bc0+QgcB|m+qq)?HF<)M>Q3K zq^V$-Bi#<>`ABv{Knh-1j2MX~+9Dx#F7W&CF9jm+6B*nAd@uwqKzwjWIB-_?-7W0Q 
zcWwRi{;L49q<MIAv1F5`?>Npg)F}f;ORYNizfg|JWn;@GUv7ALk84Eninb|Jvfq*3 zR<XE%mD*YSt+t?{aDr#wux3DSwkQxEwl=}e?h5o*Kc2`Q2_&lqvDfe%%svPttA~n| zu(w78!{MWXocHM2gp2(m-_Nv5pJy`H#iX#y*};@2O4w_NRyl&v@5|Wt7n5{x_!<ba z3pKfvHr394yx6z<XXQ6&-<C7mzx%0|-bS+2V{RuIy5n>0qQLZcX0Dwr4D?ky<`ai1 zmZ^S7R`2i%_U~5{yE^|xWYsS+upVO96WVPyT%hv+_+thC?i*IRJ6Na>_K5d(HHzTT zL!!hDr$l0T>_5Z{oy9#8Dt1TlKmRnCPEzB5FQD_0(BE_#Sgn$@6K1O8?|IOQFQKXb zUpHj%693;fWbCKbgE2EzMGF_-J;z--|GxX?+%dm`ZNK!zC|PpMaXTl^zkA`5`9*gw zm|wPN{@e<wP*vt$jQ_@0NEm9wF=pZ2x8Hfs{2fpJbXNpBsc!Gu01qquH5BdW+Lo+Q zr9C-Rb=_P1vt1esumkb{W`Gf(2V?+J0Pz4VKmxQ~u}c>L9B>}c3^)fk12_#h2{;Bg z0yqR{0K5y>3D^Qy4_FHz{?%9PUTGze8h{(H05BUc1>gXb0PKL_fD{0J4NdCMz<9)C z02+V<XuE8eS^*qz25<zh7qAJi8c+jR0GJ1u4wwQcxjY}W2a*Rc0)_+90EvKDfELjH z6NCcJ0Zsyl{}95xfGvP00JVTcfa!oKfD(Wi5CdqxWPkd-Fjd^s>(#39qlW}LfjWZz z=&Rm8{{5fhPY(@K4a}l*ypF#us?m}@a<sH9YP3Y(6w@c27~JA-YgiDk>556t#%g{I z-_sDLN>O`pl~mFL=;hbIloG*cX6pPie1qld{}1SR0YCL7{_iV#6fDL1e+0AS4gDGM zLK<H9pMYvgeNGu5-1Bs#>Yr-w?J9|5raljapug66Dk%f09r5@oEJrX?cf=d+!KZE! zH{uy?q^IAApL_=Km>cn>uYn)3JKk@8199Zj5my)geb295R9zk3GoW_WkJjI+ck5r* zAJU)KU(+XN4#^yysmXF?J&^Tbme!DH7-A?h%rY!AY&N`YIBfXZaM7SPCL6PjrN%`@ zkMU{aR^w6Q1>?`gFjKN=h^fd_VY<iUHZ3<jVtU5(f@zy+zp2sGCwpjiWp-osN7+AS zN9X8s{5enO?DyvUnA4V{<iwgsna7wPG{0be!+hC1$g;rVwLEUwXgOrzmhQRF=f0i$ zS?+ha^Q^t{-pJdTcR0_UUz|T9-<j{XWfjaRxT9c6!LEY+1!oGrE4WgC8l_0m6a<n! 
zM&Cz2NS~!I(9hL-^gjJs{Y&~I`rq_jGkay`WL9N*3$s2m{Wbe)_V3xrIfHWE%sHAf z+Oo?sHMf6WQQn075Asjtf1Cefew1yF4eBP*Wde=9M!#9VUH_i`L;Yv^FZDm7#nhSO zGbd(F%X~cZ>C6{0H)ZzCO3q5p8kUurm6K)5D$FX$D$BYxYYtj;QP$F|nygh>Pi8%v zwLa@LZ`O{i{aGJmoyht+>wMPtS*=--hG;bS=f-ZPp{DVst)>&EOQv>HR`yNV3$m-T zAIV;my&-!?_TFqwPQRRAbHdF%%(s}!&2!CnnqM{_H2=%oX3n%sw7g_lmRpN<+mqYf zYO;>DmZ04l@~ZQzY#tl^I=E!Vc`gF)-!faXa*ZpDe>0viZp!X!>2DceNw*BKjId-{ zOqQ`0hvg~D^Olz_*4(AJkL4=Xn7j#j6?t>=7UnI@`)l6ryaRb3=AF;`Ay1W`o<BW* zP5!3*SX-v;L0gMWQ&3&tFL=J-je?^E9~XRH&{`llB&ir$-bJ6TAE`I$^Yu4*^^^2V z^&8NfhxI)(=VZQ``FmzV)(=@#h8n{f!v}_64Ju=_u@{sr-8j-{H0B$}8*edI7-tyo zG%hqQGd^ToWqiuG*Ld7`*=R6XOmj_lniiVsOzWU<`Ps9xUD+>Xzm|PD`<HB8PHawM z&cK`@Iiqs2bMkV==G>g)$nj3gnUiyO&f=W=bE<Pz<UE|SD(7+N<;ywm<{ZrVAm>ER zXF2C`&gZn`v_eV4&0WpC%m%Z?Jjonm>1|2246+nk_F3MyoUwchMg7$hp4%liJ$F=Y zVXh<BmFvl^&aKa7xtnsogz`pPW34068MCeGytur<dH3W!l(*WO_j=xrytne2@_x(H z<oC~?n15IPp8S91Yi)gP$+q#fh3KyrZRc$Pn^Z8aU{b-df+q^L7kpYE&6cE9=)XvP zyuL_(r+%US8EEz!`hV$v(EqA8WaecSXVzstn)z(zj?B}UUuK49^~ma-m6oMs4K$1} zI1DcuE*So8xMuKnF{X%ytuW3s-fxtRYmCoA7d|%jFwI9Z9>!Reb6U;U%n`YXx!ZHU z&2{D_<!9ud%Maw=j>3&Cm{{;b0hZ8GF$yzWKU4pZzB%)OtVBbuVK#op@Py%egOdGo zj^A?3(q`$K`@40jt<p9FqiMeFPTSqKd%d=oZ98nA*#2!xM*XB3>@6TDQ=gb=#BiEp z+-z#jPR+R`=iZ!KE$>^8T8~>lw*JHVx%EryH&!j$=^8Y8qiwtGXS7e(f`o!W1veEe zFW6epQgE5<z-nwU5OjmGRp}qq-<i2E^I+D;SzQdh3{zmx?lgEGG}IXiP35MUrg=hj z&YHe4{ccLnz88aS6$ab(>^<3EW=lEIIel{mKzU3#Gjpm?oCM2rmdM<`x#rvnxifMX z=KekRMDC{i!}&`7AX|~mVY|b&%Jz5LbSlJpY>*H*^>^#N`gQs@^?UTgGuLFU%Z$q! 
z<;^m~$UdL7JL|KoU$Sm8$c8-QO~zv5G~-<3lcuojq?|i)B=br01k2sX?oxi3EzUOB zX0gq(-D~sNp0vGUd&~Br?MvH_HdR5dg7ktM%5a+`HdAlwFY3cHyTQ`MXC`K*z(g&~ ztj~NRb8Y7Hnd@Pswq$P2^lr=iHB-t8&(dURvleGnXRXb8F>6!SmaMH=$Fr_w4Ke5q zMuXXqXRsT_8j1}ihNleA8`c{(8MYY47#}p&80(D8xYoGN_>ytE@f+iJ#vhH@CacL~ zdeF4O^pL6E^tfrgNtN9-J0^Q=_A6-1{n>}JvvaH%eq}jJa{l7Ysljk-&FN)MGN+l1 z7+Mp|lg-o2x0{{j#pb2vznW$9Q|9N)n=miDWxitWZppB0&E1jvR<6z(XB}XjW?f`e ztTSPL2ITj!<=U7{YLKL{Xatr1tp4@PbD2M78nPy5{g^etFy7DzosTw-!L$`+dd+mi z^n=Me875+N_7=;Fx%#~O@?M0I+(_o}BsS0pbZERjnF*Q6nW>qBGlylqh$eeK^V`hi ztZ`7IR7195oMD0C70imK3_lqPjd#N!y#!0zjtoquF{TpJD$IXhm=u#X+n8OIeNXn* z?DaXDb9S3gn{T)HE&IHd<Ce#ApUmBm`&#b4-1c0JwU0H``jIs~FE{Vbyhrm^<+FU9 ztv7o61=~g2t^%nUYc3S*sQ#pWa@I5GtiNTwo%MOvHp6biJB9-&MsFkSbZ9uv%XDTg z$o&6SU|3?<VAx`~$MAsR5xX;XSM2Tp%ab>DU+n(avDi=FlE^5QlVYC(N;kmv3217% z$9{r6g8(B#1dy#^;9-zq&|$CwSZls8;4oA%)Bu(&8HOFe)N=w@vV1W7V<-Vk!x2U) zz-n!Q(E+0eMhw9EO97l@5`d-n0$?%lz?cD8%;=Z^4OEz}m&AC(iouS>j>C?}PQXsY zPQp&cPQgya&ce>d&cV*b&cn_Jn1Mp<BJ5)9GVF5f3hYYkD(q_P8thu^I_yBBPZewc D1!_B- delta 25633 zcmd_Sdt8*&_CNm2!);U$R8Z~=2#SI_%rL_Yb74$CG|@p6q!i5yO=M|0V^-Q|1LZhU z>ejT5njW-c9W~2)siBx)Y1&P#%&5$^ji=~jNJ>7x_ueyLo%8*Czpvlx_5JUQu4k{k z_Imbp?X}lldq1<}M$nQ^gH{@4?W;^9rmg>d+|WzwlNWcwxbue=e}#Bc(DKE9@bKNm zpYYJL_@4+rYk6z&=Lj#YS1f*x(_irOzvB-N^Kkq_Gi)r+Rmst-TIJ*4e)!Y-1pi-) zAj*2kx(h-x2%CJwmxmyX0JN~U#fOW65G2SLp(_*`5Dydt%`arMY(USs0W!hJ(ycOK zt94a!pU~Yh!O~3-R(>N0@vD;K`X0Q)vtvBIzZO9lBU_am+k2>e)<QdIxhL^pMK_aM zvc#U<Gk~@LG6AQ_05Pe11j=L#?E)DmCT5{5VLYO8gn#<8prE1iX3d^6Ll9o?i3C)I zzjoiAP2zpB)q%mmsQ8we-ef`Gfb7o-1i^J8vaYt{wj^0?nYSzgeJl*L28)$uMzX}v zYMB%zv{(dBafkX06}pjgfl-l(!v$*z^`Zc9)Hc}T4mBi_-vj&SmD|Q3le&TqwPc%? 
z0WSCA4)@|#`XIVPeag@jzUG*10ys*jLXxCvv0QR=(6VSjVByq$WO7iPv$PD<c19TJ zOL!*`jVMJ$9CE7Z!+7PMR;oq@t`iDFlRd%HS{+nTxA@avw|N%JGnxVl+F$atx+h-0 zwiG>Z6de#)MvwIn1b4v=PXP@Qgx1>??t*qh)8YX4;`Ywc4)x>JaF(%>ZqU=cS@Z@< zdjF0jd0N#ChMVDQ8*Fn?eYfMfm5yTMg6kOtH>!hBWCfUC@>PPBP0z>$0o8Gx$joqD z$#Aq}{Q88bZmSl`Sj)!x%0i>c=?oOavp*TvaPy+gxdsJT>GeNY^HS^b%~Iw4-kw#S zh(o)PP-W<44=FM<&Hu?8;4Wxoc~!>X{esqtsiztzdak>!hn7_jMHi{t+>Umdj9OGw zV2BdRY_=i81sfAwR6$-QMaeO&NU;}sA1k-*Mdoz{H>`Eef(|Qf;`O`X;lprGI2RMc z9mlA;f_Ce>C&0=E&a3!Sp4LC->4;>E<z+Mk!vzsBoOFEZ4N$igvij~1L6xR@PFC{z zmU&vGs7TbrH9)8qt7X+rp^W8am7?;Q*tOQSIYOl<BjacfcE}3x;uPM{#d5*Xdy7po zoHe3bwSYI`!KH4n_|mVmFTH*V0sv_rE~rnqYf>*F<|&Xvz|>NCxjR2KUW&?Qg<P!5 zPmQov&qc71DCNP<Oi+<)sB2Mb#9T)Z^7M-82HR=0+)BrHNYc_3smqX?pk&?4U$k4R zyOl~&p?K4Fo%0O_Uv++E8FI}ox6&`i@@{n7Q`0>f9oVYdr1MrJ&ZIXWN=<>Op|@G~ zzuuPOBxL%VFV#jb_)^n(>UnZb-q)GW@k5xlV3gT1`&fjg{M5-zQAtr2<c(~jS>Bn} z^73*>S-E6KC8VebqydezP2)K{jvPGT!We-XUC(+GjZpZw&^<-0o)Xf4NBi=Puu3W( zPet6ejOU}|H33_MPg|UZB$l9`J*NC`EW$fGZd=I7ZmFUpf?V&G5ql8tlu`@iKz*vV z0Xm`7b`hi!x|L*gkBW_fp5rpx+3%&c_zu=H4l=EKRNRjcN0#jPp~{sfcy8Kb!Vd&j z%dPaIaI&&{Kj%Szx?QersB#CpPKxEU5<^jn$u$A_#7Wh{q@_Qw*?{?MrGNa+W?Dp* z%aGz2fLC1Rl+y4u4Yp;x%&(E^%jZNWvT`+wt|o|rj|=MH9OpU&F4O|uSd|318NjNO z)me{d{=T;-BgTi5X+2W!O%3Rj%lF&`9Zcgz1)?{rT)Gr{V70;<RGs2#2&^cv(j(yH ztqpCeZ9yxVAzI$ly1Ama7t3#FPja?Ll#`|S)Z|g7cFU?E2I)LQ6N{(V<?ekHJl?RT zS{6<R(M3hiS{6~Cs&a?kQ&-yx+<G#ofoXR-3oY`rs~c)1bP{A!@dq1$`gTZM-E&MK ztJ^9(XASN1qn7ttLZ*er8g-mwHsh!$t*gBO1!g@NEM(j)E3cN@uKmG8Q;9k;U8nxi z@GDcMGx<BZcCtS-RlN~Q4<C0qI=V`q==g#6bAQ`PaA8EOwDl$zcyX$)GiQg9u&@|s zLuc|4p6p9Ac4pQ+;3h_<%FyVbb+y-76U*EiSx6to!(Dp?Pj!QSBMWUeSXN9Z<{>La zWsEk(9Y1w4s2=`?>qK;2kqE6E(B!N#eBx-!IQCJyr`5wt=uGKKaMxZ!J|5on|CQ)z z_VAIdvm(FffTpV2FOl8xkJ<y81M1j&<#qNfc^N%|PVX8K+ho+qN>8^*Qn9p;4Zp+( zSlY*GEbZg1SqYx9vU0i!i-7BdvYh^iK|Osf6(eH^V_MN6{@QqL2p0P4P}^JytG_9Z z{M55=*o9a@XcGEDztJaSNw4tau`gpma2Lo8O^z_Sk*7u@)q4u-dzIecP|#&vuT<Zu zD`>UaSpIZ(JS#cWy9FveqO6p@%#Df&vL(6CpfB-8BQ!nz7)w$`qep_yDDybvQ22dl 
z1`@0LR=Sn+Yz#RZo+{psA>W1%RdFsCRKI-)UWJ%zqEm)LuqHsTpc10X=xZ_PCafuJ z#jw(f81i96V&b3#Mnp}Jd9=Qu!`_G5z=x^YolcD*-FqoU=MF*(3+0}o+3ud6BAa_` zpr@xH%oaCXu=n<aDPpk96wP&n&}#$07wjnvGz6tbBeAasFLkBMX;Bif_bLqAiT?3- zp*5N`^_tiF=fQ#yexQ|gPWXWkEB$&fxwp4jEJ`7s-Z5c~=wLs8RX_4(@1;)GFu@T` z#|}eRB+~V;PHhq!lI_R{^W;l=pr;Y5DvY(e{V<75u=ZNXF`zoMQXjM<9W@LqrQ?wY zQ<<-ML+Do+SM=+79(bCjR(c~#X%=Ik$3bfnLQ#miA@u$!jR%E@;X>n<hC<TNC&pGd zT&NDT(rsC+`1-9{w(vR<s{_m2ng*oN6@N(xs*ash*{J^b5X-|Gw1x4zLq*pyh3k0W z(%+XrJQn>}eWwx}5#T8I96#0`<O!@@q9Ma0`#V8e^x;%>LuDgP&lZFwCOhj6dl0>W zQX~z_jrM`4|4DFoZ4IsSQkLGChTI>I^?aDoeC+oi*RhBaTBQ+$rDH>dC8)f&yX&L^ z2GC@57K{llQ?LtM?U9T7x*8&^RIe$6ycC2kE%Lnpo`O(Ug90W7d*|*eym5wO^O8)w z#OS5dLM5jpWgJ6WcuxIL_lhZvzIUg!5Pg)Q4^{t##_}U`qfVM1x`pQW^DZgznJvEM z!Sjpy%W}i7jz}8GCM%y82Vq)W%Y;r~h8G>FCHB7KvrnLvo@OW~cX7Mn14lo)4qQzF z1O~$c)FWKm1zcY&pWlnGN_0UVa;tBe>Ii%BgrU(M-z1zqjhWpG+LkTqAa!dJzWNID zxIykIZueY-dJ3aSeMm{am?Q;WLur^ty-ImKcA&kOW}a4WDE*-~araB?%e3+-Unje+ zM?eqM6}QqU<Ve5qVrdTP6|D>#0t?tL`{#O*vC#u%7P26EjyOG!{1AO#w-z({%Syj9 zk<l@+;!j#q6=Ug^$IGF|BFGyt=^-nb@@k17mt)fUgBvxVGhs#eGYtW%ZlK>IfWnEU z50L?}TG=)-B{psFZuX{zF?{Ksaz)($lihF|O90l3XnGb)xId41;bcc_T<<1m&<gjI zj<%7Z7RQ`&I@&}&k4==rZ1M&jS{O&9*kmV<99#&^(Pl|qVvnSzyV<m`JyY=&Nk8R^ zqklQ|n$Yb~2@zIwUuZeKjLN$UuE27Bgf}C;sh|VapG2{Lf<RuOwaAJq%|)ytzFsXk zC_yh&=?mnJKy||@@=9Evm;@L%2LgwC6R;PvR?F%N<akO^Gh4_9as9)Vpceji_3ue; z$Hj-0-=*9NBdPJb)ElsX`g!tzx|*eHvmk9)PBaEq=?ZKw^iu<|g_;^zEh{afdq$C< z1d~idMkM5jH$usxgxq*^1y&7Dz_F{rZTZT;CH5y^<3nQG>D40gae`WWvxtNwjvaQ6 zjly&&9;~?qa&$j5OB4+N!C!?}v3)}oyy<Mg=s{`{Q&TFC%DcjfMet8*z*88y-Q|d& zul6A46mb!sVYQQC_RfkBD#~bLI=QV--qWGuBDH)ZN<0N=akazDIaW7Zf;zxj*vhIi zA4UG9I*}mwt8<_`c_cYr?C~4%BoA_yB7xUr^lx1?ahaAtMG0jd%}gjRHZb1D%dz?l z)oUh$V1JO_87&C)6vh^f23FhfL^rx@6hj>M`d+GSvk&J>oq;vF1ZfqXc5k|s7BbY( zv>?_KT;(z@>|t{B!hQ_apuLEgQ~LLe5RmEQ?$8t=^uQo8C#7GHpCCX!y|J3CN>S## zRwM{@b7j?H8ST@`?B7s#h+4w(-jA*at8WpYbwE8#AiZ&=WmcM#LE2N&qJIRn5!;IL za=IOs1qz8zM}->6`u;;k?Mq{gZl~IN5z+%%0gG@I+6N08O$>6KmAt(@h4IiBJ*>3v 
z&nUVoUZ`+Yg$isb;=QkA)sH4UQ+3@^Sy5&5^?#Fbsi|Xc{`*hedmTL|Vm+9v;3|+F zO6*)8PsO1&P^8t7HY>~ODO6J3rq^u7fH-=TVSf+ZKf8q-OC9694?~9@msiKrDVS#! zRKmK=2@!M*Cp<U~Qxe<OER=&5$gAUMV*(10qHI_S>Nl{3XvbI#<rOxRKNS19<a8tB zqSMe1{Iza$1tV0;=nPgr!$~`A0Ou2owuMPV{WLtRl4b$|Nn^#6CSCW9OhJp@am5jA zr802SbUeI6;d$`LJ>A^Jm*9zcYZP+96qrRl1()0fEw05^1jhdZ%r6=Oe!m*=6tvLU z*mgD2%c$|SwRf;q@zrnX^>pkr>f4hrU&C&&XH+YDCR}Gv4P{T$6+Ej}L`5iA5EjUT zgceQ=y2OLDi#!<eDS{0z1j)hTDG=&A5cL!U)(eP36V-<@I=6Y9O+iISl;PhNmRjF| zPHYNl#j{(W(9jfg0~oZ9ZxipXy@(Zm%Q7tPK8sq4TExb{f-KOl|3I56*v@Amh_)Tr zcuG-rpe_?*5HQFx>S8=LmN)l0L@}?yT=3jopteM3>`5H>jx1y^GN26;NK1M~;bGuq z^eZnP4~Rz(*U9cnKVV)!?{I8xntAfkKPN}9WKr;@REO9u@#I&$q-4;@zE?}xWQfEB zU^+qJDTtJUS`x{wLCM4B3K;Z~07&bXYRSF$3hKKp1@*;x!@M~y25x7C*S*g=WUH3k z88pDTB^eCZMAxkG5g|O;yUXYw5S@w&ZghD9cehzWUG`9G8J+MiMlSTO^0Xs=?~H04 zEn*Vr3FGEj;G0<P$J#byk?DNZXSpmv7u7a!nxhX2ppQ^wgm^y;Mf`4W918JqGW!U0 z^7i2D5sO5KnuoVzA7+nVN5j&pk4zcfX+`-&?@!i_XNtVsbz8EBl+v-INmfQ*@$xV- zD&tKj8|!LVx4r?c<HCU9V?&G~iGa}?>VL0-%?Efar>f7~Pd9w;vp+EJkM_gBmsN}O zJuIrwdC&uqc-t=nmA>Uv(y-b|QOkI~_JAo;3ycQIL7vDQS_;v#_ZjZ`eaO6D>Wcp5 z`n}iVJ*&ee!DtISvj%BX8iVqoHL8177@Ff6`X7VMG^LR`(VwVc@WJ*zFsDIlq_aCo z9k9PLhbe3djYXI#!JLA|g)_=MJ2j#XkkJC?0Q1*chm^{CldVINdxu}aq#YZIZAZXa zb*PR?-;oPL9_zj0J0H1>lXK~!@5p^a`zNzS$gZ%hXKLzKs4~}yK!{$rXCh|9cqwYV zmTVdti<}P)P3ztE?O(IF`YmZ4nuNrNVQD=rIb9WG^%*g2O!#rDARJ<|(=$R&Kedug z!-k6TZ^?(lW;&CQ#J2hBQ|gc5slAH59U4+CjJI7yJ|%SQCPA25i7m!N&+JtB_!8!m zqWgTKGUd1&<7{JoYc`u#VjQ@iXzj6jwkPr&^IPo2^4e4Oxat=5$CGaUVkiimA5JyU z4{Yrs3r&f=eLF+8BSPn^T&5lAWV>m;cySO(Fek|(iOFn?#+a$Oja^<>OJRRor3Haf z)S4Eu%3Lf)4I(Y(3~@vW3Cl|q+s2V0dHuzY#*y)PTb<i3GpT~Du#!#MpJ9#Bm#|oQ z+PCnn;>fx71ebHRVDIO7+=gXeUag?7W6Uc&Cus=EX{-*T@z{Y@xMp<-U~xOH7&Lr8 zsl~D%3&Gsrt9SU)J3=IcE(9jw*KE!)Cilp2n}@UGJ@Bq?AvKoFgw0@ZC&Z(VTzBp` z5}Se;%Em{E+S)?SSo$U1C2U6a-Y;EuIvl;>JoZ=u88-<S-XFs~aJW_$O)7`0Gq>|f zgx8cIm%0LKg`aATpOgcVO~&XW;$a);29P7e3$lK}^6x3Wg1PnZDJ4`^-J2FxU&j@1 z#_~d0O@n<<_<nQv{stHU1ue^$WJVM9Jw+&N(LIk0V3iB6oxp^kSz@!dl26|+c}GXu 
zcC(JY+D5`hJgWA0PA^ZP&E)GID=oN$?r~SzN<F6asH1eDt#GawUi%~3-xj2ZA-hM! z+>?nSAguLD9){LuBUCrQz;U$)Esm)2j2RIDUs!<xWQ&*b6ccOX;;`joqT`n>Q}iW0 z^5cd;>sh>4B|ZXOBh`lwO;>eRpxzbkg!E`qnm=G1AHq^v#)vctW}?rK^ai?SwP0;* zp_@Q!beUGL`ReFh{Rb^WKU6dYF#;-pTKc5E6c#0hyE*?}Sq%ASftrjghz(@jK|ahc z;H2@QJdsERDTVt!hbr2C7%$U%*s@pQd3G-%Q1Sir(T+Y=y5=s!LD(%771I9QjC;{t z`eUDy2Md#&R!}_8VqYk{y9AGnCMz{@(m8)@7=NV!F`5b#)}H6tPZ2Yv!*ODd)30YG zvoeoDHB0+h61{y9Ev%?}+S*hg4+deWvptn@_wt0Gjy+IESV-p?Q-A%`4tQZk2HvH= z%jmAA2&q`^Xey(-FOu|;d6}`knKGaCou{diIqj!%oAMkJo=0In(r6J9+KO39$jH>F zr66Gz(-(XB=c1&s#gWq^hdOJ}r;Sq74g|*o1&>rFY35=}Cq?ZBg&#}ANg5*r#kW#; zTDutbGKQ0tjaz1eA3F<sUzm0IU7XT6R~So?wy^wIdQU7s2l0|CjF)h2Hxz{eW<@Kj z^t@l-fi!;4jG4nMV~4z{aPsDM3wfzX(HCL`Me-svC08WU0vlHbXN%HCB5!x$Y-z!4 z`VQO2cWxx3SkI>;&qlR05}D!kN~V`sCrsoBnu=*|L|Kw%b{sK`8WhHAtFic7Kpq<v z8GC|NcWj{K4)%5{qa)6v1a%u;YiJ>_j7oJz%TV(SY%I7xFTAE5rFsfevGQO@+dgBX zQ<&+CQC}?E7yH5&Gx}nueKCtKcGMTk_r-SmVnx2#-+i&kCiIM8SD}<ujDpt5S);g? zecDzl*<(>jzxD`UD*1FrIi2|#v5y|s4ehi?E|pfsk{zQHouMoTYQr9d2>|b^SFjU? 
z83R0&5+5#=|9Nzn>#ZBj#-0<B*NC~=jE=G`#PF3%=WVlDSg&IpdEPdSN4Nx?w@u;^ z-o@u#CdMH+JsKuS^-OZ?eO)HT31=RxS&m>CccgA%!dTBzwlD!|3Sw_I-(!Kz&bauy zJ{hj}3tM>2F^q*za;?&NW;yaXzYR)8(y-S(8YcSE`C=cFzq-Eh=S4~9ed1j2t1d5u zUgaAp`wWkX*-bWgJ`*3_(hJhLddN}Kn53k;as(m<E)K_`G?RJpu$?;5LQ2Qvh$lZG zE5>AtcfKOW$E3-=AaqQ6bo5o^#>F2mMcL$tV#+kPko4kYSqm9mJTP|BkCIeIzZl1M zo$vAI+fZp`w7!^liq*y342rR>WM<G4CJZ7n>W)1#%nOhz=wT~Xv)QoIZ4>aITcA1l zsmp}2a$ET#P-?(%a17FpTE8JZ#>P2Wj#De`ake!q&v1X9q1EgZ6P0CDbpv#d!e(KU zVG}k7)pGiKcfT~kZL1GjgzgTnxrhO&UkG%0LB!wf2wjUqu@G+N<FpL<cr>g}rSqPP z?kVTs_q9oy8kv^7I#$;Msf-SSmRuW~I(RN>xODNAMF{5ig2TuI+s7_K<rG{A^j>W$ zXj#G;T57%_IpYS2i&Dt+aRbENKa%C+77pC~6Xs5Li>tlwyq{RzJ%R7-1<CJ{Tzb6& zGQw=XLDoTuF(s@a?XvKi5KyZFsGJcXlMv7)y~$i_U+49o(VdY2Y|TKU%4sv&4#NN< zS9=g`v$0d8qi-RpthB2B?NqdcpIN<gTjb_)lI<`ufMaQ8=X>^0b%T}tQ3bXiWpwXA zTz5bvj0*;r(SIOP7g)gNf~Ps-!|DLrN59+lB8QC6ORcmHBEffEjw7}mARcA)f-g(p zbkxbQP1+ZTWdpS9SS)!x9jj~+3YlP=h8N1|L&coztt@4K*PeigRNs)IbS>nOl3dFo zI8-XEb+vNjVTDO{8Y9+a!(2GcZI}SM3B|Eqvqz!FR<Q_5PU1cGLrGfHcDU>Py)9{2 zgsm+kd3?Hfv4z|_J}rd#(=>Jsd3^jJING+1&rbkl@x$-dAgH#$#ypKa<!5Lmt|0J< zrG-6^wCL~NU{3<#rKN+!Pg}^-rTvmX_7t?kyA$jw=vXc+D-7iOzPiFtOsyp?WN&GI z6mhY1XrzucrQL;7;CrAz%}}7oB6dQi2(F?DDXdab)VUV&*o3&$56?j7w^u+54%7$j zMhg}sp)*>LL*26!U2Xu8?4F?M_dlg|Oh_HPd<<5Lb~HnBWa<k-?f;vsMo%0#>MCbv zBgXk7{x|%nPXIG6Te0jqj`%%J5w>Y;h8nJrw<pFujFXJT6FfKhURNK#+g{MQe-5|O z%U?h(uoXK%g2^aFE=uQYU7O!aXeXzzE%L!rVCFIGkVksAbgsbIC7|c8&|BXTOPR6T zCAN55X<jL*Et}??kC>JI6F!29n-?qM8khycyke2IC9IA=V~56F1MbC_;2pR}1Rsvz z{^A{QA%9+MUEprL&R*$(8yegHGHm}FA%_(e>W}YnPweQdxu+<!?U>6xOg~ZX=tX9h z8@n@|!h8s)Pm|Zn6UDjTlK0Ewod+?PF5_>9p~Gd4gu1}r4gz!QxX?&SS5q{T8n)H4 zhZef-q%Qgd8g6Q3qbrp&LKk{}@kphf1lC7oN7>8e9y8>rkL&3`nLVsr`lOt$XoBfD z05jyIeCeHw@etfgoO7xtn9Q2gCx=n4R^IR5{}srk?DtqkQq(kL%I&=Etd}Ny!+L2X zJRIsT^$F1_<c&%F<H4s-#F1H(_mS#o`Zs11dD;!P9pUs)3;AhMf1|JVj(+9zos&qU zH7&lqB;0>fu6TR6&EAl5x|t>Y=Scpmg%nrB$l}O@6^Y{H7V=z0YW6Mc#r)3VjX)uE z*H*HDUmSrm&QQZ`dkoHVqUa}g*%o|FzOE=0!$gvO-_WoTcN4B&CUfpn_73i%Tzd{V 
z(YDLv&HJJ<Hoc0~=?JtA&S6~bcN`<>nahaiQXCbu5tv)&wR)n<%WRuoCBNJkm-&)E zHI2T&Qlq`_#Xb>-gL?LI1bq_3CLx6tkZ_sk?jM%K&Sc@awug8Yvl$jehk(WBOaAC9 zNg@^fRbl@EQk_!amV18j9wyW8A0%FBA$9i;be7bONtI_byS@nzI2L!R$FYiQKi)40 zUDS|H>ZhP`Ph<(}o-*1E=7hTdPL9xu?a#m|U5Emj1U7YCV?!0JH-|yfZMWFNXa-MW zHTHg2s{V4j%-+{s8Mr+nyyiK9<V_x!o`o@kbIQ<a52a}ww-{O-VQ?4G#3`6SP{G(; zzdxBgKY6J5#u##RvN{?%%^T!D!wFQ_Bdf*I(h@rBe$s79if9=_hD=E(t14o(Ut>e( zALeI(vWl?{*WF=`$1q3JgLm2P2TDSdrUqX51IV1Dn7}56tVkLX)&ce6&+L^iFy;z! zH7TO^yIs_49yTm*e8I>4;X@erRd>3^eH48bjBMPe&_%#7?i2X9KYWN3DH2Mp{?viA zkfla@6EW@y?CwWT-^dRG*+23hvn^b+#2qwzRG19FixEY;b0!=U3v}n_<l!kg@k|BT zHst}<VYF8WhNmT|;#f;?KuE<@-hm{s@-g#uY}Q-~fqsP0KZ@w6#X-2%$8Lfwk{ht7 z4HxDm(c|E7N7L<HjP%}%<V<D1I3#;QaE!Ekj54t8m<&guwLaE|FA}Np0cV^qCgU&i z`fa^{`z~6~@sFsxI09$10qz2J`bhU;%jdZWr)n!sG85@pu=$TWE^rj51^l$@(LmTW zGT3wMgq03wC&J<G{GRF9+qx(8%=Cl@eAqTTu*lPBe{%UK^7T}=NLxttw7sFUg?pU3 zvrQBk{D4_}X#%N!V15j=mtd#M`-Ja*Km4e>1<~45I7+pDM%o@2Q}izP3rApO$e7al z5ygdxP@D<2!Vz}&>I^P0LRhmqFE_-n4T`JyI<6tM!vDqgR=wUK%csBn*D6?Nl!}$( z$+j7zVvML-bx<Yl<MpGDe$qJ@bipSiWM<mrW~MxAL52&~b^DGZj+zi*1DDdp%Ore1 zdZ_lxg>nN-3XEDT?dxZ|{c%^8n}B*h_B7%kyw1oD^;g#PC7WjUj6RD5mgQc~%SOt3 zpp>4SNlwg6>*c(g!E>CCoU4f%d>v>b>QqYm&BA=`Qvc|SC~EA=hkpGL(a%bq(gPHq zetP30wB@9?$h9~ETPVFFbY3sqeS{Rq>5ZX2Ypx0X>>J?}HtskIWW4WycYaTL4DWZU zwYRGKPQpq|8fGPTL#uHW;abls^5ZP)^sX6FMlWGL;uwyZ!fc1Jh(7Q(TFZ0~KLHG< zB|Ogb&or!$5<LIIg`0(;p-^Uyv8CncD*0fnvw6Zh%>C(5l+qp8IFL`36JnC#yI)!r zD*c2UtD@O`NmrFzsHmJm^i}=EOaCU7Rq=4%JykU^`HB5-beuYisZ_fL$^|nwgT9TZ zU%5;<OMa<JlN*p!@iq>v6CX@~sb_pJ4bSNhR%S-GS}>2pne7pNwC0!&(dv+6?SU2U z+7)=ma4h^e1#EIDwKi?;p6d@riGjVWf<Sg1jv<NF-^vn*?BPdc=gHiMpAEw~w7a$i zFIj0^1__uwPb?op9-sZSn0SRudL&DPdFy#(xVS8leE7&<aZMuG($KH_R>of{X;vog ziF>qKCjKy*JU8cY<SWfNA4qagl(Vrf`FZXX+3%$Iv0>tMEqVH}O!26e?09UP`04B9 zkH<>#3SVP3_sVx4gt0%%BK!mxQ)Lm>1L^_S05LX;Z_#1ut&Coth+$)fVFc}Sg4E56 zabCxC;g&DkD6+Z3x`bIcRvK`E%{`n*$9Yb6UR=SzT}^_Hs$)mpia)x{OUmZet;Rpi ztG5tglVBafy9#m5f6S{?>hbf<?c$?-fd0qCik4N8>GKEl>U)<fk-x}hkHH1hH+y!m 
z2Kr`?>v}|~XG#P#V2HZmS}??qoisaQ>G9)TVlwMEiC7XfeDht(2%rZgNpfCmL{-b* zWvJon1yOX-U7jSM*Iq!fgj>MHtu*vCQnh48&xT2CcHyRoBc8S%BNvuv6h+vo31#86 ztJuWzmHz6nuF^*xBbiGR`#yD-k_Z&Dc`62r9P2%i>|3hLoQyo!&5aIEa5<cVR*y-E zVJa=Hh5(gH7uYTDM*Hw;5w}CXB)y(V7UxVN+0P8<x#3j|pVtdtmq$YriS3!Jf{|F5 zu`a-8%TDakGeK;4XR&k`1DY}_LvwNO(^OHnP-aiT<#if~SMWjrL}GYZxt0F?8*=%X z=&|6Z9osO%Y3x<O)gO)-b{hb_3_G~uewf7)zZO8xb1qgMzukcQC%*mdwWGMG$9#@a zH(E%(E7P2Tb=SAA3Inl~$z{{j`zsr9s6t2jFWFt~Qbf2vas@MzZ&j>YpnP|^5}k)Y zsg~hrem6_P{Q-M?8QscoZ1{?hBgTfWSxn|3=-6NjNd0f|t~d$#7GS#L!pi%oD4zz* z)5<m~5jMGq=6?Zt*DA6a5mykz{qdv3TGMZkZ_#*|Q9-mK^vwbK>yiJKZL!cwUav_I z?;IhI9E=a04{o0{Z?ckKYU0If^(4G@T=G5~|HEeiPo3@T0_H7Rh!%A^&DmaNkFz#k z#=SO<Ao>f;K#V*)vd3L;=*MWbt}#J0n(VTXWponyw4%-)EL$F2)8P2AI&fq;{RUo6 zwk>s=nAD;DANX=8^{UJ0K)kSo$;Kom8?sWmhxtS++#!&SyfSzoHsFGvi`?^EpL`}B z8@Dj=VC4(0f&VM<U`5c!ITu6!Djv(<CNDi#AwDya+<5LgapMH?`Lbm3$qA%m+2{<& z85%eT>dEI|iNJbNR=O-95?6kusCM+q7ItUBt(l!d9&_i5S3e;K+^MPT!hxG#KEO_m z?U7;pbsdU_mii=7at{zYTFAiV31aytq;R=n=*mrKC9X%{(l{5&f(|dc1<!CM5Il>r zgVQ7^VLZs&%j3J39kj719*Cx5#rk@AiWcW^-dSbrs{vf0{{|B1=uqP$kSIBDbt`iY z`ocgSXLl%Z(HuT3PkJO?y?NfPVYdP*r=mbt+FT8p?-?PEoInnG?&*6Oi=jK()l;I! 
zfcXrAS^G)Eiqm~N_H`zG4a}G1+DHDqVvsl{g(TG7s~H2gj_U)sxp5yXnNA0nOCh)u zX#ub%AsPZhbAjSs-85(i9Bq3{5#NxFb=B+^*q>K}JDo<`>AmPNx*1P)v!?x8RUlK9 zEzGIHr(nKE``=T}x1#v-z)=HzmgT}<<1@F-v6B{q)FgzVO5V+`cJl1=(S2I-F@J+S z=U|i{yA|Y#Tnkn3=JQEd=0AFVNW{zVY-4r9=NY#hG4%8v(({G>LtloX@j1U@t6YmO z#ya|VnqC@jm9%uY&n*UhdYl`@T59-t&(s&X$%eEk(McyFdK|Z(#-$*zeh)(b;~*It z!)u<ugRTvzE4ack(S1(Vt}N-du=9=Zn$hSt-&-@3<d2n^;)XX##;SN@3CtZ<H{@^} z_Q1)4JEGUjbI<XmcwcTRxbpKm@51?bzo!vLQg!(zUd98-#I-5~jeK=gGM>$=ba={E zr-?;y7_Uw&ihvAZIJ?Z~wY$*|!MHYwUiv-A6I!<fV~0IVUA`=EiQ*1w{TwD>#kDQB z!Kd|n*vUkmUmcV6<Qve`aL=TRn6Fcj6L-(x9-$+ee=me9Rek91VDd@V#y7~t)zd`9 zZjwv-hohXkh4jCN%qBAl?(fAEAv52NfGrqK!RorsK3ZX}*QIOz*_}QM{RR_u%s8T5 zJ3ySYnpCb;_iP2#e}RB8zOr^f8Y4L(JSCx?@uB(>xg+SH47oLymb}vnyUg2zBP935 zlwg<x>?OtDMvBT7^3dN#_0@fd#{S2J(1+KM=D+FK)?VCD9rHAE^>x6LVQUIuS4Le; za&N4a?jodi!@Z^sNHfH{oQ{2wOX>|;00bV#in>`>pcs7LlP!9I8EJgoLKV;`W--LC zB4HcNPz+->CbR0y*_fv_!gKYX)tR#zIYS)(bUziBQ|)ip1?J&KbSb45$d!#L*)x4L zDW%bCJ1cO#s{-Sm|6f$#=NCxsrhMlxR8CqTrvrHyN|SgPLHqGAl7{mznuf4YO9dWP z&~{`gEl8!UJWQvQhncj6he~>phuQQT594Vw4|TMGhemo3q3eW2axSpYot&0WxACxu zZsg$@x|WA<Yx8gdt>xh)x|oNPse^~p=p#IwNoVoUMyK#_Hl4`BxwM#vc3Qy0h1A5u zMO1^($ttmb2_%LNWaRxd42|dLGKTiz=n94gb95y`Z`UziX3El5j;?3u_Z;28&@VW; znW1Mnx|N|PIJ%vohd6rJ$?#nqZ(wLWM^7+xJx7}vTE|g{pFp1h%IkcN#q2zR9W>B~ zIC_zxl^ngq&{B@JFtmuHR~TyIC<W@IDvn=g<bfRJ$JX&2ZDrJ69KFHNV2-vk^!5tY z104))<*0yZN5AJNJM*AlaFl(zNzVf1b!Iz2dW<JTFm(S4C(9+0;s501Xoha)XgovL za#X?4<s40AsFS1V47GEVT@j}baa76BN{(hTw3MSdh8A(u$WRkUEi&{!RdGC@kq2_L zh@tU7d8Nm&SWli{WoQ6LCor_#!>T`tq5tCOWQKmr(P<3502CVn*o<9fE1ZlKo?t_e z9A=R{#A&5_!Gn`xd}<NONwBbe6uBWN!cST0qbLnQIzQ!UPFav`&=@(TjLv41zm^nU z^CJcb{zXF&`<#{eP6~ZFMHqsj8N~@wwvQBP2#WVJrT8e(h9HHX65*r78-h~(lsn6L zwH5e6#80{Iqof*wGX0b<d=#A_$l|9o`zS_35VH{NA>*xdzmH@w1Qq$2w)!afhM+Ni z%4#2_$Pi@pQ<iYbf-wfo1WqZVk1`6{Pe#?x2>rOeb1_;_PY%`(7B|$BEA<!DSd4JD zt-Q?jL%8c|xZzIaR7cRYJ{Cc^*%W}Tazq}xCc3^2aQ!ZyH|W=GQtH~))#RtGeG>y$ zV>wE#6Np7viLeIIm63Q@05eyUlx+h=aNfHul?9J&i%)AsLft6g2Lzu2P6JK=4g>ZA 
zb^x}nCU0#kie#J~iRCN@L;}(Y>G|r+uoY`q!p+Z7o^cIvz8ceaB@z*12`e`PKLNN- z$gWqD#1(7E*;mt9@cXOEG{%9_EeMJLR=^~{G=L2-7vNe$#{Q!y(pSg^5VivL0-D#5 z*Z)yQ=C1dU+y5vEwPBg2H(&wNO}ogr?TW%5j1cUwSFl5|HLOK_qS#4#eJU*PE}vC> zI%4&i?4e|jY&`r^fLV`W#@{Vu-}cNXmgu`#!j6!av66~5k+$uF6UOhvGKTA$7vYSL zWZM=t-;T0ux1?tf(`y4}c{X<KqK5+Y9y-c$+rl2+zDHTvTiC<f{U|Ga3wywnLN4r7 zogFpf3bDGuwYUYRyXQgG7qmD+a9}xPk0j{}uCTds@i1wAO+Q5WJD7_vdBU=R1cb4u zzWCDO7*Fw~hfjri$|t+?d&#AOOWv<&_&>?e*N2J^y+bNr&otlMz_xMhlP|Qo-iR99 z?Sy*E-6%dH^__l~OB!|*&h8%IsJr>~WN~vY@xGqoEII($W<LDBbt7PJ&NIVv+Cf!* zs(sioNwn5wAM}p}&ja6&oA@1v3+(gIz%uDVeGT$PO_+bDbJtRh4>9Q`Y#DHI3YU9) zW69p<%-AyqRYy;%OlgE8j(w(ttAN|tC~f{5<l&cHP;R){$IP@2vMRBk{4mIk^xQ6T z=AZFS_5reVU@m5-FI)F>40kmJx}@}ZSK%J;TSTca%KR4;QMpnK3%)}tE605`8V>iC zYu0sKZq7$<VZ^9ov282*A6Hso-?Gni!fV({;xRR2L~ZMl1cwN_?{SN*tiD?wxB+5Y z#!~88F46@SxAS;WS9}tWkLirpPXLBZZG_F@%M$O@TS42x9-d<A8!y8#`$&Bl<F)63 z1NYilYCF5EP)kniP{y+Y3<cCa2&u>}MLOp$xDF&{%a{xju`^oC&me<$_809>lW{v^ zL#MygskMS%B95JfV#G`2$WDFeXX`uZPpl{6E|*xio;<f}fD`>WrEv@UNWdM&p16kF z1FU`z=&p6>y=KWTbKLpVgPjL$T!P`Af_pm-_N5?>h~zZ*X^^ldE-v>fL6a4-ujk=0 z_J4R7%{rUi`sw`I&QeBu?3Ttz%>SF<y3?}YgED&i4dg*rpwP~*>zJ>EJ_UjYn_D9W z$VxL1tH6D#Z|RJ`(EF?-3*Sf+hpr>9ypbTjt|3R?=&#Vd$5zi!=3>MpS}raJui%?W znuiNs%#@ey&dzuZ7PnhRUqIk*w+bjz86MoX^X)S4l-9*F_x!ZoYUjNyRUuI2-4{NA z&3r8Z+-><foP&k4%Ps8E&a07p%U4Dx_QlqSN!3dtvJ7a;4+&fnUb6xbdKJzsk7c&{ zl)ebRF`@V#pGdDcX7vO}*&&`^@R2!s4!;R;EWBnKxDCJ9dxh%i@)%c*6$E_E{|)K? 
z<|BbWM8n~`=FI_t4>RQGn~K1Z4EgrWVaylN=dFR_=@w#qtDFUE-<l@wRFRu+Wr#~u zBz4bMj2N)@A`8CS`yqQ_=e}Xl>~aQ%Cp2JRI?6L#c7EXHzcD~=?8{5`Cq{W{S)yxO zKAv!5>k#<4ChQ*&U$T;QDm##*6+qoeZ=SS|vC=6k$&33_!+LRsc*YR3k~Hm4n)Rif z-vWuSH8VYthV(8E*;m**xX|;*5&qBqV7lMW_O~uJY6kkB?qG3MGMRNCt=I6oY@_&_ z%pW{;7f_!E?ek=e9dp@HR-qEm{T1p%+hEY&t;iO%;!g)Yz3GLnWcnmf?-zeM=m|d< zU3f-;ocX1@khc6_obP9qQY)ijfl0-<LtXdq$vykH2$x{5zyo|mu*dKn3@#A6<#gKf z+~M~b^7mC&NG5O3PbG&AMu}^WkaGtkMduN6^<WA=^oS+>4;hl9(Z$pTQ`}v<0Zs$V z*9=;Ls9*d$RuJc*H2D`eg!!+7kJdxe<2FO1`42ukb%&9`!wucS_t(f9uqZ$Bc7O4$ zxAv@lJ6a4}3oi>h?hrQ|BBu^3#3v6C@8P5T;3I}K9+@cKmqcQZ{^j&z6D2#3J{ou# z5<`;Sy)16sLZo;3`G=~(fBuoueFutU2Os<#BuQMENa7w<?D@8#NESFpju(d>U%*d6 z29XbsFA`(+kTH$-i-+GLuQiSmm&A}8jVt<p_a-Fi$R%VX*#m1O=hv<yn-PI!C+%Yh zz28RaPt4H1Q^kZ4hEVS779nT5MR*AC9-!B279jyJ1F#lpr~mv%hP_wl?1m3X@f{aD z$c|prxU^GHJWZYp-qGCa-%)~3{4mtlG0&I_9zGD?Xk*fKv(lk>r^0*oe{ysE!R{qZ z#Px67uaC6J10b8Jc!Rm&rd)DQabgj~x4L(5knjoAHJdgE5&ERn)9Cr+8W-0FoKNBk z6hEK*r=M?S7avsv^?u}G-v`$jxyJ{Wg81o;Cp`S<n5;n8H!}N4D}5H<rgllP5|SKG z=ATRm8+tb<?Gm!_WMayTpzx0XkFz&?D%p3w$G>Ethh8RMoiyjMiigvU&dv(6dv>*V z>)M+>)0ITifO;?g#W6)Ty%;m9(<Xpz<Oppm47HSp*J`oU!|T5TJ^K#(^pxFF8d^p_ zVFZ+EPbs5Kz;RSn#?NEXsRwqfUY9iZMIQ&}Uj8TUr~iZdQE>MqQ`dDHT>PguhX3ge z&40X+wu7u*mn!}mNe-^lin}(G)^)=MbwDG+pz6Hc|Ne9)hTP0?HOsJ61WkfnfOWz{ zeqWy>4xLF<FU??1pmi^4#2*%s4-p_1^s+`wok5H*=aZ*iekBSQoGkWW*FyX(KrGBZ zIb}b+LMCU$lE(R}SgdA}t3WQg%u<CT+?6-j5mBAzX{1j}B;3xvcTn3WCg1x2=EmQi z<eCG1KCU@dAnMl~!=5Cs+2;p9L69UzQ~*;5IgVtp$9sG3bHs{bzYenC@#~nIZ)p0G zCl)3I2H!#jUS7B|@Sb)cL!Vd}&QC(wC)HN^+h?Hddt!QE-c2BJPp%eAULbEg$+f|^ zPjYRL_7p!2EqO`-EwE?)Q%}nRgWACS+oCFzGI{Yfarti~;OWU?=5OSIrwaq$`!^E* z`Lsf8xIxZ5{h(NWgJ_)!F`$f0a?YGOeS&|o|NXE<`1{+~cL9dIV-cPLJO?-fIDxcI zi@dBHGFjo9Kl4lYEaB6mbE<^nl+$JGDLFL!?&7GX{_*L|5DIa}2df+I_x7b<KH4ea z^t(sNvL;0prsDik5BoCS^9yF0Ukdr9jy4=9$?RH}Q7PG#EJuFFEaL-v#3;Lbl%r2+ z^$oAQwE8!1Kq-DSC(2ipD>N9Fti4fm;@zUAJW6^t$BrBh3eyV%x>C5Y_osOlOB4J& zViynH_6X+NXTH`lFEE+Y-0$9Wrhj6<gfGKAO|14eeR?7w8+KFI&IvcoD&-DMb$2=r 
z>ka#4kHusDdZ+TE=FDWgQkSn|mn<hkJF<&_U!d4V^_R#WZ}(b35>G`}gUjcEW{m#Z zZPB(OR+mz+vrovGqxn*G5BeSRKVU(XGsEryr~%MctzdOvzrA(QC0WAj3?O^5kFfgI zwuRRm!g`L6b3B2dkj<wO#iA9Y;Z!WU(0S=p|72FNwnB_Z9FB5W;jWVwugkOeqQ}vK zDu#QA;{7Q2)V1#`V&k4eN%%Fb7O3_I>Oy%DZW+IPI`@6{HTbUg2Xv2`jTWL>CCka> z_m78ULR9K!Ysf!7u*A)&?zG?g0K@Sv%<E|81(uZ%@pO9BlQ7z0=+acA`hUnErkdoP zP7MA1A-tKwej;S!L*${;#mu-ncsgD*wvh9ulf*aHk(;OW*^jZ_!3NG#fO~FF_kebY zrc*eLeP@MA<KqncYzV&<5(?FU->CV9%si9UlO4)>Q_HzJ`Qufx?#zJRjC8DBym^ti zl1^p>%}gO@&tybC?n1>|(Fu+Ys112_H@fg?((^;Jv%9TJf9!k^&A|99LC=@sS{CHP zaM?bq+Ehw^D#q(LRy5qPPc(dOFTw9J=pPsHdpB4)x3D4&K{h-c-KiFete#U&Iu`Pe zhH*{BuEwXt!)WjANTb;3(9C~8eKkB$PJ6IJZnSVLeda-O{zJtu?K9|$dUl+MSH5s7 z^}cV@`&0+gIG*Io+y1Uw>A@j5Njp1CR2P%7vjfx5Zo{hqjaKT#O6;%geb{KTz4jh| zS~;D)jBGnQXi&D#!V7m5-;n52Ff@Dvhuf25kc-~YZinXRpsUA_n`ejiV};Uh*aFY* zhSz9(tt7^CLqd4k#-(KLxx^TTU;7+~NoDMhM$G~ra{2aN@{e<gqHzpqI_D7gd`HY5 z#ft0j@#9A-g%z@k`7;Rter5@Cq#r#nlT{}a!$5b;Bs)G*i|fviZ$28D<ma6LUiFvP zo@ERp!RyimIeO5+Gs)18(~38ID@puZ3#)5*O+G5X7Mq>Fp;ahGhi2cr$bQ7F=q3C% zTmR$ki8t7Fl0o!?8LUutG)F%H>f7^?%^xR<S>KX|kB4BI{PgjgNe8eP`q!KLfa1at z<ScA57DP9G%WBFV!)HM<=aNP~sPgK*^ckQPZ9$tc>Y}|Deg_PDKMVXc^;3Cg0-N?Y zzefN!3*X~HS7Mp-7tubAi{un$wA|g#jzRlzj4#p(yy`b&N?fMhDB}*!m1hQv*kA5W zB!|x@WE%N6+rO=XLFPZj@n7Fu{yY}auIrmIv}qdo>XSI}^=ahRC;90+N1<ePDGC=d zw{X1cLgvdYC=~;R3z>y2r0UaQ;^1>+-KXjpXs$nB$c!pw`_K~l^=0zir-Q|}&XVxY zQZcXDccxIqQ}|C|cyTF%4bWJ)==?u+aIk=hFFRMfZBIfPu21|5!;OocgIx{|#ohkW zQo6K~f5F2x*PZq+F<!vWp50;|7IuPI&$^@$UE#B}@Dmbu;22{!DafZ6?g?1YV%d{? 
zu}S7^hs!@WNG3!B1^_I8@qjA8BEWjUUcmc+7C;A}H!kE10B8Y)fJ(qzz)#&}!X>~~ z;JX2b0iOb{1MUDK@GZswKrWyRU=M*8e;JV7fCj)hz$L(sfHpu+96}5LC;>*mSipS% z8(;xo31AIiE8uOwX~0FmkAQYS_ii#F1uz(p3n&Ck0?Y%f0Q|k1Qzje)@-g6BKs%r} zTA%<Z0cOBhz%+mzPy^TucnfeEa0|fxR-kqN`FAV<eyu!<@ZEZNXHQ}0^0=jorR6h= zFfS4DRsH)VcK+}uL{}(gEE2$rpMk{K6(#LC{-qe^wEhGqGsx}#vhZE~W`x}lpM)?1 zp@6U#g3Z@00t|d%5<={Q1OXx35yDC@?vo+3AcP-AIQJudX%68ugv<#Lfe=?zgmc#{ zLJ~qbLIuLj)FLF~i%MblRg16?gp~-NKxjw!B*IAuI}lnCvU)48?s@du+3tIuxS^J% zO?>o$>9c1k>~j<|=FNJ*K1(ro-kce;=Fe9+=0Ei4gNlJ^v*!;Khzy@DuWXAV548QT z$96MKRKUKn;toatvKwETUn=&Wo7B?yO~LDFdwZ-8{$)!r+4;MnyF!E3;Iduov#XN( zl89T;Vn!?(eJe?pN~-ZpBg=0kiRl*7<5ohzo<Y8FKn4%RpiI*Jc5LwEA#kV<A%kxx z1}s<kQl=P4HKN1ve9=Akkk@Y~hF>0G5un*w`-Fduu#h)y$MoD>aF@2DfZVwqD-Ivc z@)AQ!SRUaoj<+y={x^NRg@oUV5o0EBj@aoekh#c~jwF#RVC>@68Wa^9{I_Q<!mei- z+M@+N`YQnZ5$SeJz?pTvN{xPrXEJFU53*k6ug~2}-oKSJv5B*HRkqn*>CVdk((FrT zc@6=-BjB#wKFwsf1sw{F(D}C>wES<-!T+!8iK72odV(C4j;~!UyV$KjR%)Mz|Dz7P zB$GWH)V%?&d-m6$J<we)OOneX*A0%4-Er!^w+Pt)B_I=!4oC$k0MUR5fE>_q#Uiu< zDBu#{BH&ZNIlyT^GvEZE0dN>_5U>}p8?Xbg6|f$#60i)w_?KL<IE95k<^pB{CIhU1 ze1HXD1SkRVfCvD7xkTuc9y#Iyp#3{k3_t-bfQx`;z(K%vz#703z+Av&zy!b;Kt90u z-7K^oNG2c^5D$n1gaQHq9pB;{4R8@~8qfe>{Cg2@2doFw0PKKifH8o4fDw=m5CGS| zv8;Qyk1UEPFBaXyKJotZ-~a#ky1{*9DJphI-1)cu7YtXMMc8~3g(Clzu{g}||84jc zdJ}(3x|8IK=5<TD%K{HH^pPn<r?M4q0odCwgG&)am<I3q8w!kpfRf@f{?Fj_{{@0y z0sj99Sj}aEpyQbX>H4#Lf<ga3!+~{uW#5X<@iKv8MY{rtV1KO>WI{S}>x@U^HyhZ` zGj+xrDzU8Ni;K>9`b?y!-Ho4fBaW4>GkxL9;78t_@s>@9qnysTSoF$)TQRckR%Z*S z-Ic?Y_bH!L)+zTYk1EeAU(f2U>aUuiny*@-dR?_&byC%#>ZMLlYt<vv73u}*I`vj{ zllrv!Cv`x!BHNTbCi}kZ1=-H*wb>i8U(J3i`&jnL?2Fk~v%T4Qn(>;Y8n32J(@$&C zzNp=*{XpBH4ay14NzO6ll;$jV=IqKjniHVY>Xz!(=?3eI_0{?(^*8nVa^J~4lRMHd z&M?z3$FS0<H`z>&niiW5n~s|<nY^ZUQ=mE09A_SE)|y9{A2dH<e$KqX{Hpo1Su*#` z>z8N9tIk8q6oRk@L4opu`kCyu>_4)TGy^nmYEEf}=j_XwsLR(oPv}qUztUgR%X06} zeZ|nvXf%#8mKmoTA2r&I&l;aMzGQ4MesBEQILI{Aq%j#xyG{FfC7Mkinm$8SzBB!3 z`UTaIOd;lQb3b!`bEY}VoMRqt9&Mgze!yI1USNL8ycE^jXx?hxV@7S1P-6(@D*vuL 
z=2W&QM`TrHEz2TVr?NiJ3Q$F=l2z#{oocjdoaz<To2pgnzp1yYud4s0zNHq>*%jG~ zvR^<KU(Qx(LbZ|FOzlYRChdFLkF=L^!gMjZsk&LZIl4goaQ!5GbMEK4eGCf2V1v#u z+;FeKX;^MpW7ugpV0ho~lOfG`pV9e<@hjsk;|SARrjJa8=JDoP=Eu#e%`cn(Za!rG z*!+|Eruh$Za9)qR*u3HB(x>uX$on!+um}QFzA#uhPkB=5&N`4aOf^b%pNgm&Rqw0L zt1hXi>L=Ar)gP*0b#HaFTA?1GR;hLB0`)lcMD<klQuT}K!|D#Tn4Rg&R%Pq5$7WY$ zugDJ84AQ7I4{PRY4r`8UE^5Bgd=Jt2x29bqX@az2Xlsl%Svy#()f%)GZJ~Cw)~cPT zy<a;`J4aiq^=OH9y>_#9n|7!6E$ty~v-XVk6YV94o0t=u^LftKIW*_zoL;(Tb<gXz z=yvGd)HUkP=$s$xe$ut;dg^2JYQ0%ss4vq`)<2?uRli$*9&&M0e@8zk_k-NaxvjZ5 z5XK6F-LS~;jA5nWgyAE@Rme-Wafxx4@iXJMMr!PB(wWQ<)Safirc<T}bF8_*JO!iw ziutVh8*_M`Do>X;J@4VX$MbgO9ms3U>&O$FRv3#Ah9G4x<yd98a)xq|a-;H9<r(D{ z%CD9GQZ{FKv)Zy^R5sNcn1~Ikk5$)r$12nV)fV*x^?m9Yn2;~4zfccFFTS8Ts5zjG z(Dl=u*Gu{yh7`j=!!^UBrd0D#^S9<-%(grSn-$ZT{t|@I$_Yy64W%k;X4Z<V7S#gv zGWDqJ>)G8kO3g~mE1I`8XLaxCD-0V9&B)}832)35gslkDl!KH*lvzrRQm-^YcPvo4 zl=aFZ%Ab|}vi7N-S8rB_Lnqvyy(#;(>^<2hvd=;j4Ae~2tk!JM?8UH#Yjd={bE0z| z);;Oey`?*>`$H$!_ty8<57qyaE9Hh5!VJ9)(S{mG>#K$j4Y9@nMvXDoc)xKPrsQkJ z{l??Q_l+MLFB?Nl;ig#A33Gnl8+pg`zRCNQwQdQ%PDc=|j8;xkE>sq)=V1PvRsXF1 zT^*6F$)1!Qq>0uhXoqQwwGU{W4?;wS=M?9>#|0!pSE2j2?zrBVJ2kgG_dD}%=8(LY zyo|h(ycy`a=ki|4`)A&fytBTdR$?Q7SrDY`hyHjy>r&Q@tlOBta#g4*LZw$tRV`N4 zsFtZ#s8*`hsMf1Cs5Yy9RJE#ZsM?`Vr>h@9f4|^VuTig8Z%`jm|DcY6GEL1+&(6$N zW@lqo7_*<ntXP@7CVPE$qvn=I&<1JyXj$D;v?|p7LCF1n?K|4vw70afoZy_WoZdN# zoTWL-b5`a=>jvs_bY@+lZolpw-3i?%x;EWyouKchpQCr^pVAZke*H0hliqnje@Xu@ z{crl)`k>q%xly@sxoNpWbM?9A+>yB@xs!9(<-Q5|9|xT{+4z?6pz(z9wy}@t?<N_v z*$MOBysuc7Zx)1n1X|_Rtfy6D)lT(V^?CJg>eU$g>)BP1rrnyWnpVy0+Txstb5`a2 z21TXT-D|kV_=(fh*L;t822+c>1z`+=LgibK^tV+<RmZXLys!FDHBLQQ?NAep;Z60@ z?C-PXniS0l%`D9>%}vb~?GCLKE6C&e>A6qlZp!_K$;<L;5QJq21}Gm=u2mjTZdUcs zglqa}`f1`cNt*teff|EmtY)gFM&o=}^RebfO?T}SsE|$Cx3xcLBXg1<5BKNH%-Nc= zKj+^${dB3iF}kOrI8Nve>W}C@%)Orbq+yHUGsE|We;9Wf4?!G1Ge(*ereRngu9{5H zqD!$9ZH24|=eW8T1}cXt-OBySz^pS_|59bEbJZp4O7&v(+96mn&uPwUzSMjJO{dig zEl{nf`J}ALS<^5vZCUoLg;_hYc4zH1A2zT1_Ay!dOeD7o0?zP-8|HTNvb^ni&3P2V 
zAmjZfSEehAlrxn!<uc`VWwVkh<ynzg$}D5nB;>R{Yim|RR!f$kQm8Dd$*O6pMOfrF zL$kD~1So2YdYXE+T8LP;V4mz^AGtZy9D#K<+8l3Im{ZN^=FD{u%$N0voNKn57n&ED bmzZnJ%gigxE6r=n2Ug9O%?XB?Sn~e?VB|SA diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/t64.exe b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/t64.exe index 9da9b40de922fb203df6b9a1d0ad4139af536850..325b8057c08cf7113d4fd889991fa5638d443793 100644 GIT binary patch delta 32032 zcmeFad0Z4n*Dv1FFzmx1GRW?*DGDlz3kWEqgEk6^`-X}EagCx5ibgXe1BsK)JRz0| zYTUB#iHUg>%_6t~uEZq9#HeI%9FrJL6xaEEPj^G|yl>v$y?@-#=l*jiY0jxrr?yj7 zr%s)!ZYy?rRD9%7ouQGQ&Kz!iad6?vpJ({Tw?7ucv++M6eD~<f@keF&QvB00td9Q` z@YmRF@!tXdJVPJ9Pp0ow`8TSN$#DFFqWP33r~dM@Ij;CI4{lxedj;*Zt6WTH&X@P$ zxGNyNqmUa1vM^0|?*N|T+&QkSijC3?O6slQGAMedhI=U=uv)`K_$vI$U!Gg6;kaZs z(An>NFLw=B%Re0*7TDV|x7-4jUoYcN0G=z%hmZ1?22nDkNCmjc2J&6Kvk(eGD1pu5 z!~EskQ3~e=QU-er3@Do$kC4FL0mCRapqx@kZp-eod{52O$zB`WyL9#fi>Q4yMZ*ax zhXs3OT(TGI=^mA2<TxockmGV4=D1|RAruM@LyMn=lUzEE+lS0Km!bMMjyrMa#EA{= zk$g%D>*}7+{gFbXJKX6%)4fIMhKF{fi};N#aqrS|_b?PH-WJ5)Q_km#O}UO?clDn% z=ZH;GRcB-%HVRdBmPk=64rA}S_tki@Z{3a7?#K&ODIHHBFEu;mhg|#IRH+axH;E^z z>JaA#7X8M0srkOkC5Yfb6Zn)fQUKaSVQFcEg5(xmH90=H3$`kkNiW>ZPN@^Wlv=w| z;goD`oaPhs4Kn^RM?7g3k4jo=2*&{z#AfLyIs0=|$*!nM3l$SY5tCj=_C>Co#Cw9< zoz}-8st?hd?&5LjIlWRaN<~NBsY3Ox{#EcYwP3S&C`bHK5Kl=pU5Nh_Wm=BX&_FBN zH^vujM@z+X`e*8;EBDY!6fcOMnQCcBNM~P@%cq2i_fZ8Y55kcfB4%He1(H(dmVHG? 
zY(s1BMJcV#6_27&G);;{p(qIj>6C(~l*dqtNxwIH4p)+{4FtVRn<xJ0ETQZiHL?iL zpe3I}gwmV;Ez3`X+q3-8=kImzi(JPz3KCs;=@*uV>Gx`DeKf4vqf0`nR31%Zs30yY zM?9P(&PdJ?R{(uT$PvFc%{I+4%`_FvJamGc@`&)>fGH_BdW{^<u6snM_Q&k3s?%3i zLKFvRoI2O#I<!$#u47`{e8G_$H&wcWDheV6MFhoj(#1fQ>)E}}7fwoC7M6n1XpSp{ z3_kP5*nNQd0raCzt(>KtN~-o@FL?Il9ZvRzXHWkoBo)tr$<JkY+sSlZ@%|puG+b4a z#Vh5E%h1osvb;ijycJh*6aDqbl?rdFfwl&^LY{b4rSeR?m2xJf$z@1zvbA2Z{PoN1 zd9MiTHZRoShubdKBY9W3WRgpEi)mjq+O{9LAe-YZ!vd%bRCfbd#YSl_m{ZPBTT9K3 zVe_RIXJPW)aJgI!=D5=NNaHeWk`qj-6TrTahO)y@rX*g2#QXjPv&*o8m?k+UH4@iS zg{xHInnYZG$XsKTteS~wl)|(~VM+#*;Ltq};(qFjhU~XUkGTwqm)X<a8P=*+S>D+$ z!+BtWtvD_jt;j1i?IU$+7VA>ZnC-c72?G8Y_mN6;8CHWc7d)u&4NncGYC}qs6miSt z63x_~6XT{rAa<?K6b+colr8$W&}Dd9MW@=1p=IF|>Q22+!y+FwB7(zk7g?eiCdx0D z=!YbZW13Z+ji!mF$%5^QQ4lYK?^o~%;w_h<7y2Dq$n*&-^@+5emG(h*VHAXd=Y8UF zjP`OFu9AdA(iLSVgsSrv-yFA#f_Ss_G<BcDuo;Y|T2kJEcv>p*Zl5jBgI-mK7Lqz! z-$t@r6{6qkUxHE9$t0eYa#5J9jFGXMsviE0L`Wh{BYxXaUgh^A{z#>Ak|16zF{gF2 zgzESD5?!m%eFWPf9ucu+KdTW}umnKCd6k&$u@p&K$#_RrMX2|q)Ihm{P-LVgtUqFO zm)M5Jl=UdFrN=00!~rfefa-LVqT92wHSknF<g*(brdlXRx&3o76HunH#Ac0(v4sAW z(Fc^#Gjc}jkP)g#f{3REWGnTwhEncMJxKb!jZ(5g^`n~W_qvrpN<RwrnJ5K{MGzOq z34(Z35G`@}g6*&oTBTt2Awk44IMqwAmj(#-@{mdB_DQD6rYVBswNwlWOx>NHR8Q^L zp6ye&+JnZW^(T(A9*99BO43p-TF2P?PEY;b(}JzeUBCBJnj2d%rc-X19VK2g5WI68 zz2;+iamMC4=JU?ZCh>Es^Exb^XuW@q?FdgAm!n#L4<UJtA|cUwL>l4&lva|Kc|8Zi zwb_Ea21r0hIDLVA3rw(GhGZS3Solj!_WqNPYS*V}xO^!Vg2Te#Fzok``%Ya&lA#;~ zhmCR<#QQ8k*VPIxEKm-^Q(&aNy$+R-wSJHQ#h9OXCs#a#3h!&ZN7M8))JlS!)^3h7 zx(pxv2wiweUW^~1Vv5=*gZGg^iQTd`U%<+mUy=r~TQHi@S)WTwFtyQYZvlg%ByCWc zRARLajg>1}O66nxTacW3R+iFGg&#fJfS&b~dlus^T28S<O)>26DRlpkf~G#k!D+#6 ziOZ5MVO-1UOQ_CP8rLmU@4L`)x#HTmEb&yHERj`|KO(+I#E0^zb?DBc`SQa3xm2yC zwv?^KvXdzwCx@dDV%<;jkuqREs05ecH2Tmnp>v5TZDL$hS<f7(YolZOuhI+{YGM&} z(hwxVSjoef6lEDMLy;x+K|;|X!SM`bi_9u8tnwW0W|!e{WSHxCnFuL$IkqNOj(89D zlp~)nx%O_3=n`&zn(Huu+9ZeXvZwufS@)wf6!oE2E(+yj>7O}@X@2oKnr&YkmzoRd zB<DJGpMnG34nqSX%eAkKOUShs#~GzG9~v~Lr5q@n6VQ+e(nEKF{nM|cMBlX%GXx3J 
zb)R;F8PO?7sILzwxmoXWmGRdP3H40@ttUfSo_>ICO)M4+4yFSqdB(D*^znt&u#iZN zVwC066w*lpWgZBXa~^==>RlEi#dah^s}!9Ixk_(=TwR?~=j;m0GMfY{n^-etTd)mF zhBV!S1qJl=--{{yA(d(mk%q^x2?23@RSa7lkYpvzkPodOc;(u&LUQebal+&&%93y^ zrB0}7(mzv&S{bC?$U?38htV8oYjinB6T=yq!E#A(VC+jR=%#8CoExepp34<a!Fr+v z3|i^5beo7U*S<u}_E6o0k)UQ{Y(Zr~Tg{#|1Ex1tlP1fc2~>$(v0e~$1%ovl*&A<t zD-Xgh(u=47WactV2L(!KbTs=iAgb%>-ijzu-|#eZT$aTk6%r>Vp|3PKngw<0&QHr= znVq7B+=Go`2^K3FA(JZY0jRw)FDYX%nC))RgYWdfz)2hKWeFF=;Q``$nN7Ee*u16e zzHD=+g}S-M4uM?j$^rtH1^$K|66QY5K%;)|Iq6acV}Y}yMx-Lysyx(U4O&(fCe1~c ztD?LX#R57r>w|-6pf;5mrJsjk^@AnHN$awJxd=Oid?5w_Ok^2A65B2k<tmMrq%`f% znP`kZb@!i0jdFC(H>Kssq(R<1N%J~TEDg>;=`^^c@g2mJIu;XT&@YSBaEG#Fv5VRt zX0j1M(boEzu+r{g<0#m*&*aIIsVvTs??u)~<VzHJNr^01BRy-wP-ExHlqL*RH{>YT zgEWHeV6tGV_Yz#kp$$dtXfQi}qJiNs+=<X|*$%-iM{LY>7=FikkR#R!^=(?)w_S7C zPeJLK={X!%ky$uU!*Ld^baI3&i!-^%+IBdhbq*BV38mJ8_!aE3smM@B|7!UV!J+-8 zE1PBL+5gHY^vnK5A$l5PGoWQgqdLF@SNs>>V7u3z{A=5-pmm*X^#RiUQS2i_Xvm8c zi}C0%G=)>H$CeL3u6bSAFNUO^Gf*$IDjB&HfREDZ^!t~G=#fF|5Q=n+qR64&u;IbO z2V9Oq4k<?jdp1m~8+jt^m1|UE^m?##B>b=WISzR1QnXe|$zvY|_X&F;7aFjr1${kK zT272TA!1DAl5pnUC6n(S&W3l%=6B|@C%UBZFNd)sT@oX=j+O^PAjXRzo~5Dj3ii0- zIj5gAGmPm%;xzMEzYviZz1V?}99<zcvg(NZC4?D5hx2v0Y<lRU{121a#n5FbIg?=! zibr!{R?N>4&tl4jPoe^pEs3qoWm$w?OO#qy!g!{cUYW}t4IAk>7S)ipbYab5Wr3fe z7o;fcI@E&R3}#cpcj%TwEvXcjfw@KW8CEh8(nOX$ZEg20@j?-7@MA*#EiKlbhlXo7 z+jm`sMQ5hwxCJ$$yF=qJmO23m03wK33$&AmGfPD8%%g^mj3)rKopM#(DC<d;+l3;< zb9v$kRX4%7J}n(bsqMO{s0+IkVTujtjx}*_lYXze;Or)wq`6qmvS@xMo5gyUVdNKV zcx1BfPi#@uCOjI%Dk6`D#r#Ei7iK7wW$Se6(ZS<cVN@tTZx(wfDuz!qu^mx){Myd! z*Qjy)^D~*LYd`+MnXIg9PrgSMd%o*r-7;8vYS#BVF?VAMYZ%uzbQev!Dpy&EbOV-^ z8gB#2KAyTV`jO#^{GtUhyV;d@uH1cnh%-QNOlroaq8O2E;<RUV`(ZoeI!0(9x;``6 z&1pSiD?zcP#^|e_B&D+wA$v$`1>p;UgYWwSHhg+oW`e?3)?bPOX0{uhJ*(<g_kfk! 
z2R&96DS3k2z+t`MoW9a+7!zs@9|y|zO*gXNCj|M{(5A|EI28(?B^%?C)_3sH(z4Lj z@>|L;c2kJS)~12ZjUqedo*UMV<I?stR$)x>pO!~vT7x_qpP9z$jk%ep{X5Dk07@D4 z!p_9Uc1L5;+V1dW7o^JHU9M^vH8d$sOTVHLSbKrFUt^=9yIYTCqMHOUvIt!dCyu_- zhE`yAQ<fs6MGgVcQYPJ50Fqss9}V@lcRo#n)!x!i^9U)Mg0$T!Es7m2RNb(QpxwpW zh=dU$h`zM6rAXT-oZhf96qFKM#_G&6y)seVq~A)|_2_XiFQsF3OgRtBr0RyU0(X`w zF@xI|@c<nwiRrWOQVDfpp@!@oD9#hoJ5ET|?m-JfkkkVS)FmJas2Ji5%M*{}h@X?; z4y8gB4#qgLxaHcr@52s&lXxtF;vuI&uouJ`vs0QdLumIdonfw+XcqFKF<>jqkM>$5 z0B46MCh5sl^`YG12Vspgu&%Lv`xOqPMjsd3q%$NiblebV0F>-fWTv&9b~!r>;^+ht zijS0VnLQd?U|9GoI*7OjK$Vcpj!VD&%C5)825-S0*W#XX-kB;CX`2whkbLYION#4j z?E-5Fjft^k8lh%+tP{QZVD*fkCLKy@Uq`)rAh@`DJL{iVo`{tvRt#BA+SI{%F#*A* zrRW5y2+`<bpM6lhoD>Zfm*My^w8*aQ55q)ycL`^uz$RoTz4@TZ>_yCfCQ_ygRgyQ6 zz5_{CB~l@3gSFZd5;+NCuBaUniv`}U9SR_b1|xu|3&vk5^CuDo?L#mUDM2x1yb6Du zUBouqT5DVu>=)3dfpkJzioeyQ_zPfm*$yYqKBSnii_ji~6Ixed*kf6P1Hub?zxAjW zMENx850?S8Ow_KW)d~epl~?JBa2t_ZLHeSO+D(?K4ci^gViK@RO6Azs@nO~sq^CF- zZE!N=!gB$=ij){VHY9V#2x4WNT$WwyMcF&lpsfx&Q|&X9T-5$5=c1iWt2p?pscv>{ zA*c=7fgpOjb9D{cWMG)6lt4$$QxrRMJ_6oqpwwj;*aZEh-3dlT-2P}hZ8C#FM{8#v zr+(BRV9Q(sCa%m!;rnR`MT9hxBMN0<s<;ei8d-C<eFIm*0OKrCNKN*dv<aN(K>z(p zO-{NDlR*|oyPIl>k|TbR<91qllpW~a!{cfLI?T5FSGaols2;oTCOoGJSPhz~)=h54 zXOh*zUhUa;a+XZg%EaRm|0Ddkb!Ftk@X_!8Ezmgzwn#6Tg$5W+j!QpuumpgmlQR)4 zF*1v%%JR_<taswj0NV<xufFOlcqm=kKZDu2#ED(s>`RKqg`JNvJ#+kUEO?G9l&t6` z#2J&qt|lf7TAM5<tlTB%XK3w5eG_yIT5}BsX090dB`7(f`}G@HAvTL2T)T+6^b;GK z6g*e#N!4tG?-==Qr<8WTHk$C0$adPlQ%};Ke25Ati-M8+kes>SEy?FwIqOA~wJq)r z=Zr%JGnLpjO6xKlI7rhbQs~EyCH3Pg`mp;+p?qr?3+)x_mZ8<Kp}j&vbLOLmi{nbs z=eco~lryH<jcA4R{s30mD?Bt?&f#S^_7IPKGgK%BymX_Mz0@m}-&4yj_8Ktw<$4-9 zVjIj8V3d#GGE@LXl>@K}``PZeFpZB(#q+4G4%!0=DJP^!7g%2J0bM57VJLM`h7!#_ zn1sGa4F6j&d$+e|r%|Esfrww_kgmIi30Th>de3*8<ECN#`iz{n1NJm&b8i@)E3k8R zy2JSrK=~Y!x24v<#ZPIE50c|mTmCC)#@~2SI(T3~4nqX2?r?VvYw6RC@yXrz<$GAq z<k42Be@x1?aRH`@%4YB@DJWCc-XF`OUSC;5V%*e1i+EkKkbbVr9h$$h^q4{^B+4pz z9=Nn`bkcA)hlgNpa%Dxij`)6EFbs;?a%>rQxw1sr1uA)T!;G=J-=(&O2yE-z2i->} 
zKuS7TxsCNPZf7_R`=vh7YB7r~nC%6!Iy3{LxnGKBs+9SS%O$PF;Le7d*q&WW%XX5q z2n5ysXu}phJLSANb>?dpUqQ^?Y1@fu&fO#PEXs$*Qg#Jx(~f8B_a>OcYi2u!M7DU_ zB%aB&Puf+IUG<AqU-_Lk_43TxeN&MZPF*a$X2-O~99tcqYtOF0dcFPtSm5V6QesLx zRFYo5$^exScUqz8KgsoKv3;v|!nb)$C^{$<VXH)@td<9d{*xfS{<b>`UMwEN-nudz z^W4>>uZ%^OfY!=WD&Zj&mZ&gHg>4=(V^t5e4*QBx&GdTxQ(ma3zA_H_#@1%iR~|)g z*xH8cE8)%olC7`&Kt@LDE8mon3Hr+QNNQ`Fs;^vvw6?bCmM&IX+Z=u6vob@Gz7j*1 zYi$Nsj_8qNI|?5H`5hs`bU}o_5k^qI1HDL0`u#unt_aZ|;ILjyMIvr<;=xbb%wk(B zjU%)EKq?7)YO5U~m{1FJq+*Usb4tk(eI{3Ib3SOgc{th2iXr5d?fX5JA2WPDQMFKI zR=pWxNi96P@_T*dBPaodqw55{IWkht?0S{!;cn*CU`w=qk3WpzYH2Q31Z0ldx(TkM zCRYRgn$ultzs4>nORlyVPOMUW)k>&i(~ZMf`pRr8<~B9POK<Z7T7oH3C?B5y5n2-| zma3`N5@SFXOM9bb@swb&7NeS$LF;Abes;Ba{lvHhxkVK;L2wFCO{xfoI(f~~aF`Qv z6<7Nsk5UaR4QPN8dv>$nnAvQSOKzP288l?CC#jZXyE*qk3vnjfi7c5#{JqMhw~t5v znGRrNz+8vqTAz{obFTi$E=VgUNUwkR_`U^Tz)-1lLteJFCHl&G5cWY#GIcv(maVPS zGRxMsLSMNSM#R1{HSR1imMej(Lx^#h-gRc9589hf9kByvZ0z!puGaNnI1m7Ci1LcQ zmewf<#bT1JFw@60rLrr3SiREZ$brYaZk%J>nGzT)g(SKMoJFR65U|WKMrHQ>(V3GY z4(S*L($|Yr!Raf1#?~R*G2KWa*TT=}<YVoF6lr4cK7si_!ZPiH0^pSF$X&%gIJz#t z*JS(QrYZfNNk8@;28D%XHNcy}KK&rYdKw}kQ6?S|T!%1@v#Wm8SKft!sj90@Mot#5 zR5yL469jlYFt*mawO&M^tW$|Oy|v7wcHQ$(PNwu*rUNfh;rg++C`(hi>*4N?G|WT^ zLeXids4Jj7dl#mazOobv;Q11J-892{Yj(PsMN9(^ESw*E(?Y4v0+aredqU9-B&%~7 zVryC7#<X4)U~n0}+l$;V`Z2p)&2rDn6$-;T7K~gb$V?yaWi^e_)?*MN%>|4=nyH^d zkeCJ<i@p{HCdNKa6I*{!h`gn9t+AcxDF2qGgVP&T6lAg)*%ajBaH2dr@uoTPCbID> z3zT=$Vk7n>blj0K3^fzWYSle<Zd^biV&Kt)PBBU=5X;6k1zT0_`zG=S!^=0d40_r1 z@Ek$BV-jJsWLGOwC^xQHol@{Ox=u+a=gT7^w@9k0dpMpf({lY@VuJ5KOIn5Oa@_9G zT)Sc2FW1!BLk930paNNNvC`f@&q|g364Fzubpq*-h@@0?qdW%fa<@dtocl-|YGucR zOBE!!Q1VS5QY7lCbZR!nJ8j86`wV72dKDScdZTc)CL)#=6I)x3CEv!%k98RkDwFnK z@om)ugRk97UX03cz*2_;mi<@!T5o9NaUIb53lJ#G))sd7ShV#Mgi1`d)ReMdfO5W{ zoW>n#{?NBRkH}hDE|4RZiVYMbm7oM|6eL}s1T7S#>F*G}S19v9ahQxsfZQFxy!4g% zz(~0-hRm&T<+8ldo|EmGK{`FOLprVJp&3xIdp*l~n3VY}w@g-?G&e|@k>!uQBL!Rk zpbGdMAo)1f3e7U@JZL8IHkD~^JtbF|GE6dw9_-@rPQ3~+k?ijhlkJ52&2t#W1+6;~ 
z#EzxF;a4H&Wky~^x3P#5$=2!M^m0xlerFE;YMjGK%AU>)1bF8_)W&R|w4O>Vr9TDq zN5~exqxuj&)fq?~i$S-Tj10wUkXuHk1{oIms=hd`GN(?GO_RKxw&xpBrF(Rg=kRHU zk|K+8<Y{){MC2$k{9vZ;r?cafCde@vqX2Wed^gza+DD*fX$|h-GRy)c!(&AlX7(ab z<%=(-eORSre|qX}_DOO-e(g1OExCK>9Ulz`*Df3pF2mUOkrOPjUbf8VI<bCzXUB^1 zG|<lrj&%Vh$7l}->5$nz(=XRP)4k;Sn}Q>A)D8AQ-#L-9(IW!gx~VEfF>s-XIqtNc z;new?_gKGv`I;@v)-Qu^TFC1A_2OTc573#pr5O1K=d<{f7x*uZv!;{*eD7Nu`u9z{ za}%0$7}*iwS6#l)P!4VFb;>pu7kJVev3VbNk`$u$3>XFb<F#lRLWgg%h5biFl_Sm; zKi8AS%bojW@eSI^z!8G1^512r`bR}2br7O3BAlPw?%XZIg%|8%8?*tQ%x6Fv-}iTx zJ)kFl@&;Qrz}#o)JIY?@Cn^c;GK!~NhG8o4h)f(4LE8)s%8}C84+Hk`RxR5xFw(!! zNXz@g2k|Fz@t^G2KpfyUvZn^E=d~p)G&Rn8;5Xc$ae0?@62yN|OLEdrPz#&TLi=_i zTy?qKc#Oscudlida6|hc#I4a+{er+JI{bmN>CX__{SSAyYhT1VE)MsyYq!gYpIy6| zkT2mQb$`R<!nicrZfn;WgrZ~mz2?zg0XS8y6w0Tu!>LBiNcLlDbPpJZt_CO0^|cX@ z`&E}0RZbTGT&di{ja0cv*EQBBEjVcJuSEaGLgxe0ctlBt%WPWO0K+6Q6w8A4VfAuJ z-k9SJ+M%KB#k6UB|Lg32+E)JAYi#S_63uPqmfllXb(H#4OdtuaSV%o8h5)BsW3Vj^ z=WEiWzj6Es6$Op};?DdVz3)+0mL6l(DI~IT4^%)xLgH!Z))Bb?(o7V*tOS!Ohl>hM zHYx_&{NDjE&cbZ_=wOnHUN9FVQ6vWGJz{n|H1!j*tiaxj*waQ{wFC;HUiNC)JW*e* zWIw_pGeY>KN7$f@D1Ph_HYLNv7hh*vGLrcRuCfyuefimcFjq!D<L+0KBL_@zQP&N< zYrEyL=v;>Dpxbmof3R_xG5qUwtSqw+KlL(uC36^Wxy~+U4i4~o8QrfNjIN=r>g|_V z{~^(X^zv+Jb{Wnh7QF(s5R7YMSJ4G7!`DQ!;Vy^W&FLw{9m4&Z^E90Izs#N-GSPbY zAW3PoKn>AKdz8>rCG?sSny-YmD4|j%^rR9hS3(cVAyF2+oY8{{VW%RpFF!#Y<e5AP z^aJu3J}pJU?agKn4eMl+sM_jm_mCnEvL}Wn`Mv@<Tf!jl%&V+^Xe?h>&wd)3=r#N$ za{1)_&O)<{T@$G<M6~=GY%4T73Y$WVN<SKch8r)ksae^)SkHE3jpPGQGAS!fvy{b{ z(jxo5PU^u8iAaail@~oIvK#297g)I|p4UImc9;frA9#txb6lD%kI>`N2o-)UdjXDP z3vQgz`u03_&s1SuLZegs$`XQ&U=eU>`$)M`UUbcAJP~vgPzY}Syg)$L03bb#nnDLp zNBDX+!pEopN91!dOXgMpCl3AGsG7X187?9(A^p<wUj$pGClyCG_;mO6$h&WlQktH; zxlJY<ox2P_=~=*VQ=ehCNPkq4vTD%wrU1Idygoh_Xw`Xr)#ubpqOKU(xC~2nvWnrs zUPI(S{!X@i_>iOk^dk&X$xQc1?4B;1qwB8t6ZefBaty+s%i*7PFfVi88Mf!C6}n;o zX)jr`4nu`PT1KRoK&sBJ`oW@?jeIG<59?0V4`tmNw2^H}^^}O4zO>4pmJY$H26P$1 zwzG|9D?jZWX2{Ow!@gw&+1(-^g$auj1bt<h28O~IH)$CN5US3Wd8&o&$PTgocmw%- 
z6oeHPYH<@50YN-Tw>K^{V>&dEzbptzaj)f9VwCTyl-LKfqAqaoxeWf>K*imUpOI-# z<j@T{D0dFGA_H`d;V&<`Vlt+F1LrGUO8kr*<OWxrU~9`*Ig%&`K|vnp-(jOiL{2>c z#r97bw46r<V5`QeBRlg?N})1qnF&`JREnoU!6Y$6fX`GLV5BYeN$IB&RyU%{Ks#mf zU#n*MJ5j0a93rWA;zGAF-N-EdbUqt9GBnT~Ef>Tm<Fa@_7$<SHY|Y4|@L4Dr61Q~O z2a$5pSfw73w#>IKvigxRq2FTRQjeMG_*2w1gMeB39J@PmIG??hnS|~E#a^hamO~ed zC+)f+UaVZ`6CO`Q+d*7z8+B8OgyL9!MAZG^$qowX{Fk20B?JfUz>+RF)>C2(NVhNS z28So>kz)=VNfEXNuk_E>u6KHfk%K&$H77CP%obV9*+S887?(kwY=2H~{*TS<a!xFN zz=P>WrG{SEL`{{@Q7%g!%{M{T2T@yC;ix`*-e&gns1&cOxJu$Oys?>`7&Wxl5!fRs zzrg%~fe0;eN-Eq(+Tt7;RS7t8i4);2F5ZmxWp262Qb(7C#UjPZ7#he_n%S{Kg8a<s zk-j%s!{}+f;^tBn<n(>$$fjqSb{hsHQk-%brpPI_t0}5(u)N&DUW@*Fihzz3$7-mI z_7rV@vafUV3bnx+Zrxm==sK4BujuNGE#oR@$rnVwt2a#W>;_1qaq2=s@NhN2w2iaf z30m71%x@7hxD>Z+6!J#D)+of^rEVX{p4`W^?D4#50Uvdy*a-o2bk`^aE@oHrmU?G` zh|rqFZ2p++Kr3dP-cf<!lUwvXvLEZt4vY!*`E0LzGNkR}&Mu7^rgOrU86^gN2NisG zFY7&aIzRC{_QY7Dz8|f54!=ghu6G&YH?n<Wr;I-FEegQyFj1)b#S(6N$V+1}U^ci6 zMH{I)C#4ndp?_?#X!D@}!F8(ab7<NfNQ4EjjTSOrTe_yq!(~X>$Y$hs5139-ZcS)( zBmB94Y+xJmWBA!lwkJQH*L=$^<VW*M&TJUhW6<smvcBm;E5*`XuzvcTA%7&a8m`Sb zbvgEedz{llnusXu$#rqa*JbFoffbHR;`3YBzsB|E>z-zxj2qk~PKzS3O*cpjKkaa7 z-fU+2@egQx+4Awv>WZOp)Fa)h1<Wv^hp{&(@{#W2G#8Bs*+ZNUOI?6qtY<7>vnHet zK6_ebDoc@00HgEV9ysqxIO>UcOvkx<K<zlreO(R&Q+5MSv*rmq_+D?aWfNmJOv>R8 zJjF&&N@uI9{n%rZ$^-Ay^c@`{6kWYh|7ckk>64QzU~(A$!bz4i*_71%T@twLI=1*r zp-M=08~b9qM8nyPsUjPaZ!c$?C->!_En+7o$Mwp9y1?BKx*{ShYMLsz`5kn6qA{O; z4SU$+Wwdm`cKj4>2=h|*G8Q={fe$a*J#tDPO?dmYCUhr+C>-w);upu*z3GGawWVy} zjAH)&O}2GL4FAMARySk2^){?;9FL?*p8{#%ywUL1BQW<;T(}yw{v}}P+WVMRdfO)8 zxRV24CGM4dzux7_sds5@KXn}~K6%Ty1QTS@jj2zV`+B{G)o3wzAGw3|-MX@iRD+yR zK&e~+Nw#%1qRryZSb*7uf+=B@S7`!WCEXD&-Go`K9_sovu`x5dM4w(vtA@@GMdpg< zMvIklak=(_<SZq?9-En>S;F?uG!8&Lp2$s3GYxLEjKi)fZ@o`Dzsc=u!F4d(QP^f) zZw5!jJxy6}HNAdTaBpgX9DfU(#4@<OVeq$ngA;Ykg}!uTV00O}+S!CzaXQW87*1~A zA(J-a5>`2D11iyFc2q|Cdbx9bT!wv*$yiC+x^FCVu%z{P%;i$pHQWtzvX@{9t5`SH z$I{ijJ{HAMHp^!B=(h!y5Ydt1vnm`4>z$rJ8-aok{<z%q2POTZevq#d;+IQ5F}pBi 
z9FFgQ(C_gbrvIc;Q;HA-+F81BgqaItQMLJn5nbd~9$Ihin`XQ1R@UhWH?`E~3XS~g z1?=O(ZvF06Qs=ZFt-)pRs*)AgBiIPoj=NfqX)qoRi8EVZn?6|Sa*a|7H_X|Vgq%rG z@V%WQNttS~Y{Fr8gjZ#@9XTOSf0rSml3kn=>{DrLmj$~&#~6Y1sFwE=*<UGDl7ne# z>jpoNG~^WQ&iy{f6r%7z+LE+M=PbcazjoTSM?q8XDMLVo`bUpo7vPLhvv}!&ehFKi zM>Fo+H|fpa;1a+zm!Xr57IQ6z-AL>Xl;JNG9Aft$2#uKoN^26#HJtl|Nht_n5c~bb zw)r4SD>8Mv2(v!rJPw=w4Md$lu=%-^^E82ALvZPt-~GmR78&^Oe`9-#dWNh5Blz=W zewq)?DX{Y{EMT{aqWed6<RYcWd=NWgM`J{w%3%`4BQf2*RI6KA&&Cud=nl#hS#gc6 zXSU+hVe3F?zscYY9O*OY_$N?E|7^KVz7={_^2$K@$8*Jl*fr6mS-R74L`c80!cWSq zXZMP`x>ucfI2Siqg4oqMug+Odv?@0_vYt5UIxeM6!#>8hKAx?8H;RAf5KEdjJT`I% zR)K=L_G7A@@NVFkYBUJyG1a*PY{R@BV<uT~9lYvgw4c_FLFg;kasj7c%cVD166kJJ zY;I8<CVmnaaedBYYk^k0rOjJZH#N_#vWaw#(+4#Ni?@||&+kE_`^K=;3QLl-@jyp$ z(Fa)0{8f=Hfw)@r-;Sj)RW|x?o!N!?mwon~R;uz|KlbT@Y<~G^7P4@VW*^I6*uXD1 z#kTC86DWJ&rN-W}Q<VJGZ!z5mLv|lq6s6(kf5#U6J0hHX)kjaz&_C)0H}ze`O<n6U z^nHj$KbV#9IO1G})-@>0F{uJgcQu<T?#?S~_fXH4<D1qnpFI)o=TQ>txo1^$HN6-h zX7B899^(SSQSu$blOuH>4=&S^FwG=(VNXAP&0N-LZ=(C_<3K&Iq^J8sge=(`t$Be} z@9n~W_bf|T+Ldow#d4M=@#042RU5-LFWtjGoyf+O2D|@zEZFbYc6WaQ$nIxxm0Q<; zPlq(?|IJ=s7O(3$6ZMCsR2CrJ{5LzlEUZ`fDvZ!JBH`VMf5CC;<WuS}+Pq=+xc-fa zcmiz|phnTj-?NzIWAZ9MwHY~SDxf-Xp2BFTYARomo+*CEaVr)H;-y^ix19QWxEwd2 z8YJ8IxEyzW>S)pZR~$5kc-g+ySh6uccCn9_N2gwcC8o{_PbG6@_C%q!82#bW?_i)J zoBm>)43xd|VFY`nF4(;>4o$AB>#5-%*u$>WkBmL}GzPQZ?)Jf4s|@Bl?{~;!{rhb4 zK@0!Hzu1X`vHaYB?f(5>uEuZgL1iet8xL9L93I9mILI7_qugId{_Neu3*G555sN%> zm=CXK-yd1;J}w#sFFjhJ;lKEt{oW84(dU3Ngk;a~e;ju_^Pqg(aSckwWq`Mh#?k+D z+;K!^>hmBScerPu4E9muMfbr`K(;r%z-QQZ4?DJ*=Qmfv?;54cd99;aXJ2JrC;PCW zE24D2z1l%t`zl+wB9?#XG4|AoMZVjyrH4lEF6F$+w3dR<4^ic^a<V?cq)V@KBwG9m zTVr{FHy&V7WpTmp&qalc+AuK6aV<^0*CpLJ$)=V?2R?)-&<E2Ege<6|z2li!HqiT! 
z4XW#i^Xv)s>!>h~WSpK_YT4zoAt7<HYqSj>;V?9lwk;naGOCtkt&9)(`2;46-IJ*0 zva0)nP9QpMV(V6hg&o<ebkC2E!lbar??DhZ#2U1>K4b5#ObIS{5qm;R<HC<&1u}(a z;>YavN+bX1To$paPuwFgu9W-9mQK0i5&2$vqx2l)innyoE|;PET()#oFa5buAkmIu z8ti<xPuZKRjMmpdlkeh?UtudTq#v?GVGvgnPWN#J70Hr?0bu}!Tdh1fuB5{`sHb=d z`I8<r(OhsCSTiN^JC=uOG3=;0)C+zMiyt)VbhLtQ2x1!2O{rvZ*x*$eI~!FVGRB7@ zF%iCdf%-|d`W{i{-rV#^1F~E(69-O$cwMN!kGU5g29oV(%stmsT!i@pb1zk%dxKEW z;8AQ>d06j9JF>!Y0mysM8nFSB&W;EwYfcA)^y~91;N2kRw>pfURlyQgkK>))*+U53 zoWp8X$MS<e+Wpb$V;XBFR5JM&&tf*CZx6t2pMoXod|5c%Y4`w<8JTbn;VRg1X;csd z;V5v^JmFSZSN5}Gq-MVt7=)rm$=X24X{5}p1qEXQB8gmv+$z%L4IunxNL`Vh=_u7$ zdJ9FrDtDg3v2ij}T4f-u_-i1oiDAaIArTSCg330p1CuS@4?%QycleqnpTbZIVT;!W z2Y<3n8BXtGm`Q)Gl{sJD%QmgG@^i+r&~@ke$}#N5I&si2sD5hgd8ikhI22%1x4%4u z5q4BN1``Z({x0cWF>(O71$*Wz|7NuhPvJj4#5!4%{hg4B$#p!X&TNl7{{b6o?aznK zWL4Hloz4`1oel>r<7hwl{Xyom_DrY&6>}K3MNUaYt7)D6Lms~Kh#Qyb5rM%xt$<}# zBzcFg1JRKgw2r+|(JjzR*>5-^udkKKSJr~uhks}rJ6{nwym%W{fR!|ragnKE2IYn^ z=rVi&)GVIMNjdK_1pE!(NG#w^ZI66t21|b=G3wW?(1SNW%e6;d0j9hJCJ*Ejws>6w zt9;~He)&`u@#wSsuw3@RqvjCHCNk7)zAkALuy*_BYz+&*+f_YxvAed3vGa$*k?KZk zRkRdP)6W0Xvose%AY<wLvus7>06u9dd!uqf@Rw6ivOT*+UhoDBMfASbbBEaUs_>+X zlQ75|r71`%1>%pG+WMS3)f2Ng5Xm6h(Mp1ea)PKDIRPfVKez=kuf=893KE9&>(YnM zFjf`sSD20r<U%eVWQVKf=sho?irI}+-%x4rGpzq(-FtisPX#jXQes>8d01J1P&BDg z^p(O#kUi_N92R}a7kWl|<Kphh$9y!M_YXpz==&4x5w8zo&p#f@Kib4TdVB~!p0T@+ z8~K0>ENXoOf9)HVvA%nk9|y{5^p)3Po3*EF9>~_LPvp~kuvgdb=8uhI1@<IEE>;3r zI0JA<2WtZo`Tc3O%^uDBq_R5uAU>y>-Ll8|_rYq~-XkAYv&1JJ3=P5{r|m9nbw&4s z6~$g&_eH(*Z7O^32`jz3nP9thA1@6BR<|oE=P+0Zz0-99wDXE?^7SIaMBsv0|Mz_} zGSh!z_VTi|WZ{*N&z@>eE)_n-mVDHee|H0W>Z7Q%y|jopdR1e%Ld|{kks`kV81~Xu zI$nbrnm~c4<i+N+P;^eHzoV5ZPO`L*BM0*ON?g7{(Wj-!GPlbjC(ZeL(v1I1+KZAV zB)X~m@GqT~ZeX>dqHqWBwA4yV7j(Q@)Ym82lOOly-yh6A`Z$Xh>e&5{Ct3HIs8l*P zfM?QuAtfk?E2Y;X%c1A9@KWR~T#BVzZtb(wx&sG9KBS*SUzZ_t3=q5uL{4?r=`!cv zRtQ|26?OJ%Xxci#)}#^AbaA_||KC`L9dP?=mTb!>)A%Ec*q@)2^Mi8P(gT@%{*&yT z13CQLquH$k@%)yB%vdMz`2<qg_$_-_vIw7pgH066`CU2el$gt}`H)3C8N%;c$Ob-{ 
z!gtPLi=K?<Eu+|mCr9u}quAF^X7CU1XCaOVe%Asv(9xgYlEW4|hV*{|%OJc3j?q^i zNxN`C)|g{Us29inMHm7Coh|2i2`ML~gkpBlF@(32u&94+;=4V;YX6nWKRJS3``5^b z8MtC4B(`AXfH1LizSrP(0-6<9{hpr3COy@Y*Y0PJJQY5;C7Wim#emNY6yhakdUc;v zqX{z(E6d+2Golz}V@b+`+xVoE`~b>MVtjQVzqXhKSI^)5SoK6V>)=A_v(W)kUj+$~ z5){NJbybiADOf?0C7q1;N?L`GCEcAZrxv8^3X(5fR*<REw+d1yol}tc(rE=rm5wS% zsr2z|tDL@E`amX$6{PfDQ*byhIBBbbV-#>wwSvP3%Sn|AzLW5B1%Hk3#R`sr!byb+ zj+K^^CMtLx;evuU5^l{<0?iaiR`7F#$0>LV;UNkx5$>npR|)47yp8ZbW>HH-I@ggT z1&14*lfF@KIw+Ew6+DFS!wPQ2X#pqIDuD#T-%{{o!gnfoD&ZRyJd5!43N8@7R>AWL zFIDhT!iyEWobag%UO{-Sg0Ck$YnD~6bu|V0D-oLrPf+mrghwd&YlH_V_%6cT6}*=4 zwwZE1b%bA0@J7O06ug=6&lP+p;f)G@PGd#jBPGy65xW#zBK#!<ze@Nf1#ct#Nd>3d zYf^=R)16DnqTuvB6=}YLhY((%;6}pp6+D4(GjJ>VjBa&EsY*mD;YkWk2QHFP!3DyD z6g;1BF9n}U_}v1z^@W6ARq*+Q|DfQdgr8IJa>7q2cm?n&Qk@c5PZ1v|cs1d#DflMB zw<`Ef!mAbhHNq<ud>7&63SLY2;sW+YP2{+2OdFgikC0<V<J3}@u2A|blmJlrDwG(7 zG8L403MD|Hz*DT7sZj3EU?H1~)+^W?BiAoxP@_j15&xY+Iiye$Ksl#S-cu;JLZUmR zP<AMkR8S5p6o*2|0%gBKc}SrMpzKm83lz$HP(HCLq$vui6r>LnO145N2jvZgk|I;Y z3d~Osw<*LZg-F^3#A=1;tq{>pBH9$<AJfr7I$g%AQ|haWIG|tyH|zat*8XKc#I9u% zHYf74*0PGt-|minZmY)X`wYoueKTc;B<`+Hot!kJ4?=XGTz*Lep7c*$_!$xhgKXC> zCaY48r58qYb17)ol_+4I0_FqA@9m1ZNknnQ-$<id479%D)nZFr$Q1dD11w|PP-`@- zXw}j6PC48jSuiUuJcm@G?kqs-lc*d>Un``~0GxR1JpP~({*c0prAkH(kv-yGw9{FR z`6r0gJE>vfvD_l<potupHqG7A8Ann9@*AZs?t&Xm3EAy!-9pStW|;u(mv;+sB%aDr zw@3JI+>GrAPT0Kg;)?JJdtm!`I-3_irO$PU+Hn(D{r0#npg2F19)!JyOY3pq_5}Ak zFR{DZ=cV@80&ze{GE_Q9-f1~Tg-DS*sN#*-*}4pNvN`SAHLu8E(JSn^9RrQ7jmkk# z3aBnaCRp&sK4cRvhkFfTzwhYArzNq_oiY56+gaw$FrjHX^2E}cA=JCv1V;qkBY<`N zDp{v^E9qtI59o}rMh+vMzTNSJ61hT-yhz(;Y3N3_V`rK*Yr>?-N^g;InU7uh{&HkQ zH@eJmf=QHe#cR1@t27k`Bd&v`MY2tZ#*6$_Uy$$}x)rc*T?QSsP3|ClC8N%97)BvN zuGzSDyLpY=Yp83(0GT6X9KA^;o<g;UAXj`chB)QMC=4z66sGuYa=cf2{Nqae&W?D0 zIbI?%rJn}`XCp|crRqXhF{sfrh23SSYv&tkCd{rqSA+fyK;usnf?F2aqd@SkNdo}h z+_OB8n|{dx7Xb1Ok`wFRrWo<6%iunMjej9WFv*;!E&W}Fcls-3_P{L-`3M(RGelj2 z%Fx+maHtG)+@<h{x?9hZ1XlEC-@g#UyT&rl7yJ5eUGdlSPFTT4yeRM^R<P<9NBiGe 
z{#Q)Za@P7{Ilp2#D|#t9W_}e~5G`s80qwf!6zVLcgI%43jmy|8FC|!mCtxlv#OwTJ zo>ISj;01#1cFU^as3>~^H$Bl+klT4&<Hk$<cn3X~Zmujyk=qP;mMu-w#y$-Nj1hBO z>WjS_WSl72?zF7@m?&LAkzQCvrTQVKc9#n-!={odo89Y>RE1c1US#x@cSyYu#>J#B zq{ZzKu}VbHGPdUB0et*2_TJ0Ee0?|8^m0%B>6h4_FAwlr2p<YrtD^R`6>Q)uy?Q)d z>T*S=N(%wh2H#wYy##bdy$AIuKKk)J4r|`n#-4m7FXU!t6n$L!{1Vk)UwI6R79z8^ zvEN_moOt&MdC23^UjdY8ojlyN;n*0ZO$!<&`nBYUetP}JL%2QJm-Tryluzo*M!lNM z@88PSzS__I;qT!`egD;N?(T$q^J=8~_um1z_i848yAR8FEj&`uf_``pUj2ekA?1kA zI^fGFeVFC7LA?Dr_V#Phyhk6_{Ms!3*;tnRdNjWxmQ8p)f#0`<t$RH_^jZ>HaIW<V zYK|RGZmUyi!b4lwhp#`zU*F8)-;D8D+KX&;ePtgP8~0`yv%j&0?<B|Yu*7rTJjF*s z!Mv3ic~u_Me)t9hu6k3o1bEWC%!x-`hO$I9=dI4Xu_s&k)_}qu*kP%T<sm@v-f-<L zT0}nWLA7x<QA31ioDIsE;7xM*?#%U`N;P)h#fAV^#qnzAOOSp9a%e3_dMY*_DkZUo z`Mw?2^W6nhNCf0A9R_MMa;=%*N~DXm*iJZ%VdyPv_x5J9v2UBk6e~PZhMY`x;bHw$ z-RDa&yZ(#&+yc`7<UZdRz%IUROg%flqnS>isDa`?=R2X9<UjvHp-ON6#W)x!{O7?e z_nk27k`8(+#trJCUylii6R>U1pg!y1&c_#&RF~olAgT3HU5bi3pAN$g>T$=Ds5i_Z zxXO?SQaVVoKH3kNV?x3?TGw5cKD0abZ3d~+{6y*+>GHhxR;b?p0J-*^rB6E|q*?Q1 z2jtGWFgC2D3;)z+HlyS;zh?p4`tI=9_-st(V(cJsQC+ZS?^K-r6F^X>Sj$|-@9HsT zX8hkzu!g>b9(5Q(EOdf(^gm9p`h)R*ae~z;o~_>X_Y<tGb32ObF_-PzwaT~)uW|l$ zf>n<zAayxKb%I;t2>qT)@8$@W|6ZHVt}+tSap|>lZ2bG#{DLyJ`TarsE<GE!(7=9v z|2@9Yf|IOy?gu`1xpr$mDAw?^*W=vE*yZEpbl}kNpJ!GH(ejzq!-#VkT8zm0AJ42N z%kgzaI<w->e9jW~tnT_at=eMt&VM|yifUtZV9njmF7D~eFY~1XD}LuqHe_!Szx*a! 
zyf>ELyp27*H<<q_gT21j$RCJg$M>f5%Ln7M>g%wTpF&gJkTo!_oR=zB?w>dzZTXYk zt?j`t{d0HseO?+}C`@U!$6lGiGWN&m0&wR@c2sqeF3eyL?(dp-7xN7(e=0fGuu?r3 zPJ>H6wE7OVsS`I~jw^>&*VnVX`}3^DpkiJ~8Gx$ee1kIUR;Fhzg>}1P$$uPLSwQ-y zSx1hZjnG}ybd;rx@<4p85VzY9L2n6Qsc?3YzJM*PDxS+gZ3#xyUp^7$s419~5z-lo zK$YJ3i+9j;_U4Dt`n7aABz~p5f%w}a?EHsA*z>CmhOgm4o0qPefm<^xW(!5P-~gNV zI2(I1ivOvAEj`(-`znY5+UZZ&3K5<-{|=Vkp4L&3gFs0&G`d*fyAdq89<Lpo^!4BU z7;;-66yYN!y$(FaZk~+i2NkfWQwheSq_{-$JyhM#jgHfUr8`qQvJ05TN>0TkTn2?& zaj+xCnTKljP1m(Sg{<fy6nzR+>HZjd|5S`&r^;O3!7OQ~ver|5`7Ko};&h~``(rRO zZEI8fxH4Q3%uNYU-^5)J!f`jxg`bJN?J}qM$?rr-cW{vb7OsA@==T$NLjzofD_z){ z(+S2vlq`q`>5Cllb(}Ngqr}9^4Zt0$__w;SgQpXtL`q(V*Vd1=?tvA7xi_Z@h2xyU zp?$NG`8MZ;coD(&5GoT5`?S3-le@6e=CHwMZBR5-zmyeI6kfd##yy|mP<nn!_z=QQ zyykvNW*T}FQGz`McOb6W*x}|O{QEZM{;AQJ{4j<L1~PnDb(R%E5nX~`Yr~YbyEogk z(`?NA={)}5kFvKvo!2#M(qG2ZiHTU74mpbjaZ(mu$!n1=rLdGU{Q`HahYq}q$OaCp z7`}ruk*zwD?DsY(j@dELvgs4q?la5ss}1sm>1<n<!0D?}uqcuf?RgkB+gygvU~bsj z*66FkW%><VJ%FHb^#BGIojAPsI12d{mYMu*1!THr0<(NpnB@+6ktJDVrmq9y6-2z! 
zgp&wZm!~C(#C_gkL^C7M-UPFV+Y3j<FX+2<%IzH9z=A%Xt^0O7RUXpgq$T57#pe-x zMl##}c~rorMRWv#m)<ru2<ZbhEMg}<PcRg&p(3%hP&Qo-!CN}Af_Z<@Z%8QE=_?iL zRR!&>g=n$Ml0$+vOYgo-a@V@gMKO!sqLHwAwY23;LM)}yg>h{47Xvc8t(KXdLP|=7 z%~OIP4ycxcH_D94g<p-rv_@sxt}rcH$edsF<I~?@UC$=-+J$V~*&cjgAGY?aIeg;+ zIw10*7Cu9Xk@Z8)<;eM=1?>FUaeV*PtoN6ZS>9kXr@=F9DWgwlY<S(}`UIK>y4=qL z@1<Q{rL-KWCsF)z#OJ}&3lFp-$V2iS&crWc@^@hQ#VGfO@{yh!+deF)Lyq*K@$16( z5s|nSfjoO}@lfjv_h|iW|G)-)l*}ki9)qnXu|3_3-TN|;mwK_D=Qf8puF=gd<@#y9 z*)b9ye#PKeb&Z`r7n&mYqmc*bRUpR{>>uzMFucDr0Ytj6iW>Emh9Fn<##jnakz@G} zhqA1%;%CHOfKI`2h@Y@YNW|UJ^ACqR&r7%RKuy(_`AVmOHE?t%U%;6qZbhS5h7dkL z1)_Yg+en|IdKDq(q}4NVrXq-6fN8_;tnRCj?8TtpJST+XBW;)9Mi(%X%i^Vyj$EaO zfXX(1AUMh04uhf6ACmz&!9+w(1Ab?bUq|t)*Rag5Pg+Oic8u9CN4s3LG<uyrqIUXH zB-fBbUiM=sZ?7+o3!$wny}xc2zom}|iC;^pFyh4t@}<OskK(wf4cRCQh;4L;vDW2+ z3nCS2>c+t=i%$Alj*X#GT2fp$*n{8nPnz8eb?&&hI8F%_();doadFy*vbIA{9G8B) z%1(aMk3ab>(_Dz_wW1u8{U0ta`r?kzf4R80_BS^ELJ~jbTUK!)&bp5VW2?tvj1A>G z9<F9(F#GJ2XD^4<{)RShCSD7&1S#L1*bl9fWpW6-<Qj>$bg>k2(&j~U%!&gex`7ai zkFj)!@^}ulsF9pX35S$Vt~EY@P1+eE2t8c!*S+PJQs^CAFXWJlO`%jkg91lSe$qbV z4$9dAcJ6$4-r>tM--N|k;sWSoC0Y5zl_gh7ei7eGRi>4off|uq-P!322IH;_SzuZL zXW=3AwNxYCGi#p~(g=aMUre@E$J33Xru`@>%M7h%bQzxUL8*4_uQR9~hXvJpCF(B7 z1iUokN7chkWy6Qi0CLk@@d;>%E54`vx4+V;j!*h!;foYkjPxZ`F)vGsN+LfDwx;+Z zZx(y=qK<b@V(+zd_CzdB6B5~P7Y)X|WT`pvZ+w&kT6}q*CkcF9871f@h-mJoqL;Ml zmH)y@g~=Bkq>wVEyA&FE&+<<kv|*OMvk(X0U}3W^Me(m%*u$4%c$<ZNd@0hmZ6$R; z&@st3k^OinnoqS*?fUBCCeu{yhRSrbq`i&Q#I)c3DovIG6I${^{#c<5Qye&AgHfj( zIQmHSY*$NU@*+4O{##y>z4#xfBqj^H*Am7@uVD9ELV1rBsCsB8Tngr-pWuIJ&u8zP zo|-_7byHt*F>u@~Dy&uEQ5CkR@R|xe`YHSoDoj=3L=~2(aIFeAsPG*X9#-Kw1zI^t zCA6t90N)R%UyKS*(AQt^+poemR9LOTwJP6w6>e1FTPi%P!m}!rRCrg0IB*E*uM{v) zg`-qBONHesd{Tv5RCrK@XH@u|3folZF+j;TNQFif_Eh0Of+(L8RKjEx7OQZH3LjSC zMiuT<;rl8)s=_lW{6U4+ROmiXsYrwh<5bumkjm$>RKj=_E>_`VD%`5V_f>d8h2N;~ znhO2YE(ud%PZg%BkiMiyzv(JmGDwpw6CPFx=Ts;Y@n@piS3^`-n4-Z)?&Bk?W4J8J z>fB@nRyQfIZ9Cx8(Vca5Hx)u@EFh9}@7DhLeNu&acy`8UxSfGYeOe}H<m@UY?jHQ( 
z5}ma$KnarxBmdMeG?)C_saN8m3gutE8ip|-|E~Hg@o-PbziKrcsD`uDFw9W-*P>JS zu?e7`l`B;Vp$dUBs$pzc<linoB>}cW@-Isblaq#iSACUuT>O%Muc_g#YPiKmi8rd@ zQZ*bchbQ66k(a`NwNw5Ts^QgYI7<z$QNu<xd{+&_J0eTerH0!)HM{Rz@8+Aq&EYI~ z7IQ{!F1H;2EyZKx7H~`OcQInhfE97`<=DZX#d9k-RnEI3U86Nv|CHY(FwWr~K%%)| zvmhfQ5-vxGiXd94oP81S75LAHtXIed%>&*Y881e-JEs&vILAHnq^6)IRio)tqtR%7 zj?DOPHJZ&8Ba7wS5UybFj~b1h?<Bha^8Tgb_2zB2ue;BF;L%4lHClJgiiruS^1qS5 zZHVN!f8*)jrqp^r@Kb*acM5ao!ukc58VZAwDUAkf^c~Xgsq54<8XunX>F1GLv(sH; z<PR^aIq0q#71+*Q+k@lC-xt<a6Q$JzhqTkndve^s#J@_esq@!#sVUNGIvLto=Jw{e zrFh0rK{ZX@n(*XKK^|OCu@@KA-;)dK7f{;HYUsytad>1_aBDnx&SN~EUz6*p39%9v zO{^X%99M^DzFPBBz`y@nSmWc)`Skao)|7VAyK{P!qVMP1QA(cy@ap1eFU4Et_vU=Y zdvx%)59GKP@z_uXL5&|@GuA_s*CCLYOpY5igyWuXt9i#m6BU1W8OOC~!H7>Vxx>Dq zpT?8tJT=eB;wF)6YI^u+qHDg>X@Wh7zuCR!vWF(D8{wr$+exm7r(E%Nkt`p_aZPwW zYOCq#sR^Tw8R4mkl7&Hep2Iw<gemZmDvVAfvVMF=HxHlAanta8*&hEl;j|0X2Ub-O zo@**k58(ND0&h_(P_NGJTxSTeb3eT-Aaue2ZWrdr1w8GEegocZ=3j|4f$m&je;pUN z!8c#-M^vgN!b=m^A;KMn9QXDdj_Y-+X1tdshMKn0OY=ZS{E~$n_Z=RH__CKKDyF@< zJr{G_5<KJ7>iv#A>f;DMa;v5%Mpw<ZKAND}iWu@c8N4`y1y8XjXTX>+^y}2zF4}oZ zIPPUUTh)9t;XYirC4dVr)^p+g{kiZBLCt}60r?ue2d6Lg#yIlg^y7U?NhCF|d28Zp ze)QJ#>8NhS!yNZM9_#IzcQ8e23U!)<j>xw~j=O{hk^l11^sIrh4oT?L$%E_E--qke zPgmz#>QmTWvDob#Hxkc&wPH1Y_-OKj+hZSp6)|{P5WD}?n(4lpuqb8Z_;5bU{~E$4 z--U|C!{4siqEklB9-XF}MpLuRR}(AGa9_>9?lkyleN<YhM=Sj~{rgHQ<w5JubN>B& zY7+f4p*6XF8n2Fu{Bo4ze#b*xzaOnx=%)$mXm3>`$8~PvxY4(3p7qm2^=_~H4bVP1 z#&HX7lN32FGzv1^;M?Ne>{+KRbuV<w=aVrGzVp+>SgB^la11}{!~VY1cP;wn{}ffz zmmGHok5$d@e-i0)=OBMP?II0L_T{?vFXp;xrgH&T{agH+ed@eQJqtbZIgZM0<hjKE z|AK$S2R!g71=rlyX?k`v{uK!?T;thvyJoPzCMuO0Pm1eEEB=8TJiFD(E(Wf1a@=QX zSYyBd?H|Ay-uExn75e4ZyymaT3=j0;0w?-lE&=xQuk$PQE!5ycKHg9=&+0WsSxm{e zb{U$)b76>)I}}DmXrd<<X6dg9>z|?t(<Ezr)uxqtqs#m}I6sU$zwt_=HC_F<u9i+* z*Wv((#h>fCJmhL{i#q&6e7KNeJQKZP#d>ie{eoIBElNWN>9}r|yZKzV;yYZo{<pbq znm@R|k|}bm#{NWBiNi2pd(%i!e#x?}rQym|gP*9Spl#w|S%v<;{lfp-FZ|!!FZ7WI z>Hq$Ip=NoY=2$?nX2jCv56-dV&0Vo#&b+x(Y6f@KeBDRn^5<CQkDvSC(v@>ZELc2u 
zLh0P1DO|Q@f@L}W&zZu(HYASWOCNk-!IHUqBZD-|J+PuwF>RIE0#mtkZ(x{askbJ3 z@1wCAe~ne}R2a-E%u->73R6|sUxmpkOi-awg&`{RQem5ilJ6B2wy5wM6`oU|mHS*J zG^_B03L8~;ScP>e{78khD*Ql&yHxm^3OA|nNflPAus#2^au_>GiyE<5g@r1dszO19 zsVYoXVUh}sD)dren^tK#J`5`_Y3-uNtN&S~xFi*}5TqULIhD|?!bTO=sc@GHcdD>j zh2<(NRAIgfQ&pI(!UPo>RT!ee06;6%$V(+~Dr|FCYI;?Lk_uZ?cus|lD%_<)Sp0Ge zt5rw^Q2rHaxKxFODiqu`!(@h3m0(n%mkRH?DH;5s!WI?Qs&FU48ly!MvB7PuwsI6g zPe+IU?;tg(gL_Ruf+j+Hb-bLcbHBnMrL?Od3M>s%U^V57X5{B75T0jwr&t`k_h5qN z9XD%Rpb{aeusx#(f)#qg4P}4-e+ps~#L*+y9Hs-;LQMdnyy*W7w5q=!)O^>f`TjG6 zmiy-jW&Bf3sd62^nr*!_i>w@0aTv+mPk65O!Ut#o$9Celf*$zSdH|p9jQ5hjvk`Dz z5XaF!E?5ER#M1))D}Vt87)PKJ9MT10;8}pVp&U0AIKjpUSOmZcp20)>S0f-aJvO!h zu}BQ~3!Wh0R{@7dA%pH3t{Cu7Jo%uv0S@R2T>(55a5Wy{Sqm6xRB$8Uz-Vk?@ehEc z0yg8J!U#SVqcjYzAP!z0q{YM|qy`T$Z34U_M`*aafW6|BOltvO>;?g&;Fkb@!}AhV z2r#TW$L#_h0XU%tu3rJ42zUoiGw{2Bq1acShzAegs6=!@9HavHz7>C{H6H-3NKz_c z0qlwHN&r(5U?m<iaDv;>%ZC%-g#!E~8JlL%uLAn_RSF0IJcws4cn$*&!S=fXJXwHu z@KCqj1-y?9_Abz^Tz`&Rgonhq81N@NR}morj=`?_F7SN72JD)RXh9?3*LX-;-vD-l zF>D5%jN<sg$cJ!1Z6+!Q+zW8_5Hte#e8Am9Iqogs9{~P16y;M!5&~1QFb>dXQvp}v zA#v3Kw%{Qif}Bag368)+l`aIV#B&b()qt<zX#u_q@I5>~P(Fb5cqHJ50iPKL%N_Wp zVOEZ#e_`b=0?mMj;pKDh1}4C;F-l<(fP3(eRB8duV-@id>^~0O1b%|Q;t_yf1*{m4 z!q6Cke@^7M#h|wVo}J8b>w&`ui54S&D_WL3mE&HYru5BQfNs;3mbnA2##4&~>jB@! zLz379I2Zf*bD+-$yn=@+M=+p3NlS3}O#FkKkdPVh2p*DFBjC1K@ZJL7326Nae<<P` zz-ub*Hyb(t57F}hZFnXEuLS%U4;5AixU^8gO95STIIa*psSj}6EIgF<Nx;@(oM-^Q z0@!D+(tuPz`r-|EtlVNCL*{eb2Vlwq`~*)k@H)WZ3l!W8*rek0rKO2*BS_#Ocpndk z(atS`wD8afUkliXhblqv1Gp0sz*7fkeh>o_IKkr&T9v;10fA#nP&-7N01Q~Fw2a^l zJT&Fn0JD~%LMVXXEIc$w3jq(|p~7gh_AMUjrXK)J%Q@~Fq^$rvh36{pX26*iIO>3x zT7gV2LxIS&0PvlaN(*)Ye)j(rb#5V1L}47Cl}amveW|tx>Y>PPAS7ia;bT$M7D5t1 zJw=ew8a~){4GJVzvIsOQ+s;_{QdwErg|f0MWrAJSh+3qJ-MnQt3+ngZGotYK_dnl! 
zb7tnund9)ygF9Ibi9dy1yI6Yh-Ea&itpfz$ailAb!XA`|?}O2OXeMzur^M!D!m$x1 zCpiJQ9EEk(LCguHovY!(Qailau(HBF5l3OuVV+%b#N0}I5(RMH5h{4Zvta&FJ3<BU zH_}4%7d9NTBPwpMqJQ#tRPp>@prMnfgWr*^jFP<LKW@DkkJ^qzzd<}&;v3hll5zOF znpKr}3_dz(*MD)yDSKcs%kyFvS~{Kezlh??O_a;dO)&2)f5GwM!dhl8J{xX9y5CXw z59vgmbNKU2Fmjv=3^mfz5rlW@7zyGnaP&pH9t7Z8&xc?je2E=M3c&UT$LYhrh1)JW z&V(5Z5qx=tdyjtwS6^lJ<3q6eIx`tB9=yeg;}1dSw*8wTj%c#+RJaLgq>Ets9WIjm za5g6SE|Y@BB=`&!<74m-is1jkarf*MFNM1vFb|0H1;uGa>hvjm{gByCJPv=N#I1zR zV-_MNqqr9-zZkA+=KiaZ+f1^=kcu0h*cqD#dp+L=b6#-2=}4R%qa%C<9NEUz;8WpB zq&{=tN2GP)3%vT)J`va8ig)%l<-FrjnfsoZ&CZ#y1Zn38ocF;_x-9q}b(7Np*L2tu z6@4GMI^Ke2!i>)hD_;Edg%=FGnEKVu8}U5S`cVfr_b}>Vl0ulzNUu{)FP(owcyWCn zlMBBAmZQW7!Kpteyb;fU1xRu6wC8K#@ShxvoK(0QX(8GJZ=e`)zIQp{c7ADReK#17 z%19FR?V*O5EH3lnqN?=D5milBj(Eb0iz;3#M^vF&UQ}&aUL5s1;lqil^eo$;bZpDO zN>Vv^s~6u5Rn}EpRM}5nZ1ueAr%C-bE-pbjIZ@?F<%_ED$%_^J(`=)HhRS%_AU1nm zrMyn-`5^eUK4rcy-xoGage2Z9hH&%w>8|FLhY=iT2Tu8%nv}tI;~9o)=wLgoj_p$h z+bwOh{e$hoPvq10K!5R0+8k$~owTT{zJGx)IXICQ`T!eROw7bhhjH8#H|XZNj-UHR zynJ%Rmo_ZL1dNkizAWNvT^!`KlMNwLXo^g+DKix&YHCcaX)ulEzEMXVrrZ2Vbme#X iA3|=Lo9<@1ixM5Lb2qplx6mze>+?%}`;!i&C;tabtURCq delta 28445 zcmeIbc~lfvw?155(9F<G4b9wOqX?)74k%(kF)0)kXH--aoKaC5W1K)660sy99u39} z8i#12Q4>uZVhlths6>q#=K+T|DQ%4;PB<j}dv;Z!dGCGi`hDM8-&)^4mzCqQ&p!Li z(?0v0>J;yDC|=`G(M8knZgTgcFUC|nzh(FLwk9{H_qc5m{C|EtXj`p}1Gjx7W6rjE zVEx=F+wK5w+1a*zCG#^?`8*YKW$ZsOcLJr!{g=PY9vqkVrUSQq^66m+3X+2xxCqXb zcj34z@NQLN%xlZMH7%Sup5yE}&T%R0rs)_PqT#wwaD|3jp9w71aP^4x;;%u&<!d<3 z^FGG~vuk{;wt?f;@b88PvIdP)hmn^d=m|&JAklyR896Se^^CmH=FuFtvI9a8lS_oV z&RX+fZR3!tFMNT(fBdCzTvh7=C6-(e6o_+!Kx?)Du~1B|KYX44Tdb7UGsfmm%jLKf zJA|Xya(R~SIL1e7mf8C|p>%?%-PnvV`>3J~eK}6LspGhe@-$PNP%h*M<@&1UVCMRB z+#X8Yrmwiladijk>V&dvlao+pHhB&aEO(5ewnc&lX+0IhKN2oxh=()EyFJ(KKanmT zmNs}HNwHQatulv*T3bBH9ZOBpr0deor`xL2V<4Q?={f7H4b*tDL0V%*5;8^JH@iW! 
zymwrWgqs;<W8)-WSB^^+>q@H-W`u}td9`%nZ<|dJA%imW2`8oX=xXvy?*u9YtJeB8 zpAT+^EH%%hNfpng?zTsD(#4;o7Jf1c;t5G&Hm#w^x&-lZZRs(833ktJEkdQgMQ_@R zN2TxFl@wFU_oFSJl_KAURGsj=M?#f&R&rFcND~jFi)T?oX^)=dk5STdP)d{><<5Hs zCTbv_)2*nMe*X)lFYjG1h^JC_Q^!k1`AYs2G3IX+L7Ik<7c;+sDit&T&cb_C@|W4Z zi)vEMnrd{GDwTqqmFz<QExTf5*QY!?#@0LO@{B;;E^QEk$E9|%GrRPTxeDgYKL=Xv zdW$#Y7Q7csLm}Igk}e)h7l$XNi!(va71G5UsiRUyrjAI>9?>)s1`VcR5SM^9`J;SD zYoTnODMxzG8Fejh<=UO?atu#wyu@*(Rl1TARP%cpOxF1s<=RD0NoJsFf>54e%95_i zy+&RUUh$mN$BTJ7wFr1$j$9a+Kx4&}gNE#gh|)<t%XISPIcGM`sV(2{5_{h%+M^V* z;<?hQLN}X!;wAR8Q)JI0=^C!|u-Pf$q)q?mBFF8f`~}Owr~@>PJ*l{cK|*=HsaRNe zhb9+h#mF#>zJ!B9S&srie3Ec7;jm4A`yzYQxf%cUMK;+v*wOI`iqQEPd)qmrC;_RW zRL>iccdg|9%w|hCNmVLHEAKWzn%@T#=#kB4t4T8zOhDc?{V+MgAdG}sX%!9Pa{W~$ z@`<N5+n$FIw&~j|LbW7Rs0dw9gf^4V16jxuLT!ceK_^HmQ<3^bk$MwSLb>}!F!xfG zYkF+qfHwWz3v9Scm!iHG<Wg5aIzy~M^iV`d6SJxP2b!`3@n~9Eh>M^>j5HEOr{vi5 zWh$9vIf60<HTkF;Gi~~2N}72}34VHFvmqAv2hL;Vc(HKgQ>(M>#cl>cyp6gKhB(HK zO&^Kg!-P-$jup6u6#XKN^`>D{7yy|-k}2=yWYbI3F5)>%q{2F(^rG1{-R`m={(b8> zb!)j^1HshYv_R54)HyZH1~2eSt1`qxv~u{OWJ37@gKn2@9L9!Ms`!i4h^0zi=#a1- zW>66li$o+uk^a>Pc}oqi%Da*LVaP*N62z<H(mJ&;2k3UW68}qu?;=<Z@CayD_?sHA zQVuXXjY}(QMuCEOT*}5^QD*C(k5OW|fP&aa>qF6WWIE2$HKMTPI7?E5BJ~%fPys4a ze+q6&%5sRO^1T!^q2x$~1Y1H7MWBjCQUSe`as*Ihs}3C9u3G6iHZ3gDZd5ScF1vB1 zRS|{%5=65pQz#pOro>Vpi1{W#5Nj}ZjI;oZI>0>N3k=4j!3Kj#Qy8R9ni5k1I5z$I zv+OPRup%2g=+*t`3aVkDb4Gcqn1@&cA~MP+@K$f}3>BcaQUH&1%a44jZdVTuMJT+P zQQkf_NyB|d3tz(Fbn#4@_@hn###t;~oszu~BAzI8g^)8y!E&1y%9omQu%D0XQDz!^ zNV_ds!$oCEXC61Ta-7o2>Joe%(^4q6DEq;4*2g2Ph?*Izd%518S{mK39x`c*TL@Ts zGGm@)hzG<odv5*3acSc5vV2pNP?lyg+Voe>$m2}jo}NBJJ#*zg9FNFo=Dc~CLis#X zCWb?vDbq~DPCO%Z!*+`%$3Q{HVFVRQ<19t5G*deIS0q9swK-C-dMHigtwcZ{(MWWp z2KsfJ<&|(H#!WTG11uj+G3q^<di7-tnyTo_;~8anrWDEHc~jF|qGD&z_9Rigw;~zp z{}j1J9-&3<PzDHDj*x_^bjx8IcKCGhukuWO+@ISp1%<~yXOyQBSN8wKdV97iia>85 zat)<Ui8rR|Rvbi{QY4beKocfrK$D2eD0im<pnb6)qI9HQpmFm}MrjRgC73HIn6y|# z>af{FNj})l$n)ZF+&EnrZ7+~ba(~yL;R?Nk>S|BVagVEQwnF~?0ipV^=dDyf=I+(r 
z{r51ehnydTrMY3OuUBM_eK2Kmj=D3G2C2LBxh!LVjP-kTPwT=6sq#fM)JR!kGEl%^ zBh<x;geq$oRGl8;K|#?0enwo>7YnI$NXt%pwe^TW{BJ4OGJ~`$l=*m@_$Q&PlXq-9 z=4QtMq)cUq)q?20CRW4IekGlf*BF0kbEsTAoBmU9uvQNUWidXXVW*Iwx)ug?N;9RH z_0j~A#ISOeMuxHpJ}vkWt=PLhq2c|)HJsT`EUrcNrCJ}TxrI(rN(_X{>@a!xV0OaC z*ZEE`_|8iR}q$$d)5i)MNn%<_Dvd*5$`F|bQVW5}r6bxyj`iv8w0Ds&0vS>fxw zk?kCz^l)LIGzHD6Ht^FRmZxV$U$(>yMVL{tCSajODLH9jTTnKlYLG7Ky%a*&jHv48 zPY|bp`XaWIHU`?U*@K$$31TB3ck5<pP7q~6{UA+yK?>p_tG9FtQqr&&%!Dd7%TKTS zQK#V!q{!_oY7;ZrD!=d|eWr$!xQ^5g+FP$s8LJ9S<(W>%t1P6kF9L6LkOS+~K*!yI z$k940;V=fl5Tq!S`Dz5q4{?H}+DWh-H9Mm@S3}tPE3`w%^o2~1a=|WLtj#Fb_ej=2 z`K%JE8?}~eVd<>1f0F+zopH`cULS=;&a9PwroxEs&0k?l{S6*lJdi`u`59e>a&4S9 z`^rC>Z%ATS`~w2Ku;bM!wGqS%mM6aReJs_U(*7joX$UY}J*I5A<@yjgg(EZCBZW`A zSgIj5dK5B5apMGA<Vwg<TzB2x83r8^xYdmU%X?Db++(cL(4Ft<#V#3w3_oE&Hx1vR zpl?k>fu(g_nJ%DBpm$fy-rRb$-Ye1^5{yPGVVqAju&jV&zKwxZ2K3<VyRs_*o%k>O zSwLV+h+DGUm)_`dK|Djf`*s)7->h!ZSbsJ;(4=|B-V79-YEr1U2NT&J2Ygwppf>Ik zFp1RR^-#|y26g96UD?*4Mf}d5EIxR;Z$M9MIpU!VtX~tbX<))w1-2_Vgddo~&IG^a zxC7ZpYkiq1q|mz~(aoWNs|oJ#VVgr%yU+1?!Ey9qLqgkh>mZ<|8g1riOXJ_>NFf(G z*Ab!mi58md@%9?day?9#d@?KD&bJn7*Azbt051@EAOsN$qxSRmtTwcD@-ZkiYF5KR zT8`OD9~DN+$^bGD&-E7TXxu0|*l{TVRlzh6&!Tm7=`0~EmCp-cv%<o7OAV_Gi*x@A z+qs(F5-)Z->`?pYmz-VLRV}r)Tf?ZQTwaA|t`l|Be0xi-6P6e(#86T-((wW8xG{n+ zN?`Ymz4;PP)-Ajr?>vC52#@D~>CX;_NAvUJ*~9R`?r&fdQnPc=v8;#$<{udn8~lQc z+^mx<_4g9)mZcg}EsYv1C!b@n7XPwCEsjd9Td|zT1dp$~(-vAI_f1(p_DN($axOM2 zHRT2zw<w`j(C=I<&opMO<r!aiS@ML|TY*y+4VL55?@(>AWP_@Bz@nSCDEb`dOEh*! 
z0#-37l)4fN#vpXW!UUmHhzFSFLTU0Ku*$S?=oYEPKpKB<I7@>E5K%Y;aUYWwgKT>V zrH{<7k`5VyKzSW^#rkDVkle|iR5L3;5M9Svy1bEGs8a^8oiwUHJJq~jM7uWF5E3q8 zFDredTTz9SeIx^_)HHe4yRlbWw3*Zw2ltHf9L-*AkysPzq|U!%&n<JHCCFF$4hx&Q zx_t#wG*|<Bi$A7|r%BbX!paFLKj<oEyNt3HdmNF3v<$1Lc)+TsgN?CA!eNXfI=e{Y z*wq%{EI%qPJ{z+Vy&yJ9Td5t;9}cOAQ!8CmnnG(iZnOFbVy`HwtBYj3%Z^25>&N_o zb|>lfICUsn+HZfbS6Vjn`}MfZX0}hbXiXGywfYmJPr08w&Stl4TjYaMql^)j<a{3u z$75w{*$nGFb_P!R-q^JAYq7^j6R=V%qN%Fr-w_bN;N_&{FGMdOs<dG^+EyBcl+bf7 zdk~S6LLg$(+rCGQ%CyC8(6iz&u82H0z2SLls47evsPqT+OvyZ0WkI*#q$^;_s=t(t z(qMOSM2j2-GegwQfhv(vrkx8AM13|ubnlOUrJrw6Q;_<H0Y(vWDB+p-$LcROTINNW zQn1B>KM1TkX)^w;pmi=AO0?x*+^7SJBJ<jCc04+``DGN;rXTe^b&9qYjX;`6mb`%s z#F>{)#L}MK%qyl%(PD_oYG(!=t};Z((Q1){jvl|8)M`O2F)c>DQ2&9HQn~82RKaSa zy++YRZD%<T?Pf3Xm6$3N`7H>n(Jlhh*`BMa(ar;fIYkkMYLsk8;C!eZq<RW$`bqn- z&TH>LP-(*3!$?)P>3f2Y!5~&q-?WFvl6(k4T%n61_XFf?`n>NrZZGA9QEk&Z>}SET zdpi68^@lTuBK96ok{Ocd6c70rD~LbY^lQNud)cS%rn04rzogq8mliPBRxKS&NKY!- z(YckIhMyF=Lumbh#&g3yIuhE+)i@;$&tdo5v>lu;Gqp1FsPyzVn+*ey4s9;Fz4yGW zeXwy4l|^XK(9l2oQp6J?Ue*|h^pQnq@{!2S-`JeES3F0JqVnoW&tuuMX;a<U;kbce z8zX5=u)*43OiKPZQN!uV?@+Yd$7s%}QLIbbsO;G$Ibz9nIX!)QC<KD&`zIE-85nFC zVn{P^(nb6Gk5UX4oYlk=2s(=(s}@?Ib)K!})*HxjPZr`!Z-r8!YGKbp2De+Vs<wV9 zOE5lA%}}TpH7zJ{w<i*RQ%-sp{OK0cGtO#){AWsNBPj_s{rz1u6+)J`V1DuOA@eZz z1k%&xGoktAa4vIi@5#o-H*2@}DI~~zQd$oyW2)FF{wbcfJh5R3h{QT$$ukvTXk?ho z*noB~r^0$fvwiVF0dL3|ZE!-j%EuHeikJe@h3%{%K9OIwokb_K?|l9n8en3hO<x0w z(h+R>ogh&&&r(~Vo8_4eGx(^KA}`$KR0biTPP$gl-cM-nzxHd4IDcitkr@bT&MSz_ zrdRkG;so&)+U7lIOsV4ebXw{D#H{>=`Lvs0_eCSe7PNb9{0G>lXo+@)KJ`7!KvsLf zQsqfW9gvSEbN>;(t|{DC4p(*0^U{!i$P~Shfx6Tc0nif1KjBzx`{rzL`v`v6RyL=7 zuXLPx&=z~ix}0zrwxl@PHEjCrJ5ic4ZM7S^6O*K+P5&M^T^wcxq9U8Yl`Xodd(a5- zk)mKH^XL}O7yr$=b!!oj1OX_!*uQN0w{|G?YGp~j=*<>&8`aE;G20{IqEJ5HGqt>z z1CHef(#l4-Wt5GuA9sJVP@cT%F|+SJI%L~JYGLZ{0%~jV4>8?dwFvp{V2iqEYM!!F z-MjE@M=-CnR=mq_fDapxX5@bv#tPHc@?$TufFAAn1y5OKkG7q9p+U^uXcG=<u(@KB zE7v+v4p{Eugrr)GqTZM2aH@@fpiuS}1~me;zE9ZR9<PR)5oU>$$B^aO--VGi+V?P? 
zXrZv_7jI{wuZD)^zhDf;RI{G8JbPZ~D_|it)@U=G*sxbS@eBWCD_)J}LmFB2t7&aE zZc`@auapy=eZ=E7{T!9~qs;6ROoJKAA}77i+P$`i|K5S!do9GH%19kLa0>nj+4qPA z3J3VrXW3O@F<+R)CZ?N;bbr92Y;(qOReYOjlAcsYH9SYEz{kXBu-QF&vlaxeE5*d( z9%)@4ahyh1dKaGkP%AhnDGv>_y>z!P)82qNL+tKUroAE)Ze`l@M4W{NI`CVYtqi5N zJk`$E3%N&hyV80&d5%jhD-kB7GvA&@O-I(gXLw7jPPQ8B6|`yo1Ipit3g?~!S}EL| zvsAc{z7JSYPd{JB2I5~b(~(K)gCHs35&NuXd;Nw$v__%t9$E$^XDDkm+IIrk{hmYl zk_W6$uMhcO@3T9-#zDy+kP$7+{Dt~djG`92kxe}+Vw=UFK5X{0bi#37>h}+cL5hSY zNq71Oztu17$Bc*~XN4so`1@kCkPvfRdU#sS0Ovi~`>W!`!IG0MD0CJDm1}qZ3BWiD zwCu?Nhq69D1T0Y`0qF}8E}xsV-+>Y?*Ul&LPOmkXy#-Mxt4$V1lvgX2o@P^e8~Dkm z*;~Cs`Cg~l#@?y?&Ijyv?>PReKUi>|w*0n-tZ$!qqjjTlSi>9_-DhE}(_-mv)5mNA z>7MtHebOg_KVQv$?9+zdWMz-~bmI>`V4eGR_RRbg-S55xT|*0E{6<#NH@xF0c(FOy z^d3kTy@GYY5BDPt=mMMGn|KyjQp@bDj*{^tj5!x+IRE)6yVQ4JQPt1XO1%WCh*tVa z@ntE#^@?wT;(Jf=6)3*Nif^{!n<M)~x#{JECMt|IN|QNOp$zg&?g#$&@)$lYh2S{E zwq*u-ow`NER%g3|WcZm~$c%M;gmyLuqQT4mU|#*2@%v7&4*g=B=58SU<D);>#D2!G zxzra_^Js8U?NC_aF)H0?2<lsJV4M2&;NLpI?)7_(_qfCo`*+e@WHb7A3K{)5Ee}cS z5gW0{;QR_WvQ84Zp4Ik`<VUS#_xg8i(b-DPb5weB1O*?JW~t}`wG@7=^2Xt}E)Qe9 z2NW0WCv`_WZ#F=Y*#TNw@QPe0C-Ep$7-F;^iCE?O)|79#yKNmt*8-Fjo69ixPbI_u zlrks}IfXDRxTgVD*g@>5n7r)-^c5!|>FSKTf+cwfWoHgU#6Fa^m#>nWj!prGQ(>jE z={tC_oPnur=;B4AE1|`zMmv=}=$5p_k+~pCFX~E9Q!k0`J2AUpar%Uv80hEpw(ME_ z3HyCu*I3~b5;!NNksZCv(FTj1dly-twGslqN%n_+!m<Xn9sccFD!%)N0BILG1C`#b zus$Q!dax>bl-@M!_K><IdE)2*Z(;Kq?Fg*#R6Hf%v3w19Tsk0|Eiq~*t!6g|74c=; z*x126_(|8<mch+KZlhUo^3;_SB4&9XJLx<aXjFGga_Dt-Z?K_=4Ew43eC=p$*4APZ z5X7HS#nVzXw%>%qq`dheDh$=FNl?Bq99P!9CE5qtrcFQiV{l>7{SApmBL&(<MEUxy z7zv<j^e<0eWXUv2T_o5m>3(iUYh<RkRSA~HF0)=E&RTGghU<3r!H|%wcIfZ_l)z1> zpLGzc!mcA90)LVgVhuzAQeiYzInxvlCW$@+a;dvLjilxMB=s4_JhS{elv5)AwP>^d z$y7=^mRRZ))98=bh^!P|H-N3q3h?fNnhWAmQwk4+Qnj;&9m$FfI)seT;%5InXi{{s zQjQQy^7E_AYiLA3_;aqw@+OGqF*-cKz%2cUWen}k&;N+MJG6!8PG=NW%b|<KpUT|d zc4oCh+XNL7)AB<WEK-k^Kr9?HeiYr)oSFBqB))|+>pRTP_a3%%p?on##(=c@rA&X> ziM=r_&AXBUEHzF^r{*oTI*1`}IkDrzVmw_wl$*JSkb4i~GS7**3~$ZHe!w~pZ^rAK 
z*r?%&0g<bysuDWNX6{Y%4VND{qYVClZ6Dr-FI~m14Nq`NailqNeH9DNex;QbV>RI} z^q*8q9af2BQg$t^7V=$H6i(_UI=g&`WX0qQjcjptVPGzX*Q^Lye)V9dm9Li2erZV| zy&9SSh@q{PW>FF3_-*K@AKgiN`LP`1m`(rA`xN72HO7yR*+(OCTAljuF*0Ao;QuW~ z;$s#)vUkoPe+@T(tdM&j+kH#aMzC~g;LMpg8#Fvh#WBNE>U#zn6&k_8R)gb<$@0v1 zURwx1cYrXYlrzHMl~h#s-xTH_Qp5rISaH^YogF#UbFm+V4)7$CPpvd(6iXR3&Dj}D z_^yv)yGQl#KCY)TQ89*3MlLR)wX3yEm*eMh{E%#V&=zS~o1AX$Nm>opQ}n%#rMK}A zTaYu1Uw?yL$T8|>(4JTBRx6b0Z2A!^nDgi%y@GEb1Dvm7gwng_Aeg>1W<6$uP5;3P zD$Y;Rv?}zEr5Wmcz*DdtD?E*r_TF_0>(xjbnX4s9Q|MsRPhP<`k8a`lIR)7rMx|?U zYKUFVu8)r3SKna{V<P!}*IBDE;rxs%EOSiDj#eg5O@G=VCVvn0r^6MfKeDYl;gQun zy)64L&gvj#A_#UBcQX>U=|?iQeM~IB<{G;)rZwNVjCtgC_Ak*QPn>G>($ve81lo*~ zo?T+2a>r;UvmbKbbDsc(H>3Pj4aV{4Y&JHpr7@mV;KT@B>02ahg0IYAT_6R4fRG`Z zZOu#UeBq)jRG1*00F_o|w6=srCF+nPCi35fA<|xOYw&6Y8;+~=h3t{@?tGCM$FAnT z+smrQHd_{D^xSvBW^1x+37}bARIIL;m~!MFa4}NDZj9~8ixsTRxFj}f+-&bjSSH0@ z1|hfMQT3uie`(D5W#a?+SI@KH@u{%~zN22Lg06qg>>+)v_;TPYbj6%18!I=htZlX& z%gV;L<v$+G_Ki2SihYJf=vo+=)`iK3vjjW0AFPh3@WG2vnp2&OX8(kf;xX7ed8s;= zxlf4Vn+@L4aY7pn3(RdDa1PoLHMBwc0pne5Xd3$Lmxx*RpFIcBw^@MwQX=x3z32WW zR7|zmkB?_MQ_E(V;K<Fsl$yO8d}W4P#YA@?b+p{wGM)`LbxM8!1rl<F?WEhFsO6*4 z@-1;u*hXpkQ((Vn`a`luZu$Wr+0P~;b8hR-CQWp4FMb*3ewr<u*pXkF!}d<h;}<<* z#z_&p$61y(>0{UUQ&943Wr>pWDRy^KfN2plOr2#V^jF-lWQdKpd>dbBv!z$tw3c7P zdWlP)7)P?`$wAJ?p&cUHrjxAC<VpQ^-=!%nU&#bXZ4jiIC(~q-Htt*X7_DM&A>pKK zRlbUC0BKjeNAb?}5=)RSS<|xPQWOt!$xqOXWL@)(_;*ykUj~9x?JfwmA9|GMG^Q<1 zi|dqH{FkP%byM6W8F8IcJJ*^+a5xND?9#+mB0ND<iql!GL3O?_FTTaj=9}D0SD>No zu0y^NRdbrM4EcR9CDgR>O?fc6z(Q3jQ{y_#zhoYbefEpxHk&Gbd5S;8A5Li*|M@Un z)XdLvLHH1an{`%4kQpG;7B>@Y2QreTNV-LCP~og-Mmev{sX2yJSe|d{zH#lQ+h3w7 zfDgO2kJS1&tDV{mDc_kI?2n2g{^GQ@oh(o73cZ%tp$=5kuxUoVY$!{e);#_V3)XwF z9&z<HJ(;$#cJ4Wbp>))C>j<=!qyyrJdZ@>LEVJ2qDw)4O?IWb$u^?=MONm@P51W4F zBBFGq@1eg?SzSpv2-Ly}(UdZ4xJPMmPJ%gUuM2|4bsBWeoI;NUmh?a$QmHf?3hKP1 z$N7JFJE`^EUl1E{1*auBE|OjHF0Q~S2|ue_6K~eL%x!uQN)|OeKKIBMsKT?qQ*~*V zV7ne_)0ZzKT>{Suq@p?+W3q{MxlCeoako;TELs|;6xxedtzV-J-bc`_STM2B1WJ?P 
z!;npT8TIF*?7Qiy$$sD@T!e|`?$M@HZ|z5V2^UKb>lS|mDk*u~?=&&q1)U-OmLWb) zul^g?JIUjJXGt@n4cibA@fXYSX+m0u!2b4R5}Q9GyxrgzsY;V&Idls`$4^E-9YDD# z9Fg0+6x(-%otzQn{uB>s)Yay#Bg}DTqQ}?ZG%cv9M_BKf{vlgpklWD=@dq5^D*mK} z>XG?JA?evnH>vOlTR1bUNPUzy&*YgpkS>y;W=V+{h-je8gXJDCsYH>bJ6m5Yu7&n! zF(2uI!jTu)*LPzeaW-xLlN|TBy8+fAt}r*FJo02c){5N5bW4}#Tw%-}GD1ls|Ai%^ z%>Fr4qe27g-eGFUI;f2ca}73JV_0%c*i~s~&Gp1TI2AFMpAvY>J@(2^$N5!N%=4Iu zbuouE`}#BNj?ZgswjMaDmGxLd*EO+{6%4geX4kL<=9Ya@m*N#m=?0XJ_SDr_g`wm= zKLRB;sm2^dS8~lVa;vb~7$AtN)>KP9c7i9`-jl1cdfSy8rcrBkK?_3Up~xPZThg3+ z)Ge{t94obZnO!xs3bP;URXCcj;@Q^10KT4QhYN4JY^YPRE{|j1&FaCAtYhv!2eQ!D zLo~^(%j-4#prg#=<Y=d^>6SxI(u4jxrp_Lx;q9-mtEYm49I(gFKv^j)Rk#1SkoyRA z-EGrrXEXOXDNYFpv+2KkosB$gaB`JBAHB{NoDR0%6^k?R$ERPn=Tbp<&JD0{gXcV~ z)!ab)A5*~THMb4_cOIKJH^%-;ymDci=SFKR?CLM3ByR-M`wePWvB!2&V0BPnr8%F- zZJkjzXuCMilt%Xl6X^ON^;>!fi`MzNgtea+%kNsv=ADUPrSrZC_^vOe$s;5XLl+GN zWKm2>T~a4~$EKZ)wqN$pX4|o7zJtd7G`4EB*G7NGRxF4dxD`79DbR(UQl%V7lbLSz z`s4i!4yJUG2c7!fOq8{e<eY5!88a}}`GhLu=Zx~MMm%dBovmSWMwee@9z`&jfNL>w zQscfClldJaU3z%-%%tSkuX5bX$#|HRA>K=`{tHvM{k*H}?V><UJN8*oc!y;#B1s!T zA{}M2u0?4vS%ai^6|OY#rQoA)mlYe>hfCYK`^ZdrLtgVOyRftwU-=QUEuHN8sciQz zY#~kgmKBy~2Lv6)>>+~)9z@uE`=V(o_OjdMYx&|@_V(K*zrbl|q1;APb~bJ@<*Pu+ z?+mMbJKX!mRGPQu&Sc_F!NmW5BzyLD2dAs6FspyGo?#)rK@PB3huo}Y!`|s?NFyZ! 
zChIcooEoHwnEMd;b~W4nPNcz+0?QnUJ6nEa^9zjy{S|iioxs5M)ymM;Amxm*$ma0k zPO(Ni@iGgqNboD(h0`ELaMdBq`fcyaGCL2kNfk!^!c?}jqK)YjC>Y9P4YOB<_@n#) zrdEmxpk!f2#POzRD!W?IN>`7iNIp;vg@!-#0t<TASo9@$@|6%-6xNeK(gAZQ*3Okg z)9_fGPYqFsU3xw!+;s3{vXbfs(K>w<>C;3{wcH59_^`V`k=%~-o_sN@+o7}oCs;Jh zZdfuFpcXXpFtX_C0?#!L8b{#`DBJt4p-*cH#Bg!|Dm%`th01Umn-o%d9>ZybAU+es z`+~e9L@oi#@*6JvZCSXFd7#^$k?3T(rZIO!M%@N8<FdfkpMKMn6z){P-jCXdH5go9 zAb|2JeIX!uf5Rp$Yt7H8WGk2T<CB^(>#{(;?_}o4n(@O9?ucbaG(}&WLxEsbe1wsY zer>Z4a}xW&#lj%e>efyKc1eaw2seyJrO!jr2<6#!nk9B6RfWHmzoyws>Y0#>>4Xbg zWt99#y^isK<_Nj=Efkw1`Pg?<%17+tq~!7fjk&du`@3@O7KGzc9>rA#(ae{F$R&hT ztS|&GMG};E4zwY`5;+fEbae|@m=2xAFfy=9EBySff2j;5Z{#g`DU#Z1=JZ|>KXEuK zd+#E@bQl{~DR%to7p1n(_9C_N<T{LK#SD{x<JZ|k+WoZ%x9JCtr%XdOKw2nEcKDt} ztsKH%KFSuZjPvM)-96QIG@%ObSd;d%?^m|tr;lR~R}S)u9tZJU33cx$pjviE+3@$H zqwbBBN0}=f+>0t`6L}yH-S5GtS49ZGK>l(p`{Mmr=ZmFa!X8)30#`Nnc9I{$<M6mc zVQ(RJ8{YaU>$NJR`{_@ypv|ILjJvcjIW>$wo8Ap%ns_cf;i64HA@3!92Q*11HU*x_ zW1CmSgpNcy>5orkl!au2Qtsd6L0o5voPUBnSoI!%eL7qE!FznqEN1#J&9HI{=|UD) zn=}<vlS=V)Gb{e^Hos&K%l~L#vw~hQpW4yV8!olm+$8S;s)jS5jV0UX?8Zm!`I6Br z@Z$k~ZAK&KvL5yFqTX4^rN@`9Kd~bp2gSaM<qvDh+|TK_i+{w#TOZg{IWdDL<;ZQ@ zONnr>jv|EGMp6V!`glm=*?GN9Z%<3;#lp{|tDiBu)sb!!UPA(MCd2AjyVaw0q7_B# zQA_0wkVbyS)~{~S@+YiqSRVYxS>~S(Ec6s|2i1zMQnUV~M%v5=MOU(bo|IZ!cRX0_ zqVf4A9%-WQ>zV>K#51o?0{E$?S?niW`B@uT{wGF$`7QSLC&B!zTWrfGE&Q+Hy#?av zO77#7*c7j>ExY|m41d7H{MYW_<A<}lwXyp3O*>wDo4y{$1zKf4W$x?3dG9n9x2_{U zWj&j?&g3y*!^<AIx}JToZc0E2205Ky>Fg@n|4^Lg<n*_wlWwK4@byK^@OcD3<3rZv z^Vxhw7W?M&jNloE(QGkL`RCz5C#*wTY8>mJq(G%hdy9o`PT=nkVFNZt@@YfaoXxND z&$HMMo4fF{YL{&Z=F{F}p<CMVld{;rEnV9UgVhKJyYgNQ3p-uDEH5)R7SY7(`;u`I z4Dk@U$w^4~Ny=Qv_HT`4e{S*Soh;1Zi<P{``)th@iG2PbcHxWHf(O8CBbSX<TCBid z*4U|Ki!5QuTci2EYS_fBL7l4x(!en5@f|}>8lJ`BV+fjMURc_(mHo3o7mATTR=3`; zJN_hP6(Rqwyyrsp_tpvQvZvFIaoYyk6&;#KBdM3C^qoQ&q@4<3lr}3wl(beM;-vRw z!c}@#VWdcl6+)1T6e3far4U)t6otr<@)Tl%G+ZGPrTz+0Af?YMl5?LerN}I?nC4GA zg<ec_ltNb!9jwqWo59|#&})dcSLjVdH_oL5MOc@ixGVH-qU#m9is;h{P3xglRI7MS 
zkY}GlpCfv^Le~?$UZEwTS1NP^(MuJ&k?3NDCd<2IR%qB!@lZ#hVcO@UY=x#vIw@13 zi(qc%q%_47MRcM<#}OT?(1}DF6*`4zUxgNkc2ej}qMy%^YhOTggF??H`ldn`6Mash z7ZY7KM{a+yf;?49z)GTbD)a=RH!1WcqCZsV?L=27^lqX{6uOG&*$Q1tbiP8LAUa2( z*AP8Wq0ecG;1Lv0Jq2`8Xo=`Jg>E3)q|l8-8x)$9Z^=!eorvZX+LP!9c-Db&U?5sj zXd}^=6grCN6QGMAMDGHmgGxXm(YqCzEcVhCg%*fjqtKZ|FIVU+q8BT44$<=zdIHe} z3SB^So<h$iI!mF8K@X8K6whJ`NKxntqT4C-N}{6_dJWOR3cZPFPleu2w7o*_Cc1Gp zTk=gvzh6Ga^_EsTIa{7&xb|{Cs&J|ljwd+P3TLar$pYuG!ue3);E?NnSK%y~%^JTk z7R^(9qm^(Y!e=O)z6vJ_oCyjiS>eDg={{27#3-CZa0V)z0ELqRPEUp7sBi>uQWVa^ z*X0^a0H;rp!n&cb3cyNJI6o_#+2C|mIN!@0c~JqgmBRd7VbbaXrcq(8P?)sLfa#+! zi-}1GaeTL>z8Z#woB8e5dBng#*Q6cnTEP-_$MAoyU^%<5vFV>}bZUBHzawtX7n-7r zI6eyEVp?IyLT+#S*@~CeUE!k#F!Gx=SUPd5iwoBT0qinu$>&X>^a+sreDapLFHoRJ zfnop(M#qdP#IZ&Gjjdn#WC&jinN5ZvsRL67rLydAUnzPBo059YTm;gZ0t>~%&k##= z{~d5^DGCSHp9;$gu+D@3g5p0#{(NcR8mfqF&2%CSdIlz=AXcoQiit-ua<v`Dz(6+C z-t2=54o~^XcD=n|_k)nc0{1mHWGjgc0(_5{9DVg>)BpY|%i0(0k@qRoT<pkBc#~(q z3z&WV-A7|X`?(CC3y9i&quI56CVz0Or=;YMWh;sa(vFZ`@FffWZhT_uCNu{cNk*l& zWDUN&C+t_y&7f4_v}Du2+XJO7)2@R04-j{<Z@=qcJdQ4+TZIAO+Vnjjk|EZ^pCtR+ z_h4T8oAZN`Sf~9F{MVb=i2Z@W`;;d3;Vwe8%}yu}#=|FQZQIi1WOw6XP`Zd4r`56_ z;q)EWG9{47fmi5ACh0z5-|z1fBAcR`G~)5^p@CfFMs>C6Tc)v)jX^~z1MtzH+@FJC ztHNEAcI}(Uik=XqnS@kP$`Jp|5N}BhACS!sk9TLv3U8yXl#T@$UrEptZTibp1i1@! 
zC5*bST<?tlx$NN-MjG233ZU9lc9$jUN7B<!@fg}G6sg+ujl?H`*A;<HBgs$U4s!Up zrtl$3_=FeXzi*)UyPLv0f`FYH5&wk%YR(9hC|nW8=d(?6x-_E7w7Vf?^+e^j5rJEM z?WZ8{&~76|hgoY(M$%O?4xT96&2CgE_i_pm8*KX1-B{<UboX#s^0>L3O~0TUtElpA z;iPhWa0^CpPzt&#!BYWxD(!xLJr%ioH+Hltf^U(}?p3w*NQA|#Y5E@&S?g+nzb>-b z)xA7ch%ZCJMfP*`Y+fg_UO$9KWa5I$8ZK%DV3~V2^7%+-m%Nylc}v*aKSYiDd@82r zB%$<Vp`&zYGU#l<^0fZ-?kLG*Xg;*tqKFp-UD`SMyN?Wd8qq0Lt|W?HIIWZR*~eJ+ zF+`3^hu4yx4<nT2S^cc<h_eeEsp}FJd>|+$9GS~=BKQ?q>ponDlY*V~7s;t9z)lI+ zUdF~8XdhUOaP>y6%9@<6+lFgad?qSwXwSYp5Y3l;$u1vg?>1%|QiYB!YAY79(1Wd7 zwnDV<M9D#p?T6w^Y_V)uxK-^yorh1vZomk?Y%?<->|He32W2@bwbPT9t1CHz?F@mf zHgmYdw69Nnl;-GEtSi|8wzRxb9_x5rmW}nbN*?E0J4kjK>f2Lvt4|l*bh_mSaDqfN zD6#p71<l$yrR0&Wgmz_{{&`o93!i6#P2w^pQg#oP@?!+Q_H&l^W4!&R2AB(1{n$J* z76cAPx{|MGX4no`gOzu*^x+j*nj}|}{MV1kVMmhH8wXuUA9)txy|IATn?|a{kDsyB zLjinzGRr>Hk)Qq<TX86yKhuTnJ~WblGoBf0!ui~ImQ)kPf3k^<t%(dc*%@td?$#ZY z6!w>lTUHQACwCKDSMw%+aU*j-9KpYm#QN6m<DcNj1mB3xtnJ}r{M}AWb0mg8w}G`j z65!vXlYC@wwZqfXw4}d_c3ZI;afxi?k@kT<;*g@=7+e5}`?uXsXeauzBm3e=aK8hI zO4IFlj<du(lYn>Y>(M>CA$n^z1nCVRKGk#gk*Aq`wcJ4|X4ll>`COo`BpxDCFFA`~ z>iL{SEbwR$e|0qLcr-PhLx9Bym2_cK8E(BaNe;oh`!6QR?Db@l<bA{0p`*qQGhW1! 
z-U5k)6r1Fta5NX$B&RD}DgCA3nPRd@wyO)|T^6#;I)8oXI^;4RhED<4-m0Z)9h+Ad zSk%pk_km=pdX#3sHFylnXWxdQ{b_Ak(vez)%xi1fQy0-%j)A3@zQB<ScNT8~ltxvG z^fA&xk$j{nF9M{eP`$B>l2HOf1RscIj~aQPuJq)B40$(TpUrH}|NISGSlog&&J5+F zXX65(d$S&+(V$Ba_1}#DbHPxzcc&s);{#d6F<qaZyZ`e7pv_M7R=IxZBDw%L^lujc zn;`gqxd15b&h8%j=LNvQ1uwGfw?MuC=ze^(u>$uWmTNjIJ_kM`<W}Q(K~;Kgm0(x< zs_vT-=k0Fn;PFN`QwcRA-k9Nq5Bq+Wo?SlCn!jwp?LbF<pOL-#^CAB9B3uoOx1Wh^ zaK{@b^EA9=1+E2*{2vRM=CluA(nh`n_-Y|d;Vy0DOMoS^C%6q=0`MJfvJX$cp($eS zbA#=-VnrhRzb1_dbL08>!DRgB<F>J{eu?GzZLI#6W`XZM#`yjZw*MztEaZ%lPw2v4 zIg`XU9|8;ig}`XX|7_il{(=>pZOPlV?D*iUlg2%@@I@0HGqdgIOzy$Hs5x0;yd-TQ zyLK)tCMyQJL7FL%nAm{Mx0GipS@B&E+X~a2YQ-O42`j!IOFiE)eyLfRHhDnR^tud_ z3zqy@Ny#^$J<Oc?Z<c)NJxm)vR(0N<U*p3Loexj^Yi3hS$y-TGPzI_S6%ZpeSb7Q( znwuwIijJDe!Y+jC8s?JBdF4vW=N;DfLf4j-J5iP=f8!g_J}<<r-B6&`N$w|Bpg_%q zT>AL$(h7EDeVDuMg^={&4EAV!7~gR+^WPBK>D!GoMq3$XVP7bZ9F3T07jt`<5v4T< zqC2Z7@-UKv5j*#sQ2k6Rm270y8$y`4!8K->ET+ck{m(cl{~o6;#fge>RORtJ=eYC~ zN+{)l_x#7DyYT!Yk1L-t-%nff_zi`$PgD5%O7`ZbgF1c}LQO%>F7dFdE|Vf;n+oW& z@7b8rDaw?_&YeCH1Exx_9M%Y(+y|Dio*O6fZTqvYH_qnonOK`m$^5}-Z0e?TeyoY@ z*%Zk;^kem#1b$y8paXOJDbT@B+WicdMv2AI8vepqmbdV=W`Fd-Qjj>MX(`xEmhD)n z1sLj5urr?(FZE+L7mnnwC$qFg&G_$<cT8NAp>b=23nQ9={%0^Vsx942*QOKWeyL6O zc7P3c^ND`C5>F7iZ#?i@2^G?4WX@WYOtPPe1)Z%ZImojDHoSDPy>lZ<^;>DNhW~33 zoA{QAA3u$~^Ok{s7|eo_J2^$;t_GhP1heeq5U0OzJA+49!E9l2xIMj$W$hMUvH$Y{ z2*0wme9o&o_LjZR^ZmN9jj16XQsI~vHQ~pgCT-a23oaf9uz1WICFDNAI5}Rzc3lYN z@l@~Zh2~*3*o3hiJ%pkd?1+0>Kj|<QOLcF%GMWWnZ0`|7u_p?-=MlR_2^)1Wl3zWP zy?HUp=%>W~3-i;>jxNUiB>$JuV@I=N7fn$P;LxPFlGBv1wKqn2+a&iK3leTA<o=Ak zFyF!gFGcA0L5PHxzYvzfb6CGiZF6`C)7=X$TzZM^ew9x{mU#(oTw%NNzRuDs*cK}k zKi(x~BzW#!qobBP0G;UZxuBEHp4!q9-1g8BLGoD$4G+&MdWmkoLN>q?>!F@qzZAvK zS;X8f2lLN;S@h+Y&~?SK#n(|hbn6=^Cg}S&5EGx*K%ekg#AaUZU1TJNWiCn)j`K=W zrk`R(z*gs)Rz5+KM&IvS?QGrE%zwwW)1&lmVIIljw=E$0s5|ybH^uJ*zZIWu9g~G# zIfNjgECDYZhGD7h6%*Vb>|Q6f-YcyYTMLh^?3F7qeBeTsf5m8&EkalpVF;}<&lGa$ zDgMo3oT<v}Pgu0o#ca!!@%&E<nEvYcu<Q{pHEt;ztM~zHo*)iNp<jxT?ssJ$T#fhk 
zFU3@GfM5+5O|AAUn_ayc=XMz4<)b37VGqe>LG{!5dT;i2eGs4E#WvT6`UI6AmJ4L$ zXIYSXZ#cVDpTjR+$huw&<ntG@Vb_d&e{VML+9>zGhP@cAONKG&S}=bzi8)*k^=z0x zD&b$Ly5%)OQv2HzSi9>{`q{H+t-zU4IZc-f&eG*+Z2I;1uG1k-A3>@Y_u78&ql43_ zCDNpgpCDeDHh&B%k-LVv^o=*9WvgW^zd(92lwG^tKDjG)4ha1=o>HnZ<5Y}Av%MsH z9~ByvcfkRQ&`DM3V@2q|csAfhJfH9}d+SCVKVm%FbE75SyfeFbBQ5C8IJ!}CqW1fO zA|vU!))`3o)Hv4tX1`Gx3R71i{Eg6D{9J;m7C%=|>(sjpKXIcSjp%8euritp>C4R} zAK7gCc_aoM!2!SRz+)ex7D1|mD0~vad($ri92d%c9VEy3?EcM&mfw5GqtOGqm9%R} zQ-9KOeLj)Kq76T8g2;7=tn0smU1to2QGxs$64}^)#qiG)*|LAVpSSQDy{=N8sb;2? zzlL8z!Ufi3aOI2EWI?<v#NcdmQTKK=3TV@Baz)L*rzafcLvS?2FBahApLJl;V^>U$ zw$d=<q`vLy3sU4*Rb>F%^lN0qx;olZY?u=(l|&Z=SuaX@2w4)fg|3oy5WDxQq3AEn zX89htmOkZw44-w-8zXQHBZ$ACaBtQth0~P`#HtL&7g#lARrEtQS`>sJoACImU^^~Y z@Y@ZM(xO3B9{Nf^sst&Y@@61W{Bi*VbR`~01&QNnHqvTYZwr&Ft}A(ru1l!8MIdXT z8KlF7Tzj2u`0b}|<B+$ywcH<Iv+bs{oYh6t{*aG^HMGAq*|Ve@)kpwc>9R*Y@ZE&> zk+xqp%e<{G+SLyq+}TmldQYUgP3R7E;~yYBLZWK=7LWpI$CRY<3lIB7V-Gz^44As^ zVHm>pwTA}Kt$0eNZX8TSPDm^f`)&Pj^bIK_Zc;i#z&WOHY=w^O-0d*m`DhCi?>xTe z!A%f8l!qmaRZ5|4a55DK$geDaScD?Q$whKTE|y*f<8GS`*(GBAeDp(#*<JD;z-~&Z zvF~)G!Zy9WT&VbRnqFTHx+<@{WKxhGUSa)i#q$$Su+m$mR*%g{_CLJ7d}q*qdwsd| zGCOlCmcM<R+21xXH>)8!suI6aq|6^dgLOpmNkA!-3DjCBAFPqDn`@;lvsfQ%Fh3@f z<y+(O&qdJpX@~csLMdrjV2w8Yzo5)wdJY~+O?FUF4U(dJ51E0lYTT$L@swE~gyXv0 zA-2fbAZ?Lz<bO->FIhZO1}U$oZsW!&Q|jE2dEPOE7||LPh!lq(zmf&w@C!zJkR`rL zp3YK#_kj@yzdvT}poke+@g050yQ@@RQzRorl1Fp@>FlRF0m&Vw|Bn*E{|KUDy)4%r zOcmUb6tHG@BSOv<{HGMqwVd=_0ekIki0dMB4?ZS6BBgd<)9!{FXZ)ua7+PBg&>FGQ zo^8LYch8iD-+PfTJ9jtJPy*FTy`#iwvP!w5bdhecl;1<*HcxvY{IZ<VVQ380XvTlm zU+kSCeaOmx59CKrV{iW+5c}#s;{KP4VrjxJH2fnRzq_}#sMtu=<G8m~T%+Pv6>C+z zpyKZ;+J`Igj4F0fvA>EFR4h?(m5N(c+^=8}cT8pcs$!#x^O`FeOj0p6Lh&c6n5JU3 zie?qxRB@$>TU4x7@mCd}%2>p49?g`5qg3poVow!ksJKYQWh!n`@f#Hns(3=h3o8Dq zVxx)<CM7>#Lgdeds*Khurm2{z;usZYsklhR<tnaKai@xPDxOyHriu+J@{vjbf`ODj z*Fj}us5nN&c`7bdajlB`R6L^M1r;R~8&z~syDn74)+(l`IIOt_3O4>0s|?wLe|#e} z7@UzIU5s1`m346s1q(K-{!-w(;XdwJw-rWkI1o|7nyWuo$<Zp<8?FH#aW&W}6|V2D 
zk&`P<+cE6^RQIA3PVtlaLVxu(B{f>w>91JzV_%T};#5EOeEILh--^7K>Mv0J-m2fI z`eD74|Em5{<dsUJ?K4$IfGWVL{y^1V@mz@zr26rbjdBIZY({@4o+<uN#mg;L{b8!V z>ZuZLRQ;K%KV0@>N6Bea1ZJxODXRYs)o)b&b5uX4`k$-*#>Yy0o9aLJnA;V^Ynr?6 zTIjBMO=B7~b@Z70u}1SWWA2Qxqs?QD1v94Qj-5HvSU7Xy)bYj^k@+)QRGM^}?8+*g zCZO_No#wY)D?K%Cyw^6<s>{E(V!haB-M@CJUh$f?@@sF+%&1FvbwXWx5>4_DuHv?m z^Hb0sXqF~_<vbsakvHd5e(a;^*_mR|5Xfu6ar5CmS7UhvYq{WVhAO`TeU5J&$2G~B zqVRK~a22;JBYZWjGn@EcvG^fWxT{U^eH}R8JSWb#og?QP?^)0!*S;0hTDT@Tjn0A7 z<vDV?cEEVog31#<8bc9DQ(e+qW9Y#hR8##Ov|XE*elMLpVj?zTHsx>w;dkMha&UIB z=Um!3b1wZIUdZf5jfUXG%@d?ch-T&7G+vy>=*AiIyebFiHL2cB37&!V@=+qkZLwB< zs@H@TnR8GmElP(!jU&%FYCe#gkeZQPKpST+)ciFUnzxe+ZMTC9)okZHGjYd|!*K<O z=Oq`&k#mWES)hm%j(Y>n-^$5<0bx#@XFE^MbB5aqmnx?M#~g=DijQJwxR-52*)2}v zxOP2WM)h*wIfr;YuBrI<1&(u0=eT$^X(fAy7ui!g??w1QxZbMFdC*)>j+>(THFA-a zI9@*XoKHJ^fb0{mlN*#0$ms=TAFe5Z)u7M7y-2{@p7U<!&Ur6$&8!^guQ9!7-M)P} zZUkCqv9)rEzb1kP=2m~rnEa;VheEGf2DinEZ1Cq5;KBu%-MD}}S1zC(=>GZ!pA%kH zBu^!|FbvzqaC~>F%6|fXNBAGBevRIV)0^Qi==2zL`gpIZ$_j%f_(h5QM{wLAIPW`^ z`wf~fe|rsQKL+hCk5p%jHz!WlA0xHFpo#Q1xNwF%H_kB7l{26;hW@@MysJD5Dt!Yq zar&k@JfDVN(=Fh*xH~9d<t@D?yz<jvjUUx@f3PO-Mb6a=IPN}N+?~o#12oZHyu2JZ zFQn=f?_T9v;F8mn-VT^$&cfZJ^zf(g#YCgYHV~5yb)pGZ=^3cW9NHAa|84XqTqa@= zYD_Misa*hP`ZKV>Q15rbx5~S~E5|caLxZP3pQ#KI7tUqI%P~3vrqW`#8FDTD2-GyA z3it(SIuP~=()h`{sK$k-u@$HB;5m<YmpC~|j7Pbk_@~kwq%rz673`-?9QQLEjpUy< zRem1ykLDQrIhq6R+dGwKf;6G6o8p|=%yA!X;kff^^#elDBg<UtoliJcX$$Og>@xW{ zlqe!t6X8UY#u$WLivu*iDNQL{+rx1_d({p>&i`XWL*_ADQ$ttg2WX;Rbl0F79OU58 zU0VY+fz%2|0yWJe0^(e`uy&tvVVVy)&jyeBms40-1Wwpj{vND}evwYsiySu!&gpJt zbciN2kxEHJdBi1-D}ifv7asiCgWhnN<GQPUjUK}ni-i6wj{^4`x6H~D0UASPF}@cF z!Z)qXG-kb>IPZZjn0ml?k1DqU*PP08AsS-@wM(2m7OO}{&fnaQ^KX~H`D@}h*QSL^ zQyCqqG3M#Guo;F1zj}FCX9mQ(b4K&)SR-d~#({;Lv7MPSY6`f3djAvpDs_p&Sye-G zrW=+?{0d;xp95)?4Ojw=FL_4rD*nW8TmP@J{lCig|DVdXzdTC+yRu!mBwTZ(a$JPw zoO2J&0P_s|m%eLCGYzxfRgkF3)OZ)?D2j5^C@;?0<sGG&=B#PIYf)Q`ho&e>)dNi` z8dVHd(V(KQik>PusmQ6=_(;(wB^A%9Sf^r@iu+XDtzZ$iQ)O&daf^zZR9vs(8WlfO 
zaixmORjg2Pv5K=*%vUi_#isPLWj{_)1J!^G6;o79R540LgNmLix~a&iC_PkK;GBy1 zQ7?JhXlnWsD!*36DndHN?N%AvRotZF8Wk&4ELO2V#Vi$5RE$&6prWUWPAYOLHa<`a z+5jw~#ZFQg^(vlI@q~)CDpskuTg6Q(R;XC4Vu6a30i};0&XF-w#S|5z9&p{{0E5cl zRFv*35iY5CTE!|ASE^V{SZQ3O31$s^Uu{We4EA@!gZ`fom7YEt`^xMNnqcjkZgMoA z_!N6(vYc>IFw;fB3Q8CC$V^rc)>ir0l;*JOhYp&pc17oSC7@Qtri8jVD100XIC64M zzzWI{m_OkE6D;D2)W79w!Ubx=|8J2BMFl1N=iip!ln(jNh||n}4)`DVl^-Q(CKnm7 z`I<dB?jqby{21RQApJJzK=4lh_u%lI26`W`1@4}lKt};H;8ubr><W8y6zCKnJ?q(y zU$rE3he}ldz9%pcj^yKjec^KO6MvaOIK#IJ#<>!nIG_d2gr93#54;BF3;HH7GDP7= z0S~Km9q>91?uG!!0Oy4%84$)BmCAGh28Szg3A@8lA<}>wWj<!!CJ^UkkA};OK<P{v zB2+ry2XM8Z*8>m1od#VCv^B?J7c_^%@J6@?pf>@3gsZ?WS=0g_;Q(wxi#7sBwp1#U z0}P8+3Sk7EDT+Z;Lg*YYE|%jafF`^SS8qVgfFp4dUJ8B=FbLy<x+xeKiXJ}*+6Y_) zcLG0M@gb13XnXu{j6HBJ&aF|P=K<e_quN&hFF<?D0AD^tpKpi$hUXG+Pe=3_=zTz+ zPRJN^FmQZlWDI%&@O?N+=tH1u5^9fvx&fo%sP<;yayXJ9+^N!pPvNMGJ-gsQ+6Dc8 z4vD6~lLc1~ItMr!?k4C0;B+_%v>A8=juNc{j!RZ*JOTIsj#`WG<|`a`2l9j;r{I55 zf>v1rj7vrL;a61&H*|*|{7v19IPOgxi!<PXk&N38M;X+=%5g&lrKz)lJK(6)JAo~6 z?41YsMBoTGlFtF^d*Z+WzJU-<?harT9K|KP-wWdgG7o@-8Q|k%P;NGe-o5d^_`tNu z1eU>3gr&f>D!mVAQ)#XbE^Xj~A>R%-1&%T+0EYEdXd`f2Ci)&S640Zcl4cj+`vcJ0 z;C~1#x{80O1`;r6pdu6u{CzNL384ny)FF@oT>!kV(hq=dWGVc4Kzp3NY2FiVg{uX< z9oTZ1(u%Rb*>F@b!o1<o^Zl^%7JzsF7l(mEI5%7A%jLj1BhWSA&jVJ$Q7s8S9f`!i zr#G?<qcG_3qq>A1Im!U^1kQw`%*?=d;i!_!fi{)TjYjv4M*puwLKW~_f!hSS9ylcz zwE<lWoRo*!fX)ZLIabMdG4PjhxS|37958x<T1#NxX&8jyX96b|aNKi>3tR>#7XlbG zU6Bti0&y2E7NfTT*kT4gpadNSoC-%ZC;)0_D$VNz+&LdhGGqvQE>IH60LGv)6TpuJ z4lKqL1WhQxQO5QQv3kO70>2KZv7rB{>Fhz2!O;|23KU9|rY3YPg=z%88?ZYZ6(|i@ z3P*`92Ch+Q!h*MybO_fhR@Mf>X-kxat^hbr#6E&$dxD7hpS@IB!wHweQ6Cf5sWf3x zImQPfc)rbX{ouNQ9teDn$4uFv36H&p0So#Bkgrt6K^*Yu`zQ#>0GF*oSAt#+{PP2j zI~RcU{{e^@A9C{l@~8rMa5bhPYDxIjI!rUrJAuaa${J2czw1W)SRg%yrdC-4q~F1d z1DykO+6YS=Xe019xD?RbCXV|WP5`|dc(DloWWaL?_ymrcH1;#B`J17afDem3w*!tc z*a_Uf1&a&lD&Sc-l0OIZ_(EwpPoQBNS`RXWAHm6m15Vqn&;`Kla1^(Q2+NnsASwa6 ze5KHC!2CTJJV=PJ*Iq28SUxj=&Avs=K%0Pta5<o717AC&3`_xd8IHQL9(bSzhknQ$ 
z1p3!1g)jhLuSNe;DQ1)B2>J;^6+mqr$L#~{2^<7R>vk4!wMwr6o`s{DoddS~Nhw4u z@DLm=M76+(^H^>X*980-jt2N?;DQV2ZVde53z#Yo;GsegzJC#Yj{y3up1RBEGth+e z|C}lYO}HM8b~(bH^`ODe06N}8_kwl;THr`iS_zE%6^j@6gnz?b0?qx#ac$sM7C|Tu zL;-m~6Mm-ByMbO3786A91+Ke=oelJQV8Ct2qY%NsA#fbx;xdPuLwwL=$0sX&8e|B` zHg3kqCnTFU(S${0+$JGHvMUo!NQPjd3CUhdG~oo5CY$YIl_n(1E6EU&Rh4MMdn!$q zRqs0rpOCDakja;cbTxnsk7PKc$b{d*QAr8Ojz@e#va}IR=yX@13CRXVe8R3OO?I%6 z3cYJ|qNb&0my@8Gt|?lEJ&#A@+N1IC*RULmE;^>kZ+H*ei_=Sg)yG(`v@iWp>k%IL z(tqMG{GKoUvoAm%X*Z=;e+7Qz)8yxctGk}2YaBD1HDJafVE5XpwY6(ctd-U_tc_cj zhy@3`JNTQ{>|Rr8T&0QPxwy(ct2C!PQZU{SP_TCP+Tyi~E8Bjc@rkNOyvDWMI;VA> u>kR9R>!Q}ht;<}OwJvAfgmne$X0Iz=w|HH}x|Qp8P5(gi4*zbP<NpEm9Ieg( diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/util.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/util.py index 9d4bfd3b..01324eae 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/util.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/util.py @@ -703,7 +703,7 @@ class ExportEntry(object): ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.+])+) \s*=\s*(?P<callable>(\w+)([:\.]\w+)*) - \s*(\[\s*(?P<flags>\w+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])? + \s*(\[\s*(?P<flags>[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])? 
''', re.VERBOSE) def get_export_entry(specification): @@ -804,11 +804,15 @@ def ensure_slash(s): def parse_credentials(netloc): username = password = None if '@' in netloc: - prefix, netloc = netloc.split('@', 1) + prefix, netloc = netloc.rsplit('@', 1) if ':' not in prefix: username = prefix else: username, password = prefix.split(':', 1) + if username: + username = unquote(username) + if password: + password = unquote(password) return username, password, netloc @@ -1434,7 +1438,8 @@ if ssl: ca_certs=self.ca_certs) else: # pragma: no cover context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) - context.options |= ssl.OP_NO_SSLv2 + if hasattr(ssl, 'OP_NO_SSLv2'): + context.options |= ssl.OP_NO_SSLv2 if self.cert_file: context.load_cert_chain(self.cert_file, self.key_file) kwargs = {} diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/w32.exe b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/w32.exe index 732215a9d34ccb7b417d637a7646d9b843ecafa8..e6439e9e45897365d5ac6a85a46864c158a225fd 100644 GIT binary patch delta 19564 zcmd^mZ9tSo_xH7zAPX*n2*{g&h>D1~@9g`o3W_GSiY}0fnnLSpO1f)mDbz|yEBP{Z zXKAHrYNdwZd#0c!SXNZtW*SOmIff;cDZb=BzqxkB?)(4$@O*hbJ=WPdbLPyMnKNh3 z%$&KX*dJI?7r4?OYFlN@E`KqsMD@#5^#j<l?Y3HpeC9Kssw0KetJVqWXX>Fy4R8LT zmLgUCGDYnW=wJHz@A}hwLOOQFG&_^Y{ntNh4#$lW1#(aK`+FiQieo=5953o33gNgz z$ZTK~jvFc`DWXjN^>@iwQD0tigDeuI1WJQZPBw~?qRXyvoJC;t|1@#j6w#_)aimQY ztU3r{IDp9u@H6<6WbkFqoj%n$6)YJbq9T_Gs3VztKhf{RhTr>S2_M(>U{{bgh<u`~ zwOjbdM60CY)d7J9!><<38#SuQ!g<;@I{OUfD(hUal{cMgdoD_B-l$v@ZSoa3<Lvvp zaGZm)hjSv%zJUx5h&S2n{~=<*KSfOZhX`N6e~BoAh(wW?ObJL(-ST?v-cB56XXCWe zEAbpxU3m{wD=qPQz3$^N4v{yCV?E8gr}&&sf|gH`!T?3@d&33RyNtE={{O-n5JGka z#PhOl<g<W57F&URKRQ;Ob5G&5+v5jw)~dz#sus6V2P7&^>Knq=)Z6R8F@{Rfu!Aq~ zy6(|e&>xnrjUn2=_(wo%XIZ<CY!85P9in_%9>sC_o;Io#x}2<UaQ5)D4LH^*r`-|w zT2b}l(|^8Gl|wy$v@SV}*$A-FLt$ilV6rsOC&z{{x^B=!NI?oYXD}X5o1#vCE9{jz z`}gQomFudNmXdpcG3A0J+9=^T_wfu*-la}?Ee@`K9`(BL|2_8wj;@1M0zJaISHSwW 
zD^eI^V|?9VuGPZbI%~7i&S<05(`A1G2=3#lrFyX|uxe6U>o`vi4ZMfKg}kE9!CBdO zNBRT`vI<Ke%+rp}HN@8ml0N>MX+O0(JIUMHwbT=SfQc;CcXI~i>W|I)y|q(SP8*YQ zECCgB+KMI@pFB3+bJcxSVsm7AU1G(}Dpx!Gg>2}YmXVCA)j2n;m?tZ36q>!^5p+JH z8yj$>{=REObxynWh_sd5=-fY=M{nH6CHfPt)2*EpH;1u?O1lt6P~XsTf_+!sbV<BX z*`zcmD@Dw48w0q(oZ@)Z($urad2+=0wgQE>pr9%{HNhKYAV-3t6P2h~S?83vpG^%f za|J5uEUnOmU4y<^X)5_MsMmx-0lx?9b7U9Ws<Ts-1y;Ixw9v&WXKI>9Wq}aKP45{i z3a8T>P-sIy>H?wKC#Y6b<oL^1Xs6%$3fBmQ+sGVoba{<{dwh%{dr72)YsgNWh!u#1 zYlUb?jbWlQtOW&_mx2PX6MgVTMWRgK;4)&!zzPnqbv34XO)7;i7(2#VU}uiS8<oaH zEKO|z)ry)i_9iq~K<n?YsSNkVf#k_fP0im_hNbO|N(DREprJx?O&phzfxJESD$~a| zs8Upf!f@X@J%y<q1EqS5)~_twN4ZytD!30nxEq-mJRoiys^rspVaZk2Vaes&%Rm}K zhm(!LQE@+`Z$(8={R5rn+jk3%#E_=osQA79V%OzTx0&<Ya>j=3>+BF)=^NpzLwb}i zeNbDhs4K1N>^{L4&_VaS-uzo<?N=ct-s|8BpSxVvb1;WxWu+T#3xh3n>r-5PQN?Xa z$q!poXHWi(jq7=o`s5s!K$QC!?{457VRk!8^XX^7g4v;~Nu};*5>uQp3awuPR_roC z!eBwZ1br+8UtiYIAH)0uj1fe)c4frR$?lLoyflcM4@oQ6V7{0NT#|;$)9qfb67$lU zT3yDsc4NH74+L~UpL~*Cm#g+LPTz7r%zT^8fjN_6j!NA|mQQhttM+^kp4QNYN|ri6 zbW`5*3d<-?R=?4Lw6sbxr2Ir+T~|0q+6;o{tfH>+Jaa*3g#l%PR#fd~Zbfmk$n%;0 z?7XO#x>ei>bxJDkggAo+>l^1@Yvn!HQSBzH)+SWD1$SOlR8U=c1CGLaqBA$(Pg}l2 zY`^k1vqTNL$$XvSy#CLyS3>IS3BNP-AG<}mN>a<mboDDtjPaIg6J3{uiRx|71s5jE zN}my=JNU^yWhOtU5Yp)u(~tg`iwaeG*%v&hk}Fj4@wbKg8~6u*b>&%%)M~4Gqnsf$ zA4bGHUZs}}YP-<CK*&(kX$4OEStgm8)U}(?ZP<kT0>Mn^@E~R-shG|A!k$xIc@-;K z*fy-R7u-EOmyc%kMvcRADRq}8rnm+;!Y5P@Pj;kO%V{VNF9r1o3N7&4W)+%&FBEL3 zI$*NZFQdBh5=;)F3L&ccW0vtWR9pE{kJw$umsXy1{Waj|SM8oQkI?PvT$F?hC10bY z{;T$?$|g4Lf{6&F<^NIaY4iv-DTs^_DxUX=?9|w)npJi1)lQk%MmNKXA6if3H8!LF z{tLUKV5wfg7QK6zatrLaD7D%0=?}t^lIGLC==Ul59+ZB{pbusK+Qt8Q?8@0_Y_<}+ zw9UfNw~}Cj-m1ly6m_0SV*N!ZA5JflW`W+ofsv!<63K6&y?Wol7F(4gMrkPhJCT*f zptSWQcK=eX-jz&gB9V2C%#Y|L%tq@*Y<S`5%^k486ISJ1YRF-`p_ih2*e@<oBicBI zZHvwVOVKfp7Y@(nafz|fbf+FP;fOA-lF~tmWNp_}enKKS+*R4*6lTO5H4C;5R}f`C zge>MMmjs9P*I!RzavWkBCLkWKjqd5gX5B{TBehYAG1%w?0Usv(>U%RvYH#vnnAvi7 z0O~_*2s-7B`d&SlE22u!>=kImZpRPZhye&*WgN~yFXn<$lg@Gbr=aZcbaFK;J$;p5 
zh{VA;c;-G@AV*O*>6jzgZe=Q$_Qc4<^ygm060Q=Jk@?{nQH7}-rwwxcroTJa<jwM) z@tmbOsbpWcrvFk%es1xll}P4wgVsE3+`PBRopULm^_Pa6mNf!H%UTlCO_lUFR9eJT zDjX#ihjUIbn!3!W$LNp*GOJsD$ifVG3$PqbCnvl0jdKdBbj8?cBDCXa^X+$5`b8WG zjz~<jgWJ=F<%L;^(dOK9cBjoaO)xFd({UsxBAG`|pNQzor>V(`h(!LyO!7g*u+S7R z`3>lkSi*Oo8*xy@abf$~s&m?`Vf%ut^gR`Mp?guNCbom|d<?nTy(;)ve<&!ChNh7v zk*S3X&_7RmrPtNhAt}}drp68CoT2$NHd&Y}*SoDjbS&%#hVqDzcp4@bw?^jEb~b35 z1zH0qfyF86CQLeZ>ft|fEwVurLH0%Uj+6IzU~6Y!@mDg~*}#RxcSc2SM2%JEqvM`N zoS>`n+V!8gdeA-KYTz*M9DNU{hr6;9xHeBbubbdW=$dG<D7ue4k5xRbKjutm;7*;w zV(Dh3ufcT}ffU@pH8*2r)QLTL?Vhuh=drAe(d4V>*rX4XY{kSt#oh9Se!Pcm9y8!+ zYn9NzXcF8bG5Q%eu9Lnc<Gvb>l~$eCMmrN@kFosAoyn>m($GiX75x+NdnEayM_*Ap z`Kw16FENrQVjdH}WPp0Cw3?iZi5IOUKgU=?8bCrb^kR21C^jvqj2SBzNs41r`#AhX z&v)+-+6Snjjt1h`h(#Pj>m_7stcw3zLN3Hk7ClQ$ajBv2MRc&|MUXjh38BwDq@+fW zwQ)P+PvAJ}@8=A5GHvu;DZvET3TP{t8Xqlsmn?{%&nI>xZSmto-;<mKrD!Wzl<=r@ zCF|eIII3Xx$PvSFVRwk413~bMYr+-}ajj|0+UEq5+X=n9R-#z&IlJM7ipZQtlEaRI z&0nJgD0%adn6PyZX(mSd<dMm}w3(<;<ZkymQ@nYX76f^6T9(DMcB{^5vF6hR6q1}# z1H&$2+w@C&9w>QkROZ;XJ7`xt%xG34?{y_WYhG8R{_gyAk7j7T%|6<W0j+|I#N@l% zaXhCp!06VbxF*rbz^XL3Q^XW7Mk31VDOLDO`_cZaG^RDNvTk8d#mUmDuB>`EjR3iU zgD>Rh9_-E}a70r4Uq1Ph9=co4Fv&JIxIZPvo(bV!VqX>INZcjiT#=174<j>rN(XO; zA;VY}X`$?sFcByr?3McJ!QNuHHHp23&BSYH;Xh_V1A<=@=gGOAJw-D}d(ZTcg&+$v zu#IFSW<-5ElH)eTgbn8C^%2aiEDWYoN028I<NBl{U*u_TO|#M@hUy#U$9X!Jx(y4A zu5P$M;t-dYOSUKW?%KrKe;$k65k#}Pk><o6T_RXc*gp0WZ=y8o`CN{xE)zL;8$H^_ zG%TqKQh2eM^`M8kuzoTZL3aW5V3)hp8lR7wot`75Nqr3AC_9GBK|ujMg_Vd#*25<L zv0)Y9#vkNaEP<1XqIr<vYxz2u$(RxH&`?H7DXJ2CuD2c{o00~O=#~u&_q5aP7`=@K zC^&5E-(!&2D4~Kt_m|$*2+yztxPUHJ`uH_;xHN$)a+gXt=H3LQx3NK=C$Y&I(Pc6= zdGJ7)a0eFCMR2HgFB{ICR<>DzyU-qf9o!~#s5)mY_RD3YE;&-Pl>9q6ul&5=&Rt?h z0?lW;8y$rujkP3pgwr7cp$$&13fIA1a$$zVj(Ga(Z{YMsRWu>IaU2^QeR?$JF_O*W zhk1T3F+I+>=zvH3gkW0B2o4c_iS<l>!ijC5e1pKW4Vt)_b6n)?9JVj7v9*&Uq-c!Q zW@o`2T6V?jt;%W9x-PjoTWMeL(>(mNW_oh2dc>aKs=V*5V`u_163j{o_2hgHcK6~- z9OIWT{tW#1&mUN~w0AJ3lukgOuaM-F`s}rA9A)S|`j!f65!<-efM~z;6+3UN1nM?2 
z1`c`=QI7H0g9EraZ&VD*pwi`}Pw&k5F07zv)8@WHC@9epV5?HqG5!3JM`rhS##cc? zg;C4|IRoiq;P$E*Go9W_{_dR~wG5{6+-M0`0Wqy1?i0y~A+0Bd)R_1QDDmV-p3C(W zVgX~7)+M9m^<++JW>+mH7jx|+Z6NQb_Uan)AEerIsh^30u3|iN{5Z0`?*Q=);5KUi zm0axGL(F~6G9!K^_mPPdGU>mPp8baN&*YQReyS)bGogHhm=x!Q6C>1s*i%hEi|B81 zy`P1rxkQoHJAE_MQCYW<4a(KstE{2zzhJI~d{shCYrH3*=0UeB(sYs~JZ8eP_!6Xb zBPY^SanV?*MecW6aBAfoqi_-jg6Ih?OD9qNcWK#7IdCBGO%E;@D25K$1i@y*u>>XV zyIyBdKJA6LRuO_-lNFy&poSlaw}0=*<zOrJxjwHdH-g>B%ZNICZd4Qu0`)uZZ|^ji zgA&EZC?8BdPFIBZPYm`7J^3?zb;OQ~IM<Jsc!r5PeW{RW=o=Tws{@{jNcWR%0$E4n zE|R#6r4ef`__!aCmtG(>89Dq0Be|7P7{))!aR;#1ah{=K8u=(G9+=5*X(lfZoSrfn zmcg!aij#`36<+TzI2t1q;NZsEhnd(l|K;T*GBa1SnM}$|O{~GFY=^mk!CZmIROxkg zcP|MLIYSG)-xSb~TF6_OIbAk@MQ|I|>*QKy|Fnu`OiB?B+ttdySVe83Gr-f(`lKU< zUW9GJ@9OWm!ssMo8I;=3H>Iu~1@u=8vxuf)kMMcSs!5kxyHuH4g9_-E7l~(3cGOJ9 z;Z?&bnX1vG*j{#>TpE-fQ8J3-Xd<c}vxiyOqS{81r83cJGFGY#DFZh<A+NknR!Zke zUdPSFzjqi(SD6gQWQ#10-`-3n%Cf^7sfELu_k^AT(kZl&)X4UOV}?A@wvEa0M(sy( zG=Remz^h6PWYc^I6i*utvGHD2V|QWV1xg1)9m}35KDIF#{A`zxwt7`fK2FjskIg&| z26w0+?aF)%10$<pOX319u=Z3e7~i!7I&By>QA1J{YEdJZpePY#lX^w+$oWRha~tjs zH=Q~(%3ATNPIdIe+XQDH<G>PZ#}17+P*f9ZZ*+jSs&i|wjcU$=s<?^3)G0DR85?@@ z9P+Hc=*C$xMww9FDo}=HKn~n$voELg<-YXg48B~wFK6-PYJItEU+x`WF4vcP)t8%S z)Iifr><f(G2&G@#LFz8K$9b06VR3QjSJY{A7;7l(Ij<FibHXNy&JmHy#*5C8B2~Yl zf9Qyv=Yy#dXJJL%<YP9Eip9z8Q=r5G?-~0zmTo%EbaTd@D`bQzJ7c#989{4j+(yR1 zc~o`eJ5{f~Y=Sp2wvB9}8Uk72p&>97LCKqZGr6B6t9yr*pYeLVD#qYZZ6j8-p&v6p z`{HcOp+NS2>uuWfzz(wQk?>LQPg``PRy{z}L@ugRU~YVtj6au4R9PvSQ93Lc7Hf*G z9vZGcHrFHsd)0obb5OmPw^l3n!{k{fPh&s8@x^1@Mpk6?mAs8p6_#QfJpDFOpOq}( z{(x0={u8^_C#T78S!w*{)1-$cF(kPSmrV9kK-$8hx=m(|%(g#`QQ40`C5trOMBB)6 z&Gw*Vh+$sKtUVanb`H}QVsW=e&;6a<MLhwZ+#sPkLw5t)G~Ml@u%$t8M2=2$zz8x? 
z*DEsPdsL2uvp;ecYAT@HE_uCVl`g4V+Jso-T_^;jO|suRj~JefmZ20!D%gxfQD<fU zXfh5hHaa5%8#v?}=BL}}L}aQ1a@H`tHxBs1(dnz(_8Sl|;JbV)t%0{jBnDDHwqFJD zFl!u_fZhTfqFwKJ_XONmLTUjG>maVOhbObg6x1l7%X63vNo#RF`*&p_X4!GW8)YHW z^m^4!9Jz`R;1OeaS)uE*7zo=PirMz?^q1^9h2ht;O#}LHMj^9%IQd2|$EBmQVSs2R zk>hs(DK;$OH$;;QhPk2^GR8QXZ;m1xjRSbe2=aw7kv}q={BG>czdoFFH*N0n%qc;x z#BTY695to!)+Tb*l#$V~Qtta!ij8JAVLfeJ5ImTx8gBQf1jKa--E1V;=79-QQC!tC z0C!*a{rj%OhCqh0ox>YNNwqmebcxiM-_E#-F(Def)P|z!9BRd?Z1Iz_K(dP%eP{xX zwp@3z#FE1wh#;R>?sl1bQs`!+eaau?v%y(>_#hH6M3?OEW;f3;yV2KCymow!j#d@h z^F78!Qat3<7(Y{Yq4OPVd8bI|P-RdLjK7GEC%Hq-aE8l<z8=8X$%>&1$y?b&ht2;K zQ?&OG#zDIb6IQ_syOF{5xQ8~z)!j<RKV*0glMA=Fw}x>WAJXNYlC+$pf^MLAUO0uq zBNg}=&|szaF)GgsP5xXPa^Agl$kCsH;-u|);WTo_1tD-&@!GYlW!C14gQ3>FtcV^u zN#4xqo%SmDi?oyPIU_v71JJoH=p1}u1tYr~!o7K7SA&f{b&_1kDda2uAVY@@<UegB zbBC4icN@uf!<77zCK5Kh5C4~lsE1n=eShlM-A?|9uHeeM63uxQha>zY(cg8(>i@(| z!izHhCipgaclbE|eG%~v&kUXOkRI_PDIb+g9?K1nkatk8yvX{V@*}Aq6*VB|O$$dq z_Feket#CNK{R5iBrLu@#1BOebRk&2<yh#q_#t-<!U)qO$#7bjYaS2%%k65$sO1T9@ z|CMqC;YUcxD<e`xZRFsHzU4S5jj0}$Djv}2{voVWef-HT^+i_>TG^|KpBnUlx*1d< z+&6{|*d`put8#Ftk`!%w9#^u{EvywB$*=)Rl9_%6!E(1*oT2o9P?UnA)}Qhf=eLQR z(N)C(+rq=h#F6P~x6fj1b;<fRr-c3{;1+$GD-?%9dZY;U12+<6qce-h&XJk?6NTi$ z$Pv8CO8P&N!nYTYyhqZa{Q<}*y5wSq_Iq6<PvvR&afRglykA46;7sRE=4j3Z^493s z_@#XYb90lZ8q}&7nt;UDb9$tid^5U#@NJy5P;jxC1U@=-*a~0Hg~UI#F+f|@o}*vU zs=RQ0{aifqv3L$`L}b!)wg@kYhz%vup+XG_o7qR2$@WL%_*<h$<D*+dH^?08o-Q|p zW$p@L4|IdZnE6K^A!TD;;^{eZZ_Fs(HIj@Ro7hhSrmCD9P=fM7l&_lY=kC0#Uc4vj zb0lnw4<{?ee)Jy#$LEjZBS(<;^GC#HLxdx+7_Wpq=#Nmaf2_ZrC86W`By9Kt#&#EE z#FE{2@4MpZE;b2J*1yh@(c_{M1ZBn1m;BT@4<$sLB^Bf3F+=?&X+jC^om}g_eCo9k z%WE-lt8pr1H+S66d(c0>?P&e(x8$v&*!0Iy=NMYfr^5>{*S=#u9wsr~C|V=1Gl4Y! 
zTVfucDk%rWcTw;CEtxYu*5v5#+ddj#F1Y_8Xp{zaH0Wl5)Q_|-$-J&q#cKKQ@*g^) z3vtBt-4ICK_~hVcVGOv~UD1#$<EMl!W6H8Pl8bG0Xis9ZCE><B-!{7^)57AJM>Zp2 z#dK2wb8Xl?$G8KR5>dghE{x;Zw_7`VdM+hn3(`gR$l?MOPW!b5`Q0s$>S=d-z0M&x zd70qj?~&NTfhZkbm=eTldZXG&Sz+AJFUCSHj<T?(;in&Hi$%op5IzZKLzLj^#Niew z$YVioHoxqRmTl!$nsu6dUYI_*j!ml+5;~^!mv4O2`WaIHLd7Co19erNUm(^CBb*yY zkNDX(cd*gn-(a3o1M;bDtT3lH1=r-C(~%GP#(pEr=>brDbNVJ5jBieFS|k={XZHdz z=ZwQiOHB8VLZl_^&5OOp?8B)k`fIGX%Jn#+<G4S71|jQTYwvwU!rqz}TXV(2xm&{6 zI6ZrggKuVCaM<3Z(k8O*t=|0a^J?qfGIR=9Gm_(0Z(149gbOKIv&kLsHoOjr-~4#M z=Qt#g=Qghnm=yt}$2)^WWwpcK3FiY=V-u^Lu=PVxK>kpabbI%GK6pDh{O<Jri?^~J zwt56tITKs-3=6jp5aGZT3E%+K0JfrxG!{w4pZ|_ki4gqi$EHfB1GmzFxfuPevqkZB ztk>*b6=`o_(_%Y;Gc<d^sapI!&ZwO}BdZ?E<R3pm>K~iN&pbl<PDl*-_7f-(g-@O& zqbCfQ7K*H2AvMRn-u>7Y%dh-`xns^4DvP77Ap1F9?%+5Ej)EhMS$}bU6m59O;{bYP z1&R^QWO3*p-zDEqnASB@IHe(u?Mk5f<0NlluIBo&4#8`H`kG#fnw~sK<w@to!%V!^ z6+S<bo_NSQ;~4pT;s%k1%$}s<5t4jwQeS@VR`T_visaAt3N8U{jBp<ds8%k)<1^>_ z#H$iQieGObbBj|`w{HPcpLRUQ3h`jPb@|Sy%6hzvu{&av_$zX-Sl1_jEro{36N-=F zvQ5`?_xXcgI$ov<Y_!!(Vkh@k)tPZx79oPS4@ePWlg<PieV^f2QH5AU=n-?qz!1XZ z6*vnAnaR_Wlcz?Z(IU18F^@thFWfHv6BF4F^p5q_T<2R~N0B~UUrbT|vA*{D+17Qi zQB$3NedU=+@|2$Zh&r<BgM{=taQN2IJFJtwb;R~MA^Ph~+$MI$dFPAik<FxX%A>um zV@7dLz!lp5<09OOgjkrO&R>o0XsrEfid7UYf;04}So{xW6L@$K8UJ{1z9pM1db|(6 z^$gkgc#>#7sefE*&Vec`Ova+>LXk6teMn;tU}~!hf}b(laQu%;5_Ao%4R<#Lbo#Rb zv)JfW2?52(w~2IGYTWM|*(|pGk6E05h|Hc=#IG7kPEPxY-#&!woZgGC8bZFBZjJ2& zrzN}+A5O!8*_OYoX9gbZy-^aPD_P59)=!u86@`<#C6?Hrw=ge=a^`!Z1T)CF*BUI~ zWL)Wx{>jWTT=x|CY$+-MxT4K>L1$@<kZ12&_-ce4Db=S}zzHL&B|K;rC~h(rbzlQj z)n0^qZ$E9I6rGr#1iMQmWlzKr(-X0=TiN-won4~-Cx@)(mzXrvX4Nya*rxxdjI&R~ zB@VDKE}smrh5r{@xIMLd8Z)&wu<jJ+5xap;$orTKw+|S!cn7meL_Iorb1u2Pk<X?H z5xb6HT@4Ibps<MEruAf_Juze=#4xw=$4BI(eJI~Hm~@>nBzgo~a8-=ED`Kv|d>d@j zUQ#;aRP+;AtiGa=z<fo&?;*L4`O0~CVQ_zjSO-2c=tV!@Bba|xBCP@DdugWc0L4?= z6u1a%>K<bn`NOd|nXNEvR46N=T?CG*oc1fFFfXsGg>8*1W~TNOmLXn&ukQw%Ko^$i zP1g>(e>Z8GnaT&r$<3L?;a^?!&jO-fu{3KpnLev`=Bj_AH|%!niYs+5J{#xi?rC^! 
ztktWgzv>06#g*VFUVAxE=3cDbGb>mWu?c$xt{7d8I#1!<00pZ_)046Mpwp!F$uZG= z9+ZYH9f{%iIKs);*#r5Y56N@06ZmD#<gM8;{Oo3OV0NPDCOJEMv#5|Po|6=j25W<o zI!<uhF0yk@GQTsEG|d?(`}qU7H0%wzY@Scm`1c0vbFLWS8#ugpbf?on_i0~TLo&;z z@YOYBWto&8IfxuBD^7m+M(T??j%Q=DFY~|;z>r6oiTbI&{N5F$_$dXy(nH8o&$Fb* z+`)r*=oc$m?1-iTOw~67ld)WRXE%q)j^{`V?$pZdf--?kT}@t`t5EumOx+7??#p5B zU&8eFizmARuRN;ZxLXYf0dTI!`YXKq$4>6s;<^3*e8-#miuGmk``qZn>DA~^YW0+t zE!;|^myliQ#Lou6@oJJXuOAQ2N9Lun<f(ZHeXQWb4m=!*5|9B%1tb6>0TMvK%jCm( zxsi<1<7JZV04HGC%cQGwdg#>^tl-vnXm9-rQtph6ZbKn*tY9UFpH#r86=bI~iNCsn zeCbSM$!({!592`V8<1=R)ByGa>Hv*^CcsZC$Y@t?q_2^CAOx&L*8piN$$PH+jPmz7 zcFz=`*hD6=moyljii3OeMrFV@a5UiQpD&aC^LLj&zYAO?SMhYAfycqwpSl*?4LJRH zqjqE6uxID6rQsrY4Wkf%lcFf!`z;G0++mxtip8s}2*M?^f=x___gj{i2zf(Cz765Q z{7sez`AxaND(eEA;fPI6c{&EsvV~7?OS`^|9fBV2rvH34GjDrflRs@GBNhzff7?n{ zEzn9n#KFmDtG2D=%mO#xdn=i+P#a3Ir~8j6o-JhK!Xg|G?k*e|-2;c4|9*3yR!t@> z(%_o9X3<Q3(s3esCW_y(oWwlyh%p=*gTpAGZ&VAe;Ram;#FNurR9$jua`6P;$FfzG z9UT{5+r{jZ6ZsO^@XQo6D|*%v9gow>e>eMthZH=U$X`B1o_;n(`7Bh4mh$Nn%O5mw zwWEQFOaGS!E-xdWKbu|N4_aeebCQrsXb&L`r(r@GNrQwmhI(HFpPII_WPUPj6Vg;l zg*1(}2x$gAE2L7|B&15(D5MFrPDnL$zmOVe4N~`Ui?@8fg}x)uvgsQ_noD01(owWp zNUgL|NXOB|LRv^&LOPK?DWp?qiI7gGlZ4bx#|!B!nkS@XG)G9C)F`A2s7gqmr2~<c zvrg=-fH~46M&8R#K{Qf8moYS0K$kQ0{tJv@B}4xdP<A7sR6y4<v{^vcGxTc#-N4WW z0o}~dLjt;up}Pe1P&vc52zVVs*9quxhOQLQMut`j=qZLS0xI;miRESq1^XE~RY1=& z)Fz<cGxQMwZDFWIKrb;=C7={&IZYSvtBjl^ph9daQb5}nHCRAzF!Vkw)w{Qyp??bK zJ%&;NWdTFlETHVmL;AIViW%AfROqvW<qil1%rDTLmE}xeB*Qle<QRso70?8RRtqSC zuy}$O&{T##Eud)(oh6_d44o>VQij?DRLRgs1XRONi+~y!suEC(XbSLj0ncXSBmvE3 zXe3af)1z3fvru4V=v}NP@7{3?Z4=N!hW;#|6B&9=K&LSD%cW#yMX!<DK`HVK3(yA| zuw$_>$m>3eSRZKdQ<nKCQhlJ^Pnj=J<}3B8SpvmIr!OUEDpJc$KGtx3V6LAv!$*nK z2afVn;(U}CeW2A(5&I|!`oM91%1yV>tz>;*p`U`o8S7}OK5(L+^0kkm(Fd0KDRmQk zB!fQC=_l><Q7rnv1%AqUA0=BK_^hAul8=(B53KN076_F2qx7n!0>ws4-Q@Ihc8TT9 z2VVKo8zlGnbUx(GTIcg!MSSN&WU2dfug9LlJv?c>a1a06e-95w*6*=v*O7%w`|v%F zlMPF^i)u(-We+^7lvMWNi{7hUUim^Nz9h9avg#)hfA>7O{Su!4Y6p1gJMo@G^83p> 
z_%~eSjTO`SMLgxdQ~$8h!d-aF!i8_JaI*mW0mlI$pc~#M{Z@`1>h6m3*q8X`puyFR z-dVwV5{#AMY4Cj8I#Rf{-W!9zNx%u7g+t({*+I#7f5U+$*h;aB?_W8f(9X_Pmr$P_ zRwsR5HK_$>Q5G6apsN@)nWOU`N<ZfL=E_DiQ(1?gNFWQ%ASg1!&o{b*k6wS7=vVdS zxvpf|s)3<bUw+U%dgEpC#;U|FHn0lc%ow_o6RSo>mV={8LrVlkA*LmMnWU~B8Py9E z)`!jyOWv&}?$tVe?5pI+>YgGcX<nTql9InyU&FoUH<BhgOQP2l;&;xPsr>O6^7Wbl z{Hrm<yQUem=2yPM@4kP13Emf9mG)q7CM+`4X;0eEUa$T_`Dd-dp9FmIss(&uYZLL- znYp%K0^73uk-<SgtE8=F@1AT_p^9bXskN!)m*KM;IE1}8dJQPL%x=UIPo)M-11hEX zWm_+v?u+RdJDX-lxP2MZ{oi2kxF*;7>e97>I5wdW^FPB-%l&LmJzxu98;Jj&+`BX4 zP<=vsJSbz^3=2J_ya0;t$~?1k$CcR|m>n^g6TbbY!^n=;a?)Oc^Yx9DJ*@p>rX%Q_ zjv^ZI0*QM)d(fnvSiK)LhWcKZGi`JfBfx%~DK=^WUf&*Qt2R__fT>XFPE!4PQhJz= z#p?unr+={D!j<ix?3Z_tGhjafvT&0F`#~QrNZ<Ki>~HQMN$XO1WdzAtr{+U9lf~-> zbxV(6*Yb{Mv4Rrv@w!L(Ei*{h_0w?DnY&)ipLUb?)~k?dL1w+3#JusN1Z%)nfT0(` z$jQX_auYOIM4q6kgwFWtvNf6}J^w&QwBPfj?mZR=ZFz5GxsS~)>5O1QYZQGAUc+xT z8-e1g*yzO($^WgIf1$<DN|Z4pcXY4|C*gmnK8{ZH^9<|Yp;sypX#cmTvHo_TM^eo+ zOx}GzDrg(Cs>#ohPu@@GuT_%k??)q&D&C&eV?B(WeWsiY5yuqgJ&VPT5V~tAv2E|8 z^nXqfV{c@&UxQ?K-X$Rp_r0SFbu&MX>!O%h<5cW2pvfx92iu4Dq^wABerVOCd+sG- z&J}Op5QaCoVJr~4nRMTg+KV-Na~Qtgh)48+wc|cvX?1%RpY^y}(18!!q;Q9t_kKcF z?udpOx9%tjnSed|7}~i0Jc-_Ubc_MY-^8v}2-slRw6QL#mvk(f?)1CG5A@7BC^)Ex zQQHW5u%nnx0@{i<gvqgF$OnDWEs%_Hb@4ey7KaaTmt5l5msZYLIt9%5GpZ^v9lDqW zo%n-`$p;@KD~_zgnDM~v$-!IFk}inY#?bc#8ZH2F@F)1v0^c7aB#nSH?lX{1_!>FC zVn0#WOpCm66k_q=1Y$FYGKn3*^ph!MM~$i9<!9J11o!t6P&SqdxPW{-XF<LC3(gXU zF`v#P5xa5{Hy`kNeV-W~M$`gNS_L#yK%ArL<Y$O$S8~tVdUSIWi!b5Bv~yKbt%^kq zUELi4bZtHPaMxhbI&x!ITG~{EHHtbNvr;=Mu%L|ozVk8swG{h<sT_Y<0qsABWPjMd zza0|@jfJ`MZg^?oBG%z_^s#`7Cqs(X)$Z)d>fI?O|N2nD7uS=|J{-cw)swK=fx&+) zz%d4eBS}te5{$B>)=@qb;SYS}<&V_jVM|Q6PXY<+C$FGY3I&OVL6iTu6HaGl%)YmH z@bR?8Mi;<f*?U7z_x+anpWzcQmZw|M-h*$YS!9y_3xtOsgbzb5x(HvR{79GlM?vNS zGJSU+e#8Re+1-<0_c?iQcW=?p<cr;DiTf^N8RyX6N7-KhjK9hPOhpbppMBO$*Ig!& zd*sOz(LWgf0wh&A_n@mDbm7wvIzneZO-lDDcsC{M_GBjcd7a=@oWJq{6iI`@>(&Ii zy3nih2|oAdH~d1v_9lp?kc_>tk=65=q)^8A3Q%D%q-bvvulj{7+Iul{p^KFZCY0c+ 
zE!{Up6c_J&aI`rx4>eA-LY*<}zNaBy?e8D8AEy`ugJ=^9{ojCpGLM8ENR+$-iX(;n z#lnbrM0+5Q?U@I9Vn=-EK(gpJa^iqSR6@K5dS~PctH}9dDLk70I_!T3{rBrkT@T(t zW9bWX$>@Xe{EWF|=E3Z=Ee<s8f82asz&jo{XR+@op*uWo#<Y@e4-VqH|4PIkjpSQ? zAmctt=hs~!6(8xdk75t-e+A3inDJsHcy3O@bRgcnx<nWn#IR4nB*I4&)KbPG!Gag| zzjNLG=;_`Wzp@wt3q@_J!6G@fk^QpS#izmdae8Kd-bM}$?X<ngQd^I|qb=X|7su@d zGy={5+5q<f;Ws%h6`%vy0CNFL0j~pU0H**y0&W62-{QDvKpH>;umT<j{C=0?E&!SU zdjW?)YXY<Yt^(R`aphbH?nDUyIUpNQ2yg;c1GWGT1I_^M1G?VE-y;L$fFXd<fQbMH zzzJ9mSP%FR@G0O7;1Zw>a0k#C!IKz38bApcb_bWpaX?A|6@b?PHGmU<7C;-IGkTs3 zNCT(=LjYp|(*aJvO2B3S$8L1^w+16y0@wif?|(%K3)egx5vhC&_r)Our6QiObnsMs zij%R@vZ_~9^n*XX=24gj*nbUhvG%<qy5RDa=PaBUG!0UAvTFGb)sU`58jjS0v>T(J z#X}aOBkj)UNMn#TeQV)jkyap$LmG}W9_fZN2)QA(BE{eMaN$T1{O5B2ZQ+uUww$(b zy*PNuwy!PR0uW9iU5K;>=^~_SkTM-CN9sj7tG4>&k*-7XO6JX*`b5c)ISa<kc(P=U z%UJ-$b5l7dU=}3jaC7nB96&NRgPV=tQ7CZ%n}+jT3Fx&W&t{6oLn1dG)T!L#s8<3$ zCu${wYc6t39;1~BvZeu_hyRixXPzMS3E=&>KBzelsSY^-CvSZBC{dp4MnvZ>_cWmN zKU<#6O-JpyoQa!Q`}4U0e9!PUeAtMNjLpx^PdfeA_9fR1Ih8YS7yYuXs9r<{HV+M{ z&_k8@q;REYRj+8WvN?wDpFrMgPV5wxWFemBo}JeA^QAwe`_kVA`qC+bgp_|<N*ovB zItR)zO>*+mg~U!&<16{bOg=@)APc;#g~{X-hLDhpiD6$4wQzW@WlHCM7-}J57h}WL z<UFKp$gz+W&9QuTE)x*cIs!!_d@?H^B?m7ib}GpCWzXAKi+sQ|A=&W+D-2uVvM|ZQ zzX>jgx)956StM|TxETlk-Ah6ezFp{VORXSm(6ECRq0WP@HTt{wAm0Tm;K9G)9|>~% zZ~xfB<+6y0|6dtfMNP2czaCIvaG+Vqsvmm%|L(me{=f8I<mQh};l-k1bLKud)#+cv z1?2co_Xc@5>r|(GY{`>z=9dhcF{>oMtYlgNmm|t|&c**Du)cKy$Iv-XK0af1N$tr? 
zS%I}pe1ho1U(*8nIagY^{|g}X{N&o5*FFsxy!|2~^MK8O4S@B4wSYB%<$$GtX8~n^ z>41p<D_|5L7m!Wfz0t2A2ABj80JwJn@du<gkX{8`0-Oaj0geOq19k&y0Gk0T0ZRe+ zy8=$+x5H<VcV4iNL4QT{Gyu;4Bml&K8_kdjXaY2nsecU`ItAD$fCZogYy+eL5&#lF zJ3i~AfYX3Fz<xjtU_HPInDRZA$M2Jf=4M$B=?^IX*}`cq!w=&dxt1=2Ipt3lD6F1T z+_H3K<?^54gYmB-1mqzkynSWhbUY1=t9_?^a(;O@1`#OjB8`wHN&89%NG(#Uv{*V_ zI#aqpS}t8GeNFnV^pNzfG)gv3=9IlC`#^R;_LXd{yj>own4nm%*seIB_(^e1;Z>-V zBb7Gg9Objh)yfZ)CzQ=fP8F!?r!uIDRP$ApDx&&8bwG7Sby;;^6`<~_j#KwnXO^oi z>e1?a^;Y#k^%Zq~R&iE&)*D%MS+Saan$ena%`(jj&1TIJ%{Q75ZKl?wb!+!(?`gwy zDLRF2oNj^cO<jh5q~5Myp#Mw1)v&{`*YL67qG6nIqH&IKk+Iyk%DB$B!Fb4c!uY-M zvQcD`m?BN7rgW3u<TNcYZ8nv^Z~DY^%5=;0rg^h@ubE5cI0I%?D%D8yq=nLjvK09k zd4arCUa9Dj^>fzWSuX7rZIy0}VUb~p@xHOMsfVdIguP;7l{k(CY7Nre((j~H+9v&5 z8Z3*F#mffDR5A;6w_Nrw*=1Rge6qYmK2!db{Au};a(ShExtz#fmv5ABlh?@i%j@K) z<o}kRlefr!mH#QfFAq_KDUuYaigblkF-(!C7^iql;Z(R4D+T4(Dvm0?V1MkP=%!3k zDwQVXbmgncEtr6Bl)S2&>I*fOm6T=5+L(1b>zAzSS^YFwnyH$((CiDEe`&U9%6Dk~ z)`V%VX>V&g>#}u^=n8ekx^=pJx^ueUbiMQ@{c8OzgVV6wu+?zi(8nk<8cZ`xWu`@@ z<)%%hU8a4ebEY3mcT8Q)k>;Lesd=1vjhUJ`3(jKDgcw#eLi(t5ob++&6Vh4Ir=-tG zYovRnhoynC5Lvctgv?ql8!wwEn<|?lYm$8@yC{1?J|7xcEnh3&CqFFzRNf^2UjC!J zReoK5N8VW>QA8@@6nzyL3aw(OVuT`J;ZnS$s8O^jZYq+MeU%x?A<D;;70Nrxo-n-0 zswY%6s=caHs;^YvsV=I1QC(I2sk*Hasm1D0b(A_$ovtn)q*khRYLhx!JwpAcdYpQi zdZD^p{i1rgdX4%u^+xp;^$ztRb-lVl-K1_)-&A+al4WVKELk;Kd$JB^eV)~p^;g!N ztZteTjYH$ntkmq$e59$@*tJh;%e8N4cWL)&`|Aw4(YjZ3JD}CCq1P7O@46KIAbqyp zrC(jHzo3^GREEKZ7Y)^hR}C4)8O91@n@MEWn{&-$%~Q-X%=64!%^#W%ng4CRX#T?t zL$h)?+u)oH6MbI#3Y3{5n<+adyDgh7UnH-Vza>8?KMLLb0G&m{cKRqX6$-@=#SF!2 z#U{l*MJMH0<?G7dmAjym@&ff!>gUxTsp~brYF^N`=q~I2LWkZqd}AoYtj3s6na`WK zDIEKs8!MeGRmx1V3$g_HXiQX-LaUstEK#mdo>rb$wkfkzgH^LsOI2^HPN;6G#A>y= zNIhA-0UG>VeOVowH8|@rO^$Yw_BriKhA)i4=27Ma<>uGfh%g*>jC9L>SNwswyra0U z;FX<~A<BB?CFM0`glej)QN1y%7T@tt*38!I(j3y9(0r@;QFC9T)7rJ~Xg|<4Xxp?Y zx{<m*`T_c-hE;~w4b6tDhE!vw;0cx*Ux6Q(Vt&fJ&|Gd_YJSOl(|pg&Rg~i}i-u$1 z_m)f7O1DeDlzuO)-F9h!f89=z&5*fd+hu!X^|Cv%PVzAMDEWB#RH%BLe6##B`B`~0 
z40Inx|F&w6>NL#kwyKNz5%o;9Mf<pRo%X2qN9|Q@CtVs=SG(?6{SW%z^d*LJ!!pA` zLrA&NU>s+hV4P;O8)q5IjISAw7@LikjMRA5*k-(8Y&YIBa;A|ct7)96&@|C>)bzQD zny#5{nA%PEOg+uRF`f#HXPJ4qc_kL#TJw7IFXpS}HuDX0JJak+obHj>rL(1xvMkvU zS+1;5Hc2*HHedFv>;+jl^GWMvTj19Z%8tkyWnashWifI&#@M9zR?(~w!K?qItW^zE zTj8Kj>wdu)Li7pxA^LIpQn=lh^y~FI^>zBM^_TQF^<qPuVGKGKVb-7{yUpj!+y-1X z(1AbXR}_m>AFHmYLe#%y73n7Frs_&`Gs<;O>YmcMbk}tEbWwVRevbZCeY&9v>tGaI zO^NBSiL1eN0s=Cmon>caLGqsRV)<fuoMIHb;2#Q`a-otacPq~*lT>z9gt{k|>Y%Lg z8oOqp=1t9anyZ=sZL0QZ?OWRY+S6K*E>ow~J%RR?=nm_ebP|2J3_h+@zgE8i3-N2i zw}u}KU5o>aQ;jo?ca13~o2k^4X3jM0%tOsK^DE|!<{B(S^rjA%AtX=0mwhAUW&LD- z$?l?aCb>;+Q4CicQ+%rEgJrfsIYU*E^(MT;-&qM7rB<&k)6LgCqkB%bT6abV0kKU! zL9=9K*j^W4PL^StxFn;pS7+yo(n{Ary<ltMgg<`Wj^?>Gob*7-mD;7tq}!y8FjuiG zRhEXCwab>G{x(eGX<3`>9wt&L*T@Uy3*b-p!%*Ah0g7aWMKMKDq1b>wU29QXRfv@w Mx34{4bSdcn0N`!UFaQ7m delta 18962 zcmd_SdsviJ7dL#*4akVYK?US2ASepzoM)Ib3ZBtX3?vmTPt&A@I%8TQ=!i)ZKIpd6 zvhvttX5~5a6pD$am6n<%8W~S*0u#-Yl*jpg`<?-Ndf)5%{`vm<yjru@UVH7+T5GSp z_u4m=RR@$E45%`QT2~vhr}PM&p<3{ix*gur{-stTKcMLgb-0jv)H)$Os~(Kh5OPN? zMXFjbRsD=W|Juj@(3iFe>9}X6*;t#alcH9)h?5ULd+|xm_phGgMO{Qe9CrwWcf7=y z0VGB=fWQAK86oN&GW!pXGq3`Sh}&viozz{jQN&3)b6nL0j*DBJ6dQT|cNBW7dVkMz z+<4LIq?m3495WU=K>Oh!e!>8U$OBPANJctPFmV}xW286VNAw#R&G+g10QtZDv!LAp z^JdI`ej3NE3PAyS7MdBg_Z|L8(Hj2%L&k3w&J!{0yoGbOY;vS#apl#{=<<6GwM~nX zd{0C+ine$Q95J?oT{zCp*}^yxXB$Pb{9{Q-z(DbmY>uleZ+l3Z{iAtNXEHS)mhTlt zN&*J@71*YeT>%k&Y?!d(jJ`H>?Qz>|v^<u|JRXmoFYq|q=&~q|W2w6bX%2`j&C3V1 zl@ap11&94H?mWB5;}p|5;fz=8ZlMY^;5wP8uXV(`TkHXal_eMd*yb(~r`P)Bw61rz zR6Nn#xD=B<hB^D^(+?qlik!Rd92&rJEe{JTa$5DZC4Lnpt?j-2r-v|%(tOs_UFt9N zbQe8+@yO$Gw<xOh_d{Q+wynhQE1b<%`XnRgG^gj>wFjWfLR4Ds9R%x}ehOJ(IIfc! 
z>CT39XMOq~i+II7JA{gvz3A-=lPaKZbq6DB{#0!B{VTT7JJEra{`bFZdMTCJCQs4C zrjVKLuuo5;V5YvSBXF3$cK+{8eib<_tgTm~Q9q}paLSpQC)~}hW=Xz%07Rs?SK(}> z$z*k4TE;oltjxJ<t;}h&(m#a0@45vMzX-E(oebAMat^P|X|;Y<_Z#_7VBbgu#^gFF z(Vuc&Z1Pjw8^Ze897IyYy@RcSv6SC)N<0x3QXtkOL&%%_xhzg`vSNA473ADGVklAJ zDJZDOPKonGXvr>dWP%hG%c~s{*W#40VrPJ&+S0@;s%;Izbf=L9aneNpCN_0_8%!g+ zD8Dj0MOk2_Bv%+>g(D@+tumky`#sNPD+;I6yHG)`e@co_%^%eY3+;b;3vKi}Z=ppf z{FO}Y9BH2<;9oox7)3TmcMDgWoiYjLj8!lSO{3K))^vuopr8QCDeyQj22Vsd%7BL3 z778t_L<??kK}4WNZ7Ige$@4^{c)2j+@gy`TCL;}b+eV>Ame41d!#oij$mgJ{u@I8m zIO>9hJAAT<JQ0-2Zwe!egVJNpqe>plfLl?VDX+$w$+NY9G?wls$Acnba-g%qLWr_m zhIuw+GZWkj(h?LA8|f={{yNiT=G^xk(V?GovWsC-p(L?OkJ5%vUv;sfdS*o@*D1b$ z7D0A-FuvJnC)Q)<CM^B6I6jMmy<2I>17?X~GhO;*XKz$-nUeED*H+s$3VpWx<?(pi zIVnL?t{UD|3$=Q=uw1CX^#!;v3dn0_0tVij4QF!uIHH95O(BdpjNIyy%BT2|NJ(1K zF+cA}os!z}dW^6hrqYyBS<E+eWxU^qkSUUgQdZ)15yxz?SzNvyy3BxxXS$3mpX?A< z?5E&q3a%|@sU1WY<=wBbjN;793dx|#@)qFMQvuZ>S&-uljMm+%s4n+F0I)CZO(xxa ztuUL)mzgtC+$(f{t#6$l@k-Y+(jtj5SOty}#!;A8S$-Fb*LtcGmwrFLz%I5m{+HR8 z5@TZPTya(Z2XiEgg*wAq$&la_#hKuCr9g-?GZUP@3Nq|v+YT;B+e$@3<92?ES5WJM z$=kuvrB~XEzq-%*_LkY&8w>Kg52?@8*Enfqc{7`0e#P4?rO)E0qPzjWit5a_S!$E~ zj-HspU1*9?8KX_xPMrg)Td3hW8C5xqhez_OEuE?V)_E`e_=Q$?i(BZSy`-a{qP!06 zxCPPwABFDIZb7q^R<sx8S39%(Dw;cSq1ErSUnQ$oQ0b6~^XdEk@G9*Zu{kg%E4_Bl z;~DMQ&(!Y`6ztj0M(o)y%w{`y^79Mm<<87|NDJt1Q0h6F0?nM$2Qzo&<o~E?oDMH< zm)NRqSvZ;!Pf9~04K;BbSIb4hkI|Vx71d2IDXQI0iQxXR-CT!eGEI;a@Neh}c;AW~ zvA)(BOe^BZh0yM4j*bTGk1BIotTxsgeIr3|sZE=#9%op79$gVhBw<l~Pk_0G?QE?P zuz(u32aX}%?G(eocBeg1XpbzakkZ4kBsVOD|23A(4^#Fy0-xcDm<of!Vv4ZsMAnME znj8!3t6z}F+Odo2+MeD9^Qo>Eq}P&9AHr4VQwK(zPwNHztngcnDv)s^N$zU4l=Ve5 zG#i9Lc_Ql6SzICgG>zkI4)kK1gde&IQf3awIX=I@R*(&f3JeFwqwLrOvZ-r7KC&<Q zvTIDpE+*FfF_?sy+Ix~eyQ)OfNqV=8h-*n4ml^1|sed@n<k5I8yRXnMlF00CTJb_O zvb3aOIoa7QTD(%o)W1r;?566uEFJ@6k~+$^#*#43Ax5*a8MTJiMw6uOc|ngbc*Zg` zjV$TjJH{qR$QhMSci<@DZo!TP3byVja;$qo!cB0yTi}!+?5NC~Hb-~*4EWgO>C`Cl zpnD>ZQN@M#PX15<Eq8Jc@z)0|?3~3pBHcs8?%?7e9GHd`qd!RonI1kQ_#<qsKGPZ7 
zgM1P`ubUYaLqBO@W)S*Gpp_0(5Pd{p@JX1Lk8xHc*%VRHc~~DvDS@s@C5p(D+}X@m zw3d6Ez3q~s%z%^_^ckE--(i!5+~02sq}O3DFpTen#9ce3s3|;;mM{id16nO7fdwn7 zCr+**A4k@T;>im=dc~Y!<}Mf%J4iKVu#L}&^>s!;_VpO29D>ohPh)*o<hJV1I(yJj z;Hu^Nv9*u|)Xg=11UxfWJin{pv}t-c(M6@ojjZBHeT^fomOFQj<C?l!=~k$z5Twqv z+_h`4{c5p0x7B?GD?FH9>`qokMfcn+XKNw~0`Hn9jAMX6khoi#By>Y}awaMvas`(8 z8SlDqHHTsCROYtOuH?T_<M?ltWNfrF_(r!5VU~9ztD<|0PLuu7#rzEe=@av$c&Y(1 zvC?UzA|_V!9N89Q33>x0sGTn8N^ZrZ1)gVSTGf>##HOU`d_~b+I~qNL4OLN1Ujqdz zHHuCMBC}#u{MsP$PV5v>F?kf55}XAdpF$f#$-ua{;Hbxx!=YqC+()r&!}IZQKwYOj zjWeJCIH}_#DLzuPh-l*%@cTo^uK4kyog^SZDSCnE62?ed*!U7c*eEh{#PB>Yfe3ms zxTCpQK+#-N8ngXQf#lPKUbzwA7aYsTK(<0U9E*F1WV_25Mp+&2XjsizuKHV8Q=*C? z5@$$0y#(EfawM)9VO(K8oopZjlcZTIHG&|mdJN<Y#OG?o{&>7H%TqMD20k>b1*05@ zR{v*|mtcmzNiQTDlj234$g!k;L80IiMwdhGBxQ^*uyEX#s8G1cxn{Ni7j>pTnHb_6 z=~lI3Q^uVn%T*%g9C|jz9Vjbww>G6&>AMWo*Di>0cbe%kEHpa1;yjPT!IwrdFS(ag z$_5pMG1vp?m+WA~j>T>8=OtqOBj&?TIm7A4>{=tNiahdpvQ)#P&B|htozJJ=-eHFy zNkyQ-0~g<e7J}8gwCHGdz=!W|Xo}6tx6;r1l8j!dQSX6TgTrG%0euDsICLwAt_y3( zxnBJv?`A^X?pFFQOgEnnR&nq}pP5N$N{_h9x6u*yn?l!23CEVRAX6(F{zWn<MJuW& zFQsG+>KE&EaP#5BZ63BRpZlFTV{lKf6*F!PdacY^hy7sjc?(V|nX1$9;m$7kxYyu9 zw~#za$t}GIpA6xO?Qt}ZIdhtW)h!Uh=pcd62oD8G(chGUKE(D|TGRqgPlN-_ZaIlI z>65tlZ3$;udu_0fOH8$li$Y~gjbeLe+J_PBBKqhiQvE3hykKd#z~qJ)?&X{mIXZ=Y zl6$(z&mL4b)|zi)*J0Ywgab-WeWtU)*~v=ZyoEE%ar`t5z?I7_c6YAGt+S4$9;SU5 z_YCUJsRO&Kq=94nWsH9#erj{ta1Fpct)9kphGwV<V{RnFdmqn!mCd6VqsQ0+K`mql z$CV)3E?!{U>vEtjBV*toC=vY`b1bxV_UEcS5m6|Egv-e2J_GpQ2a_dzdhnM9lQ;S} zVrOHOlo`dWnj?T3(XdCgwKrsR<tFK!)=#vQj7{qvQ-ubgVqsO{y5wpNFNt(jhgs=( zvLr29R7KXM4G57#mC&rKIuU)XB^T3@Lj3-RwAZiiSy5mehD)_0$fbViLmGhR)4FEC zYN60Bu2Vrv&vKkU*OY~mXp2yM^zX%CteEGTlI?*u|8H3BcbdtN^r3vwaPqHoRYX+_ zS|5dzxZ<k9Q&G|YNjwN7-=|wd_ee~Jg+FT|lQMesTZajiS8rl7b$0hCchT96P`8k; zh}+c^>-OK}oktaUKSL*Zfrm2OLTf?fR)#9(FC*sa+Qv?EoPAWKaL}}4dAy%NO#OFf zGG*DZC3-bfS$+k45DL=~W*LT^QsR2(aRla3f7XyYw{42z%ZYT?&m?U?ukbRk6?uj2 zQ4Q;s1?^5JQwGe72!TzYey4-2epws@Ev}&t#pIU(3O}1wLwXHd(=F`=tY@?Y`@P@S 
z3W=6R-5|9C=XTrnlb4(?kahI6pU4R5@@}F3AbaXby)=g}&Ln+g6GCsm7(Ru^=iGzE z^lmm;B^$se)sthg>B(jkv8#dNjN%(aHh#e=5Vu1+H_jHRV>iEJt4Owdm}niTl&2(Y z#dPwIasF9cf!kE>addYr@fSIQ3q0QzQ1#E`Yk5wWwO|oE$gURBQ_(l=;g8TpA<mVR z%0F4f%mjzOyS8bLJ&K-TZr$Cgf9MRQ(@C)+rH@ya&K?DH24-JK2jcSTb$S((8=AUQ zn3@6$=!hHSup&F+7tFZOqxMAjEE-03zJ-J<`*q71#9`M$)f(H6dbXf85UWxqI!Tr( zl|j=nz8ZU?t)+!jE9Xfz;c)KTC9;W0CBreXSQW$fsVA?hvcnEi3y1aY4n6|JFZdv- zR~-b$dUZnn2G))zVkeRl{+!#B?@^@$F#YZV#odB~U93lSy1SrpfzrXSk7Z94)oe-x zKejSQq}8K3@8u-Hn&<%s!QcuOT5DW@X<%kyx3Lzk<051W$4c>@z#-GxA(PU`c#T?g zgt#>`L>6*elQ?3k0(x%2G5MZDhfY~99@V*yk$BGI)Dwkp5N(BnDuiD>$@&}Z@2Tk2 z)H$E-y#}h{9%2vGWTG}YcrMGc@nRTSvQ!&aI#Zww$v``BsS~_8r8hUin=^QG25-*d z%?<G8vc0)PZ*G`3*Ug)oWK4skS+gQWum{tx5CkZ6&1mD?OKh;X7>q08GzN^t6S~x6 z#pE2YiHmhamYL5lt|JwheG303BX*SSOqKs-7t1L%`EFG<>e{RjVu9zfEt{oVPBPhC zwrPcopt8$0sgMz*cG+cQ9GqLVk95{0^=1m*!q_%3Mb!qd!dFc&6hX*ayqereth!#o z53XPfP%#F#Y6B_L8Tv5uv*lW#LxJr1&U1cCyB%cP!r`3Y=BlLRXI(lBHdLPsbCc_3 z{2>#Wp-<MTrC2iTARbvcI80wN&xAW&8%^|49RrVhcx$EdAWWVO@*?(1oJriq4dgR@ zZ^^3zS!LYE*owHVPn1k$R@Lbb>{yoXNW3A9@A(}WYDfqyyM_By*)1~L&@*r)!&fpq zf$TDL5p5tx3_AnMP?I^T=Z%NL>wkd8f-J7q$a%jjs<E~Bf8IchCPVi)oTv(2t)kH7 zf$%+cKib7iUNt3!$JL>7I2`+DS0I@Jx&a49a@^FjGy{V^{17t01bf<=uOb+gPp6}l z-INf(Q&d~oKiUWTYd+0O#(s_V4Ga3^(;>)I`sb`=qCTDemEG^cU$zD`kp6w1l~%zs zBi4vkkJ{=$JjOc5>DN!7qiN?h&wl?$O0-%)Z+Xw*Ag;EBB_iHjg&GBPHV&T{MpIE9 z``5S#inZVML>LG$>s70Cn5nh0T#OZEg?P_lB5WrpVq3wT3+x_&>2DHtnpTk`0-3c& z;%AYI&J(pIT{Mq8jo;a1m1PNEA4Nj4=7}znrCFo-_$YEAE1hpPkw;kx{JTbyFsK** zqLB<9w6)8<uY`6bwvD&Rok3~**>6bP;Earp#qr3yIB>uI21{rYyFOM7wYgOS;%vY$ zj}XV;{&96Eu88-?P0#h{ku#w-fT3)&z$Kg<9h@w>Lh1)^&Zx(n5EERUkD|&PYQ;jl z=p$)BvWXada2yV#mI(4ub`JknIQcXCVV9d{g<*!<s&A8rIU4>!3Q-QxCHjWh)jh;! 
z^bXWYmwt_bRutLt+{Pnh)sQ!$d`#Vi!FRBo{)!lfDg)PG{)KcBSvb@T|95ccoBoWQ zd^U6u`FhykA-B&#MF)<slWG%m2g6^n4;kDGdt^pAyIbk`#|%#i44wnK827=)^bgOG z3B!99yb6kY1#XwzXJz<FueH)e0_nUj_pDH*8aX-@D1w&m6&H~+F2qIM5u3S=_00O5 zg)Pr>fE7{AIr7!;UTNLHUzj<i&C$(0)E|TEg2BQ4J%rQ>UA1AJT(Pq@pEjQ*(IY1C z^|#6V5&ij`vt;v#8T^8?r1MB6|NA$@JTjG^z>}FHEsFj>cWh|qv4^s|UKP4?mxLj> zBhf!}MC<>+9j?=3w)e^RBggYycrtL*0LcQ>@$HAv<e5>jahV;2#+TW!`y%gg#^E~Z zafa(3F6igh4$jNB{fV0eJqq*3t=iT4$T^g9taoj)vyk4yPF<mGvbv)R7$a#O6+7S@ zXsk4qo?xX>O>sE1$0BmcDu>YzL9FGlra5W_O3ozq?Ejayb>G>Qg{lg$#Ra-cB=GSO zbSn6~cPW2Xqq3JgIFXT(3oFF#+f7Hv<lMfZ2J&)l@6xfAqf*4_r(HjV`W=rw)8%+! zqizVtbx;G_sVSfe2j#I$cKNh3u2>a0IFd;Uw=ct;?4^1(N}S}deM%CUs0U&ZY_~Xq z=~SVJO`z%LJjK=RB1dFJk^lCv(B&_4WZ>vNX&bRM;9RP2aY*P|0k`N|oWVFwQu7#? zEBsGjK5ZIJmW&?2|C&oa8a<rfpF{o{oy;%HCGlg@NJ~PDI{e2Db$xOj4Ppq);vZ`} z4%&{QH&N!(Y86mo={Y5-0efO0+^5Hp?8$9d7>Jw7%BZHUl{xj+JX(uFQr)wEsO)jg zi`bD#Th9UG?gH_)*63y$YC!(~ViN8hFp1TVFe<M}45x>|$do>r?gj>v7%!NFdk0yZ z7@t?}EA2yHVx>_{2-q)*XL0fnuTe|~*=H1ijQ6x>N)e(=9RUWMxgw}pU=o6;H|og5 zF<Sojk>s^8znPNiP$d!j9a5jtAgvNS%0WN+KNgGis<PB9ZeCA%|0?UAUPIz#q-9sh zQET7Mhr9(FuaaBVr%J8foLXQLI{f|z+!NLA6BlSjZkYb~JUo=KxDlP*11h;vSTVT; zF{lLUFVsM2fTI!t6$53ed(NM2|90FrSgc{S1+_vevP06~Pb$WTWVl29zPgv?Kg?a@ zc=46t<m<5&B1#I!?e9VbKkN)*?_$w;X1>uxp3HlNfBy>kC2!QwP+V1+N6KkPpWJeR z;=~|9%&P^@sgIur@IVQJQ?`dGN5dcsUAfJ;zP9LdB<wtvLl%uc{69UJpBT@7WFW6S zF+7?DH|+sMh-vhoTP}Bq?SsqY{u8OAxjQg~{UEz1i(L;NIb-QOWSA}rI(XUi4?qm% zR~L%)@q!{^=tv)%w1bU)dx@ByOChWBL!$hBrHz+ZhfD!YpXAY7`K0RKQRG~HaPU5% zfGtmY<P!PU#OQu;s8WMb<WWrl)>y|a7;}%Hb359h;g?9?f|SV?P`oas;UY$Vs%e-I z{f*3YO6GONb1IZ8rvCBXgF!9i>Qx+D^(;!oBE^TnJ3oZpL;ZG7w8tJf0XJ~6t01xS zLRcJb>x~1*_XSgfS=_{Ku_qSg)0<&r$b_DvGBR_*bAmWZp8aeqm{~E+4P*NmPDEo} z0cGU&gdts+z!q<A>g0|uBZCV2iCV~<LKQA;TMF~K8_=q|)#dRx2H^m0!oJ-?IzQPT zrJ5&`16fT^L^GNCWX#}`<IpZV0!%gphk}k((v*wPRv4R{1Xtg??d`EJJ<~jUO=ZV3 zD}8i<9DTCi=tQRQ794vz6#gMHOyO&gb~M|#_MZx0;bVKYgN=q<fWmw8qsB3{#@rL! 
ziG;<zdeE+qxuP#H{e~?9#jD@7Y$`P*#3mK@eF$T=(vxI}?I~$Bt`pe3$D{QAmBQF+ zHg*>A3b$F<pl6@QMS%qyDoW~b5Rbk@zOxPBOV5zNXQuHh&yd;ABm~{WDWwL5u5ZcO zXVM4uL)Pb8zr=~;AbeqI<1d&Q+cV&jVyO5r#|K}L-=FCb-vCBmotJ?&{tN8bPFa9? z{sEF~pBD0B9<w0?sGV_i!dHaYhXsc`uC)70LhT8WKY!WL%nG0|&?(pz$?k|J$lPZK ziIimDvpT+S9%*^DH}AKPM9nVazx$26IXjM@wwD~7ox&UUlIyc0C1Y{eILEHA5pCz! z%+aN`G7Z;GnOIbVPyl_UyVt;f>2S0K`BXECOqtVHB^kv$76OZYKnf8Kb;RY<`y+wF zP{yMfOn|v1)?h1S&5rFO$<aB9PsxP?4g$bXfDm&Gv)PyijG%Xz&4VwzX7eM`$7Zwq zpJwx|kL_Rw8=d$CGn=IKY$(F*G4Nit6tN0P9=!w`!dDm#O`&w=7sS0gPI4Lbyk>KI zB>C{UINrLOoPBOgQXmwE8x8LFwhgm!WfOvQifZ4ELu<a-tAE}q3hRMmhFiq~lTdfS zv7Y3u;$D2lSW;b_${(#KzZUnz(LQjlRHP@`xrw4MQaCr3-}Vt%FgGQp^drnLwS`43 z;R2f)+OIg|6xlbokiRyDgwFe!ZyinU&P(DijV3+jTch>ZlWpuZ_*fcF>iKyq<1_HG zjOTq)Hh&%8cAmsIdW-x?mctUg?gKVgXB)l|5R{tJ*3?<R$(xQreZwz6EeJ~EqnC&< z;EEP+Fo2~JA<v$VJP~3N;MDh9%zTw^?gff_%=eA>5HoKr#QCdFW`GofSkMzj6h-V# z1%LD+dDj^!Vwo6n-q|BQ7A68_#+{LgP350eVo1V*82*J(M7JQ;SFLzKQ1_M0D)yd$ zCaxj~gmpKqmQ*fC=imH<99obNx)r7+1p8R%Px(IibwNVVJaDrWDn3O*Ul`0AMw9U` z42qn2oNeo_5Dy&=%<I>0R+F7CoQqsu-Ci^cn761)HCeTAf$}q^nX`zE;4M3e{`Q$* zkQE8^OJKDe9_=`K0w^A+reftIqUtur5$&RqL}v4ds-morrU@MQ@~3eows?=Tp84QQ zi&El+g@Fgs5XL6Zg++7E`60b@j6}Ye!snTY;l-k`KFv(h*i{fMi>Bj`ksUAg;&1*$ zzJ4((Ts=%!ptwAo5Y7&2db(-v!xuY?`me`Ugss2J3CB6S3wYFYB?!GvfTXVpUA_tn z!R}X?(_lr3yPn7vkBy9NuMxU@1m@$d)rqWI++UY+#3vej-qSb*=4bRg!mTE@z|N!o zbwpj+@X+pw|9U*9avt$bCZW%hjl{D!LDWnVO13K5^w>s%hskqxKb%FH-8pwt(Nifq zmv|!LV0Msn$4MmqFb<uoN}@^Vk^ugJjzli$k6;Mg6+HHKW*Ik-5j-|~Dv$C#mE#3_ zDd=8BrXC`WB~$q=hsfn6QvP*45tkN4dc&MgFnQUuC0N_A<_#msiqhWv*bU^PQUz}* zAwQQcW67+ttbxhLJRa<EVtXV_0a|!3AQ5YpcXYLjY<Tpv;6^|3fFJ>2ZSRop$`nfP z=H9&^-}P&#>z7ddqoOI!fW~bIxZbZt0D^OdA8+JcH~d_GiRbnGW1A=CHLi@9mPRIo zmtsIEm0@KTt_tZ|WUEr}V+A-$$-Jd~cyPY8G=(L{m&T=X;H(_Z-A8!?pbl^ma1L-1 za0IZgl<>=jg)>gKhqW99NC9ZeNYS$C!5fycg8Sd2zfnuc*UO?KH=+<ZR#3GI_(ecl z8F{p<C%<7S>FG*i$#9o6m2seVE0PI-sQ??G7_bmf1|UnxJFa2j-cELcun%w&aAhfZ z;L6K*Z(qkwHXJBi;bitm2TM_L@Nu4qMA!wkX`Zfni!55cuXHo)!Zo8A?<88d9o)*O 
zv&5Fsh!IC@XPXUPdqbCp!ARXkA>x;aqCC%cEP%m2Rzx_CEl))K!dSr;)`;gjmTyD+ zF>i~ZBVRA%E$#U&!+@1n`#ZvHr^&AJD1_(Ely6Ua^|g+-;{W4fI&)9EB?j#w>sJin z!}pM%R%A-P-`!3x*-d)C?BYl4CLg?<85|0_?{KhZ7rFg%AvW@nD@R1Wf!*kzPu-8! zlMhyEajk7w`7D3!D>Aksg3nq{Y!xGo{*V|vP66%zrr=xd(i9+Wd@oozqX8Eu#HSQr z(v&qF0~a2-#q137@$00eVk){F`-&y<Eq1c_#|t~Zj{N(T1b*oEr1q6$WidpFp7QAZ zb?qHAcXV*S>i^Qgve!twJG=D6S}gnpVtPnOC3KIFhS42D8csJ0X%u~vrD{rqY$A0F zX$o}-X&QY=NHgesA(hf+g;YtW327XiD5P4NC!_{C5~=H?#Z$V#LM;L<n`(u07?ld? zD4HguR+=cJ<7t$TPM~2zI*E#fbSmY9bUJM%Y$`U|BBZk^71Cl_FQg87MMxLY^Fq3q zo<>^A263Q_Z6Svk`2aiE(2oUlB}2Cg=&KA}FQ8QnT_vFG>`Yw(x{jd>1#|;L=LqOV zhCVHzTNzp)pxYTbN<fd4GCWJbs~M^m(31>J7tqrTO%%{`42=LPjQKpv1quZR8QKOb z@*KFr(Axr9$IxE{w4R|?1+;;o=LM7kEv26ecrzm(5>O#B_OXDrFzQwTz01(`0@}*Z zRRY?^P?vzRa3x(RpzK^k=Lo2np-%%9#w=mE@j?Of3v~FJQa0#thMNR(6hjpP8pqHy z0YwOyqwxZo!q9F4n#RyT0nK1&+iKRBl%cl;RLRg^1XRn=s{(3Z=y?IPh^7MnT)?v# z`H+APW9Y{~g+Y&ExlKZWm7%W-=y-;@1#|*K%LH^1L!AOTm7(_4<nXGb5ko;K6h0s_ zV8>!td6kzU)(2R8lwMwnR3BjTQMwA01xmeYwm`|Jo+{G&Un!;EV?Cp}Fnz!<A8W0b z60Q#z<)iHPQlj($Rv%@HmlCHB81JL3_EHk{0TX<bQZFS%A27*Bnd7Bs^#R2`%EXCY zl0hHf@R5dlDHePn<)di4lx%&#Vjm^dOBtpQDDzRe3zP+;^s40oC7<HyJEh7dnYF#$ zD|gvW)>ZZ6W!v|jtO^nFD^8J%t1l+~S&8S8A@AY5SiS-09Gv*msRmh}$Npg(IkP5} zcbz9KYj%n@lDA0@Q8n35Qu)ps_STUVe*9tS-sx}rEaC&|$nf=eZ`}LR`s055PyLAL z?GJhFGII0nX?;E{VNMf!>S|))R;{*h)quD)*jE7#z#%|0Auqf$df4~)qQdnxK5(dY zcBM&h@!pG)yVm_((+J^G*)|rRwc{AiF7CqD=XfU<u4FjSbhgs&Y=3C_np|HLHS@pN zAKXvi;Q|j9;kMIvEFASmF*^WHuB(__FMKiZ2zSzh>p@NA=-Wtr9oD$NZQO)4Sze70 zPJl0jQ|04xcJR@t^~C*dZ$3YU?0vU?aQDXrvFpjrcN4lS07dvhXH5)=+&CiqJ?ttK zTKXyypO6QxBaV$DB8Gv&#?b$9$@w?Pw;Of*yiFu@Q@kjR^xxD|ltOYh-NJpyxH(O9 zj?COV0l%MZeu|GtCCOXT`I{+Z%$94Q_22qEegn6C4c_mzNqewI92VB_+n@Fyk4JyK z^lX#x<$1z;7Vu4ZF9GkZOWx}fHyU2RD*~Div_jfs_Uz9_75d~Fa{RrN(hz|mj=8Hy zpcpc{>Pg(?S}>)TOYzHgV|+JIM4KH<^Y$<sWwK{(r{gmG1C}J4?q^6JUxwK}@IOJb zy*{>e?QH%W)vp$UD2uuwh>{v?YA@mFkku@1lKdVh-V5;YZn!XeFL|DT>~2|{js4*U zwUYn-J11>77LB)O_RjY=J$Iw8cNEdgDq`QBJ+S5w%<AyzVDB6EfPA`#5fI6BB<Iuj 
zfFExS$geb%Z^X5dzJ7>Y+upOE)63#<fc=@ju}}U7d+s69`~9B%GXBP%_&4_Of3W)> zBG12{!oQM0UVC57SAImke1BlqvIG{fc>GE?mjv$^!)GlZMLVY9=yPI+ntxbH9_&yd zBmF?lYn;UP!HtM}7Kz2t$+Zw)gvH>>BBts1AcTA~@yDbo^P%!xZoxAk<Ro~8P00F; zHrq{(&x#<wPAZDth%=|FK9q$%-1|?0_u1;sp$C>rt4Y!1UVQq2y{^dyKmWi7Xt-u- zmH*&YAO%mk{3E^ra{Z~N{Vn(sfMh?t#=j9aS@Ofv1K~!3riJnT0rxzfy$Lh+i2P68 zLCM9LAMibFWa+HwI`=f6OF8i&zF7Fc!rcXo*=gb40{k0r6VQON_Ai6-3!vaaEM$_u zb68vkPLuAyinvbru(K(WwqWn{*?em$Ikb~SDu38H!pG*4bVA&uDT3M`v&}0dmJeex z^Fd)otnVlh&ia3|r5Gyr@$~56p@)_b@?mPA_zx_KzNO@|5Bo)biijpc>TQ`tVrR5% zV<=u9hp_PEI>LXX=~0Eni-<S<Z3!$}aTQ<P*@Ngy%ZT-(ROMQf;VuznOJmJV1BWZO zLAXTL*}Kr(WoQ>+D=`Mv_!Kr$&>k)$?|(Ejeh({BTpf&FT}#Bc`u<P1!5^n2v2O1T zL7XWTQ5M#Z>y)L*<u1A6cGhD6i<XjcAFHGHeu{Pwf~tpmd8$%!lr4b@5sT$+`glfA zDx7}}Iz3!Q!gig=l|u4c*i{mNC-?;`8#A}K!!LBFJ6>wn1sk#8fgcCayHR&XF&*#{ z>A8DQKP6hk1iN@Q0Tv1Kcg<+v*e7g`XetB0JGY@iOv7Jd(J_AcOXU6CiIL54%k1j` zcTQY+^^z{I{U~~6J^68WoJdRBcFXy}JBf79wD9e?2D8sk5Ou;5VBg^dJw^V#$JFQG zqK>sN5hxpM8Jt%h-hd!b{dLC-yD^XMB9gs12@?=l@_v>%ZzsEWFDjrn5XyqQMpNk` z^8DUJe%5}ndT%s8YCqYzHwzyr{=PRYErNx+{A#Q;Za)TrSd;#d<4JrW!9J4sYlpY2 zfd2YAG41Qy*WeXHs4MrbhxT2_2G<YcDWJJiz?tc6b%d~b<|(9VpMvkUpM1V=5dRgv z0omWbbIk$|p1j)VzleE%Pf;V8vfm!Q5&<B5H0KNZ;w4W^Z<A~L6Q)_%+F>7}w0}@- z$)|&0bu5A(?|M|f;A}pXvpijhX4}7?W<gOp3xvmCkM}^X5MSxspkw}r?s1Z$1F1<d zf2-Y;75Z^*82-%XcIITUqYE2M&T+E!KrhiPa{548!c$k_r*fze3&J0bv1h<SJD<ls zhNd}JNysPi#0fB8tf$#XDstLjFg@r9XZ!f5*-55;qTu`2k=H&M(9_3j1Fzz0;|hq8 z{(zm#r44X)p+_C$#wV%#h#yGjgK=Xff#?1e_7`Wv*5eP!dY!0v;x2o0=u3-Gqm~P1 zm1hENpEo{#ceeC$-gQ0lgf3?f02pMm$4S{xfBc>}4*nQC0<PERW^c~hJN47CqL}8n z?dP~EU<i%ly(WrXZcE77LwzGoY{2O7A@(K|`o1%NeJ<&II6<<2RZM1o`VcdhC=Tc1 zG_dq=Jofr`4=0N5lj_4-(G>F6;a;L_5`QFm;D6p^f^2^ij;000tikp-;e+_7kZUx; ziwLj6x?(cpNOsx_HgxWL`+ZixJKlaHe-!M3z5SlJPA(i7$RE8%_@g8ES1ys!NBi+P z*T{=U^`-@zJRaYN!b9_!sSA-oM^DHE5qm4ns31UP>{C05@Wl%4`Wy>p3ooYRucI&Y ziu;WPOIW;Z%Pt&&Yc{c8W(WAZ`Vp=P>~G1)vB7>D&s+9B^?M&tX&bI#L4V?JM*;l+ z*?@6?>41fRRe&9UQ-C_a9YDxEj!OY(0mA?j0CvEOfHi=vfS`vQ_W(heE5MrpZ2<9o 
zjtd9$2514J?{lTxR3HvO72qSl3BV74JAfEm4Eq7pfYE>=fE}<9umVs8*bX=d_zLg~ z;0}QMi{m7KNI)_`3CIRm0Z#*p0m}dz{=#kf0FZNl8-O-|1fxy{7y#LTaeyMgbAU2H z6<{Zz8t@IE84!orvVYZ><v;&P2U@rf2P1+u2GO=d7Vi3NO9x9?|I-%kCouf;U;AGL z^C-v#MBM^h+k5D^u5)SG4;D@gS{hPjlIOp-a6w2{BE_+elOpZP=vORUH%3R=ozam- zA+7$-!bKx3MjC^ZLmG>u>N2h)NG(WljN&+?J&|ZHS-3=`=Pz2gBq)Td{{}HY5RM>S zgmfd)7m+SU%I@!rkz$2#llH!S_Op<^8-K_Ur9Sc8Q%}#Hk?43naoW5YPdR2J7SDTr z+Kl=06P@#)d2Uu>pVZm&`*0+wZVcH~7eSWR{knJMwKP7y_!d6VL{sDPvhxhjP2HB& zq#a$R>GfUn?mN9ZkuQH79AwpCCYcy@$?Bv?BCU_&9Wi7?eS+VU@fMO^AMbZ1)te^u z^`?FLdDA!3g_K{EL4tmY=~UO>!nO1#{eDXD`%LLA*`g<Qlz8+QgMqclHyg>0pAteh zn=Kq%7Mmlt+iW2p{}dfMJ?k;8ILkr?)<^T<*{p%U!#Ni2s~m5ePmCl_-bnE4Gsc_! zU>xg_k9kT+j?Q3(p~GicSj)n{h*@arr)YlaY=PsXgK_X=v5-{1EsVEno6y+6Q3owT zo%W%f_6@N;-}R8UoH6BoDzw{o&tVI<{STp<@Uw?m@Oh5A3%}6*?;L2Oh^(Q<|KGiZ z_5W*cK{o&AJX!wBgU&gkJjXozH+=7hziI;Z{GJxjCuJEz`Ty@gemXW~??-=p;V;|p zqlH@sSPQ5Eyb4$eSPobWZ~$flrUJ$TMv+fj`;^83ivolL!T@4G+jZO|0L_35<QtII z0WJbg1F8Z00NVju0UH5p0c8LO0DslMf_K8d*~m`^OahDti~?i>41kR5mTp`e5DB35 z8ae`8B+vXcaHI{`RDcy=07wBTfOUW<fCSK1hf_1)3g9GQ7hpSJBj8oQbbz(aLjHAU zGSNOL?veNt%Ku~G(td^i!Z&>NU9z~8pDmE!aZ>is62i)pe}?bEzp@~Z2a&M0s(|sh z*k$kC);1-tlr4$?X%}fXX-{cyX}Z)RwMwT+H%m`QFG<^^on#5JJF;=|XXPKr56e%> z@5mp?LloJHLd6Wl5`|l_NpVbZNztqbRfa3o%8^Q&vRt`VxmkHkS*yIJY*7BLe4y;C z3Q;Ah2B;LOmsM}7&Zrb>lX{A}Tzy^LtnOZ_$=2j*#%rF@lxyD7e51Li>7*U0U7|gs z{ZTtF^R>*qnKhXf-5p)4PNWy>`|01+|66}Z|GEAPeVzUneUm=W(ACh#AUBLM6d0x$ zo;NrRD-53*&KPbP?is?2J&c2ljmBF>k?AFq%e2n4#dOll9RGGqdX97hrdhgM`njw? 
zm8v$X_iOs1+k=@}okcfRH(BS@eXZ+ma2s}^!CJ#bL%pHdaL3?pl%Uxn<22(O<1wSu zq&68$gG|FrqfJkkCYh$0?54S<g{D%|%cg&s-av;tO}kB>n2wviFkLeJWctN)&-BnF zG6$OD%_-(|v#iuS()^@(mU+I}Wq!rH-n`Ac$9&mL&G*b0q7(q5>mrSi>ZQ*~OQb8L zJEY%BsdStCJNZ3%gkquMEyYpAX~iQ&h*GN@uFO|XR?bnDDwiuSDt}R)Qk_>_Q^l&2 z)dSTk^*r?|^=|b^^=);C=4p*Vo2{LseL-9LmG)1qC{v=##H>f?Ch6wumg-*C?a=Mk zeWAOqYtY@)arzYfbp1j775xo;w4tXV1>)8i42CSjSVI}ay~=RO@Pi@T7;Q{6rW!Mh zGNakJ(fGdcW24421X6k0WP>zTncg&Qgf#Y;4hzz_Y^pOgn0_mTK-x^5&E3t>=3eF; z$fC?#Z9ZjgfdtyjoCU`+NT8dvAI$O#=}qZ<=|GuARwi2^+aTK{dtdgk?11cutVVW5 zc0qPUc1`vl+3&IkGLOt(9wZNuhs&eoJ>@BKg?zYtjJ!ZTNj_cvjC`(qfxJZis(iKl z4SDHCdA0nM{JLDE5G%qIWr}jeD#hFQLzB-HUnqW3sFXV8MCDXiwOhGbsZ|+OV^nig zOJT`>sU+$I^$c~1`epTI^$zte^)dAwwMQMU$$@x3($r}Jv|Y6Y+R55k+BR)wrZux# zcR|-hAEi&$OZA!h!TJ~U%k;07>bL1X)}Mea#~a2OW<Zp840_`Z;~AsLG{Q95^t|a6 z(;A3wHw@?((=F4Vrbi|}b2oFQ`Dxh9%jR#Hoxq7_BT1IprN^X8A(KAxEcrOOTfSO( zNcpKMOw&ViS?keu(Ix8+>Kb*=8B&Y`j5mz8jW$!M(=@_7-h9{0O~r8ujmV|brLRis zWy9sq%boIf<qh&CIj0z^7^_&USgY8hxUA@;j8YC!KBIhIxl4HztNgyQuWGF7CG|V% z_todrv6^MtiJ5aU>oTKtX}V{13v^|=)w&Jn_^vKlKSlqHeyM&37O%hIN5g*%SSTY+ z<FQcQGhH`PQ(v>vY%-656<{RAI0Yk#k)_KNvP_vtmMz-_ZGQv(4#DhZ$}8lr%Qwq+ z$oI&P$$ypKl0T5gD0(RdDrPF4SG=Uyhm~;#gAG;2DwWD7ltobD8s!_xZOXY=1Cun* zYgTDCYD)KNK8L1KwGVW0`X}^@^&9l>>5szF0}Oi%8OGJdw~Y~|fhH~1^&wNU$=}=s zrW$3AGbfsdo1ZsVn%A1wnKzg>nzx#_n|GRbnID+BGOvT-r2bN|R3e>+g}YAru5_z( zyL6}YOPFw9nN+5fX=MhPr4+LoCL1Mt9rnCYwpF%W79~%RpTvCb$Xn%NMX(}4(OZ$B zP%8|IA&OCoe8m)n9rIhLSf;4N%D<s#Q<T9RD^<Hy4^%`wO><4dY1eC)8(ueTHykpY z6I@oLG0mtmj)L1NHZC)+HNIy&X#B#+^j(UNT{scOb^>l?lx(`}71?IxdR0Ho9L;*o zA<f&FoejZou@Q!7L%bo$kZO3>u+#9l;b%in<51%d#!Rf@Hd8ZG*G8`qM!-?k$qvX@ zC_Yhq3AfZ=`GvAgnWEaP>ZI<g9;}|OUZMV49i=&;`BF1i`(vs04c#*RoBCt=S`!-C z<u$@C(jl@6*?!q~vT8*-yq7|)QR~%aR3ECIsD4ges@|&p5z8$|(@#^P*`_(B`9Tw? 
zRcg)JIog-B%e5!9m$lt8HJRC&voqhxd@r*h^NQ}8?pIw`y-Yt#KUXg@q!}g|W*Y_= zOO-~8akz1k@h#)OjR%Y;jB`vUP2Zcu=Kkh~Y*y9Y|2dIJ8Z6D0PQtKq;cCvy(&ZE6 z@5!H6tWa%L-BbC&#p*O!nqut&biY))2Hm6XdE6zDOodc!ve~j?*<v`Zi*Qp7x@N3~ zHl4p-^2TcgqNJz6Rga4#5{WqshIAS`05UB&q#_YZ)1<?sHtBNdN|?}TX#-RxmZiaj xtg=b6g|fA>b+UtKwiULc#lWV^%jC?h)nnl%DlD)Xo8ptW38IET(sC^2{{bnq7zqFX diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/w64.exe b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/w64.exe index c41bd0a011fd760ce20ba795d9e535e0d2c39876..46139dbf9400b7bc0b64e6756ce17b4eb5fd7436 100644 GIT binary patch delta 26888 zcmeIbd016d`!~MNhQoX~pd4i8FewTsGAIZdDCkB(aW+R3BsEhWJecKh&;urJPhv~c z%FJxCwA8f3p+rT&8HW<JlCohFYLuopp7(R_y`krMzVGj!-}PSC`_FrNmY;jw>t6G8 zuY0Y{Q@X;Tbd^J8szzGVH^baXSi5$9=h`;snDlk+I`~&NTWi0NaaHYaGFH{T4Sd`E zi`q57we!2wo|5@1RQZD{9*}Xw%={UYCa3;#GdZqcxdT_y<;lc0UJDn>b#UcfIPMZy zpDFCJ-po(a-Og&qvmu(qm<2%^E|o$mHQf8Tz)}r&2{GD#EgG&s!*OYMI4*$w_<OXr zh2!e@H6eZ-x|^miFhQ>7BL4A!yTn}hAott7K|+X2f@@&C_%0d`HktQxmy<>EoErkd z*>VKLvd{4EvfbbE-85@ronEsK?C8XCf~fs5Si=eNH9|#Mc&rnPwGYls3_*ZZckeOB z*$hdIB-H@H4|5<`UR@F`lwUIq7OaLR5D>&uHiH|<RA|dp0auy%%5{!w4$HP`Z?kvo zV_vfl(->H|eN=AAa5?pEa_X}oq@;c!`@d56ZA(3kWGb|~Q~@RR13###k3j03{ho*S z^wHBo#l&zYp~4jIF;=kLG>Y0sJ=t}8ACI@aI4)bPL+MR{qV{4>=BVwdabn3@qj|?8 zDvqH#9obuRk}~6e&aRlAB;}#y(#6x|)d*V)5&hc3(t@Wpn;=5Qni0;&UyxdZsqi^T z(%1ln$j)y`w|@N?V&%4Uz3@0QzFNF2orhSq9Bom!=4*TOO1gMCOFWYy9+v`LeK`)a zAl6I2vLCfRW`l^d@@-jBPXxP1O|PK;4xr2J#Zyvk2PGF|fnI-9j<U4;lgqPeE*av{ zEb+1+o|Sg^ko;Lnno5q^pf2A&1QWFq8}#cAOKYE?M%J80LA;n=g*M_O^Xn_+{3&A8 zQxrjZNzZX|sm06|x%TnZc9}PYs8&?%mmsBT+2V0zih4*X$W+N`ostt}(;eBQ>-T1I zg(=!M!7kC}h`%%!Qfk)qa&uikMJ}Par03ZGNV2I@Tap9*qwrTA+iXLv6QgW7Dc=|S z>Gx{W5L}p^Y-e)GiMkHvyuX_5a>N^Q7v*70XIm$Rr)7yXS>l9P=IPieTp2H;$IxJj zjYoU_X&u{Ls8|%9C%uM=DOh9Hr?VlBA-&74aa?(|zN`#QbO;^DH7~}%{_p|Gj0&G2 zShK^&NjFe(K_ssTuh<~nWA8ae_K1+<miWb=$i}$ILmx~)JZUg$MYHL6vsO+~jwBt_ zv4@U5`CqQG7^iOTJrPxGfRLNbpuNVXJ4NJ<5j0%+36oR&1)E{zRgSBo(g>EC$fGp& 
zJ*e<LqXp~s@Y4I}+NC#3ovBdTE`I7HFE5g3{e$=m@h5DC(yQ!}Qy4$wDtq7*;1JP@ za;;+i&OzpVR}jr=@B>Gzlcqeh+2SuyEsHX&{bxu6hH>1!dp4V`HY2=f1`1#^w8#-g zV_?-uYiXWfdAKh}4n^dBe?!=2IHd^Hk<eU4s8tbq@&H12WuXrt)KjpIt|zI%iqsuN zssU11nBE0*Kh?W7b2|sL8Q2xp<eX~$zJa=_B-3Vyr<^PW;jySiPEq<k&Yt5k#OnA9 z85P;#kpliP?xTg&W;l;H*^ohj=OdSFu{QpMG`rPi6EmnMM}?2WYF42gKTreVjS!}e z%Ci|BsN^`yNz`l}`MOZQ(}2je87#!G8Wt%9Z=&?FiQf@?=J|$q;22t<1o3wW&4G|0 zKCl_A=uj*`>0h%tmq7Ek(z_ky`uIX7j%2JuoNR_%2+@*>ik_yG*fq=U2SL2wbdCms z)!+-k^eS5S1@WA;T3N=x4*|ct8r7qfEEeSvtS|cL_qrEiG<l|r-%1I{Y#+5M61Kw> zHX~9ZA|Z;TA$iMjUX^zvd96}7Nf3W1%t-ER^40HkCH^feQ%J%^upH$P5LWV=8gNMt zFgX=wRD@9=tx?8DZ3`1m6_g8zWsMdMvlSz_(DF=ZN!LP4N~p4^=pcm(P?_dXa9dKA zTAs>>BF94_y$WlVT>mw$R2Y-7&|(e>HTnLNPy!{iPEIHS389G85D`>?7^R$MUrN1M z50-v!o#dl%-KgaHy>^AD;x9tQB;<lcMG$9)3xarD5KZB^n2pBt$>|fP92G=ttK+aC z6=7m7@EM&xDt&bNnDnuNb!!q0<)t?_J5o8d*9~pcv(kYE<+*P#N)EaOc6k%yHvV3Q zwa|(B+d11B({e}I_-63U9n-~2RJ7@yOrz^K=LTv&E?I-x(;h?9dq0vXOzt}XOtIdC z*ojFXn_WTl1R+>{M~hjD>}c7nNEmZmTOX<6a-}dd2$p}Vq0c?JPt?t#0h3s;S}1Wr ze9B67!DdKd8?qYo5TrhP3nFPNdJ9<Sa-$w)i$_tyeN9hj?w!M05h=&5$Ot#u3|=Uk zcvjwDHIz+$D<v=<2^3bC7Uc@oMd7)H$!Ja!MkzYsk~9YM6@BR}ptB2;dzVOsX3H~S zazR`ER(9wBM5i8;o282)j~=T<k2%UchVc|4$GEM=_#KN*dyKy^Y>>l$#Gv_%{yj%p zMVfRS<5iCTE8+`g8n2C1?)Q;Qwzw!fO+1?;x5#};9|0l)4#=a?s+(|^8Uow%CFvkm zVRYys>>}3Qq<L8L(5(BZeWY+7B&{%Ph92K?oONVJY-^*!gG#z(VGTA~$KRGF@Myk# z>ZGqRATT&`FxEu54SkViVMT8t|EOSHN68|wQjDV<t9^#e@Y^@E-hDuf`06an30s!< zBuhMD&E*UK+L0yNg!|{St?A&Nko}KYlY7kQU$vBqA%!YU%0p|Ke%*0Q-{p5u?TXpq zN!e(f*leqAI3&>R&?nGj+1QLCvnvY1jnZ(;Lt2lru#lX_$`>gue9Y$R!~Ldq&~PQ5 z!r{Xnh4&BJY$g1iqr%}69!*z#*{AwM-J20u<vDN6W~oaA`$Hd*w=;xNY7j7ra%oQJ zq%pD#8jWi{gaxO2NvJfSJsL`_tc7Sd=``4tmGRZh!B7S>sljALX|cR<w_=@_9D<n1 zqo@1jaHw6BYlV-LAI^4rg!3PSvnw9G0&_0W{M>O~ZU_^t%u(QAtFjqJoM+LV5qWRb zLlr%JdLMe2YwF&GJPIA!ndr2UwD3zg-Q-xPiR2)Yr=-{9nw^qvccq|H(hn*YBMtaz zS((t3*@f-$EH%gFqF==;CLe5vT|i5-ddNf9iPUj#Vyx?qSXRSp7YKCu0BH&KSG3r8 z_@6<j^m`121@@Lj`X&LI-38GOt!m<J^;qbfgp}*Ww**VyQ^Sy*DHsutfRdhgAv&b= 
z(AeTnq+{&Z44({RH62n$QVU!9-V4=m8hx1tv1+wHkq3>Du_EF!$ntCYax@vLq>Dsi zHp83e$kReS8jHO3W$!7Wyd1jd92?QGTUsP!k{wI<=3j-Br^QnQQExM($`Ko+XbP-w zv|-HYb`GT^LzNKtH^}}D=h(rHJtqWHw^?-@K-w#-zJ>sWr6bn+U{z+8|7_CB9VQip zVLLAWxukQg_I!v^2PI%E0@5*>k3s_l+6*6_VS?9WC-edOttf_l=G8f@sE39lEv@y8 zJk>i%>y1>;2Ag3yIM8jLF|zw!!9lSymxi)`az=QX$smm)K{T(cG}y=z43U~HY?2|U z-{C+FXYv+H>(D&qT8l3xj+0cB0@(^X>><TISVxivIGOwfae#-oLKf3aBr#{{r8u_F zFiS^=1a&(78pL9~i#_{bncAzTMb4<-+aT5UVY|Jj1dWeDtQT`ohL?r%6D5AqvzQ`n z5d&FlCuUw6jqDI+ly*czaX>gHWnv`nvk^6cwl`5X5kjO<p&6P^QaxL!p3%wLxd<>} zu$_|1(U!<Z*St+C45UnGx=It;N$jf49i%Cck_NUj<EvSJpWMW0&L~6tF`*(8YSX<O z5xU4<f_R0B<t?4Tp{#8VKMT5PE(W1=D3#Uv^ziGRighZ#5ruh1dYJ@Ka+>UO0+@$y zU%p2G8|ItIN2apZeUtf5{n>fnsK7`xHA>n66%xd6sqFJnR<WVkO`7h{eEoK59N8(q zEZq#h_O|-lmxcHb;DuCH=)X)en>G0tcRZhoQ6(PFhN>_lOZ*lC;XO7dAc%KMWv>Pd za=eTlk+%4-?*d8!CJ@~jE1TN%yE|cu_Mn_gJlG3?M!wmDtqy#FALq@U1_txBH4MW^ z7Xv9Z!hUZyG3dDOcr0Zq=Qjh#S?X*}<Eb(JVkyCXnmcSva2U^G*}~u)evg5j3?8nD zVFAW?%>$NejN;eyV2h1ol0WupPox(}B+~3BwW0+osX&g<$wheyU93Z+poJ!eC()8M z<tTF-ljJ+27maCKiI3F%pL7mDg=CY)%nwOUK!U9{Q-~mD*4uI#7TB-wY4)H)0F?0n z1hTthUf2EAfwr!J8cg8>nQZ;ou3-nku_T4+%U98$`3$}apQcixw}6J++)Q?3Y;xZ` zMXn@48Ure$!r0ucy!xfCQ1E(KTA!8#O3#2@3mXpF*Hwy>J+%Bwhdhz@v*V0`mYYce zP6T<hN=4Gi!pKSM>2PAHNjl0l&Gu%-5D!f?>lqU09z;sQ6FZ!iPf5pzv56t^?w<W< z>#vo&JET8*J0v^85lO3^vKE>OwPy^}c^AtgjY(^H#Fu<83C_$t)Z4iZXE@qZC$e23 z9a&na&OECxx=#=T?|5JWh=1zKEGR4v110f7a^P7oO(oKxQD9YQojkE%+l{25`;xO1 zH-d<gv532u;v&N#Lis(@KvKpZ45viViLRJIrg*7|{3*5beZeWTq`uU*M6Zl}J1J#2 zI~O`!)5t=?I=Ua!|1;ABJ?kIV!yJXE+0beBLrsBw<FxeGZM0Q|15JM~X(}S96MYOw z(WlujM?40-h>m>Ohyf{Yj24q!c14$cP?0&QA6iR1+H9axt}!$I1noRH!b&dg@&J@| za1-g^P#d#F))MB%@+yoTmk`{x2;_lg<3>d5HxN_q>m$$wYT2W(o^dOXE&5Arm4c|< z&~x`N`B0o8O4DgA=WNX#1#w6uwT+83`xcuV{(8960qg_^a>gTyweOu1bRtO(YzCU_ z<F7UM6!NuxCz($~f_V`Z8PqD&()XASTS<vl>I}|4I8Hj~+qP_8kZd+75F<|!eNz>E z<PQOi5l&juF8UFP{(z}oDW#&M=v|k65W}wCOh@gc{U}m}b}7_+so+^|AbI*CIVo?P zD(pnUUqe{R8LhG~&NyisSo>(dmbx~PzF=}hE4)r}ACF?5orBG(ztU7EEgl1o0=rOP 
zAN<qoC$?G+hVMjV>`e&q1@pAj1OHailCUwn3L(PMn$3VIN1-Ljs^B}%f(a8w(+Er` z@NhbJ!r5pA$y#+0D7DS7dLKNZ*27)njjCjXkHY|<T`Nzqo9I4f$2#{g*Sld%Q^~UF zw4IF!JC2U<c^D`MwqkCF3SwEfoNk47BhnSDs@GDDQ<}Cbu|(}Mf(q>&SMrsGS5i4D zw0Dtqt@b3C&h}h&t+oafH4`GN!C{iJoiqa;tLiC&W`pWhXcG`oY0&<tD4ok*1|K!M zhCZ?xro&_D>kc8V#6^+wgPhG!uoou+%1hQRdsVUeF8g|Y-V1W3AjJL{loTM36_dMc z4iUsNHp6RRi$m<wWy4UG_)V7GIq7wFD6*@=D5NK?XSXB&)F(nsKdNY=qE<*`x4VX< zMI<6UVXR!&2jQqX#$dQL*9mi>BG8Efqm;n!)Ii57KUBYYe*6jOnYDM>ux{P~I}q0V ztK2t+LwB*!Sp~Z+u`b(cSQpBcb`$!(Gl=6#`$juaEwqyFIBGv#FG`UHz>gSOs)kJa zxK$T`G=!8Z#r*`UcJqDa5Y<gnz<Ngc4Sui(%Cu4gLA+{t;I+7;<*<ho=K_6Z|MNb2 z+IlA705mvOrUY9=Eycj-vKp*(VoEt#oQOm{@3VDL(cLa$HVBr(v5+f;9Hpnz?=SYz zBhjWWDUedKMda^c%~4_eGVk3U(Qj!yeuQj#m7P2aE=p`NI}p=z%vPDHm6@j`cT@>2 zN#+n2{r*2XG!I1y*2yAB7PUVmHUH8s;sFuQ<|v4)ltpOmBaymaSpV+Nc&wj9GgV)H z1)8c&t94;(x{nIp0h2*xybXsfV@lutu^LWqy-CsX?_)G}Ff#idkzu7M4yo2OH3iXd zg>3$!*I&?K9iYQzi-EPlY*>$O)AQvmpbiFT#C?^dD%@IW#T!knvwt&IYn$Oqq=G61 zq352L6LNbXdH*CQGXefAOZX$MIUEV(D50$s*JhZDXvirknElWrPE*D_WBvGlIkA}7 zu;JTpqgZqYQQnAiX}Z`d{v}?qJg{Mi%*;i16oeO{2eQLW@fXso)}jtl98PFt&xPGC z6N*BPM>aMFfhHl0Z0mNgcVi8_-#e@-Hi^HwiTxFu*yr!hX!41zHp2~2l$u~O)PqD- zJfNAlSsvN2)}4~dhEW??sU$-DX({m<o7yw6)2p9iHg-~GBaKxIh6#uaTV6+2-P6VM zf&(ls;*~7gKmW2B9(~HL^_;=4X<?ahf;kHPNb|`VBksLFF=^}tOSK0jb5vddUH>io zm9}s%Ib5BkSES$2&24Gqx643hJ&OQLup!_f++?3^&GkSvQZPi)D_|C@F*3J!7v>({ zjemV3>k~hX-`v7B#T)fM*rR`~ZgoNh4pU3FvorByhxoK$*TcYx63TCx{4EQeG$w<* zt^{vK@-Xl({GODaFwB;t9)j&`$t9d1oByD&A@V?RwWMfD9BhU^wz0VhT|7RdAiEQS zh-_>To42tK6GFQ#loz{B(snl(H--jE8JM-oxncYjIyXEL>})V~<@~|^Oz?6)H-u7` z4W4>w7VDVUt%H7>+-;p`YP|RzcAxHSSfaNc6qHe25%RPdE_}jFiP5<mZ&LxT36|9B zFf>SiV~asYI9UZx>HBMxncH>JRpn)33W_-U6UdW+VgmKQxT2nB3?_iR?R+yQEgQpH z5)ID`BcTdyH8mi0BCRVGx_#uW(2*4fa6o~B06ARLeM}r%#LACZMz4`+zd%b=^}k|} z&1u=zh@K=N>be1%Rv}}sx+|<MfM%GBBD@v9-kx3V)tNtal-YXq?X|&9j<*JIi1sA9 z!oW7Ol_s2&WqG10ag48SdW?{q@>?Y?>DC-O>G2g-+<TJytt060^JEYgwZ9x;-}esV z4<2EB(&Da*d6XO#`44Pca<8D6vmszJbpD8KO^PtS@{zK<qUl84mRq!j+Dtl|VLkX3 
zUF9uyD=Cz}Rn0t;d+-}?vHr>Z`Ko(td2%1~o)1t0-IrLiXmW1)keXfYZNVyN_SZp! zl>RlSjcJM(bs|J@tAz^6vZpZUy>T7ag08h0mJrVZLwSW=v!nF>W!zj`qa*rcS(dbD zwXfCvPh!vX@zh*q!}~;=A&g-xP<LykUW#v=;)_&#GZden;ww^oZi;V#;(Pinr6QK1 zu@%NGg|V3!R;~XbKuR4l_y+t#I&iTHufrutC41b**R!@6;m}@UNytIk_$}+467Bl? zUF?^Bh`#g=o0Jl6mXmBhVYL~RzPW&@@bi6HYs<s#xl3-5-`*!J<izjj2OO~kxkVK{ z!<tEXQCpjOin@;=Avf#mhyYd)u6!`R+-R>cya}G#tk5i8OAQO?DMzl8EgBINTA>XJ zqmi%Ghp}f;dk3FaW+T~rY=*4&Xe>5j)Etui$?vgssh#=Ek?gC~Q2s#%`!Ur|_&x*4 zAm>!!u+2`e24E?LU2OoOBGpvH%M#DxZb$YboNhxpD}kNlz#p3V_-j(|c{ZT$KxeTA z(X4&TYS_uXJ)H9u-$dEx!6e+{xlo6Qt~Nu?7UtY<7azBi9qgCFuiMTZ^vmSGyw1{| z>1=*Fikc0!tP;#C>rgxC78qDSbe6ueu5)DMnuvg<Hkw=dGE%y5Yq<0gHnD?Vb~J&t zm#}eQ;Uo+8`5PFGCujrrf*@A7l_V&e>kBIq%b(Cu=^LA&_#JRz|NgDKx?@Q<Bry$1 zDA%i%NP*?1d4(1H2~CMETstNpd0Vw$X-%C!h!{K=h|}p))*~&D7e=z7Y2Icx5(oG6 zcFF_itTWO&<tUgAbFIp;C>#tk!#K#LS9us|Svez3!yep(Sqdvl+rf*>;YR6;79{)M z%+3GKROH;P#8WT$7H?vIrlrM14Wn|>Kvym7*<!UUjVmrRfFQ04Ps7}&EjacRo17l) ze--6JUzj@WL(6i~DW$l9mcGLG?1S{)aSfXwUZI_817H+&wHdw!C)Ht|ABNo!aOnaz zHhrHxVv|W~!zSk5KTea%()&mHU3-fD&2_>7*JgO`Z8?RK7)#$jo^s66e{$F<PpbVj z!MfNZ-8#epx7kNCDkiyQS4^@myt57C?CK3>KVWL$>T6VF(qxK|4bJ9bmOWJvT1o2L zY}tTZ{;CJNG$4h4b~N+Mh~eW#0y?sh8Akre2v(Buo~D-hW+w7q|H^VRdnSv&VBYtq zW4eEf%@v1Yt5*1t(p47-trU2gmK*If2nrRi;4Fba?WkYa{>(7VDfUBV7ztP+GBMIP z%cG|y5w+T0pfUb1D|f!hf(8Z!yR|cxyb0yxlI79k5-&1$7;CjH4>>krU=lyIfvp?Z zji2;0J3KI>$GJCX7U;`<rToZvNK5P%m3d5N4h<k>jZDDOODti~K7Psr_Q#+=_swq9 z5u;wfKY@>bVtzu6<|w-^Oy?_mvw>M5{PY2AW>z>q?0dF4%P^)6wX4`f)oQWX-CJG< zg4dTn1l-dO`H3cAIrbXvYk3_#EA5kyngi@Bw3hprGXtC|v`b{dtwQ?}5tlIu?!M0+ zW)%i+Z=^(>aQh(YHiOPeIgM3$a4qE+xVn*<2YYuM^d0fPGtntX3PO<d4cj`nmtiqY z;1aKWv^Gl4SY~Ur&v~&sgP-MV8rjex8~M(S?B<Z^{K8+@$n2;=$K)*mmusAKU^w-V z2xG0hJ$PG^Ni7NaH;EWU0_GV`a{f2|lNxp`JIq`orzIftP$_CIM4gk4DZE@t6W%Ws zFHQoS^oc@`qadrc^#=g<6+g?qJa8!IUD;rXA_+)~N!Yq@+}942uvPmNi6;+gF?E2w zoi;NUOETU)X{FIM?1da(O$~bmDt-<7C?}o2+sJ;;iRJ(Lo&^l;$^Y>q8#Xiu9Xn@e z0{(q(sDQzEb7(Se<5`zs)4h#aqzt9oApMGigQ^D2V_Syh!-(pT`?{|2N1P`VTi>n` 
z?Bm?7#!tYJZGEqxaMDKjDtwxkNDKtFx)~$bU%5%iBNU;Mcu4?7hQ6+_j6@!H#KJ*# zzBq8(?R*ikN4E2Q&hm%9tJ%-ABf{9)5n256ud?eS2C&W}7j$^)ieWRvN62rvcX(Mz zC#m0cwsWK(A9|gg9+@5;Pdl-t8oPCa$z6(2e0lJdxWb~gxIo@4<k1sYz>-Jx<X!UF z>`~z{H(N0se+(85w`zswPmB}n+>SImq8{PjV`Qg08BLwyFNkOH+Jcu##<Q<RMe?hs z?fz|44~@V4vI)-x&>v{NI_SJ=^Ns$Fy*;ise{(APc3c7f**%u}Y$$*AD$9FzQ}WAD zb#X*Yl7@q*<;c*}bPT;5Z!15cU-3Su6zvfh1@x8;pkb%a7F%(h$JVpn<GVR`LL|g( zxyojZpXK*mBqp$8$@iCDyh`a|O5^3oRd#!PCubY1Nr-yzb>=?d27m4qc4tC3ziS=y zop>;ydAZykE;hrz%4MoBdGU9qshE>j*0Mh*X8D|iwXJmVI2Uj%YhgvxH#>q{u$GOO zRLRf3%zmEqBLDFSHfeI_ZeLi)q}qtR*kChM!vlMh2AY`Vl&$F`MsvzhaZ)1&aUf%# zPCn)~v`o&>-DbG7j4hes%_qFZUY}AF`V@QHeo_UH&zEapz)K$8VDfHuYvXNM!y@yB zMt0tVlpj4!*JpeOGvu>2!-=J|8EH%WrP@nvO)7nNiEYaB4gC?Erf94UFfaN^yWvCY zt@=mI_Y%93m)?0YIPuqTf=g@<YWf-_iN8jjf<1^!>HFbrmNwPEv)gR!)NY;bU!+kl z)dQ4HZypQf@f)^bYG}eHnJJs_cFSUD2m}tuL(3dRF(@2yU;IbR{vx|QHB#4K=E&3F z+yxdkEvf$#=#gz!(+bdtpGucO9fXvN#X9+Z_gg7423y6cZ1D(G@5gsB3t&b*CZs%? z=O%r1fxSJgzvrK3G%YMVoOM<mw21y8tYw;yN8?QlFfB*3>P&_1>`msA-^2f1Vp@*i z^~~V=N+6c0;$xz&>?WI*pAtP1;c`->jx0CQ;!$myAsTv6JnITd4L5*x-&7a^Z?Y5l z867S+Q-ZZlDHj*5Xm)^oqnQO4M0qSnkq|Re$o~VAt+JVoFX+y1Ucy!ugz+iOY)3(o z@6^SJiK`@1mUAc@LDoVqEO~8u5B|Yo)^mEiQz=ws94VHt+0*-t8ajcpYPFSkNbj9% z8wzC3AAwg7Fon@w5!6`6C8%*L$w6%`UWCTu<ExRpGsf*U#A`l{Jw||?-)gwo#GXzc z8W#<&#mGta?Ws1cMr)RzSTHZ8@4_aIoA<mRUe6Z)$U6K)i#Hv!o7j><Kg}ifMqx;= zO=sJRBJBo=bd>SXAEiZq`b%#sT&d(ALH;Z=eir0+ufI=Pme@e=AYS&sa-VMFn!Tl5 zD89QBc6Zb8OZKt6J%hr^w`1?Cf_9OKgF;1S72Q8XOCN!uE=UQd*~vX7e*QX^_C**! 
zeBJKpUu0|CcAup7Jtc`)M%Y(h_UEUcWSYG}_75=iS=ioL_O8(D*qXgHe5Vs^Y}E?; zcQHeE->53p*gtDP$aDKc1N;wQTeci`qGt!W>H4pq2>JI=gle0?)5Jd5pB(dpY+9|K zhpeol>4M5uzwbG;U13H_Q;8oo9&c>=yJVql^O);_5PP3+q#b+U2m6&dAUYh}!ry&w z_m>CX;Q5G+Z1|xd-QS1XE4J(~TXZOl>7VPW&%TY3FOKw}%bhyOn+<xd$ob4IOqn`~ z&t=umB@AdeigQr>wRBt{(A`B!pcx8_-dc*%War<2vNjA${$NXLBWId1SoCT{>sw5d z(Li(e$p<KHkMCLU8U2Hrp^6~<CxmQ3i0x=o5&l&Ua>I_W^)sgFt{rMG;*3MgVP+UV z?;RF3^LbYux;7TdE>hwl_U_DyzO`qt_tSY5SG%XaZjUtXYvw&`3t#Xx+dnIu|8^E@ zoR!$&omujf=|dK?G|YAXp{&dEz4!wg*_7ua_{Oi;^5+Hj$zP$RF_;hHVrk`9?7Qbv zdL8>zwzLeVSt#Tm$8m0e{2Xi#M0%1qD)RkX^nN5F6kgo))M|5@SkCPDK;ZyR3+SNt z4`N#p6`2j7o9;o=k}YmKXFKqp&0<xvGu^vj<-!{@ye_dBM$TfcFU06m2Z2TN_gOS= z@V9Kx3&xo`@Z`qAfmPy2Amykj2n*!hffOGT^!XWbBVlt&28DZF93~MSiSTUkI?@*o zKhUOIZr~Y&VaGQV$?as$^Vi1pKc3<1gl7#VH`Hk00apJ)_qcoem8p9csBX8|FKFsc zPYKLK<av|G%q89t4nM`zjp+3ai=E@gZyLz5=lFG>(;f?h28sHixMD5lY>Ap|PP>3~ zbst+hr#nC94OTN}c$bb}q2+Kh(q@!`H3sY4E({c$o1eqhq6D$cv%OgETnGO1>fPh! z9?^7q^JfeQt>juyqd9OkeAqd?USvK0)s+v*W)uI_-_rml$t=fe1eqOs*^YntYkp^^ z{uS;QvlkK2{nZGV3c56fGv!hTZ)eWMF};ez<dz-MHWcg{AW*Uw!fq51a@FgQs3qd@ zm&ik~biqYS_Zt`rKCG<R+q?T0%5ZoYq0+w;$rrw4JB!VH)?n6i-Zg%97IQa=z5RYv ziYQ_Y!jUZcXxr%;R}|vq<JzDagxd_;r%|SXXBq(&edp|9^`;8<-Mb-dI~8A@Q4#pw zZuVSB0)JvEd%I*bzicYITap|(XsTQZJU>4xU5BEOCEk_CTqp49kpcoR0-dL_(etCt zr#4YN<oBnC;<j9mc_Rw`lv*sQ_v>*)-Gkuf`81<o7ym7f5`b9>stL#pu^}t|n$2MR zH+m0=Ksu={@GUU1>54rYXVNO_UK^cn!y1IDcx3n5i|#@FkVn@2i`f-{sZ<yUr;#lf z*KU@Go~K##i@`B#4r4^z!XY)?avFVlR$9xbZ^<B35{c{AaJo>6Mpk0q!ozIAi_b-_ zscskOmaP4wn&ak&Qnf#rf`VJ$k?a5|ftRb9<AP^nqBAL9Ikg^in~NATl^C$%1j{it zsWFCEGwXu9n0fy*#&?J0JP_lx$&^Q1jG9Bt@1<wV;^$Z-tyAojr8N+1<l^Z<{vFKw zKTjcdOKOWe65U$vrDIw3kW${o#Kg+rV5^0ZG~DvYYf(=K*O0L2)}q{0I7}j+ZimAB zQyTxOe7W%s{J7k>!`Y5DuT~ab-fb7V`jUy~2eI6jGko$sBUR7hYLj|_YEz+(e9E@J z{5wDAQ?_B@sIb$?Fvr=E9uOiu#ulo`UjS9h`EVHFwcD9~Q6ir=fel<V(%Y4^i;B!f zS;Ocf<kRELy~o*(MgGw|t_iWv3_w)r={D>+NlkCrQ^~QI6^>V@oE)J@ju3PJ%9vP( z_3t611u>@)7a(9^-~L$oXdCOXIKr(a5edj~9FDWR#Z&czNcYdIqw@Mnowu?2#a+7Q 
zU<tqw=~QT0e95oGL&zUpC%Q`4(nuq=nQBB=`F39V{pxPx5*JMedZ0(E!|AqwOqOc) z<8Pl~<IMf|ejl?9W+R__h3z*7@NrjIgSm@l>1(B4crRlpjn??H{-t~Pr=wVFX|$nq z43eU<B;qkP6cSYb`;S=i(hz=59GkebH=p<+Temda-T$M1n*XB@S?$snd}A@#>BL2+ zEz$l+X@Qf|Q&BHv#<QGdWl0A<MvI_~F8i>Z;7T6ed)kN(U~MpYDR-&+p?svJTLoS+ zzlVJnBb}4JD#Ig2!)|YjVAp5PmiB%05eAt64NzW&v&F+0OFp|W)h-YNHvs-P;nNdA z9%(d<SjsjmzhJ&<Ma{@?lo2kZi&D1uSGL$BtwlXB!;=F?B0jzEEXUq~r9gLQCaq)! z?CZ*-Dt#H#P=Mhi0^||kITQrhXkMlO8WWd^V>N7$CC*|Qdb*Pmk)ZGhW)j$|2;{X3 z{3;97kpP9SSHg|$;b-OWt!?2iB0L)tzCwEp0o0tK$nOevQ!Fm{+MrEt7^PF8-2frH z@)5PGcXDy^+e7Vguy9BC3Ps0qIW0Tox(Pca@?I-P*1bR>VvEgCmc!m$k)_kflIKhb zHbeIu*0RE@i?oB{o-=i{8FUl}rJ(aS9yFq4qHZ68ZH7D9EUF@uU;PprR?*Wv`<;JA zOT#AiYK6ex+r%0whPZ#c>7S6qO|0k21$@{hwryod=muKCLqu&QutK+zd>y6cx7)|S zg16c2mC<~+l`KG9!21kluZ!6M-LO3iQH}JtIgM7t?X`}lPs=fHE@4l_cz$6Pi(eJN zUmDEDuNug&8qBt=O69vBWR0r=_?wH_(^U!l!z`9y?U(Q{3!}dUM(~!U$=`o3@66)M zNZmT@AI8gI(4+JmCn5fflvKhtt&V0FtR48BFSB2*8~EePS?Q}u{M~_U|Eq%nT%lCR zg`*o1s5852?bO1ZS;zuccjLb}z_M2R_Yt5Fp-~O^rYR5KRM59Wv<2(~8<(H|UZ7<Q zkU!pi<X~i<k<#WN|9*Vc0`|jdllkixk()TgL;6A?e5CCPVU*reh)C&8g@~2b$b_p@ zp)k^<QiTwt7ZoB`nxhcor0EKgCrwm{8PZ6FNRkFCM3I#KqFK&;fs`b(#8R5g(F(nS zXrn?`678kX8;EvN=*>hwo==I0TZwK_XpBZq`dOi?h;C5mYNAgobRE&=YQ<Aeo;?cP zK=f9HZX|l6LQ6zfDs&6cWeVL&^a6z@gRV4Nq3Lmjl&8?L6>*e8lg&{Q6xs~aJ}0Fr z9@#J%tI)F5FkGRNNXAE@(};FcXfi%ZoI;b)QMy|qw?q-ql0q*a`a6X#CAwarR}fuO zBDcR-NuDYtU<1+H6?z8In-zL1(Q6fYC($bux{ByU3SCWfkwVuIU7*nQM2}PG%|vG_ zbc4nWPnzOsq<{p4mWYm2=oX>_6uOmY4~6EiY)keE?L>5|Nv^#I(Ki*^hv-IyHWGbF zp(BZ|18v5@i6zfLB_N6DoeE9Y9@6^?EfBpyp>v5|rO@MuE>-9}qD=}tgXkFwT}1Rm zg<e2(u0ofBHcJ_bX9Wc$DRd>#(F(nRXrn@JCfZA(w-W87&^w8KJWsBD711q3L&=<g z8*j1~7s`*_=_!xsETrhZXSmJKN+%(O2ZuxOyj*utVO7bA4DkTxgu?ky;fw>vrf}9O zoIG&;RyfP%vC6eZbD83MRtYyE{AGnRNZ~|+Q>1Vb6%Nd`x<Z8$rf`zLnW%6)6;2vB zBNUFUn5u}2;S`;qaBeA_8Q_dDE39h@s|c)Yg>ypTEC8p!!r3cx<kbeuM1{FUVbZb# zW~9QjDok2`!1Py`3y7&UuX+)WMrNO_)4S{0KO5Csud!3>qWEuL+ugcu7tinBxqI7& zFbzL{A*+95K-khbP#5<=qgBi{n-#Oodtj($o7h)buQ$6!eLonVMU_JX!hU;7(&I>u 
zy*$Z^Q&P%tQzXpkrm*b%YFK*r3}!3e4B^)dW}m(pue)h&Pv|8pyY=Ra!PR)}Vfj(t zjE~Ar3i*fe;-oq&zgn=X8>s)X%(;<S-g?02u3}3z=J3v|*qM#J`J~Rw;qBu*C+<G` z_BoB_6}IV}5I$!yJN!;ZUg*WXeJ9PyeKBq0TrU>1*~jVEMdWEr+}&@pR>MCCVVUo~ z&L<YK@831@&kSI;cT@N+dAn1$bm4U~%i4=~&%%n{57#y5QA1KGJ*Aj3_U`+^QLEyx zrDudE5ffU(-?8$3M|Qe6#BRn}Qnk~~+XOqEH`9F3J8q9fIq<Inst5kp&~!NP&rj)V z2kl_qod2-Z-F+LjI&W6`fjvJ_&z64>k`&P%Q|hB6CMXuWIw~Mcb^%hTn#sduZJ9`~ zEMwO{=<Rk7$|vpJqBeXk3;i&r>)Q|sNs{KvlCnx`2NF7I(_sfvvG8%$4IJ@*-N+Vv zn4h+EBiWYh8`JM)I69T;%XZUbURxpSp19qch<7m6G!U%Xv5-uD)@!in)|e%_>Gf-m z+6-^SGXIZ!`3148*GIAZy|>tmkG}3Rum>Gw8%Y7hCm5&z9%L%-rjd&8Tf}k0yFm4e z$vjF7U*y;fuXbm7TYK}z-ehlV4dLUuvx8eF^S%67z{eqcM?dz=$C3QsZ?KskNASI( zS>?wDe(xLXqmP&KJ>FoR+d^I5il)xem-Vo*zT5m*$tQ2}IU6X1-xtLax1Hr@MltR7 zsJQ6$atWZC<54#i16E!)r;MoMHp9E!<o<TG!+YY4l&9vZW+yzx?8>HXPxMP%)ZX%g zLE^P})dL*84Ti34*Y*HDxEnjS-OwizK}`z~K&j_KTYv6#%FX_;+`efh*YwiGoS(id z4kFTlg_L9fnpdh{$bvrg=ieL6Qa(+OD?)(9h#gxNaE8FYnST$h`yb}to$JW_%lkO8 zlb;%U{RG`l8Hmz-kVr@wP0Jmjrjh;kM};fx`$ur#%dr0%-8Ys}X|Kp1$V!*l(9Z;8 zuXds4{utBUQ*=Amq5z$Re0u!3buIhyvtaZ6m)f%|1KDPVq9@xv0=^P6jv1Ri3U8~@ zFiZy&A`j)b6HE4HL42s+`=`FVTplYYr3oL>dJ|K&0E5*U5rfR=XqgBbi<E{ST7DwQ z6Hjt5&E+=;4u>CUrSK2gr#k|B@%l16;zcdaNe@3DaW(EO*`vm7gh!1_QO|#XsF6`0 zsq!!joRi*E<?)*FoMin^`6VB`wzE6$-jT)bOyh5EV8uH}_x_~=WlERxc(&j)g3_lq ziugQq0T%8wd=xqvf2BawjV@4ZaVIoFvd&`#d%ANbA2XKC+_iu|6~=Dt>dSAR&q6=X z;?u*}3!g{u{22Db=e^j;&pYr{qh*pTc-s#-ckhBNV%`)X{{dPn7&JZww)}>B#izJe zwBe+i@i$pP9Bm~jmcBnsWj79W4(=UJViZsjfOlcA?I%K;Q3uQ55*AsV=x$z(WM>Nb zc!V4C;%fGMbp+pIDtoOu(zrZKj{OA5y4leUm$zhrI-o}1U}9IR!y}8qp|Lx-J)}7Y z<ECeduI5!F+*Qawk7;r6Ro3}%s3B4n=GuiN>pV8;a8G{3tE}>HAisM)+i}>t>x2-F zyWik{A?%?oBi?PF3r(3<P;wlk^g~3qUtuBh^{^SjL)h)Zk^D(3^Enb=|FaS1{CZr@ zXCE;Hbt36%JX$~A^d*J|_O)B!;PEWhq<kwgAITX>nr5YCp&M5cg0qZ=`sB4AQb9!5 z4C@R{28MEEvz={#T7+pRb|#+H6_C8mU<c8A1MqIrP4PbpX2C~0@t>?>Nk{wfvsSSM zM~%ixvZG;-p%30n^MrhQmXNRt=A{bzdW$w<6{|Y>9G^aqh14W>zXv<Nx~060D%0Iy zIWVQ~Oc71F7~2Z^T(u*Q7ukxMUZE?4<>JDWT^fjL%BF0b=nT>uNDG&M#YL>Krjzd3 
z_LQYx=Q8_a8N5d&8+^=<zq^_h9y9VMgV>5=Q@YKZO9hZOk$X@cn&|aKwiuFA{<FU9 z2I&2L@eh6E@jmlB9_+DTF0K4e()DX=g_OkNxh(B?q@kKlzVtR${sIioFP)`7bJ*hJ zas3|5!FVda*QRP{ufUHU&7zjckcNCt4W)HMnvx^z*tpGqN$Rze2vd<XeGa>IJhAWc zgR;<EIi+e79xY;RNGwtkE-@+}?M+pL#;Zb`6rmw=*p%8h{_<yx)yDF_yugmtcGY~y z{;19LH@!gSV<*&2`kUe+$A!(sNO-~vY*^j!DQgv`zD&TagpiNVUBm0}F>IYvP6hsm zgtlomnjskz-&8_v)BQ7>?Q0&1LH}{USLTCBp+bWo)zK90j_@4%B=wwN^>UCVOk%dW z(8#&Ia!0yj?Uur3Q#W99l9#n-htR0A;q!k)&SqIB0$qNX1v@!;BUsUiXw4$__6ciY z{~V|+vxM>sC63Z$sC3N}1<S+67YASwy7*Gt-;XH2Cg?L7<S+HIVJb0n^`T7Uu9};y zy$m_UoPP@g1_h21ERP!Je?@xXHE^UOvsmRx|ELd;Ka4D})pWw3tCMoz$1BWlq*ZMJ z3zdL`S?t@BiGG=2D{lv|iuBFW?|`Wof7(LYRm&nyb>n-kV#81M_nG+%8DEwA*xU^3 zApGSOSgJ?<!nU3A?Xd@=GW`(!B?s$RSb6YKFP?8*29s`ZF=2a3vu3tsB2Age?wsmB zVE|s_(}v(CY!IT5<+UaL&DW#|$XSxKC9cw=85BcD?TqUeyxT)|(7&N!IwRq1@fh-< z=VMXuHIII25`FC;egmO7KeNiyKA8<L6y9$T{P8LEb-{K{fYMeHA-&d~s`L>^xt-=i z65nn@Kwst#H3BK&0U%QH{F&W49mIdLjOost@tgk~6$rWELoSe2L^ZpJ+Am*Zch8Ip zassc-)V#J37MG*V1%fy_4X@%GrQNLXY<!2`R-l(YN0>5xq>l>OwzIKrCE!@6gknLk zD`dZ&{TIK%i;X|$&tL4oUOX4nvH2Cm`f@r_4@HiU>OY-rJ(udWXgW2`gQq2KQWi!u zM$)Y5to2+MKG}<f)c1(piQ5rzr2Jy$$TC=sTM8&+QeZ4Ye2loQfF_fK<@er^G?Pxc zP{2y-)1z-pK%=xjr1?hi<+VMeiTPb#xY5O@q%%$IR(%}5PhuhG!(-lBqL_96`ytK6 zY5(IPO?V?KJ|E5Zk=W+*sU3~j&E?P2=$SDFyYyRs_Q!cc#HdknR$2i!Za#{}`p*kT zDRi2A!Nz_Z(UGNI@G)<PzKLMmBy(vQSkR3ygn6P(Dzty00NC>d?DrPj!il;cWd>eF z<6iy2Q`|rpuIpuy$fLH1Q5eFu2>KHTwun6Z3-hr2OT&j3%HIhQb8@AXXqHMk0Zux4 zS`J}(7ww}gUzH+z&g7~rbT6a1|3fI2{>%uBbyD^O_Vj{JO5A^^LIV4s7vd8iHEwKM z21Yq9+ULCJBu(AQhF|pM|H=n+#C?~p8%*#v!zw*nc`<<hlGt93G;yT2@>%*fzK(80 zFO<c3DVeoi%;NnODLI78XH&m1*yl>@7Ny8mUUG32aInLtDJP}-&$4$eh434vu_Kpa zeSd#h>1J5(vqbx9#d_}|*={n|Z+!WrX>!)GMi4j+SqJe?rXuZ5no9Q4H-pTj_<Wmw zud29N#m`i%Q}J6BB^4j5*a6Rr>DOJw!75Hvu~@|wD!#Aceg)0kS(WjFinmqd@M4;N z9x6tv*hj_FU6k;HDt@No+BQ_>wyIdAVvUORGQ!yPlghZOqC>cnu%C+b2^RfEsW?ML z{K+br|GJ8ws92?9y^4~G9ubPXQN<)e<j)BzV~mOgD$Y@HiHd7g+^phu6_2P`ui_6X z{;uL(6}iqz0XnD{3Z(qG?kXcq#i1(Zsra&rYgPP2#cCC=s`$H#oZ1C`Du${Ut758( 
z*<Cbgls`9FWz11=nTkKCC=21A*=iS!QE@?-27e7MBJgG(E{*CIo}-{NLBa1TfNMfJ z>h?4$jMFJVMCsz)_9yDFN^?1I_4sQW^$&OVxiZw*oc36eCIyFnjgJ&Rt<&@?RsA%n z=ohQ{u{g=U`iF`<Ry+Aur226Jl7B|k-%<5fKTzbMTF}qT<*E#*ILc4;`>Ec_RwaVJ z>Nl!>Ix5qz{=O0(q<Fa%sy|ruSKm{@jjBIa^@qs*ksOzIR}sJ&Q~sr?eyBV0yb z^e8{o|5)|5-cjP)RDZ+nD>uyg%$(`-=1qNWdjGi#M$LR-`rH!JSS5TQH<dHN&E||; zCRdFA=E51dncN)wn~jhXQ2E>pZaP=Id-R<Q%}DsCa?=oFIwVYxHzK?kK1zvrMRF?n zpy%N~BT|?rr}rG_E?fj+&I9&=PoZ%ZI(T;?oAI|N>-qO@fvFJqk8HUqTmcd&=K66H zcN^~MdC%3KzB#h|L~Q>rvy!XamSldWVTrBtLVr%e%_yqXAB{g9uBB0_<1EliWIsFE z8W&4q8iaHV;kb}cj&u1@5!3j(aK0uT=Ud>$`Hphsd=ua=HdM2K2S(l}pY?k%IJqru zX%xrN^NH?iTu;3{r%!O?^l`35Z8A@~bDU!jEHWg+G*1J1*lRd@?6;m?4xCqk6X%5p zUU44n5k7^9?;xBUf#MWB@G-Yl>To>AjZ5ITX=;vzpcnp|U*lrWxg<DnE^!V;|GU!E zCUV?)xVFTdby`lB(23J&yf}v<j%zFRj$T-Ad*j6d@&U3#4~O?^3;XLCj_Z)lac`+* z@a^u*1s2qBfeA;sK+V^jd$n7UYc9w6<YVS)*|bOf+oPu7BJL%)FV(2d)E52!$@T%l zp1^5P?<WJ<)kl7E&B?n);J9F1EM8Khdj4BY+ghh^Fvk_a{iVv3gWe9;;U^@3pJzvV zt|OYeW1L=YZXP_hY*-E9+TsiWy$nu`!{$BqZ7&18zsnuUaWQIo|6TmrVI21mE*Ig1 zzA>(xf5C9he^f5#pD>K`*9_*o>N`|>prxsG(og{39?o2l=?gBXU=J6Ru$v3QwXuiV zdPQ0^g#(|-ImGd~N`)Qeu5IgzWn(z*3S6;TAm3mo&LhEt^C)(!cd2$Na?EqcrHnAv z<#80(7Ev0{acl8>qb;JR15cT>&x(;#IBq7K^(Xe`lWy$0CtX?K(;gk#qSw#G9783x z{=_Cf?d8+PK5ybUyAonkYjfQC4H`{=0~Mg&o_+GvuYGR*z6_Jb!f}UH!CmkVg8$E- z*so8!jZ)?y=3W8j-Y93xK_||4grVBg(}DBEeDsXdRl63s<hAANSix~oaNBMu`D(l{ ztq|7>alH`NYlKG;+x#HFL!)=#^aZY%q%NF(gliGYvxWE3__%OBkoJMJ52StKls@v5 zg`FYnBn!9Y=Dn8VR>HmYBm2-6)D0cPIgDuQ9~aK0*r9zc?}ADC5}cb{F!qzJGnSBb zE*cr)G`+|-OB3wi!c(`ddqtBew^kj`mOS-tZ|$sVOhmY?KdpDrXhLNXqd$psZ!43v z4&PkBwY9u&h!fXgl%DI5;Ldd@mS<$1PyYI1jixheeBibIphgqfp8oA~c=cJ&aiedn zzo*dz4W>kSv?TrIp#-=hwORL&%msYWt@{7Jy5k09x?H60SU;K9tT)T;h*c5m60M9H zHwVrQ9pE-X8KAy#_MA~u!TA>X#L`%a#X}*5=d5YxK^6Gl^iw0vC0~5P>}N{Q_$9<^ z{4}wewza1S>VhWNjhho}^5lXGJh)&qMsTrDi+7{k!MVP@bzEoDW3F?-Bd&A8L#StW zxqm{HDO!oMhelncs_<8G+kQeA=oW0$sv1uG72s|C3x5>%|0(tVPpSXEsnlbG#_#{X zQosIX2Tg-Vfo9;`;uofxRHbq3`dmlN;~pZHJJmE}#Pk>D&YwPT=IrSsi>Bv~<uWxR 
zO~v?M78J@pvc!P7FHD;`XZqgJPMU@GU%G2HJLcrRt0<-aM-YdIse6ZdYksmf7x5bT z^ns7QWt^d6fr@!5PE>K6in%HZDyFFztD;dwFBP3sv{#W+FqC_&Q8I2-@ve$3D&Abb z#$SU!0i$}WRotWEb`{&Ay|4N=s<>9g6)KjhSfpZ}isMuqrJ|r>tb#meT)#9x(<#|U z<$I{;q#~zcE2mVaMMX))hV_RLz2|1tyF$eUD(0y;PQ_dmh4l{tG{ej-Hl_C)RIF36 zO2s5qe5>l;pyCP@i&V^0QBW~f#c&k^RCH3Y@u{4z16N1HdgBaD0BiYu*!ponnuZw8 zUa4xOlY$;D3dZ(To8TD*?Hzy^a!1!2jhZx<wrIS=-dRRXXN~g$H$`s7`Zq!}bIm{F z@8O;HfC7XQXg%l+KpWgn&|C+OOZVcqXzV5#z!+FstHF;2z6F;AdL!}RiXcOHyA$j% zpj&{CaVTwt4B-M?p$Om;E`uZa&4D<;9)c&WBO(Le#-S$z^k(24xKhww!8mop5uFPx zROwRSQI)O%-iKQO8TxzTJ&jOg3>q#0ctfS>3Ts{{bV<l;2VQ_H?P$hdj|bryhQo?K zG6Rl=qs+zuAE@+WV223&>3HzHfZO4y287c(E3H)ud_I!nzJttc;9fX;eBoLR{0Ytj z^v^(l9K(D-2LO+n@lPx~HNYR?Hj)r<aI{jOY@ln5l9?Ou2+nAYzK{n7^x(KWN(k61 z77CD$hD!pL_vE-T@K*pogrhXK0>|U1RuG`!CIY{Odmnsr0|<tr8uj;D;Ohx6n<8KX z&@NGF347oYIBL>T;ID8#pe5i^9KBLOmjPdgqgL4f{0W*_0r;e${n!Wg7|^i2a4@UL ztxI?(1^rJE?!q%RRc&@)g-W*qzj+4zipUMX9_c7G=p>+}KN1672K)k!(y0P=82~<7 zC=xgjj%r^C+zCfAg!L+2Kfug!-Wf{9vA|JqEl4yExB#vdbSZEt++)y{z%_83H(Cqm zlZieCP52HRHE$Jg6HbOc;BN;03`Z?T=r{-i9DFBW1YD6BB^n1}CC-Zl2zVcO8IA_v z&p>B7Cql*zco=RiWNLuI5G7H<*>EIJ_zup4b(AqM1ZP0v!_>t+r_wWk#W)u}hKvbl z7V!^dunJfMM<peEtkQ&W!xVlsP?L-GpcB>rpmhXpazIxCFTznemw??yDs(jPI2@%} z2OKp@X(7T4oCS+I;RPWPPvJHoA#N;GcDRXXMnWyleh0yK0{%Jy|L+W-VX)&oCMp@E zO~U6$la;}H5ZG~wQe!V*1)LLR#46y_JdTS5T>u<BRhcQ-z_ruN9JdOCW$iSM`y7r2 z%N}5-d?g`L@)O~xW=X(xa2qgF>VZK8I6fm$Bk<Gdn699!fu9sY9`ttL)8~}J!5qbf z&Bg5k_~F23ijXhpG&6`oILcrK@DDheiY>q+^H6&T)d0tv6#hhDN(ov5{8ZpPIO<~) z@EDv4i&!0S+I*}leyB0<>!lc^sAM%Td6|+<DsTmynIaQmvY?3|R0`Z$#&I)16Q-6c zg&PM9T+VTez&8R%zoN9<IN<kiH1ZpP8&@d(O*o|j(-rc0!1ZvH*9PDfII1vgu4eA8 z>LIKVm4pcUu2K?80~T48IZgP?YGnmX1NN$<O$J>9j9#NG&xF_E+)#T7Soj*IBIp^w zW|h7P{1+oW)_)U-$8gkKgdeWO^a8yVn7a{~gC-1p8-o*cIB*Ocjg>NB4II^`5$N?U zE`uRMm;*=rBB0L}?2_OU8sA48(1ahD@eei2R^W>tDD(ng6&yt-T(%W8Muak;-^Uo$ z=;Hw3%by@&(2IZ%;nF}q2LApT3J&@vaNG`MA0eE!Q{fi?>C*zr*W3uAau+rdMA!^` z?sN1#=u%+fZWID^5^xC|MJNTHhogaZ2{?0)(sHwbDTgqqz)uC9hogCa3Anc!J0|#5 
zK%Y7cUeJX1>(Kv?;c10u$O$EbY~VIHs{MANPhntUaM8y_$!D-|fX)C`z|lIf3V2ba zF9DaHRmM;mu=_bI<B*93z7I!f5@yw-px~S8%^1-a(A@A4j=YF9AM_~TLpWL+9s~DY zM!#WNRsrK16wOBf`d?9|Qvh&_O1}?Gy@oLac|w=(P$1B5z!W$tL@JQ`0ZX?T0YrGg z(SRa+0d5iaML^qi?ByiWh>sKCXz^J9+zq!C^d8{1aFl2ZkZi-1SS1O`)=M-YSy5MD z3=xt=miUC`#cBZITWSCy**!^!@RCXs{-x4{x}Ox84nQ(Al04x6l_ujNneB*AXjN%K zGXH>|Clh2`BO$^jmHrb*#wg+wj)tQe5R#dP_=KWLlc|TSYQ!h3QRuyg-86}skDR(| oMtC%Ck+!sKY2D)Sp3i$L-m83XeL$%u(oXucb?<;u%~y8+3#e=V=>Px# delta 26060 zcmeIad3;UR_cwmdNp57ixe2+MWVo4#Ig%hmNJuy#A?7G5#E=@=G;Y+K=uKMbafxnI zi&pU|ZB-Q|REY)&f>1*gmA2H0w3^Zq<9*)ioD=$ezTfBf`{(z1{r-9Kvc1<{d+mAc zz1LpnoLjKep<szaNl(6EMSN2JRB`1>hgKC$p_zE7Vin@c*I6sRQE+L+Z3Rm!HUKv) z`lez9aHT`*ib{pQM3X<L;kOD7eJv}S%H;mHzw~5|oAizY_x1D(uTYX7{nx;Sa4v!~ z$DK!H2XQ!VK@@AwceHcg%d@_Gmo{qyc&;b$OL%TeI<SD}4kE{kzXqP0#B-d9eZofz zJB1a&eyo9a>{@sZ67C38wEYWdg)nhkX2;w~6U-Ah?od~-8W3U;F0qb6>o#sE?q7ci z99PycN0nA;MhiGs2!yajLTkPw`xO5d>^>v3<=?jtXok8(N%u2ruy2>%ISmo3{?b#9 zvl%06DWwL9AEoh}SX7qRUM#w59w1tc#~>g|r))-hk}1|L)C61<=DRmIu0Ax?s@uyx zw(k%p7LN^c6pPJa?xRG@?<PrC5ryKU$D;IC^wm`9M5?vVQ^TR^6zPOKx&_CjN|mU~ z93bgFj$%LPI`i&K=U_^0K~-9fOZy<NH7znZ`ethJ<Ve}yh2s*Xs-iNaH9p}u!^$J_ zoaZ*1C_%=W6edKUmm~bCwP}%Zi<T<E-gNM4#e<uwNG%BC6Q#?ErS=%76zQ~l)muSP zs+MK;m4i=y=tAVnvZb_pCfYrzdmBUfE#}HzIwco*s3j&^4`EiG6rtXRM1%M=Il4@` zEK8`5sz{Q`Q>4qHbVi<uqJdQOJhT!mN4tBX-RLsL+OX<~y!shhZ%wNer5_SYJyDnZ z`j!Ps{gkoYb2LHj=EiaD7B*0)+AaKj@t@T4m(?1zYMXj=nL72imoiYO?!n`KR~Lcm z23g0pv!%sd&-F9x)7@vhqi<@YJSzZ=w~h@<NRf`FNMmACq*);6iz(8tiQ^MrO&pgv zc3gQ?l`^{{M$*(qMx*<GSx0pgix-4t${#tQ9&3l)eb`jT;BFhQaa>WEp|B7=I!p>% zzYrSo3bn;LJS<zZriNw6S!lT^QB*=ys*weD*fGL4QOTR<7Y$uW3d=+v49F*s^m^aP zQ>b)hUQV3_{u)bhYU`E^QK<&H>S{CkUSo@$!UyE0@LbUevt#smoAJ}D99K%Ui<YAi z<<xd}YOK!)(K;inKwSJgtrci|zm`z5=%ZqBa*ilHiasBG!e;#NDtq7*Dono0S~~kX z+-O7#eqr&>f%#W|L>{a00yt8o{LN#VE&4q5Cnw3;CtE%?kmL40u-R-CNnttJsM=<9 zMk_??2+Vk;{2?i;)woa1JO`Qg{{vy0@s?8Rh)NR5Q-yY^Lc>VtZ$+pKLY+}mHA$tb zQeUc49U+CaycNs?)Z>ce?Htf%{PIWU;?gtUc13A*Hl$Of3S>Y2$Yx8D#?s&q56i$x 
zPAU%A)t^U>zeyA`l4&!hP^)NiEGN;%Q(8g>&0)ID_=8$zPqhVkC=jCHPY1tB(-pCJ z1`RqZc-PfOl<uJK9U%_Iu^F#pT(GbczhfrXfc%ScU31czJYUG%dq}Mv=x8%;q`||s zgoG;Ek6cphu8Y$1y0bK^R^t^2CYI86N9*mfx;=nj2Yyjms&t%oh=YwZHj8`=`wUZ{ z8Xk$#MR@@#+fM_9g6*&{^O5NpG9im>C3#DQpvk+Ee45%gS(I)}N$TF(>}%NPLi{c& z-&wSj3rGmfyR9YoD+y-DDM`hllqgDP<*`qiHi47WL8XDB)JXe4zAq}BV(Ar<*Jg?( zE=1ibE<y?&pf){zh~%cCEXM_E-%G)#B)CcuJO@EEfjSya4P1pi3mdF2Wv(}XW7t<I z7pP2EYM5c4-ISuTki1)>WDZLgi^pLou@8vSj4)A@DzJ1+wED-Fvz*0#{#Y}1G}yXv z-rAZe49fw>W=y@zy151A(+PuFJ>*Dpg28o4wRW)k2OB_0sx@1vZ!TS;2Asp$hgP|z zSdIyahJDE#nvi!h)!OAk51#vuHn`{$Dbl4R>6p#f<}$}k=^lRoDN=Qw3xvJ~DO&Cb zqIGFlrhwGs;;@m&b%WdTTtvEj2_vA6pQAd;+E)7>mZfO5sQdm?_8g5RG3?@2<FyCM z0LrP5Nm|lc#Kx1}?n$atE?wGR_l)C`q_a@u2(dUR%w#hzyY%uHI**)UmNJJ1WX3R0 zT97VU7lfrl9VUgPn@R1YOY(7p(qkwJCJq{uCr`2D_YPGXdWforrk=rOQ17M=(P}jV z=7==084b{HQ!KrM)f@-49P9s;W77YSgO)1h@@#7Hq_70p;c3&*?4f3-(lI2H`i(>} zH2(?8h&;fE9Hk17(i17sWhs^uHXPY0(lcwiFy-D?Sc2libE(!u;wtfH?3sIq(RXwx z)@uBiN~gjb6Ai15qD=WTcCb_oVQeZ)gUD2?o*IDh#kq&pk-dPyoe^e|hhq)XT1mim zf=#3fCryOh;u$-#!q}#&YRVKt5S=NIiPOL7$#Z!g;*lfnQyw0%+46+@<>HYO?sY$V zvF|*(=r@I68|1toD|ZTEHjnVk(m*P^hGsFHbW<<Cs>t|2rv5NyrDI-*Jn%&}bVS`* zQqis|uuDp!%j$z*vLs7KMfHd;YDq;lV*ixgTC&3Co!xE(!(gCVi+$wj!K}P_nDAvV zt83mhApR1qldV4>2dzjH-x3_`<u>ChKd_!H!ZW`(uk4)rY4Ox`45NrjhlCQHFpNwp zIjyyl3q6q?6!Mh(q0+Nc^4(UHbV~k7!(5bsI4$Bab;)7uNQ;8}K0~p*q$_40oEP0d z%hTE`s_953UMpg(YKL4_<9inf^kV?>A{;;%u~!i9ja2zgTS^P)q=>{v0_xpF$&NZE z*s8IwIEryMa{mx5@mGhSI&%;*o(CmA^F($?8DKO;pGbr{*^CE<unV4X!)Syp@ebiU z#~TWH<f_o!3qu5hk+dZIHprr@h9XpjF6m=Yn9cY;>}VKaG^3HI+fcY!O>M5En$NL` z#<mIJkm=qmPpJP*jC-CtUX%<rV~moqMs81u#m#KcLVZ~}73rs@AihnBJD+1!#?E8B zY1*v%M$C}BPchnzo+?WRkhg$Ul3aAtY)~dl&M{#dEV`N3szUd3sM-fLVH6S)q2J}o zQao!j?mf#gyk2$09AMsZ+Ok7ltwIYr^Bfs44LIg7>fZ9lFwZdG*m}Uh>D@noHS-P% z?4WQ-m0NaC3QI5><*_7);dPOR2CyXW2tJI>^bYKGE{NyMUQ$6NhNno!VDIEP$~m!+ zEw;mE{<a@B)b4#9%`HW#ue-EF5!25gF(-L$Cw9Vnntp=+i{*5;B}?$h_2`BjVxNI_ z7?WXNja<=<mHUhjoYo1s-bzCo=7~io^8DlpSR&dwyz9pjd|CdAj;Ic4Ci$y2IEJCS 
zoSY0z-ftsn3|2lye+GnLs?ad}Tt)qCpnkURuFFS)8OjEG9Ak-k^z~oJulrFYv|Qz^ z7o;d1uWv4ohm<_%1v9#gU8-_t6a5mp<hsxqb_{kOj;sf15>EGfqI89t<|SYBeYuV& zfUbK3iYT}1!7lrC^82tmcCV~jG^n@yHVI-m(1M@s%S<ifg>JrVe9L5EZFlx*%kIKA zKJ4d~?E+R}xY63?=#(g3qV{`%Bh}Qq$}@df8~?BQp6sfBiay8t#dtmQVx0r}3Z>oI z?0~m<kv$5?ZSf_ps>#yvRP5H-I3l48`D{^OpdiPwj{^HQ>yI+!&yDQ&z&!s`m?62N z@kMdpdSaP&qM8@#*}@=`a9__p4w@-s`!F|CkZ|}o>tu@5cY%~PU*ET2Gfc;QC$@OO z`4eW1rP5aS3Jt~?HZa(aKg*^BhYFjcm=v5Q9Bj^h3?9NevGyU+{6&@-(oXoKGqZ+_ z?EbaKi$Z#VL?QKlGKafz2{ou03|Dm<Edq;1Xlz(4ZCkKAwuOiB!`bQZ0JqPv%A{v@ zxC)(;KTl?j;n8k$6X<lVPzItniFI$4>befbh1P)xwxX4{?=|eFCAgN+sGpVZQdPL< z!X9HsT18|#;F1aT3RrH0Hb{RN3N4sR*qV9KV)uY)V4Cyfb^~as=(Zpx-xg^e=Q_z= z3XwMod52M6n5P59q6g-FWQ$(!Ple)OaDh6Rqvg~6*^t&gLQsD;z4efg85qWr=&LyY ziXIqNl|heM$muw~n+}d^ZtSPlorW1uNUAlHKOmq<DL6(K{e`tr>_98QQ~n3{LQUuI zfRudd{nDgkDbfYH9?a=Tt<3X+ePgyuEpENP8H$iky0gp(FWpCaRL@Qg4rIDEom$rP zp{Aaa8s*oin;6@Z@yaf<E`<$lV-zMeuqka;hczC?&h>5DD-f~9KRH9TPWOQOK(l@H z)%wn2mM-KhGqsJ$H)E$j)ghMn)6KE<<>}-=aP}i`@*XS}b(x>6x7lPP4oX#YlO}o} zyTyL;jL5|=L_a3cOIQ{qvO8La!FS#dG2F=R(%mTn4a4LV_rm1t8O-&gNHU6(cMj5o zG5?&r7sB#}ff@_fRZiXvmU0V_gMX*<&D;zlw~XW-4Pd_Qf}&&UNG-|ggFaAV5G6+8 zzj{BZ(Q+_sCyK#diWCnptK`;q+4OdS!k>eg)Xu+E2eiUw%s)h<s`F_99YIkUIQeMP z%v5ex@|7>x>2{rj8V~loop1iTUy!LRN`+xdP%x<+iV&?@)KUi5imo%UB;9)i#kwc% z6e|oXQRSa{@HjWmgXv_?l~w4fK+y?`3?DqD5|il4r(31bIX2^a-(l<0^+dk?JT!rm z-yclox6L>W2{`h~F}yb82t+LLUXbGQoYgU#205E?#zBrdKy@jGOsnr$P5b>_55z&v z9EjXsgOWX!X*RnZP+M{u*K4q)f%b{Y^*lwom|}NUUdAdqv~fs6dGd1hpu=B=_{SVq zt{Od(F1-hP&@ni_18jeJEqShrbvz7yS_)y-bVsOdaezA|&Q}v}wowzCm167X>Ssrv zfJt-q5oPq`Ma%b*qUDIAXgg(g!f@<^aQ$z}#270elWZ03Ql!dMt8qhX9xJa*JkqGM z{2Y|RR(Fbv{}W1B5FewbKI-Jxpk+9(dQpAwGt^yy&S<6@9VS+N7*!f~CAXJo)opvq z+#=gXkHscuI56GEpjc+*xa*=hu+c<Yc-?-OdTOUd=|{^W&xI{4N8IJMY<i@h&pt|o zQEWA?YNSF>=5|4$zE9bP$o6edLCw*rNEDI-Im)Ux9LV)CAb;HsO0*83#9{l{{m4+^ zy_UOuI<Mp1Pe3-Y6n8lE;s^QH;jBEW^T-l~32PqAQ?diPgmI=@uCw95pUvwBp#=*S z5n5U#@~iwp#2q3Y_3a>%tBBCfLn37|8xY;w{q`_gf`+0i*vxG@y@7oYJv?Zu18s;l 
zcxOy;@dLW>oWc4#Wy^Yq+3^c!t}zjzbI=^J@o3G7lJRF4grekm4;wk%Yf`0vvtevh zOk2ZlW$Uek4-a`)hq00vuY^y(p}cv4n5bDwKG#RG+btzm7UC(EuqRx7801H&sf{#{ zHsg3?L%|7Q?6;VxfCctcG43qN{n(8AvE^vw_;Pr~^mFyBOPA2-J@+9&o}{8$$R<jS z(mm;l<&h0)vcDguWKviTW+64q9DP2qbS3&Bf5@aR{=P+fsGk2gLZg%$8}@oAqx{<^ ztgK6{@aZS)ahEPVZtbRZDK*0L0*Z<eZN@T?=$LzvEzi~R#D*R0lzaguf+$(331W1W z+~FtoTGuY#t9N0cdaDbS6cw5<5SiJV@3vs+1@_*eREN>|5jIhxbR~t3u6s6P{Vw)v z*KFZ~e^^>KaY`trk`|m3l<tcMSXK6-rOcfQD_8DmcK=FW-jwdCq-)FgihKhz-BiZ( z7c#J&jw1mJZrB6%q+2L!6&oT<`<M-goha=5hwX?p8SXk^zOAm6VlnLZxnHyEv7-h? zz_TDhMcRo)f0$cZ<~#CcqbOm2@&1~I;Iury76+y!6zihgU9_FayMRl}lfNl#U?UyO zE|xex&%tKAy_4m4Z|%O9lI%{15~{I<uieS^b`NPIDVv_RydCDHbx?qugvG0#VXs^x zQ}u~xXM>kx+~17v;px^|IZYLhl|jyBp*`9*Zw8IUs9}Up$#31j0n(L?@8M+t1=~ko zfI4l)lV7pI9_`b&-=_v#6)in);BG7b16u_-9j&5=eElb?%=HGTpR!wwM-wM~1$nw6 zk5P1UNj0qv^vm9MuAY;BOnpDj*t<W;6zi&K{Aj8a-<<vgMT_+(RXC+W6+lUs^!tfp z3x9hD8y+_-;Sad3G@EK3$ohm-Yj`3_NctXtx}~TWtX?Xs8=!sx;;qzpkQ2Kd*Gl-d zk{No&cijdxN4`%0if@?w$2Mc`m$U@r6w5O{uUT|i9S$fit>}+D=R|9oo&59~d%Nd2 zx9b&{?{metkW0FY73`0mp~ChG);xY;8*4MP9R2w(*tCsaL3IltU^518XQlDsru^;d zK8lf(^t<lSW@$6)ZN}x`Tl5?6v1joi!u4Y;xK}4(`#m<aS0ADB30vK(NB+Uj(E|PP zzwkd6;%Xe%<WayJiB^$6Pzgg#K7r#wWtyWU{W^%^2>?2%$ex0g;f2Q(4VYP*aW?TR zaC8>i)i;y3{0LvmRSB2Ek&3Lc)2LIe9fL9(+}nfykxl5`J|Du+Vv%NBCwEn28EPy- zjb*DbKQ)%4#$46d95wd*3Y8)iXlkRA|4<nliDA{b90kNxqJoRyABNe;VY^-R7@Esm z6MQ``H&EwNp^p%9kk?;fT@%{7Jbr|O(GS^IG_siqVfjjtFYNgyH^#+_SP3`btiy<8 zlBJ-G$c@j)8oBZ(=AqGM7L^fI&5c`eR2Pl4xwE9-2MJ|ZmmvdKf$+j(dAZV|VtgMw zZCGJoF!#hz|6(O~rE-Z5pyXm*#|T;hx}XR)II&yM1$8yv#(8fuCT=5TtcBW~RpOnu zvHv8t5>}<K(}^L%kT~{NqMz7>ibT~t#UnO5(dv&K6Rw;%WJRf|$d@9Wu^HPbaio(w zJ5Wt*tt9?bFGOFJgD$XPefl}oS0bA={$?fnu}>$bay52PiMen715Z(~>7c_dHe>Qu z=G*scq0cw0s&AZ7^fhxzN*0daWa&w*@&^s1afACL4=0IrkewV429#58iJy0qBfrX6 zBrLYk;xZJH@r7qLD_|fd9`u9-fM+yEaY|t0Oq4FjZ{m^?eS(f^INFfb;X@Ks9-K`n z?y^dt4qmhw=WW4B2Z!hFqB4)XwkTpKijb7YP?62qtI1xWGjMCEqQ_Fx+oEt=nP_S3 zIlDhGUI7C+n(bn}lLLgcDI}inN#fuh#2&8PW=_kuM5P;v@EB^GA5;#s%qT)GvDDo} 
zyUJ;K7tYzb|Db=#(I?2qgr_;=UNp(yHK5r4ow@DbOjXWp74fu3E=8Le-!GxVe4MDM zIB+9h_)}pLQmHnhi0+Hh(y#<9eL8I;&#>3~wQt!rMVSe+_kOah<d%qH9s(@!8NaY^ z`gMz{-VE_#-JAAkHFT&A96{jpbeQD_r5gh-J^YGH+;5NE<O3<$%mVvI@kwk*{|LW& zJD!8h3r_+}lualIMR%~o+w6G8`o9_)hlxv$zA9Q5x+hu(I>5nRo>V-}HMMx0{gnG( zK+EpP%u}2Y@Nq3QnXHu@RD-*(lwwaE#50_jP3(O!T^QiOZi;cjX9HPqN(bTX6hI4> zm0}XoQdm*SX8t5=GoXubrk-UE=-hqX9W47kbPaCVU~|FM)2hq(jmp*b2b~;nmUbIm zE(FEmcT3SV#3tQgCkKS`XV~2Vp(J1lA3--3%aiAM;T5`mpb^8G-1af+I505C{{>_A z$FM*ySe`u1^Asgq(JEjc@@)FRSmDj<>^}qB3N!22xq(TYDn6pIHx!~RP<ME%Xovk! zV;)nOgZ#-LBcG#O$C6U_3vWDUw$uPOckFnn*5NbpUx2I3+NK@lkF!71CJS;0HgZs~ z@J)9%cTkux{ulP~AmhkW=w0zg)U5`a-Gg_qhB?7dgi8$fKsV+#Ex#hHWbTl%jh>PB z<62uH^|de7F|0?auVb-pnL@bYGE2l|ECTz_ne*T&K?iS9p^k8OOZuIlQ{t+yDG$J7 zIk7n4<6CU`V6PUVekT4kGu?&cKqSf6S?S=eMyoISmFKyiwno_ryKIH-EnjArK3ceN zi@ln@Ug&y@Jx!l1NOxG)kaqnml@kGbBPY9Hk>Pak0Zqq+m!&%yCBgraSlC~Yz$KHD zo&LptR>3X~3C*ui$`X-!xBxvD+ntq<sl0S56VbhDG?EJVT%|K8$*Oa_3Ba-9XW5?# z4%N)}1xu1hKwe0~*7+HSU^bHY>0bfe`!|@I!@*0fb+&YO;NW~1QNb1t_2nzrnxR4X zf8WqV!TvY)cxa>`{L0!7>nzxQW8;SfVqzB!i^2c9hlx<er^C7n)9qNd;gh{epjU9C z<Qe6=zr3)5=CfVHv-r1|*ND}6`Da`tto?W#VtfkQKcbCk7dV#K5JS<sXq>zSv0|V4 zMe;fj5Goa!Ffz9HE2>alw44qqsn}HC2Cj{W<IS*zKhF!0dw^SkcTk|^E^-GYf_pS4 zAAn5@_Zc%;yjjV}LjF5u92Lq;YfbJwk<#Qss)TvTDkIt(IASWLe2>Q$6|CE+0m8%g znPpU8R^nmLj*pttd>Xlr2KtCu4G)gI4R`aUpICZ^pK#zOHX|dk{V5#Ou=8=o*O=Yp z@6=c(VtFobbd7^U2-}aMbk9b!D;b@II~mM%bXbR_&#;Pr4x+2GIQ>M1Xy^KUeKU0b z=+9UXiH;_-cl3Gb3?7>c@`!XcZghn3M#k=UM|a{`tI&?VHBeL<bsxDatZdEpEWe6Q z%?zC#{26Y1WFQ2KQgXE|t!9q>5}$f^dNv+?LSBkQW*@d%{}{H8HZhyWvaMllLXU!@ zObi*U@?OM>ed-E`{v0$KH)<@qAJ#oSTb0X;kzWNxbJG@c(*}?CI;oS>AM6*C(?f|U zlT!g>hrJeVVppQ|?7%C|`oF=@TEF-TJNHUAA#((C9y3Y!rIF=~2@%|`vz22$a{06d zwx6vyR$f@c;>Y?L?>@u{ZCMNDhUYs~YQ!V6iVxWIv2C6DLK1TRHSD9Y(|Tn1V~0|G z<1OXQm#HpS>Q1RbI)bdLh5kxuV)S_oH80P?Nrb{0*0Mh1eiiDAS?gEBgj;J^+N%fs zn-wc_>1;D@fZ}3Sr*vO<%{&3ykbVsdAD`mW3(LJ=VTLm}`&PsAXQ*!ma{Frb&iE2x z(*@>}`IZpSkFCgT)%NEia?IAE5TnibJ0kFrb%2p&Ic2Lmi3J>2E{&_jx*f8L{hWC$ 
zY}z|Yg>E(@2U#*YR-^5x-cV=-fm*UxvXc{XLUNOEjdfO8cNn~o-AB}zz3N??c$XL( zJ8@9ffIX<{$@4@*(lx2PezeW_kA)6oU0zH1{y7>YClnz&o+lFH!__5Xd$csp%(Itn z)PIAv>XB5}9!xA6KlujoV9-lnW=%fF^jV4V?|~D271!4ZFOuru@8hDc7M(CG*#Igo z{yeN?gJlKiROxoA^f2Yfb6u)6zM_FiS#7;Tsc0YhX?2sH)Q^IVeVSEeg~Xgxm`YP` zfGqaG=pivK6T?}KW>GrwZhk2iS<ONwMd)8wIEqF-Im<>(igjxPPSf7-%~`f#l6Sy* z6SkI9snYkjkCfb_<@UgQOpJRn%T=yB%g#;e(_Q~I20H@{w@#~sZFR_#42uV_yUF5k zqz3`h1!!oo&Xde>9p{2}oi5ftzROZ4cM>k%WeX>J3p?+!)sy4ezk63JOGHTmN+P=_ zTQZITU$icvym%64cd^)*a+f`uoYdTo5-b&taX&0rQtu!I-2a0Oo6^pG(_+#ObF!HA zCv7Hwuw_#^3I_|=p(&xl8-K80r^NbN^N|yeb7+T1qiRIOG{0NGVy1Qyj0J4+)M&@; zzmruluYhfw+DqSW4As?W%X62{R<Zi2ejP#!(3eJH!EXO(5k^XgE<;gH(2dJgxB^$< zm?dwJ)oA*iMP(0)$^h45;^au6s$X@>F?2;I=FEzV-&)Ucv!>(qkyPo=lq1hD|6MlL zv)2#$@m1_}c5v5|Fdj6eya5vBs8iho4Z~EoluxK!`IDD|9Z#{^gFdV*FVax`7}F;W zbEiikm9iVN<>fdH0S}QyW7(Y9F`;Wu!`g_xnrJy|!;4U+xPWdjF$`Ap5>K*zg&o%H zcXoERmvd0LauL$qu4jMF?i08f?H(X`{sNOiM2fAvE(iasH?oJP*x<J&>ZKD+nk)O9 zV29oc6<Tg*zrHoyWf9sAJ!vi9s$`?*jP=dGhEtp_o_H!aAI5^VEjm`RGjlcx{-;^) z+%Vytxoqv+F3kteRc5&dxw#UcpKD%WH|BN~vOZ(p^TLI-Rjk)M(aoz0vxYf2h(|De zs@TeTaf1Fgc5a?uNZl#Y06Zexh7+JT{6|Fb5Trt<YlInwF5@YndVJcmi23ebjYnbf z!?YR;IXPO*lYWW~nr{*Y&1E_BlUoE~Es4@kcx+@d-k(EsIotVKfS>Y~qnPywtFm+$ z*ral!F`AceGTVGpdNFv)VBz8^FCc-qa&sVrbBLz<V#~LbLBgI}Hycx3LzhCNZy8Fv z#SPRa9(klowET)E=*Fox*&7SIg$GC3k_8>}KSiTS-xdKieZ$fqeVZH?@DI$PS>wn# zCEXX5{U-b_SeDyC;&fX^ie&${;ZSO<qvdDb+zsR8hvt+J_d!xDoM<j!Rp0=Y8tdKU zz!&91caX3evQkC8cS|{3Yr!*UhNislm9vBjPnNmRPk8<xwqW59;oIZvEMh<8vHFFf z!Ye0tyD$2l_a1s5x}cMHE53n%FvM^cY%gMU`E7&`Gni*VpAM(*p*N7}sLD(|j%mXl zbv3UgZ9IFB*fSn)8{k>tDZHC!DG2ksbqvKoAIhK)z7Q{n<IL%lyoDVp=)g~7e-`-q zA0;91vNvJLSi<)pifL<&rzXGs#ljYM^!n+Lsti#`k$qLE@MA1{alUYDB&%C|74`}{ z{<hTZJT^UQVDu$29pZ1EqkAZ>Zn!n$^^oJbOXrb}H>2L93ImF0i!6?BRnC@JiruDv z4`JJ>=rX)_H|Bd5R2U<?mcvFAju1j}*!IHi5x3t^^a3y0o{``E$c7hf{#KN4ANcfO z8wpUw&)#7AqV|I2OBPoY(09g{*rsOFl7a8~T}2SSLYvVHGD)gQiM|Sl@#`<?;Uog- z*rvp|H`uzOb|DY8<0kR&ht%SLzd@<~ZakiVtJ@O(^N;9MIWx`ki8M*^DR4MO|ADJx 
zqNNHO&>6YsM%swTOOqFYM~`9TjIBbIQhZ=Fi+pElM0dKgr~+-f>$aZdxY;4plVoa_ z^<&u%5ErogEIa(p=nmV4Q1wb_onQ=_kt0~m0nf_enUv!rEr-)tmi%sJhd=*&j+JMW zI*=pkzvNhOhMj-6cYgFiY<Sl3cIvKp5qHytlf|t2Si_&8tMJ4&C`#no@F3C0Vs@A7 zKPN-MW_GYuz;hX9dE&XCGlUnAu;kjHJ~;SvI>q!;Rpwu^uugf_FbZ)zYq-zZ%IlY_ zo2ano0DFIlSy(iTITk1RxR;WlVR5m^zwLW@VCU{*lZx*Mtq!vE_lAdx{oo_BqoXNU z-oCd<{%uedoDXJ4erqqQc(04VO=HjB8|L-JYiLVxa;<W{^$@e@6}mSrvK>;(_PeHH zW-8_T-{5Ar1OG{}b?X#Y05&EEWWy{DR5L^<83Ly(8L*dq4rx(JtF;*mz`~;ZOdj<O zyDf#gCiFpdN+pM25H6iyICUFMOs=H%`pTE~uq8`d3+Jw~ua|nZ7?nmwnazAua#4IW z^6R&E*DQ7BI}Qp*Ay~;(O$qVDfT*Ex1?@{NF!d|dr&e<AkIa2pFX8SkmcGnnx`3xT zikuO0NlRvC)Gfo1G!?V=7AslS+G}o@QoNz?zEYT4_at_DS$n<(d%SFqup^7@T;ASj znyl8E*9FTsOBpNc*X*z5!NQ$h%(JAMaO*3URublRYUj(j8ihv*B{O|x=qcTP=;kBY ze_t@k(eb%tkdO3cx)p`qS-UZO=%4d`#YUTi=mXvL-E89uqg$shalm33aPs68Hrwt~ zD~8$iXtn*tak={oWW^4s`0`VwBT%y%JfemFQ+=pYK3?%{?-x))ov@5$tU90n;d=Cg zJUvNaVxlCcO7~KwIyvcUvR(0(LyIyb<3-e>i^v0`is68>8NVAxTzc)sP{^>W;Q*SZ zoP1D@6Jrslgn5)e=U6^TTaB@b#2J``htMdzz)OPpBrskT;9dxPst6R20Hya<)5~xk zVk1ET%aruYrt~1Br(!QC))gUv1}_Bl%~i#1#@HsgheQ<XG9ZK-p`;spn2S>0PSf=R z3s3X<Q}(zD^TgD+8)h7lsJo|<S>J(Dqz0R@{aE(OniTyJSn^`rS#yldcxepVxyG~g zHjUH5X8eXsMzn%%zIf#gEtB+fk!&-r8^h|?gb2OgVTQGx-RwVmsT(ER*r2tduwxrr zyLO=41Q;4kCix#**<Wkt2v@hV=^q4#WY9()jECO9V*LP$wU9S&eW3%9TiK2e+6#Bq zvTGmA5vnuT$PZKf2ardkT`j$Qnn0W7_KIdzFp!XQy~GZE7%fC+u=^i|3kyawzjghD zn9*$Fx}L(I<80$Pe_{Q5?9jRxVN(YCdtI-XO&QSj2KcNS7I(jXT{-09-bLxw!7mwY z!H|w)4{#KtPs{RR_UcFN+57983z_e-PuH&%-dV*GHpB|sN3eMt`un$osh~8D9$vvk z_;rPy*0_aB*!2x<g^S0S(?>0Pa3g5fF&ptEMP?G5toSa1PKOC#<B{aQ8x&Q9`f>QD zL9<WG4f&|Qmylh=KKjU<|9T;6lLorWlU2e;9;*^2d6-H>$OBX&QchF|7rDF2NRT5{ zLX=ypM7kWP5*e~lB{F4KmB^OuR3cV>Y*FgWk^i#fE7TlWR#;L2R+W5BrI!$0t<oh# zA64nKM3<`cMxwW?^fsb5s`O5xSE_U=(MwdijOYa_T}gC)jvB$?%*m5fx`yZsm98Z^ zRi$O36I8l^=opo5BsxN+$z+!ORT@w2IN4pL6}O+gN|TdNZhV`1F6Emj^1B*QJc+d` zt+@FvsB|pJRH}3W(FawUyoK^kl_oEtyhWvRh+eDGbBJE5(gj2psPqz|&2KB?FO^Uv zTTNI?^jMY7COTcEw-KGB(mRQcRq0Zq+pBaL(I%CyB-&G@tBH10>5W7`U99xJhR;W& 
zL5<W>!cCQyiLO!U2BND}x{>HImFBQ*%X?JXk?3tI?N0Q1mG&XJM5Rqc7pimw(Q`oO zV-6xIGDA&>B|1~3={bcwT%|>#MU_q`x~EEK5FM$~nM8-FbT-jGDxE{Lt4hxy8jqD| z?h8QY%YPRrktLKMt8@v`*Hn5f(bX!wk?5l;y^ZKnmEKA8c9kwAdLz;FmJEG}1PAXF z<#lL!$3}9(>);zUDX-(qgB0Gt8;BQ>^-N`DD}@Yn2d7x&j8r)p;2c&tNh$|!P5n12 zC#ry@Z#Ly8sWB%t-GuZWD(9iP$|JyuR5`a)4jiTWRx0NQl@kk&zsfnHauUF4u5!Lo zIU+cYDrbYr$p*(OUu8)uD+erBl`~i6%mGJGIa3vmvfF_9&nL=o3{{!5?SLt(%y^Ya z`wy5`Rb~V+wc*tsoMVvL>st(N73*IfRKwOYX-hj{+4|k3TfP>A9}euEzBQB=y1vIs zw)G9|JRg?ijI}mfGVGD!<c)M)YcG!hL%ZzDYuJNrZQ5-Zjn7ny{zDDGRcsxdT==AK z9qvSN$ITINv6@3uv&!J7G>>KjJ_{CxjAk=Gi`H*h^P-^6YuJ~cy%n_4m4}&SsK-Y< zC&jEIctNBrC96!dtL$gkTj*5A`hNaMa9_uIeUT=d_>ft@=q4PDV#mKYE|h(+d)fB0 zJina1x+7T7FJ+5&v=ADS*y<e#j%Oq~#<wN0pLX~-eyT*4B<+5_L&pmXJ2KtQ)xxb@ zwqd79unl2{cg6`rXYPKwv$dcPUiPASJFF~ncbI;xKYB=(uZMig$|md%Y8R7;BRwfB zmYA?2zU-}>cjQ&NV}(}<H%HB@6#XUpVs~293~((bY*Ik&mj6D64Y&N+aq-{cdN%9z z|Ke5JvmIU~FP5;!UZ`!!`t1phy}q<5r+g123KLb|QYtkL&Lw~ON0l${dMVm_Df@U& zH&-thqjYXdx|<8xFMB$)83B>tSh=GjsTi-{N+=XJ@*@spUg4{mUvZWH^a~dIO;$qU z7vw&&uT6ZA)XcHKP`F#bLQaC)mL5+I4;w2=OUg(ItU7N<b|39IKys~3kz5UimE|^L z-`?!%H@<?YH+%3+q_FpM7P0qGkGt`7fvqLua}ZA*z(;2y-mF5mthy_o5yus8?%}I7 zsv(mK!}l6CW7l|Q+Sg4e`iza<7cBhPlP%cys&JqcyN1}BR?N09LilDIYf~C7-0i{A zOO3)?+t}37cZ5H;vUB@GocqSnL>US@+1RuFeyq#>4+XuFB24JP{@H&<@aVyg9cUNz zJCqtNfLV@@7pWQWJ-Rw3wL6Z_>AEX(?P7;l$CBcn=a<$y+Km3OEaclRet*E4(Of9P zJLJ@@D}7|cs|=^Ru{Xc<7b;@e@^6hjY9UoO2MJU<J~Ps{R%)|9qKt2X*(I@HA?If( zjDm<fODPg_3`3!a{q${1Vc10W<lDq3T+9`f;$GDGFGuBDTmGA)GG_}pDuwke*n0;} zT|b1uqqX!akWeR8o1Ed&RXmj+s9gEYmx9kN@Kl=I#xAF^xw4-=eU@)9x9>#LU04rl zY4v@e(;egV*_+XTR$>;tubI7>&HgSZ|Em{5a!-)aWy;-0`5M(n`D|$9sKnuT4JrX@ zYL|yP)T>V9gFZfBJvQw7%TQFL=*mg?_Ac6MIwZXdWwnO?u}Qsvc7ZoVu0j%RJrNW+ zOmgtPWz~odN1o{9ox9k~Ljhd{Lm|EQNfPfU;x@CAckaLQ&Ul%ZvW}yy5$&Qid3X!X z%B?hcymNR~_WiH&&bwIIp^n1)L9F3Wg5bKDMI9c|?X@7Pl^&(zC4<sesc~b&a_BR? 
zIT(!ue4+O$-b9r2##y)vVt`KYVt0M@YIf-GYr^#`)~0NZV2NUz%i@Je1?-oy6yZ^4 z*5OFFuxldw=tx)g-jU`)wo3MV&ELKhFL~`A`&wIG`1B37WLp2w4VloWnuSdpd#Spy zSAd~y?86tbd(+^?7|61xhYF(y?zT=(<z259P^(YL_wit#-I~!yuokd3GXw1>pnNuP z<}~|R&{Vd6=24+{KFfc7iM>-JDt39JfVV#ZzYJTR6XM?&{<9RzaYy=eEIrY1=$V-H z0B1{?%^2L4ozCg*=l}tmv9=9!do#fCN)AQ7ZNsA947M+Tvz%qT`IG%Ue1FDz<ZcpN zGj{)!`+*?Dr?a_vff%h99oUr1w&aC|Dc`G_!+cc7uJj|6a%!A@4EEev<KgFp>Wq1x z*^UGXev??x_pMrwX+=Fu`UiJTf4sL=?<K#M+oaO+B9OE|@U(X4Yawje_bzU`SD>WV z#4NlS++p_$w)OjPVR{xj^L>Qr^Khl4XV{)y?dY+Sm%Q)I7e!UQ$-K(LB0dENCCZCl zq|~ROiJjy0jX2I{wGp$<VOwY{VH3+kjAJ$7m>0tGksS6xd1ql+39Bj(NW6jWmsl1= zyK;GWlO{FV{lG>84PSmJ$33iRc|P>9Eh*Y{KRs@fccU-3RT>6LuD?On*B$S^gtE3r zBTNpcS(LuF{GvVUJx?AF9L1^bc;_<QF_h&VZ5P;=vX|i{`r~zbv4EfxzEn9px>nuC z%h|U_(|pPxQev5p#suT0*VLAYA<T5lug5a<AsO!jPNF26v2_R@BKL)>+EtAQAzqIc za?dD2y^kYFERM!w!{3%M^RZsSr_0!$V<wX)9t)Be1b5If^DHroUa`o5gD9ogzS^Q2 zy^Q^RY^pFbhmAYly<^+gn+Csb0|tPe=M@3t;x||^(hG6wig@=_^{ubAvZKel_S+Fm zJA59T`FpTjRcAgfyGHr+Oxn}sPXX#~r|y}7c)o}|vxRJZks$lcga<t-VVjkfc#W8) zys(c~B#X)x!RHG{!&!ML(on1keeg$TB9bIL#XBxtq^~#W6>6LDu8AG37_Z+jgX&XG z>9#XiWTih}&C)7^-1lI`BuUQ_4J#|exGp>1WD6=IjN}edK3O4`5505hBnJ#)`zxb* zb%8j2siGNdIwRsxg%ldZB>C)rXiRk?n6k8mW_j*h`M_o(%sKK0(^;DnUE-bLA%oBt zSe=v$uWDkK?Xq2oKFBkvAIIJLN)cME34Npry_>_<orn^~Y+`3mL<+$<j6d0iKhN5o zOm4aJ4RX0Vl6uai3Q+R=`dk#e<_#vD93uGaWIvq@NC<~mQg`f><~;g}X4fY++aUp& zU@bb}+uSodkQLqq<Q_rk?;|~pKJh#&T0I@)*2OIDRLI~l0m|IEVdmsfuhF=+r6MpL zMFUASZTL7HndcyqR@_l4uiJ#lefho_ef?^Zn`h(M0*MV_+fTLUx3MdytW#c3gW)$# zEIOapOnz?w=&_>ZaqU}uv6~wfH2s7@(N)oqRHJ+mmr7r1hAYOKGF5MM*R_D0`l!Pf zT8oaf6D?0_XMaa~Z!>V@2UA&9Rm*l)rczh&Az`I2^s`c4j5yvsy(pJ8C48kOWKU(y zPj~T~3%2@30rsQ#6vJ2W#pBl%<PP7m38&i%r;C~ObRXeX4Lf(*bHLde6b`%A94|`O z#dZ+AYFKszgJd(xKA5P(bnaV6!C{6kXz}XW6ENv9Ewrt({Lz%Anq)SGbvn~$%;76^ z9NVzM*Q$jrYFU3(9t}B5tS-+*?xf_<(<I=*6y8|G;|HeH7LS5crDLdvUY#3^SpA5( z<KX2LrHc@{bD340@oDc0{=*uvrA>TzL$sY0VOivb%f}~EJE&CoDoACpKKEwsXT1gM zW!C0wpx{x&QqG?C`+O1!Dr>77$Wo%}oh9A9xh%4JcwkTPnw)!;S;{h>BuXO^=+{1A 
z`)sO?Zf=30C;x^tRh8tcS?qRor0ZwkSjUH8yXcn1LeIUKeh@`tN?TYKMsS9rXqbiM zLHh*rw!vm>W+V${o}s9v!heAGpP-6*MNyPzM=<i_I}_0u?2yVgiWAw*b3Hvv;kFko zkDlkb$|+b2(6vvevY7L&g_&M#-1$z;OIM)H!<2V|zJD8z`!E;>=lunTiR|F{#P;5! zX{wvv335_nnN9BmeRiF6P<<yT<vMHgLzK|v8XNaRScl66Xzl;-PLN^pe|slr;}2}d z5AB7$SJ|~6dWu_cg*>3HS&@g2R&LRkCP!Jc9YyO%Uip~3Qa0h9j;eL|>4JJ6^;+<9 z4ay_Ov&;*TGb%eOuZJGMfFvYg1I31Uy#>Yq?*yYAFsDSAQkLJbL+Jw)2Hp<Ghxm`4 z<DrkSMRP?YqTCif2c#{0p7MVo{>Pj3RS6@I(?q{@A*H3u-)FED7hANme20Exg}SKw z+~*T;(9vBRzp0|gN2>Cfa$yGRb<rnoTE_n%u)i6KkjH3wBb)N7bzw}-Nse;eI@ISY zgiN4naa`!9jilwb$Ag``=r8=13ASf5MY&I9vc;Euo4Hb%Ul6~AAiu_9FQo_rRVgJU zGLwCB*=V0Gula*S*vU)IE=%%BT|7_9okp=6mx6_=@l3cJ>DzXpI<<J<fd|QD>H`mF z+3pWE@UpLvJzl9=u_V&Qqv}AR!+4bK&97wpFZX8+LPGu$6AT0Vt=4dxhI=)v*6=3{ z8#J^JR`dC3*j2-!8fI&_NW%{`+^OMF74x|Z8snyh4>WWLQ7Z`4FjB+ytyKO}4JT>* zIT|j}aHWPDHQc4)Q4KFCh+(~>F&yyCIQ@lb7^`8bhEp`0r{NL}H)>d_VXcM@8ajrl z<@#vYO2ZgJ)X(+O7=tvNq+yPR`1K^EVef0WQNtY?9?<ZphSeJWq9J|IPk+xfbjR2J z^cM)E`ngDrF+jsf8qU+OM8nTDJfLB<hIcf)ry-|JjjM+K8g|sMmxe=I@ex!%H%VhG z(r|}{N+SMC4dL0(LU_Q<E?fe&-BVPtW~7Q?^MNaZTj=Nfq%w|n1tLq3SJSVH(KeQb zV8R!irpDc`Tpi?;Z+faqlPyDkWlz+&lN#kRwKy#x`g7OfI8l|qosU&{4=tXq#jz`? 
ze_9;dobtEvkt&ZfkpA+yNR5HxU-_$TR0aIBc#altsl_=hPW}b@+xbwH4^*REwiXZ4 z;u{~R=_V~6sl|hp_%Q6mf2#s>Gy#(qpR2_=Ej~|+*WOp<pK9?MEpF4|rMn;9$~WvT z`zMhPeSN~j8Iw)sH%wW%lP8!bn{smB$eKKBmML%6Yp+i=wGN*#t2G<`a4c(h=*x~g zyuG`!(N*yHY}P-cx^`qKyCzQCHKt9;EFHV`cvi@@XuQ>pDx%Qw5ePefQtNvK^y^BT z&3h7MA~8}AJM`bf#BnDzG2U6=oTD5f*|jGo!SPl0{K){HCfS@eIG_>kYO-r;dEP(Y z!(h)DVw!P=D3_cj$v@h|bU|>up;pd&I&z+q5QaD7JYyU<&nWjC4^JTRkeA|3c|CCW znGjy&H8^mFNhlxrs1Dvi;2eeu=^nn$obOQ=*6pcBKINrOhj-$*g9ss7y(dB6_*eXY zYU|-hjyr+SRG*Vx$LV9dIX&;mIplC$Q@gfy#^Q?NxOkLJ$h!)hEAPSaCRg^xv*hMY za(8-goO4f(8*_sle-;?x_$tTI8?NdQIAaK$&}DRcsCy1?&vW(@*_~&7lpfH0v`gf; z*AQlFJ#ZqCJ}+BQhqONsc*qiZcsq06liWD(7(M4bv_*qQt)W_9_Hrf`_r+UucqMhE zR-T8mJ?D&(#2hOlPNOz5nH$JuCUe{_O(+ladW7>$nOfL$Eig?jq6{xGJ;h5NZTi2= z69+mM;rtEu;CW!zrqP%uV&$Zut^bGq>>Gen1;M?RkPWZ}lGaVL1&TCqIT$###m!!{ zn0lT&1apQEg<SOK8|28j$GCIuxvtgDWsW(`G9A(>e=O2i<Wrv>P0gvnbJ{|f98DP& zWr~6_ky1qokxg=aU*WjX2x+yeLio08mh!yMs#LxWD|+g)Dv$5<VzSrI<hU~k*|n?o z@?D2EW#5p?ac2>-YpHeU1L>;SmGhnC!uiI49%^i8QSDJi^2%a0W5y8{*DB)Nqgfoc z9PzJf*G%O3w&@-o4x9(_c|_6NI%hW3GZ>rAEQIMltND0kt+-&VICF-fSSw1sden=$ z@zX_0-MPlHHLe057UH2uJ3-n}k!~vW)gnwI!k8OtrU`stpgIH2oO7<KGc*H_tQ_YL z$3#`_nq30l3X`_xM-2r#zAMFh*zrMYCfM<Pd(c!=qM!V~l+VNH$Qff?IAc_k%D(JY z@z)&p9YS{Pn)7yi$N<W3!p8ppl;`w*7?KE0L+%^w$Tc5s;F`y{am{nx%3O0?GSR4B z_I#@_-$)lODCScxh+ogSH@MZlR2pmpsH+<97_FIO&qusimb*@K+z$vwZJ^Z&b;H17 zaAPpIxyIUmPw2Tb9M^~tsmad)?OV-p=}r0n4->i-GW7`BgsutF@$auWqT^dSQoACl zU6BIslkXeejPo|fVA)4=-h3qIlCHKq2Xn&*xpF~f4=!kuJ9=s0f^vNtylOqGo0qw# z`$p+Glld*qG-)<x8lK0QV$7V0&*6M)y{nC7S{Zn^N7<RM?bP50M4SG4k@45SadS+( zp^3+3fVuG=emC_0TF3v_I{yFDI&Q6~<o~;Mye8L)uURwJnSbn*%nvi?;$O<XA{TzX z{l1;<d?dfuo!{6jt>k^xc>DhZVO&qxH^_(o#ePD%z$eg(O4U%*FiFD%4SQ-Bt05M> zk}pC-lZNgZavJ`<=3D^ZDgOemidSn`rD3IpM>Q<d@SukH#89bbr-mCfT&W?>K_$JZ zeEbZW5?`R9S;anFw#LZRFkQn04P!No(J(?ocMZ8UivxM@HjSKGeuIXxhP4{jXjr}G z9K?gSY0<SB7HF8QVa6KYAbxOuqfMRM1`TBmYc;IauvEis8m`sQM=NfL7N4VGwub2% zCTJL;p}UH$ID3up^tsv)S;I07w-K%}P2>IHcNn}T!^GEgsI^mvBF9l15+@ZS<F$Bi 
z6>sW*(75t7rVu{CxhdN(x_#3^_*T4AhN~(kuK6I8e<R->8+?^J9?5y&JjZv`F~C+W zprGJK06p;3^lyKj^91H2EC5{q+=5U78N$`@|3!jc3oOKi%?C8$WE|PqptFIw2qa$u z{EBGYEqAuWz0Qg|$x1{@fWIPCgSLk&J05}P1mH-G&H?V$=sm#O2sMzAfx&?scfpV6 z!hk<$^aWs+iQ{fUW<BtG1S<0=kPpV)xJ5oH1CfkC2_mpgqkjhqc+Mo^^Hh7_dIahL z;m|O3v~qy-fXyGjoirA>6(I@qPT+ZjbkG-oF1R&ifOZA$M#urZ2Y4<Y|Bc0ub6o)T zXiF`^4|W5eAW&sbfxB>#+DI9It{rf}p@M*i5Nbh}0ef`BG*MpQLj;=2M&M&yequmB z1<sCCM`#Z4Cj>M2`L!VC;x@Aukp;kI2+BMGe~iXGAAGV4uXKS42O4e#4juz_7=iT& zB>y{ba5t5o4qU9!4ZzbqFa^k44Q!2@5r+;(0_VqL{v!}s5YKU6B2Ym)fkC}6?BJV# zy%1=GmH@XQkPKm^MiaXARx36ElMu2YpH7I713CxzCW0As0dO(G9MFZm^I`HNK&ufU zT#i6Px(&E25tV^o0=z&0%0>7P;UM^pKqE{fYLEz=mxNmy_$z@G2&7LJfR7MJ<|**& zWK5wKrV<$24`t?~P$I@6PzJ)~xJ4yEs08SN>l5+e#o<P2bOvx@3VH*XNx=CCRKWt^ z9t3JA;qMwv7&1WR?*#sZumyQd1F@tRreQu2DFBuuP(eq5euGrnAGjBR3M~aD4OT~p zFlGpRv&c($7s1h+=Nf<oLy;Fh&Pe#raE@yaek1VGNK7kexT4S-rF{5xGC1zD(W=I7 z2im=&_Shac7oh+d766Bg!72bf9N1$l$885quSFM*LkB<;ZbBek*#d0#s#*@&&EW{t zu?Qg3_$z^by^8t2h8-<(JjX3Us6{~qK>Y;t473U8JQ1z+!IA^E%2F$i0On1?Pve6> z2Y7O_+JP$IpEE#%-vD%Z9iv2ffl&z5h#26v2(&~G=7U)FCcJtODgn02RTCnB&1RwZ z;5z~rBT$AyU^NBk1mQt58U&e2U_&0}95i9!Y_+nDz{zi+1K7l}fg9$iqqPwjJ{R*( z+8zPoCIZbd;nsP$k0Zl2;0*+-tQHtNUo8|aM6Rbs6S^)`b%L-6fy!9|EM257W5Ra} z)J<#&aM5CDKgusyjP-ARTis#_TU$6T6A6*P9SAc(?*v8{s&ou+zeXPfCKaiN4dGpc zLdZ7&*IF?upb4ifLytkv0Dgo(`cVaRT~3|B{%-;?{C&(fgb1w&G*3H$84SY;K4Io6 zY_XsTZz0gA$iRK8Rk{?|h(LJ>zyAQEh><u7eC0#*81z`+fpzFG=!3u>`RlPmA<`4r zb|dB+bbH`h1Ud@{H+`b=w*YmU)XGeR2umPe3*5RH6@xAV=50ZK&~t!?5U8PLK#%|6 z5Ch*6_!$Bn)A`#$%-;z&Arck<J-$}w&lC6%p%(l`V8(t78P+smw*xruLB|3m1gc;u zupWVW|2xqKG1QQ;{|?Le5Vl*;g}@UCv^P`%TO7vxQ^fNy#|=cFf>MDS5U8aafj!F9 zmi7d$K82n^el0M$N>w@$cn{$q_<sYlPpgd})SrQa0{mEDLN&)d1x@IE4hoEA?{h96 zLxc!zAZ5V$7qJwPumJcFfvg?ZOIQU6wESv;Q!cA?Hjul*aZ4dXcm`n`=xQMSoIQ;M z_an-_iqQq1um*t?kg)SLoUZvuhyhW49VZ$I0lWXCZc06YZz22+KAiR3hX{&d0?A!F z2aP5qcP!C_<iu=`h7yt!l=y_?v?M;^7>!SuPku%cA|xLn(S+m*B%1KFMw2^`{CLDC zw5wHVcVLu86Q*f2VYWsSF4kzm&s4e}TwLTtQVSw9{;al?FcN_(BOIvF=|J+15TCG+ 
z=zZrp^4)kC%RXBjBl)48m7A(J)oiNW)VPV;oV__`^OEgrx3BSen~&I+^frIc?tcNP CAY45F diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/wheel.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/wheel.py index b04bfaef..1e2c7a02 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/wheel.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distlib/wheel.py @@ -26,7 +26,8 @@ import zipfile from . import __version__, DistlibException from .compat import sysconfig, ZipFile, fsdecode, text_type, filter from .database import InstalledDistribution -from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME +from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME, + LEGACY_METADATA_FILENAME) from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, cached_property, get_cache_base, read_exports, tempdir) from .version import NormalizedVersion, UnsupportedVersionError @@ -221,10 +222,12 @@ class Wheel(object): wheel_metadata = self.get_wheel_metadata(zf) wv = wheel_metadata['Wheel-Version'].split('.', 1) file_version = tuple([int(i) for i in wv]) - if file_version < (1, 1): - fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, 'METADATA'] - else: - fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME] + # if file_version < (1, 1): + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, + # LEGACY_METADATA_FILENAME] + # else: + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME] + fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME] result = None for fn in fns: try: @@ -299,10 +302,9 @@ class Wheel(object): return hash_kind, result def write_record(self, records, record_path, base): - records = list(records) # make a copy for sorting + records = list(records) # make a copy, as mutated p = to_posix(os.path.relpath(record_path, base)) records.append((p, '', '')) - records.sort() with CSVWriter(record_path) as writer: for row 
in records: writer.writerow(row) @@ -425,6 +427,18 @@ class Wheel(object): ap = to_posix(os.path.join(info_dir, 'WHEEL')) archive_paths.append((ap, p)) + # sort the entries by archive path. Not needed by any spec, but it + # keeps the archive listing and RECORD tidier than they would otherwise + # be. Use the number of path segments to keep directory entries together, + # and keep the dist-info stuff at the end. + def sorter(t): + ap = t[0] + n = ap.count('/') + if '.dist-info' in ap: + n += 10000 + return (n, ap) + archive_paths = sorted(archive_paths, key=sorter) + # Now, at last, RECORD. # Paths in here are archive paths - nothing else makes sense. self.write_records((distinfo, info_dir), libdir, archive_paths) @@ -433,6 +447,22 @@ class Wheel(object): self.build_zip(pathname, archive_paths) return pathname + def skip_entry(self, arcname): + """ + Determine whether an archive entry should be skipped when verifying + or installing. + """ + # The signature file won't be in RECORD, + # and we don't currently don't do anything with it + # We also skip directories, as they won't be in RECORD + # either. See: + # + # https://github.com/pypa/wheel/issues/294 + # https://github.com/pypa/wheel/issues/287 + # https://github.com/pypa/wheel/pull/289 + # + return arcname.endswith(('/', '/RECORD.jws')) + def install(self, paths, maker, **kwargs): """ Install a wheel to the specified paths. 
If kwarg ``warner`` is @@ -460,7 +490,7 @@ class Wheel(object): data_dir = '%s.data' % name_ver info_dir = '%s.dist-info' % name_ver - metadata_name = posixpath.join(info_dir, METADATA_FILENAME) + metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') record_name = posixpath.join(info_dir, 'RECORD') @@ -514,9 +544,7 @@ class Wheel(object): u_arcname = arcname else: u_arcname = arcname.decode('utf-8') - # The signature file won't be in RECORD, - # and we don't currently don't do anything with it - if u_arcname.endswith('/RECORD.jws'): + if self.skip_entry(u_arcname): continue row = records[u_arcname] if row[2] and str(zinfo.file_size) != row[2]: @@ -605,7 +633,7 @@ class Wheel(object): for v in epdata[k].values(): s = '%s:%s' % (v.prefix, v.suffix) if v.flags: - s += ' %s' % v.flags + s += ' [%s]' % ','.join(v.flags) d[v.name] = s except Exception: logger.warning('Unable to read legacy script ' @@ -670,7 +698,7 @@ class Wheel(object): if cache is None: # Use native string to avoid issues on 2.x: see Python #20140. base = os.path.join(get_cache_base(), str('dylib-cache'), - sys.version[:3]) + '%s.%s' % sys.version_info[:2]) cache = Cache(base) return cache @@ -759,7 +787,7 @@ class Wheel(object): data_dir = '%s.data' % name_ver info_dir = '%s.dist-info' % name_ver - metadata_name = posixpath.join(info_dir, METADATA_FILENAME) + metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') record_name = posixpath.join(info_dir, 'RECORD') @@ -786,13 +814,15 @@ class Wheel(object): u_arcname = arcname else: u_arcname = arcname.decode('utf-8') - if '..' in u_arcname: + # See issue #115: some wheels have .. in their entries, but + # in the filename ... e.g. __main__..py ! So the check is + # updated to look for .. in the directory portions + p = u_arcname.split('/') + if '..' 
in p: raise DistlibException('invalid entry in ' 'wheel: %r' % u_arcname) - # The signature file won't be in RECORD, - # and we don't currently don't do anything with it - if u_arcname.endswith('/RECORD.jws'): + if self.skip_entry(u_arcname): continue row = records[u_arcname] if row[2] and str(zinfo.file_size) != row[2]: @@ -826,7 +856,7 @@ class Wheel(object): def get_version(path_map, info_dir): version = path = None - key = '%s/%s' % (info_dir, METADATA_FILENAME) + key = '%s/%s' % (info_dir, LEGACY_METADATA_FILENAME) if key not in path_map: key = '%s/PKG-INFO' % info_dir if key in path_map: @@ -852,7 +882,7 @@ class Wheel(object): if updated: md = Metadata(path=path) md.version = updated - legacy = not path.endswith(METADATA_FILENAME) + legacy = path.endswith(LEGACY_METADATA_FILENAME) md.write(path=path, legacy=legacy) logger.debug('Version updated from %r to %r', version, updated) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distro.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distro.py index aa4defc3..0611b62a 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distro.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/distro.py @@ -17,12 +17,12 @@ The ``distro`` package (``distro`` stands for Linux Distribution) provides information about the Linux distribution it runs on, such as a reliable machine-readable distro ID, or version information. -It is a renewed alternative implementation for Python's original +It is the recommended replacement for Python's original :py:func:`platform.linux_distribution` function, but it provides much more functionality. An alternative implementation became necessary because Python -3.5 deprecated this function, and Python 3.7 is expected to remove it -altogether. Its predecessor function :py:func:`platform.dist` was already -deprecated since Python 2.6 and is also expected to be removed in Python 3.7. 
+3.5 deprecated this function, and Python 3.8 will remove it altogether. +Its predecessor function :py:func:`platform.dist` was already +deprecated since Python 2.6 and will also be removed in Python 3.8. Still, there are many cases in which access to OS distribution information is needed. See `Python issue 1322 <https://bugs.python.org/issue1322>`_ for more information. @@ -48,7 +48,9 @@ _OS_RELEASE_BASENAME = 'os-release' #: with blanks translated to underscores. #: #: * Value: Normalized value. -NORMALIZED_OS_ID = {} +NORMALIZED_OS_ID = { + 'ol': 'oracle', # Oracle Linux +} #: Translation table for normalizing the "Distributor ID" attribute returned by #: the lsb_release command, for use by the :func:`distro.id` method. @@ -58,9 +60,11 @@ NORMALIZED_OS_ID = {} #: #: * Value: Normalized value. NORMALIZED_LSB_ID = { - 'enterpriseenterprise': 'oracle', # Oracle Enterprise Linux + 'enterpriseenterpriseas': 'oracle', # Oracle Enterprise Linux 4 + 'enterpriseenterpriseserver': 'oracle', # Oracle Linux 5 'redhatenterpriseworkstation': 'rhel', # RHEL 6, 7 Workstation 'redhatenterpriseserver': 'rhel', # RHEL 6, 7 Server + 'redhatenterprisecomputenode': 'rhel', # RHEL 6 ComputeNode } #: Translation table for normalizing the distro ID derived from the file name @@ -88,7 +92,8 @@ _DISTRO_RELEASE_IGNORE_BASENAMES = ( 'lsb-release', 'oem-release', _OS_RELEASE_BASENAME, - 'system-release' + 'system-release', + 'plesk-release', ) @@ -161,6 +166,7 @@ def id(): "openbsd" OpenBSD "netbsd" NetBSD "freebsd" FreeBSD + "midnightbsd" MidnightBSD ============== ========================================= If you have a need to get distros for reliable IDs added into this set, @@ -607,7 +613,7 @@ class LinuxDistribution(object): distro release file can be found, the data source for the distro release file will be empty. - * ``include_name`` (bool): Controls whether uname command output is + * ``include_uname`` (bool): Controls whether uname command output is included as a data source. 
If the uname command is not available in the program execution path the data source for the uname command will be empty. @@ -755,7 +761,7 @@ class LinuxDistribution(object): version = v break if pretty and version and self.codename(): - version = u'{0} ({1})'.format(version, self.codename()) + version = '{0} ({1})'.format(version, self.codename()) return version def version_parts(self, best=False): @@ -812,10 +818,14 @@ class LinuxDistribution(object): For details, see :func:`distro.codename`. """ - return self.os_release_attr('codename') \ - or self.lsb_release_attr('codename') \ - or self.distro_release_attr('codename') \ - or '' + try: + # Handle os_release specially since distros might purposefully set + # this to empty string to have no codename + return self._os_release_info['codename'] + except KeyError: + return self.lsb_release_attr('codename') \ + or self.distro_release_attr('codename') \ + or '' def info(self, pretty=False, best=False): """ @@ -872,6 +882,7 @@ class LinuxDistribution(object): For details, see :func:`distro.uname_info`. """ + return self._uname_info def os_release_attr(self, attribute): """ @@ -960,26 +971,31 @@ class LinuxDistribution(object): # * commands or their arguments (not allowed in os-release) if '=' in token: k, v = token.split('=', 1) - if isinstance(v, bytes): - v = v.decode('utf-8') props[k.lower()] = v - if k == 'VERSION': - # this handles cases in which the codename is in - # the `(CODENAME)` (rhel, centos, fedora) format - # or in the `, CODENAME` format (Ubuntu). - codename = re.search(r'(\(\D+\))|,(\s+)?\D+', v) - if codename: - codename = codename.group() - codename = codename.strip('()') - codename = codename.strip(',') - codename = codename.strip() - # codename appears within paranthese. - props['codename'] = codename - else: - props['codename'] = '' else: # Ignore any tokens that are not variable assignments pass + + if 'version_codename' in props: + # os-release added a version_codename field. 
Use that in + # preference to anything else Note that some distros purposefully + # do not have code names. They should be setting + # version_codename="" + props['codename'] = props['version_codename'] + elif 'ubuntu_codename' in props: + # Same as above but a non-standard field name used on older Ubuntus + props['codename'] = props['ubuntu_codename'] + elif 'version' in props: + # If there is no version_codename, parse it from the version + codename = re.search(r'(\(\D+\))|,(\s+)?\D+', props['version']) + if codename: + codename = codename.group() + codename = codename.strip('()') + codename = codename.strip(',') + codename = codename.strip() + # codename appears within paranthese. + props['codename'] = codename + return props @cached_property @@ -998,7 +1014,7 @@ class LinuxDistribution(object): stdout = subprocess.check_output(cmd, stderr=devnull) except OSError: # Command not found return {} - content = stdout.decode(sys.getfilesystemencoding()).splitlines() + content = self._to_str(stdout).splitlines() return self._parse_lsb_release_content(content) @staticmethod @@ -1033,7 +1049,7 @@ class LinuxDistribution(object): stdout = subprocess.check_output(cmd, stderr=devnull) except OSError: return {} - content = stdout.decode(sys.getfilesystemencoding()).splitlines() + content = self._to_str(stdout).splitlines() return self._parse_uname_content(content) @staticmethod @@ -1053,6 +1069,20 @@ class LinuxDistribution(object): props['release'] = version return props + @staticmethod + def _to_str(text): + encoding = sys.getfilesystemencoding() + encoding = 'utf-8' if encoding == 'ascii' else encoding + + if sys.version_info[0] >= 3: + if isinstance(text, bytes): + return text.decode(encoding) + else: + if isinstance(text, unicode): # noqa + return text.encode(encoding) + + return text + @cached_property def _distro_release_info(self): """ @@ -1072,7 +1102,10 @@ class LinuxDistribution(object): # file), because we want to use what was specified as best as # possible. 
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) - if match: + if 'name' in distro_info \ + and 'cloudlinux' in distro_info['name'].lower(): + distro_info['id'] = 'cloudlinux' + elif match: distro_info['id'] = match.group(1) return distro_info else: @@ -1113,6 +1146,8 @@ class LinuxDistribution(object): # The name is always present if the pattern matches self.distro_release_file = filepath distro_info['id'] = match.group(1) + if 'cloudlinux' in distro_info['name'].lower(): + distro_info['id'] = 'cloudlinux' return distro_info return {} @@ -1150,8 +1185,6 @@ class LinuxDistribution(object): Returns: A dictionary containing all information items. """ - if isinstance(line, bytes): - line = line.decode('utf-8') matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match( line.strip()[::-1]) distro_info = {} diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/__init__.py index 04912349..d1d82f15 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/__init__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/__init__.py @@ -32,4 +32,4 @@ __all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder", # this has to be at the top level, see how setup.py parses this #: Distribution version number. 
-__version__ = "1.0.1" +__version__ = "1.1" diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_ihatexml.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_ihatexml.py index 4c77717b..3ff803c1 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_ihatexml.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_ihatexml.py @@ -136,6 +136,7 @@ def normaliseCharList(charList): i += j return rv + # We don't really support characters above the BMP :( max_unicode = int("FFFF", 16) @@ -254,7 +255,7 @@ class InfosetFilter(object): nameRest = name[1:] m = nonXmlNameFirstBMPRegexp.match(nameFirst) if m: - warnings.warn("Coercing non-XML name", DataLossWarning) + warnings.warn("Coercing non-XML name: %s" % name, DataLossWarning) nameFirstOutput = self.getReplacementCharacter(nameFirst) else: nameFirstOutput = nameFirst @@ -262,7 +263,7 @@ class InfosetFilter(object): nameRestOutput = nameRest replaceChars = set(nonXmlNameBMPRegexp.findall(nameRest)) for char in replaceChars: - warnings.warn("Coercing non-XML name", DataLossWarning) + warnings.warn("Coercing non-XML name: %s" % name, DataLossWarning) replacement = self.getReplacementCharacter(char) nameRestOutput = nameRestOutput.replace(char, replacement) return nameFirstOutput + nameRestOutput diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_inputstream.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_inputstream.py index a65e55f6..e0bb3760 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_inputstream.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_inputstream.py @@ -1,10 +1,11 @@ from __future__ import absolute_import, division, unicode_literals -from pip._vendor.six import text_type, binary_type +from pip._vendor.six import text_type from pip._vendor.six.moves 
import http_client, urllib import codecs import re +from io import BytesIO, StringIO from pip._vendor import webencodings @@ -12,13 +13,6 @@ from .constants import EOF, spaceCharacters, asciiLetters, asciiUppercase from .constants import _ReparseException from . import _utils -from io import StringIO - -try: - from io import BytesIO -except ImportError: - BytesIO = StringIO - # Non-unicode versions of constants for use in the pre-parser spaceCharactersBytes = frozenset([item.encode("ascii") for item in spaceCharacters]) asciiLettersBytes = frozenset([item.encode("ascii") for item in asciiLetters]) @@ -40,13 +34,13 @@ if _utils.supports_lone_surrogates: else: invalid_unicode_re = re.compile(invalid_unicode_no_surrogate) -non_bmp_invalid_codepoints = set([0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE, - 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF, - 0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE, - 0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF, - 0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE, - 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF, - 0x10FFFE, 0x10FFFF]) +non_bmp_invalid_codepoints = {0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE, + 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF, + 0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE, + 0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF, + 0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE, + 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF, + 0x10FFFE, 0x10FFFF} ascii_punctuation_re = re.compile("[\u0009-\u000D\u0020-\u002F\u003A-\u0040\u005C\u005B-\u0060\u007B-\u007E]") @@ -367,7 +361,7 @@ class HTMLUnicodeInputStream(object): def unget(self, char): # Only one character is allowed to be ungotten at once - it must # be consumed again before any further call to unget - if char is not None: + if char is not EOF: if self.chunkOffset == 0: # unget is called quite rarely, so it's a good idea to do # more work here if it saves a bit of work in the frequently @@ -449,7 +443,7 @@ class HTMLBinaryInputStream(HTMLUnicodeInputStream): try: 
stream.seek(stream.tell()) - except: # pylint:disable=bare-except + except Exception: stream = BufferedStream(stream) return stream @@ -461,7 +455,7 @@ class HTMLBinaryInputStream(HTMLUnicodeInputStream): if charEncoding[0] is not None: return charEncoding - # If we've been overriden, we've been overriden + # If we've been overridden, we've been overridden charEncoding = lookupEncoding(self.override_encoding), "certain" if charEncoding[0] is not None: return charEncoding @@ -664,9 +658,7 @@ class EncodingBytes(bytes): """Look for a sequence of bytes at the start of a string. If the bytes are found return True and advance the position to the byte after the match. Otherwise return False and leave the position alone""" - p = self.position - data = self[p:p + len(bytes)] - rv = data.startswith(bytes) + rv = self.startswith(bytes, self.position) if rv: self.position += len(bytes) return rv @@ -674,15 +666,11 @@ class EncodingBytes(bytes): def jumpTo(self, bytes): """Look for the next sequence of bytes matching a given sequence. If a match is found advance the position to the last byte of the match""" - newPosition = self[self.position:].find(bytes) - if newPosition > -1: - # XXX: This is ugly, but I can't see a nicer way to fix this. 
- if self._position == -1: - self._position = 0 - self._position += (newPosition + len(bytes) - 1) - return True - else: + try: + self._position = self.index(bytes, self.position) + len(bytes) - 1 + except ValueError: raise StopIteration + return True class EncodingParser(object): @@ -694,6 +682,9 @@ class EncodingParser(object): self.encoding = None def getEncoding(self): + if b"<meta" not in self.data: + return None + methodDispatch = ( (b"<!--", self.handleComment), (b"<meta", self.handleMeta), @@ -703,6 +694,10 @@ class EncodingParser(object): (b"<", self.handlePossibleStartTag)) for _ in self.data: keepParsing = True + try: + self.data.jumpTo(b"<") + except StopIteration: + break for key, method in methodDispatch: if self.data.matchBytes(key): try: @@ -908,7 +903,7 @@ class ContentAttrParser(object): def lookupEncoding(encoding): """Return the python codec name corresponding to an encoding or None if the string doesn't correspond to a valid encoding.""" - if isinstance(encoding, binary_type): + if isinstance(encoding, bytes): try: encoding = encoding.decode("ascii") except UnicodeDecodeError: diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_tokenizer.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_tokenizer.py index 178f6e7f..5f00253e 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_tokenizer.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_tokenizer.py @@ -2,7 +2,8 @@ from __future__ import absolute_import, division, unicode_literals from pip._vendor.six import unichr as chr -from collections import deque +from collections import deque, OrderedDict +from sys import version_info from .constants import spaceCharacters from .constants import entities @@ -17,6 +18,11 @@ from ._trie import Trie entitiesTrie = Trie(entities) +if version_info >= (3, 7): + attributeMap = dict +else: + attributeMap = OrderedDict + class 
HTMLTokenizer(object): """ This class takes care of tokenizing HTML. @@ -228,6 +234,14 @@ class HTMLTokenizer(object): # Add token to the queue to be yielded if (token["type"] in tagTokenTypes): token["name"] = token["name"].translate(asciiUpper2Lower) + if token["type"] == tokenTypes["StartTag"]: + raw = token["data"] + data = attributeMap(raw) + if len(raw) > len(data): + # we had some duplicated attribute, fix so first wins + data.update(raw[::-1]) + token["data"] = data + if token["type"] == tokenTypes["EndTag"]: if token["data"]: self.tokenQueue.append({"type": tokenTypes["ParseError"], diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_trie/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_trie/__init__.py index a5ba4bf1..07bad5d3 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_trie/__init__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_trie/__init__.py @@ -1,14 +1,5 @@ from __future__ import absolute_import, division, unicode_literals -from .py import Trie as PyTrie +from .py import Trie -Trie = PyTrie - -# pylint:disable=wrong-import-position -try: - from .datrie import Trie as DATrie -except ImportError: - pass -else: - Trie = DATrie -# pylint:enable=wrong-import-position +__all__ = ["Trie"] diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_trie/_base.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_trie/_base.py index a1158bbb..6b71975f 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_trie/_base.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_trie/_base.py @@ -1,6 +1,9 @@ from __future__ import absolute_import, division, unicode_literals -from collections import Mapping +try: + from collections.abc import Mapping +except ImportError: # Python 2.7 + from collections 
import Mapping class Trie(Mapping): diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_trie/datrie.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_trie/datrie.py deleted file mode 100644 index e2e5f866..00000000 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_trie/datrie.py +++ /dev/null @@ -1,44 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals - -from datrie import Trie as DATrie -from pip._vendor.six import text_type - -from ._base import Trie as ABCTrie - - -class Trie(ABCTrie): - def __init__(self, data): - chars = set() - for key in data.keys(): - if not isinstance(key, text_type): - raise TypeError("All keys must be strings") - for char in key: - chars.add(char) - - self._data = DATrie("".join(chars)) - for key, value in data.items(): - self._data[key] = value - - def __contains__(self, key): - return key in self._data - - def __len__(self): - return len(self._data) - - def __iter__(self): - raise NotImplementedError() - - def __getitem__(self, key): - return self._data[key] - - def keys(self, prefix=None): - return self._data.keys(prefix) - - def has_keys_with_prefix(self, prefix): - return self._data.has_keys_with_prefix(prefix) - - def longest_prefix(self, prefix): - return self._data.longest_prefix(prefix) - - def longest_prefix_item(self, prefix): - return self._data.longest_prefix_item(prefix) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_utils.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_utils.py index 0703afb3..d7c4926a 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_utils.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/_utils.py @@ -2,12 +2,20 @@ from __future__ import absolute_import, division, unicode_literals from types import ModuleType -from pip._vendor.six import text_type - 
try: - import xml.etree.cElementTree as default_etree + from collections.abc import Mapping except ImportError: + from collections import Mapping + +from pip._vendor.six import text_type, PY3 + +if PY3: import xml.etree.ElementTree as default_etree +else: + try: + import xml.etree.cElementTree as default_etree + except ImportError: + import xml.etree.ElementTree as default_etree __all__ = ["default_etree", "MethodDispatcher", "isSurrogatePair", @@ -27,7 +35,7 @@ try: # We need this with u"" because of http://bugs.jython.org/issue2039 _x = eval('u"\\uD800"') # pylint:disable=eval-used assert isinstance(_x, text_type) -except: # pylint:disable=bare-except +except Exception: supports_lone_surrogates = False else: supports_lone_surrogates = True @@ -47,9 +55,6 @@ class MethodDispatcher(dict): """ def __init__(self, items=()): - # Using _dictEntries instead of directly assigning to self is about - # twice as fast. Please do careful performance testing before changing - # anything here. _dictEntries = [] for name, value in items: if isinstance(name, (list, tuple, frozenset, set)): @@ -64,6 +69,36 @@ class MethodDispatcher(dict): def __getitem__(self, key): return dict.get(self, key, self.default) + def __get__(self, instance, owner=None): + return BoundMethodDispatcher(instance, self) + + +class BoundMethodDispatcher(Mapping): + """Wraps a MethodDispatcher, binding its return values to `instance`""" + def __init__(self, instance, dispatcher): + self.instance = instance + self.dispatcher = dispatcher + + def __getitem__(self, key): + # see https://docs.python.org/3/reference/datamodel.html#object.__get__ + # on a function, __get__ is used to bind a function to an instance as a bound method + return self.dispatcher[key].__get__(self.instance) + + def get(self, key, default): + if key in self.dispatcher: + return self[key] + else: + return default + + def __iter__(self): + return iter(self.dispatcher) + + def __len__(self): + return len(self.dispatcher) + + def 
__contains__(self, key): + return key in self.dispatcher + # Some utility functions to deal with weirdness around UCS2 vs UCS4 # python builds diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/constants.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/constants.py index 1ff80419..fe3e237c 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/constants.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/constants.py @@ -519,8 +519,8 @@ adjustForeignAttributes = { "xmlns:xlink": ("xmlns", "xlink", namespaces["xmlns"]) } -unadjustForeignAttributes = dict([((ns, local), qname) for qname, (prefix, local, ns) in - adjustForeignAttributes.items()]) +unadjustForeignAttributes = {(ns, local): qname for qname, (prefix, local, ns) in + adjustForeignAttributes.items()} spaceCharacters = frozenset([ "\t", @@ -544,8 +544,7 @@ asciiLetters = frozenset(string.ascii_letters) digits = frozenset(string.digits) hexDigits = frozenset(string.hexdigits) -asciiUpper2Lower = dict([(ord(c), ord(c.lower())) - for c in string.ascii_uppercase]) +asciiUpper2Lower = {ord(c): ord(c.lower()) for c in string.ascii_uppercase} # Heading elements need to be ordered headingElements = ( @@ -2934,7 +2933,7 @@ tagTokenTypes = frozenset([tokenTypes["StartTag"], tokenTypes["EndTag"], tokenTypes["EmptyTag"]]) -prefixes = dict([(v, k) for k, v in namespaces.items()]) +prefixes = {v: k for k, v in namespaces.items()} prefixes["http://www.w3.org/1998/Math/MathML"] = "math" diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/filters/sanitizer.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/filters/sanitizer.py index af8e77b8..aa7431d1 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/filters/sanitizer.py +++ 
b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/filters/sanitizer.py @@ -1,6 +1,15 @@ +"""Deprecated from html5lib 1.1. + +See `here <https://github.com/html5lib/html5lib-python/issues/443>`_ for +information about its deprecation; `Bleach <https://github.com/mozilla/bleach>`_ +is recommended as a replacement. Please let us know in the aforementioned issue +if Bleach is unsuitable for your needs. + +""" from __future__ import absolute_import, division, unicode_literals import re +import warnings from xml.sax.saxutils import escape, unescape from pip._vendor.six.moves import urllib_parse as urlparse @@ -11,6 +20,14 @@ from ..constants import namespaces, prefixes __all__ = ["Filter"] +_deprecation_msg = ( + "html5lib's sanitizer is deprecated; see " + + "https://github.com/html5lib/html5lib-python/issues/443 and please let " + + "us know if Bleach is unsuitable for your needs" +) + +warnings.warn(_deprecation_msg, DeprecationWarning) + allowed_elements = frozenset(( (namespaces['html'], 'a'), (namespaces['html'], 'abbr'), @@ -750,6 +767,9 @@ class Filter(base.Filter): """ super(Filter, self).__init__(source) + + warnings.warn(_deprecation_msg, DeprecationWarning) + self.allowed_elements = allowed_elements self.allowed_attributes = allowed_attributes self.allowed_css_properties = allowed_css_properties diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/html5parser.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/html5parser.py index ae41a133..d06784f3 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/html5parser.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/html5parser.py @@ -2,7 +2,6 @@ from __future__ import absolute_import, division, unicode_literals from pip._vendor.six import with_metaclass, viewkeys import types -from collections import OrderedDict from . import _inputstream from . 
import _tokenizer @@ -119,8 +118,8 @@ class HTMLParser(object): self.tree = tree(namespaceHTMLElements) self.errors = [] - self.phases = dict([(name, cls(self, self.tree)) for name, cls in - getPhases(debug).items()]) + self.phases = {name: cls(self, self.tree) for name, cls in + getPhases(debug).items()} def _parse(self, stream, innerHTML=False, container="div", scripting=False, **kwargs): @@ -202,7 +201,7 @@ class HTMLParser(object): DoctypeToken = tokenTypes["Doctype"] ParseErrorToken = tokenTypes["ParseError"] - for token in self.normalizedTokens(): + for token in self.tokenizer: prev_token = None new_token = token while new_token is not None: @@ -260,10 +259,6 @@ class HTMLParser(object): if reprocess: assert self.phase not in phases - def normalizedTokens(self): - for token in self.tokenizer: - yield self.normalizeToken(token) - def parse(self, stream, *args, **kwargs): """Parse a HTML document into a well-formed tree @@ -325,17 +320,6 @@ class HTMLParser(object): if self.strict: raise ParseError(E[errorcode] % datavars) - def normalizeToken(self, token): - # HTML5 specific normalizations to the token stream - if token["type"] == tokenTypes["StartTag"]: - raw = token["data"] - token["data"] = OrderedDict(raw) - if len(raw) > len(token["data"]): - # we had some duplicated attribute, fix so first wins - token["data"].update(raw[::-1]) - - return token - def adjustMathMLAttributes(self, token): adjust_attributes(token, adjustMathMLAttributes) @@ -413,16 +397,12 @@ class HTMLParser(object): def getPhases(debug): def log(function): """Logger that records which phase processes each token""" - type_names = dict((value, key) for key, value in - tokenTypes.items()) + type_names = {value: key for key, value in tokenTypes.items()} def wrapped(self, *args, **kwargs): if function.__name__.startswith("process") and len(args) > 0: token = args[0] - try: - info = {"type": type_names[token['type']]} - except: - raise + info = {"type": type_names[token['type']]} if 
token['type'] in tagTokenTypes: info["name"] = token['name'] @@ -446,10 +426,13 @@ def getPhases(debug): class Phase(with_metaclass(getMetaclass(debug, log))): """Base class for helper object that implements each phase of processing """ + __slots__ = ("parser", "tree", "__startTagCache", "__endTagCache") def __init__(self, parser, tree): self.parser = parser self.tree = tree + self.__startTagCache = {} + self.__endTagCache = {} def processEOF(self): raise NotImplementedError @@ -469,7 +452,21 @@ def getPhases(debug): self.tree.insertText(token["data"]) def processStartTag(self, token): - return self.startTagHandler[token["name"]](token) + # Note the caching is done here rather than BoundMethodDispatcher as doing it there + # requires a circular reference to the Phase, and this ends up with a significant + # (CPython 2.7, 3.8) GC cost when parsing many short inputs + name = token["name"] + # In Py2, using `in` is quicker in general than try/except KeyError + # In Py3, `in` is quicker when there are few cache hits (typically short inputs) + if name in self.__startTagCache: + func = self.__startTagCache[name] + else: + func = self.__startTagCache[name] = self.startTagHandler[name] + # bound the cache size in case we get loads of unknown tags + while len(self.__startTagCache) > len(self.startTagHandler) * 1.1: + # this makes the eviction policy random on Py < 3.7 and FIFO >= 3.7 + self.__startTagCache.pop(next(iter(self.__startTagCache))) + return func(token) def startTagHtml(self, token): if not self.parser.firstStartTag and token["name"] == "html": @@ -482,9 +479,25 @@ def getPhases(debug): self.parser.firstStartTag = False def processEndTag(self, token): - return self.endTagHandler[token["name"]](token) + # Note the caching is done here rather than BoundMethodDispatcher as doing it there + # requires a circular reference to the Phase, and this ends up with a significant + # (CPython 2.7, 3.8) GC cost when parsing many short inputs + name = token["name"] + # In Py2, 
using `in` is quicker in general than try/except KeyError + # In Py3, `in` is quicker when there are few cache hits (typically short inputs) + if name in self.__endTagCache: + func = self.__endTagCache[name] + else: + func = self.__endTagCache[name] = self.endTagHandler[name] + # bound the cache size in case we get loads of unknown tags + while len(self.__endTagCache) > len(self.endTagHandler) * 1.1: + # this makes the eviction policy random on Py < 3.7 and FIFO >= 3.7 + self.__endTagCache.pop(next(iter(self.__endTagCache))) + return func(token) class InitialPhase(Phase): + __slots__ = tuple() + def processSpaceCharacters(self, token): pass @@ -613,6 +626,8 @@ def getPhases(debug): return True class BeforeHtmlPhase(Phase): + __slots__ = tuple() + # helper methods def insertHtmlElement(self): self.tree.insertRoot(impliedTagToken("html", "StartTag")) @@ -648,19 +663,7 @@ def getPhases(debug): return token class BeforeHeadPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("head", self.startTagHead) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - (("head", "body", "html", "br"), self.endTagImplyHead) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() def processEOF(self): self.startTagHead(impliedTagToken("head", "StartTag")) @@ -693,28 +696,19 @@ def getPhases(debug): self.parser.parseError("end-tag-after-implied-root", {"name": token["name"]}) + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml), + ("head", startTagHead) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + (("head", "body", "html", "br"), endTagImplyHead) + ]) + endTagHandler.default = endTagOther + class InHeadPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = 
_utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("title", self.startTagTitle), - (("noframes", "style"), self.startTagNoFramesStyle), - ("noscript", self.startTagNoscript), - ("script", self.startTagScript), - (("base", "basefont", "bgsound", "command", "link"), - self.startTagBaseLinkCommand), - ("meta", self.startTagMeta), - ("head", self.startTagHead) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("head", self.endTagHead), - (("br", "html", "body"), self.endTagHtmlBodyBr) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() # the real thing def processEOF(self): @@ -796,22 +790,27 @@ def getPhases(debug): def anythingElse(self): self.endTagHead(impliedTagToken("head")) - class InHeadNoscriptPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml), + ("title", startTagTitle), + (("noframes", "style"), startTagNoFramesStyle), + ("noscript", startTagNoscript), + ("script", startTagScript), + (("base", "basefont", "bgsound", "command", "link"), + startTagBaseLinkCommand), + ("meta", startTagMeta), + ("head", startTagHead) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("head", endTagHead), + (("br", "html", "body"), endTagHtmlBodyBr) + ]) + endTagHandler.default = endTagOther - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - (("basefont", "bgsound", "link", "meta", "noframes", "style"), self.startTagBaseLinkCommand), - (("head", "noscript"), self.startTagHeadNoscript), - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("noscript", self.endTagNoscript), - ("br", self.endTagBr), - ]) - self.endTagHandler.default = self.endTagOther + class InHeadNoscriptPhase(Phase): + __slots__ = tuple() def processEOF(self): 
self.parser.parseError("eof-in-head-noscript") @@ -860,23 +859,21 @@ def getPhases(debug): # Caller must raise parse error first! self.endTagNoscript(impliedTagToken("noscript")) + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml), + (("basefont", "bgsound", "link", "meta", "noframes", "style"), startTagBaseLinkCommand), + (("head", "noscript"), startTagHeadNoscript), + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("noscript", endTagNoscript), + ("br", endTagBr), + ]) + endTagHandler.default = endTagOther + class AfterHeadPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("body", self.startTagBody), - ("frameset", self.startTagFrameset), - (("base", "basefont", "bgsound", "link", "meta", "noframes", "script", - "style", "title"), - self.startTagFromHead), - ("head", self.startTagHead) - ]) - self.startTagHandler.default = self.startTagOther - self.endTagHandler = _utils.MethodDispatcher([(("body", "html", "br"), - self.endTagHtmlBodyBr)]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() def processEOF(self): self.anythingElse() @@ -927,80 +924,30 @@ def getPhases(debug): self.parser.phase = self.parser.phases["inBody"] self.parser.framesetOK = True + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml), + ("body", startTagBody), + ("frameset", startTagFrameset), + (("base", "basefont", "bgsound", "link", "meta", "noframes", "script", + "style", "title"), + startTagFromHead), + ("head", startTagHead) + ]) + startTagHandler.default = startTagOther + endTagHandler = _utils.MethodDispatcher([(("body", "html", "br"), + endTagHtmlBodyBr)]) + endTagHandler.default = endTagOther + class InBodyPhase(Phase): # http://www.whatwg.org/specs/web-apps/current-work/#parsing-main-inbody # the really-really-really-very crazy mode - def __init__(self, parser, 
tree): - Phase.__init__(self, parser, tree) + __slots__ = ("processSpaceCharacters",) + def __init__(self, *args, **kwargs): + super(InBodyPhase, self).__init__(*args, **kwargs) # Set this to the default handler self.processSpaceCharacters = self.processSpaceCharactersNonPre - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - (("base", "basefont", "bgsound", "command", "link", "meta", - "script", "style", "title"), - self.startTagProcessInHead), - ("body", self.startTagBody), - ("frameset", self.startTagFrameset), - (("address", "article", "aside", "blockquote", "center", "details", - "dir", "div", "dl", "fieldset", "figcaption", "figure", - "footer", "header", "hgroup", "main", "menu", "nav", "ol", "p", - "section", "summary", "ul"), - self.startTagCloseP), - (headingElements, self.startTagHeading), - (("pre", "listing"), self.startTagPreListing), - ("form", self.startTagForm), - (("li", "dd", "dt"), self.startTagListItem), - ("plaintext", self.startTagPlaintext), - ("a", self.startTagA), - (("b", "big", "code", "em", "font", "i", "s", "small", "strike", - "strong", "tt", "u"), self.startTagFormatting), - ("nobr", self.startTagNobr), - ("button", self.startTagButton), - (("applet", "marquee", "object"), self.startTagAppletMarqueeObject), - ("xmp", self.startTagXmp), - ("table", self.startTagTable), - (("area", "br", "embed", "img", "keygen", "wbr"), - self.startTagVoidFormatting), - (("param", "source", "track"), self.startTagParamSource), - ("input", self.startTagInput), - ("hr", self.startTagHr), - ("image", self.startTagImage), - ("isindex", self.startTagIsIndex), - ("textarea", self.startTagTextarea), - ("iframe", self.startTagIFrame), - ("noscript", self.startTagNoscript), - (("noembed", "noframes"), self.startTagRawtext), - ("select", self.startTagSelect), - (("rp", "rt"), self.startTagRpRt), - (("option", "optgroup"), self.startTagOpt), - (("math"), self.startTagMath), - (("svg"), self.startTagSvg), - (("caption", "col", 
"colgroup", "frame", "head", - "tbody", "td", "tfoot", "th", "thead", - "tr"), self.startTagMisplaced) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("body", self.endTagBody), - ("html", self.endTagHtml), - (("address", "article", "aside", "blockquote", "button", "center", - "details", "dialog", "dir", "div", "dl", "fieldset", "figcaption", "figure", - "footer", "header", "hgroup", "listing", "main", "menu", "nav", "ol", "pre", - "section", "summary", "ul"), self.endTagBlock), - ("form", self.endTagForm), - ("p", self.endTagP), - (("dd", "dt", "li"), self.endTagListItem), - (headingElements, self.endTagHeading), - (("a", "b", "big", "code", "em", "font", "i", "nobr", "s", "small", - "strike", "strong", "tt", "u"), self.endTagFormatting), - (("applet", "marquee", "object"), self.endTagAppletMarqueeObject), - ("br", self.endTagBr), - ]) - self.endTagHandler.default = self.endTagOther - def isMatchingFormattingElement(self, node1, node2): return (node1.name == node2.name and node1.namespace == node2.namespace and @@ -1650,14 +1597,73 @@ def getPhases(debug): self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) break + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + (("base", "basefont", "bgsound", "command", "link", "meta", + "script", "style", "title"), + startTagProcessInHead), + ("body", startTagBody), + ("frameset", startTagFrameset), + (("address", "article", "aside", "blockquote", "center", "details", + "dir", "div", "dl", "fieldset", "figcaption", "figure", + "footer", "header", "hgroup", "main", "menu", "nav", "ol", "p", + "section", "summary", "ul"), + startTagCloseP), + (headingElements, startTagHeading), + (("pre", "listing"), startTagPreListing), + ("form", startTagForm), + (("li", "dd", "dt"), startTagListItem), + ("plaintext", startTagPlaintext), + ("a", startTagA), + (("b", "big", "code", "em", "font", "i", "s", "small", "strike", + "strong", 
"tt", "u"), startTagFormatting), + ("nobr", startTagNobr), + ("button", startTagButton), + (("applet", "marquee", "object"), startTagAppletMarqueeObject), + ("xmp", startTagXmp), + ("table", startTagTable), + (("area", "br", "embed", "img", "keygen", "wbr"), + startTagVoidFormatting), + (("param", "source", "track"), startTagParamSource), + ("input", startTagInput), + ("hr", startTagHr), + ("image", startTagImage), + ("isindex", startTagIsIndex), + ("textarea", startTagTextarea), + ("iframe", startTagIFrame), + ("noscript", startTagNoscript), + (("noembed", "noframes"), startTagRawtext), + ("select", startTagSelect), + (("rp", "rt"), startTagRpRt), + (("option", "optgroup"), startTagOpt), + (("math"), startTagMath), + (("svg"), startTagSvg), + (("caption", "col", "colgroup", "frame", "head", + "tbody", "td", "tfoot", "th", "thead", + "tr"), startTagMisplaced) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("body", endTagBody), + ("html", endTagHtml), + (("address", "article", "aside", "blockquote", "button", "center", + "details", "dialog", "dir", "div", "dl", "fieldset", "figcaption", "figure", + "footer", "header", "hgroup", "listing", "main", "menu", "nav", "ol", "pre", + "section", "summary", "ul"), endTagBlock), + ("form", endTagForm), + ("p", endTagP), + (("dd", "dt", "li"), endTagListItem), + (headingElements, endTagHeading), + (("a", "b", "big", "code", "em", "font", "i", "nobr", "s", "small", + "strike", "strong", "tt", "u"), endTagFormatting), + (("applet", "marquee", "object"), endTagAppletMarqueeObject), + ("br", endTagBr), + ]) + endTagHandler.default = endTagOther + class TextPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - self.startTagHandler = _utils.MethodDispatcher([]) - self.startTagHandler.default = self.startTagOther - self.endTagHandler = _utils.MethodDispatcher([ - ("script", self.endTagScript)]) - self.endTagHandler.default = self.endTagOther + __slots__ 
= tuple() def processCharacters(self, token): self.tree.insertText(token["data"]) @@ -1683,30 +1689,15 @@ def getPhases(debug): self.tree.openElements.pop() self.parser.phase = self.parser.originalPhase + startTagHandler = _utils.MethodDispatcher([]) + startTagHandler.default = startTagOther + endTagHandler = _utils.MethodDispatcher([ + ("script", endTagScript)]) + endTagHandler.default = endTagOther + class InTablePhase(Phase): # http://www.whatwg.org/specs/web-apps/current-work/#in-table - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("caption", self.startTagCaption), - ("colgroup", self.startTagColgroup), - ("col", self.startTagCol), - (("tbody", "tfoot", "thead"), self.startTagRowGroup), - (("td", "th", "tr"), self.startTagImplyTbody), - ("table", self.startTagTable), - (("style", "script"), self.startTagStyleScript), - ("input", self.startTagInput), - ("form", self.startTagForm) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("table", self.endTagTable), - (("body", "caption", "col", "colgroup", "html", "tbody", "td", - "tfoot", "th", "thead", "tr"), self.endTagIgnore) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() # helper methods def clearStackToTableContext(self): @@ -1828,9 +1819,32 @@ def getPhases(debug): self.parser.phases["inBody"].processEndTag(token) self.tree.insertFromTable = False + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + ("caption", startTagCaption), + ("colgroup", startTagColgroup), + ("col", startTagCol), + (("tbody", "tfoot", "thead"), startTagRowGroup), + (("td", "th", "tr"), startTagImplyTbody), + ("table", startTagTable), + (("style", "script"), startTagStyleScript), + ("input", startTagInput), + ("form", startTagForm) + ]) + startTagHandler.default = startTagOther + + endTagHandler = 
_utils.MethodDispatcher([ + ("table", endTagTable), + (("body", "caption", "col", "colgroup", "html", "tbody", "td", + "tfoot", "th", "thead", "tr"), endTagIgnore) + ]) + endTagHandler.default = endTagOther + class InTableTextPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) + __slots__ = ("originalPhase", "characterTokens") + + def __init__(self, *args, **kwargs): + super(InTableTextPhase, self).__init__(*args, **kwargs) self.originalPhase = None self.characterTokens = [] @@ -1875,23 +1889,7 @@ def getPhases(debug): class InCaptionPhase(Phase): # http://www.whatwg.org/specs/web-apps/current-work/#in-caption - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th", - "thead", "tr"), self.startTagTableElement) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("caption", self.endTagCaption), - ("table", self.endTagTable), - (("body", "col", "colgroup", "html", "tbody", "td", "tfoot", "th", - "thead", "tr"), self.endTagIgnore) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() def ignoreEndTagCaption(self): return not self.tree.elementInScope("caption", variant="table") @@ -1944,23 +1942,24 @@ def getPhases(debug): def endTagOther(self, token): return self.parser.phases["inBody"].processEndTag(token) + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th", + "thead", "tr"), startTagTableElement) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("caption", endTagCaption), + ("table", endTagTable), + (("body", "col", "colgroup", "html", "tbody", "td", "tfoot", "th", + "thead", "tr"), endTagIgnore) + ]) + endTagHandler.default = endTagOther + class 
InColumnGroupPhase(Phase): # http://www.whatwg.org/specs/web-apps/current-work/#in-column - - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("col", self.startTagCol) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("colgroup", self.endTagColgroup), - ("col", self.endTagCol) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() def ignoreEndTagColgroup(self): return self.tree.openElements[-1].name == "html" @@ -2010,26 +2009,21 @@ def getPhases(debug): if not ignoreEndTag: return token + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + ("col", startTagCol) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("colgroup", endTagColgroup), + ("col", endTagCol) + ]) + endTagHandler.default = endTagOther + class InTableBodyPhase(Phase): # http://www.whatwg.org/specs/web-apps/current-work/#in-table0 - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("tr", self.startTagTr), - (("td", "th"), self.startTagTableCell), - (("caption", "col", "colgroup", "tbody", "tfoot", "thead"), - self.startTagTableOther) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - (("tbody", "tfoot", "thead"), self.endTagTableRowGroup), - ("table", self.endTagTable), - (("body", "caption", "col", "colgroup", "html", "td", "th", - "tr"), self.endTagIgnore) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() # helper methods def clearStackToTableBodyContext(self): @@ -2108,26 +2102,26 @@ def getPhases(debug): def endTagOther(self, token): return self.parser.phases["inTable"].processEndTag(token) + startTagHandler = _utils.MethodDispatcher([ + ("html", 
Phase.startTagHtml), + ("tr", startTagTr), + (("td", "th"), startTagTableCell), + (("caption", "col", "colgroup", "tbody", "tfoot", "thead"), + startTagTableOther) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + (("tbody", "tfoot", "thead"), endTagTableRowGroup), + ("table", endTagTable), + (("body", "caption", "col", "colgroup", "html", "td", "th", + "tr"), endTagIgnore) + ]) + endTagHandler.default = endTagOther + class InRowPhase(Phase): # http://www.whatwg.org/specs/web-apps/current-work/#in-row - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - (("td", "th"), self.startTagTableCell), - (("caption", "col", "colgroup", "tbody", "tfoot", "thead", - "tr"), self.startTagTableOther) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("tr", self.endTagTr), - ("table", self.endTagTable), - (("tbody", "tfoot", "thead"), self.endTagTableRowGroup), - (("body", "caption", "col", "colgroup", "html", "td", "th"), - self.endTagIgnore) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() # helper methods (XXX unify this with other table helper methods) def clearStackToTableRowContext(self): @@ -2197,23 +2191,26 @@ def getPhases(debug): def endTagOther(self, token): return self.parser.phases["inTable"].processEndTag(token) + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + (("td", "th"), startTagTableCell), + (("caption", "col", "colgroup", "tbody", "tfoot", "thead", + "tr"), startTagTableOther) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("tr", endTagTr), + ("table", endTagTable), + (("tbody", "tfoot", "thead"), endTagTableRowGroup), + (("body", "caption", "col", "colgroup", "html", "td", "th"), + endTagIgnore) + ]) + endTagHandler.default = endTagOther + class 
InCellPhase(Phase): # http://www.whatwg.org/specs/web-apps/current-work/#in-cell - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th", - "thead", "tr"), self.startTagTableOther) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - (("td", "th"), self.endTagTableCell), - (("body", "caption", "col", "colgroup", "html"), self.endTagIgnore), - (("table", "tbody", "tfoot", "thead", "tr"), self.endTagImply) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() # helper def closeCell(self): @@ -2273,26 +2270,22 @@ def getPhases(debug): def endTagOther(self, token): return self.parser.phases["inBody"].processEndTag(token) + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th", + "thead", "tr"), startTagTableOther) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + (("td", "th"), endTagTableCell), + (("body", "caption", "col", "colgroup", "html"), endTagIgnore), + (("table", "tbody", "tfoot", "thead", "tr"), endTagImply) + ]) + endTagHandler.default = endTagOther + class InSelectPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("option", self.startTagOption), - ("optgroup", self.startTagOptgroup), - ("select", self.startTagSelect), - (("input", "keygen", "textarea"), self.startTagInput), - ("script", self.startTagScript) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("option", self.endTagOption), - ("optgroup", self.endTagOptgroup), - ("select", self.endTagSelect) - ]) - self.endTagHandler.default = self.endTagOther + 
__slots__ = tuple() # http://www.whatwg.org/specs/web-apps/current-work/#in-select def processEOF(self): @@ -2373,21 +2366,25 @@ def getPhases(debug): self.parser.parseError("unexpected-end-tag-in-select", {"name": token["name"]}) - class InSelectInTablePhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - (("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"), - self.startTagTable) - ]) - self.startTagHandler.default = self.startTagOther + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + ("option", startTagOption), + ("optgroup", startTagOptgroup), + ("select", startTagSelect), + (("input", "keygen", "textarea"), startTagInput), + ("script", startTagScript) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("option", endTagOption), + ("optgroup", endTagOptgroup), + ("select", endTagSelect) + ]) + endTagHandler.default = endTagOther - self.endTagHandler = _utils.MethodDispatcher([ - (("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"), - self.endTagTable) - ]) - self.endTagHandler.default = self.endTagOther + class InSelectInTablePhase(Phase): + __slots__ = tuple() def processEOF(self): self.parser.phases["inSelect"].processEOF() @@ -2412,7 +2409,21 @@ def getPhases(debug): def endTagOther(self, token): return self.parser.phases["inSelect"].processEndTag(token) + startTagHandler = _utils.MethodDispatcher([ + (("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"), + startTagTable) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + (("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"), + endTagTable) + ]) + endTagHandler.default = endTagOther + class InForeignContentPhase(Phase): + __slots__ = tuple() + breakoutElements = frozenset(["b", "big", "blockquote", "body", "br", "center", "code", "dd", "div", "dl", "dt", 
"em", "embed", "h1", "h2", "h3", @@ -2422,9 +2433,6 @@ def getPhases(debug): "span", "strong", "strike", "sub", "sup", "table", "tt", "u", "ul", "var"]) - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - def adjustSVGTagNames(self, token): replacements = {"altglyph": "altGlyph", "altglyphdef": "altGlyphDef", @@ -2478,7 +2486,7 @@ def getPhases(debug): currentNode = self.tree.openElements[-1] if (token["name"] in self.breakoutElements or (token["name"] == "font" and - set(token["data"].keys()) & set(["color", "face", "size"]))): + set(token["data"].keys()) & {"color", "face", "size"})): self.parser.parseError("unexpected-html-element-in-foreign-content", {"name": token["name"]}) while (self.tree.openElements[-1].namespace != @@ -2528,16 +2536,7 @@ def getPhases(debug): return new_token class AfterBodyPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([("html", self.endTagHtml)]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() def processEOF(self): # Stop parsing @@ -2574,23 +2573,17 @@ def getPhases(debug): self.parser.phase = self.parser.phases["inBody"] return token - class InFramesetPhase(Phase): - # http://www.whatwg.org/specs/web-apps/current-work/#in-frameset - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml) + ]) + startTagHandler.default = startTagOther - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("frameset", self.startTagFrameset), - ("frame", self.startTagFrame), - ("noframes", self.startTagNoframes) - ]) - self.startTagHandler.default = self.startTagOther + endTagHandler = _utils.MethodDispatcher([("html", endTagHtml)]) + endTagHandler.default = endTagOther - 
self.endTagHandler = _utils.MethodDispatcher([ - ("frameset", self.endTagFrameset) - ]) - self.endTagHandler.default = self.endTagOther + class InFramesetPhase(Phase): + # http://www.whatwg.org/specs/web-apps/current-work/#in-frameset + __slots__ = tuple() def processEOF(self): if self.tree.openElements[-1].name != "html": @@ -2631,21 +2624,22 @@ def getPhases(debug): self.parser.parseError("unexpected-end-tag-in-frameset", {"name": token["name"]}) - class AfterFramesetPhase(Phase): - # http://www.whatwg.org/specs/web-apps/current-work/#after3 - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + ("frameset", startTagFrameset), + ("frame", startTagFrame), + ("noframes", startTagNoframes) + ]) + startTagHandler.default = startTagOther - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("noframes", self.startTagNoframes) - ]) - self.startTagHandler.default = self.startTagOther + endTagHandler = _utils.MethodDispatcher([ + ("frameset", endTagFrameset) + ]) + endTagHandler.default = endTagOther - self.endTagHandler = _utils.MethodDispatcher([ - ("html", self.endTagHtml) - ]) - self.endTagHandler.default = self.endTagOther + class AfterFramesetPhase(Phase): + # http://www.whatwg.org/specs/web-apps/current-work/#after3 + __slots__ = tuple() def processEOF(self): # Stop parsing @@ -2668,14 +2662,19 @@ def getPhases(debug): self.parser.parseError("unexpected-end-tag-after-frameset", {"name": token["name"]}) - class AfterAfterBodyPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + ("noframes", startTagNoframes) + ]) + startTagHandler.default = startTagOther - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml) - ]) - self.startTagHandler.default = self.startTagOther + endTagHandler = 
_utils.MethodDispatcher([ + ("html", endTagHtml) + ]) + endTagHandler.default = endTagOther + + class AfterAfterBodyPhase(Phase): + __slots__ = tuple() def processEOF(self): pass @@ -2706,15 +2705,13 @@ def getPhases(debug): self.parser.phase = self.parser.phases["inBody"] return token - class AfterAfterFramesetPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml) + ]) + startTagHandler.default = startTagOther - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("noframes", self.startTagNoFrames) - ]) - self.startTagHandler.default = self.startTagOther + class AfterAfterFramesetPhase(Phase): + __slots__ = tuple() def processEOF(self): pass @@ -2741,6 +2738,13 @@ def getPhases(debug): def processEndTag(self, token): self.parser.parseError("expected-eof-but-got-end-tag", {"name": token["name"]}) + + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml), + ("noframes", startTagNoFrames) + ]) + startTagHandler.default = startTagOther + # pylint:enable=unused-argument return { @@ -2774,8 +2778,8 @@ def getPhases(debug): def adjust_attributes(token, replacements): needs_adjustment = viewkeys(token['data']) & viewkeys(replacements) if needs_adjustment: - token['data'] = OrderedDict((replacements.get(k, k), v) - for k, v in token['data'].items()) + token['data'] = type(token['data'])((replacements.get(k, k), v) + for k, v in token['data'].items()) def impliedTagToken(name, type="EndTag", attributes=None, diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/serializer.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/serializer.py index 53f4d44c..d5669d8c 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/serializer.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/serializer.py @@ -274,7 +274,7 @@ 
class HTMLSerializer(object): if token["systemId"]: if token["systemId"].find('"') >= 0: if token["systemId"].find("'") >= 0: - self.serializeError("System identifer contains both single and double quote characters") + self.serializeError("System identifier contains both single and double quote characters") quote_char = "'" else: quote_char = '"' diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treebuilders/base.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treebuilders/base.py index 73973db5..965fce29 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treebuilders/base.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treebuilders/base.py @@ -10,9 +10,9 @@ Marker = None listElementsMap = { None: (frozenset(scopingElements), False), - "button": (frozenset(scopingElements | set([(namespaces["html"], "button")])), False), - "list": (frozenset(scopingElements | set([(namespaces["html"], "ol"), - (namespaces["html"], "ul")])), False), + "button": (frozenset(scopingElements | {(namespaces["html"], "button")}), False), + "list": (frozenset(scopingElements | {(namespaces["html"], "ol"), + (namespaces["html"], "ul")}), False), "table": (frozenset([(namespaces["html"], "html"), (namespaces["html"], "table")]), False), "select": (frozenset([(namespaces["html"], "optgroup"), @@ -28,7 +28,7 @@ class Node(object): :arg name: The tag name associated with the node """ - # The tag name assocaited with the node + # The tag name associated with the node self.name = name # The parent of the current node (or None for the document node) self.parent = None diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treebuilders/dom.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treebuilders/dom.py index dcfac220..d8b53004 100644 --- 
a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treebuilders/dom.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treebuilders/dom.py @@ -1,7 +1,10 @@ from __future__ import absolute_import, division, unicode_literals -from collections import MutableMapping +try: + from collections.abc import MutableMapping +except ImportError: # Python 2.7 + from collections import MutableMapping from xml.dom import minidom, Node import weakref diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree.py index 0dedf441..ea92dc30 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree.py @@ -5,6 +5,8 @@ from pip._vendor.six import text_type import re +from copy import copy + from . import base from .. import _ihatexml from .. import constants @@ -61,16 +63,17 @@ def getETreeBuilder(ElementTreeImplementation, fullTree=False): return self._element.attrib def _setAttributes(self, attributes): - # Delete existing attributes first - # XXX - there may be a better way to do this... 
- for key in list(self._element.attrib.keys()): - del self._element.attrib[key] - for key, value in attributes.items(): - if isinstance(key, tuple): - name = "{%s}%s" % (key[2], key[1]) - else: - name = key - self._element.set(name, value) + el_attrib = self._element.attrib + el_attrib.clear() + if attributes: + # calling .items _always_ allocates, and the above truthy check is cheaper than the + # allocation on average + for key, value in attributes.items(): + if isinstance(key, tuple): + name = "{%s}%s" % (key[2], key[1]) + else: + name = key + el_attrib[name] = value attributes = property(_getAttributes, _setAttributes) @@ -129,8 +132,8 @@ def getETreeBuilder(ElementTreeImplementation, fullTree=False): def cloneNode(self): element = type(self)(self.name, self.namespace) - for name, value in self.attributes.items(): - element.attributes[name] = value + if self._element.attrib: + element._element.attrib = copy(self._element.attrib) return element def reparentChildren(self, newParent): diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py index ca12a99c..f037759f 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py @@ -16,6 +16,11 @@ import warnings import re import sys +try: + from collections.abc import MutableMapping +except ImportError: + from collections import MutableMapping + from . import base from ..constants import DataLossWarning from .. import constants @@ -23,6 +28,7 @@ from . import etree as etree_builders from .. 
import _ihatexml import lxml.etree as etree +from pip._vendor.six import PY3, binary_type fullTree = True @@ -44,7 +50,11 @@ class Document(object): self._childNodes = [] def appendChild(self, element): - self._elementTree.getroot().addnext(element._element) + last = self._elementTree.getroot() + for last in self._elementTree.getroot().itersiblings(): + pass + + last.addnext(element._element) def _getChildNodes(self): return self._childNodes @@ -185,26 +195,37 @@ class TreeBuilder(base.TreeBuilder): infosetFilter = self.infosetFilter = _ihatexml.InfosetFilter(preventDoubleDashComments=True) self.namespaceHTMLElements = namespaceHTMLElements - class Attributes(dict): - def __init__(self, element, value=None): - if value is None: - value = {} + class Attributes(MutableMapping): + def __init__(self, element): self._element = element - dict.__init__(self, value) # pylint:disable=non-parent-init-called - for key, value in self.items(): - if isinstance(key, tuple): - name = "{%s}%s" % (key[2], infosetFilter.coerceAttribute(key[1])) - else: - name = infosetFilter.coerceAttribute(key) - self._element._element.attrib[name] = value - def __setitem__(self, key, value): - dict.__setitem__(self, key, value) + def _coerceKey(self, key): if isinstance(key, tuple): name = "{%s}%s" % (key[2], infosetFilter.coerceAttribute(key[1])) else: name = infosetFilter.coerceAttribute(key) - self._element._element.attrib[name] = value + return name + + def __getitem__(self, key): + value = self._element._element.attrib[self._coerceKey(key)] + if not PY3 and isinstance(value, binary_type): + value = value.decode("ascii") + return value + + def __setitem__(self, key, value): + self._element._element.attrib[self._coerceKey(key)] = value + + def __delitem__(self, key): + del self._element._element.attrib[self._coerceKey(key)] + + def __iter__(self): + return iter(self._element._element.attrib) + + def __len__(self): + return len(self._element._element.attrib) + + def clear(self): + return 
self._element._element.attrib.clear() class Element(builder.Element): def __init__(self, name, namespace): @@ -225,8 +246,10 @@ class TreeBuilder(base.TreeBuilder): def _getAttributes(self): return self._attributes - def _setAttributes(self, attributes): - self._attributes = Attributes(self, attributes) + def _setAttributes(self, value): + attributes = self.attributes + attributes.clear() + attributes.update(value) attributes = property(_getAttributes, _setAttributes) @@ -234,8 +257,11 @@ class TreeBuilder(base.TreeBuilder): data = infosetFilter.coerceCharacters(data) builder.Element.insertText(self, data, insertBefore) - def appendChild(self, child): - builder.Element.appendChild(self, child) + def cloneNode(self): + element = type(self)(self.name, self.namespace) + if self._element.attrib: + element._element.attrib.update(self._element.attrib) + return element class Comment(builder.Comment): def __init__(self, data): diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py index 9bec2076..b2d3aac3 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py @@ -2,10 +2,10 @@ tree, generating tokens identical to those produced by the tokenizer module. -To create a tree walker for a new type of tree, you need to do +To create a tree walker for a new type of tree, you need to implement a tree walker object (called TreeWalker by convention) that -implements a 'serialize' method taking a tree as sole argument and -returning an iterator generating tokens. +implements a 'serialize' method which takes a tree as sole argument and +returns an iterator which generates tokens. 
""" from __future__ import absolute_import, division, unicode_literals diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree.py index 95fc0c17..837b27ec 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree.py @@ -127,4 +127,5 @@ def getETreeBuilder(ElementTreeImplementation): return locals() + getETreeModule = moduleFactoryFactory(getETreeBuilder) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py index e81ddf33..c56af390 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py @@ -1,6 +1,8 @@ from __future__ import absolute_import, division, unicode_literals from pip._vendor.six import text_type +from collections import OrderedDict + from lxml import etree from ..treebuilders.etree import tag_regexp @@ -163,7 +165,7 @@ class TreeWalker(base.NonRecursiveTreeWalker): else: namespace = None tag = ensure_str(node.tag) - attrs = {} + attrs = OrderedDict() for name, value in list(node.attrib.items()): name = ensure_str(name) value = ensure_str(value) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/idna/core.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/idna/core.py index 104624ad..41ec5c71 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/idna/core.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/idna/core.py @@ -9,7 +9,7 @@ _virama_combining_class = 9 
_alabel_prefix = b'xn--' _unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') -if sys.version_info[0] == 3: +if sys.version_info[0] >= 3: unicode = str unichr = chr @@ -300,6 +300,10 @@ def ulabel(label): label = label.lower() if label.startswith(_alabel_prefix): label = label[len(_alabel_prefix):] + if not label: + raise IDNAError('Malformed A-label, no Punycode eligible content found') + if label.decode('ascii')[-1] == '-': + raise IDNAError('A-label must not end with a hyphen') else: check_label(label) return label.decode('ascii') diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/idna/idnadata.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/idna/idnadata.py index a80c959d..a284e4c8 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/idna/idnadata.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/idna/idnadata.py @@ -1,6 +1,6 @@ # This file is automatically generated by tools/idna-data -__version__ = "11.0.0" +__version__ = "13.0.0" scripts = { 'Greek': ( 0x37000000374, @@ -48,16 +48,18 @@ scripts = { 0x300700003008, 0x30210000302a, 0x30380000303c, - 0x340000004db6, - 0x4e0000009ff0, + 0x340000004dc0, + 0x4e0000009ffd, 0xf9000000fa6e, 0xfa700000fada, - 0x200000002a6d7, + 0x16ff000016ff2, + 0x200000002a6de, 0x2a7000002b735, 0x2b7400002b81e, 0x2b8200002cea2, 0x2ceb00002ebe1, 0x2f8000002fa1e, + 0x300000003134b, ), 'Hebrew': ( 0x591000005c8, @@ -74,6 +76,7 @@ scripts = { 0x304100003097, 0x309d000030a0, 0x1b0010001b11f, + 0x1b1500001b153, 0x1f2000001f201, ), 'Katakana': ( @@ -85,6 +88,7 @@ scripts = { 0xff660000ff70, 0xff710000ff9e, 0x1b0000001b001, + 0x1b1640001b168, ), } joining_types = { @@ -387,9 +391,9 @@ joining_types = { 0x853: 68, 0x854: 82, 0x855: 68, - 0x856: 85, - 0x857: 85, - 0x858: 85, + 0x856: 82, + 0x857: 82, + 0x858: 82, 0x860: 68, 0x861: 85, 0x862: 68, @@ -430,6 +434,16 @@ joining_types = { 0x8bb: 68, 0x8bc: 68, 0x8bd: 68, + 0x8be: 68, + 0x8bf: 68, 
+ 0x8c0: 68, + 0x8c1: 68, + 0x8c2: 68, + 0x8c3: 68, + 0x8c4: 68, + 0x8c5: 68, + 0x8c6: 68, + 0x8c7: 68, 0x8e2: 85, 0x1806: 85, 0x1807: 68, @@ -754,6 +768,34 @@ joining_types = { 0x10f52: 68, 0x10f53: 68, 0x10f54: 82, + 0x10fb0: 68, + 0x10fb1: 85, + 0x10fb2: 68, + 0x10fb3: 68, + 0x10fb4: 82, + 0x10fb5: 82, + 0x10fb6: 82, + 0x10fb7: 85, + 0x10fb8: 68, + 0x10fb9: 82, + 0x10fba: 82, + 0x10fbb: 68, + 0x10fbc: 68, + 0x10fbd: 82, + 0x10fbe: 68, + 0x10fbf: 68, + 0x10fc0: 85, + 0x10fc1: 68, + 0x10fc2: 82, + 0x10fc3: 82, + 0x10fc4: 68, + 0x10fc5: 85, + 0x10fc6: 85, + 0x10fc7: 85, + 0x10fc8: 85, + 0x10fc9: 82, + 0x10fca: 68, + 0x10fcb: 76, 0x110bd: 85, 0x110cd: 85, 0x1e900: 68, @@ -824,6 +866,7 @@ joining_types = { 0x1e941: 68, 0x1e942: 68, 0x1e943: 68, + 0x1e94b: 84, } codepoint_classes = { 'PVALID': ( @@ -1126,7 +1169,7 @@ codepoint_classes = { 0x8400000085c, 0x8600000086b, 0x8a0000008b5, - 0x8b6000008be, + 0x8b6000008c8, 0x8d3000008e2, 0x8e300000958, 0x96000000964, @@ -1185,7 +1228,7 @@ codepoint_classes = { 0xb3c00000b45, 0xb4700000b49, 0xb4b00000b4e, - 0xb5600000b58, + 0xb5500000b58, 0xb5f00000b64, 0xb6600000b70, 0xb7100000b72, @@ -1230,8 +1273,7 @@ codepoint_classes = { 0xce000000ce4, 0xce600000cf0, 0xcf100000cf3, - 0xd0000000d04, - 0xd0500000d0d, + 0xd0000000d0d, 0xd0e00000d11, 0xd1200000d45, 0xd4600000d49, @@ -1240,7 +1282,7 @@ codepoint_classes = { 0xd5f00000d64, 0xd6600000d70, 0xd7a00000d80, - 0xd8200000d84, + 0xd8100000d84, 0xd8500000d97, 0xd9a00000db2, 0xdb300000dbc, @@ -1258,18 +1300,11 @@ codepoint_classes = { 0xe5000000e5a, 0xe8100000e83, 0xe8400000e85, - 0xe8700000e89, - 0xe8a00000e8b, - 0xe8d00000e8e, - 0xe9400000e98, - 0xe9900000ea0, - 0xea100000ea4, + 0xe8600000e8b, + 0xe8c00000ea4, 0xea500000ea6, - 0xea700000ea8, - 0xeaa00000eac, - 0xead00000eb3, - 0xeb400000eba, - 0xebb00000ebe, + 0xea700000eb3, + 0xeb400000ebe, 0xec000000ec5, 0xec600000ec7, 0xec800000ece, @@ -1362,6 +1397,7 @@ codepoint_classes = { 0x1a9000001a9a, 0x1aa700001aa8, 0x1ab000001abe, + 
0x1abf00001ac1, 0x1b0000001b4c, 0x1b5000001b5a, 0x1b6b00001b74, @@ -1370,7 +1406,7 @@ codepoint_classes = { 0x1c4000001c4a, 0x1c4d00001c7e, 0x1cd000001cd3, - 0x1cd400001cfa, + 0x1cd400001cfb, 0x1d0000001d2c, 0x1d2f00001d30, 0x1d3b00001d3c, @@ -1613,10 +1649,10 @@ codepoint_classes = { 0x30a1000030fb, 0x30fc000030ff, 0x310500003130, - 0x31a0000031bb, + 0x31a0000031c0, 0x31f000003200, - 0x340000004db6, - 0x4e0000009ff0, + 0x340000004dc0, + 0x4e0000009ffd, 0xa0000000a48d, 0xa4d00000a4fe, 0xa5000000a60d, @@ -1727,8 +1763,15 @@ codepoint_classes = { 0xa7b50000a7b6, 0xa7b70000a7b8, 0xa7b90000a7ba, - 0xa7f70000a7f8, + 0xa7bb0000a7bc, + 0xa7bd0000a7be, + 0xa7bf0000a7c0, + 0xa7c30000a7c4, + 0xa7c80000a7c9, + 0xa7ca0000a7cb, + 0xa7f60000a7f8, 0xa7fa0000a828, + 0xa82c0000a82d, 0xa8400000a874, 0xa8800000a8c6, 0xa8d00000a8da, @@ -1753,7 +1796,7 @@ codepoint_classes = { 0xab200000ab27, 0xab280000ab2f, 0xab300000ab5b, - 0xab600000ab66, + 0xab600000ab6a, 0xabc00000abeb, 0xabec0000abee, 0xabf00000abfa, @@ -1827,9 +1870,14 @@ codepoint_classes = { 0x10cc000010cf3, 0x10d0000010d28, 0x10d3000010d3a, + 0x10e8000010eaa, + 0x10eab00010ead, + 0x10eb000010eb2, 0x10f0000010f1d, 0x10f2700010f28, 0x10f3000010f51, + 0x10fb000010fc5, + 0x10fe000010ff7, 0x1100000011047, 0x1106600011070, 0x1107f000110bb, @@ -1837,12 +1885,12 @@ codepoint_classes = { 0x110f0000110fa, 0x1110000011135, 0x1113600011140, - 0x1114400011147, + 0x1114400011148, 0x1115000011174, 0x1117600011177, 0x11180000111c5, 0x111c9000111cd, - 0x111d0000111db, + 0x111ce000111db, 0x111dc000111dd, 0x1120000011212, 0x1121300011238, @@ -1871,7 +1919,7 @@ codepoint_classes = { 0x1137000011375, 0x114000001144b, 0x114500001145a, - 0x1145e0001145f, + 0x1145e00011462, 0x11480000114c6, 0x114c7000114c8, 0x114d0000114da, @@ -1881,18 +1929,28 @@ codepoint_classes = { 0x1160000011641, 0x1164400011645, 0x116500001165a, - 0x11680000116b8, + 0x11680000116b9, 0x116c0000116ca, 0x117000001171b, 0x1171d0001172c, 0x117300001173a, 0x118000001183b, 
0x118c0000118ea, - 0x118ff00011900, + 0x118ff00011907, + 0x119090001190a, + 0x1190c00011914, + 0x1191500011917, + 0x1191800011936, + 0x1193700011939, + 0x1193b00011944, + 0x119500001195a, + 0x119a0000119a8, + 0x119aa000119d8, + 0x119da000119e2, + 0x119e3000119e5, 0x11a0000011a3f, 0x11a4700011a48, - 0x11a5000011a84, - 0x11a8600011a9a, + 0x11a5000011a9a, 0x11a9d00011a9e, 0x11ac000011af9, 0x11c0000011c09, @@ -1916,6 +1974,7 @@ codepoint_classes = { 0x11d9300011d99, 0x11da000011daa, 0x11ee000011ef7, + 0x11fb000011fb1, 0x120000001239a, 0x1248000012544, 0x130000001342f, @@ -1931,13 +1990,18 @@ codepoint_classes = { 0x16b6300016b78, 0x16b7d00016b90, 0x16e6000016e80, - 0x16f0000016f45, - 0x16f5000016f7f, + 0x16f0000016f4b, + 0x16f4f00016f88, 0x16f8f00016fa0, 0x16fe000016fe2, - 0x17000000187f2, - 0x1880000018af3, + 0x16fe300016fe5, + 0x16ff000016ff2, + 0x17000000187f8, + 0x1880000018cd6, + 0x18d0000018d09, 0x1b0000001b11f, + 0x1b1500001b153, + 0x1b1640001b168, 0x1b1700001b2fc, 0x1bc000001bc6b, 0x1bc700001bc7d, @@ -1955,15 +2019,22 @@ codepoint_classes = { 0x1e01b0001e022, 0x1e0230001e025, 0x1e0260001e02b, + 0x1e1000001e12d, + 0x1e1300001e13e, + 0x1e1400001e14a, + 0x1e14e0001e14f, + 0x1e2c00001e2fa, 0x1e8000001e8c5, 0x1e8d00001e8d7, - 0x1e9220001e94b, + 0x1e9220001e94c, 0x1e9500001e95a, - 0x200000002a6d7, + 0x1fbf00001fbfa, + 0x200000002a6de, 0x2a7000002b735, 0x2b7400002b81e, 0x2b8200002cea2, 0x2ceb00002ebe1, + 0x300000003134b, ), 'CONTEXTJ': ( 0x200c0000200e, diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/idna/package_data.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/idna/package_data.py index 257e8989..ce1c521d 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/idna/package_data.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/idna/package_data.py @@ -1,2 +1,2 @@ -__version__ = '2.8' +__version__ = '2.10' diff --git 
a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/idna/uts46data.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/idna/uts46data.py index a68ed4c0..3766dd49 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/idna/uts46data.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/idna/uts46data.py @@ -4,7 +4,7 @@ """IDNA Mapping Table from UTS46.""" -__version__ = "11.0.0" +__version__ = "13.0.0" def _seg_0(): return [ (0x0, '3'), @@ -1074,7 +1074,7 @@ def _seg_10(): (0x8A0, 'V'), (0x8B5, 'X'), (0x8B6, 'V'), - (0x8BE, 'X'), + (0x8C8, 'X'), (0x8D3, 'V'), (0x8E2, 'X'), (0x8E3, 'V'), @@ -1205,7 +1205,7 @@ def _seg_11(): (0xB49, 'X'), (0xB4B, 'V'), (0xB4E, 'X'), - (0xB56, 'V'), + (0xB55, 'V'), (0xB58, 'X'), (0xB5C, 'M', u'ଡ଼'), (0xB5D, 'M', u'ଢ଼'), @@ -1272,7 +1272,7 @@ def _seg_12(): (0xC64, 'X'), (0xC66, 'V'), (0xC70, 'X'), - (0xC78, 'V'), + (0xC77, 'V'), (0xC8D, 'X'), (0xC8E, 'V'), (0xC91, 'X'), @@ -1299,8 +1299,6 @@ def _seg_12(): (0xCF1, 'V'), (0xCF3, 'X'), (0xD00, 'V'), - (0xD04, 'X'), - (0xD05, 'V'), (0xD0D, 'X'), (0xD0E, 'V'), (0xD11, 'X'), @@ -1314,7 +1312,7 @@ def _seg_12(): (0xD64, 'X'), (0xD66, 'V'), (0xD80, 'X'), - (0xD82, 'V'), + (0xD81, 'V'), (0xD84, 'X'), (0xD85, 'V'), (0xD97, 'X'), @@ -1348,33 +1346,19 @@ def _seg_12(): (0xE83, 'X'), (0xE84, 'V'), (0xE85, 'X'), - (0xE87, 'V'), - (0xE89, 'X'), - (0xE8A, 'V'), + (0xE86, 'V'), (0xE8B, 'X'), - (0xE8D, 'V'), - (0xE8E, 'X'), - (0xE94, 'V'), - ] - -def _seg_13(): - return [ - (0xE98, 'X'), - (0xE99, 'V'), - (0xEA0, 'X'), - (0xEA1, 'V'), + (0xE8C, 'V'), (0xEA4, 'X'), (0xEA5, 'V'), (0xEA6, 'X'), (0xEA7, 'V'), - (0xEA8, 'X'), - (0xEAA, 'V'), - (0xEAC, 'X'), - (0xEAD, 'V'), (0xEB3, 'M', u'ໍາ'), (0xEB4, 'V'), - (0xEBA, 'X'), - (0xEBB, 'V'), + ] + +def _seg_13(): + return [ (0xEBE, 'X'), (0xEC0, 'V'), (0xEC5, 'X'), @@ -1459,10 +1443,6 @@ def _seg_13(): (0x1260, 'V'), (0x1289, 'X'), (0x128A, 'V'), - ] - -def _seg_14(): - return [ (0x128E, 
'X'), (0x1290, 'V'), (0x12B1, 'X'), @@ -1479,6 +1459,10 @@ def _seg_14(): (0x12D8, 'V'), (0x1311, 'X'), (0x1312, 'V'), + ] + +def _seg_14(): + return [ (0x1316, 'X'), (0x1318, 'V'), (0x135B, 'X'), @@ -1563,15 +1547,11 @@ def _seg_14(): (0x1A7F, 'V'), (0x1A8A, 'X'), (0x1A90, 'V'), - ] - -def _seg_15(): - return [ (0x1A9A, 'X'), (0x1AA0, 'V'), (0x1AAE, 'X'), (0x1AB0, 'V'), - (0x1ABF, 'X'), + (0x1AC1, 'X'), (0x1B00, 'V'), (0x1B4C, 'X'), (0x1B50, 'V'), @@ -1583,6 +1563,10 @@ def _seg_15(): (0x1C3B, 'V'), (0x1C4A, 'X'), (0x1C4D, 'V'), + ] + +def _seg_15(): + return [ (0x1C80, 'M', u'в'), (0x1C81, 'M', u'д'), (0x1C82, 'M', u'о'), @@ -1592,10 +1576,57 @@ def _seg_15(): (0x1C87, 'M', u'ѣ'), (0x1C88, 'M', u'ꙋ'), (0x1C89, 'X'), + (0x1C90, 'M', u'ა'), + (0x1C91, 'M', u'ბ'), + (0x1C92, 'M', u'გ'), + (0x1C93, 'M', u'დ'), + (0x1C94, 'M', u'ე'), + (0x1C95, 'M', u'ვ'), + (0x1C96, 'M', u'ზ'), + (0x1C97, 'M', u'თ'), + (0x1C98, 'M', u'ი'), + (0x1C99, 'M', u'კ'), + (0x1C9A, 'M', u'ლ'), + (0x1C9B, 'M', u'მ'), + (0x1C9C, 'M', u'ნ'), + (0x1C9D, 'M', u'ო'), + (0x1C9E, 'M', u'პ'), + (0x1C9F, 'M', u'ჟ'), + (0x1CA0, 'M', u'რ'), + (0x1CA1, 'M', u'ს'), + (0x1CA2, 'M', u'ტ'), + (0x1CA3, 'M', u'უ'), + (0x1CA4, 'M', u'ფ'), + (0x1CA5, 'M', u'ქ'), + (0x1CA6, 'M', u'ღ'), + (0x1CA7, 'M', u'ყ'), + (0x1CA8, 'M', u'შ'), + (0x1CA9, 'M', u'ჩ'), + (0x1CAA, 'M', u'ც'), + (0x1CAB, 'M', u'ძ'), + (0x1CAC, 'M', u'წ'), + (0x1CAD, 'M', u'ჭ'), + (0x1CAE, 'M', u'ხ'), + (0x1CAF, 'M', u'ჯ'), + (0x1CB0, 'M', u'ჰ'), + (0x1CB1, 'M', u'ჱ'), + (0x1CB2, 'M', u'ჲ'), + (0x1CB3, 'M', u'ჳ'), + (0x1CB4, 'M', u'ჴ'), + (0x1CB5, 'M', u'ჵ'), + (0x1CB6, 'M', u'ჶ'), + (0x1CB7, 'M', u'ჷ'), + (0x1CB8, 'M', u'ჸ'), + (0x1CB9, 'M', u'ჹ'), + (0x1CBA, 'M', u'ჺ'), + (0x1CBB, 'X'), + (0x1CBD, 'M', u'ჽ'), + (0x1CBE, 'M', u'ჾ'), + (0x1CBF, 'M', u'ჿ'), (0x1CC0, 'V'), (0x1CC8, 'X'), (0x1CD0, 'V'), - (0x1CFA, 'X'), + (0x1CFB, 'X'), (0x1D00, 'V'), (0x1D2C, 'M', u'a'), (0x1D2D, 'M', u'æ'), @@ -1636,6 +1667,10 @@ def _seg_15(): (0x1D50, 'M', u'm'), 
(0x1D51, 'M', u'ŋ'), (0x1D52, 'M', u'o'), + ] + +def _seg_16(): + return [ (0x1D53, 'M', u'ɔ'), (0x1D54, 'M', u'ᴖ'), (0x1D55, 'M', u'ᴗ'), @@ -1667,10 +1702,6 @@ def _seg_15(): (0x1D9C, 'M', u'c'), (0x1D9D, 'M', u'ɕ'), (0x1D9E, 'M', u'ð'), - ] - -def _seg_16(): - return [ (0x1D9F, 'M', u'ɜ'), (0x1DA0, 'M', u'f'), (0x1DA1, 'M', u'ɟ'), @@ -1740,6 +1771,10 @@ def _seg_16(): (0x1E1E, 'M', u'ḟ'), (0x1E1F, 'V'), (0x1E20, 'M', u'ḡ'), + ] + +def _seg_17(): + return [ (0x1E21, 'V'), (0x1E22, 'M', u'ḣ'), (0x1E23, 'V'), @@ -1771,10 +1806,6 @@ def _seg_16(): (0x1E3D, 'V'), (0x1E3E, 'M', u'ḿ'), (0x1E3F, 'V'), - ] - -def _seg_17(): - return [ (0x1E40, 'M', u'ṁ'), (0x1E41, 'V'), (0x1E42, 'M', u'ṃ'), @@ -1844,6 +1875,10 @@ def _seg_17(): (0x1E82, 'M', u'ẃ'), (0x1E83, 'V'), (0x1E84, 'M', u'ẅ'), + ] + +def _seg_18(): + return [ (0x1E85, 'V'), (0x1E86, 'M', u'ẇ'), (0x1E87, 'V'), @@ -1875,10 +1910,6 @@ def _seg_17(): (0x1EA6, 'M', u'ầ'), (0x1EA7, 'V'), (0x1EA8, 'M', u'ẩ'), - ] - -def _seg_18(): - return [ (0x1EA9, 'V'), (0x1EAA, 'M', u'ẫ'), (0x1EAB, 'V'), @@ -1948,6 +1979,10 @@ def _seg_18(): (0x1EEB, 'V'), (0x1EEC, 'M', u'ử'), (0x1EED, 'V'), + ] + +def _seg_19(): + return [ (0x1EEE, 'M', u'ữ'), (0x1EEF, 'V'), (0x1EF0, 'M', u'ự'), @@ -1979,10 +2014,6 @@ def _seg_18(): (0x1F18, 'M', u'ἐ'), (0x1F19, 'M', u'ἑ'), (0x1F1A, 'M', u'ἒ'), - ] - -def _seg_19(): - return [ (0x1F1B, 'M', u'ἓ'), (0x1F1C, 'M', u'ἔ'), (0x1F1D, 'M', u'ἕ'), @@ -2052,6 +2083,10 @@ def _seg_19(): (0x1F82, 'M', u'ἂι'), (0x1F83, 'M', u'ἃι'), (0x1F84, 'M', u'ἄι'), + ] + +def _seg_20(): + return [ (0x1F85, 'M', u'ἅι'), (0x1F86, 'M', u'ἆι'), (0x1F87, 'M', u'ἇι'), @@ -2083,10 +2118,6 @@ def _seg_19(): (0x1FA1, 'M', u'ὡι'), (0x1FA2, 'M', u'ὢι'), (0x1FA3, 'M', u'ὣι'), - ] - -def _seg_20(): - return [ (0x1FA4, 'M', u'ὤι'), (0x1FA5, 'M', u'ὥι'), (0x1FA6, 'M', u'ὦι'), @@ -2156,6 +2187,10 @@ def _seg_20(): (0x1FF0, 'X'), (0x1FF2, 'M', u'ὼι'), (0x1FF3, 'M', u'ωι'), + ] + +def _seg_21(): + return [ (0x1FF4, 'M', u'ώι'), (0x1FF5, 'X'), 
(0x1FF6, 'V'), @@ -2187,10 +2222,6 @@ def _seg_20(): (0x2035, 'V'), (0x2036, 'M', u'‵‵'), (0x2037, 'M', u'‵‵‵'), - ] - -def _seg_21(): - return [ (0x2038, 'V'), (0x203C, '3', u'!!'), (0x203D, 'V'), @@ -2260,6 +2291,10 @@ def _seg_21(): (0x20F1, 'X'), (0x2100, '3', u'a/c'), (0x2101, '3', u'a/s'), + ] + +def _seg_22(): + return [ (0x2102, 'M', u'c'), (0x2103, 'M', u'°c'), (0x2104, 'V'), @@ -2291,10 +2326,6 @@ def _seg_21(): (0x2127, 'V'), (0x2128, 'M', u'z'), (0x2129, 'V'), - ] - -def _seg_22(): - return [ (0x212A, 'M', u'k'), (0x212B, 'M', u'å'), (0x212C, 'M', u'b'), @@ -2364,6 +2395,10 @@ def _seg_22(): (0x2177, 'M', u'viii'), (0x2178, 'M', u'ix'), (0x2179, 'M', u'x'), + ] + +def _seg_23(): + return [ (0x217A, 'M', u'xi'), (0x217B, 'M', u'xii'), (0x217C, 'M', u'l'), @@ -2395,10 +2430,6 @@ def _seg_22(): (0x244B, 'X'), (0x2460, 'M', u'1'), (0x2461, 'M', u'2'), - ] - -def _seg_23(): - return [ (0x2462, 'M', u'3'), (0x2463, 'M', u'4'), (0x2464, 'M', u'5'), @@ -2468,6 +2499,10 @@ def _seg_23(): (0x24B7, 'M', u'b'), (0x24B8, 'M', u'c'), (0x24B9, 'M', u'd'), + ] + +def _seg_24(): + return [ (0x24BA, 'M', u'e'), (0x24BB, 'M', u'f'), (0x24BC, 'M', u'g'), @@ -2499,10 +2534,6 @@ def _seg_23(): (0x24D6, 'M', u'g'), (0x24D7, 'M', u'h'), (0x24D8, 'M', u'i'), - ] - -def _seg_24(): - return [ (0x24D9, 'M', u'j'), (0x24DA, 'M', u'k'), (0x24DB, 'M', u'l'), @@ -2533,10 +2564,7 @@ def _seg_24(): (0x2B74, 'X'), (0x2B76, 'V'), (0x2B96, 'X'), - (0x2B98, 'V'), - (0x2BC9, 'X'), - (0x2BCA, 'V'), - (0x2BFF, 'X'), + (0x2B97, 'V'), (0x2C00, 'M', u'ⰰ'), (0x2C01, 'M', u'ⰱ'), (0x2C02, 'M', u'ⰲ'), @@ -2575,6 +2603,10 @@ def _seg_24(): (0x2C23, 'M', u'ⱓ'), (0x2C24, 'M', u'ⱔ'), (0x2C25, 'M', u'ⱕ'), + ] + +def _seg_25(): + return [ (0x2C26, 'M', u'ⱖ'), (0x2C27, 'M', u'ⱗ'), (0x2C28, 'M', u'ⱘ'), @@ -2603,10 +2635,6 @@ def _seg_24(): (0x2C6E, 'M', u'ɱ'), (0x2C6F, 'M', u'ɐ'), (0x2C70, 'M', u'ɒ'), - ] - -def _seg_25(): - return [ (0x2C71, 'V'), (0x2C72, 'M', u'ⱳ'), (0x2C73, 'V'), @@ -2679,6 +2707,10 @@ 
def _seg_25(): (0x2CBC, 'M', u'ⲽ'), (0x2CBD, 'V'), (0x2CBE, 'M', u'ⲿ'), + ] + +def _seg_26(): + return [ (0x2CBF, 'V'), (0x2CC0, 'M', u'ⳁ'), (0x2CC1, 'V'), @@ -2707,10 +2739,6 @@ def _seg_25(): (0x2CD8, 'M', u'ⳙ'), (0x2CD9, 'V'), (0x2CDA, 'M', u'ⳛ'), - ] - -def _seg_26(): - return [ (0x2CDB, 'V'), (0x2CDC, 'M', u'ⳝ'), (0x2CDD, 'V'), @@ -2757,7 +2785,7 @@ def _seg_26(): (0x2DD8, 'V'), (0x2DDF, 'X'), (0x2DE0, 'V'), - (0x2E4F, 'X'), + (0x2E53, 'X'), (0x2E80, 'V'), (0x2E9A, 'X'), (0x2E9B, 'V'), @@ -2783,6 +2811,10 @@ def _seg_26(): (0x2F0F, 'M', u'几'), (0x2F10, 'M', u'凵'), (0x2F11, 'M', u'刀'), + ] + +def _seg_27(): + return [ (0x2F12, 'M', u'力'), (0x2F13, 'M', u'勹'), (0x2F14, 'M', u'匕'), @@ -2811,10 +2843,6 @@ def _seg_26(): (0x2F2B, 'M', u'尸'), (0x2F2C, 'M', u'屮'), (0x2F2D, 'M', u'山'), - ] - -def _seg_27(): - return [ (0x2F2E, 'M', u'巛'), (0x2F2F, 'M', u'工'), (0x2F30, 'M', u'己'), @@ -2887,6 +2915,10 @@ def _seg_27(): (0x2F73, 'M', u'穴'), (0x2F74, 'M', u'立'), (0x2F75, 'M', u'竹'), + ] + +def _seg_28(): + return [ (0x2F76, 'M', u'米'), (0x2F77, 'M', u'糸'), (0x2F78, 'M', u'缶'), @@ -2915,10 +2947,6 @@ def _seg_27(): (0x2F8F, 'M', u'行'), (0x2F90, 'M', u'衣'), (0x2F91, 'M', u'襾'), - ] - -def _seg_28(): - return [ (0x2F92, 'M', u'見'), (0x2F93, 'M', u'角'), (0x2F94, 'M', u'言'), @@ -2991,6 +3019,10 @@ def _seg_28(): (0x3000, '3', u' '), (0x3001, 'V'), (0x3002, 'M', u'.'), + ] + +def _seg_29(): + return [ (0x3003, 'V'), (0x3036, 'M', u'〒'), (0x3037, 'V'), @@ -3019,10 +3051,6 @@ def _seg_28(): (0x3136, 'M', u'ᆭ'), (0x3137, 'M', u'ᄃ'), (0x3138, 'M', u'ᄄ'), - ] - -def _seg_29(): - return [ (0x3139, 'M', u'ᄅ'), (0x313A, 'M', u'ᆰ'), (0x313B, 'M', u'ᆱ'), @@ -3095,6 +3123,10 @@ def _seg_29(): (0x317E, 'M', u'ᄶ'), (0x317F, 'M', u'ᅀ'), (0x3180, 'M', u'ᅇ'), + ] + +def _seg_30(): + return [ (0x3181, 'M', u'ᅌ'), (0x3182, 'M', u'ᇱ'), (0x3183, 'M', u'ᇲ'), @@ -3123,15 +3155,9 @@ def _seg_29(): (0x319B, 'M', u'丙'), (0x319C, 'M', u'丁'), (0x319D, 'M', u'天'), - ] - -def _seg_30(): - return [ (0x319E, 
'M', u'地'), (0x319F, 'M', u'人'), (0x31A0, 'V'), - (0x31BB, 'X'), - (0x31C0, 'V'), (0x31E4, 'X'), (0x31F0, 'V'), (0x3200, '3', u'(ᄀ)'), @@ -3201,6 +3227,10 @@ def _seg_30(): (0x3240, '3', u'(祭)'), (0x3241, '3', u'(休)'), (0x3242, '3', u'(自)'), + ] + +def _seg_31(): + return [ (0x3243, '3', u'(至)'), (0x3244, 'M', u'問'), (0x3245, 'M', u'幼'), @@ -3227,10 +3257,6 @@ def _seg_30(): (0x3261, 'M', u'ᄂ'), (0x3262, 'M', u'ᄃ'), (0x3263, 'M', u'ᄅ'), - ] - -def _seg_31(): - return [ (0x3264, 'M', u'ᄆ'), (0x3265, 'M', u'ᄇ'), (0x3266, 'M', u'ᄉ'), @@ -3305,6 +3331,10 @@ def _seg_31(): (0x32AB, 'M', u'学'), (0x32AC, 'M', u'監'), (0x32AD, 'M', u'企'), + ] + +def _seg_32(): + return [ (0x32AE, 'M', u'資'), (0x32AF, 'M', u'協'), (0x32B0, 'M', u'夜'), @@ -3331,10 +3361,6 @@ def _seg_31(): (0x32C5, 'M', u'6月'), (0x32C6, 'M', u'7月'), (0x32C7, 'M', u'8月'), - ] - -def _seg_32(): - return [ (0x32C8, 'M', u'9月'), (0x32C9, 'M', u'10月'), (0x32CA, 'M', u'11月'), @@ -3390,7 +3416,7 @@ def _seg_32(): (0x32FC, 'M', u'ヰ'), (0x32FD, 'M', u'ヱ'), (0x32FE, 'M', u'ヲ'), - (0x32FF, 'X'), + (0x32FF, 'M', u'令和'), (0x3300, 'M', u'アパート'), (0x3301, 'M', u'アルファ'), (0x3302, 'M', u'アンペア'), @@ -3409,6 +3435,10 @@ def _seg_32(): (0x330F, 'M', u'ガンマ'), (0x3310, 'M', u'ギガ'), (0x3311, 'M', u'ギニー'), + ] + +def _seg_33(): + return [ (0x3312, 'M', u'キュリー'), (0x3313, 'M', u'ギルダー'), (0x3314, 'M', u'キロ'), @@ -3435,10 +3465,6 @@ def _seg_32(): (0x3329, 'M', u'ノット'), (0x332A, 'M', u'ハイツ'), (0x332B, 'M', u'パーセント'), - ] - -def _seg_33(): - return [ (0x332C, 'M', u'パーツ'), (0x332D, 'M', u'バーレル'), (0x332E, 'M', u'ピアストル'), @@ -3513,6 +3539,10 @@ def _seg_33(): (0x3373, 'M', u'au'), (0x3374, 'M', u'bar'), (0x3375, 'M', u'ov'), + ] + +def _seg_34(): + return [ (0x3376, 'M', u'pc'), (0x3377, 'M', u'dm'), (0x3378, 'M', u'dm2'), @@ -3539,10 +3569,6 @@ def _seg_33(): (0x338D, 'M', u'μg'), (0x338E, 'M', u'mg'), (0x338F, 'M', u'kg'), - ] - -def _seg_34(): - return [ (0x3390, 'M', u'hz'), (0x3391, 'M', u'khz'), (0x3392, 'M', u'mhz'), @@ -3617,6 
+3643,10 @@ def _seg_34(): (0x33D7, 'M', u'ph'), (0x33D8, 'X'), (0x33D9, 'M', u'ppm'), + ] + +def _seg_35(): + return [ (0x33DA, 'M', u'pr'), (0x33DB, 'M', u'sr'), (0x33DC, 'M', u'sv'), @@ -3643,10 +3673,6 @@ def _seg_34(): (0x33F1, 'M', u'18日'), (0x33F2, 'M', u'19日'), (0x33F3, 'M', u'20日'), - ] - -def _seg_35(): - return [ (0x33F4, 'M', u'21日'), (0x33F5, 'M', u'22日'), (0x33F6, 'M', u'23日'), @@ -3660,9 +3686,7 @@ def _seg_35(): (0x33FE, 'M', u'31日'), (0x33FF, 'M', u'gal'), (0x3400, 'V'), - (0x4DB6, 'X'), - (0x4DC0, 'V'), - (0x9FF0, 'X'), + (0x9FFD, 'X'), (0xA000, 'V'), (0xA48D, 'X'), (0xA490, 'V'), @@ -3723,6 +3747,10 @@ def _seg_35(): (0xA685, 'V'), (0xA686, 'M', u'ꚇ'), (0xA687, 'V'), + ] + +def _seg_36(): + return [ (0xA688, 'M', u'ꚉ'), (0xA689, 'V'), (0xA68A, 'M', u'ꚋ'), @@ -3747,10 +3775,6 @@ def _seg_35(): (0xA69D, 'M', u'ь'), (0xA69E, 'V'), (0xA6F8, 'X'), - ] - -def _seg_36(): - return [ (0xA700, 'V'), (0xA722, 'M', u'ꜣ'), (0xA723, 'V'), @@ -3827,6 +3851,10 @@ def _seg_36(): (0xA76C, 'M', u'ꝭ'), (0xA76D, 'V'), (0xA76E, 'M', u'ꝯ'), + ] + +def _seg_37(): + return [ (0xA76F, 'V'), (0xA770, 'M', u'ꝯ'), (0xA771, 'V'), @@ -3851,10 +3879,6 @@ def _seg_36(): (0xA78E, 'V'), (0xA790, 'M', u'ꞑ'), (0xA791, 'V'), - ] - -def _seg_37(): - return [ (0xA792, 'M', u'ꞓ'), (0xA793, 'V'), (0xA796, 'M', u'ꞗ'), @@ -3891,14 +3915,31 @@ def _seg_37(): (0xA7B5, 'V'), (0xA7B6, 'M', u'ꞷ'), (0xA7B7, 'V'), - (0xA7B8, 'X'), + (0xA7B8, 'M', u'ꞹ'), (0xA7B9, 'V'), - (0xA7BA, 'X'), - (0xA7F7, 'V'), + (0xA7BA, 'M', u'ꞻ'), + (0xA7BB, 'V'), + (0xA7BC, 'M', u'ꞽ'), + (0xA7BD, 'V'), + (0xA7BE, 'M', u'ꞿ'), + (0xA7BF, 'V'), + (0xA7C0, 'X'), + (0xA7C2, 'M', u'ꟃ'), + (0xA7C3, 'V'), + (0xA7C4, 'M', u'ꞔ'), + (0xA7C5, 'M', u'ʂ'), + (0xA7C6, 'M', u'ᶎ'), + (0xA7C7, 'M', u'ꟈ'), + (0xA7C8, 'V'), + (0xA7C9, 'M', u'ꟊ'), + (0xA7CA, 'V'), + (0xA7CB, 'X'), + (0xA7F5, 'M', u'ꟶ'), + (0xA7F6, 'V'), (0xA7F8, 'M', u'ħ'), (0xA7F9, 'M', u'œ'), (0xA7FA, 'V'), - (0xA82C, 'X'), + (0xA82D, 'X'), (0xA830, 'V'), (0xA83A, 'X'), 
(0xA840, 'V'), @@ -3914,6 +3955,10 @@ def _seg_37(): (0xA980, 'V'), (0xA9CE, 'X'), (0xA9CF, 'V'), + ] + +def _seg_38(): + return [ (0xA9DA, 'X'), (0xA9DE, 'V'), (0xA9FF, 'X'), @@ -3943,7 +3988,9 @@ def _seg_37(): (0xAB5E, 'M', u'ɫ'), (0xAB5F, 'M', u'ꭒ'), (0xAB60, 'V'), - (0xAB66, 'X'), + (0xAB69, 'M', u'ʍ'), + (0xAB6A, 'V'), + (0xAB6C, 'X'), (0xAB70, 'M', u'Ꭰ'), (0xAB71, 'M', u'Ꭱ'), (0xAB72, 'M', u'Ꭲ'), @@ -3955,10 +4002,6 @@ def _seg_37(): (0xAB78, 'M', u'Ꭸ'), (0xAB79, 'M', u'Ꭹ'), (0xAB7A, 'M', u'Ꭺ'), - ] - -def _seg_38(): - return [ (0xAB7B, 'M', u'Ꭻ'), (0xAB7C, 'M', u'Ꭼ'), (0xAB7D, 'M', u'Ꭽ'), @@ -4016,6 +4059,10 @@ def _seg_38(): (0xABB1, 'M', u'Ꮱ'), (0xABB2, 'M', u'Ꮲ'), (0xABB3, 'M', u'Ꮳ'), + ] + +def _seg_39(): + return [ (0xABB4, 'M', u'Ꮴ'), (0xABB5, 'M', u'Ꮵ'), (0xABB6, 'M', u'Ꮶ'), @@ -4059,10 +4106,6 @@ def _seg_38(): (0xF913, 'M', u'邏'), (0xF914, 'M', u'樂'), (0xF915, 'M', u'洛'), - ] - -def _seg_39(): - return [ (0xF916, 'M', u'烙'), (0xF917, 'M', u'珞'), (0xF918, 'M', u'落'), @@ -4120,6 +4163,10 @@ def _seg_39(): (0xF94C, 'M', u'樓'), (0xF94D, 'M', u'淚'), (0xF94E, 'M', u'漏'), + ] + +def _seg_40(): + return [ (0xF94F, 'M', u'累'), (0xF950, 'M', u'縷'), (0xF951, 'M', u'陋'), @@ -4163,10 +4210,6 @@ def _seg_39(): (0xF977, 'M', u'亮'), (0xF978, 'M', u'兩'), (0xF979, 'M', u'凉'), - ] - -def _seg_40(): - return [ (0xF97A, 'M', u'梁'), (0xF97B, 'M', u'糧'), (0xF97C, 'M', u'良'), @@ -4224,6 +4267,10 @@ def _seg_40(): (0xF9B0, 'M', u'聆'), (0xF9B1, 'M', u'鈴'), (0xF9B2, 'M', u'零'), + ] + +def _seg_41(): + return [ (0xF9B3, 'M', u'靈'), (0xF9B4, 'M', u'領'), (0xF9B5, 'M', u'例'), @@ -4267,10 +4314,6 @@ def _seg_40(): (0xF9DB, 'M', u'率'), (0xF9DC, 'M', u'隆'), (0xF9DD, 'M', u'利'), - ] - -def _seg_41(): - return [ (0xF9DE, 'M', u'吏'), (0xF9DF, 'M', u'履'), (0xF9E0, 'M', u'易'), @@ -4328,6 +4371,10 @@ def _seg_41(): (0xFA16, 'M', u'猪'), (0xFA17, 'M', u'益'), (0xFA18, 'M', u'礼'), + ] + +def _seg_42(): + return [ (0xFA19, 'M', u'神'), (0xFA1A, 'M', u'祥'), (0xFA1B, 'M', u'福'), @@ -4371,10 
+4418,6 @@ def _seg_41(): (0xFA44, 'M', u'梅'), (0xFA45, 'M', u'海'), (0xFA46, 'M', u'渚'), - ] - -def _seg_42(): - return [ (0xFA47, 'M', u'漢'), (0xFA48, 'M', u'煮'), (0xFA49, 'M', u'爫'), @@ -4432,6 +4475,10 @@ def _seg_42(): (0xFA7F, 'M', u'奔'), (0xFA80, 'M', u'婢'), (0xFA81, 'M', u'嬨'), + ] + +def _seg_43(): + return [ (0xFA82, 'M', u'廒'), (0xFA83, 'M', u'廙'), (0xFA84, 'M', u'彩'), @@ -4475,10 +4522,6 @@ def _seg_42(): (0xFAAA, 'M', u'着'), (0xFAAB, 'M', u'磌'), (0xFAAC, 'M', u'窱'), - ] - -def _seg_43(): - return [ (0xFAAD, 'M', u'節'), (0xFAAE, 'M', u'类'), (0xFAAF, 'M', u'絛'), @@ -4536,6 +4579,10 @@ def _seg_43(): (0xFB14, 'M', u'մե'), (0xFB15, 'M', u'մի'), (0xFB16, 'M', u'վն'), + ] + +def _seg_44(): + return [ (0xFB17, 'M', u'մխ'), (0xFB18, 'X'), (0xFB1D, 'M', u'יִ'), @@ -4579,10 +4626,6 @@ def _seg_43(): (0xFB43, 'M', u'ףּ'), (0xFB44, 'M', u'פּ'), (0xFB45, 'X'), - ] - -def _seg_44(): - return [ (0xFB46, 'M', u'צּ'), (0xFB47, 'M', u'קּ'), (0xFB48, 'M', u'רּ'), @@ -4640,6 +4683,10 @@ def _seg_44(): (0xFBEE, 'M', u'ئو'), (0xFBF0, 'M', u'ئۇ'), (0xFBF2, 'M', u'ئۆ'), + ] + +def _seg_45(): + return [ (0xFBF4, 'M', u'ئۈ'), (0xFBF6, 'M', u'ئې'), (0xFBF9, 'M', u'ئى'), @@ -4683,10 +4730,6 @@ def _seg_44(): (0xFC24, 'M', u'ضخ'), (0xFC25, 'M', u'ضم'), (0xFC26, 'M', u'طح'), - ] - -def _seg_45(): - return [ (0xFC27, 'M', u'طم'), (0xFC28, 'M', u'ظم'), (0xFC29, 'M', u'عج'), @@ -4744,6 +4787,10 @@ def _seg_45(): (0xFC5D, 'M', u'ىٰ'), (0xFC5E, '3', u' ٌّ'), (0xFC5F, '3', u' ٍّ'), + ] + +def _seg_46(): + return [ (0xFC60, '3', u' َّ'), (0xFC61, '3', u' ُّ'), (0xFC62, '3', u' ِّ'), @@ -4787,10 +4834,6 @@ def _seg_45(): (0xFC88, 'M', u'ما'), (0xFC89, 'M', u'مم'), (0xFC8A, 'M', u'نر'), - ] - -def _seg_46(): - return [ (0xFC8B, 'M', u'نز'), (0xFC8C, 'M', u'نم'), (0xFC8D, 'M', u'نن'), @@ -4848,6 +4891,10 @@ def _seg_46(): (0xFCC1, 'M', u'فم'), (0xFCC2, 'M', u'قح'), (0xFCC3, 'M', u'قم'), + ] + +def _seg_47(): + return [ (0xFCC4, 'M', u'كج'), (0xFCC5, 'M', u'كح'), (0xFCC6, 'M', u'كخ'), @@ 
-4891,10 +4938,6 @@ def _seg_46(): (0xFCEC, 'M', u'كم'), (0xFCED, 'M', u'لم'), (0xFCEE, 'M', u'نم'), - ] - -def _seg_47(): - return [ (0xFCEF, 'M', u'نه'), (0xFCF0, 'M', u'يم'), (0xFCF1, 'M', u'يه'), @@ -4952,6 +4995,10 @@ def _seg_47(): (0xFD25, 'M', u'شج'), (0xFD26, 'M', u'شح'), (0xFD27, 'M', u'شخ'), + ] + +def _seg_48(): + return [ (0xFD28, 'M', u'شم'), (0xFD29, 'M', u'شر'), (0xFD2A, 'M', u'سر'), @@ -4995,10 +5042,6 @@ def _seg_47(): (0xFD66, 'M', u'صمم'), (0xFD67, 'M', u'شحم'), (0xFD69, 'M', u'شجي'), - ] - -def _seg_48(): - return [ (0xFD6A, 'M', u'شمخ'), (0xFD6C, 'M', u'شمم'), (0xFD6E, 'M', u'ضحى'), @@ -5056,6 +5099,10 @@ def _seg_48(): (0xFDAC, 'M', u'لجي'), (0xFDAD, 'M', u'لمي'), (0xFDAE, 'M', u'يحي'), + ] + +def _seg_49(): + return [ (0xFDAF, 'M', u'يجي'), (0xFDB0, 'M', u'يمي'), (0xFDB1, 'M', u'ممي'), @@ -5099,10 +5146,6 @@ def _seg_48(): (0xFDFE, 'X'), (0xFE00, 'I'), (0xFE10, '3', u','), - ] - -def _seg_49(): - return [ (0xFE11, 'M', u'、'), (0xFE12, 'X'), (0xFE13, '3', u':'), @@ -5160,6 +5203,10 @@ def _seg_49(): (0xFE64, '3', u'<'), (0xFE65, '3', u'>'), (0xFE66, '3', u'='), + ] + +def _seg_50(): + return [ (0xFE67, 'X'), (0xFE68, '3', u'\\'), (0xFE69, '3', u'$'), @@ -5203,10 +5250,6 @@ def _seg_49(): (0xFEB1, 'M', u'س'), (0xFEB5, 'M', u'ش'), (0xFEB9, 'M', u'ص'), - ] - -def _seg_50(): - return [ (0xFEBD, 'M', u'ض'), (0xFEC1, 'M', u'ط'), (0xFEC5, 'M', u'ظ'), @@ -5264,6 +5307,10 @@ def _seg_50(): (0xFF21, 'M', u'a'), (0xFF22, 'M', u'b'), (0xFF23, 'M', u'c'), + ] + +def _seg_51(): + return [ (0xFF24, 'M', u'd'), (0xFF25, 'M', u'e'), (0xFF26, 'M', u'f'), @@ -5307,10 +5354,6 @@ def _seg_50(): (0xFF4C, 'M', u'l'), (0xFF4D, 'M', u'm'), (0xFF4E, 'M', u'n'), - ] - -def _seg_51(): - return [ (0xFF4F, 'M', u'o'), (0xFF50, 'M', u'p'), (0xFF51, 'M', u'q'), @@ -5368,6 +5411,10 @@ def _seg_51(): (0xFF85, 'M', u'ナ'), (0xFF86, 'M', u'ニ'), (0xFF87, 'M', u'ヌ'), + ] + +def _seg_52(): + return [ (0xFF88, 'M', u'ネ'), (0xFF89, 'M', u'ノ'), (0xFF8A, 'M', u'ハ'), @@ -5411,10 +5458,6 
@@ def _seg_51(): (0xFFB0, 'M', u'ᄚ'), (0xFFB1, 'M', u'ᄆ'), (0xFFB2, 'M', u'ᄇ'), - ] - -def _seg_52(): - return [ (0xFFB3, 'M', u'ᄈ'), (0xFFB4, 'M', u'ᄡ'), (0xFFB5, 'M', u'ᄉ'), @@ -5472,6 +5515,10 @@ def _seg_52(): (0x10000, 'V'), (0x1000C, 'X'), (0x1000D, 'V'), + ] + +def _seg_53(): + return [ (0x10027, 'X'), (0x10028, 'V'), (0x1003B, 'X'), @@ -5490,7 +5537,7 @@ def _seg_52(): (0x10137, 'V'), (0x1018F, 'X'), (0x10190, 'V'), - (0x1019C, 'X'), + (0x1019D, 'X'), (0x101A0, 'V'), (0x101A1, 'X'), (0x101D0, 'V'), @@ -5515,10 +5562,6 @@ def _seg_52(): (0x103D6, 'X'), (0x10400, 'M', u'𐐨'), (0x10401, 'M', u'𐐩'), - ] - -def _seg_53(): - return [ (0x10402, 'M', u'𐐪'), (0x10403, 'M', u'𐐫'), (0x10404, 'M', u'𐐬'), @@ -5576,6 +5619,10 @@ def _seg_53(): (0x104BC, 'M', u'𐓤'), (0x104BD, 'M', u'𐓥'), (0x104BE, 'M', u'𐓦'), + ] + +def _seg_54(): + return [ (0x104BF, 'M', u'𐓧'), (0x104C0, 'M', u'𐓨'), (0x104C1, 'M', u'𐓩'), @@ -5619,10 +5666,6 @@ def _seg_53(): (0x1080A, 'V'), (0x10836, 'X'), (0x10837, 'V'), - ] - -def _seg_54(): - return [ (0x10839, 'X'), (0x1083C, 'V'), (0x1083D, 'X'), @@ -5680,6 +5723,10 @@ def _seg_54(): (0x10B9D, 'X'), (0x10BA9, 'V'), (0x10BB0, 'X'), + ] + +def _seg_55(): + return [ (0x10C00, 'V'), (0x10C49, 'X'), (0x10C80, 'M', u'𐳀'), @@ -5723,10 +5770,6 @@ def _seg_54(): (0x10CA6, 'M', u'𐳦'), (0x10CA7, 'M', u'𐳧'), (0x10CA8, 'M', u'𐳨'), - ] - -def _seg_55(): - return [ (0x10CA9, 'M', u'𐳩'), (0x10CAA, 'M', u'𐳪'), (0x10CAB, 'M', u'𐳫'), @@ -5746,10 +5789,20 @@ def _seg_55(): (0x10D3A, 'X'), (0x10E60, 'V'), (0x10E7F, 'X'), + (0x10E80, 'V'), + (0x10EAA, 'X'), + (0x10EAB, 'V'), + (0x10EAE, 'X'), + (0x10EB0, 'V'), + (0x10EB2, 'X'), (0x10F00, 'V'), (0x10F28, 'X'), (0x10F30, 'V'), (0x10F5A, 'X'), + (0x10FB0, 'V'), + (0x10FCC, 'X'), + (0x10FE0, 'V'), + (0x10FF7, 'X'), (0x11000, 'V'), (0x1104E, 'X'), (0x11052, 'V'), @@ -5765,17 +5818,19 @@ def _seg_55(): (0x11100, 'V'), (0x11135, 'X'), (0x11136, 'V'), - (0x11147, 'X'), + (0x11148, 'X'), (0x11150, 'V'), (0x11177, 'X'), (0x11180, 
'V'), - (0x111CE, 'X'), - (0x111D0, 'V'), (0x111E0, 'X'), (0x111E1, 'V'), (0x111F5, 'X'), (0x11200, 'V'), (0x11212, 'X'), + ] + +def _seg_56(): + return [ (0x11213, 'V'), (0x1123F, 'X'), (0x11280, 'V'), @@ -5823,15 +5878,9 @@ def _seg_55(): (0x11370, 'V'), (0x11375, 'X'), (0x11400, 'V'), - (0x1145A, 'X'), - (0x1145B, 'V'), (0x1145C, 'X'), (0x1145D, 'V'), - ] - -def _seg_56(): - return [ - (0x1145F, 'X'), + (0x11462, 'X'), (0x11480, 'V'), (0x114C8, 'X'), (0x114D0, 'V'), @@ -5847,7 +5896,7 @@ def _seg_56(): (0x11660, 'V'), (0x1166D, 'X'), (0x11680, 'V'), - (0x116B8, 'X'), + (0x116B9, 'X'), (0x116C0, 'V'), (0x116CA, 'X'), (0x11700, 'V'), @@ -5882,6 +5931,10 @@ def _seg_56(): (0x118B5, 'M', u'𑣕'), (0x118B6, 'M', u'𑣖'), (0x118B7, 'M', u'𑣗'), + ] + +def _seg_57(): + return [ (0x118B8, 'M', u'𑣘'), (0x118B9, 'M', u'𑣙'), (0x118BA, 'M', u'𑣚'), @@ -5893,12 +5946,30 @@ def _seg_56(): (0x118C0, 'V'), (0x118F3, 'X'), (0x118FF, 'V'), - (0x11900, 'X'), + (0x11907, 'X'), + (0x11909, 'V'), + (0x1190A, 'X'), + (0x1190C, 'V'), + (0x11914, 'X'), + (0x11915, 'V'), + (0x11917, 'X'), + (0x11918, 'V'), + (0x11936, 'X'), + (0x11937, 'V'), + (0x11939, 'X'), + (0x1193B, 'V'), + (0x11947, 'X'), + (0x11950, 'V'), + (0x1195A, 'X'), + (0x119A0, 'V'), + (0x119A8, 'X'), + (0x119AA, 'V'), + (0x119D8, 'X'), + (0x119DA, 'V'), + (0x119E5, 'X'), (0x11A00, 'V'), (0x11A48, 'X'), (0x11A50, 'V'), - (0x11A84, 'X'), - (0x11A86, 'V'), (0x11AA3, 'X'), (0x11AC0, 'V'), (0x11AF9, 'X'), @@ -5931,10 +6002,6 @@ def _seg_56(): (0x11D50, 'V'), (0x11D5A, 'X'), (0x11D60, 'V'), - ] - -def _seg_57(): - return [ (0x11D66, 'X'), (0x11D67, 'V'), (0x11D69, 'X'), @@ -5948,7 +6015,11 @@ def _seg_57(): (0x11DAA, 'X'), (0x11EE0, 'V'), (0x11EF9, 'X'), - (0x12000, 'V'), + (0x11FB0, 'V'), + (0x11FB1, 'X'), + (0x11FC0, 'V'), + (0x11FF2, 'X'), + (0x11FFF, 'V'), (0x1239A, 'X'), (0x12400, 'V'), (0x1246F, 'X'), @@ -5964,6 +6035,10 @@ def _seg_57(): (0x16A39, 'X'), (0x16A40, 'V'), (0x16A5F, 'X'), + ] + +def _seg_58(): + return [ (0x16A60, 
'V'), (0x16A6A, 'X'), (0x16A6E, 'V'), @@ -5982,22 +6057,62 @@ def _seg_57(): (0x16B78, 'X'), (0x16B7D, 'V'), (0x16B90, 'X'), + (0x16E40, 'M', u'𖹠'), + (0x16E41, 'M', u'𖹡'), + (0x16E42, 'M', u'𖹢'), + (0x16E43, 'M', u'𖹣'), + (0x16E44, 'M', u'𖹤'), + (0x16E45, 'M', u'𖹥'), + (0x16E46, 'M', u'𖹦'), + (0x16E47, 'M', u'𖹧'), + (0x16E48, 'M', u'𖹨'), + (0x16E49, 'M', u'𖹩'), + (0x16E4A, 'M', u'𖹪'), + (0x16E4B, 'M', u'𖹫'), + (0x16E4C, 'M', u'𖹬'), + (0x16E4D, 'M', u'𖹭'), + (0x16E4E, 'M', u'𖹮'), + (0x16E4F, 'M', u'𖹯'), + (0x16E50, 'M', u'𖹰'), + (0x16E51, 'M', u'𖹱'), + (0x16E52, 'M', u'𖹲'), + (0x16E53, 'M', u'𖹳'), + (0x16E54, 'M', u'𖹴'), + (0x16E55, 'M', u'𖹵'), + (0x16E56, 'M', u'𖹶'), + (0x16E57, 'M', u'𖹷'), + (0x16E58, 'M', u'𖹸'), + (0x16E59, 'M', u'𖹹'), + (0x16E5A, 'M', u'𖹺'), + (0x16E5B, 'M', u'𖹻'), + (0x16E5C, 'M', u'𖹼'), + (0x16E5D, 'M', u'𖹽'), + (0x16E5E, 'M', u'𖹾'), + (0x16E5F, 'M', u'𖹿'), (0x16E60, 'V'), (0x16E9B, 'X'), (0x16F00, 'V'), - (0x16F45, 'X'), - (0x16F50, 'V'), - (0x16F7F, 'X'), + (0x16F4B, 'X'), + (0x16F4F, 'V'), + (0x16F88, 'X'), (0x16F8F, 'V'), (0x16FA0, 'X'), (0x16FE0, 'V'), - (0x16FE2, 'X'), + (0x16FE5, 'X'), + (0x16FF0, 'V'), + (0x16FF2, 'X'), (0x17000, 'V'), - (0x187F2, 'X'), + (0x187F8, 'X'), (0x18800, 'V'), - (0x18AF3, 'X'), + (0x18CD6, 'X'), + (0x18D00, 'V'), + (0x18D09, 'X'), (0x1B000, 'V'), (0x1B11F, 'X'), + (0x1B150, 'V'), + (0x1B153, 'X'), + (0x1B164, 'V'), + (0x1B168, 'X'), (0x1B170, 'V'), (0x1B2FC, 'X'), (0x1BC00, 'V'), @@ -6024,6 +6139,10 @@ def _seg_57(): (0x1D163, 'M', u'𝅘𝅥𝅱'), (0x1D164, 'M', u'𝅘𝅥𝅲'), (0x1D165, 'V'), + ] + +def _seg_59(): + return [ (0x1D173, 'X'), (0x1D17B, 'V'), (0x1D1BB, 'M', u'𝆹𝅥'), @@ -6035,10 +6154,6 @@ def _seg_57(): (0x1D1C1, 'V'), (0x1D1E9, 'X'), (0x1D200, 'V'), - ] - -def _seg_58(): - return [ (0x1D246, 'X'), (0x1D2E0, 'V'), (0x1D2F4, 'X'), @@ -6128,6 +6243,10 @@ def _seg_58(): (0x1D44F, 'M', u'b'), (0x1D450, 'M', u'c'), (0x1D451, 'M', u'd'), + ] + +def _seg_60(): + return [ (0x1D452, 'M', u'e'), (0x1D453, 'M', u'f'), 
(0x1D454, 'M', u'g'), @@ -6139,10 +6258,6 @@ def _seg_58(): (0x1D45A, 'M', u'm'), (0x1D45B, 'M', u'n'), (0x1D45C, 'M', u'o'), - ] - -def _seg_59(): - return [ (0x1D45D, 'M', u'p'), (0x1D45E, 'M', u'q'), (0x1D45F, 'M', u'r'), @@ -6232,6 +6347,10 @@ def _seg_59(): (0x1D4B6, 'M', u'a'), (0x1D4B7, 'M', u'b'), (0x1D4B8, 'M', u'c'), + ] + +def _seg_61(): + return [ (0x1D4B9, 'M', u'd'), (0x1D4BA, 'X'), (0x1D4BB, 'M', u'f'), @@ -6243,10 +6362,6 @@ def _seg_59(): (0x1D4C1, 'M', u'l'), (0x1D4C2, 'M', u'm'), (0x1D4C3, 'M', u'n'), - ] - -def _seg_60(): - return [ (0x1D4C4, 'X'), (0x1D4C5, 'M', u'p'), (0x1D4C6, 'M', u'q'), @@ -6336,6 +6451,10 @@ def _seg_60(): (0x1D51B, 'M', u'x'), (0x1D51C, 'M', u'y'), (0x1D51D, 'X'), + ] + +def _seg_62(): + return [ (0x1D51E, 'M', u'a'), (0x1D51F, 'M', u'b'), (0x1D520, 'M', u'c'), @@ -6347,10 +6466,6 @@ def _seg_60(): (0x1D526, 'M', u'i'), (0x1D527, 'M', u'j'), (0x1D528, 'M', u'k'), - ] - -def _seg_61(): - return [ (0x1D529, 'M', u'l'), (0x1D52A, 'M', u'm'), (0x1D52B, 'M', u'n'), @@ -6440,6 +6555,10 @@ def _seg_61(): (0x1D581, 'M', u'v'), (0x1D582, 'M', u'w'), (0x1D583, 'M', u'x'), + ] + +def _seg_63(): + return [ (0x1D584, 'M', u'y'), (0x1D585, 'M', u'z'), (0x1D586, 'M', u'a'), @@ -6451,10 +6570,6 @@ def _seg_61(): (0x1D58C, 'M', u'g'), (0x1D58D, 'M', u'h'), (0x1D58E, 'M', u'i'), - ] - -def _seg_62(): - return [ (0x1D58F, 'M', u'j'), (0x1D590, 'M', u'k'), (0x1D591, 'M', u'l'), @@ -6544,6 +6659,10 @@ def _seg_62(): (0x1D5E5, 'M', u'r'), (0x1D5E6, 'M', u's'), (0x1D5E7, 'M', u't'), + ] + +def _seg_64(): + return [ (0x1D5E8, 'M', u'u'), (0x1D5E9, 'M', u'v'), (0x1D5EA, 'M', u'w'), @@ -6555,10 +6674,6 @@ def _seg_62(): (0x1D5F0, 'M', u'c'), (0x1D5F1, 'M', u'd'), (0x1D5F2, 'M', u'e'), - ] - -def _seg_63(): - return [ (0x1D5F3, 'M', u'f'), (0x1D5F4, 'M', u'g'), (0x1D5F5, 'M', u'h'), @@ -6648,6 +6763,10 @@ def _seg_63(): (0x1D649, 'M', u'n'), (0x1D64A, 'M', u'o'), (0x1D64B, 'M', u'p'), + ] + +def _seg_65(): + return [ (0x1D64C, 'M', u'q'), (0x1D64D, 
'M', u'r'), (0x1D64E, 'M', u's'), @@ -6659,10 +6778,6 @@ def _seg_63(): (0x1D654, 'M', u'y'), (0x1D655, 'M', u'z'), (0x1D656, 'M', u'a'), - ] - -def _seg_64(): - return [ (0x1D657, 'M', u'b'), (0x1D658, 'M', u'c'), (0x1D659, 'M', u'd'), @@ -6752,6 +6867,10 @@ def _seg_64(): (0x1D6AE, 'M', u'η'), (0x1D6AF, 'M', u'θ'), (0x1D6B0, 'M', u'ι'), + ] + +def _seg_66(): + return [ (0x1D6B1, 'M', u'κ'), (0x1D6B2, 'M', u'λ'), (0x1D6B3, 'M', u'μ'), @@ -6763,10 +6882,6 @@ def _seg_64(): (0x1D6B9, 'M', u'θ'), (0x1D6BA, 'M', u'σ'), (0x1D6BB, 'M', u'τ'), - ] - -def _seg_65(): - return [ (0x1D6BC, 'M', u'υ'), (0x1D6BD, 'M', u'φ'), (0x1D6BE, 'M', u'χ'), @@ -6856,6 +6971,10 @@ def _seg_65(): (0x1D714, 'M', u'ω'), (0x1D715, 'M', u'∂'), (0x1D716, 'M', u'ε'), + ] + +def _seg_67(): + return [ (0x1D717, 'M', u'θ'), (0x1D718, 'M', u'κ'), (0x1D719, 'M', u'φ'), @@ -6867,10 +6986,6 @@ def _seg_65(): (0x1D71F, 'M', u'δ'), (0x1D720, 'M', u'ε'), (0x1D721, 'M', u'ζ'), - ] - -def _seg_66(): - return [ (0x1D722, 'M', u'η'), (0x1D723, 'M', u'θ'), (0x1D724, 'M', u'ι'), @@ -6960,6 +7075,10 @@ def _seg_66(): (0x1D779, 'M', u'κ'), (0x1D77A, 'M', u'λ'), (0x1D77B, 'M', u'μ'), + ] + +def _seg_68(): + return [ (0x1D77C, 'M', u'ν'), (0x1D77D, 'M', u'ξ'), (0x1D77E, 'M', u'ο'), @@ -6971,10 +7090,6 @@ def _seg_66(): (0x1D785, 'M', u'φ'), (0x1D786, 'M', u'χ'), (0x1D787, 'M', u'ψ'), - ] - -def _seg_67(): - return [ (0x1D788, 'M', u'ω'), (0x1D789, 'M', u'∂'), (0x1D78A, 'M', u'ε'), @@ -7064,6 +7179,10 @@ def _seg_67(): (0x1D7E1, 'M', u'9'), (0x1D7E2, 'M', u'0'), (0x1D7E3, 'M', u'1'), + ] + +def _seg_69(): + return [ (0x1D7E4, 'M', u'2'), (0x1D7E5, 'M', u'3'), (0x1D7E6, 'M', u'4'), @@ -7075,10 +7194,6 @@ def _seg_67(): (0x1D7EC, 'M', u'0'), (0x1D7ED, 'M', u'1'), (0x1D7EE, 'M', u'2'), - ] - -def _seg_68(): - return [ (0x1D7EF, 'M', u'3'), (0x1D7F0, 'M', u'4'), (0x1D7F1, 'M', u'5'), @@ -7112,6 +7227,18 @@ def _seg_68(): (0x1E025, 'X'), (0x1E026, 'V'), (0x1E02B, 'X'), + (0x1E100, 'V'), + (0x1E12D, 'X'), + (0x1E130, 
'V'), + (0x1E13E, 'X'), + (0x1E140, 'V'), + (0x1E14A, 'X'), + (0x1E14E, 'V'), + (0x1E150, 'X'), + (0x1E2C0, 'V'), + (0x1E2FA, 'X'), + (0x1E2FF, 'V'), + (0x1E300, 'X'), (0x1E800, 'V'), (0x1E8C5, 'X'), (0x1E8C7, 'V'), @@ -7151,13 +7278,19 @@ def _seg_68(): (0x1E920, 'M', u'𞥂'), (0x1E921, 'M', u'𞥃'), (0x1E922, 'V'), - (0x1E94B, 'X'), + (0x1E94C, 'X'), (0x1E950, 'V'), (0x1E95A, 'X'), (0x1E95E, 'V'), (0x1E960, 'X'), + ] + +def _seg_70(): + return [ (0x1EC71, 'V'), (0x1ECB5, 'X'), + (0x1ED01, 'V'), + (0x1ED3E, 'X'), (0x1EE00, 'M', u'ا'), (0x1EE01, 'M', u'ب'), (0x1EE02, 'M', u'ج'), @@ -7179,10 +7312,6 @@ def _seg_68(): (0x1EE12, 'M', u'ق'), (0x1EE13, 'M', u'ر'), (0x1EE14, 'M', u'ش'), - ] - -def _seg_69(): - return [ (0x1EE15, 'M', u'ت'), (0x1EE16, 'M', u'ث'), (0x1EE17, 'M', u'خ'), @@ -7258,6 +7387,10 @@ def _seg_69(): (0x1EE68, 'M', u'ط'), (0x1EE69, 'M', u'ي'), (0x1EE6A, 'M', u'ك'), + ] + +def _seg_71(): + return [ (0x1EE6B, 'X'), (0x1EE6C, 'M', u'م'), (0x1EE6D, 'M', u'ن'), @@ -7283,10 +7416,6 @@ def _seg_69(): (0x1EE81, 'M', u'ب'), (0x1EE82, 'M', u'ج'), (0x1EE83, 'M', u'د'), - ] - -def _seg_70(): - return [ (0x1EE84, 'M', u'ه'), (0x1EE85, 'M', u'و'), (0x1EE86, 'M', u'ز'), @@ -7362,10 +7491,13 @@ def _seg_70(): (0x1F106, '3', u'5,'), (0x1F107, '3', u'6,'), (0x1F108, '3', u'7,'), + ] + +def _seg_72(): + return [ (0x1F109, '3', u'8,'), (0x1F10A, '3', u'9,'), (0x1F10B, 'V'), - (0x1F10D, 'X'), (0x1F110, '3', u'(a)'), (0x1F111, '3', u'(b)'), (0x1F112, '3', u'(c)'), @@ -7387,10 +7519,6 @@ def _seg_70(): (0x1F122, '3', u'(s)'), (0x1F123, '3', u'(t)'), (0x1F124, '3', u'(u)'), - ] - -def _seg_71(): - return [ (0x1F125, '3', u'(v)'), (0x1F126, '3', u'(w)'), (0x1F127, '3', u'(x)'), @@ -7437,11 +7565,11 @@ def _seg_71(): (0x1F150, 'V'), (0x1F16A, 'M', u'mc'), (0x1F16B, 'M', u'md'), - (0x1F16C, 'X'), - (0x1F170, 'V'), + (0x1F16C, 'M', u'mr'), + (0x1F16D, 'V'), (0x1F190, 'M', u'dj'), (0x1F191, 'V'), - (0x1F1AD, 'X'), + (0x1F1AE, 'X'), (0x1F1E6, 'V'), (0x1F200, 'M', u'ほか'), (0x1F201, 
'M', u'ココ'), @@ -7467,6 +7595,10 @@ def _seg_71(): (0x1F221, 'M', u'終'), (0x1F222, 'M', u'生'), (0x1F223, 'M', u'販'), + ] + +def _seg_73(): + return [ (0x1F224, 'M', u'声'), (0x1F225, 'M', u'吹'), (0x1F226, 'M', u'演'), @@ -7491,10 +7623,6 @@ def _seg_71(): (0x1F239, 'M', u'割'), (0x1F23A, 'M', u'営'), (0x1F23B, 'M', u'配'), - ] - -def _seg_72(): - return [ (0x1F23C, 'X'), (0x1F240, 'M', u'〔本〕'), (0x1F241, 'M', u'〔三〕'), @@ -7512,15 +7640,17 @@ def _seg_72(): (0x1F260, 'V'), (0x1F266, 'X'), (0x1F300, 'V'), - (0x1F6D5, 'X'), + (0x1F6D8, 'X'), (0x1F6E0, 'V'), (0x1F6ED, 'X'), (0x1F6F0, 'V'), - (0x1F6FA, 'X'), + (0x1F6FD, 'X'), (0x1F700, 'V'), (0x1F774, 'X'), (0x1F780, 'V'), (0x1F7D9, 'X'), + (0x1F7E0, 'V'), + (0x1F7EC, 'X'), (0x1F800, 'V'), (0x1F80C, 'X'), (0x1F810, 'V'), @@ -7531,28 +7661,51 @@ def _seg_72(): (0x1F888, 'X'), (0x1F890, 'V'), (0x1F8AE, 'X'), + (0x1F8B0, 'V'), + (0x1F8B2, 'X'), (0x1F900, 'V'), - (0x1F90C, 'X'), - (0x1F910, 'V'), - (0x1F93F, 'X'), - (0x1F940, 'V'), - (0x1F971, 'X'), - (0x1F973, 'V'), - (0x1F977, 'X'), + (0x1F979, 'X'), (0x1F97A, 'V'), - (0x1F97B, 'X'), - (0x1F97C, 'V'), - (0x1F9A3, 'X'), - (0x1F9B0, 'V'), - (0x1F9BA, 'X'), - (0x1F9C0, 'V'), - (0x1F9C3, 'X'), - (0x1F9D0, 'V'), - (0x1FA00, 'X'), + (0x1F9CC, 'X'), + (0x1F9CD, 'V'), + (0x1FA54, 'X'), (0x1FA60, 'V'), (0x1FA6E, 'X'), + (0x1FA70, 'V'), + (0x1FA75, 'X'), + (0x1FA78, 'V'), + (0x1FA7B, 'X'), + (0x1FA80, 'V'), + (0x1FA87, 'X'), + (0x1FA90, 'V'), + (0x1FAA9, 'X'), + (0x1FAB0, 'V'), + (0x1FAB7, 'X'), + (0x1FAC0, 'V'), + (0x1FAC3, 'X'), + (0x1FAD0, 'V'), + (0x1FAD7, 'X'), + (0x1FB00, 'V'), + (0x1FB93, 'X'), + (0x1FB94, 'V'), + (0x1FBCB, 'X'), + (0x1FBF0, 'M', u'0'), + (0x1FBF1, 'M', u'1'), + (0x1FBF2, 'M', u'2'), + (0x1FBF3, 'M', u'3'), + (0x1FBF4, 'M', u'4'), + (0x1FBF5, 'M', u'5'), + (0x1FBF6, 'M', u'6'), + (0x1FBF7, 'M', u'7'), + (0x1FBF8, 'M', u'8'), + (0x1FBF9, 'M', u'9'), + ] + +def _seg_74(): + return [ + (0x1FBFA, 'X'), (0x20000, 'V'), - (0x2A6D7, 'X'), + (0x2A6DE, 'X'), (0x2A700, 
'V'), (0x2B735, 'X'), (0x2B740, 'V'), @@ -7595,10 +7748,6 @@ def _seg_72(): (0x2F81F, 'M', u'㓟'), (0x2F820, 'M', u'刻'), (0x2F821, 'M', u'剆'), - ] - -def _seg_73(): - return [ (0x2F822, 'M', u'割'), (0x2F823, 'M', u'剷'), (0x2F824, 'M', u'㔕'), @@ -7654,6 +7803,10 @@ def _seg_73(): (0x2F859, 'M', u'𡓤'), (0x2F85A, 'M', u'売'), (0x2F85B, 'M', u'壷'), + ] + +def _seg_75(): + return [ (0x2F85C, 'M', u'夆'), (0x2F85D, 'M', u'多'), (0x2F85E, 'M', u'夢'), @@ -7699,10 +7852,6 @@ def _seg_73(): (0x2F887, 'M', u'幩'), (0x2F888, 'M', u'㡢'), (0x2F889, 'M', u'𢆃'), - ] - -def _seg_74(): - return [ (0x2F88A, 'M', u'㡼'), (0x2F88B, 'M', u'庰'), (0x2F88C, 'M', u'庳'), @@ -7758,6 +7907,10 @@ def _seg_74(): (0x2F8C0, 'M', u'揅'), (0x2F8C1, 'M', u'掩'), (0x2F8C2, 'M', u'㨮'), + ] + +def _seg_76(): + return [ (0x2F8C3, 'M', u'摩'), (0x2F8C4, 'M', u'摾'), (0x2F8C5, 'M', u'撝'), @@ -7803,10 +7956,6 @@ def _seg_74(): (0x2F8ED, 'M', u'櫛'), (0x2F8EE, 'M', u'㰘'), (0x2F8EF, 'M', u'次'), - ] - -def _seg_75(): - return [ (0x2F8F0, 'M', u'𣢧'), (0x2F8F1, 'M', u'歔'), (0x2F8F2, 'M', u'㱎'), @@ -7862,6 +8011,10 @@ def _seg_75(): (0x2F924, 'M', u'犀'), (0x2F925, 'M', u'犕'), (0x2F926, 'M', u'𤜵'), + ] + +def _seg_77(): + return [ (0x2F927, 'M', u'𤠔'), (0x2F928, 'M', u'獺'), (0x2F929, 'M', u'王'), @@ -7907,10 +8060,6 @@ def _seg_75(): (0x2F953, 'M', u'祖'), (0x2F954, 'M', u'𥚚'), (0x2F955, 'M', u'𥛅'), - ] - -def _seg_76(): - return [ (0x2F956, 'M', u'福'), (0x2F957, 'M', u'秫'), (0x2F958, 'M', u'䄯'), @@ -7966,6 +8115,10 @@ def _seg_76(): (0x2F98B, 'M', u'舁'), (0x2F98C, 'M', u'舄'), (0x2F98D, 'M', u'辞'), + ] + +def _seg_78(): + return [ (0x2F98E, 'M', u'䑫'), (0x2F98F, 'M', u'芑'), (0x2F990, 'M', u'芋'), @@ -8011,10 +8164,6 @@ def _seg_76(): (0x2F9B8, 'M', u'蚈'), (0x2F9B9, 'M', u'蜎'), (0x2F9BA, 'M', u'蛢'), - ] - -def _seg_77(): - return [ (0x2F9BB, 'M', u'蝹'), (0x2F9BC, 'M', u'蜨'), (0x2F9BD, 'M', u'蝫'), @@ -8070,6 +8219,10 @@ def _seg_77(): (0x2F9EF, 'M', u'䦕'), (0x2F9F0, 'M', u'閷'), (0x2F9F1, 'M', u'𨵷'), + ] + +def _seg_79(): + return 
[ (0x2F9F2, 'M', u'䧦'), (0x2F9F3, 'M', u'雃'), (0x2F9F4, 'M', u'嶲'), @@ -8114,11 +8267,9 @@ def _seg_77(): (0x2FA1C, 'M', u'鼻'), (0x2FA1D, 'M', u'𪘀'), (0x2FA1E, 'X'), + (0x30000, 'V'), + (0x3134B, 'X'), (0xE0100, 'I'), - ] - -def _seg_78(): - return [ (0xE01F0, 'X'), ] @@ -8202,4 +8353,5 @@ uts46data = tuple( + _seg_76() + _seg_77() + _seg_78() + + _seg_79() ) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/ipaddress.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/ipaddress.py index f2d07668..3e6f9e49 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/ipaddress.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/ipaddress.py @@ -14,7 +14,7 @@ from __future__ import unicode_literals import itertools import struct -__version__ = '1.0.22' +__version__ = '1.0.23' # Compatibility functions _compat_int_types = (int,) @@ -1103,7 +1103,8 @@ class _BaseNetwork(_IPAddressBase): try: # Always false if one is v4 and the other is v6. if a._version != b._version: - raise TypeError("%s and %s are not of the same version" (a, b)) + raise TypeError( + "%s and %s are not of the same version" % (a, b)) return (b.network_address <= a.network_address and b.broadcast_address >= a.broadcast_address) except AttributeError: diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/__init__.py deleted file mode 100644 index a6f44a55..00000000 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/__init__.py +++ /dev/null @@ -1,347 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -lockfile.py - Platform-independent advisory file locks. - -Requires Python 2.5 unless you apply 2.4.diff -Locking is done on a per-thread basis instead of a per-process basis. - -Usage: - ->>> lock = LockFile('somefile') ->>> try: -... lock.acquire() -... except AlreadyLocked: -... 
print 'somefile', 'is locked already.' -... except LockFailed: -... print 'somefile', 'can\\'t be locked.' -... else: -... print 'got lock' -got lock ->>> print lock.is_locked() -True ->>> lock.release() - ->>> lock = LockFile('somefile') ->>> print lock.is_locked() -False ->>> with lock: -... print lock.is_locked() -True ->>> print lock.is_locked() -False - ->>> lock = LockFile('somefile') ->>> # It is okay to lock twice from the same thread... ->>> with lock: -... lock.acquire() -... ->>> # Though no counter is kept, so you can't unlock multiple times... ->>> print lock.is_locked() -False - -Exceptions: - - Error - base class for other exceptions - LockError - base class for all locking exceptions - AlreadyLocked - Another thread or process already holds the lock - LockFailed - Lock failed for some other reason - UnlockError - base class for all unlocking exceptions - AlreadyUnlocked - File was not locked. - NotMyLock - File was locked but not by the current thread/process -""" - -from __future__ import absolute_import - -import functools -import os -import socket -import threading -import warnings - -# Work with PEP8 and non-PEP8 versions of threading module. -if not hasattr(threading, "current_thread"): - threading.current_thread = threading.currentThread -if not hasattr(threading.Thread, "get_name"): - threading.Thread.get_name = threading.Thread.getName - -__all__ = ['Error', 'LockError', 'LockTimeout', 'AlreadyLocked', - 'LockFailed', 'UnlockError', 'NotLocked', 'NotMyLock', - 'LinkFileLock', 'MkdirFileLock', 'SQLiteFileLock', - 'LockBase', 'locked'] - - -class Error(Exception): - """ - Base class for other exceptions. - - >>> try: - ... raise Error - ... except Exception: - ... pass - """ - pass - - -class LockError(Error): - """ - Base class for error arising from attempts to acquire the lock. - - >>> try: - ... raise LockError - ... except Error: - ... 
pass - """ - pass - - -class LockTimeout(LockError): - """Raised when lock creation fails within a user-defined period of time. - - >>> try: - ... raise LockTimeout - ... except LockError: - ... pass - """ - pass - - -class AlreadyLocked(LockError): - """Some other thread/process is locking the file. - - >>> try: - ... raise AlreadyLocked - ... except LockError: - ... pass - """ - pass - - -class LockFailed(LockError): - """Lock file creation failed for some other reason. - - >>> try: - ... raise LockFailed - ... except LockError: - ... pass - """ - pass - - -class UnlockError(Error): - """ - Base class for errors arising from attempts to release the lock. - - >>> try: - ... raise UnlockError - ... except Error: - ... pass - """ - pass - - -class NotLocked(UnlockError): - """Raised when an attempt is made to unlock an unlocked file. - - >>> try: - ... raise NotLocked - ... except UnlockError: - ... pass - """ - pass - - -class NotMyLock(UnlockError): - """Raised when an attempt is made to unlock a file someone else locked. - - >>> try: - ... raise NotMyLock - ... except UnlockError: - ... pass - """ - pass - - -class _SharedBase(object): - def __init__(self, path): - self.path = path - - def acquire(self, timeout=None): - """ - Acquire the lock. - - * If timeout is omitted (or None), wait forever trying to lock the - file. - - * If timeout > 0, try to acquire the lock for that many seconds. If - the lock period expires and the file is still locked, raise - LockTimeout. - - * If timeout <= 0, raise AlreadyLocked immediately if the file is - already locked. - """ - raise NotImplemented("implement in subclass") - - def release(self): - """ - Release the lock. - - If the file is not locked, raise NotLocked. - """ - raise NotImplemented("implement in subclass") - - def __enter__(self): - """ - Context manager support. - """ - self.acquire() - return self - - def __exit__(self, *_exc): - """ - Context manager support. 
- """ - self.release() - - def __repr__(self): - return "<%s: %r>" % (self.__class__.__name__, self.path) - - -class LockBase(_SharedBase): - """Base class for platform-specific lock classes.""" - def __init__(self, path, threaded=True, timeout=None): - """ - >>> lock = LockBase('somefile') - >>> lock = LockBase('somefile', threaded=False) - """ - super(LockBase, self).__init__(path) - self.lock_file = os.path.abspath(path) + ".lock" - self.hostname = socket.gethostname() - self.pid = os.getpid() - if threaded: - t = threading.current_thread() - # Thread objects in Python 2.4 and earlier do not have ident - # attrs. Worm around that. - ident = getattr(t, "ident", hash(t)) - self.tname = "-%x" % (ident & 0xffffffff) - else: - self.tname = "" - dirname = os.path.dirname(self.lock_file) - - # unique name is mostly about the current process, but must - # also contain the path -- otherwise, two adjacent locked - # files conflict (one file gets locked, creating lock-file and - # unique file, the other one gets locked, creating lock-file - # and overwriting the already existing lock-file, then one - # gets unlocked, deleting both lock-file and unique file, - # finally the last lock errors out upon releasing. - self.unique_name = os.path.join(dirname, - "%s%s.%s%s" % (self.hostname, - self.tname, - self.pid, - hash(self.path))) - self.timeout = timeout - - def is_locked(self): - """ - Tell whether or not the file is locked. - """ - raise NotImplemented("implement in subclass") - - def i_am_locking(self): - """ - Return True if this object is locking the file. - """ - raise NotImplemented("implement in subclass") - - def break_lock(self): - """ - Remove a lock. Useful if a locking thread failed to unlock. 
- """ - raise NotImplemented("implement in subclass") - - def __repr__(self): - return "<%s: %r -- %r>" % (self.__class__.__name__, self.unique_name, - self.path) - - -def _fl_helper(cls, mod, *args, **kwds): - warnings.warn("Import from %s module instead of lockfile package" % mod, - DeprecationWarning, stacklevel=2) - # This is a bit funky, but it's only for awhile. The way the unit tests - # are constructed this function winds up as an unbound method, so it - # actually takes three args, not two. We want to toss out self. - if not isinstance(args[0], str): - # We are testing, avoid the first arg - args = args[1:] - if len(args) == 1 and not kwds: - kwds["threaded"] = True - return cls(*args, **kwds) - - -def LinkFileLock(*args, **kwds): - """Factory function provided for backwards compatibility. - - Do not use in new code. Instead, import LinkLockFile from the - lockfile.linklockfile module. - """ - from . import linklockfile - return _fl_helper(linklockfile.LinkLockFile, "lockfile.linklockfile", - *args, **kwds) - - -def MkdirFileLock(*args, **kwds): - """Factory function provided for backwards compatibility. - - Do not use in new code. Instead, import MkdirLockFile from the - lockfile.mkdirlockfile module. - """ - from . import mkdirlockfile - return _fl_helper(mkdirlockfile.MkdirLockFile, "lockfile.mkdirlockfile", - *args, **kwds) - - -def SQLiteFileLock(*args, **kwds): - """Factory function provided for backwards compatibility. - - Do not use in new code. Instead, import SQLiteLockFile from the - lockfile.mkdirlockfile module. - """ - from . import sqlitelockfile - return _fl_helper(sqlitelockfile.SQLiteLockFile, "lockfile.sqlitelockfile", - *args, **kwds) - - -def locked(path, timeout=None): - """Decorator which enables locks for decorated function. - - Arguments: - - path: path for lockfile. - - timeout (optional): Timeout for acquiring lock. - - Usage: - @locked('/var/run/myname', timeout=0) - def myname(...): - ... 
- """ - def decor(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - lock = FileLock(path, timeout=timeout) - lock.acquire() - try: - return func(*args, **kwargs) - finally: - lock.release() - return wrapper - return decor - - -if hasattr(os, "link"): - from . import linklockfile as _llf - LockFile = _llf.LinkLockFile -else: - from . import mkdirlockfile as _mlf - LockFile = _mlf.MkdirLockFile - -FileLock = LockFile diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/linklockfile.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/linklockfile.py deleted file mode 100644 index 2ca9be04..00000000 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/linklockfile.py +++ /dev/null @@ -1,73 +0,0 @@ -from __future__ import absolute_import - -import time -import os - -from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout, - AlreadyLocked) - - -class LinkLockFile(LockBase): - """Lock access to a file using atomic property of link(2). - - >>> lock = LinkLockFile('somefile') - >>> lock = LinkLockFile('somefile', threaded=False) - """ - - def acquire(self, timeout=None): - try: - open(self.unique_name, "wb").close() - except IOError: - raise LockFailed("failed to create %s" % self.unique_name) - - timeout = timeout if timeout is not None else self.timeout - end_time = time.time() - if timeout is not None and timeout > 0: - end_time += timeout - - while True: - # Try and create a hard link to it. - try: - os.link(self.unique_name, self.lock_file) - except OSError: - # Link creation failed. Maybe we've double-locked? - nlinks = os.stat(self.unique_name).st_nlink - if nlinks == 2: - # The original link plus the one I created == 2. We're - # good to go. - return - else: - # Otherwise the lock creation failed. 
- if timeout is not None and time.time() > end_time: - os.unlink(self.unique_name) - if timeout > 0: - raise LockTimeout("Timeout waiting to acquire" - " lock for %s" % - self.path) - else: - raise AlreadyLocked("%s is already locked" % - self.path) - time.sleep(timeout is not None and timeout / 10 or 0.1) - else: - # Link creation succeeded. We're good to go. - return - - def release(self): - if not self.is_locked(): - raise NotLocked("%s is not locked" % self.path) - elif not os.path.exists(self.unique_name): - raise NotMyLock("%s is locked, but not by me" % self.path) - os.unlink(self.unique_name) - os.unlink(self.lock_file) - - def is_locked(self): - return os.path.exists(self.lock_file) - - def i_am_locking(self): - return (self.is_locked() and - os.path.exists(self.unique_name) and - os.stat(self.unique_name).st_nlink == 2) - - def break_lock(self): - if os.path.exists(self.lock_file): - os.unlink(self.lock_file) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/mkdirlockfile.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/mkdirlockfile.py deleted file mode 100644 index 05a8c96c..00000000 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/mkdirlockfile.py +++ /dev/null @@ -1,84 +0,0 @@ -from __future__ import absolute_import, division - -import time -import os -import sys -import errno - -from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout, - AlreadyLocked) - - -class MkdirLockFile(LockBase): - """Lock file by creating a directory.""" - def __init__(self, path, threaded=True, timeout=None): - """ - >>> lock = MkdirLockFile('somefile') - >>> lock = MkdirLockFile('somefile', threaded=False) - """ - LockBase.__init__(self, path, threaded, timeout) - # Lock file itself is a directory. Place the unique file name into - # it. 
- self.unique_name = os.path.join(self.lock_file, - "%s.%s%s" % (self.hostname, - self.tname, - self.pid)) - - def acquire(self, timeout=None): - timeout = timeout if timeout is not None else self.timeout - end_time = time.time() - if timeout is not None and timeout > 0: - end_time += timeout - - if timeout is None: - wait = 0.1 - else: - wait = max(0, timeout / 10) - - while True: - try: - os.mkdir(self.lock_file) - except OSError: - err = sys.exc_info()[1] - if err.errno == errno.EEXIST: - # Already locked. - if os.path.exists(self.unique_name): - # Already locked by me. - return - if timeout is not None and time.time() > end_time: - if timeout > 0: - raise LockTimeout("Timeout waiting to acquire" - " lock for %s" % - self.path) - else: - # Someone else has the lock. - raise AlreadyLocked("%s is already locked" % - self.path) - time.sleep(wait) - else: - # Couldn't create the lock for some other reason - raise LockFailed("failed to create %s" % self.lock_file) - else: - open(self.unique_name, "wb").close() - return - - def release(self): - if not self.is_locked(): - raise NotLocked("%s is not locked" % self.path) - elif not os.path.exists(self.unique_name): - raise NotMyLock("%s is locked, but not by me" % self.path) - os.unlink(self.unique_name) - os.rmdir(self.lock_file) - - def is_locked(self): - return os.path.exists(self.lock_file) - - def i_am_locking(self): - return (self.is_locked() and - os.path.exists(self.unique_name)) - - def break_lock(self): - if os.path.exists(self.lock_file): - for name in os.listdir(self.lock_file): - os.unlink(os.path.join(self.lock_file, name)) - os.rmdir(self.lock_file) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/pidlockfile.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/pidlockfile.py deleted file mode 100644 index 069e85b1..00000000 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/pidlockfile.py +++ /dev/null @@ -1,190 +0,0 
@@ -# -*- coding: utf-8 -*- - -# pidlockfile.py -# -# Copyright © 2008–2009 Ben Finney <ben+python@benfinney.id.au> -# -# This is free software: you may copy, modify, and/or distribute this work -# under the terms of the Python Software Foundation License, version 2 or -# later as published by the Python Software Foundation. -# No warranty expressed or implied. See the file LICENSE.PSF-2 for details. - -""" Lockfile behaviour implemented via Unix PID files. - """ - -from __future__ import absolute_import - -import errno -import os -import time - -from . import (LockBase, AlreadyLocked, LockFailed, NotLocked, NotMyLock, - LockTimeout) - - -class PIDLockFile(LockBase): - """ Lockfile implemented as a Unix PID file. - - The lock file is a normal file named by the attribute `path`. - A lock's PID file contains a single line of text, containing - the process ID (PID) of the process that acquired the lock. - - >>> lock = PIDLockFile('somefile') - >>> lock = PIDLockFile('somefile') - """ - - def __init__(self, path, threaded=False, timeout=None): - # pid lockfiles don't support threaded operation, so always force - # False as the threaded arg. - LockBase.__init__(self, path, False, timeout) - self.unique_name = self.path - - def read_pid(self): - """ Get the PID from the lock file. - """ - return read_pid_from_pidfile(self.path) - - def is_locked(self): - """ Test if the lock is currently held. - - The lock is held if the PID file for this lock exists. - - """ - return os.path.exists(self.path) - - def i_am_locking(self): - """ Test if the lock is held by the current process. - - Returns ``True`` if the current process ID matches the - number stored in the PID file. - """ - return self.is_locked() and os.getpid() == self.read_pid() - - def acquire(self, timeout=None): - """ Acquire the lock. - - Creates the PID file for this lock, or raises an error if - the lock could not be acquired. 
- """ - - timeout = timeout if timeout is not None else self.timeout - end_time = time.time() - if timeout is not None and timeout > 0: - end_time += timeout - - while True: - try: - write_pid_to_pidfile(self.path) - except OSError as exc: - if exc.errno == errno.EEXIST: - # The lock creation failed. Maybe sleep a bit. - if time.time() > end_time: - if timeout is not None and timeout > 0: - raise LockTimeout("Timeout waiting to acquire" - " lock for %s" % - self.path) - else: - raise AlreadyLocked("%s is already locked" % - self.path) - time.sleep(timeout is not None and timeout / 10 or 0.1) - else: - raise LockFailed("failed to create %s" % self.path) - else: - return - - def release(self): - """ Release the lock. - - Removes the PID file to release the lock, or raises an - error if the current process does not hold the lock. - - """ - if not self.is_locked(): - raise NotLocked("%s is not locked" % self.path) - if not self.i_am_locking(): - raise NotMyLock("%s is locked, but not by me" % self.path) - remove_existing_pidfile(self.path) - - def break_lock(self): - """ Break an existing lock. - - Removes the PID file if it already exists, otherwise does - nothing. - - """ - remove_existing_pidfile(self.path) - - -def read_pid_from_pidfile(pidfile_path): - """ Read the PID recorded in the named PID file. - - Read and return the numeric PID recorded as text in the named - PID file. If the PID file cannot be read, or if the content is - not a valid PID, return ``None``. - - """ - pid = None - try: - pidfile = open(pidfile_path, 'r') - except IOError: - pass - else: - # According to the FHS 2.3 section on PID files in /var/run: - # - # The file must consist of the process identifier in - # ASCII-encoded decimal, followed by a newline character. - # - # Programs that read PID files should be somewhat flexible - # in what they accept; i.e., they should ignore extra - # whitespace, leading zeroes, absence of the trailing - # newline, or additional lines in the PID file. 
- - line = pidfile.readline().strip() - try: - pid = int(line) - except ValueError: - pass - pidfile.close() - - return pid - - -def write_pid_to_pidfile(pidfile_path): - """ Write the PID in the named PID file. - - Get the numeric process ID (“PID”) of the current process - and write it to the named file as a line of text. - - """ - open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY) - open_mode = 0o644 - pidfile_fd = os.open(pidfile_path, open_flags, open_mode) - pidfile = os.fdopen(pidfile_fd, 'w') - - # According to the FHS 2.3 section on PID files in /var/run: - # - # The file must consist of the process identifier in - # ASCII-encoded decimal, followed by a newline character. For - # example, if crond was process number 25, /var/run/crond.pid - # would contain three characters: two, five, and newline. - - pid = os.getpid() - pidfile.write("%s\n" % pid) - pidfile.close() - - -def remove_existing_pidfile(pidfile_path): - """ Remove the named PID file if it exists. - - Removing a PID file that doesn't already exist puts us in the - desired state, so we ignore the condition if the file does not - exist. - - """ - try: - os.remove(pidfile_path) - except OSError as exc: - if exc.errno == errno.ENOENT: - pass - else: - raise diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/sqlitelockfile.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/sqlitelockfile.py deleted file mode 100644 index f997e244..00000000 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/sqlitelockfile.py +++ /dev/null @@ -1,156 +0,0 @@ -from __future__ import absolute_import, division - -import time -import os - -try: - unicode -except NameError: - unicode = str - -from . import LockBase, NotLocked, NotMyLock, LockTimeout, AlreadyLocked - - -class SQLiteLockFile(LockBase): - "Demonstrate SQL-based locking." 
- - testdb = None - - def __init__(self, path, threaded=True, timeout=None): - """ - >>> lock = SQLiteLockFile('somefile') - >>> lock = SQLiteLockFile('somefile', threaded=False) - """ - LockBase.__init__(self, path, threaded, timeout) - self.lock_file = unicode(self.lock_file) - self.unique_name = unicode(self.unique_name) - - if SQLiteLockFile.testdb is None: - import tempfile - _fd, testdb = tempfile.mkstemp() - os.close(_fd) - os.unlink(testdb) - del _fd, tempfile - SQLiteLockFile.testdb = testdb - - import sqlite3 - self.connection = sqlite3.connect(SQLiteLockFile.testdb) - - c = self.connection.cursor() - try: - c.execute("create table locks" - "(" - " lock_file varchar(32)," - " unique_name varchar(32)" - ")") - except sqlite3.OperationalError: - pass - else: - self.connection.commit() - import atexit - atexit.register(os.unlink, SQLiteLockFile.testdb) - - def acquire(self, timeout=None): - timeout = timeout if timeout is not None else self.timeout - end_time = time.time() - if timeout is not None and timeout > 0: - end_time += timeout - - if timeout is None: - wait = 0.1 - elif timeout <= 0: - wait = 0 - else: - wait = timeout / 10 - - cursor = self.connection.cursor() - - while True: - if not self.is_locked(): - # Not locked. Try to lock it. - cursor.execute("insert into locks" - " (lock_file, unique_name)" - " values" - " (?, ?)", - (self.lock_file, self.unique_name)) - self.connection.commit() - - # Check to see if we are the only lock holder. - cursor.execute("select * from locks" - " where unique_name = ?", - (self.unique_name,)) - rows = cursor.fetchall() - if len(rows) > 1: - # Nope. Someone else got there. Remove our lock. - cursor.execute("delete from locks" - " where unique_name = ?", - (self.unique_name,)) - self.connection.commit() - else: - # Yup. We're done, so go home. - return - else: - # Check to see if we are the only lock holder. 
- cursor.execute("select * from locks" - " where unique_name = ?", - (self.unique_name,)) - rows = cursor.fetchall() - if len(rows) == 1: - # We're the locker, so go home. - return - - # Maybe we should wait a bit longer. - if timeout is not None and time.time() > end_time: - if timeout > 0: - # No more waiting. - raise LockTimeout("Timeout waiting to acquire" - " lock for %s" % - self.path) - else: - # Someone else has the lock and we are impatient.. - raise AlreadyLocked("%s is already locked" % self.path) - - # Well, okay. We'll give it a bit longer. - time.sleep(wait) - - def release(self): - if not self.is_locked(): - raise NotLocked("%s is not locked" % self.path) - if not self.i_am_locking(): - raise NotMyLock("%s is locked, but not by me (by %s)" % - (self.unique_name, self._who_is_locking())) - cursor = self.connection.cursor() - cursor.execute("delete from locks" - " where unique_name = ?", - (self.unique_name,)) - self.connection.commit() - - def _who_is_locking(self): - cursor = self.connection.cursor() - cursor.execute("select unique_name from locks" - " where lock_file = ?", - (self.lock_file,)) - return cursor.fetchone()[0] - - def is_locked(self): - cursor = self.connection.cursor() - cursor.execute("select * from locks" - " where lock_file = ?", - (self.lock_file,)) - rows = cursor.fetchall() - return not not rows - - def i_am_locking(self): - cursor = self.connection.cursor() - cursor.execute("select * from locks" - " where lock_file = ?" 
- " and unique_name = ?", - (self.lock_file, self.unique_name)) - return not not cursor.fetchall() - - def break_lock(self): - cursor = self.connection.cursor() - cursor.execute("delete from locks" - " where lock_file = ?", - (self.lock_file,)) - self.connection.commit() diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/symlinklockfile.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/symlinklockfile.py deleted file mode 100644 index 23b41f58..00000000 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/lockfile/symlinklockfile.py +++ /dev/null @@ -1,70 +0,0 @@ -from __future__ import absolute_import - -import os -import time - -from . import (LockBase, NotLocked, NotMyLock, LockTimeout, - AlreadyLocked) - - -class SymlinkLockFile(LockBase): - """Lock access to a file using symlink(2).""" - - def __init__(self, path, threaded=True, timeout=None): - # super(SymlinkLockFile).__init(...) - LockBase.__init__(self, path, threaded, timeout) - # split it back! - self.unique_name = os.path.split(self.unique_name)[1] - - def acquire(self, timeout=None): - # Hopefully unnecessary for symlink. - # try: - # open(self.unique_name, "wb").close() - # except IOError: - # raise LockFailed("failed to create %s" % self.unique_name) - timeout = timeout if timeout is not None else self.timeout - end_time = time.time() - if timeout is not None and timeout > 0: - end_time += timeout - - while True: - # Try and create a symbolic link to it. - try: - os.symlink(self.unique_name, self.lock_file) - except OSError: - # Link creation failed. Maybe we've double-locked? - if self.i_am_locking(): - # Linked to out unique name. Proceed. - return - else: - # Otherwise the lock creation failed. 
- if timeout is not None and time.time() > end_time: - if timeout > 0: - raise LockTimeout("Timeout waiting to acquire" - " lock for %s" % - self.path) - else: - raise AlreadyLocked("%s is already locked" % - self.path) - time.sleep(timeout / 10 if timeout is not None else 0.1) - else: - # Link creation succeeded. We're good to go. - return - - def release(self): - if not self.is_locked(): - raise NotLocked("%s is not locked" % self.path) - elif not self.i_am_locking(): - raise NotMyLock("%s is locked, but not by me" % self.path) - os.unlink(self.lock_file) - - def is_locked(self): - return os.path.islink(self.lock_file) - - def i_am_locking(self): - return (os.path.islink(self.lock_file) - and os.readlink(self.lock_file) == self.unique_name) - - def break_lock(self): - if os.path.islink(self.lock_file): # exists && link - os.unlink(self.lock_file) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/msgpack/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/msgpack/__init__.py index 2afca5ad..d6705e22 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/msgpack/__init__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/msgpack/__init__.py @@ -1,31 +1,19 @@ # coding: utf-8 -from pip._vendor.msgpack._version import version -from pip._vendor.msgpack.exceptions import * +from ._version import version +from .exceptions import * +from .ext import ExtType, Timestamp -from collections import namedtuple - - -class ExtType(namedtuple('ExtType', 'code data')): - """ExtType represents ext type in msgpack.""" - def __new__(cls, code, data): - if not isinstance(code, int): - raise TypeError("code must be int") - if not isinstance(data, bytes): - raise TypeError("data must be bytes") - if not 0 <= code <= 127: - raise ValueError("code must be 0~127") - return super(ExtType, cls).__new__(cls, code, data) +import os +import sys -import os -if os.environ.get('MSGPACK_PUREPYTHON'): - 
from pip._vendor.msgpack.fallback import Packer, unpackb, Unpacker +if os.environ.get("MSGPACK_PUREPYTHON") or sys.version_info[0] == 2: + from .fallback import Packer, unpackb, Unpacker else: try: - from pip._vendor.msgpack._packer import Packer - from pip._vendor.msgpack._unpacker import unpackb, Unpacker + from ._cmsgpack import Packer, unpackb, Unpacker except ImportError: - from pip._vendor.msgpack.fallback import Packer, unpackb, Unpacker + from .fallback import Packer, unpackb, Unpacker def pack(o, stream, **kwargs): diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/msgpack/_version.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/msgpack/_version.py index d28f0deb..9f55cf50 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/msgpack/_version.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/msgpack/_version.py @@ -1 +1 @@ -version = (0, 5, 6) +version = (1, 0, 0) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/msgpack/exceptions.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/msgpack/exceptions.py index 97668814..d6d2615c 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/msgpack/exceptions.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/msgpack/exceptions.py @@ -1,5 +1,10 @@ class UnpackException(Exception): - """Deprecated. Use Exception instead to catch all exception during unpacking.""" + """Base class for some exceptions raised while unpacking. + + NOTE: unpack may raise exception other than subclass of + UnpackException. If you want to catch all error, catch + Exception instead. + """ class BufferFull(UnpackException): @@ -10,32 +15,34 @@ class OutOfData(UnpackException): pass -class UnpackValueError(UnpackException, ValueError): - """Deprecated. 
Use ValueError instead.""" +class FormatError(ValueError, UnpackException): + """Invalid msgpack format""" -class ExtraData(UnpackValueError): - def __init__(self, unpacked, extra): - self.unpacked = unpacked - self.extra = extra - - def __str__(self): - return "unpack(b) received extra data." +class StackError(ValueError, UnpackException): + """Too nested""" -class PackException(Exception): - """Deprecated. Use Exception instead to catch all exception during packing.""" +# Deprecated. Use ValueError instead +UnpackValueError = ValueError -class PackValueError(PackException, ValueError): - """PackValueError is raised when type of input data is supported but it's value is unsupported. +class ExtraData(UnpackValueError): + """ExtraData is raised when there is trailing data. - Deprecated. Use ValueError instead. + This exception is raised while only one-shot (not streaming) + unpack. """ + def __init__(self, unpacked, extra): + self.unpacked = unpacked + self.extra = extra + + def __str__(self): + return "unpack(b) received extra data." -class PackOverflowError(PackValueError, OverflowError): - """PackOverflowError is raised when integer value is out of range of msgpack support [-2**31, 2**32). - Deprecated. Use ValueError instead. - """ +# Deprecated. Use Exception instead to catch all exception during packing. 
+PackException = Exception +PackValueError = ValueError +PackOverflowError = OverflowError diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/msgpack/ext.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/msgpack/ext.py new file mode 100644 index 00000000..8341c68b --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/msgpack/ext.py @@ -0,0 +1,191 @@ +# coding: utf-8 +from collections import namedtuple +import datetime +import sys +import struct + + +PY2 = sys.version_info[0] == 2 + +if PY2: + int_types = (int, long) + _utc = None +else: + int_types = int + try: + _utc = datetime.timezone.utc + except AttributeError: + _utc = datetime.timezone(datetime.timedelta(0)) + + +class ExtType(namedtuple("ExtType", "code data")): + """ExtType represents ext type in msgpack.""" + + def __new__(cls, code, data): + if not isinstance(code, int): + raise TypeError("code must be int") + if not isinstance(data, bytes): + raise TypeError("data must be bytes") + if not 0 <= code <= 127: + raise ValueError("code must be 0~127") + return super(ExtType, cls).__new__(cls, code, data) + + +class Timestamp(object): + """Timestamp represents the Timestamp extension type in msgpack. + + When built with Cython, msgpack uses C methods to pack and unpack `Timestamp`. When using pure-Python + msgpack, :func:`to_bytes` and :func:`from_bytes` are used to pack and unpack `Timestamp`. + + This class is immutable: Do not override seconds and nanoseconds. + """ + + __slots__ = ["seconds", "nanoseconds"] + + def __init__(self, seconds, nanoseconds=0): + """Initialize a Timestamp object. + + :param int seconds: + Number of seconds since the UNIX epoch (00:00:00 UTC Jan 1 1970, minus leap seconds). + May be negative. + + :param int nanoseconds: + Number of nanoseconds to add to `seconds` to get fractional time. + Maximum is 999_999_999. Default is 0. 
+ + Note: Negative times (before the UNIX epoch) are represented as negative seconds + positive ns. + """ + if not isinstance(seconds, int_types): + raise TypeError("seconds must be an interger") + if not isinstance(nanoseconds, int_types): + raise TypeError("nanoseconds must be an integer") + if not (0 <= nanoseconds < 10 ** 9): + raise ValueError( + "nanoseconds must be a non-negative integer less than 999999999." + ) + self.seconds = seconds + self.nanoseconds = nanoseconds + + def __repr__(self): + """String representation of Timestamp.""" + return "Timestamp(seconds={0}, nanoseconds={1})".format( + self.seconds, self.nanoseconds + ) + + def __eq__(self, other): + """Check for equality with another Timestamp object""" + if type(other) is self.__class__: + return ( + self.seconds == other.seconds and self.nanoseconds == other.nanoseconds + ) + return False + + def __ne__(self, other): + """not-equals method (see :func:`__eq__()`)""" + return not self.__eq__(other) + + def __hash__(self): + return hash((self.seconds, self.nanoseconds)) + + @staticmethod + def from_bytes(b): + """Unpack bytes into a `Timestamp` object. + + Used for pure-Python msgpack unpacking. + + :param b: Payload from msgpack ext message with code -1 + :type b: bytes + + :returns: Timestamp object unpacked from msgpack ext payload + :rtype: Timestamp + """ + if len(b) == 4: + seconds = struct.unpack("!L", b)[0] + nanoseconds = 0 + elif len(b) == 8: + data64 = struct.unpack("!Q", b)[0] + seconds = data64 & 0x00000003FFFFFFFF + nanoseconds = data64 >> 34 + elif len(b) == 12: + nanoseconds, seconds = struct.unpack("!Iq", b) + else: + raise ValueError( + "Timestamp type can only be created from 32, 64, or 96-bit byte objects" + ) + return Timestamp(seconds, nanoseconds) + + def to_bytes(self): + """Pack this Timestamp object into bytes. + + Used for pure-Python msgpack packing. 
+ + :returns data: Payload for EXT message with code -1 (timestamp type) + :rtype: bytes + """ + if (self.seconds >> 34) == 0: # seconds is non-negative and fits in 34 bits + data64 = self.nanoseconds << 34 | self.seconds + if data64 & 0xFFFFFFFF00000000 == 0: + # nanoseconds is zero and seconds < 2**32, so timestamp 32 + data = struct.pack("!L", data64) + else: + # timestamp 64 + data = struct.pack("!Q", data64) + else: + # timestamp 96 + data = struct.pack("!Iq", self.nanoseconds, self.seconds) + return data + + @staticmethod + def from_unix(unix_sec): + """Create a Timestamp from posix timestamp in seconds. + + :param unix_float: Posix timestamp in seconds. + :type unix_float: int or float. + """ + seconds = int(unix_sec // 1) + nanoseconds = int((unix_sec % 1) * 10 ** 9) + return Timestamp(seconds, nanoseconds) + + def to_unix(self): + """Get the timestamp as a floating-point value. + + :returns: posix timestamp + :rtype: float + """ + return self.seconds + self.nanoseconds / 1e9 + + @staticmethod + def from_unix_nano(unix_ns): + """Create a Timestamp from posix timestamp in nanoseconds. + + :param int unix_ns: Posix timestamp in nanoseconds. + :rtype: Timestamp + """ + return Timestamp(*divmod(unix_ns, 10 ** 9)) + + def to_unix_nano(self): + """Get the timestamp as a unixtime in nanoseconds. + + :returns: posix timestamp in nanoseconds + :rtype: int + """ + return self.seconds * 10 ** 9 + self.nanoseconds + + def to_datetime(self): + """Get the timestamp as a UTC datetime. + + Python 2 is not supported. + + :rtype: datetime. + """ + return datetime.datetime.fromtimestamp(self.to_unix(), _utc) + + @staticmethod + def from_datetime(dt): + """Create a Timestamp from datetime with tzinfo. + + Python 2 is not supported. 
+ + :rtype: Timestamp + """ + return Timestamp.from_unix(dt.timestamp()) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/msgpack/fallback.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/msgpack/fallback.py index 94184218..9f6665b3 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/msgpack/fallback.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/msgpack/fallback.py @@ -1,76 +1,98 @@ """Fallback pure Python implementation of msgpack""" +from datetime import datetime as _DateTime import sys import struct -import warnings -if sys.version_info[0] == 3: - PY3 = True + +PY2 = sys.version_info[0] == 2 +if PY2: + int_types = (int, long) + + def dict_iteritems(d): + return d.iteritems() + + +else: int_types = int - Unicode = str + unicode = str xrange = range + def dict_iteritems(d): return d.items() + + +if sys.version_info < (3, 5): + # Ugly hack... + RecursionError = RuntimeError + + def _is_recursionerror(e): + return ( + len(e.args) == 1 + and isinstance(e.args[0], str) + and e.args[0].startswith("maximum recursion depth exceeded") + ) + + else: - PY3 = False - int_types = (int, long) - Unicode = unicode - def dict_iteritems(d): - return d.iteritems() + + def _is_recursionerror(e): + return True -if hasattr(sys, 'pypy_version_info'): - # cStringIO is slow on PyPy, StringIO is faster. However: PyPy's own +if hasattr(sys, "pypy_version_info"): + # StringIO is slow on PyPy, StringIO is faster. However: PyPy's own # StringBuilder is fastest. 
from __pypy__ import newlist_hint + try: from __pypy__.builders import BytesBuilder as StringBuilder except ImportError: from __pypy__.builders import StringBuilder USING_STRINGBUILDER = True + class StringIO(object): - def __init__(self, s=b''): + def __init__(self, s=b""): if s: self.builder = StringBuilder(len(s)) self.builder.append(s) else: self.builder = StringBuilder() + def write(self, s): if isinstance(s, memoryview): s = s.tobytes() elif isinstance(s, bytearray): s = bytes(s) self.builder.append(s) + def getvalue(self): return self.builder.build() + + else: USING_STRINGBUILDER = False from io import BytesIO as StringIO + newlist_hint = lambda size: [] -from pip._vendor.msgpack.exceptions import ( - BufferFull, - OutOfData, - UnpackValueError, - PackValueError, - PackOverflowError, - ExtraData) +from .exceptions import BufferFull, OutOfData, ExtraData, FormatError, StackError -from pip._vendor.msgpack import ExtType +from .ext import ExtType, Timestamp -EX_SKIP = 0 -EX_CONSTRUCT = 1 -EX_READ_ARRAY_HEADER = 2 -EX_READ_MAP_HEADER = 3 +EX_SKIP = 0 +EX_CONSTRUCT = 1 +EX_READ_ARRAY_HEADER = 2 +EX_READ_MAP_HEADER = 3 -TYPE_IMMEDIATE = 0 -TYPE_ARRAY = 1 -TYPE_MAP = 2 -TYPE_RAW = 3 -TYPE_BIN = 4 -TYPE_EXT = 5 +TYPE_IMMEDIATE = 0 +TYPE_ARRAY = 1 +TYPE_MAP = 2 +TYPE_RAW = 3 +TYPE_BIN = 4 +TYPE_EXT = 5 DEFAULT_RECURSE_LIMIT = 511 @@ -83,53 +105,54 @@ def _check_type_strict(obj, t, type=type, tuple=tuple): def _get_data_from_buffer(obj): - try: - view = memoryview(obj) - except TypeError: - # try to use legacy buffer protocol if 2.7, otherwise re-raise - if not PY3: - view = memoryview(buffer(obj)) - warnings.warn("using old buffer interface to unpack %s; " - "this leads to unpacking errors if slicing is used and " - "will be removed in a future version" % type(obj), - RuntimeWarning) - else: - raise + view = memoryview(obj) if view.itemsize != 1: raise ValueError("cannot unpack from multi-byte object") return view -def unpack(stream, **kwargs): - warnings.warn( - 
"Direct calling implementation's unpack() is deprecated, Use msgpack.unpack() or unpackb() instead.", - PendingDeprecationWarning) - data = stream.read() - return unpackb(data, **kwargs) - - def unpackb(packed, **kwargs): """ Unpack an object from `packed`. - Raises `ExtraData` when `packed` contains extra bytes. + Raises ``ExtraData`` when *packed* contains extra bytes. + Raises ``ValueError`` when *packed* is incomplete. + Raises ``FormatError`` when *packed* is not valid msgpack. + Raises ``StackError`` when *packed* contains too nested. + Other exceptions can be raised during unpacking. + See :class:`Unpacker` for options. """ - unpacker = Unpacker(None, **kwargs) + unpacker = Unpacker(None, max_buffer_size=len(packed), **kwargs) unpacker.feed(packed) try: ret = unpacker._unpack() except OutOfData: - raise UnpackValueError("Data is not enough.") + raise ValueError("Unpack failed: incomplete input") + except RecursionError as e: + if _is_recursionerror(e): + raise StackError + raise if unpacker._got_extradata(): raise ExtraData(ret, unpacker._get_extradata()) return ret +if sys.version_info < (2, 7, 6): + + def _unpack_from(f, b, o=0): + """Explicit type cast for legacy struct.unpack_from""" + return struct.unpack_from(f, bytes(b), o) + + +else: + _unpack_from = struct.unpack_from + + class Unpacker(object): """Streaming unpacker. - arguments: + Arguments: :param file_like: File-like object having `.read(n)` method. @@ -143,14 +166,19 @@ class Unpacker(object): Otherwise, unpack to Python tuple. (default: True) :param bool raw: - If true, unpack msgpack raw to Python bytes (default). - Otherwise, unpack to Python str (or unicode on Python 2) by decoding - with UTF-8 encoding (recommended). - Currently, the default is true, but it will be changed to false in - near future. So you must specify it explicitly for keeping backward - compatibility. + If true, unpack msgpack raw to Python bytes. 
+ Otherwise, unpack to Python str by decoding with UTF-8 encoding (default). + + :param int timestamp: + Control how timestamp type is unpacked: + + 0 - Timestamp + 1 - float (Seconds from the EPOCH) + 2 - int (Nanoseconds from the EPOCH) + 3 - datetime.datetime (UTC). Python 2 is not supported. - *encoding* option which is deprecated overrides this option. + :param bool strict_map_key: + If true (default), only str or bytes are accepted for map (dict) keys. :param callable object_hook: When specified, it should be callable. @@ -162,41 +190,46 @@ class Unpacker(object): Unpacker calls it with a list of key-value pairs after unpacking msgpack map. (See also simplejson) - :param str encoding: - Encoding used for decoding msgpack raw. - If it is None (default), msgpack raw is deserialized to Python bytes. - :param str unicode_errors: - (deprecated) Used for decoding msgpack raw with *encoding*. - (default: `'strict'`) + The error handler for decoding unicode. (default: 'strict') + This option should be used only when you have msgpack data which + contains invalid UTF-8 string. :param int max_buffer_size: - Limits size of data waiting unpacked. 0 means system's INT_MAX (default). + Limits size of data waiting unpacked. 0 means 2**32-1. + The default value is 100*1024*1024 (100MiB). Raises `BufferFull` exception when it is insufficient. You should set this parameter when unpacking data from untrusted source. :param int max_str_len: - Limits max length of str. (default: 2**31-1) + Deprecated, use *max_buffer_size* instead. + Limits max length of str. (default: max_buffer_size) :param int max_bin_len: - Limits max length of bin. (default: 2**31-1) + Deprecated, use *max_buffer_size* instead. + Limits max length of bin. (default: max_buffer_size) :param int max_array_len: - Limits max length of array. (default: 2**31-1) + Limits max length of array. + (default: max_buffer_size) :param int max_map_len: - Limits max length of map. 
(default: 2**31-1) + Limits max length of map. + (default: max_buffer_size//2) + :param int max_ext_len: + Deprecated, use *max_buffer_size* instead. + Limits max size of ext type. (default: max_buffer_size) - example of streaming deserialize from file-like object:: + Example of streaming deserialize from file-like object:: - unpacker = Unpacker(file_like, raw=False) + unpacker = Unpacker(file_like) for o in unpacker: process(o) - example of streaming deserialize from socket:: + Example of streaming deserialize from socket:: - unpacker = Unpacker(raw=False) + unpacker = Unpacker(max_buffer_size) while True: buf = sock.recv(1024**2) if not buf: @@ -204,25 +237,36 @@ class Unpacker(object): unpacker.feed(buf) for o in unpacker: process(o) - """ - def __init__(self, file_like=None, read_size=0, use_list=True, raw=True, - object_hook=None, object_pairs_hook=None, list_hook=None, - encoding=None, unicode_errors=None, max_buffer_size=0, - ext_hook=ExtType, - max_str_len=2147483647, # 2**32-1 - max_bin_len=2147483647, - max_array_len=2147483647, - max_map_len=2147483647, - max_ext_len=2147483647): - - if encoding is not None: - warnings.warn( - "encoding is deprecated, Use raw=False instead.", - PendingDeprecationWarning) + Raises ``ExtraData`` when *packed* contains extra bytes. + Raises ``OutOfData`` when *packed* is incomplete. + Raises ``FormatError`` when *packed* is not valid msgpack. + Raises ``StackError`` when *packed* contains too nested. + Other exceptions can be raised during unpacking. 
+ """ + def __init__( + self, + file_like=None, + read_size=0, + use_list=True, + raw=False, + timestamp=0, + strict_map_key=True, + object_hook=None, + object_pairs_hook=None, + list_hook=None, + unicode_errors=None, + max_buffer_size=100 * 1024 * 1024, + ext_hook=ExtType, + max_str_len=-1, + max_bin_len=-1, + max_array_len=-1, + max_map_len=-1, + max_ext_len=-1, + ): if unicode_errors is None: - unicode_errors = 'strict' + unicode_errors = "strict" if file_like is None: self._feeding = True @@ -234,12 +278,6 @@ class Unpacker(object): #: array of bytes fed. self._buffer = bytearray() - # Some very old pythons don't support `struct.unpack_from()` with a - # `bytearray`. So we wrap it in a `buffer()` there. - if sys.version_info < (2, 7, 6): - self._buffer_view = buffer(self._buffer) - else: - self._buffer_view = self._buffer #: Which position we currently reads self._buff_i = 0 @@ -252,14 +290,30 @@ class Unpacker(object): # state, which _buf_checkpoint records. self._buf_checkpoint = 0 - self._max_buffer_size = max_buffer_size or 2**31-1 + if not max_buffer_size: + max_buffer_size = 2 ** 31 - 1 + if max_str_len == -1: + max_str_len = max_buffer_size + if max_bin_len == -1: + max_bin_len = max_buffer_size + if max_array_len == -1: + max_array_len = max_buffer_size + if max_map_len == -1: + max_map_len = max_buffer_size // 2 + if max_ext_len == -1: + max_ext_len = max_buffer_size + + self._max_buffer_size = max_buffer_size if read_size > self._max_buffer_size: raise ValueError("read_size must be smaller than max_buffer_size") - self._read_size = read_size or min(self._max_buffer_size, 16*1024) + self._read_size = read_size or min(self._max_buffer_size, 16 * 1024) self._raw = bool(raw) - self._encoding = encoding + self._strict_map_key = bool(strict_map_key) self._unicode_errors = unicode_errors self._use_list = use_list + if not (0 <= timestamp <= 3): + raise ValueError("timestamp must be 0..3") + self._timestamp = timestamp self._list_hook = list_hook 
self._object_hook = object_hook self._object_pairs_hook = object_pairs_hook @@ -272,30 +326,32 @@ class Unpacker(object): self._stream_offset = 0 if list_hook is not None and not callable(list_hook): - raise TypeError('`list_hook` is not callable') + raise TypeError("`list_hook` is not callable") if object_hook is not None and not callable(object_hook): - raise TypeError('`object_hook` is not callable') + raise TypeError("`object_hook` is not callable") if object_pairs_hook is not None and not callable(object_pairs_hook): - raise TypeError('`object_pairs_hook` is not callable') + raise TypeError("`object_pairs_hook` is not callable") if object_hook is not None and object_pairs_hook is not None: - raise TypeError("object_pairs_hook and object_hook are mutually " - "exclusive") + raise TypeError( + "object_pairs_hook and object_hook are mutually " "exclusive" + ) if not callable(ext_hook): raise TypeError("`ext_hook` is not callable") def feed(self, next_bytes): assert self._feeding view = _get_data_from_buffer(next_bytes) - if (len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size): + if len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size: raise BufferFull # Strip buffer before checkpoint before reading file. if self._buf_checkpoint > 0: - del self._buffer[:self._buf_checkpoint] + del self._buffer[: self._buf_checkpoint] self._buff_i -= self._buf_checkpoint self._buf_checkpoint = 0 - self._buffer += view + # Use extend here: INPLACE_ADD += doesn't reliably typecast memoryview in jython + self._buffer.extend(view) def _consume(self): """ Gets rid of the used parts of the buffer. 
""" @@ -306,17 +362,19 @@ class Unpacker(object): return self._buff_i < len(self._buffer) def _get_extradata(self): - return self._buffer[self._buff_i:] + return self._buffer[self._buff_i :] def read_bytes(self, n): - return self._read(n) + ret = self._read(n) + self._consume() + return ret def _read(self, n): # (int) -> bytearray self._reserve(n) i = self._buff_i - self._buff_i = i+n - return self._buffer[i:i+n] + self._buff_i = i + n + return self._buffer[i : i + n] def _reserve(self, n): remain_bytes = len(self._buffer) - self._buff_i - n @@ -331,7 +389,7 @@ class Unpacker(object): # Strip buffer before checkpoint before reading file. if self._buf_checkpoint > 0: - del self._buffer[:self._buf_checkpoint] + del self._buffer[: self._buf_checkpoint] self._buff_i -= self._buf_checkpoint self._buf_checkpoint = 0 @@ -360,206 +418,206 @@ class Unpacker(object): if b & 0b10000000 == 0: obj = b elif b & 0b11100000 == 0b11100000: - obj = -1 - (b ^ 0xff) + obj = -1 - (b ^ 0xFF) elif b & 0b11100000 == 0b10100000: n = b & 0b00011111 typ = TYPE_RAW if n > self._max_str_len: - raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) + raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) obj = self._read(n) elif b & 0b11110000 == 0b10010000: n = b & 0b00001111 typ = TYPE_ARRAY if n > self._max_array_len: - raise UnpackValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) + raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) elif b & 0b11110000 == 0b10000000: n = b & 0b00001111 typ = TYPE_MAP if n > self._max_map_len: - raise UnpackValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) - elif b == 0xc0: + raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) + elif b == 0xC0: obj = None - elif b == 0xc2: + elif b == 0xC2: obj = False - elif b == 0xc3: + elif b == 0xC3: obj = True - elif b == 0xc4: + elif b == 0xC4: typ = TYPE_BIN self._reserve(1) n = self._buffer[self._buff_i] self._buff_i 
+= 1 if n > self._max_bin_len: - raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) + raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) obj = self._read(n) - elif b == 0xc5: + elif b == 0xC5: typ = TYPE_BIN self._reserve(2) - n = struct.unpack_from(">H", self._buffer_view, self._buff_i)[0] + n = _unpack_from(">H", self._buffer, self._buff_i)[0] self._buff_i += 2 if n > self._max_bin_len: - raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) + raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) obj = self._read(n) - elif b == 0xc6: + elif b == 0xC6: typ = TYPE_BIN self._reserve(4) - n = struct.unpack_from(">I", self._buffer_view, self._buff_i)[0] + n = _unpack_from(">I", self._buffer, self._buff_i)[0] self._buff_i += 4 if n > self._max_bin_len: - raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) + raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) obj = self._read(n) - elif b == 0xc7: # ext 8 + elif b == 0xC7: # ext 8 typ = TYPE_EXT self._reserve(2) - L, n = struct.unpack_from('Bb', self._buffer_view, self._buff_i) + L, n = _unpack_from("Bb", self._buffer, self._buff_i) self._buff_i += 2 if L > self._max_ext_len: - raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) + raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) obj = self._read(L) - elif b == 0xc8: # ext 16 + elif b == 0xC8: # ext 16 typ = TYPE_EXT self._reserve(3) - L, n = struct.unpack_from('>Hb', self._buffer_view, self._buff_i) + L, n = _unpack_from(">Hb", self._buffer, self._buff_i) self._buff_i += 3 if L > self._max_ext_len: - raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) + raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) obj = self._read(L) - elif b == 0xc9: # ext 32 + elif b == 0xC9: # ext 32 typ = TYPE_EXT self._reserve(5) - L, n = struct.unpack_from('>Ib', 
self._buffer_view, self._buff_i) + L, n = _unpack_from(">Ib", self._buffer, self._buff_i) self._buff_i += 5 if L > self._max_ext_len: - raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) + raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) obj = self._read(L) - elif b == 0xca: + elif b == 0xCA: self._reserve(4) - obj = struct.unpack_from(">f", self._buffer_view, self._buff_i)[0] + obj = _unpack_from(">f", self._buffer, self._buff_i)[0] self._buff_i += 4 - elif b == 0xcb: + elif b == 0xCB: self._reserve(8) - obj = struct.unpack_from(">d", self._buffer_view, self._buff_i)[0] + obj = _unpack_from(">d", self._buffer, self._buff_i)[0] self._buff_i += 8 - elif b == 0xcc: + elif b == 0xCC: self._reserve(1) obj = self._buffer[self._buff_i] self._buff_i += 1 - elif b == 0xcd: + elif b == 0xCD: self._reserve(2) - obj = struct.unpack_from(">H", self._buffer_view, self._buff_i)[0] + obj = _unpack_from(">H", self._buffer, self._buff_i)[0] self._buff_i += 2 - elif b == 0xce: + elif b == 0xCE: self._reserve(4) - obj = struct.unpack_from(">I", self._buffer_view, self._buff_i)[0] + obj = _unpack_from(">I", self._buffer, self._buff_i)[0] self._buff_i += 4 - elif b == 0xcf: + elif b == 0xCF: self._reserve(8) - obj = struct.unpack_from(">Q", self._buffer_view, self._buff_i)[0] + obj = _unpack_from(">Q", self._buffer, self._buff_i)[0] self._buff_i += 8 - elif b == 0xd0: + elif b == 0xD0: self._reserve(1) - obj = struct.unpack_from("b", self._buffer_view, self._buff_i)[0] + obj = _unpack_from("b", self._buffer, self._buff_i)[0] self._buff_i += 1 - elif b == 0xd1: + elif b == 0xD1: self._reserve(2) - obj = struct.unpack_from(">h", self._buffer_view, self._buff_i)[0] + obj = _unpack_from(">h", self._buffer, self._buff_i)[0] self._buff_i += 2 - elif b == 0xd2: + elif b == 0xD2: self._reserve(4) - obj = struct.unpack_from(">i", self._buffer_view, self._buff_i)[0] + obj = _unpack_from(">i", self._buffer, self._buff_i)[0] self._buff_i += 4 - 
elif b == 0xd3: + elif b == 0xD3: self._reserve(8) - obj = struct.unpack_from(">q", self._buffer_view, self._buff_i)[0] + obj = _unpack_from(">q", self._buffer, self._buff_i)[0] self._buff_i += 8 - elif b == 0xd4: # fixext 1 + elif b == 0xD4: # fixext 1 typ = TYPE_EXT if self._max_ext_len < 1: - raise UnpackValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len)) + raise ValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len)) self._reserve(2) - n, obj = struct.unpack_from("b1s", self._buffer_view, self._buff_i) + n, obj = _unpack_from("b1s", self._buffer, self._buff_i) self._buff_i += 2 - elif b == 0xd5: # fixext 2 + elif b == 0xD5: # fixext 2 typ = TYPE_EXT if self._max_ext_len < 2: - raise UnpackValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len)) + raise ValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len)) self._reserve(3) - n, obj = struct.unpack_from("b2s", self._buffer_view, self._buff_i) + n, obj = _unpack_from("b2s", self._buffer, self._buff_i) self._buff_i += 3 - elif b == 0xd6: # fixext 4 + elif b == 0xD6: # fixext 4 typ = TYPE_EXT if self._max_ext_len < 4: - raise UnpackValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len)) + raise ValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len)) self._reserve(5) - n, obj = struct.unpack_from("b4s", self._buffer_view, self._buff_i) + n, obj = _unpack_from("b4s", self._buffer, self._buff_i) self._buff_i += 5 - elif b == 0xd7: # fixext 8 + elif b == 0xD7: # fixext 8 typ = TYPE_EXT if self._max_ext_len < 8: - raise UnpackValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len)) + raise ValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len)) self._reserve(9) - n, obj = struct.unpack_from("b8s", self._buffer_view, self._buff_i) + n, obj = _unpack_from("b8s", self._buffer, self._buff_i) self._buff_i += 9 - elif b == 0xd8: # fixext 16 + elif b == 0xD8: # fixext 16 typ = TYPE_EXT if self._max_ext_len < 16: - raise UnpackValueError("%s 
exceeds max_ext_len(%s)" % (16, self._max_ext_len)) + raise ValueError("%s exceeds max_ext_len(%s)" % (16, self._max_ext_len)) self._reserve(17) - n, obj = struct.unpack_from("b16s", self._buffer_view, self._buff_i) + n, obj = _unpack_from("b16s", self._buffer, self._buff_i) self._buff_i += 17 - elif b == 0xd9: + elif b == 0xD9: typ = TYPE_RAW self._reserve(1) n = self._buffer[self._buff_i] self._buff_i += 1 if n > self._max_str_len: - raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) + raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) obj = self._read(n) - elif b == 0xda: + elif b == 0xDA: typ = TYPE_RAW self._reserve(2) - n, = struct.unpack_from(">H", self._buffer_view, self._buff_i) + (n,) = _unpack_from(">H", self._buffer, self._buff_i) self._buff_i += 2 if n > self._max_str_len: - raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) + raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) obj = self._read(n) - elif b == 0xdb: + elif b == 0xDB: typ = TYPE_RAW self._reserve(4) - n, = struct.unpack_from(">I", self._buffer_view, self._buff_i) + (n,) = _unpack_from(">I", self._buffer, self._buff_i) self._buff_i += 4 if n > self._max_str_len: - raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) + raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) obj = self._read(n) - elif b == 0xdc: + elif b == 0xDC: typ = TYPE_ARRAY self._reserve(2) - n, = struct.unpack_from(">H", self._buffer_view, self._buff_i) + (n,) = _unpack_from(">H", self._buffer, self._buff_i) self._buff_i += 2 if n > self._max_array_len: - raise UnpackValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) - elif b == 0xdd: + raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) + elif b == 0xDD: typ = TYPE_ARRAY self._reserve(4) - n, = struct.unpack_from(">I", self._buffer_view, self._buff_i) + (n,) = _unpack_from(">I", self._buffer, self._buff_i) 
self._buff_i += 4 if n > self._max_array_len: - raise UnpackValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) - elif b == 0xde: + raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) + elif b == 0xDE: self._reserve(2) - n, = struct.unpack_from(">H", self._buffer_view, self._buff_i) + (n,) = _unpack_from(">H", self._buffer, self._buff_i) self._buff_i += 2 if n > self._max_map_len: - raise UnpackValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) + raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) typ = TYPE_MAP - elif b == 0xdf: + elif b == 0xDF: self._reserve(4) - n, = struct.unpack_from(">I", self._buffer_view, self._buff_i) + (n,) = _unpack_from(">I", self._buffer, self._buff_i) self._buff_i += 4 if n > self._max_map_len: - raise UnpackValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) + raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) typ = TYPE_MAP else: - raise UnpackValueError("Unknown header: 0x%x" % b) + raise FormatError("Unknown header: 0x%x" % b) return typ, n, obj def _unpack(self, execute=EX_CONSTRUCT): @@ -567,11 +625,11 @@ class Unpacker(object): if execute == EX_READ_ARRAY_HEADER: if typ != TYPE_ARRAY: - raise UnpackValueError("Expected array") + raise ValueError("Expected array") return n if execute == EX_READ_MAP_HEADER: if typ != TYPE_MAP: - raise UnpackValueError("Expected map") + raise ValueError("Expected map") return n # TODO should we eliminate the recursion? 
if typ == TYPE_ARRAY: @@ -596,13 +654,19 @@ class Unpacker(object): return if self._object_pairs_hook is not None: ret = self._object_pairs_hook( - (self._unpack(EX_CONSTRUCT), - self._unpack(EX_CONSTRUCT)) - for _ in xrange(n)) + (self._unpack(EX_CONSTRUCT), self._unpack(EX_CONSTRUCT)) + for _ in xrange(n) + ) else: ret = {} for _ in xrange(n): key = self._unpack(EX_CONSTRUCT) + if self._strict_map_key and type(key) not in (unicode, bytes): + raise ValueError( + "%s is not allowed for map key" % str(type(key)) + ) + if not PY2 and type(key) is str: + key = sys.intern(key) ret[key] = self._unpack(EX_CONSTRUCT) if self._object_hook is not None: ret = self._object_hook(ret) @@ -610,17 +674,26 @@ class Unpacker(object): if execute == EX_SKIP: return if typ == TYPE_RAW: - if self._encoding is not None: - obj = obj.decode(self._encoding, self._unicode_errors) - elif self._raw: + if self._raw: obj = bytes(obj) else: - obj = obj.decode('utf_8') + obj = obj.decode("utf_8", self._unicode_errors) return obj - if typ == TYPE_EXT: - return self._ext_hook(n, bytes(obj)) if typ == TYPE_BIN: return bytes(obj) + if typ == TYPE_EXT: + if n == -1: # timestamp + ts = Timestamp.from_bytes(bytes(obj)) + if self._timestamp == 1: + return ts.to_unix() + elif self._timestamp == 2: + return ts.to_unix_nano() + elif self._timestamp == 3: + return ts.to_datetime() + else: + return ts + else: + return self._ext_hook(n, bytes(obj)) assert typ == TYPE_IMMEDIATE return obj @@ -635,37 +708,30 @@ class Unpacker(object): except OutOfData: self._consume() raise StopIteration + except RecursionError: + raise StackError next = __next__ - def skip(self, write_bytes=None): + def skip(self): self._unpack(EX_SKIP) - if write_bytes is not None: - warnings.warn("`write_bytes` option is deprecated. 
Use `.tell()` instead.", DeprecationWarning) - write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) self._consume() - def unpack(self, write_bytes=None): - ret = self._unpack(EX_CONSTRUCT) - if write_bytes is not None: - warnings.warn("`write_bytes` option is deprecated. Use `.tell()` instead.", DeprecationWarning) - write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) + def unpack(self): + try: + ret = self._unpack(EX_CONSTRUCT) + except RecursionError: + raise StackError self._consume() return ret - def read_array_header(self, write_bytes=None): + def read_array_header(self): ret = self._unpack(EX_READ_ARRAY_HEADER) - if write_bytes is not None: - warnings.warn("`write_bytes` option is deprecated. Use `.tell()` instead.", DeprecationWarning) - write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) self._consume() return ret - def read_map_header(self, write_bytes=None): + def read_map_header(self): ret = self._unpack(EX_READ_MAP_HEADER) - if write_bytes is not None: - warnings.warn("`write_bytes` option is deprecated. Use `.tell()` instead.", DeprecationWarning) - write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) self._consume() return ret @@ -677,7 +743,7 @@ class Packer(object): """ MessagePack Packer - usage: + Usage: packer = Packer() astream.write(packer.pack(a)) @@ -698,49 +764,58 @@ class Packer(object): :param bool use_bin_type: Use bin type introduced in msgpack spec 2.0 for bytes. - It also enables str8 type for unicode. + It also enables str8 type for unicode. (default: True) :param bool strict_types: If set to true, types will be checked to be exact. Derived classes - from serializeable types will not be serialized and will be + from serializable types will not be serialized and will be treated as unsupported type and forwarded to default. Additionally tuples will not be serialized as lists. This is useful when trying to implement accurate serialization for python types. 
- :param str encoding: - (deprecated) Convert unicode to bytes with this encoding. (default: 'utf-8') + :param bool datetime: + If set to true, datetime with tzinfo is packed into Timestamp type. + Note that the tzinfo is stripped in the timestamp. + You can get UTC datetime with `timestamp=3` option of the Unpacker. + (Python 2 is not supported). :param str unicode_errors: - Error handler for encoding unicode. (default: 'strict') + The error handler for encoding unicode. (default: 'strict') + DO NOT USE THIS!! This option is kept for very specific usage. """ - def __init__(self, default=None, encoding=None, unicode_errors=None, - use_single_float=False, autoreset=True, use_bin_type=False, - strict_types=False): - if encoding is None: - encoding = 'utf_8' - else: - warnings.warn( - "encoding is deprecated, Use raw=False instead.", - PendingDeprecationWarning) - - if unicode_errors is None: - unicode_errors = 'strict' + def __init__( + self, + default=None, + use_single_float=False, + autoreset=True, + use_bin_type=True, + strict_types=False, + datetime=False, + unicode_errors=None, + ): self._strict_types = strict_types self._use_float = use_single_float self._autoreset = autoreset self._use_bin_type = use_bin_type - self._encoding = encoding - self._unicode_errors = unicode_errors self._buffer = StringIO() + if PY2 and datetime: + raise ValueError("datetime is not supported in Python 2") + self._datetime = bool(datetime) + self._unicode_errors = unicode_errors or "strict" if default is not None: if not callable(default): raise TypeError("default must be callable") self._default = default - def _pack(self, obj, nest_limit=DEFAULT_RECURSE_LIMIT, - check=isinstance, check_type_strict=_check_type_strict): + def _pack( + self, + obj, + nest_limit=DEFAULT_RECURSE_LIMIT, + check=isinstance, + check_type_strict=_check_type_strict, + ): default_used = False if self._strict_types: check = check_type_strict @@ -749,7 +824,7 @@ class Packer(object): list_types = (list, tuple) 
while True: if nest_limit < 0: - raise PackValueError("recursion limit exceeded") + raise ValueError("recursion limit exceeded") if obj is None: return self._buffer.write(b"\xc0") if check(obj, bool): @@ -761,76 +836,76 @@ class Packer(object): return self._buffer.write(struct.pack("B", obj)) if -0x20 <= obj < 0: return self._buffer.write(struct.pack("b", obj)) - if 0x80 <= obj <= 0xff: - return self._buffer.write(struct.pack("BB", 0xcc, obj)) + if 0x80 <= obj <= 0xFF: + return self._buffer.write(struct.pack("BB", 0xCC, obj)) if -0x80 <= obj < 0: - return self._buffer.write(struct.pack(">Bb", 0xd0, obj)) - if 0xff < obj <= 0xffff: - return self._buffer.write(struct.pack(">BH", 0xcd, obj)) + return self._buffer.write(struct.pack(">Bb", 0xD0, obj)) + if 0xFF < obj <= 0xFFFF: + return self._buffer.write(struct.pack(">BH", 0xCD, obj)) if -0x8000 <= obj < -0x80: - return self._buffer.write(struct.pack(">Bh", 0xd1, obj)) - if 0xffff < obj <= 0xffffffff: - return self._buffer.write(struct.pack(">BI", 0xce, obj)) + return self._buffer.write(struct.pack(">Bh", 0xD1, obj)) + if 0xFFFF < obj <= 0xFFFFFFFF: + return self._buffer.write(struct.pack(">BI", 0xCE, obj)) if -0x80000000 <= obj < -0x8000: - return self._buffer.write(struct.pack(">Bi", 0xd2, obj)) - if 0xffffffff < obj <= 0xffffffffffffffff: - return self._buffer.write(struct.pack(">BQ", 0xcf, obj)) + return self._buffer.write(struct.pack(">Bi", 0xD2, obj)) + if 0xFFFFFFFF < obj <= 0xFFFFFFFFFFFFFFFF: + return self._buffer.write(struct.pack(">BQ", 0xCF, obj)) if -0x8000000000000000 <= obj < -0x80000000: - return self._buffer.write(struct.pack(">Bq", 0xd3, obj)) + return self._buffer.write(struct.pack(">Bq", 0xD3, obj)) if not default_used and self._default is not None: obj = self._default(obj) default_used = True continue - raise PackOverflowError("Integer value out of range") + raise OverflowError("Integer value out of range") if check(obj, (bytes, bytearray)): n = len(obj) - if n >= 2**32: - raise PackValueError("%s 
is too large" % type(obj).__name__) + if n >= 2 ** 32: + raise ValueError("%s is too large" % type(obj).__name__) self._pack_bin_header(n) return self._buffer.write(obj) - if check(obj, Unicode): - if self._encoding is None: - raise TypeError( - "Can't encode unicode string: " - "no encoding is specified") - obj = obj.encode(self._encoding, self._unicode_errors) + if check(obj, unicode): + obj = obj.encode("utf-8", self._unicode_errors) n = len(obj) - if n >= 2**32: - raise PackValueError("String is too large") + if n >= 2 ** 32: + raise ValueError("String is too large") self._pack_raw_header(n) return self._buffer.write(obj) if check(obj, memoryview): n = len(obj) * obj.itemsize - if n >= 2**32: - raise PackValueError("Memoryview is too large") + if n >= 2 ** 32: + raise ValueError("Memoryview is too large") self._pack_bin_header(n) return self._buffer.write(obj) if check(obj, float): if self._use_float: - return self._buffer.write(struct.pack(">Bf", 0xca, obj)) - return self._buffer.write(struct.pack(">Bd", 0xcb, obj)) - if check(obj, ExtType): - code = obj.code - data = obj.data + return self._buffer.write(struct.pack(">Bf", 0xCA, obj)) + return self._buffer.write(struct.pack(">Bd", 0xCB, obj)) + if check(obj, (ExtType, Timestamp)): + if check(obj, Timestamp): + code = -1 + data = obj.to_bytes() + else: + code = obj.code + data = obj.data assert isinstance(code, int) assert isinstance(data, bytes) L = len(data) if L == 1: - self._buffer.write(b'\xd4') + self._buffer.write(b"\xd4") elif L == 2: - self._buffer.write(b'\xd5') + self._buffer.write(b"\xd5") elif L == 4: - self._buffer.write(b'\xd6') + self._buffer.write(b"\xd6") elif L == 8: - self._buffer.write(b'\xd7') + self._buffer.write(b"\xd7") elif L == 16: - self._buffer.write(b'\xd8') - elif L <= 0xff: - self._buffer.write(struct.pack(">BB", 0xc7, L)) - elif L <= 0xffff: - self._buffer.write(struct.pack(">BH", 0xc8, L)) + self._buffer.write(b"\xd8") + elif L <= 0xFF: + self._buffer.write(struct.pack(">BB", 
0xC7, L)) + elif L <= 0xFFFF: + self._buffer.write(struct.pack(">BH", 0xC8, L)) else: - self._buffer.write(struct.pack(">BI", 0xc9, L)) + self._buffer.write(struct.pack(">BI", 0xC9, L)) self._buffer.write(struct.pack("b", code)) self._buffer.write(data) return @@ -841,13 +916,20 @@ class Packer(object): self._pack(obj[i], nest_limit - 1) return if check(obj, dict): - return self._pack_map_pairs(len(obj), dict_iteritems(obj), - nest_limit - 1) + return self._pack_map_pairs( + len(obj), dict_iteritems(obj), nest_limit - 1 + ) + + if self._datetime and check(obj, _DateTime): + obj = Timestamp.from_datetime(obj) + default_used = 1 + continue + if not default_used and self._default is not None: obj = self._default(obj) default_used = 1 continue - raise TypeError("Cannot serialize %r" % (obj, )) + raise TypeError("Cannot serialize %r" % (obj,)) def pack(self, obj): try: @@ -855,43 +937,35 @@ class Packer(object): except: self._buffer = StringIO() # force reset raise - ret = self._buffer.getvalue() if self._autoreset: + ret = self._buffer.getvalue() self._buffer = StringIO() - elif USING_STRINGBUILDER: - self._buffer = StringIO(ret) - return ret + return ret def pack_map_pairs(self, pairs): self._pack_map_pairs(len(pairs), pairs) - ret = self._buffer.getvalue() if self._autoreset: + ret = self._buffer.getvalue() self._buffer = StringIO() - elif USING_STRINGBUILDER: - self._buffer = StringIO(ret) - return ret + return ret def pack_array_header(self, n): - if n >= 2**32: - raise PackValueError + if n >= 2 ** 32: + raise ValueError self._pack_array_header(n) - ret = self._buffer.getvalue() if self._autoreset: + ret = self._buffer.getvalue() self._buffer = StringIO() - elif USING_STRINGBUILDER: - self._buffer = StringIO(ret) - return ret + return ret def pack_map_header(self, n): - if n >= 2**32: - raise PackValueError + if n >= 2 ** 32: + raise ValueError self._pack_map_header(n) - ret = self._buffer.getvalue() if self._autoreset: + ret = self._buffer.getvalue() self._buffer 
= StringIO() - elif USING_STRINGBUILDER: - self._buffer = StringIO(ret) - return ret + return ret def pack_ext_type(self, typecode, data): if not isinstance(typecode, int): @@ -901,44 +975,44 @@ class Packer(object): if not isinstance(data, bytes): raise TypeError("data must have bytes type") L = len(data) - if L > 0xffffffff: - raise PackValueError("Too large data") + if L > 0xFFFFFFFF: + raise ValueError("Too large data") if L == 1: - self._buffer.write(b'\xd4') + self._buffer.write(b"\xd4") elif L == 2: - self._buffer.write(b'\xd5') + self._buffer.write(b"\xd5") elif L == 4: - self._buffer.write(b'\xd6') + self._buffer.write(b"\xd6") elif L == 8: - self._buffer.write(b'\xd7') + self._buffer.write(b"\xd7") elif L == 16: - self._buffer.write(b'\xd8') - elif L <= 0xff: - self._buffer.write(b'\xc7' + struct.pack('B', L)) - elif L <= 0xffff: - self._buffer.write(b'\xc8' + struct.pack('>H', L)) + self._buffer.write(b"\xd8") + elif L <= 0xFF: + self._buffer.write(b"\xc7" + struct.pack("B", L)) + elif L <= 0xFFFF: + self._buffer.write(b"\xc8" + struct.pack(">H", L)) else: - self._buffer.write(b'\xc9' + struct.pack('>I', L)) - self._buffer.write(struct.pack('B', typecode)) + self._buffer.write(b"\xc9" + struct.pack(">I", L)) + self._buffer.write(struct.pack("B", typecode)) self._buffer.write(data) def _pack_array_header(self, n): - if n <= 0x0f: - return self._buffer.write(struct.pack('B', 0x90 + n)) - if n <= 0xffff: - return self._buffer.write(struct.pack(">BH", 0xdc, n)) - if n <= 0xffffffff: - return self._buffer.write(struct.pack(">BI", 0xdd, n)) - raise PackValueError("Array is too large") + if n <= 0x0F: + return self._buffer.write(struct.pack("B", 0x90 + n)) + if n <= 0xFFFF: + return self._buffer.write(struct.pack(">BH", 0xDC, n)) + if n <= 0xFFFFFFFF: + return self._buffer.write(struct.pack(">BI", 0xDD, n)) + raise ValueError("Array is too large") def _pack_map_header(self, n): - if n <= 0x0f: - return self._buffer.write(struct.pack('B', 0x80 + n)) - if n <= 
0xffff: - return self._buffer.write(struct.pack(">BH", 0xde, n)) - if n <= 0xffffffff: - return self._buffer.write(struct.pack(">BI", 0xdf, n)) - raise PackValueError("Dict is too large") + if n <= 0x0F: + return self._buffer.write(struct.pack("B", 0x80 + n)) + if n <= 0xFFFF: + return self._buffer.write(struct.pack(">BH", 0xDE, n)) + if n <= 0xFFFFFFFF: + return self._buffer.write(struct.pack(">BI", 0xDF, n)) + raise ValueError("Dict is too large") def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT): self._pack_map_header(n) @@ -947,31 +1021,43 @@ class Packer(object): self._pack(v, nest_limit - 1) def _pack_raw_header(self, n): - if n <= 0x1f: - self._buffer.write(struct.pack('B', 0xa0 + n)) - elif self._use_bin_type and n <= 0xff: - self._buffer.write(struct.pack('>BB', 0xd9, n)) - elif n <= 0xffff: - self._buffer.write(struct.pack(">BH", 0xda, n)) - elif n <= 0xffffffff: - self._buffer.write(struct.pack(">BI", 0xdb, n)) + if n <= 0x1F: + self._buffer.write(struct.pack("B", 0xA0 + n)) + elif self._use_bin_type and n <= 0xFF: + self._buffer.write(struct.pack(">BB", 0xD9, n)) + elif n <= 0xFFFF: + self._buffer.write(struct.pack(">BH", 0xDA, n)) + elif n <= 0xFFFFFFFF: + self._buffer.write(struct.pack(">BI", 0xDB, n)) else: - raise PackValueError('Raw is too large') + raise ValueError("Raw is too large") def _pack_bin_header(self, n): if not self._use_bin_type: return self._pack_raw_header(n) - elif n <= 0xff: - return self._buffer.write(struct.pack('>BB', 0xc4, n)) - elif n <= 0xffff: - return self._buffer.write(struct.pack(">BH", 0xc5, n)) - elif n <= 0xffffffff: - return self._buffer.write(struct.pack(">BI", 0xc6, n)) + elif n <= 0xFF: + return self._buffer.write(struct.pack(">BB", 0xC4, n)) + elif n <= 0xFFFF: + return self._buffer.write(struct.pack(">BH", 0xC5, n)) + elif n <= 0xFFFFFFFF: + return self._buffer.write(struct.pack(">BI", 0xC6, n)) else: - raise PackValueError('Bin is too large') + raise ValueError("Bin is too large") def 
bytes(self): + """Return internal buffer contents as bytes object""" return self._buffer.getvalue() def reset(self): + """Reset internal buffer. + + This method is useful only when autoreset=False. + """ self._buffer = StringIO() + + def getbuffer(self): + """Return view of internal buffer.""" + if USING_STRINGBUILDER or PY2: + return memoryview(self.bytes()) + else: + return self._buffer.getbuffer() diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/__about__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/__about__.py index 7481c9e2..4d998578 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/__about__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/__about__.py @@ -18,10 +18,10 @@ __title__ = "packaging" __summary__ = "Core utilities for Python packages" __uri__ = "https://github.com/pypa/packaging" -__version__ = "19.0" +__version__ = "20.4" __author__ = "Donald Stufft and individual contributors" __email__ = "donald@stufft.io" -__license__ = "BSD or Apache License, Version 2.0" +__license__ = "BSD-2-Clause or Apache-2.0" __copyright__ = "Copyright 2014-2019 %s" % __author__ diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/_compat.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/_compat.py index 25da473c..e54bd4ed 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/_compat.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/_compat.py @@ -5,6 +5,11 @@ from __future__ import absolute_import, division, print_function import sys +from ._typing import TYPE_CHECKING + +if TYPE_CHECKING: # pragma: no cover + from typing import Any, Dict, Tuple, Type + PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 @@ -18,14 +23,16 @@ else: def with_metaclass(meta, *bases): + # type: (Type[Any], 
Tuple[Type[Any], ...]) -> Any """ Create a base class with a metaclass. """ # This requires a bit of explanation: the basic idea is to make a dummy # metaclass for one level of class instantiation that replaces itself with # the actual metaclass. - class metaclass(meta): + class metaclass(meta): # type: ignore def __new__(cls, name, this_bases, d): + # type: (Type[Any], str, Tuple[Any], Dict[Any, Any]) -> Any return meta(name, bases, d) return type.__new__(metaclass, "temporary_class", (), {}) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/_structures.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/_structures.py index 68dcca63..800d5c55 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/_structures.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/_structures.py @@ -4,65 +4,83 @@ from __future__ import absolute_import, division, print_function -class Infinity(object): +class InfinityType(object): def __repr__(self): + # type: () -> str return "Infinity" def __hash__(self): + # type: () -> int return hash(repr(self)) def __lt__(self, other): + # type: (object) -> bool return False def __le__(self, other): + # type: (object) -> bool return False def __eq__(self, other): + # type: (object) -> bool return isinstance(other, self.__class__) def __ne__(self, other): + # type: (object) -> bool return not isinstance(other, self.__class__) def __gt__(self, other): + # type: (object) -> bool return True def __ge__(self, other): + # type: (object) -> bool return True def __neg__(self): + # type: (object) -> NegativeInfinityType return NegativeInfinity -Infinity = Infinity() +Infinity = InfinityType() -class NegativeInfinity(object): +class NegativeInfinityType(object): def __repr__(self): + # type: () -> str return "-Infinity" def __hash__(self): + # type: () -> int return hash(repr(self)) def __lt__(self, other): + # type: (object) -> 
bool return True def __le__(self, other): + # type: (object) -> bool return True def __eq__(self, other): + # type: (object) -> bool return isinstance(other, self.__class__) def __ne__(self, other): + # type: (object) -> bool return not isinstance(other, self.__class__) def __gt__(self, other): + # type: (object) -> bool return False def __ge__(self, other): + # type: (object) -> bool return False def __neg__(self): + # type: (object) -> InfinityType return Infinity -NegativeInfinity = NegativeInfinity() +NegativeInfinity = NegativeInfinityType() diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/_typing.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/_typing.py new file mode 100644 index 00000000..2846133b --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/_typing.py @@ -0,0 +1,48 @@ +"""For neatly implementing static typing in packaging. + +`mypy` - the static type analysis tool we use - uses the `typing` module, which +provides core functionality fundamental to mypy's functioning. + +Generally, `typing` would be imported at runtime and used in that fashion - +it acts as a no-op at runtime and does not have any run-time overhead by +design. + +As it turns out, `typing` is not vendorable - it uses separate sources for +Python 2/Python 3. Thus, this codebase can not expect it to be present. +To work around this, mypy allows the typing import to be behind a False-y +optional to prevent it from running at runtime and type-comments can be used +to remove the need for the types to be accessible directly during runtime. + +This module provides the False-y guard in a nicely named fashion so that a +curious maintainer can reach here to read this. + +In packaging, all static-typing related imports should be guarded as follows: + + from pip._vendor.packaging._typing import TYPE_CHECKING + + if TYPE_CHECKING: + from typing import ... 
+ +Ref: https://github.com/python/mypy/issues/3216 +""" + +__all__ = ["TYPE_CHECKING", "cast"] + +# The TYPE_CHECKING constant defined by the typing module is False at runtime +# but True while type checking. +if False: # pragma: no cover + from typing import TYPE_CHECKING +else: + TYPE_CHECKING = False + +# typing's cast syntax requires calling typing.cast at runtime, but we don't +# want to import typing at runtime. Here, we inform the type checkers that +# we're importing `typing.cast` as `cast` and re-implement typing.cast's +# runtime behavior in a block that is ignored by type checkers. +if TYPE_CHECKING: # pragma: no cover + # not executed at runtime + from typing import cast +else: + # executed at runtime + def cast(type_, value): # noqa + return value diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/markers.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/markers.py index 54824768..ed642b01 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/markers.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/markers.py @@ -13,8 +13,14 @@ from pip._vendor.pyparsing import ZeroOrMore, Group, Forward, QuotedString from pip._vendor.pyparsing import Literal as L # noqa from ._compat import string_types +from ._typing import TYPE_CHECKING from .specifiers import Specifier, InvalidSpecifier +if TYPE_CHECKING: # pragma: no cover + from typing import Any, Callable, Dict, List, Optional, Tuple, Union + + Operator = Callable[[str, str], bool] + __all__ = [ "InvalidMarker", @@ -46,30 +52,37 @@ class UndefinedEnvironmentName(ValueError): class Node(object): def __init__(self, value): + # type: (Any) -> None self.value = value def __str__(self): + # type: () -> str return str(self.value) def __repr__(self): + # type: () -> str return "<{0}({1!r})>".format(self.__class__.__name__, str(self)) def serialize(self): + # type: () -> str raise 
NotImplementedError class Variable(Node): def serialize(self): + # type: () -> str return str(self) class Value(Node): def serialize(self): + # type: () -> str return '"{0}"'.format(self) class Op(Node): def serialize(self): + # type: () -> str return str(self) @@ -85,13 +98,13 @@ VARIABLE = ( | L("python_version") | L("sys_platform") | L("os_name") - | L("os.name") + | L("os.name") # PEP-345 | L("sys.platform") # PEP-345 | L("platform.version") # PEP-345 | L("platform.machine") # PEP-345 | L("platform.python_implementation") # PEP-345 - | L("python_implementation") # PEP-345 - | L("extra") # undocumented setuptools legacy + | L("python_implementation") # undocumented setuptools legacy + | L("extra") # PEP-508 ) ALIASES = { "os.name": "os_name", @@ -131,6 +144,7 @@ MARKER = stringStart + MARKER_EXPR + stringEnd def _coerce_parse_result(results): + # type: (Union[ParseResults, List[Any]]) -> List[Any] if isinstance(results, ParseResults): return [_coerce_parse_result(i) for i in results] else: @@ -138,6 +152,8 @@ def _coerce_parse_result(results): def _format_marker(marker, first=True): + # type: (Union[List[str], Tuple[Node, ...], str], Optional[bool]) -> str + assert isinstance(marker, (list, tuple, string_types)) # Sometimes we have a structure like [[...]] which is a single item list @@ -172,10 +188,11 @@ _operators = { "!=": operator.ne, ">=": operator.ge, ">": operator.gt, -} +} # type: Dict[str, Operator] def _eval_op(lhs, op, rhs): + # type: (str, Op, str) -> bool try: spec = Specifier("".join([op.serialize(), rhs])) except InvalidSpecifier: @@ -183,7 +200,7 @@ def _eval_op(lhs, op, rhs): else: return spec.contains(lhs) - oper = _operators.get(op.serialize()) + oper = _operators.get(op.serialize()) # type: Optional[Operator] if oper is None: raise UndefinedComparison( "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs) @@ -192,13 +209,18 @@ def _eval_op(lhs, op, rhs): return oper(lhs, rhs) -_undefined = object() +class Undefined(object): + pass + + 
+_undefined = Undefined() def _get_env(environment, name): - value = environment.get(name, _undefined) + # type: (Dict[str, str], str) -> str + value = environment.get(name, _undefined) # type: Union[str, Undefined] - if value is _undefined: + if isinstance(value, Undefined): raise UndefinedEnvironmentName( "{0!r} does not exist in evaluation environment.".format(name) ) @@ -207,7 +229,8 @@ def _get_env(environment, name): def _evaluate_markers(markers, environment): - groups = [[]] + # type: (List[Any], Dict[str, str]) -> bool + groups = [[]] # type: List[List[bool]] for marker in markers: assert isinstance(marker, (list, tuple, string_types)) @@ -234,6 +257,7 @@ def _evaluate_markers(markers, environment): def format_full_version(info): + # type: (sys._version_info) -> str version = "{0.major}.{0.minor}.{0.micro}".format(info) kind = info.releaselevel if kind != "final": @@ -242,9 +266,13 @@ def format_full_version(info): def default_environment(): + # type: () -> Dict[str, str] if hasattr(sys, "implementation"): - iver = format_full_version(sys.implementation.version) - implementation_name = sys.implementation.name + # Ignoring the `sys.implementation` reference for type checking due to + # mypy not liking that the attribute doesn't exist in Python 2.7 when + # run with the `--py27` flag. 
+ iver = format_full_version(sys.implementation.version) # type: ignore + implementation_name = sys.implementation.name # type: ignore else: iver = "0" implementation_name = "" @@ -259,13 +287,14 @@ def default_environment(): "platform_version": platform.version(), "python_full_version": platform.python_version(), "platform_python_implementation": platform.python_implementation(), - "python_version": platform.python_version()[:3], + "python_version": ".".join(platform.python_version_tuple()[:2]), "sys_platform": sys.platform, } class Marker(object): def __init__(self, marker): + # type: (str) -> None try: self._markers = _coerce_parse_result(MARKER.parseString(marker)) except ParseException as e: @@ -275,12 +304,15 @@ class Marker(object): raise InvalidMarker(err_str) def __str__(self): + # type: () -> str return _format_marker(self._markers) def __repr__(self): + # type: () -> str return "<Marker({0!r})>".format(str(self)) def evaluate(self, environment=None): + # type: (Optional[Dict[str, str]]) -> bool """Evaluate a marker. 
Return the boolean from evaluating the given marker against the diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/requirements.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/requirements.py index dbc5f11d..5e64101c 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/requirements.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/requirements.py @@ -11,9 +11,13 @@ from pip._vendor.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine from pip._vendor.pyparsing import Literal as L # noqa from pip._vendor.six.moves.urllib import parse as urlparse +from ._typing import TYPE_CHECKING from .markers import MARKER_EXPR, Marker from .specifiers import LegacySpecifier, Specifier, SpecifierSet +if TYPE_CHECKING: # pragma: no cover + from typing import List + class InvalidRequirement(ValueError): """ @@ -89,6 +93,7 @@ class Requirement(object): # TODO: Can we normalize the name and extra name? 
def __init__(self, requirement_string): + # type: (str) -> None try: req = REQUIREMENT.parseString(requirement_string) except ParseException as e: @@ -116,7 +121,8 @@ class Requirement(object): self.marker = req.marker if req.marker else None def __str__(self): - parts = [self.name] + # type: () -> str + parts = [self.name] # type: List[str] if self.extras: parts.append("[{0}]".format(",".join(sorted(self.extras)))) @@ -135,4 +141,5 @@ class Requirement(object): return "".join(parts) def __repr__(self): + # type: () -> str return "<Requirement({0!r})>".format(str(self)) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/specifiers.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/specifiers.py index 743576a0..fe09bb1d 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/specifiers.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/specifiers.py @@ -9,8 +9,27 @@ import itertools import re from ._compat import string_types, with_metaclass +from ._typing import TYPE_CHECKING +from .utils import canonicalize_version from .version import Version, LegacyVersion, parse +if TYPE_CHECKING: # pragma: no cover + from typing import ( + List, + Dict, + Union, + Iterable, + Iterator, + Optional, + Callable, + Tuple, + FrozenSet, + ) + + ParsedVersion = Union[Version, LegacyVersion] + UnparsedVersion = Union[Version, LegacyVersion, str] + CallableOperator = Callable[[ParsedVersion, str], bool] + class InvalidSpecifier(ValueError): """ @@ -18,9 +37,10 @@ class InvalidSpecifier(ValueError): """ -class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): +class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): # type: ignore @abc.abstractmethod def __str__(self): + # type: () -> str """ Returns the str representation of this Specifier like object. This should be representative of the Specifier itself. 
@@ -28,12 +48,14 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): @abc.abstractmethod def __hash__(self): + # type: () -> int """ Returns a hash value for this Specifier like object. """ @abc.abstractmethod def __eq__(self, other): + # type: (object) -> bool """ Returns a boolean representing whether or not the two Specifier like objects are equal. @@ -41,6 +63,7 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): @abc.abstractmethod def __ne__(self, other): + # type: (object) -> bool """ Returns a boolean representing whether or not the two Specifier like objects are not equal. @@ -48,6 +71,7 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): @abc.abstractproperty def prereleases(self): + # type: () -> Optional[bool] """ Returns whether or not pre-releases as a whole are allowed by this specifier. @@ -55,6 +79,7 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): @prereleases.setter def prereleases(self, value): + # type: (bool) -> None """ Sets whether or not pre-releases as a whole are allowed by this specifier. @@ -62,12 +87,14 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): @abc.abstractmethod def contains(self, item, prereleases=None): + # type: (str, Optional[bool]) -> bool """ Determines if the given item is contained within this specifier. """ @abc.abstractmethod def filter(self, iterable, prereleases=None): + # type: (Iterable[UnparsedVersion], Optional[bool]) -> Iterable[UnparsedVersion] """ Takes an iterable of items and filters them so that only items which are contained within this specifier are allowed in it. 
@@ -76,19 +103,24 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): class _IndividualSpecifier(BaseSpecifier): - _operators = {} + _operators = {} # type: Dict[str, str] def __init__(self, spec="", prereleases=None): + # type: (str, Optional[bool]) -> None match = self._regex.search(spec) if not match: raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) - self._spec = (match.group("operator").strip(), match.group("version").strip()) + self._spec = ( + match.group("operator").strip(), + match.group("version").strip(), + ) # type: Tuple[str, str] # Store whether or not this Specifier should accept prereleases self._prereleases = prereleases def __repr__(self): + # type: () -> str pre = ( ", prereleases={0!r}".format(self.prereleases) if self._prereleases is not None @@ -98,26 +130,35 @@ class _IndividualSpecifier(BaseSpecifier): return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre) def __str__(self): + # type: () -> str return "{0}{1}".format(*self._spec) + @property + def _canonical_spec(self): + # type: () -> Tuple[str, Union[Version, str]] + return self._spec[0], canonicalize_version(self._spec[1]) + def __hash__(self): - return hash(self._spec) + # type: () -> int + return hash(self._canonical_spec) def __eq__(self, other): + # type: (object) -> bool if isinstance(other, string_types): try: - other = self.__class__(other) + other = self.__class__(str(other)) except InvalidSpecifier: return NotImplemented elif not isinstance(other, self.__class__): return NotImplemented - return self._spec == other._spec + return self._canonical_spec == other._canonical_spec def __ne__(self, other): + # type: (object) -> bool if isinstance(other, string_types): try: - other = self.__class__(other) + other = self.__class__(str(other)) except InvalidSpecifier: return NotImplemented elif not isinstance(other, self.__class__): @@ -126,52 +167,67 @@ class _IndividualSpecifier(BaseSpecifier): return self._spec != other._spec def 
_get_operator(self, op): - return getattr(self, "_compare_{0}".format(self._operators[op])) + # type: (str) -> CallableOperator + operator_callable = getattr( + self, "_compare_{0}".format(self._operators[op]) + ) # type: CallableOperator + return operator_callable def _coerce_version(self, version): + # type: (UnparsedVersion) -> ParsedVersion if not isinstance(version, (LegacyVersion, Version)): version = parse(version) return version @property def operator(self): + # type: () -> str return self._spec[0] @property def version(self): + # type: () -> str return self._spec[1] @property def prereleases(self): + # type: () -> Optional[bool] return self._prereleases @prereleases.setter def prereleases(self, value): + # type: (bool) -> None self._prereleases = value def __contains__(self, item): + # type: (str) -> bool return self.contains(item) def contains(self, item, prereleases=None): + # type: (UnparsedVersion, Optional[bool]) -> bool + # Determine if prereleases are to be allowed or not. if prereleases is None: prereleases = self.prereleases # Normalize item to a Version or LegacyVersion, this allows us to have # a shortcut for ``"2.0" in Specifier(">=2") - item = self._coerce_version(item) + normalized_item = self._coerce_version(item) # Determine if we should be supporting prereleases in this specifier # or not, if we do not support prereleases than we can short circuit # logic if this version is a prereleases. - if item.is_prerelease and not prereleases: + if normalized_item.is_prerelease and not prereleases: return False # Actually do the comparison to determine if this item is contained # within this Specifier or not. 
- return self._get_operator(self.operator)(item, self.version) + operator_callable = self._get_operator(self.operator) # type: CallableOperator + return operator_callable(normalized_item, self.version) def filter(self, iterable, prereleases=None): + # type: (Iterable[UnparsedVersion], Optional[bool]) -> Iterable[UnparsedVersion] + yielded = False found_prereleases = [] @@ -230,32 +286,43 @@ class LegacySpecifier(_IndividualSpecifier): } def _coerce_version(self, version): + # type: (Union[ParsedVersion, str]) -> LegacyVersion if not isinstance(version, LegacyVersion): version = LegacyVersion(str(version)) return version def _compare_equal(self, prospective, spec): + # type: (LegacyVersion, str) -> bool return prospective == self._coerce_version(spec) def _compare_not_equal(self, prospective, spec): + # type: (LegacyVersion, str) -> bool return prospective != self._coerce_version(spec) def _compare_less_than_equal(self, prospective, spec): + # type: (LegacyVersion, str) -> bool return prospective <= self._coerce_version(spec) def _compare_greater_than_equal(self, prospective, spec): + # type: (LegacyVersion, str) -> bool return prospective >= self._coerce_version(spec) def _compare_less_than(self, prospective, spec): + # type: (LegacyVersion, str) -> bool return prospective < self._coerce_version(spec) def _compare_greater_than(self, prospective, spec): + # type: (LegacyVersion, str) -> bool return prospective > self._coerce_version(spec) -def _require_version_compare(fn): +def _require_version_compare( + fn # type: (Callable[[Specifier, ParsedVersion, str], bool]) +): + # type: (...) 
-> Callable[[Specifier, ParsedVersion, str], bool] @functools.wraps(fn) def wrapped(self, prospective, spec): + # type: (Specifier, ParsedVersion, str) -> bool if not isinstance(prospective, Version): return False return fn(self, prospective, spec) @@ -373,6 +440,8 @@ class Specifier(_IndividualSpecifier): @_require_version_compare def _compare_compatible(self, prospective, spec): + # type: (ParsedVersion, str) -> bool + # Compatible releases have an equivalent combination of >= and ==. That # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to # implement this in terms of the other specifiers instead of @@ -400,56 +469,75 @@ class Specifier(_IndividualSpecifier): @_require_version_compare def _compare_equal(self, prospective, spec): + # type: (ParsedVersion, str) -> bool + # We need special logic to handle prefix matching if spec.endswith(".*"): # In the case of prefix matching we want to ignore local segment. prospective = Version(prospective.public) # Split the spec out by dots, and pretend that there is an implicit # dot in between a release segment and a pre-release segment. - spec = _version_split(spec[:-2]) # Remove the trailing .* + split_spec = _version_split(spec[:-2]) # Remove the trailing .* # Split the prospective version out by dots, and pretend that there # is an implicit dot in between a release segment and a pre-release # segment. - prospective = _version_split(str(prospective)) + split_prospective = _version_split(str(prospective)) # Shorten the prospective version to be the same length as the spec # so that we can determine if the specifier is a prefix of the # prospective version or not. - prospective = prospective[: len(spec)] + shortened_prospective = split_prospective[: len(split_spec)] # Pad out our two sides with zeros so that they both equal the same # length. 
- spec, prospective = _pad_version(spec, prospective) + padded_spec, padded_prospective = _pad_version( + split_spec, shortened_prospective + ) + + return padded_prospective == padded_spec else: # Convert our spec string into a Version - spec = Version(spec) + spec_version = Version(spec) # If the specifier does not have a local segment, then we want to # act as if the prospective version also does not have a local # segment. - if not spec.local: + if not spec_version.local: prospective = Version(prospective.public) - return prospective == spec + return prospective == spec_version @_require_version_compare def _compare_not_equal(self, prospective, spec): + # type: (ParsedVersion, str) -> bool return not self._compare_equal(prospective, spec) @_require_version_compare def _compare_less_than_equal(self, prospective, spec): - return prospective <= Version(spec) + # type: (ParsedVersion, str) -> bool + + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. + return Version(prospective.public) <= Version(spec) @_require_version_compare def _compare_greater_than_equal(self, prospective, spec): - return prospective >= Version(spec) + # type: (ParsedVersion, str) -> bool + + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. + return Version(prospective.public) >= Version(spec) @_require_version_compare - def _compare_less_than(self, prospective, spec): + def _compare_less_than(self, prospective, spec_str): + # type: (ParsedVersion, str) -> bool + # Convert our spec to a Version instance, since we'll want to work with # it as a version. - spec = Version(spec) + spec = Version(spec_str) # Check to see if the prospective version is less than the spec # version. 
If it's not we can short circuit and just return False now @@ -471,10 +559,12 @@ class Specifier(_IndividualSpecifier): return True @_require_version_compare - def _compare_greater_than(self, prospective, spec): + def _compare_greater_than(self, prospective, spec_str): + # type: (ParsedVersion, str) -> bool + # Convert our spec to a Version instance, since we'll want to work with # it as a version. - spec = Version(spec) + spec = Version(spec_str) # Check to see if the prospective version is greater than the spec # version. If it's not we can short circuit and just return False now @@ -502,10 +592,13 @@ class Specifier(_IndividualSpecifier): return True def _compare_arbitrary(self, prospective, spec): + # type: (Version, str) -> bool return str(prospective).lower() == str(spec).lower() @property def prereleases(self): + # type: () -> bool + # If there is an explicit prereleases set for this, then we'll just # blindly use that. if self._prereleases is not None: @@ -530,6 +623,7 @@ class Specifier(_IndividualSpecifier): @prereleases.setter def prereleases(self, value): + # type: (bool) -> None self._prereleases = value @@ -537,7 +631,8 @@ _prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") def _version_split(version): - result = [] + # type: (str) -> List[str] + result = [] # type: List[str] for item in version.split("."): match = _prefix_regex.search(item) if match: @@ -548,6 +643,7 @@ def _version_split(version): def _pad_version(left, right): + # type: (List[str], List[str]) -> Tuple[List[str], List[str]] left_split, right_split = [], [] # Get the release segment of our versions @@ -567,14 +663,16 @@ def _pad_version(left, right): class SpecifierSet(BaseSpecifier): def __init__(self, specifiers="", prereleases=None): - # Split on , to break each indidivual specifier into it's own item, and + # type: (str, Optional[bool]) -> None + + # Split on , to break each individual specifier into it's own item, and # strip each item to remove leading/trailing 
whitespace. - specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] + split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] # Parsed each individual specifier, attempting first to make it a # Specifier and falling back to a LegacySpecifier. parsed = set() - for specifier in specifiers: + for specifier in split_specifiers: try: parsed.add(Specifier(specifier)) except InvalidSpecifier: @@ -588,6 +686,7 @@ class SpecifierSet(BaseSpecifier): self._prereleases = prereleases def __repr__(self): + # type: () -> str pre = ( ", prereleases={0!r}".format(self.prereleases) if self._prereleases is not None @@ -597,12 +696,15 @@ class SpecifierSet(BaseSpecifier): return "<SpecifierSet({0!r}{1})>".format(str(self), pre) def __str__(self): + # type: () -> str return ",".join(sorted(str(s) for s in self._specs)) def __hash__(self): + # type: () -> int return hash(self._specs) def __and__(self, other): + # type: (Union[SpecifierSet, str]) -> SpecifierSet if isinstance(other, string_types): other = SpecifierSet(other) elif not isinstance(other, SpecifierSet): @@ -626,9 +728,8 @@ class SpecifierSet(BaseSpecifier): return specifier def __eq__(self, other): - if isinstance(other, string_types): - other = SpecifierSet(other) - elif isinstance(other, _IndividualSpecifier): + # type: (object) -> bool + if isinstance(other, (string_types, _IndividualSpecifier)): other = SpecifierSet(str(other)) elif not isinstance(other, SpecifierSet): return NotImplemented @@ -636,9 +737,8 @@ class SpecifierSet(BaseSpecifier): return self._specs == other._specs def __ne__(self, other): - if isinstance(other, string_types): - other = SpecifierSet(other) - elif isinstance(other, _IndividualSpecifier): + # type: (object) -> bool + if isinstance(other, (string_types, _IndividualSpecifier)): other = SpecifierSet(str(other)) elif not isinstance(other, SpecifierSet): return NotImplemented @@ -646,13 +746,17 @@ class SpecifierSet(BaseSpecifier): return self._specs != 
other._specs def __len__(self): + # type: () -> int return len(self._specs) def __iter__(self): + # type: () -> Iterator[FrozenSet[_IndividualSpecifier]] return iter(self._specs) @property def prereleases(self): + # type: () -> Optional[bool] + # If we have been given an explicit prerelease modifier, then we'll # pass that through here. if self._prereleases is not None: @@ -670,12 +774,16 @@ class SpecifierSet(BaseSpecifier): @prereleases.setter def prereleases(self, value): + # type: (bool) -> None self._prereleases = value def __contains__(self, item): + # type: (Union[ParsedVersion, str]) -> bool return self.contains(item) def contains(self, item, prereleases=None): + # type: (Union[ParsedVersion, str], Optional[bool]) -> bool + # Ensure that our item is a Version or LegacyVersion instance. if not isinstance(item, (LegacyVersion, Version)): item = parse(item) @@ -701,7 +809,13 @@ class SpecifierSet(BaseSpecifier): # will always return True, this is an explicit design decision. return all(s.contains(item, prereleases=prereleases) for s in self._specs) - def filter(self, iterable, prereleases=None): + def filter( + self, + iterable, # type: Iterable[Union[ParsedVersion, str]] + prereleases=None, # type: Optional[bool] + ): + # type: (...) -> Iterable[Union[ParsedVersion, str]] + # Determine if we're forcing a prerelease or not, if we're not forcing # one for this particular filter call, then we'll use whatever the # SpecifierSet thinks for whether or not we should support prereleases. @@ -719,8 +833,8 @@ class SpecifierSet(BaseSpecifier): # which will filter out any pre-releases, unless there are no final # releases, and which will filter out LegacyVersion in general. else: - filtered = [] - found_prereleases = [] + filtered = [] # type: List[Union[ParsedVersion, str]] + found_prereleases = [] # type: List[Union[ParsedVersion, str]] for item in iterable: # Ensure that we some kind of Version class for this item. 
diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/tags.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/tags.py new file mode 100644 index 00000000..9064910b --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/tags.py @@ -0,0 +1,751 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import + +import distutils.util + +try: + from importlib.machinery import EXTENSION_SUFFIXES +except ImportError: # pragma: no cover + import imp + + EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()] + del imp +import logging +import os +import platform +import re +import struct +import sys +import sysconfig +import warnings + +from ._typing import TYPE_CHECKING, cast + +if TYPE_CHECKING: # pragma: no cover + from typing import ( + Dict, + FrozenSet, + IO, + Iterable, + Iterator, + List, + Optional, + Sequence, + Tuple, + Union, + ) + + PythonVersion = Sequence[int] + MacVersion = Tuple[int, int] + GlibcVersion = Tuple[int, int] + + +logger = logging.getLogger(__name__) + +INTERPRETER_SHORT_NAMES = { + "python": "py", # Generic. + "cpython": "cp", + "pypy": "pp", + "ironpython": "ip", + "jython": "jy", +} # type: Dict[str, str] + + +_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32 + + +class Tag(object): + """ + A representation of the tag triple for a wheel. + + Instances are considered immutable and thus are hashable. Equality checking + is also supported. 
+ """ + + __slots__ = ["_interpreter", "_abi", "_platform"] + + def __init__(self, interpreter, abi, platform): + # type: (str, str, str) -> None + self._interpreter = interpreter.lower() + self._abi = abi.lower() + self._platform = platform.lower() + + @property + def interpreter(self): + # type: () -> str + return self._interpreter + + @property + def abi(self): + # type: () -> str + return self._abi + + @property + def platform(self): + # type: () -> str + return self._platform + + def __eq__(self, other): + # type: (object) -> bool + if not isinstance(other, Tag): + return NotImplemented + + return ( + (self.platform == other.platform) + and (self.abi == other.abi) + and (self.interpreter == other.interpreter) + ) + + def __hash__(self): + # type: () -> int + return hash((self._interpreter, self._abi, self._platform)) + + def __str__(self): + # type: () -> str + return "{}-{}-{}".format(self._interpreter, self._abi, self._platform) + + def __repr__(self): + # type: () -> str + return "<{self} @ {self_id}>".format(self=self, self_id=id(self)) + + +def parse_tag(tag): + # type: (str) -> FrozenSet[Tag] + """ + Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. + + Returning a set is required due to the possibility that the tag is a + compressed tag set. + """ + tags = set() + interpreters, abis, platforms = tag.split("-") + for interpreter in interpreters.split("."): + for abi in abis.split("."): + for platform_ in platforms.split("."): + tags.add(Tag(interpreter, abi, platform_)) + return frozenset(tags) + + +def _warn_keyword_parameter(func_name, kwargs): + # type: (str, Dict[str, bool]) -> bool + """ + Backwards-compatibility with Python 2.7 to allow treating 'warn' as keyword-only. 
+ """ + if not kwargs: + return False + elif len(kwargs) > 1 or "warn" not in kwargs: + kwargs.pop("warn", None) + arg = next(iter(kwargs.keys())) + raise TypeError( + "{}() got an unexpected keyword argument {!r}".format(func_name, arg) + ) + return kwargs["warn"] + + +def _get_config_var(name, warn=False): + # type: (str, bool) -> Union[int, str, None] + value = sysconfig.get_config_var(name) + if value is None and warn: + logger.debug( + "Config variable '%s' is unset, Python ABI tag may be incorrect", name + ) + return value + + +def _normalize_string(string): + # type: (str) -> str + return string.replace(".", "_").replace("-", "_") + + +def _abi3_applies(python_version): + # type: (PythonVersion) -> bool + """ + Determine if the Python version supports abi3. + + PEP 384 was first implemented in Python 3.2. + """ + return len(python_version) > 1 and tuple(python_version) >= (3, 2) + + +def _cpython_abis(py_version, warn=False): + # type: (PythonVersion, bool) -> List[str] + py_version = tuple(py_version) # To allow for version comparison. + abis = [] + version = _version_nodot(py_version[:2]) + debug = pymalloc = ucs4 = "" + with_debug = _get_config_var("Py_DEBUG", warn) + has_refcount = hasattr(sys, "gettotalrefcount") + # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled + # extension modules is the best option. + # https://github.com/pypa/pip/issues/3383#issuecomment-173267692 + has_ext = "_d.pyd" in EXTENSION_SUFFIXES + if with_debug or (with_debug is None and (has_refcount or has_ext)): + debug = "d" + if py_version < (3, 8): + with_pymalloc = _get_config_var("WITH_PYMALLOC", warn) + if with_pymalloc or with_pymalloc is None: + pymalloc = "m" + if py_version < (3, 3): + unicode_size = _get_config_var("Py_UNICODE_SIZE", warn) + if unicode_size == 4 or ( + unicode_size is None and sys.maxunicode == 0x10FFFF + ): + ucs4 = "u" + elif debug: + # Debug builds can also load "normal" extension modules. 
+ # We can also assume no UCS-4 or pymalloc requirement. + abis.append("cp{version}".format(version=version)) + abis.insert( + 0, + "cp{version}{debug}{pymalloc}{ucs4}".format( + version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4 + ), + ) + return abis + + +def cpython_tags( + python_version=None, # type: Optional[PythonVersion] + abis=None, # type: Optional[Iterable[str]] + platforms=None, # type: Optional[Iterable[str]] + **kwargs # type: bool +): + # type: (...) -> Iterator[Tag] + """ + Yields the tags for a CPython interpreter. + + The tags consist of: + - cp<python_version>-<abi>-<platform> + - cp<python_version>-abi3-<platform> + - cp<python_version>-none-<platform> + - cp<less than python_version>-abi3-<platform> # Older Python versions down to 3.2. + + If python_version only specifies a major version then user-provided ABIs and + the 'none' ABItag will be used. + + If 'abi3' or 'none' are specified in 'abis' then they will be yielded at + their normal position and not at the beginning. + """ + warn = _warn_keyword_parameter("cpython_tags", kwargs) + if not python_version: + python_version = sys.version_info[:2] + + interpreter = "cp{}".format(_version_nodot(python_version[:2])) + + if abis is None: + if len(python_version) > 1: + abis = _cpython_abis(python_version, warn) + else: + abis = [] + abis = list(abis) + # 'abi3' and 'none' are explicitly handled later. 
+ for explicit_abi in ("abi3", "none"): + try: + abis.remove(explicit_abi) + except ValueError: + pass + + platforms = list(platforms or _platform_tags()) + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + if _abi3_applies(python_version): + for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms): + yield tag + for tag in (Tag(interpreter, "none", platform_) for platform_ in platforms): + yield tag + + if _abi3_applies(python_version): + for minor_version in range(python_version[1] - 1, 1, -1): + for platform_ in platforms: + interpreter = "cp{version}".format( + version=_version_nodot((python_version[0], minor_version)) + ) + yield Tag(interpreter, "abi3", platform_) + + +def _generic_abi(): + # type: () -> Iterator[str] + abi = sysconfig.get_config_var("SOABI") + if abi: + yield _normalize_string(abi) + + +def generic_tags( + interpreter=None, # type: Optional[str] + abis=None, # type: Optional[Iterable[str]] + platforms=None, # type: Optional[Iterable[str]] + **kwargs # type: bool +): + # type: (...) -> Iterator[Tag] + """ + Yields the tags for a generic interpreter. + + The tags consist of: + - <interpreter>-<abi>-<platform> + + The "none" ABI will be added if it was not explicitly provided. + """ + warn = _warn_keyword_parameter("generic_tags", kwargs) + if not interpreter: + interp_name = interpreter_name() + interp_version = interpreter_version(warn=warn) + interpreter = "".join([interp_name, interp_version]) + if abis is None: + abis = _generic_abi() + platforms = list(platforms or _platform_tags()) + abis = list(abis) + if "none" not in abis: + abis.append("none") + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + + +def _py_interpreter_range(py_version): + # type: (PythonVersion) -> Iterator[str] + """ + Yields Python versions in descending order. 
def compatible_tags(
    python_version=None,  # type: Optional[PythonVersion]
    interpreter=None,  # type: Optional[str]
    platforms=None,  # type: Optional[Iterable[str]]
):
    # type: (...) -> Iterator[Tag]
    """Yield the tags compatible with a specific version of Python.

    The tags consist of:
    - py*-none-<platform>
    - <interpreter>-none-any  # ... if `interpreter` is provided.
    - py*-none-any
    """
    if not python_version:
        python_version = sys.version_info[:2]
    platforms = list(platforms or _platform_tags())
    for version in _py_interpreter_range(python_version):
        for platform_ in platforms:
            yield Tag(version, "none", platform_)
    if interpreter:
        yield Tag(interpreter, "none", "any")
    for version in _py_interpreter_range(python_version):
        yield Tag(version, "none", "any")


def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER):
    # type: (str, bool) -> str
    """Map the reported CPU arch to the effective one for a 32-bit interpreter."""
    if not is_32bit:
        return arch

    if arch.startswith("ppc"):
        return "ppc"

    return "i386"


def _mac_binary_formats(version, cpu_arch):
    # type: (MacVersion, str) -> List[str]
    """Return the binary formats (arch plus fat/universal variants) that a
    wheel built for ``cpu_arch`` may use on macOS ``version``.

    An empty list means the architecture is not available on that version.
    """
    formats = [cpu_arch]
    if cpu_arch == "x86_64":
        if version < (10, 4):
            return []
        formats.extend(["intel", "fat64", "fat32"])

    elif cpu_arch == "i386":
        if version < (10, 4):
            return []
        formats.extend(["intel", "fat32", "fat"])

    elif cpu_arch == "ppc64":
        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
        if version > (10, 5) or version < (10, 4):
            return []
        formats.append("fat64")

    elif cpu_arch == "ppc":
        if version > (10, 6):
            return []
        formats.extend(["fat32", "fat"])

    formats.append("universal")
    return formats


def mac_platforms(version=None, arch=None):
    # type: (Optional[MacVersion], Optional[str]) -> Iterator[str]
    """Yield the platform tags for a macOS system, newest minor version first.

    The `version` parameter is a two-item tuple specifying the macOS version
    to generate platform tags for. The `arch` parameter is the CPU
    architecture to generate platform tags for. Both parameters default to
    the appropriate value for the current system.
    """
    version_str, _, cpu_arch = platform.mac_ver()  # type: ignore
    # Fix: the original had no-op `else: version = version` / `else: arch = arch`
    # branches; only the defaulting branches are needed.
    if version is None:
        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
    if arch is None:
        arch = _mac_arch(cpu_arch)
    for minor_version in range(version[1], -1, -1):
        compat_version = version[0], minor_version
        binary_formats = _mac_binary_formats(compat_version, arch)
        for binary_format in binary_formats:
            yield "macosx_{major}_{minor}_{binary_format}".format(
                major=compat_version[0],
                minor=compat_version[1],
                binary_format=binary_format,
            )


# From PEP 513.
def _is_manylinux_compatible(name, glibc_version):
    # type: (str, GlibcVersion) -> bool
    """Whether this system may use ``name`` (e.g. "manylinux2010") wheels.

    A `_manylinux` module, when importable, is authoritative; otherwise fall
    back to the glibc version heuristic.
    """
    try:
        import _manylinux  # noqa

        return bool(getattr(_manylinux, name + "_compatible"))
    except (ImportError, AttributeError):
        # Fall through to heuristic check below.
        pass

    return _have_compatible_glibc(*glibc_version)
def _glibc_version_string_confstr():
    # type: () -> Optional[str]
    """Primary glibc detection via ``os.confstr("CS_GNU_LIBC_VERSION")``.

    Quite a bit faster than the ctypes fallback and less likely to be broken
    or missing; the stdlib platform module uses the same strategy.
    https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183
    """
    try:
        # The value looks like "glibc 2.17"; keep only the version part.
        raw = os.confstr(  # type: ignore[attr-defined] # noqa: F821
            "CS_GNU_LIBC_VERSION"
        )
        assert raw is not None
        _, version = raw.split()  # type: Tuple[str, str]
    except (AssertionError, AttributeError, OSError, ValueError):
        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value).
        return None
    return version


def _glibc_version_string_ctypes():
    # type: () -> Optional[str]
    """Fallback glibc detection using ctypes and ``gnu_get_libc_version``."""
    try:
        import ctypes
    except ImportError:
        return None

    # ctypes.CDLL(None) internally calls dlopen(NULL); per the dlopen manpage
    # that is a handle onto the main program, so the dynamic linker resolves
    # symbols against whichever libc this process actually uses.
    #
    # Note: typeshed is wrong here so we are ignoring this line.
    process_namespace = ctypes.CDLL(None)  # type: ignore
    try:
        gnu_get_libc_version = process_namespace.gnu_get_libc_version
    except AttributeError:
        # The symbol is absent, therefore we are not linked against glibc.
        return None

    # gnu_get_libc_version returns a C string like "2.5".
    gnu_get_libc_version.restype = ctypes.c_char_p
    version_str = gnu_get_libc_version()  # type: str
    if not isinstance(version_str, str):
        # py2 returns str already; py3 hands back bytes.
        version_str = version_str.decode("ascii")
    return version_str


# Separated out from have_compatible_glibc for easier unit testing.
def _check_glibc_version(version_str, required_major, minimum_minor):
    # type: (str, int, int) -> bool
    """True when ``version_str`` is glibc ``required_major`` with a minor
    version of at least ``minimum_minor``.

    A regexp (rather than str.split) discards any trailing vendor junk, as
    seen in patched/forked glibcs such as Linaro's "2.20-2014.11" (gh-3588).
    """
    match = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
    if match is None:
        warnings.warn(
            "Expected glibc version with 2 components major.minor,"
            " got: %s" % version_str,
            RuntimeWarning,
        )
        return False
    return (
        int(match.group("major")) == required_major
        and int(match.group("minor")) >= minimum_minor
    )


def _have_compatible_glibc(required_major, minimum_minor):
    # type: (int, int) -> bool
    """Whether the runtime glibc satisfies the (major, minor) requirement."""
    version_str = _glibc_version_string()
    return version_str is not None and _check_glibc_version(
        version_str, required_major, minimum_minor
    )


# Python does not provide platform information at sufficient granularity to
# identify the architecture of the running executable in some cases, so we
# determine it dynamically by reading the information from the running
# process. This only applies on Linux, which uses the ELF format.
class _ELFFileHeader(object):
    """Parsed ELF file header of the running interpreter binary.

    Layout reference:
    https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
    """

    class _InvalidELFFileHeader(ValueError):
        """
        An invalid ELF file header was found.
        """

    ELF_MAGIC_NUMBER = 0x7F454C46
    ELFCLASS32 = 1
    ELFCLASS64 = 2
    ELFDATA2LSB = 1
    ELFDATA2MSB = 2
    EM_386 = 3
    EM_S390 = 22
    EM_ARM = 40
    EM_X86_64 = 62
    EF_ARM_ABIMASK = 0xFF000000
    EF_ARM_ABI_VER5 = 0x05000000
    EF_ARM_ABI_FLOAT_HARD = 0x00000400

    def __init__(self, file):
        # type: (IO[bytes]) -> None
        def unpack(fmt):
            # type: (str) -> int
            # Read exactly one field; a short read surfaces as struct.error.
            try:
                (result,) = struct.unpack(
                    fmt, file.read(struct.calcsize(fmt))
                )  # type: (int, )
            except struct.error:
                raise _ELFFileHeader._InvalidELFFileHeader()
            return result

        self.e_ident_magic = unpack(">I")
        if self.e_ident_magic != self.ELF_MAGIC_NUMBER:
            raise _ELFFileHeader._InvalidELFFileHeader()
        self.e_ident_class = unpack("B")
        if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}:
            raise _ELFFileHeader._InvalidELFFileHeader()
        self.e_ident_data = unpack("B")
        if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}:
            raise _ELFFileHeader._InvalidELFFileHeader()
        self.e_ident_version = unpack("B")
        self.e_ident_osabi = unpack("B")
        self.e_ident_abiversion = unpack("B")
        self.e_ident_pad = file.read(7)
        # Field widths past e_ident depend on the declared class/endianness.
        little_endian = self.e_ident_data == self.ELFDATA2LSB
        format_h = "<H" if little_endian else ">H"
        format_i = "<I" if little_endian else ">I"
        format_q = "<Q" if little_endian else ">Q"
        format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q
        self.e_type = unpack(format_h)
        self.e_machine = unpack(format_h)
        self.e_version = unpack(format_i)
        self.e_entry = unpack(format_p)
        self.e_phoff = unpack(format_p)
        self.e_shoff = unpack(format_p)
        self.e_flags = unpack(format_i)
        self.e_ehsize = unpack(format_h)
        self.e_phentsize = unpack(format_h)
        self.e_phnum = unpack(format_h)
        self.e_shentsize = unpack(format_h)
        self.e_shnum = unpack(format_h)
        self.e_shstrndx = unpack(format_h)
def _is_linux_armhf():
    # type: () -> bool
    """Detect the hard-float ARM ABI from the running interpreter's ELF header.

    https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
    """
    elf_header = _get_elf_header()
    if elf_header is None:
        return False
    # Must be 32-bit little-endian ARM, EABI v5, with the hard-float flag set.
    if elf_header.e_ident_class != elf_header.ELFCLASS32:
        return False
    if elf_header.e_ident_data != elf_header.ELFDATA2LSB:
        return False
    if elf_header.e_machine != elf_header.EM_ARM:
        return False
    if (elf_header.e_flags & elf_header.EF_ARM_ABIMASK) != elf_header.EF_ARM_ABI_VER5:
        return False
    return (
        elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD
    ) == elf_header.EF_ARM_ABI_FLOAT_HARD


def _is_linux_i686():
    # type: () -> bool
    """Detect a 32-bit x86 interpreter from its ELF header."""
    elf_header = _get_elf_header()
    if elf_header is None:
        return False
    return (
        elf_header.e_ident_class == elf_header.ELFCLASS32
        and elf_header.e_ident_data == elf_header.ELFDATA2LSB
        and elf_header.e_machine == elf_header.EM_386
    )


def _have_compatible_manylinux_abi(arch):
    # type: (str) -> bool
    """Whether the interpreter's ABI matches what manylinux assumes for arch."""
    if arch == "armv7l":
        return _is_linux_armhf()
    if arch == "i686":
        return _is_linux_i686()
    # Remaining architectures have no extra ABI variant to distinguish.
    return True
("manylinux1", (2, 5)) + ) # CentOS 5 w/ glibc 2.5 (PEP 513) + manylinux_support_iter = iter(manylinux_support) + for name, glibc_version in manylinux_support_iter: + if _is_manylinux_compatible(name, glibc_version): + yield linux.replace("linux", name) + break + # Support for a later manylinux implies support for an earlier version. + for name, _ in manylinux_support_iter: + yield linux.replace("linux", name) + yield linux + + +def _generic_platforms(): + # type: () -> Iterator[str] + yield _normalize_string(distutils.util.get_platform()) + + +def _platform_tags(): + # type: () -> Iterator[str] + """ + Provides the platform tags for this installation. + """ + if platform.system() == "Darwin": + return mac_platforms() + elif platform.system() == "Linux": + return _linux_platforms() + else: + return _generic_platforms() + + +def interpreter_name(): + # type: () -> str + """ + Returns the name of the running interpreter. + """ + try: + name = sys.implementation.name # type: ignore + except AttributeError: # pragma: no cover + # Python 2.7 compatibility. + name = platform.python_implementation().lower() + return INTERPRETER_SHORT_NAMES.get(name) or name + + +def interpreter_version(**kwargs): + # type: (bool) -> str + """ + Returns the version of the running interpreter. + """ + warn = _warn_keyword_parameter("interpreter_version", kwargs) + version = _get_config_var("py_version_nodot", warn=warn) + if version: + version = str(version) + else: + version = _version_nodot(sys.version_info[:2]) + return version + + +def _version_nodot(version): + # type: (PythonVersion) -> str + if any(v >= 10 for v in version): + sep = "_" + else: + sep = "" + return sep.join(map(str, version)) + + +def sys_tags(**kwargs): + # type: (bool) -> Iterator[Tag] + """ + Returns the sequence of tag triples for the running interpreter. + + The order of the sequence corresponds to priority order for the + interpreter, from most to least important. 
+ """ + warn = _warn_keyword_parameter("sys_tags", kwargs) + + interp_name = interpreter_name() + if interp_name == "cp": + for tag in cpython_tags(warn=warn): + yield tag + else: + for tag in generic_tags(): + yield tag + + for tag in compatible_tags(): + yield tag diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/utils.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/utils.py index 88418786..19579c1a 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/utils.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/utils.py @@ -5,28 +5,36 @@ from __future__ import absolute_import, division, print_function import re +from ._typing import TYPE_CHECKING, cast from .version import InvalidVersion, Version +if TYPE_CHECKING: # pragma: no cover + from typing import NewType, Union + + NormalizedName = NewType("NormalizedName", str) _canonicalize_regex = re.compile(r"[-_.]+") def canonicalize_name(name): + # type: (str) -> NormalizedName # This is taken from PEP 503. - return _canonicalize_regex.sub("-", name).lower() + value = _canonicalize_regex.sub("-", name).lower() + return cast("NormalizedName", value) -def canonicalize_version(version): +def canonicalize_version(_version): + # type: (str) -> Union[Version, str] """ - This is very similar to Version.__str__, but has one subtle differences + This is very similar to Version.__str__, but has one subtle difference with the way it handles the release segment. 
""" try: - version = Version(version) + version = Version(_version) except InvalidVersion: # Legacy versions cannot be normalized - return version + return _version parts = [] diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/version.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/version.py index 95157a1f..00371e86 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/version.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/packaging/version.py @@ -7,8 +7,35 @@ import collections import itertools import re -from ._structures import Infinity - +from ._structures import Infinity, NegativeInfinity +from ._typing import TYPE_CHECKING + +if TYPE_CHECKING: # pragma: no cover + from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union + + from ._structures import InfinityType, NegativeInfinityType + + InfiniteTypes = Union[InfinityType, NegativeInfinityType] + PrePostDevType = Union[InfiniteTypes, Tuple[str, int]] + SubLocalType = Union[InfiniteTypes, int, str] + LocalType = Union[ + NegativeInfinityType, + Tuple[ + Union[ + SubLocalType, + Tuple[SubLocalType, str], + Tuple[NegativeInfinityType, SubLocalType], + ], + ..., + ], + ] + CmpKey = Tuple[ + int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType + ] + LegacyCmpKey = Tuple[int, Tuple[str, ...]] + VersionComparisonMethod = Callable[ + [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool + ] __all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"] @@ -19,6 +46,7 @@ _Version = collections.namedtuple( def parse(version): + # type: (str) -> Union[LegacyVersion, Version] """ Parse the given version string and return either a :class:`Version` object or a :class:`LegacyVersion` object depending on if the given version is @@ -37,28 +65,38 @@ class InvalidVersion(ValueError): class _BaseVersion(object): + _key = 
None # type: Union[CmpKey, LegacyCmpKey] + def __hash__(self): + # type: () -> int return hash(self._key) def __lt__(self, other): + # type: (_BaseVersion) -> bool return self._compare(other, lambda s, o: s < o) def __le__(self, other): + # type: (_BaseVersion) -> bool return self._compare(other, lambda s, o: s <= o) def __eq__(self, other): + # type: (object) -> bool return self._compare(other, lambda s, o: s == o) def __ge__(self, other): + # type: (_BaseVersion) -> bool return self._compare(other, lambda s, o: s >= o) def __gt__(self, other): + # type: (_BaseVersion) -> bool return self._compare(other, lambda s, o: s > o) def __ne__(self, other): + # type: (object) -> bool return self._compare(other, lambda s, o: s != o) def _compare(self, other, method): + # type: (object, VersionComparisonMethod) -> Union[bool, NotImplemented] if not isinstance(other, _BaseVersion): return NotImplemented @@ -67,57 +105,71 @@ class _BaseVersion(object): class LegacyVersion(_BaseVersion): def __init__(self, version): + # type: (str) -> None self._version = str(version) self._key = _legacy_cmpkey(self._version) def __str__(self): + # type: () -> str return self._version def __repr__(self): + # type: () -> str return "<LegacyVersion({0})>".format(repr(str(self))) @property def public(self): + # type: () -> str return self._version @property def base_version(self): + # type: () -> str return self._version @property def epoch(self): + # type: () -> int return -1 @property def release(self): + # type: () -> None return None @property def pre(self): + # type: () -> None return None @property def post(self): + # type: () -> None return None @property def dev(self): + # type: () -> None return None @property def local(self): + # type: () -> None return None @property def is_prerelease(self): + # type: () -> bool return False @property def is_postrelease(self): + # type: () -> bool return False @property def is_devrelease(self): + # type: () -> bool return False @@ -133,6 +185,7 @@ 
_legacy_version_replacement_map = { def _parse_version_parts(s): + # type: (str) -> Iterator[str] for part in _legacy_version_component_re.split(s): part = _legacy_version_replacement_map.get(part, part) @@ -150,6 +203,8 @@ def _parse_version_parts(s): def _legacy_cmpkey(version): + # type: (str) -> LegacyCmpKey + # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch # greater than or equal to 0. This will effectively put the LegacyVersion, # which uses the defacto standard originally implemented by setuptools, @@ -158,7 +213,7 @@ def _legacy_cmpkey(version): # This scheme is taken from pkg_resources.parse_version setuptools prior to # it's adoption of the packaging library. - parts = [] + parts = [] # type: List[str] for part in _parse_version_parts(version.lower()): if part.startswith("*"): # remove "-" before a prerelease tag @@ -171,9 +226,8 @@ def _legacy_cmpkey(version): parts.pop() parts.append(part) - parts = tuple(parts) - return epoch, parts + return epoch, tuple(parts) # Deliberately not anchored to the start and end of the string, to make it @@ -215,6 +269,8 @@ class Version(_BaseVersion): _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE) def __init__(self, version): + # type: (str) -> None + # Validate the version and parse it into pieces match = self._regex.search(version) if not match: @@ -243,9 +299,11 @@ class Version(_BaseVersion): ) def __repr__(self): + # type: () -> str return "<Version({0})>".format(repr(str(self))) def __str__(self): + # type: () -> str parts = [] # Epoch @@ -275,26 +333,35 @@ class Version(_BaseVersion): @property def epoch(self): - return self._version.epoch + # type: () -> int + _epoch = self._version.epoch # type: int + return _epoch @property def release(self): - return self._version.release + # type: () -> Tuple[int, ...] + _release = self._version.release # type: Tuple[int, ...] 
+ return _release @property def pre(self): - return self._version.pre + # type: () -> Optional[Tuple[str, int]] + _pre = self._version.pre # type: Optional[Tuple[str, int]] + return _pre @property def post(self): + # type: () -> Optional[Tuple[str, int]] return self._version.post[1] if self._version.post else None @property def dev(self): + # type: () -> Optional[Tuple[str, int]] return self._version.dev[1] if self._version.dev else None @property def local(self): + # type: () -> Optional[str] if self._version.local: return ".".join(str(x) for x in self._version.local) else: @@ -302,10 +369,12 @@ class Version(_BaseVersion): @property def public(self): + # type: () -> str return str(self).split("+", 1)[0] @property def base_version(self): + # type: () -> str parts = [] # Epoch @@ -319,18 +388,41 @@ class Version(_BaseVersion): @property def is_prerelease(self): + # type: () -> bool return self.dev is not None or self.pre is not None @property def is_postrelease(self): + # type: () -> bool return self.post is not None @property def is_devrelease(self): + # type: () -> bool return self.dev is not None + @property + def major(self): + # type: () -> int + return self.release[0] if len(self.release) >= 1 else 0 + + @property + def minor(self): + # type: () -> int + return self.release[1] if len(self.release) >= 2 else 0 + + @property + def micro(self): + # type: () -> int + return self.release[2] if len(self.release) >= 3 else 0 + + +def _parse_letter_version( + letter, # type: str + number, # type: Union[str, bytes, SupportsInt] +): + # type: (...) -> Optional[Tuple[str, int]] -def _parse_letter_version(letter, number): if letter: # We consider there to be an implicit 0 in a pre-release if there is # not a numeral associated with it. 
@@ -360,11 +452,14 @@ def _parse_letter_version(letter, number): return letter, int(number) + return None + _local_version_separators = re.compile(r"[\._-]") def _parse_local_version(local): + # type: (str) -> Optional[LocalType] """ Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve"). """ @@ -373,15 +468,25 @@ def _parse_local_version(local): part.lower() if not part.isdigit() else int(part) for part in _local_version_separators.split(local) ) + return None + +def _cmpkey( + epoch, # type: int + release, # type: Tuple[int, ...] + pre, # type: Optional[Tuple[str, int]] + post, # type: Optional[Tuple[str, int]] + dev, # type: Optional[Tuple[str, int]] + local, # type: Optional[Tuple[SubLocalType]] +): + # type: (...) -> CmpKey -def _cmpkey(epoch, release, pre, post, dev, local): # When we compare a release version, we want to compare it with all of the # trailing zeros removed. So we'll use a reverse the list, drop all the now # leading zeros until we come to something non zero, then take the rest # re-reverse it back into the correct order and make it a tuple and use # that for our sorting key. - release = tuple( + _release = tuple( reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release)))) ) @@ -390,23 +495,31 @@ def _cmpkey(epoch, release, pre, post, dev, local): # if there is not a pre or a post segment. If we have one of those then # the normal sorting rules will handle this case correctly. if pre is None and post is None and dev is not None: - pre = -Infinity + _pre = NegativeInfinity # type: PrePostDevType # Versions without a pre-release (except as noted above) should sort after # those with one. elif pre is None: - pre = Infinity + _pre = Infinity + else: + _pre = pre # Versions without a post segment should sort before those with one. if post is None: - post = -Infinity + _post = NegativeInfinity # type: PrePostDevType + + else: + _post = post # Versions without a development segment should sort after those with one. 
if dev is None: - dev = Infinity + _dev = Infinity # type: PrePostDevType + + else: + _dev = dev if local is None: # Versions without a local segment should sort before those with one. - local = -Infinity + _local = NegativeInfinity # type: LocalType else: # Versions with a local segment need that segment parsed to implement # the sorting rules in PEP440. @@ -415,6 +528,8 @@ def _cmpkey(epoch, release, pre, post, dev, local): # - Numeric segments sort numerically # - Shorter versions sort before longer versions when the prefixes # match exactly - local = tuple((i, "") if isinstance(i, int) else (-Infinity, i) for i in local) + _local = tuple( + (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local + ) - return epoch, release, pre, post, dev, local + return epoch, _release, _pre, _post, _dev, _local diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/__init__.py index 9c1a098f..7355b68a 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/__init__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/__init__.py @@ -1,4 +1,4 @@ """Wrappers to build Python packages using PEP 517 hooks """ -__version__ = '0.5.0' +__version__ = '0.8.2' diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/_in_process.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/_in_process.py index d6524b66..a536b03e 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/_in_process.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/_in_process.py @@ -2,7 +2,9 @@ It expects: - Command line args: hook_name, control_dir -- Environment variable: PEP517_BUILD_BACKEND=entry.point:spec +- Environment variables: + PEP517_BUILD_BACKEND=entry.point:spec + PEP517_BACKEND_PATH=paths (separated with 
# This file is run as a script, and `import compat` is not zip-safe, so we
# duplicate write_json() and read_json() from compat.py here.
#
# Handle reading and writing JSON in UTF-8, on Python 3 and 2.

if sys.version_info[0] >= 3:
    # Python 3: the text layer does the UTF-8 encoding for us.
    def write_json(obj, path, **kwargs):
        """Serialize ``obj`` as UTF-8 JSON at ``path``."""
        with open(path, 'w', encoding='utf-8') as f:
            json.dump(obj, f, **kwargs)

    def read_json(path):
        """Load UTF-8 JSON from ``path``."""
        with open(path, 'r', encoding='utf-8') as f:
            return json.load(f)

else:
    # Python 2: write bytes and let json handle the encoding.
    def write_json(obj, path, **kwargs):
        """Serialize ``obj`` as UTF-8 JSON at ``path``."""
        with open(path, 'wb') as f:
            json.dump(obj, f, encoding='utf-8', **kwargs)

    def read_json(path):
        """Load UTF-8 JSON from ``path``."""
        with open(path, 'rb') as f:
            return json.load(f)


class BackendUnavailable(Exception):
    """Raised if we cannot import the backend"""
    def __init__(self, traceback):
        self.traceback = traceback


class BackendInvalid(Exception):
    """Raised if the backend is invalid"""
    def __init__(self, message):
        self.message = message


class HookMissing(Exception):
    """Raised if a hook is missing and we are not executing the fallback"""


def contained_in(filename, directory):
    """Test if a file is located within the given directory."""
    abs_file = os.path.normcase(os.path.abspath(filename))
    abs_dir = os.path.normcase(os.path.abspath(directory))
    # NOTE(review): commonprefix is character-based; matches the original
    # behavior exactly, including its edge cases on sibling prefixes.
    return os.path.commonprefix([abs_file, abs_dir]) == abs_dir
+ backend_path = os.environ.get('PEP517_BACKEND_PATH') + if backend_path: + extra_pathitems = backend_path.split(os.pathsep) + sys.path[:0] = extra_pathitems + ep = os.environ['PEP517_BUILD_BACKEND'] mod_path, _, obj_path = ep.partition(':') try: obj = import_module(mod_path) except ImportError: - raise BackendUnavailable + raise BackendUnavailable(traceback.format_exc()) + + if backend_path: + if not any( + contained_in(obj.__file__, path) + for path in extra_pathitems + ): + raise BackendInvalid("Backend was not loaded from backend-path") + if obj_path: for path_part in obj_path.split('.'): obj = getattr(obj, path_part) @@ -54,15 +114,19 @@ def get_requires_for_build_wheel(config_settings): return hook(config_settings) -def prepare_metadata_for_build_wheel(metadata_directory, config_settings): +def prepare_metadata_for_build_wheel( + metadata_directory, config_settings, _allow_fallback): """Invoke optional prepare_metadata_for_build_wheel - Implements a fallback by building a wheel if the hook isn't defined. + Implements a fallback by building a wheel if the hook isn't defined, + unless _allow_fallback is False in which case HookMissing is raised. 
""" backend = _build_backend() try: hook = backend.prepare_metadata_for_build_wheel except AttributeError: + if not _allow_fallback: + raise HookMissing() return _get_wheel_metadata_from_wheel(backend, metadata_directory, config_settings) else: @@ -161,6 +225,8 @@ class _DummyException(Exception): class GotUnsupportedOperation(Exception): """For internal use when backend raises UnsupportedOperation""" + def __init__(self, traceback): + self.traceback = traceback def build_sdist(sdist_directory, config_settings): @@ -169,7 +235,7 @@ def build_sdist(sdist_directory, config_settings): try: return backend.build_sdist(sdist_directory, config_settings) except getattr(backend, 'UnsupportedOperation', _DummyException): - raise GotUnsupportedOperation + raise GotUnsupportedOperation(traceback.format_exc()) HOOK_NAMES = { @@ -190,17 +256,24 @@ def main(): sys.exit("Unknown hook: %s" % hook_name) hook = globals()[hook_name] - hook_input = compat.read_json(pjoin(control_dir, 'input.json')) + hook_input = read_json(pjoin(control_dir, 'input.json')) json_out = {'unsupported': False, 'return_val': None} try: json_out['return_val'] = hook(**hook_input['kwargs']) - except BackendUnavailable: + except BackendUnavailable as e: json_out['no_backend'] = True - except GotUnsupportedOperation: + json_out['traceback'] = e.traceback + except BackendInvalid as e: + json_out['backend_invalid'] = True + json_out['backend_error'] = e.message + except GotUnsupportedOperation as e: json_out['unsupported'] = True + json_out['traceback'] = e.traceback + except HookMissing: + json_out['hook_missing'] = True - compat.write_json(json_out, pjoin(control_dir, 'output.json'), indent=2) + write_json(json_out, pjoin(control_dir, 'output.json'), indent=2) if __name__ == '__main__': diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/build.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/build.py index ac6c9495..26430144 100644 --- 
a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/build.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/build.py @@ -3,25 +3,56 @@ import argparse import logging import os -import contextlib -from pip._vendor import pytoml +from pip._vendor import toml import shutil -import errno -import tempfile from .envbuild import BuildEnvironment from .wrappers import Pep517HookCaller +from .dirtools import tempdir, mkdir_p +from .compat import FileNotFoundError log = logging.getLogger(__name__) -@contextlib.contextmanager -def tempdir(): - td = tempfile.mkdtemp() +def validate_system(system): + """ + Ensure build system has the requisite fields. + """ + required = {'requires', 'build-backend'} + if not (required <= set(system)): + message = "Missing required fields: {missing}".format( + missing=required-set(system), + ) + raise ValueError(message) + + +def load_system(source_dir): + """ + Load the build system from a source dir (pyproject.toml). + """ + pyproject = os.path.join(source_dir, 'pyproject.toml') + with open(pyproject) as f: + pyproject_data = toml.load(f) + return pyproject_data['build-system'] + + +def compat_system(source_dir): + """ + Given a source dir, attempt to get a build system backend + and requirements from pyproject.toml. Fallback to + setuptools but only if the file was not found or a build + system was not indicated. + """ try: - yield td - finally: - shutil.rmtree(td) + system = load_system(source_dir) + except (FileNotFoundError, KeyError): + system = {} + system.setdefault( + 'build-backend', + 'setuptools.build_meta:__legacy__', + ) + system.setdefault('requires', ['setuptools', 'wheel']) + return system def _do_build(hooks, env, dist, dest): @@ -42,33 +73,18 @@ def _do_build(hooks, env, dist, dest): shutil.move(source, os.path.join(dest, os.path.basename(filename))) -def mkdir_p(*args, **kwargs): - """Like `mkdir`, but does not raise an exception if the - directory already exists. 
- """ - try: - return os.mkdir(*args, **kwargs) - except OSError as exc: - if exc.errno != errno.EEXIST: - raise - - -def build(source_dir, dist, dest=None): - pyproject = os.path.join(source_dir, 'pyproject.toml') +def build(source_dir, dist, dest=None, system=None): + system = system or load_system(source_dir) dest = os.path.join(source_dir, dest or 'dist') mkdir_p(dest) - with open(pyproject) as f: - pyproject_data = pytoml.load(f) - # Ensure the mandatory data can be loaded - buildsys = pyproject_data['build-system'] - requires = buildsys['requires'] - backend = buildsys['build-backend'] - - hooks = Pep517HookCaller(source_dir, backend) + validate_system(system) + hooks = Pep517HookCaller( + source_dir, system['build-backend'], system.get('backend-path') + ) with BuildEnvironment() as env: - env.pip_install(requires) + env.pip_install(system['requires']) _do_build(hooks, env, dist, dest) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/check.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/check.py index f4cdc6be..13e722a3 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/check.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/check.py @@ -4,7 +4,7 @@ import argparse import logging import os from os.path import isfile, join as pjoin -from pip._vendor.pytoml import TomlError, load as toml_load +from pip._vendor.toml import TomlDecodeError, load as toml_load import shutil from subprocess import CalledProcessError import sys @@ -147,12 +147,13 @@ def check(source_dir): buildsys = pyproject_data['build-system'] requires = buildsys['requires'] backend = buildsys['build-backend'] + backend_path = buildsys.get('backend-path') log.info('Loaded pyproject.toml') - except (TomlError, KeyError): + except (TomlDecodeError, KeyError): log.error("Invalid pyproject.toml", exc_info=True) return False - hooks = Pep517HookCaller(source_dir, backend) + 
hooks = Pep517HookCaller(source_dir, backend, backend_path) sdist_ok = check_build_sdist(hooks, requires) wheel_ok = check_build_wheel(hooks, requires) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/compat.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/compat.py index 01c66fc7..8432acb7 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/compat.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/compat.py @@ -1,7 +1,10 @@ -"""Handle reading and writing JSON in UTF-8, on Python 3 and 2.""" +"""Python 2/3 compatibility""" import json import sys + +# Handle reading and writing JSON in UTF-8, on Python 3 and 2. + if sys.version_info[0] >= 3: # Python 3 def write_json(obj, path, **kwargs): @@ -21,3 +24,11 @@ else: def read_json(path): with open(path, 'rb') as f: return json.load(f) + + +# FileNotFoundError + +try: + FileNotFoundError = FileNotFoundError +except NameError: + FileNotFoundError = IOError diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/dirtools.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/dirtools.py new file mode 100644 index 00000000..58c6ca0c --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/dirtools.py @@ -0,0 +1,44 @@ +import os +import io +import contextlib +import tempfile +import shutil +import errno +import zipfile + + +@contextlib.contextmanager +def tempdir(): + """Create a temporary directory in a context manager.""" + td = tempfile.mkdtemp() + try: + yield td + finally: + shutil.rmtree(td) + + +def mkdir_p(*args, **kwargs): + """Like `mkdir`, but does not raise an exception if the + directory already exists. 
+ """ + try: + return os.mkdir(*args, **kwargs) + except OSError as exc: + if exc.errno != errno.EEXIST: + raise + + +def dir_to_zipfile(root): + """Construct an in-memory zip file for a directory.""" + buffer = io.BytesIO() + zip_file = zipfile.ZipFile(buffer, 'w') + for root, dirs, files in os.walk(root): + for path in dirs: + fs_path = os.path.join(root, path) + rel_path = os.path.relpath(fs_path, root) + zip_file.writestr(rel_path + '/', '') + for path in files: + fs_path = os.path.join(root, path) + rel_path = os.path.relpath(fs_path, root) + zip_file.write(fs_path, rel_path) + return zip_file diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/envbuild.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/envbuild.py index f7ac5f46..4088dcdb 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/envbuild.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/envbuild.py @@ -3,23 +3,27 @@ import os import logging -from pip._vendor import pytoml +from pip._vendor import toml import shutil from subprocess import check_call import sys from sysconfig import get_paths from tempfile import mkdtemp -from .wrappers import Pep517HookCaller +from .wrappers import Pep517HookCaller, LoggerWrapper log = logging.getLogger(__name__) def _load_pyproject(source_dir): with open(os.path.join(source_dir, 'pyproject.toml')) as f: - pyproject_data = pytoml.load(f) + pyproject_data = toml.load(f) buildsys = pyproject_data['build-system'] - return buildsys['requires'], buildsys['build-backend'] + return ( + buildsys['requires'], + buildsys['build-backend'], + buildsys.get('backend-path'), + ) class BuildEnvironment(object): @@ -90,9 +94,14 @@ class BuildEnvironment(object): if not reqs: return log.info('Calling pip to install %s', reqs) - check_call([ + cmd = [ sys.executable, '-m', 'pip', 'install', '--ignore-installed', - '--prefix', self.path] + list(reqs)) + '--prefix', 
self.path] + list(reqs) + check_call( + cmd, + stdout=LoggerWrapper(log, logging.INFO), + stderr=LoggerWrapper(log, logging.ERROR), + ) def __exit__(self, exc_type, exc_val, exc_tb): needs_cleanup = ( @@ -126,8 +135,8 @@ def build_wheel(source_dir, wheel_dir, config_settings=None): """ if config_settings is None: config_settings = {} - requires, backend = _load_pyproject(source_dir) - hooks = Pep517HookCaller(source_dir, backend) + requires, backend, backend_path = _load_pyproject(source_dir) + hooks = Pep517HookCaller(source_dir, backend, backend_path) with BuildEnvironment() as env: env.pip_install(requires) @@ -148,8 +157,8 @@ def build_sdist(source_dir, sdist_dir, config_settings=None): """ if config_settings is None: config_settings = {} - requires, backend = _load_pyproject(source_dir) - hooks = Pep517HookCaller(source_dir, backend) + requires, backend, backend_path = _load_pyproject(source_dir) + hooks = Pep517HookCaller(source_dir, backend, backend_path) with BuildEnvironment() as env: env.pip_install(requires) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/meta.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/meta.py new file mode 100644 index 00000000..d525de5c --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/meta.py @@ -0,0 +1,92 @@ +"""Build metadata for a project using PEP 517 hooks. 
+""" +import argparse +import logging +import os +import shutil +import functools + +try: + import importlib.metadata as imp_meta +except ImportError: + import importlib_metadata as imp_meta + +try: + from zipfile import Path +except ImportError: + from zipp import Path + +from .envbuild import BuildEnvironment +from .wrappers import Pep517HookCaller, quiet_subprocess_runner +from .dirtools import tempdir, mkdir_p, dir_to_zipfile +from .build import validate_system, load_system, compat_system + +log = logging.getLogger(__name__) + + +def _prep_meta(hooks, env, dest): + reqs = hooks.get_requires_for_build_wheel({}) + log.info('Got build requires: %s', reqs) + + env.pip_install(reqs) + log.info('Installed dynamic build dependencies') + + with tempdir() as td: + log.info('Trying to build metadata in %s', td) + filename = hooks.prepare_metadata_for_build_wheel(td, {}) + source = os.path.join(td, filename) + shutil.move(source, os.path.join(dest, os.path.basename(filename))) + + +def build(source_dir='.', dest=None, system=None): + system = system or load_system(source_dir) + dest = os.path.join(source_dir, dest or 'dist') + mkdir_p(dest) + validate_system(system) + hooks = Pep517HookCaller( + source_dir, system['build-backend'], system.get('backend-path') + ) + + with hooks.subprocess_runner(quiet_subprocess_runner): + with BuildEnvironment() as env: + env.pip_install(system['requires']) + _prep_meta(hooks, env, dest) + + +def build_as_zip(builder=build): + with tempdir() as out_dir: + builder(dest=out_dir) + return dir_to_zipfile(out_dir) + + +def load(root): + """ + Given a source directory (root) of a package, + return an importlib.metadata.Distribution object + with metadata build from that package. 
+ """ + root = os.path.expanduser(root) + system = compat_system(root) + builder = functools.partial(build, source_dir=root, system=system) + path = Path(build_as_zip(builder)) + return imp_meta.PathDistribution(path) + + +parser = argparse.ArgumentParser() +parser.add_argument( + 'source_dir', + help="A directory containing pyproject.toml", +) +parser.add_argument( + '--out-dir', '-o', + help="Destination in which to save the builds relative to source dir", +) + + +def main(): + args = parser.parse_args() + build(args.source_dir, args.out_dir) + + +if __name__ == '__main__': + main() diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/wrappers.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/wrappers.py index b14b8991..00a3d1a7 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/wrappers.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pep517/wrappers.py @@ -1,14 +1,24 @@ +import threading from contextlib import contextmanager import os from os.path import dirname, abspath, join as pjoin import shutil -from subprocess import check_call +from subprocess import check_call, check_output, STDOUT import sys from tempfile import mkdtemp from . 
import compat -_in_proc_script = pjoin(dirname(abspath(__file__)), '_in_process.py') + +try: + import importlib.resources as resources + + def _in_proc_script_path(): + return resources.path(__package__, '_in_process.py') +except ImportError: + @contextmanager + def _in_proc_script_path(): + yield pjoin(dirname(abspath(__file__)), '_in_process.py') @contextmanager @@ -22,10 +32,29 @@ def tempdir(): class BackendUnavailable(Exception): """Will be raised if the backend cannot be imported in the hook process.""" + def __init__(self, traceback): + self.traceback = traceback + + +class BackendInvalid(Exception): + """Will be raised if the backend is invalid.""" + def __init__(self, backend_name, backend_path, message): + self.backend_name = backend_name + self.backend_path = backend_path + self.message = message + + +class HookMissing(Exception): + """Will be raised on missing hooks.""" + def __init__(self, hook_name): + super(HookMissing, self).__init__(hook_name) + self.hook_name = hook_name class UnsupportedOperation(Exception): """May be raised by build_sdist if the backend indicates that it can't.""" + def __init__(self, traceback): + self.traceback = traceback def default_subprocess_runner(cmd, cwd=None, extra_environ=None): @@ -37,25 +66,86 @@ def default_subprocess_runner(cmd, cwd=None, extra_environ=None): check_call(cmd, cwd=cwd, env=env) +def quiet_subprocess_runner(cmd, cwd=None, extra_environ=None): + """A method of calling the wrapper subprocess while suppressing output.""" + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + + check_output(cmd, cwd=cwd, env=env, stderr=STDOUT) + + +def norm_and_check(source_tree, requested): + """Normalise and check a backend path. + + Ensure that the requested backend path is specified as a relative path, + and resolves to a location under the given source tree. + + Return an absolute version of the requested path. 
+ """ + if os.path.isabs(requested): + raise ValueError("paths must be relative") + + abs_source = os.path.abspath(source_tree) + abs_requested = os.path.normpath(os.path.join(abs_source, requested)) + # We have to use commonprefix for Python 2.7 compatibility. So we + # normalise case to avoid problems because commonprefix is a character + # based comparison :-( + norm_source = os.path.normcase(abs_source) + norm_requested = os.path.normcase(abs_requested) + if os.path.commonprefix([norm_source, norm_requested]) != norm_source: + raise ValueError("paths must be inside source tree") + + return abs_requested + + class Pep517HookCaller(object): """A wrapper around a source directory to be built with a PEP 517 backend. source_dir : The path to the source directory, containing pyproject.toml. - backend : The build backend spec, as per PEP 517, from pyproject.toml. + build_backend : The build backend spec, as per PEP 517, from + pyproject.toml. + backend_path : The backend path, as per PEP 517, from pyproject.toml. + runner : A callable that invokes the wrapper subprocess. + + The 'runner', if provided, must expect the following: + cmd : a list of strings representing the command and arguments to + execute, as would be passed to e.g. 'subprocess.check_call'. + cwd : a string representing the working directory that must be + used for the subprocess. Corresponds to the provided source_dir. + extra_environ : a dict mapping environment variable names to values + which must be set for the subprocess execution. 
""" - def __init__(self, source_dir, build_backend): + def __init__( + self, + source_dir, + build_backend, + backend_path=None, + runner=None, + ): + if runner is None: + runner = default_subprocess_runner + self.source_dir = abspath(source_dir) self.build_backend = build_backend - self._subprocess_runner = default_subprocess_runner + if backend_path: + backend_path = [ + norm_and_check(self.source_dir, p) for p in backend_path + ] + self.backend_path = backend_path + self._subprocess_runner = runner - # TODO: Is this over-engineered? Maybe frontends only need to - # set this when creating the wrapper, not on every call. @contextmanager def subprocess_runner(self, runner): + """A context manager for temporarily overriding the default subprocess + runner. + """ prev = self._subprocess_runner self._subprocess_runner = runner - yield - self._subprocess_runner = prev + try: + yield + finally: + self._subprocess_runner = prev def get_requires_for_build_wheel(self, config_settings=None): """Identify packages required for building a wheel @@ -72,18 +162,21 @@ class Pep517HookCaller(object): }) def prepare_metadata_for_build_wheel( - self, metadata_directory, config_settings=None): + self, metadata_directory, config_settings=None, + _allow_fallback=True): """Prepare a *.dist-info folder with metadata for this project. Returns the name of the newly created folder. If the build backend defines a hook with this name, it will be called in a subprocess. If not, the backend will be asked to build a wheel, - and the dist-info extracted from that. + and the dist-info extracted from that (unless _allow_fallback is + False). """ return self._call_hook('prepare_metadata_for_build_wheel', { 'metadata_directory': abspath(metadata_directory), 'config_settings': config_settings, + '_allow_fallback': _allow_fallback, }) def build_wheel( @@ -139,25 +232,77 @@ class Pep517HookCaller(object): # letters, digits and _, . 
and : characters, and will be used as a # Python identifier, so non-ASCII content is wrong on Python 2 in # any case). + # For backend_path, we use sys.getfilesystemencoding. if sys.version_info[0] == 2: build_backend = self.build_backend.encode('ASCII') else: build_backend = self.build_backend + extra_environ = {'PEP517_BUILD_BACKEND': build_backend} + + if self.backend_path: + backend_path = os.pathsep.join(self.backend_path) + if sys.version_info[0] == 2: + backend_path = backend_path.encode(sys.getfilesystemencoding()) + extra_environ['PEP517_BACKEND_PATH'] = backend_path with tempdir() as td: - compat.write_json({'kwargs': kwargs}, pjoin(td, 'input.json'), + hook_input = {'kwargs': kwargs} + compat.write_json(hook_input, pjoin(td, 'input.json'), indent=2) # Run the hook in a subprocess - self._subprocess_runner( - [sys.executable, _in_proc_script, hook_name, td], - cwd=self.source_dir, - extra_environ={'PEP517_BUILD_BACKEND': build_backend} - ) + with _in_proc_script_path() as script: + self._subprocess_runner( + [sys.executable, str(script), hook_name, td], + cwd=self.source_dir, + extra_environ=extra_environ + ) data = compat.read_json(pjoin(td, 'output.json')) if data.get('unsupported'): - raise UnsupportedOperation + raise UnsupportedOperation(data.get('traceback', '')) if data.get('no_backend'): - raise BackendUnavailable + raise BackendUnavailable(data.get('traceback', '')) + if data.get('backend_invalid'): + raise BackendInvalid( + backend_name=self.build_backend, + backend_path=self.backend_path, + message=data.get('backend_error', '') + ) + if data.get('hook_missing'): + raise HookMissing(hook_name) return data['return_val'] + + +class LoggerWrapper(threading.Thread): + """ + Read messages from a pipe and redirect them + to a logger (see python's logging module). 
+ """ + + def __init__(self, logger, level): + threading.Thread.__init__(self) + self.daemon = True + + self.logger = logger + self.level = level + + # create the pipe and reader + self.fd_read, self.fd_write = os.pipe() + self.reader = os.fdopen(self.fd_read) + + self.start() + + def fileno(self): + return self.fd_write + + @staticmethod + def remove_newline(msg): + return msg[:-1] if msg.endswith(os.linesep) else msg + + def run(self): + for line in self.reader: + self._write(self.remove_newline(line)) + + def _write(self, message): + self.logger.log(self.level, message) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pkg_resources/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pkg_resources/__init__.py index 9c4fd8ea..a457ff27 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pkg_resources/__init__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pkg_resources/__init__.py @@ -39,6 +39,8 @@ import tempfile import textwrap import itertools import inspect +import ntpath +import posixpath from pkgutil import get_importer try: @@ -86,8 +88,8 @@ __import__('pip._vendor.packaging.markers') __metaclass__ = type -if (3, 0) < sys.version_info < (3, 4): - raise RuntimeError("Python 3.4 or later is required") +if (3, 0) < sys.version_info < (3, 5): + raise RuntimeError("Python 3.5 or later is required") if six.PY2: # Those builtin exceptions are only defined in Python 3 @@ -331,7 +333,7 @@ class UnknownExtra(ResolutionError): _provider_factories = {} -PY_MAJOR = sys.version[:3] +PY_MAJOR = '{}.{}'.format(*sys.version_info) EGG_DIST = 3 BINARY_DIST = 2 SOURCE_DIST = 1 @@ -1401,14 +1403,30 @@ class NullProvider: def has_resource(self, resource_name): return self._has(self._fn(self.module_path, resource_name)) + def _get_metadata_path(self, name): + return self._fn(self.egg_info, name) + def has_metadata(self, name): - return self.egg_info and 
self._has(self._fn(self.egg_info, name)) + if not self.egg_info: + return self.egg_info + + path = self._get_metadata_path(name) + return self._has(path) def get_metadata(self, name): if not self.egg_info: return "" - value = self._get(self._fn(self.egg_info, name)) - return value.decode('utf-8') if six.PY3 else value + path = self._get_metadata_path(name) + value = self._get(path) + if six.PY2: + return value + try: + return value.decode('utf-8') + except UnicodeDecodeError as exc: + # Include the path in the error message to simplify + # troubleshooting, and without changing the exception type. + exc.reason += ' in {} file at path: {}'.format(name, path) + raise def get_metadata_lines(self, name): return yield_lines(self.get_metadata(name)) @@ -1466,10 +1484,86 @@ class NullProvider: ) def _fn(self, base, resource_name): + self._validate_resource_path(resource_name) if resource_name: return os.path.join(base, *resource_name.split('/')) return base + @staticmethod + def _validate_resource_path(path): + """ + Validate the resource paths according to the docs. + https://setuptools.readthedocs.io/en/latest/pkg_resources.html#basic-resource-access + + >>> warned = getfixture('recwarn') + >>> warnings.simplefilter('always') + >>> vrp = NullProvider._validate_resource_path + >>> vrp('foo/bar.txt') + >>> bool(warned) + False + >>> vrp('../foo/bar.txt') + >>> bool(warned) + True + >>> warned.clear() + >>> vrp('/foo/bar.txt') + >>> bool(warned) + True + >>> vrp('foo/../../bar.txt') + >>> bool(warned) + True + >>> warned.clear() + >>> vrp('foo/f../bar.txt') + >>> bool(warned) + False + + Windows path separators are straight-up disallowed. + >>> vrp(r'\\foo/bar.txt') + Traceback (most recent call last): + ... + ValueError: Use of .. or absolute path in a resource path \ +is not allowed. + + >>> vrp(r'C:\\foo/bar.txt') + Traceback (most recent call last): + ... + ValueError: Use of .. or absolute path in a resource path \ +is not allowed. 
+ + Blank values are allowed + + >>> vrp('') + >>> bool(warned) + False + + Non-string values are not. + + >>> vrp(None) + Traceback (most recent call last): + ... + AttributeError: ... + """ + invalid = ( + os.path.pardir in path.split(posixpath.sep) or + posixpath.isabs(path) or + ntpath.isabs(path) + ) + if not invalid: + return + + msg = "Use of .. or absolute path in a resource path is not allowed." + + # Aggressively disallow Windows absolute paths + if ntpath.isabs(path) and not posixpath.isabs(path): + raise ValueError(msg) + + # for compatibility, warn; in future + # raise ValueError(msg) + warnings.warn( + msg[:-1] + " and will raise exceptions in a future release.", + DeprecationWarning, + stacklevel=4, + ) + def _get(self, path): if hasattr(self.loader, 'get_data'): return self.loader.get_data(path) @@ -1790,6 +1884,9 @@ class FileMetadata(EmptyProvider): def __init__(self, path): self.path = path + def _get_metadata_path(self, name): + return self.path + def has_metadata(self, name): return name == 'PKG-INFO' and os.path.isfile(self.path) @@ -1888,7 +1985,7 @@ def find_eggs_in_zip(importer, path_item, only=False): if only: # don't yield nested distros return - for subitem in metadata.resource_listdir('/'): + for subitem in metadata.resource_listdir(''): if _is_egg_path(subitem): subpath = os.path.join(path_item, subitem) dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath) @@ -2583,10 +2680,14 @@ class Distribution: try: return self._version except AttributeError: - version = _version_from_file(self._get_metadata(self.PKG_INFO)) + version = self._get_version() if version is None: - tmpl = "Missing 'Version:' header and/or %s file" - raise ValueError(tmpl % self.PKG_INFO, self) + path = self._get_metadata_path_for_display(self.PKG_INFO) + msg = ( + "Missing 'Version:' header and/or {} file at path: {}" + ).format(self.PKG_INFO, path) + raise ValueError(msg, self) + return version @property @@ -2644,11 +2745,34 @@ class Distribution: ) 
return deps + def _get_metadata_path_for_display(self, name): + """ + Return the path to the given metadata file, if available. + """ + try: + # We need to access _get_metadata_path() on the provider object + # directly rather than through this class's __getattr__() + # since _get_metadata_path() is marked private. + path = self._provider._get_metadata_path(name) + + # Handle exceptions e.g. in case the distribution's metadata + # provider doesn't support _get_metadata_path(). + except Exception: + return '[could not detect]' + + return path + def _get_metadata(self, name): if self.has_metadata(name): for line in self.get_metadata_lines(name): yield line + def _get_version(self): + lines = self._get_metadata(self.PKG_INFO) + version = _version_from_file(lines) + + return version + def activate(self, path=None, replace=False): """Ensure distribution is importable on `path` (default=sys.path)""" if path is None: @@ -2867,7 +2991,7 @@ class EggInfoDistribution(Distribution): take an extra step and try to get the version number from the metadata file itself instead of the filename. 
""" - md_version = _version_from_file(self._get_metadata(self.PKG_INFO)) + md_version = self._get_version() if md_version: self._version = md_version return self @@ -2985,6 +3109,7 @@ class Requirement(packaging.requirements.Requirement): self.extras = tuple(map(safe_extra, self.extras)) self.hashCmp = ( self.key, + self.url, self.specifier, frozenset(self.extras), str(self.marker) if self.marker else None, diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/progress/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/progress/__init__.py index a41f65dc..e434c257 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/progress/__init__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/progress/__init__.py @@ -12,31 +12,49 @@ # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. -from __future__ import division +from __future__ import division, print_function from collections import deque from datetime import timedelta from math import ceil from sys import stderr -from time import time +try: + from time import monotonic +except ImportError: + from time import time as monotonic -__version__ = '1.4' +__version__ = '1.5' + +HIDE_CURSOR = '\x1b[?25l' +SHOW_CURSOR = '\x1b[?25h' class Infinite(object): file = stderr sma_window = 10 # Simple Moving Average window + check_tty = True + hide_cursor = True - def __init__(self, *args, **kwargs): + def __init__(self, message='', **kwargs): self.index = 0 - self.start_ts = time() + self.start_ts = monotonic() self.avg = 0 + self._avg_update_ts = self.start_ts self._ts = self.start_ts self._xput = deque(maxlen=self.sma_window) for key, val in kwargs.items(): setattr(self, key, val) + self._width = 0 + self.message = message + + if self.file and self.is_tty(): + if self.hide_cursor: + print(HIDE_CURSOR, end='', file=self.file) + print(self.message, 
end='', file=self.file) + self.file.flush() + def __getitem__(self, key): if key.startswith('_'): return None @@ -44,7 +62,7 @@ class Infinite(object): @property def elapsed(self): - return int(time() - self.start_ts) + return int(monotonic() - self.start_ts) @property def elapsed_td(self): @@ -52,8 +70,14 @@ class Infinite(object): def update_avg(self, n, dt): if n > 0: + xput_len = len(self._xput) self._xput.append(dt / n) - self.avg = sum(self._xput) / len(self._xput) + now = monotonic() + # update when we're still filling _xput, then after every second + if (xput_len < self.sma_window or + now - self._avg_update_ts > 1): + self.avg = sum(self._xput) / len(self._xput) + self._avg_update_ts = now def update(self): pass @@ -61,11 +85,34 @@ class Infinite(object): def start(self): pass + def clearln(self): + if self.file and self.is_tty(): + print('\r\x1b[K', end='', file=self.file) + + def write(self, s): + if self.file and self.is_tty(): + line = self.message + s.ljust(self._width) + print('\r' + line, end='', file=self.file) + self._width = max(self._width, len(s)) + self.file.flush() + + def writeln(self, line): + if self.file and self.is_tty(): + self.clearln() + print(line, end='', file=self.file) + self.file.flush() + def finish(self): - pass + if self.file and self.is_tty(): + print(file=self.file) + if self.hide_cursor: + print(SHOW_CURSOR, end='', file=self.file) + + def is_tty(self): + return self.file.isatty() if self.check_tty else True def next(self, n=1): - now = time() + now = monotonic() dt = now - self._ts self.update_avg(n, dt) self._ts = now @@ -73,12 +120,17 @@ class Infinite(object): self.update() def iter(self, it): - try: + with self: for x in it: yield x self.next() - finally: - self.finish() + + def __enter__(self): + self.start() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.finish() class Progress(Infinite): @@ -119,9 +171,7 @@ class Progress(Infinite): except TypeError: pass - try: + with self: for x in it: 
yield x self.next() - finally: - self.finish() diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/progress/bar.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/progress/bar.py index 025e61c4..8819efda 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/progress/bar.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/progress/bar.py @@ -19,18 +19,15 @@ from __future__ import unicode_literals import sys from . import Progress -from .helpers import WritelnMixin -class Bar(WritelnMixin, Progress): +class Bar(Progress): width = 32 - message = '' suffix = '%(index)d/%(max)d' bar_prefix = ' |' bar_suffix = '| ' empty_fill = ' ' fill = '#' - hide_cursor = True def update(self): filled_length = int(self.width * self.progress) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/progress/counter.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/progress/counter.py index 6b45a1ec..d955ca47 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/progress/counter.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/progress/counter.py @@ -16,27 +16,20 @@ from __future__ import unicode_literals from . 
import Infinite, Progress -from .helpers import WriteMixin -class Counter(WriteMixin, Infinite): - message = '' - hide_cursor = True - +class Counter(Infinite): def update(self): self.write(str(self.index)) -class Countdown(WriteMixin, Progress): - hide_cursor = True - +class Countdown(Progress): def update(self): self.write(str(self.remaining)) -class Stack(WriteMixin, Progress): +class Stack(Progress): phases = (' ', '▁', '▂', '▃', '▄', '▅', '▆', '▇', '█') - hide_cursor = True def update(self): nphases = len(self.phases) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/progress/helpers.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/progress/helpers.py deleted file mode 100644 index 0cde44ec..00000000 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/progress/helpers.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com> -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -from __future__ import print_function - - -HIDE_CURSOR = '\x1b[?25l' -SHOW_CURSOR = '\x1b[?25h' - - -class WriteMixin(object): - hide_cursor = False - - def __init__(self, message=None, **kwargs): - super(WriteMixin, self).__init__(**kwargs) - self._width = 0 - if message: - self.message = message - - if self.file and self.file.isatty(): - if self.hide_cursor: - print(HIDE_CURSOR, end='', file=self.file) - print(self.message, end='', file=self.file) - self.file.flush() - - def write(self, s): - if self.file and self.file.isatty(): - b = '\b' * self._width - c = s.ljust(self._width) - print(b + c, end='', file=self.file) - self._width = max(self._width, len(s)) - self.file.flush() - - def finish(self): - if self.file and self.file.isatty() and self.hide_cursor: - print(SHOW_CURSOR, end='', file=self.file) - - -class WritelnMixin(object): - hide_cursor = False - - def __init__(self, message=None, **kwargs): - super(WritelnMixin, self).__init__(**kwargs) - if message: - self.message = message - - if self.file and self.file.isatty() and self.hide_cursor: - print(HIDE_CURSOR, end='', file=self.file) - - def clearln(self): - if self.file and self.file.isatty(): - print('\r\x1b[K', end='', file=self.file) - - def writeln(self, line): - if self.file and self.file.isatty(): - self.clearln() - print(line, end='', file=self.file) - self.file.flush() - - def finish(self): - if self.file and self.file.isatty(): - print(file=self.file) - if self.hide_cursor: - print(SHOW_CURSOR, end='', file=self.file) - - -from signal import signal, SIGINT -from sys import exit - - -class SigIntMixin(object): - """Registers a signal handler that calls finish on SIGINT""" - - def __init__(self, *args, **kwargs): - super(SigIntMixin, self).__init__(*args, **kwargs) - signal(SIGINT, self._sigint_handler) - - def _sigint_handler(self, signum, frame): - self.finish() - exit(0) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/progress/spinner.py 
b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/progress/spinner.py index 464c7b27..4e100cab 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/progress/spinner.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/progress/spinner.py @@ -16,11 +16,9 @@ from __future__ import unicode_literals from . import Infinite -from .helpers import WriteMixin -class Spinner(WriteMixin, Infinite): - message = '' +class Spinner(Infinite): phases = ('-', '\\', '|', '/') hide_cursor = True @@ -40,5 +38,6 @@ class MoonSpinner(Spinner): class LineSpinner(Spinner): phases = ['⎺', '⎻', '⎼', '⎽', '⎼', '⎻'] + class PixelSpinner(Spinner): - phases = ['⣾','⣷', '⣯', '⣟', '⡿', '⢿', '⣻', '⣽'] + phases = ['⣾', '⣷', '⣯', '⣟', '⡿', '⢿', '⣻', '⣽'] diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pyparsing.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pyparsing.py index bea4d9c7..7ebc7eb9 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pyparsing.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pyparsing.py @@ -1,4 +1,4 @@ -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # module pyparsing.py # # Copyright (c) 2003-2019 Paul T. McGuire @@ -87,14 +87,16 @@ classes inherit from. 
Use the docstrings for examples of how to: more complex ones - associate names with your parsed results using :class:`ParserElement.setResultsName` + - access the parsed data, which is returned as a :class:`ParseResults` + object - find some helpful expression short-cuts like :class:`delimitedList` and :class:`oneOf` - find more useful common expressions in the :class:`pyparsing_common` namespace class """ -__version__ = "2.3.1" -__versionTime__ = "09 Jan 2019 23:26 UTC" +__version__ = "2.4.7" +__versionTime__ = "30 Mar 2020 00:43 UTC" __author__ = "Paul McGuire <ptmcg@users.sourceforge.net>" import string @@ -109,6 +111,10 @@ import pprint import traceback import types from datetime import datetime +from operator import itemgetter +import itertools +from functools import wraps +from contextlib import contextmanager try: # Python 3 @@ -124,11 +130,11 @@ except ImportError: try: # Python 3 from collections.abc import Iterable - from collections.abc import MutableMapping + from collections.abc import MutableMapping, Mapping except ImportError: # Python 2.7 from collections import Iterable - from collections import MutableMapping + from collections import MutableMapping, Mapping try: from collections import OrderedDict as _OrderedDict @@ -143,29 +149,73 @@ try: except ImportError: class SimpleNamespace: pass - -#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) ) - -__all__ = [ -'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty', -'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal', -'PrecededBy', 'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', -'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException', -'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException', -'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 
'Token', 'TokenConverter', -'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', 'Char', -'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col', -'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString', -'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums', -'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno', -'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral', -'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', -'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', -'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', -'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', -'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass', -'CloseMatch', 'tokenMap', 'pyparsing_common', 'pyparsing_unicode', 'unicode_set', -] +# version compatibility configuration +__compat__ = SimpleNamespace() +__compat__.__doc__ = """ + A cross-version compatibility configuration for pyparsing features that will be + released in a future version. By setting values in this configuration to True, + those features can be enabled in prior versions for compatibility development + and testing. 
+ + - collect_all_And_tokens - flag to enable fix for Issue #63 that fixes erroneous grouping + of results names when an And expression is nested within an Or or MatchFirst; set to + True to enable bugfix released in pyparsing 2.3.0, or False to preserve + pre-2.3.0 handling of named results +""" +__compat__.collect_all_And_tokens = True + +__diag__ = SimpleNamespace() +__diag__.__doc__ = """ +Diagnostic configuration (all default to False) + - warn_multiple_tokens_in_named_alternation - flag to enable warnings when a results + name is defined on a MatchFirst or Or expression with one or more And subexpressions + (only warns if __compat__.collect_all_And_tokens is False) + - warn_ungrouped_named_tokens_in_collection - flag to enable warnings when a results + name is defined on a containing expression with ungrouped subexpressions that also + have results names + - warn_name_set_on_empty_Forward - flag to enable warnings whan a Forward is defined + with a results name, but has no contents defined + - warn_on_multiple_string_args_to_oneof - flag to enable warnings whan oneOf is + incorrectly called with multiple str arguments + - enable_debug_on_named_expressions - flag to auto-enable debug on all subsequent + calls to ParserElement.setName() +""" +__diag__.warn_multiple_tokens_in_named_alternation = False +__diag__.warn_ungrouped_named_tokens_in_collection = False +__diag__.warn_name_set_on_empty_Forward = False +__diag__.warn_on_multiple_string_args_to_oneof = False +__diag__.enable_debug_on_named_expressions = False +__diag__._all_names = [nm for nm in vars(__diag__) if nm.startswith("enable_") or nm.startswith("warn_")] + +def _enable_all_warnings(): + __diag__.warn_multiple_tokens_in_named_alternation = True + __diag__.warn_ungrouped_named_tokens_in_collection = True + __diag__.warn_name_set_on_empty_Forward = True + __diag__.warn_on_multiple_string_args_to_oneof = True +__diag__.enable_all_warnings = _enable_all_warnings + + +__all__ = ['__version__', 
'__versionTime__', '__author__', '__compat__', '__diag__', + 'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty', + 'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal', + 'PrecededBy', 'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', + 'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException', + 'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException', + 'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', + 'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', 'Char', + 'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col', + 'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString', + 'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums', + 'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno', + 'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral', + 'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', + 'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', + 'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', + 'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', + 'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation', 'locatedExpr', 'withClass', + 'CloseMatch', 'tokenMap', 'pyparsing_common', 'pyparsing_unicode', 'unicode_set', + 'conditionAsParseAction', 're', + ] system_version = tuple(sys.version_info)[:3] PY_3 = system_version[0] == 3 @@ -190,7 +240,7 @@ else: < returns the unicode object | encodes it with the default encoding | ... >. 
""" - if isinstance(obj,unicode): + if isinstance(obj, unicode): return obj try: @@ -208,9 +258,10 @@ else: # build list of single arg builtins, tolerant of Python version, that can be used as parse actions singleArgBuiltins = [] import __builtin__ + for fname in "sum len sorted reversed list tuple set any all min max".split(): try: - singleArgBuiltins.append(getattr(__builtin__,fname)) + singleArgBuiltins.append(getattr(__builtin__, fname)) except AttributeError: continue @@ -221,23 +272,36 @@ def _xml_escape(data): # ampersand must be replaced first from_symbols = '&><"\'' - to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split()) - for from_,to_ in zip(from_symbols, to_symbols): + to_symbols = ('&' + s + ';' for s in "amp gt lt quot apos".split()) + for from_, to_ in zip(from_symbols, to_symbols): data = data.replace(from_, to_) return data -alphas = string.ascii_uppercase + string.ascii_lowercase -nums = "0123456789" -hexnums = nums + "ABCDEFabcdef" -alphanums = alphas + nums -_bslash = chr(92) +alphas = string.ascii_uppercase + string.ascii_lowercase +nums = "0123456789" +hexnums = nums + "ABCDEFabcdef" +alphanums = alphas + nums +_bslash = chr(92) printables = "".join(c for c in string.printable if c not in string.whitespace) + +def conditionAsParseAction(fn, message=None, fatal=False): + msg = message if message is not None else "failed user-defined condition" + exc_type = ParseFatalException if fatal else ParseException + fn = _trim_arity(fn) + + @wraps(fn) + def pa(s, l, t): + if not bool(fn(s, l, t)): + raise exc_type(s, l, msg) + + return pa + class ParseBaseException(Exception): """base exception class for all parsing runtime exceptions""" # Performance tuning: we construct a *lot* of these, so keep this # constructor as small and fast as possible - def __init__( self, pstr, loc=0, msg=None, elem=None ): + def __init__(self, pstr, loc=0, msg=None, elem=None): self.loc = loc if msg is None: self.msg = pstr @@ -256,27 +320,34 @@ class 
ParseBaseException(Exception): """ return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement) - def __getattr__( self, aname ): + def __getattr__(self, aname): """supported attributes by name are: - lineno - returns the line number of the exception text - col - returns the column number of the exception text - line - returns the line containing the exception text """ - if( aname == "lineno" ): - return lineno( self.loc, self.pstr ) - elif( aname in ("col", "column") ): - return col( self.loc, self.pstr ) - elif( aname == "line" ): - return line( self.loc, self.pstr ) + if aname == "lineno": + return lineno(self.loc, self.pstr) + elif aname in ("col", "column"): + return col(self.loc, self.pstr) + elif aname == "line": + return line(self.loc, self.pstr) else: raise AttributeError(aname) - def __str__( self ): - return "%s (at char %d), (line:%d, col:%d)" % \ - ( self.msg, self.loc, self.lineno, self.column ) - def __repr__( self ): + def __str__(self): + if self.pstr: + if self.loc >= len(self.pstr): + foundstr = ', found end of text' + else: + foundstr = (', found %r' % self.pstr[self.loc:self.loc + 1]).replace(r'\\', '\\') + else: + foundstr = '' + return ("%s%s (at char %d), (line:%d, col:%d)" % + (self.msg, foundstr, self.loc, self.lineno, self.column)) + def __repr__(self): return _ustr(self) - def markInputline( self, markerString = ">!<" ): + def markInputline(self, markerString=">!<"): """Extracts the exception line from the input string, and marks the location of the exception with a special symbol. 
""" @@ -350,7 +421,7 @@ class ParseException(ParseBaseException): callers = inspect.getinnerframes(exc.__traceback__, context=depth) seen = set() for i, ff in enumerate(callers[-depth:]): - frm = ff.frame + frm = ff[0] f_self = frm.f_locals.get('self', None) if isinstance(f_self, ParserElement): @@ -412,21 +483,21 @@ class RecursiveGrammarException(Exception): """exception thrown by :class:`ParserElement.validate` if the grammar could be improperly recursive """ - def __init__( self, parseElementList ): + def __init__(self, parseElementList): self.parseElementTrace = parseElementList - def __str__( self ): + def __str__(self): return "RecursiveGrammarException: %s" % self.parseElementTrace class _ParseResultsWithOffset(object): - def __init__(self,p1,p2): - self.tup = (p1,p2) - def __getitem__(self,i): + def __init__(self, p1, p2): + self.tup = (p1, p2) + def __getitem__(self, i): return self.tup[i] def __repr__(self): return repr(self.tup[0]) - def setOffset(self,i): - self.tup = (self.tup[0],i) + def setOffset(self, i): + self.tup = (self.tup[0], i) class ParseResults(object): """Structured parse results, to provide multiple means of access to @@ -471,7 +542,7 @@ class ParseResults(object): - month: 12 - year: 1999 """ - def __new__(cls, toklist=None, name=None, asList=True, modal=True ): + def __new__(cls, toklist=None, name=None, asList=True, modal=True): if isinstance(toklist, cls): return toklist retobj = object.__new__(cls) @@ -480,7 +551,7 @@ class ParseResults(object): # Performance tuning: we construct a *lot* of these, so keep this # constructor as small and fast as possible - def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ): + def __init__(self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance): if self.__doinit: self.__doinit = False self.__name = None @@ -501,85 +572,93 @@ class ParseResults(object): if name is not None and name: if not modal: self.__accumNames[name] = 0 - if 
isinstance(name,int): - name = _ustr(name) # will always return a str, but use _ustr for consistency + if isinstance(name, int): + name = _ustr(name) # will always return a str, but use _ustr for consistency self.__name = name - if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])): - if isinstance(toklist,basestring): - toklist = [ toklist ] + if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None, '', [])): + if isinstance(toklist, basestring): + toklist = [toklist] if asList: - if isinstance(toklist,ParseResults): + if isinstance(toklist, ParseResults): self[name] = _ParseResultsWithOffset(ParseResults(toklist.__toklist), 0) else: - self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0) + self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]), 0) self[name].__name = name else: try: self[name] = toklist[0] - except (KeyError,TypeError,IndexError): + except (KeyError, TypeError, IndexError): self[name] = toklist - def __getitem__( self, i ): - if isinstance( i, (int,slice) ): + def __getitem__(self, i): + if isinstance(i, (int, slice)): return self.__toklist[i] else: if i not in self.__accumNames: return self.__tokdict[i][-1][0] else: - return ParseResults([ v[0] for v in self.__tokdict[i] ]) + return ParseResults([v[0] for v in self.__tokdict[i]]) - def __setitem__( self, k, v, isinstance=isinstance ): - if isinstance(v,_ParseResultsWithOffset): - self.__tokdict[k] = self.__tokdict.get(k,list()) + [v] + def __setitem__(self, k, v, isinstance=isinstance): + if isinstance(v, _ParseResultsWithOffset): + self.__tokdict[k] = self.__tokdict.get(k, list()) + [v] sub = v[0] - elif isinstance(k,(int,slice)): + elif isinstance(k, (int, slice)): self.__toklist[k] = v sub = v else: - self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)] + self.__tokdict[k] = self.__tokdict.get(k, list()) + [_ParseResultsWithOffset(v, 0)] sub = v - if isinstance(sub,ParseResults): + if 
isinstance(sub, ParseResults): sub.__parent = wkref(self) - def __delitem__( self, i ): - if isinstance(i,(int,slice)): - mylen = len( self.__toklist ) + def __delitem__(self, i): + if isinstance(i, (int, slice)): + mylen = len(self.__toklist) del self.__toklist[i] # convert int to slice if isinstance(i, int): if i < 0: i += mylen - i = slice(i, i+1) + i = slice(i, i + 1) # get removed indices removed = list(range(*i.indices(mylen))) removed.reverse() # fixup indices in token dictionary - for name,occurrences in self.__tokdict.items(): + for name, occurrences in self.__tokdict.items(): for j in removed: for k, (value, position) in enumerate(occurrences): occurrences[k] = _ParseResultsWithOffset(value, position - (position > j)) else: del self.__tokdict[i] - def __contains__( self, k ): + def __contains__(self, k): return k in self.__tokdict - def __len__( self ): return len( self.__toklist ) - def __bool__(self): return ( not not self.__toklist ) + def __len__(self): + return len(self.__toklist) + + def __bool__(self): + return (not not self.__toklist) __nonzero__ = __bool__ - def __iter__( self ): return iter( self.__toklist ) - def __reversed__( self ): return iter( self.__toklist[::-1] ) - def _iterkeys( self ): + + def __iter__(self): + return iter(self.__toklist) + + def __reversed__(self): + return iter(self.__toklist[::-1]) + + def _iterkeys(self): if hasattr(self.__tokdict, "iterkeys"): return self.__tokdict.iterkeys() else: return iter(self.__tokdict) - def _itervalues( self ): + def _itervalues(self): return (self[k] for k in self._iterkeys()) - def _iteritems( self ): + def _iteritems(self): return ((k, self[k]) for k in self._iterkeys()) if PY_3: @@ -602,24 +681,24 @@ class ParseResults(object): iteritems = _iteritems """Returns an iterator of all named result key-value tuples (Python 2.x only).""" - def keys( self ): + def keys(self): """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x).""" return 
list(self.iterkeys()) - def values( self ): + def values(self): """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x).""" return list(self.itervalues()) - def items( self ): + def items(self): """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x).""" return list(self.iteritems()) - def haskeys( self ): + def haskeys(self): """Since keys() returns an iterator, this method is helpful in bypassing code that looks for the existence of any defined results names.""" return bool(self.__tokdict) - def pop( self, *args, **kwargs): + def pop(self, *args, **kwargs): """ Removes and returns item at specified index (default= ``last``). Supports both ``list`` and ``dict`` semantics for ``pop()``. If @@ -658,14 +737,14 @@ class ParseResults(object): """ if not args: args = [-1] - for k,v in kwargs.items(): + for k, v in kwargs.items(): if k == 'default': args = (args[0], v) else: raise TypeError("pop() got an unexpected keyword argument '%s'" % k) - if (isinstance(args[0], int) or - len(args) == 1 or - args[0] in self): + if (isinstance(args[0], int) + or len(args) == 1 + or args[0] in self): index = args[0] ret = self[index] del self[index] @@ -697,7 +776,7 @@ class ParseResults(object): else: return defaultValue - def insert( self, index, insStr ): + def insert(self, index, insStr): """ Inserts new element at location index in the list of parsed tokens. @@ -714,11 +793,11 @@ class ParseResults(object): """ self.__toklist.insert(index, insStr) # fixup indices in token dictionary - for name,occurrences in self.__tokdict.items(): + for name, occurrences in self.__tokdict.items(): for k, (value, position) in enumerate(occurrences): occurrences[k] = _ParseResultsWithOffset(value, position + (position > index)) - def append( self, item ): + def append(self, item): """ Add single element to end of ParseResults list of elements. 
@@ -733,7 +812,7 @@ class ParseResults(object): """ self.__toklist.append(item) - def extend( self, itemseq ): + def extend(self, itemseq): """ Add sequence of elements to end of ParseResults list of elements. @@ -748,78 +827,70 @@ class ParseResults(object): print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl' """ if isinstance(itemseq, ParseResults): - self += itemseq + self.__iadd__(itemseq) else: self.__toklist.extend(itemseq) - def clear( self ): + def clear(self): """ Clear all elements and results names. """ del self.__toklist[:] self.__tokdict.clear() - def __getattr__( self, name ): + def __getattr__(self, name): try: return self[name] except KeyError: return "" - if name in self.__tokdict: - if name not in self.__accumNames: - return self.__tokdict[name][-1][0] - else: - return ParseResults([ v[0] for v in self.__tokdict[name] ]) - else: - return "" - - def __add__( self, other ): + def __add__(self, other): ret = self.copy() ret += other return ret - def __iadd__( self, other ): + def __iadd__(self, other): if other.__tokdict: offset = len(self.__toklist) - addoffset = lambda a: offset if a<0 else a+offset + addoffset = lambda a: offset if a < 0 else a + offset otheritems = other.__tokdict.items() - otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) ) - for (k,vlist) in otheritems for v in vlist] - for k,v in otherdictitems: + otherdictitems = [(k, _ParseResultsWithOffset(v[0], addoffset(v[1]))) + for k, vlist in otheritems for v in vlist] + for k, v in otherdictitems: self[k] = v - if isinstance(v[0],ParseResults): + if isinstance(v[0], ParseResults): v[0].__parent = wkref(self) self.__toklist += other.__toklist - self.__accumNames.update( other.__accumNames ) + self.__accumNames.update(other.__accumNames) return self def __radd__(self, other): - if isinstance(other,int) and other == 0: + if isinstance(other, int) and other == 0: # useful for merging many ParseResults using 
sum() builtin return self.copy() else: # this may raise a TypeError - so be it return other + self - def __repr__( self ): - return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) ) + def __repr__(self): + return "(%s, %s)" % (repr(self.__toklist), repr(self.__tokdict)) - def __str__( self ): + def __str__(self): return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']' - def _asStringList( self, sep='' ): + def _asStringList(self, sep=''): out = [] for item in self.__toklist: if out and sep: out.append(sep) - if isinstance( item, ParseResults ): + if isinstance(item, ParseResults): out += item._asStringList() else: - out.append( _ustr(item) ) + out.append(_ustr(item)) return out - def asList( self ): + def asList(self): """ Returns the parse results as a nested list of matching tokens, all converted to strings. @@ -834,9 +905,9 @@ class ParseResults(object): result_list = result.asList() print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj'] """ - return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist] + return [res.asList() if isinstance(res, ParseResults) else res for res in self.__toklist] - def asDict( self ): + def asDict(self): """ Returns the named parse results as a nested dictionary. @@ -870,27 +941,27 @@ class ParseResults(object): else: return obj - return dict((k,toItem(v)) for k,v in item_fn()) + return dict((k, toItem(v)) for k, v in item_fn()) - def copy( self ): + def copy(self): """ Returns a new copy of a :class:`ParseResults` object. 
""" - ret = ParseResults( self.__toklist ) + ret = ParseResults(self.__toklist) ret.__tokdict = dict(self.__tokdict.items()) ret.__parent = self.__parent - ret.__accumNames.update( self.__accumNames ) + ret.__accumNames.update(self.__accumNames) ret.__name = self.__name return ret - def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ): + def asXML(self, doctag=None, namedItemsOnly=False, indent="", formatted=True): """ (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names. """ nl = "\n" out = [] - namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items() - for v in vlist) + namedItems = dict((v[1], k) for (k, vlist) in self.__tokdict.items() + for v in vlist) nextLevelIndent = indent + " " # collapse out indents if formatting is not desired @@ -912,20 +983,20 @@ class ParseResults(object): else: selfTag = "ITEM" - out += [ nl, indent, "<", selfTag, ">" ] + out += [nl, indent, "<", selfTag, ">"] - for i,res in enumerate(self.__toklist): - if isinstance(res,ParseResults): + for i, res in enumerate(self.__toklist): + if isinstance(res, ParseResults): if i in namedItems: - out += [ res.asXML(namedItems[i], - namedItemsOnly and doctag is None, - nextLevelIndent, - formatted)] + out += [res.asXML(namedItems[i], + namedItemsOnly and doctag is None, + nextLevelIndent, + formatted)] else: - out += [ res.asXML(None, - namedItemsOnly and doctag is None, - nextLevelIndent, - formatted)] + out += [res.asXML(None, + namedItemsOnly and doctag is None, + nextLevelIndent, + formatted)] else: # individual token, see if there is a name for it resTag = None @@ -937,16 +1008,16 @@ class ParseResults(object): else: resTag = "ITEM" xmlBodyText = _xml_escape(_ustr(res)) - out += [ nl, nextLevelIndent, "<", resTag, ">", - xmlBodyText, - "</", resTag, ">" ] + out += [nl, nextLevelIndent, "<", resTag, ">", + xmlBodyText, + "</", resTag, ">"] - out += [ nl, indent, "</", selfTag, ">" ] + out += 
[nl, indent, "</", selfTag, ">"] return "".join(out) - def __lookup(self,sub): - for k,vlist in self.__tokdict.items(): - for v,loc in vlist: + def __lookup(self, sub): + for k, vlist in self.__tokdict.items(): + for v, loc in vlist: if sub is v: return k return None @@ -984,14 +1055,14 @@ class ParseResults(object): return par.__lookup(self) else: return None - elif (len(self) == 1 and - len(self.__tokdict) == 1 and - next(iter(self.__tokdict.values()))[0][1] in (0,-1)): + elif (len(self) == 1 + and len(self.__tokdict) == 1 + and next(iter(self.__tokdict.values()))[0][1] in (0, -1)): return next(iter(self.__tokdict.keys())) else: return None - def dump(self, indent='', depth=0, full=True): + def dump(self, indent='', full=True, include_list=True, _depth=0): """ Diagnostic method for listing out the contents of a :class:`ParseResults`. Accepts an optional ``indent`` argument so @@ -1014,28 +1085,45 @@ class ParseResults(object): """ out = [] NL = '\n' - out.append( indent+_ustr(self.asList()) ) + if include_list: + out.append(indent + _ustr(self.asList())) + else: + out.append('') + if full: if self.haskeys(): - items = sorted((str(k), v) for k,v in self.items()) - for k,v in items: + items = sorted((str(k), v) for k, v in self.items()) + for k, v in items: if out: out.append(NL) - out.append( "%s%s- %s: " % (indent,(' '*depth), k) ) - if isinstance(v,ParseResults): + out.append("%s%s- %s: " % (indent, (' ' * _depth), k)) + if isinstance(v, ParseResults): if v: - out.append( v.dump(indent,depth+1) ) + out.append(v.dump(indent=indent, full=full, include_list=include_list, _depth=_depth + 1)) else: out.append(_ustr(v)) else: out.append(repr(v)) - elif any(isinstance(vv,ParseResults) for vv in self): + elif any(isinstance(vv, ParseResults) for vv in self): v = self - for i,vv in enumerate(v): - if isinstance(vv,ParseResults): - out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) )) + for i, vv in enumerate(v): + if 
isinstance(vv, ParseResults): + out.append("\n%s%s[%d]:\n%s%s%s" % (indent, + (' ' * (_depth)), + i, + indent, + (' ' * (_depth + 1)), + vv.dump(indent=indent, + full=full, + include_list=include_list, + _depth=_depth + 1))) else: - out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv))) + out.append("\n%s%s[%d]:\n%s%s%s" % (indent, + (' ' * (_depth)), + i, + indent, + (' ' * (_depth + 1)), + _ustr(vv))) return "".join(out) @@ -1068,18 +1156,15 @@ class ParseResults(object): # add support for pickle protocol def __getstate__(self): - return ( self.__toklist, - ( self.__tokdict.copy(), - self.__parent is not None and self.__parent() or None, - self.__accumNames, - self.__name ) ) + return (self.__toklist, + (self.__tokdict.copy(), + self.__parent is not None and self.__parent() or None, + self.__accumNames, + self.__name)) - def __setstate__(self,state): + def __setstate__(self, state): self.__toklist = state[0] - (self.__tokdict, - par, - inAccumNames, - self.__name) = state[1] + self.__tokdict, par, inAccumNames, self.__name = state[1] self.__accumNames = {} self.__accumNames.update(inAccumNames) if par is not None: @@ -1091,11 +1176,39 @@ class ParseResults(object): return self.__toklist, self.__name, self.__asList, self.__modal def __dir__(self): - return (dir(type(self)) + list(self.keys())) + return dir(type(self)) + list(self.keys()) + + @classmethod + def from_dict(cls, other, name=None): + """ + Helper classmethod to construct a ParseResults from a dict, preserving the + name-value relations as results names. 
If an optional 'name' argument is + given, a nested ParseResults will be returned + """ + def is_iterable(obj): + try: + iter(obj) + except Exception: + return False + else: + if PY_3: + return not isinstance(obj, (str, bytes)) + else: + return not isinstance(obj, basestring) + + ret = cls([]) + for k, v in other.items(): + if isinstance(v, Mapping): + ret += cls.from_dict(v, name=k) + else: + ret += cls([v], name=k, asList=is_iterable(v)) + if name is not None: + ret = cls([ret], name=name) + return ret MutableMapping.register(ParseResults) -def col (loc,strg): +def col (loc, strg): """Returns current column within a string, counting newlines as line separators. The first column is number 1. @@ -1107,9 +1220,9 @@ def col (loc,strg): location, and line and column positions within the parsed string. """ s = strg - return 1 if 0<loc<len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc) + return 1 if 0 < loc < len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc) -def lineno(loc,strg): +def lineno(loc, strg): """Returns current line number within a string, counting newlines as line separators. The first line is number 1. @@ -1119,26 +1232,26 @@ def lineno(loc,strg): suggested methods to maintain a consistent view of the parsed string, the parse location, and line and column positions within the parsed string. """ - return strg.count("\n",0,loc) + 1 + return strg.count("\n", 0, loc) + 1 -def line( loc, strg ): +def line(loc, strg): """Returns the line of text containing loc within a string, counting newlines as line separators. 
""" lastCR = strg.rfind("\n", 0, loc) nextCR = strg.find("\n", loc) if nextCR >= 0: - return strg[lastCR+1:nextCR] + return strg[lastCR + 1:nextCR] else: - return strg[lastCR+1:] + return strg[lastCR + 1:] -def _defaultStartDebugAction( instring, loc, expr ): - print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))) +def _defaultStartDebugAction(instring, loc, expr): + print(("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % (lineno(loc, instring), col(loc, instring)))) -def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ): - print ("Matched " + _ustr(expr) + " -> " + str(toks.asList())) +def _defaultSuccessDebugAction(instring, startloc, endloc, expr, toks): + print("Matched " + _ustr(expr) + " -> " + str(toks.asList())) -def _defaultExceptionDebugAction( instring, loc, expr, exc ): - print ("Exception raised:" + _ustr(exc)) +def _defaultExceptionDebugAction(instring, loc, expr, exc): + print("Exception raised:" + _ustr(exc)) def nullDebugAction(*args): """'Do-nothing' debug action, to suppress debugging output during parsing.""" @@ -1169,16 +1282,16 @@ def nullDebugAction(*args): 'decorator to trim function calls to match the arity of the target' def _trim_arity(func, maxargs=2): if func in singleArgBuiltins: - return lambda s,l,t: func(t) + return lambda s, l, t: func(t) limit = [0] foundArity = [False] # traceback return data structure changed in Py3.5 - normalize back to plain tuples - if system_version[:2] >= (3,5): + if system_version[:2] >= (3, 5): def extract_stack(limit=0): # special handling for Python 3.5.0 - extra deep call stack by 1 - offset = -3 if system_version == (3,5,0) else -2 - frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset] + offset = -3 if system_version == (3, 5, 0) else -2 + frame_summary = traceback.extract_stack(limit=-offset + limit - 1)[offset] return [frame_summary[:2]] def extract_tb(tb, limit=0): frames = 
traceback.extract_tb(tb, limit=limit) @@ -1195,7 +1308,7 @@ def _trim_arity(func, maxargs=2): # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!! this_line = extract_stack(limit=2)[-1] - pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF) + pa_call_line_synth = (this_line[0], this_line[1] + LINE_DIFF) def wrapper(*args): while 1: @@ -1213,7 +1326,10 @@ def _trim_arity(func, maxargs=2): if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth: raise finally: - del tb + try: + del tb + except NameError: + pass if limit[0] <= maxargs: limit[0] += 1 @@ -1231,13 +1347,14 @@ def _trim_arity(func, maxargs=2): return wrapper + class ParserElement(object): """Abstract base level parser element class.""" DEFAULT_WHITE_CHARS = " \n\t\r" verbose_stacktrace = False @staticmethod - def setDefaultWhitespaceChars( chars ): + def setDefaultWhitespaceChars(chars): r""" Overrides the default whitespace chars @@ -1274,10 +1391,16 @@ class ParserElement(object): """ ParserElement._literalStringClass = cls - def __init__( self, savelist=False ): + @classmethod + def _trim_traceback(cls, tb): + while tb.tb_next: + tb = tb.tb_next + return tb + + def __init__(self, savelist=False): self.parseAction = list() self.failAction = None - #~ self.name = "<unknown>" # don't define self.name, let subclasses try/except upcall + # ~ self.name = "<unknown>" # don't define self.name, let subclasses try/except upcall self.strRepr = None self.resultsName = None self.saveAsList = savelist @@ -1292,12 +1415,12 @@ class ParserElement(object): self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index self.errmsg = "" self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all) - self.debugActions = ( None, None, None ) #custom debug actions + self.debugActions = (None, None, None) # custom debug actions 
self.re = None self.callPreparse = True # used to avoid redundant calls to preParse self.callDuringTry = False - def copy( self ): + def copy(self): """ Make a copy of this :class:`ParserElement`. Useful for defining different parse actions for the same parsing pattern, using copies of @@ -1306,8 +1429,8 @@ class ParserElement(object): Example:: integer = Word(nums).setParseAction(lambda toks: int(toks[0])) - integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K") - integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") + integerK = integer.copy().addParseAction(lambda toks: toks[0] * 1024) + Suppress("K") + integerM = integer.copy().addParseAction(lambda toks: toks[0] * 1024 * 1024) + Suppress("M") print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M")) @@ -1317,16 +1440,16 @@ class ParserElement(object): Equivalent form of ``expr.copy()`` is just ``expr()``:: - integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") + integerM = integer().addParseAction(lambda toks: toks[0] * 1024 * 1024) + Suppress("M") """ - cpy = copy.copy( self ) + cpy = copy.copy(self) cpy.parseAction = self.parseAction[:] cpy.ignoreExprs = self.ignoreExprs[:] if self.copyDefaultWhiteChars: cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS return cpy - def setName( self, name ): + def setName(self, name): """ Define name for this expression, makes debugging and exception messages clearer. @@ -1337,11 +1460,11 @@ class ParserElement(object): """ self.name = name self.errmsg = "Expected " + self.name - if hasattr(self,"exception"): - self.exception.msg = self.errmsg + if __diag__.enable_debug_on_named_expressions: + self.setDebug() return self - def setResultsName( self, name, listAllMatches=False ): + def setResultsName(self, name, listAllMatches=False): """ Define name for referencing matching tokens as a nested attribute of the returned parse results. 
@@ -1362,15 +1485,18 @@ class ParserElement(object): # equivalent form: date_str = integer("year") + '/' + integer("month") + '/' + integer("day") """ + return self._setResultsName(name, listAllMatches) + + def _setResultsName(self, name, listAllMatches=False): newself = self.copy() if name.endswith("*"): name = name[:-1] - listAllMatches=True + listAllMatches = True newself.resultsName = name newself.modalResults = not listAllMatches return newself - def setBreak(self,breakFlag = True): + def setBreak(self, breakFlag=True): """Method to invoke the Python pdb debugger when this element is about to be parsed. Set ``breakFlag`` to True to enable, False to disable. @@ -1379,20 +1505,21 @@ class ParserElement(object): _parseMethod = self._parse def breaker(instring, loc, doActions=True, callPreParse=True): import pdb + # this call to pdb.set_trace() is intentional, not a checkin error pdb.set_trace() - return _parseMethod( instring, loc, doActions, callPreParse ) + return _parseMethod(instring, loc, doActions, callPreParse) breaker._originalParseMethod = _parseMethod self._parse = breaker else: - if hasattr(self._parse,"_originalParseMethod"): + if hasattr(self._parse, "_originalParseMethod"): self._parse = self._parse._originalParseMethod return self - def setParseAction( self, *fns, **kwargs ): + def setParseAction(self, *fns, **kwargs): """ Define one or more actions to perform when successfully matching parse element definition. - Parse action fn is a callable method with 0-3 arguments, called as ``fn(s,loc,toks)`` , - ``fn(loc,toks)`` , ``fn(toks)`` , or just ``fn()`` , where: + Parse action fn is a callable method with 0-3 arguments, called as ``fn(s, loc, toks)`` , + ``fn(loc, toks)`` , ``fn(toks)`` , or just ``fn()`` , where: - s = the original string being parsed (see note below) - loc = the location of the matching substring @@ -1402,8 +1529,11 @@ class ParserElement(object): value from fn, and the modified list of tokens will replace the original. 
Otherwise, fn does not need to return any value. + If None is passed as the parse action, all previously added parse actions for this + expression are cleared. + Optional keyword arguments: - - callDuringTry = (default= ``False`` ) indicate if parse action should be run during lookaheads and alternate testing + - callDuringTry = (default= ``False``) indicate if parse action should be run during lookaheads and alternate testing Note: the default parsing behavior is to expand tabs in the input string before starting the parsing process. See :class:`parseString for more @@ -1425,11 +1555,16 @@ class ParserElement(object): # note that integer fields are now ints, not strings date_str.parseString("1999/12/31") # -> [1999, '/', 12, '/', 31] """ - self.parseAction = list(map(_trim_arity, list(fns))) - self.callDuringTry = kwargs.get("callDuringTry", False) + if list(fns) == [None,]: + self.parseAction = [] + else: + if not all(callable(fn) for fn in fns): + raise TypeError("parse actions must be callable") + self.parseAction = list(map(_trim_arity, list(fns))) + self.callDuringTry = kwargs.get("callDuringTry", False) return self - def addParseAction( self, *fns, **kwargs ): + def addParseAction(self, *fns, **kwargs): """ Add one or more parse actions to expression's list of parse actions. See :class:`setParseAction`. 
@@ -1457,21 +1592,17 @@ class ParserElement(object): result = date_str.parseString("1999/12/31") # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1) """ - msg = kwargs.get("message", "failed user-defined condition") - exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException for fn in fns: - fn = _trim_arity(fn) - def pa(s,l,t): - if not bool(fn(s,l,t)): - raise exc_type(s,l,msg) - self.parseAction.append(pa) + self.parseAction.append(conditionAsParseAction(fn, message=kwargs.get('message'), + fatal=kwargs.get('fatal', False))) + self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) return self - def setFailAction( self, fn ): + def setFailAction(self, fn): """Define action to perform if parsing fails at this expression. Fail acton fn is a callable function that takes the arguments - ``fn(s,loc,expr,err)`` where: + ``fn(s, loc, expr, err)`` where: - s = string being parsed - loc = location where expression match was attempted and failed - expr = the parse expression that failed @@ -1481,22 +1612,22 @@ class ParserElement(object): self.failAction = fn return self - def _skipIgnorables( self, instring, loc ): + def _skipIgnorables(self, instring, loc): exprsFound = True while exprsFound: exprsFound = False for e in self.ignoreExprs: try: while 1: - loc,dummy = e._parse( instring, loc ) + loc, dummy = e._parse(instring, loc) exprsFound = True except ParseException: pass return loc - def preParse( self, instring, loc ): + def preParse(self, instring, loc): if self.ignoreExprs: - loc = self._skipIgnorables( instring, loc ) + loc = self._skipIgnorables(instring, loc) if self.skipWhitespace: wt = self.whiteChars @@ -1506,101 +1637,105 @@ class ParserElement(object): return loc - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): return loc, [] - def postParse( self, instring, loc, tokenlist ): + def postParse(self, instring, loc, tokenlist): 
return tokenlist - #~ @profile - def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ): - debugging = ( self.debug ) #and doActions ) + # ~ @profile + def _parseNoCache(self, instring, loc, doActions=True, callPreParse=True): + TRY, MATCH, FAIL = 0, 1, 2 + debugging = (self.debug) # and doActions) if debugging or self.failAction: - #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )) - if (self.debugActions[0] ): - self.debugActions[0]( instring, loc, self ) - if callPreParse and self.callPreparse: - preloc = self.preParse( instring, loc ) - else: - preloc = loc - tokensStart = preloc + # ~ print ("Match", self, "at loc", loc, "(%d, %d)" % (lineno(loc, instring), col(loc, instring))) + if self.debugActions[TRY]: + self.debugActions[TRY](instring, loc, self) try: - try: - loc,tokens = self.parseImpl( instring, preloc, doActions ) - except IndexError: - raise ParseException( instring, len(instring), self.errmsg, self ) - except ParseBaseException as err: - #~ print ("Exception raised:", err) - if self.debugActions[2]: - self.debugActions[2]( instring, tokensStart, self, err ) + if callPreParse and self.callPreparse: + preloc = self.preParse(instring, loc) + else: + preloc = loc + tokensStart = preloc + if self.mayIndexError or preloc >= len(instring): + try: + loc, tokens = self.parseImpl(instring, preloc, doActions) + except IndexError: + raise ParseException(instring, len(instring), self.errmsg, self) + else: + loc, tokens = self.parseImpl(instring, preloc, doActions) + except Exception as err: + # ~ print ("Exception raised:", err) + if self.debugActions[FAIL]: + self.debugActions[FAIL](instring, tokensStart, self, err) if self.failAction: - self.failAction( instring, tokensStart, self, err ) + self.failAction(instring, tokensStart, self, err) raise else: if callPreParse and self.callPreparse: - preloc = self.preParse( instring, loc ) + preloc = self.preParse(instring, loc) else: preloc = loc 
tokensStart = preloc if self.mayIndexError or preloc >= len(instring): try: - loc,tokens = self.parseImpl( instring, preloc, doActions ) + loc, tokens = self.parseImpl(instring, preloc, doActions) except IndexError: - raise ParseException( instring, len(instring), self.errmsg, self ) + raise ParseException(instring, len(instring), self.errmsg, self) else: - loc,tokens = self.parseImpl( instring, preloc, doActions ) + loc, tokens = self.parseImpl(instring, preloc, doActions) - tokens = self.postParse( instring, loc, tokens ) + tokens = self.postParse(instring, loc, tokens) - retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults ) + retTokens = ParseResults(tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults) if self.parseAction and (doActions or self.callDuringTry): if debugging: try: for fn in self.parseAction: try: - tokens = fn( instring, tokensStart, retTokens ) + tokens = fn(instring, tokensStart, retTokens) except IndexError as parse_action_exc: exc = ParseException("exception raised in parse action") exc.__cause__ = parse_action_exc raise exc if tokens is not None and tokens is not retTokens: - retTokens = ParseResults( tokens, + retTokens = ParseResults(tokens, self.resultsName, - asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), - modal=self.modalResults ) - except ParseBaseException as err: - #~ print "Exception raised in user parse action:", err - if (self.debugActions[2] ): - self.debugActions[2]( instring, tokensStart, self, err ) + asList=self.saveAsList and isinstance(tokens, (ParseResults, list)), + modal=self.modalResults) + except Exception as err: + # ~ print "Exception raised in user parse action:", err + if self.debugActions[FAIL]: + self.debugActions[FAIL](instring, tokensStart, self, err) raise else: for fn in self.parseAction: try: - tokens = fn( instring, tokensStart, retTokens ) + tokens = fn(instring, tokensStart, retTokens) except IndexError as 
parse_action_exc: exc = ParseException("exception raised in parse action") exc.__cause__ = parse_action_exc raise exc if tokens is not None and tokens is not retTokens: - retTokens = ParseResults( tokens, + retTokens = ParseResults(tokens, self.resultsName, - asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), - modal=self.modalResults ) + asList=self.saveAsList and isinstance(tokens, (ParseResults, list)), + modal=self.modalResults) if debugging: - #~ print ("Matched",self,"->",retTokens.asList()) - if (self.debugActions[1] ): - self.debugActions[1]( instring, tokensStart, loc, self, retTokens ) + # ~ print ("Matched", self, "->", retTokens.asList()) + if self.debugActions[MATCH]: + self.debugActions[MATCH](instring, tokensStart, loc, self, retTokens) return loc, retTokens - def tryParse( self, instring, loc ): + def tryParse(self, instring, loc): try: - return self._parse( instring, loc, doActions=False )[0] + return self._parse(instring, loc, doActions=False)[0] except ParseFatalException: - raise ParseException( instring, loc, self.errmsg, self) + raise ParseException(instring, loc, self.errmsg, self) def canParseNext(self, instring, loc): try: @@ -1697,7 +1832,7 @@ class ParserElement(object): # this method gets repeatedly called during backtracking with the same arguments - # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression - def _parseCache( self, instring, loc, doActions=True, callPreParse=True ): + def _parseCache(self, instring, loc, doActions=True, callPreParse=True): HIT, MISS = 0, 1 lookup = (self, instring, loc, callPreParse, doActions) with ParserElement.packrat_cache_lock: @@ -1718,7 +1853,7 @@ class ParserElement(object): ParserElement.packrat_cache_stats[HIT] += 1 if isinstance(value, Exception): raise value - return (value[0], value[1].copy()) + return value[0], value[1].copy() _parse = _parseNoCache @@ -1763,12 +1898,16 @@ class ParserElement(object): ParserElement.packrat_cache = 
ParserElement._FifoCache(cache_size_limit) ParserElement._parse = ParserElement._parseCache - def parseString( self, instring, parseAll=False ): + def parseString(self, instring, parseAll=False): """ Execute the parse expression with the given string. This is the main interface to the client code, once the complete expression has been built. + Returns the parsed data as a :class:`ParseResults` object, which may be + accessed as a list, or as a dict or object with attributes if the given parser + includes results names. + If you want the grammar to require that the entire input string be successfully parsed, then set ``parseAll`` to True (equivalent to ending the grammar with ``StringEnd()``). @@ -1782,7 +1921,7 @@ class ParserElement(object): - calling ``parseWithTabs`` on your grammar before calling ``parseString`` (see :class:`parseWithTabs`) - - define your parse action using the full ``(s,loc,toks)`` signature, and + - define your parse action using the full ``(s, loc, toks)`` signature, and reference the input string using the parse action's ``s`` argument - explictly expand the tabs in your input string before calling ``parseString`` @@ -1795,27 +1934,29 @@ class ParserElement(object): ParserElement.resetCache() if not self.streamlined: self.streamline() - #~ self.saveAsList = True + # ~ self.saveAsList = True for e in self.ignoreExprs: e.streamline() if not self.keepTabs: instring = instring.expandtabs() try: - loc, tokens = self._parse( instring, 0 ) + loc, tokens = self._parse(instring, 0) if parseAll: - loc = self.preParse( instring, loc ) + loc = self.preParse(instring, loc) se = Empty() + StringEnd() - se._parse( instring, loc ) + se._parse(instring, loc) except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace + # catch and re-raise exception from here, clearing out pyparsing internal stack trace + if getattr(exc, '__traceback__', None) is 
not None: + exc.__traceback__ = self._trim_traceback(exc.__traceback__) raise exc else: return tokens - def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ): + def scanString(self, instring, maxMatches=_MAX_INT, overlap=False): """ Scan the input string for expression matches. Each match will return the matching tokens, start location, and end location. May be called with optional @@ -1830,7 +1971,7 @@ class ParserElement(object): source = "sldjf123lsdjjkf345sldkjf879lkjsfd987" print(source) - for tokens,start,end in Word(alphas).scanString(source): + for tokens, start, end in Word(alphas).scanString(source): print(' '*start + '^'*(end-start)) print(' '*start + tokens[0]) @@ -1862,16 +2003,16 @@ class ParserElement(object): try: while loc <= instrlen and matches < maxMatches: try: - preloc = preparseFn( instring, loc ) - nextLoc,tokens = parseFn( instring, preloc, callPreParse=False ) + preloc = preparseFn(instring, loc) + nextLoc, tokens = parseFn(instring, preloc, callPreParse=False) except ParseException: - loc = preloc+1 + loc = preloc + 1 else: if nextLoc > loc: matches += 1 yield tokens, preloc, nextLoc if overlap: - nextloc = preparseFn( instring, loc ) + nextloc = preparseFn(instring, loc) if nextloc > loc: loc = nextLoc else: @@ -1879,15 +2020,17 @@ class ParserElement(object): else: loc = nextLoc else: - loc = preloc+1 + loc = preloc + 1 except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace + # catch and re-raise exception from here, clearing out pyparsing internal stack trace + if getattr(exc, '__traceback__', None) is not None: + exc.__traceback__ = self._trim_traceback(exc.__traceback__) raise exc - def transformString( self, instring ): + def transformString(self, instring): """ Extension to :class:`scanString`, to modify matching text with modified tokens that may be returned from a parse action. 
To use ``transformString``, define a grammar and @@ -1913,27 +2056,29 @@ class ParserElement(object): # keep string locs straight between transformString and scanString self.keepTabs = True try: - for t,s,e in self.scanString( instring ): - out.append( instring[lastE:s] ) + for t, s, e in self.scanString(instring): + out.append(instring[lastE:s]) if t: - if isinstance(t,ParseResults): + if isinstance(t, ParseResults): out += t.asList() - elif isinstance(t,list): + elif isinstance(t, list): out += t else: out.append(t) lastE = e out.append(instring[lastE:]) out = [o for o in out if o] - return "".join(map(_ustr,_flatten(out))) + return "".join(map(_ustr, _flatten(out))) except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace + # catch and re-raise exception from here, clearing out pyparsing internal stack trace + if getattr(exc, '__traceback__', None) is not None: + exc.__traceback__ = self._trim_traceback(exc.__traceback__) raise exc - def searchString( self, instring, maxMatches=_MAX_INT ): + def searchString(self, instring, maxMatches=_MAX_INT): """ Another extension to :class:`scanString`, simplifying the access to the tokens found to match the given parse expression. 
May be called with optional @@ -1955,12 +2100,14 @@ class ParserElement(object): ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity'] """ try: - return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ]) + return ParseResults([t for t, s, e in self.scanString(instring, maxMatches)]) except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace + # catch and re-raise exception from here, clearing out pyparsing internal stack trace + if getattr(exc, '__traceback__', None) is not None: + exc.__traceback__ = self._trim_traceback(exc.__traceback__) raise exc def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False): @@ -1981,14 +2128,14 @@ class ParserElement(object): """ splits = 0 last = 0 - for t,s,e in self.scanString(instring, maxMatches=maxsplit): + for t, s, e in self.scanString(instring, maxMatches=maxsplit): yield instring[last:s] if includeSeparators: yield t[0] last = e yield instring[last:] - def __add__(self, other ): + def __add__(self, other): """ Implementation of + operator - returns :class:`And`. Adding strings to a ParserElement converts them to :class:`Literal`s by default. @@ -2002,24 +2149,42 @@ class ParserElement(object): prints:: Hello, World! -> ['Hello', ',', 'World', '!'] + + ``...`` may be used as a parse expression as a short form of :class:`SkipTo`. + + Literal('start') + ... + Literal('end') + + is equivalent to: + + Literal('start') + SkipTo('end')("_skipped*") + Literal('end') + + Note that the skipped text is returned with '_skipped' as a results name, + and to support having multiple skips in the same parser, the value returned is + a list of all skipped text. 
""" - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): + if other is Ellipsis: + return _PendingSkip(self) + + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) return None - return And( [ self, other ] ) + return And([self, other]) - def __radd__(self, other ): + def __radd__(self, other): """ Implementation of + operator when left operand is not a :class:`ParserElement` """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): + if other is Ellipsis: + return SkipTo(self)("_skipped*") + self + + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) return None return other + self @@ -2027,64 +2192,70 @@ class ParserElement(object): """ Implementation of - operator, returns :class:`And` with error stop """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) return None return self + And._ErrorStop() + other - def __rsub__(self, other ): + def __rsub__(self, other): """ Implementation of - operator when left operand is not a :class:`ParserElement` """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other 
) - if not isinstance( other, ParserElement ): + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) return None return other - self - def __mul__(self,other): + def __mul__(self, other): """ Implementation of * operator, allows use of ``expr * 3`` in place of ``expr + expr + expr``. Expressions may also me multiplied by a 2-integer - tuple, similar to ``{min,max}`` multipliers in regular expressions. Tuples + tuple, similar to ``{min, max}`` multipliers in regular expressions. Tuples may also include ``None`` as in: - - ``expr*(n,None)`` or ``expr*(n,)`` is equivalent + - ``expr*(n, None)`` or ``expr*(n, )`` is equivalent to ``expr*n + ZeroOrMore(expr)`` (read as "at least n instances of ``expr``") - - ``expr*(None,n)`` is equivalent to ``expr*(0,n)`` + - ``expr*(None, n)`` is equivalent to ``expr*(0, n)`` (read as "0 to n instances of ``expr``") - - ``expr*(None,None)`` is equivalent to ``ZeroOrMore(expr)`` - - ``expr*(1,None)`` is equivalent to ``OneOrMore(expr)`` + - ``expr*(None, None)`` is equivalent to ``ZeroOrMore(expr)`` + - ``expr*(1, None)`` is equivalent to ``OneOrMore(expr)`` - Note that ``expr*(None,n)`` does not raise an exception if + Note that ``expr*(None, n)`` does not raise an exception if more than n exprs exist in the input stream; that is, - ``expr*(None,n)`` does not enforce a maximum number of expr + ``expr*(None, n)`` does not enforce a maximum number of expr occurrences. 
If this behavior is desired, then write - ``expr*(None,n) + ~expr`` + ``expr*(None, n) + ~expr`` """ - if isinstance(other,int): - minElements, optElements = other,0 - elif isinstance(other,tuple): + if other is Ellipsis: + other = (0, None) + elif isinstance(other, tuple) and other[:1] == (Ellipsis,): + other = ((0, ) + other[1:] + (None,))[:2] + + if isinstance(other, int): + minElements, optElements = other, 0 + elif isinstance(other, tuple): + other = tuple(o if o is not Ellipsis else None for o in other) other = (other + (None, None))[:2] if other[0] is None: other = (0, other[1]) - if isinstance(other[0],int) and other[1] is None: + if isinstance(other[0], int) and other[1] is None: if other[0] == 0: return ZeroOrMore(self) if other[0] == 1: return OneOrMore(self) else: - return self*other[0] + ZeroOrMore(self) - elif isinstance(other[0],int) and isinstance(other[1],int): + return self * other[0] + ZeroOrMore(self) + elif isinstance(other[0], int) and isinstance(other[1], int): minElements, optElements = other optElements -= minElements else: - raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1])) + raise TypeError("cannot multiply 'ParserElement' and ('%s', '%s') objects", type(other[0]), type(other[1])) else: raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other)) @@ -2093,108 +2264,152 @@ class ParserElement(object): if optElements < 0: raise ValueError("second tuple value must be greater or equal to first tuple value") if minElements == optElements == 0: - raise ValueError("cannot multiply ParserElement by 0 or (0,0)") + raise ValueError("cannot multiply ParserElement by 0 or (0, 0)") - if (optElements): + if optElements: def makeOptionalList(n): - if n>1: - return Optional(self + makeOptionalList(n-1)) + if n > 1: + return Optional(self + makeOptionalList(n - 1)) else: return Optional(self) if minElements: if minElements == 1: ret = self + makeOptionalList(optElements) else: - 
ret = And([self]*minElements) + makeOptionalList(optElements) + ret = And([self] * minElements) + makeOptionalList(optElements) else: ret = makeOptionalList(optElements) else: if minElements == 1: ret = self else: - ret = And([self]*minElements) + ret = And([self] * minElements) return ret def __rmul__(self, other): return self.__mul__(other) - def __or__(self, other ): + def __or__(self, other): """ Implementation of | operator - returns :class:`MatchFirst` """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): + if other is Ellipsis: + return _PendingSkip(self, must_skip=True) + + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) return None - return MatchFirst( [ self, other ] ) + return MatchFirst([self, other]) - def __ror__(self, other ): + def __ror__(self, other): """ Implementation of | operator when left operand is not a :class:`ParserElement` """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) return None return other | self - def __xor__(self, other ): + def __xor__(self, other): """ Implementation of ^ operator - returns :class:`Or` """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): warnings.warn("Cannot 
combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) return None - return Or( [ self, other ] ) + return Or([self, other]) - def __rxor__(self, other ): + def __rxor__(self, other): """ Implementation of ^ operator when left operand is not a :class:`ParserElement` """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) return None return other ^ self - def __and__(self, other ): + def __and__(self, other): """ Implementation of & operator - returns :class:`Each` """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) return None - return Each( [ self, other ] ) + return Each([self, other]) - def __rand__(self, other ): + def __rand__(self, other): """ Implementation of & operator when left operand is not a :class:`ParserElement` """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) return None return other & self - def __invert__( self ): + def 
__invert__(self): """ Implementation of ~ operator - returns :class:`NotAny` """ - return NotAny( self ) + return NotAny(self) + + def __iter__(self): + # must implement __iter__ to override legacy use of sequential access to __getitem__ to + # iterate over a sequence + raise TypeError('%r object is not iterable' % self.__class__.__name__) + + def __getitem__(self, key): + """ + use ``[]`` indexing notation as a short form for expression repetition: + - ``expr[n]`` is equivalent to ``expr*n`` + - ``expr[m, n]`` is equivalent to ``expr*(m, n)`` + - ``expr[n, ...]`` or ``expr[n,]`` is equivalent + to ``expr*n + ZeroOrMore(expr)`` + (read as "at least n instances of ``expr``") + - ``expr[..., n]`` is equivalent to ``expr*(0, n)`` + (read as "0 to n instances of ``expr``") + - ``expr[...]`` and ``expr[0, ...]`` are equivalent to ``ZeroOrMore(expr)`` + - ``expr[1, ...]`` is equivalent to ``OneOrMore(expr)`` + ``None`` may be used in place of ``...``. + + Note that ``expr[..., n]`` and ``expr[m, n]``do not raise an exception + if more than ``n`` ``expr``s exist in the input stream. If this behavior is + desired, then write ``expr[..., n] + ~expr``. + """ + + # convert single arg keys to tuples + try: + if isinstance(key, str): + key = (key,) + iter(key) + except TypeError: + key = (key, key) + + if len(key) > 2: + warnings.warn("only 1 or 2 index arguments supported ({0}{1})".format(key[:5], + '... 
[{0}]'.format(len(key)) + if len(key) > 5 else '')) + + # clip to 2 elements + ret = self * tuple(key[:2]) + return ret def __call__(self, name=None): """ @@ -2208,22 +2423,22 @@ class ParserElement(object): Example:: # these are equivalent - userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") - userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") + userdata = Word(alphas).setResultsName("name") + Word(nums + "-").setResultsName("socsecno") + userdata = Word(alphas)("name") + Word(nums + "-")("socsecno") """ if name is not None: - return self.setResultsName(name) + return self._setResultsName(name) else: return self.copy() - def suppress( self ): + def suppress(self): """ Suppresses the output of this :class:`ParserElement`; useful to keep punctuation from cluttering up returned output. """ - return Suppress( self ) + return Suppress(self) - def leaveWhitespace( self ): + def leaveWhitespace(self): """ Disables the skipping of whitespace before matching the characters in the :class:`ParserElement`'s defined pattern. This is normally only used internally by @@ -2232,7 +2447,7 @@ class ParserElement(object): self.skipWhitespace = False return self - def setWhitespaceChars( self, chars ): + def setWhitespaceChars(self, chars): """ Overrides the default whitespace chars """ @@ -2241,7 +2456,7 @@ class ParserElement(object): self.copyDefaultWhiteChars = False return self - def parseWithTabs( self ): + def parseWithTabs(self): """ Overrides default behavior to expand ``<TAB>``s to spaces before parsing the input string. 
Must be called before ``parseString`` when the input grammar contains elements that @@ -2250,7 +2465,7 @@ class ParserElement(object): self.keepTabs = True return self - def ignore( self, other ): + def ignore(self, other): """ Define expression to be ignored (e.g., comments) while doing pattern matching; may be called repeatedly, to define multiple comment or other @@ -2267,14 +2482,14 @@ class ParserElement(object): if isinstance(other, basestring): other = Suppress(other) - if isinstance( other, Suppress ): + if isinstance(other, Suppress): if other not in self.ignoreExprs: self.ignoreExprs.append(other) else: - self.ignoreExprs.append( Suppress( other.copy() ) ) + self.ignoreExprs.append(Suppress(other.copy())) return self - def setDebugActions( self, startAction, successAction, exceptionAction ): + def setDebugActions(self, startAction, successAction, exceptionAction): """ Enable display of debugging messages while doing pattern matching. """ @@ -2284,7 +2499,7 @@ class ParserElement(object): self.debug = True return self - def setDebug( self, flag=True ): + def setDebug(self, flag=True): """ Enable display of debugging messages while doing pattern matching. Set ``flag`` to True to enable, False to disable. @@ -2322,32 +2537,32 @@ class ParserElement(object): name created for the :class:`Word` expression without calling ``setName`` is ``"W:(ABCD...)"``. 
""" if flag: - self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction ) + self.setDebugActions(_defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction) else: self.debug = False return self - def __str__( self ): + def __str__(self): return self.name - def __repr__( self ): + def __repr__(self): return _ustr(self) - def streamline( self ): + def streamline(self): self.streamlined = True self.strRepr = None return self - def checkRecursion( self, parseElementList ): + def checkRecursion(self, parseElementList): pass - def validate( self, validateTrace=[] ): + def validate(self, validateTrace=None): """ Check defined expressions for valid structure, check for infinite recursive definitions. """ - self.checkRecursion( [] ) + self.checkRecursion([]) - def parseFile( self, file_or_filename, parseAll=False ): + def parseFile(self, file_or_filename, parseAll=False): """ Execute the parse expression on the given file or filename. 
If a filename is specified (instead of a file object), @@ -2364,27 +2579,30 @@ class ParserElement(object): if ParserElement.verbose_stacktrace: raise else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace + # catch and re-raise exception from here, clearing out pyparsing internal stack trace + if getattr(exc, '__traceback__', None) is not None: + exc.__traceback__ = self._trim_traceback(exc.__traceback__) raise exc - def __eq__(self,other): - if isinstance(other, ParserElement): - return self is other or vars(self) == vars(other) + def __eq__(self, other): + if self is other: + return True elif isinstance(other, basestring): return self.matches(other) - else: - return super(ParserElement,self)==other + elif isinstance(other, ParserElement): + return vars(self) == vars(other) + return False - def __ne__(self,other): + def __ne__(self, other): return not (self == other) def __hash__(self): - return hash(id(self)) + return id(self) - def __req__(self,other): + def __req__(self, other): return self == other - def __rne__(self,other): + def __rne__(self, other): return not (self == other) def matches(self, testString, parseAll=True): @@ -2408,7 +2626,8 @@ class ParserElement(object): return False def runTests(self, tests, parseAll=True, comment='#', - fullDump=True, printResults=True, failureTests=False, postParse=None): + fullDump=True, printResults=True, failureTests=False, postParse=None, + file=None): """ Execute the parse expression on a series of test strings, showing each test, the parsed results or where the parse failed. 
Quick and easy way to @@ -2425,6 +2644,8 @@ class ParserElement(object): - failureTests - (default= ``False``) indicates if these tests are expected to fail parsing - postParse - (default= ``None``) optional callback for successful parse results; called as `fn(test_string, parse_results)` and returns a string to be added to the test output + - file - (default=``None``) optional file-like object to which test output will be written; + if None, will default to ``sys.stdout`` Returns: a (success, results) tuple, where success indicates that all tests succeeded (or failed if ``failureTests`` is True), and the results contain a list of lines of each @@ -2504,37 +2725,34 @@ class ParserElement(object): tests = list(map(str.strip, tests.rstrip().splitlines())) if isinstance(comment, basestring): comment = Literal(comment) + if file is None: + file = sys.stdout + print_ = file.write + allResults = [] comments = [] success = True + NL = Literal(r'\n').addParseAction(replaceWith('\n')).ignore(quotedString) + BOM = u'\ufeff' for t in tests: if comment is not None and comment.matches(t, False) or comments and not t: comments.append(t) continue if not t: continue - out = ['\n'.join(comments), t] + out = ['\n' + '\n'.join(comments) if comments else '', t] comments = [] try: # convert newline marks to actual newlines, and strip leading BOM if present - t = t.replace(r'\n','\n').lstrip('\ufeff') + t = NL.transformString(t.lstrip(BOM)) result = self.parseString(t, parseAll=parseAll) - out.append(result.dump(full=fullDump)) - success = success and not failureTests - if postParse is not None: - try: - pp_value = postParse(t, result) - if pp_value is not None: - out.append(str(pp_value)) - except Exception as e: - out.append("{0} failed: {1}: {2}".format(postParse.__name__, type(e).__name__, e)) except ParseBaseException as pe: fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else "" if '\n' in t: out.append(line(pe.loc, t)) - out.append(' '*(col(pe.loc,t)-1) + '^' + fatal) + 
out.append(' ' * (col(pe.loc, t) - 1) + '^' + fatal) else: - out.append(' '*pe.loc + '^' + fatal) + out.append(' ' * pe.loc + '^' + fatal) out.append("FAIL: " + str(pe)) success = success and failureTests result = pe @@ -2542,30 +2760,80 @@ class ParserElement(object): out.append("FAIL-EXCEPTION: " + str(exc)) success = success and failureTests result = exc + else: + success = success and not failureTests + if postParse is not None: + try: + pp_value = postParse(t, result) + if pp_value is not None: + if isinstance(pp_value, ParseResults): + out.append(pp_value.dump()) + else: + out.append(str(pp_value)) + else: + out.append(result.dump()) + except Exception as e: + out.append(result.dump(full=fullDump)) + out.append("{0} failed: {1}: {2}".format(postParse.__name__, type(e).__name__, e)) + else: + out.append(result.dump(full=fullDump)) if printResults: if fullDump: out.append('') - print('\n'.join(out)) + print_('\n'.join(out)) allResults.append((t, result)) return success, allResults +class _PendingSkip(ParserElement): + # internal placeholder class to hold a place were '...' 
is added to a parser element, + # once another ParserElement is added, this placeholder will be replaced with a SkipTo + def __init__(self, expr, must_skip=False): + super(_PendingSkip, self).__init__() + self.strRepr = str(expr + Empty()).replace('Empty', '...') + self.name = self.strRepr + self.anchor = expr + self.must_skip = must_skip + + def __add__(self, other): + skipper = SkipTo(other).setName("...")("_skipped*") + if self.must_skip: + def must_skip(t): + if not t._skipped or t._skipped.asList() == ['']: + del t[0] + t.pop("_skipped", None) + def show_skip(t): + if t._skipped.asList()[-1:] == ['']: + skipped = t.pop('_skipped') + t['_skipped'] = 'missing <' + repr(self.anchor) + '>' + return (self.anchor + skipper().addParseAction(must_skip) + | skipper().addParseAction(show_skip)) + other + + return self.anchor + skipper + other + + def __repr__(self): + return self.strRepr + + def parseImpl(self, *args): + raise Exception("use of `...` expression without following SkipTo target expression") + + class Token(ParserElement): """Abstract :class:`ParserElement` subclass, for defining atomic matching patterns. """ - def __init__( self ): - super(Token,self).__init__( savelist=False ) + def __init__(self): + super(Token, self).__init__(savelist=False) class Empty(Token): """An empty token, will always match. """ - def __init__( self ): - super(Empty,self).__init__() + def __init__(self): + super(Empty, self).__init__() self.name = "Empty" self.mayReturnEmpty = True self.mayIndexError = False @@ -2574,14 +2842,14 @@ class Empty(Token): class NoMatch(Token): """A token that will never match. 
""" - def __init__( self ): - super(NoMatch,self).__init__() + def __init__(self): + super(NoMatch, self).__init__() self.name = "NoMatch" self.mayReturnEmpty = True self.mayIndexError = False self.errmsg = "Unmatchable token" - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): raise ParseException(instring, loc, self.errmsg, self) @@ -2599,8 +2867,8 @@ class Literal(Token): For keyword matching (force word break before and after the matched string), use :class:`Keyword` or :class:`CaselessKeyword`. """ - def __init__( self, matchString ): - super(Literal,self).__init__() + def __init__(self, matchString): + super(Literal, self).__init__() self.match = matchString self.matchLen = len(matchString) try: @@ -2614,15 +2882,22 @@ class Literal(Token): self.mayReturnEmpty = False self.mayIndexError = False - # Performance tuning: this routine gets called a *lot* - # if this is a single character match string and the first character matches, - # short-circuit as quickly as possible, and avoid calling startswith - #~ @profile - def parseImpl( self, instring, loc, doActions=True ): - if (instring[loc] == self.firstMatchChar and - (self.matchLen==1 or instring.startswith(self.match,loc)) ): - return loc+self.matchLen, self.match + # Performance tuning: modify __class__ to select + # a parseImpl optimized for single-character check + if self.matchLen == 1 and type(self) is Literal: + self.__class__ = _SingleCharLiteral + + def parseImpl(self, instring, loc, doActions=True): + if instring[loc] == self.firstMatchChar and instring.startswith(self.match, loc): + return loc + self.matchLen, self.match raise ParseException(instring, loc, self.errmsg, self) + +class _SingleCharLiteral(Literal): + def parseImpl(self, instring, loc, doActions=True): + if instring[loc] == self.firstMatchChar: + return loc + 1, self.match + raise ParseException(instring, loc, self.errmsg, self) + _L = Literal ParserElement._literalStringClass = 
Literal @@ -2651,10 +2926,10 @@ class Keyword(Token): For case-insensitive matching, use :class:`CaselessKeyword`. """ - DEFAULT_KEYWORD_CHARS = alphanums+"_$" + DEFAULT_KEYWORD_CHARS = alphanums + "_$" - def __init__( self, matchString, identChars=None, caseless=False ): - super(Keyword,self).__init__() + def __init__(self, matchString, identChars=None, caseless=False): + super(Keyword, self).__init__() if identChars is None: identChars = Keyword.DEFAULT_KEYWORD_CHARS self.match = matchString @@ -2663,7 +2938,7 @@ class Keyword(Token): self.firstMatchChar = matchString[0] except IndexError: warnings.warn("null string passed to Keyword; use Empty() instead", - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) self.name = '"%s"' % self.match self.errmsg = "Expected " + self.name self.mayReturnEmpty = False @@ -2674,27 +2949,32 @@ class Keyword(Token): identChars = identChars.upper() self.identChars = set(identChars) - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): if self.caseless: - if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and - (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and - (loc == 0 or instring[loc-1].upper() not in self.identChars) ): - return loc+self.matchLen, self.match + if ((instring[loc:loc + self.matchLen].upper() == self.caselessmatch) + and (loc >= len(instring) - self.matchLen + or instring[loc + self.matchLen].upper() not in self.identChars) + and (loc == 0 + or instring[loc - 1].upper() not in self.identChars)): + return loc + self.matchLen, self.match + else: - if (instring[loc] == self.firstMatchChar and - (self.matchLen==1 or instring.startswith(self.match,loc)) and - (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and - (loc == 0 or instring[loc-1] not in self.identChars) ): - return loc+self.matchLen, self.match + if instring[loc] == 
self.firstMatchChar: + if ((self.matchLen == 1 or instring.startswith(self.match, loc)) + and (loc >= len(instring) - self.matchLen + or instring[loc + self.matchLen] not in self.identChars) + and (loc == 0 or instring[loc - 1] not in self.identChars)): + return loc + self.matchLen, self.match + raise ParseException(instring, loc, self.errmsg, self) def copy(self): - c = super(Keyword,self).copy() + c = super(Keyword, self).copy() c.identChars = Keyword.DEFAULT_KEYWORD_CHARS return c @staticmethod - def setDefaultKeywordChars( chars ): + def setDefaultKeywordChars(chars): """Overrides the default Keyword chars """ Keyword.DEFAULT_KEYWORD_CHARS = chars @@ -2710,16 +2990,16 @@ class CaselessLiteral(Literal): (Contrast with example for :class:`CaselessKeyword`.) """ - def __init__( self, matchString ): - super(CaselessLiteral,self).__init__( matchString.upper() ) + def __init__(self, matchString): + super(CaselessLiteral, self).__init__(matchString.upper()) # Preserve the defining literal. self.returnString = matchString self.name = "'%s'" % self.returnString self.errmsg = "Expected " + self.name - def parseImpl( self, instring, loc, doActions=True ): - if instring[ loc:loc+self.matchLen ].upper() == self.match: - return loc+self.matchLen, self.returnString + def parseImpl(self, instring, loc, doActions=True): + if instring[loc:loc + self.matchLen].upper() == self.match: + return loc + self.matchLen, self.returnString raise ParseException(instring, loc, self.errmsg, self) class CaselessKeyword(Keyword): @@ -2732,8 +3012,8 @@ class CaselessKeyword(Keyword): (Contrast with example for :class:`CaselessLiteral`.) 
""" - def __init__( self, matchString, identChars=None ): - super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True ) + def __init__(self, matchString, identChars=None): + super(CaselessKeyword, self).__init__(matchString, identChars, caseless=True) class CloseMatch(Token): """A variation on :class:`Literal` which matches "close" matches, @@ -2769,7 +3049,7 @@ class CloseMatch(Token): patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']}) """ def __init__(self, match_string, maxMismatches=1): - super(CloseMatch,self).__init__() + super(CloseMatch, self).__init__() self.name = match_string self.match_string = match_string self.maxMismatches = maxMismatches @@ -2777,7 +3057,7 @@ class CloseMatch(Token): self.mayIndexError = False self.mayReturnEmpty = False - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): start = loc instrlen = len(instring) maxloc = start + len(self.match_string) @@ -2788,8 +3068,8 @@ class CloseMatch(Token): mismatches = [] maxMismatches = self.maxMismatches - for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)): - src,mat = s_m + for match_stringloc, s_m in enumerate(zip(instring[loc:maxloc], match_string)): + src, mat = s_m if src != mat: mismatches.append(match_stringloc) if len(mismatches) > maxMismatches: @@ -2797,7 +3077,7 @@ class CloseMatch(Token): else: loc = match_stringloc + 1 results = ParseResults([instring[start:loc]]) - results['original'] = self.match_string + results['original'] = match_string results['mismatches'] = mismatches return loc, results @@ -2849,7 +3129,7 @@ class Word(Token): capital_word = Word(alphas.upper(), alphas.lower()) # hostnames are alphanumeric, with leading alpha, and '-' - hostname = Word(alphas, alphanums+'-') + hostname = Word(alphas, alphanums + '-') # roman numeral (not a strict parser, accepts invalid mix of characters) roman = 
Word("IVXLCDM") @@ -2857,15 +3137,16 @@ class Word(Token): # any string of non-whitespace characters, except for ',' csv_value = Word(printables, excludeChars=",") """ - def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ): - super(Word,self).__init__() + def __init__(self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None): + super(Word, self).__init__() if excludeChars: + excludeChars = set(excludeChars) initChars = ''.join(c for c in initChars if c not in excludeChars) if bodyChars: bodyChars = ''.join(c for c in bodyChars if c not in excludeChars) self.initCharsOrig = initChars self.initChars = set(initChars) - if bodyChars : + if bodyChars: self.bodyCharsOrig = bodyChars self.bodyChars = set(bodyChars) else: @@ -2893,34 +3174,28 @@ class Word(Token): self.mayIndexError = False self.asKeyword = asKeyword - if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0): + if ' ' not in self.initCharsOrig + self.bodyCharsOrig and (min == 1 and max == 0 and exact == 0): if self.bodyCharsOrig == self.initCharsOrig: self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig) elif len(self.initCharsOrig) == 1: - self.reString = "%s[%s]*" % \ - (re.escape(self.initCharsOrig), - _escapeRegexRangeChars(self.bodyCharsOrig),) + self.reString = "%s[%s]*" % (re.escape(self.initCharsOrig), + _escapeRegexRangeChars(self.bodyCharsOrig),) else: - self.reString = "[%s][%s]*" % \ - (_escapeRegexRangeChars(self.initCharsOrig), - _escapeRegexRangeChars(self.bodyCharsOrig),) + self.reString = "[%s][%s]*" % (_escapeRegexRangeChars(self.initCharsOrig), + _escapeRegexRangeChars(self.bodyCharsOrig),) if self.asKeyword: - self.reString = r"\b"+self.reString+r"\b" + self.reString = r"\b" + self.reString + r"\b" + try: - self.re = re.compile( self.reString ) + self.re = re.compile(self.reString) except Exception: self.re = None + else: + self.re_match = self.re.match 
+ self.__class__ = _WordRegex - def parseImpl( self, instring, loc, doActions=True ): - if self.re: - result = self.re.match(instring,loc) - if not result: - raise ParseException(instring, loc, self.errmsg, self) - - loc = result.end() - return loc, result.group() - - if not(instring[ loc ] in self.initChars): + def parseImpl(self, instring, loc, doActions=True): + if instring[loc] not in self.initChars: raise ParseException(instring, loc, self.errmsg, self) start = loc @@ -2928,17 +3203,18 @@ class Word(Token): instrlen = len(instring) bodychars = self.bodyChars maxloc = start + self.maxLen - maxloc = min( maxloc, instrlen ) + maxloc = min(maxloc, instrlen) while loc < maxloc and instring[loc] in bodychars: loc += 1 throwException = False if loc - start < self.minLen: throwException = True - if self.maxSpecified and loc < instrlen and instring[loc] in bodychars: + elif self.maxSpecified and loc < instrlen and instring[loc] in bodychars: throwException = True - if self.asKeyword: - if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars): + elif self.asKeyword: + if (start > 0 and instring[start - 1] in bodychars + or loc < instrlen and instring[loc] in bodychars): throwException = True if throwException: @@ -2946,38 +3222,49 @@ class Word(Token): return loc, instring[start:loc] - def __str__( self ): + def __str__(self): try: - return super(Word,self).__str__() + return super(Word, self).__str__() except Exception: pass - if self.strRepr is None: def charsAsStr(s): - if len(s)>4: - return s[:4]+"..." + if len(s) > 4: + return s[:4] + "..." 
else: return s - if ( self.initCharsOrig != self.bodyCharsOrig ): - self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) ) + if self.initCharsOrig != self.bodyCharsOrig: + self.strRepr = "W:(%s, %s)" % (charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig)) else: self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig) return self.strRepr +class _WordRegex(Word): + def parseImpl(self, instring, loc, doActions=True): + result = self.re_match(instring, loc) + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + return loc, result.group() + -class Char(Word): +class Char(_WordRegex): """A short-cut class for defining ``Word(characters, exact=1)``, when defining a match of any single character in a string of characters. """ - def __init__(self, charset): - super(Char, self).__init__(charset, exact=1) - self.reString = "[%s]" % _escapeRegexRangeChars(self.initCharsOrig) - self.re = re.compile( self.reString ) + def __init__(self, charset, asKeyword=False, excludeChars=None): + super(Char, self).__init__(charset, exact=1, asKeyword=asKeyword, excludeChars=excludeChars) + self.reString = "[%s]" % _escapeRegexRangeChars(''.join(self.initChars)) + if asKeyword: + self.reString = r"\b%s\b" % self.reString + self.re = re.compile(self.reString) + self.re_match = self.re.match class Regex(Token): @@ -2987,26 +3274,35 @@ class Regex(Token): If the given regex contains named groups (defined using ``(?P<name>...)``), these will be preserved as named parse results. 
+ If instead of the Python stdlib re module you wish to use a different RE module + (such as the `regex` module), you can replace it by either building your + Regex object with a compiled RE that was compiled using regex: + Example:: realnum = Regex(r"[+-]?\d+\.\d*") date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)') # ref: https://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression roman = Regex(r"M{0,4}(CM|CD|D?{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})") + + # use regex module instead of stdlib re module to construct a Regex using + # a compiled regular expression + import regex + parser = pp.Regex(regex.compile(r'[0-9]')) + """ - compiledREtype = type(re.compile("[A-Z]")) - def __init__( self, pattern, flags=0, asGroupList=False, asMatch=False): + def __init__(self, pattern, flags=0, asGroupList=False, asMatch=False): """The parameters ``pattern`` and ``flags`` are passed to the ``re.compile()`` function as-is. See the Python `re module <https://docs.python.org/3/library/re.html>`_ module for an explanation of the acceptable patterns and flags. 
""" - super(Regex,self).__init__() + super(Regex, self).__init__() if isinstance(pattern, basestring): if not pattern: warnings.warn("null string passed to Regex; use Empty() instead", - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) self.pattern = pattern self.flags = flags @@ -3016,46 +3312,64 @@ class Regex(Token): self.reString = self.pattern except sre_constants.error: warnings.warn("invalid pattern (%s) passed to Regex" % pattern, - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) raise - elif isinstance(pattern, Regex.compiledREtype): + elif hasattr(pattern, 'pattern') and hasattr(pattern, 'match'): self.re = pattern - self.pattern = \ - self.reString = str(pattern) + self.pattern = self.reString = pattern.pattern self.flags = flags else: - raise ValueError("Regex may only be constructed with a string or a compiled RE object") + raise TypeError("Regex may only be constructed with a string or a compiled RE object") + + self.re_match = self.re.match self.name = _ustr(self) self.errmsg = "Expected " + self.name self.mayIndexError = False - self.mayReturnEmpty = True + self.mayReturnEmpty = self.re_match("") is not None self.asGroupList = asGroupList self.asMatch = asMatch + if self.asGroupList: + self.parseImpl = self.parseImplAsGroupList + if self.asMatch: + self.parseImpl = self.parseImplAsMatch - def parseImpl( self, instring, loc, doActions=True ): - result = self.re.match(instring,loc) + def parseImpl(self, instring, loc, doActions=True): + result = self.re_match(instring, loc) if not result: raise ParseException(instring, loc, self.errmsg, self) loc = result.end() - if self.asMatch: - ret = result - elif self.asGroupList: - ret = result.groups() - else: - ret = ParseResults(result.group()) - d = result.groupdict() - if d: - for k, v in d.items(): - ret[k] = v - return loc,ret - - def __str__( self ): + ret = ParseResults(result.group()) + d = result.groupdict() + if d: + for k, v in d.items(): + ret[k] = v + return loc, ret + + def 
parseImplAsGroupList(self, instring, loc, doActions=True): + result = self.re_match(instring, loc) + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + ret = result.groups() + return loc, ret + + def parseImplAsMatch(self, instring, loc, doActions=True): + result = self.re_match(instring, loc) + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + ret = result + return loc, ret + + def __str__(self): try: - return super(Regex,self).__str__() + return super(Regex, self).__str__() except Exception: pass @@ -3065,7 +3379,7 @@ class Regex(Token): return self.strRepr def sub(self, repl): - """ + r""" Return Regex with an attached parse action to transform the parsed result as if called using `re.sub(expr, repl, string) <https://docs.python.org/3/library/re.html#re.sub>`_. @@ -3077,12 +3391,12 @@ class Regex(Token): """ if self.asGroupList: warnings.warn("cannot use sub() with Regex(asGroupList=True)", - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) raise SyntaxError() if self.asMatch and callable(repl): warnings.warn("cannot use sub() with a callable with Regex(asMatch=True)", - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) raise SyntaxError() if self.asMatch: @@ -3102,20 +3416,20 @@ class QuotedString(Token): - quoteChar - string of one or more characters defining the quote delimiting string - escChar - character to escape quotes, typically backslash - (default= ``None`` ) + (default= ``None``) - escQuote - special quote sequence to escape an embedded quote string (such as SQL's ``""`` to escape an embedded ``"``) - (default= ``None`` ) + (default= ``None``) - multiline - boolean indicating whether quotes can span - multiple lines (default= ``False`` ) + multiple lines (default= ``False``) - unquoteResults - boolean indicating whether the matched text - should be unquoted (default= ``True`` ) + should be unquoted (default= ``True``) - endQuoteChar - 
string of one or more characters defining the end of the quote delimited string (default= ``None`` => same as quoteChar) - convertWhitespaceEscapes - convert escaped whitespace (``'\t'``, ``'\n'``, etc.) to actual whitespace - (default= ``True`` ) + (default= ``True``) Example:: @@ -3132,13 +3446,14 @@ class QuotedString(Token): [['This is the "quote"']] [['This is the quote with "embedded" quotes']] """ - def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True): - super(QuotedString,self).__init__() + def __init__(self, quoteChar, escChar=None, escQuote=None, multiline=False, + unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True): + super(QuotedString, self).__init__() # remove white space from quote chars - wont work anyway quoteChar = quoteChar.strip() if not quoteChar: - warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) + warnings.warn("quoteChar cannot be the empty string", SyntaxWarning, stacklevel=2) raise SyntaxError() if endQuoteChar is None: @@ -3146,7 +3461,7 @@ class QuotedString(Token): else: endQuoteChar = endQuoteChar.strip() if not endQuoteChar: - warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) + warnings.warn("endQuoteChar cannot be the empty string", SyntaxWarning, stacklevel=2) raise SyntaxError() self.quoteChar = quoteChar @@ -3161,35 +3476,34 @@ class QuotedString(Token): if multiline: self.flags = re.MULTILINE | re.DOTALL - self.pattern = r'%s(?:[^%s%s]' % \ - ( re.escape(self.quoteChar), - _escapeRegexRangeChars(self.endQuoteChar[0]), - (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) + self.pattern = r'%s(?:[^%s%s]' % (re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + (escChar is not None and _escapeRegexRangeChars(escChar) or '')) else: self.flags = 0 - self.pattern = r'%s(?:[^%s\n\r%s]' % \ - ( re.escape(self.quoteChar), - 
_escapeRegexRangeChars(self.endQuoteChar[0]), - (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) + self.pattern = r'%s(?:[^%s\n\r%s]' % (re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + (escChar is not None and _escapeRegexRangeChars(escChar) or '')) if len(self.endQuoteChar) > 1: self.pattern += ( '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]), - _escapeRegexRangeChars(self.endQuoteChar[i])) - for i in range(len(self.endQuoteChar)-1,0,-1)) + ')' - ) + _escapeRegexRangeChars(self.endQuoteChar[i])) + for i in range(len(self.endQuoteChar) - 1, 0, -1)) + ')') + if escQuote: self.pattern += (r'|(?:%s)' % re.escape(escQuote)) if escChar: self.pattern += (r'|(?:%s.)' % re.escape(escChar)) - self.escCharReplacePattern = re.escape(self.escChar)+"(.)" + self.escCharReplacePattern = re.escape(self.escChar) + "(.)" self.pattern += (r')*%s' % re.escape(self.endQuoteChar)) try: self.re = re.compile(self.pattern, self.flags) self.reString = self.pattern + self.re_match = self.re.match except sre_constants.error: warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern, - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) raise self.name = _ustr(self) @@ -3197,8 +3511,8 @@ class QuotedString(Token): self.mayIndexError = False self.mayReturnEmpty = True - def parseImpl( self, instring, loc, doActions=True ): - result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None + def parseImpl(self, instring, loc, doActions=True): + result = instring[loc] == self.firstQuoteChar and self.re_match(instring, loc) or None if not result: raise ParseException(instring, loc, self.errmsg, self) @@ -3208,18 +3522,18 @@ class QuotedString(Token): if self.unquoteResults: # strip off quotes - ret = ret[self.quoteCharLen:-self.endQuoteCharLen] + ret = ret[self.quoteCharLen: -self.endQuoteCharLen] - if isinstance(ret,basestring): + if isinstance(ret, basestring): # replace escaped whitespace 
if '\\' in ret and self.convertWhitespaceEscapes: ws_map = { - r'\t' : '\t', - r'\n' : '\n', - r'\f' : '\f', - r'\r' : '\r', + r'\t': '\t', + r'\n': '\n', + r'\f': '\f', + r'\r': '\r', } - for wslit,wschar in ws_map.items(): + for wslit, wschar in ws_map.items(): ret = ret.replace(wslit, wschar) # replace escaped characters @@ -3232,9 +3546,9 @@ class QuotedString(Token): return loc, ret - def __str__( self ): + def __str__(self): try: - return super(QuotedString,self).__str__() + return super(QuotedString, self).__str__() except Exception: pass @@ -3264,15 +3578,14 @@ class CharsNotIn(Token): ['dkls', 'lsdkjf', 's12 34', '@!#', '213'] """ - def __init__( self, notChars, min=1, max=0, exact=0 ): - super(CharsNotIn,self).__init__() + def __init__(self, notChars, min=1, max=0, exact=0): + super(CharsNotIn, self).__init__() self.skipWhitespace = False self.notChars = notChars if min < 1: - raise ValueError( - "cannot specify a minimum length < 1; use " + - "Optional(CharsNotIn()) if zero-length char group is permitted") + raise ValueError("cannot specify a minimum length < 1; use " + "Optional(CharsNotIn()) if zero-length char group is permitted") self.minLen = min @@ -3287,19 +3600,18 @@ class CharsNotIn(Token): self.name = _ustr(self) self.errmsg = "Expected " + self.name - self.mayReturnEmpty = ( self.minLen == 0 ) + self.mayReturnEmpty = (self.minLen == 0) self.mayIndexError = False - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): if instring[loc] in self.notChars: raise ParseException(instring, loc, self.errmsg, self) start = loc loc += 1 notchars = self.notChars - maxlen = min( start+self.maxLen, len(instring) ) - while loc < maxlen and \ - (instring[loc] not in notchars): + maxlen = min(start + self.maxLen, len(instring)) + while loc < maxlen and instring[loc] not in notchars: loc += 1 if loc - start < self.minLen: @@ -3307,7 +3619,7 @@ class CharsNotIn(Token): return loc, instring[start:loc] - def 
__str__( self ): + def __str__(self): try: return super(CharsNotIn, self).__str__() except Exception: @@ -3336,30 +3648,30 @@ class White(Token): '\n': '<LF>', '\r': '<CR>', '\f': '<FF>', - 'u\00A0': '<NBSP>', - 'u\1680': '<OGHAM_SPACE_MARK>', - 'u\180E': '<MONGOLIAN_VOWEL_SEPARATOR>', - 'u\2000': '<EN_QUAD>', - 'u\2001': '<EM_QUAD>', - 'u\2002': '<EN_SPACE>', - 'u\2003': '<EM_SPACE>', - 'u\2004': '<THREE-PER-EM_SPACE>', - 'u\2005': '<FOUR-PER-EM_SPACE>', - 'u\2006': '<SIX-PER-EM_SPACE>', - 'u\2007': '<FIGURE_SPACE>', - 'u\2008': '<PUNCTUATION_SPACE>', - 'u\2009': '<THIN_SPACE>', - 'u\200A': '<HAIR_SPACE>', - 'u\200B': '<ZERO_WIDTH_SPACE>', - 'u\202F': '<NNBSP>', - 'u\205F': '<MMSP>', - 'u\3000': '<IDEOGRAPHIC_SPACE>', + u'\u00A0': '<NBSP>', + u'\u1680': '<OGHAM_SPACE_MARK>', + u'\u180E': '<MONGOLIAN_VOWEL_SEPARATOR>', + u'\u2000': '<EN_QUAD>', + u'\u2001': '<EM_QUAD>', + u'\u2002': '<EN_SPACE>', + u'\u2003': '<EM_SPACE>', + u'\u2004': '<THREE-PER-EM_SPACE>', + u'\u2005': '<FOUR-PER-EM_SPACE>', + u'\u2006': '<SIX-PER-EM_SPACE>', + u'\u2007': '<FIGURE_SPACE>', + u'\u2008': '<PUNCTUATION_SPACE>', + u'\u2009': '<THIN_SPACE>', + u'\u200A': '<HAIR_SPACE>', + u'\u200B': '<ZERO_WIDTH_SPACE>', + u'\u202F': '<NNBSP>', + u'\u205F': '<MMSP>', + u'\u3000': '<IDEOGRAPHIC_SPACE>', } def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): - super(White,self).__init__() + super(White, self).__init__() self.matchWhite = ws - self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) ) - #~ self.leaveWhitespace() + self.setWhitespaceChars("".join(c for c in self.whiteChars if c not in self.matchWhite)) + # ~ self.leaveWhitespace() self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite)) self.mayReturnEmpty = True self.errmsg = "Expected " + self.name @@ -3375,13 +3687,13 @@ class White(Token): self.maxLen = exact self.minLen = exact - def parseImpl( self, instring, loc, doActions=True ): - if not(instring[ loc ] in self.matchWhite): + def 
parseImpl(self, instring, loc, doActions=True): + if instring[loc] not in self.matchWhite: raise ParseException(instring, loc, self.errmsg, self) start = loc loc += 1 maxloc = start + self.maxLen - maxloc = min( maxloc, len(instring) ) + maxloc = min(maxloc, len(instring)) while loc < maxloc and instring[loc] in self.matchWhite: loc += 1 @@ -3392,9 +3704,9 @@ class White(Token): class _PositionToken(Token): - def __init__( self ): - super(_PositionToken,self).__init__() - self.name=self.__class__.__name__ + def __init__(self): + super(_PositionToken, self).__init__() + self.name = self.__class__.__name__ self.mayReturnEmpty = True self.mayIndexError = False @@ -3402,30 +3714,30 @@ class GoToColumn(_PositionToken): """Token to advance to a specific column of input text; useful for tabular report scraping. """ - def __init__( self, colno ): - super(GoToColumn,self).__init__() + def __init__(self, colno): + super(GoToColumn, self).__init__() self.col = colno - def preParse( self, instring, loc ): - if col(loc,instring) != self.col: + def preParse(self, instring, loc): + if col(loc, instring) != self.col: instrlen = len(instring) if self.ignoreExprs: - loc = self._skipIgnorables( instring, loc ) - while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col : + loc = self._skipIgnorables(instring, loc) + while loc < instrlen and instring[loc].isspace() and col(loc, instring) != self.col: loc += 1 return loc - def parseImpl( self, instring, loc, doActions=True ): - thiscol = col( loc, instring ) + def parseImpl(self, instring, loc, doActions=True): + thiscol = col(loc, instring) if thiscol > self.col: - raise ParseException( instring, loc, "Text not in expected column", self ) + raise ParseException(instring, loc, "Text not in expected column", self) newloc = loc + self.col - thiscol - ret = instring[ loc: newloc ] + ret = instring[loc: newloc] return newloc, ret class LineStart(_PositionToken): - """Matches if current position is at the 
beginning of a line within + r"""Matches if current position is at the beginning of a line within the parse string Example:: @@ -3446,11 +3758,11 @@ class LineStart(_PositionToken): ['AAA', ' and this line'] """ - def __init__( self ): - super(LineStart,self).__init__() + def __init__(self): + super(LineStart, self).__init__() self.errmsg = "Expected start of line" - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): if col(loc, instring) == 1: return loc, [] raise ParseException(instring, loc, self.errmsg, self) @@ -3459,19 +3771,19 @@ class LineEnd(_PositionToken): """Matches if current position is at the end of a line within the parse string """ - def __init__( self ): - super(LineEnd,self).__init__() - self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") ) + def __init__(self): + super(LineEnd, self).__init__() + self.setWhitespaceChars(ParserElement.DEFAULT_WHITE_CHARS.replace("\n", "")) self.errmsg = "Expected end of line" - def parseImpl( self, instring, loc, doActions=True ): - if loc<len(instring): + def parseImpl(self, instring, loc, doActions=True): + if loc < len(instring): if instring[loc] == "\n": - return loc+1, "\n" + return loc + 1, "\n" else: raise ParseException(instring, loc, self.errmsg, self) elif loc == len(instring): - return loc+1, [] + return loc + 1, [] else: raise ParseException(instring, loc, self.errmsg, self) @@ -3479,29 +3791,29 @@ class StringStart(_PositionToken): """Matches if current position is at the beginning of the parse string """ - def __init__( self ): - super(StringStart,self).__init__() + def __init__(self): + super(StringStart, self).__init__() self.errmsg = "Expected start of text" - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): if loc != 0: # see if entire string up to here is just whitespace and ignoreables - if loc != self.preParse( instring, 0 ): + if loc != 
self.preParse(instring, 0): raise ParseException(instring, loc, self.errmsg, self) return loc, [] class StringEnd(_PositionToken): """Matches if current position is at the end of the parse string """ - def __init__( self ): - super(StringEnd,self).__init__() + def __init__(self): + super(StringEnd, self).__init__() self.errmsg = "Expected end of text" - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): if loc < len(instring): raise ParseException(instring, loc, self.errmsg, self) elif loc == len(instring): - return loc+1, [] + return loc + 1, [] elif loc > len(instring): return loc, [] else: @@ -3516,15 +3828,15 @@ class WordStart(_PositionToken): the beginning of the string being parsed, or at the beginning of a line. """ - def __init__(self, wordChars = printables): - super(WordStart,self).__init__() + def __init__(self, wordChars=printables): + super(WordStart, self).__init__() self.wordChars = set(wordChars) self.errmsg = "Not at the start of a word" - def parseImpl(self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): if loc != 0: - if (instring[loc-1] in self.wordChars or - instring[loc] not in self.wordChars): + if (instring[loc - 1] in self.wordChars + or instring[loc] not in self.wordChars): raise ParseException(instring, loc, self.errmsg, self) return loc, [] @@ -3536,17 +3848,17 @@ class WordEnd(_PositionToken): will also match at the end of the string being parsed, or at the end of a line. 
""" - def __init__(self, wordChars = printables): - super(WordEnd,self).__init__() + def __init__(self, wordChars=printables): + super(WordEnd, self).__init__() self.wordChars = set(wordChars) self.skipWhitespace = False self.errmsg = "Not at the end of a word" - def parseImpl(self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): instrlen = len(instring) - if instrlen>0 and loc<instrlen: + if instrlen > 0 and loc < instrlen: if (instring[loc] in self.wordChars or - instring[loc-1] not in self.wordChars): + instring[loc - 1] not in self.wordChars): raise ParseException(instring, loc, self.errmsg, self) return loc, [] @@ -3555,90 +3867,89 @@ class ParseExpression(ParserElement): """Abstract subclass of ParserElement, for combining and post-processing parsed tokens. """ - def __init__( self, exprs, savelist = False ): - super(ParseExpression,self).__init__(savelist) - if isinstance( exprs, _generatorType ): + def __init__(self, exprs, savelist=False): + super(ParseExpression, self).__init__(savelist) + if isinstance(exprs, _generatorType): exprs = list(exprs) - if isinstance( exprs, basestring ): - self.exprs = [ ParserElement._literalStringClass( exprs ) ] - elif isinstance( exprs, Iterable ): + if isinstance(exprs, basestring): + self.exprs = [self._literalStringClass(exprs)] + elif isinstance(exprs, ParserElement): + self.exprs = [exprs] + elif isinstance(exprs, Iterable): exprs = list(exprs) # if sequence of strings provided, wrap with Literal - if all(isinstance(expr, basestring) for expr in exprs): - exprs = map(ParserElement._literalStringClass, exprs) + if any(isinstance(expr, basestring) for expr in exprs): + exprs = (self._literalStringClass(e) if isinstance(e, basestring) else e for e in exprs) self.exprs = list(exprs) else: try: - self.exprs = list( exprs ) + self.exprs = list(exprs) except TypeError: - self.exprs = [ exprs ] + self.exprs = [exprs] self.callPreparse = False - def __getitem__( self, i ): - return 
self.exprs[i] - - def append( self, other ): - self.exprs.append( other ) + def append(self, other): + self.exprs.append(other) self.strRepr = None return self - def leaveWhitespace( self ): + def leaveWhitespace(self): """Extends ``leaveWhitespace`` defined in base class, and also invokes ``leaveWhitespace`` on all contained expressions.""" self.skipWhitespace = False - self.exprs = [ e.copy() for e in self.exprs ] + self.exprs = [e.copy() for e in self.exprs] for e in self.exprs: e.leaveWhitespace() return self - def ignore( self, other ): - if isinstance( other, Suppress ): + def ignore(self, other): + if isinstance(other, Suppress): if other not in self.ignoreExprs: - super( ParseExpression, self).ignore( other ) + super(ParseExpression, self).ignore(other) for e in self.exprs: - e.ignore( self.ignoreExprs[-1] ) + e.ignore(self.ignoreExprs[-1]) else: - super( ParseExpression, self).ignore( other ) + super(ParseExpression, self).ignore(other) for e in self.exprs: - e.ignore( self.ignoreExprs[-1] ) + e.ignore(self.ignoreExprs[-1]) return self - def __str__( self ): + def __str__(self): try: - return super(ParseExpression,self).__str__() + return super(ParseExpression, self).__str__() except Exception: pass if self.strRepr is None: - self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.exprs) ) + self.strRepr = "%s:(%s)" % (self.__class__.__name__, _ustr(self.exprs)) return self.strRepr - def streamline( self ): - super(ParseExpression,self).streamline() + def streamline(self): + super(ParseExpression, self).streamline() for e in self.exprs: e.streamline() - # collapse nested And's of the form And( And( And( a,b), c), d) to And( a,b,c,d ) + # collapse nested And's of the form And(And(And(a, b), c), d) to And(a, b, c, d) # but only if there are no parse actions or resultsNames on the nested And's # (likewise for Or's and MatchFirst's) - if ( len(self.exprs) == 2 ): + if len(self.exprs) == 2: other = self.exprs[0] - if ( isinstance( other, self.__class__ 
) and - not(other.parseAction) and - other.resultsName is None and - not other.debug ): - self.exprs = other.exprs[:] + [ self.exprs[1] ] + if (isinstance(other, self.__class__) + and not other.parseAction + and other.resultsName is None + and not other.debug): + self.exprs = other.exprs[:] + [self.exprs[1]] self.strRepr = None self.mayReturnEmpty |= other.mayReturnEmpty self.mayIndexError |= other.mayIndexError other = self.exprs[-1] - if ( isinstance( other, self.__class__ ) and - not(other.parseAction) and - other.resultsName is None and - not other.debug ): + if (isinstance(other, self.__class__) + and not other.parseAction + and other.resultsName is None + and not other.debug): self.exprs = self.exprs[:-1] + other.exprs[:] self.strRepr = None self.mayReturnEmpty |= other.mayReturnEmpty @@ -3648,21 +3959,31 @@ class ParseExpression(ParserElement): return self - def setResultsName( self, name, listAllMatches=False ): - ret = super(ParseExpression,self).setResultsName(name,listAllMatches) - return ret - - def validate( self, validateTrace=[] ): - tmp = validateTrace[:]+[self] + def validate(self, validateTrace=None): + tmp = (validateTrace if validateTrace is not None else [])[:] + [self] for e in self.exprs: e.validate(tmp) - self.checkRecursion( [] ) + self.checkRecursion([]) def copy(self): - ret = super(ParseExpression,self).copy() + ret = super(ParseExpression, self).copy() ret.exprs = [e.copy() for e in self.exprs] return ret + def _setResultsName(self, name, listAllMatches=False): + if __diag__.warn_ungrouped_named_tokens_in_collection: + for e in self.exprs: + if isinstance(e, ParserElement) and e.resultsName: + warnings.warn("{0}: setting results name {1!r} on {2} expression " + "collides with {3!r} on contained expression".format("warn_ungrouped_named_tokens_in_collection", + name, + type(self).__name__, + e.resultsName), + stacklevel=3) + + return super(ParseExpression, self)._setResultsName(name, listAllMatches) + + class And(ParseExpression): """ 
Requires all given :class:`ParseExpression` s to be found in the given order. @@ -3676,33 +3997,59 @@ class And(ParseExpression): integer = Word(nums) name_expr = OneOrMore(Word(alphas)) - expr = And([integer("id"),name_expr("name"),integer("age")]) + expr = And([integer("id"), name_expr("name"), integer("age")]) # more easily written as: expr = integer("id") + name_expr("name") + integer("age") """ class _ErrorStop(Empty): def __init__(self, *args, **kwargs): - super(And._ErrorStop,self).__init__(*args, **kwargs) + super(And._ErrorStop, self).__init__(*args, **kwargs) self.name = '-' self.leaveWhitespace() - def __init__( self, exprs, savelist = True ): - super(And,self).__init__(exprs, savelist) + def __init__(self, exprs, savelist=True): + exprs = list(exprs) + if exprs and Ellipsis in exprs: + tmp = [] + for i, expr in enumerate(exprs): + if expr is Ellipsis: + if i < len(exprs) - 1: + skipto_arg = (Empty() + exprs[i + 1]).exprs[-1] + tmp.append(SkipTo(skipto_arg)("_skipped*")) + else: + raise Exception("cannot construct And with sequence ending in ...") + else: + tmp.append(expr) + exprs[:] = tmp + super(And, self).__init__(exprs, savelist) self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) - self.setWhitespaceChars( self.exprs[0].whiteChars ) + self.setWhitespaceChars(self.exprs[0].whiteChars) self.skipWhitespace = self.exprs[0].skipWhitespace self.callPreparse = True def streamline(self): + # collapse any _PendingSkip's + if self.exprs: + if any(isinstance(e, ParseExpression) and e.exprs and isinstance(e.exprs[-1], _PendingSkip) + for e in self.exprs[:-1]): + for i, e in enumerate(self.exprs[:-1]): + if e is None: + continue + if (isinstance(e, ParseExpression) + and e.exprs and isinstance(e.exprs[-1], _PendingSkip)): + e.exprs[-1] = e.exprs[-1] + self.exprs[i + 1] + self.exprs[i + 1] = None + self.exprs = [e for e in self.exprs if e is not None] + super(And, self).streamline() self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) 
return self - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): # pass False as last arg to _parse for first element, since we already # pre-parsed the string as part of our And pre-parsing - loc, resultlist = self.exprs[0]._parse( instring, loc, doActions, callPreParse=False ) + loc, resultlist = self.exprs[0]._parse(instring, loc, doActions, callPreParse=False) errorStop = False for e in self.exprs[1:]: if isinstance(e, And._ErrorStop): @@ -3710,7 +4057,7 @@ class And(ParseExpression): continue if errorStop: try: - loc, exprtokens = e._parse( instring, loc, doActions ) + loc, exprtokens = e._parse(instring, loc, doActions) except ParseSyntaxException: raise except ParseBaseException as pe: @@ -3719,25 +4066,25 @@ class And(ParseExpression): except IndexError: raise ParseSyntaxException(instring, len(instring), self.errmsg, self) else: - loc, exprtokens = e._parse( instring, loc, doActions ) + loc, exprtokens = e._parse(instring, loc, doActions) if exprtokens or exprtokens.haskeys(): resultlist += exprtokens return loc, resultlist - def __iadd__(self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - return self.append( other ) #And( [ self, other ] ) + def __iadd__(self, other): + if isinstance(other, basestring): + other = self._literalStringClass(other) + return self.append(other) # And([self, other]) - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] + def checkRecursion(self, parseElementList): + subRecCheckList = parseElementList[:] + [self] for e in self.exprs: - e.checkRecursion( subRecCheckList ) + e.checkRecursion(subRecCheckList) if not e.mayReturnEmpty: break - def __str__( self ): - if hasattr(self,"name"): + def __str__(self): + if hasattr(self, "name"): return self.name if self.strRepr is None: @@ -3763,8 +4110,8 @@ class Or(ParseExpression): [['123'], ['3.1416'], ['789']] """ - def __init__( 
self, exprs, savelist = False ): - super(Or,self).__init__(exprs, savelist) + def __init__(self, exprs, savelist=False): + super(Or, self).__init__(exprs, savelist) if self.exprs: self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) else: @@ -3772,16 +4119,17 @@ class Or(ParseExpression): def streamline(self): super(Or, self).streamline() - self.saveAsList = any(e.saveAsList for e in self.exprs) + if __compat__.collect_all_And_tokens: + self.saveAsList = any(e.saveAsList for e in self.exprs) return self - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): maxExcLoc = -1 maxException = None matches = [] for e in self.exprs: try: - loc2 = e.tryParse( instring, loc ) + loc2 = e.tryParse(instring, loc) except ParseException as err: err.__traceback__ = None if err.loc > maxExcLoc: @@ -3789,22 +4137,45 @@ class Or(ParseExpression): maxExcLoc = err.loc except IndexError: if len(instring) > maxExcLoc: - maxException = ParseException(instring,len(instring),e.errmsg,self) + maxException = ParseException(instring, len(instring), e.errmsg, self) maxExcLoc = len(instring) else: # save match among all matches, to retry longest to shortest matches.append((loc2, e)) if matches: - matches.sort(key=lambda x: -x[0]) - for _,e in matches: + # re-evaluate all matches in descending order of length of match, in case attached actions + # might change whether or how much they match of the input. 
+ matches.sort(key=itemgetter(0), reverse=True) + + if not doActions: + # no further conditions or parse actions to change the selection of + # alternative, so the first match will be the best match + best_expr = matches[0][1] + return best_expr._parse(instring, loc, doActions) + + longest = -1, None + for loc1, expr1 in matches: + if loc1 <= longest[0]: + # already have a longer match than this one will deliver, we are done + return longest + try: - return e._parse( instring, loc, doActions ) + loc2, toks = expr1._parse(instring, loc, doActions) except ParseException as err: err.__traceback__ = None if err.loc > maxExcLoc: maxException = err maxExcLoc = err.loc + else: + if loc2 >= loc1: + return loc2, toks + # didn't match as much as before + elif loc2 > longest[0]: + longest = loc2, toks + + if longest != (-1, None): + return longest if maxException is not None: maxException.msg = self.errmsg @@ -3813,13 +4184,13 @@ class Or(ParseExpression): raise ParseException(instring, loc, "no defined alternatives to match", self) - def __ixor__(self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - return self.append( other ) #Or( [ self, other ] ) + def __ixor__(self, other): + if isinstance(other, basestring): + other = self._literalStringClass(other) + return self.append(other) # Or([self, other]) - def __str__( self ): - if hasattr(self,"name"): + def __str__(self): + if hasattr(self, "name"): return self.name if self.strRepr is None: @@ -3827,10 +4198,22 @@ class Or(ParseExpression): return self.strRepr - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] + def checkRecursion(self, parseElementList): + subRecCheckList = parseElementList[:] + [self] for e in self.exprs: - e.checkRecursion( subRecCheckList ) + e.checkRecursion(subRecCheckList) + + def _setResultsName(self, name, listAllMatches=False): + if (not __compat__.collect_all_And_tokens + and 
__diag__.warn_multiple_tokens_in_named_alternation): + if any(isinstance(e, And) for e in self.exprs): + warnings.warn("{0}: setting results name {1!r} on {2} expression " + "may only return a single token for an And alternative, " + "in future will return the full list of tokens".format( + "warn_multiple_tokens_in_named_alternation", name, type(self).__name__), + stacklevel=3) + + return super(Or, self)._setResultsName(name, listAllMatches) class MatchFirst(ParseExpression): @@ -3850,25 +4233,25 @@ class MatchFirst(ParseExpression): number = Combine(Word(nums) + '.' + Word(nums)) | Word(nums) print(number.searchString("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']] """ - def __init__( self, exprs, savelist = False ): - super(MatchFirst,self).__init__(exprs, savelist) + def __init__(self, exprs, savelist=False): + super(MatchFirst, self).__init__(exprs, savelist) if self.exprs: self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) - # self.saveAsList = any(e.saveAsList for e in self.exprs) else: self.mayReturnEmpty = True def streamline(self): super(MatchFirst, self).streamline() - self.saveAsList = any(e.saveAsList for e in self.exprs) + if __compat__.collect_all_And_tokens: + self.saveAsList = any(e.saveAsList for e in self.exprs) return self - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): maxExcLoc = -1 maxException = None for e in self.exprs: try: - ret = e._parse( instring, loc, doActions ) + ret = e._parse(instring, loc, doActions) return ret except ParseException as err: if err.loc > maxExcLoc: @@ -3876,7 +4259,7 @@ class MatchFirst(ParseExpression): maxExcLoc = err.loc except IndexError: if len(instring) > maxExcLoc: - maxException = ParseException(instring,len(instring),e.errmsg,self) + maxException = ParseException(instring, len(instring), e.errmsg, self) maxExcLoc = len(instring) # only got here if no expression matched, raise exception for match that made it the 
furthest @@ -3887,13 +4270,13 @@ class MatchFirst(ParseExpression): else: raise ParseException(instring, loc, "no defined alternatives to match", self) - def __ior__(self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - return self.append( other ) #MatchFirst( [ self, other ] ) + def __ior__(self, other): + if isinstance(other, basestring): + other = self._literalStringClass(other) + return self.append(other) # MatchFirst([self, other]) - def __str__( self ): - if hasattr(self,"name"): + def __str__(self): + if hasattr(self, "name"): return self.name if self.strRepr is None: @@ -3901,10 +4284,22 @@ class MatchFirst(ParseExpression): return self.strRepr - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] + def checkRecursion(self, parseElementList): + subRecCheckList = parseElementList[:] + [self] for e in self.exprs: - e.checkRecursion( subRecCheckList ) + e.checkRecursion(subRecCheckList) + + def _setResultsName(self, name, listAllMatches=False): + if (not __compat__.collect_all_And_tokens + and __diag__.warn_multiple_tokens_in_named_alternation): + if any(isinstance(e, And) for e in self.exprs): + warnings.warn("{0}: setting results name {1!r} on {2} expression " + "may only return a single token for an And alternative, " + "in future will return the full list of tokens".format( + "warn_multiple_tokens_in_named_alternation", name, type(self).__name__), + stacklevel=3) + + return super(MatchFirst, self)._setResultsName(name, listAllMatches) class Each(ParseExpression): @@ -3964,8 +4359,8 @@ class Each(ParseExpression): - shape: TRIANGLE - size: 20 """ - def __init__( self, exprs, savelist = True ): - super(Each,self).__init__(exprs, savelist) + def __init__(self, exprs, savelist=True): + super(Each, self).__init__(exprs, savelist) self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) self.skipWhitespace = True self.initExprGroups = True @@ -3976,15 
+4371,15 @@ class Each(ParseExpression): self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) return self - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): if self.initExprGroups: - self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional)) - opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ] - opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)] + self.opt1map = dict((id(e.expr), e) for e in self.exprs if isinstance(e, Optional)) + opt1 = [e.expr for e in self.exprs if isinstance(e, Optional)] + opt2 = [e for e in self.exprs if e.mayReturnEmpty and not isinstance(e, (Optional, Regex))] self.optionals = opt1 + opt2 - self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ] - self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ] - self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ] + self.multioptionals = [e.expr for e in self.exprs if isinstance(e, ZeroOrMore)] + self.multirequired = [e.expr for e in self.exprs if isinstance(e, OneOrMore)] + self.required = [e for e in self.exprs if not isinstance(e, (Optional, ZeroOrMore, OneOrMore))] self.required += self.multirequired self.initExprGroups = False tmpLoc = loc @@ -3998,11 +4393,11 @@ class Each(ParseExpression): failed = [] for e in tmpExprs: try: - tmpLoc = e.tryParse( instring, tmpLoc ) + tmpLoc = e.tryParse(instring, tmpLoc) except ParseException: failed.append(e) else: - matchOrder.append(self.opt1map.get(id(e),e)) + matchOrder.append(self.opt1map.get(id(e), e)) if e in tmpReqd: tmpReqd.remove(e) elif e in tmpOpt: @@ -4012,21 +4407,21 @@ class Each(ParseExpression): if tmpReqd: missing = ", ".join(_ustr(e) for e in tmpReqd) - raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing ) + raise ParseException(instring, loc, "Missing one or more required 
elements (%s)" % missing) # add any unmatched Optionals, in case they have default values defined - matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt] + matchOrder += [e for e in self.exprs if isinstance(e, Optional) and e.expr in tmpOpt] resultlist = [] for e in matchOrder: - loc,results = e._parse(instring,loc,doActions) + loc, results = e._parse(instring, loc, doActions) resultlist.append(results) finalResults = sum(resultlist, ParseResults([])) return loc, finalResults - def __str__( self ): - if hasattr(self,"name"): + def __str__(self): + if hasattr(self, "name"): return self.name if self.strRepr is None: @@ -4034,86 +4429,88 @@ class Each(ParseExpression): return self.strRepr - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] + def checkRecursion(self, parseElementList): + subRecCheckList = parseElementList[:] + [self] for e in self.exprs: - e.checkRecursion( subRecCheckList ) + e.checkRecursion(subRecCheckList) class ParseElementEnhance(ParserElement): """Abstract subclass of :class:`ParserElement`, for combining and post-processing parsed tokens. 
""" - def __init__( self, expr, savelist=False ): - super(ParseElementEnhance,self).__init__(savelist) - if isinstance( expr, basestring ): - if issubclass(ParserElement._literalStringClass, Token): - expr = ParserElement._literalStringClass(expr) + def __init__(self, expr, savelist=False): + super(ParseElementEnhance, self).__init__(savelist) + if isinstance(expr, basestring): + if issubclass(self._literalStringClass, Token): + expr = self._literalStringClass(expr) else: - expr = ParserElement._literalStringClass(Literal(expr)) + expr = self._literalStringClass(Literal(expr)) self.expr = expr self.strRepr = None if expr is not None: self.mayIndexError = expr.mayIndexError self.mayReturnEmpty = expr.mayReturnEmpty - self.setWhitespaceChars( expr.whiteChars ) + self.setWhitespaceChars(expr.whiteChars) self.skipWhitespace = expr.skipWhitespace self.saveAsList = expr.saveAsList self.callPreparse = expr.callPreparse self.ignoreExprs.extend(expr.ignoreExprs) - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): if self.expr is not None: - return self.expr._parse( instring, loc, doActions, callPreParse=False ) + return self.expr._parse(instring, loc, doActions, callPreParse=False) else: - raise ParseException("",loc,self.errmsg,self) + raise ParseException("", loc, self.errmsg, self) - def leaveWhitespace( self ): + def leaveWhitespace(self): self.skipWhitespace = False self.expr = self.expr.copy() if self.expr is not None: self.expr.leaveWhitespace() return self - def ignore( self, other ): - if isinstance( other, Suppress ): + def ignore(self, other): + if isinstance(other, Suppress): if other not in self.ignoreExprs: - super( ParseElementEnhance, self).ignore( other ) + super(ParseElementEnhance, self).ignore(other) if self.expr is not None: - self.expr.ignore( self.ignoreExprs[-1] ) + self.expr.ignore(self.ignoreExprs[-1]) else: - super( ParseElementEnhance, self).ignore( other ) + super(ParseElementEnhance, 
self).ignore(other) if self.expr is not None: - self.expr.ignore( self.ignoreExprs[-1] ) + self.expr.ignore(self.ignoreExprs[-1]) return self - def streamline( self ): - super(ParseElementEnhance,self).streamline() + def streamline(self): + super(ParseElementEnhance, self).streamline() if self.expr is not None: self.expr.streamline() return self - def checkRecursion( self, parseElementList ): + def checkRecursion(self, parseElementList): if self in parseElementList: - raise RecursiveGrammarException( parseElementList+[self] ) - subRecCheckList = parseElementList[:] + [ self ] + raise RecursiveGrammarException(parseElementList + [self]) + subRecCheckList = parseElementList[:] + [self] if self.expr is not None: - self.expr.checkRecursion( subRecCheckList ) + self.expr.checkRecursion(subRecCheckList) - def validate( self, validateTrace=[] ): - tmp = validateTrace[:]+[self] + def validate(self, validateTrace=None): + if validateTrace is None: + validateTrace = [] + tmp = validateTrace[:] + [self] if self.expr is not None: self.expr.validate(tmp) - self.checkRecursion( [] ) + self.checkRecursion([]) - def __str__( self ): + def __str__(self): try: - return super(ParseElementEnhance,self).__str__() + return super(ParseElementEnhance, self).__str__() except Exception: pass if self.strRepr is None and self.expr is not None: - self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) ) + self.strRepr = "%s:(%s)" % (self.__class__.__name__, _ustr(self.expr)) return self.strRepr @@ -4139,13 +4536,16 @@ class FollowedBy(ParseElementEnhance): [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']] """ - def __init__( self, expr ): - super(FollowedBy,self).__init__(expr) + def __init__(self, expr): + super(FollowedBy, self).__init__(expr) self.mayReturnEmpty = True - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): + # by using self._expr.parse and deleting the contents of the returned ParseResults 
list + # we keep any named results that were defined in the FollowedBy expression _, ret = self.expr._parse(instring, loc, doActions=doActions) del ret[:] + return loc, ret @@ -4198,6 +4598,7 @@ class PrecededBy(ParseElementEnhance): self.retreat = retreat self.errmsg = "not preceded by " + str(expr) self.skipWhitespace = False + self.parseAction.append(lambda s, l, t: t.__delitem__(slice(None, None))) def parseImpl(self, instring, loc=0, doActions=True): if self.exact: @@ -4208,19 +4609,18 @@ class PrecededBy(ParseElementEnhance): else: # retreat specified a maximum lookbehind window, iterate test_expr = self.expr + StringEnd() - instring_slice = instring[:loc] + instring_slice = instring[max(0, loc - self.retreat):loc] last_expr = ParseException(instring, loc, self.errmsg) - for offset in range(1, min(loc, self.retreat+1)): + for offset in range(1, min(loc, self.retreat + 1)+1): try: - _, ret = test_expr._parse(instring_slice, loc-offset) + # print('trying', offset, instring_slice, repr(instring_slice[loc - offset:])) + _, ret = test_expr._parse(instring_slice, len(instring_slice) - offset) except ParseBaseException as pbe: last_expr = pbe else: break else: raise last_expr - # return empty list of tokens, but preserve any defined results names - del ret[:] return loc, ret @@ -4247,20 +4647,20 @@ class NotAny(ParseElementEnhance): # integers that are followed by "." 
are actually floats integer = Word(nums) + ~Char(".") """ - def __init__( self, expr ): - super(NotAny,self).__init__(expr) - #~ self.leaveWhitespace() + def __init__(self, expr): + super(NotAny, self).__init__(expr) + # ~ self.leaveWhitespace() self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs self.mayReturnEmpty = True - self.errmsg = "Found unwanted token, "+_ustr(self.expr) + self.errmsg = "Found unwanted token, " + _ustr(self.expr) - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): if self.expr.canParseNext(instring, loc): raise ParseException(instring, loc, self.errmsg, self) return loc, [] - def __str__( self ): - if hasattr(self,"name"): + def __str__(self): + if hasattr(self, "name"): return self.name if self.strRepr is None: @@ -4269,15 +4669,21 @@ class NotAny(ParseElementEnhance): return self.strRepr class _MultipleMatch(ParseElementEnhance): - def __init__( self, expr, stopOn=None): + def __init__(self, expr, stopOn=None): super(_MultipleMatch, self).__init__(expr) self.saveAsList = True ender = stopOn if isinstance(ender, basestring): - ender = ParserElement._literalStringClass(ender) + ender = self._literalStringClass(ender) + self.stopOn(ender) + + def stopOn(self, ender): + if isinstance(ender, basestring): + ender = self._literalStringClass(ender) self.not_ender = ~ender if ender is not None else None + return self - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): self_expr_parse = self.expr._parse self_skip_ignorables = self._skipIgnorables check_ender = self.not_ender is not None @@ -4288,24 +4694,38 @@ class _MultipleMatch(ParseElementEnhance): # if so, fail) if check_ender: try_not_ender(instring, loc) - loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False ) + loc, tokens = self_expr_parse(instring, loc, doActions, callPreParse=False) try: hasIgnoreExprs = 
(not not self.ignoreExprs) while 1: if check_ender: try_not_ender(instring, loc) if hasIgnoreExprs: - preloc = self_skip_ignorables( instring, loc ) + preloc = self_skip_ignorables(instring, loc) else: preloc = loc - loc, tmptokens = self_expr_parse( instring, preloc, doActions ) + loc, tmptokens = self_expr_parse(instring, preloc, doActions) if tmptokens or tmptokens.haskeys(): tokens += tmptokens - except (ParseException,IndexError): + except (ParseException, IndexError): pass return loc, tokens + def _setResultsName(self, name, listAllMatches=False): + if __diag__.warn_ungrouped_named_tokens_in_collection: + for e in [self.expr] + getattr(self.expr, 'exprs', []): + if isinstance(e, ParserElement) and e.resultsName: + warnings.warn("{0}: setting results name {1!r} on {2} expression " + "collides with {3!r} on contained expression".format("warn_ungrouped_named_tokens_in_collection", + name, + type(self).__name__, + e.resultsName), + stacklevel=3) + + return super(_MultipleMatch, self)._setResultsName(name, listAllMatches) + + class OneOrMore(_MultipleMatch): """Repetition of one or more of the given expression. 
@@ -4332,8 +4752,8 @@ class OneOrMore(_MultipleMatch): (attr_expr * (1,)).parseString(text).pprint() """ - def __str__( self ): - if hasattr(self,"name"): + def __str__(self): + if hasattr(self, "name"): return self.name if self.strRepr is None: @@ -4352,18 +4772,18 @@ class ZeroOrMore(_MultipleMatch): Example: similar to :class:`OneOrMore` """ - def __init__( self, expr, stopOn=None): - super(ZeroOrMore,self).__init__(expr, stopOn=stopOn) + def __init__(self, expr, stopOn=None): + super(ZeroOrMore, self).__init__(expr, stopOn=stopOn) self.mayReturnEmpty = True - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): try: return super(ZeroOrMore, self).parseImpl(instring, loc, doActions) - except (ParseException,IndexError): + except (ParseException, IndexError): return loc, [] - def __str__( self ): - if hasattr(self,"name"): + def __str__(self): + if hasattr(self, "name"): return self.name if self.strRepr is None: @@ -4371,6 +4791,7 @@ class ZeroOrMore(_MultipleMatch): return self.strRepr + class _NullToken(object): def __bool__(self): return False @@ -4378,7 +4799,6 @@ class _NullToken(object): def __str__(self): return "" -_optionalNotMatched = _NullToken() class Optional(ParseElementEnhance): """Optional matching of the given expression. 
@@ -4416,28 +4836,30 @@ class Optional(ParseElementEnhance): ^ FAIL: Expected end of text (at char 5), (line:1, col:6) """ - def __init__( self, expr, default=_optionalNotMatched ): - super(Optional,self).__init__( expr, savelist=False ) + __optionalNotMatched = _NullToken() + + def __init__(self, expr, default=__optionalNotMatched): + super(Optional, self).__init__(expr, savelist=False) self.saveAsList = self.expr.saveAsList self.defaultValue = default self.mayReturnEmpty = True - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): try: - loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) - except (ParseException,IndexError): - if self.defaultValue is not _optionalNotMatched: + loc, tokens = self.expr._parse(instring, loc, doActions, callPreParse=False) + except (ParseException, IndexError): + if self.defaultValue is not self.__optionalNotMatched: if self.expr.resultsName: - tokens = ParseResults([ self.defaultValue ]) + tokens = ParseResults([self.defaultValue]) tokens[self.expr.resultsName] = self.defaultValue else: - tokens = [ self.defaultValue ] + tokens = [self.defaultValue] else: tokens = [] return loc, tokens - def __str__( self ): - if hasattr(self,"name"): + def __str__(self): + if hasattr(self, "name"): return self.name if self.strRepr is None: @@ -4503,20 +4925,20 @@ class SkipTo(ParseElementEnhance): - issue_num: 79 - sev: Minor """ - def __init__( self, other, include=False, ignore=None, failOn=None ): - super( SkipTo, self ).__init__( other ) + def __init__(self, other, include=False, ignore=None, failOn=None): + super(SkipTo, self).__init__(other) self.ignoreExpr = ignore self.mayReturnEmpty = True self.mayIndexError = False self.includeMatch = include self.saveAsList = False if isinstance(failOn, basestring): - self.failOn = ParserElement._literalStringClass(failOn) + self.failOn = self._literalStringClass(failOn) else: self.failOn = failOn - self.errmsg = "No 
match found for "+_ustr(self.expr) + self.errmsg = "No match found for " + _ustr(self.expr) - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): startloc = loc instrlen = len(instring) expr = self.expr @@ -4558,7 +4980,7 @@ class SkipTo(ParseElementEnhance): skipresult = ParseResults(skiptext) if self.includeMatch: - loc, mat = expr_parse(instring,loc,doActions,callPreParse=False) + loc, mat = expr_parse(instring, loc, doActions, callPreParse=False) skipresult += mat return loc, skipresult @@ -4590,17 +5012,17 @@ class Forward(ParseElementEnhance): See :class:`ParseResults.pprint` for an example of a recursive parser created using ``Forward``. """ - def __init__( self, other=None ): - super(Forward,self).__init__( other, savelist=False ) + def __init__(self, other=None): + super(Forward, self).__init__(other, savelist=False) - def __lshift__( self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass(other) + def __lshift__(self, other): + if isinstance(other, basestring): + other = self._literalStringClass(other) self.expr = other self.strRepr = None self.mayIndexError = self.expr.mayIndexError self.mayReturnEmpty = self.expr.mayReturnEmpty - self.setWhitespaceChars( self.expr.whiteChars ) + self.setWhitespaceChars(self.expr.whiteChars) self.skipWhitespace = self.expr.skipWhitespace self.saveAsList = self.expr.saveAsList self.ignoreExprs.extend(self.expr.ignoreExprs) @@ -4609,59 +5031,72 @@ class Forward(ParseElementEnhance): def __ilshift__(self, other): return self << other - def leaveWhitespace( self ): + def leaveWhitespace(self): self.skipWhitespace = False return self - def streamline( self ): + def streamline(self): if not self.streamlined: self.streamlined = True if self.expr is not None: self.expr.streamline() return self - def validate( self, validateTrace=[] ): + def validate(self, validateTrace=None): + if validateTrace is None: + validateTrace = [] + if self 
not in validateTrace: - tmp = validateTrace[:]+[self] + tmp = validateTrace[:] + [self] if self.expr is not None: self.expr.validate(tmp) self.checkRecursion([]) - def __str__( self ): - if hasattr(self,"name"): + def __str__(self): + if hasattr(self, "name"): return self.name - return self.__class__.__name__ + ": ..." + if self.strRepr is not None: + return self.strRepr + + # Avoid infinite recursion by setting a temporary strRepr + self.strRepr = ": ..." - # stubbed out for now - creates awful memory and perf issues - self._revertClass = self.__class__ - self.__class__ = _ForwardNoRecurse + # Use the string representation of main expression. + retString = '...' try: if self.expr is not None: - retString = _ustr(self.expr) + retString = _ustr(self.expr)[:1000] else: retString = "None" finally: - self.__class__ = self._revertClass - return self.__class__.__name__ + ": " + retString + self.strRepr = self.__class__.__name__ + ": " + retString + return self.strRepr def copy(self): if self.expr is not None: - return super(Forward,self).copy() + return super(Forward, self).copy() else: ret = Forward() ret <<= self return ret -class _ForwardNoRecurse(Forward): - def __str__( self ): - return "..." + def _setResultsName(self, name, listAllMatches=False): + if __diag__.warn_name_set_on_empty_Forward: + if self.expr is None: + warnings.warn("{0}: setting results name {0!r} on {1} expression " + "that has no contained expression".format("warn_name_set_on_empty_Forward", + name, + type(self).__name__), + stacklevel=3) + + return super(Forward, self)._setResultsName(name, listAllMatches) class TokenConverter(ParseElementEnhance): """ Abstract subclass of :class:`ParseExpression`, for converting parsed results. 
""" - def __init__( self, expr, savelist=False ): - super(TokenConverter,self).__init__( expr )#, savelist ) + def __init__(self, expr, savelist=False): + super(TokenConverter, self).__init__(expr) # , savelist) self.saveAsList = False class Combine(TokenConverter): @@ -4682,8 +5117,8 @@ class Combine(TokenConverter): # no match when there are internal spaces print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...) """ - def __init__( self, expr, joinString="", adjacent=True ): - super(Combine,self).__init__( expr ) + def __init__(self, expr, joinString="", adjacent=True): + super(Combine, self).__init__(expr) # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself if adjacent: self.leaveWhitespace() @@ -4692,20 +5127,20 @@ class Combine(TokenConverter): self.joinString = joinString self.callPreparse = True - def ignore( self, other ): + def ignore(self, other): if self.adjacent: ParserElement.ignore(self, other) else: - super( Combine, self).ignore( other ) + super(Combine, self).ignore(other) return self - def postParse( self, instring, loc, tokenlist ): + def postParse(self, instring, loc, tokenlist): retToks = tokenlist.copy() del retToks[:] - retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults) + retToks += ParseResults(["".join(tokenlist._asStringList(self.joinString))], modal=self.modalResults) if self.resultsName and retToks.haskeys(): - return [ retToks ] + return [retToks] else: return retToks @@ -4719,17 +5154,17 @@ class Group(TokenConverter): num = Word(nums) term = ident | num func = ident + Optional(delimitedList(term)) - print(func.parseString("fn a,b,100")) # -> ['fn', 'a', 'b', '100'] + print(func.parseString("fn a, b, 100")) # -> ['fn', 'a', 'b', '100'] func = ident + Group(Optional(delimitedList(term))) - print(func.parseString("fn a,b,100")) # -> ['fn', ['a', 'b', '100']] + print(func.parseString("fn a, b, 100")) # -> ['fn', ['a', 
'b', '100']] """ - def __init__( self, expr ): - super(Group,self).__init__( expr ) - self.saveAsList = expr.saveAsList + def __init__(self, expr): + super(Group, self).__init__(expr) + self.saveAsList = True - def postParse( self, instring, loc, tokenlist ): - return [ tokenlist ] + def postParse(self, instring, loc, tokenlist): + return [tokenlist] class Dict(TokenConverter): """Converter to return a repetitive expression as a list, but also @@ -4770,31 +5205,31 @@ class Dict(TokenConverter): See more examples at :class:`ParseResults` of accessing fields by results name. """ - def __init__( self, expr ): - super(Dict,self).__init__( expr ) + def __init__(self, expr): + super(Dict, self).__init__(expr) self.saveAsList = True - def postParse( self, instring, loc, tokenlist ): - for i,tok in enumerate(tokenlist): + def postParse(self, instring, loc, tokenlist): + for i, tok in enumerate(tokenlist): if len(tok) == 0: continue ikey = tok[0] - if isinstance(ikey,int): + if isinstance(ikey, int): ikey = _ustr(tok[0]).strip() - if len(tok)==1: - tokenlist[ikey] = _ParseResultsWithOffset("",i) - elif len(tok)==2 and not isinstance(tok[1],ParseResults): - tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i) + if len(tok) == 1: + tokenlist[ikey] = _ParseResultsWithOffset("", i) + elif len(tok) == 2 and not isinstance(tok[1], ParseResults): + tokenlist[ikey] = _ParseResultsWithOffset(tok[1], i) else: - dictvalue = tok.copy() #ParseResults(i) + dictvalue = tok.copy() # ParseResults(i) del dictvalue[0] - if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()): - tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i) + if len(dictvalue) != 1 or (isinstance(dictvalue, ParseResults) and dictvalue.haskeys()): + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue, i) else: - tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i) + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0], i) if self.resultsName: - return [ tokenlist ] + return 
[tokenlist] else: return tokenlist @@ -4821,10 +5256,10 @@ class Suppress(TokenConverter): (See also :class:`delimitedList`.) """ - def postParse( self, instring, loc, tokenlist ): + def postParse(self, instring, loc, tokenlist): return [] - def suppress( self ): + def suppress(self): return self @@ -4834,12 +5269,12 @@ class OnlyOnce(object): def __init__(self, methodCall): self.callable = _trim_arity(methodCall) self.called = False - def __call__(self,s,l,t): + def __call__(self, s, l, t): if not self.called: - results = self.callable(s,l,t) + results = self.callable(s, l, t) self.called = True return results - raise ParseException(s,l,"") + raise ParseException(s, l, "") def reset(self): self.called = False @@ -4871,16 +5306,16 @@ def traceParseAction(f): f = _trim_arity(f) def z(*paArgs): thisFunc = f.__name__ - s,l,t = paArgs[-3:] - if len(paArgs)>3: + s, l, t = paArgs[-3:] + if len(paArgs) > 3: thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc - sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) ) + sys.stderr.write(">>entering %s(line: '%s', %d, %r)\n" % (thisFunc, line(l, s), l, t)) try: ret = f(*paArgs) except Exception as exc: - sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) ) + sys.stderr.write("<<leaving %s (exception: %s)\n" % (thisFunc, exc)) raise - sys.stderr.write( "<<leaving %s (ret: %r)\n" % (thisFunc,ret) ) + sys.stderr.write("<<leaving %s (ret: %r)\n" % (thisFunc, ret)) return ret try: z.__name__ = f.__name__ @@ -4891,7 +5326,7 @@ def traceParseAction(f): # # global helpers # -def delimitedList( expr, delim=",", combine=False ): +def delimitedList(expr, delim=",", combine=False): """Helper to define a delimited list of expressions - the delimiter defaults to ','. 
By default, the list elements and delimiters can have intervening whitespace, and comments, but this can be @@ -4906,13 +5341,13 @@ def delimitedList( expr, delim=",", combine=False ): delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc'] delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] """ - dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..." + dlName = _ustr(expr) + " [" + _ustr(delim) + " " + _ustr(expr) + "]..." if combine: - return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName) + return Combine(expr + ZeroOrMore(delim + expr)).setName(dlName) else: - return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName) + return (expr + ZeroOrMore(Suppress(delim) + expr)).setName(dlName) -def countedArray( expr, intExpr=None ): +def countedArray(expr, intExpr=None): """Helper to define a counted list of expressions. This helper defines a pattern of the form:: @@ -4936,22 +5371,22 @@ def countedArray( expr, intExpr=None ): countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd'] """ arrayExpr = Forward() - def countFieldParseAction(s,l,t): + def countFieldParseAction(s, l, t): n = t[0] - arrayExpr << (n and Group(And([expr]*n)) or Group(empty)) + arrayExpr << (n and Group(And([expr] * n)) or Group(empty)) return [] if intExpr is None: - intExpr = Word(nums).setParseAction(lambda t:int(t[0])) + intExpr = Word(nums).setParseAction(lambda t: int(t[0])) else: intExpr = intExpr.copy() intExpr.setName("arrayLen") intExpr.addParseAction(countFieldParseAction, callDuringTry=True) - return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...') + return (intExpr + arrayExpr).setName('(len) ' + _ustr(expr) + '...') def _flatten(L): ret = [] for i in L: - if isinstance(i,list): + if isinstance(i, list): ret.extend(_flatten(i)) else: ret.append(i) @@ -4973,7 +5408,7 @@ def matchPreviousLiteral(expr): enabled. 
""" rep = Forward() - def copyTokenToRepeater(s,l,t): + def copyTokenToRepeater(s, l, t): if t: if len(t) == 1: rep << t[0] @@ -5005,26 +5440,26 @@ def matchPreviousExpr(expr): rep = Forward() e2 = expr.copy() rep <<= e2 - def copyTokenToRepeater(s,l,t): + def copyTokenToRepeater(s, l, t): matchTokens = _flatten(t.asList()) - def mustMatchTheseTokens(s,l,t): + def mustMatchTheseTokens(s, l, t): theseTokens = _flatten(t.asList()) - if theseTokens != matchTokens: - raise ParseException("",0,"") - rep.setParseAction( mustMatchTheseTokens, callDuringTry=True ) + if theseTokens != matchTokens: + raise ParseException('', 0, '') + rep.setParseAction(mustMatchTheseTokens, callDuringTry=True) expr.addParseAction(copyTokenToRepeater, callDuringTry=True) rep.setName('(prev) ' + _ustr(expr)) return rep def _escapeRegexRangeChars(s): - #~ escape these chars: ^-] - for c in r"\^-]": - s = s.replace(c,_bslash+c) - s = s.replace("\n",r"\n") - s = s.replace("\t",r"\t") + # ~ escape these chars: ^-[] + for c in r"\^-[]": + s = s.replace(c, _bslash + c) + s = s.replace("\n", r"\n") + s = s.replace("\t", r"\t") return _ustr(s) -def oneOf( strs, caseless=False, useRegex=True ): +def oneOf(strs, caseless=False, useRegex=True, asKeyword=False): """Helper to quickly define a set of alternative Literals, and makes sure to do longest-first testing when there is a conflict, regardless of the input order, but returns @@ -5038,8 +5473,10 @@ def oneOf( strs, caseless=False, useRegex=True ): caseless - useRegex - (default= ``True``) - as an optimization, will generate a Regex object; otherwise, will generate - a :class:`MatchFirst` object (if ``caseless=True``, or if + a :class:`MatchFirst` object (if ``caseless=True`` or ``asKeyword=True``, or if creating a :class:`Regex` raises an exception) + - asKeyword - (default=``False``) - enforce Keyword-style matching on the + generated expressions Example:: @@ -5054,57 +5491,62 @@ def oneOf( strs, caseless=False, useRegex=True ): [['B', '=', '12'], 
['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] """ + if isinstance(caseless, basestring): + warnings.warn("More than one string argument passed to oneOf, pass " + "choices as a list or space-delimited string", stacklevel=2) + if caseless: - isequal = ( lambda a,b: a.upper() == b.upper() ) - masks = ( lambda a,b: b.upper().startswith(a.upper()) ) - parseElementClass = CaselessLiteral + isequal = (lambda a, b: a.upper() == b.upper()) + masks = (lambda a, b: b.upper().startswith(a.upper())) + parseElementClass = CaselessKeyword if asKeyword else CaselessLiteral else: - isequal = ( lambda a,b: a == b ) - masks = ( lambda a,b: b.startswith(a) ) - parseElementClass = Literal + isequal = (lambda a, b: a == b) + masks = (lambda a, b: b.startswith(a)) + parseElementClass = Keyword if asKeyword else Literal symbols = [] - if isinstance(strs,basestring): + if isinstance(strs, basestring): symbols = strs.split() elif isinstance(strs, Iterable): symbols = list(strs) else: warnings.warn("Invalid argument to oneOf, expected string or iterable", - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) if not symbols: return NoMatch() - i = 0 - while i < len(symbols)-1: - cur = symbols[i] - for j,other in enumerate(symbols[i+1:]): - if ( isequal(other, cur) ): - del symbols[i+j+1] - break - elif ( masks(cur, other) ): - del symbols[i+j+1] - symbols.insert(i,other) - cur = other - break - else: - i += 1 + if not asKeyword: + # if not producing keywords, need to reorder to take care to avoid masking + # longer choices with shorter ones + i = 0 + while i < len(symbols) - 1: + cur = symbols[i] + for j, other in enumerate(symbols[i + 1:]): + if isequal(other, cur): + del symbols[i + j + 1] + break + elif masks(cur, other): + del symbols[i + j + 1] + symbols.insert(i, other) + break + else: + i += 1 - if not caseless and useRegex: - #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] )) + if not (caseless or asKeyword) and useRegex: + # ~ print 
(strs, "->", "|".join([_escapeRegexChars(sym) for sym in symbols])) try: - if len(symbols)==len("".join(symbols)): - return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols)) + if len(symbols) == len("".join(symbols)): + return Regex("[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols)).setName(' | '.join(symbols)) else: - return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols)) + return Regex("|".join(re.escape(sym) for sym in symbols)).setName(' | '.join(symbols)) except Exception: warnings.warn("Exception creating Regex for oneOf, building MatchFirst", SyntaxWarning, stacklevel=2) - # last resort, just use MatchFirst return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols)) -def dictOf( key, value ): +def dictOf(key, value): """Helper to easily and clearly define a dictionary by specifying the respective patterns for the key and value. Takes care of defining the :class:`Dict`, :class:`ZeroOrMore`, and @@ -5162,8 +5604,8 @@ def originalTextFor(expr, asString=True): Example:: src = "this is test <b> bold <i>text</i> </b> normal text " - for tag in ("b","i"): - opener,closer = makeHTMLTags(tag) + for tag in ("b", "i"): + opener, closer = makeHTMLTags(tag) patt = originalTextFor(opener + SkipTo(closer) + closer) print(patt.searchString(src)[0]) @@ -5172,14 +5614,14 @@ def originalTextFor(expr, asString=True): ['<b> bold <i>text</i> </b>'] ['<i>text</i>'] """ - locMarker = Empty().setParseAction(lambda s,loc,t: loc) + locMarker = Empty().setParseAction(lambda s, loc, t: loc) endlocMarker = locMarker.copy() endlocMarker.callPreparse = False matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end") if asString: - extractText = lambda s,l,t: s[t._original_start:t._original_end] + extractText = lambda s, l, t: s[t._original_start: t._original_end] else: - def extractText(s,l,t): + def extractText(s, l, t): t[:] = 
[s[t.pop('_original_start'):t.pop('_original_end')]] matchExpr.setParseAction(extractText) matchExpr.ignoreExprs = expr.ignoreExprs @@ -5189,7 +5631,7 @@ def ungroup(expr): """Helper to undo pyparsing's default grouping of And expressions, even if all but one are non-empty. """ - return TokenConverter(expr).setParseAction(lambda t:t[0]) + return TokenConverter(expr).addParseAction(lambda t: t[0]) def locatedExpr(expr): """Helper to decorate a returned token with its starting and ending @@ -5216,7 +5658,7 @@ def locatedExpr(expr): [[8, 'lksdjjf', 15]] [[18, 'lkkjj', 23]] """ - locator = Empty().setParseAction(lambda s,l,t: l) + locator = Empty().setParseAction(lambda s, l, t: l) return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end")) @@ -5227,12 +5669,12 @@ lineEnd = LineEnd().setName("lineEnd") stringStart = StringStart().setName("stringStart") stringEnd = StringEnd().setName("stringEnd") -_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1]) -_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16))) -_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8))) +_escapedPunc = Word(_bslash, r"\[]-*.$+^?()~ ", exact=2).setParseAction(lambda s, l, t: t[0][1]) +_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s, l, t: unichr(int(t[0].lstrip(r'\0x'), 16))) +_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s, l, t: unichr(int(t[0][1:], 8))) _singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1) _charRange = Group(_singleChar + Suppress("-") + _singleChar) -_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]" +_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group(OneOrMore(_charRange | 
_singleChar)).setResultsName("body") + "]" def srange(s): r"""Helper to easily define string ranges for use in Word @@ -5260,7 +5702,7 @@ def srange(s): - any combination of the above (``'aeiouy'``, ``'a-zA-Z0-9_$'``, etc.) """ - _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1)) + _expanded = lambda p: p if not isinstance(p, ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]), ord(p[1]) + 1)) try: return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body) except Exception: @@ -5270,9 +5712,9 @@ def matchOnlyAtCol(n): """Helper method for defining parse actions that require matching at a specific column in the input text. """ - def verifyCol(strg,locn,toks): - if col(locn,strg) != n: - raise ParseException(strg,locn,"matched token not at column %d" % n) + def verifyCol(strg, locn, toks): + if col(locn, strg) != n: + raise ParseException(strg, locn, "matched token not at column %d" % n) return verifyCol def replaceWith(replStr): @@ -5288,9 +5730,9 @@ def replaceWith(replStr): OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234] """ - return lambda s,l,t: [replStr] + return lambda s, l, t: [replStr] -def removeQuotes(s,l,t): +def removeQuotes(s, l, t): """Helper parse action for removing quotation marks from parsed quoted strings. @@ -5341,7 +5783,7 @@ def tokenMap(func, *args): now is the winter of our discontent made glorious summer by this sun of york ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York'] """ - def pa(s,l,t): + def pa(s, l, t): return [func(tokn, *args) for tokn in t] try: @@ -5361,33 +5803,41 @@ downcaseTokens = tokenMap(lambda t: _ustr(t).lower()) """(Deprecated) Helper parse action to convert tokens to lower case. 
Deprecated in favor of :class:`pyparsing_common.downcaseTokens`""" -def _makeTags(tagStr, xml): +def _makeTags(tagStr, xml, + suppress_LT=Suppress("<"), + suppress_GT=Suppress(">")): """Internal helper to construct opening and closing tag expressions, given a tag name""" - if isinstance(tagStr,basestring): + if isinstance(tagStr, basestring): resname = tagStr tagStr = Keyword(tagStr, caseless=not xml) else: resname = tagStr.name - tagAttrName = Word(alphas,alphanums+"_-:") - if (xml): - tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes ) - openTag = Suppress("<") + tagStr("tag") + \ - Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \ - Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") + tagAttrName = Word(alphas, alphanums + "_-:") + if xml: + tagAttrValue = dblQuotedString.copy().setParseAction(removeQuotes) + openTag = (suppress_LT + + tagStr("tag") + + Dict(ZeroOrMore(Group(tagAttrName + Suppress("=") + tagAttrValue))) + + Optional("/", default=[False])("empty").setParseAction(lambda s, l, t: t[0] == '/') + + suppress_GT) else: - printablesLessRAbrack = "".join(c for c in printables if c not in ">") - tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack) - openTag = Suppress("<") + tagStr("tag") + \ - Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \ - Optional( Suppress("=") + tagAttrValue ) ))) + \ - Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") - closeTag = Combine(_L("</") + tagStr + ">") - - openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname) - closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname) + tagAttrValue = quotedString.copy().setParseAction(removeQuotes) | Word(printables, excludeChars=">") + openTag 
= (suppress_LT + + tagStr("tag") + + Dict(ZeroOrMore(Group(tagAttrName.setParseAction(downcaseTokens) + + Optional(Suppress("=") + tagAttrValue)))) + + Optional("/", default=[False])("empty").setParseAction(lambda s, l, t: t[0] == '/') + + suppress_GT) + closeTag = Combine(_L("</") + tagStr + ">", adjacent=False) + + openTag.setName("<%s>" % resname) + # add start<tagname> results name in parse action now that ungrouped names are not reported at two levels + openTag.addParseAction(lambda t: t.__setitem__("start" + "".join(resname.replace(":", " ").title().split()), t.copy())) + closeTag = closeTag("end" + "".join(resname.replace(":", " ").title().split())).setName("</%s>" % resname) openTag.tag = resname closeTag.tag = resname + openTag.tag_body = SkipTo(closeTag()) return openTag, closeTag def makeHTMLTags(tagStr): @@ -5400,7 +5850,7 @@ def makeHTMLTags(tagStr): text = '<td>More info at the <a href="https://github.com/pyparsing/pyparsing/wiki">pyparsing</a> wiki page</td>' # makeHTMLTags returns pyparsing expressions for the opening and # closing tags as a 2-tuple - a,a_end = makeHTMLTags("A") + a, a_end = makeHTMLTags("A") link_expr = a + SkipTo(a_end)("link_text") + a_end for link in link_expr.searchString(text): @@ -5412,7 +5862,7 @@ def makeHTMLTags(tagStr): pyparsing -> https://github.com/pyparsing/pyparsing/wiki """ - return _makeTags( tagStr, False ) + return _makeTags(tagStr, False) def makeXMLTags(tagStr): """Helper to construct opening and closing tag expressions for XML, @@ -5420,9 +5870,9 @@ def makeXMLTags(tagStr): Example: similar to :class:`makeHTMLTags` """ - return _makeTags( tagStr, True ) + return _makeTags(tagStr, True) -def withAttribute(*args,**attrDict): +def withAttribute(*args, **attrDict): """Helper to create a validating parse action to be used with start tags created with :class:`makeXMLTags` or :class:`makeHTMLTags`. 
Use ``withAttribute`` to qualify @@ -5435,7 +5885,7 @@ def withAttribute(*args,**attrDict): - keyword arguments, as in ``(align="right")``, or - as an explicit dict with ``**`` operator, when an attribute name is also a Python reserved word, as in ``**{"class":"Customer", "align":"right"}`` - - a list of name-value tuples, as in ``(("ns1:class", "Customer"), ("ns2:align","right"))`` + - a list of name-value tuples, as in ``(("ns1:class", "Customer"), ("ns2:align", "right"))`` For attribute names with a namespace prefix, you must use the second form. Attribute names are matched insensitive to upper/lower case. @@ -5482,13 +5932,13 @@ def withAttribute(*args,**attrDict): attrs = args[:] else: attrs = attrDict.items() - attrs = [(k,v) for k,v in attrs] - def pa(s,l,tokens): - for attrName,attrValue in attrs: + attrs = [(k, v) for k, v in attrs] + def pa(s, l, tokens): + for attrName, attrValue in attrs: if attrName not in tokens: - raise ParseException(s,l,"no matching attribute " + attrName) + raise ParseException(s, l, "no matching attribute " + attrName) if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue: - raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" % + raise ParseException(s, l, "attribute '%s' has value '%s', must be '%s'" % (attrName, tokens[attrName], attrValue)) return pa withAttribute.ANY_VALUE = object() @@ -5529,13 +5979,13 @@ def withClass(classname, namespace=''): 1,3 2,3 1,1 """ classattr = "%s:class" % namespace if namespace else "class" - return withAttribute(**{classattr : classname}) + return withAttribute(**{classattr: classname}) opAssoc = SimpleNamespace() opAssoc.LEFT = object() opAssoc.RIGHT = object() -def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): +def infixNotation(baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')')): """Helper method for constructing grammars of expressions made up of operators working in a precedence hierarchy. 
Operators may be unary or binary, left- or right-associative. Parse actions can also be @@ -5613,9 +6063,9 @@ def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): return loc, [] ret = Forward() - lastExpr = baseExpr | ( lpar + ret + rpar ) - for i,operDef in enumerate(opList): - opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4] + lastExpr = baseExpr | (lpar + ret + rpar) + for i, operDef in enumerate(opList): + opExpr, arity, rightLeftAssoc, pa = (operDef + (None, ))[:4] termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr if arity == 3: if opExpr is None or len(opExpr) != 2: @@ -5625,15 +6075,15 @@ def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): thisExpr = Forward().setName(termName) if rightLeftAssoc == opAssoc.LEFT: if arity == 1: - matchExpr = _FB(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) + matchExpr = _FB(lastExpr + opExpr) + Group(lastExpr + OneOrMore(opExpr)) elif arity == 2: if opExpr is not None: - matchExpr = _FB(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) + matchExpr = _FB(lastExpr + opExpr + lastExpr) + Group(lastExpr + OneOrMore(opExpr + lastExpr)) else: - matchExpr = _FB(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) + matchExpr = _FB(lastExpr + lastExpr) + Group(lastExpr + OneOrMore(lastExpr)) elif arity == 3: - matchExpr = _FB(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ - Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr ) + matchExpr = (_FB(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + + Group(lastExpr + OneOrMore(opExpr1 + lastExpr + opExpr2 + lastExpr))) else: raise ValueError("operator must be unary (1), binary (2), or ternary (3)") elif rightLeftAssoc == opAssoc.RIGHT: @@ -5641,15 +6091,15 @@ def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): # try to avoid LR with this extra test if not isinstance(opExpr, Optional): opExpr = 
Optional(opExpr) - matchExpr = _FB(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) + matchExpr = _FB(opExpr.expr + thisExpr) + Group(opExpr + thisExpr) elif arity == 2: if opExpr is not None: - matchExpr = _FB(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) + matchExpr = _FB(lastExpr + opExpr + thisExpr) + Group(lastExpr + OneOrMore(opExpr + thisExpr)) else: - matchExpr = _FB(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) + matchExpr = _FB(lastExpr + thisExpr) + Group(lastExpr + OneOrMore(thisExpr)) elif arity == 3: - matchExpr = _FB(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ - Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr ) + matchExpr = (_FB(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + + Group(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr)) else: raise ValueError("operator must be unary (1), binary (2), or ternary (3)") else: @@ -5659,7 +6109,7 @@ def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): matchExpr.setParseAction(*pa) else: matchExpr.setParseAction(pa) - thisExpr <<= ( matchExpr.setName(termName) | lastExpr ) + thisExpr <<= (matchExpr.setName(termName) | lastExpr) lastExpr = thisExpr ret <<= lastExpr return ret @@ -5668,10 +6118,10 @@ operatorPrecedence = infixNotation """(Deprecated) Former name of :class:`infixNotation`, will be dropped in a future release.""" -dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes") -sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes") -quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'| - Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes") +dblQuotedString = 
Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*') + '"').setName("string enclosed in double quotes") +sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'").setName("string enclosed in single quotes") +quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*') + '"' + | Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'").setName("quotedString using single or double quotes") unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal") def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): @@ -5707,7 +6157,7 @@ def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.cop ident = Word(alphas+'_', alphanums+'_') number = pyparsing_common.number arg = Group(decl_data_type + ident) - LPAR,RPAR = map(Suppress, "()") + LPAR, RPAR = map(Suppress, "()") code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) @@ -5742,33 +6192,40 @@ def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.cop if opener == closer: raise ValueError("opening and closing strings cannot be the same") if content is None: - if isinstance(opener,basestring) and isinstance(closer,basestring): - if len(opener) == 1 and len(closer)==1: + if isinstance(opener, basestring) and isinstance(closer, basestring): + if len(opener) == 1 and len(closer) == 1: if ignoreExpr is not None: - content = (Combine(OneOrMore(~ignoreExpr + - CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) + content = (Combine(OneOrMore(~ignoreExpr + + CharsNotIn(opener + + closer + + ParserElement.DEFAULT_WHITE_CHARS, exact=1) + ) + ).setParseAction(lambda t: t[0].strip())) else: - content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS - ).setParseAction(lambda t:t[0].strip())) + content = (empty.copy() + CharsNotIn(opener + + 
closer + + ParserElement.DEFAULT_WHITE_CHARS + ).setParseAction(lambda t: t[0].strip())) else: if ignoreExpr is not None: - content = (Combine(OneOrMore(~ignoreExpr + - ~Literal(opener) + ~Literal(closer) + - CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) + content = (Combine(OneOrMore(~ignoreExpr + + ~Literal(opener) + + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS, exact=1)) + ).setParseAction(lambda t: t[0].strip())) else: - content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) + - CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) + content = (Combine(OneOrMore(~Literal(opener) + + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS, exact=1)) + ).setParseAction(lambda t: t[0].strip())) else: raise ValueError("opening and closing arguments must be strings if no content expression is given") ret = Forward() if ignoreExpr is not None: - ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) ) + ret <<= Group(Suppress(opener) + ZeroOrMore(ignoreExpr | ret | content) + Suppress(closer)) else: - ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) ) - ret.setName('nested %s%s expression' % (opener,closer)) + ret <<= Group(Suppress(opener) + ZeroOrMore(ret | content) + Suppress(closer)) + ret.setName('nested %s%s expression' % (opener, closer)) return ret def indentedBlock(blockStatementExpr, indentStack, indent=True): @@ -5783,7 +6240,7 @@ def indentedBlock(blockStatementExpr, indentStack, indent=True): (multiple statementWithIndentedBlock expressions within a single grammar should share a common indentStack) - indent - boolean indicating whether block must be indented beyond - the the current level; set to False for block of left-most + the current level; set to False for block of left-most statements (default= ``True``) A valid block must contain at least 
one ``blockStatement``. @@ -5816,15 +6273,15 @@ def indentedBlock(blockStatementExpr, indentStack, indent=True): stmt = Forward() identifier = Word(alphas, alphanums) - funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":") + funcDecl = ("def" + identifier + Group("(" + Optional(delimitedList(identifier)) + ")") + ":") func_body = indentedBlock(stmt, indentStack) - funcDef = Group( funcDecl + func_body ) + funcDef = Group(funcDecl + func_body) rvalue = Forward() funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") rvalue << (funcCall | identifier | Word(nums)) assignment = Group(identifier + "=" + rvalue) - stmt << ( funcDef | assignment | identifier ) + stmt << (funcDef | assignment | identifier) module_body = OneOrMore(stmt) @@ -5852,47 +6309,56 @@ def indentedBlock(blockStatementExpr, indentStack, indent=True): ':', [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] """ - def checkPeerIndent(s,l,t): + backup_stack = indentStack[:] + + def reset_stack(): + indentStack[:] = backup_stack + + def checkPeerIndent(s, l, t): if l >= len(s): return - curCol = col(l,s) + curCol = col(l, s) if curCol != indentStack[-1]: if curCol > indentStack[-1]: - raise ParseFatalException(s,l,"illegal nesting") - raise ParseException(s,l,"not a peer entry") + raise ParseException(s, l, "illegal nesting") + raise ParseException(s, l, "not a peer entry") - def checkSubIndent(s,l,t): - curCol = col(l,s) + def checkSubIndent(s, l, t): + curCol = col(l, s) if curCol > indentStack[-1]: - indentStack.append( curCol ) + indentStack.append(curCol) else: - raise ParseException(s,l,"not a subentry") + raise ParseException(s, l, "not a subentry") - def checkUnindent(s,l,t): + def checkUnindent(s, l, t): if l >= len(s): return - curCol = col(l,s) - if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]): - raise ParseException(s,l,"not an unindent") - indentStack.pop() + curCol = col(l, s) + if 
not(indentStack and curCol in indentStack): + raise ParseException(s, l, "not an unindent") + if curCol < indentStack[-1]: + indentStack.pop() - NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) + NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress(), stopOn=StringEnd()) INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') PEER = Empty().setParseAction(checkPeerIndent).setName('') UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') if indent: - smExpr = Group( Optional(NL) + - #~ FollowedBy(blockStatementExpr) + - INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT) + smExpr = Group(Optional(NL) + + INDENT + + OneOrMore(PEER + Group(blockStatementExpr) + Optional(NL), stopOn=StringEnd()) + + UNDENT) else: - smExpr = Group( Optional(NL) + - (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) ) + smExpr = Group(Optional(NL) + + OneOrMore(PEER + Group(blockStatementExpr) + Optional(NL), stopOn=StringEnd()) + + UNDENT) + smExpr.setFailAction(lambda a, b, c, d: reset_stack()) blockStatementExpr.ignore(_bslash + LineEnd()) return smExpr.setName('indented block') alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") -anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag')) -_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\'')) +anyOpenTag, anyCloseTag = makeHTMLTags(Word(alphas, alphanums + "_:").setName('any tag')) +_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(), '><& "\'')) commonHTMLEntity = Regex('&(?P<entity>' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity") def replaceHTMLEntity(t): """Helper parser action to replace common HTML entities with their special characters""" @@ -5909,7 +6375,7 @@ restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line") dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment") "Comment 
of the form ``// ... (to end of line)``" -cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment") +cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/' | dblSlashComment).setName("C++ style comment") "Comment of either form :class:`cStyleComment` or :class:`dblSlashComment`" javaStyleComment = cppStyleComment @@ -5918,10 +6384,10 @@ javaStyleComment = cppStyleComment pythonStyleComment = Regex(r"#.*").setName("Python style comment") "Comment of the form ``# ... (to end of line)``" -_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + - Optional( Word(" \t") + - ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem") -commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList") +_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + + Optional(Word(" \t") + + ~Literal(",") + ~LineEnd()))).streamline().setName("commaItem") +commaSeparatedList = delimitedList(Optional(quotedString.copy() | _commasepitem, default="")).setName("commaSeparatedList") """(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas. 
@@ -6087,7 +6553,7 @@ class pyparsing_common: integer = Word(nums).setName("integer").setParseAction(convertToInteger) """expression that parses an unsigned integer, returns an int""" - hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16)) + hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int, 16)) """expression that parses a hexadecimal integer, returns an int""" signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger) @@ -6101,10 +6567,10 @@ class pyparsing_common: """mixed integer of the form 'integer - fraction', with optional leading integer, returns float""" mixed_integer.addParseAction(sum) - real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat) + real = Regex(r'[+-]?(?:\d+\.\d*|\.\d+)').setName("real number").setParseAction(convertToFloat) """expression that parses a floating point number and returns a float""" - sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) + sci_real = Regex(r'[+-]?(?:\d+(?:[eE][+-]?\d+)|(?:\d+\.\d*|\.\d+)(?:[eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) """expression that parses a floating point number with optional scientific notation and returns a float""" @@ -6115,15 +6581,18 @@ class pyparsing_common: fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) """any int or real number, returned as float""" - identifier = Word(alphas+'_', alphanums+'_').setName("identifier") + identifier = Word(alphas + '_', alphanums + '_').setName("identifier") """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") "IPv4 address (``0.0.0.0 - 255.255.255.255``)" _ipv6_part = 
Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") - _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address") - _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address") + _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part) * 7).setName("full IPv6 address") + _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part) * (0, 6)) + + "::" + + Optional(_ipv6_part + (':' + _ipv6_part) * (0, 6)) + ).setName("short IPv6 address") _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8) _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") @@ -6150,7 +6619,7 @@ class pyparsing_common: [datetime.date(1999, 12, 31)] """ - def cvt_fn(s,l,t): + def cvt_fn(s, l, t): try: return datetime.strptime(t[0], fmt).date() except ValueError as ve: @@ -6175,7 +6644,7 @@ class pyparsing_common: [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] """ - def cvt_fn(s,l,t): + def cvt_fn(s, l, t): try: return datetime.strptime(t[0], fmt) except ValueError as ve: @@ -6200,7 +6669,7 @@ class pyparsing_common: # strip HTML links from normal text text = '<td>More info at the <a href="https://github.com/pyparsing/pyparsing/wiki">pyparsing</a> wiki page</td>' - td,td_end = makeHTMLTags("TD") + td, td_end = makeHTMLTags("TD") table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end print(table_text.parseString(text).body) @@ -6210,9 +6679,13 @@ class pyparsing_common: """ return pyparsing_common._html_stripper.transformString(tokens[0]) - _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',') - + Optional( White(" \t") ) ) ).streamline().setName("commaItem") - 
comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list") + _commasepitem = Combine(OneOrMore(~Literal(",") + + ~LineEnd() + + Word(printables, excludeChars=',') + + Optional(White(" \t")))).streamline().setName("commaItem") + comma_separated_list = delimitedList(Optional(quotedString.copy() + | _commasepitem, default='') + ).setName("comma separated list") """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper())) @@ -6231,7 +6704,8 @@ class _lazyclassproperty(object): def __get__(self, obj, cls): if cls is None: cls = type(obj) - if not hasattr(cls, '_intern') or any(cls._intern is getattr(superclass, '_intern', []) for superclass in cls.__mro__[1:]): + if not hasattr(cls, '_intern') or any(cls._intern is getattr(superclass, '_intern', []) + for superclass in cls.__mro__[1:]): cls._intern = {} attrname = self.fn.__name__ if attrname not in cls._intern: @@ -6262,7 +6736,7 @@ class unicode_set(object): if cc is unicode_set: break for rr in cc._ranges: - ret.extend(range(rr[0], rr[-1]+1)) + ret.extend(range(rr[0], rr[-1] + 1)) return [unichr(c) for c in sorted(set(ret))] @_lazyclassproperty @@ -6318,27 +6792,27 @@ class pyparsing_unicode(unicode_set): class Chinese(unicode_set): "Unicode set for Chinese Unicode Character Range" - _ranges = [(0x4e00, 0x9fff), (0x3000, 0x303f), ] + _ranges = [(0x4e00, 0x9fff), (0x3000, 0x303f),] class Japanese(unicode_set): "Unicode set for Japanese Unicode Character Range, combining Kanji, Hiragana, and Katakana ranges" - _ranges = [ ] + _ranges = [] class Kanji(unicode_set): "Unicode set for Kanji Unicode Character Range" - _ranges = [(0x4E00, 0x9Fbf), (0x3000, 0x303f), ] + _ranges = [(0x4E00, 0x9Fbf), (0x3000, 0x303f),] class Hiragana(unicode_set): "Unicode set for Hiragana Unicode Character Range" - _ranges = [(0x3040, 0x309f), ] + _ranges = [(0x3040, 
0x309f),] class Katakana(unicode_set): "Unicode set for Katakana Unicode Character Range" - _ranges = [(0x30a0, 0x30ff), ] + _ranges = [(0x30a0, 0x30ff),] class Korean(unicode_set): "Unicode set for Korean Unicode Character Range" - _ranges = [(0xac00, 0xd7af), (0x1100, 0x11ff), (0x3130, 0x318f), (0xa960, 0xa97f), (0xd7b0, 0xd7ff), (0x3000, 0x303f), ] + _ranges = [(0xac00, 0xd7af), (0x1100, 0x11ff), (0x3130, 0x318f), (0xa960, 0xa97f), (0xd7b0, 0xd7ff), (0x3000, 0x303f),] class CJK(Chinese, Japanese, Korean): "Unicode set for combined Chinese, Japanese, and Korean (CJK) Unicode Character Range" @@ -6346,15 +6820,15 @@ class pyparsing_unicode(unicode_set): class Thai(unicode_set): "Unicode set for Thai Unicode Character Range" - _ranges = [(0x0e01, 0x0e3a), (0x0e3f, 0x0e5b), ] + _ranges = [(0x0e01, 0x0e3a), (0x0e3f, 0x0e5b),] class Arabic(unicode_set): "Unicode set for Arabic Unicode Character Range" - _ranges = [(0x0600, 0x061b), (0x061e, 0x06ff), (0x0700, 0x077f), ] + _ranges = [(0x0600, 0x061b), (0x061e, 0x06ff), (0x0700, 0x077f),] class Hebrew(unicode_set): "Unicode set for Hebrew Unicode Character Range" - _ranges = [(0x0590, 0x05ff), ] + _ranges = [(0x0590, 0x05ff),] class Devanagari(unicode_set): "Unicode set for Devanagari Unicode Character Range" @@ -6366,18 +6840,199 @@ pyparsing_unicode.Japanese._ranges = (pyparsing_unicode.Japanese.Kanji._ranges # define ranges in language character sets if PY_3: - setattr(pyparsing_unicode, "العربية", pyparsing_unicode.Arabic) - setattr(pyparsing_unicode, "中文", pyparsing_unicode.Chinese) - setattr(pyparsing_unicode, "кириллица", pyparsing_unicode.Cyrillic) - setattr(pyparsing_unicode, "Ελληνικά", pyparsing_unicode.Greek) - setattr(pyparsing_unicode, "עִברִית", pyparsing_unicode.Hebrew) - setattr(pyparsing_unicode, "日本語", pyparsing_unicode.Japanese) - setattr(pyparsing_unicode.Japanese, "漢字", pyparsing_unicode.Japanese.Kanji) - setattr(pyparsing_unicode.Japanese, "カタカナ", pyparsing_unicode.Japanese.Katakana) - 
setattr(pyparsing_unicode.Japanese, "ひらがな", pyparsing_unicode.Japanese.Hiragana) - setattr(pyparsing_unicode, "한국어", pyparsing_unicode.Korean) - setattr(pyparsing_unicode, "ไทย", pyparsing_unicode.Thai) - setattr(pyparsing_unicode, "देवनागरी", pyparsing_unicode.Devanagari) + setattr(pyparsing_unicode, u"العربية", pyparsing_unicode.Arabic) + setattr(pyparsing_unicode, u"中文", pyparsing_unicode.Chinese) + setattr(pyparsing_unicode, u"кириллица", pyparsing_unicode.Cyrillic) + setattr(pyparsing_unicode, u"Ελληνικά", pyparsing_unicode.Greek) + setattr(pyparsing_unicode, u"עִברִית", pyparsing_unicode.Hebrew) + setattr(pyparsing_unicode, u"日本語", pyparsing_unicode.Japanese) + setattr(pyparsing_unicode.Japanese, u"漢字", pyparsing_unicode.Japanese.Kanji) + setattr(pyparsing_unicode.Japanese, u"カタカナ", pyparsing_unicode.Japanese.Katakana) + setattr(pyparsing_unicode.Japanese, u"ひらがな", pyparsing_unicode.Japanese.Hiragana) + setattr(pyparsing_unicode, u"한국어", pyparsing_unicode.Korean) + setattr(pyparsing_unicode, u"ไทย", pyparsing_unicode.Thai) + setattr(pyparsing_unicode, u"देवनागरी", pyparsing_unicode.Devanagari) + + +class pyparsing_test: + """ + namespace class for classes useful in writing unit tests + """ + + class reset_pyparsing_context: + """ + Context manager to be used when writing unit tests that modify pyparsing config values: + - packrat parsing + - default whitespace characters. + - default keyword characters + - literal string auto-conversion class + - __diag__ settings + + Example: + with reset_pyparsing_context(): + # test that literals used to construct a grammar are automatically suppressed + ParserElement.inlineLiteralsUsing(Suppress) + + term = Word(alphas) | Word(nums) + group = Group('(' + term[...] 
+ ')') + + # assert that the '()' characters are not included in the parsed tokens + self.assertParseAndCheckLisst(group, "(abc 123 def)", ['abc', '123', 'def']) + + # after exiting context manager, literals are converted to Literal expressions again + """ + + def __init__(self): + self._save_context = {} + + def save(self): + self._save_context["default_whitespace"] = ParserElement.DEFAULT_WHITE_CHARS + self._save_context["default_keyword_chars"] = Keyword.DEFAULT_KEYWORD_CHARS + self._save_context[ + "literal_string_class" + ] = ParserElement._literalStringClass + self._save_context["packrat_enabled"] = ParserElement._packratEnabled + self._save_context["packrat_parse"] = ParserElement._parse + self._save_context["__diag__"] = { + name: getattr(__diag__, name) for name in __diag__._all_names + } + self._save_context["__compat__"] = { + "collect_all_And_tokens": __compat__.collect_all_And_tokens + } + return self + + def restore(self): + # reset pyparsing global state + if ( + ParserElement.DEFAULT_WHITE_CHARS + != self._save_context["default_whitespace"] + ): + ParserElement.setDefaultWhitespaceChars( + self._save_context["default_whitespace"] + ) + Keyword.DEFAULT_KEYWORD_CHARS = self._save_context["default_keyword_chars"] + ParserElement.inlineLiteralsUsing( + self._save_context["literal_string_class"] + ) + for name, value in self._save_context["__diag__"].items(): + setattr(__diag__, name, value) + ParserElement._packratEnabled = self._save_context["packrat_enabled"] + ParserElement._parse = self._save_context["packrat_parse"] + __compat__.collect_all_And_tokens = self._save_context["__compat__"] + + def __enter__(self): + return self.save() + + def __exit__(self, *args): + return self.restore() + + class TestParseResultsAsserts: + """ + A mixin class to add parse results assertion methods to normal unittest.TestCase classes. 
+ """ + def assertParseResultsEquals( + self, result, expected_list=None, expected_dict=None, msg=None + ): + """ + Unit test assertion to compare a ParseResults object with an optional expected_list, + and compare any defined results names with an optional expected_dict. + """ + if expected_list is not None: + self.assertEqual(expected_list, result.asList(), msg=msg) + if expected_dict is not None: + self.assertEqual(expected_dict, result.asDict(), msg=msg) + + def assertParseAndCheckList( + self, expr, test_string, expected_list, msg=None, verbose=True + ): + """ + Convenience wrapper assert to test a parser element and input string, and assert that + the resulting ParseResults.asList() is equal to the expected_list. + """ + result = expr.parseString(test_string, parseAll=True) + if verbose: + print(result.dump()) + self.assertParseResultsEquals(result, expected_list=expected_list, msg=msg) + + def assertParseAndCheckDict( + self, expr, test_string, expected_dict, msg=None, verbose=True + ): + """ + Convenience wrapper assert to test a parser element and input string, and assert that + the resulting ParseResults.asDict() is equal to the expected_dict. + """ + result = expr.parseString(test_string, parseAll=True) + if verbose: + print(result.dump()) + self.assertParseResultsEquals(result, expected_dict=expected_dict, msg=msg) + + def assertRunTestResults( + self, run_tests_report, expected_parse_results=None, msg=None + ): + """ + Unit test assertion to evaluate output of ParserElement.runTests(). If a list of + list-dict tuples is given as the expected_parse_results argument, then these are zipped + with the report tuples returned by runTests and evaluated using assertParseResultsEquals. + Finally, asserts that the overall runTests() success value is True. 
+ + :param run_tests_report: tuple(bool, [tuple(str, ParseResults or Exception)]) returned from runTests + :param expected_parse_results (optional): [tuple(str, list, dict, Exception)] + """ + run_test_success, run_test_results = run_tests_report + + if expected_parse_results is not None: + merged = [ + (rpt[0], rpt[1], expected) + for rpt, expected in zip(run_test_results, expected_parse_results) + ] + for test_string, result, expected in merged: + # expected should be a tuple containing a list and/or a dict or an exception, + # and optional failure message string + # an empty tuple will skip any result validation + fail_msg = next( + (exp for exp in expected if isinstance(exp, str)), None + ) + expected_exception = next( + ( + exp + for exp in expected + if isinstance(exp, type) and issubclass(exp, Exception) + ), + None, + ) + if expected_exception is not None: + with self.assertRaises( + expected_exception=expected_exception, msg=fail_msg or msg + ): + if isinstance(result, Exception): + raise result + else: + expected_list = next( + (exp for exp in expected if isinstance(exp, list)), None + ) + expected_dict = next( + (exp for exp in expected if isinstance(exp, dict)), None + ) + if (expected_list, expected_dict) != (None, None): + self.assertParseResultsEquals( + result, + expected_list=expected_list, + expected_dict=expected_dict, + msg=fail_msg or msg, + ) + else: + # warning here maybe? 
+ print("no validation for {!r}".format(test_string)) + + # do this last, in case some specific test results can be reported instead + self.assertTrue( + run_test_success, msg=msg if msg is not None else "failed runTests" + ) + + @contextmanager + def assertRaisesParseException(self, exc_type=ParseException, msg=None): + with self.assertRaises(exc_type, msg=msg): + yield if __name__ == "__main__": diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/__init__.py deleted file mode 100644 index 8ed060ff..00000000 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .core import TomlError -from .parser import load, loads -from .test import translate_to_test -from .writer import dump, dumps \ No newline at end of file diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/core.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/core.py deleted file mode 100644 index c182734e..00000000 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/core.py +++ /dev/null @@ -1,13 +0,0 @@ -class TomlError(RuntimeError): - def __init__(self, message, line, col, filename): - RuntimeError.__init__(self, message, line, col, filename) - self.message = message - self.line = line - self.col = col - self.filename = filename - - def __str__(self): - return '{}({}, {}): {}'.format(self.filename, self.line, self.col, self.message) - - def __repr__(self): - return 'TomlError({!r}, {!r}, {!r}, {!r})'.format(self.message, self.line, self.col, self.filename) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/parser.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/parser.py deleted file mode 100644 index 3493aa64..00000000 --- 
a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/parser.py +++ /dev/null @@ -1,341 +0,0 @@ -import string, re, sys, datetime -from .core import TomlError -from .utils import rfc3339_re, parse_rfc3339_re - -if sys.version_info[0] == 2: - _chr = unichr -else: - _chr = chr - -def load(fin, translate=lambda t, x, v: v, object_pairs_hook=dict): - return loads(fin.read(), translate=translate, object_pairs_hook=object_pairs_hook, filename=getattr(fin, 'name', repr(fin))) - -def loads(s, filename='<string>', translate=lambda t, x, v: v, object_pairs_hook=dict): - if isinstance(s, bytes): - s = s.decode('utf-8') - - s = s.replace('\r\n', '\n') - - root = object_pairs_hook() - tables = object_pairs_hook() - scope = root - - src = _Source(s, filename=filename) - ast = _p_toml(src, object_pairs_hook=object_pairs_hook) - - def error(msg): - raise TomlError(msg, pos[0], pos[1], filename) - - def process_value(v, object_pairs_hook): - kind, text, value, pos = v - if kind == 'str' and value.startswith('\n'): - value = value[1:] - if kind == 'array': - if value and any(k != value[0][0] for k, t, v, p in value[1:]): - error('array-type-mismatch') - value = [process_value(item, object_pairs_hook=object_pairs_hook) for item in value] - elif kind == 'table': - value = object_pairs_hook([(k, process_value(value[k], object_pairs_hook=object_pairs_hook)) for k in value]) - return translate(kind, text, value) - - for kind, value, pos in ast: - if kind == 'kv': - k, v = value - if k in scope: - error('duplicate_keys. 
Key "{0}" was used more than once.'.format(k)) - scope[k] = process_value(v, object_pairs_hook=object_pairs_hook) - else: - is_table_array = (kind == 'table_array') - cur = tables - for name in value[:-1]: - if isinstance(cur.get(name), list): - d, cur = cur[name][-1] - else: - d, cur = cur.setdefault(name, (None, object_pairs_hook())) - - scope = object_pairs_hook() - name = value[-1] - if name not in cur: - if is_table_array: - cur[name] = [(scope, object_pairs_hook())] - else: - cur[name] = (scope, object_pairs_hook()) - elif isinstance(cur[name], list): - if not is_table_array: - error('table_type_mismatch') - cur[name].append((scope, object_pairs_hook())) - else: - if is_table_array: - error('table_type_mismatch') - old_scope, next_table = cur[name] - if old_scope is not None: - error('duplicate_tables') - cur[name] = (scope, next_table) - - def merge_tables(scope, tables): - if scope is None: - scope = object_pairs_hook() - for k in tables: - if k in scope: - error('key_table_conflict') - v = tables[k] - if isinstance(v, list): - scope[k] = [merge_tables(sc, tbl) for sc, tbl in v] - else: - scope[k] = merge_tables(v[0], v[1]) - return scope - - return merge_tables(root, tables) - -class _Source: - def __init__(self, s, filename=None): - self.s = s - self._pos = (1, 1) - self._last = None - self._filename = filename - self.backtrack_stack = [] - - def last(self): - return self._last - - def pos(self): - return self._pos - - def fail(self): - return self._expect(None) - - def consume_dot(self): - if self.s: - self._last = self.s[0] - self.s = self[1:] - self._advance(self._last) - return self._last - return None - - def expect_dot(self): - return self._expect(self.consume_dot()) - - def consume_eof(self): - if not self.s: - self._last = '' - return True - return False - - def expect_eof(self): - return self._expect(self.consume_eof()) - - def consume(self, s): - if self.s.startswith(s): - self.s = self.s[len(s):] - self._last = s - self._advance(s) - return 
True - return False - - def expect(self, s): - return self._expect(self.consume(s)) - - def consume_re(self, re): - m = re.match(self.s) - if m: - self.s = self.s[len(m.group(0)):] - self._last = m - self._advance(m.group(0)) - return m - return None - - def expect_re(self, re): - return self._expect(self.consume_re(re)) - - def __enter__(self): - self.backtrack_stack.append((self.s, self._pos)) - - def __exit__(self, type, value, traceback): - if type is None: - self.backtrack_stack.pop() - else: - self.s, self._pos = self.backtrack_stack.pop() - return type == TomlError - - def commit(self): - self.backtrack_stack[-1] = (self.s, self._pos) - - def _expect(self, r): - if not r: - raise TomlError('msg', self._pos[0], self._pos[1], self._filename) - return r - - def _advance(self, s): - suffix_pos = s.rfind('\n') - if suffix_pos == -1: - self._pos = (self._pos[0], self._pos[1] + len(s)) - else: - self._pos = (self._pos[0] + s.count('\n'), len(s) - suffix_pos) - -_ews_re = re.compile(r'(?:[ \t]|#[^\n]*\n|#[^\n]*\Z|\n)*') -def _p_ews(s): - s.expect_re(_ews_re) - -_ws_re = re.compile(r'[ \t]*') -def _p_ws(s): - s.expect_re(_ws_re) - -_escapes = { 'b': '\b', 'n': '\n', 'r': '\r', 't': '\t', '"': '"', - '\\': '\\', 'f': '\f' } - -_basicstr_re = re.compile(r'[^"\\\000-\037]*') -_short_uni_re = re.compile(r'u([0-9a-fA-F]{4})') -_long_uni_re = re.compile(r'U([0-9a-fA-F]{8})') -_escapes_re = re.compile(r'[btnfr\"\\]') -_newline_esc_re = re.compile('\n[ \t\n]*') -def _p_basicstr_content(s, content=_basicstr_re): - res = [] - while True: - res.append(s.expect_re(content).group(0)) - if not s.consume('\\'): - break - if s.consume_re(_newline_esc_re): - pass - elif s.consume_re(_short_uni_re) or s.consume_re(_long_uni_re): - v = int(s.last().group(1), 16) - if 0xd800 <= v < 0xe000: - s.fail() - res.append(_chr(v)) - else: - s.expect_re(_escapes_re) - res.append(_escapes[s.last().group(0)]) - return ''.join(res) - -_key_re = re.compile(r'[0-9a-zA-Z-_]+') -def _p_key(s): - with s: 
- s.expect('"') - r = _p_basicstr_content(s, _basicstr_re) - s.expect('"') - return r - if s.consume('\''): - if s.consume('\'\''): - r = s.expect_re(_litstr_ml_re).group(0) - s.expect('\'\'\'') - else: - r = s.expect_re(_litstr_re).group(0) - s.expect('\'') - return r - return s.expect_re(_key_re).group(0) - -_float_re = re.compile(r'[+-]?(?:0|[1-9](?:_?\d)*)(?:\.\d(?:_?\d)*)?(?:[eE][+-]?(?:\d(?:_?\d)*))?') - -_basicstr_ml_re = re.compile(r'(?:""?(?!")|[^"\\\000-\011\013-\037])*') -_litstr_re = re.compile(r"[^'\000\010\012-\037]*") -_litstr_ml_re = re.compile(r"(?:(?:|'|'')(?:[^'\000-\010\013-\037]))*") -def _p_value(s, object_pairs_hook): - pos = s.pos() - - if s.consume('true'): - return 'bool', s.last(), True, pos - if s.consume('false'): - return 'bool', s.last(), False, pos - - if s.consume('"'): - if s.consume('""'): - r = _p_basicstr_content(s, _basicstr_ml_re) - s.expect('"""') - else: - r = _p_basicstr_content(s, _basicstr_re) - s.expect('"') - return 'str', r, r, pos - - if s.consume('\''): - if s.consume('\'\''): - r = s.expect_re(_litstr_ml_re).group(0) - s.expect('\'\'\'') - else: - r = s.expect_re(_litstr_re).group(0) - s.expect('\'') - return 'str', r, r, pos - - if s.consume_re(rfc3339_re): - m = s.last() - return 'datetime', m.group(0), parse_rfc3339_re(m), pos - - if s.consume_re(_float_re): - m = s.last().group(0) - r = m.replace('_','') - if '.' 
in m or 'e' in m or 'E' in m: - return 'float', m, float(r), pos - else: - return 'int', m, int(r, 10), pos - - if s.consume('['): - items = [] - with s: - while True: - _p_ews(s) - items.append(_p_value(s, object_pairs_hook=object_pairs_hook)) - s.commit() - _p_ews(s) - s.expect(',') - s.commit() - _p_ews(s) - s.expect(']') - return 'array', None, items, pos - - if s.consume('{'): - _p_ws(s) - items = object_pairs_hook() - if not s.consume('}'): - k = _p_key(s) - _p_ws(s) - s.expect('=') - _p_ws(s) - items[k] = _p_value(s, object_pairs_hook=object_pairs_hook) - _p_ws(s) - while s.consume(','): - _p_ws(s) - k = _p_key(s) - _p_ws(s) - s.expect('=') - _p_ws(s) - items[k] = _p_value(s, object_pairs_hook=object_pairs_hook) - _p_ws(s) - s.expect('}') - return 'table', None, items, pos - - s.fail() - -def _p_stmt(s, object_pairs_hook): - pos = s.pos() - if s.consume( '['): - is_array = s.consume('[') - _p_ws(s) - keys = [_p_key(s)] - _p_ws(s) - while s.consume('.'): - _p_ws(s) - keys.append(_p_key(s)) - _p_ws(s) - s.expect(']') - if is_array: - s.expect(']') - return 'table_array' if is_array else 'table', keys, pos - - key = _p_key(s) - _p_ws(s) - s.expect('=') - _p_ws(s) - value = _p_value(s, object_pairs_hook=object_pairs_hook) - return 'kv', (key, value), pos - -_stmtsep_re = re.compile(r'(?:[ \t]*(?:#[^\n]*)?\n)+[ \t]*') -def _p_toml(s, object_pairs_hook): - stmts = [] - _p_ews(s) - with s: - stmts.append(_p_stmt(s, object_pairs_hook=object_pairs_hook)) - while True: - s.commit() - s.expect_re(_stmtsep_re) - stmts.append(_p_stmt(s, object_pairs_hook=object_pairs_hook)) - _p_ews(s) - s.expect_eof() - return stmts diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/test.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/test.py deleted file mode 100644 index ec8abfc6..00000000 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/test.py +++ /dev/null @@ -1,30 +0,0 @@ -import datetime 
-from .utils import format_rfc3339 - -try: - _string_types = (str, unicode) - _int_types = (int, long) -except NameError: - _string_types = str - _int_types = int - -def translate_to_test(v): - if isinstance(v, dict): - return { k: translate_to_test(v) for k, v in v.items() } - if isinstance(v, list): - a = [translate_to_test(x) for x in v] - if v and isinstance(v[0], dict): - return a - else: - return {'type': 'array', 'value': a} - if isinstance(v, datetime.datetime): - return {'type': 'datetime', 'value': format_rfc3339(v)} - if isinstance(v, bool): - return {'type': 'bool', 'value': 'true' if v else 'false'} - if isinstance(v, _int_types): - return {'type': 'integer', 'value': str(v)} - if isinstance(v, float): - return {'type': 'float', 'value': '{:.17}'.format(v)} - if isinstance(v, _string_types): - return {'type': 'string', 'value': v} - raise RuntimeError('unexpected value: {!r}'.format(v)) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/utils.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/utils.py deleted file mode 100644 index 636a680b..00000000 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/utils.py +++ /dev/null @@ -1,67 +0,0 @@ -import datetime -import re - -rfc3339_re = re.compile(r'(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(\.\d+)?(?:Z|([+-]\d{2}):(\d{2}))') - -def parse_rfc3339(v): - m = rfc3339_re.match(v) - if not m or m.group(0) != v: - return None - return parse_rfc3339_re(m) - -def parse_rfc3339_re(m): - r = map(int, m.groups()[:6]) - if m.group(7): - micro = float(m.group(7)) - else: - micro = 0 - - if m.group(8): - g = int(m.group(8), 10) * 60 + int(m.group(9), 10) - tz = _TimeZone(datetime.timedelta(0, g * 60)) - else: - tz = _TimeZone(datetime.timedelta(0, 0)) - - y, m, d, H, M, S = r - return datetime.datetime(y, m, d, H, M, S, int(micro * 1000000), tz) - - -def format_rfc3339(v): - offs = v.utcoffset() - offs = int(offs.total_seconds()) // 
60 if offs is not None else 0 - - if offs == 0: - suffix = 'Z' - else: - if offs > 0: - suffix = '+' - else: - suffix = '-' - offs = -offs - suffix = '{0}{1:02}:{2:02}'.format(suffix, offs // 60, offs % 60) - - if v.microsecond: - return v.strftime('%Y-%m-%dT%H:%M:%S.%f') + suffix - else: - return v.strftime('%Y-%m-%dT%H:%M:%S') + suffix - -class _TimeZone(datetime.tzinfo): - def __init__(self, offset): - self._offset = offset - - def utcoffset(self, dt): - return self._offset - - def dst(self, dt): - return None - - def tzname(self, dt): - m = self._offset.total_seconds() // 60 - if m < 0: - res = '-' - m = -m - else: - res = '+' - h = m // 60 - m = m - h * 60 - return '{}{:.02}{:.02}'.format(res, h, m) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/writer.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/writer.py deleted file mode 100644 index 73b5089c..00000000 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/pytoml/writer.py +++ /dev/null @@ -1,106 +0,0 @@ -from __future__ import unicode_literals -import io, datetime, math, string, sys - -from .utils import format_rfc3339 - -if sys.version_info[0] == 3: - long = int - unicode = str - - -def dumps(obj, sort_keys=False): - fout = io.StringIO() - dump(obj, fout, sort_keys=sort_keys) - return fout.getvalue() - - -_escapes = {'\n': 'n', '\r': 'r', '\\': '\\', '\t': 't', '\b': 'b', '\f': 'f', '"': '"'} - - -def _escape_string(s): - res = [] - start = 0 - - def flush(): - if start != i: - res.append(s[start:i]) - return i + 1 - - i = 0 - while i < len(s): - c = s[i] - if c in '"\\\n\r\t\b\f': - start = flush() - res.append('\\' + _escapes[c]) - elif ord(c) < 0x20: - start = flush() - res.append('\\u%04x' % ord(c)) - i += 1 - - flush() - return '"' + ''.join(res) + '"' - - -_key_chars = string.digits + string.ascii_letters + '-_' -def _escape_id(s): - if any(c not in _key_chars for c in s): - return _escape_string(s) - return s - - 
-def _format_value(v): - if isinstance(v, bool): - return 'true' if v else 'false' - if isinstance(v, int) or isinstance(v, long): - return unicode(v) - if isinstance(v, float): - if math.isnan(v) or math.isinf(v): - raise ValueError("{0} is not a valid TOML value".format(v)) - else: - return repr(v) - elif isinstance(v, unicode) or isinstance(v, bytes): - return _escape_string(v) - elif isinstance(v, datetime.datetime): - return format_rfc3339(v) - elif isinstance(v, list): - return '[{0}]'.format(', '.join(_format_value(obj) for obj in v)) - elif isinstance(v, dict): - return '{{{0}}}'.format(', '.join('{} = {}'.format(_escape_id(k), _format_value(obj)) for k, obj in v.items())) - else: - raise RuntimeError(v) - - -def dump(obj, fout, sort_keys=False): - tables = [((), obj, False)] - - while tables: - name, table, is_array = tables.pop() - if name: - section_name = '.'.join(_escape_id(c) for c in name) - if is_array: - fout.write('[[{0}]]\n'.format(section_name)) - else: - fout.write('[{0}]\n'.format(section_name)) - - table_keys = sorted(table.keys()) if sort_keys else table.keys() - new_tables = [] - has_kv = False - for k in table_keys: - v = table[k] - if isinstance(v, dict): - new_tables.append((name + (k,), v, False)) - elif isinstance(v, list) and v and all(isinstance(o, dict) for o in v): - new_tables.extend((name + (k,), d, True) for d in v) - elif v is None: - # based on mojombo's comment: https://github.com/toml-lang/toml/issues/146#issuecomment-25019344 - fout.write( - '#{} = null # To use: uncomment and replace null with value\n'.format(_escape_id(k))) - has_kv = True - else: - fout.write('{0} = {1}\n'.format(_escape_id(k), _format_value(v))) - has_kv = True - - tables.extend(reversed(new_tables)) - - if (name or has_kv) and tables: - fout.write('\n') diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/__init__.py index 
80c4ce1d..517458b5 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/__init__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/__init__.py @@ -9,14 +9,14 @@ Requests HTTP Library ~~~~~~~~~~~~~~~~~~~~~ -Requests is an HTTP library, written in Python, for human beings. Basic GET -usage: +Requests is an HTTP library, written in Python, for human beings. +Basic GET usage: >>> import requests >>> r = requests.get('https://www.python.org') >>> r.status_code 200 - >>> 'Python is a programming language' in r.content + >>> b'Python is a programming language' in r.content True ... or POST: @@ -27,14 +27,14 @@ usage: { ... "form": { - "key2": "value2", - "key1": "value1" + "key1": "value1", + "key2": "value2" }, ... } The other HTTP methods are supported - see `requests.api`. Full documentation -is at <http://python-requests.org>. +is at <https://requests.readthedocs.io>. :copyright: (c) 2017 by Kenneth Reitz. :license: Apache 2.0, see LICENSE for more details. @@ -57,10 +57,10 @@ def check_compatibility(urllib3_version, chardet_version): # Check urllib3 for compatibility. major, minor, patch = urllib3_version # noqa: F811 major, minor, patch = int(major), int(minor), int(patch) - # urllib3 >= 1.21.1, <= 1.24 + # urllib3 >= 1.21.1, <= 1.25 assert major == 1 assert minor >= 21 - assert minor <= 24 + assert minor <= 25 # Check chardet for compatibility. major, minor, patch = chardet_version.split('.')[:3] @@ -90,18 +90,29 @@ except (AssertionError, ValueError): "version!".format(urllib3.__version__, chardet.__version__), RequestsDependencyWarning) -# Attempt to enable urllib3's SNI support, if possible -from pip._internal.utils.compat import WINDOWS -if not WINDOWS: +# Attempt to enable urllib3's fallback for SNI support +# if the standard library doesn't support SNI or the +# 'ssl' library isn't available. +try: + # Note: This logic prevents upgrading cryptography on Windows, if imported + # as part of pip. 
+ from pip._internal.utils.compat import WINDOWS + if not WINDOWS: + raise ImportError("pip internals: don't import cryptography on Windows") try: + import ssl + except ImportError: + ssl = None + + if not getattr(ssl, "HAS_SNI", False): from pip._vendor.urllib3.contrib import pyopenssl pyopenssl.inject_into_urllib3() # Check cryptography version from cryptography import __version__ as cryptography_version _check_cryptography(cryptography_version) - except ImportError: - pass +except ImportError: + pass # urllib3's DependencyWarnings should be silenced. from pip._vendor.urllib3.exceptions import DependencyWarning diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/__version__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/__version__.py index f5b5d036..531e26ce 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/__version__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/__version__.py @@ -4,11 +4,11 @@ __title__ = 'requests' __description__ = 'Python HTTP for Humans.' -__url__ = 'http://python-requests.org' -__version__ = '2.21.0' -__build__ = 0x022100 +__url__ = 'https://requests.readthedocs.io' +__version__ = '2.24.0' +__build__ = 0x022400 __author__ = 'Kenneth Reitz' __author_email__ = 'me@kennethreitz.org' __license__ = 'Apache 2.0' -__copyright__ = 'Copyright 2018 Kenneth Reitz' +__copyright__ = 'Copyright 2020 Kenneth Reitz' __cake__ = u'\u2728 \U0001f370 \u2728' diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/api.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/api.py index abada96d..e978e203 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/api.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/api.py @@ -16,10 +16,10 @@ from . 
import sessions def request(method, url, **kwargs): """Constructs and sends a :class:`Request <Request>`. - :param method: method for the new :class:`Request` object. + :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``. :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary, list of tuples or bytes to send - in the body of the :class:`Request`. + in the query string for the :class:`Request`. :param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. @@ -50,6 +50,7 @@ def request(method, url, **kwargs): >>> import requests >>> req = requests.request('GET', 'https://httpbin.org/get') + >>> req <Response [200]> """ @@ -65,7 +66,7 @@ def get(url, params=None, **kwargs): :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary, list of tuples or bytes to send - in the body of the :class:`Request`. + in the query string for the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response <Response>` object :rtype: requests.Response @@ -92,7 +93,9 @@ def head(url, **kwargs): r"""Sends a HEAD request. :param url: URL for the new :class:`Request` object. - :param \*\*kwargs: Optional arguments that ``request`` takes. + :param \*\*kwargs: Optional arguments that ``request`` takes. If + `allow_redirects` is not provided, it will be set to `False` (as + opposed to the default :meth:`request` behavior). 
:return: :class:`Response <Response>` object :rtype: requests.Response """ diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/auth.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/auth.py index bdde51c7..eeface39 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/auth.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/auth.py @@ -50,7 +50,7 @@ def _basic_auth_str(username, password): "Non-string passwords will no longer be supported in Requests " "3.0.0. Please convert the object you've passed in ({!r}) to " "a string or bytes object in the near future to avoid " - "problems.".format(password), + "problems.".format(type(password)), category=DeprecationWarning, ) password = str(password) @@ -239,7 +239,7 @@ class HTTPDigestAuth(AuthBase): """ # If response is not 4xx, do not auth - # See https://github.com/requests/requests/issues/3772 + # See https://github.com/psf/requests/issues/3772 if not 400 <= r.status_code < 500: self._thread_local.num_401_calls = 1 return r diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/compat.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/compat.py index 6a86893d..9e293716 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/compat.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/compat.py @@ -47,6 +47,7 @@ if is_py2: import cookielib from Cookie import Morsel from StringIO import StringIO + # Keep OrderedDict for backwards compatibility. from collections import Callable, Mapping, MutableMapping, OrderedDict @@ -63,6 +64,7 @@ elif is_py3: from http import cookiejar as cookielib from http.cookies import Morsel from io import StringIO + # Keep OrderedDict for backwards compatibility. 
from collections import OrderedDict from collections.abc import Callable, Mapping, MutableMapping diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/exceptions.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/exceptions.py index a91e1fd1..9ef9e6e9 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/exceptions.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/exceptions.py @@ -94,11 +94,11 @@ class ChunkedEncodingError(RequestException): class ContentDecodingError(RequestException, BaseHTTPError): - """Failed to decode response content""" + """Failed to decode response content.""" class StreamConsumedError(RequestException, TypeError): - """The content for this response was already consumed""" + """The content for this response was already consumed.""" class RetryError(RequestException): @@ -106,21 +106,18 @@ class RetryError(RequestException): class UnrewindableBodyError(RequestException): - """Requests encountered an error when trying to rewind a body""" + """Requests encountered an error when trying to rewind a body.""" # Warnings class RequestsWarning(Warning): """Base warning for Requests.""" - pass class FileModeWarning(RequestsWarning, DeprecationWarning): """A file was opened in text mode, but Requests determined its binary length.""" - pass class RequestsDependencyWarning(RequestsWarning): """An imported dependency doesn't match the expected version range.""" - pass diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/models.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/models.py index 08399574..015e715d 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/models.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/models.py @@ -12,7 +12,7 @@ import sys # Import encoding now, to avoid implicit import 
later. # Implicit import within threads may cause LookupError when standard library is in a ZIP, -# such as in Embedded Python. See https://github.com/requests/requests/issues/3578. +# such as in Embedded Python. See https://github.com/psf/requests/issues/3578. import encodings.idna from pip._vendor.urllib3.fields import RequestField @@ -280,6 +280,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): >>> import requests >>> req = requests.Request('GET', 'https://httpbin.org/get') >>> r = req.prepare() + >>> r <PreparedRequest [GET]> >>> s = requests.Session() @@ -358,7 +359,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): #: We're unable to blindly call unicode/str functions #: as this will include the bytestring indicator (b'') #: on python 3.x. - #: https://github.com/requests/requests/pull/2238 + #: https://github.com/psf/requests/pull/2238 if isinstance(url, bytes): url = url.decode('utf8') else: @@ -472,12 +473,12 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): not isinstance(data, (basestring, list, tuple, Mapping)) ]) - try: - length = super_len(data) - except (TypeError, AttributeError, UnsupportedOperation): - length = None - if is_stream: + try: + length = super_len(data) + except (TypeError, AttributeError, UnsupportedOperation): + length = None + body = data if getattr(body, 'tell', None) is not None: @@ -608,7 +609,7 @@ class Response(object): #: File-like object representation of response (for advanced usage). #: Use of ``raw`` requires that ``stream=True`` be set on the request. - # This requirement does not apply for use internally to Requests. + #: This requirement does not apply for use internally to Requests. self.raw = None #: Final URL location of Response. 
@@ -915,7 +916,7 @@ class Response(object): return l def raise_for_status(self): - """Raises stored :class:`HTTPError`, if one occurred.""" + """Raises :class:`HTTPError`, if one occurred.""" http_error_msg = '' if isinstance(self.reason, bytes): diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/sessions.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/sessions.py index d73d700f..e8e2d609 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/sessions.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/sessions.py @@ -11,9 +11,10 @@ import os import sys import time from datetime import timedelta +from collections import OrderedDict from .auth import _basic_auth_str -from .compat import cookielib, is_py3, OrderedDict, urljoin, urlparse, Mapping +from .compat import cookielib, is_py3, urljoin, urlparse, Mapping from .cookies import ( cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies) from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT @@ -162,7 +163,7 @@ class SessionRedirectMixin(object): resp.raw.read(decode_content=False) if len(resp.history) >= self.max_redirects: - raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects, response=resp) + raise TooManyRedirects('Exceeded {} redirects.'.format(self.max_redirects), response=resp) # Release the connection back into the pool. 
resp.close() @@ -170,7 +171,7 @@ class SessionRedirectMixin(object): # Handle redirection without scheme (see: RFC 1808 Section 4) if url.startswith('//'): parsed_rurl = urlparse(resp.url) - url = '%s:%s' % (to_native_string(parsed_rurl.scheme), url) + url = ':'.join([to_native_string(parsed_rurl.scheme), url]) # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2) parsed = urlparse(url) @@ -192,19 +193,16 @@ class SessionRedirectMixin(object): self.rebuild_method(prepared_request, resp) - # https://github.com/requests/requests/issues/1084 + # https://github.com/psf/requests/issues/1084 if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect): - # https://github.com/requests/requests/issues/3490 + # https://github.com/psf/requests/issues/3490 purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding') for header in purged_headers: prepared_request.headers.pop(header, None) prepared_request.body = None headers = prepared_request.headers - try: - del headers['Cookie'] - except KeyError: - pass + headers.pop('Cookie', None) # Extract any cookies sent on the response to the cookiejar # in the new request. Because we've mutated our copied prepared @@ -271,7 +269,6 @@ class SessionRedirectMixin(object): if new_auth is not None: prepared_request.prepare_auth(new_auth) - return def rebuild_proxies(self, prepared_request, proxies): """This method re-evaluates the proxy configuration by considering the @@ -352,13 +349,13 @@ class Session(SessionRedirectMixin): Or as a context manager:: >>> with requests.Session() as s: - >>> s.get('https://httpbin.org/get') + ... 
s.get('https://httpbin.org/get') <Response [200]> """ __attrs__ = [ 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify', - 'cert', 'prefetch', 'adapters', 'stream', 'trust_env', + 'cert', 'adapters', 'stream', 'trust_env', 'max_redirects', ] @@ -661,11 +658,13 @@ class Session(SessionRedirectMixin): extract_cookies_to_jar(self.cookies, request, r.raw) - # Redirect resolving generator. - gen = self.resolve_redirects(r, request, **kwargs) - # Resolve redirects if allowed. - history = [resp for resp in gen] if allow_redirects else [] + if allow_redirects: + # Redirect resolving generator. + gen = self.resolve_redirects(r, request, **kwargs) + history = [resp for resp in gen] + else: + history = [] # Shuffle things around if there's history. if history: @@ -728,7 +727,7 @@ class Session(SessionRedirectMixin): return adapter # Nothing matches :-/ - raise InvalidSchema("No connection adapters were found for '%s'" % url) + raise InvalidSchema("No connection adapters were found for {!r}".format(url)) def close(self): """Closes all adapters and as such the session""" diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/status_codes.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/status_codes.py index 813e8c4e..d80a7cd4 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/status_codes.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/status_codes.py @@ -5,12 +5,15 @@ The ``codes`` object defines a mapping from common names for HTTP statuses to their numerical codes, accessible either as attributes or as dictionary items. 
->>> requests.codes['temporary_redirect'] -307 ->>> requests.codes.teapot -418 ->>> requests.codes['\o/'] -200 +Example:: + + >>> import requests + >>> requests.codes['temporary_redirect'] + 307 + >>> requests.codes.teapot + 418 + >>> requests.codes['\o/'] + 200 Some codes have multiple names, and both upper- and lower-case versions of the names are allowed. For example, ``codes.ok``, ``codes.OK``, and diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/structures.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/structures.py index da930e28..8ee0ba7a 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/structures.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/structures.py @@ -7,7 +7,9 @@ requests.structures Data structures that power Requests. """ -from .compat import OrderedDict, Mapping, MutableMapping +from collections import OrderedDict + +from .compat import Mapping, MutableMapping class CaseInsensitiveDict(MutableMapping): diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/utils.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/utils.py index 8170a8d2..c1700d7f 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/utils.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/requests/utils.py @@ -19,6 +19,7 @@ import sys import tempfile import warnings import zipfile +from collections import OrderedDict from .__version__ import __version__ from . import certs @@ -26,7 +27,7 @@ from . 
import certs from ._internal_utils import to_native_string from .compat import parse_http_list as _parse_list_header from .compat import ( - quote, urlparse, bytes, str, OrderedDict, unquote, getproxies, + quote, urlparse, bytes, str, unquote, getproxies, proxy_bypass, urlunparse, basestring, integer_types, is_py3, proxy_bypass_environment, getproxies_environment, Mapping) from .cookies import cookiejar_from_dict @@ -179,7 +180,7 @@ def get_netrc_auth(url, raise_errors=False): except KeyError: # os.path.expanduser can fail when $HOME is undefined and # getpwuid fails. See https://bugs.python.org/issue20164 & - # https://github.com/requests/requests/issues/1846 + # https://github.com/psf/requests/issues/1846 return if os.path.exists(loc): @@ -266,6 +267,8 @@ def from_key_val_list(value): >>> from_key_val_list([('key', 'val')]) OrderedDict([('key', 'val')]) >>> from_key_val_list('string') + Traceback (most recent call last): + ... ValueError: cannot encode objects that are not 2-tuples >>> from_key_val_list({'key': 'val'}) OrderedDict([('key', 'val')]) @@ -292,7 +295,9 @@ def to_key_val_list(value): >>> to_key_val_list({'key': 'val'}) [('key', 'val')] >>> to_key_val_list('string') - ValueError: cannot encode objects that are not 2-tuples. + Traceback (most recent call last): + ... 
+ ValueError: cannot encode objects that are not 2-tuples :rtype: list """ diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/__init__.py new file mode 100644 index 00000000..3b444545 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/__init__.py @@ -0,0 +1,26 @@ +__all__ = [ + "__version__", + "AbstractProvider", + "AbstractResolver", + "BaseReporter", + "InconsistentCandidate", + "Resolver", + "RequirementsConflicted", + "ResolutionError", + "ResolutionImpossible", + "ResolutionTooDeep", +] + +__version__ = "0.4.0" + + +from .providers import AbstractProvider, AbstractResolver +from .reporters import BaseReporter +from .resolvers import ( + InconsistentCandidate, + RequirementsConflicted, + Resolver, + ResolutionError, + ResolutionImpossible, + ResolutionTooDeep, +) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/compat/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/compat/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py new file mode 100644 index 00000000..366cc5e2 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py @@ -0,0 +1,6 @@ +__all__ = ["Sequence"] + +try: + from collections.abc import Sequence +except ImportError: + from collections import Sequence diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/providers.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/providers.py new file mode 100644 index 00000000..68b7290d --- /dev/null +++ 
b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/providers.py @@ -0,0 +1,109 @@ +class AbstractProvider(object): + """Delegate class to provide requirement interface for the resolver. + """ + + def identify(self, dependency): + """Given a dependency, return an identifier for it. + + This is used in many places to identify the dependency, e.g. whether + two requirements should have their specifier parts merged, whether + two specifications would conflict with each other (because they the + same name but different versions). + """ + raise NotImplementedError + + def get_preference(self, resolution, candidates, information): + """Produce a sort key for given specification based on preference. + + The preference is defined as "I think this requirement should be + resolved first". The lower the return value is, the more preferred + this group of arguments is. + + :param resolution: Currently pinned candidate, or `None`. + :param candidates: A list of possible candidates. + :param information: A list of requirement information. + + Each information instance is a named tuple with two entries: + + * `requirement` specifies a requirement contributing to the current + candidate list + * `parent` specifies the candidate that provides (dependend on) the + requirement, or `None` to indicate a root requirement. + + The preference could depend on a various of issues, including (not + necessarily in this order): + + * Is this package pinned in the current resolution result? + * How relaxed is the requirement? Stricter ones should probably be + worked on first? (I don't know, actually.) + * How many possibilities are there to satisfy this requirement? Those + with few left should likely be worked on first, I guess? + * Are there any known conflicts for this requirement? We should + probably work on those with the most known conflicts. + + A sortable value should be returned (this will be used as the `key` + parameter of the built-in sorting function). 
The smaller the value is, + the more preferred this specification is (i.e. the sorting function + is called with `reverse=False`). + """ + raise NotImplementedError + + def find_matches(self, requirements): + """Find all possible candidates that satisfy the given requirements. + + This should try to get candidates based on the requirements' types. + For VCS, local, and archive requirements, the one-and-only match is + returned, and for a "named" requirement, the index(es) should be + consulted to find concrete candidates for this requirement. + + :param requirements: A collection of requirements which all of the the + returned candidates must match. All requirements are guaranteed to + have the same identifier. The collection is never empty. + :returns: An iterable that orders candidates by preference, e.g. the + most preferred candidate should come first. + """ + raise NotImplementedError + + def is_satisfied_by(self, requirement, candidate): + """Whether the given requirement can be satisfied by a candidate. + + The candidate is guarenteed to have been generated from the + requirement. + + A boolean should be returned to indicate whether `candidate` is a + viable solution to the requirement. + """ + raise NotImplementedError + + def get_dependencies(self, candidate): + """Get dependencies of a candidate. + + This should return a collection of requirements that `candidate` + specifies as its dependencies. + """ + raise NotImplementedError + + +class AbstractResolver(object): + """The thing that performs the actual resolution work. + """ + + base_exception = Exception + + def __init__(self, provider, reporter): + self.provider = provider + self.reporter = reporter + + def resolve(self, requirements, **kwargs): + """Take a collection of constraints, spit out the resolution result. + + This returns a representation of the final resolution state, with one + guarenteed attribute ``mapping`` that contains resolved candidates as + values. 
The keys are their respective identifiers. + + :param requirements: A collection of constraints. + :param kwargs: Additional keyword arguments that subclasses may accept. + + :raises: ``self.base_exception`` or its subclass. + """ + raise NotImplementedError diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/reporters.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/reporters.py new file mode 100644 index 00000000..a0a2a458 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/reporters.py @@ -0,0 +1,42 @@ +class BaseReporter(object): + """Delegate class to provider progress reporting for the resolver. + """ + + def starting(self): + """Called before the resolution actually starts. + """ + + def starting_round(self, index): + """Called before each round of resolution starts. + + The index is zero-based. + """ + + def ending_round(self, index, state): + """Called before each round of resolution ends. + + This is NOT called if the resolution ends at this round. Use `ending` + if you want to report finalization. The index is zero-based. + """ + + def ending(self, state): + """Called before the resolution ends successfully. + """ + + def adding_requirement(self, requirement, parent): + """Called when adding a new requirement into the resolve criteria. + + :param requirement: The additional requirement to be applied to filter + the available candidaites. + :param parent: The candidate that requires ``requirement`` as a + dependency, or None if ``requirement`` is one of the root + requirements passed in from ``Resolver.resolve()``. + """ + + def backtracking(self, candidate): + """Called when rejecting a candidate during backtracking. + """ + + def pinning(self, candidate): + """Called when adding a candidate to the potential solution. 
+ """ diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/resolvers.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/resolvers.py new file mode 100644 index 00000000..4497f976 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/resolvers.py @@ -0,0 +1,428 @@ +import collections + +from .compat import collections_abc +from .providers import AbstractResolver +from .structs import DirectedGraph + + +RequirementInformation = collections.namedtuple( + "RequirementInformation", ["requirement", "parent"] +) + + +class ResolverException(Exception): + """A base class for all exceptions raised by this module. + + Exceptions derived by this class should all be handled in this module. Any + bubbling pass the resolver should be treated as a bug. + """ + + +class RequirementsConflicted(ResolverException): + def __init__(self, criterion): + super(RequirementsConflicted, self).__init__(criterion) + self.criterion = criterion + + def __str__(self): + return "Requirements conflict: {}".format( + ", ".join(repr(r) for r in self.criterion.iter_requirement()), + ) + + +class InconsistentCandidate(ResolverException): + def __init__(self, candidate, criterion): + super(InconsistentCandidate, self).__init__(candidate, criterion) + self.candidate = candidate + self.criterion = criterion + + def __str__(self): + return "Provided candidate {!r} does not satisfy {}".format( + self.candidate, + ", ".join(repr(r) for r in self.criterion.iter_requirement()), + ) + + +class Criterion(object): + """Representation of possible resolution results of a package. + + This holds three attributes: + + * `information` is a collection of `RequirementInformation` pairs. + Each pair is a requirement contributing to this criterion, and the + candidate that provides the requirement. + * `incompatibilities` is a collection of all known not-to-work candidates + to exclude from consideration. 
+ * `candidates` is a collection containing all possible candidates deducted + from the union of contributing requirements and known incompatibilities. + It should never be empty, except when the criterion is an attribute of a + raised `RequirementsConflicted` (in which case it is always empty). + + .. note:: + This class is intended to be externally immutable. **Do not** mutate + any of its attribute containers. + """ + + def __init__(self, candidates, information, incompatibilities): + self.candidates = candidates + self.information = information + self.incompatibilities = incompatibilities + + def __repr__(self): + requirements = ", ".join( + "({!r}, via={!r})".format(req, parent) + for req, parent in self.information + ) + return "Criterion({})".format(requirements) + + @classmethod + def from_requirement(cls, provider, requirement, parent): + """Build an instance from a requirement. + """ + candidates = provider.find_matches([requirement]) + if not isinstance(candidates, collections_abc.Sequence): + candidates = list(candidates) + criterion = cls( + candidates=candidates, + information=[RequirementInformation(requirement, parent)], + incompatibilities=[], + ) + if not candidates: + raise RequirementsConflicted(criterion) + return criterion + + def iter_requirement(self): + return (i.requirement for i in self.information) + + def iter_parent(self): + return (i.parent for i in self.information) + + def merged_with(self, provider, requirement, parent): + """Build a new instance from this and a new requirement. 
+ """ + infos = list(self.information) + infos.append(RequirementInformation(requirement, parent)) + candidates = provider.find_matches([r for r, _ in infos]) + if not isinstance(candidates, collections_abc.Sequence): + candidates = list(candidates) + criterion = type(self)(candidates, infos, list(self.incompatibilities)) + if not candidates: + raise RequirementsConflicted(criterion) + return criterion + + def excluded_of(self, candidate): + """Build a new instance from this, but excluding specified candidate. + + Returns the new instance, or None if we still have no valid candidates. + """ + incompats = list(self.incompatibilities) + incompats.append(candidate) + candidates = [c for c in self.candidates if c != candidate] + if not candidates: + return None + criterion = type(self)(candidates, list(self.information), incompats) + return criterion + + +class ResolutionError(ResolverException): + pass + + +class ResolutionImpossible(ResolutionError): + def __init__(self, causes): + super(ResolutionImpossible, self).__init__(causes) + # causes is a list of RequirementInformation objects + self.causes = causes + + +class ResolutionTooDeep(ResolutionError): + def __init__(self, round_count): + super(ResolutionTooDeep, self).__init__(round_count) + self.round_count = round_count + + +# Resolution state in a round. +State = collections.namedtuple("State", "mapping criteria") + + +class Resolution(object): + """Stateful resolution object. + + This is designed as a one-off object that holds information to kick start + the resolution process, and holds the results afterwards. + """ + + def __init__(self, provider, reporter): + self._p = provider + self._r = reporter + self._states = [] + + @property + def state(self): + try: + return self._states[-1] + except IndexError: + raise AttributeError("state") + + def _push_new_state(self): + """Push a new state into history. + + This new state will be used to hold resolution results of the next + coming round. 
+ """ + try: + base = self._states[-1] + except IndexError: + state = State(mapping=collections.OrderedDict(), criteria={}) + else: + state = State( + mapping=base.mapping.copy(), criteria=base.criteria.copy(), + ) + self._states.append(state) + + def _merge_into_criterion(self, requirement, parent): + self._r.adding_requirement(requirement, parent) + name = self._p.identify(requirement) + try: + crit = self.state.criteria[name] + except KeyError: + crit = Criterion.from_requirement(self._p, requirement, parent) + else: + crit = crit.merged_with(self._p, requirement, parent) + return name, crit + + def _get_criterion_item_preference(self, item): + name, criterion = item + try: + pinned = self.state.mapping[name] + except KeyError: + pinned = None + return self._p.get_preference( + pinned, criterion.candidates, criterion.information, + ) + + def _is_current_pin_satisfying(self, name, criterion): + try: + current_pin = self.state.mapping[name] + except KeyError: + return False + return all( + self._p.is_satisfied_by(r, current_pin) + for r in criterion.iter_requirement() + ) + + def _get_criteria_to_update(self, candidate): + criteria = {} + for r in self._p.get_dependencies(candidate): + name, crit = self._merge_into_criterion(r, parent=candidate) + criteria[name] = crit + return criteria + + def _attempt_to_pin_criterion(self, name, criterion): + causes = [] + for candidate in criterion.candidates: + try: + criteria = self._get_criteria_to_update(candidate) + except RequirementsConflicted as e: + causes.append(e.criterion) + continue + + # Check the newly-pinned candidate actually works. This should + # always pass under normal circumstances, but in the case of a + # faulty provider, we will raise an error to notify the implementer + # to fix find_matches() and/or is_satisfied_by(). 
+ satisfied = all( + self._p.is_satisfied_by(r, candidate) + for r in criterion.iter_requirement() + ) + if not satisfied: + raise InconsistentCandidate(candidate, criterion) + + # Put newly-pinned candidate at the end. This is essential because + # backtracking looks at this mapping to get the last pin. + self._r.pinning(candidate) + self.state.mapping.pop(name, None) + self.state.mapping[name] = candidate + self.state.criteria.update(criteria) + + return [] + + # All candidates tried, nothing works. This criterion is a dead + # end, signal for backtracking. + return causes + + def _backtrack(self): + # Drop the current state, it's known not to work. + del self._states[-1] + + # We need at least 2 states here: + # (a) One to backtrack to. + # (b) One to restore state (a) to its state prior to candidate-pinning, + # so we can pin another one instead. + + while len(self._states) >= 2: + # Retract the last candidate pin. + prev_state = self._states.pop() + try: + name, candidate = prev_state.mapping.popitem() + except KeyError: + continue + self._r.backtracking(candidate) + + # Create a new state to work on, with the newly known not-working + # candidate excluded. + self._push_new_state() + + # Mark the retracted candidate as incompatible. + criterion = self.state.criteria[name].excluded_of(candidate) + if criterion is None: + # This state still does not work. Try the still previous state. 
+ del self._states[-1] + continue + self.state.criteria[name] = criterion + + return True + + return False + + def resolve(self, requirements, max_rounds): + if self._states: + raise RuntimeError("already resolved") + + self._push_new_state() + for r in requirements: + try: + name, crit = self._merge_into_criterion(r, parent=None) + except RequirementsConflicted as e: + raise ResolutionImpossible(e.criterion.information) + self.state.criteria[name] = crit + + self._r.starting() + + for round_index in range(max_rounds): + self._r.starting_round(round_index) + + self._push_new_state() + curr = self.state + + unsatisfied_criterion_items = [ + item + for item in self.state.criteria.items() + if not self._is_current_pin_satisfying(*item) + ] + + # All criteria are accounted for. Nothing more to pin, we are done! + if not unsatisfied_criterion_items: + del self._states[-1] + self._r.ending(curr) + return self.state + + # Choose the most preferred unpinned criterion to try. + name, criterion = min( + unsatisfied_criterion_items, + key=self._get_criterion_item_preference, + ) + failure_causes = self._attempt_to_pin_criterion(name, criterion) + + # Backtrack if pinning fails. 
+ if failure_causes: + result = self._backtrack() + if not result: + causes = [ + i for crit in failure_causes for i in crit.information + ] + raise ResolutionImpossible(causes) + + self._r.ending_round(round_index, curr) + + raise ResolutionTooDeep(max_rounds) + + +def _has_route_to_root(criteria, key, all_keys, connected): + if key in connected: + return True + if key not in criteria: + return False + for p in criteria[key].iter_parent(): + try: + pkey = all_keys[id(p)] + except KeyError: + continue + if pkey in connected: + connected.add(key) + return True + if _has_route_to_root(criteria, pkey, all_keys, connected): + connected.add(key) + return True + return False + + +Result = collections.namedtuple("Result", "mapping graph criteria") + + +def _build_result(state): + mapping = state.mapping + all_keys = {id(v): k for k, v in mapping.items()} + all_keys[id(None)] = None + + graph = DirectedGraph() + graph.add(None) # Sentinel as root dependencies' parent. + + connected = {None} + for key, criterion in state.criteria.items(): + if not _has_route_to_root(state.criteria, key, all_keys, connected): + continue + if key not in graph: + graph.add(key) + for p in criterion.iter_parent(): + try: + pkey = all_keys[id(p)] + except KeyError: + continue + if pkey not in graph: + graph.add(pkey) + graph.connect(pkey, key) + + return Result( + mapping={k: v for k, v in mapping.items() if k in connected}, + graph=graph, + criteria=state.criteria, + ) + + +class Resolver(AbstractResolver): + """The thing that performs the actual resolution work. + """ + + base_exception = ResolverException + + def resolve(self, requirements, max_rounds=100): + """Take a collection of constraints, spit out the resolution result. + + The return value is a representation to the final resolution result. It + is a tuple subclass with three public members: + + * `mapping`: A dict of resolved candidates. 
Each key is an identifier + of a requirement (as returned by the provider's `identify` method), + and the value is the resolved candidate. + * `graph`: A `DirectedGraph` instance representing the dependency tree. + The vertices are keys of `mapping`, and each edge represents *why* + a particular package is included. A special vertex `None` is + included to represent parents of user-supplied requirements. + * `criteria`: A dict of "criteria" that hold detailed information on + how edges in the graph are derived. Each key is an identifier of a + requirement, and the value is a `Criterion` instance. + + The following exceptions may be raised if a resolution cannot be found: + + * `ResolutionImpossible`: A resolution cannot be found for the given + combination of requirements. The `causes` attribute of the + exception is a list of (requirement, parent), giving the + requirements that could not be satisfied. + * `ResolutionTooDeep`: The dependency tree is too deeply nested and + the resolver gave up. This is usually caused by a circular + dependency, but you can try to resolve this by increasing the + `max_rounds` argument. + """ + resolution = Resolution(self.provider, self.reporter) + state = resolution.resolve(requirements, max_rounds=max_rounds) + return _build_result(state) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/structs.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/structs.py new file mode 100644 index 00000000..1eee08b3 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/resolvelib/structs.py @@ -0,0 +1,68 @@ +class DirectedGraph(object): + """A graph structure with directed edges. 
+ """ + + def __init__(self): + self._vertices = set() + self._forwards = {} # <key> -> Set[<key>] + self._backwards = {} # <key> -> Set[<key>] + + def __iter__(self): + return iter(self._vertices) + + def __len__(self): + return len(self._vertices) + + def __contains__(self, key): + return key in self._vertices + + def copy(self): + """Return a shallow copy of this graph. + """ + other = DirectedGraph() + other._vertices = set(self._vertices) + other._forwards = {k: set(v) for k, v in self._forwards.items()} + other._backwards = {k: set(v) for k, v in self._backwards.items()} + return other + + def add(self, key): + """Add a new vertex to the graph. + """ + if key in self._vertices: + raise ValueError("vertex exists") + self._vertices.add(key) + self._forwards[key] = set() + self._backwards[key] = set() + + def remove(self, key): + """Remove a vertex from the graph, disconnecting all edges from/to it. + """ + self._vertices.remove(key) + for f in self._forwards.pop(key): + self._backwards[f].remove(key) + for t in self._backwards.pop(key): + self._forwards[t].remove(key) + + def connected(self, f, t): + return f in self._backwards[t] and t in self._forwards[f] + + def connect(self, f, t): + """Connect two existing vertices. + + Nothing happens if the vertices are already connected. 
+ """ + if t not in self._vertices: + raise KeyError(t) + self._forwards[f].add(t) + self._backwards[t].add(f) + + def iter_edges(self): + for f, children in self._forwards.items(): + for t in children: + yield f, t + + def iter_children(self, key): + return iter(self._forwards[key]) + + def iter_parents(self, key): + return iter(self._backwards[key]) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/six.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/six.py index 89b2188f..83f69783 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/six.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/six.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010-2018 Benjamin Peterson +# Copyright (c) 2010-2020 Benjamin Peterson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -29,7 +29,7 @@ import sys import types __author__ = "Benjamin Peterson <benjamin@python.org>" -__version__ = "1.12.0" +__version__ = "1.15.0" # Useful for very coarse version differentiation. 
@@ -255,9 +255,11 @@ _moved_attributes = [ MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), MovedModule("builtins", "__builtin__"), MovedModule("configparser", "ConfigParser"), + MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"), MovedModule("copyreg", "copy_reg"), MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), - MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"), MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), MovedModule("http_cookies", "Cookie", "http.cookies"), MovedModule("html_entities", "htmlentitydefs", "html.entities"), @@ -637,13 +639,16 @@ if PY3: import io StringIO = io.StringIO BytesIO = io.BytesIO + del io _assertCountEqual = "assertCountEqual" if sys.version_info[1] <= 1: _assertRaisesRegex = "assertRaisesRegexp" _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" else: _assertRaisesRegex = "assertRaisesRegex" _assertRegex = "assertRegex" + _assertNotRegex = "assertNotRegex" else: def b(s): return s @@ -665,6 +670,7 @@ else: _assertCountEqual = "assertItemsEqual" _assertRaisesRegex = "assertRaisesRegexp" _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" _add_doc(b, """Byte literal""") _add_doc(u, """Text literal""") @@ -681,6 +687,10 @@ def assertRegex(self, *args, **kwargs): return getattr(self, _assertRegex)(*args, **kwargs) +def assertNotRegex(self, *args, **kwargs): + return getattr(self, _assertNotRegex)(*args, **kwargs) + + if PY3: exec_ = getattr(moves.builtins, "exec") @@ -716,16 +726,7 @@ else: """) -if sys.version_info[:2] == (3, 2): - exec_("""def raise_from(value, from_value): - try: - if from_value is None: - raise value - raise value from from_value - finally: - value = None -""") -elif 
sys.version_info[:2] > (3, 2): +if sys.version_info[:2] > (3,): exec_("""def raise_from(value, from_value): try: raise value from from_value @@ -805,13 +806,33 @@ if sys.version_info[:2] < (3, 3): _add_doc(reraise, """Reraise an exception.""") if sys.version_info[0:2] < (3, 4): + # This does exactly the same what the :func:`py3:functools.update_wrapper` + # function does on Python versions after 3.2. It sets the ``__wrapped__`` + # attribute on ``wrapper`` object and it doesn't raise an error if any of + # the attributes mentioned in ``assigned`` and ``updated`` are missing on + # ``wrapped`` object. + def _update_wrapper(wrapper, wrapped, + assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + for attr in assigned: + try: + value = getattr(wrapped, attr) + except AttributeError: + continue + else: + setattr(wrapper, attr, value) + for attr in updated: + getattr(wrapper, attr).update(getattr(wrapped, attr, {})) + wrapper.__wrapped__ = wrapped + return wrapper + _update_wrapper.__doc__ = functools.update_wrapper.__doc__ + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, updated=functools.WRAPPER_UPDATES): - def wrapper(f): - f = functools.wraps(wrapped, assigned, updated)(f) - f.__wrapped__ = wrapped - return f - return wrapper + return functools.partial(_update_wrapper, wrapped=wrapped, + assigned=assigned, updated=updated) + wraps.__doc__ = functools.wraps.__doc__ + else: wraps = functools.wraps @@ -824,7 +845,15 @@ def with_metaclass(meta, *bases): class metaclass(type): def __new__(cls, name, this_bases, d): - return meta(name, bases, d) + if sys.version_info[:2] >= (3, 7): + # This version introduced PEP 560 that requires a bit + # of extra care (we mimic what is done by __build_class__). 
+ resolved_bases = types.resolve_bases(bases) + if resolved_bases is not bases: + d['__orig_bases__'] = bases + else: + resolved_bases = bases + return meta(name, resolved_bases, d) @classmethod def __prepare__(cls, name, this_bases): @@ -861,12 +890,11 @@ def ensure_binary(s, encoding='utf-8', errors='strict'): - `str` -> encoded to `bytes` - `bytes` -> `bytes` """ + if isinstance(s, binary_type): + return s if isinstance(s, text_type): return s.encode(encoding, errors) - elif isinstance(s, binary_type): - return s - else: - raise TypeError("not expecting type '%s'" % type(s)) + raise TypeError("not expecting type '%s'" % type(s)) def ensure_str(s, encoding='utf-8', errors='strict'): @@ -880,12 +908,15 @@ def ensure_str(s, encoding='utf-8', errors='strict'): - `str` -> `str` - `bytes` -> decoded to `str` """ - if not isinstance(s, (text_type, binary_type)): - raise TypeError("not expecting type '%s'" % type(s)) + # Optimization: Fast return for the common case. + if type(s) is str: + return s if PY2 and isinstance(s, text_type): - s = s.encode(encoding, errors) + return s.encode(encoding, errors) elif PY3 and isinstance(s, binary_type): - s = s.decode(encoding, errors) + return s.decode(encoding, errors) + elif not isinstance(s, (text_type, binary_type)): + raise TypeError("not expecting type '%s'" % type(s)) return s @@ -908,10 +939,9 @@ def ensure_text(s, encoding='utf-8', errors='strict'): raise TypeError("not expecting type '%s'" % type(s)) - def python_2_unicode_compatible(klass): """ - A decorator that defines __unicode__ and __str__ methods under Python 2. + A class decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. 
# content after the \
escapes = ['0', 'b', 'f', 'n', 'r', 't', '"']
# What it should be replaced by
escapedchars = ['\0', '\b', '\f', '\n', '\r', '\t', '\"']
# Used for substitution
# NOTE: the original read ``dict(zip(_escapes, _escapedchars))``, referring to
# names that do not exist in this module (they are spelled without the leading
# underscore above), so merely importing ``toml.common`` raised NameError.
escape_to_escapedchars = dict(zip(escapes, escapedchars))
"""Decoder internals for the vendored ``toml`` package: compatibility
aliases, small path helpers, the ``TomlDecodeError`` exception and the
``CommentValue`` wrapper used by the comment-preserving decoder."""

import datetime
import io
from os import linesep
import re
import sys

from pip._vendor.toml.tz import TomlTz

# Python 2/3 compatibility aliases: give both interpreters a common
# vocabulary (_range, unicode, basestring, unichr) used throughout the module.
if sys.version_info < (3,):
    _range = xrange  # noqa: F821
else:
    unicode = str
    _range = range
    basestring = str
    unichr = chr


def _detect_pathlib_path(p):
    # pathlib only exists on Python 3.4+; on older interpreters nothing
    # can be a pathlib path.
    if (3, 4) <= sys.version_info:
        import pathlib
        if isinstance(p, pathlib.PurePath):
            return True
    return False


def _ispath(p):
    # Strings/bytes count as filesystem paths; otherwise fall back to the
    # pathlib check above.
    if isinstance(p, (bytes, basestring)):
        return True
    return _detect_pathlib_path(p)


def _getpath(p):
    # Normalize a path-like object into something ``io.open`` accepts.
    if (3, 6) <= sys.version_info:
        import os
        return os.fspath(p)
    if _detect_pathlib_path(p):
        return str(p)
    return p


# FileNotFoundError is Python 3 only; IOError is the closest Python 2 match.
try:
    FNFError = FileNotFoundError
except NameError:
    FNFError = IOError


# Matches a TOML local time: HH:MM:SS with an optional .fff[fff] fraction.
TIME_RE = re.compile(r"([0-9]{2}):([0-9]{2}):([0-9]{2})(\.([0-9]{3,6}))?")


class TomlDecodeError(ValueError):
    """Base toml Exception / Error."""

    def __init__(self, msg, doc, pos):
        # Derive a human-readable line/column from the character offset
        # *pos* within the full document *doc*.
        lineno = doc.count('\n', 0, pos) + 1
        colno = pos - doc.rfind('\n', 0, pos)
        emsg = '{} (line {} column {} char {})'.format(msg, lineno, colno, pos)
        ValueError.__init__(self, emsg)
        self.msg = msg
        self.doc = doc
        self.pos = pos
        self.lineno = lineno
        self.colno = colno


# Matches a TOML number, which allows underscores for readability
_number_with_underscores = re.compile('([0-9])(_([0-9]))*')


class CommentValue(object):
    """Wrap a parsed value together with the comment attached to it.

    Used by the comment-preserving decoder; item access is proxied to the
    wrapped value.
    """

    def __init__(self, val, comment, beginline, _dict):
        self.val = val
        # A comment on its own line is re-emitted on its own line; a
        # trailing comment is separated from the value by a single space.
        separator = "\n" if beginline else " "
        self.comment = separator + comment
        self._dict = _dict

    def __getitem__(self, key):
        return self.val[key]

    def __setitem__(self, key, value):
        self.val[key] = value

    def dump(self, dump_value_func):
        # Tables get the comment emitted before them; scalar values get it
        # appended after.
        retstr = dump_value_func(self.val)
        if isinstance(self.val, self._dict):
            return self.comment + "\n" + unicode(retstr)
        else:
            return unicode(retstr) + self.comment
+def _strictly_valid_num(n): + n = n.strip() + if not n: + return False + if n[0] == '_': + return False + if n[-1] == '_': + return False + if "_." in n or "._" in n: + return False + if len(n) == 1: + return True + if n[0] == '0' and n[1] not in ['.', 'o', 'b', 'x']: + return False + if n[0] == '+' or n[0] == '-': + n = n[1:] + if len(n) > 1 and n[0] == '0' and n[1] != '.': + return False + if '__' in n: + return False + return True + + +def load(f, _dict=dict, decoder=None): + """Parses named file or files as toml and returns a dictionary + + Args: + f: Path to the file to open, array of files to read into single dict + or a file descriptor + _dict: (optional) Specifies the class of the returned toml dictionary + decoder: The decoder to use + + Returns: + Parsed toml file represented as a dictionary + + Raises: + TypeError -- When f is invalid type + TomlDecodeError: Error while decoding toml + IOError / FileNotFoundError -- When an array with no valid (existing) + (Python 2 / Python 3) file paths is passed + """ + + if _ispath(f): + with io.open(_getpath(f), encoding='utf-8') as ffile: + return loads(ffile.read(), _dict, decoder) + elif isinstance(f, list): + from os import path as op + from warnings import warn + if not [path for path in f if op.exists(path)]: + error_msg = "Load expects a list to contain filenames only." 
+ error_msg += linesep + error_msg += ("The list needs to contain the path of at least one " + "existing file.") + raise FNFError(error_msg) + if decoder is None: + decoder = TomlDecoder(_dict) + d = decoder.get_empty_table() + for l in f: # noqa: E741 + if op.exists(l): + d.update(load(l, _dict, decoder)) + else: + warn("Non-existent filename in list with at least one valid " + "filename") + return d + else: + try: + return loads(f.read(), _dict, decoder) + except AttributeError: + raise TypeError("You can only load a file descriptor, filename or " + "list") + + +_groupname_re = re.compile(r'^[A-Za-z0-9_-]+$') + + +def loads(s, _dict=dict, decoder=None): + """Parses string as toml + + Args: + s: String to be parsed + _dict: (optional) Specifies the class of the returned toml dictionary + + Returns: + Parsed toml file represented as a dictionary + + Raises: + TypeError: When a non-string is passed + TomlDecodeError: Error while decoding toml + """ + + implicitgroups = [] + if decoder is None: + decoder = TomlDecoder(_dict) + retval = decoder.get_empty_table() + currentlevel = retval + if not isinstance(s, basestring): + raise TypeError("Expecting something like a string") + + if not isinstance(s, unicode): + s = s.decode('utf8') + + original = s + sl = list(s) + openarr = 0 + openstring = False + openstrchar = "" + multilinestr = False + arrayoftables = False + beginline = True + keygroup = False + dottedkey = False + keyname = 0 + key = '' + prev_key = '' + line_no = 1 + + for i, item in enumerate(sl): + if item == '\r' and sl[i + 1] == '\n': + sl[i] = ' ' + continue + if keyname: + key += item + if item == '\n': + raise TomlDecodeError("Key name found without value." 
+ " Reached end of line.", original, i) + if openstring: + if item == openstrchar: + oddbackslash = False + k = 1 + while i >= k and sl[i - k] == '\\': + oddbackslash = not oddbackslash + k += 1 + if not oddbackslash: + keyname = 2 + openstring = False + openstrchar = "" + continue + elif keyname == 1: + if item.isspace(): + keyname = 2 + continue + elif item == '.': + dottedkey = True + continue + elif item.isalnum() or item == '_' or item == '-': + continue + elif (dottedkey and sl[i - 1] == '.' and + (item == '"' or item == "'")): + openstring = True + openstrchar = item + continue + elif keyname == 2: + if item.isspace(): + if dottedkey: + nextitem = sl[i + 1] + if not nextitem.isspace() and nextitem != '.': + keyname = 1 + continue + if item == '.': + dottedkey = True + nextitem = sl[i + 1] + if not nextitem.isspace() and nextitem != '.': + keyname = 1 + continue + if item == '=': + keyname = 0 + prev_key = key[:-1].rstrip() + key = '' + dottedkey = False + else: + raise TomlDecodeError("Found invalid character in key name: '" + + item + "'. 
Try quoting the key name.", + original, i) + if item == "'" and openstrchar != '"': + k = 1 + try: + while sl[i - k] == "'": + k += 1 + if k == 3: + break + except IndexError: + pass + if k == 3: + multilinestr = not multilinestr + openstring = multilinestr + else: + openstring = not openstring + if openstring: + openstrchar = "'" + else: + openstrchar = "" + if item == '"' and openstrchar != "'": + oddbackslash = False + k = 1 + tripquote = False + try: + while sl[i - k] == '"': + k += 1 + if k == 3: + tripquote = True + break + if k == 1 or (k == 3 and tripquote): + while sl[i - k] == '\\': + oddbackslash = not oddbackslash + k += 1 + except IndexError: + pass + if not oddbackslash: + if tripquote: + multilinestr = not multilinestr + openstring = multilinestr + else: + openstring = not openstring + if openstring: + openstrchar = '"' + else: + openstrchar = "" + if item == '#' and (not openstring and not keygroup and + not arrayoftables): + j = i + comment = "" + try: + while sl[j] != '\n': + comment += s[j] + sl[j] = ' ' + j += 1 + except IndexError: + break + if not openarr: + decoder.preserve_comment(line_no, prev_key, comment, beginline) + if item == '[' and (not openstring and not keygroup and + not arrayoftables): + if beginline: + if len(sl) > i + 1 and sl[i + 1] == '[': + arrayoftables = True + else: + keygroup = True + else: + openarr += 1 + if item == ']' and not openstring: + if keygroup: + keygroup = False + elif arrayoftables: + if sl[i - 1] == ']': + arrayoftables = False + else: + openarr -= 1 + if item == '\n': + if openstring or multilinestr: + if not multilinestr: + raise TomlDecodeError("Unbalanced quotes", original, i) + if ((sl[i - 1] == "'" or sl[i - 1] == '"') and ( + sl[i - 2] == sl[i - 1])): + sl[i] = sl[i - 1] + if sl[i - 3] == sl[i - 1]: + sl[i - 3] = ' ' + elif openarr: + sl[i] = ' ' + else: + beginline = True + line_no += 1 + elif beginline and sl[i] != ' ' and sl[i] != '\t': + beginline = False + if not keygroup and not arrayoftables: 
+ if sl[i] == '=': + raise TomlDecodeError("Found empty keyname. ", original, i) + keyname = 1 + key += item + if keyname: + raise TomlDecodeError("Key name found without value." + " Reached end of file.", original, len(s)) + if openstring: # reached EOF and have an unterminated string + raise TomlDecodeError("Unterminated string found." + " Reached end of file.", original, len(s)) + s = ''.join(sl) + s = s.split('\n') + multikey = None + multilinestr = "" + multibackslash = False + pos = 0 + for idx, line in enumerate(s): + if idx > 0: + pos += len(s[idx - 1]) + 1 + + decoder.embed_comments(idx, currentlevel) + + if not multilinestr or multibackslash or '\n' not in multilinestr: + line = line.strip() + if line == "" and (not multikey or multibackslash): + continue + if multikey: + if multibackslash: + multilinestr += line + else: + multilinestr += line + multibackslash = False + closed = False + if multilinestr[0] == '[': + closed = line[-1] == ']' + elif len(line) > 2: + closed = (line[-1] == multilinestr[0] and + line[-2] == multilinestr[0] and + line[-3] == multilinestr[0]) + if closed: + try: + value, vtype = decoder.load_value(multilinestr) + except ValueError as err: + raise TomlDecodeError(str(err), original, pos) + currentlevel[multikey] = value + multikey = None + multilinestr = "" + else: + k = len(multilinestr) - 1 + while k > -1 and multilinestr[k] == '\\': + multibackslash = not multibackslash + k -= 1 + if multibackslash: + multilinestr = multilinestr[:-1] + else: + multilinestr += "\n" + continue + if line[0] == '[': + arrayoftables = False + if len(line) == 1: + raise TomlDecodeError("Opening key group bracket on line by " + "itself.", original, pos) + if line[1] == '[': + arrayoftables = True + line = line[2:] + splitstr = ']]' + else: + line = line[1:] + splitstr = ']' + i = 1 + quotesplits = decoder._get_split_on_quotes(line) + quoted = False + for quotesplit in quotesplits: + if not quoted and splitstr in quotesplit: + break + i += 
quotesplit.count(splitstr) + quoted = not quoted + line = line.split(splitstr, i) + if len(line) < i + 1 or line[-1].strip() != "": + raise TomlDecodeError("Key group not on a line by itself.", + original, pos) + groups = splitstr.join(line[:-1]).split('.') + i = 0 + while i < len(groups): + groups[i] = groups[i].strip() + if len(groups[i]) > 0 and (groups[i][0] == '"' or + groups[i][0] == "'"): + groupstr = groups[i] + j = i + 1 + while not groupstr[0] == groupstr[-1]: + j += 1 + if j > len(groups) + 2: + raise TomlDecodeError("Invalid group name '" + + groupstr + "' Something " + + "went wrong.", original, pos) + groupstr = '.'.join(groups[i:j]).strip() + groups[i] = groupstr[1:-1] + groups[i + 1:j] = [] + else: + if not _groupname_re.match(groups[i]): + raise TomlDecodeError("Invalid group name '" + + groups[i] + "'. Try quoting it.", + original, pos) + i += 1 + currentlevel = retval + for i in _range(len(groups)): + group = groups[i] + if group == "": + raise TomlDecodeError("Can't have a keygroup with an empty " + "name", original, pos) + try: + currentlevel[group] + if i == len(groups) - 1: + if group in implicitgroups: + implicitgroups.remove(group) + if arrayoftables: + raise TomlDecodeError("An implicitly defined " + "table can't be an array", + original, pos) + elif arrayoftables: + currentlevel[group].append(decoder.get_empty_table() + ) + else: + raise TomlDecodeError("What? " + group + + " already exists?" 
+ + str(currentlevel), + original, pos) + except TypeError: + currentlevel = currentlevel[-1] + if group not in currentlevel: + currentlevel[group] = decoder.get_empty_table() + if i == len(groups) - 1 and arrayoftables: + currentlevel[group] = [decoder.get_empty_table()] + except KeyError: + if i != len(groups) - 1: + implicitgroups.append(group) + currentlevel[group] = decoder.get_empty_table() + if i == len(groups) - 1 and arrayoftables: + currentlevel[group] = [decoder.get_empty_table()] + currentlevel = currentlevel[group] + if arrayoftables: + try: + currentlevel = currentlevel[-1] + except KeyError: + pass + elif line[0] == "{": + if line[-1] != "}": + raise TomlDecodeError("Line breaks are not allowed in inline" + "objects", original, pos) + try: + decoder.load_inline_object(line, currentlevel, multikey, + multibackslash) + except ValueError as err: + raise TomlDecodeError(str(err), original, pos) + elif "=" in line: + try: + ret = decoder.load_line(line, currentlevel, multikey, + multibackslash) + except ValueError as err: + raise TomlDecodeError(str(err), original, pos) + if ret is not None: + multikey, multilinestr, multibackslash = ret + return retval + + +def _load_date(val): + microsecond = 0 + tz = None + try: + if len(val) > 19: + if val[19] == '.': + if val[-1].upper() == 'Z': + subsecondval = val[20:-1] + tzval = "Z" + else: + subsecondvalandtz = val[20:] + if '+' in subsecondvalandtz: + splitpoint = subsecondvalandtz.index('+') + subsecondval = subsecondvalandtz[:splitpoint] + tzval = subsecondvalandtz[splitpoint:] + elif '-' in subsecondvalandtz: + splitpoint = subsecondvalandtz.index('-') + subsecondval = subsecondvalandtz[:splitpoint] + tzval = subsecondvalandtz[splitpoint:] + else: + tzval = None + subsecondval = subsecondvalandtz + if tzval is not None: + tz = TomlTz(tzval) + microsecond = int(int(subsecondval) * + (10 ** (6 - len(subsecondval)))) + else: + tz = TomlTz(val[19:]) + except ValueError: + tz = None + if "-" not in val[1:]: + 
return None + try: + if len(val) == 10: + d = datetime.date( + int(val[:4]), int(val[5:7]), + int(val[8:10])) + else: + d = datetime.datetime( + int(val[:4]), int(val[5:7]), + int(val[8:10]), int(val[11:13]), + int(val[14:16]), int(val[17:19]), microsecond, tz) + except ValueError: + return None + return d + + +def _load_unicode_escapes(v, hexbytes, prefix): + skip = False + i = len(v) - 1 + while i > -1 and v[i] == '\\': + skip = not skip + i -= 1 + for hx in hexbytes: + if skip: + skip = False + i = len(hx) - 1 + while i > -1 and hx[i] == '\\': + skip = not skip + i -= 1 + v += prefix + v += hx + continue + hxb = "" + i = 0 + hxblen = 4 + if prefix == "\\U": + hxblen = 8 + hxb = ''.join(hx[i:i + hxblen]).lower() + if hxb.strip('0123456789abcdef'): + raise ValueError("Invalid escape sequence: " + hxb) + if hxb[0] == "d" and hxb[1].strip('01234567'): + raise ValueError("Invalid escape sequence: " + hxb + + ". Only scalar unicode points are allowed.") + v += unichr(int(hxb, 16)) + v += unicode(hx[len(hxb):]) + return v + + +# Unescape TOML string values. 
+ +# content after the \ +_escapes = ['0', 'b', 'f', 'n', 'r', 't', '"'] +# What it should be replaced by +_escapedchars = ['\0', '\b', '\f', '\n', '\r', '\t', '\"'] +# Used for substitution +_escape_to_escapedchars = dict(zip(_escapes, _escapedchars)) + + +def _unescape(v): + """Unescape characters in a TOML string.""" + i = 0 + backslash = False + while i < len(v): + if backslash: + backslash = False + if v[i] in _escapes: + v = v[:i - 1] + _escape_to_escapedchars[v[i]] + v[i + 1:] + elif v[i] == '\\': + v = v[:i - 1] + v[i:] + elif v[i] == 'u' or v[i] == 'U': + i += 1 + else: + raise ValueError("Reserved escape sequence used") + continue + elif v[i] == '\\': + backslash = True + i += 1 + return v + + +class InlineTableDict(object): + """Sentinel subclass of dict for inline tables.""" + + +class TomlDecoder(object): + + def __init__(self, _dict=dict): + self._dict = _dict + + def get_empty_table(self): + return self._dict() + + def get_empty_inline_table(self): + class DynamicInlineTableDict(self._dict, InlineTableDict): + """Concrete sentinel subclass for inline tables. 
+ It is a subclass of _dict which is passed in dynamically at load + time + + It is also a subclass of InlineTableDict + """ + + return DynamicInlineTableDict() + + def load_inline_object(self, line, currentlevel, multikey=False, + multibackslash=False): + candidate_groups = line[1:-1].split(",") + groups = [] + if len(candidate_groups) == 1 and not candidate_groups[0].strip(): + candidate_groups.pop() + while len(candidate_groups) > 0: + candidate_group = candidate_groups.pop(0) + try: + _, value = candidate_group.split('=', 1) + except ValueError: + raise ValueError("Invalid inline table encountered") + value = value.strip() + if ((value[0] == value[-1] and value[0] in ('"', "'")) or ( + value[0] in '-0123456789' or + value in ('true', 'false') or + (value[0] == "[" and value[-1] == "]") or + (value[0] == '{' and value[-1] == '}'))): + groups.append(candidate_group) + elif len(candidate_groups) > 0: + candidate_groups[0] = (candidate_group + "," + + candidate_groups[0]) + else: + raise ValueError("Invalid inline table value encountered") + for group in groups: + status = self.load_line(group, currentlevel, multikey, + multibackslash) + if status is not None: + break + + def _get_split_on_quotes(self, line): + doublequotesplits = line.split('"') + quoted = False + quotesplits = [] + if len(doublequotesplits) > 1 and "'" in doublequotesplits[0]: + singlequotesplits = doublequotesplits[0].split("'") + doublequotesplits = doublequotesplits[1:] + while len(singlequotesplits) % 2 == 0 and len(doublequotesplits): + singlequotesplits[-1] += '"' + doublequotesplits[0] + doublequotesplits = doublequotesplits[1:] + if "'" in singlequotesplits[-1]: + singlequotesplits = (singlequotesplits[:-1] + + singlequotesplits[-1].split("'")) + quotesplits += singlequotesplits + for doublequotesplit in doublequotesplits: + if quoted: + quotesplits.append(doublequotesplit) + else: + quotesplits += doublequotesplit.split("'") + quoted = not quoted + return quotesplits + + def 
load_line(self, line, currentlevel, multikey, multibackslash): + i = 1 + quotesplits = self._get_split_on_quotes(line) + quoted = False + for quotesplit in quotesplits: + if not quoted and '=' in quotesplit: + break + i += quotesplit.count('=') + quoted = not quoted + pair = line.split('=', i) + strictly_valid = _strictly_valid_num(pair[-1]) + if _number_with_underscores.match(pair[-1]): + pair[-1] = pair[-1].replace('_', '') + while len(pair[-1]) and (pair[-1][0] != ' ' and pair[-1][0] != '\t' and + pair[-1][0] != "'" and pair[-1][0] != '"' and + pair[-1][0] != '[' and pair[-1][0] != '{' and + pair[-1].strip() != 'true' and + pair[-1].strip() != 'false'): + try: + float(pair[-1]) + break + except ValueError: + pass + if _load_date(pair[-1]) is not None: + break + if TIME_RE.match(pair[-1]): + break + i += 1 + prev_val = pair[-1] + pair = line.split('=', i) + if prev_val == pair[-1]: + raise ValueError("Invalid date or number") + if strictly_valid: + strictly_valid = _strictly_valid_num(pair[-1]) + pair = ['='.join(pair[:-1]).strip(), pair[-1].strip()] + if '.' 
in pair[0]: + if '"' in pair[0] or "'" in pair[0]: + quotesplits = self._get_split_on_quotes(pair[0]) + quoted = False + levels = [] + for quotesplit in quotesplits: + if quoted: + levels.append(quotesplit) + else: + levels += [level.strip() for level in + quotesplit.split('.')] + quoted = not quoted + else: + levels = pair[0].split('.') + while levels[-1] == "": + levels = levels[:-1] + for level in levels[:-1]: + if level == "": + continue + if level not in currentlevel: + currentlevel[level] = self.get_empty_table() + currentlevel = currentlevel[level] + pair[0] = levels[-1].strip() + elif (pair[0][0] == '"' or pair[0][0] == "'") and \ + (pair[0][-1] == pair[0][0]): + pair[0] = _unescape(pair[0][1:-1]) + k, koffset = self._load_line_multiline_str(pair[1]) + if k > -1: + while k > -1 and pair[1][k + koffset] == '\\': + multibackslash = not multibackslash + k -= 1 + if multibackslash: + multilinestr = pair[1][:-1] + else: + multilinestr = pair[1] + "\n" + multikey = pair[0] + else: + value, vtype = self.load_value(pair[1], strictly_valid) + try: + currentlevel[pair[0]] + raise ValueError("Duplicate keys!") + except TypeError: + raise ValueError("Duplicate keys!") + except KeyError: + if multikey: + return multikey, multilinestr, multibackslash + else: + currentlevel[pair[0]] = value + + def _load_line_multiline_str(self, p): + poffset = 0 + if len(p) < 3: + return -1, poffset + if p[0] == '[' and (p.strip()[-1] != ']' and + self._load_array_isstrarray(p)): + newp = p[1:].strip().split(',') + while len(newp) > 1 and newp[-1][0] != '"' and newp[-1][0] != "'": + newp = newp[:-2] + [newp[-2] + ',' + newp[-1]] + newp = newp[-1] + poffset = len(p) - len(newp) + p = newp + if p[0] != '"' and p[0] != "'": + return -1, poffset + if p[1] != p[0] or p[2] != p[0]: + return -1, poffset + if len(p) > 5 and p[-1] == p[0] and p[-2] == p[0] and p[-3] == p[0]: + return -1, poffset + return len(p) - 1, poffset + + def load_value(self, v, strictly_valid=True): + if not v: + raise 
ValueError("Empty value is invalid") + if v == 'true': + return (True, "bool") + elif v == 'false': + return (False, "bool") + elif v[0] == '"' or v[0] == "'": + quotechar = v[0] + testv = v[1:].split(quotechar) + triplequote = False + triplequotecount = 0 + if len(testv) > 1 and testv[0] == '' and testv[1] == '': + testv = testv[2:] + triplequote = True + closed = False + for tv in testv: + if tv == '': + if triplequote: + triplequotecount += 1 + else: + closed = True + else: + oddbackslash = False + try: + i = -1 + j = tv[i] + while j == '\\': + oddbackslash = not oddbackslash + i -= 1 + j = tv[i] + except IndexError: + pass + if not oddbackslash: + if closed: + raise ValueError("Found tokens after a closed " + + "string. Invalid TOML.") + else: + if not triplequote or triplequotecount > 1: + closed = True + else: + triplequotecount = 0 + if quotechar == '"': + escapeseqs = v.split('\\')[1:] + backslash = False + for i in escapeseqs: + if i == '': + backslash = not backslash + else: + if i[0] not in _escapes and (i[0] != 'u' and + i[0] != 'U' and + not backslash): + raise ValueError("Reserved escape sequence used") + if backslash: + backslash = False + for prefix in ["\\u", "\\U"]: + if prefix in v: + hexbytes = v.split(prefix) + v = _load_unicode_escapes(hexbytes[0], hexbytes[1:], + prefix) + v = _unescape(v) + if len(v) > 1 and v[1] == quotechar and (len(v) < 3 or + v[1] == v[2]): + v = v[2:-2] + return (v[1:-1], "str") + elif v[0] == '[': + return (self.load_array(v), "array") + elif v[0] == '{': + inline_object = self.get_empty_inline_table() + self.load_inline_object(v, inline_object) + return (inline_object, "inline_object") + elif TIME_RE.match(v): + h, m, s, _, ms = TIME_RE.match(v).groups() + time = datetime.time(int(h), int(m), int(s), int(ms) if ms else 0) + return (time, "time") + else: + parsed_date = _load_date(v) + if parsed_date is not None: + return (parsed_date, "date") + if not strictly_valid: + raise ValueError("Weirdness with leading zeroes 
or " + "underscores in your number.") + itype = "int" + neg = False + if v[0] == '-': + neg = True + v = v[1:] + elif v[0] == '+': + v = v[1:] + v = v.replace('_', '') + lowerv = v.lower() + if '.' in v or ('x' not in v and ('e' in v or 'E' in v)): + if '.' in v and v.split('.', 1)[1] == '': + raise ValueError("This float is missing digits after " + "the point") + if v[0] not in '0123456789': + raise ValueError("This float doesn't have a leading " + "digit") + v = float(v) + itype = "float" + elif len(lowerv) == 3 and (lowerv == 'inf' or lowerv == 'nan'): + v = float(v) + itype = "float" + if itype == "int": + v = int(v, 0) + if neg: + return (0 - v, itype) + return (v, itype) + + def bounded_string(self, s): + if len(s) == 0: + return True + if s[-1] != s[0]: + return False + i = -2 + backslash = False + while len(s) + i > 0: + if s[i] == "\\": + backslash = not backslash + i -= 1 + else: + break + return not backslash + + def _load_array_isstrarray(self, a): + a = a[1:-1].strip() + if a != '' and (a[0] == '"' or a[0] == "'"): + return True + return False + + def load_array(self, a): + atype = None + retval = [] + a = a.strip() + if '[' not in a[1:-1] or "" != a[1:-1].split('[')[0].strip(): + strarray = self._load_array_isstrarray(a) + if not a[1:-1].strip().startswith('{'): + a = a[1:-1].split(',') + else: + # a is an inline object, we must find the matching parenthesis + # to define groups + new_a = [] + start_group_index = 1 + end_group_index = 2 + open_bracket_count = 1 if a[start_group_index] == '{' else 0 + in_str = False + while end_group_index < len(a[1:]): + if a[end_group_index] == '"' or a[end_group_index] == "'": + if in_str: + backslash_index = end_group_index - 1 + while (backslash_index > -1 and + a[backslash_index] == '\\'): + in_str = not in_str + backslash_index -= 1 + in_str = not in_str + if not in_str and a[end_group_index] == '{': + open_bracket_count += 1 + if in_str or a[end_group_index] != '}': + end_group_index += 1 + continue + elif 
a[end_group_index] == '}' and open_bracket_count > 1: + open_bracket_count -= 1 + end_group_index += 1 + continue + + # Increase end_group_index by 1 to get the closing bracket + end_group_index += 1 + + new_a.append(a[start_group_index:end_group_index]) + + # The next start index is at least after the closing + # bracket, a closing bracket can be followed by a comma + # since we are in an array. + start_group_index = end_group_index + 1 + while (start_group_index < len(a[1:]) and + a[start_group_index] != '{'): + start_group_index += 1 + end_group_index = start_group_index + 1 + a = new_a + b = 0 + if strarray: + while b < len(a) - 1: + ab = a[b].strip() + while (not self.bounded_string(ab) or + (len(ab) > 2 and + ab[0] == ab[1] == ab[2] and + ab[-2] != ab[0] and + ab[-3] != ab[0])): + a[b] = a[b] + ',' + a[b + 1] + ab = a[b].strip() + if b < len(a) - 2: + a = a[:b + 1] + a[b + 2:] + else: + a = a[:b + 1] + b += 1 + else: + al = list(a[1:-1]) + a = [] + openarr = 0 + j = 0 + for i in _range(len(al)): + if al[i] == '[': + openarr += 1 + elif al[i] == ']': + openarr -= 1 + elif al[i] == ',' and not openarr: + a.append(''.join(al[j:i])) + j = i + 1 + a.append(''.join(al[j:])) + for i in _range(len(a)): + a[i] = a[i].strip() + if a[i] != '': + nval, ntype = self.load_value(a[i]) + if atype: + if ntype != atype: + raise ValueError("Not a homogeneous array") + else: + atype = ntype + retval.append(nval) + return retval + + def preserve_comment(self, line_no, key, comment, beginline): + pass + + def embed_comments(self, idx, currentlevel): + pass + + +class TomlPreserveCommentDecoder(TomlDecoder): + + def __init__(self, _dict=dict): + self.saved_comments = {} + super(TomlPreserveCommentDecoder, self).__init__(_dict) + + def preserve_comment(self, line_no, key, comment, beginline): + self.saved_comments[line_no] = (key, comment, beginline) + + def embed_comments(self, idx, currentlevel): + if idx not in self.saved_comments: + return + + key, comment, beginline = 
self.saved_comments[idx] + currentlevel[key] = CommentValue(currentlevel[key], comment, beginline, + self._dict) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/toml/encoder.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/toml/encoder.py new file mode 100644 index 00000000..a8b03c7b --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/toml/encoder.py @@ -0,0 +1,304 @@ +import datetime +import re +import sys +from decimal import Decimal + +from pip._vendor.toml.decoder import InlineTableDict + +if sys.version_info >= (3,): + unicode = str + + +def dump(o, f, encoder=None): + """Writes out dict as toml to a file + + Args: + o: Object to dump into toml + f: File descriptor where the toml should be stored + encoder: The ``TomlEncoder`` to use for constructing the output string + + Returns: + String containing the toml corresponding to dictionary + + Raises: + TypeError: When anything other than file descriptor is passed + """ + + if not f.write: + raise TypeError("You can only dump an object to a file descriptor") + d = dumps(o, encoder=encoder) + f.write(d) + return d + + +def dumps(o, encoder=None): + """Stringifies input dict as toml + + Args: + o: Object to dump into toml + encoder: The ``TomlEncoder`` to use for constructing the output string + + Returns: + String containing the toml corresponding to dict + + Examples: + ```python + >>> import toml + >>> output = { + ... 'a': "I'm a string", + ... 'b': ["I'm", "a", "list"], + ... 'c': 2400 + ... 
} + >>> toml.dumps(output) + 'a = "I\'m a string"\nb = [ "I\'m", "a", "list",]\nc = 2400\n' + ``` + """ + + retval = "" + if encoder is None: + encoder = TomlEncoder(o.__class__) + addtoretval, sections = encoder.dump_sections(o, "") + retval += addtoretval + outer_objs = [id(o)] + while sections: + section_ids = [id(section) for section in sections] + for outer_obj in outer_objs: + if outer_obj in section_ids: + raise ValueError("Circular reference detected") + outer_objs += section_ids + newsections = encoder.get_empty_table() + for section in sections: + addtoretval, addtosections = encoder.dump_sections( + sections[section], section) + + if addtoretval or (not addtoretval and not addtosections): + if retval and retval[-2:] != "\n\n": + retval += "\n" + retval += "[" + section + "]\n" + if addtoretval: + retval += addtoretval + for s in addtosections: + newsections[section + "." + s] = addtosections[s] + sections = newsections + return retval + + +def _dump_str(v): + if sys.version_info < (3,) and hasattr(v, 'decode') and isinstance(v, str): + v = v.decode('utf-8') + v = "%r" % v + if v[0] == 'u': + v = v[1:] + singlequote = v.startswith("'") + if singlequote or v.startswith('"'): + v = v[1:-1] + if singlequote: + v = v.replace("\\'", "'") + v = v.replace('"', '\\"') + v = v.split("\\x") + while len(v) > 1: + i = -1 + if not v[0]: + v = v[1:] + v[0] = v[0].replace("\\\\", "\\") + # No, I don't know why != works and == breaks + joinx = v[0][i] != "\\" + while v[0][:i] and v[0][i] == "\\": + joinx = not joinx + i -= 1 + if joinx: + joiner = "x" + else: + joiner = "u00" + v = [v[0] + joiner + v[1]] + v[2:] + return unicode('"' + v[0] + '"') + + +def _dump_float(v): + return "{}".format(v).replace("e+0", "e+").replace("e-0", "e-") + + +def _dump_time(v): + utcoffset = v.utcoffset() + if utcoffset is None: + return v.isoformat() + # The TOML norm specifies that it's local time thus we drop the offset + return v.isoformat()[:-6] + + +class TomlEncoder(object): + + def 
__init__(self, _dict=dict, preserve=False): + self._dict = _dict + self.preserve = preserve + self.dump_funcs = { + str: _dump_str, + unicode: _dump_str, + list: self.dump_list, + bool: lambda v: unicode(v).lower(), + int: lambda v: v, + float: _dump_float, + Decimal: _dump_float, + datetime.datetime: lambda v: v.isoformat().replace('+00:00', 'Z'), + datetime.time: _dump_time, + datetime.date: lambda v: v.isoformat() + } + + def get_empty_table(self): + return self._dict() + + def dump_list(self, v): + retval = "[" + for u in v: + retval += " " + unicode(self.dump_value(u)) + "," + retval += "]" + return retval + + def dump_inline_table(self, section): + """Preserve inline table in its compact syntax instead of expanding + into subsection. + + https://github.com/toml-lang/toml#user-content-inline-table + """ + retval = "" + if isinstance(section, dict): + val_list = [] + for k, v in section.items(): + val = self.dump_inline_table(v) + val_list.append(k + " = " + val) + retval += "{ " + ", ".join(val_list) + " }\n" + return retval + else: + return unicode(self.dump_value(section)) + + def dump_value(self, v): + # Lookup function corresponding to v's type + dump_fn = self.dump_funcs.get(type(v)) + if dump_fn is None and hasattr(v, '__iter__'): + dump_fn = self.dump_funcs[list] + # Evaluate function (if it exists) else return v + return dump_fn(v) if dump_fn is not None else self.dump_funcs[str](v) + + def dump_sections(self, o, sup): + retstr = "" + if sup != "" and sup[-1] != ".": + sup += '.' 
+ retdict = self._dict() + arraystr = "" + for section in o: + section = unicode(section) + qsection = section + if not re.match(r'^[A-Za-z0-9_-]+$', section): + qsection = _dump_str(section) + if not isinstance(o[section], dict): + arrayoftables = False + if isinstance(o[section], list): + for a in o[section]: + if isinstance(a, dict): + arrayoftables = True + if arrayoftables: + for a in o[section]: + arraytabstr = "\n" + arraystr += "[[" + sup + qsection + "]]\n" + s, d = self.dump_sections(a, sup + qsection) + if s: + if s[0] == "[": + arraytabstr += s + else: + arraystr += s + while d: + newd = self._dict() + for dsec in d: + s1, d1 = self.dump_sections(d[dsec], sup + + qsection + "." + + dsec) + if s1: + arraytabstr += ("[" + sup + qsection + + "." + dsec + "]\n") + arraytabstr += s1 + for s1 in d1: + newd[dsec + "." + s1] = d1[s1] + d = newd + arraystr += arraytabstr + else: + if o[section] is not None: + retstr += (qsection + " = " + + unicode(self.dump_value(o[section])) + '\n') + elif self.preserve and isinstance(o[section], InlineTableDict): + retstr += (qsection + " = " + + self.dump_inline_table(o[section])) + else: + retdict[qsection] = o[section] + retstr += arraystr + return (retstr, retdict) + + +class TomlPreserveInlineDictEncoder(TomlEncoder): + + def __init__(self, _dict=dict): + super(TomlPreserveInlineDictEncoder, self).__init__(_dict, True) + + +class TomlArraySeparatorEncoder(TomlEncoder): + + def __init__(self, _dict=dict, preserve=False, separator=","): + super(TomlArraySeparatorEncoder, self).__init__(_dict, preserve) + if separator.strip() == "": + separator = "," + separator + elif separator.strip(' \t\n\r,'): + raise ValueError("Invalid separator for arrays") + self.separator = separator + + def dump_list(self, v): + t = [] + retval = "[" + for u in v: + t.append(self.dump_value(u)) + while t != []: + s = [] + for u in t: + if isinstance(u, list): + for r in u: + s.append(r) + else: + retval += " " + unicode(u) + self.separator + t = s 
+ retval += "]" + return retval + + +class TomlNumpyEncoder(TomlEncoder): + + def __init__(self, _dict=dict, preserve=False): + import numpy as np + super(TomlNumpyEncoder, self).__init__(_dict, preserve) + self.dump_funcs[np.float16] = _dump_float + self.dump_funcs[np.float32] = _dump_float + self.dump_funcs[np.float64] = _dump_float + self.dump_funcs[np.int16] = self._dump_int + self.dump_funcs[np.int32] = self._dump_int + self.dump_funcs[np.int64] = self._dump_int + + def _dump_int(self, v): + return "{}".format(int(v)) + + +class TomlPreserveCommentEncoder(TomlEncoder): + + def __init__(self, _dict=dict, preserve=False): + from pip._vendor.toml.decoder import CommentValue + super(TomlPreserveCommentEncoder, self).__init__(_dict, preserve) + self.dump_funcs[CommentValue] = lambda v: v.dump(self.dump_value) + + +class TomlPathlibEncoder(TomlEncoder): + + def _dump_pathlib_path(self, v): + return _dump_str(str(v)) + + def dump_value(self, v): + if (3, 4) <= sys.version_info: + import pathlib + if isinstance(v, pathlib.PurePath): + v = str(v) + return super(TomlPathlibEncoder, self).dump_value(v) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/toml/ordered.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/toml/ordered.py new file mode 100644 index 00000000..6052016e --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/toml/ordered.py @@ -0,0 +1,15 @@ +from collections import OrderedDict +from pip._vendor.toml import TomlEncoder +from pip._vendor.toml import TomlDecoder + + +class TomlOrderedDecoder(TomlDecoder): + + def __init__(self): + super(self.__class__, self).__init__(_dict=OrderedDict) + + +class TomlOrderedEncoder(TomlEncoder): + + def __init__(self): + super(self.__class__, self).__init__(_dict=OrderedDict) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/toml/tz.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/toml/tz.py new file 
mode 100644 index 00000000..93c3c8ad --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/toml/tz.py @@ -0,0 +1,21 @@ +from datetime import tzinfo, timedelta + + +class TomlTz(tzinfo): + def __init__(self, toml_offset): + if toml_offset == "Z": + self._raw_offset = "+00:00" + else: + self._raw_offset = toml_offset + self._sign = -1 if self._raw_offset[0] == '-' else 1 + self._hours = int(self._raw_offset[1:3]) + self._minutes = int(self._raw_offset[4:6]) + + def tzname(self, dt): + return "UTC" + self._raw_offset + + def utcoffset(self, dt): + return self._sign * timedelta(hours=self._hours, minutes=self._minutes) + + def dst(self, dt): + return timedelta(0) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/__init__.py index 148a9c31..667e9bce 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/__init__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/__init__.py @@ -1,15 +1,10 @@ """ urllib3 - Thread-safe connection pooling and re-using. """ - from __future__ import absolute_import import warnings -from .connectionpool import ( - HTTPConnectionPool, - HTTPSConnectionPool, - connection_from_url -) +from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url from . 
import exceptions from .filepost import encode_multipart_formdata @@ -25,25 +20,25 @@ from .util.retry import Retry import logging from logging import NullHandler -__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)' -__license__ = 'MIT' -__version__ = '1.24.1' +__author__ = "Andrey Petrov (andrey.petrov@shazow.net)" +__license__ = "MIT" +__version__ = "1.25.9" __all__ = ( - 'HTTPConnectionPool', - 'HTTPSConnectionPool', - 'PoolManager', - 'ProxyManager', - 'HTTPResponse', - 'Retry', - 'Timeout', - 'add_stderr_logger', - 'connection_from_url', - 'disable_warnings', - 'encode_multipart_formdata', - 'get_host', - 'make_headers', - 'proxy_from_url', + "HTTPConnectionPool", + "HTTPSConnectionPool", + "PoolManager", + "ProxyManager", + "HTTPResponse", + "Retry", + "Timeout", + "add_stderr_logger", + "connection_from_url", + "disable_warnings", + "encode_multipart_formdata", + "get_host", + "make_headers", + "proxy_from_url", ) logging.getLogger(__name__).addHandler(NullHandler()) @@ -60,10 +55,10 @@ def add_stderr_logger(level=logging.DEBUG): # even if urllib3 is vendored within another package. logger = logging.getLogger(__name__) handler = logging.StreamHandler() - handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s')) + handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s")) logger.addHandler(handler) logger.setLevel(level) - logger.debug('Added a stderr logging handler to logger: %s', __name__) + logger.debug("Added a stderr logging handler to logger: %s", __name__) return handler @@ -75,18 +70,17 @@ del NullHandler # shouldn't be: otherwise, it's very hard for users to use most Python # mechanisms to silence them. # SecurityWarning's always go off by default. 
-warnings.simplefilter('always', exceptions.SecurityWarning, append=True) +warnings.simplefilter("always", exceptions.SecurityWarning, append=True) # SubjectAltNameWarning's should go off once per host -warnings.simplefilter('default', exceptions.SubjectAltNameWarning, append=True) +warnings.simplefilter("default", exceptions.SubjectAltNameWarning, append=True) # InsecurePlatformWarning's don't vary between requests, so we keep it default. -warnings.simplefilter('default', exceptions.InsecurePlatformWarning, - append=True) +warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True) # SNIMissingWarnings should go off only once. -warnings.simplefilter('default', exceptions.SNIMissingWarning, append=True) +warnings.simplefilter("default", exceptions.SNIMissingWarning, append=True) def disable_warnings(category=exceptions.HTTPWarning): """ Helper for quickly disabling all urllib3 warnings. """ - warnings.simplefilter('ignore', category) + warnings.simplefilter("ignore", category) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/_collections.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/_collections.py index 34f23811..019d1511 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/_collections.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/_collections.py @@ -1,4 +1,5 @@ from __future__ import absolute_import + try: from collections.abc import Mapping, MutableMapping except ImportError: @@ -6,6 +7,7 @@ except ImportError: try: from threading import RLock except ImportError: # Platform-specific: No threads available + class RLock: def __enter__(self): pass @@ -19,7 +21,7 @@ from .exceptions import InvalidHeader from .packages.six import iterkeys, itervalues, PY3 -__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict'] +__all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"] _Null = object() @@ -82,7 +84,9 @@ class 
RecentlyUsedContainer(MutableMapping): return len(self._container) def __iter__(self): - raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.') + raise NotImplementedError( + "Iteration over this class is unlikely to be threadsafe." + ) def clear(self): with self.lock: @@ -150,7 +154,7 @@ class HTTPHeaderDict(MutableMapping): def __getitem__(self, key): val = self._container[key.lower()] - return ', '.join(val[1:]) + return ", ".join(val[1:]) def __delitem__(self, key): del self._container[key.lower()] @@ -159,12 +163,13 @@ class HTTPHeaderDict(MutableMapping): return key.lower() in self._container def __eq__(self, other): - if not isinstance(other, Mapping) and not hasattr(other, 'keys'): + if not isinstance(other, Mapping) and not hasattr(other, "keys"): return False if not isinstance(other, type(self)): other = type(self)(other) - return (dict((k.lower(), v) for k, v in self.itermerged()) == - dict((k.lower(), v) for k, v in other.itermerged())) + return dict((k.lower(), v) for k, v in self.itermerged()) == dict( + (k.lower(), v) for k, v in other.itermerged() + ) def __ne__(self, other): return not self.__eq__(other) @@ -184,9 +189,9 @@ class HTTPHeaderDict(MutableMapping): yield vals[0] def pop(self, key, default=__marker): - '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. + """D.pop(k[,d]) -> v, remove specified key and return the corresponding value. If key is not found, d is returned if given, otherwise KeyError is raised. - ''' + """ # Using the MutableMapping function directly fails due to the private marker. # Using ordinary dict.pop would expose the internal structures. # So let's reinvent the wheel. 
@@ -228,8 +233,10 @@ class HTTPHeaderDict(MutableMapping): with self.add instead of self.__setitem__ """ if len(args) > 1: - raise TypeError("extend() takes at most 1 positional " - "arguments ({0} given)".format(len(args))) + raise TypeError( + "extend() takes at most 1 positional " + "arguments ({0} given)".format(len(args)) + ) other = args[0] if len(args) >= 1 else () if isinstance(other, HTTPHeaderDict): @@ -295,7 +302,7 @@ class HTTPHeaderDict(MutableMapping): """Iterate over all headers, merging duplicate ones together.""" for key in self: val = self._container[key.lower()] - yield val[0], ', '.join(val[1:]) + yield val[0], ", ".join(val[1:]) def items(self): return list(self.iteritems()) @@ -306,7 +313,7 @@ class HTTPHeaderDict(MutableMapping): # python2.7 does not expose a proper API for exporting multiheaders # efficiently. This function re-reads raw lines from the message # object and extracts the multiheaders properly. - obs_fold_continued_leaders = (' ', '\t') + obs_fold_continued_leaders = (" ", "\t") headers = [] for line in message.headers: @@ -316,14 +323,14 @@ class HTTPHeaderDict(MutableMapping): # in RFC-7230 S3.2.4. This indicates a multiline header, but # there exists no previous header to which we can attach it. 
raise InvalidHeader( - 'Header continuation with no previous header: %s' % line + "Header continuation with no previous header: %s" % line ) else: key, value = headers[-1] - headers[-1] = (key, value + ' ' + line.strip()) + headers[-1] = (key, value + " " + line.strip()) continue - key, value = line.split(':', 1) + key, value = line.split(":", 1) headers.append((key, value.strip())) return cls(headers) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/connection.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/connection.py index 02b36654..6da1cf4b 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/connection.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/connection.py @@ -1,4 +1,5 @@ from __future__ import absolute_import +import re import datetime import logging import os @@ -11,6 +12,7 @@ from .packages.six.moves.http_client import HTTPException # noqa: F401 try: # Compiled with SSL? import ssl + BaseSSLError = ssl.SSLError except (ImportError, AttributeError): # Platform-specific: No SSL. ssl = None @@ -19,10 +21,11 @@ except (ImportError, AttributeError): # Platform-specific: No SSL. pass -try: # Python 3: - # Not a no-op, we're adding this to the namespace so it can be imported. +try: + # Python 3: not a no-op, we're adding this to the namespace so it can be imported. 
ConnectionError = ConnectionError -except NameError: # Python 2: +except NameError: + # Python 2 class ConnectionError(Exception): pass @@ -40,7 +43,7 @@ from .util.ssl_ import ( resolve_ssl_version, assert_fingerprint, create_urllib3_context, - ssl_wrap_socket + ssl_wrap_socket, ) @@ -50,20 +53,18 @@ from ._collections import HTTPHeaderDict log = logging.getLogger(__name__) -port_by_scheme = { - 'http': 80, - 'https': 443, -} +port_by_scheme = {"http": 80, "https": 443} + +# When it comes time to update this value as a part of regular maintenance +# (ie test_recent_date is failing) update it to ~6 months before the current date. +RECENT_DATE = datetime.date(2019, 1, 1) -# When updating RECENT_DATE, move it to within two years of the current date, -# and not less than 6 months ago. -# Example: if Today is 2018-01-01, then RECENT_DATE should be any date on or -# after 2016-01-01 (today - 2 years) AND before 2017-07-01 (today - 6 months) -RECENT_DATE = datetime.date(2017, 6, 30) +_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") class DummyConnection(object): """Used to detect a failed ConnectionCls import.""" + pass @@ -91,7 +92,7 @@ class HTTPConnection(_HTTPConnection, object): Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). """ - default_port = port_by_scheme['http'] + default_port = port_by_scheme["http"] #: Disable Nagle's algorithm by default. #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]`` @@ -101,15 +102,15 @@ class HTTPConnection(_HTTPConnection, object): is_verified = False def __init__(self, *args, **kw): - if six.PY3: # Python 3 - kw.pop('strict', None) + if not six.PY2: + kw.pop("strict", None) # Pre-set source_address. - self.source_address = kw.get('source_address') + self.source_address = kw.get("source_address") #: The socket options provided by the user. If no options are #: provided, we use the default options. 
- self.socket_options = kw.pop('socket_options', self.default_socket_options) + self.socket_options = kw.pop("socket_options", self.default_socket_options) _HTTPConnection.__init__(self, *args, **kw) @@ -130,7 +131,7 @@ class HTTPConnection(_HTTPConnection, object): those cases where it's appropriate (i.e., when doing DNS lookup to establish the actual TCP connection across which we're going to send HTTP requests). """ - return self._dns_host.rstrip('.') + return self._dns_host.rstrip(".") @host.setter def host(self, value): @@ -149,29 +150,34 @@ class HTTPConnection(_HTTPConnection, object): """ extra_kw = {} if self.source_address: - extra_kw['source_address'] = self.source_address + extra_kw["source_address"] = self.source_address if self.socket_options: - extra_kw['socket_options'] = self.socket_options + extra_kw["socket_options"] = self.socket_options try: conn = connection.create_connection( - (self._dns_host, self.port), self.timeout, **extra_kw) + (self._dns_host, self.port), self.timeout, **extra_kw + ) - except SocketTimeout as e: + except SocketTimeout: raise ConnectTimeoutError( - self, "Connection to %s timed out. (connect timeout=%s)" % - (self.host, self.timeout)) + self, + "Connection to %s timed out. (connect timeout=%s)" + % (self.host, self.timeout), + ) except SocketError as e: raise NewConnectionError( - self, "Failed to establish a new connection: %s" % e) + self, "Failed to establish a new connection: %s" % e + ) return conn def _prepare_conn(self, conn): self.sock = conn - if self._tunnel_host: + # Google App Engine's httplib does not define _tunnel_host + if getattr(self, "_tunnel_host", None): # TODO: Fix tunnel so it doesn't depend on self.sock state. 
self._tunnel() # Mark this connection as not reusable @@ -181,24 +187,32 @@ class HTTPConnection(_HTTPConnection, object): conn = self._new_conn() self._prepare_conn(conn) + def putrequest(self, method, url, *args, **kwargs): + """Send a request to the server""" + match = _CONTAINS_CONTROL_CHAR_RE.search(method) + if match: + raise ValueError( + "Method cannot contain non-token characters %r (found at least %r)" + % (method, match.group()) + ) + + return _HTTPConnection.putrequest(self, method, url, *args, **kwargs) + def request_chunked(self, method, url, body=None, headers=None): """ Alternative to the common request method, which sends the body with chunked encoding and not as one block """ headers = HTTPHeaderDict(headers if headers is not None else {}) - skip_accept_encoding = 'accept-encoding' in headers - skip_host = 'host' in headers + skip_accept_encoding = "accept-encoding" in headers + skip_host = "host" in headers self.putrequest( - method, - url, - skip_accept_encoding=skip_accept_encoding, - skip_host=skip_host + method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host ) for header, value in headers.items(): self.putheader(header, value) - if 'transfer-encoding' not in headers: - self.putheader('Transfer-Encoding', 'chunked') + if "transfer-encoding" not in headers: + self.putheader("Transfer-Encoding", "chunked") self.endheaders() if body is not None: @@ -209,99 +223,93 @@ class HTTPConnection(_HTTPConnection, object): if not chunk: continue if not isinstance(chunk, bytes): - chunk = chunk.encode('utf8') + chunk = chunk.encode("utf8") len_str = hex(len(chunk))[2:] - self.send(len_str.encode('utf-8')) - self.send(b'\r\n') + self.send(len_str.encode("utf-8")) + self.send(b"\r\n") self.send(chunk) - self.send(b'\r\n') + self.send(b"\r\n") # After the if clause, to always have a closed body - self.send(b'0\r\n\r\n') + self.send(b"0\r\n\r\n") class HTTPSConnection(HTTPConnection): - default_port = port_by_scheme['https'] + default_port 
= port_by_scheme["https"] + cert_reqs = None + ca_certs = None + ca_cert_dir = None + ca_cert_data = None ssl_version = None + assert_fingerprint = None - def __init__(self, host, port=None, key_file=None, cert_file=None, - strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, - ssl_context=None, server_hostname=None, **kw): - - HTTPConnection.__init__(self, host, port, strict=strict, - timeout=timeout, **kw) + def __init__( + self, + host, + port=None, + key_file=None, + cert_file=None, + key_password=None, + strict=None, + timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + ssl_context=None, + server_hostname=None, + **kw + ): + + HTTPConnection.__init__(self, host, port, strict=strict, timeout=timeout, **kw) self.key_file = key_file self.cert_file = cert_file + self.key_password = key_password self.ssl_context = ssl_context self.server_hostname = server_hostname # Required property for Google AppEngine 1.9.0 which otherwise causes # HTTPS requests to go out as HTTP. (See Issue #356) - self._protocol = 'https' - - def connect(self): - conn = self._new_conn() - self._prepare_conn(conn) - - if self.ssl_context is None: - self.ssl_context = create_urllib3_context( - ssl_version=resolve_ssl_version(None), - cert_reqs=resolve_cert_reqs(None), - ) - - self.sock = ssl_wrap_socket( - sock=conn, - keyfile=self.key_file, - certfile=self.cert_file, - ssl_context=self.ssl_context, - server_hostname=self.server_hostname - ) - - -class VerifiedHTTPSConnection(HTTPSConnection): - """ - Based on httplib.HTTPSConnection but wraps the socket with - SSL certification. 
- """ - cert_reqs = None - ca_certs = None - ca_cert_dir = None - ssl_version = None - assert_fingerprint = None - - def set_cert(self, key_file=None, cert_file=None, - cert_reqs=None, ca_certs=None, - assert_hostname=None, assert_fingerprint=None, - ca_cert_dir=None): + self._protocol = "https" + + def set_cert( + self, + key_file=None, + cert_file=None, + cert_reqs=None, + key_password=None, + ca_certs=None, + assert_hostname=None, + assert_fingerprint=None, + ca_cert_dir=None, + ca_cert_data=None, + ): """ This method should only be called once, before the connection is used. """ - # If cert_reqs is not provided, we can try to guess. If the user gave - # us a cert database, we assume they want to use it: otherwise, if - # they gave us an SSL Context object we should use whatever is set for - # it. + # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also + # have an SSLContext object in which case we'll use its verify_mode. if cert_reqs is None: - if ca_certs or ca_cert_dir: - cert_reqs = 'CERT_REQUIRED' - elif self.ssl_context is not None: + if self.ssl_context is not None: cert_reqs = self.ssl_context.verify_mode + else: + cert_reqs = resolve_cert_reqs(None) self.key_file = key_file self.cert_file = cert_file self.cert_reqs = cert_reqs + self.key_password = key_password self.assert_hostname = assert_hostname self.assert_fingerprint = assert_fingerprint self.ca_certs = ca_certs and os.path.expanduser(ca_certs) self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir) + self.ca_cert_data = ca_cert_data def connect(self): # Add certificate verification conn = self._new_conn() hostname = self.host - if self._tunnel_host: + # Google App Engine's httplib does not define _tunnel_host + if getattr(self, "_tunnel_host", None): self.sock = conn # Calls self._set_hostport(), so self.host is # self._tunnel_host below. 
@@ -318,15 +326,19 @@ class VerifiedHTTPSConnection(HTTPSConnection): is_time_off = datetime.date.today() < RECENT_DATE if is_time_off: - warnings.warn(( - 'System time is way off (before {0}). This will probably ' - 'lead to SSL verification errors').format(RECENT_DATE), - SystemTimeWarning + warnings.warn( + ( + "System time is way off (before {0}). This will probably " + "lead to SSL verification errors" + ).format(RECENT_DATE), + SystemTimeWarning, ) # Wrap socket using verification with the root certs in # trusted_root_certs + default_ssl_context = False if self.ssl_context is None: + default_ssl_context = True self.ssl_context = create_urllib3_context( ssl_version=resolve_ssl_version(self.ssl_version), cert_reqs=resolve_cert_reqs(self.cert_reqs), @@ -334,38 +346,58 @@ class VerifiedHTTPSConnection(HTTPSConnection): context = self.ssl_context context.verify_mode = resolve_cert_reqs(self.cert_reqs) + + # Try to load OS default certs if none are given. + # Works well on Windows (requires Python3.4+) + if ( + not self.ca_certs + and not self.ca_cert_dir + and not self.ca_cert_data + and default_ssl_context + and hasattr(context, "load_default_certs") + ): + context.load_default_certs() + self.sock = ssl_wrap_socket( sock=conn, keyfile=self.key_file, certfile=self.cert_file, + key_password=self.key_password, ca_certs=self.ca_certs, ca_cert_dir=self.ca_cert_dir, + ca_cert_data=self.ca_cert_data, server_hostname=server_hostname, - ssl_context=context) + ssl_context=context, + ) if self.assert_fingerprint: - assert_fingerprint(self.sock.getpeercert(binary_form=True), - self.assert_fingerprint) - elif context.verify_mode != ssl.CERT_NONE \ - and not getattr(context, 'check_hostname', False) \ - and self.assert_hostname is not False: + assert_fingerprint( + self.sock.getpeercert(binary_form=True), self.assert_fingerprint + ) + elif ( + context.verify_mode != ssl.CERT_NONE + and not getattr(context, "check_hostname", False) + and self.assert_hostname is not False + ): # 
While urllib3 attempts to always turn off hostname matching from # the TLS library, this cannot always be done. So we check whether # the TLS Library still thinks it's matching hostnames. cert = self.sock.getpeercert() - if not cert.get('subjectAltName', ()): - warnings.warn(( - 'Certificate for {0} has no `subjectAltName`, falling back to check for a ' - '`commonName` for now. This feature is being removed by major browsers and ' - 'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 ' - 'for details.)'.format(hostname)), - SubjectAltNameWarning + if not cert.get("subjectAltName", ()): + warnings.warn( + ( + "Certificate for {0} has no `subjectAltName`, falling back to check for a " + "`commonName` for now. This feature is being removed by major browsers and " + "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 " + "for details.)".format(hostname) + ), + SubjectAltNameWarning, ) _match_hostname(cert, self.assert_hostname or server_hostname) self.is_verified = ( - context.verify_mode == ssl.CERT_REQUIRED or - self.assert_fingerprint is not None + context.verify_mode == ssl.CERT_REQUIRED + or self.assert_fingerprint is not None ) @@ -373,9 +405,10 @@ def _match_hostname(cert, asserted_hostname): try: match_hostname(cert, asserted_hostname) except CertificateError as e: - log.error( - 'Certificate did not match expected hostname: %s. ' - 'Certificate: %s', asserted_hostname, cert + log.warning( + "Certificate did not match expected hostname: %s. Certificate: %s", + asserted_hostname, + cert, ) # Add cert to exception and reraise so client code can inspect # the cert when catching the exception, if they want to @@ -383,9 +416,8 @@ def _match_hostname(cert, asserted_hostname): raise -if ssl: - # Make a copy for testing. 
- UnverifiedHTTPSConnection = HTTPSConnection - HTTPSConnection = VerifiedHTTPSConnection -else: - HTTPSConnection = DummyConnection +if not ssl: + HTTPSConnection = DummyConnection # noqa: F811 + + +VerifiedHTTPSConnection = HTTPSConnection diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/connectionpool.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/connectionpool.py index f7a8f193..5f044dbd 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/connectionpool.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/connectionpool.py @@ -29,8 +29,11 @@ from .packages.six.moves import queue from .connection import ( port_by_scheme, DummyConnection, - HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection, - HTTPException, BaseSSLError, + HTTPConnection, + HTTPSConnection, + VerifiedHTTPSConnection, + HTTPException, + BaseSSLError, ) from .request import RequestMethods from .response import HTTPResponse @@ -40,7 +43,13 @@ from .util.request import set_file_position from .util.response import assert_header_parsing from .util.retry import Retry from .util.timeout import Timeout -from .util.url import get_host, Url, NORMALIZABLE_SCHEMES +from .util.url import ( + get_host, + parse_url, + Url, + _normalize_host as normalize_host, + _encode_target, +) from .util.queue import LifoQueue @@ -56,6 +65,11 @@ class ConnectionPool(object): """ Base class for all connection pools, such as :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`. + + .. note:: + ConnectionPool.urlopen() does not normalize or percent-encode target URIs + which is useful if your target server doesn't support percent-encoded + target URIs. 
""" scheme = None @@ -65,13 +79,12 @@ class ConnectionPool(object): if not host: raise LocationValueError("No host specified.") - self.host = _ipv6_host(host, self.scheme) + self.host = _normalize_host(host, scheme=self.scheme) self._proxy_host = host.lower() self.port = port def __str__(self): - return '%s(host=%r, port=%r)' % (type(self).__name__, - self.host, self.port) + return "%s(host=%r, port=%r)" % (type(self).__name__, self.host, self.port) def __enter__(self): return self @@ -152,15 +165,24 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): :class:`urllib3.connection.HTTPSConnection` instances. """ - scheme = 'http' + scheme = "http" ConnectionCls = HTTPConnection ResponseCls = HTTPResponse - def __init__(self, host, port=None, strict=False, - timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False, - headers=None, retries=None, - _proxy=None, _proxy_headers=None, - **conn_kw): + def __init__( + self, + host, + port=None, + strict=False, + timeout=Timeout.DEFAULT_TIMEOUT, + maxsize=1, + block=False, + headers=None, + retries=None, + _proxy=None, + _proxy_headers=None, + **conn_kw + ): ConnectionPool.__init__(self, host, port) RequestMethods.__init__(self, headers) @@ -194,19 +216,27 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # Enable Nagle's algorithm for proxies, to avoid packet fragmentation. # We cannot know if the user has added default socket options, so we cannot replace the # list. - self.conn_kw.setdefault('socket_options', []) + self.conn_kw.setdefault("socket_options", []) def _new_conn(self): """ Return a fresh :class:`HTTPConnection`. 
""" self.num_connections += 1 - log.debug("Starting new HTTP connection (%d): %s:%s", - self.num_connections, self.host, self.port or "80") - - conn = self.ConnectionCls(host=self.host, port=self.port, - timeout=self.timeout.connect_timeout, - strict=self.strict, **self.conn_kw) + log.debug( + "Starting new HTTP connection (%d): %s:%s", + self.num_connections, + self.host, + self.port or "80", + ) + + conn = self.ConnectionCls( + host=self.host, + port=self.port, + timeout=self.timeout.connect_timeout, + strict=self.strict, + **self.conn_kw + ) return conn def _get_conn(self, timeout=None): @@ -230,16 +260,17 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): except queue.Empty: if self.block: - raise EmptyPoolError(self, - "Pool reached maximum size and no more " - "connections are allowed.") + raise EmptyPoolError( + self, + "Pool reached maximum size and no more connections are allowed.", + ) pass # Oh well, we'll create a new connection then # If this is a persistent connection, check if it got disconnected if conn and is_connection_dropped(conn): log.debug("Resetting dropped connection: %s", self.host) conn.close() - if getattr(conn, 'auto_open', 1) == 0: + if getattr(conn, "auto_open", 1) == 0: # This is a proxied connection that has been mutated by # httplib._tunnel() and cannot be reused (since it would # attempt to bypass the proxy) @@ -269,9 +300,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): pass except queue.Full: # This should never happen if self.block == True - log.warning( - "Connection pool is full, discarding connection: %s", - self.host) + log.warning("Connection pool is full, discarding connection: %s", self.host) # Connection never got put back into the pool, close it. if conn: @@ -303,21 +332,30 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): """Is the error actually a timeout? Will raise a ReadTimeout or pass""" if isinstance(err, SocketTimeout): - raise ReadTimeoutError(self, url, "Read timed out. 
(read timeout=%s)" % timeout_value) + raise ReadTimeoutError( + self, url, "Read timed out. (read timeout=%s)" % timeout_value + ) # See the above comment about EAGAIN in Python 3. In Python 2 we have # to specifically catch it and throw the timeout error - if hasattr(err, 'errno') and err.errno in _blocking_errnos: - raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) + if hasattr(err, "errno") and err.errno in _blocking_errnos: + raise ReadTimeoutError( + self, url, "Read timed out. (read timeout=%s)" % timeout_value + ) # Catch possible read timeouts thrown as SSL errors. If not the # case, rethrow the original. We need to do this because of: # http://bugs.python.org/issue10272 - if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python < 2.7.4 - raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) - - def _make_request(self, conn, method, url, timeout=_Default, chunked=False, - **httplib_request_kw): + if "timed out" in str(err) or "did not complete (read)" in str( + err + ): # Python < 2.7.4 + raise ReadTimeoutError( + self, url, "Read timed out. (read timeout=%s)" % timeout_value + ) + + def _make_request( + self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw + ): """ Perform a request on a given urllib connection object taken from our pool. @@ -357,7 +395,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): read_timeout = timeout_obj.read_timeout # App Engine doesn't have a sock attr - if getattr(conn, 'sock', None): + if getattr(conn, "sock", None): # In Python 3 socket.py will catch EAGAIN and return None when you # try and read into the file pointer created by http.client, which # instead raises a BadStatusLine exception. Instead of catching @@ -365,7 +403,8 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # timeouts, check for a zero timeout before making the request. 
if read_timeout == 0: raise ReadTimeoutError( - self, url, "Read timed out. (read timeout=%s)" % read_timeout) + self, url, "Read timed out. (read timeout=%s)" % read_timeout + ) if read_timeout is Timeout.DEFAULT_TIMEOUT: conn.sock.settimeout(socket.getdefaulttimeout()) else: # None or a value @@ -373,31 +412,45 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # Receive the response from the server try: - try: # Python 2.7, use buffering of HTTP responses + try: + # Python 2.7, use buffering of HTTP responses httplib_response = conn.getresponse(buffering=True) - except TypeError: # Python 3 + except TypeError: + # Python 3 try: httplib_response = conn.getresponse() - except Exception as e: - # Remove the TypeError from the exception chain in Python 3; - # otherwise it looks like a programming error was the cause. + except BaseException as e: + # Remove the TypeError from the exception chain in + # Python 3 (including for exceptions like SystemExit). + # Otherwise it looks like a bug in the code. six.raise_from(e, None) except (SocketTimeout, BaseSSLError, SocketError) as e: self._raise_timeout(err=e, url=url, timeout_value=read_timeout) raise # AppEngine doesn't have a version attr. 
- http_version = getattr(conn, '_http_vsn_str', 'HTTP/?') - log.debug("%s://%s:%s \"%s %s %s\" %s %s", self.scheme, self.host, self.port, - method, url, http_version, httplib_response.status, - httplib_response.length) + http_version = getattr(conn, "_http_vsn_str", "HTTP/?") + log.debug( + '%s://%s:%s "%s %s %s" %s %s', + self.scheme, + self.host, + self.port, + method, + url, + http_version, + httplib_response.status, + httplib_response.length, + ) try: assert_header_parsing(httplib_response.msg) except (HeaderParsingError, TypeError) as hpe: # Platform-specific: Python 3 log.warning( - 'Failed to parse headers (url=%s): %s', - self._absolute_url(url), hpe, exc_info=True) + "Failed to parse headers (url=%s): %s", + self._absolute_url(url), + hpe, + exc_info=True, + ) return httplib_response @@ -427,13 +480,13 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): Check if the given ``url`` is a member of the same host as this connection pool. """ - if url.startswith('/'): + if url.startswith("/"): return True # TODO: Add optional support for socket.gethostbyname checking. 
scheme, host, port = get_host(url) - - host = _ipv6_host(host, self.scheme) + if host is not None: + host = _normalize_host(host, scheme=scheme) # Use explicit default port for comparison when none is given if self.port and not port: @@ -443,10 +496,22 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): return (scheme, host, port) == (self.scheme, self.host, self.port) - def urlopen(self, method, url, body=None, headers=None, retries=None, - redirect=True, assert_same_host=True, timeout=_Default, - pool_timeout=None, release_conn=None, chunked=False, - body_pos=None, **response_kw): + def urlopen( + self, + method, + url, + body=None, + headers=None, + retries=None, + redirect=True, + assert_same_host=True, + timeout=_Default, + pool_timeout=None, + release_conn=None, + chunked=False, + body_pos=None, + **response_kw + ): """ Get a connection from the pool and perform an HTTP request. This is the lowest level call for making a request, so you'll need to specify all @@ -544,12 +609,18 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): retries = Retry.from_int(retries, redirect=redirect, default=self.retries) if release_conn is None: - release_conn = response_kw.get('preload_content', True) + release_conn = response_kw.get("preload_content", True) # Check host if assert_same_host and not self.is_same_host(url): raise HostChangedError(self, url, retries) + # Ensure that the URL we're connecting to is properly encoded + if url.startswith("/"): + url = six.ensure_str(_encode_target(url)) + else: + url = six.ensure_str(parse_url(url).url) + conn = None # Track whether `conn` needs to be released before @@ -560,13 +631,13 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # # See issue #651 [1] for details. # - # [1] <https://github.com/shazow/urllib3/issues/651> + # [1] <https://github.com/urllib3/urllib3/issues/651> release_this_conn = release_conn # Merge the proxy headers. Only do this in HTTP. 
We have to copy the # headers dict so we can safely change it without those changes being # reflected in anyone else's copy. - if self.scheme == 'http': + if self.scheme == "http": headers = headers.copy() headers.update(self.proxy_headers) @@ -589,15 +660,22 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): conn.timeout = timeout_obj.connect_timeout - is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None) + is_new_proxy_conn = self.proxy is not None and not getattr( + conn, "sock", None + ) if is_new_proxy_conn: self._prepare_proxy(conn) # Make the request on the httplib connection object. - httplib_response = self._make_request(conn, method, url, - timeout=timeout_obj, - body=body, headers=headers, - chunked=chunked) + httplib_response = self._make_request( + conn, + method, + url, + timeout=timeout_obj, + body=body, + headers=headers, + chunked=chunked, + ) # If we're going to release the connection in ``finally:``, then # the response doesn't need to know about the connection. Otherwise @@ -606,14 +684,16 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): response_conn = conn if not release_conn else None # Pass method to Response for length checking - response_kw['request_method'] = method + response_kw["request_method"] = method # Import httplib's response into our own wrapper object - response = self.ResponseCls.from_httplib(httplib_response, - pool=self, - connection=response_conn, - retries=retries, - **response_kw) + response = self.ResponseCls.from_httplib( + httplib_response, + pool=self, + connection=response_conn, + retries=retries, + **response_kw + ) # Everything went great! clean_exit = True @@ -622,20 +702,28 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # Timed out by queue. 
raise EmptyPoolError(self, "No pool connections are available.") - except (TimeoutError, HTTPException, SocketError, ProtocolError, - BaseSSLError, SSLError, CertificateError) as e: + except ( + TimeoutError, + HTTPException, + SocketError, + ProtocolError, + BaseSSLError, + SSLError, + CertificateError, + ) as e: # Discard the connection for these exceptions. It will be # replaced during the next _get_conn() call. clean_exit = False if isinstance(e, (BaseSSLError, CertificateError)): e = SSLError(e) elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy: - e = ProxyError('Cannot connect to proxy.', e) + e = ProxyError("Cannot connect to proxy.", e) elif isinstance(e, (SocketError, HTTPException)): - e = ProtocolError('Connection aborted.', e) + e = ProtocolError("Connection aborted.", e) - retries = retries.increment(method, url, error=e, _pool=self, - _stacktrace=sys.exc_info()[2]) + retries = retries.increment( + method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2] + ) retries.sleep() # Keep track of the error for the retry warning. 
@@ -658,77 +746,87 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): if not conn: # Try again - log.warning("Retrying (%r) after connection " - "broken by '%r': %s", retries, err, url) - return self.urlopen(method, url, body, headers, retries, - redirect, assert_same_host, - timeout=timeout, pool_timeout=pool_timeout, - release_conn=release_conn, body_pos=body_pos, - **response_kw) - - def drain_and_release_conn(response): - try: - # discard any remaining response body, the connection will be - # released back to the pool once the entire response is read - response.read() - except (TimeoutError, HTTPException, SocketError, ProtocolError, - BaseSSLError, SSLError) as e: - pass + log.warning( + "Retrying (%r) after connection broken by '%r': %s", retries, err, url + ) + return self.urlopen( + method, + url, + body, + headers, + retries, + redirect, + assert_same_host, + timeout=timeout, + pool_timeout=pool_timeout, + release_conn=release_conn, + chunked=chunked, + body_pos=body_pos, + **response_kw + ) # Handle redirect? redirect_location = redirect and response.get_redirect_location() if redirect_location: if response.status == 303: - method = 'GET' + method = "GET" try: retries = retries.increment(method, url, response=response, _pool=self) except MaxRetryError: if retries.raise_on_redirect: - # Drain and release the connection for this response, since - # we're not returning it to be released manually. 
- drain_and_release_conn(response) + response.drain_conn() raise return response - # drain and return the connection to the pool before recursing - drain_and_release_conn(response) - + response.drain_conn() retries.sleep_for_retry(response) log.debug("Redirecting %s -> %s", url, redirect_location) return self.urlopen( - method, redirect_location, body, headers, - retries=retries, redirect=redirect, + method, + redirect_location, + body, + headers, + retries=retries, + redirect=redirect, assert_same_host=assert_same_host, - timeout=timeout, pool_timeout=pool_timeout, - release_conn=release_conn, body_pos=body_pos, - **response_kw) + timeout=timeout, + pool_timeout=pool_timeout, + release_conn=release_conn, + chunked=chunked, + body_pos=body_pos, + **response_kw + ) # Check if we should retry the HTTP response. - has_retry_after = bool(response.getheader('Retry-After')) + has_retry_after = bool(response.getheader("Retry-After")) if retries.is_retry(method, response.status, has_retry_after): try: retries = retries.increment(method, url, response=response, _pool=self) except MaxRetryError: if retries.raise_on_status: - # Drain and release the connection for this response, since - # we're not returning it to be released manually. 
- drain_and_release_conn(response) + response.drain_conn() raise return response - # drain and return the connection to the pool before recursing - drain_and_release_conn(response) - + response.drain_conn() retries.sleep(response) log.debug("Retry: %s", url) return self.urlopen( - method, url, body, headers, - retries=retries, redirect=redirect, + method, + url, + body, + headers, + retries=retries, + redirect=redirect, assert_same_host=assert_same_host, - timeout=timeout, pool_timeout=pool_timeout, + timeout=timeout, + pool_timeout=pool_timeout, release_conn=release_conn, - body_pos=body_pos, **response_kw) + chunked=chunked, + body_pos=body_pos, + **response_kw + ) return response @@ -746,33 +844,57 @@ class HTTPSConnectionPool(HTTPConnectionPool): If ``assert_hostname`` is False, no verification is done. The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``, - ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is - available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade + ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl` + is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade the connection socket into an SSL socket. 
""" - scheme = 'https' + scheme = "https" ConnectionCls = HTTPSConnection - def __init__(self, host, port=None, - strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, - block=False, headers=None, retries=None, - _proxy=None, _proxy_headers=None, - key_file=None, cert_file=None, cert_reqs=None, - ca_certs=None, ssl_version=None, - assert_hostname=None, assert_fingerprint=None, - ca_cert_dir=None, **conn_kw): - - HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize, - block, headers, retries, _proxy, _proxy_headers, - **conn_kw) - - if ca_certs and cert_reqs is None: - cert_reqs = 'CERT_REQUIRED' + def __init__( + self, + host, + port=None, + strict=False, + timeout=Timeout.DEFAULT_TIMEOUT, + maxsize=1, + block=False, + headers=None, + retries=None, + _proxy=None, + _proxy_headers=None, + key_file=None, + cert_file=None, + cert_reqs=None, + key_password=None, + ca_certs=None, + ssl_version=None, + assert_hostname=None, + assert_fingerprint=None, + ca_cert_dir=None, + **conn_kw + ): + + HTTPConnectionPool.__init__( + self, + host, + port, + strict, + timeout, + maxsize, + block, + headers, + retries, + _proxy, + _proxy_headers, + **conn_kw + ) self.key_file = key_file self.cert_file = cert_file self.cert_reqs = cert_reqs + self.key_password = key_password self.ca_certs = ca_certs self.ca_cert_dir = ca_cert_dir self.ssl_version = ssl_version @@ -786,13 +908,16 @@ class HTTPSConnectionPool(HTTPConnectionPool): """ if isinstance(conn, VerifiedHTTPSConnection): - conn.set_cert(key_file=self.key_file, - cert_file=self.cert_file, - cert_reqs=self.cert_reqs, - ca_certs=self.ca_certs, - ca_cert_dir=self.ca_cert_dir, - assert_hostname=self.assert_hostname, - assert_fingerprint=self.assert_fingerprint) + conn.set_cert( + key_file=self.key_file, + key_password=self.key_password, + cert_file=self.cert_file, + cert_reqs=self.cert_reqs, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + assert_hostname=self.assert_hostname, + 
assert_fingerprint=self.assert_fingerprint, + ) conn.ssl_version = self.ssl_version return conn @@ -809,12 +934,17 @@ class HTTPSConnectionPool(HTTPConnectionPool): Return a fresh :class:`httplib.HTTPSConnection`. """ self.num_connections += 1 - log.debug("Starting new HTTPS connection (%d): %s:%s", - self.num_connections, self.host, self.port or "443") + log.debug( + "Starting new HTTPS connection (%d): %s:%s", + self.num_connections, + self.host, + self.port or "443", + ) if not self.ConnectionCls or self.ConnectionCls is DummyConnection: - raise SSLError("Can't connect to HTTPS URL because the SSL " - "module is not available.") + raise SSLError( + "Can't connect to HTTPS URL because the SSL module is not available." + ) actual_host = self.host actual_port = self.port @@ -822,9 +952,16 @@ class HTTPSConnectionPool(HTTPConnectionPool): actual_host = self.proxy.host actual_port = self.proxy.port - conn = self.ConnectionCls(host=actual_host, port=actual_port, - timeout=self.timeout.connect_timeout, - strict=self.strict, **self.conn_kw) + conn = self.ConnectionCls( + host=actual_host, + port=actual_port, + timeout=self.timeout.connect_timeout, + strict=self.strict, + cert_file=self.cert_file, + key_file=self.key_file, + key_password=self.key_password, + **self.conn_kw + ) return self._prepare_conn(conn) @@ -835,16 +972,19 @@ class HTTPSConnectionPool(HTTPConnectionPool): super(HTTPSConnectionPool, self)._validate_conn(conn) # Force connect early to allow us to validate the connection. - if not getattr(conn, 'sock', None): # AppEngine might not have `.sock` + if not getattr(conn, "sock", None): # AppEngine might not have `.sock` conn.connect() if not conn.is_verified: - warnings.warn(( - 'Unverified HTTPS request is being made. ' - 'Adding certificate verification is strongly advised. 
See: ' - 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' - '#ssl-warnings'), - InsecureRequestWarning) + warnings.warn( + ( + "Unverified HTTPS request is being made to host '%s'. " + "Adding certificate verification is strongly advised. See: " + "https://urllib3.readthedocs.io/en/latest/advanced-usage.html" + "#ssl-warnings" % conn.host + ), + InsecureRequestWarning, + ) def connection_from_url(url, **kw): @@ -869,28 +1009,25 @@ def connection_from_url(url, **kw): """ scheme, host, port = get_host(url) port = port or port_by_scheme.get(scheme, 80) - if scheme == 'https': + if scheme == "https": return HTTPSConnectionPool(host, port=port, **kw) else: return HTTPConnectionPool(host, port=port, **kw) -def _ipv6_host(host, scheme): +def _normalize_host(host, scheme): """ - Process IPv6 address literals + Normalize hosts for comparisons and use with sockets. """ + host = normalize_host(host, scheme) + # httplib doesn't like it when we include brackets in IPv6 addresses # Specifically, if we include brackets but also pass the port then # httplib crazily doubles up the square brackets on the Host header. # Instead, we need to make sure we never pass ``None`` as the port. # However, for backward compatibility reasons we can't actually # *assert* that. 
See http://bugs.python.org/issue28539 - # - # Also if an IPv6 address literal has a zone identifier, the - # percent sign might be URIencoded, convert it back into ASCII - if host.startswith('[') and host.endswith(']'): - host = host.replace('%25', '%').strip('[]') - if scheme in NORMALIZABLE_SCHEMES: - host = host.lower() + if host.startswith("[") and host.endswith("]"): + host = host[1:-1] return host diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py index f3e00942..8765b907 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py @@ -6,25 +6,31 @@ import os def is_appengine(): - return (is_local_appengine() or - is_prod_appengine() or - is_prod_appengine_mvms()) + return is_local_appengine() or is_prod_appengine() def is_appengine_sandbox(): - return is_appengine() and not is_prod_appengine_mvms() + """Reports if the app is running in the first generation sandbox. + + The second generation runtimes are technically still in a sandbox, but it + is much less restrictive, so generally you shouldn't need to check for it. 
+ see https://cloud.google.com/appengine/docs/standard/runtimes + """ + return is_appengine() and os.environ["APPENGINE_RUNTIME"] == "python27" def is_local_appengine(): - return ('APPENGINE_RUNTIME' in os.environ and - 'Development/' in os.environ['SERVER_SOFTWARE']) + return "APPENGINE_RUNTIME" in os.environ and os.environ.get( + "SERVER_SOFTWARE", "" + ).startswith("Development/") def is_prod_appengine(): - return ('APPENGINE_RUNTIME' in os.environ and - 'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and - not is_prod_appengine_mvms()) + return "APPENGINE_RUNTIME" in os.environ and os.environ.get( + "SERVER_SOFTWARE", "" + ).startswith("Google App Engine/") def is_prod_appengine_mvms(): - return os.environ.get('GAE_VM', False) == 'true' + """Deprecated.""" + return False diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py index bcf41c02..d9b67333 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py @@ -34,29 +34,35 @@ from __future__ import absolute_import import platform from ctypes.util import find_library from ctypes import ( - c_void_p, c_int32, c_char_p, c_size_t, c_byte, c_uint32, c_ulong, c_long, - c_bool + c_void_p, + c_int32, + c_char_p, + c_size_t, + c_byte, + c_uint32, + c_ulong, + c_long, + c_bool, ) from ctypes import CDLL, POINTER, CFUNCTYPE -security_path = find_library('Security') +security_path = find_library("Security") if not security_path: - raise ImportError('The library Security could not be found') + raise ImportError("The library Security could not be found") -core_foundation_path = find_library('CoreFoundation') +core_foundation_path = find_library("CoreFoundation") if not 
core_foundation_path: - raise ImportError('The library CoreFoundation could not be found') + raise ImportError("The library CoreFoundation could not be found") version = platform.mac_ver()[0] -version_info = tuple(map(int, version.split('.'))) +version_info = tuple(map(int, version.split("."))) if version_info < (10, 8): raise OSError( - 'Only OS X 10.8 and newer are supported, not %s.%s' % ( - version_info[0], version_info[1] - ) + "Only OS X 10.8 and newer are supported, not %s.%s" + % (version_info[0], version_info[1]) ) Security = CDLL(security_path, use_errno=True) @@ -129,27 +135,19 @@ try: Security.SecKeyGetTypeID.argtypes = [] Security.SecKeyGetTypeID.restype = CFTypeID - Security.SecCertificateCreateWithData.argtypes = [ - CFAllocatorRef, - CFDataRef - ] + Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef] Security.SecCertificateCreateWithData.restype = SecCertificateRef - Security.SecCertificateCopyData.argtypes = [ - SecCertificateRef - ] + Security.SecCertificateCopyData.argtypes = [SecCertificateRef] Security.SecCertificateCopyData.restype = CFDataRef - Security.SecCopyErrorMessageString.argtypes = [ - OSStatus, - c_void_p - ] + Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p] Security.SecCopyErrorMessageString.restype = CFStringRef Security.SecIdentityCreateWithCertificate.argtypes = [ CFTypeRef, SecCertificateRef, - POINTER(SecIdentityRef) + POINTER(SecIdentityRef), ] Security.SecIdentityCreateWithCertificate.restype = OSStatus @@ -159,201 +157,126 @@ try: c_void_p, Boolean, c_void_p, - POINTER(SecKeychainRef) + POINTER(SecKeychainRef), ] Security.SecKeychainCreate.restype = OSStatus - Security.SecKeychainDelete.argtypes = [ - SecKeychainRef - ] + Security.SecKeychainDelete.argtypes = [SecKeychainRef] Security.SecKeychainDelete.restype = OSStatus Security.SecPKCS12Import.argtypes = [ CFDataRef, CFDictionaryRef, - POINTER(CFArrayRef) + POINTER(CFArrayRef), ] Security.SecPKCS12Import.restype = OSStatus 
SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t)) - SSLWriteFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)) + SSLWriteFunc = CFUNCTYPE( + OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t) + ) - Security.SSLSetIOFuncs.argtypes = [ - SSLContextRef, - SSLReadFunc, - SSLWriteFunc - ] + Security.SSLSetIOFuncs.argtypes = [SSLContextRef, SSLReadFunc, SSLWriteFunc] Security.SSLSetIOFuncs.restype = OSStatus - Security.SSLSetPeerID.argtypes = [ - SSLContextRef, - c_char_p, - c_size_t - ] + Security.SSLSetPeerID.argtypes = [SSLContextRef, c_char_p, c_size_t] Security.SSLSetPeerID.restype = OSStatus - Security.SSLSetCertificate.argtypes = [ - SSLContextRef, - CFArrayRef - ] + Security.SSLSetCertificate.argtypes = [SSLContextRef, CFArrayRef] Security.SSLSetCertificate.restype = OSStatus - Security.SSLSetCertificateAuthorities.argtypes = [ - SSLContextRef, - CFTypeRef, - Boolean - ] + Security.SSLSetCertificateAuthorities.argtypes = [SSLContextRef, CFTypeRef, Boolean] Security.SSLSetCertificateAuthorities.restype = OSStatus - Security.SSLSetConnection.argtypes = [ - SSLContextRef, - SSLConnectionRef - ] + Security.SSLSetConnection.argtypes = [SSLContextRef, SSLConnectionRef] Security.SSLSetConnection.restype = OSStatus - Security.SSLSetPeerDomainName.argtypes = [ - SSLContextRef, - c_char_p, - c_size_t - ] + Security.SSLSetPeerDomainName.argtypes = [SSLContextRef, c_char_p, c_size_t] Security.SSLSetPeerDomainName.restype = OSStatus - Security.SSLHandshake.argtypes = [ - SSLContextRef - ] + Security.SSLHandshake.argtypes = [SSLContextRef] Security.SSLHandshake.restype = OSStatus - Security.SSLRead.argtypes = [ - SSLContextRef, - c_char_p, - c_size_t, - POINTER(c_size_t) - ] + Security.SSLRead.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)] Security.SSLRead.restype = OSStatus - Security.SSLWrite.argtypes = [ - SSLContextRef, - c_char_p, - c_size_t, - POINTER(c_size_t) - ] + 
Security.SSLWrite.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)] Security.SSLWrite.restype = OSStatus - Security.SSLClose.argtypes = [ - SSLContextRef - ] + Security.SSLClose.argtypes = [SSLContextRef] Security.SSLClose.restype = OSStatus - Security.SSLGetNumberSupportedCiphers.argtypes = [ - SSLContextRef, - POINTER(c_size_t) - ] + Security.SSLGetNumberSupportedCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)] Security.SSLGetNumberSupportedCiphers.restype = OSStatus Security.SSLGetSupportedCiphers.argtypes = [ SSLContextRef, POINTER(SSLCipherSuite), - POINTER(c_size_t) + POINTER(c_size_t), ] Security.SSLGetSupportedCiphers.restype = OSStatus Security.SSLSetEnabledCiphers.argtypes = [ SSLContextRef, POINTER(SSLCipherSuite), - c_size_t + c_size_t, ] Security.SSLSetEnabledCiphers.restype = OSStatus - Security.SSLGetNumberEnabledCiphers.argtype = [ - SSLContextRef, - POINTER(c_size_t) - ] + Security.SSLGetNumberEnabledCiphers.argtype = [SSLContextRef, POINTER(c_size_t)] Security.SSLGetNumberEnabledCiphers.restype = OSStatus Security.SSLGetEnabledCiphers.argtypes = [ SSLContextRef, POINTER(SSLCipherSuite), - POINTER(c_size_t) + POINTER(c_size_t), ] Security.SSLGetEnabledCiphers.restype = OSStatus - Security.SSLGetNegotiatedCipher.argtypes = [ - SSLContextRef, - POINTER(SSLCipherSuite) - ] + Security.SSLGetNegotiatedCipher.argtypes = [SSLContextRef, POINTER(SSLCipherSuite)] Security.SSLGetNegotiatedCipher.restype = OSStatus Security.SSLGetNegotiatedProtocolVersion.argtypes = [ SSLContextRef, - POINTER(SSLProtocol) + POINTER(SSLProtocol), ] Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus - Security.SSLCopyPeerTrust.argtypes = [ - SSLContextRef, - POINTER(SecTrustRef) - ] + Security.SSLCopyPeerTrust.argtypes = [SSLContextRef, POINTER(SecTrustRef)] Security.SSLCopyPeerTrust.restype = OSStatus - Security.SecTrustSetAnchorCertificates.argtypes = [ - SecTrustRef, - CFArrayRef - ] + Security.SecTrustSetAnchorCertificates.argtypes = 
[SecTrustRef, CFArrayRef] Security.SecTrustSetAnchorCertificates.restype = OSStatus - Security.SecTrustSetAnchorCertificatesOnly.argstypes = [ - SecTrustRef, - Boolean - ] + Security.SecTrustSetAnchorCertificatesOnly.argstypes = [SecTrustRef, Boolean] Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus - Security.SecTrustEvaluate.argtypes = [ - SecTrustRef, - POINTER(SecTrustResultType) - ] + Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)] Security.SecTrustEvaluate.restype = OSStatus - Security.SecTrustGetCertificateCount.argtypes = [ - SecTrustRef - ] + Security.SecTrustGetCertificateCount.argtypes = [SecTrustRef] Security.SecTrustGetCertificateCount.restype = CFIndex - Security.SecTrustGetCertificateAtIndex.argtypes = [ - SecTrustRef, - CFIndex - ] + Security.SecTrustGetCertificateAtIndex.argtypes = [SecTrustRef, CFIndex] Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef Security.SSLCreateContext.argtypes = [ CFAllocatorRef, SSLProtocolSide, - SSLConnectionType + SSLConnectionType, ] Security.SSLCreateContext.restype = SSLContextRef - Security.SSLSetSessionOption.argtypes = [ - SSLContextRef, - SSLSessionOption, - Boolean - ] + Security.SSLSetSessionOption.argtypes = [SSLContextRef, SSLSessionOption, Boolean] Security.SSLSetSessionOption.restype = OSStatus - Security.SSLSetProtocolVersionMin.argtypes = [ - SSLContextRef, - SSLProtocol - ] + Security.SSLSetProtocolVersionMin.argtypes = [SSLContextRef, SSLProtocol] Security.SSLSetProtocolVersionMin.restype = OSStatus - Security.SSLSetProtocolVersionMax.argtypes = [ - SSLContextRef, - SSLProtocol - ] + Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol] Security.SSLSetProtocolVersionMax.restype = OSStatus - Security.SecCopyErrorMessageString.argtypes = [ - OSStatus, - c_void_p - ] + Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p] Security.SecCopyErrorMessageString.restype = CFStringRef Security.SSLReadFunc = 
SSLReadFunc @@ -369,64 +292,47 @@ try: Security.OSStatus = OSStatus Security.kSecImportExportPassphrase = CFStringRef.in_dll( - Security, 'kSecImportExportPassphrase' + Security, "kSecImportExportPassphrase" ) Security.kSecImportItemIdentity = CFStringRef.in_dll( - Security, 'kSecImportItemIdentity' + Security, "kSecImportItemIdentity" ) # CoreFoundation time! - CoreFoundation.CFRetain.argtypes = [ - CFTypeRef - ] + CoreFoundation.CFRetain.argtypes = [CFTypeRef] CoreFoundation.CFRetain.restype = CFTypeRef - CoreFoundation.CFRelease.argtypes = [ - CFTypeRef - ] + CoreFoundation.CFRelease.argtypes = [CFTypeRef] CoreFoundation.CFRelease.restype = None - CoreFoundation.CFGetTypeID.argtypes = [ - CFTypeRef - ] + CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef] CoreFoundation.CFGetTypeID.restype = CFTypeID CoreFoundation.CFStringCreateWithCString.argtypes = [ CFAllocatorRef, c_char_p, - CFStringEncoding + CFStringEncoding, ] CoreFoundation.CFStringCreateWithCString.restype = CFStringRef - CoreFoundation.CFStringGetCStringPtr.argtypes = [ - CFStringRef, - CFStringEncoding - ] + CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding] CoreFoundation.CFStringGetCStringPtr.restype = c_char_p CoreFoundation.CFStringGetCString.argtypes = [ CFStringRef, c_char_p, CFIndex, - CFStringEncoding + CFStringEncoding, ] CoreFoundation.CFStringGetCString.restype = c_bool - CoreFoundation.CFDataCreate.argtypes = [ - CFAllocatorRef, - c_char_p, - CFIndex - ] + CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex] CoreFoundation.CFDataCreate.restype = CFDataRef - CoreFoundation.CFDataGetLength.argtypes = [ - CFDataRef - ] + CoreFoundation.CFDataGetLength.argtypes = [CFDataRef] CoreFoundation.CFDataGetLength.restype = CFIndex - CoreFoundation.CFDataGetBytePtr.argtypes = [ - CFDataRef - ] + CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef] CoreFoundation.CFDataGetBytePtr.restype = c_void_p CoreFoundation.CFDictionaryCreate.argtypes = [ @@ 
-435,14 +341,11 @@ try: POINTER(CFTypeRef), CFIndex, CFDictionaryKeyCallBacks, - CFDictionaryValueCallBacks + CFDictionaryValueCallBacks, ] CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef - CoreFoundation.CFDictionaryGetValue.argtypes = [ - CFDictionaryRef, - CFTypeRef - ] + CoreFoundation.CFDictionaryGetValue.argtypes = [CFDictionaryRef, CFTypeRef] CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef CoreFoundation.CFArrayCreate.argtypes = [ @@ -456,36 +359,30 @@ try: CoreFoundation.CFArrayCreateMutable.argtypes = [ CFAllocatorRef, CFIndex, - CFArrayCallBacks + CFArrayCallBacks, ] CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef - CoreFoundation.CFArrayAppendValue.argtypes = [ - CFMutableArrayRef, - c_void_p - ] + CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p] CoreFoundation.CFArrayAppendValue.restype = None - CoreFoundation.CFArrayGetCount.argtypes = [ - CFArrayRef - ] + CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef] CoreFoundation.CFArrayGetCount.restype = CFIndex - CoreFoundation.CFArrayGetValueAtIndex.argtypes = [ - CFArrayRef, - CFIndex - ] + CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex] CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll( - CoreFoundation, 'kCFAllocatorDefault' + CoreFoundation, "kCFAllocatorDefault" + ) + CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll( + CoreFoundation, "kCFTypeArrayCallBacks" ) - CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(CoreFoundation, 'kCFTypeArrayCallBacks') CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll( - CoreFoundation, 'kCFTypeDictionaryKeyCallBacks' + CoreFoundation, "kCFTypeDictionaryKeyCallBacks" ) CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll( - CoreFoundation, 'kCFTypeDictionaryValueCallBacks' + CoreFoundation, "kCFTypeDictionaryValueCallBacks" ) CoreFoundation.CFTypeRef = CFTypeRef @@ -494,7 +391,7 @@ 
try: CoreFoundation.CFDictionaryRef = CFDictionaryRef except (AttributeError): - raise ImportError('Error initializing ctypes') + raise ImportError("Error initializing ctypes") class CFConst(object): @@ -502,6 +399,7 @@ class CFConst(object): A class object that acts as essentially a namespace for CoreFoundation constants. """ + kCFStringEncodingUTF8 = CFStringEncoding(0x08000100) @@ -509,6 +407,7 @@ class SecurityConst(object): """ A class object that acts as essentially a namespace for Security constants. """ + kSSLSessionOptionBreakOnServerAuth = 0 kSSLProtocol2 = 1 @@ -516,6 +415,9 @@ class SecurityConst(object): kTLSProtocol1 = 4 kTLSProtocol11 = 7 kTLSProtocol12 = 8 + # SecureTransport does not support TLS 1.3 even if there's a constant for it + kTLSProtocol13 = 10 + kTLSProtocolMaxSupported = 999 kSSLClientSide = 1 kSSLStreamType = 0 @@ -558,30 +460,27 @@ class SecurityConst(object): errSecInvalidTrustSettings = -25262 # Cipher suites. We only pick the ones our default cipher string allows. 
+ # Source: https://developer.apple.com/documentation/security/1550981-ssl_cipher_suite_values TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030 TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F - TLS_DHE_DSS_WITH_AES_256_GCM_SHA384 = 0x00A3 + TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA9 + TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA8 TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F - TLS_DHE_DSS_WITH_AES_128_GCM_SHA256 = 0x00A2 TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024 TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028 TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014 TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B - TLS_DHE_DSS_WITH_AES_256_CBC_SHA256 = 0x006A TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039 - TLS_DHE_DSS_WITH_AES_256_CBC_SHA = 0x0038 TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023 TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027 TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009 TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013 TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067 - TLS_DHE_DSS_WITH_AES_128_CBC_SHA256 = 0x0040 TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033 - TLS_DHE_DSS_WITH_AES_128_CBC_SHA = 0x0032 TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D @@ -590,4 +489,5 @@ class SecurityConst(object): TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F TLS_AES_128_GCM_SHA256 = 0x1301 TLS_AES_256_GCM_SHA384 = 0x1302 - TLS_CHACHA20_POLY1305_SHA256 = 0x1303 + TLS_AES_128_CCM_8_SHA256 = 0x1305 + TLS_AES_128_CCM_SHA256 = 0x1304 diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py index b13cd9e7..e60168ca 100644 --- 
a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py @@ -66,22 +66,18 @@ def _cf_string_to_unicode(value): value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p)) string = CoreFoundation.CFStringGetCStringPtr( - value_as_void_p, - CFConst.kCFStringEncodingUTF8 + value_as_void_p, CFConst.kCFStringEncodingUTF8 ) if string is None: buffer = ctypes.create_string_buffer(1024) result = CoreFoundation.CFStringGetCString( - value_as_void_p, - buffer, - 1024, - CFConst.kCFStringEncodingUTF8 + value_as_void_p, buffer, 1024, CFConst.kCFStringEncodingUTF8 ) if not result: - raise OSError('Error copying C string from CFStringRef') + raise OSError("Error copying C string from CFStringRef") string = buffer.value if string is not None: - string = string.decode('utf-8') + string = string.decode("utf-8") return string @@ -97,8 +93,8 @@ def _assert_no_error(error, exception_class=None): output = _cf_string_to_unicode(cf_error_string) CoreFoundation.CFRelease(cf_error_string) - if output is None or output == u'': - output = u'OSStatus %s' % error + if output is None or output == u"": + output = u"OSStatus %s" % error if exception_class is None: exception_class = ssl.SSLError @@ -115,8 +111,7 @@ def _cert_array_from_pem(pem_bundle): pem_bundle = pem_bundle.replace(b"\r\n", b"\n") der_certs = [ - base64.b64decode(match.group(1)) - for match in _PEM_CERTS_RE.finditer(pem_bundle) + base64.b64decode(match.group(1)) for match in _PEM_CERTS_RE.finditer(pem_bundle) ] if not der_certs: raise ssl.SSLError("No root certificates specified") @@ -124,7 +119,7 @@ def _cert_array_from_pem(pem_bundle): cert_array = CoreFoundation.CFArrayCreateMutable( CoreFoundation.kCFAllocatorDefault, 0, - ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks) + ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), ) if not cert_array: 
raise ssl.SSLError("Unable to allocate memory!") @@ -186,21 +181,16 @@ def _temporary_keychain(): # some random bytes to password-protect the keychain we're creating, so we # ask for 40 random bytes. random_bytes = os.urandom(40) - filename = base64.b16encode(random_bytes[:8]).decode('utf-8') + filename = base64.b16encode(random_bytes[:8]).decode("utf-8") password = base64.b16encode(random_bytes[8:]) # Must be valid UTF-8 tempdirectory = tempfile.mkdtemp() - keychain_path = os.path.join(tempdirectory, filename).encode('utf-8') + keychain_path = os.path.join(tempdirectory, filename).encode("utf-8") # We now want to create the keychain itself. keychain = Security.SecKeychainRef() status = Security.SecKeychainCreate( - keychain_path, - len(password), - password, - False, - None, - ctypes.byref(keychain) + keychain_path, len(password), password, False, None, ctypes.byref(keychain) ) _assert_no_error(status) @@ -219,14 +209,12 @@ def _load_items_from_file(keychain, path): identities = [] result_array = None - with open(path, 'rb') as f: + with open(path, "rb") as f: raw_filedata = f.read() try: filedata = CoreFoundation.CFDataCreate( - CoreFoundation.kCFAllocatorDefault, - raw_filedata, - len(raw_filedata) + CoreFoundation.kCFAllocatorDefault, raw_filedata, len(raw_filedata) ) result_array = CoreFoundation.CFArrayRef() result = Security.SecItemImport( @@ -237,7 +225,7 @@ def _load_items_from_file(keychain, path): 0, # import flags None, # key params, can include passphrase in the future keychain, # The keychain to insert into - ctypes.byref(result_array) # Results + ctypes.byref(result_array), # Results ) _assert_no_error(result) @@ -247,9 +235,7 @@ def _load_items_from_file(keychain, path): # keychain already has them! 
result_count = CoreFoundation.CFArrayGetCount(result_array) for index in range(result_count): - item = CoreFoundation.CFArrayGetValueAtIndex( - result_array, index - ) + item = CoreFoundation.CFArrayGetValueAtIndex(result_array, index) item = ctypes.cast(item, CoreFoundation.CFTypeRef) if _is_cert(item): @@ -307,9 +293,7 @@ def _load_client_cert_chain(keychain, *paths): try: for file_path in paths: - new_identities, new_certs = _load_items_from_file( - keychain, file_path - ) + new_identities, new_certs = _load_items_from_file(keychain, file_path) identities.extend(new_identities) certificates.extend(new_certs) @@ -318,9 +302,7 @@ def _load_client_cert_chain(keychain, *paths): if not identities: new_identity = Security.SecIdentityRef() status = Security.SecIdentityCreateWithCertificate( - keychain, - certificates[0], - ctypes.byref(new_identity) + keychain, certificates[0], ctypes.byref(new_identity) ) _assert_no_error(status) identities.append(new_identity) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/appengine.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/appengine.py index 9b42952d..d09d2be6 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/appengine.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/appengine.py @@ -50,7 +50,7 @@ from ..exceptions import ( MaxRetryError, ProtocolError, TimeoutError, - SSLError + SSLError, ) from ..request import RequestMethods @@ -96,23 +96,24 @@ class AppEngineManager(RequestMethods): Beyond those cases, it will raise normal urllib3 errors. 
""" - def __init__(self, headers=None, retries=None, validate_certificate=True, - urlfetch_retries=True): + def __init__( + self, + headers=None, + retries=None, + validate_certificate=True, + urlfetch_retries=True, + ): if not urlfetch: raise AppEnginePlatformError( - "URLFetch is not available in this environment.") - - if is_prod_appengine_mvms(): - raise AppEnginePlatformError( - "Use normal urllib3.PoolManager instead of AppEngineManager" - "on Managed VMs, as using URLFetch is not necessary in " - "this environment.") + "URLFetch is not available in this environment." + ) warnings.warn( "urllib3 is using URLFetch on Google App Engine sandbox instead " "of sockets. To use sockets directly instead of URLFetch see " "https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.", - AppEnginePlatformWarning) + AppEnginePlatformWarning, + ) RequestMethods.__init__(self, headers) self.validate_certificate = validate_certificate @@ -127,17 +128,22 @@ class AppEngineManager(RequestMethods): # Return False to re-raise any potential exceptions return False - def urlopen(self, method, url, body=None, headers=None, - retries=None, redirect=True, timeout=Timeout.DEFAULT_TIMEOUT, - **response_kw): + def urlopen( + self, + method, + url, + body=None, + headers=None, + retries=None, + redirect=True, + timeout=Timeout.DEFAULT_TIMEOUT, + **response_kw + ): retries = self._get_retries(retries, redirect) try: - follow_redirects = ( - redirect and - retries.redirect != 0 and - retries.total) + follow_redirects = redirect and retries.redirect != 0 and retries.total response = urlfetch.fetch( url, payload=body, @@ -152,44 +158,52 @@ class AppEngineManager(RequestMethods): raise TimeoutError(self, e) except urlfetch.InvalidURLError as e: - if 'too large' in str(e): + if "too large" in str(e): raise AppEnginePlatformError( "URLFetch request too large, URLFetch only " - "supports requests up to 10mb in size.", e) + "supports requests up to 10mb in size.", + e, + ) raise 
ProtocolError(e) except urlfetch.DownloadError as e: - if 'Too many redirects' in str(e): + if "Too many redirects" in str(e): raise MaxRetryError(self, url, reason=e) raise ProtocolError(e) except urlfetch.ResponseTooLargeError as e: raise AppEnginePlatformError( "URLFetch response too large, URLFetch only supports" - "responses up to 32mb in size.", e) + "responses up to 32mb in size.", + e, + ) except urlfetch.SSLCertificateError as e: raise SSLError(e) except urlfetch.InvalidMethodError as e: raise AppEnginePlatformError( - "URLFetch does not support method: %s" % method, e) + "URLFetch does not support method: %s" % method, e + ) http_response = self._urlfetch_response_to_http_response( - response, retries=retries, **response_kw) + response, retries=retries, **response_kw + ) # Handle redirect? redirect_location = redirect and http_response.get_redirect_location() if redirect_location: # Check for redirect response - if (self.urlfetch_retries and retries.raise_on_redirect): + if self.urlfetch_retries and retries.raise_on_redirect: raise MaxRetryError(self, url, "too many redirects") else: if http_response.status == 303: - method = 'GET' + method = "GET" try: - retries = retries.increment(method, url, response=http_response, _pool=self) + retries = retries.increment( + method, url, response=http_response, _pool=self + ) except MaxRetryError: if retries.raise_on_redirect: raise MaxRetryError(self, url, "too many redirects") @@ -199,22 +213,32 @@ class AppEngineManager(RequestMethods): log.debug("Redirecting %s -> %s", url, redirect_location) redirect_url = urljoin(url, redirect_location) return self.urlopen( - method, redirect_url, body, headers, - retries=retries, redirect=redirect, - timeout=timeout, **response_kw) + method, + redirect_url, + body, + headers, + retries=retries, + redirect=redirect, + timeout=timeout, + **response_kw + ) # Check if we should retry the HTTP response. 
- has_retry_after = bool(http_response.getheader('Retry-After')) + has_retry_after = bool(http_response.getheader("Retry-After")) if retries.is_retry(method, http_response.status, has_retry_after): - retries = retries.increment( - method, url, response=http_response, _pool=self) + retries = retries.increment(method, url, response=http_response, _pool=self) log.debug("Retry: %s", url) retries.sleep(http_response) return self.urlopen( - method, url, - body=body, headers=headers, - retries=retries, redirect=redirect, - timeout=timeout, **response_kw) + method, + url, + body=body, + headers=headers, + retries=retries, + redirect=redirect, + timeout=timeout, + **response_kw + ) return http_response @@ -223,18 +247,18 @@ class AppEngineManager(RequestMethods): if is_prod_appengine(): # Production GAE handles deflate encoding automatically, but does # not remove the encoding header. - content_encoding = urlfetch_resp.headers.get('content-encoding') + content_encoding = urlfetch_resp.headers.get("content-encoding") - if content_encoding == 'deflate': - del urlfetch_resp.headers['content-encoding'] + if content_encoding == "deflate": + del urlfetch_resp.headers["content-encoding"] - transfer_encoding = urlfetch_resp.headers.get('transfer-encoding') + transfer_encoding = urlfetch_resp.headers.get("transfer-encoding") # We have a full response's content, # so let's make sure we don't report ourselves as chunked data. 
- if transfer_encoding == 'chunked': + if transfer_encoding == "chunked": encodings = transfer_encoding.split(",") - encodings.remove('chunked') - urlfetch_resp.headers['transfer-encoding'] = ','.join(encodings) + encodings.remove("chunked") + urlfetch_resp.headers["transfer-encoding"] = ",".join(encodings) original_response = HTTPResponse( # In order for decoding to work, we must present the content as @@ -262,20 +286,21 @@ class AppEngineManager(RequestMethods): warnings.warn( "URLFetch does not support granular timeout settings, " "reverting to total or default URLFetch timeout.", - AppEnginePlatformWarning) + AppEnginePlatformWarning, + ) return timeout.total return timeout def _get_retries(self, retries, redirect): if not isinstance(retries, Retry): - retries = Retry.from_int( - retries, redirect=redirect, default=self.retries) + retries = Retry.from_int(retries, redirect=redirect, default=self.retries) if retries.connect or retries.read or retries.redirect: warnings.warn( "URLFetch only supports total retries and does not " "recognize connect, read, or redirect retry parameters.", - AppEnginePlatformWarning) + AppEnginePlatformWarning, + ) return retries diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py index 8ea127c5..1fd242a6 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py @@ -20,7 +20,7 @@ class NTLMConnectionPool(HTTPSConnectionPool): Implements an NTLM authentication version of an urllib3 connection pool """ - scheme = 'https' + scheme = "https" def __init__(self, user, pw, authurl, *args, **kwargs): """ @@ -31,7 +31,7 @@ class NTLMConnectionPool(HTTPSConnectionPool): super(NTLMConnectionPool, self).__init__(*args, **kwargs) self.authurl = authurl 
self.rawuser = user - user_parts = user.split('\\', 1) + user_parts = user.split("\\", 1) self.domain = user_parts[0].upper() self.user = user_parts[1] self.pw = pw @@ -40,72 +40,82 @@ class NTLMConnectionPool(HTTPSConnectionPool): # Performs the NTLM handshake that secures the connection. The socket # must be kept open while requests are performed. self.num_connections += 1 - log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s', - self.num_connections, self.host, self.authurl) + log.debug( + "Starting NTLM HTTPS connection no. %d: https://%s%s", + self.num_connections, + self.host, + self.authurl, + ) - headers = {'Connection': 'Keep-Alive'} - req_header = 'Authorization' - resp_header = 'www-authenticate' + headers = {"Connection": "Keep-Alive"} + req_header = "Authorization" + resp_header = "www-authenticate" conn = HTTPSConnection(host=self.host, port=self.port) # Send negotiation message - headers[req_header] = ( - 'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser)) - log.debug('Request headers: %s', headers) - conn.request('GET', self.authurl, None, headers) + headers[req_header] = "NTLM %s" % ntlm.create_NTLM_NEGOTIATE_MESSAGE( + self.rawuser + ) + log.debug("Request headers: %s", headers) + conn.request("GET", self.authurl, None, headers) res = conn.getresponse() reshdr = dict(res.getheaders()) - log.debug('Response status: %s %s', res.status, res.reason) - log.debug('Response headers: %s', reshdr) - log.debug('Response data: %s [...]', res.read(100)) + log.debug("Response status: %s %s", res.status, res.reason) + log.debug("Response headers: %s", reshdr) + log.debug("Response data: %s [...]", res.read(100)) # Remove the reference to the socket, so that it can not be closed by # the response object (we want to keep the socket open) res.fp = None # Server should respond with a challenge message - auth_header_values = reshdr[resp_header].split(', ') + auth_header_values = reshdr[resp_header].split(", ") auth_header_value = None for s in 
auth_header_values: - if s[:5] == 'NTLM ': + if s[:5] == "NTLM ": auth_header_value = s[5:] if auth_header_value is None: - raise Exception('Unexpected %s response header: %s' % - (resp_header, reshdr[resp_header])) + raise Exception( + "Unexpected %s response header: %s" % (resp_header, reshdr[resp_header]) + ) # Send authentication message - ServerChallenge, NegotiateFlags = \ - ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value) - auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge, - self.user, - self.domain, - self.pw, - NegotiateFlags) - headers[req_header] = 'NTLM %s' % auth_msg - log.debug('Request headers: %s', headers) - conn.request('GET', self.authurl, None, headers) + ServerChallenge, NegotiateFlags = ntlm.parse_NTLM_CHALLENGE_MESSAGE( + auth_header_value + ) + auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE( + ServerChallenge, self.user, self.domain, self.pw, NegotiateFlags + ) + headers[req_header] = "NTLM %s" % auth_msg + log.debug("Request headers: %s", headers) + conn.request("GET", self.authurl, None, headers) res = conn.getresponse() - log.debug('Response status: %s %s', res.status, res.reason) - log.debug('Response headers: %s', dict(res.getheaders())) - log.debug('Response data: %s [...]', res.read()[:100]) + log.debug("Response status: %s %s", res.status, res.reason) + log.debug("Response headers: %s", dict(res.getheaders())) + log.debug("Response data: %s [...]", res.read()[:100]) if res.status != 200: if res.status == 401: - raise Exception('Server rejected request: wrong ' - 'username or password') - raise Exception('Wrong server response: %s %s' % - (res.status, res.reason)) + raise Exception("Server rejected request: wrong username or password") + raise Exception("Wrong server response: %s %s" % (res.status, res.reason)) res.fp = None - log.debug('Connection established') + log.debug("Connection established") return conn - def urlopen(self, method, url, body=None, headers=None, retries=3, - redirect=True, 
assert_same_host=True): + def urlopen( + self, + method, + url, + body=None, + headers=None, + retries=3, + redirect=True, + assert_same_host=True, + ): if headers is None: headers = {} - headers['Connection'] = 'Keep-Alive' - return super(NTLMConnectionPool, self).urlopen(method, url, body, - headers, retries, - redirect, - assert_same_host) + headers["Connection"] = "Keep-Alive" + return super(NTLMConnectionPool, self).urlopen( + method, url, body, headers, retries, redirect, assert_same_host + ) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py index 363667cb..d8fe0629 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py @@ -47,6 +47,7 @@ import OpenSSL.SSL from cryptography import x509 from cryptography.hazmat.backends.openssl import backend as openssl_backend from cryptography.hazmat.backends.openssl.x509 import _Certificate + try: from cryptography.x509 import UnsupportedExtension except ImportError: @@ -54,6 +55,7 @@ except ImportError: class UnsupportedExtension(Exception): pass + from socket import timeout, error as SocketError from io import BytesIO @@ -70,37 +72,35 @@ import sys from .. import util -__all__ = ['inject_into_urllib3', 'extract_from_urllib3'] + +__all__ = ["inject_into_urllib3", "extract_from_urllib3"] # SNI always works. HAS_SNI = True # Map from urllib3 to PyOpenSSL compatible parameter-values. 
_openssl_versions = { - ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD, + util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD, ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, } -if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'): +if hasattr(ssl, "PROTOCOL_SSLv3") and hasattr(OpenSSL.SSL, "SSLv3_METHOD"): + _openssl_versions[ssl.PROTOCOL_SSLv3] = OpenSSL.SSL.SSLv3_METHOD + +if hasattr(ssl, "PROTOCOL_TLSv1_1") and hasattr(OpenSSL.SSL, "TLSv1_1_METHOD"): _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD -if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'): +if hasattr(ssl, "PROTOCOL_TLSv1_2") and hasattr(OpenSSL.SSL, "TLSv1_2_METHOD"): _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD -try: - _openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD}) -except AttributeError: - pass _stdlib_to_openssl_verify = { ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE, ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER, - ssl.CERT_REQUIRED: - OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, + ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER + + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, } -_openssl_to_stdlib_verify = dict( - (v, k) for k, v in _stdlib_to_openssl_verify.items() -) +_openssl_to_stdlib_verify = dict((v, k) for k, v in _stdlib_to_openssl_verify.items()) # OpenSSL will only write 16K at a time SSL_WRITE_BLOCKSIZE = 16384 @@ -113,10 +113,11 @@ log = logging.getLogger(__name__) def inject_into_urllib3(): - 'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.' + "Monkey-patch urllib3 with PyOpenSSL-backed SSL-support." _validate_dependencies_met() + util.SSLContext = PyOpenSSLContext util.ssl_.SSLContext = PyOpenSSLContext util.HAS_SNI = HAS_SNI util.ssl_.HAS_SNI = HAS_SNI @@ -125,8 +126,9 @@ def inject_into_urllib3(): def extract_from_urllib3(): - 'Undo monkey-patching by :func:`inject_into_urllib3`.' + "Undo monkey-patching by :func:`inject_into_urllib3`." 
+ util.SSLContext = orig_util_SSLContext util.ssl_.SSLContext = orig_util_SSLContext util.HAS_SNI = orig_util_HAS_SNI util.ssl_.HAS_SNI = orig_util_HAS_SNI @@ -141,17 +143,23 @@ def _validate_dependencies_met(): """ # Method added in `cryptography==1.1`; not available in older versions from cryptography.x509.extensions import Extensions + if getattr(Extensions, "get_extension_for_class", None) is None: - raise ImportError("'cryptography' module missing required functionality. " - "Try upgrading to v1.3.4 or newer.") + raise ImportError( + "'cryptography' module missing required functionality. " + "Try upgrading to v1.3.4 or newer." + ) # pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509 # attribute is only present on those versions. from OpenSSL.crypto import X509 + x509 = X509() if getattr(x509, "_x509", None) is None: - raise ImportError("'pyOpenSSL' module missing required functionality. " - "Try upgrading to v0.14 or newer.") + raise ImportError( + "'pyOpenSSL' module missing required functionality. " + "Try upgrading to v0.14 or newer." + ) def _dnsname_to_stdlib(name): @@ -167,6 +175,7 @@ def _dnsname_to_stdlib(name): If the name cannot be idna-encoded then we return None signalling that the name given should be skipped. """ + def idna_encode(name): """ Borrowed wholesale from the Python Cryptography Project. It turns out @@ -176,19 +185,23 @@ def _dnsname_to_stdlib(name): from pip._vendor import idna try: - for prefix in [u'*.', u'.']: + for prefix in [u"*.", u"."]: if name.startswith(prefix): - name = name[len(prefix):] - return prefix.encode('ascii') + idna.encode(name) + name = name[len(prefix) :] + return prefix.encode("ascii") + idna.encode(name) return idna.encode(name) except idna.core.IDNAError: return None + # Don't send IPv6 addresses through the IDNA encoder. 
+ if ":" in name: + return name + name = idna_encode(name) if name is None: return None elif sys.version_info >= (3, 0): - name = name.decode('utf-8') + name = name.decode("utf-8") return name @@ -207,14 +220,16 @@ def get_subj_alt_name(peer_cert): # We want to find the SAN extension. Ask Cryptography to locate it (it's # faster than looping in Python) try: - ext = cert.extensions.get_extension_for_class( - x509.SubjectAlternativeName - ).value + ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value except x509.ExtensionNotFound: # No such extension, return the empty list. return [] - except (x509.DuplicateExtension, UnsupportedExtension, - x509.UnsupportedGeneralNameType, UnicodeError) as e: + except ( + x509.DuplicateExtension, + UnsupportedExtension, + x509.UnsupportedGeneralNameType, + UnicodeError, + ) as e: # A problem has been found with the quality of the certificate. Assume # no SAN field is present. log.warning( @@ -233,23 +248,23 @@ def get_subj_alt_name(peer_cert): # does with certificates, and so we need to attempt to do the same. # We also want to skip over names which cannot be idna encoded. names = [ - ('DNS', name) for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName)) + ("DNS", name) + for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName)) if name is not None ] names.extend( - ('IP Address', str(name)) - for name in ext.get_values_for_type(x509.IPAddress) + ("IP Address", str(name)) for name in ext.get_values_for_type(x509.IPAddress) ) return names class WrappedSocket(object): - '''API-compatibility wrapper for Python OpenSSL's Connection-class. + """API-compatibility wrapper for Python OpenSSL's Connection-class. Note: _makefile_refs, _drop() and _reuse() are needed for the garbage collector of pypy. 
- ''' + """ def __init__(self, connection, socket, suppress_ragged_eofs=True): self.connection = connection @@ -272,20 +287,24 @@ class WrappedSocket(object): try: data = self.connection.recv(*args, **kwargs) except OpenSSL.SSL.SysCallError as e: - if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): - return b'' + if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"): + return b"" else: raise SocketError(str(e)) - except OpenSSL.SSL.ZeroReturnError as e: + except OpenSSL.SSL.ZeroReturnError: if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: - return b'' + return b"" else: raise except OpenSSL.SSL.WantReadError: if not util.wait_for_read(self.socket, self.socket.gettimeout()): - raise timeout('The read operation timed out') + raise timeout("The read operation timed out") else: return self.recv(*args, **kwargs) + + # TLS 1.3 post-handshake authentication + except OpenSSL.SSL.Error as e: + raise ssl.SSLError("read error: %r" % e) else: return data @@ -293,21 +312,25 @@ class WrappedSocket(object): try: return self.connection.recv_into(*args, **kwargs) except OpenSSL.SSL.SysCallError as e: - if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): + if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"): return 0 else: raise SocketError(str(e)) - except OpenSSL.SSL.ZeroReturnError as e: + except OpenSSL.SSL.ZeroReturnError: if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: return 0 else: raise except OpenSSL.SSL.WantReadError: if not util.wait_for_read(self.socket, self.socket.gettimeout()): - raise timeout('The read operation timed out') + raise timeout("The read operation timed out") else: return self.recv_into(*args, **kwargs) + # TLS 1.3 post-handshake authentication + except OpenSSL.SSL.Error as e: + raise ssl.SSLError("read error: %r" % e) + def settimeout(self, timeout): return self.socket.settimeout(timeout) @@ -325,7 +348,9 @@ class WrappedSocket(object): def sendall(self, 
data): total_sent = 0 while total_sent < len(data): - sent = self._send_until_done(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE]) + sent = self._send_until_done( + data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE] + ) total_sent += sent def shutdown(self): @@ -349,17 +374,16 @@ class WrappedSocket(object): return x509 if binary_form: - return OpenSSL.crypto.dump_certificate( - OpenSSL.crypto.FILETYPE_ASN1, - x509) + return OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, x509) return { - 'subject': ( - (('commonName', x509.get_subject().CN),), - ), - 'subjectAltName': get_subj_alt_name(x509) + "subject": ((("commonName", x509.get_subject().CN),),), + "subjectAltName": get_subj_alt_name(x509), } + def version(self): + return self.connection.get_protocol_version_name() + def _reuse(self): self._makefile_refs += 1 @@ -371,9 +395,12 @@ class WrappedSocket(object): if _fileobject: # Platform-specific: Python 2 + def makefile(self, mode, bufsize=-1): self._makefile_refs += 1 return _fileobject(self, mode, bufsize, close=True) + + else: # Platform-specific: Python 3 makefile = backport_makefile @@ -386,6 +413,7 @@ class PyOpenSSLContext(object): for translating the interface of the standard library ``SSLContext`` object to calls into PyOpenSSL. 
""" + def __init__(self, protocol): self.protocol = _openssl_versions[protocol] self._ctx = OpenSSL.SSL.Context(self.protocol) @@ -407,41 +435,48 @@ class PyOpenSSLContext(object): @verify_mode.setter def verify_mode(self, value): - self._ctx.set_verify( - _stdlib_to_openssl_verify[value], - _verify_callback - ) + self._ctx.set_verify(_stdlib_to_openssl_verify[value], _verify_callback) def set_default_verify_paths(self): self._ctx.set_default_verify_paths() def set_ciphers(self, ciphers): if isinstance(ciphers, six.text_type): - ciphers = ciphers.encode('utf-8') + ciphers = ciphers.encode("utf-8") self._ctx.set_cipher_list(ciphers) def load_verify_locations(self, cafile=None, capath=None, cadata=None): if cafile is not None: - cafile = cafile.encode('utf-8') + cafile = cafile.encode("utf-8") if capath is not None: - capath = capath.encode('utf-8') - self._ctx.load_verify_locations(cafile, capath) - if cadata is not None: - self._ctx.load_verify_locations(BytesIO(cadata)) + capath = capath.encode("utf-8") + try: + self._ctx.load_verify_locations(cafile, capath) + if cadata is not None: + self._ctx.load_verify_locations(BytesIO(cadata)) + except OpenSSL.SSL.Error as e: + raise ssl.SSLError("unable to load trusted certificates: %r" % e) def load_cert_chain(self, certfile, keyfile=None, password=None): self._ctx.use_certificate_chain_file(certfile) if password is not None: - self._ctx.set_passwd_cb(lambda max_length, prompt_twice, userdata: password) + if not isinstance(password, six.binary_type): + password = password.encode("utf-8") + self._ctx.set_passwd_cb(lambda *_: password) self._ctx.use_privatekey_file(keyfile or certfile) - def wrap_socket(self, sock, server_side=False, - do_handshake_on_connect=True, suppress_ragged_eofs=True, - server_hostname=None): + def wrap_socket( + self, + sock, + server_side=False, + do_handshake_on_connect=True, + suppress_ragged_eofs=True, + server_hostname=None, + ): cnx = OpenSSL.SSL.Connection(self._ctx, sock) if 
isinstance(server_hostname, six.text_type): # Platform-specific: Python 3 - server_hostname = server_hostname.encode('utf-8') + server_hostname = server_hostname.encode("utf-8") if server_hostname is not None: cnx.set_tlsext_host_name(server_hostname) @@ -453,10 +488,10 @@ class PyOpenSSLContext(object): cnx.do_handshake() except OpenSSL.SSL.WantReadError: if not util.wait_for_read(sock, sock.gettimeout()): - raise timeout('select timed out') + raise timeout("select timed out") continue except OpenSSL.SSL.Error as e: - raise ssl.SSLError('bad handshake: %r' % e) + raise ssl.SSLError("bad handshake: %r" % e) break return WrappedSocket(cnx, sock) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/securetransport.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/securetransport.py index 77cb59ed..a6b7e94a 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/securetransport.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/securetransport.py @@ -23,6 +23,31 @@ To use this module, simply import and inject it:: urllib3.contrib.securetransport.inject_into_urllib3() Happy TLSing! + +This code is a bastardised version of the code found in Will Bond's oscrypto +library. An enormous debt is owed to him for blazing this trail for us. 
For +that reason, this code should be considered to be covered both by urllib3's +license and by oscrypto's: + + Copyright (c) 2015-2016 Will Bond <will@wbond.net> + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. """ from __future__ import absolute_import @@ -37,12 +62,12 @@ import threading import weakref from .. 
import util -from ._securetransport.bindings import ( - Security, SecurityConst, CoreFoundation -) +from ._securetransport.bindings import Security, SecurityConst, CoreFoundation from ._securetransport.low_level import ( - _assert_no_error, _cert_array_from_pem, _temporary_keychain, - _load_client_cert_chain + _assert_no_error, + _cert_array_from_pem, + _temporary_keychain, + _load_client_cert_chain, ) try: # Platform-specific: Python 2 @@ -51,7 +76,7 @@ except ImportError: # Platform-specific: Python 3 _fileobject = None from ..packages.backports.makefile import backport_makefile -__all__ = ['inject_into_urllib3', 'extract_from_urllib3'] +__all__ = ["inject_into_urllib3", "extract_from_urllib3"] # SNI always works HAS_SNI = True @@ -86,35 +111,32 @@ SSL_WRITE_BLOCKSIZE = 16384 # individual cipher suites. We need to do this because this is how # SecureTransport wants them. CIPHER_SUITES = [ - SecurityConst.TLS_AES_256_GCM_SHA384, - SecurityConst.TLS_CHACHA20_POLY1305_SHA256, - SecurityConst.TLS_AES_128_GCM_SHA256, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384, SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, - SecurityConst.TLS_DHE_DSS_WITH_AES_256_GCM_SHA384, + SecurityConst.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256, + SecurityConst.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256, SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384, - SecurityConst.TLS_DHE_DSS_WITH_AES_128_GCM_SHA256, SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA, - SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256, - SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA256, - 
SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA, - SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256, SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256, + SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA, SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256, - SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA256, SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA, - SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_AES_256_GCM_SHA384, + SecurityConst.TLS_AES_128_GCM_SHA256, SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384, SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_AES_128_CCM_8_SHA256, + SecurityConst.TLS_AES_128_CCM_SHA256, SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256, SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256, SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA, @@ -123,38 +145,43 @@ CIPHER_SUITES = [ # Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of # TLSv1 and a high of TLSv1.2. For everything else, we pin to that version. 
+# TLSv1 to 1.2 are supported on macOS 10.8+ _protocol_to_min_max = { - ssl.PROTOCOL_SSLv23: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12), + util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12) } if hasattr(ssl, "PROTOCOL_SSLv2"): _protocol_to_min_max[ssl.PROTOCOL_SSLv2] = ( - SecurityConst.kSSLProtocol2, SecurityConst.kSSLProtocol2 + SecurityConst.kSSLProtocol2, + SecurityConst.kSSLProtocol2, ) if hasattr(ssl, "PROTOCOL_SSLv3"): _protocol_to_min_max[ssl.PROTOCOL_SSLv3] = ( - SecurityConst.kSSLProtocol3, SecurityConst.kSSLProtocol3 + SecurityConst.kSSLProtocol3, + SecurityConst.kSSLProtocol3, ) if hasattr(ssl, "PROTOCOL_TLSv1"): _protocol_to_min_max[ssl.PROTOCOL_TLSv1] = ( - SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol1 + SecurityConst.kTLSProtocol1, + SecurityConst.kTLSProtocol1, ) if hasattr(ssl, "PROTOCOL_TLSv1_1"): _protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = ( - SecurityConst.kTLSProtocol11, SecurityConst.kTLSProtocol11 + SecurityConst.kTLSProtocol11, + SecurityConst.kTLSProtocol11, ) if hasattr(ssl, "PROTOCOL_TLSv1_2"): _protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = ( - SecurityConst.kTLSProtocol12, SecurityConst.kTLSProtocol12 + SecurityConst.kTLSProtocol12, + SecurityConst.kTLSProtocol12, ) -if hasattr(ssl, "PROTOCOL_TLS"): - _protocol_to_min_max[ssl.PROTOCOL_TLS] = _protocol_to_min_max[ssl.PROTOCOL_SSLv23] def inject_into_urllib3(): """ Monkey-patch urllib3 with SecureTransport-backed SSL-support. """ + util.SSLContext = SecureTransportContext util.ssl_.SSLContext = SecureTransportContext util.HAS_SNI = HAS_SNI util.ssl_.HAS_SNI = HAS_SNI @@ -166,6 +193,7 @@ def extract_from_urllib3(): """ Undo monkey-patching by :func:`inject_into_urllib3`. 
""" + util.SSLContext = orig_util_SSLContext util.ssl_.SSLContext = orig_util_SSLContext util.HAS_SNI = orig_util_HAS_SNI util.ssl_.HAS_SNI = orig_util_HAS_SNI @@ -195,7 +223,7 @@ def _read_callback(connection_id, data_buffer, data_length_pointer): while read_count < requested_length: if timeout is None or timeout >= 0: if not util.wait_for_read(base_socket, timeout): - raise socket.error(errno.EAGAIN, 'timed out') + raise socket.error(errno.EAGAIN, "timed out") remaining = requested_length - read_count buffer = (ctypes.c_char * remaining).from_address( @@ -251,7 +279,7 @@ def _write_callback(connection_id, data_buffer, data_length_pointer): while sent < bytes_to_write: if timeout is None or timeout >= 0: if not util.wait_for_write(base_socket, timeout): - raise socket.error(errno.EAGAIN, 'timed out') + raise socket.error(errno.EAGAIN, "timed out") chunk_sent = base_socket.send(data) sent += chunk_sent @@ -293,6 +321,7 @@ class WrappedSocket(object): Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage collector of PyPy. """ + def __init__(self, socket): self.socket = socket self.context = None @@ -357,7 +386,7 @@ class WrappedSocket(object): # We want data in memory, so load it up. if os.path.isfile(trust_bundle): - with open(trust_bundle, 'rb') as f: + with open(trust_bundle, "rb") as f: trust_bundle = f.read() cert_array = None @@ -371,9 +400,7 @@ class WrappedSocket(object): # created for this connection, shove our CAs into it, tell ST to # ignore everything else it knows, and then ask if it can build a # chain. This is a buuuunch of code. 
- result = Security.SSLCopyPeerTrust( - self.context, ctypes.byref(trust) - ) + result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust)) _assert_no_error(result) if not trust: raise ssl.SSLError("Failed to copy trust reference") @@ -385,9 +412,7 @@ class WrappedSocket(object): _assert_no_error(result) trust_result = Security.SecTrustResultType() - result = Security.SecTrustEvaluate( - trust, ctypes.byref(trust_result) - ) + result = Security.SecTrustEvaluate(trust, ctypes.byref(trust_result)) _assert_no_error(result) finally: if trust: @@ -399,23 +424,24 @@ class WrappedSocket(object): # Ok, now we can look at what the result was. successes = ( SecurityConst.kSecTrustResultUnspecified, - SecurityConst.kSecTrustResultProceed + SecurityConst.kSecTrustResultProceed, ) if trust_result.value not in successes: raise ssl.SSLError( - "certificate verify failed, error code: %d" % - trust_result.value + "certificate verify failed, error code: %d" % trust_result.value ) - def handshake(self, - server_hostname, - verify, - trust_bundle, - min_version, - max_version, - client_cert, - client_key, - client_key_passphrase): + def handshake( + self, + server_hostname, + verify, + trust_bundle, + min_version, + max_version, + client_cert, + client_key, + client_key_passphrase, + ): """ Actually performs the TLS handshake. This is run automatically by wrapped socket, and shouldn't be needed in user code. @@ -445,7 +471,7 @@ class WrappedSocket(object): # If we have a server hostname, we should set that too. if server_hostname: if not isinstance(server_hostname, bytes): - server_hostname = server_hostname.encode('utf-8') + server_hostname = server_hostname.encode("utf-8") result = Security.SSLSetPeerDomainName( self.context, server_hostname, len(server_hostname) @@ -458,6 +484,7 @@ class WrappedSocket(object): # Set the minimum and maximum TLS versions. 
result = Security.SSLSetProtocolVersionMin(self.context, min_version) _assert_no_error(result) + result = Security.SSLSetProtocolVersionMax(self.context, max_version) _assert_no_error(result) @@ -467,9 +494,7 @@ class WrappedSocket(object): # authing in that case. if not verify or trust_bundle is not None: result = Security.SSLSetSessionOption( - self.context, - SecurityConst.kSSLSessionOptionBreakOnServerAuth, - True + self.context, SecurityConst.kSSLSessionOptionBreakOnServerAuth, True ) _assert_no_error(result) @@ -479,9 +504,7 @@ class WrappedSocket(object): self._client_cert_chain = _load_client_cert_chain( self._keychain, client_cert, client_key ) - result = Security.SSLSetCertificate( - self.context, self._client_cert_chain - ) + result = Security.SSLSetCertificate(self.context, self._client_cert_chain) _assert_no_error(result) while True: @@ -532,7 +555,7 @@ class WrappedSocket(object): # There are some result codes that we want to treat as "not always # errors". Specifically, those are errSSLWouldBlock, # errSSLClosedGraceful, and errSSLClosedNoNotify. - if (result == SecurityConst.errSSLWouldBlock): + if result == SecurityConst.errSSLWouldBlock: # If we didn't process any bytes, then this was just a time out. # However, we can get errSSLWouldBlock in situations when we *did* # read some data, and in those cases we should just read "short" @@ -540,7 +563,10 @@ class WrappedSocket(object): if processed_bytes.value == 0: # Timed out, no data read. raise socket.timeout("recv timed out") - elif result in (SecurityConst.errSSLClosedGraceful, SecurityConst.errSSLClosedNoNotify): + elif result in ( + SecurityConst.errSSLClosedGraceful, + SecurityConst.errSSLClosedNoNotify, + ): # The remote peer has closed this connection. We should do so as # well. Note that we don't actually return here because in # principle this could actually be fired along with return data. 
@@ -579,7 +605,7 @@ class WrappedSocket(object): def sendall(self, data): total_sent = 0 while total_sent < len(data): - sent = self.send(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE]) + sent = self.send(data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE]) total_sent += sent def shutdown(self): @@ -626,18 +652,14 @@ class WrappedSocket(object): # instead to just flag to urllib3 that it shouldn't do its own hostname # validation when using SecureTransport. if not binary_form: - raise ValueError( - "SecureTransport only supports dumping binary certs" - ) + raise ValueError("SecureTransport only supports dumping binary certs") trust = Security.SecTrustRef() certdata = None der_bytes = None try: # Grab the trust store. - result = Security.SSLCopyPeerTrust( - self.context, ctypes.byref(trust) - ) + result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust)) _assert_no_error(result) if not trust: # Probably we haven't done the handshake yet. No biggie. @@ -667,6 +689,27 @@ class WrappedSocket(object): return der_bytes + def version(self): + protocol = Security.SSLProtocol() + result = Security.SSLGetNegotiatedProtocolVersion( + self.context, ctypes.byref(protocol) + ) + _assert_no_error(result) + if protocol.value == SecurityConst.kTLSProtocol13: + raise ssl.SSLError("SecureTransport does not support TLS 1.3") + elif protocol.value == SecurityConst.kTLSProtocol12: + return "TLSv1.2" + elif protocol.value == SecurityConst.kTLSProtocol11: + return "TLSv1.1" + elif protocol.value == SecurityConst.kTLSProtocol1: + return "TLSv1" + elif protocol.value == SecurityConst.kSSLProtocol3: + return "SSLv3" + elif protocol.value == SecurityConst.kSSLProtocol2: + return "SSLv2" + else: + raise ssl.SSLError("Unknown TLS version: %r" % protocol) + def _reuse(self): self._makefile_refs += 1 @@ -678,16 +721,21 @@ class WrappedSocket(object): if _fileobject: # Platform-specific: Python 2 + def makefile(self, mode, bufsize=-1): self._makefile_refs += 1 return _fileobject(self, 
mode, bufsize, close=True) + + else: # Platform-specific: Python 3 + def makefile(self, mode="r", buffering=None, *args, **kwargs): # We disable buffering with SecureTransport because it conflicts with # the buffering that ST does internally (see issue #1153 for more). buffering = 0 return backport_makefile(self, mode, buffering, *args, **kwargs) + WrappedSocket.makefile = makefile @@ -697,6 +745,7 @@ class SecureTransportContext(object): interface of the standard library ``SSLContext`` object to calls into SecureTransport. """ + def __init__(self, protocol): self._min_version, self._max_version = _protocol_to_min_max[protocol] self._options = 0 @@ -763,16 +812,17 @@ class SecureTransportContext(object): def set_ciphers(self, ciphers): # For now, we just require the default cipher string. if ciphers != util.ssl_.DEFAULT_CIPHERS: - raise ValueError( - "SecureTransport doesn't support custom cipher strings" - ) + raise ValueError("SecureTransport doesn't support custom cipher strings") def load_verify_locations(self, cafile=None, capath=None, cadata=None): # OK, we only really support cadata and cafile. if capath is not None: - raise ValueError( - "SecureTransport does not support cert directories" - ) + raise ValueError("SecureTransport does not support cert directories") + + # Raise if cafile does not exist. + if cafile is not None: + with open(cafile): + pass self._trust_bundle = cafile or cadata @@ -781,9 +831,14 @@ class SecureTransportContext(object): self._client_key = keyfile self._client_cert_passphrase = password - def wrap_socket(self, sock, server_side=False, - do_handshake_on_connect=True, suppress_ragged_eofs=True, - server_hostname=None): + def wrap_socket( + self, + sock, + server_side=False, + do_handshake_on_connect=True, + suppress_ragged_eofs=True, + server_hostname=None, + ): # So, what do we do here? Firstly, we assert some properties. This is a # stripped down shim, so there is some functionality we don't support. 
# See PEP 543 for the real deal. @@ -797,8 +852,13 @@ class SecureTransportContext(object): # Now we can handshake wrapped_socket.handshake( - server_hostname, self._verify, self._trust_bundle, - self._min_version, self._max_version, self._client_cert, - self._client_key, self._client_key_passphrase + server_hostname, + self._verify, + self._trust_bundle, + self._min_version, + self._max_version, + self._client_cert, + self._client_key, + self._client_key_passphrase, ) return wrapped_socket diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/socks.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/socks.py index 811e312e..9e97f7aa 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/socks.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/contrib/socks.py @@ -1,25 +1,38 @@ # -*- coding: utf-8 -*- """ This module contains provisional support for SOCKS proxies from within -urllib3. This module supports SOCKS4 (specifically the SOCKS4A variant) and +urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and SOCKS5. To enable its functionality, either install PySocks or install this module with the ``socks`` extra. The SOCKS implementation supports the full range of urllib3 features. It also supports the following SOCKS features: -- SOCKS4 -- SOCKS4a -- SOCKS5 +- SOCKS4A (``proxy_url='socks4a://...``) +- SOCKS4 (``proxy_url='socks4://...``) +- SOCKS5 with remote DNS (``proxy_url='socks5h://...``) +- SOCKS5 with local DNS (``proxy_url='socks5://...``) - Usernames and passwords for the SOCKS proxy -Known Limitations: + .. note:: + It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in + your ``proxy_url`` to ensure that DNS resolution is done from the remote + server instead of client-side when connecting to a domain name. + +SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. 
SOCKS5 +supports IPv4, IPv6, and domain names. + +When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url`` +will be sent as the ``userid`` section of the SOCKS request:: + + proxy_url="socks4a://<userid>@proxy-host" + +When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion +of the ``proxy_url`` will be sent as the username/password to authenticate +with the proxy:: + + proxy_url="socks5h://<username>:<password>@proxy-host" -- Currently PySocks does not support contacting remote websites via literal - IPv6 addresses. Any such connection attempt will fail. You must use a domain - name. -- Currently PySocks does not support IPv6 connections to the SOCKS proxy. Any - such connection attempt will fail. """ from __future__ import absolute_import @@ -29,23 +42,20 @@ except ImportError: import warnings from ..exceptions import DependencyWarning - warnings.warn(( - 'SOCKS support in urllib3 requires the installation of optional ' - 'dependencies: specifically, PySocks. For more information, see ' - 'https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies' + warnings.warn( + ( + "SOCKS support in urllib3 requires the installation of optional " + "dependencies: specifically, PySocks. For more information, see " + "https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies" ), - DependencyWarning + DependencyWarning, ) raise from socket import error as SocketError, timeout as SocketTimeout -from ..connection import ( - HTTPConnection, HTTPSConnection -) -from ..connectionpool import ( - HTTPConnectionPool, HTTPSConnectionPool -) +from ..connection import HTTPConnection, HTTPSConnection +from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool from ..exceptions import ConnectTimeoutError, NewConnectionError from ..poolmanager import PoolManager from ..util.url import parse_url @@ -60,8 +70,9 @@ class SOCKSConnection(HTTPConnection): """ A plain-text HTTP connection that connects via a SOCKS proxy. 
""" + def __init__(self, *args, **kwargs): - self._socks_options = kwargs.pop('_socks_options') + self._socks_options = kwargs.pop("_socks_options") super(SOCKSConnection, self).__init__(*args, **kwargs) def _new_conn(self): @@ -70,28 +81,30 @@ class SOCKSConnection(HTTPConnection): """ extra_kw = {} if self.source_address: - extra_kw['source_address'] = self.source_address + extra_kw["source_address"] = self.source_address if self.socket_options: - extra_kw['socket_options'] = self.socket_options + extra_kw["socket_options"] = self.socket_options try: conn = socks.create_connection( (self.host, self.port), - proxy_type=self._socks_options['socks_version'], - proxy_addr=self._socks_options['proxy_host'], - proxy_port=self._socks_options['proxy_port'], - proxy_username=self._socks_options['username'], - proxy_password=self._socks_options['password'], - proxy_rdns=self._socks_options['rdns'], + proxy_type=self._socks_options["socks_version"], + proxy_addr=self._socks_options["proxy_host"], + proxy_port=self._socks_options["proxy_port"], + proxy_username=self._socks_options["username"], + proxy_password=self._socks_options["password"], + proxy_rdns=self._socks_options["rdns"], timeout=self.timeout, **extra_kw ) - except SocketTimeout as e: + except SocketTimeout: raise ConnectTimeoutError( - self, "Connection to %s timed out. (connect timeout=%s)" % - (self.host, self.timeout)) + self, + "Connection to %s timed out. (connect timeout=%s)" + % (self.host, self.timeout), + ) except socks.ProxyError as e: # This is fragile as hell, but it seems to be the only way to raise @@ -101,23 +114,22 @@ class SOCKSConnection(HTTPConnection): if isinstance(error, SocketTimeout): raise ConnectTimeoutError( self, - "Connection to %s timed out. (connect timeout=%s)" % - (self.host, self.timeout) + "Connection to %s timed out. 
(connect timeout=%s)" + % (self.host, self.timeout), ) else: raise NewConnectionError( - self, - "Failed to establish a new connection: %s" % error + self, "Failed to establish a new connection: %s" % error ) else: raise NewConnectionError( - self, - "Failed to establish a new connection: %s" % e + self, "Failed to establish a new connection: %s" % e ) except SocketError as e: # Defensive: PySocks should catch all these. raise NewConnectionError( - self, "Failed to establish a new connection: %s" % e) + self, "Failed to establish a new connection: %s" % e + ) return conn @@ -143,47 +155,53 @@ class SOCKSProxyManager(PoolManager): A version of the urllib3 ProxyManager that routes connections via the defined SOCKS proxy. """ + pool_classes_by_scheme = { - 'http': SOCKSHTTPConnectionPool, - 'https': SOCKSHTTPSConnectionPool, + "http": SOCKSHTTPConnectionPool, + "https": SOCKSHTTPSConnectionPool, } - def __init__(self, proxy_url, username=None, password=None, - num_pools=10, headers=None, **connection_pool_kw): + def __init__( + self, + proxy_url, + username=None, + password=None, + num_pools=10, + headers=None, + **connection_pool_kw + ): parsed = parse_url(proxy_url) if username is None and password is None and parsed.auth is not None: - split = parsed.auth.split(':') + split = parsed.auth.split(":") if len(split) == 2: username, password = split - if parsed.scheme == 'socks5': + if parsed.scheme == "socks5": socks_version = socks.PROXY_TYPE_SOCKS5 rdns = False - elif parsed.scheme == 'socks5h': + elif parsed.scheme == "socks5h": socks_version = socks.PROXY_TYPE_SOCKS5 rdns = True - elif parsed.scheme == 'socks4': + elif parsed.scheme == "socks4": socks_version = socks.PROXY_TYPE_SOCKS4 rdns = False - elif parsed.scheme == 'socks4a': + elif parsed.scheme == "socks4a": socks_version = socks.PROXY_TYPE_SOCKS4 rdns = True else: - raise ValueError( - "Unable to determine SOCKS version from %s" % proxy_url - ) + raise ValueError("Unable to determine SOCKS version from %s" 
% proxy_url) self.proxy_url = proxy_url socks_options = { - 'socks_version': socks_version, - 'proxy_host': parsed.host, - 'proxy_port': parsed.port, - 'username': username, - 'password': password, - 'rdns': rdns + "socks_version": socks_version, + "proxy_host": parsed.host, + "proxy_port": parsed.port, + "username": username, + "password": password, + "rdns": rdns, } - connection_pool_kw['_socks_options'] = socks_options + connection_pool_kw["_socks_options"] = socks_options super(SOCKSProxyManager, self).__init__( num_pools, headers, **connection_pool_kw diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/exceptions.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/exceptions.py index 7bbaa987..5cc4d8a4 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/exceptions.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/exceptions.py @@ -1,7 +1,6 @@ from __future__ import absolute_import -from .packages.six.moves.http_client import ( - IncompleteRead as httplib_IncompleteRead -) +from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead + # Base Exceptions @@ -17,6 +16,7 @@ class HTTPWarning(Warning): class PoolError(HTTPError): "Base exception for errors caused within a pool." + def __init__(self, pool, message): self.pool = pool HTTPError.__init__(self, "%s: %s" % (pool, message)) @@ -28,6 +28,7 @@ class PoolError(HTTPError): class RequestError(PoolError): "Base exception for PoolErrors that have associated URLs." + def __init__(self, pool, url, message): self.url = url PoolError.__init__(self, pool, message) @@ -44,7 +45,10 @@ class SSLError(HTTPError): class ProxyError(HTTPError): "Raised when the connection to a proxy fails." 
- pass + + def __init__(self, message, error, *args): + super(ProxyError, self).__init__(message, error, *args) + self.original_error = error class DecodeError(HTTPError): @@ -63,6 +67,7 @@ ConnectionError = ProtocolError # Leaf Exceptions + class MaxRetryError(RequestError): """Raised when the maximum number of retries is exceeded. @@ -76,8 +81,7 @@ class MaxRetryError(RequestError): def __init__(self, pool, url, reason=None): self.reason = reason - message = "Max retries exceeded with url: %s (Caused by %r)" % ( - url, reason) + message = "Max retries exceeded with url: %s (Caused by %r)" % (url, reason) RequestError.__init__(self, pool, url, message) @@ -93,6 +97,7 @@ class HostChangedError(RequestError): class TimeoutStateError(HTTPError): """ Raised when passing an invalid state to a timeout """ + pass @@ -102,6 +107,7 @@ class TimeoutError(HTTPError): Catching this error will catch both :exc:`ReadTimeoutErrors <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`. """ + pass @@ -149,8 +155,8 @@ class LocationParseError(LocationValueError): class ResponseError(HTTPError): "Used as a container for an error reason supplied in a MaxRetryError." - GENERIC_ERROR = 'too many error responses' - SPECIFIC_ERROR = 'too many {status_code} error responses' + GENERIC_ERROR = "too many error responses" + SPECIFIC_ERROR = "too many {status_code} error responses" class SecurityWarning(HTTPWarning): @@ -188,6 +194,21 @@ class DependencyWarning(HTTPWarning): Warned when an attempt is made to import a module with missing optional dependencies. """ + + pass + + +class InvalidProxyConfigurationWarning(HTTPWarning): + """ + Warned when using an HTTPS proxy and an HTTPS URL. Currently + urllib3 doesn't support HTTPS proxies and the proxy will be + contacted via HTTP instead. This warning can be fixed by + changing your HTTPS proxy URL into an HTTP proxy URL. 
+ + If you encounter this warning read this: + https://github.com/urllib3/urllib3/issues/1850 + """ + pass @@ -201,6 +222,7 @@ class BodyNotHttplibCompatible(HTTPError): Body should be httplib.HTTPResponse like (have an fp attribute which returns raw chunks) for read_chunked(). """ + pass @@ -212,12 +234,15 @@ class IncompleteRead(HTTPError, httplib_IncompleteRead): for `partial` to avoid creating large objects on streamed reads. """ + def __init__(self, partial, expected): super(IncompleteRead, self).__init__(partial, expected) def __repr__(self): - return ('IncompleteRead(%i bytes read, ' - '%i more expected)' % (self.partial, self.expected)) + return "IncompleteRead(%i bytes read, %i more expected)" % ( + self.partial, + self.expected, + ) class InvalidHeader(HTTPError): @@ -236,8 +261,9 @@ class ProxySchemeUnknown(AssertionError, ValueError): class HeaderParsingError(HTTPError): "Raised by assert_header_parsing, but we convert it to a log.warning statement." + def __init__(self, defects, unparsed_data): - message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data) + message = "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data) super(HeaderParsingError, self).__init__(message) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/fields.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/fields.py index 37fe64a3..8715b220 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/fields.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/fields.py @@ -1,11 +1,12 @@ from __future__ import absolute_import import email.utils import mimetypes +import re from .packages import six -def guess_content_type(filename, default='application/octet-stream'): +def guess_content_type(filename, default="application/octet-stream"): """ Guess the "Content-Type" of a file. 
@@ -19,57 +20,143 @@ def guess_content_type(filename, default='application/octet-stream'): return default -def format_header_param(name, value): +def format_header_param_rfc2231(name, value): """ - Helper function to format and quote a single header parameter. + Helper function to format and quote a single header parameter using the + strategy defined in RFC 2231. Particularly useful for header parameters which might contain - non-ASCII values, like file names. This follows RFC 2231, as - suggested by RFC 2388 Section 4.4. + non-ASCII values, like file names. This follows RFC 2388 Section 4.4. :param name: The name of the parameter, a string expected to be ASCII only. :param value: - The value of the parameter, provided as a unicode string. + The value of the parameter, provided as ``bytes`` or `str``. + :ret: + An RFC-2231-formatted unicode string. """ + if isinstance(value, six.binary_type): + value = value.decode("utf-8") + if not any(ch in value for ch in '"\\\r\n'): - result = '%s="%s"' % (name, value) + result = u'%s="%s"' % (name, value) try: - result.encode('ascii') + result.encode("ascii") except (UnicodeEncodeError, UnicodeDecodeError): pass else: return result - if not six.PY3 and isinstance(value, six.text_type): # Python 2: - value = value.encode('utf-8') - value = email.utils.encode_rfc2231(value, 'utf-8') - value = '%s*=%s' % (name, value) + + if six.PY2: # Python 2: + value = value.encode("utf-8") + + # encode_rfc2231 accepts an encoded string and returns an ascii-encoded + # string in Python 2 but accepts and returns unicode strings in Python 3 + value = email.utils.encode_rfc2231(value, "utf-8") + value = "%s*=%s" % (name, value) + + if six.PY2: # Python 2: + value = value.decode("utf-8") + return value +_HTML5_REPLACEMENTS = { + u"\u0022": u"%22", + # Replace "\" with "\\". + u"\u005C": u"\u005C\u005C", + u"\u005C": u"\u005C\u005C", +} + +# All control characters from 0x00 to 0x1F *except* 0x1B. 
+_HTML5_REPLACEMENTS.update( + { + six.unichr(cc): u"%{:02X}".format(cc) + for cc in range(0x00, 0x1F + 1) + if cc not in (0x1B,) + } +) + + +def _replace_multiple(value, needles_and_replacements): + def replacer(match): + return needles_and_replacements[match.group(0)] + + pattern = re.compile( + r"|".join([re.escape(needle) for needle in needles_and_replacements.keys()]) + ) + + result = pattern.sub(replacer, value) + + return result + + +def format_header_param_html5(name, value): + """ + Helper function to format and quote a single header parameter using the + HTML5 strategy. + + Particularly useful for header parameters which might contain + non-ASCII values, like file names. This follows the `HTML5 Working Draft + Section 4.10.22.7`_ and matches the behavior of curl and modern browsers. + + .. _HTML5 Working Draft Section 4.10.22.7: + https://w3c.github.io/html/sec-forms.html#multipart-form-data + + :param name: + The name of the parameter, a string expected to be ASCII only. + :param value: + The value of the parameter, provided as ``bytes`` or `str``. + :ret: + A unicode string, stripped of troublesome characters. + """ + if isinstance(value, six.binary_type): + value = value.decode("utf-8") + + value = _replace_multiple(value, _HTML5_REPLACEMENTS) + + return u'%s="%s"' % (name, value) + + +# For backwards-compatibility. +format_header_param = format_header_param_html5 + + class RequestField(object): """ A data container for request body parameters. :param name: - The name of this request field. + The name of this request field. Must be unicode. :param data: The data/value body. :param filename: - An optional filename of the request field. + An optional filename of the request field. Must be unicode. :param headers: An optional dict-like object of headers to initially use for the field. + :param header_formatter: + An optional callable that is used to encode and format the headers. By + default, this is :func:`format_header_param_html5`. 
""" - def __init__(self, name, data, filename=None, headers=None): + + def __init__( + self, + name, + data, + filename=None, + headers=None, + header_formatter=format_header_param_html5, + ): self._name = name self._filename = filename self.data = data self.headers = {} if headers: self.headers = dict(headers) + self.header_formatter = header_formatter @classmethod - def from_tuples(cls, fieldname, value): + def from_tuples(cls, fieldname, value, header_formatter=format_header_param_html5): """ A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. @@ -97,21 +184,25 @@ class RequestField(object): content_type = None data = value - request_param = cls(fieldname, data, filename=filename) + request_param = cls( + fieldname, data, filename=filename, header_formatter=header_formatter + ) request_param.make_multipart(content_type=content_type) return request_param def _render_part(self, name, value): """ - Overridable helper function to format a single header parameter. + Overridable helper function to format a single header parameter. By + default, this calls ``self.header_formatter``. :param name: The name of the parameter, a string expected to be ASCII only. :param value: The value of the parameter, provided as a unicode string. 
""" - return format_header_param(name, value) + + return self.header_formatter(name, value) def _render_parts(self, header_parts): """ @@ -133,7 +224,7 @@ class RequestField(object): if value is not None: parts.append(self._render_part(name, value)) - return '; '.join(parts) + return u"; ".join(parts) def render_headers(self): """ @@ -141,21 +232,22 @@ class RequestField(object): """ lines = [] - sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location'] + sort_keys = ["Content-Disposition", "Content-Type", "Content-Location"] for sort_key in sort_keys: if self.headers.get(sort_key, False): - lines.append('%s: %s' % (sort_key, self.headers[sort_key])) + lines.append(u"%s: %s" % (sort_key, self.headers[sort_key])) for header_name, header_value in self.headers.items(): if header_name not in sort_keys: if header_value: - lines.append('%s: %s' % (header_name, header_value)) + lines.append(u"%s: %s" % (header_name, header_value)) - lines.append('\r\n') - return '\r\n'.join(lines) + lines.append(u"\r\n") + return u"\r\n".join(lines) - def make_multipart(self, content_disposition=None, content_type=None, - content_location=None): + def make_multipart( + self, content_disposition=None, content_type=None, content_location=None + ): """ Makes this request field into a multipart request field. @@ -168,11 +260,14 @@ class RequestField(object): The 'Content-Location' of the request body. 
""" - self.headers['Content-Disposition'] = content_disposition or 'form-data' - self.headers['Content-Disposition'] += '; '.join([ - '', self._render_parts( - (('name', self._name), ('filename', self._filename)) - ) - ]) - self.headers['Content-Type'] = content_type - self.headers['Content-Location'] = content_location + self.headers["Content-Disposition"] = content_disposition or u"form-data" + self.headers["Content-Disposition"] += u"; ".join( + [ + u"", + self._render_parts( + ((u"name", self._name), (u"filename", self._filename)) + ), + ] + ) + self.headers["Content-Type"] = content_type + self.headers["Content-Location"] = content_location diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/filepost.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/filepost.py index 78f1e19b..b7b00992 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/filepost.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/filepost.py @@ -9,7 +9,7 @@ from .packages import six from .packages.six import b from .fields import RequestField -writer = codecs.lookup('utf-8')[3] +writer = codecs.lookup("utf-8")[3] def choose_boundary(): @@ -17,8 +17,8 @@ def choose_boundary(): Our embarrassingly-simple replacement for mimetools.choose_boundary. 
""" boundary = binascii.hexlify(os.urandom(16)) - if six.PY3: - boundary = boundary.decode('ascii') + if not six.PY2: + boundary = boundary.decode("ascii") return boundary @@ -76,7 +76,7 @@ def encode_multipart_formdata(fields, boundary=None): boundary = choose_boundary() for field in iter_field_objects(fields): - body.write(b('--%s\r\n' % (boundary))) + body.write(b("--%s\r\n" % (boundary))) writer(body).write(field.render_headers()) data = field.data @@ -89,10 +89,10 @@ def encode_multipart_formdata(fields, boundary=None): else: body.write(data) - body.write(b'\r\n') + body.write(b"\r\n") - body.write(b('--%s--\r\n' % (boundary))) + body.write(b("--%s--\r\n" % (boundary))) - content_type = str('multipart/form-data; boundary=%s' % boundary) + content_type = str("multipart/form-data; boundary=%s" % boundary) return body.getvalue(), content_type diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/packages/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/packages/__init__.py index 170e974c..fce4caa6 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/packages/__init__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/packages/__init__.py @@ -2,4 +2,4 @@ from __future__ import absolute_import from . 
import ssl_match_hostname -__all__ = ('ssl_match_hostname', ) +__all__ = ("ssl_match_hostname",) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py index 740db377..a3156a69 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py @@ -11,15 +11,14 @@ import io from socket import SocketIO -def backport_makefile(self, mode="r", buffering=None, encoding=None, - errors=None, newline=None): +def backport_makefile( + self, mode="r", buffering=None, encoding=None, errors=None, newline=None +): """ Backport of ``socket.makefile`` from Python 3.5. """ if not set(mode) <= {"r", "w", "b"}: - raise ValueError( - "invalid mode %r (only r, w, b allowed)" % (mode,) - ) + raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,)) writing = "w" in mode reading = "r" in mode or not writing assert reading or writing diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/packages/six.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/packages/six.py index 190c0239..31442409 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/packages/six.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/packages/six.py @@ -1,6 +1,4 @@ -"""Utilities for writing code that runs on Python 2 and 3""" - -# Copyright (c) 2010-2015 Benjamin Peterson +# Copyright (c) 2010-2019 Benjamin Peterson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -20,6 +18,8 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # 
SOFTWARE. +"""Utilities for writing code that runs on Python 2 and 3""" + from __future__ import absolute_import import functools @@ -29,7 +29,7 @@ import sys import types __author__ = "Benjamin Peterson <benjamin@python.org>" -__version__ = "1.10.0" +__version__ = "1.12.0" # Useful for very coarse version differentiation. @@ -38,15 +38,15 @@ PY3 = sys.version_info[0] == 3 PY34 = sys.version_info[0:2] >= (3, 4) if PY3: - string_types = str, - integer_types = int, - class_types = type, + string_types = (str,) + integer_types = (int,) + class_types = (type,) text_type = str binary_type = bytes MAXSIZE = sys.maxsize else: - string_types = basestring, + string_types = (basestring,) integer_types = (int, long) class_types = (type, types.ClassType) text_type = unicode @@ -58,9 +58,9 @@ else: else: # It's possible to have sizeof(long) != sizeof(Py_ssize_t). class X(object): - def __len__(self): return 1 << 31 + try: len(X()) except OverflowError: @@ -84,7 +84,6 @@ def _import_module(name): class _LazyDescr(object): - def __init__(self, name): self.name = name @@ -101,7 +100,6 @@ class _LazyDescr(object): class MovedModule(_LazyDescr): - def __init__(self, name, old, new=None): super(MovedModule, self).__init__(name) if PY3: @@ -122,7 +120,6 @@ class MovedModule(_LazyDescr): class _LazyModule(types.ModuleType): - def __init__(self, name): super(_LazyModule, self).__init__(name) self.__doc__ = self.__class__.__doc__ @@ -137,7 +134,6 @@ class _LazyModule(types.ModuleType): class MovedAttribute(_LazyDescr): - def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): super(MovedAttribute, self).__init__(name) if PY3: @@ -221,28 +217,36 @@ class _SixMetaPathImporter(object): Required, if is_package is implemented""" self.__get_module(fullname) # eventually raises ImportError return None + get_source = get_code # same as get_code + _importer = _SixMetaPathImporter(__name__) class _MovedItems(_LazyModule): """Lazy loading of moved objects""" + __path__ = [] # mark 
as package _moved_attributes = [ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), - MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute( + "filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse" + ), MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), MovedAttribute("intern", "__builtin__", "sys"), MovedAttribute("map", "itertools", "builtins", "imap", "map"), MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("getoutput", "commands", "subprocess"), MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute( + "reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload" + ), MovedAttribute("reduce", "__builtin__", "functools"), MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), MovedAttribute("StringIO", "StringIO", "io"), @@ -251,7 +255,9 @@ _moved_attributes = [ MovedAttribute("UserString", "UserString", "collections"), MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), - MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedAttribute( + "zip_longest", "itertools", "itertools", "izip_longest", "zip_longest" + ), MovedModule("builtins", "__builtin__"), MovedModule("configparser", "ConfigParser"), MovedModule("copyreg", "copy_reg"), @@ -262,10 +268,13 @@ _moved_attributes = [ MovedModule("html_entities", "htmlentitydefs", "html.entities"), MovedModule("html_parser", "HTMLParser", "html.parser"), MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("email_mime_image", 
"email.MIMEImage", "email.mime.image"), MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), - MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule( + "email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart" + ), MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), - MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), @@ -283,15 +292,12 @@ _moved_attributes = [ MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), - MovedModule("tkinter_colorchooser", "tkColorChooser", - "tkinter.colorchooser"), - MovedModule("tkinter_commondialog", "tkCommonDialog", - "tkinter.commondialog"), + MovedModule("tkinter_colorchooser", "tkColorChooser", "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog"), MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), MovedModule("tkinter_font", "tkFont", "tkinter.font"), MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), - MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", - "tkinter.simpledialog"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"), MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), @@ -301,9 +307,7 @@ _moved_attributes = [ ] # Add windows specific modules. 
if sys.platform == "win32": - _moved_attributes += [ - MovedModule("winreg", "_winreg"), - ] + _moved_attributes += [MovedModule("winreg", "_winreg")] for attr in _moved_attributes: setattr(_MovedItems, attr.name, attr) @@ -337,10 +341,14 @@ _urllib_parse_moved_attributes = [ MovedAttribute("quote_plus", "urllib", "urllib.parse"), MovedAttribute("unquote", "urllib", "urllib.parse"), MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute( + "unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes" + ), MovedAttribute("urlencode", "urllib", "urllib.parse"), MovedAttribute("splitquery", "urllib", "urllib.parse"), MovedAttribute("splittag", "urllib", "urllib.parse"), MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("splitvalue", "urllib", "urllib.parse"), MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), MovedAttribute("uses_params", "urlparse", "urllib.parse"), @@ -353,8 +361,11 @@ del attr Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes -_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), - "moves.urllib_parse", "moves.urllib.parse") +_importer._add_module( + Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", + "moves.urllib.parse", +) class Module_six_moves_urllib_error(_LazyModule): @@ -373,8 +384,11 @@ del attr Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes -_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), - "moves.urllib_error", "moves.urllib.error") +_importer._add_module( + Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", + "moves.urllib.error", +) class Module_six_moves_urllib_request(_LazyModule): @@ -416,6 +430,8 @@ _urllib_request_moved_attributes = [ MovedAttribute("URLopener", "urllib", "urllib.request"), 
MovedAttribute("FancyURLopener", "urllib", "urllib.request"), MovedAttribute("proxy_bypass", "urllib", "urllib.request"), + MovedAttribute("parse_http_list", "urllib2", "urllib.request"), + MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), ] for attr in _urllib_request_moved_attributes: setattr(Module_six_moves_urllib_request, attr.name, attr) @@ -423,8 +439,11 @@ del attr Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes -_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), - "moves.urllib_request", "moves.urllib.request") +_importer._add_module( + Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", + "moves.urllib.request", +) class Module_six_moves_urllib_response(_LazyModule): @@ -444,8 +463,11 @@ del attr Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes -_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), - "moves.urllib_response", "moves.urllib.response") +_importer._add_module( + Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", + "moves.urllib.response", +) class Module_six_moves_urllib_robotparser(_LazyModule): @@ -454,21 +476,27 @@ class Module_six_moves_urllib_robotparser(_LazyModule): _urllib_robotparser_moved_attributes = [ - MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser") ] for attr in _urllib_robotparser_moved_attributes: setattr(Module_six_moves_urllib_robotparser, attr.name, attr) del attr -Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes +Module_six_moves_urllib_robotparser._moved_attributes = ( + _urllib_robotparser_moved_attributes +) -_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), - 
"moves.urllib_robotparser", "moves.urllib.robotparser") +_importer._add_module( + Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", + "moves.urllib.robotparser", +) class Module_six_moves_urllib(types.ModuleType): """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package parse = _importer._get_module("moves.urllib_parse") error = _importer._get_module("moves.urllib_error") @@ -477,10 +505,12 @@ class Module_six_moves_urllib(types.ModuleType): robotparser = _importer._get_module("moves.urllib_robotparser") def __dir__(self): - return ['parse', 'error', 'request', 'response', 'robotparser'] + return ["parse", "error", "request", "response", "robotparser"] + -_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), - "moves.urllib") +_importer._add_module( + Module_six_moves_urllib(__name__ + ".moves.urllib"), "moves.urllib" +) def add_move(move): @@ -520,19 +550,24 @@ else: try: advance_iterator = next except NameError: + def advance_iterator(it): return it.next() + + next = advance_iterator try: callable = callable except NameError: + def callable(obj): return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) if PY3: + def get_unbound_function(unbound): return unbound @@ -543,6 +578,7 @@ if PY3: Iterator = object else: + def get_unbound_function(unbound): return unbound.im_func @@ -553,13 +589,13 @@ else: return types.MethodType(func, None, cls) class Iterator(object): - def next(self): return type(self).__next__(self) callable = callable -_add_doc(get_unbound_function, - """Get the function out of a possibly unbound function""") +_add_doc( + get_unbound_function, """Get the function out of a possibly unbound function""" +) get_method_function = operator.attrgetter(_meth_func) @@ -571,6 +607,7 @@ get_function_globals = operator.attrgetter(_func_globals) if PY3: + def iterkeys(d, **kw): return iter(d.keys(**kw)) @@ -589,6 
+626,7 @@ if PY3: viewitems = operator.methodcaller("items") else: + def iterkeys(d, **kw): return d.iterkeys(**kw) @@ -609,28 +647,33 @@ else: _add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") _add_doc(itervalues, "Return an iterator over the values of a dictionary.") -_add_doc(iteritems, - "Return an iterator over the (key, value) pairs of a dictionary.") -_add_doc(iterlists, - "Return an iterator over the (key, [values]) pairs of a dictionary.") +_add_doc(iteritems, "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc( + iterlists, "Return an iterator over the (key, [values]) pairs of a dictionary." +) if PY3: + def b(s): return s.encode("latin-1") def u(s): return s + unichr = chr import struct + int2byte = struct.Struct(">B").pack del struct byte2int = operator.itemgetter(0) indexbytes = operator.getitem iterbytes = iter import io + StringIO = io.StringIO BytesIO = io.BytesIO + del io _assertCountEqual = "assertCountEqual" if sys.version_info[1] <= 1: _assertRaisesRegex = "assertRaisesRegexp" @@ -639,12 +682,15 @@ if PY3: _assertRaisesRegex = "assertRaisesRegex" _assertRegex = "assertRegex" else: + def b(s): return s + # Workaround for standalone backslash def u(s): - return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + return unicode(s.replace(r"\\", r"\\\\"), "unicode_escape") + unichr = unichr int2byte = chr @@ -653,8 +699,10 @@ else: def indexbytes(buf, i): return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) import StringIO + StringIO = BytesIO = StringIO.StringIO _assertCountEqual = "assertItemsEqual" _assertRaisesRegex = "assertRaisesRegexp" @@ -679,13 +727,19 @@ if PY3: exec_ = getattr(moves.builtins, "exec") def reraise(tp, value, tb=None): - if value is None: - value = tp() - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value + try: + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value 
+ finally: + value = None + tb = None + else: + def exec_(_code_, _globs_=None, _locs_=None): """Execute code in a namespace.""" if _globs_ is None: @@ -698,28 +752,45 @@ else: _locs_ = _globs_ exec("""exec _code_ in _globs_, _locs_""") - exec_("""def reraise(tp, value, tb=None): - raise tp, value, tb -""") + exec_( + """def reraise(tp, value, tb=None): + try: + raise tp, value, tb + finally: + tb = None +""" + ) if sys.version_info[:2] == (3, 2): - exec_("""def raise_from(value, from_value): - if from_value is None: - raise value - raise value from from_value -""") + exec_( + """def raise_from(value, from_value): + try: + if from_value is None: + raise value + raise value from from_value + finally: + value = None +""" + ) elif sys.version_info[:2] > (3, 2): - exec_("""def raise_from(value, from_value): - raise value from from_value -""") + exec_( + """def raise_from(value, from_value): + try: + raise value from from_value + finally: + value = None +""" + ) else: + def raise_from(value, from_value): raise value print_ = getattr(moves.builtins, "print", None) if print_ is None: + def print_(*args, **kwargs): """The new-style print function for Python 2.4 and 2.5.""" fp = kwargs.pop("file", sys.stdout) @@ -730,14 +801,17 @@ if print_ is None: if not isinstance(data, basestring): data = str(data) # If the file has an encoding, encode unicode with it. 
- if (isinstance(fp, file) and - isinstance(data, unicode) and - fp.encoding is not None): + if ( + isinstance(fp, file) + and isinstance(data, unicode) + and fp.encoding is not None + ): errors = getattr(fp, "errors", None) if errors is None: errors = "strict" data = data.encode(fp.encoding, errors) fp.write(data) + want_unicode = False sep = kwargs.pop("sep", None) if sep is not None: @@ -773,6 +847,8 @@ if print_ is None: write(sep) write(arg) write(end) + + if sys.version_info[:2] < (3, 3): _print = print_ @@ -783,16 +859,24 @@ if sys.version_info[:2] < (3, 3): if flush and fp is not None: fp.flush() + _add_doc(reraise, """Reraise an exception.""") if sys.version_info[0:2] < (3, 4): - def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): + + def wraps( + wrapped, + assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES, + ): def wrapper(f): f = functools.wraps(wrapped, assigned, updated)(f) f.__wrapped__ = wrapped return f + return wrapper + + else: wraps = functools.wraps @@ -802,29 +886,95 @@ def with_metaclass(meta, *bases): # This requires a bit of explanation: the basic idea is to make a dummy # metaclass for one level of class instantiation that replaces itself with # the actual metaclass. 
- class metaclass(meta): - + class metaclass(type): def __new__(cls, name, this_bases, d): return meta(name, bases, d) - return type.__new__(metaclass, 'temporary_class', (), {}) + + @classmethod + def __prepare__(cls, name, this_bases): + return meta.__prepare__(name, bases) + + return type.__new__(metaclass, "temporary_class", (), {}) def add_metaclass(metaclass): """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): orig_vars = cls.__dict__.copy() - slots = orig_vars.get('__slots__') + slots = orig_vars.get("__slots__") if slots is not None: if isinstance(slots, str): slots = [slots] for slots_var in slots: orig_vars.pop(slots_var) - orig_vars.pop('__dict__', None) - orig_vars.pop('__weakref__', None) + orig_vars.pop("__dict__", None) + orig_vars.pop("__weakref__", None) + if hasattr(cls, "__qualname__"): + orig_vars["__qualname__"] = cls.__qualname__ return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper +def ensure_binary(s, encoding="utf-8", errors="strict"): + """Coerce **s** to six.binary_type. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> encoded to `bytes` + - `bytes` -> `bytes` + """ + if isinstance(s, text_type): + return s.encode(encoding, errors) + elif isinstance(s, binary_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + +def ensure_str(s, encoding="utf-8", errors="strict"): + """Coerce *s* to `str`. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if not isinstance(s, (text_type, binary_type)): + raise TypeError("not expecting type '%s'" % type(s)) + if PY2 and isinstance(s, text_type): + s = s.encode(encoding, errors) + elif PY3 and isinstance(s, binary_type): + s = s.decode(encoding, errors) + return s + + +def ensure_text(s, encoding="utf-8", errors="strict"): + """Coerce *s* to six.text_type. 
+ + For Python 2: + - `unicode` -> `unicode` + - `str` -> `unicode` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if isinstance(s, binary_type): + return s.decode(encoding, errors) + elif isinstance(s, text_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + def python_2_unicode_compatible(klass): """ A decorator that defines __unicode__ and __str__ methods under Python 2. @@ -834,12 +984,13 @@ def python_2_unicode_compatible(klass): returning text and apply this decorator to the class. """ if PY2: - if '__str__' not in klass.__dict__: - raise ValueError("@python_2_unicode_compatible cannot be applied " - "to %s because it doesn't define __str__()." % - klass.__name__) + if "__str__" not in klass.__dict__: + raise ValueError( + "@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % klass.__name__ + ) klass.__unicode__ = klass.__str__ - klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + klass.__str__ = lambda self: self.__unicode__().encode("utf-8") return klass @@ -859,8 +1010,10 @@ if sys.meta_path: # be floating around. Therefore, we can't use isinstance() to check for # the six meta path importer, since the other six instance will have # inserted an importer with different class. 
- if (type(importer).__name__ == "_SixMetaPathImporter" and - importer.name == __name__): + if ( + type(importer).__name__ == "_SixMetaPathImporter" + and importer.name == __name__ + ): del sys.meta_path[i] break del i, importer diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py index d6594eb2..75b6bb1c 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py @@ -16,4 +16,4 @@ except ImportError: from ._implementation import CertificateError, match_hostname # Not needed, but documenting what we provide. -__all__ = ('CertificateError', 'match_hostname') +__all__ = ("CertificateError", "match_hostname") diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py index 970cf653..5831c2e0 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py @@ -15,7 +15,7 @@ try: except ImportError: ipaddress = None -__version__ = '3.5.0.1' +__version__ = "3.5.0.1" class CertificateError(ValueError): @@ -33,18 +33,19 @@ def _dnsname_match(dn, hostname, max_wildcards=1): # Ported from python3-syntax: # leftmost, *remainder = dn.split(r'.') - parts = dn.split(r'.') + parts = dn.split(r".") leftmost = parts[0] remainder = parts[1:] - wildcards = leftmost.count('*') + wildcards = leftmost.count("*") if wildcards > max_wildcards: # 
Issue #17980: avoid denials of service by refusing more # than one wildcard per fragment. A survey of established # policy among SSL implementations showed it to be a # reasonable choice. raise CertificateError( - "too many wildcards in certificate DNS name: " + repr(dn)) + "too many wildcards in certificate DNS name: " + repr(dn) + ) # speed up common case w/o wildcards if not wildcards: @@ -53,11 +54,11 @@ def _dnsname_match(dn, hostname, max_wildcards=1): # RFC 6125, section 6.4.3, subitem 1. # The client SHOULD NOT attempt to match a presented identifier in which # the wildcard character comprises a label other than the left-most label. - if leftmost == '*': + if leftmost == "*": # When '*' is a fragment by itself, it matches a non-empty dotless # fragment. - pats.append('[^.]+') - elif leftmost.startswith('xn--') or hostname.startswith('xn--'): + pats.append("[^.]+") + elif leftmost.startswith("xn--") or hostname.startswith("xn--"): # RFC 6125, section 6.4.3, subitem 3. # The client SHOULD NOT attempt to match a presented identifier # where the wildcard character is embedded within an A-label or @@ -65,21 +66,22 @@ def _dnsname_match(dn, hostname, max_wildcards=1): pats.append(re.escape(leftmost)) else: # Otherwise, '*' matches any dotless string, e.g. www* - pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) + pats.append(re.escape(leftmost).replace(r"\*", "[^.]*")) # add the remaining fragments, ignore any wildcards for frag in remainder: pats.append(re.escape(frag)) - pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) + pat = re.compile(r"\A" + r"\.".join(pats) + r"\Z", re.IGNORECASE) return pat.match(hostname) def _to_unicode(obj): if isinstance(obj, str) and sys.version_info < (3,): - obj = unicode(obj, encoding='ascii', errors='strict') + obj = unicode(obj, encoding="ascii", errors="strict") return obj + def _ipaddress_match(ipname, host_ip): """Exact matching of IP addresses. 
@@ -101,9 +103,11 @@ def match_hostname(cert, hostname): returns nothing. """ if not cert: - raise ValueError("empty or no certificate, match_hostname needs a " - "SSL socket or SSL context with either " - "CERT_OPTIONAL or CERT_REQUIRED") + raise ValueError( + "empty or no certificate, match_hostname needs a " + "SSL socket or SSL context with either " + "CERT_OPTIONAL or CERT_REQUIRED" + ) try: # Divergence from upstream: ipaddress can't handle byte str host_ip = ipaddress.ip_address(_to_unicode(hostname)) @@ -122,35 +126,35 @@ def match_hostname(cert, hostname): else: raise dnsnames = [] - san = cert.get('subjectAltName', ()) + san = cert.get("subjectAltName", ()) for key, value in san: - if key == 'DNS': + if key == "DNS": if host_ip is None and _dnsname_match(value, hostname): return dnsnames.append(value) - elif key == 'IP Address': + elif key == "IP Address": if host_ip is not None and _ipaddress_match(value, host_ip): return dnsnames.append(value) if not dnsnames: # The subject is only checked when there is no dNSName entry # in subjectAltName - for sub in cert.get('subject', ()): + for sub in cert.get("subject", ()): for key, value in sub: # XXX according to RFC 2818, the most specific Common Name # must be used. 
- if key == 'commonName': + if key == "commonName": if _dnsname_match(value, hostname): return dnsnames.append(value) if len(dnsnames) > 1: - raise CertificateError("hostname %r " - "doesn't match either of %s" - % (hostname, ', '.join(map(repr, dnsnames)))) + raise CertificateError( + "hostname %r " + "doesn't match either of %s" % (hostname, ", ".join(map(repr, dnsnames))) + ) elif len(dnsnames) == 1: - raise CertificateError("hostname %r " - "doesn't match %r" - % (hostname, dnsnames[0])) + raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0])) else: - raise CertificateError("no appropriate commonName or " - "subjectAltName fields were found") + raise CertificateError( + "no appropriate commonName or subjectAltName fields were found" + ) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/poolmanager.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/poolmanager.py index fe5491cf..e2bd3bd8 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/poolmanager.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/poolmanager.py @@ -2,57 +2,73 @@ from __future__ import absolute_import import collections import functools import logging +import warnings from ._collections import RecentlyUsedContainer from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool from .connectionpool import port_by_scheme -from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown +from .exceptions import ( + LocationValueError, + MaxRetryError, + ProxySchemeUnknown, + InvalidProxyConfigurationWarning, +) +from .packages import six from .packages.six.moves.urllib.parse import urljoin from .request import RequestMethods from .util.url import parse_url from .util.retry import Retry -__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url'] +__all__ = ["PoolManager", "ProxyManager", "proxy_from_url"] log = 
logging.getLogger(__name__) -SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs', - 'ssl_version', 'ca_cert_dir', 'ssl_context') +SSL_KEYWORDS = ( + "key_file", + "cert_file", + "cert_reqs", + "ca_certs", + "ssl_version", + "ca_cert_dir", + "ssl_context", + "key_password", +) # All known keyword arguments that could be provided to the pool manager, its # pools, or the underlying connections. This is used to construct a pool key. _key_fields = ( - 'key_scheme', # str - 'key_host', # str - 'key_port', # int - 'key_timeout', # int or float or Timeout - 'key_retries', # int or Retry - 'key_strict', # bool - 'key_block', # bool - 'key_source_address', # str - 'key_key_file', # str - 'key_cert_file', # str - 'key_cert_reqs', # str - 'key_ca_certs', # str - 'key_ssl_version', # str - 'key_ca_cert_dir', # str - 'key_ssl_context', # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext - 'key_maxsize', # int - 'key_headers', # dict - 'key__proxy', # parsed proxy url - 'key__proxy_headers', # dict - 'key_socket_options', # list of (level (int), optname (int), value (int or str)) tuples - 'key__socks_options', # dict - 'key_assert_hostname', # bool or string - 'key_assert_fingerprint', # str - 'key_server_hostname', #str + "key_scheme", # str + "key_host", # str + "key_port", # int + "key_timeout", # int or float or Timeout + "key_retries", # int or Retry + "key_strict", # bool + "key_block", # bool + "key_source_address", # str + "key_key_file", # str + "key_key_password", # str + "key_cert_file", # str + "key_cert_reqs", # str + "key_ca_certs", # str + "key_ssl_version", # str + "key_ca_cert_dir", # str + "key_ssl_context", # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext + "key_maxsize", # int + "key_headers", # dict + "key__proxy", # parsed proxy url + "key__proxy_headers", # dict + "key_socket_options", # list of (level (int), optname (int), value (int or str)) tuples + "key__socks_options", # dict + "key_assert_hostname", # bool or string + 
"key_assert_fingerprint", # str + "key_server_hostname", # str ) #: The namedtuple class used to construct keys for the connection pool. #: All custom key schemes should include the fields in this key at a minimum. -PoolKey = collections.namedtuple('PoolKey', _key_fields) +PoolKey = collections.namedtuple("PoolKey", _key_fields) def _default_key_normalizer(key_class, request_context): @@ -77,24 +93,24 @@ def _default_key_normalizer(key_class, request_context): """ # Since we mutate the dictionary, make a copy first context = request_context.copy() - context['scheme'] = context['scheme'].lower() - context['host'] = context['host'].lower() + context["scheme"] = context["scheme"].lower() + context["host"] = context["host"].lower() # These are both dictionaries and need to be transformed into frozensets - for key in ('headers', '_proxy_headers', '_socks_options'): + for key in ("headers", "_proxy_headers", "_socks_options"): if key in context and context[key] is not None: context[key] = frozenset(context[key].items()) # The socket_options key may be a list and needs to be transformed into a # tuple. - socket_opts = context.get('socket_options') + socket_opts = context.get("socket_options") if socket_opts is not None: - context['socket_options'] = tuple(socket_opts) + context["socket_options"] = tuple(socket_opts) # Map the kwargs to the names in the namedtuple - this is necessary since # namedtuples can't have fields starting with '_'. for key in list(context.keys()): - context['key_' + key] = context.pop(key) + context["key_" + key] = context.pop(key) # Default to ``None`` for keys missing from the context for field in key_class._fields: @@ -109,14 +125,11 @@ def _default_key_normalizer(key_class, request_context): #: Each PoolManager makes a copy of this dictionary so they can be configured #: globally here, or individually on the instance. 
key_fn_by_scheme = { - 'http': functools.partial(_default_key_normalizer, PoolKey), - 'https': functools.partial(_default_key_normalizer, PoolKey), + "http": functools.partial(_default_key_normalizer, PoolKey), + "https": functools.partial(_default_key_normalizer, PoolKey), } -pool_classes_by_scheme = { - 'http': HTTPConnectionPool, - 'https': HTTPSConnectionPool, -} +pool_classes_by_scheme = {"http": HTTPConnectionPool, "https": HTTPSConnectionPool} class PoolManager(RequestMethods): @@ -152,8 +165,7 @@ class PoolManager(RequestMethods): def __init__(self, num_pools=10, headers=None, **connection_pool_kw): RequestMethods.__init__(self, headers) self.connection_pool_kw = connection_pool_kw - self.pools = RecentlyUsedContainer(num_pools, - dispose_func=lambda p: p.close()) + self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close()) # Locally set the pool classes and keys so other PoolManagers can # override them. @@ -186,10 +198,10 @@ class PoolManager(RequestMethods): # this function has historically only used the scheme, host, and port # in the positional args. When an API change is acceptable these can # be removed. - for key in ('scheme', 'host', 'port'): + for key in ("scheme", "host", "port"): request_context.pop(key, None) - if scheme == 'http': + if scheme == "http": for kw in SSL_KEYWORDS: request_context.pop(kw, None) @@ -204,7 +216,7 @@ class PoolManager(RequestMethods): """ self.pools.clear() - def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None): + def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None): """ Get a :class:`ConnectionPool` based on the host, port, and scheme. 
@@ -219,11 +231,11 @@ class PoolManager(RequestMethods): raise LocationValueError("No host specified.") request_context = self._merge_pool_kwargs(pool_kwargs) - request_context['scheme'] = scheme or 'http' + request_context["scheme"] = scheme or "http" if not port: - port = port_by_scheme.get(request_context['scheme'].lower(), 80) - request_context['port'] = port - request_context['host'] = host + port = port_by_scheme.get(request_context["scheme"].lower(), 80) + request_context["port"] = port + request_context["host"] = host return self.connection_from_context(request_context) @@ -234,7 +246,7 @@ class PoolManager(RequestMethods): ``request_context`` must at least contain the ``scheme`` key and its value must be a key in ``key_fn_by_scheme`` instance variable. """ - scheme = request_context['scheme'].lower() + scheme = request_context["scheme"].lower() pool_key_constructor = self.key_fn_by_scheme[scheme] pool_key = pool_key_constructor(request_context) @@ -256,9 +268,9 @@ class PoolManager(RequestMethods): return pool # Make a fresh ConnectionPool of the desired type - scheme = request_context['scheme'] - host = request_context['host'] - port = request_context['port'] + scheme = request_context["scheme"] + host = request_context["host"] + port = request_context["port"] pool = self._new_pool(scheme, host, port, request_context=request_context) self.pools[pool_key] = pool @@ -276,8 +288,9 @@ class PoolManager(RequestMethods): not used. 
""" u = parse_url(url) - return self.connection_from_host(u.host, port=u.port, scheme=u.scheme, - pool_kwargs=pool_kwargs) + return self.connection_from_host( + u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs + ) def _merge_pool_kwargs(self, override): """ @@ -311,11 +324,11 @@ class PoolManager(RequestMethods): u = parse_url(url) conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme) - kw['assert_same_host'] = False - kw['redirect'] = False + kw["assert_same_host"] = False + kw["redirect"] = False - if 'headers' not in kw: - kw['headers'] = self.headers.copy() + if "headers" not in kw: + kw["headers"] = self.headers.copy() if self.proxy is not None and u.scheme == "http": response = conn.urlopen(method, url, **kw) @@ -331,31 +344,37 @@ class PoolManager(RequestMethods): # RFC 7231, Section 6.4.4 if response.status == 303: - method = 'GET' + method = "GET" - retries = kw.get('retries') + retries = kw.get("retries") if not isinstance(retries, Retry): retries = Retry.from_int(retries, redirect=redirect) # Strip headers marked as unsafe to forward to the redirected location. # Check remove_headers_on_redirect to avoid a potential network call within # conn.is_same_host() which may use socket.gethostbyname() in the future. 
- if (retries.remove_headers_on_redirect - and not conn.is_same_host(redirect_location)): - for header in retries.remove_headers_on_redirect: - kw['headers'].pop(header, None) + if retries.remove_headers_on_redirect and not conn.is_same_host( + redirect_location + ): + headers = list(six.iterkeys(kw["headers"])) + for header in headers: + if header.lower() in retries.remove_headers_on_redirect: + kw["headers"].pop(header, None) try: retries = retries.increment(method, url, response=response, _pool=conn) except MaxRetryError: if retries.raise_on_redirect: + response.drain_conn() raise return response - kw['retries'] = retries - kw['redirect'] = redirect + kw["retries"] = retries + kw["redirect"] = redirect log.info("Redirecting %s -> %s", url, redirect_location) + + response.drain_conn() return self.urlopen(method, redirect_location, **kw) @@ -386,12 +405,21 @@ class ProxyManager(PoolManager): """ - def __init__(self, proxy_url, num_pools=10, headers=None, - proxy_headers=None, **connection_pool_kw): + def __init__( + self, + proxy_url, + num_pools=10, + headers=None, + proxy_headers=None, + **connection_pool_kw + ): if isinstance(proxy_url, HTTPConnectionPool): - proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host, - proxy_url.port) + proxy_url = "%s://%s:%i" % ( + proxy_url.scheme, + proxy_url.host, + proxy_url.port, + ) proxy = parse_url(proxy_url) if not proxy.port: port = port_by_scheme.get(proxy.scheme, 80) @@ -403,45 +431,59 @@ class ProxyManager(PoolManager): self.proxy = proxy self.proxy_headers = proxy_headers or {} - connection_pool_kw['_proxy'] = self.proxy - connection_pool_kw['_proxy_headers'] = self.proxy_headers + connection_pool_kw["_proxy"] = self.proxy + connection_pool_kw["_proxy_headers"] = self.proxy_headers - super(ProxyManager, self).__init__( - num_pools, headers, **connection_pool_kw) + super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw) - def connection_from_host(self, host, port=None, scheme='http', 
pool_kwargs=None): + def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None): if scheme == "https": return super(ProxyManager, self).connection_from_host( - host, port, scheme, pool_kwargs=pool_kwargs) + host, port, scheme, pool_kwargs=pool_kwargs + ) return super(ProxyManager, self).connection_from_host( - self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs) + self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs + ) def _set_proxy_headers(self, url, headers=None): """ Sets headers needed by proxies: specifically, the Accept and Host headers. Only sets headers not provided by the user. """ - headers_ = {'Accept': '*/*'} + headers_ = {"Accept": "*/*"} netloc = parse_url(url).netloc if netloc: - headers_['Host'] = netloc + headers_["Host"] = netloc if headers: headers_.update(headers) return headers_ + def _validate_proxy_scheme_url_selection(self, url_scheme): + if url_scheme == "https" and self.proxy.scheme == "https": + warnings.warn( + "Your proxy configuration specified an HTTPS scheme for the proxy. " + "Are you sure you want to use HTTPS to contact the proxy? " + "This most likely indicates an error in your configuration. " + "Read this issue for more info: " + "https://github.com/urllib3/urllib3/issues/1850", + InvalidProxyConfigurationWarning, + stacklevel=3, + ) + def urlopen(self, method, url, redirect=True, **kw): "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute." u = parse_url(url) + self._validate_proxy_scheme_url_selection(u.scheme) if u.scheme == "http": # For proxied HTTPS requests, httplib sets the necessary headers # on the CONNECT to the proxy. For HTTP, we'll definitely # need to set 'Host' at the very least. 
- headers = kw.get('headers', self.headers) - kw['headers'] = self._set_proxy_headers(url, headers) + headers = kw.get("headers", self.headers) + kw["headers"] = self._set_proxy_headers(url, headers) return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/request.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/request.py index 8f2f44bb..55f160bb 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/request.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/request.py @@ -4,7 +4,7 @@ from .filepost import encode_multipart_formdata from .packages.six.moves.urllib.parse import urlencode -__all__ = ['RequestMethods'] +__all__ = ["RequestMethods"] class RequestMethods(object): @@ -36,16 +36,25 @@ class RequestMethods(object): explicitly. """ - _encode_url_methods = {'DELETE', 'GET', 'HEAD', 'OPTIONS'} + _encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"} def __init__(self, headers=None): self.headers = headers or {} - def urlopen(self, method, url, body=None, headers=None, - encode_multipart=True, multipart_boundary=None, - **kw): # Abstract - raise NotImplementedError("Classes extending RequestMethods must implement " - "their own ``urlopen`` method.") + def urlopen( + self, + method, + url, + body=None, + headers=None, + encode_multipart=True, + multipart_boundary=None, + **kw + ): # Abstract + raise NotImplementedError( + "Classes extending RequestMethods must implement " + "their own ``urlopen`` method." 
+ ) def request(self, method, url, fields=None, headers=None, **urlopen_kw): """ @@ -60,19 +69,18 @@ class RequestMethods(object): """ method = method.upper() - urlopen_kw['request_url'] = url + urlopen_kw["request_url"] = url if method in self._encode_url_methods: - return self.request_encode_url(method, url, fields=fields, - headers=headers, - **urlopen_kw) + return self.request_encode_url( + method, url, fields=fields, headers=headers, **urlopen_kw + ) else: - return self.request_encode_body(method, url, fields=fields, - headers=headers, - **urlopen_kw) + return self.request_encode_body( + method, url, fields=fields, headers=headers, **urlopen_kw + ) - def request_encode_url(self, method, url, fields=None, headers=None, - **urlopen_kw): + def request_encode_url(self, method, url, fields=None, headers=None, **urlopen_kw): """ Make a request using :meth:`urlopen` with the ``fields`` encoded in the url. This is useful for request methods like GET, HEAD, DELETE, etc. @@ -80,17 +88,24 @@ class RequestMethods(object): if headers is None: headers = self.headers - extra_kw = {'headers': headers} + extra_kw = {"headers": headers} extra_kw.update(urlopen_kw) if fields: - url += '?' + urlencode(fields) + url += "?" + urlencode(fields) return self.urlopen(method, url, **extra_kw) - def request_encode_body(self, method, url, fields=None, headers=None, - encode_multipart=True, multipart_boundary=None, - **urlopen_kw): + def request_encode_body( + self, + method, + url, + fields=None, + headers=None, + encode_multipart=True, + multipart_boundary=None, + **urlopen_kw + ): """ Make a request using :meth:`urlopen` with the ``fields`` encoded in the body. This is useful for request methods like POST, PUT, PATCH, etc. 
@@ -129,22 +144,28 @@ class RequestMethods(object): if headers is None: headers = self.headers - extra_kw = {'headers': {}} + extra_kw = {"headers": {}} if fields: - if 'body' in urlopen_kw: + if "body" in urlopen_kw: raise TypeError( - "request got values for both 'fields' and 'body', can only specify one.") + "request got values for both 'fields' and 'body', can only specify one." + ) if encode_multipart: - body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary) + body, content_type = encode_multipart_formdata( + fields, boundary=multipart_boundary + ) else: - body, content_type = urlencode(fields), 'application/x-www-form-urlencoded' + body, content_type = ( + urlencode(fields), + "application/x-www-form-urlencoded", + ) - extra_kw['body'] = body - extra_kw['headers'] = {'Content-Type': content_type} + extra_kw["body"] = body + extra_kw["headers"] = {"Content-Type": content_type} - extra_kw['headers'].update(headers) + extra_kw["headers"].update(headers) extra_kw.update(urlopen_kw) return self.urlopen(method, url, **extra_kw) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/response.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/response.py index c112690b..7dc9b93c 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/response.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/response.py @@ -6,10 +6,21 @@ import logging from socket import timeout as SocketTimeout from socket import error as SocketError +try: + import brotli +except ImportError: + brotli = None + from ._collections import HTTPHeaderDict from .exceptions import ( - BodyNotHttplibCompatible, ProtocolError, DecodeError, ReadTimeoutError, - ResponseNotChunked, IncompleteRead, InvalidHeader + BodyNotHttplibCompatible, + ProtocolError, + DecodeError, + ReadTimeoutError, + ResponseNotChunked, + IncompleteRead, + InvalidHeader, + HTTPError, ) from 
.packages.six import string_types as basestring, PY3 from .packages.six.moves import http_client as httplib @@ -20,10 +31,9 @@ log = logging.getLogger(__name__) class DeflateDecoder(object): - def __init__(self): self._first_try = True - self._data = b'' + self._data = b"" self._obj = zlib.decompressobj() def __getattr__(self, name): @@ -60,7 +70,6 @@ class GzipDecoderState(object): class GzipDecoder(object): - def __init__(self): self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) self._state = GzipDecoderState.FIRST_MEMBER @@ -90,6 +99,26 @@ class GzipDecoder(object): self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) +if brotli is not None: + + class BrotliDecoder(object): + # Supports both 'brotlipy' and 'Brotli' packages + # since they share an import name. The top branches + # are for 'brotlipy' and bottom branches for 'Brotli' + def __init__(self): + self._obj = brotli.Decompressor() + + def decompress(self, data): + if hasattr(self._obj, "decompress"): + return self._obj.decompress(data) + return self._obj.process(data) + + def flush(self): + if hasattr(self._obj, "flush"): + return self._obj.flush() + return b"" + + class MultiDecoder(object): """ From RFC7231: @@ -100,7 +129,7 @@ class MultiDecoder(object): """ def __init__(self, modes): - self._decoders = [_get_decoder(m.strip()) for m in modes.split(',')] + self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")] def flush(self): return self._decoders[0].flush() @@ -112,12 +141,15 @@ class MultiDecoder(object): def _get_decoder(mode): - if ',' in mode: + if "," in mode: return MultiDecoder(mode) - if mode == 'gzip': + if mode == "gzip": return GzipDecoder() + if brotli is not None and mode == "br": + return BrotliDecoder() + return DeflateDecoder() @@ -154,14 +186,31 @@ class HTTPResponse(io.IOBase): value of Content-Length header, if present. Otherwise, raise error. 
""" - CONTENT_DECODERS = ['gzip', 'deflate'] + CONTENT_DECODERS = ["gzip", "deflate"] + if brotli is not None: + CONTENT_DECODERS += ["br"] REDIRECT_STATUSES = [301, 302, 303, 307, 308] - def __init__(self, body='', headers=None, status=0, version=0, reason=None, - strict=0, preload_content=True, decode_content=True, - original_response=None, pool=None, connection=None, msg=None, - retries=None, enforce_content_length=False, - request_method=None, request_url=None): + def __init__( + self, + body="", + headers=None, + status=0, + version=0, + reason=None, + strict=0, + preload_content=True, + decode_content=True, + original_response=None, + pool=None, + connection=None, + msg=None, + retries=None, + enforce_content_length=False, + request_method=None, + request_url=None, + auto_close=True, + ): if isinstance(headers, HTTPHeaderDict): self.headers = headers @@ -174,6 +223,7 @@ class HTTPResponse(io.IOBase): self.decode_content = decode_content self.retries = retries self.enforce_content_length = enforce_content_length + self.auto_close = auto_close self._decoder = None self._body = None @@ -189,13 +239,13 @@ class HTTPResponse(io.IOBase): self._pool = pool self._connection = connection - if hasattr(body, 'read'): + if hasattr(body, "read"): self._fp = body # Are we using the chunked-style of transfer encoding? self.chunked = False self.chunk_left = None - tr_enc = self.headers.get('transfer-encoding', '').lower() + tr_enc = self.headers.get("transfer-encoding", "").lower() # Don't incur the penalty of creating a list and then discarding it encodings = (enc.strip() for enc in tr_enc.split(",")) if "chunked" in encodings: @@ -217,7 +267,7 @@ class HTTPResponse(io.IOBase): location. ``False`` if not a redirect status code. 
""" if self.status in self.REDIRECT_STATUSES: - return self.headers.get('location') + return self.headers.get("location") return False @@ -228,6 +278,17 @@ class HTTPResponse(io.IOBase): self._pool._put_conn(self._connection) self._connection = None + def drain_conn(self): + """ + Read and discard any remaining HTTP response data in the response connection. + + Unread data in the HTTPResponse connection blocks the connection from being released back to the pool. + """ + try: + self.read() + except (HTTPError, SocketError, BaseSSLError, HTTPException): + pass + @property def data(self): # For backwords-compat with earlier urllib3 0.4 and earlier. @@ -256,18 +317,20 @@ class HTTPResponse(io.IOBase): """ Set initial length value for Response content if available. """ - length = self.headers.get('content-length') + length = self.headers.get("content-length") if length is not None: if self.chunked: # This Response will fail with an IncompleteRead if it can't be # received as chunked. This method falls back to attempt reading # the response before raising an exception. - log.warning("Received response with both Content-Length and " - "Transfer-Encoding set. This is expressly forbidden " - "by RFC 7230 sec 3.3.2. Ignoring Content-Length and " - "attempting to process response as Transfer-Encoding: " - "chunked.") + log.warning( + "Received response with both Content-Length and " + "Transfer-Encoding set. This is expressly forbidden " + "by RFC 7230 sec 3.3.2. Ignoring Content-Length and " + "attempting to process response as Transfer-Encoding: " + "chunked." + ) return None try: @@ -276,10 +339,12 @@ class HTTPResponse(io.IOBase): # (e.g. Content-Length: 42, 42). This line ensures the values # are all valid ints and that as long as the `set` length is 1, # all values are the same. Otherwise, the header is invalid. 
- lengths = set([int(val) for val in length.split(',')]) + lengths = set([int(val) for val in length.split(",")]) if len(lengths) > 1: - raise InvalidHeader("Content-Length contained multiple " - "unmatching values (%s)" % length) + raise InvalidHeader( + "Content-Length contained multiple " + "unmatching values (%s)" % length + ) length = lengths.pop() except ValueError: length = None @@ -295,7 +360,7 @@ class HTTPResponse(io.IOBase): status = 0 # Check for responses that shouldn't include a body - if status in (204, 304) or 100 <= status < 200 or request_method == 'HEAD': + if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD": length = 0 return length @@ -306,29 +371,41 @@ class HTTPResponse(io.IOBase): """ # Note: content-encoding value should be case-insensitive, per RFC 7230 # Section 3.2 - content_encoding = self.headers.get('content-encoding', '').lower() + content_encoding = self.headers.get("content-encoding", "").lower() if self._decoder is None: if content_encoding in self.CONTENT_DECODERS: self._decoder = _get_decoder(content_encoding) - elif ',' in content_encoding: - encodings = [e.strip() for e in content_encoding.split(',') if e.strip() in self.CONTENT_DECODERS] + elif "," in content_encoding: + encodings = [ + e.strip() + for e in content_encoding.split(",") + if e.strip() in self.CONTENT_DECODERS + ] if len(encodings): self._decoder = _get_decoder(content_encoding) + DECODER_ERROR_CLASSES = (IOError, zlib.error) + if brotli is not None: + DECODER_ERROR_CLASSES += (brotli.error,) + def _decode(self, data, decode_content, flush_decoder): """ Decode the data passed in and potentially flush the decoder. 
""" + if not decode_content: + return data + try: - if decode_content and self._decoder: + if self._decoder: data = self._decoder.decompress(data) - except (IOError, zlib.error) as e: - content_encoding = self.headers.get('content-encoding', '').lower() + except self.DECODER_ERROR_CLASSES as e: + content_encoding = self.headers.get("content-encoding", "").lower() raise DecodeError( "Received response with content-encoding: %s, but " - "failed to decode it." % content_encoding, e) - - if flush_decoder and decode_content: + "failed to decode it." % content_encoding, + e, + ) + if flush_decoder: data += self._flush_decoder() return data @@ -339,10 +416,10 @@ class HTTPResponse(io.IOBase): being used. """ if self._decoder: - buf = self._decoder.decompress(b'') + buf = self._decoder.decompress(b"") return buf + self._decoder.flush() - return b'' + return b"" @contextmanager def _error_catcher(self): @@ -362,20 +439,20 @@ class HTTPResponse(io.IOBase): except SocketTimeout: # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but # there is yet no clean way to get at it from this context. - raise ReadTimeoutError(self._pool, None, 'Read timed out.') + raise ReadTimeoutError(self._pool, None, "Read timed out.") except BaseSSLError as e: # FIXME: Is there a better way to differentiate between SSLErrors? - if 'read operation timed out' not in str(e): # Defensive: + if "read operation timed out" not in str(e): # Defensive: # This shouldn't happen but just in case we're missing an edge # case, let's avoid swallowing SSL errors. raise - raise ReadTimeoutError(self._pool, None, 'Read timed out.') + raise ReadTimeoutError(self._pool, None, "Read timed out.") except (HTTPException, SocketError) as e: # This includes IncompleteRead. - raise ProtocolError('Connection broken: %r' % e, e) + raise ProtocolError("Connection broken: %r" % e, e) # If no exception is thrown, we should avoid cleaning up # unnecessarily. 
@@ -430,17 +507,19 @@ class HTTPResponse(io.IOBase): return flush_decoder = False - data = None + fp_closed = getattr(self._fp, "closed", False) with self._error_catcher(): if amt is None: # cStringIO doesn't like amt=None - data = self._fp.read() + data = self._fp.read() if not fp_closed else b"" flush_decoder = True else: cache_content = False - data = self._fp.read(amt) - if amt != 0 and not data: # Platform-specific: Buggy versions of Python. + data = self._fp.read(amt) if not fp_closed else b"" + if ( + amt != 0 and not data + ): # Platform-specific: Buggy versions of Python. # Close the connection when no data is returned # # This is redundant to what httplib/http.client _should_ @@ -450,7 +529,10 @@ class HTTPResponse(io.IOBase): # no harm in redundantly calling close. self._fp.close() flush_decoder = True - if self.enforce_content_length and self.length_remaining not in (0, None): + if self.enforce_content_length and self.length_remaining not in ( + 0, + None, + ): # This is an edge case that httplib failed to cover due # to concerns of backward compatibility. We're # addressing it here to make sure IncompleteRead is @@ -470,7 +552,7 @@ class HTTPResponse(io.IOBase): return data - def stream(self, amt=2**16, decode_content=None): + def stream(self, amt=2 ** 16, decode_content=None): """ A generator wrapper for the read() method. 
A call will block until ``amt`` bytes have been read from the connection or until the @@ -508,21 +590,24 @@ class HTTPResponse(io.IOBase): headers = r.msg if not isinstance(headers, HTTPHeaderDict): - if PY3: # Python 3 + if PY3: headers = HTTPHeaderDict(headers.items()) - else: # Python 2 + else: + # Python 2.7 headers = HTTPHeaderDict.from_httplib(headers) # HTTPResponse objects in Python 3 don't have a .strict attribute - strict = getattr(r, 'strict', 0) - resp = ResponseCls(body=r, - headers=headers, - status=r.status, - version=r.version, - reason=r.reason, - strict=strict, - original_response=r, - **response_kw) + strict = getattr(r, "strict", 0) + resp = ResponseCls( + body=r, + headers=headers, + status=r.status, + version=r.version, + reason=r.reason, + strict=strict, + original_response=r, + **response_kw + ) return resp # Backwards-compatibility methods for httplib.HTTPResponse @@ -544,13 +629,18 @@ class HTTPResponse(io.IOBase): if self._connection: self._connection.close() + if not self.auto_close: + io.IOBase.close(self) + @property def closed(self): - if self._fp is None: + if not self.auto_close: + return io.IOBase.closed.__get__(self) + elif self._fp is None: return True - elif hasattr(self._fp, 'isclosed'): + elif hasattr(self._fp, "isclosed"): return self._fp.isclosed() - elif hasattr(self._fp, 'closed'): + elif hasattr(self._fp, "closed"): return self._fp.closed else: return True @@ -561,11 +651,17 @@ class HTTPResponse(io.IOBase): elif hasattr(self._fp, "fileno"): return self._fp.fileno() else: - raise IOError("The file-like object this HTTPResponse is wrapped " - "around has no file descriptor") + raise IOError( + "The file-like object this HTTPResponse is wrapped " + "around has no file descriptor" + ) def flush(self): - if self._fp is not None and hasattr(self._fp, 'flush'): + if ( + self._fp is not None + and hasattr(self._fp, "flush") + and not getattr(self._fp, "closed", False) + ): return self._fp.flush() def readable(self): @@ -578,7 
+674,7 @@ class HTTPResponse(io.IOBase): if len(temp) == 0: return 0 else: - b[:len(temp)] = temp + b[: len(temp)] = temp return len(temp) def supports_chunked_reads(self): @@ -588,7 +684,7 @@ class HTTPResponse(io.IOBase): attribute. If it is present we assume it returns raw chunks as processed by read_chunked(). """ - return hasattr(self._fp, 'fp') + return hasattr(self._fp, "fp") def _update_chunk_length(self): # First, we'll figure out length of a chunk and then @@ -596,7 +692,7 @@ class HTTPResponse(io.IOBase): if self.chunk_left is not None: return line = self._fp.fp.readline() - line = line.split(b';', 1)[0] + line = line.split(b";", 1)[0] try: self.chunk_left = int(line, 16) except ValueError: @@ -645,11 +741,13 @@ class HTTPResponse(io.IOBase): if not self.chunked: raise ResponseNotChunked( "Response is not chunked. " - "Header 'transfer-encoding: chunked' is missing.") + "Header 'transfer-encoding: chunked' is missing." + ) if not self.supports_chunked_reads(): raise BodyNotHttplibCompatible( "Body should be httplib.HTTPResponse like. " - "It should have have an fp attribute which returns raw chunks.") + "It should have have an fp attribute which returns raw chunks." + ) with self._error_catcher(): # Don't bother reading the body of a HEAD request. @@ -667,8 +765,9 @@ class HTTPResponse(io.IOBase): if self.chunk_left == 0: break chunk = self._handle_chunk(amt) - decoded = self._decode(chunk, decode_content=decode_content, - flush_decoder=False) + decoded = self._decode( + chunk, decode_content=decode_content, flush_decoder=False + ) if decoded: yield decoded @@ -686,7 +785,7 @@ class HTTPResponse(io.IOBase): if not line: # Some sites may not end with '\r\n'. break - if line == b'\r\n': + if line == b"\r\n": break # We read everything; close the "file". 
@@ -703,3 +802,20 @@ class HTTPResponse(io.IOBase): return self.retries.history[-1].redirect_location else: return self._request_url + + def __iter__(self): + buffer = [] + for chunk in self.stream(decode_content=True): + if b"\n" in chunk: + chunk = chunk.split(b"\n") + yield b"".join(buffer) + chunk[0] + b"\n" + for x in chunk[1:-1]: + yield x + b"\n" + if chunk[-1]: + buffer = [chunk[-1]] + else: + buffer = [] + else: + buffer.append(chunk) + if buffer: + yield b"".join(buffer) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/__init__.py index 2f2770b6..a96c73a9 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/__init__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/__init__.py @@ -1,4 +1,5 @@ from __future__ import absolute_import + # For backwards compatibility, provide imports that used to be here. 
from .connection import is_connection_dropped from .request import make_headers @@ -12,43 +13,34 @@ from .ssl_ import ( resolve_cert_reqs, resolve_ssl_version, ssl_wrap_socket, + PROTOCOL_TLS, ) -from .timeout import ( - current_time, - Timeout, -) +from .timeout import current_time, Timeout from .retry import Retry -from .url import ( - get_host, - parse_url, - split_first, - Url, -) -from .wait import ( - wait_for_read, - wait_for_write -) +from .url import get_host, parse_url, split_first, Url +from .wait import wait_for_read, wait_for_write __all__ = ( - 'HAS_SNI', - 'IS_PYOPENSSL', - 'IS_SECURETRANSPORT', - 'SSLContext', - 'Retry', - 'Timeout', - 'Url', - 'assert_fingerprint', - 'current_time', - 'is_connection_dropped', - 'is_fp_closed', - 'get_host', - 'parse_url', - 'make_headers', - 'resolve_cert_reqs', - 'resolve_ssl_version', - 'split_first', - 'ssl_wrap_socket', - 'wait_for_read', - 'wait_for_write' + "HAS_SNI", + "IS_PYOPENSSL", + "IS_SECURETRANSPORT", + "SSLContext", + "PROTOCOL_TLS", + "Retry", + "Timeout", + "Url", + "assert_fingerprint", + "current_time", + "is_connection_dropped", + "is_fp_closed", + "get_host", + "parse_url", + "make_headers", + "resolve_cert_reqs", + "resolve_ssl_version", + "split_first", + "ssl_wrap_socket", + "wait_for_read", + "wait_for_write", ) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/connection.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/connection.py index 5ad70b2f..86f0a3b0 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/connection.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/connection.py @@ -14,7 +14,7 @@ def is_connection_dropped(conn): # Platform-specific Note: For platforms like AppEngine, this will always return ``False`` to let the platform handle connection recycling transparently for us. 
""" - sock = getattr(conn, 'sock', False) + sock = getattr(conn, "sock", False) if sock is False: # Platform-specific: AppEngine return False if sock is None: # Connection already closed (such as by httplib). @@ -30,8 +30,12 @@ def is_connection_dropped(conn): # Platform-specific # library test suite. Added to its signature is only `socket_options`. # One additional modification is that we avoid binding to IPv6 servers # discovered in DNS if the system doesn't have IPv6 functionality. -def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, - source_address=None, socket_options=None): +def create_connection( + address, + timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + source_address=None, + socket_options=None, +): """Connect to *address* and return the socket object. Convenience function. Connect to *address* (a 2-tuple ``(host, @@ -45,8 +49,8 @@ def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, """ host, port = address - if host.startswith('['): - host = host.strip('[]') + if host.startswith("["): + host = host.strip("[]") err = None # Using the value from allowed_gai_family() in the context of getaddrinfo lets @@ -117,7 +121,7 @@ def _has_ipv6(host): # has_ipv6 returns true if cPython was compiled with IPv6 support. # It does not tell us if the system has IPv6 support enabled. To # determine that we must bind to an IPv6 address. 
- # https://github.com/shazow/urllib3/pull/611 + # https://github.com/urllib3/urllib3/pull/611 # https://bugs.python.org/issue658327 try: sock = socket.socket(socket.AF_INET6) @@ -131,4 +135,4 @@ def _has_ipv6(host): return has_ipv6 -HAS_IPV6 = _has_ipv6('::1') +HAS_IPV6 = _has_ipv6("::1") diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/request.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/request.py index 3ddfcd55..3b7bb54d 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/request.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/request.py @@ -4,12 +4,25 @@ from base64 import b64encode from ..packages.six import b, integer_types from ..exceptions import UnrewindableBodyError -ACCEPT_ENCODING = 'gzip,deflate' +ACCEPT_ENCODING = "gzip,deflate" +try: + import brotli as _unused_module_brotli # noqa: F401 +except ImportError: + pass +else: + ACCEPT_ENCODING += ",br" + _FAILEDTELL = object() -def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, - basic_auth=None, proxy_basic_auth=None, disable_cache=None): +def make_headers( + keep_alive=None, + accept_encoding=None, + user_agent=None, + basic_auth=None, + proxy_basic_auth=None, + disable_cache=None, +): """ Shortcuts for generating request headers. 
@@ -49,27 +62,27 @@ def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, if isinstance(accept_encoding, str): pass elif isinstance(accept_encoding, list): - accept_encoding = ','.join(accept_encoding) + accept_encoding = ",".join(accept_encoding) else: accept_encoding = ACCEPT_ENCODING - headers['accept-encoding'] = accept_encoding + headers["accept-encoding"] = accept_encoding if user_agent: - headers['user-agent'] = user_agent + headers["user-agent"] = user_agent if keep_alive: - headers['connection'] = 'keep-alive' + headers["connection"] = "keep-alive" if basic_auth: - headers['authorization'] = 'Basic ' + \ - b64encode(b(basic_auth)).decode('utf-8') + headers["authorization"] = "Basic " + b64encode(b(basic_auth)).decode("utf-8") if proxy_basic_auth: - headers['proxy-authorization'] = 'Basic ' + \ - b64encode(b(proxy_basic_auth)).decode('utf-8') + headers["proxy-authorization"] = "Basic " + b64encode( + b(proxy_basic_auth) + ).decode("utf-8") if disable_cache: - headers['cache-control'] = 'no-cache' + headers["cache-control"] = "no-cache" return headers @@ -81,7 +94,7 @@ def set_file_position(body, pos): """ if pos is not None: rewind_body(body, pos) - elif getattr(body, 'tell', None) is not None: + elif getattr(body, "tell", None) is not None: try: pos = body.tell() except (IOError, OSError): @@ -103,16 +116,20 @@ def rewind_body(body, body_pos): :param int pos: Position to seek to in file. """ - body_seek = getattr(body, 'seek', None) + body_seek = getattr(body, "seek", None) if body_seek is not None and isinstance(body_pos, integer_types): try: body_seek(body_pos) except (IOError, OSError): - raise UnrewindableBodyError("An error occurred when rewinding request " - "body for redirect/retry.") + raise UnrewindableBodyError( + "An error occurred when rewinding request body for redirect/retry." 
+ ) elif body_pos is _FAILEDTELL: - raise UnrewindableBodyError("Unable to record file position for rewinding " - "request body during a redirect/retry.") + raise UnrewindableBodyError( + "Unable to record file position for rewinding " + "request body during a redirect/retry." + ) else: - raise ValueError("body_pos must be of type integer, " - "instead it was %s." % type(body_pos)) + raise ValueError( + "body_pos must be of type integer, instead it was %s." % type(body_pos) + ) diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/response.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/response.py index 3d548648..715868dd 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/response.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/response.py @@ -52,11 +52,10 @@ def assert_header_parsing(headers): # This will fail silently if we pass in the wrong kind of parameter. # To make debugging easier add an explicit check. 
if not isinstance(headers, httplib.HTTPMessage): - raise TypeError('expected httplib.Message, got {0}.'.format( - type(headers))) + raise TypeError("expected httplib.Message, got {0}.".format(type(headers))) - defects = getattr(headers, 'defects', None) - get_payload = getattr(headers, 'get_payload', None) + defects = getattr(headers, "defects", None) + get_payload = getattr(headers, "get_payload", None) unparsed_data = None if get_payload: @@ -84,4 +83,4 @@ def is_response_to_head(response): method = response._method if isinstance(method, int): # Platform-specific: Appengine return method == 3 - return method.upper() == 'HEAD' + return method.upper() == "HEAD" diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/retry.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/retry.py index e7d0abd6..ee30c91b 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/retry.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/retry.py @@ -13,6 +13,7 @@ from ..exceptions import ( ReadTimeoutError, ResponseError, InvalidHeader, + ProxyError, ) from ..packages import six @@ -21,8 +22,9 @@ log = logging.getLogger(__name__) # Data structure for representing the metadata of requests that result in a retry. -RequestHistory = namedtuple('RequestHistory', ["method", "url", "error", - "status", "redirect_location"]) +RequestHistory = namedtuple( + "RequestHistory", ["method", "url", "error", "status", "redirect_location"] +) class Retry(object): @@ -146,21 +148,33 @@ class Retry(object): request. 
""" - DEFAULT_METHOD_WHITELIST = frozenset([ - 'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE']) + DEFAULT_METHOD_WHITELIST = frozenset( + ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"] + ) RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) - DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(['Authorization']) + DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(["Authorization"]) #: Maximum backoff time. BACKOFF_MAX = 120 - def __init__(self, total=10, connect=None, read=None, redirect=None, status=None, - method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None, - backoff_factor=0, raise_on_redirect=True, raise_on_status=True, - history=None, respect_retry_after_header=True, - remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST): + def __init__( + self, + total=10, + connect=None, + read=None, + redirect=None, + status=None, + method_whitelist=DEFAULT_METHOD_WHITELIST, + status_forcelist=None, + backoff_factor=0, + raise_on_redirect=True, + raise_on_status=True, + history=None, + respect_retry_after_header=True, + remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST, + ): self.total = total self.connect = connect @@ -179,19 +193,25 @@ class Retry(object): self.raise_on_status = raise_on_status self.history = history or tuple() self.respect_retry_after_header = respect_retry_after_header - self.remove_headers_on_redirect = remove_headers_on_redirect + self.remove_headers_on_redirect = frozenset( + [h.lower() for h in remove_headers_on_redirect] + ) def new(self, **kw): params = dict( total=self.total, - connect=self.connect, read=self.read, redirect=self.redirect, status=self.status, + connect=self.connect, + read=self.read, + redirect=self.redirect, + status=self.status, method_whitelist=self.method_whitelist, status_forcelist=self.status_forcelist, backoff_factor=self.backoff_factor, raise_on_redirect=self.raise_on_redirect, raise_on_status=self.raise_on_status, history=self.history, - 
remove_headers_on_redirect=self.remove_headers_on_redirect + remove_headers_on_redirect=self.remove_headers_on_redirect, + respect_retry_after_header=self.respect_retry_after_header, ) params.update(kw) return type(self)(**params) @@ -216,8 +236,11 @@ class Retry(object): :rtype: float """ # We want to consider only the last consecutive errors sequence (Ignore redirects). - consecutive_errors_len = len(list(takewhile(lambda x: x.redirect_location is None, - reversed(self.history)))) + consecutive_errors_len = len( + list( + takewhile(lambda x: x.redirect_location is None, reversed(self.history)) + ) + ) if consecutive_errors_len <= 1: return 0 @@ -273,7 +296,7 @@ class Retry(object): this method will return immediately. """ - if response: + if self.respect_retry_after_header and response: slept = self.sleep_for_retry(response) if slept: return @@ -284,6 +307,8 @@ class Retry(object): """ Errors when we're fairly sure that the server did not receive the request, so it should be safe to retry. """ + if isinstance(err, ProxyError): + err = err.original_error return isinstance(err, ConnectTimeoutError) def _is_read_error(self, err): @@ -314,8 +339,12 @@ class Retry(object): if self.status_forcelist and status_code in self.status_forcelist: return True - return (self.total and self.respect_retry_after_header and - has_retry_after and (status_code in self.RETRY_AFTER_STATUS_CODES)) + return ( + self.total + and self.respect_retry_after_header + and has_retry_after + and (status_code in self.RETRY_AFTER_STATUS_CODES) + ) def is_exhausted(self): """ Are we out of retries? """ @@ -326,8 +355,15 @@ class Retry(object): return min(retry_counts) < 0 - def increment(self, method=None, url=None, response=None, error=None, - _pool=None, _stacktrace=None): + def increment( + self, + method=None, + url=None, + response=None, + error=None, + _pool=None, + _stacktrace=None, + ): """ Return a new Retry object with incremented retry counters. 
:param response: A response object, or None, if the server did not @@ -350,7 +386,7 @@ class Retry(object): read = self.read redirect = self.redirect status_count = self.status - cause = 'unknown' + cause = "unknown" status = None redirect_location = None @@ -372,7 +408,7 @@ class Retry(object): # Redirect retry? if redirect is not None: redirect -= 1 - cause = 'too many redirects' + cause = "too many redirects" redirect_location = response.get_redirect_location() status = response.status @@ -383,16 +419,21 @@ class Retry(object): if response and response.status: if status_count is not None: status_count -= 1 - cause = ResponseError.SPECIFIC_ERROR.format( - status_code=response.status) + cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status) status = response.status - history = self.history + (RequestHistory(method, url, error, status, redirect_location),) + history = self.history + ( + RequestHistory(method, url, error, status, redirect_location), + ) new_retry = self.new( total=total, - connect=connect, read=read, redirect=redirect, status=status_count, - history=history) + connect=connect, + read=read, + redirect=redirect, + status=status_count, + history=history, + ) if new_retry.is_exhausted(): raise MaxRetryError(_pool, url, error or ResponseError(cause)) @@ -402,9 +443,10 @@ class Retry(object): return new_retry def __repr__(self): - return ('{cls.__name__}(total={self.total}, connect={self.connect}, ' - 'read={self.read}, redirect={self.redirect}, status={self.status})').format( - cls=type(self), self=self) + return ( + "{cls.__name__}(total={self.total}, connect={self.connect}, " + "read={self.read}, redirect={self.redirect}, status={self.status})" + ).format(cls=type(self), self=self) # For backwards compatibility (equivalent to pre-v1.9): diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/ssl_.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/ssl_.py index 
dfc553ff..d3b463d4 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/ssl_.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/ssl_.py @@ -2,11 +2,12 @@ from __future__ import absolute_import import errno import warnings import hmac -import socket +import sys from binascii import hexlify, unhexlify from hashlib import md5, sha1, sha256 +from .url import IPV4_RE, BRACELESS_IPV6_ADDRZ_RE from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning from ..packages import six @@ -17,11 +18,7 @@ IS_PYOPENSSL = False IS_SECURETRANSPORT = False # Maps the length of a digest to a possible hash function producing this digest -HASHFUNC_MAP = { - 32: md5, - 40: sha1, - 64: sha256, -} +HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256} def _const_compare_digest_backport(a, b): @@ -37,17 +34,27 @@ def _const_compare_digest_backport(a, b): return result == 0 -_const_compare_digest = getattr(hmac, 'compare_digest', - _const_compare_digest_backport) - +_const_compare_digest = getattr(hmac, "compare_digest", _const_compare_digest_backport) try: # Test for SSL features import ssl - from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23 + from ssl import wrap_socket, CERT_REQUIRED from ssl import HAS_SNI # Has SNI? except ImportError: pass +try: # Platform-specific: Python 3.6 + from ssl import PROTOCOL_TLS + + PROTOCOL_SSLv23 = PROTOCOL_TLS +except ImportError: + try: + from ssl import PROTOCOL_SSLv23 as PROTOCOL_TLS + + PROTOCOL_SSLv23 = PROTOCOL_TLS + except ImportError: + PROTOCOL_SSLv23 = PROTOCOL_TLS = 2 + try: from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION @@ -56,25 +63,6 @@ except ImportError: OP_NO_COMPRESSION = 0x20000 -# Python 2.7 doesn't have inet_pton on non-Linux so we fallback on inet_aton in -# those cases. This means that we can only detect IPv4 addresses in this case. 
-if hasattr(socket, 'inet_pton'): - inet_pton = socket.inet_pton -else: - # Maybe we can use ipaddress if the user has urllib3[secure]? - try: - from pip._vendor import ipaddress - - def inet_pton(_, host): - if isinstance(host, bytes): - host = host.decode('ascii') - return ipaddress.ip_address(host) - - except ImportError: # Platform-specific: Non-Linux - def inet_pton(_, host): - return socket.inet_aton(host) - - # A secure default. # Sources for more information on TLS ciphers: # @@ -83,36 +71,37 @@ else: # - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ # # The general intent is: -# - Prefer TLS 1.3 cipher suites # - prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE), # - prefer ECDHE over DHE for better performance, # - prefer any AES-GCM and ChaCha20 over any AES-CBC for better performance and # security, # - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common, -# - disable NULL authentication, MD5 MACs and DSS for security reasons. -DEFAULT_CIPHERS = ':'.join([ - 'TLS13-AES-256-GCM-SHA384', - 'TLS13-CHACHA20-POLY1305-SHA256', - 'TLS13-AES-128-GCM-SHA256', - 'ECDH+AESGCM', - 'ECDH+CHACHA20', - 'DH+AESGCM', - 'DH+CHACHA20', - 'ECDH+AES256', - 'DH+AES256', - 'ECDH+AES128', - 'DH+AES', - 'RSA+AESGCM', - 'RSA+AES', - '!aNULL', - '!eNULL', - '!MD5', -]) +# - disable NULL authentication, MD5 MACs, DSS, and other +# insecure ciphers for security reasons. +# - NOTE: TLS 1.3 cipher suites are managed through a different interface +# not exposed by CPython (yet!) and are enabled by default if they're available. +DEFAULT_CIPHERS = ":".join( + [ + "ECDHE+AESGCM", + "ECDHE+CHACHA20", + "DHE+AESGCM", + "DHE+CHACHA20", + "ECDH+AESGCM", + "DH+AESGCM", + "ECDH+AES", + "DH+AES", + "RSA+AESGCM", + "RSA+AES", + "!aNULL", + "!eNULL", + "!MD5", + "!DSS", + ] +) try: from ssl import SSLContext # Modern SSL? 
except ImportError: - import sys class SSLContext(object): # Platform-specific: Python 2 def __init__(self, protocol_version): @@ -130,32 +119,35 @@ except ImportError: self.certfile = certfile self.keyfile = keyfile - def load_verify_locations(self, cafile=None, capath=None): + def load_verify_locations(self, cafile=None, capath=None, cadata=None): self.ca_certs = cafile if capath is not None: raise SSLError("CA directories not supported in older Pythons") + if cadata is not None: + raise SSLError("CA data not supported in older Pythons") + def set_ciphers(self, cipher_suite): self.ciphers = cipher_suite def wrap_socket(self, socket, server_hostname=None, server_side=False): warnings.warn( - 'A true SSLContext object is not available. This prevents ' - 'urllib3 from configuring SSL appropriately and may cause ' - 'certain SSL connections to fail. You can upgrade to a newer ' - 'version of Python to solve this. For more information, see ' - 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' - '#ssl-warnings', - InsecurePlatformWarning + "A true SSLContext object is not available. This prevents " + "urllib3 from configuring SSL appropriately and may cause " + "certain SSL connections to fail. You can upgrade to a newer " + "version of Python to solve this. For more information, see " + "https://urllib3.readthedocs.io/en/latest/advanced-usage.html" + "#ssl-warnings", + InsecurePlatformWarning, ) kwargs = { - 'keyfile': self.keyfile, - 'certfile': self.certfile, - 'ca_certs': self.ca_certs, - 'cert_reqs': self.verify_mode, - 'ssl_version': self.protocol, - 'server_side': server_side, + "keyfile": self.keyfile, + "certfile": self.certfile, + "ca_certs": self.ca_certs, + "cert_reqs": self.verify_mode, + "ssl_version": self.protocol, + "server_side": server_side, } return wrap_socket(socket, ciphers=self.ciphers, **kwargs) @@ -170,12 +162,11 @@ def assert_fingerprint(cert, fingerprint): Fingerprint as string of hexdigits, can be interspersed by colons. 
""" - fingerprint = fingerprint.replace(':', '').lower() + fingerprint = fingerprint.replace(":", "").lower() digest_length = len(fingerprint) hashfunc = HASHFUNC_MAP.get(digest_length) if not hashfunc: - raise SSLError( - 'Fingerprint of invalid length: {0}'.format(fingerprint)) + raise SSLError("Fingerprint of invalid length: {0}".format(fingerprint)) # We need encode() here for py32; works on py2 and p33. fingerprint_bytes = unhexlify(fingerprint.encode()) @@ -183,15 +174,18 @@ def assert_fingerprint(cert, fingerprint): cert_digest = hashfunc(cert).digest() if not _const_compare_digest(cert_digest, fingerprint_bytes): - raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".' - .format(fingerprint, hexlify(cert_digest))) + raise SSLError( + 'Fingerprints did not match. Expected "{0}", got "{1}".'.format( + fingerprint, hexlify(cert_digest) + ) + ) def resolve_cert_reqs(candidate): """ Resolves the argument to a numeric constant, which can be passed to the wrap_socket function/method from the ssl module. - Defaults to :data:`ssl.CERT_NONE`. + Defaults to :data:`ssl.CERT_REQUIRED`. If given a string it is assumed to be the name of the constant in the :mod:`ssl` module or its abbreviation. (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. @@ -199,12 +193,12 @@ def resolve_cert_reqs(candidate): constant which can directly be passed to wrap_socket. 
""" if candidate is None: - return CERT_NONE + return CERT_REQUIRED if isinstance(candidate, str): res = getattr(ssl, candidate, None) if res is None: - res = getattr(ssl, 'CERT_' + candidate) + res = getattr(ssl, "CERT_" + candidate) return res return candidate @@ -215,19 +209,20 @@ def resolve_ssl_version(candidate): like resolve_cert_reqs """ if candidate is None: - return PROTOCOL_SSLv23 + return PROTOCOL_TLS if isinstance(candidate, str): res = getattr(ssl, candidate, None) if res is None: - res = getattr(ssl, 'PROTOCOL_' + candidate) + res = getattr(ssl, "PROTOCOL_" + candidate) return res return candidate -def create_urllib3_context(ssl_version=None, cert_reqs=None, - options=None, ciphers=None): +def create_urllib3_context( + ssl_version=None, cert_reqs=None, options=None, ciphers=None +): """All arguments have the same meaning as ``ssl_wrap_socket``. By default, this function does a lot of the same work that @@ -261,7 +256,7 @@ def create_urllib3_context(ssl_version=None, cert_reqs=None, Constructed SSLContext object with specified options :rtype: SSLContext """ - context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23) + context = SSLContext(ssl_version or PROTOCOL_TLS) context.set_ciphers(ciphers or DEFAULT_CIPHERS) @@ -280,18 +275,41 @@ def create_urllib3_context(ssl_version=None, cert_reqs=None, context.options |= options + # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is + # necessary for conditional client cert authentication with TLS 1.3. + # The attribute is None for OpenSSL <= 1.1.0 or does not exist in older + # versions of Python. 
We only enable on Python 3.7.4+ or if certificate + # verification is enabled to work around Python issue #37428 + # See: https://bugs.python.org/issue37428 + if (cert_reqs == ssl.CERT_REQUIRED or sys.version_info >= (3, 7, 4)) and getattr( + context, "post_handshake_auth", None + ) is not None: + context.post_handshake_auth = True + context.verify_mode = cert_reqs - if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2 + if ( + getattr(context, "check_hostname", None) is not None + ): # Platform-specific: Python 3.2 # We do our own verification, including fingerprints and alternative # hostnames. So disable it here context.check_hostname = False return context -def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, - ca_certs=None, server_hostname=None, - ssl_version=None, ciphers=None, ssl_context=None, - ca_cert_dir=None): +def ssl_wrap_socket( + sock, + keyfile=None, + certfile=None, + cert_reqs=None, + ca_certs=None, + server_hostname=None, + ssl_version=None, + ciphers=None, + ssl_context=None, + ca_cert_dir=None, + key_password=None, + ca_cert_data=None, +): """ All arguments except for server_hostname, ssl_context, and ca_cert_dir have the same meaning as they do when using :func:`ssl.wrap_socket`. @@ -307,18 +325,22 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, A directory containing CA certificates in multiple separate files, as supported by OpenSSL's -CApath flag or the capath argument to SSLContext.load_verify_locations(). + :param key_password: + Optional password if the keyfile is encrypted. + :param ca_cert_data: + Optional string containing CA certificates in PEM format suitable for + passing as the cadata parameter to SSLContext.load_verify_locations() """ context = ssl_context if context is None: # Note: This branch of code and all the variables in it are no longer # used by urllib3 itself. We should consider deprecating and removing # this code. 
- context = create_urllib3_context(ssl_version, cert_reqs, - ciphers=ciphers) + context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers) - if ca_certs or ca_cert_dir: + if ca_certs or ca_cert_dir or ca_cert_data: try: - context.load_verify_locations(ca_certs, ca_cert_dir) + context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data) except IOError as e: # Platform-specific: Python 2.7 raise SSLError(e) # Py33 raises FileNotFoundError which subclasses OSError @@ -327,55 +349,66 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, if e.errno == errno.ENOENT: raise SSLError(e) raise - elif getattr(context, 'load_default_certs', None) is not None: + + elif ssl_context is None and hasattr(context, "load_default_certs"): # try to load OS default certs; works well on Windows (require Python3.4+) context.load_default_certs() + # Attempt to detect if we get the goofy behavior of the + # keyfile being encrypted and OpenSSL asking for the + # passphrase via the terminal and instead error out. + if keyfile and key_password is None and _is_key_file_encrypted(keyfile): + raise SSLError("Client private key is encrypted, password is required") + if certfile: - context.load_cert_chain(certfile, keyfile) + if key_password is None: + context.load_cert_chain(certfile, keyfile) + else: + context.load_cert_chain(certfile, keyfile, key_password) # If we detect server_hostname is an IP address then the SNI # extension should not be used according to RFC3546 Section 3.1 # We shouldn't warn the user if SNI isn't available but we would # not be using SNI anyways due to IP address for server_hostname. 
- if ((server_hostname is not None and not is_ipaddress(server_hostname)) - or IS_SECURETRANSPORT): + if ( + server_hostname is not None and not is_ipaddress(server_hostname) + ) or IS_SECURETRANSPORT: if HAS_SNI and server_hostname is not None: return context.wrap_socket(sock, server_hostname=server_hostname) warnings.warn( - 'An HTTPS request has been made, but the SNI (Server Name ' - 'Indication) extension to TLS is not available on this platform. ' - 'This may cause the server to present an incorrect TLS ' - 'certificate, which can cause validation failures. You can upgrade to ' - 'a newer version of Python to solve this. For more information, see ' - 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' - '#ssl-warnings', - SNIMissingWarning + "An HTTPS request has been made, but the SNI (Server Name " + "Indication) extension to TLS is not available on this platform. " + "This may cause the server to present an incorrect TLS " + "certificate, which can cause validation failures. You can upgrade to " + "a newer version of Python to solve this. For more information, see " + "https://urllib3.readthedocs.io/en/latest/advanced-usage.html" + "#ssl-warnings", + SNIMissingWarning, ) return context.wrap_socket(sock) def is_ipaddress(hostname): - """Detects whether the hostname given is an IP address. + """Detects whether the hostname given is an IPv4 or IPv6 address. + Also detects IPv6 addresses with Zone IDs. :param str hostname: Hostname to examine. :return: True if the hostname is an IP address, False otherwise. """ - if six.PY3 and isinstance(hostname, bytes): + if not six.PY2 and isinstance(hostname, bytes): # IDN A-label bytes are ASCII compatible. 
- hostname = hostname.decode('ascii') + hostname = hostname.decode("ascii") + return bool(IPV4_RE.match(hostname) or BRACELESS_IPV6_ADDRZ_RE.match(hostname)) - families = [socket.AF_INET] - if hasattr(socket, 'AF_INET6'): - families.append(socket.AF_INET6) - for af in families: - try: - inet_pton(af, hostname) - except (socket.error, ValueError, OSError): - pass - else: - return True +def _is_key_file_encrypted(key_file): + """Detects if a key file is encrypted or not.""" + with open(key_file, "r") as f: + for line in f: + # Look for Proc-Type: 4,ENCRYPTED + if "ENCRYPTED" in line: + return True + return False diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/timeout.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/timeout.py index cec817e6..b61fea75 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/timeout.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/timeout.py @@ -1,4 +1,5 @@ from __future__ import absolute_import + # The default socket timeout, used by httplib to indicate that no timeout was # specified by the user from socket import _GLOBAL_DEFAULT_TIMEOUT @@ -45,19 +46,20 @@ class Timeout(object): :type total: integer, float, or None :param connect: - The maximum amount of time to wait for a connection attempt to a server - to succeed. Omitting the parameter will default the connect timeout to - the system default, probably `the global default timeout in socket.py + The maximum amount of time (in seconds) to wait for a connection + attempt to a server to succeed. Omitting the parameter will default the + connect timeout to the system default, probably `the global default + timeout in socket.py <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_. None will set an infinite timeout for connection attempts. 
:type connect: integer, float, or None :param read: - The maximum amount of time to wait between consecutive - read operations for a response from the server. Omitting - the parameter will default the read timeout to the system - default, probably `the global default timeout in socket.py + The maximum amount of time (in seconds) to wait between consecutive + read operations for a response from the server. Omitting the parameter + will default the read timeout to the system default, probably `the + global default timeout in socket.py <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_. None will set an infinite timeout. @@ -91,14 +93,21 @@ class Timeout(object): DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT def __init__(self, total=None, connect=_Default, read=_Default): - self._connect = self._validate_timeout(connect, 'connect') - self._read = self._validate_timeout(read, 'read') - self.total = self._validate_timeout(total, 'total') + self._connect = self._validate_timeout(connect, "connect") + self._read = self._validate_timeout(read, "read") + self.total = self._validate_timeout(total, "total") self._start_connect = None - def __str__(self): - return '%s(connect=%r, read=%r, total=%r)' % ( - type(self).__name__, self._connect, self._read, self.total) + def __repr__(self): + return "%s(connect=%r, read=%r, total=%r)" % ( + type(self).__name__, + self._connect, + self._read, + self.total, + ) + + # __str__ provided for backwards compatibility + __str__ = __repr__ @classmethod def _validate_timeout(cls, value, name): @@ -118,22 +127,31 @@ class Timeout(object): return value if isinstance(value, bool): - raise ValueError("Timeout cannot be a boolean value. It must " - "be an int, float or None.") + raise ValueError( + "Timeout cannot be a boolean value. It must " + "be an int, float or None." + ) try: float(value) except (TypeError, ValueError): - raise ValueError("Timeout value %s was %s, but it must be an " - "int, float or None." 
% (name, value)) + raise ValueError( + "Timeout value %s was %s, but it must be an " + "int, float or None." % (name, value) + ) try: if value <= 0: - raise ValueError("Attempted to set %s timeout to %s, but the " - "timeout cannot be set to a value less " - "than or equal to 0." % (name, value)) - except TypeError: # Python 3 - raise ValueError("Timeout value %s was %s, but it must be an " - "int, float or None." % (name, value)) + raise ValueError( + "Attempted to set %s timeout to %s, but the " + "timeout cannot be set to a value less " + "than or equal to 0." % (name, value) + ) + except TypeError: + # Python 3 + raise ValueError( + "Timeout value %s was %s, but it must be an " + "int, float or None." % (name, value) + ) return value @@ -165,8 +183,7 @@ class Timeout(object): # We can't use copy.deepcopy because that will also create a new object # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to # detect the user default. - return Timeout(connect=self._connect, read=self._read, - total=self.total) + return Timeout(connect=self._connect, read=self._read, total=self.total) def start_connect(self): """ Start the timeout clock, used during a connect() attempt @@ -182,14 +199,15 @@ class Timeout(object): def get_connect_duration(self): """ Gets the time elapsed since the call to :meth:`start_connect`. - :return: Elapsed time. + :return: Elapsed time in seconds. :rtype: float :raises urllib3.exceptions.TimeoutStateError: if you attempt to get duration for a timer that hasn't been started. """ if self._start_connect is None: - raise TimeoutStateError("Can't get connect duration for timer " - "that has not started.") + raise TimeoutStateError( + "Can't get connect duration for timer that has not started." + ) return current_time() - self._start_connect @property @@ -227,15 +245,16 @@ class Timeout(object): :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` has not yet been called on this object. 
""" - if (self.total is not None and - self.total is not self.DEFAULT_TIMEOUT and - self._read is not None and - self._read is not self.DEFAULT_TIMEOUT): + if ( + self.total is not None + and self.total is not self.DEFAULT_TIMEOUT + and self._read is not None + and self._read is not self.DEFAULT_TIMEOUT + ): # In case the connect timeout has not yet been established. if self._start_connect is None: return self._read - return max(0, min(self.total - self.get_connect_duration(), - self._read)) + return max(0, min(self.total - self.get_connect_duration(), self._read)) elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT: return max(0, self.total - self.get_connect_duration()) else: diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/url.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/url.py index 6b6f9968..0eb0b6a8 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/url.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/url.py @@ -1,34 +1,110 @@ from __future__ import absolute_import +import re from collections import namedtuple from ..exceptions import LocationParseError +from ..packages import six -url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'] +url_attrs = ["scheme", "auth", "host", "port", "path", "query", "fragment"] # We only want to normalize urls with an HTTP(S) scheme. # urllib3 infers URLs without a scheme (None) to be http. -NORMALIZABLE_SCHEMES = ('http', 'https', None) - - -class Url(namedtuple('Url', url_attrs)): +NORMALIZABLE_SCHEMES = ("http", "https", None) + +# Almost all of these patterns were derived from the +# 'rfc3986' module: https://github.com/python-hyper/rfc3986 +PERCENT_RE = re.compile(r"%[a-fA-F0-9]{2}") +SCHEME_RE = re.compile(r"^(?:[a-zA-Z][a-zA-Z0-9+-]*:|/)") +URI_RE = re.compile( + r"^(?:([a-zA-Z][a-zA-Z0-9+.-]*):)?" + r"(?://([^\\/?#]*))?" 
+ r"([^?#]*)" + r"(?:\?([^#]*))?" + r"(?:#(.*))?$", + re.UNICODE | re.DOTALL, +) + +IPV4_PAT = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}" +HEX_PAT = "[0-9A-Fa-f]{1,4}" +LS32_PAT = "(?:{hex}:{hex}|{ipv4})".format(hex=HEX_PAT, ipv4=IPV4_PAT) +_subs = {"hex": HEX_PAT, "ls32": LS32_PAT} +_variations = [ + # 6( h16 ":" ) ls32 + "(?:%(hex)s:){6}%(ls32)s", + # "::" 5( h16 ":" ) ls32 + "::(?:%(hex)s:){5}%(ls32)s", + # [ h16 ] "::" 4( h16 ":" ) ls32 + "(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)s", + # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32 + "(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)s", + # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32 + "(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)s", + # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32 + "(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s", + # [ *4( h16 ":" ) h16 ] "::" ls32 + "(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s", + # [ *5( h16 ":" ) h16 ] "::" h16 + "(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s", + # [ *6( h16 ":" ) h16 ] "::" + "(?:(?:%(hex)s:){0,6}%(hex)s)?::", +] + +UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~" +IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")" +ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+" +IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]" +REG_NAME_PAT = r"(?:[^\[\]%:/?#]|%[a-fA-F0-9]{2})*" +TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$") + +IPV4_RE = re.compile("^" + IPV4_PAT + "$") +IPV6_RE = re.compile("^" + IPV6_PAT + "$") +IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT + "$") +BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT[2:-2] + "$") +ZONE_ID_RE = re.compile("(" + ZONE_ID_PAT + r")\]$") + +SUBAUTHORITY_PAT = (u"^(?:(.*)@)?(%s|%s|%s)(?::([0-9]{0,5}))?$") % ( + REG_NAME_PAT, + IPV4_PAT, + IPV6_ADDRZ_PAT, +) +SUBAUTHORITY_RE = re.compile(SUBAUTHORITY_PAT, re.UNICODE | re.DOTALL) + +UNRESERVED_CHARS = set( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-~" 
+) +SUB_DELIM_CHARS = set("!$&'()*+,;=") +USERINFO_CHARS = UNRESERVED_CHARS | SUB_DELIM_CHARS | {":"} +PATH_CHARS = USERINFO_CHARS | {"@", "/"} +QUERY_CHARS = FRAGMENT_CHARS = PATH_CHARS | {"?"} + + +class Url(namedtuple("Url", url_attrs)): """ - Datastructure for representing an HTTP URL. Used as a return value for + Data structure for representing an HTTP URL. Used as a return value for :func:`parse_url`. Both the scheme and host are normalized as they are both case-insensitive according to RFC 3986. """ + __slots__ = () - def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, - query=None, fragment=None): - if path and not path.startswith('/'): - path = '/' + path - if scheme: + def __new__( + cls, + scheme=None, + auth=None, + host=None, + port=None, + path=None, + query=None, + fragment=None, + ): + if path and not path.startswith("/"): + path = "/" + path + if scheme is not None: scheme = scheme.lower() - if host and scheme in NORMALIZABLE_SCHEMES: - host = host.lower() - return super(Url, cls).__new__(cls, scheme, auth, host, port, path, - query, fragment) + return super(Url, cls).__new__( + cls, scheme, auth, host, port, path, query, fragment + ) @property def hostname(self): @@ -38,10 +114,10 @@ class Url(namedtuple('Url', url_attrs)): @property def request_uri(self): """Absolute path including the query string.""" - uri = self.path or '/' + uri = self.path or "/" if self.query is not None: - uri += '?' + self.query + uri += "?" 
+ self.query return uri @@ -49,7 +125,7 @@ class Url(namedtuple('Url', url_attrs)): def netloc(self): """Network location including host and port""" if self.port: - return '%s:%d' % (self.host, self.port) + return "%s:%d" % (self.host, self.port) return self.host @property @@ -72,23 +148,23 @@ class Url(namedtuple('Url', url_attrs)): 'http://username:password@host.com:80/path?query#fragment' """ scheme, auth, host, port, path, query, fragment = self - url = '' + url = u"" # We use "is not None" we want things to happen with empty strings (or 0 port) if scheme is not None: - url += scheme + '://' + url += scheme + u"://" if auth is not None: - url += auth + '@' + url += auth + u"@" if host is not None: url += host if port is not None: - url += ':' + str(port) + url += u":" + str(port) if path is not None: url += path if query is not None: - url += '?' + query + url += u"?" + query if fragment is not None: - url += '#' + fragment + url += u"#" + fragment return url @@ -98,6 +174,8 @@ class Url(namedtuple('Url', url_attrs)): def split_first(s, delims): """ + .. deprecated:: 1.25 + Given a string and an iterable of delimiters, split on the first found delimiter. Return two split parts and the matched delimiter. @@ -124,15 +202,141 @@ def split_first(s, delims): min_delim = d if min_idx is None or min_idx < 0: - return s, '', None + return s, "", None + + return s[:min_idx], s[min_idx + 1 :], min_delim + + +def _encode_invalid_chars(component, allowed_chars, encoding="utf-8"): + """Percent-encodes a URI component without reapplying + onto an already percent-encoded component. + """ + if component is None: + return component + + component = six.ensure_text(component) + + # Normalize existing percent-encoded bytes. + # Try to see if the component we're encoding is already percent-encoded + # so we can skip all '%' characters but still encode all others. 
+ component, percent_encodings = PERCENT_RE.subn( + lambda match: match.group(0).upper(), component + ) + + uri_bytes = component.encode("utf-8", "surrogatepass") + is_percent_encoded = percent_encodings == uri_bytes.count(b"%") + encoded_component = bytearray() + + for i in range(0, len(uri_bytes)): + # Will return a single character bytestring on both Python 2 & 3 + byte = uri_bytes[i : i + 1] + byte_ord = ord(byte) + if (is_percent_encoded and byte == b"%") or ( + byte_ord < 128 and byte.decode() in allowed_chars + ): + encoded_component += byte + continue + encoded_component.extend(b"%" + (hex(byte_ord)[2:].encode().zfill(2).upper())) + + return encoded_component.decode(encoding) - return s[:min_idx], s[min_idx + 1:], min_delim + +def _remove_path_dot_segments(path): + # See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code + segments = path.split("/") # Turn the path into a list of segments + output = [] # Initialize the variable to use to store output + + for segment in segments: + # '.' is the current directory, so ignore it, it is superfluous + if segment == ".": + continue + # Anything other than '..', should be appended to the output + elif segment != "..": + output.append(segment) + # In this case segment == '..', if we can, we should pop the last + # element + elif output: + output.pop() + + # If the path starts with '/' and the output is empty or the first string + # is non-empty + if path.startswith("/") and (not output or output[0]): + output.insert(0, "") + + # If the path starts with '/.' or '/..' 
ensure we add one more empty + # string to add a trailing '/' + if path.endswith(("/.", "/..")): + output.append("") + + return "/".join(output) + + +def _normalize_host(host, scheme): + if host: + if isinstance(host, six.binary_type): + host = six.ensure_str(host) + + if scheme in NORMALIZABLE_SCHEMES: + is_ipv6 = IPV6_ADDRZ_RE.match(host) + if is_ipv6: + match = ZONE_ID_RE.search(host) + if match: + start, end = match.span(1) + zone_id = host[start:end] + + if zone_id.startswith("%25") and zone_id != "%25": + zone_id = zone_id[3:] + else: + zone_id = zone_id[1:] + zone_id = "%" + _encode_invalid_chars(zone_id, UNRESERVED_CHARS) + return host[:start].lower() + zone_id + host[end:] + else: + return host.lower() + elif not IPV4_RE.match(host): + return six.ensure_str( + b".".join([_idna_encode(label) for label in host.split(".")]) + ) + return host + + +def _idna_encode(name): + if name and any([ord(x) > 128 for x in name]): + try: + from pip._vendor import idna + except ImportError: + six.raise_from( + LocationParseError("Unable to parse URL without the 'idna' module"), + None, + ) + try: + return idna.encode(name.lower(), strict=True, std3_rules=True) + except idna.IDNAError: + six.raise_from( + LocationParseError(u"Name '%s' is not a valid IDNA label" % name), None + ) + return name.lower().encode("ascii") + + +def _encode_target(target): + """Percent-encodes a request target so that there are no invalid characters""" + path, query = TARGET_RE.match(target).groups() + target = _encode_invalid_chars(path, PATH_CHARS) + query = _encode_invalid_chars(query, QUERY_CHARS) + if query is not None: + target += "?" + query + return target def parse_url(url): """ Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is performed to parse incomplete urls. Fields not provided will be None. + This parser is RFC 3986 compliant. + + The parser logic and helper functions are based heavily on + work done in the ``rfc3986`` module. 
+ + :param str url: URL to parse into a :class:`.Url` namedtuple. Partly backwards-compatible with :mod:`urlparse`. @@ -145,81 +349,77 @@ def parse_url(url): >>> parse_url('/foo?bar') Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) """ - - # While this code has overlap with stdlib's urlparse, it is much - # simplified for our needs and less annoying. - # Additionally, this implementations does silly things to be optimal - # on CPython. - if not url: # Empty return Url() - scheme = None - auth = None - host = None - port = None - path = None - fragment = None - query = None - - # Scheme - if '://' in url: - scheme, url = url.split('://', 1) - - # Find the earliest Authority Terminator - # (http://tools.ietf.org/html/rfc3986#section-3.2) - url, path_, delim = split_first(url, ['/', '?', '#']) - - if delim: - # Reassemble the path - path = delim + path_ - - # Auth - if '@' in url: - # Last '@' denotes end of auth part - auth, url = url.rsplit('@', 1) - - # IPv6 - if url and url[0] == '[': - host, url = url.split(']', 1) - host += ']' - - # Port - if ':' in url: - _host, port = url.split(':', 1) - - if not host: - host = _host - - if port: - # If given, ports must be integers. No whitespace, no plus or - # minus prefixes, no non-integer digits such as ^2 (superscript). - if not port.isdigit(): - raise LocationParseError(url) - try: - port = int(port) - except ValueError: - raise LocationParseError(url) + source_url = url + if not SCHEME_RE.search(url): + url = "//" + url + + try: + scheme, authority, path, query, fragment = URI_RE.match(url).groups() + normalize_uri = scheme is None or scheme.lower() in NORMALIZABLE_SCHEMES + + if scheme: + scheme = scheme.lower() + + if authority: + auth, host, port = SUBAUTHORITY_RE.match(authority).groups() + if auth and normalize_uri: + auth = _encode_invalid_chars(auth, USERINFO_CHARS) + if port == "": + port = None else: - # Blank ports are cool, too. 
(rfc3986#section-3.2.3) - port = None + auth, host, port = None, None, None + + if port is not None: + port = int(port) + if not (0 <= port <= 65535): + raise LocationParseError(url) + + host = _normalize_host(host, scheme) - elif not host and url: - host = url + if normalize_uri and path: + path = _remove_path_dot_segments(path) + path = _encode_invalid_chars(path, PATH_CHARS) + if normalize_uri and query: + query = _encode_invalid_chars(query, QUERY_CHARS) + if normalize_uri and fragment: + fragment = _encode_invalid_chars(fragment, FRAGMENT_CHARS) + except (ValueError, AttributeError): + return six.raise_from(LocationParseError(source_url), None) + + # For the sake of backwards compatibility we put empty + # string values for path if there are any defined values + # beyond the path in the URL. + # TODO: Remove this when we break backwards compatibility. if not path: - return Url(scheme, auth, host, port, path, query, fragment) + if query is not None or fragment is not None: + path = "" + else: + path = None - # Fragment - if '#' in path: - path, fragment = path.split('#', 1) + # Ensure that each part of the URL is a `str` for + # backwards compatibility. + if isinstance(url, six.text_type): + ensure_func = six.ensure_text + else: + ensure_func = six.ensure_str - # Query - if '?' in path: - path, query = path.split('?', 1) + def ensure_type(x): + return x if x is None else ensure_func(x) - return Url(scheme, auth, host, port, path, query, fragment) + return Url( + scheme=ensure_type(scheme), + auth=ensure_type(auth), + host=ensure_type(host), + port=port, + path=ensure_type(path), + query=ensure_type(query), + fragment=ensure_type(fragment), + ) def get_host(url): @@ -227,4 +427,4 @@ def get_host(url): Deprecated. Use :func:`parse_url` instead. 
""" p = parse_url(url) - return p.scheme or 'http', p.hostname, p.port + return p.scheme or "http", p.hostname, p.port diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/wait.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/wait.py index 4db71baf..d71d2fd7 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/wait.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/urllib3/util/wait.py @@ -2,6 +2,7 @@ import errno from functools import partial import select import sys + try: from time import monotonic except ImportError: @@ -40,6 +41,8 @@ if sys.version_info >= (3, 5): # Modern Python, that retries syscalls by default def _retry_on_intr(fn, timeout): return fn(timeout) + + else: # Old and broken Pythons. def _retry_on_intr(fn, timeout): diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/vendor.txt b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/vendor.txt new file mode 100644 index 00000000..06fa1358 --- /dev/null +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_vendor/vendor.txt @@ -0,0 +1,24 @@ +appdirs==1.4.4 +CacheControl==0.12.6 +colorama==0.4.3 +contextlib2==0.6.0.post1 +distlib==0.3.1 +distro==1.5.0 +html5lib==1.1 +ipaddress==1.0.23 # Only needed on 2.6 and 2.7 +msgpack==1.0.0 +packaging==20.4 +pep517==0.8.2 +progress==1.5 +pyparsing==2.4.7 +requests==2.24.0 + certifi==2020.06.20 + chardet==3.0.4 + idna==2.10 + urllib3==1.25.9 +resolvelib==0.4.0 +retrying==1.3.3 +setuptools==44.0.0 +six==1.15.0 +toml==0.10.1 +webencodings==0.5.1 diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/INSTALLER b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/INSTALLER similarity index 100% rename from Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/INSTALLER rename to 
Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/INSTALLER diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/METADATA b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/METADATA similarity index 99% rename from Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/METADATA rename to Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/METADATA index 21fbaa35..0afb33c5 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/METADATA +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: pyOpenRPA -Version: 1.1.14 +Version: 1.1.15 Summary: First open source RPA platform for business Home-page: https://gitlab.com/UnicodeLabs/OpenRPA Author: Ivan Maslov diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/RECORD b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/RECORD similarity index 97% rename from Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/RECORD rename to Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/RECORD index 4fbba60d..275189b8 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/RECORD +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/RECORD @@ -1,8 +1,9 @@ -pyOpenRPA-1.1.14.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyOpenRPA-1.1.14.dist-info/METADATA,sha256=aOG2bCjUIdJa6gP7Meoa873LoWkxQhlllqL4y4EU5L0,3352 -pyOpenRPA-1.1.14.dist-info/RECORD,, -pyOpenRPA-1.1.14.dist-info/WHEEL,sha256=qB97nP5e4MrOsXW5bIU5cUn_KSVr10EV0l-GCHG9qNs,97 -pyOpenRPA-1.1.14.dist-info/top_level.txt,sha256=RPzwQXgYBRo_m5L3ZLs6Voh8aEkMeT29Xsul1w1qE0g,10 
+pyOpenRPA-1.1.15.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pyOpenRPA-1.1.15.dist-info/METADATA,sha256=QZCz4uAPAscqSQmt9SgpXNqSEDBYuGlZg8r6k4KxKc0,3352 +pyOpenRPA-1.1.15.dist-info/RECORD,, +pyOpenRPA-1.1.15.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pyOpenRPA-1.1.15.dist-info/WHEEL,sha256=qB97nP5e4MrOsXW5bIU5cUn_KSVr10EV0l-GCHG9qNs,97 +pyOpenRPA-1.1.15.dist-info/top_level.txt,sha256=RPzwQXgYBRo_m5L3ZLs6Voh8aEkMeT29Xsul1w1qE0g,10 pyOpenRPA/.idea/inspectionProfiles/profiles_settings.xml,sha256=YXLFmX7rPNGcnKK1uX1uKYPN0fpgskYNe7t0BV7cqkY,174 pyOpenRPA/.idea/misc.xml,sha256=ySjeaQ1DfqxaRTlFGT_3zW5r9mWuwxoAK_AX4QiuAZM,203 pyOpenRPA/.idea/modules.xml,sha256=Q__U1JIA2cjxbLRXAv-SfYY00fZA0TNlpkkbY4s3ncg,277 @@ -18,7 +19,7 @@ pyOpenRPA/Orchestrator/RobotRDPActive/Clipboard.py,sha256=YB5HJL-Qf4IlVrFHyRv_ZM pyOpenRPA/Orchestrator/RobotRDPActive/Connector.py,sha256=MkxTVaOVITl1V3EvH3oNx2gbCx3EeRS9Gb_83rmjdjg,25553 pyOpenRPA/Orchestrator/RobotRDPActive/ConnectorExceptions.py,sha256=wwH9JOoMFFxDKQ7IyNyh1OkFkZ23o1cD8Jm3n31ycII,657 pyOpenRPA/Orchestrator/RobotRDPActive/Processor.py,sha256=HcysWMmxMxSjaUybqovoCZToGrvzC0WFSVZbw6nfa68,9254 -pyOpenRPA/Orchestrator/RobotRDPActive/RobotRDPActive.py,sha256=h41JsJu3ca68KV9F6oIhJhVPaH72m7TDvZIdEK4k-Xk,10301 +pyOpenRPA/Orchestrator/RobotRDPActive/RobotRDPActive.py,sha256=jCtHXExgRW0licn8K-xSO3tFd6P-4IFzp46TdS57vQ4,10726 pyOpenRPA/Orchestrator/RobotRDPActive/Scheduler.py,sha256=21N0ilFzWI1mj3X5S9tPMgwvG7BviuBxfTuqBY85Hy4,9144 pyOpenRPA/Orchestrator/RobotRDPActive/Template.rdp,sha256=JEMVYkEmNcfg_p8isdIyvj9E-2ZB5mj-R3MkcNMKxkA,2426 pyOpenRPA/Orchestrator/RobotRDPActive/Timer.py,sha256=y8--fUvg10qEFomecl_cmdWpdGjarZBlFpMbs_GvzoQ,1077 @@ -46,8 +47,8 @@ pyOpenRPA/Orchestrator/RobotScreenActive/__pycache__/__main__.cpython-37.pyc,, pyOpenRPA/Orchestrator/Server.py,sha256=kWMAUDeHlG4RX3lZR-pHBIpeZ6lxF_PJ_KDVENvbFK8,25403 
pyOpenRPA/Orchestrator/ServerSettings.py,sha256=dK8pQzg2mcLPjbizOPCP7yHMMVLiTh8RbYVnJXn-Mvg,15640 pyOpenRPA/Orchestrator/Timer.py,sha256=HvYtEeH2Q5WVVjgds9XaBpWRmvZgwgBXurJDdVVq_T0,2097 -pyOpenRPA/Orchestrator/Web/Index.js,sha256=9_-DeipJ8Dx6RjGnySbk6q7FQbMnzmh0UH-O9LrOeGM,27786 -pyOpenRPA/Orchestrator/Web/Index.xhtml,sha256=dHxRf_eYWbsr504OpC-Ex6uz6AVw15mxXHYeaUiZSGQ,14267 +pyOpenRPA/Orchestrator/Web/Index.js,sha256=zdbj1ioNgxhprLpGIQ2YYraKUMUpkBiVoR-3GXGR5X0,28258 +pyOpenRPA/Orchestrator/Web/Index.xhtml,sha256=uRjdlkK8-yyrh4y3BkBYlFkRHzzPlHWz5dRh1FmG5QE,14260 pyOpenRPA/Orchestrator/Web/favicon.ico,sha256=6S8XwSQ_3FXPpaX6zYkf8uUewVXO9bHnrrDHEoWrEgw,112922 pyOpenRPA/Orchestrator/__init__.py,sha256=qVH8fEPgXk54rmy-ol0PnT8GF5OlGE0a8mExwJ4tFqY,124 pyOpenRPA/Orchestrator/__main__.py,sha256=cOd8WU77VGgzTZUB0WmWpPmdYyMZY1zVyuU9yx26MKs,144 @@ -313,6 +314,6 @@ pyOpenRPA/Tools/Terminator.py,sha256=VcjX3gFXiCGu3MMCidhrTNsmC9wsAqfjRJdTSU9fLnU pyOpenRPA/Tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 pyOpenRPA/Tools/__pycache__/Terminator.cpython-37.pyc,, pyOpenRPA/Tools/__pycache__/__init__.cpython-37.pyc,, -pyOpenRPA/__init__.py,sha256=zJQYGBDH-YV1iJmK03mujDcj8s95Ct5RaUAYji-Xdf0,175 +pyOpenRPA/__init__.py,sha256=EYr94WCIlVXY6RR7CMN4jtIYitfDcpxLYJGK5kylOSM,175 pyOpenRPA/__pycache__/__init__.cpython-37.pyc,, pyOpenRPA/test.txt,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/REQUESTED b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/WHEEL b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/WHEEL similarity index 100% rename from Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/WHEEL rename to 
Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/WHEEL diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/top_level.txt b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/top_level.txt similarity index 100% rename from Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/top_level.txt rename to Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/top_level.txt diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA/Orchestrator/RobotRDPActive/RobotRDPActive.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA/Orchestrator/RobotRDPActive/RobotRDPActive.py index 34add348..f0c66ed9 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA/Orchestrator/RobotRDPActive/RobotRDPActive.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA/Orchestrator/RobotRDPActive/RobotRDPActive.py @@ -15,12 +15,18 @@ def RobotRDPActive(inGSettings): # Global error handler try: ######## Init the RDP List + lNewRDPList = {} for lRDPSessionKeyStrItem in mGSettingsRDPActiveDict["RDPList"]: lConfigurationItem = mGSettingsRDPActiveDict["RDPList"][lRDPSessionKeyStrItem] + lAddToNewRDPDict = True + if "SessionHex" not in lConfigurationItem: lAddToNewRDPDict = False # 2020.08.03 fix: Init the Session hex field. 
If no SessionHex - trash in structure - remove if lConfigurationItem["SessionHex"] is None or lConfigurationItem["SessionHex"] == "": # Minor fix - need for restore existed RDP sessions lConfigurationItem["SessionIsWindowExistBool"] = False # Flag that session is not started lConfigurationItem["SessionIsWindowResponsibleBool"] = False # Flag that session is not started lConfigurationItem["SessionHex"] = " 77777sdfsdf77777dsfdfsf77777777" # Flag that session is not started + if lAddToNewRDPDict: + lNewRDPList[lRDPSessionKeyStrItem] = lConfigurationItem + mGSettingsRDPActiveDict["RDPList"] = lNewRDPList # Update the structure ########## # Run monitor - main loop # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA/Orchestrator/Web/Index.js b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA/Orchestrator/Web/Index.js index 56c80ba0..17d52f94 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA/Orchestrator/Web/Index.js +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA/Orchestrator/Web/Index.js @@ -664,6 +664,14 @@ $(document).ready(function() { $(".openrpa-rdpactive-title").show() //Show section $(".openrpa-robotrdpactive-control-panel-general").show() //Show section } + //Turn on the restart PC button + if (lUACAsk(["Orchestrator","Controls","RestartPC"])) { + $(".openrpa-control-restartpc").show() //Show button + } + //Turn on the git update + restart orchestrator + if (lUACAsk(["Orchestrator","Controls","GITRestartOrchestrator"])) { + $(".openrpa-control-gitrestartorchestrator").show() //Show button + } }, dataType: "text" }); diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA/Orchestrator/Web/Index.xhtml b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA/Orchestrator/Web/Index.xhtml index bf4274db..672fda6c 100644 --- 
a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA/Orchestrator/Web/Index.xhtml +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA/Orchestrator/Web/Index.xhtml @@ -135,13 +135,13 @@ <i class="right arrow icon"></i> </div> </div> - <div class="ui animated button openrpa-control-restartorchestrator" onclick="mGlobal.Controller.OrchestratorGITPullRestart();" style="display: none; margin-top: 5px;"> + <div class="ui animated button openrpa-control-gitrestartorchestrator" onclick="mGlobal.Controller.OrchestratorGITPullRestart();" style="display: none; margin-top: 5px;"> <div class="visible content">Git pull + restart Orchestrator</div> <div class="hidden content"> <i class="right arrow icon"></i> </div> </div> - <div class="ui animated button openrpa-control-restartorchestrator red" onclick="mGlobal.Controller.PCRestart();" style="display: none; margin-top: 5px;"> + <div class="ui animated button openrpa-control-restartpc red" onclick="mGlobal.Controller.PCRestart();" style="display: none; margin-top: 5px;"> <div class="visible content">Restart PC</div> <div class="hidden content"> <i class="right arrow icon"></i> diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA/__init__.py b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA/__init__.py index c16aa7c5..a9791f74 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA/__init__.py +++ b/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA/__init__.py @@ -3,7 +3,7 @@ r""" The OpenRPA package (from UnicodeLabs) """ -__version__ = 'v1.1.14' +__version__ = 'v1.1.15' __all__ = [] __author__ = 'Ivan Maslov <ivan.maslov@unicodelabs.ru>' #from .Core import Robot \ No newline at end of file diff --git a/Resources/WPy32-3720/python-3.7.2/Scripts/pip.exe b/Resources/WPy32-3720/python-3.7.2/Scripts/pip.exe index 5306633aa2b43aee86eab63309cb3f98b75bbb6b..6b1eb425cd96cf41cf68b526cc82383e72784de9 100644 GIT binary patch delta 28830 
zcmd?Sdtg(=_AkDZz5=AAKnsPIw$K*JJI$j_nl=qWOTmJHLQ9pWK#dUvF+ucD0w=8! zH&t+a-~$B#Mdf&qw-2yTq@W;(97RE`M-EO6M~fg-t?b`tP12(0-0!`=Kkk3Gxc01B zv-X<Tnl&?PX2%adjXHfbYIVM<Wlg~-->Nr{Ek1JcrObA??S;I|qlkYpzEfti2=g-= zMX1Sq6XAg^*_p=?9yz%&^C>~shWRbwuuz1P?wsQynbLp!Ig2EzR23!p2L3(`N$DcF zNm8r2s5(p1r--f>Bq_pxFb>ed2CGs#om5Fuq$K6Gu}P`{UG?!QDWCXesiaNLHT`;M z>s8XqPI&o}Bqguum(<g#Q%MfuX#Z=Gq^YVk{ra#Ll_sapX^=@cWeBl<{7Zw>;fv<Z z_RN-~u0xT6H>Kf#FW7K(YUeZ*gx=JsScN*J6AB<rLNt*rQ}^k5p({`nMR2%xysW-e zwMMUbJTls+k)#bZ{YOg1_X9<3{%VcnEvhSub1GhKU2WSnMR6$+lH{VYIQbpPlH@-W zA4+h#@;i@|xLXHWy~D}(b)iJ9TcspOEe=T@-)20{??&4lZI~ioTyZfHBK|ukmK51* z`b*{Qa(`14r)vqxmD=SVO!!~OjcAuEkJnTfmAc|Kr*3~#{qT{Jb0S}XW<W`5>pe+t z)emQnMW#E-D_m<KwXUemsJLd?z!s=(3skr8J0Zw;)Yhnb;(%*CI41H`MN!=93dP&T zFC|G5g;x^UrO2e4L2ISxp%B>>iS~4>%J_+RNh*_DxD{{u52e`}J-y_XAq^2lt?sC2 zD(k9GU)~%j;_^?yW&6<qk>z|^ESnpZ=9?XoQ;sI{Ug7sZ3PQ;F^tz(REyjA=HQkf- zu6I$YfcK)4Pa$&A#UVvk+)<ES3Fc>R2<I-;#D8e0JpO#d_$IeBXgpWE_^-|XmBg?0 z5fyolN?bujRv|MV>U|`set#1(xOS%8$$$P`QL5xFd)q+pAIhk*X}nQ^=`F#@auJVe zL+%f#jrDE`y3xj84oMWkcJ+`XxfSJYOsW@xwn7jD)QwJ6f?ccRxVK1Rm93j6ro`58 z-w(lvKv9e0^|kTTzId~!rE=yBp(Or`+H&`B#j7!14R~Amzo46ymFUL4<t|sj$dDG6 zBe`5j&_KnI;wcW^Tj7$rP}g2@qI;b@L$vA@S#;GiLKWg|*S%xwidvoT#etK$HkelK zy0b$jYIH-UH=Z~v%J~em5!A?b*n2t{VZ2&Q)sDtcuAC_!4wbiDZc!5AQ4;?E$*p#) z+)_D-aw-WWGy+;`uX?pgf}@3{#w1QH8c7B2MD3T5jAUcIV>*ot|M}Jx7teJJ!$X0Z zj5CPIMVhj5sCGp~V01>Zl3*c;XX-|0#5vsy5iD`KJC!L3`WE(BOhQZ|>gD7;*y}NU zePDE1L_CP$<Q_ev9PFkFCpS!|3dILJ8CkM9A8hWc%BM~w&gECoyN!_<Q^jjL@mgi2 z`{z)ii+>hM^oYc#Ly1>HiSzl}p~Tf9@j)ae;8m6DA3YpW<LHcO5;RUp7~R6AX_Ba9 zwnlFOHHky2c#Dk6#JVRkIBe6TiB9w*dP_;jKw@1@MiZz`#gmcf+6LJb{Ka2Msp5zt zkIu*_+vq}zDhX-e1`RD#3e2s9@I;b9Z54`UA?k`8sHH|a2sxFNpvQ#_)iLzJL^QQr zP;$v3y_2OK$$rpesMmF4@tuauGU1&vJ{uikJX%{1D^cchf;5rmKuq0V5_NlG{D%^& z(6c?<8YdswEv&Lp=-wcPOPT8lk!y@7z1H8T#v8=mmDsy?V=s2<HSQPGn-txv@gG)K zS);w*>GniZdQ&*TqcPT31)`B!!AG<yO4+qDu49m!q`0N2E50K|hN*D!#~$z8%O}NB zHShgJQ9>^r(&9aTgBp3eb*>Vmmhrn{gl?l8sa5{Jr1bYBg4b{FUq%+kB?Rap0P2sD 
zP;m!PaqR8IA?`qQP}E^iJFyG00XO?g=bk=2<liv%Xk8^C5`9qRxAgZ!y3>MD?*9Jz z$jXUMo(m-qb=Nl5o^Dl?cyx9!qi&%(*p0G$8`FTW0b%JVi}$<0F5(PXv;Abv%iYj~ zp(29<icjtD(FAs#15+@zv6jNyLG^RB{3Jz;N9)!#A*>2$Z#o>BHTai^%V&)BwP&db z_KGIx3tDBJpRA_wYNh<4?F_rCOKSc}k$3@<V`XJUUF{WEJLlnOX~^aBGPlOn{5vVJ z356jWZaizdOaj})JKL|Z!(B3rf5U7nDi!MQ6z_MU_0wHhUD1(Gpd;NvxLZ9l)W%D@ zvcB5hzNziWxgt4~R@yGCbHp|3EL($@*VUdu(VgXibrkY4s0<nCT7`<T3R0Z>%2m=2 zt6+5PCBdXXUxDh@-Bwm1^n`zh&VeCEn=i(!>4TN6y@(+$W<Dpc4$aIV8mw9=#!r>s zm(t%m#2t5A-Iz3Ye<#<fVaw1EwO5e=hPY^lCCCPrNN7=A<V<H+L|yH9C><m@A*pU3 zMdZdhr@Bhk`0LeGzS^VSpNAa$xK(bEMY-!rkQ7HrwWpAB;N#Xn?MdoK(dGZ0Dj$)B zjtP;8;=QvWi4jL4>gY|ky3UiWDd(f9)E(MzMYPzVn}5axFH}zvx~1%*)~#@5>7aq- zWjq^`ga44ej1NG;k2SxBR=35{+RCfG+|WFz7gh^eS6mzxH;xpxBDQDjCz$>krJgW! zd~sj4DYoC(r!cJsiZrm2vHTg4nuyfkQLI*~tTt~NujzQjeZ8)z#pxpXd`GGn&cO|s ze&SG)r+TrjT@wd=1?C1!NPKn_6s|#5VCu?cuLko{4?Yly?w&IOdj4TDyQOP}`dTvc zb{+0R8TC}gc%&Qa!EIE;LIh<>;ICl?7%uU6S_~dhf;U5ps<r4`tb4u=U?Sxf8qz3n zIq#lE%}DD>OuE`~uIfkCJC71RffWCt<i;XeS124UhZ*lYKcDOi7x@kn5-KvSN<G6+ z4~%kint`nTEDSGbWnww+jbhL=OjC-JAI0Qf8Hk=SJ@n>8|GYM-oR$U1o*Lo~tefQ) zS{z>WCR&TyJU#dsyaEH)nV$e6k7o(G<Z(+ZC}*)oE}0+bDwntd6C&lVwpiDYk&>so z9Gf->Q+dfkZww!p1HNdvIMNoCm4(EfGG6LP*YJmiv%lzyWBWpKxYg?Wu#a?$x(~~f zB;B4C>Oss0PTn_<O^UN;IZ-*O-1`8Uv?xwjvw~VhbXPBKgl2?geA=7sja%WPmXN%O ze3cy?nZiHK$2wiX8~lP5jZj&8))Of=VC90v`HhB~OLnQXR`CvSYiC%ac3M#C*fPEv zGH3#NH5kLk_eQ1kyjTQs<IEYs_%gnL7`OqnMoEiIjP<_TrZ?=B(nd-R$BrG_P+v|~ zIDQ6X#O#$YdV<lm!__hJ;f<8yj>KH%pi*tzR7~q2Z;CszJYc3Y-uS14D0iQ!Ga8Ji z-XhV#s13v)(5n3h()<S_SNyUZ>b~@Q_w^aZZl~Ug151T`@IY&n99g@(h!u5DuL5c5 zM>CD}GaC4uQIfR0U@0||CyI|ipqNP<kHg6P!*CF_u(d4ezvB8Vh`+y&{Lzpj2Y!k2 zABdaCH;k5~6%(}5a+E&U*?%|<i%(Yz`>1=bsU9TBMcNf5p;YKxk=9?I2K(hgD~DAO z>o`#r+kr)?1wwq{idkC4rzk@Xpy2-cI47Tkf>us%2)#SMg%$Kj>y`dz8p%UDA7cx9 z9Jb$1OPF>QpZLdlR=#9BTVcEGjpx-cE1}`t4_Nb16>3j|GpUGq;wPEbQrVAS=@J;< zYWvXJi<22@lm??~`OmQHvef(raO-%@ecgna=3ghUU*j_k>*&Qpwgyjfqjc;T`nj8v zH`AIBq)v^}mtSIB*K6eQt@4@Lv#??-6WGLr-l<nm9pr|A)~!sGV|Qq~D7OT)+?l`v 
z2`N1XV^%vF9`YCCV9DynxA1bdFJY4U*%;QXr#|*6EXl)zz#7jc^c<)f&ldMwpq^IB zzUz5wr!Q_sJ39H7<Jh=febjsutLo+GR7x3Z`QRSx@4d2OoMhUZJ=j;hGSk6LDaX5a z2we!&SkI^Q5Jo7G&tn4;b5tAH^u)};3+c_x#Beq+{X8sVwZ`@vnY{*#LCXypXS=bj ziAmiXFb7lyrng-kqjh)}RPfQ`*yo8U8mw7^QEx37!j!~*J`vfw1ZL%`BV)NIp1+Lc zW4L}}f%?avSFF>|d($g;I`s+cMVu4Or>)>!QFvHVY*RLiS@^#!>Cbpy*#0F{kgz%2 ztH3~1S&KB@NMrp`_G0fIy@z4u-xE19*q5xNTUA%2!Ba_C*up;SogUka#a*~StK!&i zy^~|>Z&2pMv5Y=%7^7}bw$P^ih|)X{+E$LGY6%s(5_>u8%*g79z|2VOJ<IvGQ&?0| zfvSj&O3GDF*RiEZdC6!B%zJXgfeX=BN9!Y(dmg}s3L4wWdro1el8owaCbO922_sfh zFPy@XhfJgf%@13Wz~_SyE~XABO-os1?qgU@az@vqAc>|Z#j>N4EhtUXscyWmDOQr$ z3#Ey=FMx!FR1@v}(wY6LVN@$C{jG|pzcT*(k{G$DX=P%tTU}9;vy9I|A}bm{OqX)w z%>yuwY{mHDlfD5GlGZHKp!yhb#ha{n6BTc~?YG5)Wy`3ta#!YZhs4)l4#eQ_w_=;h zw}27LWbbtT0<eGu+wa5*Vq~fbeSM3<sROx-QWJwIwe=75H6E=Bbfwqh_(YH!rT&!V zl`f(wX3l`67IQx`r#6kp&<f9oKZkM-r_<U%F4k`GLry0l2S!1(<{kc~AN|#FD)v=! zOxy_!cNI+Cyf~?{oZn(*KPT%uwUE7X@|_de@V-4qZliAc68ZnBo9+i8+`@fAWeXO{ z-niPUp3eU3Do>REy4w2%^_%K4tL;0;M=mANagNI!I_xi44ihew%jWqo?4-xR`ztV; z54W1<g5bG>-!fa0HuB*pz@4*bYJZ>;|9mDP-kU=&A-|DgO<|+FUP4->+#1Ys@}~*4 zH7@QWM_2jtm#|?eoz(BoV3w40{Yk3OVYpb_F`Vasja(FMAuDj5tiWN|+=-MfmQ+78 zIYpoQ>kLV%Tc~oY%X#z<VmB3t!D^}z=Jq$NctSHNZviUPU=Ajg!3$%P*iR{$`I8~5 zp@RN+7cd~(@n&Qj>PYG+RN50GS8FksC-7CoB68LPW4DYm#Fi4Ne`P=;Ukbj<Mx+iK z>o-ulwek!!P&qF_D?0gPG!b^yXr(Cs2_@KFE>4Dx?&9PxUqpeclBG(2l~$rv!1M;J z3|pufQrYHIOQ#`_-%!pATG)luj0uml{Go>~qVd(3a_z&!q{AN`(!Ml#Itz8c)a8zM z#dU+IztYC~s@1$6ed_H}4p$ha*Cj1%YFeo;3rk;gn#P^Xr(=y+$&Y7-330qs5T?w- z5P(-k87lf&<4)ph1|tI{!PDZ9Ha<^t{j+nS4d+Q4{xK)arQvrG7oUK3K*crgPW)CP zxK%ux%4a+5!II23hG-i|sW!Usa4P0$2o&>FJb8L^s6UIEtlsn9XeU202qW$)9>L*o z(#aaRQ(*i#I3w4SPNCc8ak=PRps2}TeO@B|UBv$nJi<$0SQdUCcFhg^7)spy<aMkN z#GqYqF)IR9XvO*^5POB52`%*Ody$@ITGs>$&f{6NDj`l!L2RT5`Zft-)HxAko)N*Y z(+Hk_EJ_0wxk%dBhNxT=xluwKwtu6R=(SB)Ao&`jN|30+zYUbSu?>yb7}bJjr%0*3 zG3p9173_mpe*|jJ;GGRCv3?5Fo&%~RtfW40Km^+LNz|zlwkM$mMAy66-YE&of%*%G zK|&|1_`SsABH6;JmOzfOrQo@-B-oH>+p(+vJF1vN5WwGXv->lK6dwa#&hHNO2lNGw 
zg3c>GB=31}B(~#cMDm0`CdW~-S_<}e$G9$v<Ppt`XN*ywzLRBV_D~IHMVb9Z=t)1P zqUnuijY^>UJW6=q2$T?WI*t7%n^?W->OP<**i_7(%pBlbFdPhU6V*HsB0^nE+sgUs z@1eexVks*}ZoBHx`aN1_IsZ<Oi-T+AR>%*|aa(v#(m6R+EP5l~#FA=)Ydz-F_E$qI z#8qgN+Im6r_CN;w_kTe?;=R!L$=iZS$Rxx`>rl|y!~v$;{8*GX$f6#8jLWbk!SZxt zJQO-S1B8_A4=YM}x~9Vao8pNn<M}WZmHy478<M*edvzf2W^!*HYZ~yTmAc!Fy+x>x z;ekLhdXRb<y%GmAr40)jBP{OtwwcD`xAFD=WFHLdn*^?Kch3mVRc0!dlJG<eQ)lI? z`m@5Uq+Dp%^oFR>&@Ok^N^)MOAHZUBdIP7;C~}G3+nSCI31|&GubtGE&emj&7)YyU z{JB<J9nxF{wiH$u9>T!<jJP_$kS2JkP;u6jRfgP5gZg#XeuE)6L5n?I#0jI;!rPkI z9fR)e9%u@Y%LO@)KhVU!AC%sYW+hLWYb_a_Phgn*hazD~b@CMG_9P`?Z7~}@xDO<c z9h}+y^4EVB@$J`a`QTI}t{<G)b(^iDfK;AO2AAqys*<F)XspSjG<;_j8$M*XI_hgS zW5`^ef~?5RWISp-WmJ?4*y5uiZfTOMJl_F7<^ESBNvr^-$n!HalP1FP8P9i#X<s}2 zphoI&3Y7B~Of(~$gh1h(TFU!>3nN#bW@{1Ctd4gVaR=hFu&Y#}nud=|m#!6SwJ4f* zLRm#!er+N;&0i?JL#|wJS{Z*m9n;bC6_ZVAnb=2FX%(6Ws}jj$?yu5FmF+uq3<`%Q z+P?%l{XW>NYxqPIgZ;R*k8j1rC6QqxBhILVV_f!XvC)yMX(>}zw*?ClSRH!qYoh16 zcg3?+JWq<}74bYHo_odfG@jg!p5WK9p9<7IkGORmN%0QmL6F2!#V7Gcfm_y*SKQNC zuHA;vKWpLdw9dWKS(0p*J*RYgx*AUf)^Q{S){%!Su<j5&>(1j@W&2Q9^Cmdm>f0hN z`}Yx-6xdadK)31{(CbDvRry<|-mlZ`IiTCWJWE0oHtEQFtlMAXJ6M9aJLW)Z<gJ0) z^?1p4Q1@^eUQ;q^dyjRX^Zj*HIn-b+f<UdPU9G5ft*CEptDsYTo4@Nsj)wz&p5__n z=MkP9Ki7CNhctK+5YyEjL9)M&_%+S%3fxDL$mVSw399Bzz*h*S<xt7yClT$mj@X(X z0b1q%Ww3XszcDh@7qYJ{u#XA@=VQR#AtmzKIwCet1w|Hxy{o>nZqL!W(VCNR;!#oo zF>m%!-K1vntqI8k{w$eNNpd$KS+qlQl#ogIA7xU!`FpH4sQRsAJr({|m#5g@I^TnZ z_x-xs6G#fxN&?ngj|V*B^L^f45tqYs=ya&xnl~af)J}2DPj}#H%`$K}Vg0Q+UO3j5 zsZHKQ|KZ5+>u9a;>vQRqR%3|A5aQWPEvWd=gF-c$9fD`BPgj#Gusd}%!v(THSCc6a zudXIpAWL*LJp?jUSJRo0&7>!2S5oGQb-PI88f#DENj8Kpg=KF325Sw`5_5Hrjlk5P ztEtD+-&&@7ED2nmWqv-=6E~#6=c$BtXofWCYQK&G!-2F$p=Ts1q~D=-aQOF83rltD z+7JjAP+(mP(136m>*^kd_{Lh|i6*VNi0Cz;nR=8Mng?P?Qd;x7z}wT9>rJh19leIx z!yEMuwGmX{s(3Z+4?0G9>1wXQR$!{>vY`nZ+dwT5{hMh-O7bp>z`g%y9P05%bPcBI zXka1pq1#=L!-yhUg3yVhs<>DY<+jpro~|rC*?bm~{Twa)GfIx;5Oe&HgSwilFs9Tz zZ-ar3gBl}oSlC{^UDWDft5mxQxaX#owMP)v)o#JlDc5dc6St>!{p;<JF4p0?z^=V) 
z+4hVWl7u(etJ|}DB!ybM`4Sc+{>>DXn!f}7hk82s!r3T(^GRZ9U8sAk6>sQjn(*|u z&eN^hFX(sZ9&M)M+15Kf_xfA!@nrd17wBqVLRzTE%NuLSXrWIe&+M?`s(o12PLXpv zh93W67R|T1nmK5|a8*6pD!G<4jZTI%Av6%(D9n*4`W0$GVKyA0JYd5lSrD!rkd{+f zUjd^WBUeQF8#T?JAmZ&=<==Pyw~>l+hj)5Ndq`GrqE%?pvgm*aR|wtPh?Hfy@-9-A z6^EyTv7Fz7RX}B+_7GF;jQ5=hb`HrPPUK^L0?Q6$6{uZ}SL88mf!Zg4gu6!BwH6WG ze$gI^kQ(Ypr?QJQs!qIBhJ8UIR>I1s%1L=2jbXU8SA2)+NS?moF@Ymx>TRC_=Z!fd z%v@P1szx<SgvN$g1V>s2$8y^+Z?|wKpgN;MBdV-98Y&m63H4Lp$#;=$FvHL9)79LG z!A;t^il%rn>F?mh##-W-IuOi%>SY)-wEBFoy7g150=37$B@14E{Y1n+LL6sEcs7p# z6S@g+=xWE4Y>dGJX`V!C686HZ^k|xRuO48VcHN;m!?e5mrIQ*xGs#&|#z)RV&mYhS z)~y0bJ|q9?|HZm2I->4f=G>h%XC}rQ9L38mde3B~js~eL#%*W-9+vdM0x_b`cZ}#z z`VET~?0hp$79~#%j&$I3{Aq}IpH73QpZxpwvM+Y$`luSDJhGUMhU}xXA^V6LfQl)~ z7OgiUm=ZFY?zE8AyoLXSL!Z!`tyFp<YZc#DIQ9XzyzhB>iuFRBXgHi=k^1v|93F&P zSyNf*84*Gwy|V|%BAZ|zw>~6XZn%X%@t@-V>E9<txA!httR!mBE}JdyqgJ_LF%VI< zZADe;gh{Dz-pr$>W4Ux`PeSZD;uIF2XZ_52>>a8Tlu*&`$_3Yiy*+(Y-~X=bdsqIm zuKs<8z$tlpD7y&Z*FFKqW<jfzLra)F7=t8^PKigzPtwdIi&dC>r1>t?j;rAs2zz}t z|EMU-_wD=*vz*l24T%&X`F~hfg)9fI56zrG`-AsBCa3V~*BlP%3A;l&3`j3@NDBc= z0S$l;k><wj5a=dtF`vhv7Kimu9@t1{B=UjKjsjPOYL9x7+)c()Oh2@f`i>u&ZD>aK z(18-2i(ms`@E7c~uzQEzr`|A-T^O3G%3#sM@)NP|5Nl5?acenNgHB~}q>`|{h21)A zy!xvF?A2jI)O}{MPlu(bk56Y;ho!5xOlL{MHzl9_h)NH~iIaAdS^q|JJVUFK)(vTc z*xBK!YRA7>+wepy9UkStq0<NMk1e61=vG{pGC+<g=Oa$i{*F$V0%KgVSs>o?VBOTh z#*Y|Uc01AoeKTn3ef_#Or7?<7QgtO^8-!V=w|R=-k@wGD_<JzlfBkoF_dl|11xxU( z|9YFZJMQAhjz~&m$3~=MKhiQHN0r3}>Wv9IMX7W(<&axf#BrcU^<SV*P@|0=(ig?Q zosN2qKM#-kT}SoUAJ@I<$YNjXlkt9=z68&r?7Ih0!F08g$$lJBI{Nt3hoY6@T??s| zQom<svhVSQo1vRr>?#N~gp<b{M?(Z=xXR>$O(>7@+128OYF+L3sOr@yI+j!EYQIFp zjnh-O#VhIfPaYMy(L`!6O0&@-LhZ(UgvNTf-2AOk)p1pF>8Lo|1t`h_*%9O-rJ_KE znpVd?Qks~&a%I{8N@*4;1=O_FagVG@><eV&%B+Euc0m#S($RfAM1OUZe8Mp*dEcxo zmTVkwi|E%fo=8MKk++YJV5EOu6WlRP{4>xR`~|CMQ|ayQ{+*Yj$toM8h=5E{_>fMQ z!4C>0SYy0dHHdv_9IxKe!iJj0h=`HBVoFt8hO;B4{>Ary0GqL!7FN9XG+N}y5A8$* z9)@1lL~jo#FTcU?*lon1?A}J)_8WBL2TX5H^>qhDe&`qy-&>BykVYq0juPo)ICcp! 
zWp_Pd`~pxcYvhMcBUX@w9`Ynv*HX@u=aUss(cP59_kpppvTm8Pu}DMf!xg^P4C3x4 z$DoW|P)4}q))G^?b!Hn*VaG(?psyghzcEf3ukkjP^F>G5C3B%_Fw4$K?6I0u>J>Oh zc%m8#+Cl?wT24m7+aO_J@~7y=4(W+vb`4=q<_!0(hGsP=340M7jKu!9Trn@i5si}Y zE-2zsSCV2L4OJoM1W`H|>M447M#F~r;HN`_o>&}|c5o^u{jrpyY#@1*-W5KNLwLy_ zD{^Sw247D(oL1YY(667zO^ec>q2>ukNU&GaSUhc;25(~HY*fYsmc<8_Ej;`Xt(-?4 ze&}ZC#i+(&7mXF@|5LDrp?WQL2}7|qH&Zo)?axh8&pyx2=JwOjiMe8)KZHeE2HB`s z=8``Y71kVw1?9Yz6b~NNlT^$JkqQ=foyvJsBdEr!FapO~n9GvkJ3}R&3;%#Puh!MH zqEK=%ZjO*Eb?;$t%f&-OvDZQ|Qz-U)D3%|JJsOHRLb2tc*yvDfQ7Bdtip>qhrWH_& zc%qO?*<luE?H%7?K9!KwsNe$*vjXdg5vZakeuc7nFhVVS6yBH$J{Yr4W&0(B6qLD~ zfAl^`e8WH4bJl*oKQuobns?$RteuVZGaE4OPNPL!E73m{%BQZS6mE1-1AgjSC?Y~d zK6SZ7L^SQEaCd@3lFg(k8%Z-fJ^cm5h*Nlf0WnEfWYl{zHjuJy%%P+Wq#=z_LUx%0 zE?;63FNc(LV_2JXH3v`uQcZSmUVJ6-%Vtt$EKP;JhKm>K=Z&G_h4gb{sCYssztDx& z_MSO%0jamEehR6H$o(_r<VNVrS9z(aH}nNWa3_(j48__rq$@1hmaATQh)uDX)Qul8 z*_NrgpS@|zO3b<nX+odzQi3ZHQH+7pE$ou5pQ?rR$RF6}kuMdcoTpEx&gd$h5im&Q z{Nk-_a=y_?CehQ|wHhIRyckLjL7m|-QWA(%$tR7&VZ#!*bzQUY;2U6zXiMjvTwdXN z<0(*%L+0M8NZUURk4jd!Fu~_9c!K24oZ(4w5yrE^lC^HS*-1kkRAW6(F4A05cQAbR z6&B4J{tGrSVwk}pW{s*ro9k-Mpc^(W2l_}++)H%*z+bJ!thZkD&PvD$Z9J4u<uifl zCvi{16{(nOirIn!OBbXPUBtH+WOQ1JBCe=Dzw#3Mx*)OBDiLd1ed%%gAa#v_4Y3bU zH-E{>>`MmLUw2>`<?y%mT=WB#Q;vN95J(+<-8PkQqOqUsgM5WJm%^c@N>>vDsyl)Y zCj#0CI=CEb9VGmK!mha9hNrG32TZOu1uYsM0i(YXHLBpxog&MQOV+yDz2Fj8!uG$7 zr1G+=jR#GrhPaUT&Tp`~keuo|1_3zyopVa@g<kY%jrC6Y<CD%(kDt~{k^{xcUp%;+ zHzHCOSwu=I9~ttIJL2PCT!$cG$meBFz5_EDu8%?NKG(Y-?x%9$ym^42qfFk-%C1Nl zF3`Gn(AK!((o~Xbp%RUu${)Lxr8*MS`7JERk>_}yx*H344PJ6$VRZ(Y-?2Q2-6xhO z5&RHjQM-8(&}MrmLN%i{W7`~=3EOa)CfweNnIgh_{k<bgeZGaokIamrLvFLDh&e_M z!s)kr<mkSjRNwVp4Fb2royL!$rG@`tqCX0JAC4^Uf~2J{1*7G@h$r7Ps817028}O} zulX;e%8j@}f+O<?6J39qc~i1_RST=WDIL-`+%!BspK8(SC#OTL;<3Y8H?^>DZyHKm z-A47N@+b-CT3Ftwq>R=3VY^!^VdMr{qxPcki&N1K7a^hUA&%y?fyf>hW$yJqrM*8Y zWAL_07`Il`K=BT>7HK{I8&N$*5A<noC3eyx!5{a(;YWEQXvx@uY16wed>$F+O6!iH z==lsbnJ#7Eh-M9nN=G#Ah?#-vCR+jBFshy`UlXS#*5qwi2!~H#bj}<ehvm6A-H%YP 
z7F6PwW(mHI^AnEh3ksdGJoq-4=``iX(TQr~XRK3Eey104j{v9M^JcLLMYH<lAm-#B z&jRh*naZSka^cdsX}rrp<;?n&oha%%0wvPb`7vgM<hsahR*Pqbj4L2G7AnWF&`PLV zWu@`dO@S$GlwDHs1Rt3NI!Rh1(i+-ltnQG4lWY~P&UqqmzQ|ww2d!#UoHmD*grt)B z!&(tn$*;awk8btn|NJhDx60eMF7oHSzycO=#H8r`jfqM^=xoWOE%sl}So#T!+RPaZ z{tTbsg#8O%l9e*KFSSegetNq?wnMdg_?JbhJh2tZCl!1GE}6i)U+F)rS#kZ$P^j-Y z!8ucohTe7}PH%)$`Q%3Wg&DVncW6Z##q>UjRTAbS3vuM^Leq8Qb*ItCSo2h$H=ecj zO*611W74sld45c9b?_6mXH5FQP|3Z$DtPrC)S2(bRyB0O4$TO53D|>}?{DA3R3$x< z2S8ebyE7jYZXFz&;8>x&jxuNZk`y&=csWZlVuuPDbP0l=IK@_!^ofnSLD_POZ7C^J z?^dzjONPgKVGqN*+{9DNH$II`9UIr((m`#0n51N#Vh6@24EgO4=AnJCHQ`$yW4MBC zF5UX5<Ua$$tq-TT_3_&w_V(B$?T6viOn%^F_RZKHv0Fh1-}1QSW2PE6g0XQK>XsJv z*0_Pb&_P$|IkB!ZLo?)v{~KMzfuy5d4pcVZdMDkw4^v~>siQ#^M?n*Dw*WUa)3Hkp z6yfYkTe;=0xUX;mTg*lYL@WZeX>|560=8<4!xPJQilqKX3VvH=JiCRJ-E7PnhdvNT zM;<M=3!ILPVsQw_Pg$XxFr3)f=DFD$HxE}2y_J1+voR6oC>RyK4i%Z^iFd2Z$|mxE zxY(f5{_1^|Y<y`}PrCVl+a3x0ffF6=oqU4bJHet(zmvT_VKzzO-Qe8zH>Fk{Xo`-A zshlaKvW#2q?R*;sygv;Wa6e)%-qNQOyo*wKHi&^l-U~sfiu`TtGpUXf;NFxE!^T#? 
z*y40+IbM)sU~nHP)x-Uz!kv#RV8zQ{9A{C^*~O29Vha(3m7xdFV2*t7V;&eEhf9DF zfg-xv#xKFwBA<b&YJtZph1<m&TGEl?<7}(5kNW9(?1=Ma^_3R3aN<tw6$}ToS!eM% ziGGq@{o@?AaMFFfF}@@Zf24<Q_XoQDfhI(8NjjE)`vGg4RH{yx!^V}RWSQ`Cpy&$T zH??1sU5QOxrGNZI1zUa_EH&28ac{8IWpDpEr}D{V>YcOM>yyX!rtN||Y6dR4^x|J0 z>1e8d9bvIkGLu)5f%^?)92on5yY5ZmTM?nAUUP&^n9^$yHFY0e8z#DM2#5t5JS8`v zl|>7)N2g>Y+QWS5f{zAZa8DWEH-~*NB{TL1Y(Byrb+wVTP3b-OcCa-d=Q18OmkgiK zWQ=IkIDx+|*u@~8(8vnQGqewa61pvu(#RH+_qKN~qy>dG*a-ck8&VpcMVgK;C#vHl z&pTGH_P(xMPj7`U)NFV6oC?bx8e^=!yk95O58IIDt8>}+<xbXlYJc^rpV{!K$#9}< zeYOvqH`Uhfrq{?DhJ6d%%)jF(;;MHDzl_~RSW}*QpB<Q*sfmJFsC*kY1aEn+FXr*P zpUcG4P&s4hpU(vkbQ}x9y}b(JYzI1y1*a_A@or^;T0KC|Zh0Yz{czhqRm0fF)9z6{ z${f=liWPp;CcNV0cMWBqOkbq_Y6u%M<7;*1cPwjWwi-w7x6K@>P8`l&m^oOTF`Ruc zGgX~7d`EDmTcy4>m{rWaAM#Gj{#0E*hP`sTSM?_AJ7<Kt_ZT)|&QNu*m@S=ii+bG) z?D(9Ch5qM-`|d}tL;B%9hm_)h+YWF9@Cl$d=wlZ<{&))g)D-j}&GNYA^ER74xA&Y@ zXr5Tc&?2AIkmgm)C0ZxXr#TrT6?YzJn2Kw_nBy*_VvN?&J;Jo#JJt$YNJaDTLWhb5 z-wLVd$+y@WYui=yP?-2C!myT=0{xSU{;)PtmBI}32E^WSgR9~#(nW0pC?Q>}n!+s4 zX0VLE^-3R$dD~y4QTy#mrB~-K9PEvA7s?Gh8}v%r9{5hOHP6P!gpaz}1Am*+?-4W| zABWjAbTlp0#fE5DUB24Ge*D{gN!vQ1zM<1%S@>x0s_&%RU86tQX(X$Dqo4YHdB^%U z`l!_(yv5$#b`kyd>YK*MHT~cRZhCWd<ndG>p6yF?9lmuZe+T<u_S^Q`Bl8KFz2kB9 zN1wBwcL=@6+9~wnuAM?Jp5B>8dhyoIWvWQIFPLq+tB}J>yEm%~cCfK~rm4T)&R*G5 z9O=a6b$F-K)c0+NOZpCV=k09i-Zb@1YuF2W=MGAEjC7=KtTfH<kfyD0NRI-V0p`Ct zq>+G!0n3oa9-Wsm=k(T&Cb%Cc8bt9mU%+ssi3^R?hi|{ZF{Oi}0UQ<Y5wq><#R>e; z8$8)Sn;%0m9m3$o@f)j|u4+!#iKS!#aAVn<%+*`j@+$Mqq3ij&+BIMnpUU>(dtdJ; z&22zKrKv%>JYEZL^Ve8aD?Y0?ZgD)XyTPh|{qZ{<9Xi*5EhW~iZE|!4oLNq}fDCn6 znH#4-Wy)uPqE=8xy46np<|+2}9sSf&Df{w{0bT8nLTQ3QEb3%4AI`eDvXcg4oq^c| z9!9zqi5Vq|2E{y)KHZh*yMpxsQX?>GMwVCLThjH=N7}u4$NC#0>rBgV)Gc}6S0E3A z<b^t<W)dp-)8Trc8<#57$yi@`8`gtx8X*(DL~s!*u887FbZ9sC+ZKHNLT<AJ3ENeC z`*YOBuDCMoT~LP54kH$6Z8WGGXWllW0oPTy)Q=il8@$kQ9R)X=y`w{xz~kJ2-A%=v zDZX)_xK+5qIFgcZRnwDP&dr3wiBJlOsF0W<$zZ@rU{|?j6#C-7Y(wQ?7zZ2_dvKNd z9Lg(f2&{^@<{0NX(jE1?40?yD*o26`2-8^rNO&QH1wsv4rS|L+jSbr~F~L<6AJ}g1 
zVB=9h|I>Q(`wYbjxu;{Uq9`hlwv7LSEI-`?)$zD3EbiH4?JB$)vLz!%vuEAOYFTD` z-M94n3ac$_^5LFz4Ol}Cl^WE&-6vemw>Ip^x@)v54&5esqxi=#GEkNC!`agLiM|!x zkaL|UT17|T-tXO!V=DLotUhSp7APQ{M6-NI?D$YHXe{SWys+F}P+2!s<>^l!4>NFs zHwMa7SdQ)Zlw{F)X!|iZ6^yvN9IA9g5dwH&fK-WON&bPPN+bSLmncWxTd)!OXI*-J z6T9i|N_CI1Y~9`8W)3ew(UEKHBheUMN|AP@s#u8u`be~h1Y>!Z61MrCaYK*^Dt^m5 z;cwOU$K{b@VrAcW8|^J3p_d!zb|`k^2G(c6X!YR_*sKK^8oJsUz%@^{YC#{hbct<Q zFhH%j#7-{gtIqv^{kR})xZxq{BX1i%y%ic<)E1=Aa0w^vxs5I>mhY9~MoRnfU<C_P zG#ZNFfk-m*E!1E|uySGl9Qav+^YCp_D^xd;1`4hd<NNZ2c;LpCki(V2I1#5Wg_H#C zCDybsD;}?2`!ryt%a$`{B1?bfLO2KR9i=`qhRweBrk*Vr-GM}ZSA~B9EVR<Oe?5Ea z-eWzxb|hVTu|4U@7uj=*2B{|-*x^Mto4>T74?cvS7aunC;~sE9p93kp0$8Jz2!-dR zK=FadECm5B?H43pV#W8l>2roZ&b-&#*P36wfHvdX@uX|ct<6=DWNVwq8zDyQXgTM< zM`&pF4$FX#ov$S+;x#eQuX_K+XM)ry#iB^T_5M~i!jstJUJHhMw0sh){{z29$?;FZ za@^`k#S-d4&#<`r$U#jG2itGn-u$a|Y>y{hU2uY(^CZTfBHcqugXbW8ev*>I<G<aZ z@pe)T`_V)xaa|62j^R2s621Z<{UAs-9JD`r9qkxVS9IQql(WZ~eep!~y(YG1aS!!0 z6MK2_(7ZRE3%4{rglL|r!a;~~9Ro9|!mY+DgK^7H<g2SWh&9>aqU-9QU1SsY4b$GA zqP`=O4ZXiVYH-W_{qVf^ehZ$j-Jhv0ZDAkZpW;h~f}pQDmiDhai%Lf0;~q4@FHy3# zj&?2B3Du2OMJ`Xfjw};#`?0cl!*Afrkw3b@6zaV|WLFuzH04)(XH0*5)iW^YaANT8 zWc49k+=r!a0~7IOAiHNtjJ6A?AwB#1Gi=3@S!(OE?As;jI^=vK+nr{X@W5QQ{DHN- zH9hb-zqpCH0lg<V{TJzLce?JF{ow1JEgN9dun0eQ6T9%>0JZl4*7YyOt`1Pc7dMIV zmcJ~{B$78yo~V^4X{{6Sa|L_Rx>7^g#CKY-pb2&n2>bpo{e93=`aVL(7d_Q(zsXBl zTMyd}R~u}1cV5wv#D9GfHv6mpZbRLEG@`%B5q9(WFUFxL?T5p(0J&PMbyVnT$z@v3 z$6-fV`3`;GlS!vN!^^pqC@6@he>oop9BbC{@V=7w{fNzfHFa<sS=TUoO9&V2=Yju` z{q%9R3+y}o!2Ze~*q{3kcKJBF{%VGLjG1M;mZSc)flYgD#DwQD)^QZre%8KmF)hS| zvq(|)NHE%Ps$+}>9wr>N-LGUVx_}jcsI>ox%6QZol?;=L@8WUF&i=jfQMUUvv-+R+ zvwy!fM{R0hlU_HgyD%0&fLJ|(GmF`^*GDtQ)@P$bt;f1A9Ycw?Egjplb3Ih@KRzxi zzn{Im)Iz?ED@(B!dXLSjPEbFynB8A}bAbk`4~tU49S;cOaE0drf%aC`%{vckjG7sb z^RxGcss%3(vV>yMDlf*vlxjO{v3*%T%6|5;!k%6C|9AGcdfDz}De5m8*{93;n-)@Q z6t$M|&P&?!x!94<8w>vz`Fyd64fT!o^;raCxmd%yi%`oui!hF>MHtV2MH!XFiTo!D zb9hih)A+X{%n(&j7H9EKM0_a!P=tEkAVL#=M}*0IhX^hFbrI(CjR^gR9ExwTgRd2| 
z(R{TCOZZ9=mh$Bybn+!4oXi)Ba4L6+a2lT_!dZN(2<LLA2wl8Hg!8#WgbTSvgdVOJ z;S!!D!lgV7p^pl&dpQOt?@r|1v~c1v0$oXH8`@6Uy^7FF0$ok$1%W<6=vjfTCG?~~ zpC|NvfvzX?ZGmng^zQ=QLg*%ezKaWN5`RYE^@Khu&_jg!1$u<gB?3K0=-og?nNLz| zrbwW34nA3+X9yiD&~t>^1=>WYQK07u%@QaF>f?O{ev!!C1$vs$7=gAB+O~jN;0mFa z1lmgI1%b8^dRCwkEDJv=&`3hx7pR8Nw}FZ>Ybmx>B*YQAVS$fi#}odPASV(k3pAO~ z<pND3)GN>oLfrz*B6OxehY~tjpn5{b3e-fXU7!|1jRMUlG)tfk722Qo75Hc(cNb_0 zp)o*3p-U<D%RN+3C!s$GbTXme3Un%=X9PNp(BlG~Md(4G*sH-K)3JZRWtvJk+7@Mj z<?_V%ocu-b;1C+0YUc}*lh=hPm;fAM%3nehy)DWWrraqgi%mB3d_gJa6-4=SPP!Vp zfdXHtEoyZ5z3h;rI9pT+QGD>PrG`lHwy4rDQ*4NmXp3@&DZkw<3Ylz+njEGyhbU>b zsHtJfXCX?4Eoxer@_vY7u|+KmQ+9+X`Iy@TWwMiR2$3ANs3l>h$3m3Rwy33HN_B`* zVvAZHrraeci%V_h8bK-Nl|&J~FJ`ToqaA{kduZlAv65}68La+!B|Ba7nGsVeu8mcc z`@hrqFX(L7XUz0QHM{h<*xML^#`4A=Xjc3G8R7p$vuM!e%}U0T(suS$ZO@eWH!=5T z)WvOgNUIS(f#~WCJe&Z}o2>go1J&TPJ(NMg?1z#wC2+!ba1H6_0p|dx0mlG`0Pg~J zzR6yCs3e{^WrZXs0x|%W?d<wPb7P<1K?&DBM}DO{*n)?9_gs%e#3*6)HsGfLZQI!k z52vc1-@)E{IE#YjhxM7nf!v)4rUGUGT!4juC4l7swu9MMmc)lL*$To=z#+hy9qiea z<!pIilr{_NA%4CF4xcF-*oi<|@iBa&!+!^%!R6SAvWR(jV;1Jx4(H~^p@?nN)5Am$ z3m)MCK&x_m063DF9~qiJiJ_}1ba-7tIazAh;ztJe?S#z$eS35U=Wp@k@21hVpTuq$ zYGeBz88FX-!^5z@sTa`TTl-1c26_a0?x(yr&?DG+KjprG9*srkA%#x$>G}Y^c*6$@ z)lE23{uESeQIj_Yho|Q^<68?HT47i`_IFkyTZcK|$HEtLa_ndz5wR4tjz3r3TONPz zuA^GHVp?EyH;qzsF8C!sypFvg4_BwX!M>7*+Q0iN?UCuHEl}%?4wT?VBWxUfBZ}Yn z37xxFC}ui@r&|#?bT3)ePhC5N-L<N}Pk9xZO(zpQ>qcQ|zsQacQQCNOnCcmEKv6sE zOnbw9A!Oit*#dFJ;xqbnp2%|LvyC;7jgpX$vVA}3#!uexDMAk277Fzxy)T&Ivx;&j zPMy)e0mo+a!v(l5=%QX)y#o5M1s52^^(3r^+t`}qUYe~SH}Hoyu<>=tKC!0BxUmRD z&ShwG{1QQV1zZWd8G8@^yo>nIt^`ei?%>{P*G@DpemUt9v>qSn>uP9{kPA+uyIfn4 z1m6?g?f#W)nz%sX#GRJ4l(3P6D4$WhM#RfXI|wd8aJI)cP6dX&dYsD{5{Z{KP6llQ zJ>>B`)ETw|o-x)~;`J1Q17BC#*o;T5IV9Is#65#Rgp~fu$w1LXAiXzG4wRw`rDzoI zMUjYWEjD)i(e$jE*&WL;IxYIsce3|jA`Drp?mm|ISg|_U$7Vfd)qb<AonE_)ZGOzJ zp0tb&UOgZUHg9^v2Kt^g5KB*dYSs&^;UV#MHCwp4r_VYUdNUnYf`tDOXI%KG7-gu$ zaq0MqpdS6JDyE+;!=^I)^CcP7^mL)9{U<ma<@{eS;T6=dum5_};t$LDKPb-ER#R?T 
zbVbNH#UBPihFgIA;P~|<#CqZfPlEsC3;vXlTFsKyWcJJZ!|TD3y1$Q)!w+3uvQF1{ zx==B<4`8#`q^F(TMboF2Jmu}5itjxSmn}Eo5+&`mHmxxY*|ii-1q*);LAd&Mpky`V zXef$sv0X2#OQxF$5sw=!6_lDL@%$SveEk=ATKj_g>SzlN_;hr$hVBhL6E8N{<$M(` z<*|Dn@1?F^%N~BbuX^Yu_VVK?>a0ub@Z)z!p5RE$WdkD12${{&B1aHX%SMosW;YwC zKGVX!#;+eDusktKy)~UZ`@|5nFP$|$@g)&oe)0?j%b)s)UT{7=f?_M4j#oQc*warJ zQtZsreJOVB=>f^pu^S2>0L%j#&<6{ZU87M#e&=FlU7HcxS1`c&CGpI~%)K@>Ggv{Y z=Z<r&6jm1L9S+21Z>Tvw^8Vqh;b55f3c_$!<ADCBw@{tjjcK0Aj2(Z2ZHku;8+->q z2{{PocEeVVvfe^j&jmf4bq}uYc#V2%1Iqgcmt}YUt*0aNhkypZxN#PTfqFUQ6p&-E z^^L~XSLOMrj8Bb2X0S?joQ7TnS>?$t<7aVsk^HcFC^w8~mynK<KNHtl&%<GU9{09* z09oLga6rT7dxVdZgAl5M9EbA~YohCKSOa3aZdbfr>7+y_f4HrXPx>1RJlkKKrcLR$ z6Yb1B_lcvC-@_+q^AHUyrsh7@|G7-f{cyPd*GU?C?)E>Onbf@t0iiP!4V{_9J)f@D zeZ0f`e4;wC`b((S#20$27re^mzmTRL|0;X*h5h0ztv9QBaf<q1UD>`D|8$^s;B)3& ze@`dN=O`6y=r}fEMt|14evf+D6YQl8;tb^<8^UL6{fE#GCI0!FkmdV+1g+zfkamW& z;Vd{aZAa}(C923tXHcq-H!c=uaD&*ro0h6CzQlgqbesCImssiMvFa|J*wdR=sdsN+ znJ=fRuieawU!F7Q!?C2ASgJgI1UKmq<0d_zf1^XX2k;=E9`F;=*w-%?`!=XCB2VB) zT^haJ_>?=^6+~{7KMRf%=Ok?t@#7rmZ91HzlZr-siY$y0Ojsu;AA@%)gD3tczuxcI zqsvop#UG<3-gSgjb~N4~zvJ|Cf$2U>V8UOM%r91j#?8kFLqkMvkUt4&@eSuqAATsz z_i6_pF9jO>Sf-!ysmm8$FkBxJM}x9Xx4$$y(*KRhbJ)pKP}x8E;*(kWmcFrLZb-VN zijCir(tkH7;`b<Sr8h#04)GOi=;H&vzK+#xu@5h=fLp0D<W{<GqR3xl++GMB4k!Kj zT@|lcTpgSJR(i6DCWM$ka<sAjwqQ>_ZC-n2`P_MI{aa~6v7ElIOr~E9k}shSf4qwj zIzPzq=j#=ZUP!$7R*Ekq(XWlhb=6=3{}gjn*us1T6yD(@$_;BDZtX;V5NTwzU+G{6 z-=ExMefSGu9)AZ99{@D?k5ElR{>BpWJ~rXETr@Ba$K^<Tq>W__r;gqLX8d@LxVt89 zv9(`d>sUMDiq8y<*Nh7~FRW%CzulAWt2MtplpM@;qb*u!oeNq=zZZ5pWHlJiLL8W9 zJqDfYSPK5P^A4U!dN+KtDAA>fM`_N^L5=Y{JLJ<m=kCG{Bn0M5L$-1@7Dk{o5iL;7 zo_MEQ50n8vWmoeStSCNmkR$hDy4(9sO3Ks3V>~+w$9eu`_!ZqGajUAXm~uq%ue_6< zONG8#jP^*vi2;?uf7lW9%hhLOZxf0?<54QUzxtfyy>J+8@ZS$Hk@yMkruA_@f<zn+ zHDP$ouG4~yC5cMh5?uK%{g%$*cL#J{cRN}G<%J$zefMCD1xmGXei3_bpChRS&ic^8 z=Lul);UE?yiR1_CUc~zB&q{b4K3*&V_-3Sre+KH=S*&7ziuOTzvp@Y#T*@rAV*hw? zAhJK1X2~D+r>Yy*GTnRDp3|xDmH1)4BK&^HvMyLTC-Sk6vKj9stMf0hrSD~SrGvs? 
zMg>w}`ggFuzc-*ekq)$~ubrXo^x+{<^9}5~_lCqDeh4LOL2bQlSUb_rb2P4C1M2Ou z@(e0mPnvLt&tUh~_g3FGlC7#w8bgAx`Nh-Qnde~o*eEE6y-URJci{ep$B19`7{OQU zM4C9`d~OF>=m+rYDgyCL;1M&}5A|syYHLxGjdT=+H@*nyt)U;9>EVv#Zjlrc?RhVt zmvP$43J#1=j~>bHIWREGz8$YdG&p%480kFCcHMI;_7Jp<=pRn2;E$HEeFp{&ng#`c zp6UGKuPD52T6?os@HZWhV)eFqU?JN0MhDX!9NvpE<$8!kC(*=W4pov(J~%8!q+P6G zD-WjhCcOFc>p1YEKmOEh;DeOQh3vh9DeCbK_Vqz;O41crfFdrAFalF9lC!yzE?ZI8 zD3{ry21D9pln$CZ7Xf}38s+T8Esz!d$(mueu|o|;we~CaOT+NgFz+nz8qYRAMBQ`b zR+Q8d<?X^>zm*jq%2Y>QW_KS-9x)X>*Ur$-1eZL69|2B37?^T}?s^R3rFbKBmcu6l z4J~om-a{$sXMSR54h@5fN5B83-BQ`10%xa^N>7FA#7@JWAb{UU4e$KIci5b3sG99} z*n0D}sqEFpB=y%*+2O{~S&Ir$#CrU=0dA;O3B2Qm+OG%D1E@1@sNMb(GaMPAF8e2& zbHvyigYJ(v)Dp^Q{W_8V*24aNWU%_|H|*?@42@gO*F6XUJ3f6sO68+_(ipQEcTc_! zXyw(Y_1H(KJ>BplxmaC0uo-@3vglS|26(S=bWWQ(`X%lJURmBz-L@&<B4hlgorDt& z+Foma!8VA#j)PBfIc}hcpUr9PqjiWCndhHXu=DKqBfahWa1$x~!$e1|q^fiP^_wRZ zqH!3XP%6rU28fg!gWAv`kQ&*G9qn`+9US#!4DC#NNIHAkNEtPRvV$gR>3Aasx`X4X zD@SjNSli;*GjfjVJC*NHf=W6CI1l&<pzW!WQUC@(31BW@IpA@?7QlYM7l0N(WG|Hz z56A%I1111|!iVZjfRljTfGwcy1k?l009pWQcsk<&gA?I{v;(;numrFQuoG|ya2n7A z_yO=MpnGq;05Aa@fJuNEfcbz20X2ZNfGvRifRli8fFA&DfUbR1(g474fE_RnFs+YI zB`pTB8t^jUUBIV+3xGC2Pt+g-U;^9(m;{&$SOQoH*aWx^pugp)nH#VQ@SlJCQXSHZ zPKPvSk3)L?Gi>SCI6^G=QbDT!77M;K2Roqdn-cz|dkJe9X*^)94z>CPKP!*!EE%z5 z`*C%wZ`O4O{ThO#K}eTBxBc#rNRA63_5e~GLTo&wZEX&zJJAvLAUeWCgzJC99WR7V zgxF9@O}{#%B!mkQ(#9zbVG7ZIaY(5M6A`8%+}7%#D<MOq`m1oHfv_Io0|;vnK8VnT zunplXgbG5()g3d=p6G0mW=kHxd`OumEyBMAfHdh&=^i}iBgG4B4%Tawng?2@bRX4d z$Dj+jswog8O##Df>2@&BL$U_~((u9}#7H#J77DR*fZvCIX%Kmz$lwm(10ire;se9N zfwS80ZeeG>Ywer=Uj>*c&BdFGB)c?i$8nyePU}BXYSF>}g>p<P8(lW(a{bGD+`~#% zv`(I!^N#ekiX{YX)Xw5>wG|D86FkR;HT`<Bg~7y#wMh<kSFo@8@nrT$FjX~xy@uyN z_CYXJJy@hfyfrKo4jmEVyhqQ*UF;M6ezrsUJe#>MrbJxM38g$y%3edX${C7&U&g+_ zn4(L-*FczEsL7>_DGv7I#ok>%E5AYeww&4j-ABFTHj<?tbvwz>9iQV61*XR{a~y0z zu(#Sdk2qBEO!Y&mdWTQ2f4`d8)depit3J`e^$@$B&@Qv!0-X!MA1nNK?}+icLxuWa zk9coag9siyBuZR=N+g!Y|3kdcQQRZpVz(Fn^G`$RBsBr}d^#Tq|4pNT)gnndVW!&u 
zo(HYu5}Nw|bwdU(@&A28#(ru!7&k*zykODYvpwVI-FM&YJLXle?U%k7AxqBL9@nIK zcQ05xulUaS^U4;^n^PebsmeTy@ZZ=92}6xIMlHDe_B-#Hx8up5?uue3)oq>Y;bEn} z`r;j(TT?Zvj3)=HuKP-Uc1WWE4nRJ@0x$vefGj{7AQ7MiNPyNW4(TF*1I`1Q0OtT_ z0H*;b0mlGG0EYnefOi2q0b2m;0c!!ozxs;9C#?js9N+=W2h0LY1~>tw00&?wAPs<D zLzCJyFcI-MfCeA|S}!}K761pF0UQDB1#AMW1}q262h0Ub155^#UY>{A1IY)N07C&8 zfMh^CKnrO52|@wq04D*&e+c1Tz!tz0fEvI;z%;;QKq<fihy%1;ay<QBgeu|b^=j4F zk%K~=KpjDU^i}U4|NhVMrw2!<`WxsRul;X}YNVu(9w}{$87a{>#q>!h2DkXz8WF;m zcg7@VXUl($+*2Q+N>lrAl~mFL=;hbIlorKkX6pDeeuL%f{|{(?0YCL7{_iV#6fDj5 ze*}x<3;&t$LIz&=pMYvgeNGu5-1Bs_>Yr-g?J9|5raljapud*6Dk%%8?eWAaEJrX? zx5w-6!KZFfH{w|yq-WlUpL_=KxEt{$uYn)3+uv_`199Zj9#@zAeb29*RGpo^GoW_X zkJR6)_vl~OAJU)KU(=^%56T{ytueR^4;Vf)XpPCnLB=xUOydIMX5-t&!^W?T7maFD zswu}b-n7u<H9c+GYC3AVVEWk<VNNv<G8daG%=ef*=4Iwb%+Hu#FmE&OH#eAj<qXcL z%xTE^DCftV*j#;XAouCq{l45Eb6azj+<40f%P7l(mKQ8<ST0)zSm#@P*2k?It%t1K z+BNU_ytnf{%lj^GuB}J@8~Ho)59d1yN(!bIxC#PxL*eYgI|>&U?ke10c&6~X!YhTS zQJN%8Mj+|q^u6>0^ag#QevaO&_v_c{U(z4Z|EBMp-6K0UyDHmPWcbYd*PN?4zvrap z4#<5o_h{}&>n`h*yuSIx`Qr*cC^%K{ZNZNPG4|PZsGCHW2{iiU`px?7`uFr7>Oa$e zss9lzrp_LlJt2E)_T$-4XTOlWDZ95J)sSfzV#qe+8tjH5L#d(6aI0Z9T6CddiD9{6 zmElRlvxfDC*L;Q@hW&;Q3?~d<8_pZPH?$a{jj?F(&rMy-gUw^jTg@lTm&|QuL(WY( z^K+_m9?4mgvms|k&fXkNZlBy=b0aO?EVo$7Epse)T3)sswEWA`YRR@vu)btnnpcB% z+mqMTX10yAm7?A1^Q#N0>|Q(lI=JM(c`gFq-?Cc_d8QSnznM;$Hs$oT_O<r2W?BbX zhgq|&X6tCH)B2S4dF#toTi%kq$MO_gT>iNHiu~F63-Xub|22Pi{(<}t^Uvr1kgqDp zESOfXreIS+ygl3gpuO3yDXcCG6h2@0M&Z%Ij|)F9Y$=qSl2igM@1)Pv57(RY1^SzO z`ic4_`VDB#!}{*ov$J2#{yn?i@PnbsxZJqL_<`|PqskO(>H%fTGz~YIOa-Q~rdv!E zrs<|TO$$s*O%It?nVvH3H61ryHW|%U^BnV?<^|?j^ExP8LC&lkcg{;WujO3M`6Wk} z8=sq;+dp?u?ugu+-2B|pxi{xJbA3~DXXoCXyD0bm-0IvFxew>A%6%Mq`Eu^Nxd(GU z$UTw!S?;;q^SRBrEl|=(OJ_?Di_v1WOti#Vds<Vi1FR+1eb)D_XRO~sQGc~Y=5@-; z%o~wcl;_NI=Xvw0^Xl?g-ln`Sp}euSc-wGv#w?pUKOui${yq5*<*)YTzn;G%|E>JS z{NM651$_%96x>y?r{G@&T6=GMs(q||0s8Aj`+0lNE)|X`oLIQD@QK3hg`XBmvm|L1 z`Y&3as4v#vsb8Rf2Achb{$KhZ^uOwj+4<Qe*|ph^W<Q&~Bl~prm)VhqZib$Q41;3m 
zZyaWH8ecSCF#g+k&FJf7N)ruRVVYsO-z1yXn4X0$d~E7wo`+^UjIk=`wpgxNqVkgS zwD=gLnh$SOEj5G=SIg&SQsq40-7ETN?m6lSP?hW;UaQ}zRfWMiIj7JkR@gz<Z$ zlJj$Jz<SKuYVDo(yKRcS(moxdX`cN~``z|?efF2_JM5p>|7}l2{iNmCTR>2@J~`Wj z;WXQ{+1!+qo_kB~y}7qq-?tsL9k+dK`-kmw+n2U)Y+AI_HE8rk`*!=!XrIo7NreLn zZz^0?xV5mk@G{wf)!1Sn=mKS{(m$%dGkZbyLBq#}PR1U_$uMYl8hsBMYmG(ba`O!H zT%kH=&EJ@RH)rPDi@~-EgKc}xo}4dpq}<rt-nsptJm%aPxm74mlJz-jbYAZ~OWwG= z>3IwC{+@RtZ&ShH0;OPpz1Z%w-(g>6|GRw}6=FR$NC;f|yY)W(I{lmaJ^G>9YqHm6 zCm2Tf3>FyK=MB3JpBa8J++>uE`KFspC8nvSIi@Gg5jiQjcjQWzla_JTyOG_cf(Uzp zeW2ZHpJ~6>?zcZ_f5rZm{X_eg_8;x4!XAa0g}IdBHc4!z-qv5#M`m|{rAy3C&Q61g zT993r{Y3WK?B}!B!$@t(-kR;(mi=qCWQa6q3|hk?L$zV8;YGtH!xqC<!*Rnk!yu#H zXfj%i`9_Cvw6VlkYJAH0ym7s8lW~i2l<7g!a#O8|nbw-tnO-t&H+^IJ&h(=x$80lu z%@3Mam>)9NnIAW=H>+|w=fvf#&3OfFxj*M{PEM{3!>=rNaqeGyxyv!!T5@|>QY;x3 z6Nc6}%OuNG%k37IWszlx<*ydm@|5K{%O=bVZ&|Kbx>~cWTl040y_Ki4CD{7errH+T z6x$4#pMC}1?0I%(m+B>HG#WvrKdXN|`&{-<*+#=8!;gl3#<9i*=zOec6sD~h^K0fK z<{!+yNiY$sbGBGt%+u%Jm;WM+<VG@&C$WJ>phM&J%1+8o%}&oAm^~!>MKsy_+23ZT z8pc44(v3OBF~<4ES1>D{GX7*NGTjY>^b#y-8!|APN102_t1$n4VOGrA98*qJ&OJF> zbJpi>&fRS}ZMoeVu<r9&k6RzhdopiB-fMaL^4jt=wqCY$+efy{{Ji`-^B>J$Rlo{# z_MYhN7wi}9y9%WythrFIqxzHjNrq?8S${LUZTQ@<&A8k6j`0AB(bGgb9U6{vvt8Nq zv;V&p7?v0|7`7PhF+5;+#O{pU6}vmY^5l)(7rQ@pEcVm4Br=NSq}b<x(habE0-Boc zv7cbiAi&5F0c2|!co<|DbQr7v)|xL2I1E(`HGm~chG7RV^_&2fEFTR27)k)saD-6` zuv%MSbin9=5d*OPQUE8J1Yjw?09Xt>FlGQ2Gdd<f0~Mz0B{ANxVz6Ve<FMng6R;Dp zldzMqQ?OI9v#_(VbFg!<^RV*)W}pze2)h`&47(h=0=p8s3cDJ+2D=u!4m;53Qw7_! 
zEJjJ*wp8Z8>g@}7&jQUoJ6#QUFe|5Ca!#gRZenKM^olIT31})rvl$K8z5x|8FaQ8c CCr|AF delta 25687 zcmd_Sdt8*&_CNm2!);U$R8Z~=h>C(b%rL_Yb74$CG|@p6q!i5yO=M|0V`kcD1LZhU z>ejT5njW-c9W~2)siBx)Y1&P#%*f2PjTM~?Ny+E;-g^eD^Z8z0zt`*c-w$2SUVH8J z?CaWVuf6tu=CjtI56=g!G|JjmnTAhY|Hs%Nm)0jQ?tro94=w&0@y4L#i~r=|dy7Bi zp=a^G5q{qM_Tn!PURtkM{5+?>=;wdeA0Fo6xJRbjSe~nrqgS=a$G!9DXAcPezh*&{ zb(3`!geDL+`G~I!Mi>ETW^s!T7X=|mkTF7MDAXe!C<vNg$tYR>?sEcUf{~?LWx`hL zs^ngwyJdo<iy*B0RuJM>CCBwXc$a6#czl1&f-qXPDmk|25c|vpcF=N8;=zh;BDZCU z-Mgj(Z3bijPLuv(Qr8HS$r#!MGFD8?L|MW(MCAzo@@GLoL*~w$HG8@sywM#As0x4W zfjyhV2V|=QgM(4=E!92A{J{QMpXUpL>qKO2P5T{5vf46kTLk(<7-;ntE6s>xiJ?_8 zDN1Oz2%e&L^%*L3A?E_4A`^!R)?(^K0pO^qx5pi-Pb7Z?_RTG`jYcN5`R!`SHZ>hw z?#1oy#Vzz<bcgzsp)q{TG23`>6jOyHNmXK*<Y=d*(SpFjDSgPKpg3nqDX49XFu<4a zZXg;_f{HlgRMU&`$~`SqjSO5T6oy87f~Tb_sJwRZXTNRpES9G?2IRNB>}hdNxN&_c zdf+HJAh47k>m~^9{F|P98YBoUcgo%QZHC6h0q(_Z9i<)W!>i#eWhLFDr+c#KO_cP( zT}kq^sOt^4!q?W@=Aim+#|<kT$;kOP((`Xt1)<1tFu&}p1S^}KkqZK<<2sR%?zo!n zXiop_DN)^8C6uz3jq#O*MwQX&D2Qi&GOqsCMVoUC3b4`}f3oJK*5;X|iU+;jD?Jg1 zb|azE(8C^5XlR`Gvp2w<-@@{$h{5~$E#*^AHB9i_aNP(ktr~(ZQn$JtZ8RCRC@;ql zC6wB1gNF$=Cb+1Ayi|&kV_1=5FYrE5X4{L*Yx8efYn}P+R@%txchkd%;l6M#CWgC? 
zk+u14*7r_;l?|L%@s~WUf6dbo$r#H@X$Xc3B4Rk{_{<xiZY^N--5-J~P4S$p;Pox_ zv`A5rsEMn;P$gE$s+>Y8%gZW7<uS2qscChDN>N6}(H88G72w6myrGNbf}`gan`Rhm zM3*W7Z^VO3U2pNFUu|1@;}Qe_(mqU3pKw>FUPR23FNc7srLr=2UTVA)mBk9VSeus` zVXc~jU;$CegPj?mBG*vYqSS~vjv(ae71i~&(`dPsj%$~sr7KdGAvZzEx|hFbw^ns2 zk)lHJrtJpj8w9?pyoyrfnpI|{Uyb42=(eY(c{Dn(Rkcdztw@|fZ$gwB15-oqu<U=k zBgIL`^mkvXjb8Ajrt#GC<ea>>GmqnkFm1sovt{(M2#tBElbE8CqAbW8*+#RxGpuE0 zWstHm$&N}$Q4vT38flx#b9fRtc)*1*0yVmh^(Gpj@NuDg3RyiRqydli<r`s@R6L%F zxNRBFN6Bjfwg{iLI1NcGMn8K@dEZ)ucX!;ekds|fMMng=(Iq|hAl@mV7RZ76R82i} zLW%7nNX2w3$?O^x8v{MZWwxW=OKkD&tY;i#YS*Z^A0dt`+3`cAD_8K`vd4rU2(FS_ z>Br$@W!FB=gZ^~8TwP!34tAXs%V-6Lq6Cv`Jo1T?s)UJ4e_*o#^Vv%O{DaN3h)S0s z#nB(HxXdXf;cMz`%XpdJAk~-8iBM$aY7kwG5CtC>)WJE<bqHLj1-h{^33Ah)RVTBf z9?|@LZ+AwF4<}Q*rQV+!&>@%ayYt(b#)}F>Z&tc=DfYlBg*T`w#Z@0zUTmdDz{y)1 z+E~+!Ry0Ahys5QwL~jq4-_GvjY_}*UOYy16qfG6VRzVEXc!nkxPqEA0`zUz4VU0B` zoCczcik`JBqCQpW4!y6orUkh5WKccR?sO(v<Y`ma*GT9j$fn{?HUf2Rkht3CnL<{# zmV3?`+U7+q@3DkT4UIMGIL9o;QC?D8a}x^8dNNo@zg1dRCAVGwlZmDhbz-_s{gvT2 zrc7t>cXVxJe`u<DBbFXM?s9Z=r9RQ|1Mla)ww2(*h*)XMEiUll6klh~3L{}*G0ysq z<Rd)UmuBq9tZTq6j7+7W!9i<lZm=enx;L_rK8lCC<|>}*di_Qg+HSI}m{80`R*cFR zZHl{o>LgG-{0-NM=-NUNS~;N6S!wvx(VBkj<2Fx=hnLWi(wX3{xrBT?yzBo*qNmBj zN4D0A{GtOID{H<&cE>+%3up?cW$%^M+B4;)^awh=b3|;DP$w%r-6}~%(mpo)5+7h` zAFHvnkGEzecuGsl=q4-zt`o{K`XdJQ^z~GXj3JC^MThuX!}Y;f=&M3)b0n<(ra1C* z_ugR_Vg;d5=nefw7siqv;mKoO!Ghq<mm3-#VRR!;jYg{X6xR1jy}_ZN%Q|1Fx?7vy zVzsgS>F#(|a;SF;RC+{d34Mhd6%k}ha<743;f)4pdin{Lr1A!j1f5apamb<Yd(m_x zR`srME9u!7ayUFyyc0vNgbz`1E*Dh4eK1~ym}{a_hC#5#L$II{qD$%PG3X|&DQv~C z(()McQAA?mzywA_O^|uCKEK`Gi`u}4soIrJi6LEkC`RQBL<<Y#p2At~?w&%MdrY9G zyCKXLH%zei^n@v5u*?+BafHz8{lORPDF`$Kr9~sLw+AnErpswz60!Fv2-}JN@pqv$ znl$#9+w+$}f)IY7g>_E&fe<VGW)Qi*r&%maA)cNwVGZbDKYwK(@>b8KPS!BN5lzPo zMOP%!^{`HD5*w23$O!Y~D|?`)0jnyEwR`<Akxj6+8p+YWDzri$v?C2Q3@f4IkOxzl zuX{u2*BDpy>v<k{8mClvBTHx|W1z=DYZO9Jh`K)X!O0B=g@|E7!<PC2Qr|1aRxnJc z3bfK~nXLG_ty;G5Iufe_OWm4!q|p_BO9-lpomkPJ{^StL!yB}P@w-Ds*D;0bc;M1M 
zmOwlf{aAIk0vr+GDDxaY))wRmtXZNV!y@}SL0a_D6m@+?15L{kge4|B>kfMmy@^sJ z4a<%8f~fyRaCmJEEpt<r-kplvpN#c<l-_jgk095vh+<l)5rm~<LWL!$ytk|Cqyh%e zBy<*x2`*Ex3tVlHi+j83Bdk=fDTTb`hb}Gjy#Sv4P*=SICI)-w-YdLuhGTP+OuWSC zrBg#CrzE8xLtA)GeNgxE$ql}Dr#2IPl%f|^|Bc4-BXgoonjg81=J@k2F7}x%zU9I5 ztNN=l!*7mA8p$RrpBD#VT3ye8PGE)?9ib)m-s7@Ppp~8`C?|JOo8d!8AG!`)jRFJ) z!voYKT;By;UnHN`gRe?-elK#ncR$q;_TmXcgFU`cIDHy3y9cx_Thu}7(kOiWHRf@> z+*8!%xd`<XMw5Dx;yy7+3cQBWFpqka@Otb(doj&CE#6T2Lr>!Flh~VS<x{>+cHM}8 z9;hv9p_9pxKI6oaY|<lI88#ReuwVAi^&n%S2godBe)Mc{S}yq^`hhOZX7rbperF=1 zVq(RgwWKn}(j}LdLytv}H)GO5Rx;()96>I}^y>?5)PT-_72(e`1gN^6evbeOCz?J& z`p0Tz+sNeDeuH+iH#LmmEBEB9>Ux;$hC5gSuwF#dvsl9YdCUzbJ7VK{HbR4zyC=7| zjtI3lW|z@XCh|pWq8w(EH|WrUI3mR+J9*^b0%(p_OX?DPBsJa3rkU-Timpof$yXhH z%c$3cZih;Uu%i1y%jjiP-kpCHmh)r08S#zz?XdnNiv1G=@+z%CR$OT=ViocAYVkn{ zdZALEFLwm0>rata<9fv;z_>XOILw=Xy_mI1R+}%!Q;M3=Og@b38@2?s@VBdPcXB5# zKCJ8><#rfJjo+o-fCbdglMB?<Bwe2gX~S}&F|bOPV|$^W5{NC-l)x%kNh#ekk_07~ zWEwI&AzQo|N){#L#G@;)YIp*UT?=l_QwA=vKLr~f65B?v6_QU9)Z$x(BqVXn&~t1Q zra|#w&CQpi`=ME)XaETQD!hj68>-+<V+%$%Qk|HZQjS#K6;>>Qe^CRTg3#?QM+AMX z8#$+li})O?oeZ;gW`s~)N)yw_9fk6~b{!X~<s(qyDM*W}4Q9@<+F=sZ0oKA6R-Jh$ z@^96N1i@dO16|2u$?;;h--#!AptA%Cye6Z5@2rW-v;-<jDD`M&Kyk5w@jh9G)n|xa zGYJIy!}RVbL8zlJwrDi4n);`@&}AbT;<(@UQcbIU7+>lPtkK0tEBCZ{)2y_Bp@zo! 
zv7X>cmvKQilcNXrW3UG8g~XiFw|j(uOeb}PrU;=229nt+eY*V&0rKgM)nrwQGW+#H zL8zT0s}f6TuNG$ihPp%45|;NqbUj#oivX<!>R|%ujVmd&((H87meMc!M^GEEttcy_ z+hJLtkoYuIsDZ5SJ9y;2eyq`LRC_-{dO#~+5w1l0U}2+)L9Vlsx2LBd9vY*YmG=Gx zMOVfP<*v$5fh|S6_m!;r(WHB-u1hK_s+7L*A2K#Ib<C~*{H1$upyx!a2Xho$1=2%_ z9qZ$%IMfD;v?|hOWm!FqN~&A+n#~vxN4HY!@1gr=HIrkhqn!_6=+NWxs(3mX^Q@do zShqPLf{x~dhsR<{V%wUDa?k>KRUB<dKmk&e4NF1Y2DT9G7=xj_%7*fXB0ra$Ze(0^ zD*A!H)`hNMgen=G&gy44X@?Eqe2USwFo~#}iicIwj7K18ta#F-8@`drZ`M1mI)bfK z25y>$hj$1(4<5Ovi@WF&JTY&NL@t;DGpQ&4k~_cIwfL&Q_<w-;MMJ>vS0kSMW;zSo zt_FG;HNL*~F4ii(`YpYYhJ8j|TN37L*iH6~YGKcW8|<l}>}k4+XXT2h2n7qm0(p?o z%!xslc+l@64+ejRV8e?+a<F*vg}Qb`J^6uk0^-m_b)k&TZC+<%P$3d!__u|n*0rM( z8-rT#>=G!{HwN7V2Cd`U#Jg)QV#VLG42!$ZqL!i-u`#e93-s&1(57;>^H~6*Z3i}< zQj{I2%fuK246=;67>|wR&Akp$%xf?QJogr;Ez#+F5(m613)zbdX#IH7oR(g27<eiD z+RMiS;?d1@vg^_hnHSJI44a!Ko_zGL$q_7B6uc=_A+}3A`Bg6|9yp@+wGuWNA~6A& zPEdIABc-6`M6zpO^3XW~2E8}{()yKJaxcD$`ff`>eX-s!Z%(s;+gahYAFvMDswH;^ z_IGYc1_L(H)oXl22v64TQu-%Er@WjSU7o<*t(H)iJ=9uC$N$901>RMjHstS}UZtaj zOaeV&+&l|>3(Nf&+h!~>9k2Q<mnG<;ntD!i^g;pjF^Y^3?}MR;-|dY<AwEuKAAwHZ z8I(1AkqA-q@OJFO?D6YpSXy<FDZ@IfD4*#4#oF;qmY2EiNcNBtI%X8fOz$mT9!f^0 zzvX0OT?OmbH^8-A7%+Tnh%qD)FnUA%@0GLp0FUJq^_d6hhVOm$2j>0JJ{b7YDv`dA zMHM;^dLR;S`(>cQx135ERy!$b8PC@qFj;Db(I7d<QyD`_AbR#*!(4w1p8IQU;ZLqV zdOX>^Dr_Q*w!kxMkT$s?C=Xhrs(ZPiDXzZnG1yF#8>kcgi5dnUZ0iMc8ngyFtAo@I z`zvFp!luwzgc%adDQH|cqujGnBWeNZ&2SDdf30<JiL58tIykv!_*G2WF`?LY1e{fe z>ZtS`xiI*No-4ldkxMx_hc5b#JTRnhGFyb~3fp?7rhbDebDao;=!JVGU^a}CqSkB4 zrXjJ&`OuJlJzKB*J&S8sNXw8UBt{JF*WHrcSwU8x;X_A<AGZp^AvQZb!{zidE7>%3 zh$z28J{mg1nS>;^%~zjNe*#bKHSF!skSbxE?Hckareih<!juYZF(!CsrOL+@GoKXQ z=NpyD$K@Dj8}nPU*}M|t!2LvPkJYg~k>{A-VlR@{oU+GNHLE|Fc<Wa~e(1b#s)2rB zYZqByO6=*|8L}M_I$z~7?MNfrP4mQy14)88Nft>=W@9wQOwDcVvf3I7``aqb50s+T zG?P{4A~9+pX*Q>e!;48+Zlc&amJH7AD}FqdjLY5X+;*8s6>No-Y|{P$YmB~(#mdvR zg>Mx{%&{l9oU;UbAJ3CEEc@~*1$_f!UhX+bLr_jbRTz!O4z%1gvt0m-+i}&P;rmG~ zmi<@=<_2HA!<XI_A|Z4kFbTh5bB-~&M}*rvoE`6hcYQOdwqzu127@~x9)0Axd)JZJ 
z7{pLEK2p@yW^%^TC+QwxGqU%7<+|JM=n3bs#}df6Nx-na814bXw6bVYF-)DYomV2f zx)iz8=2I*DRBQaCY>;d+Mjsvz+d$W!92u6M`74%xPtjG(tw&EOp|Wb<vatF(u6om# z7s#sX?E}O2o5T0l!wAT4UcMwFnyBw9L}82Wd#pdJTzJiRCIn3qo4pl$`hLYbI?}eA zb@a7X5<dKKwZC(EcnWMLU-wvP{v~veyTVrDF|9`(r3<YEbHwnPAJP8SAVmz>Jv`>V z3={!jtyA(av@Q#wx*i6Ot1W18M5Sl+@Cf+A@)aOkyp*SySQ{6IEgu~nzigSJH|dra zHyB#a;=LyE5$GJLUVLaetFr?2E_WxSMU#@e{$u$Nme5j0q)9LneTJkr&^5CeYhyFr z1X_d3w1Ul7N6)H1X({@lyfKIoPyy7^C-o(;C^6hkdH2g=$iMT|WJG>!AnOkDQC>bL zjT7aGM9NPo*!Klg(f-4Dncl~iy>icUdl7+(@1u`)^s>@5_ZSYsZYeL9_U~rgi|)~% z_<}rKkmR(2;&~4HLgBq7c%(O4sfm-$`C~)*EA@!cRG_f-JkNcGm?;g86MLL~Ju8`& zc@(Ny+Ru{cor`E;dF?aS#(a4&2uq!9sf@daCj@ouhC0GRI?tH;>Zi2B3o|0{9{oK= zcQr;xMRG@DDcyaMq>aeUi1p2sd93d|jTOvkKZV<r=a}$34*QWti;&P3%u+%|q(&_T z3A32K)Wbg)C5<hPoE|a6S&cqzkfL@VI36f?q*6&U2U|KRYBwnSSRzi+7$GRWmBQ1~ z$*`9(oUCZrG7J3JS=f8Stjp`<l+L-rSc<fT<;T*yV*xsdmt0}IgzLMZC=@U&T3Ds$ z{t6GI@q1>>9A+6i<c$TBHn&;G%Y};G5GyE>m!K)RB9Z3XxH33f*lz^#b{EW&=Fg(< zvVDBVMly=^d>Zm>P)j3_8D6hodWm(y1dgDonC3>5C23~G5yQxVVXU?qi@ycriII`9 zCs=jI1WN8;Z<kU!{5(ofx8k+>X7cLDRA;mdHBZOJg8TErtJ_ejryvz84~De$b2d5! z8NL|x#j<>{FMTniFLv4&v-o01eX%@WY_~5~=!^Z+7n@{4&j@xEN?FAyXq}ujifh?t zY&DWS7M1jCkMO0EPj{5j8J`pTsG(iZPJ851X=N<gF)Gm+%5tDK>`|Bi@UD6lJ7JhH zz(XnV;ZoUOM~AuIy3j1_IU#usn5#|bDBA)IUzv2?Hj9OII@XcrZBu!KOVD}SL>}Q? 
zeBNbZ9D>uMVUkqGB*)&{Wn!Fg=E0if2$pe2>J}!9bu48I6QIT*_GZ(47TD~Ji@)oW z;X1#tg;yWLSokE@DxGJRBcJozpkyQsd)=d9qA#5<@-g|V>l=Swlyu%F&UL=(@<Qk} zzLBy|_n4U7WOL^+@!>7KAf2m&97T;zO1dXUAY$O+a2!fAm=_P*sT0klWOTN8@>8;6 zbe4GcYjS*aKiQXrj!uh?zJ}bm_~WH0n;cP0nWkovR+KDjCZmc5#7_KCl1k~9W7)3r zeg1q0Dy@{(6%kL7x`>-WF}9V=40_6hK}1Gfv1f*P0a7_VY{hCe3wF9~JRWolG$%iG z8DCmvD_aCgH5d+#M%q#9x1`&cI48?-N`*bnwua>y?$0x{ik)JjvW%*(hwf3>ENn7t z!seh#PXFlYmqxg4^+Ai!-Qm?2F(7pdfG*FE_@^DAYjG$R!p(e~mLVUHhV`j*-gD7C z`5gSdHc3-0(~{T5=(-`5(LvCX>tj*}%|Q*9F21@5!Mq-D7<pj(*hQ$E{HuZ9YmNEM zOE^Pw^|vH@>_Bl*3Yj*xzqtEHvV81<0lR<3-05m@we_C+GpoBN@cq3Y`CXDrZ?r>3 znC&;vIxsP&m^GwL7G50!YE=N0GXi810=lFpnPctkyzvXVGctg!8E8})Z9>~&7(nD| z4x()~c8YZLZ6uYJRMx$dik9#*t9S2+++0qw9YzLlEUoN#&mO9-x3WK~!1kk*?j3;Z z4yc52{-9F&FGOks^VwYRG^Kx36=3`L58Gbkkp4x9mDWNe_^!)w#I^&(qpV)=WeJ>) zIytsU`vS3SfOZ3mC9kJrl`TRc6KqrQLK%Ieh?Bh)CG79|QxK7=n^KgnnLJjUV_5`; zO1ZVRMvgqJFv(71#5!%53#Yja6F@hiIM!?SDD>DW7D35LyvKeh?iaNk?s|W3OBxno zYYRypmnL3pCijo)7sC8$8oP!(Ic^{vZCl3WC4jQ{(f6tmR9RqSo<^VYGqhq?5ctHB zf^JA!^iOZFCjs%2l7Zr9&E%PqK1m>Z^4s9u3HIc-FBg^-1oC}fZ9yoe){<tjx1=wM zxL7hIQpcLo=E5oPeNdogC{Sb(J3d1MSK;^+Rw*g!Tr+uMd|c{BXQ1=j%Ao}Z=!15n z1@n{88O_L{_Bo0!H-JcXkJt41-=(#WPaU*;G**i?G(&P^=<`GE|A(wbO&Bop8fRxC z#`z=u5B#W405dLIu<SaH_&rV$wyA7}8m^LeCd568lZ?gVJ-7H?S0BIIp5L*54!6?F zUqUUg6+1wJ$tXoGO5<#uo8L=lC#SG2^1)MJ<}vJ$M|ih%tiaeMpy#jB+uspOsj<r? 
zws>1<ZV9O=o$8#2n3et;K7#UF7t7=7nFYhVVv)8btd756hsIq4?!}kj9k@>fACBPu z<{fY$e_m{D;BLLnUg3co8r%O;Z2ub|hvnt!PwsP1Xz!@Gr!chjn9Dv?KSA#3L1vU0 zyE2`^d<dsclQ+r|#W`2V2W9cjgBVPg@%O{f;W9@;ZQ$<*fw^^DXe6bpF`7vY+iKZE z3tV?o7kvs1H>IM%mC6~R3%$R3q!Ld8>!Z@6?Bz0#8S>Q2^-Q459#$rOT1Hni!gTD9 z8FEs-^zOxY2<|1$ImHu9W=`yt%_!F@9`x`3^5s(2`z#|VYAQ12cHVZ@OXI&~y)*(I z4)s_1gy<CV=ET17;L|7K$gI)(SXDIr2eXMhZH7CJaC)ej{5-L*(N}v%pECOHNhH$h zX5U^C?!U=bz1`eqZ%7&4%#!|lB!6lqMddNFIP!3LqByCUJYSxgbsKvzzq5EFPzc@i zm2BV_N8pSz)NschgY%py`sqEk`QMOl%1gvBkz_qEBy9M-glm_{><5%RgF7kLpGQu# z^)h+ufvEINuVHmM0<D8{7+2d}#|V1nG9tPZM>%Z(=GJ+wp6Iet+ospZuMfm!yzEcy zM_**A(O&pspNhjlJ$pHVE(EbrNMQveTqe2)hbFNzS$MAPA)du-hDFiAVDb5qKmJ;h zNO@mX*iS&JQp(+O&#&IYWZHuR#jDMv_Q3(p;@Z)v^7JOxx8VWD;!brtR(}1*2L+*% z8qz`i3{>ukEN0zPO1r?EaOcCx5n8_eSvaK&P(Y)=rjBb&sDkz8P-wdC7JC>?=Si%_ z-tS7(Uu~D!d%G(Fw?~9mKQEBnNdwX{F=lX18CvY2v>(SUh89N{+(k5TGA0mIFm~4; zOd>B#8X~?qnjD>^j)qS22Kmo$0u}biDzT)bn9h8VbeWtYT1Jz>lheqm@>uOR*wFch z`B|W>Vr;{8Z<ymT%+d7VJ+=pdlF+0nfmi<oGCL_Iu#q7vk_LyhL%sMjd-Y3<xq@6v zis<=XC-u6A4a=Kf@^OFk5XODg-Oh0zNuL8F8~4d{5ipGVcs}lr9wLQ`gc7SibpS13 zsnOm<jC%sR`_bJu@<T!PkNhWW3)duZ2Mr$;CIj$dMA5FC3CF|&-T4K1bh1u7Q%<%` zeu#A#?Gb|EX-+CX)*KuVQa*)uAW5uv!h8dpHJ3u5A0zaSB06eu5U%yHn;?ti1}tjB zgt<xdI5^zVbbBWwz5gOPQ_&|5$(|4#BP}1T3~W6n!%=9BkM+@uM5=hm8Rv`1_=~)L zTQA_gi`H}eW9lx7z!`0TJD;6C(!JR7c`m}K+JckJM0yr%{^O1d9K~q?Kka%v5O$3W z_8dE5rNh{XaJW0Kdm8q(?(y9-JmCQ!wGIm`^fcHPE+0w0nc@~{GpU-oH<UJWk5gB+ zi6Vm@GK(*dCshy4i-GnM>~wjr@ckczA9Xh)T5}3VsrJuF>qDap-{XGa2&@e0lUqKf zxG)ilGr<-(!tPz2!39PLYgWhQh8VU%aTVRbHN+P9zu4ZY$Gc?tw0Hhq1?%(@v0@zA zHhpA_5ml=Ss=$4`KJ@WVI|hT!|CEHx=r^f}DbE^^;evJDx$B6dCPdi4rF8N#3Ez(% zs`+Yx+yIjTqZUhh``PY%(wXHZpx#eB4LAs|HL^qfmDRn;rWxI%&mw_kxtH^@k@6lW zp=W216EphtaNf({dCo`9RYwiF0ki>iDxrO5V!n2&fAmEZHFoAhzxkNxXQoc>28vHV zz4<ZPa?)GqS{#8bl-?0Kw+HS%LJH*c<`ADX*NA@hjc^JZcN_&W-gm(}uRA@4_dC?u z+f}_MVkIW^Gn2cZ)wqgqy?Z73ai(=z=L{*OmoOi34984iw!>INA9@F^Wx9u-0EW|I z9%uSzD%M8{p8sLOt%A@{C^N^Hk}`Cad=S>zT;W~j{&Xlx=niZg$Y;v&G0E`VFRcuf 
zenyU!(d@pYt5PnMS4<}Q%D&>I|B#BxcsTE#uAGqk)P6WRPMyV6s#yc&f|;96-$B%` zTqd6-zgG5>8<14~4i2poA5MU&XMDIHp3@$#$cS#SU>=7v+avsF^)VfyRUyaP0?XYs zEAWotSore_*yK`bO}{yNZaf?%2KKND0@-yqh9p*9ktGn>qmRqZlR1w*7lw0acTF>1 zveLM85-@A7ST>qGIqMrS@hX}4Sf&W`*7Mjfaakhy=&?cKnnbdtzE9V!jK4(EtW4Y! z_jr{|{9zP%e)f~dSDJl3kYuAMXG3rD%bdxwKS<FNL&Y0f^2`$%;!!Qx@x)m1vp2|} zPZZ}Cyv}UymG3<aV}GVa_!%&|(ju$})B&yoVr&-QqQlf%DZM-a!^RB52-@oeshu0+ zyn*S$Enl`#WOIjg3A1plG~fiAdpMDf^PKFsxPpPZmINJD%Z|Dge|DOel+CN#4S$(e zZzIAc!8(NZ6yloynpdgR<L8^x$wzwu{jZ4?EvqEc=JoH<`yN*!f04}|gA1r{_UvK} z^vxdEjffJ@<OpcM5Ow|aV2B?(X?DcY<HtM2Wae=au_S8P=6jUkKo3fi<h<U1s+PUS zP{TL#qv)c0JV`*WzldZBw}6XUXz1&ta>?}W^%L3b!c7rJJZ(8fE-cX~3b9oaO2cbb zv5Ds^{k3DArH?#DGL|Oxe)=9I5h!N!6bu$Q)^h^cw^W%i33;%a8||LpGB^jV9+MKo zR8mp}0V<I$uv^{@_F+{bZijwFdOVvf&YnoJp6%a#!)q8muNS^9kA@}^+q0SZBd{=I zU4YM)o!Fyig4poRWa%&lG^JFA=HlL`sl0Z9%$|bF>ogLt;DrE)#IVvbEB)uU<npu8 zW57?_w_$|)u~!9GUpQviZ2<H#?BMbTVHQjLS^z!IxmbDpb_4F8`1ZHgkK&>p^EpP{ zY$kcG40AfxUEjVc48#^DmyJ^%tZ2ZY3LW9UWOuDo5#j#GRm@1fRk3b?^4;Z1bRGhw zN`|BP-7E?B2kh~sbSuNL;VVRr7#qH3F`0*;V}mUq^?!<Y`ANvP0Mi{8Rz5&Q`7~gj zR<==zu*pR<|4Yz2SCQ3-xPl<=PaY-K>OKQ~i^ijj3ZfOEZw}Dkj{HyA77MK8jp_vP z?h*3X!T8X5;Py%LCM)^1I$pe9N5X5yChx=XKYSMO)Y;C?XWpU(Xi<mLob6@yIBW7` z+-u_qqQAln#K^NFd)x(wevD@88WTjL$u1jFN++UE%WLhyvgN_m^^PB_0!Nh5Z{g)+ z+fuiQNgdk%p)ZFLuey{Dzza*5Y)oXbAuFMKm`}9a9Rk_NErkbS11|Wv$bHZE%46cO zaSIa<R=(gG@V^odRs?;Lb20Sq;<5Z4^78ZL;<FRT&F8-pH;yM?EK3#_jwkKQMx{f} z(7@SHPd*1r1lE(%l4S{zxbiDSwV_wGusaKG&8!siggZ~X_9;2wPEBPO4&40m0d{I^ zj|}6l^H4mr#3zZ8yT91pOa?4Z5X(L#1<MsfR&GKoaXkW;#<@`Dw|m(wc!o29;8~O& zoGL*H<3Zk89^bX}pp8ZGKr|IA);G&jv^a<J&MakL4d4p>w~#<byBZ&XM9G1xTbOgu z7Y6b;yF-bK=I~*8(jxKdt@CaTyA@C|1qHhL&C!r~p5fw%@#LWAzTTIy7`mff-6eVq zn9nenxt~O=INiH_Uq{k6z<f!redIqY28yFoNJ8!Xn$d9UxIToN8~4GI=`?V;6oNaE z<^yXKq9HIe2Pp2<O@(&A(YD7F@h#a{Tg7gH{dG0C+hMew-iscioAG2fYuc_?1~OII z%$zEG3g&sV|8vUuRuq38IBKBJv0V6TeCD<~cG4n{8ig=a$-CLrMxJ{ix>s`^=5Mg) z9E|c~w}U*9YoQ9>dLaqR{Kqd0j(7#0ZLDtiJmZcdhMwL-y1&?W$SY7ZKId0#rEBrU 
zSVu2U<ICf$l9mqhxy7JQk8`6~OAWv5net*6+2B?sI_X42x8wHHxD*7|??LE$93(?S zc=Zc+(X|1!`Bzycx-ZDumBoD)bi5H>JqrEiduzIq{JAni-0&7jUlngGhPlJ)h8&K= z9ypnQSM-{B?%BQ+?<<Y@SATi;Jvbj9^fcf|sy5HW%Xlc6xK^d0k*}>v#<OXa4o}(Y zeqtdU#;X$xBOpT<&Q3FW?QZl#Fs@Cam;MOygw`&>*kKP-n<oogqPUA%KaUAmetpXw z@M%3Cbuf_^R>x#6d=r`)?wK?Z^K}Yx;_eyTBXmUb?}c!ssu%qOOg`z__$Ilyda9_{ zO>#)zaFlbekiPekS!4#m{k@n%WahgOum!^@SY0>RM=Q+rx^&IIyVB>N-(bRy9!s=q z`->A-lZv(K?k%ADFAy-sSJ%$($4HI{PjRScT&TWS?g%<4Lv9Tv#qYMjF7tNd2+4UV zB^c%adr9$+5u&o0Jo1l`y>%a<vHx`;^wBk>=^r|_wHG&3#XQ4YeeLjM*cwCFl~Grt z+#749y9lY-aKEV?(hTt~qhns;l6sTo1A)h}ymsbQC<fp6WQ$&8MjBtYPz5xKSq$;3 zNZ3X*6vOC^$*elFH|A=M@Lc_Ob!M+d&Jf4H+)u^jRQo%%fw{O5T|(&va&==$)(l@w zN@(=jjtbo9tiae8{udSa<wcUSDbG0+m6PVn=>Q&v(j*>6&^|njq~Sb_rXeiUQh`Sm zv<+EG^HXUH57Q{+VFqpHp^{$YVHQ2d!+6@nLmjQ>p^+X$=sIDMobxSoC#U7nZ9FWb z8+kaIuH_-z+B_UjYj`-3F6QAR>fqs2`WO#q(3w26(aAiVMJMoZ4lUxLo#yj!0X6Y( z5!E1cvP$e<0*Rpm7<qp+L*qHRjG;X^x`Ls>99_xKJGG3LnX<Hnqw5*^Jx4b%^h=Iz zX6RXtZe{2Rj&5h@A&wq)GJF@u>ls?d(Gv_^&(S7^)^Zf$C(vhs@;aYmF*{FS2MzQQ zj$UMF1xGJ2w1lJ03@zm7Rfd{4N`X45isLsJc>qWGv2{F0TNt$mM{hDTn4@hBy|aS# zKs!TQI4WS;(eF9R&OGRs9A%$w(z8H$o!JhM9^(lS4BfxN$#RKg_`f+hnxUIH8qd(R z991xMIY(0&>f~q|L+u=8SH$Tf991&3f}>dsE#at+p@keZGStLTiwylwRUFS_<N+Kl zWN184Ug^;+)}1F<85+RR@eFPAu<B1_=uaG-#Lz1ooyyP)K(Qf!&Dd$S!pT_f2{r`D zVHVj#oL0IQJUA)Frxu}{1Pj|oksE>{{FIeGiqa6I^HZMTl=)c(jgeDI=`2S1dr9Hd zKVpF3Uo-@<&smx8q`;R`gdr%JQJf%U`ACt5pm;x1ijNX)2vYbd5k5-1At=>Pxx1WK zTY)b`{FEC$N~$3!!%z9rN6{IAEPhIpk76_gF$>WiGR{i(`$!f;P@$h`tB;ar2pa9D ztoBg~4MA2vWeKOuA8pW#=af?VIHREbWMtj+&`;_*7Nhxf<Y3((aYG%sT6aN>#RzxX z%1T{7guAYV8}3$2aRgoOWf6p1jREK?N93{VqU%b4>ks+dfxm5&QrEVuCO>cOofx<p z%Ta2rKrF&agw=?yjKsqNn6a9qY#Shg^ZsqAEO=sDe7_bX)Q%K>K=2vhG~fi_FkmlW z2Vm=J^7gjENXF@rSk7`lBp{8D?yt=VTd{^E-1-9L8P^c!Ycai7A`vl`uyQl-6M!3p z?0PLpT(O3neJzazf4rvb$2d^B1wkRe3YZ9(3a|m@09<Rxn12;U`U=?q!dAduK+_uX z#=lC*ob?`Z=U;`PHZ0ThCM;mOX%`v0T~Y9Z5rQ4|Dt0Kg`n9M}6gx?;ONHg#>9eX! 
zL#!@?J(TQ`g@=C%FzYeQ_=kn;+ny1{5`8yI*b(wFR#M?6(z<<6!nmDS#&CV}BAoG& zY}>--+fkP7mb7$YdcFTl&&JMO^bnxlLq}O|TiC<f`zR}W3wwCG9%ZF(VGo#6$c3G% zv!iBQAy(JB7B}N`_dKZj{ANc84lD=nktBWoRW?^H9wtq%>jx|U0CUkLPgoX^fG`%- z7hPH$<0-oI=&4Xo*(7&f54n_o$@?`8|2G-(#t`w5cS*$?8RlCX*fx%R@`YB{8Bv3K zols}F7sW@UzSHkANyCo9+1&#ibvM0{EN;#r-ZxU5g$E$p%!l8*b~x<Kxn_7y+o{S= zwGTZeiPqYzgZ{DLdEonT6TjndfqfntSSnqpt47|a3G?rC>{_buAtv2~Edwr2;c|~} zEZO^<8GFW{s_2OoDGhMMvCoun6>vKnrOp3<Jp8%~$_+RBn3>j2Rwee49|pOBp4&yv z{5#&sK0uZZ%)#vRW$S*P;jYF&my|a58r%bZizpRFng4<!Dp!JG!FNcdWw@_K!{Od? z&Afrj&3Wi8j2Lw+wrxfK<4P;+TlSewcr{x|Jf<d$sBJxx;1FT=J#Mp=)pyGSH$ZI5 zSV|qsMY_P^b{;S6j8Ek8(H-%+@xZXDjj&mKS>l~KD`;ET!&5|k<7GHzAE6Ipy!Kph z;9fgRZDW@eYRHKl%6L|QA)nd@A{E)CNax)7H-N-!8J$idc1DYN>15E(zM}mZGInQd z=(LwRv{vxT#IdtLjCh$G*{Kixe0>M~sr5wM<q`|lljnE!ccMQhH*8@a3An@96W4IN zfz|H;-L($A*ChF6jys>avGbsfOEBD%ac{@Lz7)g}k(>rU0}}ScMP*(kXtDzK^*lVr z{tpkMS!c6bKOJA&SxRZQ-O^}@`F|5!cbn&bSW54_i9F~E6x#819rKmYr$O*wb8Ex^ zS!p_A<+x9Eg--t)z1KRj;LU#GkagtMHxtA+H00=;eHFU*+3Fd}T#UFx%f;p3ReUo^ zb8*3onewvTS?RCC;&$ukiwOMfRsm%y!-M;FzFp?slG=FYo}ao~?Yy6*Dg>&$_rj;J znXe~+yERXTbFgrBxrJTYc`cG}`AX@8-q;#3sd`yNmI1AKA%RQ6t5+aGufe(HvCLAR z(ih@4CKTV}6X`X_teyZVE5!3FJ~Bto;Wr_Ug;!4nx8YZNk5FB0F5{}Uf`G62za@R& zdMxmVXgGY=ywyMOQHC6SOA$DNAy?iS%6t*M-X0*HZYIXJ%UH1X?Wy8U6}k0xy0}C| zQuln#hyi;qvf%5zAF&s9?i(7-E@xnPLId`tp*+K7=Z9YY8w2F#zT9MgVw9(bCAzld z;Rz?U4uPL*{Qmy&#Vc8-vI9w44%Dsm=1Th*E1kTOytF?xtOsX^XAChbN#p*cnP1uY zEszLX6Vns@klyJb`x<)(7kd6U!vDQLnC|zp{iBnOnt}eeJ6K$mOlBVF*JIc{wvqfz z<`15-3#iY7_C+$rj=Ah8t57lM{tES?tuW~CRb&fV@s|Uip7cUzGF=GN`{iE_dcqGz z7o1TbXMX7}q%|)X=lhu@)XHdBU{W#eP}kjja?d_4!X?<N@Bm*G>@j=?gA2rNIi30f zcldpd{C(9GlF2*sQpur%QR3Po<lMnX(RqYiJD9=`Jz`1ULx$vNbTPHT6nEEbfYSi; zHJz3t>KFg^6~uX{pZrT4!u*eekCsEz;x<F0`42ukwTF?xqxD_F_gBm7u_!<GPG9lu zxA&}mCt3_#3oi>h?hrQ|BBu^3#D#~5_wZ4E@DW2Aj!Y0ANFuRE|91MZiISa19}m0? 
zi6KevT^6@)A<}#N{6kgmKmSPSx&uYBgAaZVk|eH7Byo=`_FSnilm*U~<HaGz=krsL zf#jp(i^P~cWOT!W;^DW+>kT8tB{Ae?!-~G&y#+}+atRqp_P`p+`HkzyW<+4wN&6T= zAGDFW6VtWtRx)9PA(ZomMabT65gq}&59sl_MMwZl2dqWf>A(Jwq3;(syWm4oe8&Y3 zvZDtzF6|H$Pow98cNF*fw-@6RKMeH^%roYKhY!Rz+L&}*taJ$8DfgcJ-?_Q|WcQLL z;QBZ2*GJmq0g%m9yusXXlP|d^JFy7jTiv@jNca@$noXO72z}D(Y4Ch{or`Nd&L?pN zil0yZ+t0VMlaH!_dO!BC?}KZN+~b2wLHzW_6CQqabY`IITbcc&l|F}WQ#&PD2}zD8 z^G+s&4Y`+-b_v;dGBM>PQ20lH$JrY`mFzp;<6kn+L$8ppPnvUC#lz`FXGewEJ-eEF zb?r%??M$NmfO;?g%`rtby%aOD!zO@j<Oppo2(^@j*J!cS!|T5TJ^L>E^pxFF8d6F> zWdxLIPbsC1z;RSn%FkobsRwqfUY9iJB_9XpUj7&EXa0-(ad7u0Q`U7ERP>iOhW+IY z&40boZwFbuE>-+3k{n#86?bhWE$fC3Y==gKLDg})|AT2v47r)(YLa282$~4H0PBQ@ z{INb;95REbUY^dJK<i%Ch(9bMA0a?2=oO8aI-M9_$s<p{@@f<=I9cq$t_Ap6fLNG+ za>{;sg-ptfB@Odbu~^L{SH4_ynWb_^xGQ&%BcdwT(?Fk*NVuJS@1V9%NWT9e%#D96 z<eCG1KCU@dAnMl~Ll=_Q?ehYlAV`uUDu5}3Y)7)#?fpFuIATSyPdl0a<PFTtH#NP< zQwtITgKwh(uPj&@cwZZkAx|v`=O>};lWHsd{d3UvJvA*b_ZE=2g{#Hl7s;Coxi+}6 zkZXf}PxI5z;-?kR0(<5?{fsOys1?k=FRDZ-lNN6im;X)zo|z<O{7xQvrXcYB{~+<- z&nU$Do8-(h4~u0tiPotQ14_w6=Zq=S#``DxKMz}kf4+l#7hveS7U5~Y^MEse6G-c@ z$V<y0ljXkoGq0G>5<WdTrb;+YIbFu?l0(DqE{<&M8=uAup%8a`u)6+1Z*Th5;~f%C zzk8f4YgANXD$XnMurK31zhb8OrI26hXvKk&%&v7Am6Bb_a^!c+(m%9EjI_%~I(n5< z-So;!s($wdl;B5mqI^ZULW6P1+8aeD+$(DG<D`32?1*8YFul;fGld&_f0<{oG{Mg! 
zcJk0|k72%j?rSac0+ZQIeeO?V`X>fV_%htn$ZCJfrzZlkU^jK{oN&{uLhjI1b){pm z-mp*hSUl$McPc+_%1Fj5wRt*r$#N33BfALrC5mlOe}(+<c8?V#@l<pbxO^UH#^}G@ z7HunJbtwTm`-Gf1nlDv#qu(+A0~S;{GwdFM8USt83RVa9+glf1k|n&>0J10R2&->R zYk2h`tmpVR#}fz&*?cNdEL=hAPsOqeotIAaO=cBqEx?Gx;V6d{?mB7lx;%?7dK}HD zVz`GWK8S)(UHgF|Htu<pgkRHYhH8JPHk23PmhsD{b3R~SgYWvFf7hs4Xd$XqyqsMA z;CM&|M5S()hWzV8OWf?L4*R_qFdXl~ypCpGU|BH{Pp3sKgwYN|m!=}s|3d~bRV4Rx zV(1@_;LQ~F6CoQPA&;CcV#eLU)A6FQnVdhJB)++h+&Zn#dV=*1HgKMN+;e-T8?-|- zoy=+MJ1bNgA7|iaL-?(bP^b?4M$NZm#+iQI*`cgAwT!EiKVKv3&h+oeNXOd5TNjxt z>12A)j1+SAOnT&#E>ye)o#1GP+K^Xup$ncN-9IuryV^SS$Ige*42;hb^n3}fWkEg+ zm+dpFOeOT^BD{`cMZ;bD1j9G>V*D<H{z(zPcY~F43oFtPWW&?Zm1?2L>Nw@3V*&qY z7}r$nYJ5sOl=j??G>VN5&G;A8SN&6Ev>Q9*MhnN#XCEf#KT-_UK8wDnW5<bj<x96x z@B22rS5+X5<4L}}?eDpj9vp&`erJb@>LOBlc0k(MZFn`H!AiYYiT$;G02^(#*WTk# zE2FcPk!@!O4$Sgdc;T+1n-YB*hK6t8aC>qza?v~5?9d$TboFR*>+Fy|tWf$bTj2TK z@M?{(mBe^%a0pM^xRlH}ml(tF>tEn7sg(WEsF~nHE?4d+|2mf_8b_1Ha}IIOcf|a0 zthf#zKYpxISRuQZKNAt)XO=KW`p^S2Sam`%40Oi~vg2d5xb6)3_TwQ*e%|rmReyE; zImR#oye?gkqZ=JGgADnkU(tpulElxou)2m<=b-{@vDx`MT7_bCX!fm(>_^-RU&e2< z^*!#MaFbmp8Av~z&I)BmbM#Z7zC9n={7Isid4<$}G8oh3=TF{BI)KH{zur6m6c>&l zXJM1DAiD8eR+IM_J`a*Pmo)G}l~?tq&jKxP4cd%R7wtXpJ7C!RS>UIso5DL2*wiQa zJp#B{_#PL!63d*wh_<O*BquYY<=%dFG}@12e36#pRlgZi>@w{}8Fz86JR?}d{&IIB zIeb1L!^p?kex(uyng0~We|>ZL3s^`yuW!cC#;N4%PvgWlrjpyA=B4c%iIUl+C|t<g z!tu@vnXfdXR16d@WEM1&%Fl+1gU*q4pQ&S@x&C?~GpdB`LyPG*m&tdZ4HDlzOTs@- z#k^+UnL-s$=0Anu#ia~3Kx5#d^Z(ev!F(pZ>|F7VJqc;JKJgn2H!gY(b~!i{cl%39 z=+X-Q1rOU?ci6wgcmY3qcAI%v*a>1C>yid^h0oT)Pe|N_V~pLTAfH{hFJMKpWl!?O zMwzn>F8|;lnGg-=53m5n0V)BD0P6vJ0UrRG0quaExRBEypam2FDgbi;KX;W0mjGLV z?*<$Od<M7yxC@BDw;25aIe=1tJp^9-Wk7ZV>H+5fmjFKkS^?d02+<#)1Q-Eh01p6c zfcbzWfHi=vfOi0=0T%&30@?swyU2tTz#u>lpa3utFc+`_@XszznQ#!uCx9z}Hb76b zKmkw!%z!a~sQ^2m8n7AgHsCbiHh}%DK<obd?^pu-TDcbCyY=wSp2E)MNlPb7^XC>} zZX)8V`u0ie_~B28u295SB!Cw`1BtOKOxkn&D>2Mz{TWVXklTK;@Lm08gk2Gzh%f@7 zfUpOG%{MFp418fCLhOSC0U_KG!b&ghlOePqgdavY_alC34&hXU%n1;I5LZ-$bJs0G 
z5<)pb1;WkLA|&IBN@4dki?9HMl?b0gXh*mZ;Y5V(2(1WNy_MJYJbwLb*F8_&RLlBJ zc>JMhvt}ynvlY|l&V0x|Q!!`m?CCS-%~Lq$J@WX&iUIv*%^M&P89rTJ*&0P2YW-o4 z?N&cg0sG2|I~W1TZd^%TiP(2eQgg$%`ET^w+iiXDuUmr2&OZ!Y6&kb#m+e}fTb10K zMBI)R(__h~+exxiQiW$fvix?Em}Vi}ZYKon8R!f9r}Iz@${=0u#0F0q42Sw)GU!fX zz;cx@WwL=(Av!eI7u|CodE-uE_~qdi0h*n)PxxuLg}iwuru*jnd$b+-<nEnVao8x9 zml#^i@(6!voQ3i8ziHzvB>Z-a7&D%8#7<*@%tf}eCy8V}V;8Sgqo~;6e>`Uqc0I?? zZq4w~Uj^WgNOxia&aCrQYSha-lZo4Skog*aea>F;!R@38jhwx+vQ7R<cU1n@CSN+s zb1?870r%wg879Nc=ul{cj=%Mw<$a3|{{Oq4DE$9QPmrV1@wKaE7rW%kO6+s-f8>Fe zWwJ+uy4J&W&;IJQ2fE5-Npe}_x<L`LyH4Hr79k6u1Y`ix0I2{4AQ}(>kOSJUT7(t= z1zZAL1bhZK2RIFA0-ONU0}cZY0`>xS19kwm0@edo0+s<7|B|Z~r?3FX9KZ~~B!Cr= z2e1H)03{$E5COn1mk1rwBS%~Sw0(z)0Vtpua1qc1I0)DdSOZuBm;;yu7!Mc?$O9O^ zn~Bx~$pEAR;sKF>P(UD{{R+;}02cwL0rddJzZc<lz<NM6zz&!S7!Ak+7y)Sj0dV76 z%ewb^$)bqzV&Q%46YszN{lAZ|8`MjdqGE@{9e?Y8#c;J+gw3~5DDqz!i^B~6--h3y zH}SWmD@nd+Ubm#HEbu^mFPTDgDqHXtfW7?+xD-KzY4Fa!A;1_2C@DVu{|-+7Um*Au z;Q!wNtGP@NbUbq)oqv{3G3ftyIIym_?270dCle@Ew5yN^_SZ6ACZr*^j(9YFvw{6Q zQ%AhM0?RtSxaf$d%|Lp;d+~E_#Ie$Kq%U{{{K&f_-n<EMl+zIx3tv5OJ4V*k>TCwJ zt8$p~0p&tvt#YsOsPeq>jm)m9zN+b}d8#F<H&pvoCspmL9_j?ORy|x@uAZ;1Rc}={ zs!ywbRtID$vP@Z{vmVHrpXJP2o3$b9wXC<Zj%A(9x|nq>%bS&}8K+sQ@oHK%eY7U+ zOWLj454G*upzP4><ZM%RN%nGQ_O9%s*#SDOZmDjaZjio6U!`BDzop-o^KQ<WoDqhx zh8c$0hLuLW$!2=owAggmblh~w<TbUK0?m=;IP)O0);!$&u=y$T^X3ia*UYEQlDT_s zpIk$3RW4el5QH@d@|73V&t|n|{h5`d>92W9b4oKTdtdehU7p@~LVsHSwf?$Zmh)iF ztA;*Cqj99M)Hu!fxY2HW&iI1yWn;7Pd*d(0fu<oQjmcozZQ9Q((Pa9_^f{{Xo#{u@ zuc(e>3NeS9`<VNhGt8OhZ1XVlDDwpKL*`2JeDl-hrKsLU^H%d7GisxR8bdHg`A_9B zr?Oc&JhL)$StiLmmH9<xfGSdztV&bqRHIa5Rj;bvQms<|L%m&nP5qPlwpu`EmuD@? 
zdJ$cGIZLGp)kbPFv?H{ewC`&_)?UgE)5Ykf=w|9>>jL$|^b_?>IbY=TGAIm#3_8Ow z!~F)QVYy+AVW;7M;RD0ZhJMBejLyf5UmI^5hnwCueQYW)k2B9SKWScVe#QJx^C9ym z=AX^C%zv7LbGzln<_<%bKArnw?pL{jMG&C!g+a=>%9BcW=7G$is*$P(R7BOF`apGF zbxB24KdWx3{!|64d#a<=3Uz<AO084ptH-J*sHdoxs$WtcR=2CgtPE$CDod9&CaW@Q zMOLt8phm5ER5MR=SaV!+QS-Itdx*||G;JD56Qm78TVu4z+Cf^a)}Xa$3$&xOR_z4s zgW9Ru+1eVdM@zKpwVSouv^%wLYY%Cgv}d%RYA-?D#O&DYFS5VMrrEz__s~73dqKBF zw?p@qu0eN3=ln$Xv#w3oT_2-Y>&^NCeW`wu{xSV)`rZ2Tkc(UTyZV7SALd-nY01fk zFqRwahDC;F4J!>N3?Cb=L0+<qON_gWpBt|jsj;U?XEH-jcbfK^PMIRivF3d9WQ_i+ z=CkH+&EdJKTwU(8+(&bt%-xlHAh#j6Jy&pAVJt!zf|Na!W0Yme>B>dQjmp=QXOv$m zzfu0AY|8Xzwr0ktY^pag5$jc-sIK#lRj3E3E$Z>=2h`IsAzx8{sUCt}d{J{yb3hxR z>!Ulbm-O8XDTafF>xRcospcW(E9PI#wp<6B6;ql15`<C8@k-}Sr7Ckq=8DW_)qM3b z^~kInSzR?s%}ULyns+p3b?@uT4I2zi$mEO(Z_E*dtqA%l2Py|EGnE>pUTK2vn6Gpx z>y$^7zbN};?o+*>-mDIXPIxeDQ`YNQd$LYsorNYCpqZdqt=XX2i(w7dW@~$9M`u5( zTj<oitvjsyQzzH=)c4g7(f^zy<%Af*3_T6ehH6ObYle>uvBv&JjWNggpm8du<m<-$ z#^c5hjGq`U8$(RtrdZPnb6)P7xyN(A&HatFZVA3lM-Z%xR!&qdP!_3YWB!~~|Dygw z9g(HUnwS-&iPk1)hiZ$o4{4nbLqvvU7iGWC1tdaOuKSPfxZapEC8sUtJM-`6kldKu z^xWdy>FB!Wb6?K=ckYqgv%aEMVk3ZA5Txvb{&*wvQs&LfJD9+7Rj4XLrB_W+Eml>l zmZ?^#R;t#h)~hzCHmiPAwWw~Y+MrLTsUJgszvxu2QLk5TP#;nMppJnuP0dQn%E(e? 
zWnoqrv!27OSedmZYkgLO=C(%A25Ea~S>02#D%Aa9$o+opyV~Eix3#kD;OwyMp4p1* zrP<4~S7t}+2I#VNW?g}9zwTY#3Eii<R^1()pzouft#|03))W1H{V{!`-g!ZPN&l1n zcl{lGP)@g;sGPW*emO&O^f~675jn*<lXBMOyao9m3!OO0__pz&@r3b?v6tzeCK<HZ z3G?3EuUVIG7KA(mTIJTvXH;X<PW4*#dG+t=)foF5S(T8c-I{Bf7R?*lqU=YrS7rYW zMWxo=Z@AC+sngWke4lwbQ;WL=VKjmQ<=c?-cT`7J$FcByp!!HPRy|4WP!o*dE%nl@ z@3Z8Z6wPqWOwBIMEzK6~4y_d{$dmeMISX?(<^0R!WqH*L!ZHN?m5(UbDi0_(tGa2z zHN7-_G;x|FO<&CbjX^U;GeuLaalWVdMDwGjt9CL}$R_PO+8?x$*-4Ox2eW5nZ_VDH z{h#bUx>Vh0-7`=eCv*q(NAw@%+{jsI*kbtH@V((*#+}AP5XaArktT&{DAtE-CKI&i zQY=MVAuGZ;uI_~a${|X(a=$V#^GxPXsw{Pmx>#MIUaVd_7)$0k&3VmNns1@$v|6DV zsueY#m^mqPDki2a)1J8?b4TXx%)REr=5<$|kfqH)a*H6~3}3iuZZj{--JaW&OEC;G z-hXmsnzB$iLupekQ*Ku_DXCJP8JVffG-ggjPU|zbX4YpmX9_BX%A%U2nyOlaMSe3h zOS4LVqPD1~s%NQ%h;{Sl$}aYjn?ubJSZAZn@n(fN)tqL|SohF8S+B@BX1jTTd69XE zx!SzUyu!TFyvBTB)jZkkV3>);FD;bCgy~+IF=pN8-|ahrdY)L<f1zyLx}^(cEB}k0 O__RzJ{1aFN;r{`h?oBoT diff --git a/Resources/WPy32-3720/python-3.7.2/Scripts/pip3.7.exe b/Resources/WPy32-3720/python-3.7.2/Scripts/pip3.7.exe index 5306633aa2b43aee86eab63309cb3f98b75bbb6b..6b1eb425cd96cf41cf68b526cc82383e72784de9 100644 GIT binary patch delta 28830 zcmd?Sdtg(=_AkDZz5=AAKnsPIw$K*JJI$j_nl=qWOTmJHLQ9pWK#dUvF+ucD0w=8! 
zH&t+a-~$B#Mdf&qw-2yTq@W;(97RE`M-EO6M~fg-t?b`tP12(0-0!`=Kkk3Gxc01B zv-X<Tnl&?PX2%adjXHfbYIVM<Wlg~-->Nr{Ek1JcrObA??S;I|qlkYpzEfti2=g-= zMX1Sq6XAg^*_p=?9yz%&^C>~shWRbwuuz1P?wsQynbLp!Ig2EzR23!p2L3(`N$DcF zNm8r2s5(p1r--f>Bq_pxFb>ed2CGs#om5Fuq$K6Gu}P`{UG?!QDWCXesiaNLHT`;M z>s8XqPI&o}Bqguum(<g#Q%MfuX#Z=Gq^YVk{ra#Ll_sapX^=@cWeBl<{7Zw>;fv<Z z_RN-~u0xT6H>Kf#FW7K(YUeZ*gx=JsScN*J6AB<rLNt*rQ}^k5p({`nMR2%xysW-e zwMMUbJTls+k)#bZ{YOg1_X9<3{%VcnEvhSub1GhKU2WSnMR6$+lH{VYIQbpPlH@-W zA4+h#@;i@|xLXHWy~D}(b)iJ9TcspOEe=T@-)20{??&4lZI~ioTyZfHBK|ukmK51* z`b*{Qa(`14r)vqxmD=SVO!!~OjcAuEkJnTfmAc|Kr*3~#{qT{Jb0S}XW<W`5>pe+t z)emQnMW#E-D_m<KwXUemsJLd?z!s=(3skr8J0Zw;)Yhnb;(%*CI41H`MN!=93dP&T zFC|G5g;x^UrO2e4L2ISxp%B>>iS~4>%J_+RNh*_DxD{{u52e`}J-y_XAq^2lt?sC2 zD(k9GU)~%j;_^?yW&6<qk>z|^ESnpZ=9?XoQ;sI{Ug7sZ3PQ;F^tz(REyjA=HQkf- zu6I$YfcK)4Pa$&A#UVvk+)<ES3Fc>R2<I-;#D8e0JpO#d_$IeBXgpWE_^-|XmBg?0 z5fyolN?bujRv|MV>U|`set#1(xOS%8$$$P`QL5xFd)q+pAIhk*X}nQ^=`F#@auJVe zL+%f#jrDE`y3xj84oMWkcJ+`XxfSJYOsW@xwn7jD)QwJ6f?ccRxVK1Rm93j6ro`58 z-w(lvKv9e0^|kTTzId~!rE=yBp(Or`+H&`B#j7!14R~Amzo46ymFUL4<t|sj$dDG6 zBe`5j&_KnI;wcW^Tj7$rP}g2@qI;b@L$vA@S#;GiLKWg|*S%xwidvoT#etK$HkelK zy0b$jYIH-UH=Z~v%J~em5!A?b*n2t{VZ2&Q)sDtcuAC_!4wbiDZc!5AQ4;?E$*p#) z+)_D-aw-WWGy+;`uX?pgf}@3{#w1QH8c7B2MD3T5jAUcIV>*ot|M}Jx7teJJ!$X0Z zj5CPIMVhj5sCGp~V01>Zl3*c;XX-|0#5vsy5iD`KJC!L3`WE(BOhQZ|>gD7;*y}NU zePDE1L_CP$<Q_ev9PFkFCpS!|3dILJ8CkM9A8hWc%BM~w&gECoyN!_<Q^jjL@mgi2 z`{z)ii+>hM^oYc#Ly1>HiSzl}p~Tf9@j)ae;8m6DA3YpW<LHcO5;RUp7~R6AX_Ba9 zwnlFOHHky2c#Dk6#JVRkIBe6TiB9w*dP_;jKw@1@MiZz`#gmcf+6LJb{Ka2Msp5zt zkIu*_+vq}zDhX-e1`RD#3e2s9@I;b9Z54`UA?k`8sHH|a2sxFNpvQ#_)iLzJL^QQr zP;$v3y_2OK$$rpesMmF4@tuauGU1&vJ{uikJX%{1D^cchf;5rmKuq0V5_NlG{D%^& z(6c?<8YdswEv&Lp=-wcPOPT8lk!y@7z1H8T#v8=mmDsy?V=s2<HSQPGn-txv@gG)K zS);w*>GniZdQ&*TqcPT31)`B!!AG<yO4+qDu49m!q`0N2E50K|hN*D!#~$z8%O}NB zHShgJQ9>^r(&9aTgBp3eb*>Vmmhrn{gl?l8sa5{Jr1bYBg4b{FUq%+kB?Rap0P2sD zP;m!PaqR8IA?`qQP}E^iJFyG00XO?g=bk=2<liv%Xk8^C5`9qRxAgZ!y3>MD?*9Jz 
z$jXUMo(m-qb=Nl5o^Dl?cyx9!qi&%(*p0G$8`FTW0b%JVi}$<0F5(PXv;Abv%iYj~ zp(29<icjtD(FAs#15+@zv6jNyLG^RB{3Jz;N9)!#A*>2$Z#o>BHTai^%V&)BwP&db z_KGIx3tDBJpRA_wYNh<4?F_rCOKSc}k$3@<V`XJUUF{WEJLlnOX~^aBGPlOn{5vVJ z356jWZaizdOaj})JKL|Z!(B3rf5U7nDi!MQ6z_MU_0wHhUD1(Gpd;NvxLZ9l)W%D@ zvcB5hzNziWxgt4~R@yGCbHp|3EL($@*VUdu(VgXibrkY4s0<nCT7`<T3R0Z>%2m=2 zt6+5PCBdXXUxDh@-Bwm1^n`zh&VeCEn=i(!>4TN6y@(+$W<Dpc4$aIV8mw9=#!r>s zm(t%m#2t5A-Iz3Ye<#<fVaw1EwO5e=hPY^lCCCPrNN7=A<V<H+L|yH9C><m@A*pU3 zMdZdhr@Bhk`0LeGzS^VSpNAa$xK(bEMY-!rkQ7HrwWpAB;N#Xn?MdoK(dGZ0Dj$)B zjtP;8;=QvWi4jL4>gY|ky3UiWDd(f9)E(MzMYPzVn}5axFH}zvx~1%*)~#@5>7aq- zWjq^`ga44ej1NG;k2SxBR=35{+RCfG+|WFz7gh^eS6mzxH;xpxBDQDjCz$>krJgW! zd~sj4DYoC(r!cJsiZrm2vHTg4nuyfkQLI*~tTt~NujzQjeZ8)z#pxpXd`GGn&cO|s ze&SG)r+TrjT@wd=1?C1!NPKn_6s|#5VCu?cuLko{4?Yly?w&IOdj4TDyQOP}`dTvc zb{+0R8TC}gc%&Qa!EIE;LIh<>;ICl?7%uU6S_~dhf;U5ps<r4`tb4u=U?Sxf8qz3n zIq#lE%}DD>OuE`~uIfkCJC71RffWCt<i;XeS124UhZ*lYKcDOi7x@kn5-KvSN<G6+ z4~%kint`nTEDSGbWnww+jbhL=OjC-JAI0Qf8Hk=SJ@n>8|GYM-oR$U1o*Lo~tefQ) zS{z>WCR&TyJU#dsyaEH)nV$e6k7o(G<Z(+ZC}*)oE}0+bDwntd6C&lVwpiDYk&>so z9Gf->Q+dfkZww!p1HNdvIMNoCm4(EfGG6LP*YJmiv%lzyWBWpKxYg?Wu#a?$x(~~f zB;B4C>Oss0PTn_<O^UN;IZ-*O-1`8Uv?xwjvw~VhbXPBKgl2?geA=7sja%WPmXN%O ze3cy?nZiHK$2wiX8~lP5jZj&8))Of=VC90v`HhB~OLnQXR`CvSYiC%ac3M#C*fPEv zGH3#NH5kLk_eQ1kyjTQs<IEYs_%gnL7`OqnMoEiIjP<_TrZ?=B(nd-R$BrG_P+v|~ zIDQ6X#O#$YdV<lm!__hJ;f<8yj>KH%pi*tzR7~q2Z;CszJYc3Y-uS14D0iQ!Ga8Ji z-XhV#s13v)(5n3h()<S_SNyUZ>b~@Q_w^aZZl~Ug151T`@IY&n99g@(h!u5DuL5c5 zM>CD}GaC4uQIfR0U@0||CyI|ipqNP<kHg6P!*CF_u(d4ezvB8Vh`+y&{Lzpj2Y!k2 zABdaCH;k5~6%(}5a+E&U*?%|<i%(Yz`>1=bsU9TBMcNf5p;YKxk=9?I2K(hgD~DAO z>o`#r+kr)?1wwq{idkC4rzk@Xpy2-cI47Tkf>us%2)#SMg%$Kj>y`dz8p%UDA7cx9 z9Jb$1OPF>QpZLdlR=#9BTVcEGjpx-cE1}`t4_Nb16>3j|GpUGq;wPEbQrVAS=@J;< zYWvXJi<22@lm??~`OmQHvef(raO-%@ecgna=3ghUU*j_k>*&Qpwgyjfqjc;T`nj8v zH`AIBq)v^}mtSIB*K6eQt@4@Lv#??-6WGLr-l<nm9pr|A)~!sGV|Qq~D7OT)+?l`v z2`N1XV^%vF9`YCCV9DynxA1bdFJY4U*%;QXr#|*6EXl)zz#7jc^c<)f&ldMwpq^IB 
zzUz5wr!Q_sJ39H7<Jh=febjsutLo+GR7x3Z`QRSx@4d2OoMhUZJ=j;hGSk6LDaX5a z2we!&SkI^Q5Jo7G&tn4;b5tAH^u)};3+c_x#Beq+{X8sVwZ`@vnY{*#LCXypXS=bj ziAmiXFb7lyrng-kqjh)}RPfQ`*yo8U8mw7^QEx37!j!~*J`vfw1ZL%`BV)NIp1+Lc zW4L}}f%?avSFF>|d($g;I`s+cMVu4Or>)>!QFvHVY*RLiS@^#!>Cbpy*#0F{kgz%2 ztH3~1S&KB@NMrp`_G0fIy@z4u-xE19*q5xNTUA%2!Ba_C*up;SogUka#a*~StK!&i zy^~|>Z&2pMv5Y=%7^7}bw$P^ih|)X{+E$LGY6%s(5_>u8%*g79z|2VOJ<IvGQ&?0| zfvSj&O3GDF*RiEZdC6!B%zJXgfeX=BN9!Y(dmg}s3L4wWdro1el8owaCbO922_sfh zFPy@XhfJgf%@13Wz~_SyE~XABO-os1?qgU@az@vqAc>|Z#j>N4EhtUXscyWmDOQr$ z3#Ey=FMx!FR1@v}(wY6LVN@$C{jG|pzcT*(k{G$DX=P%tTU}9;vy9I|A}bm{OqX)w z%>yuwY{mHDlfD5GlGZHKp!yhb#ha{n6BTc~?YG5)Wy`3ta#!YZhs4)l4#eQ_w_=;h zw}27LWbbtT0<eGu+wa5*Vq~fbeSM3<sROx-QWJwIwe=75H6E=Bbfwqh_(YH!rT&!V zl`f(wX3l`67IQx`r#6kp&<f9oKZkM-r_<U%F4k`GLry0l2S!1(<{kc~AN|#FD)v=! zOxy_!cNI+Cyf~?{oZn(*KPT%uwUE7X@|_de@V-4qZliAc68ZnBo9+i8+`@fAWeXO{ z-niPUp3eU3Do>REy4w2%^_%K4tL;0;M=mANagNI!I_xi44ihew%jWqo?4-xR`ztV; z54W1<g5bG>-!fa0HuB*pz@4*bYJZ>;|9mDP-kU=&A-|DgO<|+FUP4->+#1Ys@}~*4 zH7@QWM_2jtm#|?eoz(BoV3w40{Yk3OVYpb_F`Vasja(FMAuDj5tiWN|+=-MfmQ+78 zIYpoQ>kLV%Tc~oY%X#z<VmB3t!D^}z=Jq$NctSHNZviUPU=Ajg!3$%P*iR{$`I8~5 zp@RN+7cd~(@n&Qj>PYG+RN50GS8FksC-7CoB68LPW4DYm#Fi4Ne`P=;Ukbj<Mx+iK z>o-ulwek!!P&qF_D?0gPG!b^yXr(Cs2_@KFE>4Dx?&9PxUqpeclBG(2l~$rv!1M;J z3|pufQrYHIOQ#`_-%!pATG)luj0uml{Go>~qVd(3a_z&!q{AN`(!Ml#Itz8c)a8zM z#dU+IztYC~s@1$6ed_H}4p$ha*Cj1%YFeo;3rk;gn#P^Xr(=y+$&Y7-330qs5T?w- z5P(-k87lf&<4)ph1|tI{!PDZ9Ha<^t{j+nS4d+Q4{xK)arQvrG7oUK3K*crgPW)CP zxK%ux%4a+5!II23hG-i|sW!Usa4P0$2o&>FJb8L^s6UIEtlsn9XeU202qW$)9>L*o z(#aaRQ(*i#I3w4SPNCc8ak=PRps2}TeO@B|UBv$nJi<$0SQdUCcFhg^7)spy<aMkN z#GqYqF)IR9XvO*^5POB52`%*Ody$@ITGs>$&f{6NDj`l!L2RT5`Zft-)HxAko)N*Y z(+Hk_EJ_0wxk%dBhNxT=xluwKwtu6R=(SB)Ao&`jN|30+zYUbSu?>yb7}bJjr%0*3 zG3p9173_mpe*|jJ;GGRCv3?5Fo&%~RtfW40Km^+LNz|zlwkM$mMAy66-YE&of%*%G zK|&|1_`SsABH6;JmOzfOrQo@-B-oH>+p(+vJF1vN5WwGXv->lK6dwa#&hHNO2lNGw zg3c>GB=31}B(~#cMDm0`CdW~-S_<}e$G9$v<Ppt`XN*ywzLRBV_D~IHMVb9Z=t)1P 
zqUnuijY^>UJW6=q2$T?WI*t7%n^?W->OP<**i_7(%pBlbFdPhU6V*HsB0^nE+sgUs z@1eexVks*}ZoBHx`aN1_IsZ<Oi-T+AR>%*|aa(v#(m6R+EP5l~#FA=)Ydz-F_E$qI z#8qgN+Im6r_CN;w_kTe?;=R!L$=iZS$Rxx`>rl|y!~v$;{8*GX$f6#8jLWbk!SZxt zJQO-S1B8_A4=YM}x~9Vao8pNn<M}WZmHy478<M*edvzf2W^!*HYZ~yTmAc!Fy+x>x z;ekLhdXRb<y%GmAr40)jBP{OtwwcD`xAFD=WFHLdn*^?Kch3mVRc0!dlJG<eQ)lI? z`m@5Uq+Dp%^oFR>&@Ok^N^)MOAHZUBdIP7;C~}G3+nSCI31|&GubtGE&emj&7)YyU z{JB<J9nxF{wiH$u9>T!<jJP_$kS2JkP;u6jRfgP5gZg#XeuE)6L5n?I#0jI;!rPkI z9fR)e9%u@Y%LO@)KhVU!AC%sYW+hLWYb_a_Phgn*hazD~b@CMG_9P`?Z7~}@xDO<c z9h}+y^4EVB@$J`a`QTI}t{<G)b(^iDfK;AO2AAqys*<F)XspSjG<;_j8$M*XI_hgS zW5`^ef~?5RWISp-WmJ?4*y5uiZfTOMJl_F7<^ESBNvr^-$n!HalP1FP8P9i#X<s}2 zphoI&3Y7B~Of(~$gh1h(TFU!>3nN#bW@{1Ctd4gVaR=hFu&Y#}nud=|m#!6SwJ4f* zLRm#!er+N;&0i?JL#|wJS{Z*m9n;bC6_ZVAnb=2FX%(6Ws}jj$?yu5FmF+uq3<`%Q z+P?%l{XW>NYxqPIgZ;R*k8j1rC6QqxBhILVV_f!XvC)yMX(>}zw*?ClSRH!qYoh16 zcg3?+JWq<}74bYHo_odfG@jg!p5WK9p9<7IkGORmN%0QmL6F2!#V7Gcfm_y*SKQNC zuHA;vKWpLdw9dWKS(0p*J*RYgx*AUf)^Q{S){%!Su<j5&>(1j@W&2Q9^Cmdm>f0hN z`}Yx-6xdadK)31{(CbDvRry<|-mlZ`IiTCWJWE0oHtEQFtlMAXJ6M9aJLW)Z<gJ0) z^?1p4Q1@^eUQ;q^dyjRX^Zj*HIn-b+f<UdPU9G5ft*CEptDsYTo4@Nsj)wz&p5__n z=MkP9Ki7CNhctK+5YyEjL9)M&_%+S%3fxDL$mVSw399Bzz*h*S<xt7yClT$mj@X(X z0b1q%Ww3XszcDh@7qYJ{u#XA@=VQR#AtmzKIwCet1w|Hxy{o>nZqL!W(VCNR;!#oo zF>m%!-K1vntqI8k{w$eNNpd$KS+qlQl#ogIA7xU!`FpH4sQRsAJr({|m#5g@I^TnZ z_x-xs6G#fxN&?ngj|V*B^L^f45tqYs=ya&xnl~af)J}2DPj}#H%`$K}Vg0Q+UO3j5 zsZHKQ|KZ5+>u9a;>vQRqR%3|A5aQWPEvWd=gF-c$9fD`BPgj#Gusd}%!v(THSCc6a zudXIpAWL*LJp?jUSJRo0&7>!2S5oGQb-PI88f#DENj8Kpg=KF325Sw`5_5Hrjlk5P ztEtD+-&&@7ED2nmWqv-=6E~#6=c$BtXofWCYQK&G!-2F$p=Ts1q~D=-aQOF83rltD z+7JjAP+(mP(136m>*^kd_{Lh|i6*VNi0Cz;nR=8Mng?P?Qd;x7z}wT9>rJh19leIx z!yEMuwGmX{s(3Z+4?0G9>1wXQR$!{>vY`nZ+dwT5{hMh-O7bp>z`g%y9P05%bPcBI zXka1pq1#=L!-yhUg3yVhs<>DY<+jpro~|rC*?bm~{Twa)GfIx;5Oe&HgSwilFs9Tz zZ-ar3gBl}oSlC{^UDWDft5mxQxaX#owMP)v)o#JlDc5dc6St>!{p;<JF4p0?z^=V) z+4hVWl7u(etJ|}DB!ybM`4Sc+{>>DXn!f}7hk82s!r3T(^GRZ9U8sAk6>sQjn(*|u 
z&eN^hFX(sZ9&M)M+15Kf_xfA!@nrd17wBqVLRzTE%NuLSXrWIe&+M?`s(o12PLXpv zh93W67R|T1nmK5|a8*6pD!G<4jZTI%Av6%(D9n*4`W0$GVKyA0JYd5lSrD!rkd{+f zUjd^WBUeQF8#T?JAmZ&=<==Pyw~>l+hj)5Ndq`GrqE%?pvgm*aR|wtPh?Hfy@-9-A z6^EyTv7Fz7RX}B+_7GF;jQ5=hb`HrPPUK^L0?Q6$6{uZ}SL88mf!Zg4gu6!BwH6WG ze$gI^kQ(Ypr?QJQs!qIBhJ8UIR>I1s%1L=2jbXU8SA2)+NS?moF@Ymx>TRC_=Z!fd z%v@P1szx<SgvN$g1V>s2$8y^+Z?|wKpgN;MBdV-98Y&m63H4Lp$#;=$FvHL9)79LG z!A;t^il%rn>F?mh##-W-IuOi%>SY)-wEBFoy7g150=37$B@14E{Y1n+LL6sEcs7p# z6S@g+=xWE4Y>dGJX`V!C686HZ^k|xRuO48VcHN;m!?e5mrIQ*xGs#&|#z)RV&mYhS z)~y0bJ|q9?|HZm2I->4f=G>h%XC}rQ9L38mde3B~js~eL#%*W-9+vdM0x_b`cZ}#z z`VET~?0hp$79~#%j&$I3{Aq}IpH73QpZxpwvM+Y$`luSDJhGUMhU}xXA^V6LfQl)~ z7OgiUm=ZFY?zE8AyoLXSL!Z!`tyFp<YZc#DIQ9XzyzhB>iuFRBXgHi=k^1v|93F&P zSyNf*84*Gwy|V|%BAZ|zw>~6XZn%X%@t@-V>E9<txA!httR!mBE}JdyqgJ_LF%VI< zZADe;gh{Dz-pr$>W4Ux`PeSZD;uIF2XZ_52>>a8Tlu*&`$_3Yiy*+(Y-~X=bdsqIm zuKs<8z$tlpD7y&Z*FFKqW<jfzLra)F7=t8^PKigzPtwdIi&dC>r1>t?j;rAs2zz}t z|EMU-_wD=*vz*l24T%&X`F~hfg)9fI56zrG`-AsBCa3V~*BlP%3A;l&3`j3@NDBc= z0S$l;k><wj5a=dtF`vhv7Kimu9@t1{B=UjKjsjPOYL9x7+)c()Oh2@f`i>u&ZD>aK z(18-2i(ms`@E7c~uzQEzr`|A-T^O3G%3#sM@)NP|5Nl5?acenNgHB~}q>`|{h21)A zy!xvF?A2jI)O}{MPlu(bk56Y;ho!5xOlL{MHzl9_h)NH~iIaAdS^q|JJVUFK)(vTc z*xBK!YRA7>+wepy9UkStq0<NMk1e61=vG{pGC+<g=Oa$i{*F$V0%KgVSs>o?VBOTh z#*Y|Uc01AoeKTn3ef_#Or7?<7QgtO^8-!V=w|R=-k@wGD_<JzlfBkoF_dl|11xxU( z|9YFZJMQAhjz~&m$3~=MKhiQHN0r3}>Wv9IMX7W(<&axf#BrcU^<SV*P@|0=(ig?Q zosN2qKM#-kT}SoUAJ@I<$YNjXlkt9=z68&r?7Ih0!F08g$$lJBI{Nt3hoY6@T??s| zQom<svhVSQo1vRr>?#N~gp<b{M?(Z=xXR>$O(>7@+128OYF+L3sOr@yI+j!EYQIFp zjnh-O#VhIfPaYMy(L`!6O0&@-LhZ(UgvNTf-2AOk)p1pF>8Lo|1t`h_*%9O-rJ_KE znpVd?Qks~&a%I{8N@*4;1=O_FagVG@><eV&%B+Euc0m#S($RfAM1OUZe8Mp*dEcxo zmTVkwi|E%fo=8MKk++YJV5EOu6WlRP{4>xR`~|CMQ|ayQ{+*Yj$toM8h=5E{_>fMQ z!4C>0SYy0dHHdv_9IxKe!iJj0h=`HBVoFt8hO;B4{>Ary0GqL!7FN9XG+N}y5A8$* z9)@1lL~jo#FTcU?*lon1?A}J)_8WBL2TX5H^>qhDe&`qy-&>BykVYq0juPo)ICcp! 
zWp_Pd`~pxcYvhMcBUX@w9`Ynv*HX@u=aUss(cP59_kpppvTm8Pu}DMf!xg^P4C3x4 z$DoW|P)4}q))G^?b!Hn*VaG(?psyghzcEf3ukkjP^F>G5C3B%_Fw4$K?6I0u>J>Oh zc%m8#+Cl?wT24m7+aO_J@~7y=4(W+vb`4=q<_!0(hGsP=340M7jKu!9Trn@i5si}Y zE-2zsSCV2L4OJoM1W`H|>M447M#F~r;HN`_o>&}|c5o^u{jrpyY#@1*-W5KNLwLy_ zD{^Sw247D(oL1YY(667zO^ec>q2>ukNU&GaSUhc;25(~HY*fYsmc<8_Ej;`Xt(-?4 ze&}ZC#i+(&7mXF@|5LDrp?WQL2}7|qH&Zo)?axh8&pyx2=JwOjiMe8)KZHeE2HB`s z=8``Y71kVw1?9Yz6b~NNlT^$JkqQ=foyvJsBdEr!FapO~n9GvkJ3}R&3;%#Puh!MH zqEK=%ZjO*Eb?;$t%f&-OvDZQ|Qz-U)D3%|JJsOHRLb2tc*yvDfQ7Bdtip>qhrWH_& zc%qO?*<luE?H%7?K9!KwsNe$*vjXdg5vZakeuc7nFhVVS6yBH$J{Yr4W&0(B6qLD~ zfAl^`e8WH4bJl*oKQuobns?$RteuVZGaE4OPNPL!E73m{%BQZS6mE1-1AgjSC?Y~d zK6SZ7L^SQEaCd@3lFg(k8%Z-fJ^cm5h*Nlf0WnEfWYl{zHjuJy%%P+Wq#=z_LUx%0 zE?;63FNc(LV_2JXH3v`uQcZSmUVJ6-%Vtt$EKP;JhKm>K=Z&G_h4gb{sCYssztDx& z_MSO%0jamEehR6H$o(_r<VNVrS9z(aH}nNWa3_(j48__rq$@1hmaATQh)uDX)Qul8 z*_NrgpS@|zO3b<nX+odzQi3ZHQH+7pE$ou5pQ?rR$RF6}kuMdcoTpEx&gd$h5im&Q z{Nk-_a=y_?CehQ|wHhIRyckLjL7m|-QWA(%$tR7&VZ#!*bzQUY;2U6zXiMjvTwdXN z<0(*%L+0M8NZUURk4jd!Fu~_9c!K24oZ(4w5yrE^lC^HS*-1kkRAW6(F4A05cQAbR z6&B4J{tGrSVwk}pW{s*ro9k-Mpc^(W2l_}++)H%*z+bJ!thZkD&PvD$Z9J4u<uifl zCvi{16{(nOirIn!OBbXPUBtH+WOQ1JBCe=Dzw#3Mx*)OBDiLd1ed%%gAa#v_4Y3bU zH-E{>>`MmLUw2>`<?y%mT=WB#Q;vN95J(+<-8PkQqOqUsgM5WJm%^c@N>>vDsyl)Y zCj#0CI=CEb9VGmK!mha9hNrG32TZOu1uYsM0i(YXHLBpxog&MQOV+yDz2Fj8!uG$7 zr1G+=jR#GrhPaUT&Tp`~keuo|1_3zyopVa@g<kY%jrC6Y<CD%(kDt~{k^{xcUp%;+ zHzHCOSwu=I9~ttIJL2PCT!$cG$meBFz5_EDu8%?NKG(Y-?x%9$ym^42qfFk-%C1Nl zF3`Gn(AK!((o~Xbp%RUu${)Lxr8*MS`7JERk>_}yx*H344PJ6$VRZ(Y-?2Q2-6xhO z5&RHjQM-8(&}MrmLN%i{W7`~=3EOa)CfweNnIgh_{k<bgeZGaokIamrLvFLDh&e_M z!s)kr<mkSjRNwVp4Fb2royL!$rG@`tqCX0JAC4^Uf~2J{1*7G@h$r7Ps817028}O} zulX;e%8j@}f+O<?6J39qc~i1_RST=WDIL-`+%!BspK8(SC#OTL;<3Y8H?^>DZyHKm z-A47N@+b-CT3Ftwq>R=3VY^!^VdMr{qxPcki&N1K7a^hUA&%y?fyf>hW$yJqrM*8Y zWAL_07`Il`K=BT>7HK{I8&N$*5A<noC3eyx!5{a(;YWEQXvx@uY16wed>$F+O6!iH z==lsbnJ#7Eh-M9nN=G#Ah?#-vCR+jBFshy`UlXS#*5qwi2!~H#bj}<ehvm6A-H%YP 
z7F6PwW(mHI^AnEh3ksdGJoq-4=``iX(TQr~XRK3Eey104j{v9M^JcLLMYH<lAm-#B z&jRh*naZSka^cdsX}rrp<;?n&oha%%0wvPb`7vgM<hsahR*Pqbj4L2G7AnWF&`PLV zWu@`dO@S$GlwDHs1Rt3NI!Rh1(i+-ltnQG4lWY~P&UqqmzQ|ww2d!#UoHmD*grt)B z!&(tn$*;awk8btn|NJhDx60eMF7oHSzycO=#H8r`jfqM^=xoWOE%sl}So#T!+RPaZ z{tTbsg#8O%l9e*KFSSegetNq?wnMdg_?JbhJh2tZCl!1GE}6i)U+F)rS#kZ$P^j-Y z!8ucohTe7}PH%)$`Q%3Wg&DVncW6Z##q>UjRTAbS3vuM^Leq8Qb*ItCSo2h$H=ecj zO*611W74sld45c9b?_6mXH5FQP|3Z$DtPrC)S2(bRyB0O4$TO53D|>}?{DA3R3$x< z2S8ebyE7jYZXFz&;8>x&jxuNZk`y&=csWZlVuuPDbP0l=IK@_!^ofnSLD_POZ7C^J z?^dzjONPgKVGqN*+{9DNH$II`9UIr((m`#0n51N#Vh6@24EgO4=AnJCHQ`$yW4MBC zF5UX5<Ua$$tq-TT_3_&w_V(B$?T6viOn%^F_RZKHv0Fh1-}1QSW2PE6g0XQK>XsJv z*0_Pb&_P$|IkB!ZLo?)v{~KMzfuy5d4pcVZdMDkw4^v~>siQ#^M?n*Dw*WUa)3Hkp z6yfYkTe;=0xUX;mTg*lYL@WZeX>|560=8<4!xPJQilqKX3VvH=JiCRJ-E7PnhdvNT zM;<M=3!ILPVsQw_Pg$XxFr3)f=DFD$HxE}2y_J1+voR6oC>RyK4i%Z^iFd2Z$|mxE zxY(f5{_1^|Y<y`}PrCVl+a3x0ffF6=oqU4bJHet(zmvT_VKzzO-Qe8zH>Fk{Xo`-A zshlaKvW#2q?R*;sygv;Wa6e)%-qNQOyo*wKHi&^l-U~sfiu`TtGpUXf;NFxE!^T#? 
z*y40+IbM)sU~nHP)x-Uz!kv#RV8zQ{9A{C^*~O29Vha(3m7xdFV2*t7V;&eEhf9DF zfg-xv#xKFwBA<b&YJtZph1<m&TGEl?<7}(5kNW9(?1=Ma^_3R3aN<tw6$}ToS!eM% ziGGq@{o@?AaMFFfF}@@Zf24<Q_XoQDfhI(8NjjE)`vGg4RH{yx!^V}RWSQ`Cpy&$T zH??1sU5QOxrGNZI1zUa_EH&28ac{8IWpDpEr}D{V>YcOM>yyX!rtN||Y6dR4^x|J0 z>1e8d9bvIkGLu)5f%^?)92on5yY5ZmTM?nAUUP&^n9^$yHFY0e8z#DM2#5t5JS8`v zl|>7)N2g>Y+QWS5f{zAZa8DWEH-~*NB{TL1Y(Byrb+wVTP3b-OcCa-d=Q18OmkgiK zWQ=IkIDx+|*u@~8(8vnQGqewa61pvu(#RH+_qKN~qy>dG*a-ck8&VpcMVgK;C#vHl z&pTGH_P(xMPj7`U)NFV6oC?bx8e^=!yk95O58IIDt8>}+<xbXlYJc^rpV{!K$#9}< zeYOvqH`Uhfrq{?DhJ6d%%)jF(;;MHDzl_~RSW}*QpB<Q*sfmJFsC*kY1aEn+FXr*P zpUcG4P&s4hpU(vkbQ}x9y}b(JYzI1y1*a_A@or^;T0KC|Zh0Yz{czhqRm0fF)9z6{ z${f=liWPp;CcNV0cMWBqOkbq_Y6u%M<7;*1cPwjWwi-w7x6K@>P8`l&m^oOTF`Ruc zGgX~7d`EDmTcy4>m{rWaAM#Gj{#0E*hP`sTSM?_AJ7<Kt_ZT)|&QNu*m@S=ii+bG) z?D(9Ch5qM-`|d}tL;B%9hm_)h+YWF9@Cl$d=wlZ<{&))g)D-j}&GNYA^ER74xA&Y@ zXr5Tc&?2AIkmgm)C0ZxXr#TrT6?YzJn2Kw_nBy*_VvN?&J;Jo#JJt$YNJaDTLWhb5 z-wLVd$+y@WYui=yP?-2C!myT=0{xSU{;)PtmBI}32E^WSgR9~#(nW0pC?Q>}n!+s4 zX0VLE^-3R$dD~y4QTy#mrB~-K9PEvA7s?Gh8}v%r9{5hOHP6P!gpaz}1Am*+?-4W| zABWjAbTlp0#fE5DUB24Ge*D{gN!vQ1zM<1%S@>x0s_&%RU86tQX(X$Dqo4YHdB^%U z`l!_(yv5$#b`kyd>YK*MHT~cRZhCWd<ndG>p6yF?9lmuZe+T<u_S^Q`Bl8KFz2kB9 zN1wBwcL=@6+9~wnuAM?Jp5B>8dhyoIWvWQIFPLq+tB}J>yEm%~cCfK~rm4T)&R*G5 z9O=a6b$F-K)c0+NOZpCV=k09i-Zb@1YuF2W=MGAEjC7=KtTfH<kfyD0NRI-V0p`Ct zq>+G!0n3oa9-Wsm=k(T&Cb%Cc8bt9mU%+ssi3^R?hi|{ZF{Oi}0UQ<Y5wq><#R>e; z8$8)Sn;%0m9m3$o@f)j|u4+!#iKS!#aAVn<%+*`j@+$Mqq3ij&+BIMnpUU>(dtdJ; z&22zKrKv%>JYEZL^Ve8aD?Y0?ZgD)XyTPh|{qZ{<9Xi*5EhW~iZE|!4oLNq}fDCn6 znH#4-Wy)uPqE=8xy46np<|+2}9sSf&Df{w{0bT8nLTQ3QEb3%4AI`eDvXcg4oq^c| z9!9zqi5Vq|2E{y)KHZh*yMpxsQX?>GMwVCLThjH=N7}u4$NC#0>rBgV)Gc}6S0E3A z<b^t<W)dp-)8Trc8<#57$yi@`8`gtx8X*(DL~s!*u887FbZ9sC+ZKHNLT<AJ3ENeC z`*YOBuDCMoT~LP54kH$6Z8WGGXWllW0oPTy)Q=il8@$kQ9R)X=y`w{xz~kJ2-A%=v zDZX)_xK+5qIFgcZRnwDP&dr3wiBJlOsF0W<$zZ@rU{|?j6#C-7Y(wQ?7zZ2_dvKNd z9Lg(f2&{^@<{0NX(jE1?40?yD*o26`2-8^rNO&QH1wsv4rS|L+jSbr~F~L<6AJ}g1 
zVB=9h|I>Q(`wYbjxu;{Uq9`hlwv7LSEI-`?)$zD3EbiH4?JB$)vLz!%vuEAOYFTD` z-M94n3ac$_^5LFz4Ol}Cl^WE&-6vemw>Ip^x@)v54&5esqxi=#GEkNC!`agLiM|!x zkaL|UT17|T-tXO!V=DLotUhSp7APQ{M6-NI?D$YHXe{SWys+F}P+2!s<>^l!4>NFs zHwMa7SdQ)Zlw{F)X!|iZ6^yvN9IA9g5dwH&fK-WON&bPPN+bSLmncWxTd)!OXI*-J z6T9i|N_CI1Y~9`8W)3ew(UEKHBheUMN|AP@s#u8u`be~h1Y>!Z61MrCaYK*^Dt^m5 z;cwOU$K{b@VrAcW8|^J3p_d!zb|`k^2G(c6X!YR_*sKK^8oJsUz%@^{YC#{hbct<Q zFhH%j#7-{gtIqv^{kR})xZxq{BX1i%y%ic<)E1=Aa0w^vxs5I>mhY9~MoRnfU<C_P zG#ZNFfk-m*E!1E|uySGl9Qav+^YCp_D^xd;1`4hd<NNZ2c;LpCki(V2I1#5Wg_H#C zCDybsD;}?2`!ryt%a$`{B1?bfLO2KR9i=`qhRweBrk*Vr-GM}ZSA~B9EVR<Oe?5Ea z-eWzxb|hVTu|4U@7uj=*2B{|-*x^Mto4>T74?cvS7aunC;~sE9p93kp0$8Jz2!-dR zK=FadECm5B?H43pV#W8l>2roZ&b-&#*P36wfHvdX@uX|ct<6=DWNVwq8zDyQXgTM< zM`&pF4$FX#ov$S+;x#eQuX_K+XM)ry#iB^T_5M~i!jstJUJHhMw0sh){{z29$?;FZ za@^`k#S-d4&#<`r$U#jG2itGn-u$a|Y>y{hU2uY(^CZTfBHcqugXbW8ev*>I<G<aZ z@pe)T`_V)xaa|62j^R2s621Z<{UAs-9JD`r9qkxVS9IQql(WZ~eep!~y(YG1aS!!0 z6MK2_(7ZRE3%4{rglL|r!a;~~9Ro9|!mY+DgK^7H<g2SWh&9>aqU-9QU1SsY4b$GA zqP`=O4ZXiVYH-W_{qVf^ehZ$j-Jhv0ZDAkZpW;h~f}pQDmiDhai%Lf0;~q4@FHy3# zj&?2B3Du2OMJ`Xfjw};#`?0cl!*Afrkw3b@6zaV|WLFuzH04)(XH0*5)iW^YaANT8 zWc49k+=r!a0~7IOAiHNtjJ6A?AwB#1Gi=3@S!(OE?As;jI^=vK+nr{X@W5QQ{DHN- zH9hb-zqpCH0lg<V{TJzLce?JF{ow1JEgN9dun0eQ6T9%>0JZl4*7YyOt`1Pc7dMIV zmcJ~{B$78yo~V^4X{{6Sa|L_Rx>7^g#CKY-pb2&n2>bpo{e93=`aVL(7d_Q(zsXBl zTMyd}R~u}1cV5wv#D9GfHv6mpZbRLEG@`%B5q9(WFUFxL?T5p(0J&PMbyVnT$z@v3 z$6-fV`3`;GlS!vN!^^pqC@6@he>oop9BbC{@V=7w{fNzfHFa<sS=TUoO9&V2=Yju` z{q%9R3+y}o!2Ze~*q{3kcKJBF{%VGLjG1M;mZSc)flYgD#DwQD)^QZre%8KmF)hS| zvq(|)NHE%Ps$+}>9wr>N-LGUVx_}jcsI>ox%6QZol?;=L@8WUF&i=jfQMUUvv-+R+ zvwy!fM{R0hlU_HgyD%0&fLJ|(GmF`^*GDtQ)@P$bt;f1A9Ycw?Egjplb3Ih@KRzxi zzn{Im)Iz?ED@(B!dXLSjPEbFynB8A}bAbk`4~tU49S;cOaE0drf%aC`%{vckjG7sb z^RxGcss%3(vV>yMDlf*vlxjO{v3*%T%6|5;!k%6C|9AGcdfDz}De5m8*{93;n-)@Q z6t$M|&P&?!x!94<8w>vz`Fyd64fT!o^;raCxmd%yi%`oui!hF>MHtV2MH!XFiTo!D zb9hih)A+X{%n(&j7H9EKM0_a!P=tEkAVL#=M}*0IhX^hFbrI(CjR^gR9ExwTgRd2| 
z(R{TCOZZ9=mh$Bybn+!4oXi)Ba4L6+a2lT_!dZN(2<LLA2wl8Hg!8#WgbTSvgdVOJ z;S!!D!lgV7p^pl&dpQOt?@r|1v~c1v0$oXH8`@6Uy^7FF0$ok$1%W<6=vjfTCG?~~ zpC|NvfvzX?ZGmng^zQ=QLg*%ezKaWN5`RYE^@Khu&_jg!1$u<gB?3K0=-og?nNLz| zrbwW34nA3+X9yiD&~t>^1=>WYQK07u%@QaF>f?O{ev!!C1$vs$7=gAB+O~jN;0mFa z1lmgI1%b8^dRCwkEDJv=&`3hx7pR8Nw}FZ>Ybmx>B*YQAVS$fi#}odPASV(k3pAO~ z<pND3)GN>oLfrz*B6OxehY~tjpn5{b3e-fXU7!|1jRMUlG)tfk722Qo75Hc(cNb_0 zp)o*3p-U<D%RN+3C!s$GbTXme3Un%=X9PNp(BlG~Md(4G*sH-K)3JZRWtvJk+7@Mj z<?_V%ocu-b;1C+0YUc}*lh=hPm;fAM%3nehy)DWWrraqgi%mB3d_gJa6-4=SPP!Vp zfdXHtEoyZ5z3h;rI9pT+QGD>PrG`lHwy4rDQ*4NmXp3@&DZkw<3Ylz+njEGyhbU>b zsHtJfXCX?4Eoxer@_vY7u|+KmQ+9+X`Iy@TWwMiR2$3ANs3l>h$3m3Rwy33HN_B`* zVvAZHrraeci%V_h8bK-Nl|&J~FJ`ToqaA{kduZlAv65}68La+!B|Ba7nGsVeu8mcc z`@hrqFX(L7XUz0QHM{h<*xML^#`4A=Xjc3G8R7p$vuM!e%}U0T(suS$ZO@eWH!=5T z)WvOgNUIS(f#~WCJe&Z}o2>go1J&TPJ(NMg?1z#wC2+!ba1H6_0p|dx0mlG`0Pg~J zzR6yCs3e{^WrZXs0x|%W?d<wPb7P<1K?&DBM}DO{*n)?9_gs%e#3*6)HsGfLZQI!k z52vc1-@)E{IE#YjhxM7nf!v)4rUGUGT!4juC4l7swu9MMmc)lL*$To=z#+hy9qiea z<!pIilr{_NA%4CF4xcF-*oi<|@iBa&!+!^%!R6SAvWR(jV;1Jx4(H~^p@?nN)5Am$ z3m)MCK&x_m063DF9~qiJiJ_}1ba-7tIazAh;ztJe?S#z$eS35U=Wp@k@21hVpTuq$ zYGeBz88FX-!^5z@sTa`TTl-1c26_a0?x(yr&?DG+KjprG9*srkA%#x$>G}Y^c*6$@ z)lE23{uESeQIj_Yho|Q^<68?HT47i`_IFkyTZcK|$HEtLa_ndz5wR4tjz3r3TONPz zuA^GHVp?EyH;qzsF8C!sypFvg4_BwX!M>7*+Q0iN?UCuHEl}%?4wT?VBWxUfBZ}Yn z37xxFC}ui@r&|#?bT3)ePhC5N-L<N}Pk9xZO(zpQ>qcQ|zsQacQQCNOnCcmEKv6sE zOnbw9A!Oit*#dFJ;xqbnp2%|LvyC;7jgpX$vVA}3#!uexDMAk277Fzxy)T&Ivx;&j zPMy)e0mo+a!v(l5=%QX)y#o5M1s52^^(3r^+t`}qUYe~SH}Hoyu<>=tKC!0BxUmRD z&ShwG{1QQV1zZWd8G8@^yo>nIt^`ei?%>{P*G@DpemUt9v>qSn>uP9{kPA+uyIfn4 z1m6?g?f#W)nz%sX#GRJ4l(3P6D4$WhM#RfXI|wd8aJI)cP6dX&dYsD{5{Z{KP6llQ zJ>>B`)ETw|o-x)~;`J1Q17BC#*o;T5IV9Is#65#Rgp~fu$w1LXAiXzG4wRw`rDzoI zMUjYWEjD)i(e$jE*&WL;IxYIsce3|jA`Drp?mm|ISg|_U$7Vfd)qb<AonE_)ZGOzJ zp0tb&UOgZUHg9^v2Kt^g5KB*dYSs&^;UV#MHCwp4r_VYUdNUnYf`tDOXI%KG7-gu$ zaq0MqpdS6JDyE+;!=^I)^CcP7^mL)9{U<ma<@{eS;T6=dum5_};t$LDKPb-ER#R?T 
zbVbNH#UBPihFgIA;P~|<#CqZfPlEsC3;vXlTFsKyWcJJZ!|TD3y1$Q)!w+3uvQF1{ zx==B<4`8#`q^F(TMboF2Jmu}5itjxSmn}Eo5+&`mHmxxY*|ii-1q*);LAd&Mpky`V zXef$sv0X2#OQxF$5sw=!6_lDL@%$SveEk=ATKj_g>SzlN_;hr$hVBhL6E8N{<$M(` z<*|Dn@1?F^%N~BbuX^Yu_VVK?>a0ub@Z)z!p5RE$WdkD12${{&B1aHX%SMosW;YwC zKGVX!#;+eDusktKy)~UZ`@|5nFP$|$@g)&oe)0?j%b)s)UT{7=f?_M4j#oQc*warJ zQtZsreJOVB=>f^pu^S2>0L%j#&<6{ZU87M#e&=FlU7HcxS1`c&CGpI~%)K@>Ggv{Y z=Z<r&6jm1L9S+21Z>Tvw^8Vqh;b55f3c_$!<ADCBw@{tjjcK0Aj2(Z2ZHku;8+->q z2{{PocEeVVvfe^j&jmf4bq}uYc#V2%1Iqgcmt}YUt*0aNhkypZxN#PTfqFUQ6p&-E z^^L~XSLOMrj8Bb2X0S?joQ7TnS>?$t<7aVsk^HcFC^w8~mynK<KNHtl&%<GU9{09* z09oLga6rT7dxVdZgAl5M9EbA~YohCKSOa3aZdbfr>7+y_f4HrXPx>1RJlkKKrcLR$ z6Yb1B_lcvC-@_+q^AHUyrsh7@|G7-f{cyPd*GU?C?)E>Onbf@t0iiP!4V{_9J)f@D zeZ0f`e4;wC`b((S#20$27re^mzmTRL|0;X*h5h0ztv9QBaf<q1UD>`D|8$^s;B)3& ze@`dN=O`6y=r}fEMt|14evf+D6YQl8;tb^<8^UL6{fE#GCI0!FkmdV+1g+zfkamW& z;Vd{aZAa}(C923tXHcq-H!c=uaD&*ro0h6CzQlgqbesCImssiMvFa|J*wdR=sdsN+ znJ=fRuieawU!F7Q!?C2ASgJgI1UKmq<0d_zf1^XX2k;=E9`F;=*w-%?`!=XCB2VB) zT^haJ_>?=^6+~{7KMRf%=Ok?t@#7rmZ91HzlZr-siY$y0Ojsu;AA@%)gD3tczuxcI zqsvop#UG<3-gSgjb~N4~zvJ|Cf$2U>V8UOM%r91j#?8kFLqkMvkUt4&@eSuqAATsz z_i6_pF9jO>Sf-!ysmm8$FkBxJM}x9Xx4$$y(*KRhbJ)pKP}x8E;*(kWmcFrLZb-VN zijCir(tkH7;`b<Sr8h#04)GOi=;H&vzK+#xu@5h=fLp0D<W{<GqR3xl++GMB4k!Kj zT@|lcTpgSJR(i6DCWM$ka<sAjwqQ>_ZC-n2`P_MI{aa~6v7ElIOr~E9k}shSf4qwj zIzPzq=j#=ZUP!$7R*Ekq(XWlhb=6=3{}gjn*us1T6yD(@$_;BDZtX;V5NTwzU+G{6 z-=ExMefSGu9)AZ99{@D?k5ElR{>BpWJ~rXETr@Ba$K^<Tq>W__r;gqLX8d@LxVt89 zv9(`d>sUMDiq8y<*Nh7~FRW%CzulAWt2MtplpM@;qb*u!oeNq=zZZ5pWHlJiLL8W9 zJqDfYSPK5P^A4U!dN+KtDAA>fM`_N^L5=Y{JLJ<m=kCG{Bn0M5L$-1@7Dk{o5iL;7 zo_MEQ50n8vWmoeStSCNmkR$hDy4(9sO3Ks3V>~+w$9eu`_!ZqGajUAXm~uq%ue_6< zONG8#jP^*vi2;?uf7lW9%hhLOZxf0?<54QUzxtfyy>J+8@ZS$Hk@yMkruA_@f<zn+ zHDP$ouG4~yC5cMh5?uK%{g%$*cL#J{cRN}G<%J$zefMCD1xmGXei3_bpChRS&ic^8 z=Lul);UE?yiR1_CUc~zB&q{b4K3*&V_-3Sre+KH=S*&7ziuOTzvp@Y#T*@rAV*hw? zAhJK1X2~D+r>Yy*GTnRDp3|xDmH1)4BK&^HvMyLTC-Sk6vKj9stMf0hrSD~SrGvs? 
zMg>w}`ggFuzc-*ekq)$~ubrXo^x+{<^9}5~_lCqDeh4LOL2bQlSUb_rb2P4C1M2Ou z@(e0mPnvLt&tUh~_g3FGlC7#w8bgAx`Nh-Qnde~o*eEE6y-URJci{ep$B19`7{OQU zM4C9`d~OF>=m+rYDgyCL;1M&}5A|syYHLxGjdT=+H@*nyt)U;9>EVv#Zjlrc?RhVt zmvP$43J#1=j~>bHIWREGz8$YdG&p%480kFCcHMI;_7Jp<=pRn2;E$HEeFp{&ng#`c zp6UGKuPD52T6?os@HZWhV)eFqU?JN0MhDX!9NvpE<$8!kC(*=W4pov(J~%8!q+P6G zD-WjhCcOFc>p1YEKmOEh;DeOQh3vh9DeCbK_Vqz;O41crfFdrAFalF9lC!yzE?ZI8 zD3{ry21D9pln$CZ7Xf}38s+T8Esz!d$(mueu|o|;we~CaOT+NgFz+nz8qYRAMBQ`b zR+Q8d<?X^>zm*jq%2Y>QW_KS-9x)X>*Ur$-1eZL69|2B37?^T}?s^R3rFbKBmcu6l z4J~om-a{$sXMSR54h@5fN5B83-BQ`10%xa^N>7FA#7@JWAb{UU4e$KIci5b3sG99} z*n0D}sqEFpB=y%*+2O{~S&Ir$#CrU=0dA;O3B2Qm+OG%D1E@1@sNMb(GaMPAF8e2& zbHvyigYJ(v)Dp^Q{W_8V*24aNWU%_|H|*?@42@gO*F6XUJ3f6sO68+_(ipQEcTc_! zXyw(Y_1H(KJ>BplxmaC0uo-@3vglS|26(S=bWWQ(`X%lJURmBz-L@&<B4hlgorDt& z+Foma!8VA#j)PBfIc}hcpUr9PqjiWCndhHXu=DKqBfahWa1$x~!$e1|q^fiP^_wRZ zqH!3XP%6rU28fg!gWAv`kQ&*G9qn`+9US#!4DC#NNIHAkNEtPRvV$gR>3Aasx`X4X zD@SjNSli;*GjfjVJC*NHf=W6CI1l&<pzW!WQUC@(31BW@IpA@?7QlYM7l0N(WG|Hz z56A%I1111|!iVZjfRljTfGwcy1k?l009pWQcsk<&gA?I{v;(;numrFQuoG|ya2n7A z_yO=MpnGq;05Aa@fJuNEfcbz20X2ZNfGvRifRli8fFA&DfUbR1(g474fE_RnFs+YI zB`pTB8t^jUUBIV+3xGC2Pt+g-U;^9(m;{&$SOQoH*aWx^pugp)nH#VQ@SlJCQXSHZ zPKPvSk3)L?Gi>SCI6^G=QbDT!77M;K2Roqdn-cz|dkJe9X*^)94z>CPKP!*!EE%z5 z`*C%wZ`O4O{ThO#K}eTBxBc#rNRA63_5e~GLTo&wZEX&zJJAvLAUeWCgzJC99WR7V zgxF9@O}{#%B!mkQ(#9zbVG7ZIaY(5M6A`8%+}7%#D<MOq`m1oHfv_Io0|;vnK8VnT zunplXgbG5()g3d=p6G0mW=kHxd`OumEyBMAfHdh&=^i}iBgG4B4%Tawng?2@bRX4d z$Dj+jswog8O##Df>2@&BL$U_~((u9}#7H#J77DR*fZvCIX%Kmz$lwm(10ire;se9N zfwS80ZeeG>Ywer=Uj>*c&BdFGB)c?i$8nyePU}BXYSF>}g>p<P8(lW(a{bGD+`~#% zv`(I!^N#ekiX{YX)Xw5>wG|D86FkR;HT`<Bg~7y#wMh<kSFo@8@nrT$FjX~xy@uyN z_CYXJJy@hfyfrKo4jmEVyhqQ*UF;M6ezrsUJe#>MrbJxM38g$y%3edX${C7&U&g+_ zn4(L-*FczEsL7>_DGv7I#ok>%E5AYeww&4j-ABFTHj<?tbvwz>9iQV61*XR{a~y0z zu(#Sdk2qBEO!Y&mdWTQ2f4`d8)depit3J`e^$@$B&@Qv!0-X!MA1nNK?}+icLxuWa zk9coag9siyBuZR=N+g!Y|3kdcQQRZpVz(Fn^G`$RBsBr}d^#Tq|4pNT)gnndVW!&u 
zo(HYu5}Nw|bwdU(@&A28#(ru!7&k*zykODYvpwVI-FM&YJLXle?U%k7AxqBL9@nIK zcQ05xulUaS^U4;^n^PebsmeTy@ZZ=92}6xIMlHDe_B-#Hx8up5?uue3)oq>Y;bEn} z`r;j(TT?Zvj3)=HuKP-Uc1WWE4nRJ@0x$vefGj{7AQ7MiNPyNW4(TF*1I`1Q0OtT_ z0H*;b0mlGG0EYnefOi2q0b2m;0c!!ozxs;9C#?js9N+=W2h0LY1~>tw00&?wAPs<D zLzCJyFcI-MfCeA|S}!}K761pF0UQDB1#AMW1}q262h0Ub155^#UY>{A1IY)N07C&8 zfMh^CKnrO52|@wq04D*&e+c1Tz!tz0fEvI;z%;;QKq<fihy%1;ay<QBgeu|b^=j4F zk%K~=KpjDU^i}U4|NhVMrw2!<`WxsRul;X}YNVu(9w}{$87a{>#q>!h2DkXz8WF;m zcg7@VXUl($+*2Q+N>lrAl~mFL=;hbIlorKkX6pDeeuL%f{|{(?0YCL7{_iV#6fDj5 ze*}x<3;&t$LIz&=pMYvgeNGu5-1Bs_>Yr-g?J9|5raljapud*6Dk%%8?eWAaEJrX? zx5w-6!KZFfH{w|yq-WlUpL_=KxEt{$uYn)3+uv_`199Zj9#@zAeb29*RGpo^GoW_X zkJR6)_vl~OAJU)KU(=^%56T{ytueR^4;Vf)XpPCnLB=xUOydIMX5-t&!^W?T7maFD zswu}b-n7u<H9c+GYC3AVVEWk<VNNv<G8daG%=ef*=4Iwb%+Hu#FmE&OH#eAj<qXcL z%xTE^DCftV*j#;XAouCq{l45Eb6azj+<40f%P7l(mKQ8<ST0)zSm#@P*2k?It%t1K z+BNU_ytnf{%lj^GuB}J@8~Ho)59d1yN(!bIxC#PxL*eYgI|>&U?ke10c&6~X!YhTS zQJN%8Mj+|q^u6>0^ag#QevaO&_v_c{U(z4Z|EBMp-6K0UyDHmPWcbYd*PN?4zvrap z4#<5o_h{}&>n`h*yuSIx`Qr*cC^%K{ZNZNPG4|PZsGCHW2{iiU`px?7`uFr7>Oa$e zss9lzrp_LlJt2E)_T$-4XTOlWDZ95J)sSfzV#qe+8tjH5L#d(6aI0Z9T6CddiD9{6 zmElRlvxfDC*L;Q@hW&;Q3?~d<8_pZPH?$a{jj?F(&rMy-gUw^jTg@lTm&|QuL(WY( z^K+_m9?4mgvms|k&fXkNZlBy=b0aO?EVo$7Epse)T3)sswEWA`YRR@vu)btnnpcB% z+mqMTX10yAm7?A1^Q#N0>|Q(lI=JM(c`gFq-?Cc_d8QSnznM;$Hs$oT_O<r2W?BbX zhgq|&X6tCH)B2S4dF#toTi%kq$MO_gT>iNHiu~F63-Xub|22Pi{(<}t^Uvr1kgqDp zESOfXreIS+ygl3gpuO3yDXcCG6h2@0M&Z%Ij|)F9Y$=qSl2igM@1)Pv57(RY1^SzO z`ic4_`VDB#!}{*ov$J2#{yn?i@PnbsxZJqL_<`|PqskO(>H%fTGz~YIOa-Q~rdv!E zrs<|TO$$s*O%It?nVvH3H61ryHW|%U^BnV?<^|?j^ExP8LC&lkcg{;WujO3M`6Wk} z8=sq;+dp?u?ugu+-2B|pxi{xJbA3~DXXoCXyD0bm-0IvFxew>A%6%Mq`Eu^Nxd(GU z$UTw!S?;;q^SRBrEl|=(OJ_?Di_v1WOti#Vds<Vi1FR+1eb)D_XRO~sQGc~Y=5@-; z%o~wcl;_NI=Xvw0^Xl?g-ln`Sp}euSc-wGv#w?pUKOui${yq5*<*)YTzn;G%|E>JS z{NM651$_%96x>y?r{G@&T6=GMs(q||0s8Aj`+0lNE)|X`oLIQD@QK3hg`XBmvm|L1 z`Y&3as4v#vsb8Rf2Achb{$KhZ^uOwj+4<Qe*|ph^W<Q&~Bl~prm)VhqZib$Q41;3m 
zZyaWH8ecSCF#g+k&FJf7N)ruRVVYsO-z1yXn4X0$d~E7wo`+^UjIk=`wpgxNqVkgS zwD=gLnh$SOEj5G=SIg&SQsq40-7ETN?m6lSP?hW;UaQ}zRfWMiIj7JkR@gz<Z$ zlJj$Jz<SKuYVDo(yKRcS(moxdX`cN~``z|?efF2_JM5p>|7}l2{iNmCTR>2@J~`Wj z;WXQ{+1!+qo_kB~y}7qq-?tsL9k+dK`-kmw+n2U)Y+AI_HE8rk`*!=!XrIo7NreLn zZz^0?xV5mk@G{wf)!1Sn=mKS{(m$%dGkZbyLBq#}PR1U_$uMYl8hsBMYmG(ba`O!H zT%kH=&EJ@RH)rPDi@~-EgKc}xo}4dpq}<rt-nsptJm%aPxm74mlJz-jbYAZ~OWwG= z>3IwC{+@RtZ&ShH0;OPpz1Z%w-(g>6|GRw}6=FR$NC;f|yY)W(I{lmaJ^G>9YqHm6 zCm2Tf3>FyK=MB3JpBa8J++>uE`KFspC8nvSIi@Gg5jiQjcjQWzla_JTyOG_cf(Uzp zeW2ZHpJ~6>?zcZ_f5rZm{X_eg_8;x4!XAa0g}IdBHc4!z-qv5#M`m|{rAy3C&Q61g zT993r{Y3WK?B}!B!$@t(-kR;(mi=qCWQa6q3|hk?L$zV8;YGtH!xqC<!*Rnk!yu#H zXfj%i`9_Cvw6VlkYJAH0ym7s8lW~i2l<7g!a#O8|nbw-tnO-t&H+^IJ&h(=x$80lu z%@3Mam>)9NnIAW=H>+|w=fvf#&3OfFxj*M{PEM{3!>=rNaqeGyxyv!!T5@|>QY;x3 z6Nc6}%OuNG%k37IWszlx<*ydm@|5K{%O=bVZ&|Kbx>~cWTl040y_Ki4CD{7errH+T z6x$4#pMC}1?0I%(m+B>HG#WvrKdXN|`&{-<*+#=8!;gl3#<9i*=zOec6sD~h^K0fK z<{!+yNiY$sbGBGt%+u%Jm;WM+<VG@&C$WJ>phM&J%1+8o%}&oAm^~!>MKsy_+23ZT z8pc44(v3OBF~<4ES1>D{GX7*NGTjY>^b#y-8!|APN102_t1$n4VOGrA98*qJ&OJF> zbJpi>&fRS}ZMoeVu<r9&k6RzhdopiB-fMaL^4jt=wqCY$+efy{{Ji`-^B>J$Rlo{# z_MYhN7wi}9y9%WythrFIqxzHjNrq?8S${LUZTQ@<&A8k6j`0AB(bGgb9U6{vvt8Nq zv;V&p7?v0|7`7PhF+5;+#O{pU6}vmY^5l)(7rQ@pEcVm4Br=NSq}b<x(habE0-Boc zv7cbiAi&5F0c2|!co<|DbQr7v)|xL2I1E(`HGm~chG7RV^_&2fEFTR27)k)saD-6` zuv%MSbin9=5d*OPQUE8J1Yjw?09Xt>FlGQ2Gdd<f0~Mz0B{ANxVz6Ve<FMng6R;Dp zldzMqQ?OI9v#_(VbFg!<^RV*)W}pze2)h`&47(h=0=p8s3cDJ+2D=u!4m;53Qw7_! 
zEJjJ*wp8Z8>g@}7&jQUoJ6#QUFe|5Ca!#gRZenKM^olIT31})rvl$K8z5x|8FaQ8c CCr|AF delta 25687 zcmd_Sdt8*&_CNm2!);U$R8Z~=h>C(b%rL_Yb74$CG|@p6q!i5yO=M|0V`kcD1LZhU z>ejT5njW-c9W~2)siBx)Y1&P#%*f2PjTM~?Ny+E;-g^eD^Z8z0zt`*c-w$2SUVH8J z?CaWVuf6tu=CjtI56=g!G|JjmnTAhY|Hs%Nm)0jQ?tro94=w&0@y4L#i~r=|dy7Bi zp=a^G5q{qM_Tn!PURtkM{5+?>=;wdeA0Fo6xJRbjSe~nrqgS=a$G!9DXAcPezh*&{ zb(3`!geDL+`G~I!Mi>ETW^s!T7X=|mkTF7MDAXe!C<vNg$tYR>?sEcUf{~?LWx`hL zs^ngwyJdo<iy*B0RuJM>CCBwXc$a6#czl1&f-qXPDmk|25c|vpcF=N8;=zh;BDZCU z-Mgj(Z3bijPLuv(Qr8HS$r#!MGFD8?L|MW(MCAzo@@GLoL*~w$HG8@sywM#As0x4W zfjyhV2V|=QgM(4=E!92A{J{QMpXUpL>qKO2P5T{5vf46kTLk(<7-;ntE6s>xiJ?_8 zDN1Oz2%e&L^%*L3A?E_4A`^!R)?(^K0pO^qx5pi-Pb7Z?_RTG`jYcN5`R!`SHZ>hw z?#1oy#Vzz<bcgzsp)q{TG23`>6jOyHNmXK*<Y=d*(SpFjDSgPKpg3nqDX49XFu<4a zZXg;_f{HlgRMU&`$~`SqjSO5T6oy87f~Tb_sJwRZXTNRpES9G?2IRNB>}hdNxN&_c zdf+HJAh47k>m~^9{F|P98YBoUcgo%QZHC6h0q(_Z9i<)W!>i#eWhLFDr+c#KO_cP( zT}kq^sOt^4!q?W@=Aim+#|<kT$;kOP((`Xt1)<1tFu&}p1S^}KkqZK<<2sR%?zo!n zXiop_DN)^8C6uz3jq#O*MwQX&D2Qi&GOqsCMVoUC3b4`}f3oJK*5;X|iU+;jD?Jg1 zb|azE(8C^5XlR`Gvp2w<-@@{$h{5~$E#*^AHB9i_aNP(ktr~(ZQn$JtZ8RCRC@;ql zC6wB1gNF$=Cb+1Ayi|&kV_1=5FYrE5X4{L*Yx8efYn}P+R@%txchkd%;l6M#CWgC? 
zk+u14*7r_;l?|L%@s~WUf6dbo$r#H@X$Xc3B4Rk{_{<xiZY^N--5-J~P4S$p;Pox_ zv`A5rsEMn;P$gE$s+>Y8%gZW7<uS2qscChDN>N6}(H88G72w6myrGNbf}`gan`Rhm zM3*W7Z^VO3U2pNFUu|1@;}Qe_(mqU3pKw>FUPR23FNc7srLr=2UTVA)mBk9VSeus` zVXc~jU;$CegPj?mBG*vYqSS~vjv(ae71i~&(`dPsj%$~sr7KdGAvZzEx|hFbw^ns2 zk)lHJrtJpj8w9?pyoyrfnpI|{Uyb42=(eY(c{Dn(Rkcdztw@|fZ$gwB15-oqu<U=k zBgIL`^mkvXjb8Ajrt#GC<ea>>GmqnkFm1sovt{(M2#tBElbE8CqAbW8*+#RxGpuE0 zWstHm$&N}$Q4vT38flx#b9fRtc)*1*0yVmh^(Gpj@NuDg3RyiRqydli<r`s@R6L%F zxNRBFN6Bjfwg{iLI1NcGMn8K@dEZ)ucX!;ekds|fMMng=(Iq|hAl@mV7RZ76R82i} zLW%7nNX2w3$?O^x8v{MZWwxW=OKkD&tY;i#YS*Z^A0dt`+3`cAD_8K`vd4rU2(FS_ z>Br$@W!FB=gZ^~8TwP!34tAXs%V-6Lq6Cv`Jo1T?s)UJ4e_*o#^Vv%O{DaN3h)S0s z#nB(HxXdXf;cMz`%XpdJAk~-8iBM$aY7kwG5CtC>)WJE<bqHLj1-h{^33Ah)RVTBf z9?|@LZ+AwF4<}Q*rQV+!&>@%ayYt(b#)}F>Z&tc=DfYlBg*T`w#Z@0zUTmdDz{y)1 z+E~+!Ry0Ahys5QwL~jq4-_GvjY_}*UOYy16qfG6VRzVEXc!nkxPqEA0`zUz4VU0B` zoCczcik`JBqCQpW4!y6orUkh5WKccR?sO(v<Y`ma*GT9j$fn{?HUf2Rkht3CnL<{# zmV3?`+U7+q@3DkT4UIMGIL9o;QC?D8a}x^8dNNo@zg1dRCAVGwlZmDhbz-_s{gvT2 zrc7t>cXVxJe`u<DBbFXM?s9Z=r9RQ|1Mla)ww2(*h*)XMEiUll6klh~3L{}*G0ysq z<Rd)UmuBq9tZTq6j7+7W!9i<lZm=enx;L_rK8lCC<|>}*di_Qg+HSI}m{80`R*cFR zZHl{o>LgG-{0-NM=-NUNS~;N6S!wvx(VBkj<2Fx=hnLWi(wX3{xrBT?yzBo*qNmBj zN4D0A{GtOID{H<&cE>+%3up?cW$%^M+B4;)^awh=b3|;DP$w%r-6}~%(mpo)5+7h` zAFHvnkGEzecuGsl=q4-zt`o{K`XdJQ^z~GXj3JC^MThuX!}Y;f=&M3)b0n<(ra1C* z_ugR_Vg;d5=nefw7siqv;mKoO!Ghq<mm3-#VRR!;jYg{X6xR1jy}_ZN%Q|1Fx?7vy zVzsgS>F#(|a;SF;RC+{d34Mhd6%k}ha<743;f)4pdin{Lr1A!j1f5apamb<Yd(m_x zR`srME9u!7ayUFyyc0vNgbz`1E*Dh4eK1~ym}{a_hC#5#L$II{qD$%PG3X|&DQv~C z(()McQAA?mzywA_O^|uCKEK`Gi`u}4soIrJi6LEkC`RQBL<<Y#p2At~?w&%MdrY9G zyCKXLH%zei^n@v5u*?+BafHz8{lORPDF`$Kr9~sLw+AnErpswz60!Fv2-}JN@pqv$ znl$#9+w+$}f)IY7g>_E&fe<VGW)Qi*r&%maA)cNwVGZbDKYwK(@>b8KPS!BN5lzPo zMOP%!^{`HD5*w23$O!Y~D|?`)0jnyEwR`<Akxj6+8p+YWDzri$v?C2Q3@f4IkOxzl zuX{u2*BDpy>v<k{8mClvBTHx|W1z=DYZO9Jh`K)X!O0B=g@|E7!<PC2Qr|1aRxnJc z3bfK~nXLG_ty;G5Iufe_OWm4!q|p_BO9-lpomkPJ{^StL!yB}P@w-Ds*D;0bc;M1M 
zmOwlf{aAIk0vr+GDDxaY))wRmtXZNV!y@}SL0a_D6m@+?15L{kge4|B>kfMmy@^sJ z4a<%8f~fyRaCmJEEpt<r-kplvpN#c<l-_jgk095vh+<l)5rm~<LWL!$ytk|Cqyh%e zBy<*x2`*Ex3tVlHi+j83Bdk=fDTTb`hb}Gjy#Sv4P*=SICI)-w-YdLuhGTP+OuWSC zrBg#CrzE8xLtA)GeNgxE$ql}Dr#2IPl%f|^|Bc4-BXgoonjg81=J@k2F7}x%zU9I5 ztNN=l!*7mA8p$RrpBD#VT3ye8PGE)?9ib)m-s7@Ppp~8`C?|JOo8d!8AG!`)jRFJ) z!voYKT;By;UnHN`gRe?-elK#ncR$q;_TmXcgFU`cIDHy3y9cx_Thu}7(kOiWHRf@> z+*8!%xd`<XMw5Dx;yy7+3cQBWFpqka@Otb(doj&CE#6T2Lr>!Flh~VS<x{>+cHM}8 z9;hv9p_9pxKI6oaY|<lI88#ReuwVAi^&n%S2godBe)Mc{S}yq^`hhOZX7rbperF=1 zVq(RgwWKn}(j}LdLytv}H)GO5Rx;()96>I}^y>?5)PT-_72(e`1gN^6evbeOCz?J& z`p0Tz+sNeDeuH+iH#LmmEBEB9>Ux;$hC5gSuwF#dvsl9YdCUzbJ7VK{HbR4zyC=7| zjtI3lW|z@XCh|pWq8w(EH|WrUI3mR+J9*^b0%(p_OX?DPBsJa3rkU-Timpof$yXhH z%c$3cZih;Uu%i1y%jjiP-kpCHmh)r08S#zz?XdnNiv1G=@+z%CR$OT=ViocAYVkn{ zdZALEFLwm0>rata<9fv;z_>XOILw=Xy_mI1R+}%!Q;M3=Og@b38@2?s@VBdPcXB5# zKCJ8><#rfJjo+o-fCbdglMB?<Bwe2gX~S}&F|bOPV|$^W5{NC-l)x%kNh#ekk_07~ zWEwI&AzQo|N){#L#G@;)YIp*UT?=l_QwA=vKLr~f65B?v6_QU9)Z$x(BqVXn&~t1Q zra|#w&CQpi`=ME)XaETQD!hj68>-+<V+%$%Qk|HZQjS#K6;>>Qe^CRTg3#?QM+AMX z8#$+li})O?oeZ;gW`s~)N)yw_9fk6~b{!X~<s(qyDM*W}4Q9@<+F=sZ0oKA6R-Jh$ z@^96N1i@dO16|2u$?;;h--#!AptA%Cye6Z5@2rW-v;-<jDD`M&Kyk5w@jh9G)n|xa zGYJIy!}RVbL8zlJwrDi4n);`@&}AbT;<(@UQcbIU7+>lPtkK0tEBCZ{)2y_Bp@zo! 
zv7X>cmvKQilcNXrW3UG8g~XiFw|j(uOeb}PrU;=229nt+eY*V&0rKgM)nrwQGW+#H zL8zT0s}f6TuNG$ihPp%45|;NqbUj#oivX<!>R|%ujVmd&((H87meMc!M^GEEttcy_ z+hJLtkoYuIsDZ5SJ9y;2eyq`LRC_-{dO#~+5w1l0U}2+)L9Vlsx2LBd9vY*YmG=Gx zMOVfP<*v$5fh|S6_m!;r(WHB-u1hK_s+7L*A2K#Ib<C~*{H1$upyx!a2Xho$1=2%_ z9qZ$%IMfD;v?|hOWm!FqN~&A+n#~vxN4HY!@1gr=HIrkhqn!_6=+NWxs(3mX^Q@do zShqPLf{x~dhsR<{V%wUDa?k>KRUB<dKmk&e4NF1Y2DT9G7=xj_%7*fXB0ra$Ze(0^ zD*A!H)`hNMgen=G&gy44X@?Eqe2USwFo~#}iicIwj7K18ta#F-8@`drZ`M1mI)bfK z25y>$hj$1(4<5Ovi@WF&JTY&NL@t;DGpQ&4k~_cIwfL&Q_<w-;MMJ>vS0kSMW;zSo zt_FG;HNL*~F4ii(`YpYYhJ8j|TN37L*iH6~YGKcW8|<l}>}k4+XXT2h2n7qm0(p?o z%!xslc+l@64+ejRV8e?+a<F*vg}Qb`J^6uk0^-m_b)k&TZC+<%P$3d!__u|n*0rM( z8-rT#>=G!{HwN7V2Cd`U#Jg)QV#VLG42!$ZqL!i-u`#e93-s&1(57;>^H~6*Z3i}< zQj{I2%fuK246=;67>|wR&Akp$%xf?QJogr;Ez#+F5(m613)zbdX#IH7oR(g27<eiD z+RMiS;?d1@vg^_hnHSJI44a!Ko_zGL$q_7B6uc=_A+}3A`Bg6|9yp@+wGuWNA~6A& zPEdIABc-6`M6zpO^3XW~2E8}{()yKJaxcD$`ff`>eX-s!Z%(s;+gahYAFvMDswH;^ z_IGYc1_L(H)oXl22v64TQu-%Er@WjSU7o<*t(H)iJ=9uC$N$901>RMjHstS}UZtaj zOaeV&+&l|>3(Nf&+h!~>9k2Q<mnG<;ntD!i^g;pjF^Y^3?}MR;-|dY<AwEuKAAwHZ z8I(1AkqA-q@OJFO?D6YpSXy<FDZ@IfD4*#4#oF;qmY2EiNcNBtI%X8fOz$mT9!f^0 zzvX0OT?OmbH^8-A7%+Tnh%qD)FnUA%@0GLp0FUJq^_d6hhVOm$2j>0JJ{b7YDv`dA zMHM;^dLR;S`(>cQx135ERy!$b8PC@qFj;Db(I7d<QyD`_AbR#*!(4w1p8IQU;ZLqV zdOX>^Dr_Q*w!kxMkT$s?C=Xhrs(ZPiDXzZnG1yF#8>kcgi5dnUZ0iMc8ngyFtAo@I z`zvFp!luwzgc%adDQH|cqujGnBWeNZ&2SDdf30<JiL58tIykv!_*G2WF`?LY1e{fe z>ZtS`xiI*No-4ldkxMx_hc5b#JTRnhGFyb~3fp?7rhbDebDao;=!JVGU^a}CqSkB4 zrXjJ&`OuJlJzKB*J&S8sNXw8UBt{JF*WHrcSwU8x;X_A<AGZp^AvQZb!{zidE7>%3 zh$z28J{mg1nS>;^%~zjNe*#bKHSF!skSbxE?Hckareih<!juYZF(!CsrOL+@GoKXQ z=NpyD$K@Dj8}nPU*}M|t!2LvPkJYg~k>{A-VlR@{oU+GNHLE|Fc<Wa~e(1b#s)2rB zYZqByO6=*|8L}M_I$z~7?MNfrP4mQy14)88Nft>=W@9wQOwDcVvf3I7``aqb50s+T zG?P{4A~9+pX*Q>e!;48+Zlc&amJH7AD}FqdjLY5X+;*8s6>No-Y|{P$YmB~(#mdvR zg>Mx{%&{l9oU;UbAJ3CEEc@~*1$_f!UhX+bLr_jbRTz!O4z%1gvt0m-+i}&P;rmG~ zmi<@=<_2HA!<XI_A|Z4kFbTh5bB-~&M}*rvoE`6hcYQOdwqzu127@~x9)0Axd)JZJ 
z7{pLEK2p@yW^%^TC+QwxGqU%7<+|JM=n3bs#}df6Nx-na814bXw6bVYF-)DYomV2f zx)iz8=2I*DRBQaCY>;d+Mjsvz+d$W!92u6M`74%xPtjG(tw&EOp|Wb<vatF(u6om# z7s#sX?E}O2o5T0l!wAT4UcMwFnyBw9L}82Wd#pdJTzJiRCIn3qo4pl$`hLYbI?}eA zb@a7X5<dKKwZC(EcnWMLU-wvP{v~veyTVrDF|9`(r3<YEbHwnPAJP8SAVmz>Jv`>V z3={!jtyA(av@Q#wx*i6Ot1W18M5Sl+@Cf+A@)aOkyp*SySQ{6IEgu~nzigSJH|dra zHyB#a;=LyE5$GJLUVLaetFr?2E_WxSMU#@e{$u$Nme5j0q)9LneTJkr&^5CeYhyFr z1X_d3w1Ul7N6)H1X({@lyfKIoPyy7^C-o(;C^6hkdH2g=$iMT|WJG>!AnOkDQC>bL zjT7aGM9NPo*!Klg(f-4Dncl~iy>icUdl7+(@1u`)^s>@5_ZSYsZYeL9_U~rgi|)~% z_<}rKkmR(2;&~4HLgBq7c%(O4sfm-$`C~)*EA@!cRG_f-JkNcGm?;g86MLL~Ju8`& zc@(Ny+Ru{cor`E;dF?aS#(a4&2uq!9sf@daCj@ouhC0GRI?tH;>Zi2B3o|0{9{oK= zcQr;xMRG@DDcyaMq>aeUi1p2sd93d|jTOvkKZV<r=a}$34*QWti;&P3%u+%|q(&_T z3A32K)Wbg)C5<hPoE|a6S&cqzkfL@VI36f?q*6&U2U|KRYBwnSSRzi+7$GRWmBQ1~ z$*`9(oUCZrG7J3JS=f8Stjp`<l+L-rSc<fT<;T*yV*xsdmt0}IgzLMZC=@U&T3Ds$ z{t6GI@q1>>9A+6i<c$TBHn&;G%Y};G5GyE>m!K)RB9Z3XxH33f*lz^#b{EW&=Fg(< zvVDBVMly=^d>Zm>P)j3_8D6hodWm(y1dgDonC3>5C23~G5yQxVVXU?qi@ycriII`9 zCs=jI1WN8;Z<kU!{5(ofx8k+>X7cLDRA;mdHBZOJg8TErtJ_ejryvz84~De$b2d5! z8NL|x#j<>{FMTniFLv4&v-o01eX%@WY_~5~=!^Z+7n@{4&j@xEN?FAyXq}ujifh?t zY&DWS7M1jCkMO0EPj{5j8J`pTsG(iZPJ851X=N<gF)Gm+%5tDK>`|Bi@UD6lJ7JhH zz(XnV;ZoUOM~AuIy3j1_IU#usn5#|bDBA)IUzv2?Hj9OII@XcrZBu!KOVD}SL>}Q? 
zeBNbZ9D>uMVUkqGB*)&{Wn!Fg=E0if2$pe2>J}!9bu48I6QIT*_GZ(47TD~Ji@)oW z;X1#tg;yWLSokE@DxGJRBcJozpkyQsd)=d9qA#5<@-g|V>l=Swlyu%F&UL=(@<Qk} zzLBy|_n4U7WOL^+@!>7KAf2m&97T;zO1dXUAY$O+a2!fAm=_P*sT0klWOTN8@>8;6 zbe4GcYjS*aKiQXrj!uh?zJ}bm_~WH0n;cP0nWkovR+KDjCZmc5#7_KCl1k~9W7)3r zeg1q0Dy@{(6%kL7x`>-WF}9V=40_6hK}1Gfv1f*P0a7_VY{hCe3wF9~JRWolG$%iG z8DCmvD_aCgH5d+#M%q#9x1`&cI48?-N`*bnwua>y?$0x{ik)JjvW%*(hwf3>ENn7t z!seh#PXFlYmqxg4^+Ai!-Qm?2F(7pdfG*FE_@^DAYjG$R!p(e~mLVUHhV`j*-gD7C z`5gSdHc3-0(~{T5=(-`5(LvCX>tj*}%|Q*9F21@5!Mq-D7<pj(*hQ$E{HuZ9YmNEM zOE^Pw^|vH@>_Bl*3Yj*xzqtEHvV81<0lR<3-05m@we_C+GpoBN@cq3Y`CXDrZ?r>3 znC&;vIxsP&m^GwL7G50!YE=N0GXi810=lFpnPctkyzvXVGctg!8E8})Z9>~&7(nD| z4x()~c8YZLZ6uYJRMx$dik9#*t9S2+++0qw9YzLlEUoN#&mO9-x3WK~!1kk*?j3;Z z4yc52{-9F&FGOks^VwYRG^Kx36=3`L58Gbkkp4x9mDWNe_^!)w#I^&(qpV)=WeJ>) zIytsU`vS3SfOZ3mC9kJrl`TRc6KqrQLK%Ieh?Bh)CG79|QxK7=n^KgnnLJjUV_5`; zO1ZVRMvgqJFv(71#5!%53#Yja6F@hiIM!?SDD>DW7D35LyvKeh?iaNk?s|W3OBxno zYYRypmnL3pCijo)7sC8$8oP!(Ic^{vZCl3WC4jQ{(f6tmR9RqSo<^VYGqhq?5ctHB zf^JA!^iOZFCjs%2l7Zr9&E%PqK1m>Z^4s9u3HIc-FBg^-1oC}fZ9yoe){<tjx1=wM zxL7hIQpcLo=E5oPeNdogC{Sb(J3d1MSK;^+Rw*g!Tr+uMd|c{BXQ1=j%Ao}Z=!15n z1@n{88O_L{_Bo0!H-JcXkJt41-=(#WPaU*;G**i?G(&P^=<`GE|A(wbO&Bop8fRxC z#`z=u5B#W405dLIu<SaH_&rV$wyA7}8m^LeCd568lZ?gVJ-7H?S0BIIp5L*54!6?F zUqUUg6+1wJ$tXoGO5<#uo8L=lC#SG2^1)MJ<}vJ$M|ih%tiaeMpy#jB+uspOsj<r? 
zws>1<ZV9O=o$8#2n3et;K7#UF7t7=7nFYhVVv)8btd756hsIq4?!}kj9k@>fACBPu z<{fY$e_m{D;BLLnUg3co8r%O;Z2ub|hvnt!PwsP1Xz!@Gr!chjn9Dv?KSA#3L1vU0 zyE2`^d<dsclQ+r|#W`2V2W9cjgBVPg@%O{f;W9@;ZQ$<*fw^^DXe6bpF`7vY+iKZE z3tV?o7kvs1H>IM%mC6~R3%$R3q!Ld8>!Z@6?Bz0#8S>Q2^-Q459#$rOT1Hni!gTD9 z8FEs-^zOxY2<|1$ImHu9W=`yt%_!F@9`x`3^5s(2`z#|VYAQ12cHVZ@OXI&~y)*(I z4)s_1gy<CV=ET17;L|7K$gI)(SXDIr2eXMhZH7CJaC)ej{5-L*(N}v%pECOHNhH$h zX5U^C?!U=bz1`eqZ%7&4%#!|lB!6lqMddNFIP!3LqByCUJYSxgbsKvzzq5EFPzc@i zm2BV_N8pSz)NschgY%py`sqEk`QMOl%1gvBkz_qEBy9M-glm_{><5%RgF7kLpGQu# z^)h+ufvEINuVHmM0<D8{7+2d}#|V1nG9tPZM>%Z(=GJ+wp6Iet+ospZuMfm!yzEcy zM_**A(O&pspNhjlJ$pHVE(EbrNMQveTqe2)hbFNzS$MAPA)du-hDFiAVDb5qKmJ;h zNO@mX*iS&JQp(+O&#&IYWZHuR#jDMv_Q3(p;@Z)v^7JOxx8VWD;!brtR(}1*2L+*% z8qz`i3{>ukEN0zPO1r?EaOcCx5n8_eSvaK&P(Y)=rjBb&sDkz8P-wdC7JC>?=Si%_ z-tS7(Uu~D!d%G(Fw?~9mKQEBnNdwX{F=lX18CvY2v>(SUh89N{+(k5TGA0mIFm~4; zOd>B#8X~?qnjD>^j)qS22Kmo$0u}biDzT)bn9h8VbeWtYT1Jz>lheqm@>uOR*wFch z`B|W>Vr;{8Z<ymT%+d7VJ+=pdlF+0nfmi<oGCL_Iu#q7vk_LyhL%sMjd-Y3<xq@6v zis<=XC-u6A4a=Kf@^OFk5XODg-Oh0zNuL8F8~4d{5ipGVcs}lr9wLQ`gc7SibpS13 zsnOm<jC%sR`_bJu@<T!PkNhWW3)duZ2Mr$;CIj$dMA5FC3CF|&-T4K1bh1u7Q%<%` zeu#A#?Gb|EX-+CX)*KuVQa*)uAW5uv!h8dpHJ3u5A0zaSB06eu5U%yHn;?ti1}tjB zgt<xdI5^zVbbBWwz5gOPQ_&|5$(|4#BP}1T3~W6n!%=9BkM+@uM5=hm8Rv`1_=~)L zTQA_gi`H}eW9lx7z!`0TJD;6C(!JR7c`m}K+JckJM0yr%{^O1d9K~q?Kka%v5O$3W z_8dE5rNh{XaJW0Kdm8q(?(y9-JmCQ!wGIm`^fcHPE+0w0nc@~{GpU-oH<UJWk5gB+ zi6Vm@GK(*dCshy4i-GnM>~wjr@ckczA9Xh)T5}3VsrJuF>qDap-{XGa2&@e0lUqKf zxG)ilGr<-(!tPz2!39PLYgWhQh8VU%aTVRbHN+P9zu4ZY$Gc?tw0Hhq1?%(@v0@zA zHhpA_5ml=Ss=$4`KJ@WVI|hT!|CEHx=r^f}DbE^^;evJDx$B6dCPdi4rF8N#3Ez(% zs`+Yx+yIjTqZUhh``PY%(wXHZpx#eB4LAs|HL^qfmDRn;rWxI%&mw_kxtH^@k@6lW zp=W216EphtaNf({dCo`9RYwiF0ki>iDxrO5V!n2&fAmEZHFoAhzxkNxXQoc>28vHV zz4<ZPa?)GqS{#8bl-?0Kw+HS%LJH*c<`ADX*NA@hjc^JZcN_&W-gm(}uRA@4_dC?u z+f}_MVkIW^Gn2cZ)wqgqy?Z73ai(=z=L{*OmoOi34984iw!>INA9@F^Wx9u-0EW|I z9%uSzD%M8{p8sLOt%A@{C^N^Hk}`Cad=S>zT;W~j{&Xlx=niZg$Y;v&G0E`VFRcuf 
zenyU!(d@pYt5PnMS4<}Q%D&>I|B#BxcsTE#uAGqk)P6WRPMyV6s#yc&f|;96-$B%` zTqd6-zgG5>8<14~4i2poA5MU&XMDIHp3@$#$cS#SU>=7v+avsF^)VfyRUyaP0?XYs zEAWotSore_*yK`bO}{yNZaf?%2KKND0@-yqh9p*9ktGn>qmRqZlR1w*7lw0acTF>1 zveLM85-@A7ST>qGIqMrS@hX}4Sf&W`*7Mjfaakhy=&?cKnnbdtzE9V!jK4(EtW4Y! z_jr{|{9zP%e)f~dSDJl3kYuAMXG3rD%bdxwKS<FNL&Y0f^2`$%;!!Qx@x)m1vp2|} zPZZ}Cyv}UymG3<aV}GVa_!%&|(ju$})B&yoVr&-QqQlf%DZM-a!^RB52-@oeshu0+ zyn*S$Enl`#WOIjg3A1plG~fiAdpMDf^PKFsxPpPZmINJD%Z|Dge|DOel+CN#4S$(e zZzIAc!8(NZ6yloynpdgR<L8^x$wzwu{jZ4?EvqEc=JoH<`yN*!f04}|gA1r{_UvK} z^vxdEjffJ@<OpcM5Ow|aV2B?(X?DcY<HtM2Wae=au_S8P=6jUkKo3fi<h<U1s+PUS zP{TL#qv)c0JV`*WzldZBw}6XUXz1&ta>?}W^%L3b!c7rJJZ(8fE-cX~3b9oaO2cbb zv5Ds^{k3DArH?#DGL|Oxe)=9I5h!N!6bu$Q)^h^cw^W%i33;%a8||LpGB^jV9+MKo zR8mp}0V<I$uv^{@_F+{bZijwFdOVvf&YnoJp6%a#!)q8muNS^9kA@}^+q0SZBd{=I zU4YM)o!Fyig4poRWa%&lG^JFA=HlL`sl0Z9%$|bF>ogLt;DrE)#IVvbEB)uU<npu8 zW57?_w_$|)u~!9GUpQviZ2<H#?BMbTVHQjLS^z!IxmbDpb_4F8`1ZHgkK&>p^EpP{ zY$kcG40AfxUEjVc48#^DmyJ^%tZ2ZY3LW9UWOuDo5#j#GRm@1fRk3b?^4;Z1bRGhw zN`|BP-7E?B2kh~sbSuNL;VVRr7#qH3F`0*;V}mUq^?!<Y`ANvP0Mi{8Rz5&Q`7~gj zR<==zu*pR<|4Yz2SCQ3-xPl<=PaY-K>OKQ~i^ijj3ZfOEZw}Dkj{HyA77MK8jp_vP z?h*3X!T8X5;Py%LCM)^1I$pe9N5X5yChx=XKYSMO)Y;C?XWpU(Xi<mLob6@yIBW7` z+-u_qqQAln#K^NFd)x(wevD@88WTjL$u1jFN++UE%WLhyvgN_m^^PB_0!Nh5Z{g)+ z+fuiQNgdk%p)ZFLuey{Dzza*5Y)oXbAuFMKm`}9a9Rk_NErkbS11|Wv$bHZE%46cO zaSIa<R=(gG@V^odRs?;Lb20Sq;<5Z4^78ZL;<FRT&F8-pH;yM?EK3#_jwkKQMx{f} z(7@SHPd*1r1lE(%l4S{zxbiDSwV_wGusaKG&8!siggZ~X_9;2wPEBPO4&40m0d{I^ zj|}6l^H4mr#3zZ8yT91pOa?4Z5X(L#1<MsfR&GKoaXkW;#<@`Dw|m(wc!o29;8~O& zoGL*H<3Zk89^bX}pp8ZGKr|IA);G&jv^a<J&MakL4d4p>w~#<byBZ&XM9G1xTbOgu z7Y6b;yF-bK=I~*8(jxKdt@CaTyA@C|1qHhL&C!r~p5fw%@#LWAzTTIy7`mff-6eVq zn9nenxt~O=INiH_Uq{k6z<f!redIqY28yFoNJ8!Xn$d9UxIToN8~4GI=`?V;6oNaE z<^yXKq9HIe2Pp2<O@(&A(YD7F@h#a{Tg7gH{dG0C+hMew-iscioAG2fYuc_?1~OII z%$zEG3g&sV|8vUuRuq38IBKBJv0V6TeCD<~cG4n{8ig=a$-CLrMxJ{ix>s`^=5Mg) z9E|c~w}U*9YoQ9>dLaqR{Kqd0j(7#0ZLDtiJmZcdhMwL-y1&?W$SY7ZKId0#rEBrU 
zSVu2U<ICf$l9mqhxy7JQk8`6~OAWv5net*6+2B?sI_X42x8wHHxD*7|??LE$93(?S zc=Zc+(X|1!`Bzycx-ZDumBoD)bi5H>JqrEiduzIq{JAni-0&7jUlngGhPlJ)h8&K= z9ypnQSM-{B?%BQ+?<<Y@SATi;Jvbj9^fcf|sy5HW%Xlc6xK^d0k*}>v#<OXa4o}(Y zeqtdU#;X$xBOpT<&Q3FW?QZl#Fs@Cam;MOygw`&>*kKP-n<oogqPUA%KaUAmetpXw z@M%3Cbuf_^R>x#6d=r`)?wK?Z^K}Yx;_eyTBXmUb?}c!ssu%qOOg`z__$Ilyda9_{ zO>#)zaFlbekiPekS!4#m{k@n%WahgOum!^@SY0>RM=Q+rx^&IIyVB>N-(bRy9!s=q z`->A-lZv(K?k%ADFAy-sSJ%$($4HI{PjRScT&TWS?g%<4Lv9Tv#qYMjF7tNd2+4UV zB^c%adr9$+5u&o0Jo1l`y>%a<vHx`;^wBk>=^r|_wHG&3#XQ4YeeLjM*cwCFl~Grt z+#749y9lY-aKEV?(hTt~qhns;l6sTo1A)h}ymsbQC<fp6WQ$&8MjBtYPz5xKSq$;3 zNZ3X*6vOC^$*elFH|A=M@Lc_Ob!M+d&Jf4H+)u^jRQo%%fw{O5T|(&va&==$)(l@w zN@(=jjtbo9tiae8{udSa<wcUSDbG0+m6PVn=>Q&v(j*>6&^|njq~Sb_rXeiUQh`Sm zv<+EG^HXUH57Q{+VFqpHp^{$YVHQ2d!+6@nLmjQ>p^+X$=sIDMobxSoC#U7nZ9FWb z8+kaIuH_-z+B_UjYj`-3F6QAR>fqs2`WO#q(3w26(aAiVMJMoZ4lUxLo#yj!0X6Y( z5!E1cvP$e<0*Rpm7<qp+L*qHRjG;X^x`Ls>99_xKJGG3LnX<Hnqw5*^Jx4b%^h=Iz zX6RXtZe{2Rj&5h@A&wq)GJF@u>ls?d(Gv_^&(S7^)^Zf$C(vhs@;aYmF*{FS2MzQQ zj$UMF1xGJ2w1lJ03@zm7Rfd{4N`X45isLsJc>qWGv2{F0TNt$mM{hDTn4@hBy|aS# zKs!TQI4WS;(eF9R&OGRs9A%$w(z8H$o!JhM9^(lS4BfxN$#RKg_`f+hnxUIH8qd(R z991xMIY(0&>f~q|L+u=8SH$Tf991&3f}>dsE#at+p@keZGStLTiwylwRUFS_<N+Kl zWN184Ug^;+)}1F<85+RR@eFPAu<B1_=uaG-#Lz1ooyyP)K(Qf!&Dd$S!pT_f2{r`D zVHVj#oL0IQJUA)Frxu}{1Pj|oksE>{{FIeGiqa6I^HZMTl=)c(jgeDI=`2S1dr9Hd zKVpF3Uo-@<&smx8q`;R`gdr%JQJf%U`ACt5pm;x1ijNX)2vYbd5k5-1At=>Pxx1WK zTY)b`{FEC$N~$3!!%z9rN6{IAEPhIpk76_gF$>WiGR{i(`$!f;P@$h`tB;ar2pa9D ztoBg~4MA2vWeKOuA8pW#=af?VIHREbWMtj+&`;_*7Nhxf<Y3((aYG%sT6aN>#RzxX z%1T{7guAYV8}3$2aRgoOWf6p1jREK?N93{VqU%b4>ks+dfxm5&QrEVuCO>cOofx<p z%Ta2rKrF&agw=?yjKsqNn6a9qY#Shg^ZsqAEO=sDe7_bX)Q%K>K=2vhG~fi_FkmlW z2Vm=J^7gjENXF@rSk7`lBp{8D?yt=VTd{^E-1-9L8P^c!Ycai7A`vl`uyQl-6M!3p z?0PLpT(O3neJzazf4rvb$2d^B1wkRe3YZ9(3a|m@09<Rxn12;U`U=?q!dAduK+_uX z#=lC*ob?`Z=U;`PHZ0ThCM;mOX%`v0T~Y9Z5rQ4|Dt0Kg`n9M}6gx?;ONHg#>9eX! 
zL#!@?J(TQ`g@=C%FzYeQ_=kn;+ny1{5`8yI*b(wFR#M?6(z<<6!nmDS#&CV}BAoG& zY}>--+fkP7mb7$YdcFTl&&JMO^bnxlLq}O|TiC<f`zR}W3wwCG9%ZF(VGo#6$c3G% zv!iBQAy(JB7B}N`_dKZj{ANc84lD=nktBWoRW?^H9wtq%>jx|U0CUkLPgoX^fG`%- z7hPH$<0-oI=&4Xo*(7&f54n_o$@?`8|2G-(#t`w5cS*$?8RlCX*fx%R@`YB{8Bv3K zols}F7sW@UzSHkANyCo9+1&#ibvM0{EN;#r-ZxU5g$E$p%!l8*b~x<Kxn_7y+o{S= zwGTZeiPqYzgZ{DLdEonT6TjndfqfntSSnqpt47|a3G?rC>{_buAtv2~Edwr2;c|~} zEZO^<8GFW{s_2OoDGhMMvCoun6>vKnrOp3<Jp8%~$_+RBn3>j2Rwee49|pOBp4&yv z{5#&sK0uZZ%)#vRW$S*P;jYF&my|a58r%bZizpRFng4<!Dp!JG!FNcdWw@_K!{Od? z&Afrj&3Wi8j2Lw+wrxfK<4P;+TlSewcr{x|Jf<d$sBJxx;1FT=J#Mp=)pyGSH$ZI5 zSV|qsMY_P^b{;S6j8Ek8(H-%+@xZXDjj&mKS>l~KD`;ET!&5|k<7GHzAE6Ipy!Kph z;9fgRZDW@eYRHKl%6L|QA)nd@A{E)CNax)7H-N-!8J$idc1DYN>15E(zM}mZGInQd z=(LwRv{vxT#IdtLjCh$G*{Kixe0>M~sr5wM<q`|lljnE!ccMQhH*8@a3An@96W4IN zfz|H;-L($A*ChF6jys>avGbsfOEBD%ac{@Lz7)g}k(>rU0}}ScMP*(kXtDzK^*lVr z{tpkMS!c6bKOJA&SxRZQ-O^}@`F|5!cbn&bSW54_i9F~E6x#819rKmYr$O*wb8Ex^ zS!p_A<+x9Eg--t)z1KRj;LU#GkagtMHxtA+H00=;eHFU*+3Fd}T#UFx%f;p3ReUo^ zb8*3onewvTS?RCC;&$ukiwOMfRsm%y!-M;FzFp?slG=FYo}ao~?Yy6*Dg>&$_rj;J znXe~+yERXTbFgrBxrJTYc`cG}`AX@8-q;#3sd`yNmI1AKA%RQ6t5+aGufe(HvCLAR z(ih@4CKTV}6X`X_teyZVE5!3FJ~Bto;Wr_Ug;!4nx8YZNk5FB0F5{}Uf`G62za@R& zdMxmVXgGY=ywyMOQHC6SOA$DNAy?iS%6t*M-X0*HZYIXJ%UH1X?Wy8U6}k0xy0}C| zQuln#hyi;qvf%5zAF&s9?i(7-E@xnPLId`tp*+K7=Z9YY8w2F#zT9MgVw9(bCAzld z;Rz?U4uPL*{Qmy&#Vc8-vI9w44%Dsm=1Th*E1kTOytF?xtOsX^XAChbN#p*cnP1uY zEszLX6Vns@klyJb`x<)(7kd6U!vDQLnC|zp{iBnOnt}eeJ6K$mOlBVF*JIc{wvqfz z<`15-3#iY7_C+$rj=Ah8t57lM{tES?tuW~CRb&fV@s|Uip7cUzGF=GN`{iE_dcqGz z7o1TbXMX7}q%|)X=lhu@)XHdBU{W#eP}kjja?d_4!X?<N@Bm*G>@j=?gA2rNIi30f zcldpd{C(9GlF2*sQpur%QR3Po<lMnX(RqYiJD9=`Jz`1ULx$vNbTPHT6nEEbfYSi; zHJz3t>KFg^6~uX{pZrT4!u*eekCsEz;x<F0`42ukwTF?xqxD_F_gBm7u_!<GPG9lu zxA&}mCt3_#3oi>h?hrQ|BBu^3#D#~5_wZ4E@DW2Aj!Y0ANFuRE|91MZiISa19}m0? 
zi6KevT^6@)A<}#N{6kgmKmSPSx&uYBgAaZVk|eH7Byo=`_FSnilm*U~<HaGz=krsL zf#jp(i^P~cWOT!W;^DW+>kT8tB{Ae?!-~G&y#+}+atRqp_P`p+`HkzyW<+4wN&6T= zAGDFW6VtWtRx)9PA(ZomMabT65gq}&59sl_MMwZl2dqWf>A(Jwq3;(syWm4oe8&Y3 zvZDtzF6|H$Pow98cNF*fw-@6RKMeH^%roYKhY!Rz+L&}*taJ$8DfgcJ-?_Q|WcQLL z;QBZ2*GJmq0g%m9yusXXlP|d^JFy7jTiv@jNca@$noXO72z}D(Y4Ch{or`Nd&L?pN zil0yZ+t0VMlaH!_dO!BC?}KZN+~b2wLHzW_6CQqabY`IITbcc&l|F}WQ#&PD2}zD8 z^G+s&4Y`+-b_v;dGBM>PQ20lH$JrY`mFzp;<6kn+L$8ppPnvUC#lz`FXGewEJ-eEF zb?r%??M$NmfO;?g%`rtby%aOD!zO@j<Oppo2(^@j*J!cS!|T5TJ^L>E^pxFF8d6F> zWdxLIPbsC1z;RSn%FkobsRwqfUY9iJB_9XpUj7&EXa0-(ad7u0Q`U7ERP>iOhW+IY z&40boZwFbuE>-+3k{n#86?bhWE$fC3Y==gKLDg})|AT2v47r)(YLa282$~4H0PBQ@ z{INb;95REbUY^dJK<i%Ch(9bMA0a?2=oO8aI-M9_$s<p{@@f<=I9cq$t_Ap6fLNG+ za>{;sg-ptfB@Odbu~^L{SH4_ynWb_^xGQ&%BcdwT(?Fk*NVuJS@1V9%NWT9e%#D96 z<eCG1KCU@dAnMl~Ll=_Q?ehYlAV`uUDu5}3Y)7)#?fpFuIATSyPdl0a<PFTtH#NP< zQwtITgKwh(uPj&@cwZZkAx|v`=O>};lWHsd{d3UvJvA*b_ZE=2g{#Hl7s;Coxi+}6 zkZXf}PxI5z;-?kR0(<5?{fsOys1?k=FRDZ-lNN6im;X)zo|z<O{7xQvrXcYB{~+<- z&nU$Do8-(h4~u0tiPotQ14_w6=Zq=S#``DxKMz}kf4+l#7hveS7U5~Y^MEse6G-c@ z$V<y0ljXkoGq0G>5<WdTrb;+YIbFu?l0(DqE{<&M8=uAup%8a`u)6+1Z*Th5;~f%C zzk8f4YgANXD$XnMurK31zhb8OrI26hXvKk&%&v7Am6Bb_a^!c+(m%9EjI_%~I(n5< z-So;!s($wdl;B5mqI^ZULW6P1+8aeD+$(DG<D`32?1*8YFul;fGld&_f0<{oG{Mg! 
zcJk0|k72%j?rSac0+ZQIeeO?V`X>fV_%htn$ZCJfrzZlkU^jK{oN&{uLhjI1b){pm z-mp*hSUl$McPc+_%1Fj5wRt*r$#N33BfALrC5mlOe}(+<c8?V#@l<pbxO^UH#^}G@ z7HunJbtwTm`-Gf1nlDv#qu(+A0~S;{GwdFM8USt83RVa9+glf1k|n&>0J10R2&->R zYk2h`tmpVR#}fz&*?cNdEL=hAPsOqeotIAaO=cBqEx?Gx;V6d{?mB7lx;%?7dK}HD zVz`GWK8S)(UHgF|Htu<pgkRHYhH8JPHk23PmhsD{b3R~SgYWvFf7hs4Xd$XqyqsMA z;CM&|M5S()hWzV8OWf?L4*R_qFdXl~ypCpGU|BH{Pp3sKgwYN|m!=}s|3d~bRV4Rx zV(1@_;LQ~F6CoQPA&;CcV#eLU)A6FQnVdhJB)++h+&Zn#dV=*1HgKMN+;e-T8?-|- zoy=+MJ1bNgA7|iaL-?(bP^b?4M$NZm#+iQI*`cgAwT!EiKVKv3&h+oeNXOd5TNjxt z>12A)j1+SAOnT&#E>ye)o#1GP+K^Xup$ncN-9IuryV^SS$Ige*42;hb^n3}fWkEg+ zm+dpFOeOT^BD{`cMZ;bD1j9G>V*D<H{z(zPcY~F43oFtPWW&?Zm1?2L>Nw@3V*&qY z7}r$nYJ5sOl=j??G>VN5&G;A8SN&6Ev>Q9*MhnN#XCEf#KT-_UK8wDnW5<bj<x96x z@B22rS5+X5<4L}}?eDpj9vp&`erJb@>LOBlc0k(MZFn`H!AiYYiT$;G02^(#*WTk# zE2FcPk!@!O4$Sgdc;T+1n-YB*hK6t8aC>qza?v~5?9d$TboFR*>+Fy|tWf$bTj2TK z@M?{(mBe^%a0pM^xRlH}ml(tF>tEn7sg(WEsF~nHE?4d+|2mf_8b_1Ha}IIOcf|a0 zthf#zKYpxISRuQZKNAt)XO=KW`p^S2Sam`%40Oi~vg2d5xb6)3_TwQ*e%|rmReyE; zImR#oye?gkqZ=JGgADnkU(tpulElxou)2m<=b-{@vDx`MT7_bCX!fm(>_^-RU&e2< z^*!#MaFbmp8Av~z&I)BmbM#Z7zC9n={7Isid4<$}G8oh3=TF{BI)KH{zur6m6c>&l zXJM1DAiD8eR+IM_J`a*Pmo)G}l~?tq&jKxP4cd%R7wtXpJ7C!RS>UIso5DL2*wiQa zJp#B{_#PL!63d*wh_<O*BquYY<=%dFG}@12e36#pRlgZi>@w{}8Fz86JR?}d{&IIB zIeb1L!^p?kex(uyng0~We|>ZL3s^`yuW!cC#;N4%PvgWlrjpyA=B4c%iIUl+C|t<g z!tu@vnXfdXR16d@WEM1&%Fl+1gU*q4pQ&S@x&C?~GpdB`LyPG*m&tdZ4HDlzOTs@- z#k^+UnL-s$=0Anu#ia~3Kx5#d^Z(ev!F(pZ>|F7VJqc;JKJgn2H!gY(b~!i{cl%39 z=+X-Q1rOU?ci6wgcmY3qcAI%v*a>1C>yid^h0oT)Pe|N_V~pLTAfH{hFJMKpWl!?O zMwzn>F8|;lnGg-=53m5n0V)BD0P6vJ0UrRG0quaExRBEypam2FDgbi;KX;W0mjGLV z?*<$Od<M7yxC@BDw;25aIe=1tJp^9-Wk7ZV>H+5fmjFKkS^?d02+<#)1Q-Eh01p6c zfcbzWfHi=vfOi0=0T%&30@?swyU2tTz#u>lpa3utFc+`_@XszznQ#!uCx9z}Hb76b zKmkw!%z!a~sQ^2m8n7AgHsCbiHh}%DK<obd?^pu-TDcbCyY=wSp2E)MNlPb7^XC>} zZX)8V`u0ie_~B28u295SB!Cw`1BtOKOxkn&D>2Mz{TWVXklTK;@Lm08gk2Gzh%f@7 zfUpOG%{MFp418fCLhOSC0U_KG!b&ghlOePqgdavY_alC34&hXU%n1;I5LZ-$bJs0G 
z5<)pb1;WkLA|&IBN@4dki?9HMl?b0gXh*mZ;Y5V(2(1WNy_MJYJbwLb*F8_&RLlBJ zc>JMhvt}ynvlY|l&V0x|Q!!`m?CCS-%~Lq$J@WX&iUIv*%^M&P89rTJ*&0P2YW-o4 z?N&cg0sG2|I~W1TZd^%TiP(2eQgg$%`ET^w+iiXDuUmr2&OZ!Y6&kb#m+e}fTb10K zMBI)R(__h~+exxiQiW$fvix?Em}Vi}ZYKon8R!f9r}Iz@${=0u#0F0q42Sw)GU!fX zz;cx@WwL=(Av!eI7u|CodE-uE_~qdi0h*n)PxxuLg}iwuru*jnd$b+-<nEnVao8x9 zml#^i@(6!voQ3i8ziHzvB>Z-a7&D%8#7<*@%tf}eCy8V}V;8Sgqo~;6e>`Uqc0I?? zZq4w~Uj^WgNOxia&aCrQYSha-lZo4Skog*aea>F;!R@38jhwx+vQ7R<cU1n@CSN+s zb1?870r%wg879Nc=ul{cj=%Mw<$a3|{{Oq4DE$9QPmrV1@wKaE7rW%kO6+s-f8>Fe zWwJ+uy4J&W&;IJQ2fE5-Npe}_x<L`LyH4Hr79k6u1Y`ix0I2{4AQ}(>kOSJUT7(t= z1zZAL1bhZK2RIFA0-ONU0}cZY0`>xS19kwm0@edo0+s<7|B|Z~r?3FX9KZ~~B!Cr= z2e1H)03{$E5COn1mk1rwBS%~Sw0(z)0Vtpua1qc1I0)DdSOZuBm;;yu7!Mc?$O9O^ zn~Bx~$pEAR;sKF>P(UD{{R+;}02cwL0rddJzZc<lz<NM6zz&!S7!Ak+7y)Sj0dV76 z%ewb^$)bqzV&Q%46YszN{lAZ|8`MjdqGE@{9e?Y8#c;J+gw3~5DDqz!i^B~6--h3y zH}SWmD@nd+Ubm#HEbu^mFPTDgDqHXtfW7?+xD-KzY4Fa!A;1_2C@DVu{|-+7Um*Au z;Q!wNtGP@NbUbq)oqv{3G3ftyIIym_?270dCle@Ew5yN^_SZ6ACZr*^j(9YFvw{6Q zQ%AhM0?RtSxaf$d%|Lp;d+~E_#Ie$Kq%U{{{K&f_-n<EMl+zIx3tv5OJ4V*k>TCwJ zt8$p~0p&tvt#YsOsPeq>jm)m9zN+b}d8#F<H&pvoCspmL9_j?ORy|x@uAZ;1Rc}={ zs!ywbRtID$vP@Z{vmVHrpXJP2o3$b9wXC<Zj%A(9x|nq>%bS&}8K+sQ@oHK%eY7U+ zOWLj454G*upzP4><ZM%RN%nGQ_O9%s*#SDOZmDjaZjio6U!`BDzop-o^KQ<WoDqhx zh8c$0hLuLW$!2=owAggmblh~w<TbUK0?m=;IP)O0);!$&u=y$T^X3ia*UYEQlDT_s zpIk$3RW4el5QH@d@|73V&t|n|{h5`d>92W9b4oKTdtdehU7p@~LVsHSwf?$Zmh)iF ztA;*Cqj99M)Hu!fxY2HW&iI1yWn;7Pd*d(0fu<oQjmcozZQ9Q((Pa9_^f{{Xo#{u@ zuc(e>3NeS9`<VNhGt8OhZ1XVlDDwpKL*`2JeDl-hrKsLU^H%d7GisxR8bdHg`A_9B zr?Oc&JhL)$StiLmmH9<xfGSdztV&bqRHIa5Rj;bvQms<|L%m&nP5qPlwpu`EmuD@? 
zdJ$cGIZLGp)kbPFv?H{ewC`&_)?UgE)5Ykf=w|9>>jL$|^b_?>IbY=TGAIm#3_8Ow z!~F)QVYy+AVW;7M;RD0ZhJMBejLyf5UmI^5hnwCueQYW)k2B9SKWScVe#QJx^C9ym z=AX^C%zv7LbGzln<_<%bKArnw?pL{jMG&C!g+a=>%9BcW=7G$is*$P(R7BOF`apGF zbxB24KdWx3{!|64d#a<=3Uz<AO084ptH-J*sHdoxs$WtcR=2CgtPE$CDod9&CaW@Q zMOLt8phm5ER5MR=SaV!+QS-Itdx*||G;JD56Qm78TVu4z+Cf^a)}Xa$3$&xOR_z4s zgW9Ru+1eVdM@zKpwVSouv^%wLYY%Cgv}d%RYA-?D#O&DYFS5VMrrEz__s~73dqKBF zw?p@qu0eN3=ln$Xv#w3oT_2-Y>&^NCeW`wu{xSV)`rZ2Tkc(UTyZV7SALd-nY01fk zFqRwahDC;F4J!>N3?Cb=L0+<qON_gWpBt|jsj;U?XEH-jcbfK^PMIRivF3d9WQ_i+ z=CkH+&EdJKTwU(8+(&bt%-xlHAh#j6Jy&pAVJt!zf|Na!W0Yme>B>dQjmp=QXOv$m zzfu0AY|8Xzwr0ktY^pag5$jc-sIK#lRj3E3E$Z>=2h`IsAzx8{sUCt}d{J{yb3hxR z>!Ulbm-O8XDTafF>xRcospcW(E9PI#wp<6B6;ql15`<C8@k-}Sr7Ckq=8DW_)qM3b z^~kInSzR?s%}ULyns+p3b?@uT4I2zi$mEO(Z_E*dtqA%l2Py|EGnE>pUTK2vn6Gpx z>y$^7zbN};?o+*>-mDIXPIxeDQ`YNQd$LYsorNYCpqZdqt=XX2i(w7dW@~$9M`u5( zTj<oitvjsyQzzH=)c4g7(f^zy<%Af*3_T6ehH6ObYle>uvBv&JjWNggpm8du<m<-$ z#^c5hjGq`U8$(RtrdZPnb6)P7xyN(A&HatFZVA3lM-Z%xR!&qdP!_3YWB!~~|Dygw z9g(HUnwS-&iPk1)hiZ$o4{4nbLqvvU7iGWC1tdaOuKSPfxZapEC8sUtJM-`6kldKu z^xWdy>FB!Wb6?K=ckYqgv%aEMVk3ZA5Txvb{&*wvQs&LfJD9+7Rj4XLrB_W+Eml>l zmZ?^#R;t#h)~hzCHmiPAwWw~Y+MrLTsUJgszvxu2QLk5TP#;nMppJnuP0dQn%E(e? 
zWnoqrv!27OSedmZYkgLO=C(%A25Ea~S>02#D%Aa9$o+opyV~Eix3#kD;OwyMp4p1* zrP<4~S7t}+2I#VNW?g}9zwTY#3Eii<R^1()pzouft#|03))W1H{V{!`-g!ZPN&l1n zcl{lGP)@g;sGPW*emO&O^f~675jn*<lXBMOyao9m3!OO0__pz&@r3b?v6tzeCK<HZ z3G?3EuUVIG7KA(mTIJTvXH;X<PW4*#dG+t=)foF5S(T8c-I{Bf7R?*lqU=YrS7rYW zMWxo=Z@AC+sngWke4lwbQ;WL=VKjmQ<=c?-cT`7J$FcByp!!HPRy|4WP!o*dE%nl@ z@3Z8Z6wPqWOwBIMEzK6~4y_d{$dmeMISX?(<^0R!WqH*L!ZHN?m5(UbDi0_(tGa2z zHN7-_G;x|FO<&CbjX^U;GeuLaalWVdMDwGjt9CL}$R_PO+8?x$*-4Ox2eW5nZ_VDH z{h#bUx>Vh0-7`=eCv*q(NAw@%+{jsI*kbtH@V((*#+}AP5XaArktT&{DAtE-CKI&i zQY=MVAuGZ;uI_~a${|X(a=$V#^GxPXsw{Pmx>#MIUaVd_7)$0k&3VmNns1@$v|6DV zsueY#m^mqPDki2a)1J8?b4TXx%)REr=5<$|kfqH)a*H6~3}3iuZZj{--JaW&OEC;G z-hXmsnzB$iLupekQ*Ku_DXCJP8JVffG-ggjPU|zbX4YpmX9_BX%A%U2nyOlaMSe3h zOS4LVqPD1~s%NQ%h;{Sl$}aYjn?ubJSZAZn@n(fN)tqL|SohF8S+B@BX1jTTd69XE zx!SzUyu!TFyvBTB)jZkkV3>);FD;bCgy~+IF=pN8-|ahrdY)L<f1zyLx}^(cEB}k0 O__RzJ{1aFN;r{`h?oBoT diff --git a/Resources/WPy32-3720/python-3.7.2/Scripts/pip3.exe b/Resources/WPy32-3720/python-3.7.2/Scripts/pip3.exe index 5306633aa2b43aee86eab63309cb3f98b75bbb6b..6b1eb425cd96cf41cf68b526cc82383e72784de9 100644 GIT binary patch delta 28830 zcmd?Sdtg(=_AkDZz5=AAKnsPIw$K*JJI$j_nl=qWOTmJHLQ9pWK#dUvF+ucD0w=8! 
zH&t+a-~$B#Mdf&qw-2yTq@W;(97RE`M-EO6M~fg-t?b`tP12(0-0!`=Kkk3Gxc01B zv-X<Tnl&?PX2%adjXHfbYIVM<Wlg~-->Nr{Ek1JcrObA??S;I|qlkYpzEfti2=g-= zMX1Sq6XAg^*_p=?9yz%&^C>~shWRbwuuz1P?wsQynbLp!Ig2EzR23!p2L3(`N$DcF zNm8r2s5(p1r--f>Bq_pxFb>ed2CGs#om5Fuq$K6Gu}P`{UG?!QDWCXesiaNLHT`;M z>s8XqPI&o}Bqguum(<g#Q%MfuX#Z=Gq^YVk{ra#Ll_sapX^=@cWeBl<{7Zw>;fv<Z z_RN-~u0xT6H>Kf#FW7K(YUeZ*gx=JsScN*J6AB<rLNt*rQ}^k5p({`nMR2%xysW-e zwMMUbJTls+k)#bZ{YOg1_X9<3{%VcnEvhSub1GhKU2WSnMR6$+lH{VYIQbpPlH@-W zA4+h#@;i@|xLXHWy~D}(b)iJ9TcspOEe=T@-)20{??&4lZI~ioTyZfHBK|ukmK51* z`b*{Qa(`14r)vqxmD=SVO!!~OjcAuEkJnTfmAc|Kr*3~#{qT{Jb0S}XW<W`5>pe+t z)emQnMW#E-D_m<KwXUemsJLd?z!s=(3skr8J0Zw;)Yhnb;(%*CI41H`MN!=93dP&T zFC|G5g;x^UrO2e4L2ISxp%B>>iS~4>%J_+RNh*_DxD{{u52e`}J-y_XAq^2lt?sC2 zD(k9GU)~%j;_^?yW&6<qk>z|^ESnpZ=9?XoQ;sI{Ug7sZ3PQ;F^tz(REyjA=HQkf- zu6I$YfcK)4Pa$&A#UVvk+)<ES3Fc>R2<I-;#D8e0JpO#d_$IeBXgpWE_^-|XmBg?0 z5fyolN?bujRv|MV>U|`set#1(xOS%8$$$P`QL5xFd)q+pAIhk*X}nQ^=`F#@auJVe zL+%f#jrDE`y3xj84oMWkcJ+`XxfSJYOsW@xwn7jD)QwJ6f?ccRxVK1Rm93j6ro`58 z-w(lvKv9e0^|kTTzId~!rE=yBp(Or`+H&`B#j7!14R~Amzo46ymFUL4<t|sj$dDG6 zBe`5j&_KnI;wcW^Tj7$rP}g2@qI;b@L$vA@S#;GiLKWg|*S%xwidvoT#etK$HkelK zy0b$jYIH-UH=Z~v%J~em5!A?b*n2t{VZ2&Q)sDtcuAC_!4wbiDZc!5AQ4;?E$*p#) z+)_D-aw-WWGy+;`uX?pgf}@3{#w1QH8c7B2MD3T5jAUcIV>*ot|M}Jx7teJJ!$X0Z zj5CPIMVhj5sCGp~V01>Zl3*c;XX-|0#5vsy5iD`KJC!L3`WE(BOhQZ|>gD7;*y}NU zePDE1L_CP$<Q_ev9PFkFCpS!|3dILJ8CkM9A8hWc%BM~w&gECoyN!_<Q^jjL@mgi2 z`{z)ii+>hM^oYc#Ly1>HiSzl}p~Tf9@j)ae;8m6DA3YpW<LHcO5;RUp7~R6AX_Ba9 zwnlFOHHky2c#Dk6#JVRkIBe6TiB9w*dP_;jKw@1@MiZz`#gmcf+6LJb{Ka2Msp5zt zkIu*_+vq}zDhX-e1`RD#3e2s9@I;b9Z54`UA?k`8sHH|a2sxFNpvQ#_)iLzJL^QQr zP;$v3y_2OK$$rpesMmF4@tuauGU1&vJ{uikJX%{1D^cchf;5rmKuq0V5_NlG{D%^& z(6c?<8YdswEv&Lp=-wcPOPT8lk!y@7z1H8T#v8=mmDsy?V=s2<HSQPGn-txv@gG)K zS);w*>GniZdQ&*TqcPT31)`B!!AG<yO4+qDu49m!q`0N2E50K|hN*D!#~$z8%O}NB zHShgJQ9>^r(&9aTgBp3eb*>Vmmhrn{gl?l8sa5{Jr1bYBg4b{FUq%+kB?Rap0P2sD zP;m!PaqR8IA?`qQP}E^iJFyG00XO?g=bk=2<liv%Xk8^C5`9qRxAgZ!y3>MD?*9Jz 
z$jXUMo(m-qb=Nl5o^Dl?cyx9!qi&%(*p0G$8`FTW0b%JVi}$<0F5(PXv;Abv%iYj~ zp(29<icjtD(FAs#15+@zv6jNyLG^RB{3Jz;N9)!#A*>2$Z#o>BHTai^%V&)BwP&db z_KGIx3tDBJpRA_wYNh<4?F_rCOKSc}k$3@<V`XJUUF{WEJLlnOX~^aBGPlOn{5vVJ z356jWZaizdOaj})JKL|Z!(B3rf5U7nDi!MQ6z_MU_0wHhUD1(Gpd;NvxLZ9l)W%D@ zvcB5hzNziWxgt4~R@yGCbHp|3EL($@*VUdu(VgXibrkY4s0<nCT7`<T3R0Z>%2m=2 zt6+5PCBdXXUxDh@-Bwm1^n`zh&VeCEn=i(!>4TN6y@(+$W<Dpc4$aIV8mw9=#!r>s zm(t%m#2t5A-Iz3Ye<#<fVaw1EwO5e=hPY^lCCCPrNN7=A<V<H+L|yH9C><m@A*pU3 zMdZdhr@Bhk`0LeGzS^VSpNAa$xK(bEMY-!rkQ7HrwWpAB;N#Xn?MdoK(dGZ0Dj$)B zjtP;8;=QvWi4jL4>gY|ky3UiWDd(f9)E(MzMYPzVn}5axFH}zvx~1%*)~#@5>7aq- zWjq^`ga44ej1NG;k2SxBR=35{+RCfG+|WFz7gh^eS6mzxH;xpxBDQDjCz$>krJgW! zd~sj4DYoC(r!cJsiZrm2vHTg4nuyfkQLI*~tTt~NujzQjeZ8)z#pxpXd`GGn&cO|s ze&SG)r+TrjT@wd=1?C1!NPKn_6s|#5VCu?cuLko{4?Yly?w&IOdj4TDyQOP}`dTvc zb{+0R8TC}gc%&Qa!EIE;LIh<>;ICl?7%uU6S_~dhf;U5ps<r4`tb4u=U?Sxf8qz3n zIq#lE%}DD>OuE`~uIfkCJC71RffWCt<i;XeS124UhZ*lYKcDOi7x@kn5-KvSN<G6+ z4~%kint`nTEDSGbWnww+jbhL=OjC-JAI0Qf8Hk=SJ@n>8|GYM-oR$U1o*Lo~tefQ) zS{z>WCR&TyJU#dsyaEH)nV$e6k7o(G<Z(+ZC}*)oE}0+bDwntd6C&lVwpiDYk&>so z9Gf->Q+dfkZww!p1HNdvIMNoCm4(EfGG6LP*YJmiv%lzyWBWpKxYg?Wu#a?$x(~~f zB;B4C>Oss0PTn_<O^UN;IZ-*O-1`8Uv?xwjvw~VhbXPBKgl2?geA=7sja%WPmXN%O ze3cy?nZiHK$2wiX8~lP5jZj&8))Of=VC90v`HhB~OLnQXR`CvSYiC%ac3M#C*fPEv zGH3#NH5kLk_eQ1kyjTQs<IEYs_%gnL7`OqnMoEiIjP<_TrZ?=B(nd-R$BrG_P+v|~ zIDQ6X#O#$YdV<lm!__hJ;f<8yj>KH%pi*tzR7~q2Z;CszJYc3Y-uS14D0iQ!Ga8Ji z-XhV#s13v)(5n3h()<S_SNyUZ>b~@Q_w^aZZl~Ug151T`@IY&n99g@(h!u5DuL5c5 zM>CD}GaC4uQIfR0U@0||CyI|ipqNP<kHg6P!*CF_u(d4ezvB8Vh`+y&{Lzpj2Y!k2 zABdaCH;k5~6%(}5a+E&U*?%|<i%(Yz`>1=bsU9TBMcNf5p;YKxk=9?I2K(hgD~DAO z>o`#r+kr)?1wwq{idkC4rzk@Xpy2-cI47Tkf>us%2)#SMg%$Kj>y`dz8p%UDA7cx9 z9Jb$1OPF>QpZLdlR=#9BTVcEGjpx-cE1}`t4_Nb16>3j|GpUGq;wPEbQrVAS=@J;< zYWvXJi<22@lm??~`OmQHvef(raO-%@ecgna=3ghUU*j_k>*&Qpwgyjfqjc;T`nj8v zH`AIBq)v^}mtSIB*K6eQt@4@Lv#??-6WGLr-l<nm9pr|A)~!sGV|Qq~D7OT)+?l`v z2`N1XV^%vF9`YCCV9DynxA1bdFJY4U*%;QXr#|*6EXl)zz#7jc^c<)f&ldMwpq^IB 
zzUz5wr!Q_sJ39H7<Jh=febjsutLo+GR7x3Z`QRSx@4d2OoMhUZJ=j;hGSk6LDaX5a z2we!&SkI^Q5Jo7G&tn4;b5tAH^u)};3+c_x#Beq+{X8sVwZ`@vnY{*#LCXypXS=bj ziAmiXFb7lyrng-kqjh)}RPfQ`*yo8U8mw7^QEx37!j!~*J`vfw1ZL%`BV)NIp1+Lc zW4L}}f%?avSFF>|d($g;I`s+cMVu4Or>)>!QFvHVY*RLiS@^#!>Cbpy*#0F{kgz%2 ztH3~1S&KB@NMrp`_G0fIy@z4u-xE19*q5xNTUA%2!Ba_C*up;SogUka#a*~StK!&i zy^~|>Z&2pMv5Y=%7^7}bw$P^ih|)X{+E$LGY6%s(5_>u8%*g79z|2VOJ<IvGQ&?0| zfvSj&O3GDF*RiEZdC6!B%zJXgfeX=BN9!Y(dmg}s3L4wWdro1el8owaCbO922_sfh zFPy@XhfJgf%@13Wz~_SyE~XABO-os1?qgU@az@vqAc>|Z#j>N4EhtUXscyWmDOQr$ z3#Ey=FMx!FR1@v}(wY6LVN@$C{jG|pzcT*(k{G$DX=P%tTU}9;vy9I|A}bm{OqX)w z%>yuwY{mHDlfD5GlGZHKp!yhb#ha{n6BTc~?YG5)Wy`3ta#!YZhs4)l4#eQ_w_=;h zw}27LWbbtT0<eGu+wa5*Vq~fbeSM3<sROx-QWJwIwe=75H6E=Bbfwqh_(YH!rT&!V zl`f(wX3l`67IQx`r#6kp&<f9oKZkM-r_<U%F4k`GLry0l2S!1(<{kc~AN|#FD)v=! zOxy_!cNI+Cyf~?{oZn(*KPT%uwUE7X@|_de@V-4qZliAc68ZnBo9+i8+`@fAWeXO{ z-niPUp3eU3Do>REy4w2%^_%K4tL;0;M=mANagNI!I_xi44ihew%jWqo?4-xR`ztV; z54W1<g5bG>-!fa0HuB*pz@4*bYJZ>;|9mDP-kU=&A-|DgO<|+FUP4->+#1Ys@}~*4 zH7@QWM_2jtm#|?eoz(BoV3w40{Yk3OVYpb_F`Vasja(FMAuDj5tiWN|+=-MfmQ+78 zIYpoQ>kLV%Tc~oY%X#z<VmB3t!D^}z=Jq$NctSHNZviUPU=Ajg!3$%P*iR{$`I8~5 zp@RN+7cd~(@n&Qj>PYG+RN50GS8FksC-7CoB68LPW4DYm#Fi4Ne`P=;Ukbj<Mx+iK z>o-ulwek!!P&qF_D?0gPG!b^yXr(Cs2_@KFE>4Dx?&9PxUqpeclBG(2l~$rv!1M;J z3|pufQrYHIOQ#`_-%!pATG)luj0uml{Go>~qVd(3a_z&!q{AN`(!Ml#Itz8c)a8zM z#dU+IztYC~s@1$6ed_H}4p$ha*Cj1%YFeo;3rk;gn#P^Xr(=y+$&Y7-330qs5T?w- z5P(-k87lf&<4)ph1|tI{!PDZ9Ha<^t{j+nS4d+Q4{xK)arQvrG7oUK3K*crgPW)CP zxK%ux%4a+5!II23hG-i|sW!Usa4P0$2o&>FJb8L^s6UIEtlsn9XeU202qW$)9>L*o z(#aaRQ(*i#I3w4SPNCc8ak=PRps2}TeO@B|UBv$nJi<$0SQdUCcFhg^7)spy<aMkN z#GqYqF)IR9XvO*^5POB52`%*Ody$@ITGs>$&f{6NDj`l!L2RT5`Zft-)HxAko)N*Y z(+Hk_EJ_0wxk%dBhNxT=xluwKwtu6R=(SB)Ao&`jN|30+zYUbSu?>yb7}bJjr%0*3 zG3p9173_mpe*|jJ;GGRCv3?5Fo&%~RtfW40Km^+LNz|zlwkM$mMAy66-YE&of%*%G zK|&|1_`SsABH6;JmOzfOrQo@-B-oH>+p(+vJF1vN5WwGXv->lK6dwa#&hHNO2lNGw zg3c>GB=31}B(~#cMDm0`CdW~-S_<}e$G9$v<Ppt`XN*ywzLRBV_D~IHMVb9Z=t)1P 
zqUnuijY^>UJW6=q2$T?WI*t7%n^?W->OP<**i_7(%pBlbFdPhU6V*HsB0^nE+sgUs z@1eexVks*}ZoBHx`aN1_IsZ<Oi-T+AR>%*|aa(v#(m6R+EP5l~#FA=)Ydz-F_E$qI z#8qgN+Im6r_CN;w_kTe?;=R!L$=iZS$Rxx`>rl|y!~v$;{8*GX$f6#8jLWbk!SZxt zJQO-S1B8_A4=YM}x~9Vao8pNn<M}WZmHy478<M*edvzf2W^!*HYZ~yTmAc!Fy+x>x z;ekLhdXRb<y%GmAr40)jBP{OtwwcD`xAFD=WFHLdn*^?Kch3mVRc0!dlJG<eQ)lI? z`m@5Uq+Dp%^oFR>&@Ok^N^)MOAHZUBdIP7;C~}G3+nSCI31|&GubtGE&emj&7)YyU z{JB<J9nxF{wiH$u9>T!<jJP_$kS2JkP;u6jRfgP5gZg#XeuE)6L5n?I#0jI;!rPkI z9fR)e9%u@Y%LO@)KhVU!AC%sYW+hLWYb_a_Phgn*hazD~b@CMG_9P`?Z7~}@xDO<c z9h}+y^4EVB@$J`a`QTI}t{<G)b(^iDfK;AO2AAqys*<F)XspSjG<;_j8$M*XI_hgS zW5`^ef~?5RWISp-WmJ?4*y5uiZfTOMJl_F7<^ESBNvr^-$n!HalP1FP8P9i#X<s}2 zphoI&3Y7B~Of(~$gh1h(TFU!>3nN#bW@{1Ctd4gVaR=hFu&Y#}nud=|m#!6SwJ4f* zLRm#!er+N;&0i?JL#|wJS{Z*m9n;bC6_ZVAnb=2FX%(6Ws}jj$?yu5FmF+uq3<`%Q z+P?%l{XW>NYxqPIgZ;R*k8j1rC6QqxBhILVV_f!XvC)yMX(>}zw*?ClSRH!qYoh16 zcg3?+JWq<}74bYHo_odfG@jg!p5WK9p9<7IkGORmN%0QmL6F2!#V7Gcfm_y*SKQNC zuHA;vKWpLdw9dWKS(0p*J*RYgx*AUf)^Q{S){%!Su<j5&>(1j@W&2Q9^Cmdm>f0hN z`}Yx-6xdadK)31{(CbDvRry<|-mlZ`IiTCWJWE0oHtEQFtlMAXJ6M9aJLW)Z<gJ0) z^?1p4Q1@^eUQ;q^dyjRX^Zj*HIn-b+f<UdPU9G5ft*CEptDsYTo4@Nsj)wz&p5__n z=MkP9Ki7CNhctK+5YyEjL9)M&_%+S%3fxDL$mVSw399Bzz*h*S<xt7yClT$mj@X(X z0b1q%Ww3XszcDh@7qYJ{u#XA@=VQR#AtmzKIwCet1w|Hxy{o>nZqL!W(VCNR;!#oo zF>m%!-K1vntqI8k{w$eNNpd$KS+qlQl#ogIA7xU!`FpH4sQRsAJr({|m#5g@I^TnZ z_x-xs6G#fxN&?ngj|V*B^L^f45tqYs=ya&xnl~af)J}2DPj}#H%`$K}Vg0Q+UO3j5 zsZHKQ|KZ5+>u9a;>vQRqR%3|A5aQWPEvWd=gF-c$9fD`BPgj#Gusd}%!v(THSCc6a zudXIpAWL*LJp?jUSJRo0&7>!2S5oGQb-PI88f#DENj8Kpg=KF325Sw`5_5Hrjlk5P ztEtD+-&&@7ED2nmWqv-=6E~#6=c$BtXofWCYQK&G!-2F$p=Ts1q~D=-aQOF83rltD z+7JjAP+(mP(136m>*^kd_{Lh|i6*VNi0Cz;nR=8Mng?P?Qd;x7z}wT9>rJh19leIx z!yEMuwGmX{s(3Z+4?0G9>1wXQR$!{>vY`nZ+dwT5{hMh-O7bp>z`g%y9P05%bPcBI zXka1pq1#=L!-yhUg3yVhs<>DY<+jpro~|rC*?bm~{Twa)GfIx;5Oe&HgSwilFs9Tz zZ-ar3gBl}oSlC{^UDWDft5mxQxaX#owMP)v)o#JlDc5dc6St>!{p;<JF4p0?z^=V) z+4hVWl7u(etJ|}DB!ybM`4Sc+{>>DXn!f}7hk82s!r3T(^GRZ9U8sAk6>sQjn(*|u 
z&eN^hFX(sZ9&M)M+15Kf_xfA!@nrd17wBqVLRzTE%NuLSXrWIe&+M?`s(o12PLXpv zh93W67R|T1nmK5|a8*6pD!G<4jZTI%Av6%(D9n*4`W0$GVKyA0JYd5lSrD!rkd{+f zUjd^WBUeQF8#T?JAmZ&=<==Pyw~>l+hj)5Ndq`GrqE%?pvgm*aR|wtPh?Hfy@-9-A z6^EyTv7Fz7RX}B+_7GF;jQ5=hb`HrPPUK^L0?Q6$6{uZ}SL88mf!Zg4gu6!BwH6WG ze$gI^kQ(Ypr?QJQs!qIBhJ8UIR>I1s%1L=2jbXU8SA2)+NS?moF@Ymx>TRC_=Z!fd z%v@P1szx<SgvN$g1V>s2$8y^+Z?|wKpgN;MBdV-98Y&m63H4Lp$#;=$FvHL9)79LG z!A;t^il%rn>F?mh##-W-IuOi%>SY)-wEBFoy7g150=37$B@14E{Y1n+LL6sEcs7p# z6S@g+=xWE4Y>dGJX`V!C686HZ^k|xRuO48VcHN;m!?e5mrIQ*xGs#&|#z)RV&mYhS z)~y0bJ|q9?|HZm2I->4f=G>h%XC}rQ9L38mde3B~js~eL#%*W-9+vdM0x_b`cZ}#z z`VET~?0hp$79~#%j&$I3{Aq}IpH73QpZxpwvM+Y$`luSDJhGUMhU}xXA^V6LfQl)~ z7OgiUm=ZFY?zE8AyoLXSL!Z!`tyFp<YZc#DIQ9XzyzhB>iuFRBXgHi=k^1v|93F&P zSyNf*84*Gwy|V|%BAZ|zw>~6XZn%X%@t@-V>E9<txA!httR!mBE}JdyqgJ_LF%VI< zZADe;gh{Dz-pr$>W4Ux`PeSZD;uIF2XZ_52>>a8Tlu*&`$_3Yiy*+(Y-~X=bdsqIm zuKs<8z$tlpD7y&Z*FFKqW<jfzLra)F7=t8^PKigzPtwdIi&dC>r1>t?j;rAs2zz}t z|EMU-_wD=*vz*l24T%&X`F~hfg)9fI56zrG`-AsBCa3V~*BlP%3A;l&3`j3@NDBc= z0S$l;k><wj5a=dtF`vhv7Kimu9@t1{B=UjKjsjPOYL9x7+)c()Oh2@f`i>u&ZD>aK z(18-2i(ms`@E7c~uzQEzr`|A-T^O3G%3#sM@)NP|5Nl5?acenNgHB~}q>`|{h21)A zy!xvF?A2jI)O}{MPlu(bk56Y;ho!5xOlL{MHzl9_h)NH~iIaAdS^q|JJVUFK)(vTc z*xBK!YRA7>+wepy9UkStq0<NMk1e61=vG{pGC+<g=Oa$i{*F$V0%KgVSs>o?VBOTh z#*Y|Uc01AoeKTn3ef_#Or7?<7QgtO^8-!V=w|R=-k@wGD_<JzlfBkoF_dl|11xxU( z|9YFZJMQAhjz~&m$3~=MKhiQHN0r3}>Wv9IMX7W(<&axf#BrcU^<SV*P@|0=(ig?Q zosN2qKM#-kT}SoUAJ@I<$YNjXlkt9=z68&r?7Ih0!F08g$$lJBI{Nt3hoY6@T??s| zQom<svhVSQo1vRr>?#N~gp<b{M?(Z=xXR>$O(>7@+128OYF+L3sOr@yI+j!EYQIFp zjnh-O#VhIfPaYMy(L`!6O0&@-LhZ(UgvNTf-2AOk)p1pF>8Lo|1t`h_*%9O-rJ_KE znpVd?Qks~&a%I{8N@*4;1=O_FagVG@><eV&%B+Euc0m#S($RfAM1OUZe8Mp*dEcxo zmTVkwi|E%fo=8MKk++YJV5EOu6WlRP{4>xR`~|CMQ|ayQ{+*Yj$toM8h=5E{_>fMQ z!4C>0SYy0dHHdv_9IxKe!iJj0h=`HBVoFt8hO;B4{>Ary0GqL!7FN9XG+N}y5A8$* z9)@1lL~jo#FTcU?*lon1?A}J)_8WBL2TX5H^>qhDe&`qy-&>BykVYq0juPo)ICcp! 
zWp_Pd`~pxcYvhMcBUX@w9`Ynv*HX@u=aUss(cP59_kpppvTm8Pu}DMf!xg^P4C3x4 z$DoW|P)4}q))G^?b!Hn*VaG(?psyghzcEf3ukkjP^F>G5C3B%_Fw4$K?6I0u>J>Oh zc%m8#+Cl?wT24m7+aO_J@~7y=4(W+vb`4=q<_!0(hGsP=340M7jKu!9Trn@i5si}Y zE-2zsSCV2L4OJoM1W`H|>M447M#F~r;HN`_o>&}|c5o^u{jrpyY#@1*-W5KNLwLy_ zD{^Sw247D(oL1YY(667zO^ec>q2>ukNU&GaSUhc;25(~HY*fYsmc<8_Ej;`Xt(-?4 ze&}ZC#i+(&7mXF@|5LDrp?WQL2}7|qH&Zo)?axh8&pyx2=JwOjiMe8)KZHeE2HB`s z=8``Y71kVw1?9Yz6b~NNlT^$JkqQ=foyvJsBdEr!FapO~n9GvkJ3}R&3;%#Puh!MH zqEK=%ZjO*Eb?;$t%f&-OvDZQ|Qz-U)D3%|JJsOHRLb2tc*yvDfQ7Bdtip>qhrWH_& zc%qO?*<luE?H%7?K9!KwsNe$*vjXdg5vZakeuc7nFhVVS6yBH$J{Yr4W&0(B6qLD~ zfAl^`e8WH4bJl*oKQuobns?$RteuVZGaE4OPNPL!E73m{%BQZS6mE1-1AgjSC?Y~d zK6SZ7L^SQEaCd@3lFg(k8%Z-fJ^cm5h*Nlf0WnEfWYl{zHjuJy%%P+Wq#=z_LUx%0 zE?;63FNc(LV_2JXH3v`uQcZSmUVJ6-%Vtt$EKP;JhKm>K=Z&G_h4gb{sCYssztDx& z_MSO%0jamEehR6H$o(_r<VNVrS9z(aH}nNWa3_(j48__rq$@1hmaATQh)uDX)Qul8 z*_NrgpS@|zO3b<nX+odzQi3ZHQH+7pE$ou5pQ?rR$RF6}kuMdcoTpEx&gd$h5im&Q z{Nk-_a=y_?CehQ|wHhIRyckLjL7m|-QWA(%$tR7&VZ#!*bzQUY;2U6zXiMjvTwdXN z<0(*%L+0M8NZUURk4jd!Fu~_9c!K24oZ(4w5yrE^lC^HS*-1kkRAW6(F4A05cQAbR z6&B4J{tGrSVwk}pW{s*ro9k-Mpc^(W2l_}++)H%*z+bJ!thZkD&PvD$Z9J4u<uifl zCvi{16{(nOirIn!OBbXPUBtH+WOQ1JBCe=Dzw#3Mx*)OBDiLd1ed%%gAa#v_4Y3bU zH-E{>>`MmLUw2>`<?y%mT=WB#Q;vN95J(+<-8PkQqOqUsgM5WJm%^c@N>>vDsyl)Y zCj#0CI=CEb9VGmK!mha9hNrG32TZOu1uYsM0i(YXHLBpxog&MQOV+yDz2Fj8!uG$7 zr1G+=jR#GrhPaUT&Tp`~keuo|1_3zyopVa@g<kY%jrC6Y<CD%(kDt~{k^{xcUp%;+ zHzHCOSwu=I9~ttIJL2PCT!$cG$meBFz5_EDu8%?NKG(Y-?x%9$ym^42qfFk-%C1Nl zF3`Gn(AK!((o~Xbp%RUu${)Lxr8*MS`7JERk>_}yx*H344PJ6$VRZ(Y-?2Q2-6xhO z5&RHjQM-8(&}MrmLN%i{W7`~=3EOa)CfweNnIgh_{k<bgeZGaokIamrLvFLDh&e_M z!s)kr<mkSjRNwVp4Fb2royL!$rG@`tqCX0JAC4^Uf~2J{1*7G@h$r7Ps817028}O} zulX;e%8j@}f+O<?6J39qc~i1_RST=WDIL-`+%!BspK8(SC#OTL;<3Y8H?^>DZyHKm z-A47N@+b-CT3Ftwq>R=3VY^!^VdMr{qxPcki&N1K7a^hUA&%y?fyf>hW$yJqrM*8Y zWAL_07`Il`K=BT>7HK{I8&N$*5A<noC3eyx!5{a(;YWEQXvx@uY16wed>$F+O6!iH z==lsbnJ#7Eh-M9nN=G#Ah?#-vCR+jBFshy`UlXS#*5qwi2!~H#bj}<ehvm6A-H%YP 
z7F6PwW(mHI^AnEh3ksdGJoq-4=``iX(TQr~XRK3Eey104j{v9M^JcLLMYH<lAm-#B z&jRh*naZSka^cdsX}rrp<;?n&oha%%0wvPb`7vgM<hsahR*Pqbj4L2G7AnWF&`PLV zWu@`dO@S$GlwDHs1Rt3NI!Rh1(i+-ltnQG4lWY~P&UqqmzQ|ww2d!#UoHmD*grt)B z!&(tn$*;awk8btn|NJhDx60eMF7oHSzycO=#H8r`jfqM^=xoWOE%sl}So#T!+RPaZ z{tTbsg#8O%l9e*KFSSegetNq?wnMdg_?JbhJh2tZCl!1GE}6i)U+F)rS#kZ$P^j-Y z!8ucohTe7}PH%)$`Q%3Wg&DVncW6Z##q>UjRTAbS3vuM^Leq8Qb*ItCSo2h$H=ecj zO*611W74sld45c9b?_6mXH5FQP|3Z$DtPrC)S2(bRyB0O4$TO53D|>}?{DA3R3$x< z2S8ebyE7jYZXFz&;8>x&jxuNZk`y&=csWZlVuuPDbP0l=IK@_!^ofnSLD_POZ7C^J z?^dzjONPgKVGqN*+{9DNH$II`9UIr((m`#0n51N#Vh6@24EgO4=AnJCHQ`$yW4MBC zF5UX5<Ua$$tq-TT_3_&w_V(B$?T6viOn%^F_RZKHv0Fh1-}1QSW2PE6g0XQK>XsJv z*0_Pb&_P$|IkB!ZLo?)v{~KMzfuy5d4pcVZdMDkw4^v~>siQ#^M?n*Dw*WUa)3Hkp z6yfYkTe;=0xUX;mTg*lYL@WZeX>|560=8<4!xPJQilqKX3VvH=JiCRJ-E7PnhdvNT zM;<M=3!ILPVsQw_Pg$XxFr3)f=DFD$HxE}2y_J1+voR6oC>RyK4i%Z^iFd2Z$|mxE zxY(f5{_1^|Y<y`}PrCVl+a3x0ffF6=oqU4bJHet(zmvT_VKzzO-Qe8zH>Fk{Xo`-A zshlaKvW#2q?R*;sygv;Wa6e)%-qNQOyo*wKHi&^l-U~sfiu`TtGpUXf;NFxE!^T#? 
z*y40+IbM)sU~nHP)x-Uz!kv#RV8zQ{9A{C^*~O29Vha(3m7xdFV2*t7V;&eEhf9DF zfg-xv#xKFwBA<b&YJtZph1<m&TGEl?<7}(5kNW9(?1=Ma^_3R3aN<tw6$}ToS!eM% ziGGq@{o@?AaMFFfF}@@Zf24<Q_XoQDfhI(8NjjE)`vGg4RH{yx!^V}RWSQ`Cpy&$T zH??1sU5QOxrGNZI1zUa_EH&28ac{8IWpDpEr}D{V>YcOM>yyX!rtN||Y6dR4^x|J0 z>1e8d9bvIkGLu)5f%^?)92on5yY5ZmTM?nAUUP&^n9^$yHFY0e8z#DM2#5t5JS8`v zl|>7)N2g>Y+QWS5f{zAZa8DWEH-~*NB{TL1Y(Byrb+wVTP3b-OcCa-d=Q18OmkgiK zWQ=IkIDx+|*u@~8(8vnQGqewa61pvu(#RH+_qKN~qy>dG*a-ck8&VpcMVgK;C#vHl z&pTGH_P(xMPj7`U)NFV6oC?bx8e^=!yk95O58IIDt8>}+<xbXlYJc^rpV{!K$#9}< zeYOvqH`Uhfrq{?DhJ6d%%)jF(;;MHDzl_~RSW}*QpB<Q*sfmJFsC*kY1aEn+FXr*P zpUcG4P&s4hpU(vkbQ}x9y}b(JYzI1y1*a_A@or^;T0KC|Zh0Yz{czhqRm0fF)9z6{ z${f=liWPp;CcNV0cMWBqOkbq_Y6u%M<7;*1cPwjWwi-w7x6K@>P8`l&m^oOTF`Ruc zGgX~7d`EDmTcy4>m{rWaAM#Gj{#0E*hP`sTSM?_AJ7<Kt_ZT)|&QNu*m@S=ii+bG) z?D(9Ch5qM-`|d}tL;B%9hm_)h+YWF9@Cl$d=wlZ<{&))g)D-j}&GNYA^ER74xA&Y@ zXr5Tc&?2AIkmgm)C0ZxXr#TrT6?YzJn2Kw_nBy*_VvN?&J;Jo#JJt$YNJaDTLWhb5 z-wLVd$+y@WYui=yP?-2C!myT=0{xSU{;)PtmBI}32E^WSgR9~#(nW0pC?Q>}n!+s4 zX0VLE^-3R$dD~y4QTy#mrB~-K9PEvA7s?Gh8}v%r9{5hOHP6P!gpaz}1Am*+?-4W| zABWjAbTlp0#fE5DUB24Ge*D{gN!vQ1zM<1%S@>x0s_&%RU86tQX(X$Dqo4YHdB^%U z`l!_(yv5$#b`kyd>YK*MHT~cRZhCWd<ndG>p6yF?9lmuZe+T<u_S^Q`Bl8KFz2kB9 zN1wBwcL=@6+9~wnuAM?Jp5B>8dhyoIWvWQIFPLq+tB}J>yEm%~cCfK~rm4T)&R*G5 z9O=a6b$F-K)c0+NOZpCV=k09i-Zb@1YuF2W=MGAEjC7=KtTfH<kfyD0NRI-V0p`Ct zq>+G!0n3oa9-Wsm=k(T&Cb%Cc8bt9mU%+ssi3^R?hi|{ZF{Oi}0UQ<Y5wq><#R>e; z8$8)Sn;%0m9m3$o@f)j|u4+!#iKS!#aAVn<%+*`j@+$Mqq3ij&+BIMnpUU>(dtdJ; z&22zKrKv%>JYEZL^Ve8aD?Y0?ZgD)XyTPh|{qZ{<9Xi*5EhW~iZE|!4oLNq}fDCn6 znH#4-Wy)uPqE=8xy46np<|+2}9sSf&Df{w{0bT8nLTQ3QEb3%4AI`eDvXcg4oq^c| z9!9zqi5Vq|2E{y)KHZh*yMpxsQX?>GMwVCLThjH=N7}u4$NC#0>rBgV)Gc}6S0E3A z<b^t<W)dp-)8Trc8<#57$yi@`8`gtx8X*(DL~s!*u887FbZ9sC+ZKHNLT<AJ3ENeC z`*YOBuDCMoT~LP54kH$6Z8WGGXWllW0oPTy)Q=il8@$kQ9R)X=y`w{xz~kJ2-A%=v zDZX)_xK+5qIFgcZRnwDP&dr3wiBJlOsF0W<$zZ@rU{|?j6#C-7Y(wQ?7zZ2_dvKNd z9Lg(f2&{^@<{0NX(jE1?40?yD*o26`2-8^rNO&QH1wsv4rS|L+jSbr~F~L<6AJ}g1 
zVB=9h|I>Q(`wYbjxu;{Uq9`hlwv7LSEI-`?)$zD3EbiH4?JB$)vLz!%vuEAOYFTD` z-M94n3ac$_^5LFz4Ol}Cl^WE&-6vemw>Ip^x@)v54&5esqxi=#GEkNC!`agLiM|!x zkaL|UT17|T-tXO!V=DLotUhSp7APQ{M6-NI?D$YHXe{SWys+F}P+2!s<>^l!4>NFs zHwMa7SdQ)Zlw{F)X!|iZ6^yvN9IA9g5dwH&fK-WON&bPPN+bSLmncWxTd)!OXI*-J z6T9i|N_CI1Y~9`8W)3ew(UEKHBheUMN|AP@s#u8u`be~h1Y>!Z61MrCaYK*^Dt^m5 z;cwOU$K{b@VrAcW8|^J3p_d!zb|`k^2G(c6X!YR_*sKK^8oJsUz%@^{YC#{hbct<Q zFhH%j#7-{gtIqv^{kR})xZxq{BX1i%y%ic<)E1=Aa0w^vxs5I>mhY9~MoRnfU<C_P zG#ZNFfk-m*E!1E|uySGl9Qav+^YCp_D^xd;1`4hd<NNZ2c;LpCki(V2I1#5Wg_H#C zCDybsD;}?2`!ryt%a$`{B1?bfLO2KR9i=`qhRweBrk*Vr-GM}ZSA~B9EVR<Oe?5Ea z-eWzxb|hVTu|4U@7uj=*2B{|-*x^Mto4>T74?cvS7aunC;~sE9p93kp0$8Jz2!-dR zK=FadECm5B?H43pV#W8l>2roZ&b-&#*P36wfHvdX@uX|ct<6=DWNVwq8zDyQXgTM< zM`&pF4$FX#ov$S+;x#eQuX_K+XM)ry#iB^T_5M~i!jstJUJHhMw0sh){{z29$?;FZ za@^`k#S-d4&#<`r$U#jG2itGn-u$a|Y>y{hU2uY(^CZTfBHcqugXbW8ev*>I<G<aZ z@pe)T`_V)xaa|62j^R2s621Z<{UAs-9JD`r9qkxVS9IQql(WZ~eep!~y(YG1aS!!0 z6MK2_(7ZRE3%4{rglL|r!a;~~9Ro9|!mY+DgK^7H<g2SWh&9>aqU-9QU1SsY4b$GA zqP`=O4ZXiVYH-W_{qVf^ehZ$j-Jhv0ZDAkZpW;h~f}pQDmiDhai%Lf0;~q4@FHy3# zj&?2B3Du2OMJ`Xfjw};#`?0cl!*Afrkw3b@6zaV|WLFuzH04)(XH0*5)iW^YaANT8 zWc49k+=r!a0~7IOAiHNtjJ6A?AwB#1Gi=3@S!(OE?As;jI^=vK+nr{X@W5QQ{DHN- zH9hb-zqpCH0lg<V{TJzLce?JF{ow1JEgN9dun0eQ6T9%>0JZl4*7YyOt`1Pc7dMIV zmcJ~{B$78yo~V^4X{{6Sa|L_Rx>7^g#CKY-pb2&n2>bpo{e93=`aVL(7d_Q(zsXBl zTMyd}R~u}1cV5wv#D9GfHv6mpZbRLEG@`%B5q9(WFUFxL?T5p(0J&PMbyVnT$z@v3 z$6-fV`3`;GlS!vN!^^pqC@6@he>oop9BbC{@V=7w{fNzfHFa<sS=TUoO9&V2=Yju` z{q%9R3+y}o!2Ze~*q{3kcKJBF{%VGLjG1M;mZSc)flYgD#DwQD)^QZre%8KmF)hS| zvq(|)NHE%Ps$+}>9wr>N-LGUVx_}jcsI>ox%6QZol?;=L@8WUF&i=jfQMUUvv-+R+ zvwy!fM{R0hlU_HgyD%0&fLJ|(GmF`^*GDtQ)@P$bt;f1A9Ycw?Egjplb3Ih@KRzxi zzn{Im)Iz?ED@(B!dXLSjPEbFynB8A}bAbk`4~tU49S;cOaE0drf%aC`%{vckjG7sb z^RxGcss%3(vV>yMDlf*vlxjO{v3*%T%6|5;!k%6C|9AGcdfDz}De5m8*{93;n-)@Q z6t$M|&P&?!x!94<8w>vz`Fyd64fT!o^;raCxmd%yi%`oui!hF>MHtV2MH!XFiTo!D zb9hih)A+X{%n(&j7H9EKM0_a!P=tEkAVL#=M}*0IhX^hFbrI(CjR^gR9ExwTgRd2| 
z(R{TCOZZ9=mh$Bybn+!4oXi)Ba4L6+a2lT_!dZN(2<LLA2wl8Hg!8#WgbTSvgdVOJ z;S!!D!lgV7p^pl&dpQOt?@r|1v~c1v0$oXH8`@6Uy^7FF0$ok$1%W<6=vjfTCG?~~ zpC|NvfvzX?ZGmng^zQ=QLg*%ezKaWN5`RYE^@Khu&_jg!1$u<gB?3K0=-og?nNLz| zrbwW34nA3+X9yiD&~t>^1=>WYQK07u%@QaF>f?O{ev!!C1$vs$7=gAB+O~jN;0mFa z1lmgI1%b8^dRCwkEDJv=&`3hx7pR8Nw}FZ>Ybmx>B*YQAVS$fi#}odPASV(k3pAO~ z<pND3)GN>oLfrz*B6OxehY~tjpn5{b3e-fXU7!|1jRMUlG)tfk722Qo75Hc(cNb_0 zp)o*3p-U<D%RN+3C!s$GbTXme3Un%=X9PNp(BlG~Md(4G*sH-K)3JZRWtvJk+7@Mj z<?_V%ocu-b;1C+0YUc}*lh=hPm;fAM%3nehy)DWWrraqgi%mB3d_gJa6-4=SPP!Vp zfdXHtEoyZ5z3h;rI9pT+QGD>PrG`lHwy4rDQ*4NmXp3@&DZkw<3Ylz+njEGyhbU>b zsHtJfXCX?4Eoxer@_vY7u|+KmQ+9+X`Iy@TWwMiR2$3ANs3l>h$3m3Rwy33HN_B`* zVvAZHrraeci%V_h8bK-Nl|&J~FJ`ToqaA{kduZlAv65}68La+!B|Ba7nGsVeu8mcc z`@hrqFX(L7XUz0QHM{h<*xML^#`4A=Xjc3G8R7p$vuM!e%}U0T(suS$ZO@eWH!=5T z)WvOgNUIS(f#~WCJe&Z}o2>go1J&TPJ(NMg?1z#wC2+!ba1H6_0p|dx0mlG`0Pg~J zzR6yCs3e{^WrZXs0x|%W?d<wPb7P<1K?&DBM}DO{*n)?9_gs%e#3*6)HsGfLZQI!k z52vc1-@)E{IE#YjhxM7nf!v)4rUGUGT!4juC4l7swu9MMmc)lL*$To=z#+hy9qiea z<!pIilr{_NA%4CF4xcF-*oi<|@iBa&!+!^%!R6SAvWR(jV;1Jx4(H~^p@?nN)5Am$ z3m)MCK&x_m063DF9~qiJiJ_}1ba-7tIazAh;ztJe?S#z$eS35U=Wp@k@21hVpTuq$ zYGeBz88FX-!^5z@sTa`TTl-1c26_a0?x(yr&?DG+KjprG9*srkA%#x$>G}Y^c*6$@ z)lE23{uESeQIj_Yho|Q^<68?HT47i`_IFkyTZcK|$HEtLa_ndz5wR4tjz3r3TONPz zuA^GHVp?EyH;qzsF8C!sypFvg4_BwX!M>7*+Q0iN?UCuHEl}%?4wT?VBWxUfBZ}Yn z37xxFC}ui@r&|#?bT3)ePhC5N-L<N}Pk9xZO(zpQ>qcQ|zsQacQQCNOnCcmEKv6sE zOnbw9A!Oit*#dFJ;xqbnp2%|LvyC;7jgpX$vVA}3#!uexDMAk277Fzxy)T&Ivx;&j zPMy)e0mo+a!v(l5=%QX)y#o5M1s52^^(3r^+t`}qUYe~SH}Hoyu<>=tKC!0BxUmRD z&ShwG{1QQV1zZWd8G8@^yo>nIt^`ei?%>{P*G@DpemUt9v>qSn>uP9{kPA+uyIfn4 z1m6?g?f#W)nz%sX#GRJ4l(3P6D4$WhM#RfXI|wd8aJI)cP6dX&dYsD{5{Z{KP6llQ zJ>>B`)ETw|o-x)~;`J1Q17BC#*o;T5IV9Is#65#Rgp~fu$w1LXAiXzG4wRw`rDzoI zMUjYWEjD)i(e$jE*&WL;IxYIsce3|jA`Drp?mm|ISg|_U$7Vfd)qb<AonE_)ZGOzJ zp0tb&UOgZUHg9^v2Kt^g5KB*dYSs&^;UV#MHCwp4r_VYUdNUnYf`tDOXI%KG7-gu$ zaq0MqpdS6JDyE+;!=^I)^CcP7^mL)9{U<ma<@{eS;T6=dum5_};t$LDKPb-ER#R?T 
zbVbNH#UBPihFgIA;P~|<#CqZfPlEsC3;vXlTFsKyWcJJZ!|TD3y1$Q)!w+3uvQF1{ zx==B<4`8#`q^F(TMboF2Jmu}5itjxSmn}Eo5+&`mHmxxY*|ii-1q*);LAd&Mpky`V zXef$sv0X2#OQxF$5sw=!6_lDL@%$SveEk=ATKj_g>SzlN_;hr$hVBhL6E8N{<$M(` z<*|Dn@1?F^%N~BbuX^Yu_VVK?>a0ub@Z)z!p5RE$WdkD12${{&B1aHX%SMosW;YwC zKGVX!#;+eDusktKy)~UZ`@|5nFP$|$@g)&oe)0?j%b)s)UT{7=f?_M4j#oQc*warJ zQtZsreJOVB=>f^pu^S2>0L%j#&<6{ZU87M#e&=FlU7HcxS1`c&CGpI~%)K@>Ggv{Y z=Z<r&6jm1L9S+21Z>Tvw^8Vqh;b55f3c_$!<ADCBw@{tjjcK0Aj2(Z2ZHku;8+->q z2{{PocEeVVvfe^j&jmf4bq}uYc#V2%1Iqgcmt}YUt*0aNhkypZxN#PTfqFUQ6p&-E z^^L~XSLOMrj8Bb2X0S?joQ7TnS>?$t<7aVsk^HcFC^w8~mynK<KNHtl&%<GU9{09* z09oLga6rT7dxVdZgAl5M9EbA~YohCKSOa3aZdbfr>7+y_f4HrXPx>1RJlkKKrcLR$ z6Yb1B_lcvC-@_+q^AHUyrsh7@|G7-f{cyPd*GU?C?)E>Onbf@t0iiP!4V{_9J)f@D zeZ0f`e4;wC`b((S#20$27re^mzmTRL|0;X*h5h0ztv9QBaf<q1UD>`D|8$^s;B)3& ze@`dN=O`6y=r}fEMt|14evf+D6YQl8;tb^<8^UL6{fE#GCI0!FkmdV+1g+zfkamW& z;Vd{aZAa}(C923tXHcq-H!c=uaD&*ro0h6CzQlgqbesCImssiMvFa|J*wdR=sdsN+ znJ=fRuieawU!F7Q!?C2ASgJgI1UKmq<0d_zf1^XX2k;=E9`F;=*w-%?`!=XCB2VB) zT^haJ_>?=^6+~{7KMRf%=Ok?t@#7rmZ91HzlZr-siY$y0Ojsu;AA@%)gD3tczuxcI zqsvop#UG<3-gSgjb~N4~zvJ|Cf$2U>V8UOM%r91j#?8kFLqkMvkUt4&@eSuqAATsz z_i6_pF9jO>Sf-!ysmm8$FkBxJM}x9Xx4$$y(*KRhbJ)pKP}x8E;*(kWmcFrLZb-VN zijCir(tkH7;`b<Sr8h#04)GOi=;H&vzK+#xu@5h=fLp0D<W{<GqR3xl++GMB4k!Kj zT@|lcTpgSJR(i6DCWM$ka<sAjwqQ>_ZC-n2`P_MI{aa~6v7ElIOr~E9k}shSf4qwj zIzPzq=j#=ZUP!$7R*Ekq(XWlhb=6=3{}gjn*us1T6yD(@$_;BDZtX;V5NTwzU+G{6 z-=ExMefSGu9)AZ99{@D?k5ElR{>BpWJ~rXETr@Ba$K^<Tq>W__r;gqLX8d@LxVt89 zv9(`d>sUMDiq8y<*Nh7~FRW%CzulAWt2MtplpM@;qb*u!oeNq=zZZ5pWHlJiLL8W9 zJqDfYSPK5P^A4U!dN+KtDAA>fM`_N^L5=Y{JLJ<m=kCG{Bn0M5L$-1@7Dk{o5iL;7 zo_MEQ50n8vWmoeStSCNmkR$hDy4(9sO3Ks3V>~+w$9eu`_!ZqGajUAXm~uq%ue_6< zONG8#jP^*vi2;?uf7lW9%hhLOZxf0?<54QUzxtfyy>J+8@ZS$Hk@yMkruA_@f<zn+ zHDP$ouG4~yC5cMh5?uK%{g%$*cL#J{cRN}G<%J$zefMCD1xmGXei3_bpChRS&ic^8 z=Lul);UE?yiR1_CUc~zB&q{b4K3*&V_-3Sre+KH=S*&7ziuOTzvp@Y#T*@rAV*hw? zAhJK1X2~D+r>Yy*GTnRDp3|xDmH1)4BK&^HvMyLTC-Sk6vKj9stMf0hrSD~SrGvs? 
zMg>w}`ggFuzc-*ekq)$~ubrXo^x+{<^9}5~_lCqDeh4LOL2bQlSUb_rb2P4C1M2Ou z@(e0mPnvLt&tUh~_g3FGlC7#w8bgAx`Nh-Qnde~o*eEE6y-URJci{ep$B19`7{OQU zM4C9`d~OF>=m+rYDgyCL;1M&}5A|syYHLxGjdT=+H@*nyt)U;9>EVv#Zjlrc?RhVt zmvP$43J#1=j~>bHIWREGz8$YdG&p%480kFCcHMI;_7Jp<=pRn2;E$HEeFp{&ng#`c zp6UGKuPD52T6?os@HZWhV)eFqU?JN0MhDX!9NvpE<$8!kC(*=W4pov(J~%8!q+P6G zD-WjhCcOFc>p1YEKmOEh;DeOQh3vh9DeCbK_Vqz;O41crfFdrAFalF9lC!yzE?ZI8 zD3{ry21D9pln$CZ7Xf}38s+T8Esz!d$(mueu|o|;we~CaOT+NgFz+nz8qYRAMBQ`b zR+Q8d<?X^>zm*jq%2Y>QW_KS-9x)X>*Ur$-1eZL69|2B37?^T}?s^R3rFbKBmcu6l z4J~om-a{$sXMSR54h@5fN5B83-BQ`10%xa^N>7FA#7@JWAb{UU4e$KIci5b3sG99} z*n0D}sqEFpB=y%*+2O{~S&Ir$#CrU=0dA;O3B2Qm+OG%D1E@1@sNMb(GaMPAF8e2& zbHvyigYJ(v)Dp^Q{W_8V*24aNWU%_|H|*?@42@gO*F6XUJ3f6sO68+_(ipQEcTc_! zXyw(Y_1H(KJ>BplxmaC0uo-@3vglS|26(S=bWWQ(`X%lJURmBz-L@&<B4hlgorDt& z+Foma!8VA#j)PBfIc}hcpUr9PqjiWCndhHXu=DKqBfahWa1$x~!$e1|q^fiP^_wRZ zqH!3XP%6rU28fg!gWAv`kQ&*G9qn`+9US#!4DC#NNIHAkNEtPRvV$gR>3Aasx`X4X zD@SjNSli;*GjfjVJC*NHf=W6CI1l&<pzW!WQUC@(31BW@IpA@?7QlYM7l0N(WG|Hz z56A%I1111|!iVZjfRljTfGwcy1k?l009pWQcsk<&gA?I{v;(;numrFQuoG|ya2n7A z_yO=MpnGq;05Aa@fJuNEfcbz20X2ZNfGvRifRli8fFA&DfUbR1(g474fE_RnFs+YI zB`pTB8t^jUUBIV+3xGC2Pt+g-U;^9(m;{&$SOQoH*aWx^pugp)nH#VQ@SlJCQXSHZ zPKPvSk3)L?Gi>SCI6^G=QbDT!77M;K2Roqdn-cz|dkJe9X*^)94z>CPKP!*!EE%z5 z`*C%wZ`O4O{ThO#K}eTBxBc#rNRA63_5e~GLTo&wZEX&zJJAvLAUeWCgzJC99WR7V zgxF9@O}{#%B!mkQ(#9zbVG7ZIaY(5M6A`8%+}7%#D<MOq`m1oHfv_Io0|;vnK8VnT zunplXgbG5()g3d=p6G0mW=kHxd`OumEyBMAfHdh&=^i}iBgG4B4%Tawng?2@bRX4d z$Dj+jswog8O##Df>2@&BL$U_~((u9}#7H#J77DR*fZvCIX%Kmz$lwm(10ire;se9N zfwS80ZeeG>Ywer=Uj>*c&BdFGB)c?i$8nyePU}BXYSF>}g>p<P8(lW(a{bGD+`~#% zv`(I!^N#ekiX{YX)Xw5>wG|D86FkR;HT`<Bg~7y#wMh<kSFo@8@nrT$FjX~xy@uyN z_CYXJJy@hfyfrKo4jmEVyhqQ*UF;M6ezrsUJe#>MrbJxM38g$y%3edX${C7&U&g+_ zn4(L-*FczEsL7>_DGv7I#ok>%E5AYeww&4j-ABFTHj<?tbvwz>9iQV61*XR{a~y0z zu(#Sdk2qBEO!Y&mdWTQ2f4`d8)depit3J`e^$@$B&@Qv!0-X!MA1nNK?}+icLxuWa zk9coag9siyBuZR=N+g!Y|3kdcQQRZpVz(Fn^G`$RBsBr}d^#Tq|4pNT)gnndVW!&u 
zo(HYu5}Nw|bwdU(@&A28#(ru!7&k*zykODYvpwVI-FM&YJLXle?U%k7AxqBL9@nIK zcQ05xulUaS^U4;^n^PebsmeTy@ZZ=92}6xIMlHDe_B-#Hx8up5?uue3)oq>Y;bEn} z`r;j(TT?Zvj3)=HuKP-Uc1WWE4nRJ@0x$vefGj{7AQ7MiNPyNW4(TF*1I`1Q0OtT_ z0H*;b0mlGG0EYnefOi2q0b2m;0c!!ozxs;9C#?js9N+=W2h0LY1~>tw00&?wAPs<D zLzCJyFcI-MfCeA|S}!}K761pF0UQDB1#AMW1}q262h0Ub155^#UY>{A1IY)N07C&8 zfMh^CKnrO52|@wq04D*&e+c1Tz!tz0fEvI;z%;;QKq<fihy%1;ay<QBgeu|b^=j4F zk%K~=KpjDU^i}U4|NhVMrw2!<`WxsRul;X}YNVu(9w}{$87a{>#q>!h2DkXz8WF;m zcg7@VXUl($+*2Q+N>lrAl~mFL=;hbIlorKkX6pDeeuL%f{|{(?0YCL7{_iV#6fDj5 ze*}x<3;&t$LIz&=pMYvgeNGu5-1Bs_>Yr-g?J9|5raljapud*6Dk%%8?eWAaEJrX? zx5w-6!KZFfH{w|yq-WlUpL_=KxEt{$uYn)3+uv_`199Zj9#@zAeb29*RGpo^GoW_X zkJR6)_vl~OAJU)KU(=^%56T{ytueR^4;Vf)XpPCnLB=xUOydIMX5-t&!^W?T7maFD zswu}b-n7u<H9c+GYC3AVVEWk<VNNv<G8daG%=ef*=4Iwb%+Hu#FmE&OH#eAj<qXcL z%xTE^DCftV*j#;XAouCq{l45Eb6azj+<40f%P7l(mKQ8<ST0)zSm#@P*2k?It%t1K z+BNU_ytnf{%lj^GuB}J@8~Ho)59d1yN(!bIxC#PxL*eYgI|>&U?ke10c&6~X!YhTS zQJN%8Mj+|q^u6>0^ag#QevaO&_v_c{U(z4Z|EBMp-6K0UyDHmPWcbYd*PN?4zvrap z4#<5o_h{}&>n`h*yuSIx`Qr*cC^%K{ZNZNPG4|PZsGCHW2{iiU`px?7`uFr7>Oa$e zss9lzrp_LlJt2E)_T$-4XTOlWDZ95J)sSfzV#qe+8tjH5L#d(6aI0Z9T6CddiD9{6 zmElRlvxfDC*L;Q@hW&;Q3?~d<8_pZPH?$a{jj?F(&rMy-gUw^jTg@lTm&|QuL(WY( z^K+_m9?4mgvms|k&fXkNZlBy=b0aO?EVo$7Epse)T3)sswEWA`YRR@vu)btnnpcB% z+mqMTX10yAm7?A1^Q#N0>|Q(lI=JM(c`gFq-?Cc_d8QSnznM;$Hs$oT_O<r2W?BbX zhgq|&X6tCH)B2S4dF#toTi%kq$MO_gT>iNHiu~F63-Xub|22Pi{(<}t^Uvr1kgqDp zESOfXreIS+ygl3gpuO3yDXcCG6h2@0M&Z%Ij|)F9Y$=qSl2igM@1)Pv57(RY1^SzO z`ic4_`VDB#!}{*ov$J2#{yn?i@PnbsxZJqL_<`|PqskO(>H%fTGz~YIOa-Q~rdv!E zrs<|TO$$s*O%It?nVvH3H61ryHW|%U^BnV?<^|?j^ExP8LC&lkcg{;WujO3M`6Wk} z8=sq;+dp?u?ugu+-2B|pxi{xJbA3~DXXoCXyD0bm-0IvFxew>A%6%Mq`Eu^Nxd(GU z$UTw!S?;;q^SRBrEl|=(OJ_?Di_v1WOti#Vds<Vi1FR+1eb)D_XRO~sQGc~Y=5@-; z%o~wcl;_NI=Xvw0^Xl?g-ln`Sp}euSc-wGv#w?pUKOui${yq5*<*)YTzn;G%|E>JS z{NM651$_%96x>y?r{G@&T6=GMs(q||0s8Aj`+0lNE)|X`oLIQD@QK3hg`XBmvm|L1 z`Y&3as4v#vsb8Rf2Achb{$KhZ^uOwj+4<Qe*|ph^W<Q&~Bl~prm)VhqZib$Q41;3m 
zZyaWH8ecSCF#g+k&FJf7N)ruRVVYsO-z1yXn4X0$d~E7wo`+^UjIk=`wpgxNqVkgS zwD=gLnh$SOEj5G=SIg&SQsq40-7ETN?m6lSP?hW;UaQ}zRfWMiIj7JkR@gz<Z$ zlJj$Jz<SKuYVDo(yKRcS(moxdX`cN~``z|?efF2_JM5p>|7}l2{iNmCTR>2@J~`Wj z;WXQ{+1!+qo_kB~y}7qq-?tsL9k+dK`-kmw+n2U)Y+AI_HE8rk`*!=!XrIo7NreLn zZz^0?xV5mk@G{wf)!1Sn=mKS{(m$%dGkZbyLBq#}PR1U_$uMYl8hsBMYmG(ba`O!H zT%kH=&EJ@RH)rPDi@~-EgKc}xo}4dpq}<rt-nsptJm%aPxm74mlJz-jbYAZ~OWwG= z>3IwC{+@RtZ&ShH0;OPpz1Z%w-(g>6|GRw}6=FR$NC;f|yY)W(I{lmaJ^G>9YqHm6 zCm2Tf3>FyK=MB3JpBa8J++>uE`KFspC8nvSIi@Gg5jiQjcjQWzla_JTyOG_cf(Uzp zeW2ZHpJ~6>?zcZ_f5rZm{X_eg_8;x4!XAa0g}IdBHc4!z-qv5#M`m|{rAy3C&Q61g zT993r{Y3WK?B}!B!$@t(-kR;(mi=qCWQa6q3|hk?L$zV8;YGtH!xqC<!*Rnk!yu#H zXfj%i`9_Cvw6VlkYJAH0ym7s8lW~i2l<7g!a#O8|nbw-tnO-t&H+^IJ&h(=x$80lu z%@3Mam>)9NnIAW=H>+|w=fvf#&3OfFxj*M{PEM{3!>=rNaqeGyxyv!!T5@|>QY;x3 z6Nc6}%OuNG%k37IWszlx<*ydm@|5K{%O=bVZ&|Kbx>~cWTl040y_Ki4CD{7errH+T z6x$4#pMC}1?0I%(m+B>HG#WvrKdXN|`&{-<*+#=8!;gl3#<9i*=zOec6sD~h^K0fK z<{!+yNiY$sbGBGt%+u%Jm;WM+<VG@&C$WJ>phM&J%1+8o%}&oAm^~!>MKsy_+23ZT z8pc44(v3OBF~<4ES1>D{GX7*NGTjY>^b#y-8!|APN102_t1$n4VOGrA98*qJ&OJF> zbJpi>&fRS}ZMoeVu<r9&k6RzhdopiB-fMaL^4jt=wqCY$+efy{{Ji`-^B>J$Rlo{# z_MYhN7wi}9y9%WythrFIqxzHjNrq?8S${LUZTQ@<&A8k6j`0AB(bGgb9U6{vvt8Nq zv;V&p7?v0|7`7PhF+5;+#O{pU6}vmY^5l)(7rQ@pEcVm4Br=NSq}b<x(habE0-Boc zv7cbiAi&5F0c2|!co<|DbQr7v)|xL2I1E(`HGm~chG7RV^_&2fEFTR27)k)saD-6` zuv%MSbin9=5d*OPQUE8J1Yjw?09Xt>FlGQ2Gdd<f0~Mz0B{ANxVz6Ve<FMng6R;Dp zldzMqQ?OI9v#_(VbFg!<^RV*)W}pze2)h`&47(h=0=p8s3cDJ+2D=u!4m;53Qw7_! 
zEJjJ*wp8Z8>g@}7&jQUoJ6#QUFe|5Ca!#gRZenKM^olIT31})rvl$K8z5x|8FaQ8c CCr|AF delta 25687 zcmd_Sdt8*&_CNm2!);U$R8Z~=h>C(b%rL_Yb74$CG|@p6q!i5yO=M|0V`kcD1LZhU z>ejT5njW-c9W~2)siBx)Y1&P#%*f2PjTM~?Ny+E;-g^eD^Z8z0zt`*c-w$2SUVH8J z?CaWVuf6tu=CjtI56=g!G|JjmnTAhY|Hs%Nm)0jQ?tro94=w&0@y4L#i~r=|dy7Bi zp=a^G5q{qM_Tn!PURtkM{5+?>=;wdeA0Fo6xJRbjSe~nrqgS=a$G!9DXAcPezh*&{ zb(3`!geDL+`G~I!Mi>ETW^s!T7X=|mkTF7MDAXe!C<vNg$tYR>?sEcUf{~?LWx`hL zs^ngwyJdo<iy*B0RuJM>CCBwXc$a6#czl1&f-qXPDmk|25c|vpcF=N8;=zh;BDZCU z-Mgj(Z3bijPLuv(Qr8HS$r#!MGFD8?L|MW(MCAzo@@GLoL*~w$HG8@sywM#As0x4W zfjyhV2V|=QgM(4=E!92A{J{QMpXUpL>qKO2P5T{5vf46kTLk(<7-;ntE6s>xiJ?_8 zDN1Oz2%e&L^%*L3A?E_4A`^!R)?(^K0pO^qx5pi-Pb7Z?_RTG`jYcN5`R!`SHZ>hw z?#1oy#Vzz<bcgzsp)q{TG23`>6jOyHNmXK*<Y=d*(SpFjDSgPKpg3nqDX49XFu<4a zZXg;_f{HlgRMU&`$~`SqjSO5T6oy87f~Tb_sJwRZXTNRpES9G?2IRNB>}hdNxN&_c zdf+HJAh47k>m~^9{F|P98YBoUcgo%QZHC6h0q(_Z9i<)W!>i#eWhLFDr+c#KO_cP( zT}kq^sOt^4!q?W@=Aim+#|<kT$;kOP((`Xt1)<1tFu&}p1S^}KkqZK<<2sR%?zo!n zXiop_DN)^8C6uz3jq#O*MwQX&D2Qi&GOqsCMVoUC3b4`}f3oJK*5;X|iU+;jD?Jg1 zb|azE(8C^5XlR`Gvp2w<-@@{$h{5~$E#*^AHB9i_aNP(ktr~(ZQn$JtZ8RCRC@;ql zC6wB1gNF$=Cb+1Ayi|&kV_1=5FYrE5X4{L*Yx8efYn}P+R@%txchkd%;l6M#CWgC? 
zk+u14*7r_;l?|L%@s~WUf6dbo$r#H@X$Xc3B4Rk{_{<xiZY^N--5-J~P4S$p;Pox_ zv`A5rsEMn;P$gE$s+>Y8%gZW7<uS2qscChDN>N6}(H88G72w6myrGNbf}`gan`Rhm zM3*W7Z^VO3U2pNFUu|1@;}Qe_(mqU3pKw>FUPR23FNc7srLr=2UTVA)mBk9VSeus` zVXc~jU;$CegPj?mBG*vYqSS~vjv(ae71i~&(`dPsj%$~sr7KdGAvZzEx|hFbw^ns2 zk)lHJrtJpj8w9?pyoyrfnpI|{Uyb42=(eY(c{Dn(Rkcdztw@|fZ$gwB15-oqu<U=k zBgIL`^mkvXjb8Ajrt#GC<ea>>GmqnkFm1sovt{(M2#tBElbE8CqAbW8*+#RxGpuE0 zWstHm$&N}$Q4vT38flx#b9fRtc)*1*0yVmh^(Gpj@NuDg3RyiRqydli<r`s@R6L%F zxNRBFN6Bjfwg{iLI1NcGMn8K@dEZ)ucX!;ekds|fMMng=(Iq|hAl@mV7RZ76R82i} zLW%7nNX2w3$?O^x8v{MZWwxW=OKkD&tY;i#YS*Z^A0dt`+3`cAD_8K`vd4rU2(FS_ z>Br$@W!FB=gZ^~8TwP!34tAXs%V-6Lq6Cv`Jo1T?s)UJ4e_*o#^Vv%O{DaN3h)S0s z#nB(HxXdXf;cMz`%XpdJAk~-8iBM$aY7kwG5CtC>)WJE<bqHLj1-h{^33Ah)RVTBf z9?|@LZ+AwF4<}Q*rQV+!&>@%ayYt(b#)}F>Z&tc=DfYlBg*T`w#Z@0zUTmdDz{y)1 z+E~+!Ry0Ahys5QwL~jq4-_GvjY_}*UOYy16qfG6VRzVEXc!nkxPqEA0`zUz4VU0B` zoCczcik`JBqCQpW4!y6orUkh5WKccR?sO(v<Y`ma*GT9j$fn{?HUf2Rkht3CnL<{# zmV3?`+U7+q@3DkT4UIMGIL9o;QC?D8a}x^8dNNo@zg1dRCAVGwlZmDhbz-_s{gvT2 zrc7t>cXVxJe`u<DBbFXM?s9Z=r9RQ|1Mla)ww2(*h*)XMEiUll6klh~3L{}*G0ysq z<Rd)UmuBq9tZTq6j7+7W!9i<lZm=enx;L_rK8lCC<|>}*di_Qg+HSI}m{80`R*cFR zZHl{o>LgG-{0-NM=-NUNS~;N6S!wvx(VBkj<2Fx=hnLWi(wX3{xrBT?yzBo*qNmBj zN4D0A{GtOID{H<&cE>+%3up?cW$%^M+B4;)^awh=b3|;DP$w%r-6}~%(mpo)5+7h` zAFHvnkGEzecuGsl=q4-zt`o{K`XdJQ^z~GXj3JC^MThuX!}Y;f=&M3)b0n<(ra1C* z_ugR_Vg;d5=nefw7siqv;mKoO!Ghq<mm3-#VRR!;jYg{X6xR1jy}_ZN%Q|1Fx?7vy zVzsgS>F#(|a;SF;RC+{d34Mhd6%k}ha<743;f)4pdin{Lr1A!j1f5apamb<Yd(m_x zR`srME9u!7ayUFyyc0vNgbz`1E*Dh4eK1~ym}{a_hC#5#L$II{qD$%PG3X|&DQv~C z(()McQAA?mzywA_O^|uCKEK`Gi`u}4soIrJi6LEkC`RQBL<<Y#p2At~?w&%MdrY9G zyCKXLH%zei^n@v5u*?+BafHz8{lORPDF`$Kr9~sLw+AnErpswz60!Fv2-}JN@pqv$ znl$#9+w+$}f)IY7g>_E&fe<VGW)Qi*r&%maA)cNwVGZbDKYwK(@>b8KPS!BN5lzPo zMOP%!^{`HD5*w23$O!Y~D|?`)0jnyEwR`<Akxj6+8p+YWDzri$v?C2Q3@f4IkOxzl zuX{u2*BDpy>v<k{8mClvBTHx|W1z=DYZO9Jh`K)X!O0B=g@|E7!<PC2Qr|1aRxnJc z3bfK~nXLG_ty;G5Iufe_OWm4!q|p_BO9-lpomkPJ{^StL!yB}P@w-Ds*D;0bc;M1M 
zmOwlf{aAIk0vr+GDDxaY))wRmtXZNV!y@}SL0a_D6m@+?15L{kge4|B>kfMmy@^sJ z4a<%8f~fyRaCmJEEpt<r-kplvpN#c<l-_jgk095vh+<l)5rm~<LWL!$ytk|Cqyh%e zBy<*x2`*Ex3tVlHi+j83Bdk=fDTTb`hb}Gjy#Sv4P*=SICI)-w-YdLuhGTP+OuWSC zrBg#CrzE8xLtA)GeNgxE$ql}Dr#2IPl%f|^|Bc4-BXgoonjg81=J@k2F7}x%zU9I5 ztNN=l!*7mA8p$RrpBD#VT3ye8PGE)?9ib)m-s7@Ppp~8`C?|JOo8d!8AG!`)jRFJ) z!voYKT;By;UnHN`gRe?-elK#ncR$q;_TmXcgFU`cIDHy3y9cx_Thu}7(kOiWHRf@> z+*8!%xd`<XMw5Dx;yy7+3cQBWFpqka@Otb(doj&CE#6T2Lr>!Flh~VS<x{>+cHM}8 z9;hv9p_9pxKI6oaY|<lI88#ReuwVAi^&n%S2godBe)Mc{S}yq^`hhOZX7rbperF=1 zVq(RgwWKn}(j}LdLytv}H)GO5Rx;()96>I}^y>?5)PT-_72(e`1gN^6evbeOCz?J& z`p0Tz+sNeDeuH+iH#LmmEBEB9>Ux;$hC5gSuwF#dvsl9YdCUzbJ7VK{HbR4zyC=7| zjtI3lW|z@XCh|pWq8w(EH|WrUI3mR+J9*^b0%(p_OX?DPBsJa3rkU-Timpof$yXhH z%c$3cZih;Uu%i1y%jjiP-kpCHmh)r08S#zz?XdnNiv1G=@+z%CR$OT=ViocAYVkn{ zdZALEFLwm0>rata<9fv;z_>XOILw=Xy_mI1R+}%!Q;M3=Og@b38@2?s@VBdPcXB5# zKCJ8><#rfJjo+o-fCbdglMB?<Bwe2gX~S}&F|bOPV|$^W5{NC-l)x%kNh#ekk_07~ zWEwI&AzQo|N){#L#G@;)YIp*UT?=l_QwA=vKLr~f65B?v6_QU9)Z$x(BqVXn&~t1Q zra|#w&CQpi`=ME)XaETQD!hj68>-+<V+%$%Qk|HZQjS#K6;>>Qe^CRTg3#?QM+AMX z8#$+li})O?oeZ;gW`s~)N)yw_9fk6~b{!X~<s(qyDM*W}4Q9@<+F=sZ0oKA6R-Jh$ z@^96N1i@dO16|2u$?;;h--#!AptA%Cye6Z5@2rW-v;-<jDD`M&Kyk5w@jh9G)n|xa zGYJIy!}RVbL8zlJwrDi4n);`@&}AbT;<(@UQcbIU7+>lPtkK0tEBCZ{)2y_Bp@zo! 
zv7X>cmvKQilcNXrW3UG8g~XiFw|j(uOeb}PrU;=229nt+eY*V&0rKgM)nrwQGW+#H zL8zT0s}f6TuNG$ihPp%45|;NqbUj#oivX<!>R|%ujVmd&((H87meMc!M^GEEttcy_ z+hJLtkoYuIsDZ5SJ9y;2eyq`LRC_-{dO#~+5w1l0U}2+)L9Vlsx2LBd9vY*YmG=Gx zMOVfP<*v$5fh|S6_m!;r(WHB-u1hK_s+7L*A2K#Ib<C~*{H1$upyx!a2Xho$1=2%_ z9qZ$%IMfD;v?|hOWm!FqN~&A+n#~vxN4HY!@1gr=HIrkhqn!_6=+NWxs(3mX^Q@do zShqPLf{x~dhsR<{V%wUDa?k>KRUB<dKmk&e4NF1Y2DT9G7=xj_%7*fXB0ra$Ze(0^ zD*A!H)`hNMgen=G&gy44X@?Eqe2USwFo~#}iicIwj7K18ta#F-8@`drZ`M1mI)bfK z25y>$hj$1(4<5Ovi@WF&JTY&NL@t;DGpQ&4k~_cIwfL&Q_<w-;MMJ>vS0kSMW;zSo zt_FG;HNL*~F4ii(`YpYYhJ8j|TN37L*iH6~YGKcW8|<l}>}k4+XXT2h2n7qm0(p?o z%!xslc+l@64+ejRV8e?+a<F*vg}Qb`J^6uk0^-m_b)k&TZC+<%P$3d!__u|n*0rM( z8-rT#>=G!{HwN7V2Cd`U#Jg)QV#VLG42!$ZqL!i-u`#e93-s&1(57;>^H~6*Z3i}< zQj{I2%fuK246=;67>|wR&Akp$%xf?QJogr;Ez#+F5(m613)zbdX#IH7oR(g27<eiD z+RMiS;?d1@vg^_hnHSJI44a!Ko_zGL$q_7B6uc=_A+}3A`Bg6|9yp@+wGuWNA~6A& zPEdIABc-6`M6zpO^3XW~2E8}{()yKJaxcD$`ff`>eX-s!Z%(s;+gahYAFvMDswH;^ z_IGYc1_L(H)oXl22v64TQu-%Er@WjSU7o<*t(H)iJ=9uC$N$901>RMjHstS}UZtaj zOaeV&+&l|>3(Nf&+h!~>9k2Q<mnG<;ntD!i^g;pjF^Y^3?}MR;-|dY<AwEuKAAwHZ z8I(1AkqA-q@OJFO?D6YpSXy<FDZ@IfD4*#4#oF;qmY2EiNcNBtI%X8fOz$mT9!f^0 zzvX0OT?OmbH^8-A7%+Tnh%qD)FnUA%@0GLp0FUJq^_d6hhVOm$2j>0JJ{b7YDv`dA zMHM;^dLR;S`(>cQx135ERy!$b8PC@qFj;Db(I7d<QyD`_AbR#*!(4w1p8IQU;ZLqV zdOX>^Dr_Q*w!kxMkT$s?C=Xhrs(ZPiDXzZnG1yF#8>kcgi5dnUZ0iMc8ngyFtAo@I z`zvFp!luwzgc%adDQH|cqujGnBWeNZ&2SDdf30<JiL58tIykv!_*G2WF`?LY1e{fe z>ZtS`xiI*No-4ldkxMx_hc5b#JTRnhGFyb~3fp?7rhbDebDao;=!JVGU^a}CqSkB4 zrXjJ&`OuJlJzKB*J&S8sNXw8UBt{JF*WHrcSwU8x;X_A<AGZp^AvQZb!{zidE7>%3 zh$z28J{mg1nS>;^%~zjNe*#bKHSF!skSbxE?Hckareih<!juYZF(!CsrOL+@GoKXQ z=NpyD$K@Dj8}nPU*}M|t!2LvPkJYg~k>{A-VlR@{oU+GNHLE|Fc<Wa~e(1b#s)2rB zYZqByO6=*|8L}M_I$z~7?MNfrP4mQy14)88Nft>=W@9wQOwDcVvf3I7``aqb50s+T zG?P{4A~9+pX*Q>e!;48+Zlc&amJH7AD}FqdjLY5X+;*8s6>No-Y|{P$YmB~(#mdvR zg>Mx{%&{l9oU;UbAJ3CEEc@~*1$_f!UhX+bLr_jbRTz!O4z%1gvt0m-+i}&P;rmG~ zmi<@=<_2HA!<XI_A|Z4kFbTh5bB-~&M}*rvoE`6hcYQOdwqzu127@~x9)0Axd)JZJ 
z7{pLEK2p@yW^%^TC+QwxGqU%7<+|JM=n3bs#}df6Nx-na814bXw6bVYF-)DYomV2f zx)iz8=2I*DRBQaCY>;d+Mjsvz+d$W!92u6M`74%xPtjG(tw&EOp|Wb<vatF(u6om# z7s#sX?E}O2o5T0l!wAT4UcMwFnyBw9L}82Wd#pdJTzJiRCIn3qo4pl$`hLYbI?}eA zb@a7X5<dKKwZC(EcnWMLU-wvP{v~veyTVrDF|9`(r3<YEbHwnPAJP8SAVmz>Jv`>V z3={!jtyA(av@Q#wx*i6Ot1W18M5Sl+@Cf+A@)aOkyp*SySQ{6IEgu~nzigSJH|dra zHyB#a;=LyE5$GJLUVLaetFr?2E_WxSMU#@e{$u$Nme5j0q)9LneTJkr&^5CeYhyFr z1X_d3w1Ul7N6)H1X({@lyfKIoPyy7^C-o(;C^6hkdH2g=$iMT|WJG>!AnOkDQC>bL zjT7aGM9NPo*!Klg(f-4Dncl~iy>icUdl7+(@1u`)^s>@5_ZSYsZYeL9_U~rgi|)~% z_<}rKkmR(2;&~4HLgBq7c%(O4sfm-$`C~)*EA@!cRG_f-JkNcGm?;g86MLL~Ju8`& zc@(Ny+Ru{cor`E;dF?aS#(a4&2uq!9sf@daCj@ouhC0GRI?tH;>Zi2B3o|0{9{oK= zcQr;xMRG@DDcyaMq>aeUi1p2sd93d|jTOvkKZV<r=a}$34*QWti;&P3%u+%|q(&_T z3A32K)Wbg)C5<hPoE|a6S&cqzkfL@VI36f?q*6&U2U|KRYBwnSSRzi+7$GRWmBQ1~ z$*`9(oUCZrG7J3JS=f8Stjp`<l+L-rSc<fT<;T*yV*xsdmt0}IgzLMZC=@U&T3Ds$ z{t6GI@q1>>9A+6i<c$TBHn&;G%Y};G5GyE>m!K)RB9Z3XxH33f*lz^#b{EW&=Fg(< zvVDBVMly=^d>Zm>P)j3_8D6hodWm(y1dgDonC3>5C23~G5yQxVVXU?qi@ycriII`9 zCs=jI1WN8;Z<kU!{5(ofx8k+>X7cLDRA;mdHBZOJg8TErtJ_ejryvz84~De$b2d5! z8NL|x#j<>{FMTniFLv4&v-o01eX%@WY_~5~=!^Z+7n@{4&j@xEN?FAyXq}ujifh?t zY&DWS7M1jCkMO0EPj{5j8J`pTsG(iZPJ851X=N<gF)Gm+%5tDK>`|Bi@UD6lJ7JhH zz(XnV;ZoUOM~AuIy3j1_IU#usn5#|bDBA)IUzv2?Hj9OII@XcrZBu!KOVD}SL>}Q? 
zeBNbZ9D>uMVUkqGB*)&{Wn!Fg=E0if2$pe2>J}!9bu48I6QIT*_GZ(47TD~Ji@)oW z;X1#tg;yWLSokE@DxGJRBcJozpkyQsd)=d9qA#5<@-g|V>l=Swlyu%F&UL=(@<Qk} zzLBy|_n4U7WOL^+@!>7KAf2m&97T;zO1dXUAY$O+a2!fAm=_P*sT0klWOTN8@>8;6 zbe4GcYjS*aKiQXrj!uh?zJ}bm_~WH0n;cP0nWkovR+KDjCZmc5#7_KCl1k~9W7)3r zeg1q0Dy@{(6%kL7x`>-WF}9V=40_6hK}1Gfv1f*P0a7_VY{hCe3wF9~JRWolG$%iG z8DCmvD_aCgH5d+#M%q#9x1`&cI48?-N`*bnwua>y?$0x{ik)JjvW%*(hwf3>ENn7t z!seh#PXFlYmqxg4^+Ai!-Qm?2F(7pdfG*FE_@^DAYjG$R!p(e~mLVUHhV`j*-gD7C z`5gSdHc3-0(~{T5=(-`5(LvCX>tj*}%|Q*9F21@5!Mq-D7<pj(*hQ$E{HuZ9YmNEM zOE^Pw^|vH@>_Bl*3Yj*xzqtEHvV81<0lR<3-05m@we_C+GpoBN@cq3Y`CXDrZ?r>3 znC&;vIxsP&m^GwL7G50!YE=N0GXi810=lFpnPctkyzvXVGctg!8E8})Z9>~&7(nD| z4x()~c8YZLZ6uYJRMx$dik9#*t9S2+++0qw9YzLlEUoN#&mO9-x3WK~!1kk*?j3;Z z4yc52{-9F&FGOks^VwYRG^Kx36=3`L58Gbkkp4x9mDWNe_^!)w#I^&(qpV)=WeJ>) zIytsU`vS3SfOZ3mC9kJrl`TRc6KqrQLK%Ieh?Bh)CG79|QxK7=n^KgnnLJjUV_5`; zO1ZVRMvgqJFv(71#5!%53#Yja6F@hiIM!?SDD>DW7D35LyvKeh?iaNk?s|W3OBxno zYYRypmnL3pCijo)7sC8$8oP!(Ic^{vZCl3WC4jQ{(f6tmR9RqSo<^VYGqhq?5ctHB zf^JA!^iOZFCjs%2l7Zr9&E%PqK1m>Z^4s9u3HIc-FBg^-1oC}fZ9yoe){<tjx1=wM zxL7hIQpcLo=E5oPeNdogC{Sb(J3d1MSK;^+Rw*g!Tr+uMd|c{BXQ1=j%Ao}Z=!15n z1@n{88O_L{_Bo0!H-JcXkJt41-=(#WPaU*;G**i?G(&P^=<`GE|A(wbO&Bop8fRxC z#`z=u5B#W405dLIu<SaH_&rV$wyA7}8m^LeCd568lZ?gVJ-7H?S0BIIp5L*54!6?F zUqUUg6+1wJ$tXoGO5<#uo8L=lC#SG2^1)MJ<}vJ$M|ih%tiaeMpy#jB+uspOsj<r? 
zws>1<ZV9O=o$8#2n3et;K7#UF7t7=7nFYhVVv)8btd756hsIq4?!}kj9k@>fACBPu z<{fY$e_m{D;BLLnUg3co8r%O;Z2ub|hvnt!PwsP1Xz!@Gr!chjn9Dv?KSA#3L1vU0 zyE2`^d<dsclQ+r|#W`2V2W9cjgBVPg@%O{f;W9@;ZQ$<*fw^^DXe6bpF`7vY+iKZE z3tV?o7kvs1H>IM%mC6~R3%$R3q!Ld8>!Z@6?Bz0#8S>Q2^-Q459#$rOT1Hni!gTD9 z8FEs-^zOxY2<|1$ImHu9W=`yt%_!F@9`x`3^5s(2`z#|VYAQ12cHVZ@OXI&~y)*(I z4)s_1gy<CV=ET17;L|7K$gI)(SXDIr2eXMhZH7CJaC)ej{5-L*(N}v%pECOHNhH$h zX5U^C?!U=bz1`eqZ%7&4%#!|lB!6lqMddNFIP!3LqByCUJYSxgbsKvzzq5EFPzc@i zm2BV_N8pSz)NschgY%py`sqEk`QMOl%1gvBkz_qEBy9M-glm_{><5%RgF7kLpGQu# z^)h+ufvEINuVHmM0<D8{7+2d}#|V1nG9tPZM>%Z(=GJ+wp6Iet+ospZuMfm!yzEcy zM_**A(O&pspNhjlJ$pHVE(EbrNMQveTqe2)hbFNzS$MAPA)du-hDFiAVDb5qKmJ;h zNO@mX*iS&JQp(+O&#&IYWZHuR#jDMv_Q3(p;@Z)v^7JOxx8VWD;!brtR(}1*2L+*% z8qz`i3{>ukEN0zPO1r?EaOcCx5n8_eSvaK&P(Y)=rjBb&sDkz8P-wdC7JC>?=Si%_ z-tS7(Uu~D!d%G(Fw?~9mKQEBnNdwX{F=lX18CvY2v>(SUh89N{+(k5TGA0mIFm~4; zOd>B#8X~?qnjD>^j)qS22Kmo$0u}biDzT)bn9h8VbeWtYT1Jz>lheqm@>uOR*wFch z`B|W>Vr;{8Z<ymT%+d7VJ+=pdlF+0nfmi<oGCL_Iu#q7vk_LyhL%sMjd-Y3<xq@6v zis<=XC-u6A4a=Kf@^OFk5XODg-Oh0zNuL8F8~4d{5ipGVcs}lr9wLQ`gc7SibpS13 zsnOm<jC%sR`_bJu@<T!PkNhWW3)duZ2Mr$;CIj$dMA5FC3CF|&-T4K1bh1u7Q%<%` zeu#A#?Gb|EX-+CX)*KuVQa*)uAW5uv!h8dpHJ3u5A0zaSB06eu5U%yHn;?ti1}tjB zgt<xdI5^zVbbBWwz5gOPQ_&|5$(|4#BP}1T3~W6n!%=9BkM+@uM5=hm8Rv`1_=~)L zTQA_gi`H}eW9lx7z!`0TJD;6C(!JR7c`m}K+JckJM0yr%{^O1d9K~q?Kka%v5O$3W z_8dE5rNh{XaJW0Kdm8q(?(y9-JmCQ!wGIm`^fcHPE+0w0nc@~{GpU-oH<UJWk5gB+ zi6Vm@GK(*dCshy4i-GnM>~wjr@ckczA9Xh)T5}3VsrJuF>qDap-{XGa2&@e0lUqKf zxG)ilGr<-(!tPz2!39PLYgWhQh8VU%aTVRbHN+P9zu4ZY$Gc?tw0Hhq1?%(@v0@zA zHhpA_5ml=Ss=$4`KJ@WVI|hT!|CEHx=r^f}DbE^^;evJDx$B6dCPdi4rF8N#3Ez(% zs`+Yx+yIjTqZUhh``PY%(wXHZpx#eB4LAs|HL^qfmDRn;rWxI%&mw_kxtH^@k@6lW zp=W216EphtaNf({dCo`9RYwiF0ki>iDxrO5V!n2&fAmEZHFoAhzxkNxXQoc>28vHV zz4<ZPa?)GqS{#8bl-?0Kw+HS%LJH*c<`ADX*NA@hjc^JZcN_&W-gm(}uRA@4_dC?u z+f}_MVkIW^Gn2cZ)wqgqy?Z73ai(=z=L{*OmoOi34984iw!>INA9@F^Wx9u-0EW|I z9%uSzD%M8{p8sLOt%A@{C^N^Hk}`Cad=S>zT;W~j{&Xlx=niZg$Y;v&G0E`VFRcuf 
zenyU!(d@pYt5PnMS4<}Q%D&>I|B#BxcsTE#uAGqk)P6WRPMyV6s#yc&f|;96-$B%` zTqd6-zgG5>8<14~4i2poA5MU&XMDIHp3@$#$cS#SU>=7v+avsF^)VfyRUyaP0?XYs zEAWotSore_*yK`bO}{yNZaf?%2KKND0@-yqh9p*9ktGn>qmRqZlR1w*7lw0acTF>1 zveLM85-@A7ST>qGIqMrS@hX}4Sf&W`*7Mjfaakhy=&?cKnnbdtzE9V!jK4(EtW4Y! z_jr{|{9zP%e)f~dSDJl3kYuAMXG3rD%bdxwKS<FNL&Y0f^2`$%;!!Qx@x)m1vp2|} zPZZ}Cyv}UymG3<aV}GVa_!%&|(ju$})B&yoVr&-QqQlf%DZM-a!^RB52-@oeshu0+ zyn*S$Enl`#WOIjg3A1plG~fiAdpMDf^PKFsxPpPZmINJD%Z|Dge|DOel+CN#4S$(e zZzIAc!8(NZ6yloynpdgR<L8^x$wzwu{jZ4?EvqEc=JoH<`yN*!f04}|gA1r{_UvK} z^vxdEjffJ@<OpcM5Ow|aV2B?(X?DcY<HtM2Wae=au_S8P=6jUkKo3fi<h<U1s+PUS zP{TL#qv)c0JV`*WzldZBw}6XUXz1&ta>?}W^%L3b!c7rJJZ(8fE-cX~3b9oaO2cbb zv5Ds^{k3DArH?#DGL|Oxe)=9I5h!N!6bu$Q)^h^cw^W%i33;%a8||LpGB^jV9+MKo zR8mp}0V<I$uv^{@_F+{bZijwFdOVvf&YnoJp6%a#!)q8muNS^9kA@}^+q0SZBd{=I zU4YM)o!Fyig4poRWa%&lG^JFA=HlL`sl0Z9%$|bF>ogLt;DrE)#IVvbEB)uU<npu8 zW57?_w_$|)u~!9GUpQviZ2<H#?BMbTVHQjLS^z!IxmbDpb_4F8`1ZHgkK&>p^EpP{ zY$kcG40AfxUEjVc48#^DmyJ^%tZ2ZY3LW9UWOuDo5#j#GRm@1fRk3b?^4;Z1bRGhw zN`|BP-7E?B2kh~sbSuNL;VVRr7#qH3F`0*;V}mUq^?!<Y`ANvP0Mi{8Rz5&Q`7~gj zR<==zu*pR<|4Yz2SCQ3-xPl<=PaY-K>OKQ~i^ijj3ZfOEZw}Dkj{HyA77MK8jp_vP z?h*3X!T8X5;Py%LCM)^1I$pe9N5X5yChx=XKYSMO)Y;C?XWpU(Xi<mLob6@yIBW7` z+-u_qqQAln#K^NFd)x(wevD@88WTjL$u1jFN++UE%WLhyvgN_m^^PB_0!Nh5Z{g)+ z+fuiQNgdk%p)ZFLuey{Dzza*5Y)oXbAuFMKm`}9a9Rk_NErkbS11|Wv$bHZE%46cO zaSIa<R=(gG@V^odRs?;Lb20Sq;<5Z4^78ZL;<FRT&F8-pH;yM?EK3#_jwkKQMx{f} z(7@SHPd*1r1lE(%l4S{zxbiDSwV_wGusaKG&8!siggZ~X_9;2wPEBPO4&40m0d{I^ zj|}6l^H4mr#3zZ8yT91pOa?4Z5X(L#1<MsfR&GKoaXkW;#<@`Dw|m(wc!o29;8~O& zoGL*H<3Zk89^bX}pp8ZGKr|IA);G&jv^a<J&MakL4d4p>w~#<byBZ&XM9G1xTbOgu z7Y6b;yF-bK=I~*8(jxKdt@CaTyA@C|1qHhL&C!r~p5fw%@#LWAzTTIy7`mff-6eVq zn9nenxt~O=INiH_Uq{k6z<f!redIqY28yFoNJ8!Xn$d9UxIToN8~4GI=`?V;6oNaE z<^yXKq9HIe2Pp2<O@(&A(YD7F@h#a{Tg7gH{dG0C+hMew-iscioAG2fYuc_?1~OII z%$zEG3g&sV|8vUuRuq38IBKBJv0V6TeCD<~cG4n{8ig=a$-CLrMxJ{ix>s`^=5Mg) z9E|c~w}U*9YoQ9>dLaqR{Kqd0j(7#0ZLDtiJmZcdhMwL-y1&?W$SY7ZKId0#rEBrU 
zSVu2U<ICf$l9mqhxy7JQk8`6~OAWv5net*6+2B?sI_X42x8wHHxD*7|??LE$93(?S zc=Zc+(X|1!`Bzycx-ZDumBoD)bi5H>JqrEiduzIq{JAni-0&7jUlngGhPlJ)h8&K= z9ypnQSM-{B?%BQ+?<<Y@SATi;Jvbj9^fcf|sy5HW%Xlc6xK^d0k*}>v#<OXa4o}(Y zeqtdU#;X$xBOpT<&Q3FW?QZl#Fs@Cam;MOygw`&>*kKP-n<oogqPUA%KaUAmetpXw z@M%3Cbuf_^R>x#6d=r`)?wK?Z^K}Yx;_eyTBXmUb?}c!ssu%qOOg`z__$Ilyda9_{ zO>#)zaFlbekiPekS!4#m{k@n%WahgOum!^@SY0>RM=Q+rx^&IIyVB>N-(bRy9!s=q z`->A-lZv(K?k%ADFAy-sSJ%$($4HI{PjRScT&TWS?g%<4Lv9Tv#qYMjF7tNd2+4UV zB^c%adr9$+5u&o0Jo1l`y>%a<vHx`;^wBk>=^r|_wHG&3#XQ4YeeLjM*cwCFl~Grt z+#749y9lY-aKEV?(hTt~qhns;l6sTo1A)h}ymsbQC<fp6WQ$&8MjBtYPz5xKSq$;3 zNZ3X*6vOC^$*elFH|A=M@Lc_Ob!M+d&Jf4H+)u^jRQo%%fw{O5T|(&va&==$)(l@w zN@(=jjtbo9tiae8{udSa<wcUSDbG0+m6PVn=>Q&v(j*>6&^|njq~Sb_rXeiUQh`Sm zv<+EG^HXUH57Q{+VFqpHp^{$YVHQ2d!+6@nLmjQ>p^+X$=sIDMobxSoC#U7nZ9FWb z8+kaIuH_-z+B_UjYj`-3F6QAR>fqs2`WO#q(3w26(aAiVMJMoZ4lUxLo#yj!0X6Y( z5!E1cvP$e<0*Rpm7<qp+L*qHRjG;X^x`Ls>99_xKJGG3LnX<Hnqw5*^Jx4b%^h=Iz zX6RXtZe{2Rj&5h@A&wq)GJF@u>ls?d(Gv_^&(S7^)^Zf$C(vhs@;aYmF*{FS2MzQQ zj$UMF1xGJ2w1lJ03@zm7Rfd{4N`X45isLsJc>qWGv2{F0TNt$mM{hDTn4@hBy|aS# zKs!TQI4WS;(eF9R&OGRs9A%$w(z8H$o!JhM9^(lS4BfxN$#RKg_`f+hnxUIH8qd(R z991xMIY(0&>f~q|L+u=8SH$Tf991&3f}>dsE#at+p@keZGStLTiwylwRUFS_<N+Kl zWN184Ug^;+)}1F<85+RR@eFPAu<B1_=uaG-#Lz1ooyyP)K(Qf!&Dd$S!pT_f2{r`D zVHVj#oL0IQJUA)Frxu}{1Pj|oksE>{{FIeGiqa6I^HZMTl=)c(jgeDI=`2S1dr9Hd zKVpF3Uo-@<&smx8q`;R`gdr%JQJf%U`ACt5pm;x1ijNX)2vYbd5k5-1At=>Pxx1WK zTY)b`{FEC$N~$3!!%z9rN6{IAEPhIpk76_gF$>WiGR{i(`$!f;P@$h`tB;ar2pa9D ztoBg~4MA2vWeKOuA8pW#=af?VIHREbWMtj+&`;_*7Nhxf<Y3((aYG%sT6aN>#RzxX z%1T{7guAYV8}3$2aRgoOWf6p1jREK?N93{VqU%b4>ks+dfxm5&QrEVuCO>cOofx<p z%Ta2rKrF&agw=?yjKsqNn6a9qY#Shg^ZsqAEO=sDe7_bX)Q%K>K=2vhG~fi_FkmlW z2Vm=J^7gjENXF@rSk7`lBp{8D?yt=VTd{^E-1-9L8P^c!Ycai7A`vl`uyQl-6M!3p z?0PLpT(O3neJzazf4rvb$2d^B1wkRe3YZ9(3a|m@09<Rxn12;U`U=?q!dAduK+_uX z#=lC*ob?`Z=U;`PHZ0ThCM;mOX%`v0T~Y9Z5rQ4|Dt0Kg`n9M}6gx?;ONHg#>9eX! 
zL#!@?J(TQ`g@=C%FzYeQ_=kn;+ny1{5`8yI*b(wFR#M?6(z<<6!nmDS#&CV}BAoG& zY}>--+fkP7mb7$YdcFTl&&JMO^bnxlLq}O|TiC<f`zR}W3wwCG9%ZF(VGo#6$c3G% zv!iBQAy(JB7B}N`_dKZj{ANc84lD=nktBWoRW?^H9wtq%>jx|U0CUkLPgoX^fG`%- z7hPH$<0-oI=&4Xo*(7&f54n_o$@?`8|2G-(#t`w5cS*$?8RlCX*fx%R@`YB{8Bv3K zols}F7sW@UzSHkANyCo9+1&#ibvM0{EN;#r-ZxU5g$E$p%!l8*b~x<Kxn_7y+o{S= zwGTZeiPqYzgZ{DLdEonT6TjndfqfntSSnqpt47|a3G?rC>{_buAtv2~Edwr2;c|~} zEZO^<8GFW{s_2OoDGhMMvCoun6>vKnrOp3<Jp8%~$_+RBn3>j2Rwee49|pOBp4&yv z{5#&sK0uZZ%)#vRW$S*P;jYF&my|a58r%bZizpRFng4<!Dp!JG!FNcdWw@_K!{Od? z&Afrj&3Wi8j2Lw+wrxfK<4P;+TlSewcr{x|Jf<d$sBJxx;1FT=J#Mp=)pyGSH$ZI5 zSV|qsMY_P^b{;S6j8Ek8(H-%+@xZXDjj&mKS>l~KD`;ET!&5|k<7GHzAE6Ipy!Kph z;9fgRZDW@eYRHKl%6L|QA)nd@A{E)CNax)7H-N-!8J$idc1DYN>15E(zM}mZGInQd z=(LwRv{vxT#IdtLjCh$G*{Kixe0>M~sr5wM<q`|lljnE!ccMQhH*8@a3An@96W4IN zfz|H;-L($A*ChF6jys>avGbsfOEBD%ac{@Lz7)g}k(>rU0}}ScMP*(kXtDzK^*lVr z{tpkMS!c6bKOJA&SxRZQ-O^}@`F|5!cbn&bSW54_i9F~E6x#819rKmYr$O*wb8Ex^ zS!p_A<+x9Eg--t)z1KRj;LU#GkagtMHxtA+H00=;eHFU*+3Fd}T#UFx%f;p3ReUo^ zb8*3onewvTS?RCC;&$ukiwOMfRsm%y!-M;FzFp?slG=FYo}ao~?Yy6*Dg>&$_rj;J znXe~+yERXTbFgrBxrJTYc`cG}`AX@8-q;#3sd`yNmI1AKA%RQ6t5+aGufe(HvCLAR z(ih@4CKTV}6X`X_teyZVE5!3FJ~Bto;Wr_Ug;!4nx8YZNk5FB0F5{}Uf`G62za@R& zdMxmVXgGY=ywyMOQHC6SOA$DNAy?iS%6t*M-X0*HZYIXJ%UH1X?Wy8U6}k0xy0}C| zQuln#hyi;qvf%5zAF&s9?i(7-E@xnPLId`tp*+K7=Z9YY8w2F#zT9MgVw9(bCAzld z;Rz?U4uPL*{Qmy&#Vc8-vI9w44%Dsm=1Th*E1kTOytF?xtOsX^XAChbN#p*cnP1uY zEszLX6Vns@klyJb`x<)(7kd6U!vDQLnC|zp{iBnOnt}eeJ6K$mOlBVF*JIc{wvqfz z<`15-3#iY7_C+$rj=Ah8t57lM{tES?tuW~CRb&fV@s|Uip7cUzGF=GN`{iE_dcqGz z7o1TbXMX7}q%|)X=lhu@)XHdBU{W#eP}kjja?d_4!X?<N@Bm*G>@j=?gA2rNIi30f zcldpd{C(9GlF2*sQpur%QR3Po<lMnX(RqYiJD9=`Jz`1ULx$vNbTPHT6nEEbfYSi; zHJz3t>KFg^6~uX{pZrT4!u*eekCsEz;x<F0`42ukwTF?xqxD_F_gBm7u_!<GPG9lu zxA&}mCt3_#3oi>h?hrQ|BBu^3#D#~5_wZ4E@DW2Aj!Y0ANFuRE|91MZiISa19}m0? 
zi6KevT^6@)A<}#N{6kgmKmSPSx&uYBgAaZVk|eH7Byo=`_FSnilm*U~<HaGz=krsL zf#jp(i^P~cWOT!W;^DW+>kT8tB{Ae?!-~G&y#+}+atRqp_P`p+`HkzyW<+4wN&6T= zAGDFW6VtWtRx)9PA(ZomMabT65gq}&59sl_MMwZl2dqWf>A(Jwq3;(syWm4oe8&Y3 zvZDtzF6|H$Pow98cNF*fw-@6RKMeH^%roYKhY!Rz+L&}*taJ$8DfgcJ-?_Q|WcQLL z;QBZ2*GJmq0g%m9yusXXlP|d^JFy7jTiv@jNca@$noXO72z}D(Y4Ch{or`Nd&L?pN zil0yZ+t0VMlaH!_dO!BC?}KZN+~b2wLHzW_6CQqabY`IITbcc&l|F}WQ#&PD2}zD8 z^G+s&4Y`+-b_v;dGBM>PQ20lH$JrY`mFzp;<6kn+L$8ppPnvUC#lz`FXGewEJ-eEF zb?r%??M$NmfO;?g%`rtby%aOD!zO@j<Oppo2(^@j*J!cS!|T5TJ^L>E^pxFF8d6F> zWdxLIPbsC1z;RSn%FkobsRwqfUY9iJB_9XpUj7&EXa0-(ad7u0Q`U7ERP>iOhW+IY z&40boZwFbuE>-+3k{n#86?bhWE$fC3Y==gKLDg})|AT2v47r)(YLa282$~4H0PBQ@ z{INb;95REbUY^dJK<i%Ch(9bMA0a?2=oO8aI-M9_$s<p{@@f<=I9cq$t_Ap6fLNG+ za>{;sg-ptfB@Odbu~^L{SH4_ynWb_^xGQ&%BcdwT(?Fk*NVuJS@1V9%NWT9e%#D96 z<eCG1KCU@dAnMl~Ll=_Q?ehYlAV`uUDu5}3Y)7)#?fpFuIATSyPdl0a<PFTtH#NP< zQwtITgKwh(uPj&@cwZZkAx|v`=O>};lWHsd{d3UvJvA*b_ZE=2g{#Hl7s;Coxi+}6 zkZXf}PxI5z;-?kR0(<5?{fsOys1?k=FRDZ-lNN6im;X)zo|z<O{7xQvrXcYB{~+<- z&nU$Do8-(h4~u0tiPotQ14_w6=Zq=S#``DxKMz}kf4+l#7hveS7U5~Y^MEse6G-c@ z$V<y0ljXkoGq0G>5<WdTrb;+YIbFu?l0(DqE{<&M8=uAup%8a`u)6+1Z*Th5;~f%C zzk8f4YgANXD$XnMurK31zhb8OrI26hXvKk&%&v7Am6Bb_a^!c+(m%9EjI_%~I(n5< z-So;!s($wdl;B5mqI^ZULW6P1+8aeD+$(DG<D`32?1*8YFul;fGld&_f0<{oG{Mg! 
zcJk0|k72%j?rSac0+ZQIeeO?V`X>fV_%htn$ZCJfrzZlkU^jK{oN&{uLhjI1b){pm z-mp*hSUl$McPc+_%1Fj5wRt*r$#N33BfALrC5mlOe}(+<c8?V#@l<pbxO^UH#^}G@ z7HunJbtwTm`-Gf1nlDv#qu(+A0~S;{GwdFM8USt83RVa9+glf1k|n&>0J10R2&->R zYk2h`tmpVR#}fz&*?cNdEL=hAPsOqeotIAaO=cBqEx?Gx;V6d{?mB7lx;%?7dK}HD zVz`GWK8S)(UHgF|Htu<pgkRHYhH8JPHk23PmhsD{b3R~SgYWvFf7hs4Xd$XqyqsMA z;CM&|M5S()hWzV8OWf?L4*R_qFdXl~ypCpGU|BH{Pp3sKgwYN|m!=}s|3d~bRV4Rx zV(1@_;LQ~F6CoQPA&;CcV#eLU)A6FQnVdhJB)++h+&Zn#dV=*1HgKMN+;e-T8?-|- zoy=+MJ1bNgA7|iaL-?(bP^b?4M$NZm#+iQI*`cgAwT!EiKVKv3&h+oeNXOd5TNjxt z>12A)j1+SAOnT&#E>ye)o#1GP+K^Xup$ncN-9IuryV^SS$Ige*42;hb^n3}fWkEg+ zm+dpFOeOT^BD{`cMZ;bD1j9G>V*D<H{z(zPcY~F43oFtPWW&?Zm1?2L>Nw@3V*&qY z7}r$nYJ5sOl=j??G>VN5&G;A8SN&6Ev>Q9*MhnN#XCEf#KT-_UK8wDnW5<bj<x96x z@B22rS5+X5<4L}}?eDpj9vp&`erJb@>LOBlc0k(MZFn`H!AiYYiT$;G02^(#*WTk# zE2FcPk!@!O4$Sgdc;T+1n-YB*hK6t8aC>qza?v~5?9d$TboFR*>+Fy|tWf$bTj2TK z@M?{(mBe^%a0pM^xRlH}ml(tF>tEn7sg(WEsF~nHE?4d+|2mf_8b_1Ha}IIOcf|a0 zthf#zKYpxISRuQZKNAt)XO=KW`p^S2Sam`%40Oi~vg2d5xb6)3_TwQ*e%|rmReyE; zImR#oye?gkqZ=JGgADnkU(tpulElxou)2m<=b-{@vDx`MT7_bCX!fm(>_^-RU&e2< z^*!#MaFbmp8Av~z&I)BmbM#Z7zC9n={7Isid4<$}G8oh3=TF{BI)KH{zur6m6c>&l zXJM1DAiD8eR+IM_J`a*Pmo)G}l~?tq&jKxP4cd%R7wtXpJ7C!RS>UIso5DL2*wiQa zJp#B{_#PL!63d*wh_<O*BquYY<=%dFG}@12e36#pRlgZi>@w{}8Fz86JR?}d{&IIB zIeb1L!^p?kex(uyng0~We|>ZL3s^`yuW!cC#;N4%PvgWlrjpyA=B4c%iIUl+C|t<g z!tu@vnXfdXR16d@WEM1&%Fl+1gU*q4pQ&S@x&C?~GpdB`LyPG*m&tdZ4HDlzOTs@- z#k^+UnL-s$=0Anu#ia~3Kx5#d^Z(ev!F(pZ>|F7VJqc;JKJgn2H!gY(b~!i{cl%39 z=+X-Q1rOU?ci6wgcmY3qcAI%v*a>1C>yid^h0oT)Pe|N_V~pLTAfH{hFJMKpWl!?O zMwzn>F8|;lnGg-=53m5n0V)BD0P6vJ0UrRG0quaExRBEypam2FDgbi;KX;W0mjGLV z?*<$Od<M7yxC@BDw;25aIe=1tJp^9-Wk7ZV>H+5fmjFKkS^?d02+<#)1Q-Eh01p6c zfcbzWfHi=vfOi0=0T%&30@?swyU2tTz#u>lpa3utFc+`_@XszznQ#!uCx9z}Hb76b zKmkw!%z!a~sQ^2m8n7AgHsCbiHh}%DK<obd?^pu-TDcbCyY=wSp2E)MNlPb7^XC>} zZX)8V`u0ie_~B28u295SB!Cw`1BtOKOxkn&D>2Mz{TWVXklTK;@Lm08gk2Gzh%f@7 zfUpOG%{MFp418fCLhOSC0U_KG!b&ghlOePqgdavY_alC34&hXU%n1;I5LZ-$bJs0G 
z5<)pb1;WkLA|&IBN@4dki?9HMl?b0gXh*mZ;Y5V(2(1WNy_MJYJbwLb*F8_&RLlBJ zc>JMhvt}ynvlY|l&V0x|Q!!`m?CCS-%~Lq$J@WX&iUIv*%^M&P89rTJ*&0P2YW-o4 z?N&cg0sG2|I~W1TZd^%TiP(2eQgg$%`ET^w+iiXDuUmr2&OZ!Y6&kb#m+e}fTb10K zMBI)R(__h~+exxiQiW$fvix?Em}Vi}ZYKon8R!f9r}Iz@${=0u#0F0q42Sw)GU!fX zz;cx@WwL=(Av!eI7u|CodE-uE_~qdi0h*n)PxxuLg}iwuru*jnd$b+-<nEnVao8x9 zml#^i@(6!voQ3i8ziHzvB>Z-a7&D%8#7<*@%tf}eCy8V}V;8Sgqo~;6e>`Uqc0I?? zZq4w~Uj^WgNOxia&aCrQYSha-lZo4Skog*aea>F;!R@38jhwx+vQ7R<cU1n@CSN+s zb1?870r%wg879Nc=ul{cj=%Mw<$a3|{{Oq4DE$9QPmrV1@wKaE7rW%kO6+s-f8>Fe zWwJ+uy4J&W&;IJQ2fE5-Npe}_x<L`LyH4Hr79k6u1Y`ix0I2{4AQ}(>kOSJUT7(t= z1zZAL1bhZK2RIFA0-ONU0}cZY0`>xS19kwm0@edo0+s<7|B|Z~r?3FX9KZ~~B!Cr= z2e1H)03{$E5COn1mk1rwBS%~Sw0(z)0Vtpua1qc1I0)DdSOZuBm;;yu7!Mc?$O9O^ zn~Bx~$pEAR;sKF>P(UD{{R+;}02cwL0rddJzZc<lz<NM6zz&!S7!Ak+7y)Sj0dV76 z%ewb^$)bqzV&Q%46YszN{lAZ|8`MjdqGE@{9e?Y8#c;J+gw3~5DDqz!i^B~6--h3y zH}SWmD@nd+Ubm#HEbu^mFPTDgDqHXtfW7?+xD-KzY4Fa!A;1_2C@DVu{|-+7Um*Au z;Q!wNtGP@NbUbq)oqv{3G3ftyIIym_?270dCle@Ew5yN^_SZ6ACZr*^j(9YFvw{6Q zQ%AhM0?RtSxaf$d%|Lp;d+~E_#Ie$Kq%U{{{K&f_-n<EMl+zIx3tv5OJ4V*k>TCwJ zt8$p~0p&tvt#YsOsPeq>jm)m9zN+b}d8#F<H&pvoCspmL9_j?ORy|x@uAZ;1Rc}={ zs!ywbRtID$vP@Z{vmVHrpXJP2o3$b9wXC<Zj%A(9x|nq>%bS&}8K+sQ@oHK%eY7U+ zOWLj454G*upzP4><ZM%RN%nGQ_O9%s*#SDOZmDjaZjio6U!`BDzop-o^KQ<WoDqhx zh8c$0hLuLW$!2=owAggmblh~w<TbUK0?m=;IP)O0);!$&u=y$T^X3ia*UYEQlDT_s zpIk$3RW4el5QH@d@|73V&t|n|{h5`d>92W9b4oKTdtdehU7p@~LVsHSwf?$Zmh)iF ztA;*Cqj99M)Hu!fxY2HW&iI1yWn;7Pd*d(0fu<oQjmcozZQ9Q((Pa9_^f{{Xo#{u@ zuc(e>3NeS9`<VNhGt8OhZ1XVlDDwpKL*`2JeDl-hrKsLU^H%d7GisxR8bdHg`A_9B zr?Oc&JhL)$StiLmmH9<xfGSdztV&bqRHIa5Rj;bvQms<|L%m&nP5qPlwpu`EmuD@? 
zdJ$cGIZLGp)kbPFv?H{ewC`&_)?UgE)5Ykf=w|9>>jL$|^b_?>IbY=TGAIm#3_8Ow z!~F)QVYy+AVW;7M;RD0ZhJMBejLyf5UmI^5hnwCueQYW)k2B9SKWScVe#QJx^C9ym z=AX^C%zv7LbGzln<_<%bKArnw?pL{jMG&C!g+a=>%9BcW=7G$is*$P(R7BOF`apGF zbxB24KdWx3{!|64d#a<=3Uz<AO084ptH-J*sHdoxs$WtcR=2CgtPE$CDod9&CaW@Q zMOLt8phm5ER5MR=SaV!+QS-Itdx*||G;JD56Qm78TVu4z+Cf^a)}Xa$3$&xOR_z4s zgW9Ru+1eVdM@zKpwVSouv^%wLYY%Cgv}d%RYA-?D#O&DYFS5VMrrEz__s~73dqKBF zw?p@qu0eN3=ln$Xv#w3oT_2-Y>&^NCeW`wu{xSV)`rZ2Tkc(UTyZV7SALd-nY01fk zFqRwahDC;F4J!>N3?Cb=L0+<qON_gWpBt|jsj;U?XEH-jcbfK^PMIRivF3d9WQ_i+ z=CkH+&EdJKTwU(8+(&bt%-xlHAh#j6Jy&pAVJt!zf|Na!W0Yme>B>dQjmp=QXOv$m zzfu0AY|8Xzwr0ktY^pag5$jc-sIK#lRj3E3E$Z>=2h`IsAzx8{sUCt}d{J{yb3hxR z>!Ulbm-O8XDTafF>xRcospcW(E9PI#wp<6B6;ql15`<C8@k-}Sr7Ckq=8DW_)qM3b z^~kInSzR?s%}ULyns+p3b?@uT4I2zi$mEO(Z_E*dtqA%l2Py|EGnE>pUTK2vn6Gpx z>y$^7zbN};?o+*>-mDIXPIxeDQ`YNQd$LYsorNYCpqZdqt=XX2i(w7dW@~$9M`u5( zTj<oitvjsyQzzH=)c4g7(f^zy<%Af*3_T6ehH6ObYle>uvBv&JjWNggpm8du<m<-$ z#^c5hjGq`U8$(RtrdZPnb6)P7xyN(A&HatFZVA3lM-Z%xR!&qdP!_3YWB!~~|Dygw z9g(HUnwS-&iPk1)hiZ$o4{4nbLqvvU7iGWC1tdaOuKSPfxZapEC8sUtJM-`6kldKu z^xWdy>FB!Wb6?K=ckYqgv%aEMVk3ZA5Txvb{&*wvQs&LfJD9+7Rj4XLrB_W+Eml>l zmZ?^#R;t#h)~hzCHmiPAwWw~Y+MrLTsUJgszvxu2QLk5TP#;nMppJnuP0dQn%E(e? 
zWnoqrv!27OSedmZYkgLO=C(%A25Ea~S>02#D%Aa9$o+opyV~Eix3#kD;OwyMp4p1* zrP<4~S7t}+2I#VNW?g}9zwTY#3Eii<R^1()pzouft#|03))W1H{V{!`-g!ZPN&l1n zcl{lGP)@g;sGPW*emO&O^f~675jn*<lXBMOyao9m3!OO0__pz&@r3b?v6tzeCK<HZ z3G?3EuUVIG7KA(mTIJTvXH;X<PW4*#dG+t=)foF5S(T8c-I{Bf7R?*lqU=YrS7rYW zMWxo=Z@AC+sngWke4lwbQ;WL=VKjmQ<=c?-cT`7J$FcByp!!HPRy|4WP!o*dE%nl@ z@3Z8Z6wPqWOwBIMEzK6~4y_d{$dmeMISX?(<^0R!WqH*L!ZHN?m5(UbDi0_(tGa2z zHN7-_G;x|FO<&CbjX^U;GeuLaalWVdMDwGjt9CL}$R_PO+8?x$*-4Ox2eW5nZ_VDH z{h#bUx>Vh0-7`=eCv*q(NAw@%+{jsI*kbtH@V((*#+}AP5XaArktT&{DAtE-CKI&i zQY=MVAuGZ;uI_~a${|X(a=$V#^GxPXsw{Pmx>#MIUaVd_7)$0k&3VmNns1@$v|6DV zsueY#m^mqPDki2a)1J8?b4TXx%)REr=5<$|kfqH)a*H6~3}3iuZZj{--JaW&OEC;G z-hXmsnzB$iLupekQ*Ku_DXCJP8JVffG-ggjPU|zbX4YpmX9_BX%A%U2nyOlaMSe3h zOS4LVqPD1~s%NQ%h;{Sl$}aYjn?ubJSZAZn@n(fN)tqL|SohF8S+B@BX1jTTd69XE zx!SzUyu!TFyvBTB)jZkkV3>);FD;bCgy~+IF=pN8-|ahrdY)L<f1zyLx}^(cEB}k0 O__RzJ{1aFN;r{`h?oBoT diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-19.0.3.dist-info/RECORD b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-19.0.3.dist-info/RECORD deleted file mode 100644 index e4db3106..00000000 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-19.0.3.dist-info/RECORD +++ /dev/null @@ -1,620 +0,0 @@ -../../Scripts/pip.exe,sha256=_2SMG4FIQFG4OIAmAOHkMahm8echBHb70bpPZ681eKI,102731 -../../Scripts/pip3.7.exe,sha256=_2SMG4FIQFG4OIAmAOHkMahm8echBHb70bpPZ681eKI,102731 -../../Scripts/pip3.exe,sha256=_2SMG4FIQFG4OIAmAOHkMahm8echBHb70bpPZ681eKI,102731 -pip-19.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pip-19.0.3.dist-info/LICENSE.txt,sha256=W6Ifuwlk-TatfRU2LR7W1JMcyMj5_y1NkRkOEJvnRDE,1090 -pip-19.0.3.dist-info/METADATA,sha256=UFkQ3jmvF9jPeZVMc6IScYpjOYGZx-05u0kqWTl8MnY,2892 -pip-19.0.3.dist-info/RECORD,, -pip-19.0.3.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -pip-19.0.3.dist-info/entry_points.txt,sha256=S_zfxY25QtQDVY1BiLAmOKSkkI5llzCKPLiYOSEupsY,98 
-pip-19.0.3.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pip/__init__.py,sha256=_0sfqHzmBOW_4x3-R2jxH4luuX5Ffe32B7BaisaO2XI,23 -pip/__main__.py,sha256=L3IHqBeasELUHvwy5CT_izVEMhM12tve289qut49DvU,623 -pip/__pycache__/__init__.cpython-37.pyc,, -pip/__pycache__/__main__.cpython-37.pyc,, -pip/_internal/__init__.py,sha256=b0jSFCCViGhB1RWni35_NMkH3Y-mbZrV648DGMagDjs,2869 -pip/_internal/__pycache__/__init__.cpython-37.pyc,, -pip/_internal/__pycache__/build_env.cpython-37.pyc,, -pip/_internal/__pycache__/cache.cpython-37.pyc,, -pip/_internal/__pycache__/configuration.cpython-37.pyc,, -pip/_internal/__pycache__/download.cpython-37.pyc,, -pip/_internal/__pycache__/exceptions.cpython-37.pyc,, -pip/_internal/__pycache__/index.cpython-37.pyc,, -pip/_internal/__pycache__/locations.cpython-37.pyc,, -pip/_internal/__pycache__/pep425tags.cpython-37.pyc,, -pip/_internal/__pycache__/pyproject.cpython-37.pyc,, -pip/_internal/__pycache__/resolve.cpython-37.pyc,, -pip/_internal/__pycache__/wheel.cpython-37.pyc,, -pip/_internal/build_env.py,sha256=M6gja0toc36njmTGewkXMx0A-ZiaG6kL3yIw-N8Eu9M,7439 -pip/_internal/cache.py,sha256=WfnnzjrXNi5-i1Ahy6UZWLs2LxSKAY5Sswri3z-pf68,7684 -pip/_internal/cli/__init__.py,sha256=FkHBgpxxb-_gd6r1FjnNhfMOzAUYyXoXKJ6abijfcFU,132 -pip/_internal/cli/__pycache__/__init__.cpython-37.pyc,, -pip/_internal/cli/__pycache__/autocompletion.cpython-37.pyc,, -pip/_internal/cli/__pycache__/base_command.cpython-37.pyc,, -pip/_internal/cli/__pycache__/cmdoptions.cpython-37.pyc,, -pip/_internal/cli/__pycache__/main_parser.cpython-37.pyc,, -pip/_internal/cli/__pycache__/parser.cpython-37.pyc,, -pip/_internal/cli/__pycache__/status_codes.cpython-37.pyc,, -pip/_internal/cli/autocompletion.py,sha256=ptvsMdGjq42pzoY4skABVF43u2xAtLJlXAulPi-A10Y,6083 -pip/_internal/cli/base_command.py,sha256=YymFGRVq-Z0RcOyV5YzcRDANLeV19Em4XkipwBFqkEU,12725 -pip/_internal/cli/cmdoptions.py,sha256=pZQFNv-E7S0k4IYm6MW127FfLO0RP4yKkfyfb3V_x90,23885 
-pip/_internal/cli/main_parser.py,sha256=ReG-nZ95-7WxZJLY1wrwknPGbECOd-zkUnHiShKr5ZY,3016 -pip/_internal/cli/parser.py,sha256=VZKUKJPbU6I2cHPLDOikin-aCx7OvLcZ3fzYp3xytd8,9378 -pip/_internal/cli/status_codes.py,sha256=F6uDG6Gj7RNKQJUDnd87QKqI16Us-t-B0wPF_4QMpWc,156 -pip/_internal/commands/__init__.py,sha256=CQAzhVx9ViPtqLNUvAeqnKj5iWfFEcqMx5RlZWjJ30c,2251 -pip/_internal/commands/__pycache__/__init__.cpython-37.pyc,, -pip/_internal/commands/__pycache__/check.cpython-37.pyc,, -pip/_internal/commands/__pycache__/completion.cpython-37.pyc,, -pip/_internal/commands/__pycache__/configuration.cpython-37.pyc,, -pip/_internal/commands/__pycache__/download.cpython-37.pyc,, -pip/_internal/commands/__pycache__/freeze.cpython-37.pyc,, -pip/_internal/commands/__pycache__/hash.cpython-37.pyc,, -pip/_internal/commands/__pycache__/help.cpython-37.pyc,, -pip/_internal/commands/__pycache__/install.cpython-37.pyc,, -pip/_internal/commands/__pycache__/list.cpython-37.pyc,, -pip/_internal/commands/__pycache__/search.cpython-37.pyc,, -pip/_internal/commands/__pycache__/show.cpython-37.pyc,, -pip/_internal/commands/__pycache__/uninstall.cpython-37.pyc,, -pip/_internal/commands/__pycache__/wheel.cpython-37.pyc,, -pip/_internal/commands/check.py,sha256=liigNVif0iz2mBfhvsajrLZT5zM5KIvgmKvhAW91EzA,1430 -pip/_internal/commands/completion.py,sha256=hqvCvoxsIHjysiD7olHKTqK2lzE1_lS6LWn69kN5qyI,2929 -pip/_internal/commands/configuration.py,sha256=265HWuUxPggCNcIeWHA3p-LDDiRVnexwFgwmHGgWOHY,7125 -pip/_internal/commands/download.py,sha256=XPe3Kuj9iZfXwOiJq70mYVYNZD5lJCLnGT_C61cOsKw,6623 -pip/_internal/commands/freeze.py,sha256=VvS3G0wrm_9BH3B7Ex5msLL_1UQTtCq5G8dDI63Iemo,3259 -pip/_internal/commands/hash.py,sha256=K1JycsD-rpjqrRcL_ijacY9UKmI82pQcLYq4kCM4Pv0,1681 -pip/_internal/commands/help.py,sha256=MwBhPJpW1Dt3GfJV3V8V6kgAy_pXT0jGrZJB1wCTW-E,1090 -pip/_internal/commands/install.py,sha256=OqLybBwThV0IRq0xwnlsENWBB9-hw8Dcv5pUPg5QtKw,22580 
-pip/_internal/commands/list.py,sha256=cbJEvxkBlFfSjBalQrbTqb_KFR6eLMo7Mp_JXttPyQI,10150 -pip/_internal/commands/search.py,sha256=sLZ9icKMEEGekHvzRRZMiTd1zCFIZeDptyyU1mQCYzk,4728 -pip/_internal/commands/show.py,sha256=9EVh86vY0NZdlhT-wsuV-zq_MAV6qqV4S1Akn3wkUuw,6289 -pip/_internal/commands/uninstall.py,sha256=h0gfPF5jylDESx_IHgF6bZME7QAEOHzQHdn65GP-jrE,2963 -pip/_internal/commands/wheel.py,sha256=7MNPZqK9WWxZC3TgzvMBH-RPRlOFLpwq927lkzUiUjI,7167 -pip/_internal/configuration.py,sha256=KMgG3ufFrUKX_QESi2cMVvFi47tl845Bg1ZkNthlWik,13243 -pip/_internal/download.py,sha256=KoQvMd0OfeMUn-Wi_v2e99jxkue_zKkxFBHiiQDS3Z0,34696 -pip/_internal/exceptions.py,sha256=bRSURPPUs2wMBb2TillETj6EBTDgpp4fWp5CcKZe3K0,9145 -pip/_internal/index.py,sha256=sYBuVbKkl11YqinxBIxro8_tx0GQ_5n4gbx9EpS3xN0,37840 -pip/_internal/locations.py,sha256=1JWExRYwqJq6slzprUVq0u2nxTzmGem-8L1CxU0tdVs,6944 -pip/_internal/models/__init__.py,sha256=3DHUd_qxpPozfzouoqa9g9ts1Czr5qaHfFxbnxriepM,63 -pip/_internal/models/__pycache__/__init__.cpython-37.pyc,, -pip/_internal/models/__pycache__/candidate.cpython-37.pyc,, -pip/_internal/models/__pycache__/format_control.cpython-37.pyc,, -pip/_internal/models/__pycache__/index.cpython-37.pyc,, -pip/_internal/models/__pycache__/link.cpython-37.pyc,, -pip/_internal/models/candidate.py,sha256=avICbDUtLA5zIwX_Xy4z3-Qg6tf_ysZzz30sdFbVnys,1094 -pip/_internal/models/format_control.py,sha256=p0L8487xgkUrGyvULKCBQcJW0uZbWeP3ZXE_eGBGfe8,2264 -pip/_internal/models/index.py,sha256=K59A8-hVhBM20Xkahr4dTwP7OjkJyEqXH11UwHFVgqM,1060 -pip/_internal/models/link.py,sha256=mQu9rcPjaRGSqsboFLAdgMRT6B6iatiiCoToNHv4zS4,4817 -pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_internal/operations/__pycache__/__init__.cpython-37.pyc,, -pip/_internal/operations/__pycache__/check.cpython-37.pyc,, -pip/_internal/operations/__pycache__/freeze.cpython-37.pyc,, -pip/_internal/operations/__pycache__/prepare.cpython-37.pyc,, 
-pip/_internal/operations/check.py,sha256=KND1M5Bh_zMBP9hvvSovZPwolTxH3sWGiHD6hABegs8,5137 -pip/_internal/operations/freeze.py,sha256=To8UFKGiZIOfA87Y1S-7HVn_-cKjRYXJ4X45maMWA-c,9321 -pip/_internal/operations/prepare.py,sha256=l2SemS5Z_KYB0PQ7y2E12Yl28-rfegcmSRbqvElsQpI,16740 -pip/_internal/pep425tags.py,sha256=t0VKiMvgd1VYcTdJe4H_6_VYeCB6PMDwnqZrE69FYH0,13142 -pip/_internal/pyproject.py,sha256=nBCBkD95mF2BoYTOjyfS0zccTjYPgIUWM-NB9pOBgbM,6478 -pip/_internal/req/__init__.py,sha256=gneiGyc-U5QXYi2XN0q9QzcQ2TK3R7vwQ4AzTmk9rIk,2343 -pip/_internal/req/__pycache__/__init__.cpython-37.pyc,, -pip/_internal/req/__pycache__/constructors.cpython-37.pyc,, -pip/_internal/req/__pycache__/req_file.cpython-37.pyc,, -pip/_internal/req/__pycache__/req_install.cpython-37.pyc,, -pip/_internal/req/__pycache__/req_set.cpython-37.pyc,, -pip/_internal/req/__pycache__/req_tracker.cpython-37.pyc,, -pip/_internal/req/__pycache__/req_uninstall.cpython-37.pyc,, -pip/_internal/req/constructors.py,sha256=bMUEADysJNU7rnXK_k4OqpNXGMWFkE6b3JjqFULf0GU,11378 -pip/_internal/req/req_file.py,sha256=Onq9xqes1R1qptlkAUOhlvGO2JZLvVvOBA7aO72YIfc,13730 -pip/_internal/req/req_install.py,sha256=BXrpciRx6_Ah1OfeYHQt_8BKUgpRpgRPqOT0LobMsD8,39955 -pip/_internal/req/req_set.py,sha256=dwaxSEGvtFV4G6uW1dNLbfpV9xbPBBdDbHslR1FN7jc,8064 -pip/_internal/req/req_tracker.py,sha256=aAvF76NrFVc0SmOtj3Ee570i9g5yJbxv0uJsBxumbG8,2905 -pip/_internal/req/req_uninstall.py,sha256=OaIJ6Hdo-LJ27LU2cAPWzHfbl_3iobsGzay9wDlfLpk,21458 -pip/_internal/resolve.py,sha256=whoi0DJIk0B-j_W6wLkosFwcMKCImanHnpZKeYd-X9U,15226 -pip/_internal/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_internal/utils/__pycache__/__init__.cpython-37.pyc,, -pip/_internal/utils/__pycache__/appdirs.cpython-37.pyc,, -pip/_internal/utils/__pycache__/compat.cpython-37.pyc,, -pip/_internal/utils/__pycache__/deprecation.cpython-37.pyc,, -pip/_internal/utils/__pycache__/encoding.cpython-37.pyc,, 
-pip/_internal/utils/__pycache__/filesystem.cpython-37.pyc,, -pip/_internal/utils/__pycache__/glibc.cpython-37.pyc,, -pip/_internal/utils/__pycache__/hashes.cpython-37.pyc,, -pip/_internal/utils/__pycache__/logging.cpython-37.pyc,, -pip/_internal/utils/__pycache__/misc.cpython-37.pyc,, -pip/_internal/utils/__pycache__/models.cpython-37.pyc,, -pip/_internal/utils/__pycache__/outdated.cpython-37.pyc,, -pip/_internal/utils/__pycache__/packaging.cpython-37.pyc,, -pip/_internal/utils/__pycache__/setuptools_build.cpython-37.pyc,, -pip/_internal/utils/__pycache__/temp_dir.cpython-37.pyc,, -pip/_internal/utils/__pycache__/typing.cpython-37.pyc,, -pip/_internal/utils/__pycache__/ui.cpython-37.pyc,, -pip/_internal/utils/appdirs.py,sha256=d_iHU6K5MlL2Dq82QVtZgFXpyfrrDuDwuCmiI5H9tQ0,9435 -pip/_internal/utils/compat.py,sha256=AEVxz_VJCAVl2HoW0s3H07QXPG_xj_HfMGVws-mB9n8,8565 -pip/_internal/utils/deprecation.py,sha256=MF43y-bB3N2foClmZUdfBtUsfAOz8Bdck1EzRy5RVe4,3044 -pip/_internal/utils/encoding.py,sha256=jsXgq7MlYmX_fB9yqzC54H2SpPfQbzYfMXrx8PT15R4,1225 -pip/_internal/utils/filesystem.py,sha256=ojaIDvOFOtkpKme5se6X2N8ARmQxu8cxvaaI-NFqVtk,990 -pip/_internal/utils/glibc.py,sha256=lxM6vJc-nUhUX3Dc1UOFlNBdjCylo-9Ta6c536uyvSA,3296 -pip/_internal/utils/hashes.py,sha256=FMYKr_y6NAalGcjOkN5dgM91vVhm3J-hCAc70SCQPO8,3569 -pip/_internal/utils/logging.py,sha256=VjAGhQKvmuN3tUplwamHGVMQfZoBefGI7GtvlQDLW2g,9719 -pip/_internal/utils/misc.py,sha256=-4KcZiJ8ErnLzOZDYm6bCj-KwB-MbxJZbnTDxqT3eF4,33547 -pip/_internal/utils/models.py,sha256=DQYZSRhjvSdDTAaJLLCpDtxAn1S_-v_8nlNjv4T2jwY,1042 -pip/_internal/utils/outdated.py,sha256=vnSpakXMU3lFiFxyX3stWzTyu2OnWGG8KA2rdOlcrBY,5974 -pip/_internal/utils/packaging.py,sha256=cDVTZVp3eR2MQX45DYlkzGyHP6zcF45ujm5oCAoA230,2785 -pip/_internal/utils/setuptools_build.py,sha256=0blfscmNJW_iZ5DcswJeDB_PbtTEjfK9RL1R1WEDW2E,278 -pip/_internal/utils/temp_dir.py,sha256=0Xq5ZlOd2OOeHwKM6hGy66gnMGAbyhio7DtjLHd7DFg,5339 
-pip/_internal/utils/typing.py,sha256=ztYtZAcqjCYDwP-WlF6EiAAskAsZBMMXtuqvfgZIlgQ,1139 -pip/_internal/utils/ui.py,sha256=l4CEswlh8fWvISW4-RUtlXtw2hFvko08OZBYCWBTxSw,14256 -pip/_internal/vcs/__init__.py,sha256=O1rQ5XoDr4r38kKktwuCL3SNU2h0WGjB-lVHjPuY-pw,17278 -pip/_internal/vcs/__pycache__/__init__.cpython-37.pyc,, -pip/_internal/vcs/__pycache__/bazaar.cpython-37.pyc,, -pip/_internal/vcs/__pycache__/git.cpython-37.pyc,, -pip/_internal/vcs/__pycache__/mercurial.cpython-37.pyc,, -pip/_internal/vcs/__pycache__/subversion.cpython-37.pyc,, -pip/_internal/vcs/bazaar.py,sha256=AqsBYeXjl5Zw8IaoIVI8WStDE6_UqZ1RTfvVH5qZkG4,3670 -pip/_internal/vcs/git.py,sha256=zO-_jOa7baD_Y6y_zDFQVhYSvc1jgnDEA307y9LATAA,13407 -pip/_internal/vcs/mercurial.py,sha256=aAxoCGfLjHcxZtN7FSvFL28MwLOUL0dZzUssZ0IU__g,3447 -pip/_internal/vcs/subversion.py,sha256=hxFLX0Ncdth7dY7excIdFo6UGQrjuZ6KIeIL3jqr-3o,7081 -pip/_internal/wheel.py,sha256=110d-8C4sg_RmZHw-bVAPvHiAiF9TAhZJXo9tvN1PIk,41001 -pip/_vendor/__init__.py,sha256=vsMCQHIwFuzqN63uGhBNE0zimx6rlZl3SC-m7YHmjG0,4779 -pip/_vendor/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/__pycache__/appdirs.cpython-37.pyc,, -pip/_vendor/__pycache__/distro.cpython-37.pyc,, -pip/_vendor/__pycache__/ipaddress.cpython-37.pyc,, -pip/_vendor/__pycache__/pyparsing.cpython-37.pyc,, -pip/_vendor/__pycache__/retrying.cpython-37.pyc,, -pip/_vendor/__pycache__/six.cpython-37.pyc,, -pip/_vendor/appdirs.py,sha256=BENKsvcA08IpccD9345-rMrg3aXWFA1q6BFEglnHg6I,24547 -pip/_vendor/cachecontrol/__init__.py,sha256=6cRPchVqkAkeUtYTSW8qCetjSqJo-GxP-n4VMVDbvmc,302 -pip/_vendor/cachecontrol/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-37.pyc,, -pip/_vendor/cachecontrol/__pycache__/adapter.cpython-37.pyc,, -pip/_vendor/cachecontrol/__pycache__/cache.cpython-37.pyc,, -pip/_vendor/cachecontrol/__pycache__/compat.cpython-37.pyc,, -pip/_vendor/cachecontrol/__pycache__/controller.cpython-37.pyc,, 
-pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-37.pyc,, -pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-37.pyc,, -pip/_vendor/cachecontrol/__pycache__/serialize.cpython-37.pyc,, -pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-37.pyc,, -pip/_vendor/cachecontrol/_cmd.py,sha256=URGE0KrA87QekCG3SGPatlSPT571dZTDjNa-ZXX3pDc,1295 -pip/_vendor/cachecontrol/adapter.py,sha256=eBGAtVNRZgtl_Kj5JV54miqL9YND-D0JZPahwY8kFtY,4863 -pip/_vendor/cachecontrol/cache.py,sha256=1fc4wJP8HYt1ycnJXeEw5pCpeBL2Cqxx6g9Fb0AYDWQ,805 -pip/_vendor/cachecontrol/caches/__init__.py,sha256=-gHNKYvaeD0kOk5M74eOrsSgIKUtC6i6GfbmugGweEo,86 -pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-37.pyc,, -pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-37.pyc,, -pip/_vendor/cachecontrol/caches/file_cache.py,sha256=8vrSzzGcdfEfICago1uSFbkumNJMGLbCdEkXsmUIExw,4177 -pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=HxelMpNCo-dYr2fiJDwM3hhhRmxUYtB5tXm1GpAAT4Y,856 -pip/_vendor/cachecontrol/compat.py,sha256=kHNvMRdt6s_Xwqq_9qJmr9ou3wYMOMUMxPPcwNxT8Mc,695 -pip/_vendor/cachecontrol/controller.py,sha256=U7g-YwizQ2O5NRgK_MZreF1ntM4E49C3PuF3od-Vwz4,13698 -pip/_vendor/cachecontrol/filewrapper.py,sha256=vACKO8Llzu_ZWyjV1Fxn1MA4TGU60N5N3GSrAFdAY2Q,2533 -pip/_vendor/cachecontrol/heuristics.py,sha256=BFGHJ3yQcxvZizfo90LLZ04T_Z5XSCXvFotrp7Us0sc,4070 -pip/_vendor/cachecontrol/serialize.py,sha256=GebE34fgToyWwAsRPguh8hEPN6CqoG-5hRMXRsjVABQ,6954 -pip/_vendor/cachecontrol/wrapper.py,sha256=sfr9YHWx-5TwNz1H5rT6QOo8ggII6v3vbEDjQFwR6wc,671 -pip/_vendor/certifi/__init__.py,sha256=timLpLv3BNcGLLEz2s6gyA34hOhMb4AIPBz0zxOxna8,52 -pip/_vendor/certifi/__main__.py,sha256=NaCn6WtWME-zzVWQ2j4zFyl8cY4knDa9CwtHNIeFPhM,53 -pip/_vendor/certifi/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/certifi/__pycache__/__main__.cpython-37.pyc,, -pip/_vendor/certifi/__pycache__/core.cpython-37.pyc,, 
-pip/_vendor/certifi/cacert.pem,sha256=zGy4Y1gu9Zy-6CGvg3apdC7kXMm3f1ELolJwNDnBRv0,275834 -pip/_vendor/certifi/core.py,sha256=K_VfM6GwSemTFisUgFWyk__w1m9pCGFRF5zTzO5bGv0,288 -pip/_vendor/chardet/__init__.py,sha256=YsP5wQlsHJ2auF1RZJfypiSrCA7_bQiRm3ES_NI76-Y,1559 -pip/_vendor/chardet/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/big5freq.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/big5prober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/chardistribution.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/charsetprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/compat.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/cp949prober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/enums.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/escprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/escsm.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/eucjpprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/euckrfreq.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/euckrprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/euctwfreq.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/euctwprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/gb2312freq.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/gb2312prober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/hebrewprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/jisfreq.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/jpcntx.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/langcyrillicmodel.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-37.pyc,, 
-pip/_vendor/chardet/__pycache__/langthaimodel.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/latin1prober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/mbcssm.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/sjisprober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/universaldetector.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/utf8prober.cpython-37.pyc,, -pip/_vendor/chardet/__pycache__/version.cpython-37.pyc,, -pip/_vendor/chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254 -pip/_vendor/chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757 -pip/_vendor/chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411 -pip/_vendor/chardet/charsetgroupprober.py,sha256=6bDu8YIiRuScX4ca9Igb0U69TA2PGXXDej6Cc4_9kO4,3787 -pip/_vendor/chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110 -pip/_vendor/chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 -pip/_vendor/chardet/cli/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-37.pyc,, -pip/_vendor/chardet/cli/chardetect.py,sha256=DI8dlV3FBD0c0XA_y3sQ78z754DUv1J8n34RtDjOXNw,2774 -pip/_vendor/chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590 -pip/_vendor/chardet/compat.py,sha256=PKTzHkSbtbHDqS9PyujMbX74q1a8mMpeQTDVsQhZMRw,1134 -pip/_vendor/chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855 -pip/_vendor/chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661 
-pip/_vendor/chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950 -pip/_vendor/chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510 -pip/_vendor/chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749 -pip/_vendor/chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546 -pip/_vendor/chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748 -pip/_vendor/chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621 -pip/_vendor/chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747 -pip/_vendor/chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715 -pip/_vendor/chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754 -pip/_vendor/chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838 -pip/_vendor/chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777 -pip/_vendor/chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643 -pip/_vendor/chardet/langbulgarianmodel.py,sha256=1HqQS9Pbtnj1xQgxitJMvw8X6kKr5OockNCZWfEQrPE,12839 -pip/_vendor/chardet/langcyrillicmodel.py,sha256=LODajvsetH87yYDDQKA2CULXUH87tI223dhfjh9Zx9c,17948 -pip/_vendor/chardet/langgreekmodel.py,sha256=8YAW7bU8YwSJap0kIJSbPMw1BEqzGjWzqcqf0WgUKAA,12688 -pip/_vendor/chardet/langhebrewmodel.py,sha256=JSnqmE5E62tDLTPTvLpQsg5gOMO4PbdWRvV7Avkc0HA,11345 -pip/_vendor/chardet/langhungarianmodel.py,sha256=RhapYSG5l0ZaO-VV4Fan5sW0WRGQqhwBM61yx3yxyOA,12592 -pip/_vendor/chardet/langthaimodel.py,sha256=8l0173Gu_W6G8mxmQOTEF4ls2YdE7FxWf3QkSxEGXJQ,11290 -pip/_vendor/chardet/langturkishmodel.py,sha256=W22eRNJsqI6uWAfwXSKVWWnCerYqrI8dZQTm_M0lRFk,11102 -pip/_vendor/chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370 -pip/_vendor/chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413 
-pip/_vendor/chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012 -pip/_vendor/chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481 -pip/_vendor/chardet/sbcharsetprober.py,sha256=LDSpCldDCFlYwUkGkwD2oFxLlPWIWXT09akH_2PiY74,5657 -pip/_vendor/chardet/sbcsgroupprober.py,sha256=1IprcCB_k1qfmnxGC6MBbxELlKqD3scW6S8YIwdeyXA,3546 -pip/_vendor/chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774 -pip/_vendor/chardet/universaldetector.py,sha256=qL0174lSZE442eB21nnktT9_VcAye07laFWUeUrjttY,12485 -pip/_vendor/chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766 -pip/_vendor/chardet/version.py,sha256=sp3B08mrDXB-pf3K9fqJ_zeDHOCLC8RrngQyDFap_7g,242 -pip/_vendor/colorama/__init__.py,sha256=lJdY6COz9uM_pXwuk9oLr0fp8H8q2RrUqN16GKabvq4,239 -pip/_vendor/colorama/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/colorama/__pycache__/ansi.cpython-37.pyc,, -pip/_vendor/colorama/__pycache__/ansitowin32.cpython-37.pyc,, -pip/_vendor/colorama/__pycache__/initialise.cpython-37.pyc,, -pip/_vendor/colorama/__pycache__/win32.cpython-37.pyc,, -pip/_vendor/colorama/__pycache__/winterm.cpython-37.pyc,, -pip/_vendor/colorama/ansi.py,sha256=Fi0un-QLqRm-v7o_nKiOqyC8PapBJK7DLV_q9LKtTO0,2524 -pip/_vendor/colorama/ansitowin32.py,sha256=u8QaqdqS_xYSfNkPM1eRJLHz6JMWPodaJaP0mxgHCDc,10462 -pip/_vendor/colorama/initialise.py,sha256=PprovDNxMTrvoNHFcL2NZjpH2XzDc8BLxLxiErfUl4k,1915 -pip/_vendor/colorama/win32.py,sha256=bJ8Il9jwaBN5BJ8bmN6FoYZ1QYuMKv2j8fGrXh7TJjw,5404 -pip/_vendor/colorama/winterm.py,sha256=2y_2b7Zsv34feAsP67mLOVc-Bgq51mdYGo571VprlrM,6438 -pip/_vendor/distlib/__init__.py,sha256=7uthK6m96pTekk8hjlT-MybcwYmmxwP8gEOxXVg1f2s,581 -pip/_vendor/distlib/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/compat.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/database.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/index.cpython-37.pyc,, 
-pip/_vendor/distlib/__pycache__/locators.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/manifest.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/markers.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/metadata.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/resources.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/scripts.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/util.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/version.cpython-37.pyc,, -pip/_vendor/distlib/__pycache__/wheel.cpython-37.pyc,, -pip/_vendor/distlib/_backport/__init__.py,sha256=bqS_dTOH6uW9iGgd0uzfpPjo6vZ4xpPZ7kyfZJ2vNaw,274 -pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/distlib/_backport/__pycache__/misc.cpython-37.pyc,, -pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-37.pyc,, -pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-37.pyc,, -pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-37.pyc,, -pip/_vendor/distlib/_backport/misc.py,sha256=KWecINdbFNOxSOP1fGF680CJnaC6S4fBRgEtaYTw0ig,971 -pip/_vendor/distlib/_backport/shutil.py,sha256=VW1t3uYqUjWZH7jV-6QiimLhnldoV5uIpH4EuiT1jfw,25647 -pip/_vendor/distlib/_backport/sysconfig.cfg,sha256=swZKxq9RY5e9r3PXCrlvQPMsvOdiWZBTHLEbqS8LJLU,2617 -pip/_vendor/distlib/_backport/sysconfig.py,sha256=JdJ9ztRy4Hc-b5-VS74x3nUtdEIVr_OBvMsIb8O2sjc,26964 -pip/_vendor/distlib/_backport/tarfile.py,sha256=Ihp7rXRcjbIKw8COm9wSePV9ARGXbSF9gGXAMn2Q-KU,92628 -pip/_vendor/distlib/compat.py,sha256=xdNZmqFN5HwF30HjRn5M415pcC2kgXRBXn767xS8v-M,41404 -pip/_vendor/distlib/database.py,sha256=-KJH63AJ7hqjLtGCwOTrionhKr2Vsytdwkjyo8UdEco,51029 -pip/_vendor/distlib/index.py,sha256=Dd1kIV06XIdynNpKxHMMRRIKsXuoUsG7QIzntfVtZCI,21073 -pip/_vendor/distlib/locators.py,sha256=S9G2IsZp0RnMMbXGrT-gu7892pNpy1XMlUEuUHX3OI8,51828 -pip/_vendor/distlib/manifest.py,sha256=nQEhYmgoreaBZzyFzwYsXxJARu3fo4EkunU163U16iE,14811 -pip/_vendor/distlib/markers.py,sha256=6Ac3cCfFBERexiESWIOXmg-apIP8l2esafNSX3KMy-8,4387 
-pip/_vendor/distlib/metadata.py,sha256=BNCnpRfFVslyZcosr4vnE_YbkRb3TNxXtk7TrDszJdc,40172 -pip/_vendor/distlib/resources.py,sha256=2FGv0ZHF14KXjLIlL0R991lyQQGcewOS4mJ-5n-JVnc,10766 -pip/_vendor/distlib/scripts.py,sha256=NYqRJ2uuEuJwr_NNLzWH0m_s_YsobDFQb6HqxuQ2Sew,16638 -pip/_vendor/distlib/t32.exe,sha256=ftub1bsSPUCOnBn-eCtcarKTk0N0CBEP53BumkIxWJE,92672 -pip/_vendor/distlib/t64.exe,sha256=iChOG627LWTHY8-jzSwlo9SYU5a-0JHwQu4AqDz8I68,102400 -pip/_vendor/distlib/util.py,sha256=gwKL5geJKmtR4GeIUnoMAWjsPPG3tVP_mFxw_Sx-isc,59681 -pip/_vendor/distlib/version.py,sha256=_n7F6juvQGAcn769E_SHa7fOcf5ERlEVymJ_EjPRwGw,23391 -pip/_vendor/distlib/w32.exe,sha256=NPYPpt7PIjVqABEu1CzabbDyHHkJpuw-_qZq_48H0j0,89088 -pip/_vendor/distlib/w64.exe,sha256=Yb-qr1OQEzL8KRGTk-XHUZDwMSljfQeZnVoTk-K4e7E,99328 -pip/_vendor/distlib/wheel.py,sha256=gV53KDG7BgbxsdeKjnATbP47gTEJRNylcIeE1TFin1o,39880 -pip/_vendor/distro.py,sha256=dOMrjIXv-3GmEbtP-NJc057Sv19P7ZAdke-v0TBeNio,42455 -pip/_vendor/html5lib/__init__.py,sha256=Ztrn7UvF-wIFAgRBBa0ML-Gu5AffH3BPX_INJx4SaBI,1162 -pip/_vendor/html5lib/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-37.pyc,, -pip/_vendor/html5lib/__pycache__/_inputstream.cpython-37.pyc,, -pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-37.pyc,, -pip/_vendor/html5lib/__pycache__/_utils.cpython-37.pyc,, -pip/_vendor/html5lib/__pycache__/constants.cpython-37.pyc,, -pip/_vendor/html5lib/__pycache__/html5parser.cpython-37.pyc,, -pip/_vendor/html5lib/__pycache__/serializer.cpython-37.pyc,, -pip/_vendor/html5lib/_ihatexml.py,sha256=3LBtJMlzgwM8vpQiU1TvGmEEmNH72sV0yD8yS53y07A,16705 -pip/_vendor/html5lib/_inputstream.py,sha256=bPUWcAfJScK4xkjQQaG_HsI2BvEVbFvI0AsodDYPQj0,32552 -pip/_vendor/html5lib/_tokenizer.py,sha256=YAaOEBD6qc5ISq9Xt9Nif1OFgcybTTfMdwqBkZhpAq4,76580 -pip/_vendor/html5lib/_trie/__init__.py,sha256=8VR1bcgD2OpeS2XExpu5yBhP_Q1K-lwKbBKICBPf1kU,289 -pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-37.pyc,, 
-pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-37.pyc,, -pip/_vendor/html5lib/_trie/__pycache__/datrie.cpython-37.pyc,, -pip/_vendor/html5lib/_trie/__pycache__/py.cpython-37.pyc,, -pip/_vendor/html5lib/_trie/_base.py,sha256=uJHVhzif9S0MJXgy9F98iEev5evi_rgUk5BmEbUSp8c,930 -pip/_vendor/html5lib/_trie/datrie.py,sha256=EQpqSfkZRuTbE-DuhW7xMdVDxdZNZ0CfmnYfHA_3zxM,1178 -pip/_vendor/html5lib/_trie/py.py,sha256=wXmQLrZRf4MyWNyg0m3h81m9InhLR7GJ002mIIZh-8o,1775 -pip/_vendor/html5lib/_utils.py,sha256=ismpASeqa2jqEPQjHUj8vReAf7yIoKnvLN5fuOw6nv0,4015 -pip/_vendor/html5lib/constants.py,sha256=4lmZWLtEPRLnl8NzftOoYTJdo6jpeMtP6dqQC0g_bWQ,83518 -pip/_vendor/html5lib/filters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-37.pyc,, -pip/_vendor/html5lib/filters/__pycache__/base.cpython-37.pyc,, -pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-37.pyc,, -pip/_vendor/html5lib/filters/__pycache__/lint.cpython-37.pyc,, -pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-37.pyc,, -pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-37.pyc,, -pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-37.pyc,, -pip/_vendor/html5lib/filters/alphabeticalattributes.py,sha256=lViZc2JMCclXi_5gduvmdzrRxtO5Xo9ONnbHBVCsykU,919 -pip/_vendor/html5lib/filters/base.py,sha256=z-IU9ZAYjpsVsqmVt7kuWC63jR11hDMr6CVrvuao8W0,286 -pip/_vendor/html5lib/filters/inject_meta_charset.py,sha256=egDXUEHXmAG9504xz0K6ALDgYkvUrC2q15YUVeNlVQg,2945 -pip/_vendor/html5lib/filters/lint.py,sha256=jk6q56xY0ojiYfvpdP-OZSm9eTqcAdRqhCoPItemPYA,3643 -pip/_vendor/html5lib/filters/optionaltags.py,sha256=8lWT75J0aBOHmPgfmqTHSfPpPMp01T84NKu0CRedxcE,10588 -pip/_vendor/html5lib/filters/sanitizer.py,sha256=4ON02KNjuqda1lCw5_JCUZxb0BzWR5M7ON84dtJ7dm0,26248 
-pip/_vendor/html5lib/filters/whitespace.py,sha256=8eWqZxd4UC4zlFGW6iyY6f-2uuT8pOCSALc3IZt7_t4,1214 -pip/_vendor/html5lib/html5parser.py,sha256=g5g2ezkusHxhi7b23vK_-d6K6BfIJRbqIQmvQ9z4EgI,118963 -pip/_vendor/html5lib/serializer.py,sha256=yfcfBHse2wDs6ojxn-kieJjLT5s1ipilQJ0gL3-rJis,15758 -pip/_vendor/html5lib/treeadapters/__init__.py,sha256=A0rY5gXIe4bJOiSGRO_j_tFhngRBO8QZPzPtPw5dFzo,679 -pip/_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/html5lib/treeadapters/__pycache__/genshi.cpython-37.pyc,, -pip/_vendor/html5lib/treeadapters/__pycache__/sax.cpython-37.pyc,, -pip/_vendor/html5lib/treeadapters/genshi.py,sha256=CH27pAsDKmu4ZGkAUrwty7u0KauGLCZRLPMzaO3M5vo,1715 -pip/_vendor/html5lib/treeadapters/sax.py,sha256=BKS8woQTnKiqeffHsxChUqL4q2ZR_wb5fc9MJ3zQC8s,1776 -pip/_vendor/html5lib/treebuilders/__init__.py,sha256=AysSJyvPfikCMMsTVvaxwkgDieELD5dfR8FJIAuq7hY,3592 -pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/html5lib/treebuilders/__pycache__/base.cpython-37.pyc,, -pip/_vendor/html5lib/treebuilders/__pycache__/dom.cpython-37.pyc,, -pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-37.pyc,, -pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-37.pyc,, -pip/_vendor/html5lib/treebuilders/base.py,sha256=wQGp5yy22TNG8tJ6aREe4UUeTR7A99dEz0BXVaedWb4,14579 -pip/_vendor/html5lib/treebuilders/dom.py,sha256=SY3MsijXyzdNPc8aK5IQsupBoM8J67y56DgNtGvsb9g,8835 -pip/_vendor/html5lib/treebuilders/etree.py,sha256=aqIBOGj_dFYqBURIcTegGNBhAIJOw5iFDHb4jrkYH-8,12764 -pip/_vendor/html5lib/treebuilders/etree_lxml.py,sha256=9V0dXxbJYYq-Skgb5-_OL2NkVYpjioEb4CHajo0e9yI,14122 -pip/_vendor/html5lib/treewalkers/__init__.py,sha256=yhXxHpjlSqfQyUag3v8-vWjMPriFBU8YRAPNpDgBTn8,5714 -pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/html5lib/treewalkers/__pycache__/base.cpython-37.pyc,, -pip/_vendor/html5lib/treewalkers/__pycache__/dom.cpython-37.pyc,, 
-pip/_vendor/html5lib/treewalkers/__pycache__/etree.cpython-37.pyc,, -pip/_vendor/html5lib/treewalkers/__pycache__/etree_lxml.cpython-37.pyc,, -pip/_vendor/html5lib/treewalkers/__pycache__/genshi.cpython-37.pyc,, -pip/_vendor/html5lib/treewalkers/base.py,sha256=ouiOsuSzvI0KgzdWP8PlxIaSNs9falhbiinAEc_UIJY,7476 -pip/_vendor/html5lib/treewalkers/dom.py,sha256=EHyFR8D8lYNnyDU9lx_IKigVJRyecUGua0mOi7HBukc,1413 -pip/_vendor/html5lib/treewalkers/etree.py,sha256=sz1o6mmE93NQ53qJFDO7HKyDtuwgK-Ay3qSFZPC6u00,4550 -pip/_vendor/html5lib/treewalkers/etree_lxml.py,sha256=sY6wfRshWTllu6n48TPWpKsQRPp-0CQrT0hj_AdzHSU,6309 -pip/_vendor/html5lib/treewalkers/genshi.py,sha256=4D2PECZ5n3ZN3qu3jMl9yY7B81jnQApBQSVlfaIuYbA,2309 -pip/_vendor/idna/__init__.py,sha256=9Nt7xpyet3DmOrPUGooDdAwmHZZu1qUAy2EaJ93kGiQ,58 -pip/_vendor/idna/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/idna/__pycache__/codec.cpython-37.pyc,, -pip/_vendor/idna/__pycache__/compat.cpython-37.pyc,, -pip/_vendor/idna/__pycache__/core.cpython-37.pyc,, -pip/_vendor/idna/__pycache__/idnadata.cpython-37.pyc,, -pip/_vendor/idna/__pycache__/intranges.cpython-37.pyc,, -pip/_vendor/idna/__pycache__/package_data.cpython-37.pyc,, -pip/_vendor/idna/__pycache__/uts46data.cpython-37.pyc,, -pip/_vendor/idna/codec.py,sha256=lvYb7yu7PhAqFaAIAdWcwgaWI2UmgseUua-1c0AsG0A,3299 -pip/_vendor/idna/compat.py,sha256=R-h29D-6mrnJzbXxymrWUW7iZUvy-26TQwZ0ij57i4U,232 -pip/_vendor/idna/core.py,sha256=JDCZZ_PLESqIgEbU8mPyoEufWwoOiIqygA17-QZIe3s,11733 -pip/_vendor/idna/idnadata.py,sha256=HXaPFw6_YAJ0qppACPu0YLAULtRs3QovRM_CCZHGdY0,40899 -pip/_vendor/idna/intranges.py,sha256=TY1lpxZIQWEP6tNqjZkFA5hgoMWOj1OBmnUG8ihT87E,1749 -pip/_vendor/idna/package_data.py,sha256=kIzeKKXEouXLR4srqwf9Q3zv-NffKSOz5aSDOJARPB0,21 -pip/_vendor/idna/uts46data.py,sha256=oLyNZ1pBaiBlj9zFzLFRd_P7J8MkRcgDisjExZR_4MY,198292 -pip/_vendor/ipaddress.py,sha256=2OgbkeAD2rLkcXqbcvof3J5R7lRwjNLoBySyTkBtKnc,79852 
-pip/_vendor/lockfile/__init__.py,sha256=Tqpz90DwKYfhPsfzVOJl84TL87pdFE5ePNHdXAxs4Tk,9371 -pip/_vendor/lockfile/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/lockfile/__pycache__/linklockfile.cpython-37.pyc,, -pip/_vendor/lockfile/__pycache__/mkdirlockfile.cpython-37.pyc,, -pip/_vendor/lockfile/__pycache__/pidlockfile.cpython-37.pyc,, -pip/_vendor/lockfile/__pycache__/sqlitelockfile.cpython-37.pyc,, -pip/_vendor/lockfile/__pycache__/symlinklockfile.cpython-37.pyc,, -pip/_vendor/lockfile/linklockfile.py,sha256=C7OH3H4GdK68u4FQgp8fkP2kO4fyUTSyj3X6blgfobc,2652 -pip/_vendor/lockfile/mkdirlockfile.py,sha256=e3qgIL-etZMLsS-3ft19iW_8IQ360HNkGOqE3yBKsUw,3096 -pip/_vendor/lockfile/pidlockfile.py,sha256=ukH9uk6NFuxyVmG5QiWw4iKq3fT7MjqUguX95avYPIY,6090 -pip/_vendor/lockfile/sqlitelockfile.py,sha256=o2TMkMRY0iwn-iL1XMRRIFStMUkS4i3ajceeYNntKFg,5506 -pip/_vendor/lockfile/symlinklockfile.py,sha256=ABwXXmvTHvCl5viPblShL3PG-gGsLiT1roAMfDRwhi8,2616 -pip/_vendor/msgpack/__init__.py,sha256=y0bk2YbzK6J2e0J_dyreN6nD7yM2IezT6m_tU2h-Mdg,1677 -pip/_vendor/msgpack/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/msgpack/__pycache__/_version.cpython-37.pyc,, -pip/_vendor/msgpack/__pycache__/exceptions.cpython-37.pyc,, -pip/_vendor/msgpack/__pycache__/fallback.cpython-37.pyc,, -pip/_vendor/msgpack/_version.py,sha256=dN7wVIjbyuQIJ35B2o6gymQNDLPlj_7-uTfgCv7KErM,20 -pip/_vendor/msgpack/exceptions.py,sha256=lPkAi_u12NlFajDz4FELSHEdfU8hrR3zeTvKX8aQuz4,1056 -pip/_vendor/msgpack/fallback.py,sha256=h0ll8xnq12mI9PuQ9Qd_Ihtt08Sp8L0JqhG9KY8Vyjk,36411 -pip/_vendor/packaging/__about__.py,sha256=Wg0-hNgTU2_lBZcGBh5pm1R9yroQ3rv-X0rig8KjA6o,744 -pip/_vendor/packaging/__init__.py,sha256=6enbp5XgRfjBjsI9-bn00HjHf5TH21PDMOKkJW8xw-w,562 -pip/_vendor/packaging/__pycache__/__about__.cpython-37.pyc,, -pip/_vendor/packaging/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/packaging/__pycache__/_compat.cpython-37.pyc,, -pip/_vendor/packaging/__pycache__/_structures.cpython-37.pyc,, 
-pip/_vendor/packaging/__pycache__/markers.cpython-37.pyc,, -pip/_vendor/packaging/__pycache__/requirements.cpython-37.pyc,, -pip/_vendor/packaging/__pycache__/specifiers.cpython-37.pyc,, -pip/_vendor/packaging/__pycache__/utils.cpython-37.pyc,, -pip/_vendor/packaging/__pycache__/version.cpython-37.pyc,, -pip/_vendor/packaging/_compat.py,sha256=Ugdm-qcneSchW25JrtMIKgUxfEEBcCAz6WrEeXeqz9o,865 -pip/_vendor/packaging/_structures.py,sha256=pVd90XcXRGwpZRB_qdFuVEibhCHpX_bL5zYr9-N0mc8,1416 -pip/_vendor/packaging/markers.py,sha256=-QjvJkhSJBxBogO9J_EpPQudHaaLV3rgVYsBDqn-ZLc,8234 -pip/_vendor/packaging/requirements.py,sha256=grcnFU8x7KD230JaFLXtWl3VClLuOmsOy4c-m55tOWs,4700 -pip/_vendor/packaging/specifiers.py,sha256=0ZzQpcUnvrQ6LjR-mQRLzMr8G6hdRv-mY0VSf_amFtI,27778 -pip/_vendor/packaging/utils.py,sha256=VaTC0Ei7zO2xl9ARiWmz2YFLFt89PuuhLbAlXMyAGms,1520 -pip/_vendor/packaging/version.py,sha256=Npdwnb8OHedj_2L86yiUqscujb7w_i5gmSK1PhOAFzg,11978 -pip/_vendor/pep517/__init__.py,sha256=nOY747zTld3oTdEetBG6DWxEcZXTeOQk0aHvbR-sa5w,84 -pip/_vendor/pep517/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/pep517/__pycache__/_in_process.cpython-37.pyc,, -pip/_vendor/pep517/__pycache__/build.cpython-37.pyc,, -pip/_vendor/pep517/__pycache__/check.cpython-37.pyc,, -pip/_vendor/pep517/__pycache__/colorlog.cpython-37.pyc,, -pip/_vendor/pep517/__pycache__/compat.cpython-37.pyc,, -pip/_vendor/pep517/__pycache__/envbuild.cpython-37.pyc,, -pip/_vendor/pep517/__pycache__/wrappers.cpython-37.pyc,, -pip/_vendor/pep517/_in_process.py,sha256=xMY2kLutkjCti5WqTmKOLRRL3o8Ds_k-fObFyuMv1tk,6061 -pip/_vendor/pep517/build.py,sha256=-n8PT-ugS1TdqoTUY1vatDQjrLtx48K_-Quu2MuQBiA,2699 -pip/_vendor/pep517/check.py,sha256=Lu7nMdYu1JVV58fE3hv-d_avTy5h0yO9LsIzAt82Clk,5885 -pip/_vendor/pep517/colorlog.py,sha256=Tk9AuYm_cLF3BKTBoSTJt9bRryn0aFojIQOwbfVUTxQ,4098 -pip/_vendor/pep517/compat.py,sha256=4SFG4QN-cNj8ebSa0wV0HUtEEQWwmbok2a0uk1gYEOM,631 
-pip/_vendor/pep517/envbuild.py,sha256=9-u4KffexPMEm52rTaIjEOxsCAd2DMByxzv5H566QLw,5763 -pip/_vendor/pep517/wrappers.py,sha256=9dZn-q7F5KyQKUJMie2uKwur2FG0CLXz_kLZzkJOhZc,5912 -pip/_vendor/pkg_resources/__init__.py,sha256=JGk92Be39-a8sQIltjZF-Dk9ZOIAR0lpCZ9rYrGHfVM,104648 -pip/_vendor/pkg_resources/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-37.pyc,, -pip/_vendor/pkg_resources/py31compat.py,sha256=CRk8fkiPRDLsbi5pZcKsHI__Pbmh_94L8mr9Qy9Ab2U,562 -pip/_vendor/progress/__init__.py,sha256=Hv3Y8Hr6RyM34NdZkrZQWMURjS2h5sONRHJSvZXWZgQ,3188 -pip/_vendor/progress/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/progress/__pycache__/bar.cpython-37.pyc,, -pip/_vendor/progress/__pycache__/counter.cpython-37.pyc,, -pip/_vendor/progress/__pycache__/helpers.cpython-37.pyc,, -pip/_vendor/progress/__pycache__/spinner.cpython-37.pyc,, -pip/_vendor/progress/bar.py,sha256=hlkDAEv9pRRiWqR5XL6vIAgMG4u_dBGEW_8klQhBRq0,2942 -pip/_vendor/progress/counter.py,sha256=XtBuZY4yYmr50E2A_fAzjWhm0IkwaVwxNsNVYDE7nsw,1528 -pip/_vendor/progress/helpers.py,sha256=6FsBLh_xUlKiVua-zZIutCjxth-IO8FtyUj6I2tx9fg,2952 -pip/_vendor/progress/spinner.py,sha256=m7bASI2GUbLFG-PbAefdHtrrWWlJLFhhSBbw70gp2TY,1439 -pip/_vendor/pyparsing.py,sha256=jh8A5pZOiogg5mR2riJEb2vlfKQ4grylOcYSmW2SU0s,243692 -pip/_vendor/pytoml/__init__.py,sha256=W_SKx36Hsew-Fty36BOpreLm4uF4V_Tgkm_z9rIoOE8,127 -pip/_vendor/pytoml/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/pytoml/__pycache__/core.cpython-37.pyc,, -pip/_vendor/pytoml/__pycache__/parser.cpython-37.pyc,, -pip/_vendor/pytoml/__pycache__/test.cpython-37.pyc,, -pip/_vendor/pytoml/__pycache__/utils.cpython-37.pyc,, -pip/_vendor/pytoml/__pycache__/writer.cpython-37.pyc,, -pip/_vendor/pytoml/core.py,sha256=9CrLLTs1PdWjEwRnYzt_i4dhHcZvGxs_GsMlYAX3iY4,509 -pip/_vendor/pytoml/parser.py,sha256=2tDXkldqPQJhyadXzL2rGhVbjUyBNeXXhaEfncHl2iQ,10326 -pip/_vendor/pytoml/test.py,sha256=2nQs4aX3XQEaaQCx6x_OJTS2Hb0_IiTZRqNOeDmLCzo,1021 
-pip/_vendor/pytoml/utils.py,sha256=JCLHx77Hu1R3F-bRgiROIiKyCzLwyebnp5P35cRJxWs,1665 -pip/_vendor/pytoml/writer.py,sha256=WbNNQg3sh_V-s3kt88LkNNbxEq6pPDdhRE-daJzArcI,3198 -pip/_vendor/requests/__init__.py,sha256=ZI8kbaEzLAxsqex3MmMPr-v24d1RfZbNAOY8fUxg2Xw,4074 -pip/_vendor/requests/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/__version__.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/_internal_utils.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/adapters.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/api.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/auth.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/certs.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/compat.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/cookies.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/exceptions.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/help.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/hooks.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/models.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/packages.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/sessions.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/status_codes.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/structures.cpython-37.pyc,, -pip/_vendor/requests/__pycache__/utils.cpython-37.pyc,, -pip/_vendor/requests/__version__.py,sha256=8KG3anaNCi-PEclPPOHJ_cv1udY_L1_njVr84gRZ9HM,436 -pip/_vendor/requests/_internal_utils.py,sha256=Zx3PnEUccyfsB-ie11nZVAW8qClJy0gx1qNME7rgT18,1096 -pip/_vendor/requests/adapters.py,sha256=e-bmKEApNVqFdylxuMJJfiaHdlmS_zhWhIMEzlHvGuc,21548 -pip/_vendor/requests/api.py,sha256=hWZgfD7OriCZFOnpeq0bv2pbXDl8YXfxDwAcU036qDs,6253 -pip/_vendor/requests/auth.py,sha256=QB2-cSUj1jrvWZfPXttsZpyAacQgtKLVk14vQW9TpSE,10206 -pip/_vendor/requests/certs.py,sha256=nXRVq9DtGmv_1AYbwjTu9UrgAcdJv05ZvkNeaoLOZxY,465 -pip/_vendor/requests/compat.py,sha256=FZX4Q_EMKiMnhZpZ3g_gOsT-j2ca9ij2gehDx1cwYeo,1941 
-pip/_vendor/requests/cookies.py,sha256=Y-bKX6TvW3FnYlE6Au0SXtVVWcaNdFvuAwQxw-G0iTI,18430 -pip/_vendor/requests/exceptions.py,sha256=-mLam3TAx80V09EaH3H-ZxR61eAVuLRZ8zgBBSLjK44,3197 -pip/_vendor/requests/help.py,sha256=SJPVcoXeo7KfK4AxJN5eFVQCjr0im87tU2n7ubLsksU,3578 -pip/_vendor/requests/hooks.py,sha256=QReGyy0bRcr5rkwCuObNakbYsc7EkiKeBwG4qHekr2Q,757 -pip/_vendor/requests/models.py,sha256=6s-37iAqXVptq8z7U_LoH_pbIPrCQUm_Z8QuIGE29Q0,34275 -pip/_vendor/requests/packages.py,sha256=njJmVifY4aSctuW3PP5EFRCxjEwMRDO6J_feG2dKWsI,695 -pip/_vendor/requests/sessions.py,sha256=DjbCotDW6xSAaBsjbW-L8l4N0UcwmrxVNgSrZgIjGWM,29332 -pip/_vendor/requests/status_codes.py,sha256=XWlcpBjbCtq9sSqpH9_KKxgnLTf9Z__wCWolq21ySlg,4129 -pip/_vendor/requests/structures.py,sha256=zoP8qly2Jak5e89HwpqjN1z2diztI-_gaqts1raJJBc,2981 -pip/_vendor/requests/utils.py,sha256=LtPJ1db6mJff2TJSJWKi7rBpzjPS3mSOrjC9zRhoD3A,30049 -pip/_vendor/retrying.py,sha256=k3fflf5_Mm0XcIJYhB7Tj34bqCCPhUDkYbx1NvW2FPE,9972 -pip/_vendor/six.py,sha256=h9jch2pS86y4R36pKRS3LOYUCVFNIJMRwjZ4fJDtJ44,32452 -pip/_vendor/urllib3/__init__.py,sha256=EZviRQA_iuL_94EeJHY4JAArRXbRCkAzA0HH9iXZ15s,2722 -pip/_vendor/urllib3/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/urllib3/__pycache__/_collections.cpython-37.pyc,, -pip/_vendor/urllib3/__pycache__/connection.cpython-37.pyc,, -pip/_vendor/urllib3/__pycache__/connectionpool.cpython-37.pyc,, -pip/_vendor/urllib3/__pycache__/exceptions.cpython-37.pyc,, -pip/_vendor/urllib3/__pycache__/fields.cpython-37.pyc,, -pip/_vendor/urllib3/__pycache__/filepost.cpython-37.pyc,, -pip/_vendor/urllib3/__pycache__/poolmanager.cpython-37.pyc,, -pip/_vendor/urllib3/__pycache__/request.cpython-37.pyc,, -pip/_vendor/urllib3/__pycache__/response.cpython-37.pyc,, -pip/_vendor/urllib3/_collections.py,sha256=-CAKsDE-WdubAjlBSZLx7b0e7WKenaNGwWvGLDEF1TM,10746 -pip/_vendor/urllib3/connection.py,sha256=KLFvknLgllcMkgJ-zUsFjCzOt9P03fDoIpTPz_vqXCw,13839 
-pip/_vendor/urllib3/connectionpool.py,sha256=rgc_3D0VsD5VDxr4KzzA8Plee0Rmerm5WKb71FcxWu8,35097 -pip/_vendor/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-37.pyc,, -pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-37.pyc,, -pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-37.pyc,, -pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-37.pyc,, -pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-37.pyc,, -pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-37.pyc,, -pip/_vendor/urllib3/contrib/_appengine_environ.py,sha256=lhYXvB5_oGKSeurX7za3XhcGyERvNjXRQ3eJp2GmQ3M,717 -pip/_vendor/urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-37.pyc,, -pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-37.pyc,, -pip/_vendor/urllib3/contrib/_securetransport/bindings.py,sha256=x2kLSh-ASZKsun0FxtraBuLVe3oHuth4YW6yZ5Vof-w,17560 -pip/_vendor/urllib3/contrib/_securetransport/low_level.py,sha256=Umy5u-3Z957GirdapnicXVOpHaM4xdOZABJuJxfaeJA,12162 -pip/_vendor/urllib3/contrib/appengine.py,sha256=VvDpkc5gf9dTXNxXmyG1mPdON_3DrYG_eW4uOqN98oQ,10938 -pip/_vendor/urllib3/contrib/ntlmpool.py,sha256=5ZpMF7N9B6NEjVU-r-xjDOV_-hkNvsDoNc84J2yqauI,4459 -pip/_vendor/urllib3/contrib/pyopenssl.py,sha256=-kI_9y99Iwybv6Wy8IF8PugVl61BeMBEEqGwrDYNCuc,15823 -pip/_vendor/urllib3/contrib/securetransport.py,sha256=BqXSlChN9_hjCWgyN6JdcgvBUdc37QCCX4u3_8zE_9o,30309 -pip/_vendor/urllib3/contrib/socks.py,sha256=Iom0snbHkCuZbZ7Sle2Kueha1W0jYAJ0SyCOtePLaio,6391 -pip/_vendor/urllib3/exceptions.py,sha256=rFeIfBNKC8KJ61ux-MtJyJlEC9G9ggkmCeF751JwVR4,6604 
-pip/_vendor/urllib3/fields.py,sha256=D_TE_SK15YatdbhWDMN0OE3X6UCJn1RTkANINCYOobE,5943 -pip/_vendor/urllib3/filepost.py,sha256=40CROlpRKVBpFUkD0R6wJf_PpvbcRQRFUu0OOQlFkKM,2436 -pip/_vendor/urllib3/packages/__init__.py,sha256=nlChrGzkjCkmhCX9HrF_qHPUgosfsPQkVIJxiiLhk9g,109 -pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/urllib3/packages/__pycache__/six.cpython-37.pyc,, -pip/_vendor/urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-37.pyc,, -pip/_vendor/urllib3/packages/backports/makefile.py,sha256=so2z9BiNM8kh38Ve5tomQP_mp2_ubEqzdlCpLZKzzCI,1456 -pip/_vendor/urllib3/packages/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098 -pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py,sha256=WBVbxQBojNAxfZwNavkox3BgJiMA9BJmm-_fwd0jD_o,688 -pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-37.pyc,, -pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py,sha256=E-9J-kAaUn76WMZ4PpzKUxM4C3yjY7mopOpbPIy3Dso,5700 -pip/_vendor/urllib3/poolmanager.py,sha256=csE6Bh6L0FJ3iNOHk2z8KhMT8Eiq976b6pk8I6vrOC8,16853 -pip/_vendor/urllib3/request.py,sha256=OfelFYzPnxGlU3amEz9uBLjCBOriwgJh4QC_aW9SF3U,5991 -pip/_vendor/urllib3/response.py,sha256=ta1jp4B5PGBWzoAV1s48WLuHCRICQnK7F9m_kyK4Z8g,25609 -pip/_vendor/urllib3/util/__init__.py,sha256=6Ran4oAVIy40Cu_oEPWnNV9bwF5rXx6G1DUZ7oehjPY,1044 -pip/_vendor/urllib3/util/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/urllib3/util/__pycache__/connection.cpython-37.pyc,, -pip/_vendor/urllib3/util/__pycache__/queue.cpython-37.pyc,, -pip/_vendor/urllib3/util/__pycache__/request.cpython-37.pyc,, -pip/_vendor/urllib3/util/__pycache__/response.cpython-37.pyc,, 
-pip/_vendor/urllib3/util/__pycache__/retry.cpython-37.pyc,, -pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-37.pyc,, -pip/_vendor/urllib3/util/__pycache__/timeout.cpython-37.pyc,, -pip/_vendor/urllib3/util/__pycache__/url.cpython-37.pyc,, -pip/_vendor/urllib3/util/__pycache__/wait.cpython-37.pyc,, -pip/_vendor/urllib3/util/connection.py,sha256=-AyqcRTuNUHuo5ndtsU0Og_nMyCGATC-kYqOUdBHwIQ,4639 -pip/_vendor/urllib3/util/queue.py,sha256=myTX3JDHntglKQNBf3b6dasHH-uF-W59vzGSQiFdAfI,497 -pip/_vendor/urllib3/util/request.py,sha256=H5_lrHvtwl2U2BbT1UYN9HpruNc1gsNFlz2njQmhPrQ,3705 -pip/_vendor/urllib3/util/response.py,sha256=028PNXDZhwBtnm2uXvnAHi_l9_AAGrAMH2Igh2AbgWg,2586 -pip/_vendor/urllib3/util/retry.py,sha256=kFQTesNiwPp6ZeQo9VHeUO7b8qA-_l3BnErCAOEPo4Q,15105 -pip/_vendor/urllib3/util/ssl_.py,sha256=4qqBDM82bufhqqEd0b-99sObz95XmEVEXDVi5iAyCeE,13172 -pip/_vendor/urllib3/util/timeout.py,sha256=7lHNrgL5YH2cI1j-yZnzV_J8jBlRVdmFhQaNyM1_2b8,9757 -pip/_vendor/urllib3/util/url.py,sha256=qCY_HHUXvo05wAsEERALgExtlgxLnAHSQ7ce1b-g3SM,6487 -pip/_vendor/urllib3/util/wait.py,sha256=p4BZo_Ukp5JF0Dn6jro7cUfqIjnU6WFtuoA6poaV5Jk,5403 -pip/_vendor/webencodings/__init__.py,sha256=qOBJIuPy_4ByYH6W_bNgJF-qYQ2DoU-dKsDu5yRWCXg,10579 -pip/_vendor/webencodings/__pycache__/__init__.cpython-37.pyc,, -pip/_vendor/webencodings/__pycache__/labels.cpython-37.pyc,, -pip/_vendor/webencodings/__pycache__/mklabels.cpython-37.pyc,, -pip/_vendor/webencodings/__pycache__/tests.cpython-37.pyc,, -pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-37.pyc,, -pip/_vendor/webencodings/labels.py,sha256=4AO_KxTddqGtrL9ns7kAPjb0CcN6xsCIxbK37HY9r3E,8979 -pip/_vendor/webencodings/mklabels.py,sha256=GYIeywnpaLnP0GSic8LFWgd0UVvO_l1Nc6YoF-87R_4,1305 -pip/_vendor/webencodings/tests.py,sha256=OtGLyjhNY1fvkW1GvLJ_FV9ZoqC9Anyjr7q3kxTbzNs,6563 -pip/_vendor/webencodings/x_user_defined.py,sha256=yOqWSdmpytGfUgh_Z6JYgDNhoc-BAHyyeeT15Fr42tM,4307 diff --git 
a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-19.0.3.dist-info/entry_points.txt b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-19.0.3.dist-info/entry_points.txt deleted file mode 100644 index f5809cb4..00000000 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-19.0.3.dist-info/entry_points.txt +++ /dev/null @@ -1,5 +0,0 @@ -[console_scripts] -pip = pip._internal:main -pip3 = pip._internal:main -pip3.7 = pip._internal:main - diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-19.0.3.dist-info/INSTALLER b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-20.2.dist-info/INSTALLER similarity index 100% rename from Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-19.0.3.dist-info/INSTALLER rename to Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-20.2.dist-info/INSTALLER diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-19.0.3.dist-info/LICENSE.txt b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-20.2.dist-info/LICENSE.txt similarity index 100% rename from Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-19.0.3.dist-info/LICENSE.txt rename to Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-20.2.dist-info/LICENSE.txt diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-19.0.3.dist-info/METADATA b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-20.2.dist-info/METADATA similarity index 62% rename from Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-19.0.3.dist-info/METADATA rename to Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-20.2.dist-info/METADATA index a91529c0..e2266f5a 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip-19.0.3.dist-info/METADATA +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-20.2.dist-info/METADATA @@ -1,11 +1,14 @@ Metadata-Version: 2.1 Name: pip -Version: 19.0.3 +Version: 20.2 
Summary: The PyPA recommended tool for installing Python packages. Home-page: https://pip.pypa.io/ Author: The pip developers -Author-email: pypa-dev@groups.google.com +Author-email: distutils-sig@python.org License: MIT +Project-URL: Documentation, https://pip.pypa.io +Project-URL: Source, https://github.com/pypa/pip +Project-URL: Changelog, https://pip.pypa.io/en/stable/news/ Keywords: distutils easy_install egg setuptools wheel virtualenv Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable @@ -16,13 +19,13 @@ Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: Implementation :: PyPy -Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.* +Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.* pip - The Python Package Installer ================================== @@ -39,19 +42,26 @@ Please take a look at our documentation for how to install and use pip: * `Installation`_ * `Usage`_ + +We release updates regularly, with a new version every 3 months. Find more details in our documentation: + * `Release notes`_ +* `Release process`_ + +In 2020, we're working on improvements to the heart of pip. Please `learn more and take our survey`_ to help us do it right. 
-If you find bugs, need help, or want to talk to the developers please use our mailing lists or chat rooms: +If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms: * `Issue tracking`_ * `Discourse channel`_ * `User IRC`_ -If you want to get involved head over to GitHub to get the source code and feel free to jump on the developer mailing lists and chat rooms: +If you want to get involved head over to GitHub to get the source code, look at our development documentation and feel free to jump on the developer mailing lists and chat rooms: * `GitHub page`_ -* `Dev mailing list`_ -* `Dev IRC`_ +* `Development documentation`_ +* `Development mailing list`_ +* `Development IRC`_ Code of Conduct --------------- @@ -59,17 +69,20 @@ Code of Conduct Everyone interacting in the pip project's codebases, issue trackers, chat rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_. -.. _package installer: https://packaging.python.org/en/latest/current/ +.. _package installer: https://packaging.python.org/guides/tool-recommendations/ .. _Python Package Index: https://pypi.org .. _Installation: https://pip.pypa.io/en/stable/installing.html .. _Usage: https://pip.pypa.io/en/stable/ .. _Release notes: https://pip.pypa.io/en/stable/news.html +.. _Release process: https://pip.pypa.io/en/latest/development/release-process/ .. _GitHub page: https://github.com/pypa/pip +.. _Development documentation: https://pip.pypa.io/en/latest/development +.. _learn more and take our survey: https://pyfound.blogspot.com/2020/03/new-pip-resolver-to-roll-out-this-year.html .. _Issue tracking: https://github.com/pypa/pip/issues .. _Discourse channel: https://discuss.python.org/c/packaging -.. _Dev mailing list: https://groups.google.com/forum/#!forum/pypa-dev +.. _Development mailing list: https://mail.python.org/mailman3/lists/distutils-sig.python.org/ .. _User IRC: https://webchat.freenode.net/?channels=%23pypa -.. 
_Dev IRC: https://webchat.freenode.net/?channels=%23pypa-dev +.. _Development IRC: https://webchat.freenode.net/?channels=%23pypa-dev .. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-20.2.dist-info/RECORD b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-20.2.dist-info/RECORD new file mode 100644 index 00000000..588c0a67 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-20.2.dist-info/RECORD @@ -0,0 +1,751 @@ +../../Scripts/pip.exe,sha256=QfBrCATEqinRlZnPkktPgyrdPFOrZjSlykCSN1R26Yo,106324 +../../Scripts/pip3.7.exe,sha256=QfBrCATEqinRlZnPkktPgyrdPFOrZjSlykCSN1R26Yo,106324 +../../Scripts/pip3.exe,sha256=QfBrCATEqinRlZnPkktPgyrdPFOrZjSlykCSN1R26Yo,106324 +pip-20.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pip-20.2.dist-info/LICENSE.txt,sha256=W6Ifuwlk-TatfRU2LR7W1JMcyMj5_y1NkRkOEJvnRDE,1090 +pip-20.2.dist-info/METADATA,sha256=MmWpr-dzJVIUdbKOi-_LpiuTlvQ8ObgQi2WsXhnNXik,3706 +pip-20.2.dist-info/RECORD,, +pip-20.2.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 +pip-20.2.dist-info/entry_points.txt,sha256=HtfDOwpUlr9s73jqLQ6wF9V0_0qvUXJwCBz7Vwx0Ue0,125 +pip-20.2.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pip/__init__.py,sha256=uX9-VTiqu9kizIhD0eO5ozQ0kqAMlxo92GmK6wt25Iw,453 +pip/__main__.py,sha256=bqCAM1cj1HwHCDx3WJa-LJxOBXimGxE8OjBqAvnhVg0,911 +pip/__pycache__/__init__.cpython-37.pyc,, +pip/__pycache__/__main__.cpython-37.pyc,, +pip/_internal/__init__.py,sha256=2si23JBW1erg19xIJ8CD6tfGknz0ijtXmzuXjGfGMGE,495 +pip/_internal/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/__pycache__/build_env.cpython-37.pyc,, +pip/_internal/__pycache__/cache.cpython-37.pyc,, +pip/_internal/__pycache__/configuration.cpython-37.pyc,, +pip/_internal/__pycache__/exceptions.cpython-37.pyc,, +pip/_internal/__pycache__/locations.cpython-37.pyc,, 
+pip/_internal/__pycache__/main.cpython-37.pyc,, +pip/_internal/__pycache__/pyproject.cpython-37.pyc,, +pip/_internal/__pycache__/self_outdated_check.cpython-37.pyc,, +pip/_internal/__pycache__/wheel_builder.cpython-37.pyc,, +pip/_internal/build_env.py,sha256=9_UaQ2fpsBvpKAji27f7bPAi2v3mb0cBvDYcejwFKNM,8088 +pip/_internal/cache.py,sha256=pT17VVxgzZK32aqY5FRS8GyAI73LKzNMF8ZelQ7Ojm0,12249 +pip/_internal/cli/__init__.py,sha256=FkHBgpxxb-_gd6r1FjnNhfMOzAUYyXoXKJ6abijfcFU,132 +pip/_internal/cli/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/cli/__pycache__/autocompletion.cpython-37.pyc,, +pip/_internal/cli/__pycache__/base_command.cpython-37.pyc,, +pip/_internal/cli/__pycache__/cmdoptions.cpython-37.pyc,, +pip/_internal/cli/__pycache__/command_context.cpython-37.pyc,, +pip/_internal/cli/__pycache__/main.cpython-37.pyc,, +pip/_internal/cli/__pycache__/main_parser.cpython-37.pyc,, +pip/_internal/cli/__pycache__/parser.cpython-37.pyc,, +pip/_internal/cli/__pycache__/progress_bars.cpython-37.pyc,, +pip/_internal/cli/__pycache__/req_command.cpython-37.pyc,, +pip/_internal/cli/__pycache__/spinners.cpython-37.pyc,, +pip/_internal/cli/__pycache__/status_codes.cpython-37.pyc,, +pip/_internal/cli/autocompletion.py,sha256=ekGNtcDI0p7rFVc-7s4T9Tbss4Jgb7vsB649XJIblRg,6547 +pip/_internal/cli/base_command.py,sha256=BWTztM4b6h8hodDHDKjgJ82jaSeru2AILAJxi1d_IP8,8810 +pip/_internal/cli/cmdoptions.py,sha256=M_BtuqeyRpZAUUYytts3pguBCF2RaGukVpDPE0niroI,28782 +pip/_internal/cli/command_context.py,sha256=ygMVoTy2jpNilKT-6416gFSQpaBtrKRBbVbi2fy__EU,975 +pip/_internal/cli/main.py,sha256=Hxc9dZyW3xiDsYZX-_J2cGXT5DWNLNn_Y7o9oUme-Ec,2616 +pip/_internal/cli/main_parser.py,sha256=voAtjo4WVPIYeu7Fqabva9SXaB3BjG0gH93GBfe6jHQ,2843 +pip/_internal/cli/parser.py,sha256=4FfwW8xB84CrkLs35ud90ZkhCcWyVkx17XD6j3XCW7c,9480 +pip/_internal/cli/progress_bars.py,sha256=J1zykt2LI4gbBeXorfYRmYV5FgXhcW4x3r6xE_a7Z7c,9121 +pip/_internal/cli/req_command.py,sha256=Eiz8TVzeqzG-40t7qLC1vO-vzjCRvX9C-qXMyfw9D1I,15132 
+pip/_internal/cli/spinners.py,sha256=PS9s53LB5aDPelIn8FhKerK3bOdgeefFH5wSWJ2PCzI,5509 +pip/_internal/cli/status_codes.py,sha256=F6uDG6Gj7RNKQJUDnd87QKqI16Us-t-B0wPF_4QMpWc,156 +pip/_internal/commands/__init__.py,sha256=yoLAnmEXjoQgYfDuwsuWG3RzzD19oeHobGEhmpIYsB4,4100 +pip/_internal/commands/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/commands/__pycache__/cache.cpython-37.pyc,, +pip/_internal/commands/__pycache__/check.cpython-37.pyc,, +pip/_internal/commands/__pycache__/completion.cpython-37.pyc,, +pip/_internal/commands/__pycache__/configuration.cpython-37.pyc,, +pip/_internal/commands/__pycache__/debug.cpython-37.pyc,, +pip/_internal/commands/__pycache__/download.cpython-37.pyc,, +pip/_internal/commands/__pycache__/freeze.cpython-37.pyc,, +pip/_internal/commands/__pycache__/hash.cpython-37.pyc,, +pip/_internal/commands/__pycache__/help.cpython-37.pyc,, +pip/_internal/commands/__pycache__/install.cpython-37.pyc,, +pip/_internal/commands/__pycache__/list.cpython-37.pyc,, +pip/_internal/commands/__pycache__/search.cpython-37.pyc,, +pip/_internal/commands/__pycache__/show.cpython-37.pyc,, +pip/_internal/commands/__pycache__/uninstall.cpython-37.pyc,, +pip/_internal/commands/__pycache__/wheel.cpython-37.pyc,, +pip/_internal/commands/cache.py,sha256=U3rLjls0AMMO8PxnhXVwIp7Biyvns8-gBThKTH3tX7Y,5676 +pip/_internal/commands/check.py,sha256=fqRrz2uKPC8Qsx2rgLygAD2Rbr-qxp1Q55zUoyZzB9Q,1677 +pip/_internal/commands/completion.py,sha256=ObssM77quf61qvbuSE6XLwUBdm_WcWIvXFI-Hy1RBsI,3081 +pip/_internal/commands/configuration.py,sha256=IN2QBF653sRiRU7-pHTpnZ6_gyiXNKUQkLiLaNRLKNw,9344 +pip/_internal/commands/debug.py,sha256=otBZnpnostX2kmYyOl6g6CeCLmk6H00Tsj2CDsCtFXw,7314 +pip/_internal/commands/download.py,sha256=EKFlj_ceGUEJj6yCDw7P6w7yUoB16IcNHhT2qnCFDNQ,4918 +pip/_internal/commands/freeze.py,sha256=vLBBP1d8wgEXrmlh06hbz_x_Q1mWHUdiWDa9NP2eKLE,3452 +pip/_internal/commands/hash.py,sha256=v2nYCiEsEI9nEam1p6GwdG8xyj5gFv-4WrqvNexKmeY,1843 
+pip/_internal/commands/help.py,sha256=ryuMDt2tc7ic3NJYMjjoNRH5r6LrB2yQVZvehAm8bLs,1270 +pip/_internal/commands/install.py,sha256=h2L8vS6t2DbGAdttkdZmMucK2eJG2CYvcwhDa7AdKrQ,28683 +pip/_internal/commands/list.py,sha256=jXkHHvScGVlenAjlOndIoqLxwKXwDs2RUcQeQS8X_eg,11281 +pip/_internal/commands/search.py,sha256=VnOvN6KjlUDe9cQ0MNFlgu5M1Sg-W54JU1KW5DvnmrA,5716 +pip/_internal/commands/show.py,sha256=r69-G8HIepDKm4SeyeHj0Ez1P9xoihrpVUyXm6NmXYY,6996 +pip/_internal/commands/uninstall.py,sha256=Ys8hwFsg0kvvGtLGYG3ibL5BKvURhlSlCX50ZQ-hsHk,3311 +pip/_internal/commands/wheel.py,sha256=-HSISE5AV29I752Aqw4DdmulrGd8rB_ZTOdpbJ6T8iM,6419 +pip/_internal/configuration.py,sha256=-Gxz2J-KuvxiqWIJ9F-XnYVZ5lKhNk7VO6ondEbH4EM,14115 +pip/_internal/distributions/__init__.py,sha256=ECBUW5Gtu9TjJwyFLvim-i6kUMYVuikNh9I5asL6tbA,959 +pip/_internal/distributions/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/distributions/__pycache__/base.cpython-37.pyc,, +pip/_internal/distributions/__pycache__/installed.cpython-37.pyc,, +pip/_internal/distributions/__pycache__/sdist.cpython-37.pyc,, +pip/_internal/distributions/__pycache__/wheel.cpython-37.pyc,, +pip/_internal/distributions/base.py,sha256=ruprpM_L2T2HNi3KLUHlbHimZ1sWVw-3Q0Lb8O7TDAI,1425 +pip/_internal/distributions/installed.py,sha256=YqlkBKr6TVP1MAYS6SG8ojud21wVOYLMZ8jMLJe9MSU,760 +pip/_internal/distributions/sdist.py,sha256=D4XTMlCwgPlK69l62GLYkNSVTVe99fR5iAcVt2EbGok,4086 +pip/_internal/distributions/wheel.py,sha256=95uD-TfaYoq3KiKBdzk9YMN4RRqJ28LNoSTS2K46gek,1294 +pip/_internal/exceptions.py,sha256=ZVpArxQrSlm4qAMtHaY3nHvG_t5eSi3WCnMowdm_m8I,12637 +pip/_internal/index/__init__.py,sha256=vpt-JeTZefh8a-FC22ZeBSXFVbuBcXSGiILhQZJaNpQ,30 +pip/_internal/index/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/index/__pycache__/collector.cpython-37.pyc,, +pip/_internal/index/__pycache__/package_finder.cpython-37.pyc,, +pip/_internal/index/collector.py,sha256=rMdGdAABOrvIl0DYlCMWXr7mIoqrU2VGeQpCuWiPu1Q,22838 
+pip/_internal/index/package_finder.py,sha256=ISieDd20dOSndMNybafCu3pO2JR3BKOfHv92Bes0j0Q,37364 +pip/_internal/locations.py,sha256=7YjzJy2CroQD8GBMemnHWRl9448BSIt0lfH98B-Dkd8,6732 +pip/_internal/main.py,sha256=IVBnUQ-FG7DK6617uEXRB5_QJqspAsBFmTmTesYkbdQ,437 +pip/_internal/models/__init__.py,sha256=3DHUd_qxpPozfzouoqa9g9ts1Czr5qaHfFxbnxriepM,63 +pip/_internal/models/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/models/__pycache__/candidate.cpython-37.pyc,, +pip/_internal/models/__pycache__/direct_url.cpython-37.pyc,, +pip/_internal/models/__pycache__/format_control.cpython-37.pyc,, +pip/_internal/models/__pycache__/index.cpython-37.pyc,, +pip/_internal/models/__pycache__/link.cpython-37.pyc,, +pip/_internal/models/__pycache__/scheme.cpython-37.pyc,, +pip/_internal/models/__pycache__/search_scope.cpython-37.pyc,, +pip/_internal/models/__pycache__/selection_prefs.cpython-37.pyc,, +pip/_internal/models/__pycache__/target_python.cpython-37.pyc,, +pip/_internal/models/__pycache__/wheel.cpython-37.pyc,, +pip/_internal/models/candidate.py,sha256=gACeCSHTIaWuB6RAeLmGJnbFFbKfp_47UERDoC_ldOU,1195 +pip/_internal/models/direct_url.py,sha256=MnBLPci1hE9Ndh6d3m0LAqB7hX3ci80CCJTE5eerFaQ,6900 +pip/_internal/models/format_control.py,sha256=RdnnmXxVJppCZWzWEmFTr-zD_m3G0izPLqJi6Iop75M,2823 +pip/_internal/models/index.py,sha256=carvxxaT7mJyoEkptaECHUZiNaA6R5NrsGF55zawNn8,1161 +pip/_internal/models/link.py,sha256=FMlxvqKmLoj7xTQSgKqfO2ehE1WcgD4C5DmEBuC_Qos,7470 +pip/_internal/models/scheme.py,sha256=EhPkT_6G0Md84JTLSVopYsp5H_K6BREYmFvU8H6wMK8,778 +pip/_internal/models/search_scope.py,sha256=Lum0mY4_pdR9DDBy6HV5xHGIMPp_kU8vMsqYKFHZip4,4751 +pip/_internal/models/selection_prefs.py,sha256=pgNjTfgePPiX1R5S2S8Yc6odOfU9NzG7YP_m_gnS0kw,2044 +pip/_internal/models/target_python.py,sha256=R7tAXI15B_cgw7Fgnq5cI9F-44goUZncH9JMtE8pXRw,4034 +pip/_internal/models/wheel.py,sha256=FTfzVb4WIbfIehxhdlAVvCil_MQ0-W44oyN56cE6NHc,2772 
+pip/_internal/network/__init__.py,sha256=jf6Tt5nV_7zkARBrKojIXItgejvoegVJVKUbhAa5Ioc,50 +pip/_internal/network/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/network/__pycache__/auth.cpython-37.pyc,, +pip/_internal/network/__pycache__/cache.cpython-37.pyc,, +pip/_internal/network/__pycache__/download.cpython-37.pyc,, +pip/_internal/network/__pycache__/lazy_wheel.cpython-37.pyc,, +pip/_internal/network/__pycache__/session.cpython-37.pyc,, +pip/_internal/network/__pycache__/utils.cpython-37.pyc,, +pip/_internal/network/__pycache__/xmlrpc.cpython-37.pyc,, +pip/_internal/network/auth.py,sha256=W9WhWL2EUr6OJQ2SecDmcCvHg3_hIT-csbsne8Lk58k,11610 +pip/_internal/network/cache.py,sha256=6cCD7XNrqh1d1lOSY5U-0ZXOG1YwEgMYs-VhRZVyzMA,2329 +pip/_internal/network/download.py,sha256=VTGDO01_nX-5MCdatd4Icv0F88_M8N3WnW6BevA6a0o,5151 +pip/_internal/network/lazy_wheel.py,sha256=fdXGboeP1eEZ0nLVZHqGpDDFGjiBYdSgGnkUgu92SyA,7937 +pip/_internal/network/session.py,sha256=Zs0uiyPxTpfpgSv-ZI9hK9TjasmTplBuBivOTcUiJME,15208 +pip/_internal/network/utils.py,sha256=ZPHg7u6DEcg2EvILIdPECnvPLp21OPHxNVmeXfMy-n0,4172 +pip/_internal/network/xmlrpc.py,sha256=PFCiX_nnwYxC8SFIf7J3trP40ECGjA6fl2-IVNhbkPM,1882 +pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/operations/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/operations/__pycache__/check.cpython-37.pyc,, +pip/_internal/operations/__pycache__/freeze.cpython-37.pyc,, +pip/_internal/operations/__pycache__/prepare.cpython-37.pyc,, +pip/_internal/operations/build/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/operations/build/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/operations/build/__pycache__/metadata.cpython-37.pyc,, +pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-37.pyc,, +pip/_internal/operations/build/__pycache__/wheel.cpython-37.pyc,, +pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-37.pyc,, 
+pip/_internal/operations/build/metadata.py,sha256=2aILgWCQTF1aIhWuCH8TTSjv_kYmA3x1262fT2FQ6pQ,1254 +pip/_internal/operations/build/metadata_legacy.py,sha256=VgzBTk8naIO8-8N_ifEYF7ZAxWUDhphWVIaVlZ2FqYM,2011 +pip/_internal/operations/build/wheel.py,sha256=33vdkxTO-gNqrtWH1eNL_uZo4Irax85moDx2o9zae3M,1465 +pip/_internal/operations/build/wheel_legacy.py,sha256=N1aqNZyGURBX0Bj6wPmB0t4866oMbxoHUpC9pz6FyT0,3356 +pip/_internal/operations/check.py,sha256=JYDsVLvpFyJuJq0ttStgg8TRKbc0myYFAMnfnnQOREM,5215 +pip/_internal/operations/freeze.py,sha256=_vJSZwHBNzBV0GpRUSXhUJz3BrGFdcT2aTcWxH1L4P0,10373 +pip/_internal/operations/install/__init__.py,sha256=mX7hyD2GNBO2mFGokDQ30r_GXv7Y_PLdtxcUv144e-s,51 +pip/_internal/operations/install/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/operations/install/__pycache__/editable_legacy.cpython-37.pyc,, +pip/_internal/operations/install/__pycache__/legacy.cpython-37.pyc,, +pip/_internal/operations/install/__pycache__/wheel.cpython-37.pyc,, +pip/_internal/operations/install/editable_legacy.py,sha256=rJ_xs2qtDUjpY2-n6eYlVyZiNoKbOtZXZrYrcnIELt4,1488 +pip/_internal/operations/install/legacy.py,sha256=zu3Gw54dgHtluyW5n8j5qKcAScidQXJvqB8fb0oLB-4,4281 +pip/_internal/operations/install/wheel.py,sha256=ea2nlXQbmnaVvsEtCW0hTwUEwGQJw1xg0bb2ZCw2_Yo,30057 +pip/_internal/operations/prepare.py,sha256=Rt7Yh7w10_Q-vI3b7R1wkt2R6XPX8YVUdODk-TaGI9c,19903 +pip/_internal/pyproject.py,sha256=VJKsrXORGiGoDPVKCQhuu4tWlQSTOhoiRlVLRNu4rx4,7400 +pip/_internal/req/__init__.py,sha256=s-E5Vxxqqpcs7xfY5gY69oHogsWJ4sLbnUiDoWmkHOU,3133 +pip/_internal/req/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/req/__pycache__/constructors.cpython-37.pyc,, +pip/_internal/req/__pycache__/req_file.cpython-37.pyc,, +pip/_internal/req/__pycache__/req_install.cpython-37.pyc,, +pip/_internal/req/__pycache__/req_set.cpython-37.pyc,, +pip/_internal/req/__pycache__/req_tracker.cpython-37.pyc,, +pip/_internal/req/__pycache__/req_uninstall.cpython-37.pyc,, 
+pip/_internal/req/constructors.py,sha256=LrSHbRHu52-h6HM1qJKG68o1Jw5q8MvJGfr4As6j2uU,16387 +pip/_internal/req/req_file.py,sha256=p7n3Y0q275Eisqfxd0vtfnxYvlT6TCCY0tj75p-yiOY,19448 +pip/_internal/req/req_install.py,sha256=HZselo7A2jCV6sYIBe-coNF85rb6QTgBz1-ZW_AMIzU,33645 +pip/_internal/req/req_set.py,sha256=dxcfbieWYfYkTJNE07U8xaO40zLxl8BhWOcIHVFTmoo,7886 +pip/_internal/req/req_tracker.py,sha256=qWaiejNK6o6cqeyTOIGKIU1CoyrXCcqgMHYi3cqelOA,4690 +pip/_internal/req/req_uninstall.py,sha256=opMGDGb7ZaFippRbaarJaljtzl2CNZmBGEUSnTubE-A,23706 +pip/_internal/resolution/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/resolution/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/resolution/__pycache__/base.cpython-37.pyc,, +pip/_internal/resolution/base.py,sha256=xi72YmIS-lEjyK13PN_3qkGGthA4yGoK0C6qWynyHrE,682 +pip/_internal/resolution/legacy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/resolution/legacy/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/resolution/legacy/__pycache__/resolver.cpython-37.pyc,, +pip/_internal/resolution/legacy/resolver.py,sha256=d-qW6UUxbZqKyXmX2bqnW5C8UtnO0ZcsQuKw_QXualc,18755 +pip/_internal/resolution/resolvelib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/base.cpython-37.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-37.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-37.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-37.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-37.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-37.pyc,, +pip/_internal/resolution/resolvelib/base.py,sha256=dDdffs_0baMLJjQjR7aKlbfuvtbja-lC6f6zZveFzKM,2323 
+pip/_internal/resolution/resolvelib/candidates.py,sha256=mJ5xy1932fFQMrrqg_OoQmCY9TWoM3C4JI_yAI2cjiU,20116 +pip/_internal/resolution/resolvelib/factory.py,sha256=DueukAQ1LRAwv-_JyjgIGIrw_a44RRBlXUoeFstFLQM,17169 +pip/_internal/resolution/resolvelib/provider.py,sha256=n7vfjL1UotxYYkQXNh8orjMOR1G9NNSwoGb2yK9d9VE,6081 +pip/_internal/resolution/resolvelib/requirements.py,sha256=lGvoHRhkusRfaz4cFxYBoQNqxS6TeuO3K68qlui6g-0,4511 +pip/_internal/resolution/resolvelib/resolver.py,sha256=b05AK409WdUkCQKhNp0Uh61iMw4a2hFR8fg27dQnZ-g,10077 +pip/_internal/self_outdated_check.py,sha256=q6_nqUHPpt-DScwD97h7FCSqd4nI1s-xkpOI4I5Za3Y,6779 +pip/_internal/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/utils/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/utils/__pycache__/appdirs.cpython-37.pyc,, +pip/_internal/utils/__pycache__/compat.cpython-37.pyc,, +pip/_internal/utils/__pycache__/compatibility_tags.cpython-37.pyc,, +pip/_internal/utils/__pycache__/datetime.cpython-37.pyc,, +pip/_internal/utils/__pycache__/deprecation.cpython-37.pyc,, +pip/_internal/utils/__pycache__/direct_url_helpers.cpython-37.pyc,, +pip/_internal/utils/__pycache__/distutils_args.cpython-37.pyc,, +pip/_internal/utils/__pycache__/encoding.cpython-37.pyc,, +pip/_internal/utils/__pycache__/entrypoints.cpython-37.pyc,, +pip/_internal/utils/__pycache__/filesystem.cpython-37.pyc,, +pip/_internal/utils/__pycache__/filetypes.cpython-37.pyc,, +pip/_internal/utils/__pycache__/glibc.cpython-37.pyc,, +pip/_internal/utils/__pycache__/hashes.cpython-37.pyc,, +pip/_internal/utils/__pycache__/inject_securetransport.cpython-37.pyc,, +pip/_internal/utils/__pycache__/logging.cpython-37.pyc,, +pip/_internal/utils/__pycache__/misc.cpython-37.pyc,, +pip/_internal/utils/__pycache__/models.cpython-37.pyc,, +pip/_internal/utils/__pycache__/packaging.cpython-37.pyc,, +pip/_internal/utils/__pycache__/parallel.cpython-37.pyc,, +pip/_internal/utils/__pycache__/pkg_resources.cpython-37.pyc,, 
+pip/_internal/utils/__pycache__/setuptools_build.cpython-37.pyc,, +pip/_internal/utils/__pycache__/subprocess.cpython-37.pyc,, +pip/_internal/utils/__pycache__/temp_dir.cpython-37.pyc,, +pip/_internal/utils/__pycache__/typing.cpython-37.pyc,, +pip/_internal/utils/__pycache__/unpacking.cpython-37.pyc,, +pip/_internal/utils/__pycache__/urls.cpython-37.pyc,, +pip/_internal/utils/__pycache__/virtualenv.cpython-37.pyc,, +pip/_internal/utils/__pycache__/wheel.cpython-37.pyc,, +pip/_internal/utils/appdirs.py,sha256=RZzUG-Bkh2b-miX0DSZ3v703_-bgK-v0PfWCCjwVE9g,1349 +pip/_internal/utils/compat.py,sha256=GoCSUMoUmTGeg5irQGLDZ7v12As87yHrMzBXEke-njg,8865 +pip/_internal/utils/compatibility_tags.py,sha256=EtBJj-pstj_U0STUZ8FjlG7YDTjuRZUy6GY1cM86yv8,5439 +pip/_internal/utils/datetime.py,sha256=KL-vIdGU9JIpGB5NYkmwXWkH-G_2mvvABlmRtoSZsao,295 +pip/_internal/utils/deprecation.py,sha256=pBnNogoA4UGTxa_JDnPXBRRYpKMbExAhXpBwAwklOBs,3318 +pip/_internal/utils/direct_url_helpers.py,sha256=bZCBNwPQVyZpYGjX_VcomvVvRHvKw-9JzEV-Ft09LQc,4359 +pip/_internal/utils/distutils_args.py,sha256=a56mblNxk9BGifbpEETG61mmBrqhjtjRkJ4HYn-oOEE,1350 +pip/_internal/utils/encoding.py,sha256=wHDJ25yCT_T4ySscCL3P978OpLrfDCpitg8D64IEXMY,1284 +pip/_internal/utils/entrypoints.py,sha256=vHcNpnksCv6mllihU6hfifdsKPEjwcaJ1aLIXEaynaU,1152 +pip/_internal/utils/filesystem.py,sha256=-fU3XteCAIJwf_9FvCZU7vhywvt3nuf_cqkCdwgy1Y8,6943 +pip/_internal/utils/filetypes.py,sha256=R2FwzoeX7b-rZALOXx5cuO8VPPMhUQ4ne7wm3n3IcWA,571 +pip/_internal/utils/glibc.py,sha256=LOeNGgawCKS-4ke9fii78fwXD73dtNav3uxz1Bf-Ab8,3297 +pip/_internal/utils/hashes.py,sha256=xHmrqNwC1eBN0oY0R_RXLJLXGvFdo5gwmbz_pas94k8,4358 +pip/_internal/utils/inject_securetransport.py,sha256=M17ZlFVY66ApgeASVjKKLKNz0LAfk-SyU0HZ4ZB6MmI,810 +pip/_internal/utils/logging.py,sha256=YIfuDUEkmdn9cIRQ_Ec8rgXs1m5nOwDECtZqM4CBH5U,13093 +pip/_internal/utils/misc.py,sha256=17HkwcfxBA8Y8-rD-rM39gB1vQ2U9-EkigdclDe61eo,27394 
+pip/_internal/utils/models.py,sha256=HqiBVtTbW_b_Umvj2fjhDWOHo2RKhPwSz4iAYkQZ688,1201 +pip/_internal/utils/packaging.py,sha256=VtiwcAAL7LBi7tGL2je7LeW4bE11KMHGCsJ1NZY5XtM,3035 +pip/_internal/utils/parallel.py,sha256=7az3aaTMCkqpaLFbpYYOvk0rj7Hu5YH1NPXXomqjgf4,3404 +pip/_internal/utils/pkg_resources.py,sha256=ZX-k7V5q_aNWyDse92nN7orN1aCpRLsaxzpkBZ1XKzU,1254 +pip/_internal/utils/setuptools_build.py,sha256=E1KswI7wfNnCDE5R6G8c9ZbByENpu7NqocjY26PCQDw,5058 +pip/_internal/utils/subprocess.py,sha256=UkPe89gcjxBMx73uutoeJXgD3kwdlL6YO16BkjDdVSI,9924 +pip/_internal/utils/temp_dir.py,sha256=blmG0jEvEgdxbYUt_V15bgcTIJIrxZwAw8QZlCTJYDE,8378 +pip/_internal/utils/typing.py,sha256=xkYwOeHlf4zsHXBDC4310HtEqwhQcYXFPq2h35Tcrl0,1401 +pip/_internal/utils/unpacking.py,sha256=YFAckhqqvmehA8Kan5vd3b1kN_9TafqmOk4b-yz4fho,9488 +pip/_internal/utils/urls.py,sha256=q2rw1kMiiig_XZcoyJSsWMJQqYw-2wUmrMoST4mCW_I,1527 +pip/_internal/utils/virtualenv.py,sha256=iVJ8ZlbNtGon6I4uZFsY2SidrUf1vt3YHrgS5CuU98w,3553 +pip/_internal/utils/wheel.py,sha256=wFzn3h8GqYvgsyWPZtUyn0Rb3MJzmtyP3owMOhKnmL0,7303 +pip/_internal/vcs/__init__.py,sha256=viJxJRqRE_mVScum85bgQIXAd6o0ozFt18VpC-qIJrM,617 +pip/_internal/vcs/__pycache__/__init__.cpython-37.pyc,, +pip/_internal/vcs/__pycache__/bazaar.cpython-37.pyc,, +pip/_internal/vcs/__pycache__/git.cpython-37.pyc,, +pip/_internal/vcs/__pycache__/mercurial.cpython-37.pyc,, +pip/_internal/vcs/__pycache__/subversion.cpython-37.pyc,, +pip/_internal/vcs/__pycache__/versioncontrol.cpython-37.pyc,, +pip/_internal/vcs/bazaar.py,sha256=5rRR02uDZTLaxQT-R5Obd8FZDOMlShqYds-pwVSJJs8,3887 +pip/_internal/vcs/git.py,sha256=kvB729wrKY0OWMSgOS1pUly4LosZp8utrd3kOQsWalA,13985 +pip/_internal/vcs/mercurial.py,sha256=FzCGmYzVZvB-vyM73fKcQk2B4jMNXGnXlQ2bJ7nmglM,5162 +pip/_internal/vcs/subversion.py,sha256=JZs3JnAX4flHOAXnoavs5viA8vZr00_xt_yXYf-9L2A,12247 +pip/_internal/vcs/versioncontrol.py,sha256=WpxeTRC0NoGB2uXJdmfq4pPxY-p7sk1rV_WkxMxgzQA,25966 
+pip/_internal/wheel_builder.py,sha256=6w1VPXrpUvCCPlV0cI1wNaCqNz4laF6B6whvaxl9cns,9522 +pip/_vendor/__init__.py,sha256=CsxnpYPbi_2agrDI79iQrCmQeZRcwwIF0C6cm_1RynU,4588 +pip/_vendor/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/__pycache__/appdirs.cpython-37.pyc,, +pip/_vendor/__pycache__/contextlib2.cpython-37.pyc,, +pip/_vendor/__pycache__/distro.cpython-37.pyc,, +pip/_vendor/__pycache__/ipaddress.cpython-37.pyc,, +pip/_vendor/__pycache__/pyparsing.cpython-37.pyc,, +pip/_vendor/__pycache__/retrying.cpython-37.pyc,, +pip/_vendor/__pycache__/six.cpython-37.pyc,, +pip/_vendor/appdirs.py,sha256=M6IYRJtdZgmSPCXCSMBRB0VT3P8MdFbWCDbSLrB2Ebg,25907 +pip/_vendor/cachecontrol/__init__.py,sha256=pJtAaUxOsMPnytI1A3juAJkXYDr8krdSnsg4Yg3OBEg,302 +pip/_vendor/cachecontrol/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-37.pyc,, +pip/_vendor/cachecontrol/__pycache__/adapter.cpython-37.pyc,, +pip/_vendor/cachecontrol/__pycache__/cache.cpython-37.pyc,, +pip/_vendor/cachecontrol/__pycache__/compat.cpython-37.pyc,, +pip/_vendor/cachecontrol/__pycache__/controller.cpython-37.pyc,, +pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-37.pyc,, +pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-37.pyc,, +pip/_vendor/cachecontrol/__pycache__/serialize.cpython-37.pyc,, +pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-37.pyc,, +pip/_vendor/cachecontrol/_cmd.py,sha256=URGE0KrA87QekCG3SGPatlSPT571dZTDjNa-ZXX3pDc,1295 +pip/_vendor/cachecontrol/adapter.py,sha256=sSwaSYd93IIfCFU4tOMgSo6b2LCt_gBSaQUj8ktJFOA,4882 +pip/_vendor/cachecontrol/cache.py,sha256=1fc4wJP8HYt1ycnJXeEw5pCpeBL2Cqxx6g9Fb0AYDWQ,805 +pip/_vendor/cachecontrol/caches/__init__.py,sha256=-gHNKYvaeD0kOk5M74eOrsSgIKUtC6i6GfbmugGweEo,86 +pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-37.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-37.pyc,, 
+pip/_vendor/cachecontrol/caches/file_cache.py,sha256=nYVKsJtXh6gJXvdn1iWyrhxvkwpQrK-eKoMRzuiwkKk,4153 +pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=HxelMpNCo-dYr2fiJDwM3hhhRmxUYtB5tXm1GpAAT4Y,856 +pip/_vendor/cachecontrol/compat.py,sha256=kHNvMRdt6s_Xwqq_9qJmr9ou3wYMOMUMxPPcwNxT8Mc,695 +pip/_vendor/cachecontrol/controller.py,sha256=CWEX3pedIM9s60suf4zZPtm_JvVgnvogMGK_OiBG5F8,14149 +pip/_vendor/cachecontrol/filewrapper.py,sha256=vACKO8Llzu_ZWyjV1Fxn1MA4TGU60N5N3GSrAFdAY2Q,2533 +pip/_vendor/cachecontrol/heuristics.py,sha256=BFGHJ3yQcxvZizfo90LLZ04T_Z5XSCXvFotrp7Us0sc,4070 +pip/_vendor/cachecontrol/serialize.py,sha256=vIa4jvq4x_KSOLdEIedoknX2aXYHQujLDFV4-F21Dno,7091 +pip/_vendor/cachecontrol/wrapper.py,sha256=5LX0uJwkNQUtYSEw3aGmGu9WY8wGipd81mJ8lG0d0M4,690 +pip/_vendor/certifi/__init__.py,sha256=u1E_DrSGj_nnEkK5VglvEqP8D80KpghLVWL0A_pq41A,62 +pip/_vendor/certifi/__main__.py,sha256=1k3Cr95vCxxGRGDljrW3wMdpZdL3Nhf0u1n-k2qdsCY,255 +pip/_vendor/certifi/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/certifi/__pycache__/__main__.cpython-37.pyc,, +pip/_vendor/certifi/__pycache__/core.cpython-37.pyc,, +pip/_vendor/certifi/cacert.pem,sha256=GhT24f0R7_9y4YY_hkXwkO7BthZhRGDCEMO348E9S14,282394 +pip/_vendor/certifi/core.py,sha256=jBrwKEWpG0IKcuozK0BQ2HHGp8adXAOyBPC7ddgR6vM,2315 +pip/_vendor/chardet/__init__.py,sha256=YsP5wQlsHJ2auF1RZJfypiSrCA7_bQiRm3ES_NI76-Y,1559 +pip/_vendor/chardet/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/big5freq.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/big5prober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/chardistribution.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/charsetprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/compat.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/cp949prober.cpython-37.pyc,, 
+pip/_vendor/chardet/__pycache__/enums.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/escprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/escsm.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/eucjpprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/euckrfreq.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/euckrprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/euctwfreq.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/euctwprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/gb2312freq.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/gb2312prober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/hebrewprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/jisfreq.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/jpcntx.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/langcyrillicmodel.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/langthaimodel.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/latin1prober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/mbcssm.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/sjisprober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/universaldetector.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/utf8prober.cpython-37.pyc,, +pip/_vendor/chardet/__pycache__/version.cpython-37.pyc,, +pip/_vendor/chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254 
+pip/_vendor/chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757 +pip/_vendor/chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411 +pip/_vendor/chardet/charsetgroupprober.py,sha256=6bDu8YIiRuScX4ca9Igb0U69TA2PGXXDej6Cc4_9kO4,3787 +pip/_vendor/chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110 +pip/_vendor/chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +pip/_vendor/chardet/cli/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-37.pyc,, +pip/_vendor/chardet/cli/chardetect.py,sha256=DI8dlV3FBD0c0XA_y3sQ78z754DUv1J8n34RtDjOXNw,2774 +pip/_vendor/chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590 +pip/_vendor/chardet/compat.py,sha256=PKTzHkSbtbHDqS9PyujMbX74q1a8mMpeQTDVsQhZMRw,1134 +pip/_vendor/chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855 +pip/_vendor/chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661 +pip/_vendor/chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950 +pip/_vendor/chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510 +pip/_vendor/chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749 +pip/_vendor/chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546 +pip/_vendor/chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748 +pip/_vendor/chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621 +pip/_vendor/chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747 +pip/_vendor/chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715 +pip/_vendor/chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754 +pip/_vendor/chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838 
+pip/_vendor/chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777 +pip/_vendor/chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643 +pip/_vendor/chardet/langbulgarianmodel.py,sha256=1HqQS9Pbtnj1xQgxitJMvw8X6kKr5OockNCZWfEQrPE,12839 +pip/_vendor/chardet/langcyrillicmodel.py,sha256=LODajvsetH87yYDDQKA2CULXUH87tI223dhfjh9Zx9c,17948 +pip/_vendor/chardet/langgreekmodel.py,sha256=8YAW7bU8YwSJap0kIJSbPMw1BEqzGjWzqcqf0WgUKAA,12688 +pip/_vendor/chardet/langhebrewmodel.py,sha256=JSnqmE5E62tDLTPTvLpQsg5gOMO4PbdWRvV7Avkc0HA,11345 +pip/_vendor/chardet/langhungarianmodel.py,sha256=RhapYSG5l0ZaO-VV4Fan5sW0WRGQqhwBM61yx3yxyOA,12592 +pip/_vendor/chardet/langthaimodel.py,sha256=8l0173Gu_W6G8mxmQOTEF4ls2YdE7FxWf3QkSxEGXJQ,11290 +pip/_vendor/chardet/langturkishmodel.py,sha256=W22eRNJsqI6uWAfwXSKVWWnCerYqrI8dZQTm_M0lRFk,11102 +pip/_vendor/chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370 +pip/_vendor/chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413 +pip/_vendor/chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012 +pip/_vendor/chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481 +pip/_vendor/chardet/sbcharsetprober.py,sha256=LDSpCldDCFlYwUkGkwD2oFxLlPWIWXT09akH_2PiY74,5657 +pip/_vendor/chardet/sbcsgroupprober.py,sha256=1IprcCB_k1qfmnxGC6MBbxELlKqD3scW6S8YIwdeyXA,3546 +pip/_vendor/chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774 +pip/_vendor/chardet/universaldetector.py,sha256=qL0174lSZE442eB21nnktT9_VcAye07laFWUeUrjttY,12485 +pip/_vendor/chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766 +pip/_vendor/chardet/version.py,sha256=sp3B08mrDXB-pf3K9fqJ_zeDHOCLC8RrngQyDFap_7g,242 +pip/_vendor/colorama/__init__.py,sha256=DqjXH9URVP3IJwmMt7peYw50ns1RNAymIB9-XdPEFV8,239 +pip/_vendor/colorama/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/colorama/__pycache__/ansi.cpython-37.pyc,, 
+pip/_vendor/colorama/__pycache__/ansitowin32.cpython-37.pyc,, +pip/_vendor/colorama/__pycache__/initialise.cpython-37.pyc,, +pip/_vendor/colorama/__pycache__/win32.cpython-37.pyc,, +pip/_vendor/colorama/__pycache__/winterm.cpython-37.pyc,, +pip/_vendor/colorama/ansi.py,sha256=Fi0un-QLqRm-v7o_nKiOqyC8PapBJK7DLV_q9LKtTO0,2524 +pip/_vendor/colorama/ansitowin32.py,sha256=u8QaqdqS_xYSfNkPM1eRJLHz6JMWPodaJaP0mxgHCDc,10462 +pip/_vendor/colorama/initialise.py,sha256=PprovDNxMTrvoNHFcL2NZjpH2XzDc8BLxLxiErfUl4k,1915 +pip/_vendor/colorama/win32.py,sha256=bJ8Il9jwaBN5BJ8bmN6FoYZ1QYuMKv2j8fGrXh7TJjw,5404 +pip/_vendor/colorama/winterm.py,sha256=2y_2b7Zsv34feAsP67mLOVc-Bgq51mdYGo571VprlrM,6438 +pip/_vendor/contextlib2.py,sha256=5HjGflUzwWAUfcILhSmC2GqvoYdZZzFzVfIDztHigUs,16915 +pip/_vendor/distlib/__init__.py,sha256=3veAk2rPznOB2gsK6tjbbh0TQMmGE5P82eE9wXq6NIk,581 +pip/_vendor/distlib/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/compat.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/database.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/index.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/locators.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/manifest.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/markers.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/metadata.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/resources.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/scripts.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/util.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/version.cpython-37.pyc,, +pip/_vendor/distlib/__pycache__/wheel.cpython-37.pyc,, +pip/_vendor/distlib/_backport/__init__.py,sha256=bqS_dTOH6uW9iGgd0uzfpPjo6vZ4xpPZ7kyfZJ2vNaw,274 +pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/distlib/_backport/__pycache__/misc.cpython-37.pyc,, +pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-37.pyc,, +pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-37.pyc,, 
+pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-37.pyc,, +pip/_vendor/distlib/_backport/misc.py,sha256=KWecINdbFNOxSOP1fGF680CJnaC6S4fBRgEtaYTw0ig,971 +pip/_vendor/distlib/_backport/shutil.py,sha256=IX_G2NPqwecJibkIDje04bqu0xpHkfSQ2GaGdEVqM5Y,25707 +pip/_vendor/distlib/_backport/sysconfig.cfg,sha256=swZKxq9RY5e9r3PXCrlvQPMsvOdiWZBTHLEbqS8LJLU,2617 +pip/_vendor/distlib/_backport/sysconfig.py,sha256=BQHFlb6pubCl_dvT1NjtzIthylofjKisox239stDg0U,26854 +pip/_vendor/distlib/_backport/tarfile.py,sha256=Ihp7rXRcjbIKw8COm9wSePV9ARGXbSF9gGXAMn2Q-KU,92628 +pip/_vendor/distlib/compat.py,sha256=ADA56xiAxar3mU6qemlBhNbsrFPosXRhO44RzsbJPqk,41408 +pip/_vendor/distlib/database.py,sha256=Kl0YvPQKc4OcpVi7k5cFziydM1xOK8iqdxLGXgbZHV4,51059 +pip/_vendor/distlib/index.py,sha256=SXKzpQCERctxYDMp_OLee2f0J0e19ZhGdCIoMlUfUQM,21066 +pip/_vendor/distlib/locators.py,sha256=c9E4cDEacJ_uKbuE5BqAVocoWp6rsuBGTkiNDQq3zV4,52100 +pip/_vendor/distlib/manifest.py,sha256=nQEhYmgoreaBZzyFzwYsXxJARu3fo4EkunU163U16iE,14811 +pip/_vendor/distlib/markers.py,sha256=6Ac3cCfFBERexiESWIOXmg-apIP8l2esafNSX3KMy-8,4387 +pip/_vendor/distlib/metadata.py,sha256=z2KPy3h3tcDnb9Xs7nAqQ5Oz0bqjWAUFmKWcFKRoodg,38962 +pip/_vendor/distlib/resources.py,sha256=2FGv0ZHF14KXjLIlL0R991lyQQGcewOS4mJ-5n-JVnc,10766 +pip/_vendor/distlib/scripts.py,sha256=_MAj3sMuv56kuM8FsiIWXqbT0gmumPGaOR_atOzn4a4,17180 +pip/_vendor/distlib/t32.exe,sha256=NS3xBCVAld35JVFNmb-1QRyVtThukMrwZVeXn4LhaEQ,96768 +pip/_vendor/distlib/t64.exe,sha256=oAqHes78rUWVM0OtVqIhUvequl_PKhAhXYQWnUf7zR0,105984 +pip/_vendor/distlib/util.py,sha256=f2jZCPrcLCt6LcnC0gUy-Fur60tXD8reA7k4rDpHMDw,59845 +pip/_vendor/distlib/version.py,sha256=_n7F6juvQGAcn769E_SHa7fOcf5ERlEVymJ_EjPRwGw,23391 +pip/_vendor/distlib/w32.exe,sha256=lJtnZdeUxTZWya_EW5DZos_K5rswRECGspIl8ZJCIXs,90112 +pip/_vendor/distlib/w64.exe,sha256=0aRzoN2BO9NWW4ENy4_4vHkHR4qZTFZNVSAJJYlODTI,99840 +pip/_vendor/distlib/wheel.py,sha256=v6DnwTqhNHwrEVFr8_YeiTW6G4ftP_evsywNgrmdb2o,41144 
+pip/_vendor/distro.py,sha256=xxMIh2a3KmippeWEHzynTdHT3_jZM0o-pos0dAWJROM,43628 +pip/_vendor/html5lib/__init__.py,sha256=BYzcKCqeEii52xDrqBFruhnmtmkiuHXFyFh-cglQ8mk,1160 +pip/_vendor/html5lib/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-37.pyc,, +pip/_vendor/html5lib/__pycache__/_inputstream.cpython-37.pyc,, +pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-37.pyc,, +pip/_vendor/html5lib/__pycache__/_utils.cpython-37.pyc,, +pip/_vendor/html5lib/__pycache__/constants.cpython-37.pyc,, +pip/_vendor/html5lib/__pycache__/html5parser.cpython-37.pyc,, +pip/_vendor/html5lib/__pycache__/serializer.cpython-37.pyc,, +pip/_vendor/html5lib/_ihatexml.py,sha256=ifOwF7pXqmyThIXc3boWc96s4MDezqRrRVp7FwDYUFs,16728 +pip/_vendor/html5lib/_inputstream.py,sha256=jErNASMlkgs7MpOM9Ve_VdLDJyFFweAjLuhVutZz33U,32353 +pip/_vendor/html5lib/_tokenizer.py,sha256=04mgA2sNTniutl2fxFv-ei5bns4iRaPxVXXHh_HrV_4,77040 +pip/_vendor/html5lib/_trie/__init__.py,sha256=nqfgO910329BEVJ5T4psVwQtjd2iJyEXQ2-X8c1YxwU,109 +pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-37.pyc,, +pip/_vendor/html5lib/_trie/__pycache__/py.cpython-37.pyc,, +pip/_vendor/html5lib/_trie/_base.py,sha256=CaybYyMro8uERQYjby2tTeSUatnWDfWroUN9N7ety5w,1013 +pip/_vendor/html5lib/_trie/py.py,sha256=wXmQLrZRf4MyWNyg0m3h81m9InhLR7GJ002mIIZh-8o,1775 +pip/_vendor/html5lib/_utils.py,sha256=Dx9AKntksRjFT1veBj7I362pf5OgIaT0zglwq43RnfU,4931 +pip/_vendor/html5lib/constants.py,sha256=Ll-yzLU_jcjyAI_h57zkqZ7aQWE5t5xA4y_jQgoUUhw,83464 +pip/_vendor/html5lib/filters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-37.pyc,, +pip/_vendor/html5lib/filters/__pycache__/base.cpython-37.pyc,, +pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-37.pyc,, 
+pip/_vendor/html5lib/filters/__pycache__/lint.cpython-37.pyc,, +pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-37.pyc,, +pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-37.pyc,, +pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-37.pyc,, +pip/_vendor/html5lib/filters/alphabeticalattributes.py,sha256=lViZc2JMCclXi_5gduvmdzrRxtO5Xo9ONnbHBVCsykU,919 +pip/_vendor/html5lib/filters/base.py,sha256=z-IU9ZAYjpsVsqmVt7kuWC63jR11hDMr6CVrvuao8W0,286 +pip/_vendor/html5lib/filters/inject_meta_charset.py,sha256=egDXUEHXmAG9504xz0K6ALDgYkvUrC2q15YUVeNlVQg,2945 +pip/_vendor/html5lib/filters/lint.py,sha256=jk6q56xY0ojiYfvpdP-OZSm9eTqcAdRqhCoPItemPYA,3643 +pip/_vendor/html5lib/filters/optionaltags.py,sha256=8lWT75J0aBOHmPgfmqTHSfPpPMp01T84NKu0CRedxcE,10588 +pip/_vendor/html5lib/filters/sanitizer.py,sha256=m6oGmkBhkGAnn2nV6D4hE78SCZ6WEnK9rKdZB3uXBIc,26897 +pip/_vendor/html5lib/filters/whitespace.py,sha256=8eWqZxd4UC4zlFGW6iyY6f-2uuT8pOCSALc3IZt7_t4,1214 +pip/_vendor/html5lib/html5parser.py,sha256=anr-aXre_ImfrkQ35c_rftKXxC80vJCREKe06Tq15HA,117186 +pip/_vendor/html5lib/serializer.py,sha256=_PpvcZF07cwE7xr9uKkZqh5f4UEaI8ltCU2xPJzaTpk,15759 +pip/_vendor/html5lib/treeadapters/__init__.py,sha256=A0rY5gXIe4bJOiSGRO_j_tFhngRBO8QZPzPtPw5dFzo,679 +pip/_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/html5lib/treeadapters/__pycache__/genshi.cpython-37.pyc,, +pip/_vendor/html5lib/treeadapters/__pycache__/sax.cpython-37.pyc,, +pip/_vendor/html5lib/treeadapters/genshi.py,sha256=CH27pAsDKmu4ZGkAUrwty7u0KauGLCZRLPMzaO3M5vo,1715 +pip/_vendor/html5lib/treeadapters/sax.py,sha256=BKS8woQTnKiqeffHsxChUqL4q2ZR_wb5fc9MJ3zQC8s,1776 +pip/_vendor/html5lib/treebuilders/__init__.py,sha256=AysSJyvPfikCMMsTVvaxwkgDieELD5dfR8FJIAuq7hY,3592 +pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/base.cpython-37.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/dom.cpython-37.pyc,, 
+pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-37.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-37.pyc,, +pip/_vendor/html5lib/treebuilders/base.py,sha256=z-o51vt9r_l2IDG5IioTOKGzZne4Fy3_Fc-7ztrOh4I,14565 +pip/_vendor/html5lib/treebuilders/dom.py,sha256=22whb0C71zXIsai5mamg6qzBEiigcBIvaDy4Asw3at0,8925 +pip/_vendor/html5lib/treebuilders/etree.py,sha256=w5ZFpKk6bAxnrwD2_BrF5EVC7vzz0L3LMi9Sxrbc_8w,12836 +pip/_vendor/html5lib/treebuilders/etree_lxml.py,sha256=9gqDjs-IxsPhBYa5cpvv2FZ1KZlG83Giusy2lFmvIkE,14766 +pip/_vendor/html5lib/treewalkers/__init__.py,sha256=OBPtc1TU5mGyy18QDMxKEyYEz0wxFUUNj5v0-XgmYhY,5719 +pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/base.cpython-37.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/dom.cpython-37.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/etree.cpython-37.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/etree_lxml.cpython-37.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/genshi.cpython-37.pyc,, +pip/_vendor/html5lib/treewalkers/base.py,sha256=ouiOsuSzvI0KgzdWP8PlxIaSNs9falhbiinAEc_UIJY,7476 +pip/_vendor/html5lib/treewalkers/dom.py,sha256=EHyFR8D8lYNnyDU9lx_IKigVJRyecUGua0mOi7HBukc,1413 +pip/_vendor/html5lib/treewalkers/etree.py,sha256=xo1L5m9VtkfpFJK0pFmkLVajhqYYVisVZn3k9kYpPkI,4551 +pip/_vendor/html5lib/treewalkers/etree_lxml.py,sha256=_b0LAVWLcVu9WaU_-w3D8f0IRSpCbjf667V-3NRdhTw,6357 +pip/_vendor/html5lib/treewalkers/genshi.py,sha256=4D2PECZ5n3ZN3qu3jMl9yY7B81jnQApBQSVlfaIuYbA,2309 +pip/_vendor/idna/__init__.py,sha256=9Nt7xpyet3DmOrPUGooDdAwmHZZu1qUAy2EaJ93kGiQ,58 +pip/_vendor/idna/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/idna/__pycache__/codec.cpython-37.pyc,, +pip/_vendor/idna/__pycache__/compat.cpython-37.pyc,, +pip/_vendor/idna/__pycache__/core.cpython-37.pyc,, +pip/_vendor/idna/__pycache__/idnadata.cpython-37.pyc,, +pip/_vendor/idna/__pycache__/intranges.cpython-37.pyc,, 
+pip/_vendor/idna/__pycache__/package_data.cpython-37.pyc,, +pip/_vendor/idna/__pycache__/uts46data.cpython-37.pyc,, +pip/_vendor/idna/codec.py,sha256=lvYb7yu7PhAqFaAIAdWcwgaWI2UmgseUua-1c0AsG0A,3299 +pip/_vendor/idna/compat.py,sha256=R-h29D-6mrnJzbXxymrWUW7iZUvy-26TQwZ0ij57i4U,232 +pip/_vendor/idna/core.py,sha256=jCoaLb3bA2tS_DDx9PpGuNTEZZN2jAzB369aP-IHYRE,11951 +pip/_vendor/idna/idnadata.py,sha256=gmzFwZWjdms3kKZ_M_vwz7-LP_SCgYfSeE03B21Qpsk,42350 +pip/_vendor/idna/intranges.py,sha256=TY1lpxZIQWEP6tNqjZkFA5hgoMWOj1OBmnUG8ihT87E,1749 +pip/_vendor/idna/package_data.py,sha256=bxBjpLnE06_1jSYKEy5svOMu1zM3OMztXVUb1tPlcp0,22 +pip/_vendor/idna/uts46data.py,sha256=lMdw2zdjkH1JUWXPPEfFUSYT3Fyj60bBmfLvvy5m7ko,202084 +pip/_vendor/ipaddress.py,sha256=-0RmurI31XgAaN20WCi0zrcuoat90nNA70_6yGlx2PU,79875 +pip/_vendor/msgpack/__init__.py,sha256=2gJwcsTIaAtCM0GMi2rU-_Y6kILeeQuqRkrQ22jSANc,1118 +pip/_vendor/msgpack/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/msgpack/__pycache__/_version.cpython-37.pyc,, +pip/_vendor/msgpack/__pycache__/exceptions.cpython-37.pyc,, +pip/_vendor/msgpack/__pycache__/ext.cpython-37.pyc,, +pip/_vendor/msgpack/__pycache__/fallback.cpython-37.pyc,, +pip/_vendor/msgpack/_version.py,sha256=hu7lzmZ_ClOaOOmRsWb4xomhzQ4UIsLsvv8KY6UysHE,20 +pip/_vendor/msgpack/exceptions.py,sha256=dCTWei8dpkrMsQDcjQk74ATl9HsIBH0ybt8zOPNqMYc,1081 +pip/_vendor/msgpack/ext.py,sha256=nV19BzE9Be8SJHrxxYJHFbvEHJaXcP3avRkHVp5wovM,6034 +pip/_vendor/msgpack/fallback.py,sha256=Z8V3iYUUPqKVy4WWTk64Vq3G0PylQIOmlWvgnMhmkdU,37133 +pip/_vendor/packaging/__about__.py,sha256=PNMsaZn4UcCHyubgROH1bl6CluduPjI5kFrSp_Zgklo,736 +pip/_vendor/packaging/__init__.py,sha256=6enbp5XgRfjBjsI9-bn00HjHf5TH21PDMOKkJW8xw-w,562 +pip/_vendor/packaging/__pycache__/__about__.cpython-37.pyc,, +pip/_vendor/packaging/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/packaging/__pycache__/_compat.cpython-37.pyc,, +pip/_vendor/packaging/__pycache__/_structures.cpython-37.pyc,, 
+pip/_vendor/packaging/__pycache__/_typing.cpython-37.pyc,, +pip/_vendor/packaging/__pycache__/markers.cpython-37.pyc,, +pip/_vendor/packaging/__pycache__/requirements.cpython-37.pyc,, +pip/_vendor/packaging/__pycache__/specifiers.cpython-37.pyc,, +pip/_vendor/packaging/__pycache__/tags.cpython-37.pyc,, +pip/_vendor/packaging/__pycache__/utils.cpython-37.pyc,, +pip/_vendor/packaging/__pycache__/version.cpython-37.pyc,, +pip/_vendor/packaging/_compat.py,sha256=MXdsGpSE_W-ZrHoC87andI4LV2FAwU7HLL-eHe_CjhU,1128 +pip/_vendor/packaging/_structures.py,sha256=ozkCX8Q8f2qE1Eic3YiQ4buDVfgz2iYevY9e7R2y3iY,2022 +pip/_vendor/packaging/_typing.py,sha256=VgA0AAvsc97KB5nF89zoudOyCMEsV7FlaXzZbYqEkzA,1824 +pip/_vendor/packaging/markers.py,sha256=V_RdoQqOUbSfy7y9o2vRk7BkzAh3yneC82cuWpKrqOg,9491 +pip/_vendor/packaging/requirements.py,sha256=F93hkn7i8NKRZP-FtdTIlhz1PUsRjhe6eRbsBXX0Uh4,4903 +pip/_vendor/packaging/specifiers.py,sha256=uYp9l13F0LcknS6d4N60ytiBgFmIhKideOq9AnsxTco,31944 +pip/_vendor/packaging/tags.py,sha256=NKMS37Zo_nWrZxgsD6zbXsXgc9edn9m160cBiLmHJdE,24067 +pip/_vendor/packaging/utils.py,sha256=RShlvnjO2CtYSD8uri32frMMFMTmB-3ihsq1-ghzLEw,1811 +pip/_vendor/packaging/version.py,sha256=Cnbm-OO9D_qd8ZTFxzFcjSavexSYFZmyeaoPvMsjgPc,15470 +pip/_vendor/pep517/__init__.py,sha256=r5uA106NGJa3slspaD2m32aFpFUiZX-mZ9vIlzAEOp4,84 +pip/_vendor/pep517/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/pep517/__pycache__/_in_process.cpython-37.pyc,, +pip/_vendor/pep517/__pycache__/build.cpython-37.pyc,, +pip/_vendor/pep517/__pycache__/check.cpython-37.pyc,, +pip/_vendor/pep517/__pycache__/colorlog.cpython-37.pyc,, +pip/_vendor/pep517/__pycache__/compat.cpython-37.pyc,, +pip/_vendor/pep517/__pycache__/dirtools.cpython-37.pyc,, +pip/_vendor/pep517/__pycache__/envbuild.cpython-37.pyc,, +pip/_vendor/pep517/__pycache__/meta.cpython-37.pyc,, +pip/_vendor/pep517/__pycache__/wrappers.cpython-37.pyc,, +pip/_vendor/pep517/_in_process.py,sha256=XrKOTURJdia5R7i3i_OQmS89LASFXE3HQXfX63qZBIE,8438 
+pip/_vendor/pep517/build.py,sha256=DN4ouyj_bd00knOKqv0KHRtN0-JezJoNNZQmcDi4juk,3335 +pip/_vendor/pep517/check.py,sha256=YoaNE3poJGpz96biVCYwtcDshwEGE2HRU5KKya9yfpY,5961 +pip/_vendor/pep517/colorlog.py,sha256=Tk9AuYm_cLF3BKTBoSTJt9bRryn0aFojIQOwbfVUTxQ,4098 +pip/_vendor/pep517/compat.py,sha256=M-5s4VNp8rjyT76ZZ_ibnPD44DYVzSQlyCEHayjtDPw,780 +pip/_vendor/pep517/dirtools.py,sha256=2mkAkAL0mRz_elYFjRKuekTJVipH1zTn4tbf1EDev84,1129 +pip/_vendor/pep517/envbuild.py,sha256=szKUFlO50X1ahQfXwz4hD9V2VE_bz9MLVPIeidsFo4w,6041 +pip/_vendor/pep517/meta.py,sha256=8mnM5lDnT4zXQpBTliJbRGfesH7iioHwozbDxALPS9Y,2463 +pip/_vendor/pep517/wrappers.py,sha256=yFU4Lp7TIYbmuVOTY-pXnlyGZ3F_grIi-JlLkpGN8Gk,10783 +pip/_vendor/pkg_resources/__init__.py,sha256=XpGBfvS9fafA6bm5rx7vnxdxs7yqyoc_NnpzKApkJ64,108277 +pip/_vendor/pkg_resources/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-37.pyc,, +pip/_vendor/pkg_resources/py31compat.py,sha256=CRk8fkiPRDLsbi5pZcKsHI__Pbmh_94L8mr9Qy9Ab2U,562 +pip/_vendor/progress/__init__.py,sha256=fcbQQXo5np2CoQyhSH5XprkicwLZNLePR3uIahznSO0,4857 +pip/_vendor/progress/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/progress/__pycache__/bar.cpython-37.pyc,, +pip/_vendor/progress/__pycache__/counter.cpython-37.pyc,, +pip/_vendor/progress/__pycache__/spinner.cpython-37.pyc,, +pip/_vendor/progress/bar.py,sha256=QuDuVNcmXgpxtNtxO0Fq72xKigxABaVmxYGBw4J3Z_E,2854 +pip/_vendor/progress/counter.py,sha256=MznyBrvPWrOlGe4MZAlGUb9q3aODe6_aNYeAE_VNoYA,1372 +pip/_vendor/progress/spinner.py,sha256=k8JbDW94T0-WXuXfxZIFhdoNPYp3jfnpXqBnfRv5fGs,1380 +pip/_vendor/pyparsing.py,sha256=J1b4z3S_KwyJW7hKGnoN-hXW9pgMIzIP6QThyY5yJq4,273394 +pip/_vendor/requests/__init__.py,sha256=orzv4-1uejMDc2v3LnTVneINGXiwqXSfrASoFBsYblE,4465 +pip/_vendor/requests/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/__version__.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/_internal_utils.cpython-37.pyc,, 
+pip/_vendor/requests/__pycache__/adapters.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/api.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/auth.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/certs.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/compat.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/cookies.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/exceptions.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/help.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/hooks.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/models.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/packages.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/sessions.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/status_codes.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/structures.cpython-37.pyc,, +pip/_vendor/requests/__pycache__/utils.cpython-37.pyc,, +pip/_vendor/requests/__version__.py,sha256=Xwky1FMlMkJJGidBM50JC7FKcosWzkjIW-WhQGrBdFM,441 +pip/_vendor/requests/_internal_utils.py,sha256=Zx3PnEUccyfsB-ie11nZVAW8qClJy0gx1qNME7rgT18,1096 +pip/_vendor/requests/adapters.py,sha256=e-bmKEApNVqFdylxuMJJfiaHdlmS_zhWhIMEzlHvGuc,21548 +pip/_vendor/requests/api.py,sha256=PlHM-HT3PQ5lyufoeGmV-nJxRi7UnUyGVh7OV7B9XV4,6496 +pip/_vendor/requests/auth.py,sha256=OMoJIVKyRLy9THr91y8rxysZuclwPB-K1Xg1zBomUhQ,10207 +pip/_vendor/requests/certs.py,sha256=nXRVq9DtGmv_1AYbwjTu9UrgAcdJv05ZvkNeaoLOZxY,465 +pip/_vendor/requests/compat.py,sha256=LQWuCR4qXk6w7-qQopXyz0WNHUdAD40k0mKnaAEf1-g,2045 +pip/_vendor/requests/cookies.py,sha256=Y-bKX6TvW3FnYlE6Au0SXtVVWcaNdFvuAwQxw-G0iTI,18430 +pip/_vendor/requests/exceptions.py,sha256=d9fJJw8YFBB9VzG9qhvxLuOx6be3c_Dwbck-dVUEAcs,3173 +pip/_vendor/requests/help.py,sha256=SJPVcoXeo7KfK4AxJN5eFVQCjr0im87tU2n7ubLsksU,3578 +pip/_vendor/requests/hooks.py,sha256=QReGyy0bRcr5rkwCuObNakbYsc7EkiKeBwG4qHekr2Q,757 +pip/_vendor/requests/models.py,sha256=_tKIbrscbGvaTdX1UHCwRaiYmPF9VBIuBeydr4Qx1Tg,34287 
+pip/_vendor/requests/packages.py,sha256=njJmVifY4aSctuW3PP5EFRCxjEwMRDO6J_feG2dKWsI,695 +pip/_vendor/requests/sessions.py,sha256=OBtwQs1vjkB1xamFdi_p5y8BVeX16BJoQcwSwx_Y3fI,29316 +pip/_vendor/requests/status_codes.py,sha256=gT79Pbs_cQjBgp-fvrUgg1dn2DQO32bDj4TInjnMPSc,4188 +pip/_vendor/requests/structures.py,sha256=msAtr9mq1JxHd-JRyiILfdFlpbJwvvFuP3rfUQT_QxE,3005 +pip/_vendor/requests/utils.py,sha256=VBs99cvV8Z29WGXeWZqHzZ80_nu1AwwjYzJfe0wQIvs,30176 +pip/_vendor/resolvelib/__init__.py,sha256=sqMOy4CbVJQiaG9bCPj0oAntGAVy-RWdPfVaC9XDIEQ,537 +pip/_vendor/resolvelib/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/resolvelib/__pycache__/providers.cpython-37.pyc,, +pip/_vendor/resolvelib/__pycache__/reporters.cpython-37.pyc,, +pip/_vendor/resolvelib/__pycache__/resolvers.cpython-37.pyc,, +pip/_vendor/resolvelib/__pycache__/structs.cpython-37.pyc,, +pip/_vendor/resolvelib/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-37.pyc,, +pip/_vendor/resolvelib/compat/collections_abc.py,sha256=mtTkpr3Gf3OGvU1PD8YuvrJRhVbioxV82T-niFPoX3o,127 +pip/_vendor/resolvelib/providers.py,sha256=TZDCmL-Ic-R5JRIZY8G4FLG5xB2343B0DfuK7aw2Yqw,4547 +pip/_vendor/resolvelib/reporters.py,sha256=ZPSJnVfK8WvXTbX8jE0Nren0-_Hg9ym4epCUPtU8Y0U,1405 +pip/_vendor/resolvelib/resolvers.py,sha256=lQTGcc-2fgHbmdiLzeNDUxVmGc5ZFjkAL6JrVqnqJIw,15018 +pip/_vendor/resolvelib/structs.py,sha256=yrdhd-n7DercimPGclXe20rgqhlxw8PnxC0wmcXO19Y,2016 +pip/_vendor/retrying.py,sha256=k3fflf5_Mm0XcIJYhB7Tj34bqCCPhUDkYbx1NvW2FPE,9972 +pip/_vendor/six.py,sha256=U4Z_yv534W5CNyjY9i8V1OXY2SjAny8y2L5vDLhhThM,34159 +pip/_vendor/toml/__init__.py,sha256=rJ1pu933HgUtyeeNiusoPd5jJOPNhaKHhSSld3o8AQo,747 +pip/_vendor/toml/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/toml/__pycache__/common.cpython-37.pyc,, +pip/_vendor/toml/__pycache__/decoder.cpython-37.pyc,, 
+pip/_vendor/toml/__pycache__/encoder.cpython-37.pyc,, +pip/_vendor/toml/__pycache__/ordered.cpython-37.pyc,, +pip/_vendor/toml/__pycache__/tz.cpython-37.pyc,, +pip/_vendor/toml/common.py,sha256=ViBccAduP6eZNJAb1POhRhjOAi56TDsNgWJ1TjgXAug,242 +pip/_vendor/toml/decoder.py,sha256=atpXmyFCzNGiqhkcYLySBuJQkPeSHDzBz47sEaX1amw,38696 +pip/_vendor/toml/encoder.py,sha256=fPqLyFdPAam17X9SELz2TMp9affkfHCmgWZxRKcmzhY,9955 +pip/_vendor/toml/ordered.py,sha256=UWt5Eka90IWVBYdvLgY5PXnkBcVYpHjnw9T67rM85T8,378 +pip/_vendor/toml/tz.py,sha256=DrAgI3wZxZiGcLuV_l8ueA_nPrYoxQ3hZA9tJSjWRsQ,618 +pip/_vendor/urllib3/__init__.py,sha256=rdFZCO1L7e8861ZTvo8AiSKwxCe9SnWQUQwJ599YV9c,2683 +pip/_vendor/urllib3/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/urllib3/__pycache__/_collections.cpython-37.pyc,, +pip/_vendor/urllib3/__pycache__/connection.cpython-37.pyc,, +pip/_vendor/urllib3/__pycache__/connectionpool.cpython-37.pyc,, +pip/_vendor/urllib3/__pycache__/exceptions.cpython-37.pyc,, +pip/_vendor/urllib3/__pycache__/fields.cpython-37.pyc,, +pip/_vendor/urllib3/__pycache__/filepost.cpython-37.pyc,, +pip/_vendor/urllib3/__pycache__/poolmanager.cpython-37.pyc,, +pip/_vendor/urllib3/__pycache__/request.cpython-37.pyc,, +pip/_vendor/urllib3/__pycache__/response.cpython-37.pyc,, +pip/_vendor/urllib3/_collections.py,sha256=GouVsNzwg6jADZTmimMI6oqmwKSswnMo9dh5tGNVWO4,10792 +pip/_vendor/urllib3/connection.py,sha256=Fln8a_bkegdNMkFoSOwyI0PJvL1OqzVUO6ifihKOTpc,14461 +pip/_vendor/urllib3/connectionpool.py,sha256=egdaX-Db_LVXifDxv3JY0dHIpQqDv0wC0_9Eeh8FkPM,35725 +pip/_vendor/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-37.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-37.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-37.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-37.pyc,, 
+pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-37.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-37.pyc,, +pip/_vendor/urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957 +pip/_vendor/urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-37.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-37.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/bindings.py,sha256=mullWYFaghBdRWla6HYU-TBgFRTPLBEfxj3jplbeJmQ,16886 +pip/_vendor/urllib3/contrib/_securetransport/low_level.py,sha256=V7GnujxnWZh2N2sMsV5N4d9Imymokkm3zBwgt77_bSE,11956 +pip/_vendor/urllib3/contrib/appengine.py,sha256=gfdK4T7CRin7v9HRhHDbDh-Hbk66hHDWeoz7nV3PJo8,11034 +pip/_vendor/urllib3/contrib/ntlmpool.py,sha256=a402AwGN_Ll3N-4ur_AS6UrU-ycUtlnYqoBF76lORg8,4160 +pip/_vendor/urllib3/contrib/pyopenssl.py,sha256=9gm5kpC0ScbDCWobeCrh5LDqS8HgU8FNhmk5v8qQ5Bs,16582 +pip/_vendor/urllib3/contrib/securetransport.py,sha256=vBDFjSnH2gWa-ztMKVaiwW46K1mlDZKqvo_VAonfdcY,32401 +pip/_vendor/urllib3/contrib/socks.py,sha256=nzDMgDIFJWVubKHqvIn2-SKCO91hhJInP92WgHChGzA,7036 +pip/_vendor/urllib3/exceptions.py,sha256=D2Jvab7M7m_n0rnmBmq481paoVT32VvVeB6VeQM0y-w,7172 +pip/_vendor/urllib3/fields.py,sha256=kroD76QK-GdHHW7f_AUN4XxDC3OQPI2FFrS9eSL4BCs,8553 +pip/_vendor/urllib3/filepost.py,sha256=vj0qbrpT1AFzvvW4SuC8M5kJiw7wftHcSr-7b8UpPpw,2440 +pip/_vendor/urllib3/packages/__init__.py,sha256=h4BLhD4tLaBx1adaDtKXfupsgqY0wWLXb_f1_yVlV6A,108 +pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/urllib3/packages/__pycache__/six.cpython-37.pyc,, +pip/_vendor/urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-37.pyc,, +pip/_vendor/urllib3/packages/backports/makefile.py,sha256=005wrvH-_pWSnTFqQ2sdzzh4zVCtQUUQ4mR2Yyxwc0A,1418 +pip/_vendor/urllib3/packages/six.py,sha256=adx4z-eM_D0Vvu0IIqVzFACQ_ux9l64y7DkSEfbxCDs,32536 +pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py,sha256=ywgKMtfHi1-DrXlzPfVAhzsLzzqcK7GT6eLgdode1Fg,688 +pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-37.pyc,, +pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py,sha256=rvQDQviqQLtPJB6MfEgABnBFj3nXft7ZJ3Dx-BC0AQY,5696 +pip/_vendor/urllib3/poolmanager.py,sha256=iWEAIGrVNGoOmQyfiFwCqG-IyYy6GIQ-jJ9QCsX9li4,17861 +pip/_vendor/urllib3/request.py,sha256=hhoHvEEatyd9Tn5EbGjQ0emn-ENMCyY591yNWTneINA,6018 +pip/_vendor/urllib3/response.py,sha256=eo1Sfkn2x44FtjgP3qwwDsG9ak84spQAxEGy7Ovd4Pc,28221 +pip/_vendor/urllib3/util/__init__.py,sha256=bWNaav_OT-1L7-sxm59cGb59rDORlbhb_4noduM5m0U,1038 +pip/_vendor/urllib3/util/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/urllib3/util/__pycache__/connection.cpython-37.pyc,, +pip/_vendor/urllib3/util/__pycache__/queue.cpython-37.pyc,, +pip/_vendor/urllib3/util/__pycache__/request.cpython-37.pyc,, +pip/_vendor/urllib3/util/__pycache__/response.cpython-37.pyc,, +pip/_vendor/urllib3/util/__pycache__/retry.cpython-37.pyc,, +pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-37.pyc,, +pip/_vendor/urllib3/util/__pycache__/timeout.cpython-37.pyc,, +pip/_vendor/urllib3/util/__pycache__/url.cpython-37.pyc,, +pip/_vendor/urllib3/util/__pycache__/wait.cpython-37.pyc,, +pip/_vendor/urllib3/util/connection.py,sha256=NsxUAKQ98GKywta--zg57CdVpeTCI6N-GElCq78Dl8U,4637 +pip/_vendor/urllib3/util/queue.py,sha256=myTX3JDHntglKQNBf3b6dasHH-uF-W59vzGSQiFdAfI,497 
+pip/_vendor/urllib3/util/request.py,sha256=C-6-AWffxZG03AdRGoY59uqsn4CVItKU6gjxz7Hc3Mc,3815 +pip/_vendor/urllib3/util/response.py,sha256=_WbTQr8xRQuJuY2rTIZxVdJD6mnEOtQupjaK_bF_Vj8,2573 +pip/_vendor/urllib3/util/retry.py,sha256=3wbv7SdzYNOxPcBiFkPCubTbK1_6vWSepznOXirhUfA,15543 +pip/_vendor/urllib3/util/ssl_.py,sha256=N7gqt2iqzKBsWGmc61YeKNSPri6Ns2iZ_MD5hV2y8tU,14523 +pip/_vendor/urllib3/util/timeout.py,sha256=3qawUo-TZq4q7tyeRToMIOdNGEOBjOOQVq7nHnLryP4,9947 +pip/_vendor/urllib3/util/url.py,sha256=S4YyAwWKJPjFFECC7l9Vp9EKqRH1XAb-uQFANn1Tak0,13981 +pip/_vendor/urllib3/util/wait.py,sha256=k46KzqIYu3Vnzla5YW3EvtInNlU_QycFqQAghIOxoAg,5406 +pip/_vendor/vendor.txt,sha256=bWUiaRjMJhuUsqFZHEJkBH_6lJ_Avl9cOyszcI74IHs,437 +pip/_vendor/webencodings/__init__.py,sha256=qOBJIuPy_4ByYH6W_bNgJF-qYQ2DoU-dKsDu5yRWCXg,10579 +pip/_vendor/webencodings/__pycache__/__init__.cpython-37.pyc,, +pip/_vendor/webencodings/__pycache__/labels.cpython-37.pyc,, +pip/_vendor/webencodings/__pycache__/mklabels.cpython-37.pyc,, +pip/_vendor/webencodings/__pycache__/tests.cpython-37.pyc,, +pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-37.pyc,, +pip/_vendor/webencodings/labels.py,sha256=4AO_KxTddqGtrL9ns7kAPjb0CcN6xsCIxbK37HY9r3E,8979 +pip/_vendor/webencodings/mklabels.py,sha256=GYIeywnpaLnP0GSic8LFWgd0UVvO_l1Nc6YoF-87R_4,1305 +pip/_vendor/webencodings/tests.py,sha256=OtGLyjhNY1fvkW1GvLJ_FV9ZoqC9Anyjr7q3kxTbzNs,6563 +pip/_vendor/webencodings/x_user_defined.py,sha256=yOqWSdmpytGfUgh_Z6JYgDNhoc-BAHyyeeT15Fr42tM,4307 diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-19.0.3.dist-info/WHEEL b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-20.2.dist-info/WHEEL similarity index 70% rename from Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-19.0.3.dist-info/WHEEL rename to Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-20.2.dist-info/WHEEL index c8240f03..ef99c6cf 100644 --- 
a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-19.0.3.dist-info/WHEEL +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-20.2.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) +Generator: bdist_wheel (0.34.2) Root-Is-Purelib: true Tag: py2-none-any Tag: py3-none-any diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-20.2.dist-info/entry_points.txt b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-20.2.dist-info/entry_points.txt new file mode 100644 index 00000000..d48bd8a8 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-20.2.dist-info/entry_points.txt @@ -0,0 +1,5 @@ +[console_scripts] +pip = pip._internal.cli.main:main +pip3 = pip._internal.cli.main:main +pip3.8 = pip._internal.cli.main:main + diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-19.0.3.dist-info/top_level.txt b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-20.2.dist-info/top_level.txt similarity index 100% rename from Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-19.0.3.dist-info/top_level.txt rename to Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip-20.2.dist-info/top_level.txt diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/__init__.py index f48c1ca6..b67e61d0 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/__init__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/__init__.py @@ -1 +1,18 @@ -__version__ = "19.0.3" +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional + + +__version__ = "20.2" + + +def main(args=None): + # type: (Optional[List[str]]) -> int + """This is an internal API only meant for use by pip's own console scripts. 
+ + For additional details, see https://github.com/pypa/pip/issues/7498. + """ + from pip._internal.utils.entrypoints import _wrapper + + return _wrapper(args) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/__main__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/__main__.py index 0c223f8c..7c2505fa 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/__main__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/__main__.py @@ -3,6 +3,13 @@ from __future__ import absolute_import import os import sys +# Remove '' and current working directory from the first entry +# of sys.path, if present to avoid using current directory +# in pip commands check, freeze, install, list and show, +# when invoked as python -m pip <command> +if sys.path[0] in ('', os.getcwd()): + sys.path.pop(0) + # If we are running from a wheel, add the wheel to sys.path # This allows the usage python pip-*.whl/pip install pip-*.whl if __package__ == '': @@ -13,7 +20,7 @@ if __package__ == '': path = os.path.dirname(os.path.dirname(__file__)) sys.path.insert(0, path) -from pip._internal import main as _main # isort:skip # noqa +from pip._internal.cli.main import main as _main # isort:skip # noqa if __name__ == '__main__': sys.exit(_main()) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/__init__.py index 276124df..264c2cab 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/__init__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/__init__.py @@ -1,78 +1,17 @@ -#!/usr/bin/env python -from __future__ import absolute_import +import pip._internal.utils.inject_securetransport # noqa +from pip._internal.utils.typing import MYPY_CHECK_RUNNING -import locale -import logging -import os -import warnings - -import sys - -# 2016-06-17 
barry@debian.org: urllib3 1.14 added optional support for socks, -# but if invoked (i.e. imported), it will issue a warning to stderr if socks -# isn't available. requests unconditionally imports urllib3's socks contrib -# module, triggering this warning. The warning breaks DEP-8 tests (because of -# the stderr output) and is just plain annoying in normal usage. I don't want -# to add socks as yet another dependency for pip, nor do I want to allow-stder -# in the DEP-8 tests, so just suppress the warning. pdb tells me this has to -# be done before the import of pip.vcs. -from pip._vendor.urllib3.exceptions import DependencyWarning -warnings.filterwarnings("ignore", category=DependencyWarning) # noqa - -# We want to inject the use of SecureTransport as early as possible so that any -# references or sessions or what have you are ensured to have it, however we -# only want to do this in the case that we're running on macOS and the linked -# OpenSSL is too old to handle TLSv1.2 -try: - import ssl -except ImportError: - pass -else: - # Checks for OpenSSL 1.0.1 on MacOS - if sys.platform == "darwin" and ssl.OPENSSL_VERSION_NUMBER < 0x1000100f: - try: - from pip._vendor.urllib3.contrib import securetransport - except (ImportError, OSError): - pass - else: - securetransport.inject_into_urllib3() - -from pip._internal.cli.autocompletion import autocomplete -from pip._internal.cli.main_parser import parse_command -from pip._internal.commands import commands_dict -from pip._internal.exceptions import PipError -from pip._internal.utils import deprecation -from pip._internal.vcs import git, mercurial, subversion, bazaar # noqa -from pip._vendor.urllib3.exceptions import InsecureRequestWarning - -logger = logging.getLogger(__name__) - -# Hide the InsecureRequestWarning from urllib3 -warnings.filterwarnings("ignore", category=InsecureRequestWarning) +if MYPY_CHECK_RUNNING: + from typing import Optional, List def main(args=None): - if args is None: - args = sys.argv[1:] - - # 
Configure our deprecation warnings to be sent through loggers - deprecation.install_warning_logger() - - autocomplete() + # type: (Optional[List[str]]) -> int + """This is preserved for old console scripts that may still be referencing + it. - try: - cmd_name, cmd_args = parse_command(args) - except PipError as exc: - sys.stderr.write("ERROR: %s" % exc) - sys.stderr.write(os.linesep) - sys.exit(1) + For additional details, see https://github.com/pypa/pip/issues/7498. + """ + from pip._internal.utils.entrypoints import _wrapper - # Needed for locale.getpreferredencoding(False) to work - # in pip._internal.utils.encoding.auto_decode - try: - locale.setlocale(locale.LC_ALL, '') - except locale.Error as e: - # setlocale can apparently crash if locale are uninitialized - logger.debug("Ignoring error %s when setting locale", e) - command = commands_dict[cmd_name](isolated=("--isolated" in cmd_args)) - return command.main(cmd_args) + return _wrapper(args) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/build_env.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/build_env.py index d744cc78..28d1ad68 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/build_env.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/build_env.py @@ -12,14 +12,15 @@ from sysconfig import get_paths from pip._vendor.pkg_resources import Requirement, VersionConflict, WorkingSet from pip import __file__ as pip_location -from pip._internal.utils.misc import call_subprocess -from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.cli.spinners import open_spinner +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.utils.ui import open_spinner if MYPY_CHECK_RUNNING: - from typing import Tuple, Set, 
Iterable, Optional, List # noqa: F401 - from pip._internal.index import PackageFinder # noqa: F401 + from types import TracebackType + from typing import Tuple, Set, Iterable, Optional, List, Type + from pip._internal.index.package_finder import PackageFinder logger = logging.getLogger(__name__) @@ -50,11 +51,12 @@ class BuildEnvironment(object): def __init__(self): # type: () -> None - self._temp_dir = TempDirectory(kind="build-env") - self._temp_dir.create() + temp_dir = TempDirectory( + kind=tempdir_kinds.BUILD_ENV, globally_managed=True + ) self._prefixes = OrderedDict(( - (name, _Prefix(os.path.join(self._temp_dir.path, name))) + (name, _Prefix(os.path.join(temp_dir.path, name))) for name in ('normal', 'overlay') )) @@ -73,7 +75,7 @@ class BuildEnvironment(object): get_python_lib(plat_specific=True), ) } - self._site_dir = os.path.join(self._temp_dir.path, 'site') + self._site_dir = os.path.join(temp_dir.path, 'site') if not os.path.exists(self._site_dir): os.mkdir(self._site_dir) with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp: @@ -105,6 +107,7 @@ class BuildEnvironment(object): ).format(system_sites=system_sites, lib_dirs=self._lib_dirs)) def __enter__(self): + # type: () -> None self._save_env = { name: os.environ.get(name, None) for name in ('PATH', 'PYTHONNOUSERSITE', 'PYTHONPATH') @@ -123,17 +126,19 @@ class BuildEnvironment(object): 'PYTHONPATH': os.pathsep.join(pythonpath), }) - def __exit__(self, exc_type, exc_val, exc_tb): + def __exit__( + self, + exc_type, # type: Optional[Type[BaseException]] + exc_val, # type: Optional[BaseException] + exc_tb # type: Optional[TracebackType] + ): + # type: (...) 
-> None for varname, old_value in self._save_env.items(): if old_value is None: os.environ.pop(varname, None) else: os.environ[varname] = old_value - def cleanup(self): - # type: () -> None - self._temp_dir.cleanup() - def check_requirements(self, reqs): # type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]] """Return 2 sets: @@ -158,7 +163,7 @@ class BuildEnvironment(object): finder, # type: PackageFinder requirements, # type: Iterable[str] prefix_as_string, # type: str - message # type: Optional[str] + message # type: str ): # type: (...) -> None prefix = self._prefixes[prefix_as_string] @@ -177,22 +182,27 @@ class BuildEnvironment(object): formats = getattr(finder.format_control, format_control) args.extend(('--' + format_control.replace('_', '-'), ','.join(sorted(formats or {':none:'})))) - if finder.index_urls: - args.extend(['-i', finder.index_urls[0]]) - for extra_index in finder.index_urls[1:]: + + index_urls = finder.index_urls + if index_urls: + args.extend(['-i', index_urls[0]]) + for extra_index in index_urls[1:]: args.extend(['--extra-index-url', extra_index]) else: args.append('--no-index') for link in finder.find_links: args.extend(['--find-links', link]) - for _, host, _ in finder.secure_origins: + + for host in finder.trusted_hosts: args.extend(['--trusted-host', host]) if finder.allow_all_prereleases: args.append('--pre') + if finder.prefer_binary: + args.append('--prefer-binary') args.append('--') args.extend(requirements) with open_spinner(message) as spinner: - call_subprocess(args, show_stdout=False, spinner=spinner) + call_subprocess(args, spinner=spinner) class NoOpBuildEnvironment(BuildEnvironment): @@ -200,16 +210,32 @@ class NoOpBuildEnvironment(BuildEnvironment): """ def __init__(self): + # type: () -> None pass def __enter__(self): + # type: () -> None pass - def __exit__(self, exc_type, exc_val, exc_tb): + def __exit__( + self, + exc_type, # type: Optional[Type[BaseException]] + exc_val, # type: Optional[BaseException] + 
exc_tb # type: Optional[TracebackType] + ): + # type: (...) -> None pass def cleanup(self): + # type: () -> None pass - def install_requirements(self, finder, requirements, prefix, message): + def install_requirements( + self, + finder, # type: PackageFinder + requirements, # type: Iterable[str] + prefix_as_string, # type: str + message # type: str + ): + # type: (...) -> None raise NotImplementedError() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cache.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cache.py index eb295c4e..07db948b 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cache.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cache.py @@ -1,27 +1,38 @@ """Cache Management """ -import errno import hashlib +import json import logging import os +from pip._vendor.packaging.tags import interpreter_name, interpreter_version from pip._vendor.packaging.utils import canonicalize_name -from pip._internal.download import path_to_url +from pip._internal.exceptions import InvalidWheelFilename from pip._internal.models.link import Link -from pip._internal.utils.compat import expanduser -from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.models.wheel import Wheel +from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.wheel import InvalidWheelFilename, Wheel +from pip._internal.utils.urls import path_to_url if MYPY_CHECK_RUNNING: - from typing import Optional, Set, List, Any # noqa: F401 - from pip._internal.index import FormatControl # noqa: F401 + from typing import Optional, Set, List, Any, Dict + + from pip._vendor.packaging.tags import Tag + + from pip._internal.models.format_control import FormatControl logger = logging.getLogger(__name__) +def _hash_dict(d): + # type: (Dict[str, str]) -> str + """Return a stable 
sha224 of a dictionary.""" + s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True) + return hashlib.sha224(s.encode("ascii")).hexdigest() + + class Cache(object): """An abstract class - provides cache directories for data from links @@ -36,16 +47,19 @@ class Cache(object): def __init__(self, cache_dir, format_control, allowed_formats): # type: (str, FormatControl, Set[str]) -> None super(Cache, self).__init__() - self.cache_dir = expanduser(cache_dir) if cache_dir else None + assert not cache_dir or os.path.isabs(cache_dir) + self.cache_dir = cache_dir or None self.format_control = format_control self.allowed_formats = allowed_formats _valid_formats = {"source", "binary"} assert self.allowed_formats.union(_valid_formats) == _valid_formats - def _get_cache_path_parts(self, link): + def _get_cache_path_parts_legacy(self, link): # type: (Link) -> List[str] """Get parts of part that must be os.path.joined with cache_dir + + Legacy cache key (pip < 20) for compatibility with older caches. """ # We want to generate an url to use as our cache key, we don't want to @@ -69,30 +83,72 @@ class Cache(object): return parts - def _get_candidates(self, link, package_name): - # type: (Link, Optional[str]) -> List[Any] + def _get_cache_path_parts(self, link): + # type: (Link) -> List[str] + """Get parts of part that must be os.path.joined with cache_dir + """ + + # We want to generate an url to use as our cache key, we don't want to + # just re-use the URL because it might have other items in the fragment + # and we don't care about those. 
+ key_parts = {"url": link.url_without_fragment} + if link.hash_name is not None and link.hash is not None: + key_parts[link.hash_name] = link.hash + if link.subdirectory_fragment: + key_parts["subdirectory"] = link.subdirectory_fragment + + # Include interpreter name, major and minor version in cache key + # to cope with ill-behaved sdists that build a different wheel + # depending on the python version their setup.py is being run on, + # and don't encode the difference in compatibility tags. + # https://github.com/pypa/pip/issues/7296 + key_parts["interpreter_name"] = interpreter_name() + key_parts["interpreter_version"] = interpreter_version() + + # Encode our key url with sha224, we'll use this because it has similar + # security properties to sha256, but with a shorter total output (and + # thus less secure). However the differences don't make a lot of + # difference for our use case here. + hashed = _hash_dict(key_parts) + + # We want to nest the directories some to prevent having a ton of top + # level directories where we might run out of sub directories on some + # FS. 
+ parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]] + + return parts + + def _get_candidates(self, link, canonical_package_name): + # type: (Link, str) -> List[Any] can_not_cache = ( not self.cache_dir or - not package_name or + not canonical_package_name or not link ) if can_not_cache: return [] - canonical_name = canonicalize_name(package_name) formats = self.format_control.get_allowed_formats( - canonical_name + canonical_package_name ) if not self.allowed_formats.intersection(formats): return [] - root = self.get_path_for_link(link) - try: - return os.listdir(root) - except OSError as err: - if err.errno in {errno.ENOENT, errno.ENOTDIR}: - return [] - raise + candidates = [] + path = self.get_path_for_link(link) + if os.path.isdir(path): + for candidate in os.listdir(path): + candidates.append((candidate, path)) + # TODO remove legacy path lookup in pip>=21 + legacy_path = self.get_path_for_link_legacy(link) + if os.path.isdir(legacy_path): + for candidate in os.listdir(legacy_path): + candidates.append((candidate, legacy_path)) + return candidates + + def get_path_for_link_legacy(self, link): + # type: (Link) -> str + raise NotImplementedError() def get_path_for_link(self, link): # type: (Link) -> str @@ -100,24 +156,18 @@ class Cache(object): """ raise NotImplementedError() - def get(self, link, package_name): - # type: (Link, Optional[str]) -> Link + def get( + self, + link, # type: Link + package_name, # type: Optional[str] + supported_tags, # type: List[Tag] + ): + # type: (...) -> Link """Returns a link to a cached item if it exists, otherwise returns the passed link. """ raise NotImplementedError() - def _link_for_candidate(self, link, candidate): - # type: (Link, str) -> Link - root = self.get_path_for_link(link) - path = os.path.join(root, candidate) - - return Link(path_to_url(path)) - - def cleanup(self): - # type: () -> None - pass - class SimpleWheelCache(Cache): """A cache of wheels for future installs. 
@@ -129,6 +179,12 @@ class SimpleWheelCache(Cache): cache_dir, format_control, {"binary"} ) + def get_path_for_link_legacy(self, link): + # type: (Link) -> str + parts = self._get_cache_path_parts_legacy(link) + assert self.cache_dir + return os.path.join(self.cache_dir, "wheels", *parts) + def get_path_for_link(self, link): # type: (Link) -> str """Return a directory to store cached wheels for link @@ -146,28 +202,53 @@ class SimpleWheelCache(Cache): :param link: The link of the sdist for which this will cache wheels. """ parts = self._get_cache_path_parts(link) - + assert self.cache_dir # Store wheels within the root cache_dir return os.path.join(self.cache_dir, "wheels", *parts) - def get(self, link, package_name): - # type: (Link, Optional[str]) -> Link + def get( + self, + link, # type: Link + package_name, # type: Optional[str] + supported_tags, # type: List[Tag] + ): + # type: (...) -> Link candidates = [] - for wheel_name in self._get_candidates(link, package_name): + if not package_name: + return link + + canonical_package_name = canonicalize_name(package_name) + for wheel_name, wheel_dir in self._get_candidates( + link, canonical_package_name + ): try: wheel = Wheel(wheel_name) except InvalidWheelFilename: continue - if not wheel.supported(): + if canonicalize_name(wheel.name) != canonical_package_name: + logger.debug( + "Ignoring cached wheel %s for %s as it " + "does not match the expected distribution name %s.", + wheel_name, link, package_name, + ) + continue + if not wheel.supported(supported_tags): # Built for a different python/arch/etc continue - candidates.append((wheel.support_index_min(), wheel_name)) + candidates.append( + ( + wheel.support_index_min(supported_tags), + wheel_name, + wheel_dir, + ) + ) if not candidates: return link - return self._link_for_candidate(link, min(candidates)[1]) + _, wheel_name, wheel_dir = min(candidates) + return Link(path_to_url(os.path.join(wheel_dir, wheel_name))) class EphemWheelCache(SimpleWheelCache): @@ 
-176,16 +257,24 @@ class EphemWheelCache(SimpleWheelCache): def __init__(self, format_control): # type: (FormatControl) -> None - self._temp_dir = TempDirectory(kind="ephem-wheel-cache") - self._temp_dir.create() + self._temp_dir = TempDirectory( + kind=tempdir_kinds.EPHEM_WHEEL_CACHE, + globally_managed=True, + ) super(EphemWheelCache, self).__init__( self._temp_dir.path, format_control ) - def cleanup(self): - # type: () -> None - self._temp_dir.cleanup() + +class CacheEntry(object): + def __init__( + self, + link, # type: Link + persistent, # type: bool + ): + self.link = link + self.persistent = persistent class WheelCache(Cache): @@ -203,6 +292,10 @@ class WheelCache(Cache): self._wheel_cache = SimpleWheelCache(cache_dir, format_control) self._ephem_cache = EphemWheelCache(format_control) + def get_path_for_link_legacy(self, link): + # type: (Link) -> str + return self._wheel_cache.get_path_for_link_legacy(link) + def get_path_for_link(self, link): # type: (Link) -> str return self._wheel_cache.get_path_for_link(link) @@ -211,14 +304,43 @@ class WheelCache(Cache): # type: (Link) -> str return self._ephem_cache.get_path_for_link(link) - def get(self, link, package_name): - # type: (Link, Optional[str]) -> Link - retval = self._wheel_cache.get(link, package_name) - if retval is link: - retval = self._ephem_cache.get(link, package_name) - return retval - - def cleanup(self): - # type: () -> None - self._wheel_cache.cleanup() - self._ephem_cache.cleanup() + def get( + self, + link, # type: Link + package_name, # type: Optional[str] + supported_tags, # type: List[Tag] + ): + # type: (...) -> Link + cache_entry = self.get_cache_entry(link, package_name, supported_tags) + if cache_entry is None: + return link + return cache_entry.link + + def get_cache_entry( + self, + link, # type: Link + package_name, # type: Optional[str] + supported_tags, # type: List[Tag] + ): + # type: (...) 
-> Optional[CacheEntry] + """Returns a CacheEntry with a link to a cached item if it exists or + None. The cache entry indicates if the item was found in the persistent + or ephemeral cache. + """ + retval = self._wheel_cache.get( + link=link, + package_name=package_name, + supported_tags=supported_tags, + ) + if retval is not link: + return CacheEntry(retval, persistent=True) + + retval = self._ephem_cache.get( + link=link, + package_name=package_name, + supported_tags=supported_tags, + ) + if retval is not link: + return CacheEntry(retval, persistent=False) + + return None diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/autocompletion.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/autocompletion.py index 0a04199e..329de602 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/autocompletion.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/autocompletion.py @@ -4,13 +4,19 @@ import optparse import os import sys +from itertools import chain from pip._internal.cli.main_parser import create_main_parser -from pip._internal.commands import commands_dict, get_summaries +from pip._internal.commands import commands_dict, create_command from pip._internal.utils.misc import get_installed_distributions +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Any, Iterable, List, Optional def autocomplete(): + # type: () -> None """Entry Point for completion of main and subcommand options. """ # Don't complete if user hasn't sourced bash_completion file. 
@@ -23,17 +29,18 @@ def autocomplete(): except IndexError: current = '' - subcommands = [cmd for cmd, summary in get_summaries()] + parser = create_main_parser() + subcommands = list(commands_dict) options = [] - # subcommand - try: - subcommand_name = [w for w in cwords if w in subcommands][0] - except IndexError: - subcommand_name = None - parser = create_main_parser() + # subcommand + subcommand_name = None # type: Optional[str] + for word in cwords: + if word in subcommands: + subcommand_name = word + break # subcommand options - if subcommand_name: + if subcommand_name is not None: # special case: 'help' subcommand has no options if subcommand_name == 'help': sys.exit(1) @@ -54,7 +61,7 @@ def autocomplete(): print(dist) sys.exit(1) - subcommand = commands_dict[subcommand_name]() + subcommand = create_command(subcommand_name) for opt in subcommand.parser.option_list_all: if opt.help != optparse.SUPPRESS_HELP: @@ -73,8 +80,8 @@ def autocomplete(): # get completion files and directories if ``completion_type`` is # ``<file>``, ``<dir>`` or ``<path>`` if completion_type: - options = auto_complete_paths(current, completion_type) - options = ((opt, 0) for opt in options) + paths = auto_complete_paths(current, completion_type) + options = [(path, 0) for path in paths] for option in options: opt_label = option[0] # append '=' to options which require args @@ -86,22 +93,25 @@ def autocomplete(): opts = [i.option_list for i in parser.option_groups] opts.append(parser.option_list) - opts = (o for it in opts for o in it) + flattened_opts = chain.from_iterable(opts) if current.startswith('-'): - for opt in opts: + for opt in flattened_opts: if opt.help != optparse.SUPPRESS_HELP: subcommands += opt._long_opts + opt._short_opts else: # get completion type given cwords and all available options - completion_type = get_path_completion_type(cwords, cword, opts) + completion_type = get_path_completion_type(cwords, cword, + flattened_opts) if completion_type: - subcommands = 
auto_complete_paths(current, completion_type) + subcommands = list(auto_complete_paths(current, + completion_type)) print(' '.join([x for x in subcommands if x.startswith(current)])) sys.exit(1) def get_path_completion_type(cwords, cword, opts): + # type: (List[str], int, Iterable[Any]) -> Optional[str] """Get the type of path completion (``file``, ``dir``, ``path`` or None) :param cwords: same as the environmental variable ``COMP_WORDS`` @@ -110,7 +120,7 @@ def get_path_completion_type(cwords, cword, opts): :return: path completion type (``file``, ``dir``, ``path`` or None) """ if cword < 2 or not cwords[cword - 2].startswith('-'): - return + return None for opt in opts: if opt.help == optparse.SUPPRESS_HELP: continue @@ -120,9 +130,11 @@ def get_path_completion_type(cwords, cword, opts): x in ('path', 'file', 'dir') for x in opt.metavar.split('/')): return opt.metavar + return None def auto_complete_paths(current, completion_type): + # type: (str, str) -> Iterable[str] """If ``completion_type`` is ``file`` or ``path``, list all regular files and directories starting with ``current``; otherwise only list directories starting with ``current``. 
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/base_command.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/base_command.py index 3ceea499..c3b6a856 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/base_command.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/base_command.py @@ -1,4 +1,5 @@ """Base Command class, and related routines""" + from __future__ import absolute_import, print_function import logging @@ -10,65 +11,75 @@ import sys import traceback from pip._internal.cli import cmdoptions +from pip._internal.cli.command_context import CommandContextMixIn from pip._internal.cli.parser import ( - ConfigOptionParser, UpdatingDefaultsHelpFormatter, + ConfigOptionParser, + UpdatingDefaultsHelpFormatter, ) from pip._internal.cli.status_codes import ( - ERROR, PREVIOUS_BUILD_DIR_ERROR, SUCCESS, UNKNOWN_ERROR, + ERROR, + PREVIOUS_BUILD_DIR_ERROR, + UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND, ) -from pip._internal.download import PipSession from pip._internal.exceptions import ( - BadCommand, CommandError, InstallationError, PreviousBuildDirError, + BadCommand, + CommandError, + InstallationError, + NetworkConnectionError, + PreviousBuildDirError, + SubProcessError, UninstallationError, ) -from pip._internal.index import PackageFinder -from pip._internal.locations import running_under_virtualenv -from pip._internal.req.constructors import ( - install_req_from_editable, install_req_from_line, -) -from pip._internal.req.req_file import parse_requirements from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.filesystem import check_path_owner from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging -from pip._internal.utils.misc import ( - get_prog, normalize_path, redact_password_from_url, +from pip._internal.utils.misc import get_prog, normalize_path +from 
pip._internal.utils.temp_dir import ( + global_tempdir_manager, + tempdir_registry, ) -from pip._internal.utils.outdated import pip_version_check from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.virtualenv import running_under_virtualenv if MYPY_CHECK_RUNNING: - from typing import Optional, List, Tuple, Any # noqa: F401 - from optparse import Values # noqa: F401 - from pip._internal.cache import WheelCache # noqa: F401 - from pip._internal.req.req_set import RequirementSet # noqa: F401 + from typing import List, Optional, Tuple, Any + from optparse import Values + + from pip._internal.utils.temp_dir import ( + TempDirectoryTypeRegistry as TempDirRegistry + ) __all__ = ['Command'] logger = logging.getLogger(__name__) -class Command(object): - name = None # type: Optional[str] - usage = None # type: Optional[str] - hidden = False # type: bool +class Command(CommandContextMixIn): + usage = None # type: str ignore_require_venv = False # type: bool - def __init__(self, isolated=False): - # type: (bool) -> None + def __init__(self, name, summary, isolated=False): + # type: (str, str, bool) -> None + super(Command, self).__init__() parser_kw = { 'usage': self.usage, - 'prog': '%s %s' % (get_prog(), self.name), + 'prog': '{} {}'.format(get_prog(), name), 'formatter': UpdatingDefaultsHelpFormatter(), 'add_help_option': False, - 'name': self.name, + 'name': name, 'description': self.__doc__, 'isolated': isolated, } + self.name = name + self.summary = summary self.parser = ConfigOptionParser(**parser_kw) + self.tempdir_registry = None # type: Optional[TempDirRegistry] + # Commands should add options to this option group - optgroup_name = '%s Options' % self.name.capitalize() + optgroup_name = '{} Options'.format(self.name.capitalize()) self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name) # Add the general options @@ -78,54 +89,49 @@ class Command(object): ) self.parser.add_option_group(gen_opts) - def run(self, options, args): - 
# type: (Values, List[Any]) -> Any - raise NotImplementedError - - def _build_session(self, options, retries=None, timeout=None): - # type: (Values, Optional[int], Optional[int]) -> PipSession - session = PipSession( - cache=( - normalize_path(os.path.join(options.cache_dir, "http")) - if options.cache_dir else None - ), - retries=retries if retries is not None else options.retries, - insecure_hosts=options.trusted_hosts, - ) - - # Handle custom ca-bundles from the user - if options.cert: - session.verify = options.cert - - # Handle SSL client certificate - if options.client_cert: - session.cert = options.client_cert - - # Handle timeouts - if options.timeout or timeout: - session.timeout = ( - timeout if timeout is not None else options.timeout - ) + self.add_options() - # Handle configured proxies - if options.proxy: - session.proxies = { - "http": options.proxy, - "https": options.proxy, - } + def add_options(self): + # type: () -> None + pass - # Determine if we can prompt the user for authentication or not - session.auth.prompting = not options.no_input + def handle_pip_version_check(self, options): + # type: (Values) -> None + """ + This is a no-op so that commands by default do not do the pip version + check. + """ + # Make sure we do the pip version check if the index_group options + # are present. 
+ assert not hasattr(options, 'no_index') - return session + def run(self, options, args): + # type: (Values, List[Any]) -> int + raise NotImplementedError def parse_args(self, args): - # type: (List[str]) -> Tuple + # type: (List[str]) -> Tuple[Any, Any] # factored out for testability return self.parser.parse_args(args) def main(self, args): # type: (List[str]) -> int + try: + with self.main_context(): + return self._main(args) + finally: + logging.shutdown() + + def _main(self, args): + # type: (List[str]) -> int + # We must initialize this before the tempdir manager, otherwise the + # configuration would not be accessible by the time we clean up the + # tempdir manager. + self.tempdir_registry = self.enter_context(tempdir_registry()) + # Intentionally set as early as possible so globally-managed temporary + # directories are available to the rest of the code. + self.enter_context(global_tempdir_manager()) + options, args = self.parse_args(args) # Set verbosity so that it can be used elsewhere. @@ -137,23 +143,20 @@ class Command(object): user_log_file=options.log, ) - if sys.version_info[:2] == (3, 4): - deprecated( - "Python 3.4 support has been deprecated. pip 19.1 will be the " - "last one supporting it. Please upgrade your Python as Python " - "3.4 won't be maintained after March 2019 (cf PEP 429).", - replacement=None, - gone_in='19.2', - ) - elif sys.version_info[:2] == (2, 7): + if ( + sys.version_info[:2] == (2, 7) and + not options.no_python_version_warning + ): message = ( - "A future version of pip will drop support for Python 2.7." + "pip 21.0 will drop support for Python 2.7 in January 2021. " + "More details about Python 2 support in pip can be found at " + "https://pip.pypa.io/en/latest/development/release-process/#python-2-support" # noqa ) if platform.python_implementation() == "CPython": message = ( - "Python 2.7 will reach the end of its life on January " + "Python 2.7 reached the end of its life on January " "1st, 2020. 
Please upgrade your Python as Python 2.7 " - "won't be maintained after that date. " + "is no longer maintained. " ) + message deprecated(message, replacement=None, gone_in=None) @@ -175,24 +178,57 @@ class Command(object): ) sys.exit(VIRTUALENV_NOT_FOUND) + if options.cache_dir: + options.cache_dir = normalize_path(options.cache_dir) + if not check_path_owner(options.cache_dir): + logger.warning( + "The directory '%s' or its parent directory is not owned " + "or is not writable by the current user. The cache " + "has been disabled. Check the permissions and owner of " + "that directory. If executing pip with sudo, you may want " + "sudo's -H flag.", + options.cache_dir, + ) + options.cache_dir = None + + if getattr(options, "build_dir", None): + deprecated( + reason=( + "The -b/--build/--build-dir/--build-directory " + "option is deprecated." + ), + replacement=( + "use the TMPDIR/TEMP/TMP environment variable, " + "possibly combined with --no-clean" + ), + gone_in="20.3", + issue=8333, + ) + + if 'resolver' in options.unstable_features: + logger.critical( + "--unstable-feature=resolver is no longer supported, and " + "has been replaced with --use-feature=2020-resolver instead." 
+ ) + sys.exit(ERROR) + try: status = self.run(options, args) - # FIXME: all commands should return an exit status - # and when it is done, isinstance is not needed anymore - if isinstance(status, int): - return status + assert isinstance(status, int) + return status except PreviousBuildDirError as exc: logger.critical(str(exc)) logger.debug('Exception information:', exc_info=True) return PREVIOUS_BUILD_DIR_ERROR - except (InstallationError, UninstallationError, BadCommand) as exc: + except (InstallationError, UninstallationError, BadCommand, + SubProcessError, NetworkConnectionError) as exc: logger.critical(str(exc)) logger.debug('Exception information:', exc_info=True) return ERROR except CommandError as exc: - logger.critical('ERROR: %s', exc) + logger.critical('%s', exc) logger.debug('Exception information:', exc_info=True) return ERROR @@ -214,128 +250,4 @@ class Command(object): return UNKNOWN_ERROR finally: - allow_version_check = ( - # Does this command have the index_group options? - hasattr(options, "no_index") and - # Is this command allowed to perform this check? - not (options.disable_pip_version_check or options.no_index) - ) - # Check if we're using the latest version of pip available - if allow_version_check: - session = self._build_session( - options, - retries=0, - timeout=min(5, options.timeout) - ) - with session: - pip_version_check(session, options) - - # Shutdown the logging module - logging.shutdown() - - return SUCCESS - - -class RequirementCommand(Command): - - @staticmethod - def populate_requirement_set(requirement_set, # type: RequirementSet - args, # type: List[str] - options, # type: Values - finder, # type: PackageFinder - session, # type: PipSession - name, # type: str - wheel_cache # type: Optional[WheelCache] - ): - # type: (...) -> None - """ - Marshal cmd line args into a requirement set. 
- """ - # NOTE: As a side-effect, options.require_hashes and - # requirement_set.require_hashes may be updated - - for filename in options.constraints: - for req_to_add in parse_requirements( - filename, - constraint=True, finder=finder, options=options, - session=session, wheel_cache=wheel_cache): - req_to_add.is_direct = True - requirement_set.add_requirement(req_to_add) - - for req in args: - req_to_add = install_req_from_line( - req, None, isolated=options.isolated_mode, - use_pep517=options.use_pep517, - wheel_cache=wheel_cache - ) - req_to_add.is_direct = True - requirement_set.add_requirement(req_to_add) - - for req in options.editables: - req_to_add = install_req_from_editable( - req, - isolated=options.isolated_mode, - use_pep517=options.use_pep517, - wheel_cache=wheel_cache - ) - req_to_add.is_direct = True - requirement_set.add_requirement(req_to_add) - - for filename in options.requirements: - for req_to_add in parse_requirements( - filename, - finder=finder, options=options, session=session, - wheel_cache=wheel_cache, - use_pep517=options.use_pep517): - req_to_add.is_direct = True - requirement_set.add_requirement(req_to_add) - # If --require-hashes was a line in a requirements file, tell - # RequirementSet about it: - requirement_set.require_hashes = options.require_hashes - - if not (args or options.editables or options.requirements): - opts = {'name': name} - if options.find_links: - raise CommandError( - 'You must give at least one requirement to %(name)s ' - '(maybe you meant "pip %(name)s %(links)s"?)' % - dict(opts, links=' '.join(options.find_links))) - else: - raise CommandError( - 'You must give at least one requirement to %(name)s ' - '(see "pip help %(name)s")' % opts) - - def _build_package_finder( - self, - options, # type: Values - session, # type: PipSession - platform=None, # type: Optional[str] - python_versions=None, # type: Optional[List[str]] - abi=None, # type: Optional[str] - implementation=None # type: Optional[str] - ): - # 
type: (...) -> PackageFinder - """ - Create a package finder appropriate to this requirement command. - """ - index_urls = [options.index_url] + options.extra_index_urls - if options.no_index: - logger.debug( - 'Ignoring indexes: %s', - ','.join(redact_password_from_url(url) for url in index_urls), - ) - index_urls = [] - - return PackageFinder( - find_links=options.find_links, - format_control=options.format_control, - index_urls=index_urls, - trusted_hosts=options.trusted_hosts, - allow_all_prereleases=options.pre, - session=session, - platform=platform, - versions=python_versions, - abi=abi, - implementation=implementation, - prefer_binary=options.prefer_binary, - ) + self.handle_pip_version_check(options) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/cmdoptions.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/cmdoptions.py index 5cf5ee97..ed42c5f5 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/cmdoptions.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/cmdoptions.py @@ -5,31 +5,38 @@ The principle here is to define options once, but *not* instantiate them globally. One reason being that options with action='append' can carry state between parses. pip parses general options twice internally, and shouldn't pass on state. To be consistent, all options will follow this design. - """ + +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False + from __future__ import absolute_import +import os import textwrap import warnings from distutils.util import strtobool from functools import partial from optparse import SUPPRESS_HELP, Option, OptionGroup +from textwrap import dedent +from pip._internal.cli.progress_bars import BAR_TYPES from pip._internal.exceptions import CommandError -from pip._internal.locations import USER_CACHE_DIR, src_prefix +from pip._internal.locations import USER_CACHE_DIR, get_src_prefix from pip._internal.models.format_control import FormatControl from pip._internal.models.index import PyPI +from pip._internal.models.target_python import TargetPython from pip._internal.utils.hashes import STRONG_HASHES from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.utils.ui import BAR_TYPES if MYPY_CHECK_RUNNING: - from typing import Any, Callable, Dict, List, Optional, Union # noqa: F401 - from optparse import OptionParser, Values # noqa: F401 - from pip._internal.cli.parser import ConfigOptionParser # noqa: F401 + from typing import Any, Callable, Dict, Optional, Tuple + from optparse import OptionParser, Values + from pip._internal.cli.parser import ConfigOptionParser def raise_option_error(parser, option, msg): + # type: (OptionParser, Option, str) -> None """ Raise an option parsing error using parser.error(). 
@@ -68,14 +75,15 @@ def check_install_build_global(options, check_options=None): check_options = options def getname(n): + # type: (str) -> Optional[Any] return getattr(check_options, n, None) names = ["build_options", "global_options", "install_options"] if any(map(getname, names)): control = options.format_control control.disallow_binaries() warnings.warn( - 'Disabling all use of wheels due to the use of --build-options ' - '/ --global-options / --install-options.', stacklevel=2, + 'Disabling all use of wheels due to the use of --build-option ' + '/ --global-option / --install-option.', stacklevel=2, ) @@ -101,7 +109,7 @@ def check_dist_restriction(options, check_target=False): # Installations or downloads using dist restrictions must not combine # source distributions and dist-specific wheels, as they are not - # gauranteed to be locally compatible. + # guaranteed to be locally compatible. if dist_restriction_set and sdist_dependencies_allowed: raise CommandError( "When restricting platform and interpreter constraints using " @@ -119,6 +127,17 @@ def check_dist_restriction(options, check_target=False): ) +def _path_option_check(option, opt, value): + # type: (Option, str, str) -> str + return os.path.expanduser(value) + + +class PipOption(Option): + TYPES = Option.TYPES + ("path",) + TYPE_CHECKER = Option.TYPE_CHECKER.copy() + TYPE_CHECKER["path"] = _path_option_check + + ########### # options # ########### @@ -206,10 +225,11 @@ progress_bar = partial( ) # type: Callable[..., Option] log = partial( - Option, + PipOption, "--log", "--log-file", "--local-log", dest="log", metavar="path", + type="path", help="Path to a verbose appending log." ) # type: Callable[..., Option] @@ -220,7 +240,7 @@ no_input = partial( dest='no_input', action='store_true', default=False, - help=SUPPRESS_HELP + help="Disable prompting for input." 
) # type: Callable[..., Option] proxy = partial( @@ -252,16 +272,6 @@ timeout = partial( help='Set the socket timeout (default %default seconds).', ) # type: Callable[..., Option] -skip_requirements_regex = partial( - Option, - # A regex to be used to skip requirements - '--skip-requirements-regex', - dest='skip_requirements_regex', - type='str', - default='', - help=SUPPRESS_HELP, -) # type: Callable[..., Option] - def exists_action(): # type: () -> Option @@ -275,24 +285,24 @@ def exists_action(): action='append', metavar='action', help="Default action when a path already exists: " - "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort).", + "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.", ) cert = partial( - Option, + PipOption, '--cert', dest='cert', - type='str', + type='path', metavar='path', help="Path to alternate CA bundle.", ) # type: Callable[..., Option] client_cert = partial( - Option, + PipOption, '--client-cert', dest='client_cert', - type='str', + type='path', default=None, metavar='path', help="Path to SSL client certificate, a single file containing the " @@ -305,7 +315,7 @@ index_url = partial( dest='index_url', metavar='URL', default=PyPI.simple_url, - help="Base URL of Python Package Index (default %default). " + help="Base URL of the Python Package Index (default %default). " "This should point to a repository compliant with PEP 503 " "(the simple repository API) or a local directory laid out " "in the same format.", @@ -313,6 +323,7 @@ index_url = partial( def extra_index_url(): + # type: () -> Option return Option( '--extra-index-url', dest='extra_index_urls', @@ -343,9 +354,11 @@ def find_links(): action='append', default=[], metavar='url', - help="If a url or path to an html file, then parse for links to " - "archives. 
If a local path or file:// url that's a directory, " - "then look for archives in the directory listing.", + help="If a URL or path to an html file, then parse for links to " + "archives such as sdist (.tar.gz) or wheel (.whl) files. " + "If a local path or file:// URL that's a directory, " + "then look for archives in the directory listing. " + "Links to VCS project URLs are not supported.", ) @@ -357,8 +370,8 @@ def trusted_host(): action="append", metavar="HOSTNAME", default=[], - help="Mark this host as trusted, even though it does not have valid " - "or any HTTPS.", + help="Mark this host or host:port pair as trusted, even though it " + "does not have valid or any HTTPS.", ) @@ -401,12 +414,21 @@ def editable(): ) +def _handle_src(option, opt_str, value, parser): + # type: (Option, str, str, OptionParser) -> None + value = os.path.abspath(value) + setattr(parser.values, option.dest, value) + + src = partial( - Option, + PipOption, '--src', '--source', '--source-dir', '--source-directory', dest='src_dir', + type='path', metavar='dir', - default=src_prefix, + default=get_src_prefix(), + action='callback', + callback=_handle_src, help='Directory to check out editable projects into. ' 'The default in a virtualenv is "<venv path>/src". ' 'The default for global installs is "<current dir>/src".' @@ -442,12 +464,12 @@ def no_binary(): "--no-binary", dest="format_control", action="callback", callback=_handle_no_binary, type="str", default=format_control, - help="Do not use binary packages. Can be supplied multiple times, and " - "each time adds to the existing value. Accepts either :all: to " - "disable all binary packages, :none: to empty the set, or one or " - "more package names with commas between them. Note that some " - "packages are tricky to compile and may fail to install when " - "this option is used on them.", + help='Do not use binary packages. Can be supplied multiple times, and ' + 'each time adds to the existing value. 
Accepts either ":all:" to ' + 'disable all binary packages, ":none:" to empty the set (notice ' + 'the colons), or one or more package names with commas between ' + 'them (no colons). Note that some packages are tricky to compile ' + 'and may fail to install when this option is used on them.', ) @@ -458,12 +480,12 @@ def only_binary(): "--only-binary", dest="format_control", action="callback", callback=_handle_only_binary, type="str", default=format_control, - help="Do not use source packages. Can be supplied multiple times, and " - "each time adds to the existing value. Accepts either :all: to " - "disable all source packages, :none: to empty the set, or one or " - "more package names with commas between them. Packages without " - "binary distributions will fail to install when this option is " - "used on them.", + help='Do not use source packages. Can be supplied multiple times, and ' + 'each time adds to the existing value. Accepts either ":all:" to ' + 'disable all source packages, ":none:" to empty the set, or one ' + 'or more package names with commas between them. Packages ' + 'without binary distributions will fail to install when this ' + 'option is used on them.', ) @@ -478,18 +500,69 @@ platform = partial( ) # type: Callable[..., Option] +# This was made a separate function for unit-testing purposes. +def _convert_python_version(value): + # type: (str) -> Tuple[Tuple[int, ...], Optional[str]] + """ + Convert a version string like "3", "37", or "3.7.3" into a tuple of ints. + + :return: A 2-tuple (version_info, error_msg), where `error_msg` is + non-None if and only if there was a parsing error. + """ + if not value: + # The empty string is the same as not providing a value. + return (None, None) + + parts = value.split('.') + if len(parts) > 3: + return ((), 'at most three version parts are allowed') + + if len(parts) == 1: + # Then we are in the case of "3" or "37". 
+ value = parts[0] + if len(value) > 1: + parts = [value[0], value[1:]] + + try: + version_info = tuple(int(part) for part in parts) + except ValueError: + return ((), 'each version part must be an integer') + + return (version_info, None) + + +def _handle_python_version(option, opt_str, value, parser): + # type: (Option, str, str, OptionParser) -> None + """ + Handle a provided --python-version value. + """ + version_info, error_msg = _convert_python_version(value) + if error_msg is not None: + msg = ( + 'invalid --python-version value: {!r}: {}'.format( + value, error_msg, + ) + ) + raise_option_error(parser, option=option, msg=msg) + + parser.values.python_version = version_info + + python_version = partial( Option, '--python-version', dest='python_version', metavar='python_version', + action='callback', + callback=_handle_python_version, type='str', default=None, - help=("Only use wheels compatible with Python " - "interpreter version <version>. If not specified, then the " - "current system interpreter minor version is used. A major " - "version (e.g. '2') can be specified to match all " - "minor revs of that major version. A minor version " - "(e.g. '34') can also be specified."), + help=dedent("""\ + The Python interpreter version to use for wheel and "Requires-Python" + compatibility checks. Defaults to a version derived from the running + interpreter. The version can be specified using up to three dot-separated + integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor + version can also be given as a string without dots (e.g. "37" for 3.7.0). 
+ """), ) # type: Callable[..., Option] @@ -522,6 +595,26 @@ abi = partial( ) # type: Callable[..., Option] +def add_target_python_options(cmd_opts): + # type: (OptionGroup) -> None + cmd_opts.add_option(platform()) + cmd_opts.add_option(python_version()) + cmd_opts.add_option(implementation()) + cmd_opts.add_option(abi()) + + +def make_target_python(options): + # type: (Values) -> TargetPython + target_python = TargetPython( + platform=options.platform, + py_version_info=options.python_version, + abi=options.abi, + implementation=options.implementation, + ) + + return target_python + + def prefer_binary(): # type: () -> Option return Option( @@ -534,16 +627,18 @@ def prefer_binary(): cache_dir = partial( - Option, + PipOption, "--cache-dir", dest="cache_dir", default=USER_CACHE_DIR, metavar="dir", + type='path', help="Store the cache data in <dir>." ) # type: Callable[..., Option] -def no_cache_dir_callback(option, opt, value, parser): +def _handle_no_cache_dir(option, opt, value, parser): + # type: (Option, str, str, OptionParser) -> None """ Process a value provided for the --no-cache-dir option. @@ -575,7 +670,7 @@ no_cache = partial( "--no-cache-dir", dest="cache_dir", action="callback", - callback=no_cache_dir_callback, + callback=_handle_no_cache_dir, help="Disable the cache.", ) # type: Callable[..., Option] @@ -588,12 +683,24 @@ no_deps = partial( help="Don't install package dependencies.", ) # type: Callable[..., Option] + +def _handle_build_dir(option, opt, value, parser): + # type: (Option, str, str, OptionParser) -> None + if value: + value = os.path.abspath(value) + setattr(parser.values, option.dest, value) + + build_dir = partial( - Option, + PipOption, '-b', '--build', '--build-dir', '--build-directory', dest='build_dir', + type='path', metavar='dir', - help='Directory to unpack packages into and build in. Note that ' + action='callback', + callback=_handle_build_dir, + help='(DEPRECATED) ' + 'Directory to unpack packages into and build in. 
Note that ' 'an initial build still takes place in a temporary directory. ' 'The location of temporary directories can be controlled by setting ' 'the TMPDIR environment variable (TEMP on Windows) appropriately. ' @@ -620,7 +727,8 @@ no_build_isolation = partial( ) # type: Callable[..., Option] -def no_use_pep517_callback(option, opt, value, parser): +def _handle_no_use_pep517(option, opt, value, parser): + # type: (Option, str, str, OptionParser) -> None """ Process a value provided for the --no-use-pep517 option. @@ -658,7 +766,7 @@ no_use_pep517 = partial( '--no-use-pep517', dest='use_pep517', action='callback', - callback=no_use_pep517_callback, + callback=_handle_no_use_pep517, default=None, help=SUPPRESS_HELP ) # type: Any @@ -714,31 +822,21 @@ disable_pip_version_check = partial( ) # type: Callable[..., Option] -# Deprecated, Remove later -always_unzip = partial( - Option, - '-Z', '--always-unzip', - dest='always_unzip', - action='store_true', - help=SUPPRESS_HELP, -) # type: Callable[..., Option] - - -def _merge_hash(option, opt_str, value, parser): +def _handle_merge_hash(option, opt_str, value, parser): # type: (Option, str, str, OptionParser) -> None """Given a value spelled "algo:digest", append the digest to a list pointed to in a dict by the algo name.""" if not parser.values.hashes: - parser.values.hashes = {} # type: ignore + parser.values.hashes = {} try: algo, digest = value.split(':', 1) except ValueError: - parser.error('Arguments to %s must be a hash name ' - 'followed by a value, like --hash=sha256:abcde...' % - opt_str) + parser.error('Arguments to {} must be a hash name ' # noqa + 'followed by a value, like --hash=sha256:' + 'abcde...'.format(opt_str)) if algo not in STRONG_HASHES: - parser.error('Allowed hash algorithms for %s are %s.' 
% - (opt_str, ', '.join(STRONG_HASHES))) + parser.error('Allowed hash algorithms for {} are {}.'.format( # noqa + opt_str, ', '.join(STRONG_HASHES))) parser.values.hashes.setdefault(algo, []).append(digest) @@ -749,7 +847,7 @@ hash = partial( # __dict__ copying in process_line(). dest='hashes', action='callback', - callback=_merge_hash, + callback=_handle_merge_hash, type='string', help="Verify that the package's archive matches this " 'hash before installing. Example: --hash=sha256:abcdef...', @@ -768,6 +866,71 @@ require_hashes = partial( ) # type: Callable[..., Option] +list_path = partial( + PipOption, + '--path', + dest='path', + type='path', + action='append', + help='Restrict to the specified installation path for listing ' + 'packages (can be used multiple times).' +) # type: Callable[..., Option] + + +def check_list_path_option(options): + # type: (Values) -> None + if options.path and (options.user or options.local): + raise CommandError( + "Cannot combine '--path' with '--user' or '--local'" + ) + + +no_python_version_warning = partial( + Option, + '--no-python-version-warning', + dest='no_python_version_warning', + action='store_true', + default=False, + help='Silence deprecation warnings for upcoming unsupported Pythons.', +) # type: Callable[..., Option] + + +unstable_feature = partial( + Option, + '--unstable-feature', + dest='unstable_features', + metavar='feature', + action='append', + default=[], + choices=['resolver'], + help=SUPPRESS_HELP, # TODO: drop this in pip 20.3 +) # type: Callable[..., Option] + +use_new_feature = partial( + Option, + '--use-feature', + dest='features_enabled', + metavar='feature', + action='append', + default=[], + choices=['2020-resolver', 'fast-deps'], + help='Enable new functionality, that may be backward incompatible.', +) # type: Callable[..., Option] + +use_deprecated_feature = partial( + Option, + '--use-deprecated', + dest='deprecated_features_enabled', + metavar='feature', + action='append', + default=[], + 
choices=[], + help=( + 'Enable deprecated functionality, that will be removed in the future.' + ), +) # type: Callable[..., Option] + + ########## # groups # ########## @@ -786,7 +949,6 @@ general_group = { proxy, retries, timeout, - skip_requirements_regex, exists_action, trusted_host, cert, @@ -795,6 +957,10 @@ general_group = { no_cache, disable_pip_version_check, no_color, + no_python_version_warning, + unstable_feature, + use_new_feature, + use_deprecated_feature, ] } # type: Dict[str, Any] diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/command_context.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/command_context.py new file mode 100644 index 00000000..d1a64a77 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/command_context.py @@ -0,0 +1,36 @@ +from contextlib import contextmanager + +from pip._vendor.contextlib2 import ExitStack + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Iterator, ContextManager, TypeVar + + _T = TypeVar('_T', covariant=True) + + +class CommandContextMixIn(object): + def __init__(self): + # type: () -> None + super(CommandContextMixIn, self).__init__() + self._in_main_context = False + self._main_context = ExitStack() + + @contextmanager + def main_context(self): + # type: () -> Iterator[None] + assert not self._in_main_context + + self._in_main_context = True + try: + with self._main_context: + yield + finally: + self._in_main_context = False + + def enter_context(self, context_provider): + # type: (ContextManager[_T]) -> _T + assert self._in_main_context + + return self._main_context.enter_context(context_provider) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/main.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/main.py new file mode 100644 index 00000000..172f30dd --- /dev/null 
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/main.py @@ -0,0 +1,75 @@ +"""Primary application entrypoint. +""" +from __future__ import absolute_import + +import locale +import logging +import os +import sys + +from pip._internal.cli.autocompletion import autocomplete +from pip._internal.cli.main_parser import parse_command +from pip._internal.commands import create_command +from pip._internal.exceptions import PipError +from pip._internal.utils import deprecation +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional + +logger = logging.getLogger(__name__) + + +# Do not import and use main() directly! Using it directly is actively +# discouraged by pip's maintainers. The name, location and behavior of +# this function is subject to change, so calling it directly is not +# portable across different pip versions. + +# In addition, running pip in-process is unsupported and unsafe. This is +# elaborated in detail at +# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program. +# That document also provides suggestions that should work for nearly +# all users that are considering importing and using main() directly. + +# However, we know that certain users will still want to invoke pip +# in-process. If you understand and accept the implications of using pip +# in an unsupported manner, the best approach is to use runpy to avoid +# depending on the exact location of this entry point. + +# The following example shows how to use runpy to invoke pip in that +# case: +# +# sys.argv = ["pip", your, args, here] +# runpy.run_module("pip", run_name="__main__") +# +# Note that this will exit the process after running, unlike a direct +# call to main. As it is not safe to do any processing after calling +# main, this should not be an issue in practice. 
+ +def main(args=None): + # type: (Optional[List[str]]) -> int + if args is None: + args = sys.argv[1:] + + # Configure our deprecation warnings to be sent through loggers + deprecation.install_warning_logger() + + autocomplete() + + try: + cmd_name, cmd_args = parse_command(args) + except PipError as exc: + sys.stderr.write("ERROR: {}".format(exc)) + sys.stderr.write(os.linesep) + sys.exit(1) + + # Needed for locale.getpreferredencoding(False) to work + # in pip._internal.utils.encoding.auto_decode + try: + locale.setlocale(locale.LC_ALL, '') + except locale.Error as e: + # setlocale can apparently crash if locale are uninitialized + logger.debug("Ignoring error %s when setting locale", e) + command = create_command(cmd_name, isolated=("--isolated" in cmd_args)) + + return command.main(cmd_args) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/main_parser.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/main_parser.py index b17c7492..08c82c1f 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/main_parser.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/main_parser.py @@ -4,20 +4,18 @@ import os import sys -from pip import __version__ from pip._internal.cli import cmdoptions from pip._internal.cli.parser import ( - ConfigOptionParser, UpdatingDefaultsHelpFormatter, -) -from pip._internal.commands import ( - commands_dict, get_similar_commands, get_summaries, + ConfigOptionParser, + UpdatingDefaultsHelpFormatter, ) +from pip._internal.commands import commands_dict, get_similar_commands from pip._internal.exceptions import CommandError -from pip._internal.utils.misc import get_prog +from pip._internal.utils.misc import get_pip_version, get_prog from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Tuple, List # noqa: F401 + from typing import Tuple, List __all__ = 
["create_main_parser", "parse_command"] @@ -39,12 +37,7 @@ def create_main_parser(): parser = ConfigOptionParser(**parser_kw) parser.disable_interspersed_args() - pip_pkg_dir = os.path.abspath(os.path.join( - os.path.dirname(__file__), "..", "..", - )) - parser.version = 'pip %s from %s (python %s)' % ( - __version__, pip_pkg_dir, sys.version[:3], - ) + parser.version = get_pip_version() # add the general options gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser) @@ -54,8 +47,10 @@ def create_main_parser(): parser.main = True # type: ignore # create command listing for description - command_summaries = get_summaries() - description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries] + description = [''] + [ + '{name:27} {command_info.summary}'.format(**locals()) + for name, command_info in commands_dict.items() + ] parser.description = '\n'.join(description) return parser @@ -91,9 +86,9 @@ def parse_command(args): if cmd_name not in commands_dict: guess = get_similar_commands(cmd_name) - msg = ['unknown command "%s"' % cmd_name] + msg = ['unknown command "{}"'.format(cmd_name)] if guess: - msg.append('maybe you meant "%s"' % guess) + msg.append('maybe you meant "{}"'.format(guess)) raise CommandError(' - '.join(msg)) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/parser.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/parser.py index e1eaac42..04e00b72 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/parser.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/parser.py @@ -1,4 +1,8 @@ """Base option parser setup""" + +# The following comment should be removed at some point in the future. 
+# mypy: disallow-untyped-defs=False + from __future__ import absolute_import import logging @@ -27,14 +31,14 @@ class PrettyHelpFormatter(optparse.IndentedHelpFormatter): optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs) def format_option_strings(self, option): - return self._format_option_strings(option, ' <%s>', ', ') + return self._format_option_strings(option) - def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '): + def _format_option_strings(self, option, mvarfmt=' <{}>', optsep=', '): """ Return a comma-separated list of option strings and metavars. :param option: tuple of (short opt, long opt), e.g: ('-f', '--format') - :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar + :param mvarfmt: metavar format string :param optsep: separator """ opts = [] @@ -48,7 +52,7 @@ class PrettyHelpFormatter(optparse.IndentedHelpFormatter): if option.takes_value(): metavar = option.metavar or option.dest.lower() - opts.append(mvarfmt % metavar.lower()) + opts.append(mvarfmt.format(metavar.lower())) return ''.join(opts) @@ -62,7 +66,8 @@ class PrettyHelpFormatter(optparse.IndentedHelpFormatter): Ensure there is only one newline between usage and the first heading if there is no description. 
""" - msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), " ") + msg = '\nUsage: {}\n'.format( + self.indent_lines(textwrap.dedent(usage), " ")) return msg def format_description(self, description): @@ -78,7 +83,7 @@ class PrettyHelpFormatter(optparse.IndentedHelpFormatter): description = description.rstrip() # dedent, then reindent description = self.indent_lines(textwrap.dedent(description), " ") - description = '%s:\n%s\n' % (label, description) + description = '{}:\n{}\n'.format(label, description) return description else: return '' @@ -146,7 +151,7 @@ class ConfigOptionParser(CustomOptionParser): try: return option.check_value(key, val) except optparse.OptionValueError as exc: - print("An error occurred during configuration: %s" % exc) + print("An error occurred during configuration: {}".format(exc)) sys.exit(3) def _get_ordered_configuration_items(self): @@ -245,7 +250,7 @@ class ConfigOptionParser(CustomOptionParser): def error(self, msg): self.print_usage(sys.stderr) - self.exit(UNKNOWN_ERROR, "%s\n" % msg) + self.exit(UNKNOWN_ERROR, "{}\n".format(msg)) def invalid_config_error_message(action, key, val): diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/progress_bars.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/progress_bars.py new file mode 100644 index 00000000..69338552 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/progress_bars.py @@ -0,0 +1,280 @@ +from __future__ import division + +import itertools +import sys +from signal import SIGINT, default_int_handler, signal + +from pip._vendor import six +from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar +from pip._vendor.progress.spinner import Spinner + +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.logging import get_indentation +from pip._internal.utils.misc import format_size +from pip._internal.utils.typing import 
MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Any, Dict, List + +try: + from pip._vendor import colorama +# Lots of different errors can come from this, including SystemError and +# ImportError. +except Exception: + colorama = None + + +def _select_progress_class(preferred, fallback): + # type: (Bar, Bar) -> Bar + encoding = getattr(preferred.file, "encoding", None) + + # If we don't know what encoding this file is in, then we'll just assume + # that it doesn't support unicode and use the ASCII bar. + if not encoding: + return fallback + + # Collect all of the possible characters we want to use with the preferred + # bar. + characters = [ + getattr(preferred, "empty_fill", six.text_type()), + getattr(preferred, "fill", six.text_type()), + ] + characters += list(getattr(preferred, "phases", [])) + + # Try to decode the characters we're using for the bar using the encoding + # of the given file, if this works then we'll assume that we can use the + # fancier bar and if not we'll fall back to the plaintext bar. + try: + six.text_type().join(characters).encode(encoding) + except UnicodeEncodeError: + return fallback + else: + return preferred + + +_BaseBar = _select_progress_class(IncrementalBar, Bar) # type: Any + + +class InterruptibleMixin(object): + """ + Helper to ensure that self.finish() gets called on keyboard interrupt. + + This allows downloads to be interrupted without leaving temporary state + (like hidden cursors) behind. + + This class is similar to the progress library's existing SigIntMixin + helper, but as of version 1.2, that helper has the following problems: + + 1. It calls sys.exit(). + 2. It discards the existing SIGINT handler completely. + 3. It leaves its own handler in place even after an uninterrupted finish, + which will have unexpected delayed effects if the user triggers an + unrelated keyboard interrupt some time after a progress-displaying + download has already completed, for example. 
+ """ + + def __init__(self, *args, **kwargs): + # type: (List[Any], Dict[Any, Any]) -> None + """ + Save the original SIGINT handler for later. + """ + # https://github.com/python/mypy/issues/5887 + super(InterruptibleMixin, self).__init__( # type: ignore + *args, + **kwargs + ) + + self.original_handler = signal(SIGINT, self.handle_sigint) + + # If signal() returns None, the previous handler was not installed from + # Python, and we cannot restore it. This probably should not happen, + # but if it does, we must restore something sensible instead, at least. + # The least bad option should be Python's default SIGINT handler, which + # just raises KeyboardInterrupt. + if self.original_handler is None: + self.original_handler = default_int_handler + + def finish(self): + # type: () -> None + """ + Restore the original SIGINT handler after finishing. + + This should happen regardless of whether the progress display finishes + normally, or gets interrupted. + """ + super(InterruptibleMixin, self).finish() # type: ignore + signal(SIGINT, self.original_handler) + + def handle_sigint(self, signum, frame): # type: ignore + """ + Call self.finish() before delegating to the original SIGINT handler. + + This handler should only be in place while the progress display is + active. 
+ """ + self.finish() + self.original_handler(signum, frame) + + +class SilentBar(Bar): + + def update(self): + # type: () -> None + pass + + +class BlueEmojiBar(IncrementalBar): + + suffix = "%(percent)d%%" + bar_prefix = " " + bar_suffix = " " + phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535") # type: Any + + +class DownloadProgressMixin(object): + + def __init__(self, *args, **kwargs): + # type: (List[Any], Dict[Any, Any]) -> None + # https://github.com/python/mypy/issues/5887 + super(DownloadProgressMixin, self).__init__( # type: ignore + *args, + **kwargs + ) + self.message = (" " * ( + get_indentation() + 2 + )) + self.message # type: str + + @property + def downloaded(self): + # type: () -> str + return format_size(self.index) # type: ignore + + @property + def download_speed(self): + # type: () -> str + # Avoid zero division errors... + if self.avg == 0.0: # type: ignore + return "..." + return format_size(1 / self.avg) + "/s" # type: ignore + + @property + def pretty_eta(self): + # type: () -> str + if self.eta: # type: ignore + return "eta {}".format(self.eta_td) # type: ignore + return "" + + def iter(self, it): # type: ignore + for x in it: + yield x + # B305 is incorrectly raised here + # https://github.com/PyCQA/flake8-bugbear/issues/59 + self.next(len(x)) # noqa: B305 + self.finish() + + +class WindowsMixin(object): + + def __init__(self, *args, **kwargs): + # type: (List[Any], Dict[Any, Any]) -> None + # The Windows terminal does not support the hide/show cursor ANSI codes + # even with colorama. So we'll ensure that hide_cursor is False on + # Windows. + # This call needs to go before the super() call, so that hide_cursor + # is set in time. The base progress bar class writes the "hide cursor" + # code to the terminal in its init, so if we don't set this soon + # enough, we get a "hide" with no corresponding "show"... 
+ if WINDOWS and self.hide_cursor: # type: ignore + self.hide_cursor = False + + # https://github.com/python/mypy/issues/5887 + super(WindowsMixin, self).__init__(*args, **kwargs) # type: ignore + + # Check if we are running on Windows and we have the colorama module, + # if we do then wrap our file with it. + if WINDOWS and colorama: + self.file = colorama.AnsiToWin32(self.file) # type: ignore + # The progress code expects to be able to call self.file.isatty() + # but the colorama.AnsiToWin32() object doesn't have that, so we'll + # add it. + self.file.isatty = lambda: self.file.wrapped.isatty() + # The progress code expects to be able to call self.file.flush() + # but the colorama.AnsiToWin32() object doesn't have that, so we'll + # add it. + self.file.flush = lambda: self.file.wrapped.flush() + + +class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, + DownloadProgressMixin): + + file = sys.stdout + message = "%(percent)d%%" + suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s" + + +class DefaultDownloadProgressBar(BaseDownloadProgressBar, + _BaseBar): + pass + + +class DownloadSilentBar(BaseDownloadProgressBar, SilentBar): + pass + + +class DownloadBar(BaseDownloadProgressBar, + Bar): + pass + + +class DownloadFillingCirclesBar(BaseDownloadProgressBar, + FillingCirclesBar): + pass + + +class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, + BlueEmojiBar): + pass + + +class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin, + DownloadProgressMixin, Spinner): + + file = sys.stdout + suffix = "%(downloaded)s %(download_speed)s" + + def next_phase(self): + # type: () -> str + if not hasattr(self, "_phaser"): + self._phaser = itertools.cycle(self.phases) + return next(self._phaser) + + def update(self): + # type: () -> None + message = self.message % self + phase = self.next_phase() + suffix = self.suffix % self + line = ''.join([ + message, + " " if message else "", + phase, + " " if suffix else "", + suffix, + ]) + + 
self.writeln(line) + + +BAR_TYPES = { + "off": (DownloadSilentBar, DownloadSilentBar), + "on": (DefaultDownloadProgressBar, DownloadProgressSpinner), + "ascii": (DownloadBar, DownloadProgressSpinner), + "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner), + "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner) +} + + +def DownloadProgressProvider(progress_bar, max=None): # type: ignore + if max is None or max == 0: + return BAR_TYPES[progress_bar][1]().iter + else: + return BAR_TYPES[progress_bar][0](max=max).iter diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/req_command.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/req_command.py new file mode 100644 index 00000000..78b5ce6a --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/req_command.py @@ -0,0 +1,402 @@ +"""Contains the Command base classes that depend on PipSession. + +The classes in this module are in a separate module so the commands not +needing download / PackageFinder capability don't unnecessarily import the +PackageFinder machinery and all its vendored dependencies, etc. 
+""" + +import logging +import os +from functools import partial + +from pip._internal.cli import cmdoptions +from pip._internal.cli.base_command import Command +from pip._internal.cli.command_context import CommandContextMixIn +from pip._internal.exceptions import CommandError, PreviousBuildDirError +from pip._internal.index.collector import LinkCollector +from pip._internal.index.package_finder import PackageFinder +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.network.download import Downloader +from pip._internal.network.session import PipSession +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.constructors import ( + install_req_from_editable, + install_req_from_line, + install_req_from_parsed_requirement, + install_req_from_req_string, +) +from pip._internal.req.req_file import parse_requirements +from pip._internal.self_outdated_check import pip_self_version_check +from pip._internal.utils.temp_dir import tempdir_kinds +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import Any, List, Optional, Tuple + + from pip._internal.cache import WheelCache + from pip._internal.models.target_python import TargetPython + from pip._internal.req.req_install import InstallRequirement + from pip._internal.req.req_tracker import RequirementTracker + from pip._internal.resolution.base import BaseResolver + from pip._internal.utils.temp_dir import ( + TempDirectory, + TempDirectoryTypeRegistry, + ) + + +logger = logging.getLogger(__name__) + + +class SessionCommandMixin(CommandContextMixIn): + + """ + A class mixin for command classes needing _build_session(). 
+ """ + def __init__(self): + # type: () -> None + super(SessionCommandMixin, self).__init__() + self._session = None # Optional[PipSession] + + @classmethod + def _get_index_urls(cls, options): + # type: (Values) -> Optional[List[str]] + """Return a list of index urls from user-provided options.""" + index_urls = [] + if not getattr(options, "no_index", False): + url = getattr(options, "index_url", None) + if url: + index_urls.append(url) + urls = getattr(options, "extra_index_urls", None) + if urls: + index_urls.extend(urls) + # Return None rather than an empty list + return index_urls or None + + def get_default_session(self, options): + # type: (Values) -> PipSession + """Get a default-managed session.""" + if self._session is None: + self._session = self.enter_context(self._build_session(options)) + # there's no type annotation on requests.Session, so it's + # automatically ContextManager[Any] and self._session becomes Any, + # then https://github.com/python/mypy/issues/7696 kicks in + assert self._session is not None + return self._session + + def _build_session(self, options, retries=None, timeout=None): + # type: (Values, Optional[int], Optional[int]) -> PipSession + assert not options.cache_dir or os.path.isabs(options.cache_dir) + session = PipSession( + cache=( + os.path.join(options.cache_dir, "http") + if options.cache_dir else None + ), + retries=retries if retries is not None else options.retries, + trusted_hosts=options.trusted_hosts, + index_urls=self._get_index_urls(options), + ) + + # Handle custom ca-bundles from the user + if options.cert: + session.verify = options.cert + + # Handle SSL client certificate + if options.client_cert: + session.cert = options.client_cert + + # Handle timeouts + if options.timeout or timeout: + session.timeout = ( + timeout if timeout is not None else options.timeout + ) + + # Handle configured proxies + if options.proxy: + session.proxies = { + "http": options.proxy, + "https": options.proxy, + } + + # Determine 
if we can prompt the user for authentication or not + session.auth.prompting = not options.no_input + + return session + + +class IndexGroupCommand(Command, SessionCommandMixin): + + """ + Abstract base class for commands with the index_group options. + + This also corresponds to the commands that permit the pip version check. + """ + + def handle_pip_version_check(self, options): + # type: (Values) -> None + """ + Do the pip version check if not disabled. + + This overrides the default behavior of not doing the check. + """ + # Make sure the index_group options are present. + assert hasattr(options, 'no_index') + + if options.disable_pip_version_check or options.no_index: + return + + # Otherwise, check if we're using the latest version of pip available. + session = self._build_session( + options, + retries=0, + timeout=min(5, options.timeout) + ) + with session: + pip_self_version_check(session, options) + + +KEEPABLE_TEMPDIR_TYPES = [ + tempdir_kinds.BUILD_ENV, + tempdir_kinds.EPHEM_WHEEL_CACHE, + tempdir_kinds.REQ_BUILD, +] + + +def with_cleanup(func): + # type: (Any) -> Any + """Decorator for common logic related to managing temporary + directories. + """ + def configure_tempdir_registry(registry): + # type: (TempDirectoryTypeRegistry) -> None + for t in KEEPABLE_TEMPDIR_TYPES: + registry.set_delete(t, False) + + def wrapper(self, options, args): + # type: (RequirementCommand, Values, List[Any]) -> Optional[int] + assert self.tempdir_registry is not None + if options.no_clean: + configure_tempdir_registry(self.tempdir_registry) + + try: + return func(self, options, args) + except PreviousBuildDirError: + # This kind of conflict can occur when the user passes an explicit + # build directory with a pre-existing folder. In that case we do + # not want to accidentally remove it. 
+ configure_tempdir_registry(self.tempdir_registry) + raise + + return wrapper + + +class RequirementCommand(IndexGroupCommand): + + def __init__(self, *args, **kw): + # type: (Any, Any) -> None + super(RequirementCommand, self).__init__(*args, **kw) + + self.cmd_opts.add_option(cmdoptions.no_clean()) + + @staticmethod + def make_requirement_preparer( + temp_build_dir, # type: TempDirectory + options, # type: Values + req_tracker, # type: RequirementTracker + session, # type: PipSession + finder, # type: PackageFinder + use_user_site, # type: bool + download_dir=None, # type: str + wheel_download_dir=None, # type: str + ): + # type: (...) -> RequirementPreparer + """ + Create a RequirementPreparer instance for the given parameters. + """ + downloader = Downloader(session, progress_bar=options.progress_bar) + + temp_build_dir_path = temp_build_dir.path + assert temp_build_dir_path is not None + + return RequirementPreparer( + build_dir=temp_build_dir_path, + src_dir=options.src_dir, + download_dir=download_dir, + wheel_download_dir=wheel_download_dir, + build_isolation=options.build_isolation, + req_tracker=req_tracker, + downloader=downloader, + finder=finder, + require_hashes=options.require_hashes, + use_user_site=use_user_site, + ) + + @staticmethod + def make_resolver( + preparer, # type: RequirementPreparer + finder, # type: PackageFinder + options, # type: Values + wheel_cache=None, # type: Optional[WheelCache] + use_user_site=False, # type: bool + ignore_installed=True, # type: bool + ignore_requires_python=False, # type: bool + force_reinstall=False, # type: bool + upgrade_strategy="to-satisfy-only", # type: str + use_pep517=None, # type: Optional[bool] + py_version_info=None # type: Optional[Tuple[int, ...]] + ): + # type: (...) -> BaseResolver + """ + Create a Resolver instance for the given parameters. 
+ """ + make_install_req = partial( + install_req_from_req_string, + isolated=options.isolated_mode, + use_pep517=use_pep517, + ) + # The long import name and duplicated invocation is needed to convince + # Mypy into correctly typechecking. Otherwise it would complain the + # "Resolver" class being redefined. + if '2020-resolver' in options.features_enabled: + import pip._internal.resolution.resolvelib.resolver + return pip._internal.resolution.resolvelib.resolver.Resolver( + preparer=preparer, + finder=finder, + wheel_cache=wheel_cache, + make_install_req=make_install_req, + use_user_site=use_user_site, + ignore_dependencies=options.ignore_dependencies, + ignore_installed=ignore_installed, + ignore_requires_python=ignore_requires_python, + force_reinstall=force_reinstall, + upgrade_strategy=upgrade_strategy, + py_version_info=py_version_info, + lazy_wheel='fast-deps' in options.features_enabled, + ) + import pip._internal.resolution.legacy.resolver + return pip._internal.resolution.legacy.resolver.Resolver( + preparer=preparer, + finder=finder, + wheel_cache=wheel_cache, + make_install_req=make_install_req, + use_user_site=use_user_site, + ignore_dependencies=options.ignore_dependencies, + ignore_installed=ignore_installed, + ignore_requires_python=ignore_requires_python, + force_reinstall=force_reinstall, + upgrade_strategy=upgrade_strategy, + py_version_info=py_version_info, + ) + + def get_requirements( + self, + args, # type: List[str] + options, # type: Values + finder, # type: PackageFinder + session, # type: PipSession + ): + # type: (...) -> List[InstallRequirement] + """ + Parse command-line arguments into the corresponding requirements. 
+ """ + requirements = [] # type: List[InstallRequirement] + for filename in options.constraints: + for parsed_req in parse_requirements( + filename, + constraint=True, finder=finder, options=options, + session=session): + req_to_add = install_req_from_parsed_requirement( + parsed_req, + isolated=options.isolated_mode, + user_supplied=False, + ) + requirements.append(req_to_add) + + for req in args: + req_to_add = install_req_from_line( + req, None, isolated=options.isolated_mode, + use_pep517=options.use_pep517, + user_supplied=True, + ) + requirements.append(req_to_add) + + for req in options.editables: + req_to_add = install_req_from_editable( + req, + user_supplied=True, + isolated=options.isolated_mode, + use_pep517=options.use_pep517, + ) + requirements.append(req_to_add) + + # NOTE: options.require_hashes may be set if --require-hashes is True + for filename in options.requirements: + for parsed_req in parse_requirements( + filename, + finder=finder, options=options, session=session): + req_to_add = install_req_from_parsed_requirement( + parsed_req, + isolated=options.isolated_mode, + use_pep517=options.use_pep517, + user_supplied=True, + ) + requirements.append(req_to_add) + + # If any requirement has hash options, enable hash checking. + if any(req.has_hash_options for req in requirements): + options.require_hashes = True + + if not (args or options.editables or options.requirements): + opts = {'name': self.name} + if options.find_links: + raise CommandError( + 'You must give at least one requirement to {name} ' + '(maybe you meant "pip {name} {links}"?)'.format( + **dict(opts, links=' '.join(options.find_links)))) + else: + raise CommandError( + 'You must give at least one requirement to {name} ' + '(see "pip help {name}")'.format(**opts)) + + return requirements + + @staticmethod + def trace_basic_info(finder): + # type: (PackageFinder) -> None + """ + Trace basic information about the provided objects. 
+ """ + # Display where finder is looking for packages + search_scope = finder.search_scope + locations = search_scope.get_formatted_locations() + if locations: + logger.info(locations) + + def _build_package_finder( + self, + options, # type: Values + session, # type: PipSession + target_python=None, # type: Optional[TargetPython] + ignore_requires_python=None, # type: Optional[bool] + ): + # type: (...) -> PackageFinder + """ + Create a package finder appropriate to this requirement command. + + :param ignore_requires_python: Whether to ignore incompatible + "Requires-Python" values in links. Defaults to False. + """ + link_collector = LinkCollector.create(session, options=options) + selection_prefs = SelectionPreferences( + allow_yanked=True, + format_control=options.format_control, + allow_all_prereleases=options.pre, + prefer_binary=options.prefer_binary, + ignore_requires_python=ignore_requires_python, + ) + + return PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, + target_python=target_python, + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/spinners.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/spinners.py new file mode 100644 index 00000000..c6c4c5cd --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/cli/spinners.py @@ -0,0 +1,173 @@ +from __future__ import absolute_import, division + +import contextlib +import itertools +import logging +import sys +import time + +from pip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR + +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.logging import get_indentation +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Iterator, IO + +logger = logging.getLogger(__name__) + + +class SpinnerInterface(object): + def spin(self): + # type: () -> None + raise NotImplementedError() + + def 
class InteractiveSpinner(SpinnerInterface):
    """Spinner for interactive terminals: animates in place on one line."""

    def __init__(self, message, file=None, spin_chars="-\\|/",
                 # Empirically, 8 updates/second looks nice
                 min_update_interval_seconds=0.125):
        # type: (str, IO[str], str, float) -> None
        self._message = message
        self._file = sys.stdout if file is None else file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False

        self._spin_cycle = itertools.cycle(spin_chars)

        # Emit the indented message prefix once; the status is written after
        # it and repeatedly overwritten in place by _write().
        self._file.write(" " * get_indentation() + self._message + " ... ")
        self._width = 0

    def _write(self, status):
        # type: (str) -> None
        assert not self._finished
        # Rub out the previous status: backspace over it, blank it with
        # spaces, backspace again, then write the new text.
        erase = "\b" * self._width
        self._file.write(erase + " " * self._width + erase)
        self._file.write(status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()

    def spin(self):
        # type: () -> None
        # Skip silently once finished or when updating too fast.
        if self._finished or not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))

    def finish(self, final_status):
        # type: (str) -> None
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True


# Used for dumb terminals, non-interactive installs (no tty), etc.
# We still print updates occasionally (once every 60 seconds by default) to
# act as a keep-alive for systems like Travis-CI that take lack-of-output as
# an indication that a task has frozen.
class NonInteractiveSpinner(SpinnerInterface):
    """Spinner for non-tty output: logs occasional keep-alive messages."""

    def __init__(self, message, min_update_interval_seconds=60):
        # type: (str, float) -> None
        self._message = message
        self._finished = False
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status):
        # type: (str) -> None
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self):
        # type: () -> None
        if self._finished or not self._rate_limiter.ready():
            return
        self._update("still running...")

    def finish(self, final_status):
        # type: (str) -> None
        if self._finished:
            return
        self._update("finished with status '{}'".format(final_status))
        self._finished = True


class RateLimiter(object):
    """Gates updates so they happen at most once per configured interval."""

    def __init__(self, min_update_interval_seconds):
        # type: (float) -> None
        self._min_update_interval_seconds = min_update_interval_seconds
        # 0 means "never updated", so the first ready() check passes.
        self._last_update = 0  # type: float

    def ready(self):
        # type: () -> bool
        return (
            time.time() - self._last_update
            >= self._min_update_interval_seconds
        )

    def reset(self):
        # type: () -> None
        self._last_update = time.time()


@contextlib.contextmanager
def open_spinner(message):
    # type: (str) -> Iterator[SpinnerInterface]
    # Interactive spinner goes directly to sys.stdout rather than being routed
    # through the logging system, but it acts like it has level INFO,
    # i.e. it's only displayed if we're at level INFO or better.
    # Non-interactive spinner goes through the logging system, so it is always
    # in sync with logging configuration.
    interactive = (
        sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO
    )
    if interactive:
        spinner = InteractiveSpinner(message)  # type: SpinnerInterface
    else:
        spinner = NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    else:
        spinner.finish("done")


@contextlib.contextmanager
def hidden_cursor(file):
    # type: (IO[str]) -> Iterator[None]
    # The Windows terminal does not support the hide/show cursor ANSI codes,
    # even via colorama. So don't even try.
    if WINDOWS:
        yield
    # We don't want to clutter the output with control characters if we're
    # writing to a file, or if the user is running with --quiet.
    # See https://github.com/pypa/pip/issues/3418
    elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
        yield
    else:
        file.write(HIDE_CURSOR)
        try:
            yield
        finally:
            file.write(SHOW_CURSOR)
+# mypy: disallow-untyped-defs=False +# There is currently a bug in python/typeshed mentioned at +# https://github.com/python/typeshed/issues/3906 which causes the +# return type of difflib.get_close_matches to be reported +# as List[Sequence[str]] whereas it should have been List[str] + from __future__ import absolute_import -from pip._internal.commands.completion import CompletionCommand -from pip._internal.commands.configuration import ConfigurationCommand -from pip._internal.commands.download import DownloadCommand -from pip._internal.commands.freeze import FreezeCommand -from pip._internal.commands.hash import HashCommand -from pip._internal.commands.help import HelpCommand -from pip._internal.commands.list import ListCommand -from pip._internal.commands.check import CheckCommand -from pip._internal.commands.search import SearchCommand -from pip._internal.commands.show import ShowCommand -from pip._internal.commands.install import InstallCommand -from pip._internal.commands.uninstall import UninstallCommand -from pip._internal.commands.wheel import WheelCommand +import importlib +from collections import OrderedDict, namedtuple from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import List, Type # noqa: F401 - from pip._internal.cli.base_command import Command # noqa: F401 - -commands_order = [ - InstallCommand, - DownloadCommand, - UninstallCommand, - FreezeCommand, - ListCommand, - ShowCommand, - CheckCommand, - ConfigurationCommand, - SearchCommand, - WheelCommand, - HashCommand, - CompletionCommand, - HelpCommand, -] # type: List[Type[Command]] - -commands_dict = {c.name: c for c in commands_order} - - -def get_summaries(ordered=True): - """Yields sorted (command name, command summary) tuples.""" - - if ordered: - cmditems = _sort_commands(commands_dict, commands_order) - else: - cmditems = commands_dict.items() - - for name, command_class in cmditems: - yield (name, command_class.summary) + from typing import Any 
+ from pip._internal.cli.base_command import Command + + +CommandInfo = namedtuple('CommandInfo', 'module_path, class_name, summary') + +# The ordering matters for help display. +# Also, even though the module path starts with the same +# "pip._internal.commands" prefix in each case, we include the full path +# because it makes testing easier (specifically when modifying commands_dict +# in test setup / teardown by adding info for a FakeCommand class defined +# in a test-related module). +# Finally, we need to pass an iterable of pairs here rather than a dict +# so that the ordering won't be lost when using Python 2.7. +commands_dict = OrderedDict([ + ('install', CommandInfo( + 'pip._internal.commands.install', 'InstallCommand', + 'Install packages.', + )), + ('download', CommandInfo( + 'pip._internal.commands.download', 'DownloadCommand', + 'Download packages.', + )), + ('uninstall', CommandInfo( + 'pip._internal.commands.uninstall', 'UninstallCommand', + 'Uninstall packages.', + )), + ('freeze', CommandInfo( + 'pip._internal.commands.freeze', 'FreezeCommand', + 'Output installed packages in requirements format.', + )), + ('list', CommandInfo( + 'pip._internal.commands.list', 'ListCommand', + 'List installed packages.', + )), + ('show', CommandInfo( + 'pip._internal.commands.show', 'ShowCommand', + 'Show information about installed packages.', + )), + ('check', CommandInfo( + 'pip._internal.commands.check', 'CheckCommand', + 'Verify installed packages have compatible dependencies.', + )), + ('config', CommandInfo( + 'pip._internal.commands.configuration', 'ConfigurationCommand', + 'Manage local and global configuration.', + )), + ('search', CommandInfo( + 'pip._internal.commands.search', 'SearchCommand', + 'Search PyPI for packages.', + )), + ('cache', CommandInfo( + 'pip._internal.commands.cache', 'CacheCommand', + "Inspect and manage pip's wheel cache.", + )), + ('wheel', CommandInfo( + 'pip._internal.commands.wheel', 'WheelCommand', + 'Build wheels from your 
requirements.', + )), + ('hash', CommandInfo( + 'pip._internal.commands.hash', 'HashCommand', + 'Compute hashes of package archives.', + )), + ('completion', CommandInfo( + 'pip._internal.commands.completion', 'CompletionCommand', + 'A helper command used for command completion.', + )), + ('debug', CommandInfo( + 'pip._internal.commands.debug', 'DebugCommand', + 'Show information useful for debugging.', + )), + ('help', CommandInfo( + 'pip._internal.commands.help', 'HelpCommand', + 'Show help for commands.', + )), +]) # type: OrderedDict[str, CommandInfo] + + +def create_command(name, **kwargs): + # type: (str, **Any) -> Command + """ + Create an instance of the Command class with the given name. + """ + module_path, class_name, summary = commands_dict[name] + module = importlib.import_module(module_path) + command_class = getattr(module, class_name) + command = command_class(name=name, summary=summary, **kwargs) + + return command def get_similar_commands(name): @@ -66,14 +120,3 @@ def get_similar_commands(name): return close_commands[0] else: return False - - -def _sort_commands(cmddict, order): - def keyfn(key): - try: - return order.index(key[1]) - except ValueError: - # unordered items should come last - return 0xff - - return sorted(cmddict.items(), key=keyfn) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/cache.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/cache.py new file mode 100644 index 00000000..747277f6 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/cache.py @@ -0,0 +1,182 @@ +from __future__ import absolute_import + +import logging +import os +import textwrap + +import pip._internal.utils.filesystem as filesystem +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.exceptions import CommandError, PipError +from 
pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import Any, List + + +logger = logging.getLogger(__name__) + + +class CacheCommand(Command): + """ + Inspect and manage pip's wheel cache. + + Subcommands: + + - dir: Show the cache directory. + - info: Show information about the cache. + - list: List filenames of packages stored in the cache. + - remove: Remove one or more package from the cache. + - purge: Remove all items from the cache. + + ``<pattern>`` can be a glob expression or a package name. + """ + + ignore_require_venv = True + usage = """ + %prog dir + %prog info + %prog list [<pattern>] + %prog remove <pattern> + %prog purge + """ + + def run(self, options, args): + # type: (Values, List[Any]) -> int + handlers = { + "dir": self.get_cache_dir, + "info": self.get_cache_info, + "list": self.list_cache_items, + "remove": self.remove_cache_items, + "purge": self.purge_cache, + } + + if not options.cache_dir: + logger.error("pip cache commands can not " + "function since cache is disabled.") + return ERROR + + # Determine action + if not args or args[0] not in handlers: + logger.error( + "Need an action (%s) to perform.", + ", ".join(sorted(handlers)), + ) + return ERROR + + action = args[0] + + # Error handling happens here, not in the action-handlers. 
+ try: + handlers[action](options, args[1:]) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + return SUCCESS + + def get_cache_dir(self, options, args): + # type: (Values, List[Any]) -> None + if args: + raise CommandError('Too many arguments') + + logger.info(options.cache_dir) + + def get_cache_info(self, options, args): + # type: (Values, List[Any]) -> None + if args: + raise CommandError('Too many arguments') + + num_packages = len(self._find_wheels(options, '*')) + + cache_location = self._wheels_cache_dir(options) + cache_size = filesystem.format_directory_size(cache_location) + + message = textwrap.dedent(""" + Location: {location} + Size: {size} + Number of wheels: {package_count} + """).format( + location=cache_location, + package_count=num_packages, + size=cache_size, + ).strip() + + logger.info(message) + + def list_cache_items(self, options, args): + # type: (Values, List[Any]) -> None + if len(args) > 1: + raise CommandError('Too many arguments') + + if args: + pattern = args[0] + else: + pattern = '*' + + files = self._find_wheels(options, pattern) + + if not files: + logger.info('Nothing cached.') + return + + results = [] + for filename in files: + wheel = os.path.basename(filename) + size = filesystem.format_file_size(filename) + results.append(' - {} ({})'.format(wheel, size)) + logger.info('Cache contents:\n') + logger.info('\n'.join(sorted(results))) + + def remove_cache_items(self, options, args): + # type: (Values, List[Any]) -> None + if len(args) > 1: + raise CommandError('Too many arguments') + + if not args: + raise CommandError('Please provide a pattern') + + files = self._find_wheels(options, args[0]) + if not files: + raise CommandError('No matching packages') + + for filename in files: + os.unlink(filename) + logger.debug('Removed %s', filename) + logger.info('Files removed: %s', len(files)) + + def purge_cache(self, options, args): + # type: (Values, List[Any]) -> None + if args: + raise CommandError('Too many 
arguments') + + return self.remove_cache_items(options, ['*']) + + def _wheels_cache_dir(self, options): + # type: (Values) -> str + return os.path.join(options.cache_dir, 'wheels') + + def _find_wheels(self, options, pattern): + # type: (Values, str) -> List[str] + wheel_dir = self._wheels_cache_dir(options) + + # The wheel filename format, as specified in PEP 427, is: + # {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl + # + # Additionally, non-alphanumeric values in the distribution are + # normalized to underscores (_), meaning hyphens can never occur + # before `-{version}`. + # + # Given that information: + # - If the pattern we're given contains a hyphen (-), the user is + # providing at least the version. Thus, we can just append `*.whl` + # to match the rest of it. + # - If the pattern we're given doesn't contain a hyphen (-), the + # user is only providing the name. Thus, we append `-*.whl` to + # match the hyphen before the version, followed by anything else. 
+ # + # PEP 427: https://www.python.org/dev/peps/pep-0427/ + pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl") + + return filesystem.find_files(wheel_dir, pattern) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/check.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/check.py index 801cecc0..b557ca64 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/check.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/check.py @@ -1,28 +1,37 @@ import logging from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS from pip._internal.operations.check import ( - check_package_set, create_package_set_from_installed, + check_package_set, + create_package_set_from_installed, ) +from pip._internal.utils.misc import write_output +from pip._internal.utils.typing import MYPY_CHECK_RUNNING logger = logging.getLogger(__name__) +if MYPY_CHECK_RUNNING: + from typing import List, Any + from optparse import Values + class CheckCommand(Command): """Verify installed packages have compatible dependencies.""" - name = 'check' + usage = """ %prog [options]""" - summary = 'Verify installed packages have compatible dependencies.' 
def run(self, options, args): + # type: (Values, List[Any]) -> int + package_set, parsing_probs = create_package_set_from_installed() missing, conflicting = check_package_set(package_set) for project_name in missing: version = package_set[project_name].version for dependency in missing[project_name]: - logger.info( + write_output( "%s %s requires %s, which is not installed.", project_name, version, dependency[0], ) @@ -30,12 +39,13 @@ class CheckCommand(Command): for project_name in conflicting: version = package_set[project_name].version for dep_name, dep_version, req in conflicting[project_name]: - logger.info( + write_output( "%s %s has requirement %s, but you have %s %s.", project_name, version, req, dep_name, dep_version, ) if missing or conflicting or parsing_probs: - return 1 + return ERROR else: - logger.info("No broken requirements found.") + write_output("No broken requirements found.") + return SUCCESS diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/completion.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/completion.py index 2fcdd393..9b99f51f 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/completion.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/completion.py @@ -4,32 +4,38 @@ import sys import textwrap from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import SUCCESS from pip._internal.utils.misc import get_prog +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List + from optparse import Values BASE_COMPLETION = """ -# pip %(shell)s completion start%(script)s# pip %(shell)s completion end +# pip {shell} completion start{script}# pip {shell} completion end """ COMPLETION_SCRIPTS = { 'bash': """ _pip_completion() - { - COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\ + {{ + COMPREPLY=( $( 
COMP_WORDS="${{COMP_WORDS[*]}}" \\ COMP_CWORD=$COMP_CWORD \\ - PIP_AUTO_COMPLETE=1 $1 ) ) - } - complete -o default -F _pip_completion %(prog)s + PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) ) + }} + complete -o default -F _pip_completion {prog} """, 'zsh': """ - function _pip_completion { + function _pip_completion {{ local words cword read -Ac words read -cn cword reply=( $( COMP_WORDS="$words[*]" \\ COMP_CWORD=$(( cword-1 )) \\ - PIP_AUTO_COMPLETE=1 $words[1] ) ) - } - compctl -K _pip_completion %(prog)s + PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )) + }} + compctl -K _pip_completion {prog} """, 'fish': """ function __fish_complete_pip @@ -40,55 +46,53 @@ COMPLETION_SCRIPTS = { set -lx PIP_AUTO_COMPLETE 1 string split \\ -- (eval $COMP_WORDS[1]) end - complete -fa "(__fish_complete_pip)" -c %(prog)s + complete -fa "(__fish_complete_pip)" -c {prog} """, } class CompletionCommand(Command): """A helper command to be used for command completion.""" - name = 'completion' - summary = 'A helper command used for command completion.' 
- ignore_require_venv = True - def __init__(self, *args, **kw): - super(CompletionCommand, self).__init__(*args, **kw) - - cmd_opts = self.cmd_opts + ignore_require_venv = True - cmd_opts.add_option( + def add_options(self): + # type: () -> None + self.cmd_opts.add_option( '--bash', '-b', action='store_const', const='bash', dest='shell', help='Emit completion code for bash') - cmd_opts.add_option( + self.cmd_opts.add_option( '--zsh', '-z', action='store_const', const='zsh', dest='shell', help='Emit completion code for zsh') - cmd_opts.add_option( + self.cmd_opts.add_option( '--fish', '-f', action='store_const', const='fish', dest='shell', help='Emit completion code for fish') - self.parser.insert_option_group(0, cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) def run(self, options, args): + # type: (Values, List[str]) -> int """Prints the completion code of the given shell""" shells = COMPLETION_SCRIPTS.keys() shell_options = ['--' + shell for shell in sorted(shells)] if options.shell in shells: script = textwrap.dedent( - COMPLETION_SCRIPTS.get(options.shell, '') % { - 'prog': get_prog(), - } + COMPLETION_SCRIPTS.get(options.shell, '').format( + prog=get_prog()) ) - print(BASE_COMPLETION % {'script': script, 'shell': options.shell}) + print(BASE_COMPLETION.format(script=script, shell=options.shell)) + return SUCCESS else: sys.stderr.write( - 'ERROR: You must pass %s\n' % ' or '.join(shell_options) + 'ERROR: You must pass {}\n' .format(' or '.join(shell_options)) ) + return SUCCESS diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/configuration.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/configuration.py index 826c08dc..f9b3ab79 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/configuration.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/configuration.py @@ -4,32 +4,45 @@ import 
subprocess from pip._internal.cli.base_command import Command from pip._internal.cli.status_codes import ERROR, SUCCESS -from pip._internal.configuration import Configuration, kinds +from pip._internal.configuration import ( + Configuration, + get_configuration_files, + kinds, +) from pip._internal.exceptions import PipError -from pip._internal.locations import venv_config_file -from pip._internal.utils.misc import get_prog +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import get_prog, write_output +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Any, Optional + from optparse import Values + + from pip._internal.configuration import Kind logger = logging.getLogger(__name__) class ConfigurationCommand(Command): - """Manage local and global configuration. + """ + Manage local and global configuration. - Subcommands: + Subcommands: - list: List the active configuration (or from the file specified) - edit: Edit the configuration file in an editor - get: Get the value associated with name - set: Set the name=value - unset: Unset the value associated with name + - list: List the active configuration (or from the file specified) + - edit: Edit the configuration file in an editor + - get: Get the value associated with name + - set: Set the name=value + - unset: Unset the value associated with name + - debug: List the configuration files and values defined under them - If none of --user, --global and --venv are passed, a virtual - environment configuration file is used if one is active and the file - exists. Otherwise, all modifications happen on the to the user file by - default. + If none of --user, --global and --site are passed, a virtual + environment configuration file is used if one is active and the file + exists. Otherwise, all modifications happen on the to the user file by + default. 
""" - name = 'config' + ignore_require_venv = True usage = """ %prog [<file-option>] list %prog [<file-option>] [--editor <editor-path>] edit @@ -37,15 +50,11 @@ class ConfigurationCommand(Command): %prog [<file-option>] get name %prog [<file-option>] set name value %prog [<file-option>] unset name + %prog [<file-option>] debug """ - summary = "Manage local and global configuration." - - def __init__(self, *args, **kwargs): - super(ConfigurationCommand, self).__init__(*args, **kwargs) - - self.configuration = None - + def add_options(self): + # type: () -> None self.cmd_opts.add_option( '--editor', dest='editor', @@ -74,28 +83,31 @@ class ConfigurationCommand(Command): ) self.cmd_opts.add_option( - '--venv', - dest='venv_file', + '--site', + dest='site_file', action='store_true', default=False, - help='Use the virtualenv configuration file only' + help='Use the current environment configuration file only' ) self.parser.insert_option_group(0, self.cmd_opts) def run(self, options, args): + # type: (Values, List[str]) -> int handlers = { "list": self.list_values, "edit": self.open_in_editor, "get": self.get_name, "set": self.set_name_value, - "unset": self.unset_name + "unset": self.unset_name, + "debug": self.list_config_values, } # Determine action if not args or args[0] not in handlers: - logger.error("Need an action ({}) to perform.".format( - ", ".join(sorted(handlers))) + logger.error( + "Need an action (%s) to perform.", + ", ".join(sorted(handlers)), ) return ERROR @@ -127,54 +139,97 @@ class ConfigurationCommand(Command): return SUCCESS def _determine_file(self, options, need_value): - file_options = { - kinds.USER: options.user_file, - kinds.GLOBAL: options.global_file, - kinds.VENV: options.venv_file - } - - if sum(file_options.values()) == 0: + # type: (Values, bool) -> Optional[Kind] + file_options = [key for key, value in ( + (kinds.USER, options.user_file), + (kinds.GLOBAL, options.global_file), + (kinds.SITE, options.site_file), + ) if value] + + if 
not file_options: if not need_value: return None - # Default to user, unless there's a virtualenv file. - elif os.path.exists(venv_config_file): - return kinds.VENV + # Default to user, unless there's a site file. + elif any( + os.path.exists(site_config_file) + for site_config_file in get_configuration_files()[kinds.SITE] + ): + return kinds.SITE else: return kinds.USER - elif sum(file_options.values()) == 1: - # There's probably a better expression for this. - return [key for key in file_options if file_options[key]][0] + elif len(file_options) == 1: + return file_options[0] raise PipError( "Need exactly one file to operate upon " - "(--user, --venv, --global) to perform." + "(--user, --site, --global) to perform." ) def list_values(self, options, args): + # type: (Values, List[str]) -> None self._get_n_args(args, "list", n=0) for key, value in sorted(self.configuration.items()): - logger.info("%s=%r", key, value) + write_output("%s=%r", key, value) def get_name(self, options, args): + # type: (Values, List[str]) -> None key = self._get_n_args(args, "get [name]", n=1) value = self.configuration.get_value(key) - logger.info("%s", value) + write_output("%s", value) def set_name_value(self, options, args): + # type: (Values, List[str]) -> None key, value = self._get_n_args(args, "set [name] [value]", n=2) self.configuration.set_value(key, value) self._save_configuration() def unset_name(self, options, args): + # type: (Values, List[str]) -> None key = self._get_n_args(args, "unset [name]", n=1) self.configuration.unset_value(key) self._save_configuration() + def list_config_values(self, options, args): + # type: (Values, List[str]) -> None + """List config key-value pairs across different config files""" + self._get_n_args(args, "debug", n=0) + + self.print_env_var_values() + # Iterate over config files and print if they exist, and the + # key-value pairs present in them if they do + for variant, files in sorted(self.configuration.iter_config_files()): + 
write_output("%s:", variant) + for fname in files: + with indent_log(): + file_exists = os.path.exists(fname) + write_output("%s, exists: %r", + fname, file_exists) + if file_exists: + self.print_config_file_values(variant) + + def print_config_file_values(self, variant): + # type: (Kind) -> None + """Get key-value pairs from the file of a variant""" + for name, value in self.configuration.\ + get_values_in_config(variant).items(): + with indent_log(): + write_output("%s: %s", name, value) + + def print_env_var_values(self): + # type: () -> None + """Get key-values pairs present as environment variables""" + write_output("%s:", 'env_var') + with indent_log(): + for key, value in sorted(self.configuration.get_environ_vars()): + env_var = 'PIP_{}'.format(key.upper()) + write_output("%s=%r", env_var, value) + def open_in_editor(self, options, args): + # type: (Values, List[str]) -> None editor = self._determine_editor(options) fname = self.configuration.get_file_to_edit() @@ -190,6 +245,7 @@ class ConfigurationCommand(Command): ) def _get_n_args(self, args, example, n): + # type: (List[str], str, int) -> Any """Helper to make sure the command got the right number of arguments """ if len(args) != n: @@ -205,18 +261,19 @@ class ConfigurationCommand(Command): return args def _save_configuration(self): + # type: () -> None # We successfully ran a modifying command. Need to save the # configuration. try: self.configuration.save() except Exception: - logger.error( - "Unable to save configuration. Please report this as a bug.", - exc_info=1 + logger.exception( + "Unable to save configuration. Please report this as a bug." 
) raise PipError("Internal Error.") def _determine_editor(self, options): + # type: (Values) -> str if options.editor is not None: return options.editor elif "VISUAL" in os.environ: diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/debug.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/debug.py new file mode 100644 index 00000000..ff369d7d --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/debug.py @@ -0,0 +1,229 @@ +from __future__ import absolute_import + +import locale +import logging +import os +import sys + +import pip._vendor +from pip._vendor import pkg_resources +from pip._vendor.certifi import where + +from pip import __file__ as pip_location +from pip._internal.cli import cmdoptions +from pip._internal.cli.base_command import Command +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import get_pip_version +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from types import ModuleType + from typing import List, Optional, Dict + from optparse import Values + from pip._internal.configuration import Configuration + +logger = logging.getLogger(__name__) + + +def show_value(name, value): + # type: (str, Optional[str]) -> None + logger.info('%s: %s', name, value) + + +def show_sys_implementation(): + # type: () -> None + logger.info('sys.implementation:') + if hasattr(sys, 'implementation'): + implementation = sys.implementation # type: ignore + implementation_name = implementation.name + else: + implementation_name = '' + + with indent_log(): + show_value('name', implementation_name) + + +def create_vendor_txt_map(): + # type: () -> Dict[str, str] + vendor_txt_path = os.path.join( + os.path.dirname(pip_location), + '_vendor', + 'vendor.txt' + ) + + with 
def create_vendor_txt_map():
    # type: () -> Dict[str, str]
    """Parse _vendor/vendor.txt into a {module name: pinned version} dict."""
    vendor_txt_path = os.path.join(
        os.path.dirname(pip_location),
        '_vendor',
        'vendor.txt'
    )

    with open(vendor_txt_path) as f:
        # Purge non version specifying lines.
        # Also, remove any space prefix or suffixes (including comments).
        pinned = [
            line.strip().split(' ', 1)[0]
            for line in f.readlines() if '==' in line
        ]

    # Transform into "module" -> version dict.
    return dict(entry.split('==', 1) for entry in pinned)  # type: ignore


def get_module_from_module_name(module_name):
    # type: (str) -> ModuleType
    """Import and return the vendored module named in vendor.txt."""
    # Module name can be uppercase in vendor.txt for some reason...
    module_name = module_name.lower()
    # PATCH: setuptools is actually only pkg_resources.
    if module_name == 'setuptools':
        module_name = 'pkg_resources'

    __import__(
        'pip._vendor.{}'.format(module_name),
        globals(),
        locals(),
        level=0
    )
    return getattr(pip._vendor, module_name)


def get_vendor_version_from_module(module_name):
    # type: (str) -> Optional[str]
    """Return the importable version of a vendored module, if determinable."""
    module = get_module_from_module_name(module_name)
    version = getattr(module, '__version__', None)

    if not version:
        # Try to find version in debundled module info
        # The type for module.__file__ is Optional[str] in
        # Python 2, and str in Python 3. The type: ignore is
        # added to account for Python 2, instead of a cast
        # and should be removed once we drop Python 2 support
        working_set = pkg_resources.WorkingSet(
            [os.path.dirname(module.__file__)]  # type: ignore
        )
        package = working_set.find(
            pkg_resources.Requirement.parse(module_name))
        version = getattr(package, 'version', None)

    return version
+ """ + for module_name, expected_version in vendor_txt_versions.items(): + extra_message = '' + actual_version = get_vendor_version_from_module(module_name) + if not actual_version: + extra_message = ' (Unable to locate actual module version, using'\ + ' vendor.txt specified version)' + actual_version = expected_version + elif actual_version != expected_version: + extra_message = ' (CONFLICT: vendor.txt suggests version should'\ + ' be {})'.format(expected_version) + logger.info('%s==%s%s', module_name, actual_version, extra_message) + + +def show_vendor_versions(): + # type: () -> None + logger.info('vendored library versions:') + + vendor_txt_versions = create_vendor_txt_map() + with indent_log(): + show_actual_vendor_versions(vendor_txt_versions) + + +def show_tags(options): + # type: (Values) -> None + tag_limit = 10 + + target_python = make_target_python(options) + tags = target_python.get_tags() + + # Display the target options that were explicitly provided. + formatted_target = target_python.format_given() + suffix = '' + if formatted_target: + suffix = ' (target: {})'.format(formatted_target) + + msg = 'Compatible tags: {}{}'.format(len(tags), suffix) + logger.info(msg) + + if options.verbose < 1 and len(tags) > tag_limit: + tags_limited = True + tags = tags[:tag_limit] + else: + tags_limited = False + + with indent_log(): + for tag in tags: + logger.info(str(tag)) + + if tags_limited: + msg = ( + '...\n' + '[First {tag_limit} tags shown. 
Pass --verbose to show all.]' + ).format(tag_limit=tag_limit) + logger.info(msg) + + +def ca_bundle_info(config): + # type: (Configuration) -> str + levels = set() + for key, _ in config.items(): + levels.add(key.split('.')[0]) + + if not levels: + return "Not specified" + + levels_that_override_global = ['install', 'wheel', 'download'] + global_overriding_level = [ + level for level in levels if level in levels_that_override_global + ] + if not global_overriding_level: + return 'global' + + if 'global' in levels: + levels.remove('global') + return ", ".join(levels) + + +class DebugCommand(Command): + """ + Display debug information. + """ + + usage = """ + %prog <options>""" + ignore_require_venv = True + + def add_options(self): + # type: () -> None + cmdoptions.add_target_python_options(self.cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) + self.parser.config.load() + + def run(self, options, args): + # type: (Values, List[str]) -> int + logger.warning( + "This command is only meant for debugging. " + "Do not use this with automation for parsing and getting these " + "details, since the output and options of this command may " + "change without notice." 
+ ) + show_value('pip version', get_pip_version()) + show_value('sys.version', sys.version) + show_value('sys.executable', sys.executable) + show_value('sys.getdefaultencoding', sys.getdefaultencoding()) + show_value('sys.getfilesystemencoding', sys.getfilesystemencoding()) + show_value( + 'locale.getpreferredencoding', locale.getpreferredencoding(), + ) + show_value('sys.platform', sys.platform) + show_sys_implementation() + + show_value("'cert' config value", ca_bundle_info(self.parser.config)) + show_value("REQUESTS_CA_BUNDLE", os.environ.get('REQUESTS_CA_BUNDLE')) + show_value("CURL_CA_BUNDLE", os.environ.get('CURL_CA_BUNDLE')) + show_value("pip._vendor.certifi.where()", where()) + show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED) + + show_vendor_versions() + + show_tags(options) + + return SUCCESS diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/download.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/download.py index a57e4bc4..46e83712 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/download.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/download.py @@ -4,14 +4,17 @@ import logging import os from pip._internal.cli import cmdoptions -from pip._internal.cli.base_command import RequirementCommand -from pip._internal.operations.prepare import RequirementPreparer -from pip._internal.req import RequirementSet -from pip._internal.req.req_tracker import RequirementTracker -from pip._internal.resolve import Resolver -from pip._internal.utils.filesystem import check_path_owner -from pip._internal.utils.misc import ensure_dir, normalize_path +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.req_command import RequirementCommand, with_cleanup +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.req.req_tracker import 
get_requirement_tracker +from pip._internal.utils.misc import ensure_dir, normalize_path, write_output from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import List logger = logging.getLogger(__name__) @@ -28,7 +31,6 @@ class DownloadCommand(RequirementCommand): pip also supports downloading from "requirements files", which provide an easy way to specify a whole environment to be downloaded. """ - name = 'download' usage = """ %prog [options] <requirement specifier> [package-index-options] ... @@ -37,31 +39,25 @@ class DownloadCommand(RequirementCommand): %prog [options] <local project path> ... %prog [options] <archive url/path> ...""" - summary = 'Download packages.' - - def __init__(self, *args, **kw): - super(DownloadCommand, self).__init__(*args, **kw) - - cmd_opts = self.cmd_opts - - cmd_opts.add_option(cmdoptions.constraints()) - cmd_opts.add_option(cmdoptions.requirements()) - cmd_opts.add_option(cmdoptions.build_dir()) - cmd_opts.add_option(cmdoptions.no_deps()) - cmd_opts.add_option(cmdoptions.global_options()) - cmd_opts.add_option(cmdoptions.no_binary()) - cmd_opts.add_option(cmdoptions.only_binary()) - cmd_opts.add_option(cmdoptions.prefer_binary()) - cmd_opts.add_option(cmdoptions.src()) - cmd_opts.add_option(cmdoptions.pre()) - cmd_opts.add_option(cmdoptions.no_clean()) - cmd_opts.add_option(cmdoptions.require_hashes()) - cmd_opts.add_option(cmdoptions.progress_bar()) - cmd_opts.add_option(cmdoptions.no_build_isolation()) - cmd_opts.add_option(cmdoptions.use_pep517()) - cmd_opts.add_option(cmdoptions.no_use_pep517()) - - cmd_opts.add_option( + def add_options(self): + # type: () -> None + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.build_dir()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + 
self.cmd_opts.add_option(cmdoptions.global_options()) + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option(cmdoptions.src()) + self.cmd_opts.add_option(cmdoptions.pre()) + self.cmd_opts.add_option(cmdoptions.require_hashes()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.no_use_pep517()) + + self.cmd_opts.add_option( '-d', '--dest', '--destination-dir', '--destination-directory', dest='download_dir', metavar='dir', @@ -69,10 +65,7 @@ class DownloadCommand(RequirementCommand): help=("Download packages into <dir>."), ) - cmd_opts.add_option(cmdoptions.platform()) - cmd_opts.add_option(cmdoptions.python_version()) - cmd_opts.add_option(cmdoptions.implementation()) - cmd_opts.add_option(cmdoptions.abi()) + cmdoptions.add_target_python_options(self.cmd_opts) index_opts = cmdoptions.make_option_group( cmdoptions.index_group, @@ -80,97 +73,71 @@ class DownloadCommand(RequirementCommand): ) self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) + @with_cleanup def run(self, options, args): + # type: (Values, List[str]) -> int + options.ignore_installed = True # editable doesn't really make sense for `pip download`, but the bowels # of the RequirementSet code require that property. 
options.editables = [] - if options.python_version: - python_versions = [options.python_version] - else: - python_versions = None - cmdoptions.check_dist_restriction(options) - options.src_dir = os.path.abspath(options.src_dir) options.download_dir = normalize_path(options.download_dir) ensure_dir(options.download_dir) - with self._build_session(options) as session: - finder = self._build_package_finder( - options=options, - session=session, - platform=options.platform, - python_versions=python_versions, - abi=options.abi, - implementation=options.implementation, - ) - build_delete = (not (options.no_clean or options.build_dir)) - if options.cache_dir and not check_path_owner(options.cache_dir): - logger.warning( - "The directory '%s' or its parent directory is not owned " - "by the current user and caching wheels has been " - "disabled. check the permissions and owner of that " - "directory. If executing pip with sudo, you may want " - "sudo's -H flag.", - options.cache_dir, - ) - options.cache_dir = None - - with RequirementTracker() as req_tracker, TempDirectory( - options.build_dir, delete=build_delete, kind="download" - ) as directory: - - requirement_set = RequirementSet( - require_hashes=options.require_hashes, - ) - self.populate_requirement_set( - requirement_set, - args, - options, - finder, - session, - self.name, - None - ) - - preparer = RequirementPreparer( - build_dir=directory.path, - src_dir=options.src_dir, - download_dir=options.download_dir, - wheel_download_dir=None, - progress_bar=options.progress_bar, - build_isolation=options.build_isolation, - req_tracker=req_tracker, - ) - - resolver = Resolver( - preparer=preparer, - finder=finder, - session=session, - wheel_cache=None, - use_user_site=False, - upgrade_strategy="to-satisfy-only", - force_reinstall=False, - ignore_dependencies=options.ignore_dependencies, - ignore_requires_python=False, - ignore_installed=True, - isolated=options.isolated_mode, - ) - resolver.resolve(requirement_set) - - 
downloaded = ' '.join([ - req.name for req in requirement_set.successfully_downloaded - ]) - if downloaded: - logger.info('Successfully downloaded %s', downloaded) - - # Clean up - if not options.no_clean: - requirement_set.cleanup_files() - - return requirement_set + session = self.get_default_session(options) + + target_python = make_target_python(options) + finder = self._build_package_finder( + options=options, + session=session, + target_python=target_python, + ) + build_delete = (not (options.no_clean or options.build_dir)) + + req_tracker = self.enter_context(get_requirement_tracker()) + + directory = TempDirectory( + options.build_dir, + delete=build_delete, + kind="download", + globally_managed=True, + ) + + reqs = self.get_requirements(args, options, finder, session) + + preparer = self.make_requirement_preparer( + temp_build_dir=directory, + options=options, + req_tracker=req_tracker, + session=session, + finder=finder, + download_dir=options.download_dir, + use_user_site=False, + ) + + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + py_version_info=options.python_version, + ) + + self.trace_basic_info(finder) + + requirement_set = resolver.resolve( + reqs, check_supported_wheels=True + ) + + downloaded = ' '.join([req.name # type: ignore + for req in requirement_set.requirements.values() + if req.successfully_downloaded]) + if downloaded: + write_output('Successfully downloaded %s', downloaded) + + return SUCCESS diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/freeze.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/freeze.py index dc9c53a6..2071fbab 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/freeze.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/freeze.py @@ -3,13 +3,20 @@ from __future__ import absolute_import import sys from 
pip._internal.cache import WheelCache +from pip._internal.cli import cmdoptions from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import SUCCESS from pip._internal.models.format_control import FormatControl from pip._internal.operations.freeze import freeze from pip._internal.utils.compat import stdlib_pkgs +from pip._internal.utils.typing import MYPY_CHECK_RUNNING DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'} +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import List + class FreezeCommand(Command): """ @@ -17,15 +24,13 @@ class FreezeCommand(Command): packages are listed in a case-insensitive sorted order. """ - name = 'freeze' + usage = """ %prog [options]""" - summary = 'Output installed packages in requirements format.' log_streams = ("ext://sys.stderr", "ext://sys.stderr") - def __init__(self, *args, **kw): - super(FreezeCommand, self).__init__(*args, **kw) - + def add_options(self): + # type: () -> None self.cmd_opts.add_option( '-r', '--requirement', dest='requirements', @@ -56,12 +61,13 @@ class FreezeCommand(Command): action='store_true', default=False, help='Only output packages installed in user-site.') + self.cmd_opts.add_option(cmdoptions.list_path()) self.cmd_opts.add_option( '--all', dest='freeze_all', action='store_true', help='Do not skip these packages in the output:' - ' %s' % ', '.join(DEV_PKGS)) + ' {}'.format(', '.join(DEV_PKGS))) self.cmd_opts.add_option( '--exclude-editable', dest='exclude_editable', @@ -71,26 +77,27 @@ class FreezeCommand(Command): self.parser.insert_option_group(0, self.cmd_opts) def run(self, options, args): + # type: (Values, List[str]) -> int format_control = FormatControl(set(), set()) wheel_cache = WheelCache(options.cache_dir, format_control) skip = set(stdlib_pkgs) if not options.freeze_all: skip.update(DEV_PKGS) + cmdoptions.check_list_path_option(options) + freeze_kwargs = dict( requirement=options.requirements, find_links=options.find_links, 
local_only=options.local, user_only=options.user, - skip_regex=options.skip_requirements_regex, + paths=options.path, isolated=options.isolated_mode, wheel_cache=wheel_cache, skip=skip, exclude_editable=options.exclude_editable, ) - try: - for line in freeze(**freeze_kwargs): - sys.stdout.write(line + '\n') - finally: - wheel_cache.cleanup() + for line in freeze(**freeze_kwargs): + sys.stdout.write(line + '\n') + return SUCCESS diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/hash.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/hash.py index 423440e9..37831c39 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/hash.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/hash.py @@ -5,9 +5,14 @@ import logging import sys from pip._internal.cli.base_command import Command -from pip._internal.cli.status_codes import ERROR +from pip._internal.cli.status_codes import ERROR, SUCCESS from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES -from pip._internal.utils.misc import read_chunks +from pip._internal.utils.misc import read_chunks, write_output +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import List logger = logging.getLogger(__name__) @@ -18,37 +23,38 @@ class HashCommand(Command): These can be used with --hash in a requirements file to do repeatable installs. - """ - name = 'hash' + usage = '%prog [options] <file> ...' - summary = 'Compute hashes of package archives.' 
ignore_require_venv = True - def __init__(self, *args, **kw): - super(HashCommand, self).__init__(*args, **kw) + def add_options(self): + # type: () -> None self.cmd_opts.add_option( '-a', '--algorithm', dest='algorithm', choices=STRONG_HASHES, action='store', default=FAVORITE_HASH, - help='The hash algorithm to use: one of %s' % - ', '.join(STRONG_HASHES)) + help='The hash algorithm to use: one of {}'.format( + ', '.join(STRONG_HASHES))) self.parser.insert_option_group(0, self.cmd_opts) def run(self, options, args): + # type: (Values, List[str]) -> int if not args: self.parser.print_usage(sys.stderr) return ERROR algorithm = options.algorithm for path in args: - logger.info('%s:\n--hash=%s:%s', - path, algorithm, _hash_of_file(path, algorithm)) + write_output('%s:\n--hash=%s:%s', + path, algorithm, _hash_of_file(path, algorithm)) + return SUCCESS def _hash_of_file(path, algorithm): + # type: (str, str) -> str """Return the hash digest of a file.""" with open(path, 'rb') as archive: hash = hashlib.new(algorithm) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/help.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/help.py index 49a81cbb..a2edc298 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/help.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/help.py @@ -3,18 +3,25 @@ from __future__ import absolute_import from pip._internal.cli.base_command import Command from pip._internal.cli.status_codes import SUCCESS from pip._internal.exceptions import CommandError +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List + from optparse import Values class HelpCommand(Command): """Show help for commands""" - name = 'help' + usage = """ %prog <command>""" - summary = 'Show help for commands.' 
ignore_require_venv = True def run(self, options, args): - from pip._internal.commands import commands_dict, get_similar_commands + # type: (Values, List[str]) -> int + from pip._internal.commands import ( + commands_dict, create_command, get_similar_commands, + ) try: # 'pip help' with no args is handled by pip.__init__.parseopt() @@ -25,13 +32,13 @@ class HelpCommand(Command): if cmd_name not in commands_dict: guess = get_similar_commands(cmd_name) - msg = ['unknown command "%s"' % cmd_name] + msg = ['unknown command "{}"'.format(cmd_name)] if guess: - msg.append('maybe you meant "%s"' % guess) + msg.append('maybe you meant "{}"'.format(guess)) raise CommandError(' - '.join(msg)) - command = commands_dict[cmd_name]() + command = create_command(cmd_name) command.parser.print_help() return SUCCESS diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/install.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/install.py index 1c244d23..8c2c32fd 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/install.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/install.py @@ -5,34 +5,64 @@ import logging import operator import os import shutil +import site from optparse import SUPPRESS_HELP from pip._vendor import pkg_resources +from pip._vendor.packaging.utils import canonicalize_name from pip._internal.cache import WheelCache from pip._internal.cli import cmdoptions -from pip._internal.cli.base_command import RequirementCommand -from pip._internal.cli.status_codes import ERROR -from pip._internal.exceptions import ( - CommandError, InstallationError, PreviousBuildDirError, -) -from pip._internal.locations import distutils_scheme, virtualenv_no_global +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.req_command import RequirementCommand, with_cleanup +from 
pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.exceptions import CommandError, InstallationError +from pip._internal.locations import distutils_scheme from pip._internal.operations.check import check_install_conflicts -from pip._internal.operations.prepare import RequirementPreparer -from pip._internal.req import RequirementSet, install_given_reqs -from pip._internal.req.req_tracker import RequirementTracker -from pip._internal.resolve import Resolver -from pip._internal.utils.filesystem import check_path_owner +from pip._internal.req import install_given_reqs +from pip._internal.req.req_tracker import get_requirement_tracker +from pip._internal.utils.datetime import today_is_later_than +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.distutils_args import parse_distutils_args +from pip._internal.utils.filesystem import test_writable_dir from pip._internal.utils.misc import ( - ensure_dir, get_installed_version, + ensure_dir, + get_installed_version, + get_pip_version, protect_pip_from_modification_on_windows, + write_output, ) from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.wheel import WheelBuilder +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.virtualenv import virtualenv_no_global +from pip._internal.wheel_builder import build, should_build_for_install_command + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import Iterable, List, Optional + + from pip._internal.models.format_control import FormatControl + from pip._internal.operations.check import ConflictDetails + from pip._internal.req.req_install import InstallRequirement + from pip._internal.wheel_builder import BinaryAllowedPredicate + logger = logging.getLogger(__name__) +def get_check_binary_allowed(format_control): + # type: (FormatControl) -> BinaryAllowedPredicate + def check_binary_allowed(req): + # type: (InstallRequirement) -> bool + if req.use_pep517: + 
return True + canonical_name = canonicalize_name(req.name) + allowed_formats = format_control.get_allowed_formats(canonical_name) + return "binary" in allowed_formats + + return check_binary_allowed + + class InstallCommand(RequirementCommand): """ Install packages from: @@ -45,7 +75,6 @@ class InstallCommand(RequirementCommand): pip also supports installing from "requirements files", which provide an easy way to specify a whole environment to be installed. """ - name = 'install' usage = """ %prog [options] <requirement specifier> [package-index-options] ... @@ -54,20 +83,15 @@ class InstallCommand(RequirementCommand): %prog [options] [-e] <local project path> ... %prog [options] <archive url/path> ...""" - summary = 'Install packages.' + def add_options(self): + # type: () -> None + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.pre()) - def __init__(self, *args, **kw): - super(InstallCommand, self).__init__(*args, **kw) - - cmd_opts = self.cmd_opts - - cmd_opts.add_option(cmdoptions.requirements()) - cmd_opts.add_option(cmdoptions.constraints()) - cmd_opts.add_option(cmdoptions.no_deps()) - cmd_opts.add_option(cmdoptions.pre()) - - cmd_opts.add_option(cmdoptions.editable()) - cmd_opts.add_option( + self.cmd_opts.add_option(cmdoptions.editable()) + self.cmd_opts.add_option( '-t', '--target', dest='target_dir', metavar='dir', @@ -77,12 +101,9 @@ class InstallCommand(RequirementCommand): '<dir>. Use --upgrade to replace existing packages in <dir> ' 'with new versions.' 
) - cmd_opts.add_option(cmdoptions.platform()) - cmd_opts.add_option(cmdoptions.python_version()) - cmd_opts.add_option(cmdoptions.implementation()) - cmd_opts.add_option(cmdoptions.abi()) + cmdoptions.add_target_python_options(self.cmd_opts) - cmd_opts.add_option( + self.cmd_opts.add_option( '--user', dest='use_user_site', action='store_true', @@ -90,19 +111,19 @@ class InstallCommand(RequirementCommand): "platform. Typically ~/.local/, or %APPDATA%\\Python on " "Windows. (See the Python documentation for site.USER_BASE " "for full details.)") - cmd_opts.add_option( + self.cmd_opts.add_option( '--no-user', dest='use_user_site', action='store_false', help=SUPPRESS_HELP) - cmd_opts.add_option( + self.cmd_opts.add_option( '--root', dest='root_path', metavar='dir', default=None, help="Install everything relative to this alternate root " "directory.") - cmd_opts.add_option( + self.cmd_opts.add_option( '--prefix', dest='prefix_path', metavar='dir', @@ -110,11 +131,11 @@ class InstallCommand(RequirementCommand): help="Installation prefix where lib, bin and other top-level " "folders are placed") - cmd_opts.add_option(cmdoptions.build_dir()) + self.cmd_opts.add_option(cmdoptions.build_dir()) - cmd_opts.add_option(cmdoptions.src()) + self.cmd_opts.add_option(cmdoptions.src()) - cmd_opts.add_option( + self.cmd_opts.add_option( '-U', '--upgrade', dest='upgrade', action='store_true', @@ -123,7 +144,7 @@ class InstallCommand(RequirementCommand): 'upgrade-strategy used.' ) - cmd_opts.add_option( + self.cmd_opts.add_option( '--upgrade-strategy', dest='upgrade_strategy', default='only-if-needed', @@ -137,28 +158,32 @@ class InstallCommand(RequirementCommand): 'satisfy the requirements of the upgraded package(s).' 
) - cmd_opts.add_option( + self.cmd_opts.add_option( '--force-reinstall', dest='force_reinstall', action='store_true', help='Reinstall all packages even if they are already ' 'up-to-date.') - cmd_opts.add_option( + self.cmd_opts.add_option( '-I', '--ignore-installed', dest='ignore_installed', action='store_true', - help='Ignore the installed packages (reinstalling instead).') + help='Ignore the installed packages, overwriting them. ' + 'This can break your system if the existing package ' + 'is of a different version or was installed ' + 'with a different package manager!' + ) - cmd_opts.add_option(cmdoptions.ignore_requires_python()) - cmd_opts.add_option(cmdoptions.no_build_isolation()) - cmd_opts.add_option(cmdoptions.use_pep517()) - cmd_opts.add_option(cmdoptions.no_use_pep517()) + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.no_use_pep517()) - cmd_opts.add_option(cmdoptions.install_options()) - cmd_opts.add_option(cmdoptions.global_options()) + self.cmd_opts.add_option(cmdoptions.install_options()) + self.cmd_opts.add_option(cmdoptions.global_options()) - cmd_opts.add_option( + self.cmd_opts.add_option( "--compile", action="store_true", dest="compile", @@ -166,21 +191,21 @@ class InstallCommand(RequirementCommand): help="Compile Python source files to bytecode", ) - cmd_opts.add_option( + self.cmd_opts.add_option( "--no-compile", action="store_false", dest="compile", help="Do not compile Python source files to bytecode", ) - cmd_opts.add_option( + self.cmd_opts.add_option( "--no-warn-script-location", action="store_false", dest="warn_script_location", default=True, help="Do not warn when installing scripts outside PATH", ) - cmd_opts.add_option( + self.cmd_opts.add_option( "--no-warn-conflicts", action="store_false", dest="warn_about_conflicts", @@ -188,12 +213,11 @@ class 
InstallCommand(RequirementCommand): help="Do not warn about broken dependencies", ) - cmd_opts.add_option(cmdoptions.no_binary()) - cmd_opts.add_option(cmdoptions.only_binary()) - cmd_opts.add_option(cmdoptions.prefer_binary()) - cmd_opts.add_option(cmdoptions.no_clean()) - cmd_opts.add_option(cmdoptions.require_hashes()) - cmd_opts.add_option(cmdoptions.progress_bar()) + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option(cmdoptions.require_hashes()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) index_opts = cmdoptions.make_option_group( cmdoptions.index_group, @@ -201,41 +225,34 @@ class InstallCommand(RequirementCommand): ) self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) + @with_cleanup def run(self, options, args): + # type: (Values, List[str]) -> int + if options.use_user_site and options.target_dir is not None: + raise CommandError("Can not combine '--user' and '--target'") + cmdoptions.check_install_build_global(options) upgrade_strategy = "to-satisfy-only" if options.upgrade: upgrade_strategy = options.upgrade_strategy - if options.build_dir: - options.build_dir = os.path.abspath(options.build_dir) - cmdoptions.check_dist_restriction(options, check_target=True) - if options.python_version: - python_versions = [options.python_version] - else: - python_versions = None - - options.src_dir = os.path.abspath(options.src_dir) install_options = options.install_options or [] - if options.use_user_site: - if options.prefix_path: - raise CommandError( - "Can not combine '--user' and '--prefix' as they imply " - "different installation locations" - ) - if virtualenv_no_global(): - raise InstallationError( - "Can not perform a '--user' install. User site-packages " - "are not visible in this virtualenv." 
- ) - install_options.append('--user') - install_options.append('--prefix=') - target_temp_dir = TempDirectory(kind="target") + logger.debug("Using %s", get_pip_version()) + options.use_user_site = decide_user_install( + options.use_user_site, + prefix_path=options.prefix_path, + target_dir=options.target_dir, + root_path=options.root_path, + isolated_mode=options.isolated_mode, + ) + + target_temp_dir = None # type: Optional[TempDirectory] + target_temp_dir_path = None # type: Optional[str] if options.target_dir: options.ignore_installed = True options.target_dir = os.path.abspath(options.target_dir) @@ -247,290 +264,471 @@ class InstallCommand(RequirementCommand): ) # Create a target directory for using with the target option - target_temp_dir.create() - install_options.append('--home=' + target_temp_dir.path) + target_temp_dir = TempDirectory(kind="target") + target_temp_dir_path = target_temp_dir.path + self.enter_context(target_temp_dir) global_options = options.global_options or [] - with self._build_session(options) as session: - finder = self._build_package_finder( + session = self.get_default_session(options) + + target_python = make_target_python(options) + finder = self._build_package_finder( + options=options, + session=session, + target_python=target_python, + ignore_requires_python=options.ignore_requires_python, + ) + build_delete = (not (options.no_clean or options.build_dir)) + wheel_cache = WheelCache(options.cache_dir, options.format_control) + + req_tracker = self.enter_context(get_requirement_tracker()) + + directory = TempDirectory( + options.build_dir, + delete=build_delete, + kind="install", + globally_managed=True, + ) + + try: + reqs = self.get_requirements(args, options, finder, session) + + reject_location_related_install_options( + reqs, options.install_options + ) + + preparer = self.make_requirement_preparer( + temp_build_dir=directory, options=options, + req_tracker=req_tracker, session=session, - platform=options.platform, - 
python_versions=python_versions, - abi=options.abi, - implementation=options.implementation, + finder=finder, + use_user_site=options.use_user_site, + ) + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + wheel_cache=wheel_cache, + use_user_site=options.use_user_site, + ignore_installed=options.ignore_installed, + ignore_requires_python=options.ignore_requires_python, + force_reinstall=options.force_reinstall, + upgrade_strategy=upgrade_strategy, + use_pep517=options.use_pep517, ) - build_delete = (not (options.no_clean or options.build_dir)) - wheel_cache = WheelCache(options.cache_dir, options.format_control) - - if options.cache_dir and not check_path_owner(options.cache_dir): - logger.warning( - "The directory '%s' or its parent directory is not owned " - "by the current user and caching wheels has been " - "disabled. check the permissions and owner of that " - "directory. If executing pip with sudo, you may want " - "sudo's -H flag.", - options.cache_dir, - ) - options.cache_dir = None - - with RequirementTracker() as req_tracker, TempDirectory( - options.build_dir, delete=build_delete, kind="install" - ) as directory: - requirement_set = RequirementSet( - require_hashes=options.require_hashes, - check_supported_wheels=not options.target_dir, - ) - try: - self.populate_requirement_set( - requirement_set, args, options, finder, session, - self.name, wheel_cache - ) - preparer = RequirementPreparer( - build_dir=directory.path, - src_dir=options.src_dir, - download_dir=None, - wheel_download_dir=None, - progress_bar=options.progress_bar, - build_isolation=options.build_isolation, - req_tracker=req_tracker, - ) + self.trace_basic_info(finder) - resolver = Resolver( - preparer=preparer, - finder=finder, - session=session, - wheel_cache=wheel_cache, - use_user_site=options.use_user_site, - upgrade_strategy=upgrade_strategy, - force_reinstall=options.force_reinstall, - ignore_dependencies=options.ignore_dependencies, - 
ignore_requires_python=options.ignore_requires_python, - ignore_installed=options.ignore_installed, - isolated=options.isolated_mode, - use_pep517=options.use_pep517 - ) - resolver.resolve(requirement_set) + requirement_set = resolver.resolve( + reqs, check_supported_wheels=not options.target_dir + ) - protect_pip_from_modification_on_windows( - modifying_pip=requirement_set.has_requirement("pip") - ) + try: + pip_req = requirement_set.get_requirement("pip") + except KeyError: + modifying_pip = False + else: + # If we're not replacing an already installed pip, + # we're not modifying it. + modifying_pip = pip_req.satisfied_by is None + protect_pip_from_modification_on_windows( + modifying_pip=modifying_pip + ) - # Consider legacy and PEP517-using requirements separately - legacy_requirements = [] - pep517_requirements = [] - for req in requirement_set.requirements.values(): - if req.use_pep517: - pep517_requirements.append(req) - else: - legacy_requirements.append(req) - - # We don't build wheels for legacy requirements if we - # don't have wheel installed or we don't have a cache dir - try: - import wheel # noqa: F401 - build_legacy = bool(options.cache_dir) - except ImportError: - build_legacy = False - - wb = WheelBuilder( - finder, preparer, wheel_cache, - build_options=[], global_options=[], - ) + check_binary_allowed = get_check_binary_allowed( + finder.format_control + ) + + reqs_to_build = [ + r for r in requirement_set.requirements.values() + if should_build_for_install_command( + r, check_binary_allowed + ) + ] + + _, build_failures = build( + reqs_to_build, + wheel_cache=wheel_cache, + build_options=[], + global_options=[], + ) - # Always build PEP 517 requirements - build_failures = wb.build( - pep517_requirements, - session=session, autobuilding=True + # If we're using PEP 517, we cannot do a direct install + # so we fail here. 
+ pep517_build_failure_names = [ + r.name # type: ignore + for r in build_failures if r.use_pep517 + ] # type: List[str] + if pep517_build_failure_names: + raise InstallationError( + "Could not build wheels for {} which use" + " PEP 517 and cannot be installed directly".format( + ", ".join(pep517_build_failure_names) ) + ) - if build_legacy: - # We don't care about failures building legacy - # requirements, as we'll fall through to a direct - # install for those. - wb.build( - legacy_requirements, - session=session, autobuilding=True + # For now, we just warn about failures building legacy + # requirements, as we'll fall through to a direct + # install for those. + legacy_build_failure_names = [ + r.name # type: ignore + for r in build_failures if not r.use_pep517 + ] # type: List[str] + if legacy_build_failure_names: + deprecated( + reason=( + "Could not build wheels for {} which do not use " + "PEP 517. pip will fall back to legacy 'setup.py " + "install' for these.".format( + ", ".join(legacy_build_failure_names) ) + ), + replacement="to fix the wheel build issue reported above", + gone_in="21.0", + issue=8368, + ) - # If we're using PEP 517, we cannot do a direct install - # so we fail here. - if build_failures: - raise InstallationError( - "Could not build wheels for {} which use" - " PEP 517 and cannot be installed directly".format( - ", ".join(r.name for r in build_failures))) + to_install = resolver.get_installation_order( + requirement_set + ) - to_install = resolver.get_installation_order( - requirement_set - ) + # Check for conflicts in the package set we're installing. 
+ conflicts = None # type: Optional[ConflictDetails] + should_warn_about_conflicts = ( + not options.ignore_dependencies and + options.warn_about_conflicts + ) + if should_warn_about_conflicts: + conflicts = self._determine_conflicts(to_install) + + # Don't warn about script install locations if + # --target has been specified + warn_script_location = options.warn_script_location + if options.target_dir: + warn_script_location = False + + installed = install_given_reqs( + to_install, + install_options, + global_options, + root=options.root_path, + home=target_temp_dir_path, + prefix=options.prefix_path, + warn_script_location=warn_script_location, + use_user_site=options.use_user_site, + pycompile=options.compile, + ) - # Consistency Checking of the package set we're installing. - should_warn_about_conflicts = ( - not options.ignore_dependencies and - options.warn_about_conflicts - ) - if should_warn_about_conflicts: - self._warn_about_conflicts(to_install) - - # Don't warn about script install locations if - # --target has been specified - warn_script_location = options.warn_script_location - if options.target_dir: - warn_script_location = False - - installed = install_given_reqs( - to_install, - install_options, - global_options, - root=options.root_path, - home=target_temp_dir.path, - prefix=options.prefix_path, - pycompile=options.compile, - warn_script_location=warn_script_location, - use_user_site=options.use_user_site, - ) + lib_locations = get_lib_location_guesses( + user=options.use_user_site, + home=target_temp_dir_path, + root=options.root_path, + prefix=options.prefix_path, + isolated=options.isolated_mode, + ) + working_set = pkg_resources.WorkingSet(lib_locations) - lib_locations = get_lib_location_guesses( - user=options.use_user_site, - home=target_temp_dir.path, - root=options.root_path, - prefix=options.prefix_path, - isolated=options.isolated_mode, - ) - working_set = pkg_resources.WorkingSet(lib_locations) - - reqs = sorted(installed, 
key=operator.attrgetter('name')) - items = [] - for req in reqs: - item = req.name - try: - installed_version = get_installed_version( - req.name, working_set=working_set - ) - if installed_version: - item += '-' + installed_version - except Exception: - pass - items.append(item) - installed = ' '.join(items) - if installed: - logger.info('Successfully installed %s', installed) - except EnvironmentError as error: - show_traceback = (self.verbosity >= 1) - - message = create_env_error_message( - error, show_traceback, options.use_user_site, + installed.sort(key=operator.attrgetter('name')) + items = [] + for result in installed: + item = result.name + try: + installed_version = get_installed_version( + result.name, working_set=working_set ) - logger.error(message, exc_info=show_traceback) - - return ERROR - except PreviousBuildDirError: - options.no_clean = True - raise - finally: - # Clean up - if not options.no_clean: - requirement_set.cleanup_files() - wheel_cache.cleanup() + if installed_version: + item += '-' + installed_version + except Exception: + pass + items.append(item) + + if conflicts is not None: + self._warn_about_conflicts( + conflicts, + new_resolver='2020-resolver' in options.features_enabled, + ) + + installed_desc = ' '.join(items) + if installed_desc: + write_output( + 'Successfully installed %s', installed_desc, + ) + except EnvironmentError as error: + show_traceback = (self.verbosity >= 1) + + message = create_env_error_message( + error, show_traceback, options.use_user_site, + ) + logger.error(message, exc_info=show_traceback) # noqa + + return ERROR if options.target_dir: + assert target_temp_dir self._handle_target_dir( options.target_dir, target_temp_dir, options.upgrade ) - return requirement_set + + return SUCCESS def _handle_target_dir(self, target_dir, target_temp_dir, upgrade): + # type: (str, TempDirectory, bool) -> None ensure_dir(target_dir) # Checking both purelib and platlib directories for installed # packages to be moved to 
target directory lib_dir_list = [] - with target_temp_dir: - # Checking both purelib and platlib directories for installed - # packages to be moved to target directory - scheme = distutils_scheme('', home=target_temp_dir.path) - purelib_dir = scheme['purelib'] - platlib_dir = scheme['platlib'] - data_dir = scheme['data'] - - if os.path.exists(purelib_dir): - lib_dir_list.append(purelib_dir) - if os.path.exists(platlib_dir) and platlib_dir != purelib_dir: - lib_dir_list.append(platlib_dir) - if os.path.exists(data_dir): - lib_dir_list.append(data_dir) - - for lib_dir in lib_dir_list: - for item in os.listdir(lib_dir): - if lib_dir == data_dir: - ddir = os.path.join(data_dir, item) - if any(s.startswith(ddir) for s in lib_dir_list[:-1]): - continue - target_item_dir = os.path.join(target_dir, item) - if os.path.exists(target_item_dir): - if not upgrade: - logger.warning( - 'Target directory %s already exists. Specify ' - '--upgrade to force replacement.', - target_item_dir - ) - continue - if os.path.islink(target_item_dir): - logger.warning( - 'Target directory %s already exists and is ' - 'a link. 
Pip will not automatically replace ' - 'links, please remove if replacement is ' - 'desired.', - target_item_dir - ) - continue - if os.path.isdir(target_item_dir): - shutil.rmtree(target_item_dir) - else: - os.remove(target_item_dir) - - shutil.move( - os.path.join(lib_dir, item), - target_item_dir - ) + # Checking both purelib and platlib directories for installed + # packages to be moved to target directory + scheme = distutils_scheme('', home=target_temp_dir.path) + purelib_dir = scheme['purelib'] + platlib_dir = scheme['platlib'] + data_dir = scheme['data'] + + if os.path.exists(purelib_dir): + lib_dir_list.append(purelib_dir) + if os.path.exists(platlib_dir) and platlib_dir != purelib_dir: + lib_dir_list.append(platlib_dir) + if os.path.exists(data_dir): + lib_dir_list.append(data_dir) + + for lib_dir in lib_dir_list: + for item in os.listdir(lib_dir): + if lib_dir == data_dir: + ddir = os.path.join(data_dir, item) + if any(s.startswith(ddir) for s in lib_dir_list[:-1]): + continue + target_item_dir = os.path.join(target_dir, item) + if os.path.exists(target_item_dir): + if not upgrade: + logger.warning( + 'Target directory %s already exists. Specify ' + '--upgrade to force replacement.', + target_item_dir + ) + continue + if os.path.islink(target_item_dir): + logger.warning( + 'Target directory %s already exists and is ' + 'a link. 
pip will not automatically replace ' + 'links, please remove if replacement is ' + 'desired.', + target_item_dir + ) + continue + if os.path.isdir(target_item_dir): + shutil.rmtree(target_item_dir) + else: + os.remove(target_item_dir) + + shutil.move( + os.path.join(lib_dir, item), + target_item_dir + ) - def _warn_about_conflicts(self, to_install): + def _determine_conflicts(self, to_install): + # type: (List[InstallRequirement]) -> Optional[ConflictDetails] try: - package_set, _dep_info = check_install_conflicts(to_install) + return check_install_conflicts(to_install) except Exception: - logger.error("Error checking for conflicts.", exc_info=True) + logger.exception( + "Error while checking for conflicts. Please file an issue on " + "pip's issue tracker: https://github.com/pypa/pip/issues/new" + ) + return None + + def _warn_about_conflicts(self, conflict_details, new_resolver): + # type: (ConflictDetails, bool) -> None + package_set, (missing, conflicting) = conflict_details + if not missing and not conflicting: return - missing, conflicting = _dep_info - # NOTE: There is some duplication here from pip check + parts = [] # type: List[str] + if not new_resolver: + parts.append( + "After October 2020 you may experience errors when installing " + "or updating packages. This is because pip will change the " + "way that it resolves dependency conflicts.\n" + ) + parts.append( + "We recommend you use --use-feature=2020-resolver to test " + "your packages with the new resolver before it becomes the " + "default.\n" + ) + elif not today_is_later_than(year=2020, month=7, day=31): + # NOTE: trailing newlines here are intentional + parts.append( + "Pip will install or upgrade your package(s) and its " + "dependencies without taking into account other packages you " + "already have installed. 
This may cause an uncaught " + "dependency conflict.\n" + ) + form_link = "https://forms.gle/cWKMoDs8sUVE29hz9" + parts.append( + "If you would like pip to take your other packages into " + "account, please tell us here: {}\n".format(form_link) + ) + + # NOTE: There is some duplication here, with commands/check.py for project_name in missing: version = package_set[project_name][0] for dependency in missing[project_name]: - logger.critical( - "%s %s requires %s, which is not installed.", - project_name, version, dependency[1], + message = ( + "{name} {version} requires {requirement}, " + "which is not installed." + ).format( + name=project_name, + version=version, + requirement=dependency[1], ) + parts.append(message) for project_name in conflicting: version = package_set[project_name][0] for dep_name, dep_version, req in conflicting[project_name]: - logger.critical( - "%s %s has requirement %s, but you'll have %s %s which is " - "incompatible.", - project_name, version, req, dep_name, dep_version, + message = ( + "{name} {version} requires {requirement}, but you'll have " + "{dep_name} {dep_version} which is incompatible." + ).format( + name=project_name, + version=version, + requirement=req, + dep_name=dep_name, + dep_version=dep_version, ) + parts.append(message) + + logger.critical("\n".join(parts)) -def get_lib_location_guesses(*args, **kwargs): - scheme = distutils_scheme('', *args, **kwargs) +def get_lib_location_guesses( + user=False, # type: bool + home=None, # type: Optional[str] + root=None, # type: Optional[str] + isolated=False, # type: bool + prefix=None # type: Optional[str] +): + # type:(...) 
-> List[str] + scheme = distutils_scheme('', user=user, home=home, root=root, + isolated=isolated, prefix=prefix) return [scheme['purelib'], scheme['platlib']] +def site_packages_writable(root, isolated): + # type: (Optional[str], bool) -> bool + return all( + test_writable_dir(d) for d in set( + get_lib_location_guesses(root=root, isolated=isolated)) + ) + + +def decide_user_install( + use_user_site, # type: Optional[bool] + prefix_path=None, # type: Optional[str] + target_dir=None, # type: Optional[str] + root_path=None, # type: Optional[str] + isolated_mode=False, # type: bool +): + # type: (...) -> bool + """Determine whether to do a user install based on the input options. + + If use_user_site is False, no additional checks are done. + If use_user_site is True, it is checked for compatibility with other + options. + If use_user_site is None, the default behaviour depends on the environment, + which is provided by the other arguments. + """ + # In some cases (config from tox), use_user_site can be set to an integer + # rather than a bool, which 'use_user_site is False' wouldn't catch. + if (use_user_site is not None) and (not use_user_site): + logger.debug("Non-user install by explicit request") + return False + + if use_user_site: + if prefix_path: + raise CommandError( + "Can not combine '--user' and '--prefix' as they imply " + "different installation locations" + ) + if virtualenv_no_global(): + raise InstallationError( + "Can not perform a '--user' install. User site-packages " + "are not visible in this virtualenv." 
+ ) + logger.debug("User install by explicit request") + return True + + # If we are here, user installs have not been explicitly requested/avoided + assert use_user_site is None + + # user install incompatible with --prefix/--target + if prefix_path or target_dir: + logger.debug("Non-user install due to --prefix or --target option") + return False + + # If user installs are not enabled, choose a non-user install + if not site.ENABLE_USER_SITE: + logger.debug("Non-user install because user site-packages disabled") + return False + + # If we have permission for a non-user install, do that, + # otherwise do a user install. + if site_packages_writable(root=root_path, isolated=isolated_mode): + logger.debug("Non-user install because site-packages writeable") + return False + + logger.info("Defaulting to user installation because normal site-packages " + "is not writeable") + return True + + +def reject_location_related_install_options(requirements, options): + # type: (List[InstallRequirement], Optional[List[str]]) -> None + """If any location-changing --install-option arguments were passed for + requirements or on the command-line, then show a deprecation warning. + """ + def format_options(option_names): + # type: (Iterable[str]) -> List[str] + return ["--{}".format(name.replace("_", "-")) for name in option_names] + + offenders = [] + + for requirement in requirements: + install_options = requirement.install_options + location_options = parse_distutils_args(install_options) + if location_options: + offenders.append( + "{!r} from {}".format( + format_options(location_options.keys()), requirement + ) + ) + + if options: + location_options = parse_distutils_args(options) + if location_options: + offenders.append( + "{!r} from command line".format( + format_options(location_options.keys()) + ) + ) + + if not offenders: + return + + raise CommandError( + "Location-changing options found in --install-option: {}." 
+ " This is unsupported, use pip-level options like --user," + " --prefix, --root, and --target instead.".format( + "; ".join(offenders) + ) + ) + + def create_env_error_message(error, show_traceback, using_user_site): + # type: (EnvironmentError, bool, bool) -> str """Format an error message for an EnvironmentError It may occur anytime during the execution of the install command. diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/list.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/list.py index a6402749..a67d0f8d 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/list.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/list.py @@ -4,52 +4,62 @@ import json import logging from pip._vendor import six -from pip._vendor.six.moves import zip_longest from pip._internal.cli import cmdoptions -from pip._internal.cli.base_command import Command +from pip._internal.cli.req_command import IndexGroupCommand +from pip._internal.cli.status_codes import SUCCESS from pip._internal.exceptions import CommandError -from pip._internal.index import PackageFinder +from pip._internal.index.collector import LinkCollector +from pip._internal.index.package_finder import PackageFinder +from pip._internal.models.selection_prefs import SelectionPreferences from pip._internal.utils.misc import ( - dist_is_editable, get_installed_distributions, + dist_is_editable, + get_installed_distributions, + tabulate, + write_output, ) from pip._internal.utils.packaging import get_installer +from pip._internal.utils.parallel import map_multithread +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import List, Set, Tuple, Iterator + + from pip._internal.network.session import PipSession + from pip._vendor.pkg_resources import Distribution logger = 
logging.getLogger(__name__) -class ListCommand(Command): +class ListCommand(IndexGroupCommand): """ List installed packages, including editables. Packages are listed in a case-insensitive sorted order. """ - name = 'list' + usage = """ %prog [options]""" - summary = 'List installed packages.' - - def __init__(self, *args, **kw): - super(ListCommand, self).__init__(*args, **kw) - cmd_opts = self.cmd_opts - - cmd_opts.add_option( + def add_options(self): + # type: () -> None + self.cmd_opts.add_option( '-o', '--outdated', action='store_true', default=False, help='List outdated packages') - cmd_opts.add_option( + self.cmd_opts.add_option( '-u', '--uptodate', action='store_true', default=False, help='List uptodate packages') - cmd_opts.add_option( + self.cmd_opts.add_option( '-e', '--editable', action='store_true', default=False, help='List editable projects.') - cmd_opts.add_option( + self.cmd_opts.add_option( '-l', '--local', action='store_true', default=False, @@ -62,8 +72,8 @@ class ListCommand(Command): action='store_true', default=False, help='Only output packages installed in user-site.') - - cmd_opts.add_option( + self.cmd_opts.add_option(cmdoptions.list_path()) + self.cmd_opts.add_option( '--pre', action='store_true', default=False, @@ -71,7 +81,7 @@ class ListCommand(Command): "pip only finds stable versions."), ) - cmd_opts.add_option( + self.cmd_opts.add_option( '--format', action='store', dest='list_format', @@ -81,7 +91,7 @@ class ListCommand(Command): "or json", ) - cmd_opts.add_option( + self.cmd_opts.add_option( '--not-required', action='store_true', dest='not_required', @@ -89,13 +99,13 @@ class ListCommand(Command): "installed packages.", ) - cmd_opts.add_option( + self.cmd_opts.add_option( '--exclude-editable', action='store_false', dest='include_editable', help='Exclude editable package from output.', ) - cmd_opts.add_option( + self.cmd_opts.add_option( '--include-editable', action='store_true', dest='include_editable', @@ -107,30 +117,40 @@ class 
ListCommand(Command): ) self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) - def _build_package_finder(self, options, index_urls, session): + def _build_package_finder(self, options, session): + # type: (Values, PipSession) -> PackageFinder """ Create a package finder appropriate to this list command. """ - return PackageFinder( - find_links=options.find_links, - index_urls=index_urls, + link_collector = LinkCollector.create(session, options=options) + + # Pass allow_yanked=False to ignore yanked versions. + selection_prefs = SelectionPreferences( + allow_yanked=False, allow_all_prereleases=options.pre, - trusted_hosts=options.trusted_hosts, - session=session, + ) + + return PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, ) def run(self, options, args): + # type: (Values, List[str]) -> int if options.outdated and options.uptodate: raise CommandError( "Options --outdated and --uptodate cannot be combined.") + cmdoptions.check_list_path_option(options) + packages = get_installed_distributions( local_only=options.local, user_only=options.user, editables_only=options.editable, include_editables=options.include_editable, + paths=options.path, ) # get_not_required must be called firstly in order to find and @@ -146,35 +166,40 @@ class ListCommand(Command): packages = self.get_uptodate(packages, options) self.output_package_listing(packages, options) + return SUCCESS def get_outdated(self, packages, options): + # type: (List[Distribution], Values) -> List[Distribution] return [ dist for dist in self.iter_packages_latest_infos(packages, options) if dist.latest_version > dist.parsed_version ] def get_uptodate(self, packages, options): + # type: (List[Distribution], Values) -> List[Distribution] return [ dist for dist in self.iter_packages_latest_infos(packages, options) if dist.latest_version == dist.parsed_version ] def 
get_not_required(self, packages, options): - dep_keys = set() + # type: (List[Distribution], Values) -> List[Distribution] + dep_keys = set() # type: Set[Distribution] for dist in packages: dep_keys.update(requirement.key for requirement in dist.requires()) - return {pkg for pkg in packages if pkg.key not in dep_keys} - def iter_packages_latest_infos(self, packages, options): - index_urls = [options.index_url] + options.extra_index_urls - if options.no_index: - logger.debug('Ignoring indexes: %s', ','.join(index_urls)) - index_urls = [] + # Create a set to remove duplicate packages, and cast it to a list + # to keep the return type consistent with get_outdated and + # get_uptodate + return list({pkg for pkg in packages if pkg.key not in dep_keys}) + def iter_packages_latest_infos(self, packages, options): + # type: (List[Distribution], Values) -> Iterator[Distribution] with self._build_session(options) as session: - finder = self._build_package_finder(options, index_urls, session) + finder = self._build_package_finder(options, session) - for dist in packages: + def latest_info(dist): + # type: (Distribution) -> Distribution typ = 'unknown' all_candidates = finder.find_all_candidates(dist.key) if not options.pre: @@ -182,21 +207,29 @@ class ListCommand(Command): all_candidates = [candidate for candidate in all_candidates if not candidate.version.is_prerelease] - if not all_candidates: - continue - best_candidate = max(all_candidates, - key=finder._candidate_sort_key) + evaluator = finder.make_candidate_evaluator( + project_name=dist.project_name, + ) + best_candidate = evaluator.sort_best_candidate(all_candidates) + if best_candidate is None: + return None + remote_version = best_candidate.version - if best_candidate.location.is_wheel: + if best_candidate.link.is_wheel: typ = 'wheel' else: typ = 'sdist' # This is dirty but makes the rest of the code much cleaner dist.latest_version = remote_version dist.latest_filetype = typ - yield dist + return dist + + for dist 
in map_multithread(latest_info, packages): + if dist is not None: + yield dist def output_package_listing(self, packages, options): + # type: (List[Distribution], Values) -> None packages = sorted( packages, key=lambda dist: dist.project_name.lower(), @@ -207,14 +240,15 @@ class ListCommand(Command): elif options.list_format == 'freeze': for dist in packages: if options.verbose >= 1: - logger.info("%s==%s (%s)", dist.project_name, - dist.version, dist.location) + write_output("%s==%s (%s)", dist.project_name, + dist.version, dist.location) else: - logger.info("%s==%s", dist.project_name, dist.version) + write_output("%s==%s", dist.project_name, dist.version) elif options.list_format == 'json': - logger.info(format_for_json(packages, options)) + write_output(format_for_json(packages, options)) def output_package_listing_columns(self, data, header): + # type: (List[List[str]], List[str]) -> None # insert the header first: we need to know the size of column names if len(data) > 0: data.insert(0, header) @@ -226,28 +260,11 @@ class ListCommand(Command): pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes))) for val in pkg_strings: - logger.info(val) - - -def tabulate(vals): - # From pfmoore on GitHub: - # https://github.com/pypa/pip/issues/3651#issuecomment-216932564 - assert len(vals) > 0 - - sizes = [0] * max(len(x) for x in vals) - for row in vals: - sizes = [max(s, len(str(c))) for s, c in zip_longest(sizes, row)] - - result = [] - for row in vals: - display = " ".join([str(c).ljust(s) if c is not None else '' - for s, c in zip_longest(sizes, row)]) - result.append(display) - - return result, sizes + write_output(val) def format_for_columns(pkgs, options): + # type: (List[Distribution], Values) -> Tuple[List[List[str]], List[str]] """ Convert the package data into something usable by output_package_listing_columns. 
@@ -285,6 +302,7 @@ def format_for_columns(pkgs, options): def format_for_json(packages, options): + # type: (List[Distribution], Values) -> str data = [] for dist in packages: info = { diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/search.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/search.py index c157a312..e906ce76 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/search.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/search.py @@ -12,26 +12,37 @@ from pip._vendor.packaging.version import parse as parse_version from pip._vendor.six.moves import xmlrpc_client # type: ignore from pip._internal.cli.base_command import Command +from pip._internal.cli.req_command import SessionCommandMixin from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS -from pip._internal.download import PipXmlrpcTransport from pip._internal.exceptions import CommandError from pip._internal.models.index import PyPI +from pip._internal.network.xmlrpc import PipXmlrpcTransport from pip._internal.utils.compat import get_terminal_size from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import get_distribution, write_output +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import List, Dict, Optional + from typing_extensions import TypedDict + TransformedHit = TypedDict( + 'TransformedHit', + {'name': str, 'summary': str, 'versions': List[str]}, + ) logger = logging.getLogger(__name__) -class SearchCommand(Command): +class SearchCommand(Command, SessionCommandMixin): """Search for PyPI packages whose name or summary contains <query>.""" - name = 'search' + usage = """ %prog [options] <query>""" - summary = 'Search PyPI for packages.' 
ignore_require_venv = True - def __init__(self, *args, **kw): - super(SearchCommand, self).__init__(*args, **kw) + def add_options(self): + # type: () -> None self.cmd_opts.add_option( '-i', '--index', dest='index', @@ -42,6 +53,7 @@ class SearchCommand(Command): self.parser.insert_option_group(0, self.cmd_opts) def run(self, options, args): + # type: (Values, List[str]) -> int if not args: raise CommandError('Missing required argument (search query).') query = args @@ -58,21 +70,25 @@ class SearchCommand(Command): return NO_MATCHES_FOUND def search(self, query, options): + # type: (List[str], Values) -> List[Dict[str, str]] index_url = options.index - with self._build_session(options) as session: - transport = PipXmlrpcTransport(index_url, session) - pypi = xmlrpc_client.ServerProxy(index_url, transport) - hits = pypi.search({'name': query, 'summary': query}, 'or') - return hits + + session = self.get_default_session(options) + + transport = PipXmlrpcTransport(index_url, session) + pypi = xmlrpc_client.ServerProxy(index_url, transport) + hits = pypi.search({'name': query, 'summary': query}, 'or') + return hits def transform_hits(hits): + # type: (List[Dict[str, str]]) -> List[TransformedHit] """ The list from pypi is really a list of versions. We want a list of packages with the list of versions stored inline. This converts the list from pypi into one we can use. 
""" - packages = OrderedDict() + packages = OrderedDict() # type: OrderedDict[str, TransformedHit] for hit in hits: name = hit['name'] summary = hit['summary'] @@ -95,6 +111,7 @@ def transform_hits(hits): def print_results(hits, name_column_width=None, terminal_width=None): + # type: (List[TransformedHit], Optional[int], Optional[int]) -> None if not hits: return if name_column_width is None: @@ -112,24 +129,31 @@ def print_results(hits, name_column_width=None, terminal_width=None): target_width = terminal_width - name_column_width - 5 if target_width > 10: # wrap and indent summary to fit terminal - summary = textwrap.wrap(summary, target_width) - summary = ('\n' + ' ' * (name_column_width + 3)).join(summary) + summary_lines = textwrap.wrap(summary, target_width) + summary = ('\n' + ' ' * (name_column_width + 3)).join( + summary_lines) - line = '%-*s - %s' % (name_column_width, - '%s (%s)' % (name, latest), summary) + line = '{name_latest:{name_column_width}} - {summary}'.format( + name_latest='{name} ({latest})'.format(**locals()), + **locals()) try: - logger.info(line) + write_output(line) if name in installed_packages: - dist = pkg_resources.get_distribution(name) + dist = get_distribution(name) with indent_log(): if dist.version == latest: - logger.info('INSTALLED: %s (latest)', dist.version) + write_output('INSTALLED: %s (latest)', dist.version) else: - logger.info('INSTALLED: %s', dist.version) - logger.info('LATEST: %s', latest) + write_output('INSTALLED: %s', dist.version) + if parse_version(latest).pre: + write_output('LATEST: %s (pre-release; install' + ' with "pip install --pre")', latest) + else: + write_output('LATEST: %s', latest) except UnicodeEncodeError: pass def highest_version(versions): + # type: (List[str]) -> str return max(versions, key=parse_version) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/show.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/show.py 
index f92c9bc6..3892c595 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/show.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/show.py @@ -2,13 +2,19 @@ from __future__ import absolute_import import logging import os -from email.parser import FeedParser # type: ignore +from email.parser import FeedParser from pip._vendor import pkg_resources from pip._vendor.packaging.utils import canonicalize_name from pip._internal.cli.base_command import Command from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.utils.misc import write_output +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import List, Dict, Iterator logger = logging.getLogger(__name__) @@ -19,14 +25,13 @@ class ShowCommand(Command): The output is in RFC-compliant mail header format. """ - name = 'show' + usage = """ %prog [options] <package> ...""" - summary = 'Show information about installed packages.' ignore_require_venv = True - def __init__(self, *args, **kw): - super(ShowCommand, self).__init__(*args, **kw) + def add_options(self): + # type: () -> None self.cmd_opts.add_option( '-f', '--files', dest='files', @@ -37,6 +42,7 @@ class ShowCommand(Command): self.parser.insert_option_group(0, self.cmd_opts) def run(self, options, args): + # type: (Values, List[str]) -> int if not args: logger.warning('ERROR: Please provide a package name or names.') return ERROR @@ -50,6 +56,7 @@ class ShowCommand(Command): def search_packages_info(query): + # type: (List[str]) -> Iterator[Dict[str, str]] """ Gather details from installed distributions. Print distribution name, version, location, and installed files. 
Installed files requires a @@ -61,6 +68,21 @@ def search_packages_info(query): installed[canonicalize_name(p.project_name)] = p query_names = [canonicalize_name(name) for name in query] + missing = sorted( + [name for name, pkg in zip(query, query_names) if pkg not in installed] + ) + if missing: + logger.warning('Package(s) not found: %s', ', '.join(missing)) + + def get_requiring_packages(package_name): + # type: (str) -> List[str] + canonical_name = canonicalize_name(package_name) + return [ + pkg.project_name for pkg in pkg_resources.working_set + if canonical_name in + [canonicalize_name(required.name) for required in + pkg.requires()] + ] for dist in [installed[pkg] for pkg in query_names if pkg in installed]: package = { @@ -68,14 +90,15 @@ def search_packages_info(query): 'version': dist.version, 'location': dist.location, 'requires': [dep.project_name for dep in dist.requires()], + 'required_by': get_requiring_packages(dist.project_name) } file_list = None - metadata = None + metadata = '' if isinstance(dist, pkg_resources.DistInfoDistribution): # RECORDs should be part of .dist-info metadatas if dist.has_metadata('RECORD'): lines = dist.get_metadata_lines('RECORD') - paths = [l.split(',')[0] for l in lines] + paths = [line.split(',')[0] for line in lines] paths = [os.path.join(dist.location, p) for p in paths] file_list = [os.path.relpath(p, dist.location) for p in paths] @@ -123,46 +146,41 @@ def search_packages_info(query): def print_results(distributions, list_files=False, verbose=False): + # type: (Iterator[Dict[str, str]], bool, bool) -> bool """ - Print the informations from installed distributions found. + Print the information from installed distributions found. 
""" results_printed = False for i, dist in enumerate(distributions): results_printed = True if i > 0: - logger.info("---") - - name = dist.get('name', '') - required_by = [ - pkg.project_name for pkg in pkg_resources.working_set - if name in [required.name for required in pkg.requires()] - ] - - logger.info("Name: %s", name) - logger.info("Version: %s", dist.get('version', '')) - logger.info("Summary: %s", dist.get('summary', '')) - logger.info("Home-page: %s", dist.get('home-page', '')) - logger.info("Author: %s", dist.get('author', '')) - logger.info("Author-email: %s", dist.get('author-email', '')) - logger.info("License: %s", dist.get('license', '')) - logger.info("Location: %s", dist.get('location', '')) - logger.info("Requires: %s", ', '.join(dist.get('requires', []))) - logger.info("Required-by: %s", ', '.join(required_by)) + write_output("---") + + write_output("Name: %s", dist.get('name', '')) + write_output("Version: %s", dist.get('version', '')) + write_output("Summary: %s", dist.get('summary', '')) + write_output("Home-page: %s", dist.get('home-page', '')) + write_output("Author: %s", dist.get('author', '')) + write_output("Author-email: %s", dist.get('author-email', '')) + write_output("License: %s", dist.get('license', '')) + write_output("Location: %s", dist.get('location', '')) + write_output("Requires: %s", ', '.join(dist.get('requires', []))) + write_output("Required-by: %s", ', '.join(dist.get('required_by', []))) if verbose: - logger.info("Metadata-Version: %s", - dist.get('metadata-version', '')) - logger.info("Installer: %s", dist.get('installer', '')) - logger.info("Classifiers:") + write_output("Metadata-Version: %s", + dist.get('metadata-version', '')) + write_output("Installer: %s", dist.get('installer', '')) + write_output("Classifiers:") for classifier in dist.get('classifiers', []): - logger.info(" %s", classifier) - logger.info("Entry-points:") + write_output(" %s", classifier) + write_output("Entry-points:") for entry in 
dist.get('entry_points', []): - logger.info(" %s", entry.strip()) + write_output(" %s", entry.strip()) if list_files: - logger.info("Files:") + write_output("Files:") for line in dist.get('files', []): - logger.info(" %s", line.strip()) + write_output(" %s", line.strip()) if "files" not in dist: - logger.info("Cannot locate installed-files.txt") + write_output("Cannot locate installed-files.txt") return results_printed diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/uninstall.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/uninstall.py index 0cd6f54b..3371fe47 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/uninstall.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/uninstall.py @@ -3,13 +3,23 @@ from __future__ import absolute_import from pip._vendor.packaging.utils import canonicalize_name from pip._internal.cli.base_command import Command +from pip._internal.cli.req_command import SessionCommandMixin +from pip._internal.cli.status_codes import SUCCESS from pip._internal.exceptions import InstallationError from pip._internal.req import parse_requirements -from pip._internal.req.constructors import install_req_from_line +from pip._internal.req.constructors import ( + install_req_from_line, + install_req_from_parsed_requirement, +) from pip._internal.utils.misc import protect_pip_from_modification_on_windows +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import List -class UninstallCommand(Command): + +class UninstallCommand(Command, SessionCommandMixin): """ Uninstall packages. @@ -19,14 +29,13 @@ class UninstallCommand(Command): leave behind no metadata to determine what files were installed. - Script wrappers installed by ``python setup.py develop``. 
""" - name = 'uninstall' + usage = """ %prog [options] <package> ... %prog [options] -r <requirements file> ...""" - summary = 'Uninstall packages.' - def __init__(self, *args, **kw): - super(UninstallCommand, self).__init__(*args, **kw) + def add_options(self): + # type: () -> None self.cmd_opts.add_option( '-r', '--requirement', dest='requirements', @@ -45,34 +54,42 @@ class UninstallCommand(Command): self.parser.insert_option_group(0, self.cmd_opts) def run(self, options, args): - with self._build_session(options) as session: - reqs_to_uninstall = {} - for name in args: - req = install_req_from_line( - name, isolated=options.isolated_mode, + # type: (Values, List[str]) -> int + session = self.get_default_session(options) + + reqs_to_uninstall = {} + for name in args: + req = install_req_from_line( + name, isolated=options.isolated_mode, + ) + if req.name: + reqs_to_uninstall[canonicalize_name(req.name)] = req + for filename in options.requirements: + for parsed_req in parse_requirements( + filename, + options=options, + session=session): + req = install_req_from_parsed_requirement( + parsed_req, + isolated=options.isolated_mode ) if req.name: reqs_to_uninstall[canonicalize_name(req.name)] = req - for filename in options.requirements: - for req in parse_requirements( - filename, - options=options, - session=session): - if req.name: - reqs_to_uninstall[canonicalize_name(req.name)] = req - if not reqs_to_uninstall: - raise InstallationError( - 'You must give at least one requirement to %(name)s (see ' - '"pip help %(name)s")' % dict(name=self.name) - ) + if not reqs_to_uninstall: + raise InstallationError( + 'You must give at least one requirement to {self.name} (see ' + '"pip help {self.name}")'.format(**locals()) + ) + + protect_pip_from_modification_on_windows( + modifying_pip="pip" in reqs_to_uninstall + ) - protect_pip_from_modification_on_windows( - modifying_pip="pip" in reqs_to_uninstall + for req in reqs_to_uninstall.values(): + uninstall_pathset = 
req.uninstall( + auto_confirm=options.yes, verbose=self.verbosity > 0, ) + if uninstall_pathset: + uninstall_pathset.commit() - for req in reqs_to_uninstall.values(): - uninstall_pathset = req.uninstall( - auto_confirm=options.yes, verbose=self.verbosity > 0, - ) - if uninstall_pathset: - uninstall_pathset.commit() + return SUCCESS diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/wheel.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/wheel.py index cd72a3df..0f718566 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/wheel.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/commands/wheel.py @@ -1,19 +1,26 @@ # -*- coding: utf-8 -*- + from __future__ import absolute_import import logging import os +import shutil from pip._internal.cache import WheelCache from pip._internal.cli import cmdoptions -from pip._internal.cli.base_command import RequirementCommand -from pip._internal.exceptions import CommandError, PreviousBuildDirError -from pip._internal.operations.prepare import RequirementPreparer -from pip._internal.req import RequirementSet -from pip._internal.req.req_tracker import RequirementTracker -from pip._internal.resolve import Resolver +from pip._internal.cli.req_command import RequirementCommand, with_cleanup +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.exceptions import CommandError +from pip._internal.req.req_tracker import get_requirement_tracker +from pip._internal.utils.misc import ensure_dir, normalize_path from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.wheel import WheelBuilder +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.wheel_builder import build, should_build_for_wheel_command + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import List + logger = logging.getLogger(__name__) @@ -33,7 
+40,6 @@ class WheelCommand(RequirementCommand): """ - name = 'wheel' usage = """ %prog [options] <requirement specifier> ... %prog [options] -r <requirements file> ... @@ -41,14 +47,10 @@ class WheelCommand(RequirementCommand): %prog [options] [-e] <local project path> ... %prog [options] <archive url/path> ...""" - summary = 'Build wheels from your requirements.' - - def __init__(self, *args, **kw): - super(WheelCommand, self).__init__(*args, **kw) + def add_options(self): + # type: () -> None - cmd_opts = self.cmd_opts - - cmd_opts.add_option( + self.cmd_opts.add_option( '-w', '--wheel-dir', dest='wheel_dir', metavar='dir', @@ -56,29 +58,29 @@ class WheelCommand(RequirementCommand): help=("Build wheels into <dir>, where the default is the " "current working directory."), ) - cmd_opts.add_option(cmdoptions.no_binary()) - cmd_opts.add_option(cmdoptions.only_binary()) - cmd_opts.add_option(cmdoptions.prefer_binary()) - cmd_opts.add_option( + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option( '--build-option', dest='build_options', metavar='options', action='append', help="Extra arguments to be supplied to 'setup.py bdist_wheel'.", ) - cmd_opts.add_option(cmdoptions.no_build_isolation()) - cmd_opts.add_option(cmdoptions.use_pep517()) - cmd_opts.add_option(cmdoptions.no_use_pep517()) - cmd_opts.add_option(cmdoptions.constraints()) - cmd_opts.add_option(cmdoptions.editable()) - cmd_opts.add_option(cmdoptions.requirements()) - cmd_opts.add_option(cmdoptions.src()) - cmd_opts.add_option(cmdoptions.ignore_requires_python()) - cmd_opts.add_option(cmdoptions.no_deps()) - cmd_opts.add_option(cmdoptions.build_dir()) - cmd_opts.add_option(cmdoptions.progress_bar()) - - cmd_opts.add_option( + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + 
self.cmd_opts.add_option(cmdoptions.no_use_pep517()) + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.editable()) + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.src()) + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.build_dir()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) + + self.cmd_opts.add_option( '--global-option', dest='global_options', action='append', @@ -86,7 +88,7 @@ class WheelCommand(RequirementCommand): help="Extra global options to be supplied to the setup.py " "call before the 'bdist_wheel' command.") - cmd_opts.add_option( + self.cmd_opts.add_option( '--pre', action='store_true', default=False, @@ -94,8 +96,7 @@ class WheelCommand(RequirementCommand): "pip only finds stable versions."), ) - cmd_opts.add_option(cmdoptions.no_clean()) - cmd_opts.add_option(cmdoptions.require_hashes()) + self.cmd_opts.add_option(cmdoptions.require_hashes()) index_opts = cmdoptions.make_option_group( cmdoptions.index_group, @@ -103,84 +104,85 @@ class WheelCommand(RequirementCommand): ) self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) + @with_cleanup def run(self, options, args): + # type: (Values, List[str]) -> int cmdoptions.check_install_build_global(options) - index_urls = [options.index_url] + options.extra_index_urls - if options.no_index: - logger.debug('Ignoring indexes: %s', ','.join(index_urls)) - index_urls = [] + session = self.get_default_session(options) + + finder = self._build_package_finder(options, session) + build_delete = (not (options.no_clean or options.build_dir)) + wheel_cache = WheelCache(options.cache_dir, options.format_control) - if options.build_dir: - options.build_dir = os.path.abspath(options.build_dir) + options.wheel_dir = 
normalize_path(options.wheel_dir) + ensure_dir(options.wheel_dir) - options.src_dir = os.path.abspath(options.src_dir) + req_tracker = self.enter_context(get_requirement_tracker()) - with self._build_session(options) as session: - finder = self._build_package_finder(options, session) - build_delete = (not (options.no_clean or options.build_dir)) - wheel_cache = WheelCache(options.cache_dir, options.format_control) + directory = TempDirectory( + options.build_dir, + delete=build_delete, + kind="wheel", + globally_managed=True, + ) - with RequirementTracker() as req_tracker, TempDirectory( - options.build_dir, delete=build_delete, kind="wheel" - ) as directory: + reqs = self.get_requirements(args, options, finder, session) - requirement_set = RequirementSet( - require_hashes=options.require_hashes, + preparer = self.make_requirement_preparer( + temp_build_dir=directory, + options=options, + req_tracker=req_tracker, + session=session, + finder=finder, + wheel_download_dir=options.wheel_dir, + use_user_site=False, + ) + + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + wheel_cache=wheel_cache, + ignore_requires_python=options.ignore_requires_python, + use_pep517=options.use_pep517, + ) + + self.trace_basic_info(finder) + + requirement_set = resolver.resolve( + reqs, check_supported_wheels=True + ) + + reqs_to_build = [ + r for r in requirement_set.requirements.values() + if should_build_for_wheel_command(r) + ] + + # build wheels + build_successes, build_failures = build( + reqs_to_build, + wheel_cache=wheel_cache, + build_options=options.build_options or [], + global_options=options.global_options or [], + ) + for req in build_successes: + assert req.link and req.link.is_wheel + assert req.local_file_path + # copy from cache to target directory + try: + shutil.copy(req.local_file_path, options.wheel_dir) + except OSError as e: + logger.warning( + "Building wheel for %s failed: %s", + req.name, e, ) + build_failures.append(req) 
+ if len(build_failures) != 0: + raise CommandError( + "Failed to build one or more wheels" + ) - try: - self.populate_requirement_set( - requirement_set, args, options, finder, session, - self.name, wheel_cache - ) - - preparer = RequirementPreparer( - build_dir=directory.path, - src_dir=options.src_dir, - download_dir=None, - wheel_download_dir=options.wheel_dir, - progress_bar=options.progress_bar, - build_isolation=options.build_isolation, - req_tracker=req_tracker, - ) - - resolver = Resolver( - preparer=preparer, - finder=finder, - session=session, - wheel_cache=wheel_cache, - use_user_site=False, - upgrade_strategy="to-satisfy-only", - force_reinstall=False, - ignore_dependencies=options.ignore_dependencies, - ignore_requires_python=options.ignore_requires_python, - ignore_installed=True, - isolated=options.isolated_mode, - use_pep517=options.use_pep517 - ) - resolver.resolve(requirement_set) - - # build wheels - wb = WheelBuilder( - finder, preparer, wheel_cache, - build_options=options.build_options or [], - global_options=options.global_options or [], - no_clean=options.no_clean, - ) - build_failures = wb.build( - requirement_set.requirements.values(), session=session, - ) - if len(build_failures) != 0: - raise CommandError( - "Failed to build one or more wheels" - ) - except PreviousBuildDirError: - options.no_clean = True - raise - finally: - if not options.no_clean: - requirement_set.cleanup_files() - wheel_cache.cleanup() + return SUCCESS diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/configuration.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/configuration.py index fe6df9b7..e49a5f4f 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/configuration.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/configuration.py @@ -14,22 +14,21 @@ Some terminology: import locale import logging import os +import sys -from pip._vendor 
import six from pip._vendor.six.moves import configparser from pip._internal.exceptions import ( - ConfigurationError, ConfigurationFileCouldNotBeLoaded, -) -from pip._internal.locations import ( - legacy_config_file, new_config_file, running_under_virtualenv, - site_config_files, venv_config_file, + ConfigurationError, + ConfigurationFileCouldNotBeLoaded, ) +from pip._internal.utils import appdirs +from pip._internal.utils.compat import WINDOWS, expanduser from pip._internal.utils.misc import ensure_dir, enum from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 + from typing import ( Any, Dict, Iterable, List, NewType, Optional, Tuple ) @@ -52,6 +51,12 @@ def _normalize_name(name): def _disassemble_key(name): # type: (str) -> List[str] + if "." not in name: + error_message = ( + "Key does not contain dot separated section and key. " + "Perhaps you wanted to use 'global.{}' instead?" + ).format(name) + raise ConfigurationError(error_message) return name.split(".", 1) @@ -59,12 +64,38 @@ def _disassemble_key(name): kinds = enum( USER="user", # User Specific GLOBAL="global", # System Wide - VENV="venv", # Virtual Environment Specific + SITE="site", # [Virtual] Environment Specific ENV="env", # from PIP_CONFIG_FILE ENV_VAR="env-var", # from Environment Variables ) +CONFIG_BASENAME = 'pip.ini' if WINDOWS else 'pip.conf' + + +def get_configuration_files(): + # type: () -> Dict[Kind, List[str]] + global_config_files = [ + os.path.join(path, CONFIG_BASENAME) + for path in appdirs.site_config_dirs('pip') + ] + + site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME) + legacy_config_file = os.path.join( + expanduser('~'), + 'pip' if WINDOWS else '.pip', + CONFIG_BASENAME, + ) + new_config_file = os.path.join( + appdirs.user_config_dir("pip"), CONFIG_BASENAME + ) + return { + kinds.GLOBAL: global_config_files, + kinds.SITE: [site_config_file], + kinds.USER: [legacy_config_file, new_config_file], + } + + 
class Configuration(object): """Handles management of configuration. @@ -80,22 +111,22 @@ class Configuration(object): """ def __init__(self, isolated, load_only=None): - # type: (bool, Kind) -> None + # type: (bool, Optional[Kind]) -> None super(Configuration, self).__init__() - _valid_load_only = [kinds.USER, kinds.GLOBAL, kinds.VENV, None] + _valid_load_only = [kinds.USER, kinds.GLOBAL, kinds.SITE, None] if load_only not in _valid_load_only: raise ConfigurationError( "Got invalid value for load_only - should be one of {}".format( ", ".join(map(repr, _valid_load_only[:-1])) ) ) - self.isolated = isolated # type: bool - self.load_only = load_only # type: Optional[Kind] + self.isolated = isolated + self.load_only = load_only # The order here determines the override order. self._override_order = [ - kinds.GLOBAL, kinds.USER, kinds.VENV, kinds.ENV, kinds.ENV_VAR + kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR ] self._ignore_env_names = ["version", "help"] @@ -151,6 +182,7 @@ class Configuration(object): """ self._ensure_have_load_only() + assert self.load_only fname, parser = self._get_parser_to_modify() if parser is not None: @@ -166,10 +198,10 @@ class Configuration(object): def unset_value(self, key): # type: (str) -> None - """Unset a value in the configuration. 
- """ + """Unset a value in the configuration.""" self._ensure_have_load_only() + assert self.load_only if key not in self._config[self.load_only]: raise ConfigurationError("No such key - {}".format(key)) @@ -177,35 +209,23 @@ class Configuration(object): if parser is not None: section, name = _disassemble_key(key) - - # Remove the key in the parser - modified_something = False - if parser.has_section(section): - # Returns whether the option was removed or not - modified_something = parser.remove_option(section, name) - - if modified_something: - # name removed from parser, section may now be empty - section_iter = iter(parser.items(section)) - try: - val = six.next(section_iter) - except StopIteration: - val = None - - if val is None: - parser.remove_section(section) - - self._mark_as_modified(fname, parser) - else: + if not (parser.has_section(section) + and parser.remove_option(section, name)): + # The option was not removed. raise ConfigurationError( "Fatal Internal error [id=1]. Please report as a bug." ) + # The section may be empty after the option was removed. + if not parser.items(section): + parser.remove_section(section) + self._mark_as_modified(fname, parser) + del self._config[self.load_only][key] def save(self): # type: () -> None - """Save the currentin-memory state. + """Save the current in-memory state. 
""" self._ensure_have_load_only() @@ -216,7 +236,7 @@ class Configuration(object): ensure_dir(os.path.dirname(fname)) with open(fname, "w") as f: - parser.write(f) # type: ignore + parser.write(f) # # Private routines @@ -246,7 +266,7 @@ class Configuration(object): # type: () -> None """Loads configuration from configuration files """ - config_files = dict(self._iter_config_files()) + config_files = dict(self.iter_config_files()) if config_files[kinds.ENV][0:1] == [os.devnull]: logger.debug( "Skipping loading configuration files due to " @@ -308,7 +328,7 @@ class Configuration(object): """Loads configuration from environment variables """ self._config[kinds.ENV_VAR].update( - self._normalized_keys(":env:", self._get_environ_vars()) + self._normalized_keys(":env:", self.get_environ_vars()) ) def _normalized_keys(self, section, items): @@ -324,7 +344,7 @@ class Configuration(object): normalized[key] = val return normalized - def _get_environ_vars(self): + def get_environ_vars(self): # type: () -> Iterable[Tuple[str, str]] """Returns a generator with all environmental vars with prefix PIP_""" for key, val in os.environ.items(): @@ -336,7 +356,7 @@ class Configuration(object): yield key[4:].lower(), val # XXX: This is patched in the tests. - def _iter_config_files(self): + def iter_config_files(self): # type: () -> Iterable[Tuple[Kind, List[str]]] """Yields variant and configuration files associated with it. 
@@ -351,8 +371,10 @@ class Configuration(object): else: yield kinds.ENV, [] + config_files = get_configuration_files() + # at the base we have any global configuration - yield kinds.GLOBAL, list(site_config_files) + yield kinds.GLOBAL, config_files[kinds.GLOBAL] # per-user configuration next should_load_user_config = not self.isolated and not ( @@ -360,15 +382,20 @@ class Configuration(object): ) if should_load_user_config: # The legacy config file is overridden by the new config file - yield kinds.USER, [legacy_config_file, new_config_file] + yield kinds.USER, config_files[kinds.USER] # finally virtualenv configuration first trumping others - if running_under_virtualenv(): - yield kinds.VENV, [venv_config_file] + yield kinds.SITE, config_files[kinds.SITE] + + def get_values_in_config(self, variant): + # type: (Kind) -> Dict[str, Any] + """Get values present in a config file""" + return self._config[variant] def _get_parser_to_modify(self): # type: () -> Tuple[str, RawConfigParser] # Determine which parser to modify + assert self.load_only parsers = self._parsers[self.load_only] if not parsers: # This should not happen if everything works correctly. 
@@ -385,3 +412,7 @@ class Configuration(object): file_parser_tuple = (fname, parser) if file_parser_tuple not in self._modified_parsers: self._modified_parsers.append(file_parser_tuple) + + def __repr__(self): + # type: () -> str + return "{}({!r})".format(self.__class__.__name__, self._dictionary) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/distributions/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/distributions/__init__.py new file mode 100644 index 00000000..d5c1afc5 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/distributions/__init__.py @@ -0,0 +1,24 @@ +from pip._internal.distributions.sdist import SourceDistribution +from pip._internal.distributions.wheel import WheelDistribution +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from pip._internal.distributions.base import AbstractDistribution + from pip._internal.req.req_install import InstallRequirement + + +def make_distribution_for_install_requirement(install_req): + # type: (InstallRequirement) -> AbstractDistribution + """Returns a Distribution for the given InstallRequirement + """ + # Editable requirements will always be source distributions. They use the + # legacy logic until we create a modern standard for them. 
+ if install_req.editable: + return SourceDistribution(install_req) + + # If it's a wheel, it's a WheelDistribution + if install_req.is_wheel: + return WheelDistribution(install_req) + + # Otherwise, a SourceDistribution + return SourceDistribution(install_req) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/distributions/base.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/distributions/base.py new file mode 100644 index 00000000..b836b98d --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/distributions/base.py @@ -0,0 +1,45 @@ +import abc + +from pip._vendor.six import add_metaclass + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional + + from pip._vendor.pkg_resources import Distribution + from pip._internal.req import InstallRequirement + from pip._internal.index.package_finder import PackageFinder + + +@add_metaclass(abc.ABCMeta) +class AbstractDistribution(object): + """A base class for handling installable artifacts. + + The requirements for anything installable are as follows: + + - we must be able to determine the requirement name + (or we can't correctly handle the non-upgrade case). + + - for packages with setup requirements, we must also be able + to determine their requirements without installing additional + packages (for the same reason as run-time dependencies) + + - we must be able to create a Distribution object exposing the + above metadata. 
+ """ + + def __init__(self, req): + # type: (InstallRequirement) -> None + super(AbstractDistribution, self).__init__() + self.req = req + + @abc.abstractmethod + def get_pkg_resources_distribution(self): + # type: () -> Optional[Distribution] + raise NotImplementedError() + + @abc.abstractmethod + def prepare_distribution_metadata(self, finder, build_isolation): + # type: (PackageFinder, bool) -> None + raise NotImplementedError() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/distributions/installed.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/distributions/installed.py new file mode 100644 index 00000000..0d15bf42 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/distributions/installed.py @@ -0,0 +1,24 @@ +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional + + from pip._vendor.pkg_resources import Distribution + from pip._internal.index.package_finder import PackageFinder + + +class InstalledDistribution(AbstractDistribution): + """Represents an installed package. + + This does not need any preparation as the required information has already + been computed. 
+ """ + + def get_pkg_resources_distribution(self): + # type: () -> Optional[Distribution] + return self.req.satisfied_by + + def prepare_distribution_metadata(self, finder, build_isolation): + # type: (PackageFinder, bool) -> None + pass diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/distributions/sdist.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/distributions/sdist.py new file mode 100644 index 00000000..be3d7d97 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/distributions/sdist.py @@ -0,0 +1,104 @@ +import logging + +from pip._internal.build_env import BuildEnvironment +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.exceptions import InstallationError +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Set, Tuple + + from pip._vendor.pkg_resources import Distribution + from pip._internal.index.package_finder import PackageFinder + + +logger = logging.getLogger(__name__) + + +class SourceDistribution(AbstractDistribution): + """Represents a source distribution. + + The preparation step for these needs metadata for the packages to be + generated, either using PEP 517 or using the legacy `setup.py egg_info`. 
+ """ + + def get_pkg_resources_distribution(self): + # type: () -> Distribution + return self.req.get_dist() + + def prepare_distribution_metadata(self, finder, build_isolation): + # type: (PackageFinder, bool) -> None + # Load pyproject.toml, to determine whether PEP 517 is to be used + self.req.load_pyproject_toml() + + # Set up the build isolation, if this requirement should be isolated + should_isolate = self.req.use_pep517 and build_isolation + if should_isolate: + self._setup_isolation(finder) + + self.req.prepare_metadata() + + def _setup_isolation(self, finder): + # type: (PackageFinder) -> None + def _raise_conflicts(conflicting_with, conflicting_reqs): + # type: (str, Set[Tuple[str, str]]) -> None + format_string = ( + "Some build dependencies for {requirement} " + "conflict with {conflicting_with}: {description}." + ) + error_message = format_string.format( + requirement=self.req, + conflicting_with=conflicting_with, + description=', '.join( + '{} is incompatible with {}'.format(installed, wanted) + for installed, wanted in sorted(conflicting) + ) + ) + raise InstallationError(error_message) + + # Isolate in a BuildEnvironment and install the build-time + # requirements. + pyproject_requires = self.req.pyproject_requires + assert pyproject_requires is not None + + self.req.build_env = BuildEnvironment() + self.req.build_env.install_requirements( + finder, pyproject_requires, 'overlay', + "Installing build dependencies" + ) + conflicting, missing = self.req.build_env.check_requirements( + self.req.requirements_to_check + ) + if conflicting: + _raise_conflicts("PEP 517/518 supported requirements", + conflicting) + if missing: + logger.warning( + "Missing build requirements in pyproject.toml for %s.", + self.req, + ) + logger.warning( + "The project does not specify a build backend, and " + "pip cannot fall back to setuptools without %s.", + " and ".join(map(repr, sorted(missing))) + ) + # Install any extra build dependencies that the backend requests. 
+ # This must be done in a second pass, as the pyproject.toml + # dependencies must be installed before we can call the backend. + with self.req.build_env: + runner = runner_with_spinner_message( + "Getting requirements to build wheel" + ) + backend = self.req.pep517_backend + assert backend is not None + with backend.subprocess_runner(runner): + reqs = backend.get_requires_for_build_wheel() + + conflicting, missing = self.req.build_env.check_requirements(reqs) + if conflicting: + _raise_conflicts("the backend dependencies", conflicting) + self.req.build_env.install_requirements( + finder, missing, 'normal', + "Installing backend dependencies" + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/distributions/wheel.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/distributions/wheel.py new file mode 100644 index 00000000..bf3482b1 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/distributions/wheel.py @@ -0,0 +1,36 @@ +from zipfile import ZipFile + +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel + +if MYPY_CHECK_RUNNING: + from pip._vendor.pkg_resources import Distribution + from pip._internal.index.package_finder import PackageFinder + + +class WheelDistribution(AbstractDistribution): + """Represents a wheel distribution. + + This does not need any preparation as wheels can be directly unpacked. + """ + + def get_pkg_resources_distribution(self): + # type: () -> Distribution + """Loads the metadata from the wheel file into memory and returns a + Distribution that uses it, not relying on the wheel file or + requirement. + """ + # Set as part of preparation during download. + assert self.req.local_file_path + # Wheels are never unnamed. 
+ assert self.req.name + + with ZipFile(self.req.local_file_path, allowZip64=True) as z: + return pkg_resources_distribution_for_wheel( + z, self.req.name, self.req.local_file_path + ) + + def prepare_distribution_metadata(self, finder, build_isolation): + # type: (PackageFinder, bool) -> None + pass diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/download.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/download.py deleted file mode 100644 index 2bbe1762..00000000 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/download.py +++ /dev/null @@ -1,971 +0,0 @@ -from __future__ import absolute_import - -import cgi -import email.utils -import getpass -import json -import logging -import mimetypes -import os -import platform -import re -import shutil -import sys - -from pip._vendor import requests, six, urllib3 -from pip._vendor.cachecontrol import CacheControlAdapter -from pip._vendor.cachecontrol.caches import FileCache -from pip._vendor.lockfile import LockError -from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter -from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth -from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response -from pip._vendor.requests.structures import CaseInsensitiveDict -from pip._vendor.requests.utils import get_netrc_auth -# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is -# why we ignore the type on this import -from pip._vendor.six.moves import xmlrpc_client # type: ignore -from pip._vendor.six.moves.urllib import parse as urllib_parse -from pip._vendor.six.moves.urllib import request as urllib_request -from pip._vendor.urllib3.util import IS_PYOPENSSL - -import pip -from pip._internal.exceptions import HashMismatch, InstallationError -from pip._internal.locations import write_delete_marker_file -from pip._internal.models.index import PyPI -from pip._internal.utils.encoding import auto_decode 
-from pip._internal.utils.filesystem import check_path_owner -from pip._internal.utils.glibc import libc_ver -from pip._internal.utils.logging import indent_log -from pip._internal.utils.misc import ( - ARCHIVE_EXTENSIONS, ask_path_exists, backup_dir, call_subprocess, consume, - display_path, format_size, get_installed_version, rmtree, - split_auth_from_netloc, splitext, unpack_file, -) -from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM -from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.utils.ui import DownloadProgressProvider -from pip._internal.vcs import vcs - -if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 - Optional, Tuple, Dict, IO, Text, Union - ) - from pip._internal.models.link import Link # noqa: F401 - from pip._internal.utils.hashes import Hashes # noqa: F401 - from pip._internal.vcs import AuthInfo # noqa: F401 - -try: - import ssl # noqa -except ImportError: - ssl = None - -HAS_TLS = (ssl is not None) or IS_PYOPENSSL - -__all__ = ['get_file_content', - 'is_url', 'url_to_path', 'path_to_url', - 'is_archive_file', 'unpack_vcs_link', - 'unpack_file_url', 'is_vcs_url', 'is_file_url', - 'unpack_http_url', 'unpack_url'] - - -logger = logging.getLogger(__name__) - - -def user_agent(): - """ - Return a string representing the user agent. 
- """ - data = { - "installer": {"name": "pip", "version": pip.__version__}, - "python": platform.python_version(), - "implementation": { - "name": platform.python_implementation(), - }, - } - - if data["implementation"]["name"] == 'CPython': - data["implementation"]["version"] = platform.python_version() - elif data["implementation"]["name"] == 'PyPy': - if sys.pypy_version_info.releaselevel == 'final': - pypy_version_info = sys.pypy_version_info[:3] - else: - pypy_version_info = sys.pypy_version_info - data["implementation"]["version"] = ".".join( - [str(x) for x in pypy_version_info] - ) - elif data["implementation"]["name"] == 'Jython': - # Complete Guess - data["implementation"]["version"] = platform.python_version() - elif data["implementation"]["name"] == 'IronPython': - # Complete Guess - data["implementation"]["version"] = platform.python_version() - - if sys.platform.startswith("linux"): - from pip._vendor import distro - distro_infos = dict(filter( - lambda x: x[1], - zip(["name", "version", "id"], distro.linux_distribution()), - )) - libc = dict(filter( - lambda x: x[1], - zip(["lib", "version"], libc_ver()), - )) - if libc: - distro_infos["libc"] = libc - if distro_infos: - data["distro"] = distro_infos - - if sys.platform.startswith("darwin") and platform.mac_ver()[0]: - data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]} - - if platform.system(): - data.setdefault("system", {})["name"] = platform.system() - - if platform.release(): - data.setdefault("system", {})["release"] = platform.release() - - if platform.machine(): - data["cpu"] = platform.machine() - - if HAS_TLS: - data["openssl_version"] = ssl.OPENSSL_VERSION - - setuptools_version = get_installed_version("setuptools") - if setuptools_version is not None: - data["setuptools_version"] = setuptools_version - - return "{data[installer][name]}/{data[installer][version]} {json}".format( - data=data, - json=json.dumps(data, separators=(",", ":"), sort_keys=True), - ) - - -class 
MultiDomainBasicAuth(AuthBase): - - def __init__(self, prompting=True): - # type: (bool) -> None - self.prompting = prompting - self.passwords = {} # type: Dict[str, AuthInfo] - - def __call__(self, req): - parsed = urllib_parse.urlparse(req.url) - - # Split the credentials from the netloc. - netloc, url_user_password = split_auth_from_netloc(parsed.netloc) - - # Set the url of the request to the url without any credentials - req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:]) - - # Use any stored credentials that we have for this netloc - username, password = self.passwords.get(netloc, (None, None)) - - # Use the credentials embedded in the url if we have none stored - if username is None: - username, password = url_user_password - - # Get creds from netrc if we still don't have them - if username is None and password is None: - netrc_auth = get_netrc_auth(req.url) - username, password = netrc_auth if netrc_auth else (None, None) - - if username or password: - # Store the username and password - self.passwords[netloc] = (username, password) - - # Send the basic auth with this request - req = HTTPBasicAuth(username or "", password or "")(req) - - # Attach a hook to handle 401 responses - req.register_hook("response", self.handle_401) - - return req - - def handle_401(self, resp, **kwargs): - # We only care about 401 responses, anything else we want to just - # pass through the actual response - if resp.status_code != 401: - return resp - - # We are not able to prompt the user so simply return the response - if not self.prompting: - return resp - - parsed = urllib_parse.urlparse(resp.url) - - # Prompt the user for a new username and password - username = six.moves.input("User for %s: " % parsed.netloc) - password = getpass.getpass("Password: ") - - # Store the new username and password to use for future requests - if username or password: - self.passwords[parsed.netloc] = (username, password) - - # Consume content and release the original 
connection to allow our new - # request to reuse the same one. - resp.content - resp.raw.release_conn() - - # Add our new username and password to the request - req = HTTPBasicAuth(username or "", password or "")(resp.request) - req.register_hook("response", self.warn_on_401) - - # Send our new request - new_resp = resp.connection.send(req, **kwargs) - new_resp.history.append(resp) - - return new_resp - - def warn_on_401(self, resp, **kwargs): - # warn user that they provided incorrect credentials - if resp.status_code == 401: - logger.warning('401 Error, Credentials not correct for %s', - resp.request.url) - - -class LocalFSAdapter(BaseAdapter): - - def send(self, request, stream=None, timeout=None, verify=None, cert=None, - proxies=None): - pathname = url_to_path(request.url) - - resp = Response() - resp.status_code = 200 - resp.url = request.url - - try: - stats = os.stat(pathname) - except OSError as exc: - resp.status_code = 404 - resp.raw = exc - else: - modified = email.utils.formatdate(stats.st_mtime, usegmt=True) - content_type = mimetypes.guess_type(pathname)[0] or "text/plain" - resp.headers = CaseInsensitiveDict({ - "Content-Type": content_type, - "Content-Length": stats.st_size, - "Last-Modified": modified, - }) - - resp.raw = open(pathname, "rb") - resp.close = resp.raw.close - - return resp - - def close(self): - pass - - -class SafeFileCache(FileCache): - """ - A file based cache which is safe to use even when the target directory may - not be accessible or writable. - """ - - def __init__(self, *args, **kwargs): - super(SafeFileCache, self).__init__(*args, **kwargs) - - # Check to ensure that the directory containing our cache directory - # is owned by the user current executing pip. If it does not exist - # we will check the parent directory until we find one that does exist. - # If it is not owned by the user executing pip then we will disable - # the cache and log a warning. 
- if not check_path_owner(self.directory): - logger.warning( - "The directory '%s' or its parent directory is not owned by " - "the current user and the cache has been disabled. Please " - "check the permissions and owner of that directory. If " - "executing pip with sudo, you may want sudo's -H flag.", - self.directory, - ) - - # Set our directory to None to disable the Cache - self.directory = None - - def get(self, *args, **kwargs): - # If we don't have a directory, then the cache should be a no-op. - if self.directory is None: - return - - try: - return super(SafeFileCache, self).get(*args, **kwargs) - except (LockError, OSError, IOError): - # We intentionally silence this error, if we can't access the cache - # then we can just skip caching and process the request as if - # caching wasn't enabled. - pass - - def set(self, *args, **kwargs): - # If we don't have a directory, then the cache should be a no-op. - if self.directory is None: - return - - try: - return super(SafeFileCache, self).set(*args, **kwargs) - except (LockError, OSError, IOError): - # We intentionally silence this error, if we can't access the cache - # then we can just skip caching and process the request as if - # caching wasn't enabled. - pass - - def delete(self, *args, **kwargs): - # If we don't have a directory, then the cache should be a no-op. - if self.directory is None: - return - - try: - return super(SafeFileCache, self).delete(*args, **kwargs) - except (LockError, OSError, IOError): - # We intentionally silence this error, if we can't access the cache - # then we can just skip caching and process the request as if - # caching wasn't enabled. 
- pass - - -class InsecureHTTPAdapter(HTTPAdapter): - - def cert_verify(self, conn, url, verify, cert): - conn.cert_reqs = 'CERT_NONE' - conn.ca_certs = None - - -class PipSession(requests.Session): - - timeout = None # type: Optional[int] - - def __init__(self, *args, **kwargs): - retries = kwargs.pop("retries", 0) - cache = kwargs.pop("cache", None) - insecure_hosts = kwargs.pop("insecure_hosts", []) - - super(PipSession, self).__init__(*args, **kwargs) - - # Attach our User Agent to the request - self.headers["User-Agent"] = user_agent() - - # Attach our Authentication handler to the session - self.auth = MultiDomainBasicAuth() - - # Create our urllib3.Retry instance which will allow us to customize - # how we handle retries. - retries = urllib3.Retry( - # Set the total number of retries that a particular request can - # have. - total=retries, - - # A 503 error from PyPI typically means that the Fastly -> Origin - # connection got interrupted in some way. A 503 error in general - # is typically considered a transient error so we'll go ahead and - # retry it. - # A 500 may indicate transient error in Amazon S3 - # A 520 or 527 - may indicate transient error in CloudFlare - status_forcelist=[500, 503, 520, 527], - - # Add a small amount of back off between failed requests in - # order to prevent hammering the service. - backoff_factor=0.25, - ) - - # We want to _only_ cache responses on securely fetched origins. We do - # this because we can't validate the response of an insecurely fetched - # origin, and we don't want someone to be able to poison the cache and - # require manual eviction from the cache to fix it. - if cache: - secure_adapter = CacheControlAdapter( - cache=SafeFileCache(cache, use_dir_lock=True), - max_retries=retries, - ) - else: - secure_adapter = HTTPAdapter(max_retries=retries) - - # Our Insecure HTTPAdapter disables HTTPS validation. 
It does not - # support caching (see above) so we'll use it for all http:// URLs as - # well as any https:// host that we've marked as ignoring TLS errors - # for. - insecure_adapter = InsecureHTTPAdapter(max_retries=retries) - - self.mount("https://", secure_adapter) - self.mount("http://", insecure_adapter) - - # Enable file:// urls - self.mount("file://", LocalFSAdapter()) - - # We want to use a non-validating adapter for any requests which are - # deemed insecure. - for host in insecure_hosts: - self.mount("https://{}/".format(host), insecure_adapter) - - def request(self, method, url, *args, **kwargs): - # Allow setting a default timeout on a session - kwargs.setdefault("timeout", self.timeout) - - # Dispatch the actual request - return super(PipSession, self).request(method, url, *args, **kwargs) - - -def get_file_content(url, comes_from=None, session=None): - # type: (str, Optional[str], Optional[PipSession]) -> Tuple[str, Text] - """Gets the content of a file; it may be a filename, file: URL, or - http: URL. Returns (location, content). Content is unicode. - - :param url: File path or url. - :param comes_from: Origin description of requirements. - :param session: Instance of pip.download.PipSession. 
- """ - if session is None: - raise TypeError( - "get_file_content() missing 1 required keyword argument: 'session'" - ) - - match = _scheme_re.search(url) - if match: - scheme = match.group(1).lower() - if (scheme == 'file' and comes_from and - comes_from.startswith('http')): - raise InstallationError( - 'Requirements file %s references URL %s, which is local' - % (comes_from, url)) - if scheme == 'file': - path = url.split(':', 1)[1] - path = path.replace('\\', '/') - match = _url_slash_drive_re.match(path) - if match: - path = match.group(1) + ':' + path.split('|', 1)[1] - path = urllib_parse.unquote(path) - if path.startswith('/'): - path = '/' + path.lstrip('/') - url = path - else: - # FIXME: catch some errors - resp = session.get(url) - resp.raise_for_status() - return resp.url, resp.text - try: - with open(url, 'rb') as f: - content = auto_decode(f.read()) - except IOError as exc: - raise InstallationError( - 'Could not open requirements file: %s' % str(exc) - ) - return url, content - - -_scheme_re = re.compile(r'^(http|https|file):', re.I) -_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I) - - -def is_url(name): - # type: (Union[str, Text]) -> bool - """Returns true if the name looks like a URL""" - if ':' not in name: - return False - scheme = name.split(':', 1)[0].lower() - return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes - - -def url_to_path(url): - # type: (str) -> str - """ - Convert a file: URL to a path. - """ - assert url.startswith('file:'), ( - "You can only turn file: urls into filenames (not %r)" % url) - - _, netloc, path, _, _ = urllib_parse.urlsplit(url) - - # if we have a UNC path, prepend UNC share notation - if netloc: - netloc = '\\\\' + netloc - - path = urllib_request.url2pathname(netloc + path) - return path - - -def path_to_url(path): - # type: (Union[str, Text]) -> str - """ - Convert a path to a file: URL. The path will be made absolute and have - quoted path parts. 
- """ - path = os.path.normpath(os.path.abspath(path)) - url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path)) - return url - - -def is_archive_file(name): - # type: (str) -> bool - """Return True if `name` is a considered as an archive file.""" - ext = splitext(name)[1].lower() - if ext in ARCHIVE_EXTENSIONS: - return True - return False - - -def unpack_vcs_link(link, location): - vcs_backend = _get_used_vcs_backend(link) - vcs_backend.unpack(location) - - -def _get_used_vcs_backend(link): - for backend in vcs.backends: - if link.scheme in backend.schemes: - vcs_backend = backend(link.url) - return vcs_backend - - -def is_vcs_url(link): - # type: (Link) -> bool - return bool(_get_used_vcs_backend(link)) - - -def is_file_url(link): - # type: (Link) -> bool - return link.url.lower().startswith('file:') - - -def is_dir_url(link): - # type: (Link) -> bool - """Return whether a file:// Link points to a directory. - - ``link`` must not have any other scheme but file://. Call is_file_url() - first. - - """ - link_path = url_to_path(link.url_without_fragment) - return os.path.isdir(link_path) - - -def _progress_indicator(iterable, *args, **kwargs): - return iterable - - -def _download_url( - resp, # type: Response - link, # type: Link - content_file, # type: IO - hashes, # type: Hashes - progress_bar # type: str -): - # type: (...) -> None - try: - total_length = int(resp.headers['content-length']) - except (ValueError, KeyError, TypeError): - total_length = 0 - - cached_resp = getattr(resp, "from_cache", False) - if logger.getEffectiveLevel() > logging.INFO: - show_progress = False - elif cached_resp: - show_progress = False - elif total_length > (40 * 1000): - show_progress = True - elif not total_length: - show_progress = True - else: - show_progress = False - - show_url = link.show_url - - def resp_read(chunk_size): - try: - # Special case for urllib3. 
- for chunk in resp.raw.stream( - chunk_size, - # We use decode_content=False here because we don't - # want urllib3 to mess with the raw bytes we get - # from the server. If we decompress inside of - # urllib3 then we cannot verify the checksum - # because the checksum will be of the compressed - # file. This breakage will only occur if the - # server adds a Content-Encoding header, which - # depends on how the server was configured: - # - Some servers will notice that the file isn't a - # compressible file and will leave the file alone - # and with an empty Content-Encoding - # - Some servers will notice that the file is - # already compressed and will leave the file - # alone and will add a Content-Encoding: gzip - # header - # - Some servers won't notice anything at all and - # will take a file that's already been compressed - # and compress it again and set the - # Content-Encoding: gzip header - # - # By setting this not to decode automatically we - # hope to eliminate problems with the second case. - decode_content=False): - yield chunk - except AttributeError: - # Standard file-like object. 
- while True: - chunk = resp.raw.read(chunk_size) - if not chunk: - break - yield chunk - - def written_chunks(chunks): - for chunk in chunks: - content_file.write(chunk) - yield chunk - - progress_indicator = _progress_indicator - - if link.netloc == PyPI.netloc: - url = show_url - else: - url = link.url_without_fragment - - if show_progress: # We don't show progress on cached responses - progress_indicator = DownloadProgressProvider(progress_bar, - max=total_length) - if total_length: - logger.info("Downloading %s (%s)", url, format_size(total_length)) - else: - logger.info("Downloading %s", url) - elif cached_resp: - logger.info("Using cached %s", url) - else: - logger.info("Downloading %s", url) - - logger.debug('Downloading from URL %s', link) - - downloaded_chunks = written_chunks( - progress_indicator( - resp_read(CONTENT_CHUNK_SIZE), - CONTENT_CHUNK_SIZE - ) - ) - if hashes: - hashes.check_against_chunks(downloaded_chunks) - else: - consume(downloaded_chunks) - - -def _copy_file(filename, location, link): - copy = True - download_location = os.path.join(location, link.filename) - if os.path.exists(download_location): - response = ask_path_exists( - 'The file %s exists. 
(i)gnore, (w)ipe, (b)ackup, (a)abort' % - display_path(download_location), ('i', 'w', 'b', 'a')) - if response == 'i': - copy = False - elif response == 'w': - logger.warning('Deleting %s', display_path(download_location)) - os.remove(download_location) - elif response == 'b': - dest_file = backup_dir(download_location) - logger.warning( - 'Backing up %s to %s', - display_path(download_location), - display_path(dest_file), - ) - shutil.move(download_location, dest_file) - elif response == 'a': - sys.exit(-1) - if copy: - shutil.copy(filename, download_location) - logger.info('Saved %s', display_path(download_location)) - - -def unpack_http_url( - link, # type: Link - location, # type: str - download_dir=None, # type: Optional[str] - session=None, # type: Optional[PipSession] - hashes=None, # type: Optional[Hashes] - progress_bar="on" # type: str -): - # type: (...) -> None - if session is None: - raise TypeError( - "unpack_http_url() missing 1 required keyword argument: 'session'" - ) - - with TempDirectory(kind="unpack") as temp_dir: - # If a download dir is specified, is the file already downloaded there? - already_downloaded_path = None - if download_dir: - already_downloaded_path = _check_download_dir(link, - download_dir, - hashes) - - if already_downloaded_path: - from_path = already_downloaded_path - content_type = mimetypes.guess_type(from_path)[0] - else: - # let's download to a tmp dir - from_path, content_type = _download_http_url(link, - session, - temp_dir.path, - hashes, - progress_bar) - - # unpack the archive to the build dir location. 
even when only - # downloading archives, they have to be unpacked to parse dependencies - unpack_file(from_path, location, content_type, link) - - # a download dir is specified; let's copy the archive there - if download_dir and not already_downloaded_path: - _copy_file(from_path, download_dir, link) - - if not already_downloaded_path: - os.unlink(from_path) - - -def unpack_file_url( - link, # type: Link - location, # type: str - download_dir=None, # type: Optional[str] - hashes=None # type: Optional[Hashes] -): - # type: (...) -> None - """Unpack link into location. - - If download_dir is provided and link points to a file, make a copy - of the link file inside download_dir. - """ - link_path = url_to_path(link.url_without_fragment) - - # If it's a url to a local directory - if is_dir_url(link): - if os.path.isdir(location): - rmtree(location) - shutil.copytree(link_path, location, symlinks=True) - if download_dir: - logger.info('Link is a directory, ignoring download_dir') - return - - # If --require-hashes is off, `hashes` is either empty, the - # link's embedded hash, or MissingHashes; it is required to - # match. If --require-hashes is on, we are satisfied by any - # hash in `hashes` matching: a URL-based or an option-based - # one; no internet-sourced hash will be in `hashes`. - if hashes: - hashes.check_against_path(link_path) - - # If a download dir is specified, is the file already there and valid? - already_downloaded_path = None - if download_dir: - already_downloaded_path = _check_download_dir(link, - download_dir, - hashes) - - if already_downloaded_path: - from_path = already_downloaded_path - else: - from_path = link_path - - content_type = mimetypes.guess_type(from_path)[0] - - # unpack the archive to the build dir location. 
even when only downloading - # archives, they have to be unpacked to parse dependencies - unpack_file(from_path, location, content_type, link) - - # a download dir is specified and not already downloaded - if download_dir and not already_downloaded_path: - _copy_file(from_path, download_dir, link) - - -def _copy_dist_from_dir(link_path, location): - """Copy distribution files in `link_path` to `location`. - - Invoked when user requests to install a local directory. E.g.: - - pip install . - pip install ~/dev/git-repos/python-prompt-toolkit - - """ - - # Note: This is currently VERY SLOW if you have a lot of data in the - # directory, because it copies everything with `shutil.copytree`. - # What it should really do is build an sdist and install that. - # See https://github.com/pypa/pip/issues/2195 - - if os.path.isdir(location): - rmtree(location) - - # build an sdist - setup_py = 'setup.py' - sdist_args = [sys.executable] - sdist_args.append('-c') - sdist_args.append(SETUPTOOLS_SHIM % setup_py) - sdist_args.append('sdist') - sdist_args += ['--dist-dir', location] - logger.info('Running setup.py sdist for %s', link_path) - - with indent_log(): - call_subprocess(sdist_args, cwd=link_path, show_stdout=False) - - # unpack sdist into `location` - sdist = os.path.join(location, os.listdir(location)[0]) - logger.info('Unpacking sdist %s into %s', sdist, location) - unpack_file(sdist, location, content_type=None, link=None) - - -class PipXmlrpcTransport(xmlrpc_client.Transport): - """Provide a `xmlrpclib.Transport` implementation via a `PipSession` - object. 
- """ - - def __init__(self, index_url, session, use_datetime=False): - xmlrpc_client.Transport.__init__(self, use_datetime) - index_parts = urllib_parse.urlparse(index_url) - self._scheme = index_parts.scheme - self._session = session - - def request(self, host, handler, request_body, verbose=False): - parts = (self._scheme, host, handler, None, None, None) - url = urllib_parse.urlunparse(parts) - try: - headers = {'Content-Type': 'text/xml'} - response = self._session.post(url, data=request_body, - headers=headers, stream=True) - response.raise_for_status() - self.verbose = verbose - return self.parse_response(response.raw) - except requests.HTTPError as exc: - logger.critical( - "HTTP error %s while getting %s", - exc.response.status_code, url, - ) - raise - - -def unpack_url( - link, # type: Optional[Link] - location, # type: Optional[str] - download_dir=None, # type: Optional[str] - only_download=False, # type: bool - session=None, # type: Optional[PipSession] - hashes=None, # type: Optional[Hashes] - progress_bar="on" # type: str -): - # type: (...) -> None - """Unpack link. - If link is a VCS link: - if only_download, export into download_dir and ignore location - else unpack into location - for other types of link: - - unpack into location - - if download_dir, copy the file into download_dir - - if only_download, mark location for deletion - - :param hashes: A Hashes object, one of whose embedded hashes must match, - or HashMismatch will be raised. If the Hashes is empty, no matches are - required, and unhashable types of requirements (like VCS ones, which - would ordinarily raise HashUnsupported) are allowed. 
- """ - # non-editable vcs urls - if is_vcs_url(link): - unpack_vcs_link(link, location) - - # file urls - elif is_file_url(link): - unpack_file_url(link, location, download_dir, hashes=hashes) - - # http urls - else: - if session is None: - session = PipSession() - - unpack_http_url( - link, - location, - download_dir, - session, - hashes=hashes, - progress_bar=progress_bar - ) - if only_download: - write_delete_marker_file(location) - - -def _download_http_url( - link, # type: Link - session, # type: PipSession - temp_dir, # type: str - hashes, # type: Hashes - progress_bar # type: str -): - # type: (...) -> Tuple[str, str] - """Download link url into temp_dir using provided session""" - target_url = link.url.split('#', 1)[0] - try: - resp = session.get( - target_url, - # We use Accept-Encoding: identity here because requests - # defaults to accepting compressed responses. This breaks in - # a variety of ways depending on how the server is configured. - # - Some servers will notice that the file isn't a compressible - # file and will leave the file alone and with an empty - # Content-Encoding - # - Some servers will notice that the file is already - # compressed and will leave the file alone and will add a - # Content-Encoding: gzip header - # - Some servers won't notice anything at all and will take - # a file that's already been compressed and compress it again - # and set the Content-Encoding: gzip header - # By setting this to request only the identity encoding We're - # hoping to eliminate the third case. Hopefully there does not - # exist a server which when given a file will notice it is - # already compressed and that you're not asking for a - # compressed file and will then decompress it before sending - # because if that's the case I don't think it'll ever be - # possible to make this work. 
- headers={"Accept-Encoding": "identity"}, - stream=True, - ) - resp.raise_for_status() - except requests.HTTPError as exc: - logger.critical( - "HTTP error %s while getting %s", exc.response.status_code, link, - ) - raise - - content_type = resp.headers.get('content-type', '') - filename = link.filename # fallback - # Have a look at the Content-Disposition header for a better guess - content_disposition = resp.headers.get('content-disposition') - if content_disposition: - type, params = cgi.parse_header(content_disposition) - # We use ``or`` here because we don't want to use an "empty" value - # from the filename param. - filename = params.get('filename') or filename - ext = splitext(filename)[1] - if not ext: - ext = mimetypes.guess_extension(content_type) - if ext: - filename += ext - if not ext and link.url != resp.url: - ext = os.path.splitext(resp.url)[1] - if ext: - filename += ext - file_path = os.path.join(temp_dir, filename) - with open(file_path, 'wb') as content_file: - _download_url(resp, link, content_file, hashes, progress_bar) - return file_path, content_type - - -def _check_download_dir(link, download_dir, hashes): - # type: (Link, str, Hashes) -> Optional[str] - """ Check download_dir for previously downloaded file with correct hash - If a correct file is found return its path else None - """ - download_path = os.path.join(download_dir, link.filename) - if os.path.exists(download_path): - # If already downloaded, does its hash match? - logger.info('File was already downloaded %s', download_path) - if hashes: - try: - hashes.check_against_path(download_path) - except HashMismatch: - logger.warning( - 'Previously-downloaded file %s has bad hash. 
' - 'Re-downloading.', - download_path - ) - os.unlink(download_path) - return None - return download_path - return None diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/exceptions.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/exceptions.py index 38ceeea9..3f26215d 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/exceptions.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/exceptions.py @@ -1,4 +1,5 @@ """Exceptions used throughout package""" + from __future__ import absolute_import from itertools import chain, groupby, repeat @@ -8,8 +9,19 @@ from pip._vendor.six import iteritems from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Optional # noqa: F401 - from pip._internal.req.req_install import InstallRequirement # noqa: F401 + from typing import Any, Optional, List, Dict, Text + + from pip._vendor.pkg_resources import Distribution + from pip._vendor.requests.models import Response, Request + from pip._vendor.six import PY3 + from pip._vendor.six.moves import configparser + + from pip._internal.req.req_install import InstallRequirement + + if PY3: + from hashlib import _Hash + else: + from hashlib import _hash as _Hash class PipError(Exception): @@ -28,6 +40,36 @@ class UninstallationError(PipError): """General exception during uninstallation""" +class NoneMetadataError(PipError): + """ + Raised when accessing "METADATA" or "PKG-INFO" metadata for a + pip._vendor.pkg_resources.Distribution object and + `dist.has_metadata('METADATA')` returns True but + `dist.get_metadata('METADATA')` returns None (and similarly for + "PKG-INFO"). + """ + + def __init__(self, dist, metadata_name): + # type: (Distribution, str) -> None + """ + :param dist: A Distribution object. + :param metadata_name: The name of the metadata being accessed + (can be "METADATA" or "PKG-INFO"). 
+ """ + self.dist = dist + self.metadata_name = metadata_name + + def __str__(self): + # type: () -> str + # Use `dist` in the error message because its stringification + # includes more information, like the version and location. + return ( + 'None {} metadata found for distribution: {}'.format( + self.metadata_name, self.dist, + ) + ) + + class DistributionNotFound(InstallationError): """Raised when a distribution cannot be found to satisfy a requirement""" @@ -49,10 +91,38 @@ class CommandError(PipError): """Raised when there is an error in command-line arguments""" +class SubProcessError(PipError): + """Raised when there is an error raised while executing a + command in subprocess""" + + class PreviousBuildDirError(PipError): """Raised when there's a previous conflicting build directory""" +class NetworkConnectionError(PipError): + """HTTP connection error""" + + def __init__(self, error_msg, response=None, request=None): + # type: (Text, Response, Request) -> None + """ + Initialize NetworkConnectionError with `request` and `response` + objects. + """ + self.response = response + self.request = request + self.error_msg = error_msg + if (self.response is not None and not self.request and + hasattr(response, 'request')): + self.request = self.response.request + super(NetworkConnectionError, self).__init__( + error_msg, response, request) + + def __str__(self): + # type: () -> str + return str(self.error_msg) + + class InvalidWheelFilename(InstallationError): """Invalid wheel filename.""" @@ -61,16 +131,39 @@ class UnsupportedWheel(InstallationError): """Unsupported wheel.""" +class MetadataInconsistent(InstallationError): + """Built metadata contains inconsistent information. + + This is raised when the metadata contains values (e.g. name and version) + that do not match the information previously obtained from sdist filename + or user-supplied ``#egg=`` value. 
+ """ + def __init__(self, ireq, field, built): + # type: (InstallRequirement, str, Any) -> None + self.ireq = ireq + self.field = field + self.built = built + + def __str__(self): + # type: () -> str + return "Requested {} has different {} in metadata: {!r}".format( + self.ireq, self.field, self.built, + ) + + class HashErrors(InstallationError): """Multiple HashError instances rolled into one for reporting""" def __init__(self): - self.errors = [] + # type: () -> None + self.errors = [] # type: List[HashError] def append(self, error): + # type: (HashError) -> None self.errors.append(error) def __str__(self): + # type: () -> str lines = [] self.errors.sort(key=lambda e: e.order) for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__): @@ -78,11 +171,14 @@ class HashErrors(InstallationError): lines.extend(e.body() for e in errors_of_cls) if lines: return '\n'.join(lines) + return '' def __nonzero__(self): + # type: () -> bool return bool(self.errors) def __bool__(self): + # type: () -> bool return self.__nonzero__() @@ -104,23 +200,27 @@ class HashError(InstallationError): """ req = None # type: Optional[InstallRequirement] head = '' + order = None # type: Optional[int] def body(self): + # type: () -> str """Return a summary of me for display under the heading. This default implementation simply prints a description of the triggering requirement. :param req: The InstallRequirement that provoked this error, with - populate_link() having already been called + its link already populated by the resolver's _populate_link(). """ - return ' %s' % self._requirement_name() + return ' {}'.format(self._requirement_name()) def __str__(self): - return '%s\n%s' % (self.head, self.body()) + # type: () -> str + return '{}\n{}'.format(self.head, self.body()) def _requirement_name(self): + # type: () -> str """Return a description of the requirement that triggered me. 
This default implementation returns long description of the req, with @@ -161,6 +261,7 @@ class HashMissing(HashError): 'has a hash.)') def __init__(self, gotten_hash): + # type: (str) -> None """ :param gotten_hash: The hash of the (possibly malicious) archive we just downloaded @@ -168,6 +269,7 @@ class HashMissing(HashError): self.gotten_hash = gotten_hash def body(self): + # type: () -> str # Dodge circular import. from pip._internal.utils.hashes import FAVORITE_HASH @@ -180,9 +282,9 @@ class HashMissing(HashError): # In case someone feeds something downright stupid # to InstallRequirement's constructor. else getattr(self.req, 'req', None)) - return ' %s --hash=%s:%s' % (package or 'unknown package', - FAVORITE_HASH, - self.gotten_hash) + return ' {} --hash={}:{}'.format(package or 'unknown package', + FAVORITE_HASH, + self.gotten_hash) class HashUnpinned(HashError): @@ -210,6 +312,7 @@ class HashMismatch(HashError): 'someone may have tampered with them.') def __init__(self, allowed, gots): + # type: (Dict[str, List[str]], Dict[str, _Hash]) -> None """ :param allowed: A dict of algorithm names pointing to lists of allowed hex digests @@ -220,10 +323,12 @@ class HashMismatch(HashError): self.gots = gots def body(self): - return ' %s:\n%s' % (self._requirement_name(), - self._hash_comparison()) + # type: () -> str + return ' {}:\n{}'.format(self._requirement_name(), + self._hash_comparison()) def _hash_comparison(self): + # type: () -> str """ Return a comparison of actual and expected hash values. @@ -235,18 +340,18 @@ class HashMismatch(HashError): """ def hash_then_or(hash_name): + # type: (str) -> chain[str] # For now, all the decent hashes have 6-char names, so we can get # away with hard-coding space literals. 
return chain([hash_name], repeat(' or')) - lines = [] + lines = [] # type: List[str] for hash_name, expecteds in iteritems(self.allowed): prefix = hash_then_or(hash_name) - lines.extend((' Expected %s %s' % (next(prefix), e)) + lines.extend((' Expected {} {}'.format(next(prefix), e)) for e in expecteds) - lines.append(' Got %s\n' % - self.gots[hash_name].hexdigest()) - prefix = ' or' + lines.append(' Got {}\n'.format( + self.gots[hash_name].hexdigest())) return '\n'.join(lines) @@ -260,15 +365,17 @@ class ConfigurationFileCouldNotBeLoaded(ConfigurationError): """ def __init__(self, reason="could not be loaded", fname=None, error=None): + # type: (str, Optional[str], Optional[configparser.Error]) -> None super(ConfigurationFileCouldNotBeLoaded, self).__init__(error) self.reason = reason self.fname = fname self.error = error def __str__(self): + # type: () -> str if self.fname is not None: message_part = " in {}.".format(self.fname) else: assert self.error is not None - message_part = ".\n{}\n".format(self.error.message) + message_part = ".\n{}\n".format(self.error) return "Configuration file {}{}".format(self.reason, message_part) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/index.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/index.py deleted file mode 100644 index 9eda3a35..00000000 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/index.py +++ /dev/null @@ -1,990 +0,0 @@ -"""Routines related to PyPI, indexes""" -from __future__ import absolute_import - -import cgi -import itertools -import logging -import mimetypes -import os -import posixpath -import re -import sys -from collections import namedtuple - -from pip._vendor import html5lib, requests, six -from pip._vendor.distlib.compat import unescape -from pip._vendor.packaging import specifiers -from pip._vendor.packaging.utils import canonicalize_name -from pip._vendor.packaging.version import parse as parse_version 
-from pip._vendor.requests.exceptions import RetryError, SSLError -from pip._vendor.six.moves.urllib import parse as urllib_parse -from pip._vendor.six.moves.urllib import request as urllib_request - -from pip._internal.download import HAS_TLS, is_url, path_to_url, url_to_path -from pip._internal.exceptions import ( - BestVersionAlreadyInstalled, DistributionNotFound, InvalidWheelFilename, - UnsupportedWheel, -) -from pip._internal.models.candidate import InstallationCandidate -from pip._internal.models.format_control import FormatControl -from pip._internal.models.index import PyPI -from pip._internal.models.link import Link -from pip._internal.pep425tags import get_supported -from pip._internal.utils.compat import ipaddress -from pip._internal.utils.logging import indent_log -from pip._internal.utils.misc import ( - ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, WHEEL_EXTENSION, normalize_path, - redact_password_from_url, -) -from pip._internal.utils.packaging import check_requires_python -from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.wheel import Wheel - -if MYPY_CHECK_RUNNING: - from logging import Logger # noqa: F401 - from typing import ( # noqa: F401 - Tuple, Optional, Any, List, Union, Callable, Set, Sequence, - Iterable, MutableMapping - ) - from pip._vendor.packaging.version import _BaseVersion # noqa: F401 - from pip._vendor.requests import Response # noqa: F401 - from pip._internal.req import InstallRequirement # noqa: F401 - from pip._internal.download import PipSession # noqa: F401 - - SecureOrigin = Tuple[str, str, Optional[str]] - BuildTag = Tuple[Any, ...] 
# either emply tuple or Tuple[int, str] - CandidateSortingKey = Tuple[int, _BaseVersion, BuildTag, Optional[int]] - -__all__ = ['FormatControl', 'PackageFinder'] - - -SECURE_ORIGINS = [ - # protocol, hostname, port - # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC) - ("https", "*", "*"), - ("*", "localhost", "*"), - ("*", "127.0.0.0/8", "*"), - ("*", "::1/128", "*"), - ("file", "*", None), - # ssh is always secure. - ("ssh", "*", "*"), -] # type: List[SecureOrigin] - - -logger = logging.getLogger(__name__) - - -def _match_vcs_scheme(url): - # type: (str) -> Optional[str] - """Look for VCS schemes in the URL. - - Returns the matched VCS scheme, or None if there's no match. - """ - from pip._internal.vcs import VcsSupport - for scheme in VcsSupport.schemes: - if url.lower().startswith(scheme) and url[len(scheme)] in '+:': - return scheme - return None - - -def _is_url_like_archive(url): - # type: (str) -> bool - """Return whether the URL looks like an archive. - """ - filename = Link(url).filename - for bad_ext in ARCHIVE_EXTENSIONS: - if filename.endswith(bad_ext): - return True - return False - - -class _NotHTML(Exception): - def __init__(self, content_type, request_desc): - # type: (str, str) -> None - super(_NotHTML, self).__init__(content_type, request_desc) - self.content_type = content_type - self.request_desc = request_desc - - -def _ensure_html_header(response): - # type: (Response) -> None - """Check the Content-Type header to ensure the response contains HTML. - - Raises `_NotHTML` if the content type is not text/html. - """ - content_type = response.headers.get("Content-Type", "") - if not content_type.lower().startswith("text/html"): - raise _NotHTML(content_type, response.request.method) - - -class _NotHTTP(Exception): - pass - - -def _ensure_html_response(url, session): - # type: (str, PipSession) -> None - """Send a HEAD request to the URL, and ensure the response contains HTML. 
- - Raises `_NotHTTP` if the URL is not available for a HEAD request, or - `_NotHTML` if the content type is not text/html. - """ - scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url) - if scheme not in {'http', 'https'}: - raise _NotHTTP() - - resp = session.head(url, allow_redirects=True) - resp.raise_for_status() - - _ensure_html_header(resp) - - -def _get_html_response(url, session): - # type: (str, PipSession) -> Response - """Access an HTML page with GET, and return the response. - - This consists of three parts: - - 1. If the URL looks suspiciously like an archive, send a HEAD first to - check the Content-Type is HTML, to avoid downloading a large file. - Raise `_NotHTTP` if the content type cannot be determined, or - `_NotHTML` if it is not HTML. - 2. Actually perform the request. Raise HTTP exceptions on network failures. - 3. Check the Content-Type header to make sure we got HTML, and raise - `_NotHTML` otherwise. - """ - if _is_url_like_archive(url): - _ensure_html_response(url, session=session) - - logger.debug('Getting page %s', url) - - resp = session.get( - url, - headers={ - "Accept": "text/html", - # We don't want to blindly returned cached data for - # /simple/, because authors generally expecting that - # twine upload && pip install will function, but if - # they've done a pip install in the last ~10 minutes - # it won't. Thus by setting this to zero we will not - # blindly use any cached data, however the benefit of - # using max-age=0 instead of no-cache, is that we will - # still support conditional requests, so we will still - # minimize traffic sent in cases where the page hasn't - # changed at all, we will just always incur the round - # trip for the conditional GET now instead of only - # once per 10 minutes. - # For more information, please see pypa/pip#5670. 
- "Cache-Control": "max-age=0", - }, - ) - resp.raise_for_status() - - # The check for archives above only works if the url ends with - # something that looks like an archive. However that is not a - # requirement of an url. Unless we issue a HEAD request on every - # url we cannot know ahead of time for sure if something is HTML - # or not. However we can check after we've downloaded it. - _ensure_html_header(resp) - - return resp - - -def _handle_get_page_fail( - link, # type: Link - reason, # type: Union[str, Exception] - meth=None # type: Optional[Callable[..., None]] -): - # type: (...) -> None - if meth is None: - meth = logger.debug - meth("Could not fetch URL %s: %s - skipping", link, reason) - - -def _get_html_page(link, session=None): - # type: (Link, Optional[PipSession]) -> Optional[HTMLPage] - if session is None: - raise TypeError( - "_get_html_page() missing 1 required keyword argument: 'session'" - ) - - url = link.url.split('#', 1)[0] - - # Check for VCS schemes that do not support lookup as web pages. 
- vcs_scheme = _match_vcs_scheme(url) - if vcs_scheme: - logger.debug('Cannot look at %s URL %s', vcs_scheme, link) - return None - - # Tack index.html onto file:// URLs that point to directories - scheme, _, path, _, _, _ = urllib_parse.urlparse(url) - if (scheme == 'file' and os.path.isdir(urllib_request.url2pathname(path))): - # add trailing slash if not present so urljoin doesn't trim - # final segment - if not url.endswith('/'): - url += '/' - url = urllib_parse.urljoin(url, 'index.html') - logger.debug(' file: URL is directory, getting %s', url) - - try: - resp = _get_html_response(url, session=session) - except _NotHTTP as exc: - logger.debug( - 'Skipping page %s because it looks like an archive, and cannot ' - 'be checked by HEAD.', link, - ) - except _NotHTML as exc: - logger.debug( - 'Skipping page %s because the %s request got Content-Type: %s', - link, exc.request_desc, exc.content_type, - ) - except requests.HTTPError as exc: - _handle_get_page_fail(link, exc) - except RetryError as exc: - _handle_get_page_fail(link, exc) - except SSLError as exc: - reason = "There was a problem confirming the ssl certificate: " - reason += str(exc) - _handle_get_page_fail(link, reason, meth=logger.info) - except requests.ConnectionError as exc: - _handle_get_page_fail(link, "connection error: %s" % exc) - except requests.Timeout: - _handle_get_page_fail(link, "timed out") - else: - return HTMLPage(resp.content, resp.url, resp.headers) - return None - - -class PackageFinder(object): - """This finds packages. - - This is meant to match easy_install's technique for looking for - packages, by reading pages and looking for appropriate links. 
- """ - - def __init__( - self, - find_links, # type: List[str] - index_urls, # type: List[str] - allow_all_prereleases=False, # type: bool - trusted_hosts=None, # type: Optional[Iterable[str]] - session=None, # type: Optional[PipSession] - format_control=None, # type: Optional[FormatControl] - platform=None, # type: Optional[str] - versions=None, # type: Optional[List[str]] - abi=None, # type: Optional[str] - implementation=None, # type: Optional[str] - prefer_binary=False # type: bool - ): - # type: (...) -> None - """Create a PackageFinder. - - :param format_control: A FormatControl object or None. Used to control - the selection of source packages / binary packages when consulting - the index and links. - :param platform: A string or None. If None, searches for packages - that are supported by the current system. Otherwise, will find - packages that can be built on the platform passed in. These - packages will only be downloaded for distribution: they will - not be built locally. - :param versions: A list of strings or None. This is passed directly - to pep425tags.py in the get_supported() method. - :param abi: A string or None. This is passed directly - to pep425tags.py in the get_supported() method. - :param implementation: A string or None. This is passed directly - to pep425tags.py in the get_supported() method. - """ - if session is None: - raise TypeError( - "PackageFinder() missing 1 required keyword argument: " - "'session'" - ) - - # Build find_links. If an argument starts with ~, it may be - # a local file relative to a home directory. So try normalizing - # it and if it exists, use the normalized version. - # This is deliberately conservative - it might be fine just to - # blindly normalize anything starting with a ~... 
- self.find_links = [] # type: List[str] - for link in find_links: - if link.startswith('~'): - new_link = normalize_path(link) - if os.path.exists(new_link): - link = new_link - self.find_links.append(link) - - self.index_urls = index_urls - - # These are boring links that have already been logged somehow: - self.logged_links = set() # type: Set[Link] - - self.format_control = format_control or FormatControl(set(), set()) - - # Domains that we won't emit warnings for when not using HTTPS - self.secure_origins = [ - ("*", host, "*") - for host in (trusted_hosts if trusted_hosts else []) - ] # type: List[SecureOrigin] - - # Do we want to allow _all_ pre-releases? - self.allow_all_prereleases = allow_all_prereleases - - # The Session we'll use to make requests - self.session = session - - # The valid tags to check potential found wheel candidates against - self.valid_tags = get_supported( - versions=versions, - platform=platform, - abi=abi, - impl=implementation, - ) - - # Do we prefer old, but valid, binary dist over new source dist - self.prefer_binary = prefer_binary - - # If we don't have TLS enabled, then WARN if anyplace we're looking - # relies on TLS. - if not HAS_TLS: - for link in itertools.chain(self.index_urls, self.find_links): - parsed = urllib_parse.urlparse(link) - if parsed.scheme == "https": - logger.warning( - "pip is configured with locations that require " - "TLS/SSL, however the ssl module in Python is not " - "available." 
- ) - break - - def get_formatted_locations(self): - # type: () -> str - lines = [] - if self.index_urls and self.index_urls != [PyPI.simple_url]: - lines.append( - "Looking in indexes: {}".format(", ".join( - redact_password_from_url(url) for url in self.index_urls)) - ) - if self.find_links: - lines.append( - "Looking in links: {}".format(", ".join(self.find_links)) - ) - return "\n".join(lines) - - @staticmethod - def _sort_locations(locations, expand_dir=False): - # type: (Sequence[str], bool) -> Tuple[List[str], List[str]] - """ - Sort locations into "files" (archives) and "urls", and return - a pair of lists (files,urls) - """ - files = [] - urls = [] - - # puts the url for the given file path into the appropriate list - def sort_path(path): - url = path_to_url(path) - if mimetypes.guess_type(url, strict=False)[0] == 'text/html': - urls.append(url) - else: - files.append(url) - - for url in locations: - - is_local_path = os.path.exists(url) - is_file_url = url.startswith('file:') - - if is_local_path or is_file_url: - if is_local_path: - path = url - else: - path = url_to_path(url) - if os.path.isdir(path): - if expand_dir: - path = os.path.realpath(path) - for item in os.listdir(path): - sort_path(os.path.join(path, item)) - elif is_file_url: - urls.append(url) - else: - logger.warning( - "Path '{0}' is ignored: " - "it is a directory.".format(path), - ) - elif os.path.isfile(path): - sort_path(path) - else: - logger.warning( - "Url '%s' is ignored: it is neither a file " - "nor a directory.", url, - ) - elif is_url(url): - # Only add url with clear scheme - urls.append(url) - else: - logger.warning( - "Url '%s' is ignored. It is either a non-existing " - "path or lacks a specific scheme.", url, - ) - - return files, urls - - def _candidate_sort_key(self, candidate): - # type: (InstallationCandidate) -> CandidateSortingKey - """ - Function used to generate link sort key for link tuples. - The greater the return value, the more preferred it is. 
- If not finding wheels, then sorted by version only. - If finding wheels, then the sort order is by version, then: - 1. existing installs - 2. wheels ordered via Wheel.support_index_min(self.valid_tags) - 3. source archives - If prefer_binary was set, then all wheels are sorted above sources. - Note: it was considered to embed this logic into the Link - comparison operators, but then different sdist links - with the same version, would have to be considered equal - """ - support_num = len(self.valid_tags) - build_tag = tuple() # type: BuildTag - binary_preference = 0 - if candidate.location.is_wheel: - # can raise InvalidWheelFilename - wheel = Wheel(candidate.location.filename) - if not wheel.supported(self.valid_tags): - raise UnsupportedWheel( - "%s is not a supported wheel for this platform. It " - "can't be sorted." % wheel.filename - ) - if self.prefer_binary: - binary_preference = 1 - pri = -(wheel.support_index_min(self.valid_tags)) - if wheel.build_tag is not None: - match = re.match(r'^(\d+)(.*)$', wheel.build_tag) - build_tag_groups = match.groups() - build_tag = (int(build_tag_groups[0]), build_tag_groups[1]) - else: # sdist - pri = -(support_num) - return (binary_preference, candidate.version, build_tag, pri) - - def _validate_secure_origin(self, logger, location): - # type: (Logger, Link) -> bool - # Determine if this url used a secure transport mechanism - parsed = urllib_parse.urlparse(str(location)) - origin = (parsed.scheme, parsed.hostname, parsed.port) - - # The protocol to use to see if the protocol matches. - # Don't count the repository type as part of the protocol: in - # cases such as "git+ssh", only use "ssh". (I.e., Only verify against - # the last scheme.) - protocol = origin[0].rsplit('+', 1)[-1] - - # Determine if our origin is a secure origin by looking through our - # hardcoded list of secure origins, as well as any additional ones - # configured on this PackageFinder instance. 
- for secure_origin in (SECURE_ORIGINS + self.secure_origins): - if protocol != secure_origin[0] and secure_origin[0] != "*": - continue - - try: - # We need to do this decode dance to ensure that we have a - # unicode object, even on Python 2.x. - addr = ipaddress.ip_address( - origin[1] - if ( - isinstance(origin[1], six.text_type) or - origin[1] is None - ) - else origin[1].decode("utf8") - ) - network = ipaddress.ip_network( - secure_origin[1] - if isinstance(secure_origin[1], six.text_type) - # setting secure_origin[1] to proper Union[bytes, str] - # creates problems in other places - else secure_origin[1].decode("utf8") # type: ignore - ) - except ValueError: - # We don't have both a valid address or a valid network, so - # we'll check this origin against hostnames. - if (origin[1] and - origin[1].lower() != secure_origin[1].lower() and - secure_origin[1] != "*"): - continue - else: - # We have a valid address and network, so see if the address - # is contained within the network. - if addr not in network: - continue - - # Check to see if the port patches - if (origin[2] != secure_origin[2] and - secure_origin[2] != "*" and - secure_origin[2] is not None): - continue - - # If we've gotten here, then this origin matches the current - # secure origin and we should return True - return True - - # If we've gotten to this point, then the origin isn't secure and we - # will not accept it as a valid location to search. We will however - # log a warning that we are ignoring it. - logger.warning( - "The repository located at %s is not a trusted or secure host and " - "is being ignored. 
If this repository is available via HTTPS we " - "recommend you use HTTPS instead, otherwise you may silence " - "this warning and allow it anyway with '--trusted-host %s'.", - parsed.hostname, - parsed.hostname, - ) - - return False - - def _get_index_urls_locations(self, project_name): - # type: (str) -> List[str] - """Returns the locations found via self.index_urls - - Checks the url_name on the main (first in the list) index and - use this url_name to produce all locations - """ - - def mkurl_pypi_url(url): - loc = posixpath.join( - url, - urllib_parse.quote(canonicalize_name(project_name))) - # For maximum compatibility with easy_install, ensure the path - # ends in a trailing slash. Although this isn't in the spec - # (and PyPI can handle it without the slash) some other index - # implementations might break if they relied on easy_install's - # behavior. - if not loc.endswith('/'): - loc = loc + '/' - return loc - - return [mkurl_pypi_url(url) for url in self.index_urls] - - def find_all_candidates(self, project_name): - # type: (str) -> List[Optional[InstallationCandidate]] - """Find all available InstallationCandidate for project_name - - This checks index_urls and find_links. - All versions found are returned as an InstallationCandidate list. - - See _link_package_versions for details on which files are accepted - """ - index_locations = self._get_index_urls_locations(project_name) - index_file_loc, index_url_loc = self._sort_locations(index_locations) - fl_file_loc, fl_url_loc = self._sort_locations( - self.find_links, expand_dir=True, - ) - - file_locations = (Link(url) for url in itertools.chain( - index_file_loc, fl_file_loc, - )) - - # We trust every url that the user has given us whether it was given - # via --index-url or --find-links. - # We want to filter out any thing which does not have a secure origin. 
- url_locations = [ - link for link in itertools.chain( - (Link(url) for url in index_url_loc), - (Link(url) for url in fl_url_loc), - ) - if self._validate_secure_origin(logger, link) - ] - - logger.debug('%d location(s) to search for versions of %s:', - len(url_locations), project_name) - - for location in url_locations: - logger.debug('* %s', location) - - canonical_name = canonicalize_name(project_name) - formats = self.format_control.get_allowed_formats(canonical_name) - search = Search(project_name, canonical_name, formats) - find_links_versions = self._package_versions( - # We trust every directly linked archive in find_links - (Link(url, '-f') for url in self.find_links), - search - ) - - page_versions = [] - for page in self._get_pages(url_locations, project_name): - logger.debug('Analyzing links from page %s', page.url) - with indent_log(): - page_versions.extend( - self._package_versions(page.iter_links(), search) - ) - - file_versions = self._package_versions(file_locations, search) - if file_versions: - file_versions.sort(reverse=True) - logger.debug( - 'Local files found: %s', - ', '.join([ - url_to_path(candidate.location.url) - for candidate in file_versions - ]) - ) - - # This is an intentional priority ordering - return file_versions + find_links_versions + page_versions - - def find_requirement(self, req, upgrade): - # type: (InstallRequirement, bool) -> Optional[Link] - """Try to find a Link matching req - - Expects req, an InstallRequirement and upgrade, a boolean - Returns a Link if found, - Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise - """ - all_candidates = self.find_all_candidates(req.name) - - # Filter out anything which doesn't match our specifier - compatible_versions = set( - req.specifier.filter( - # We turn the version object into a str here because otherwise - # when we're debundled but setuptools isn't, Python will see - # packaging.version.Version and - # pkg_resources._vendor.packaging.version.Version as 
different - # types. This way we'll use a str as a common data interchange - # format. If we stop using the pkg_resources provided specifier - # and start using our own, we can drop the cast to str(). - [str(c.version) for c in all_candidates], - prereleases=( - self.allow_all_prereleases - if self.allow_all_prereleases else None - ), - ) - ) - applicable_candidates = [ - # Again, converting to str to deal with debundling. - c for c in all_candidates if str(c.version) in compatible_versions - ] - - if applicable_candidates: - best_candidate = max(applicable_candidates, - key=self._candidate_sort_key) - else: - best_candidate = None - - if req.satisfied_by is not None: - installed_version = parse_version(req.satisfied_by.version) - else: - installed_version = None - - if installed_version is None and best_candidate is None: - logger.critical( - 'Could not find a version that satisfies the requirement %s ' - '(from versions: %s)', - req, - ', '.join( - sorted( - {str(c.version) for c in all_candidates}, - key=parse_version, - ) - ) - ) - - raise DistributionNotFound( - 'No matching distribution found for %s' % req - ) - - best_installed = False - if installed_version and ( - best_candidate is None or - best_candidate.version <= installed_version): - best_installed = True - - if not upgrade and installed_version is not None: - if best_installed: - logger.debug( - 'Existing installed version (%s) is most up-to-date and ' - 'satisfies requirement', - installed_version, - ) - else: - logger.debug( - 'Existing installed version (%s) satisfies requirement ' - '(most up-to-date version is %s)', - installed_version, - best_candidate.version, - ) - return None - - if best_installed: - # We have an existing version, and its the best version - logger.debug( - 'Installed version (%s) is most up-to-date (past versions: ' - '%s)', - installed_version, - ', '.join(sorted(compatible_versions, key=parse_version)) or - "none", - ) - raise BestVersionAlreadyInstalled - - logger.debug( 
- 'Using version %s (newest of versions: %s)', - best_candidate.version, - ', '.join(sorted(compatible_versions, key=parse_version)) - ) - return best_candidate.location - - def _get_pages(self, locations, project_name): - # type: (Iterable[Link], str) -> Iterable[HTMLPage] - """ - Yields (page, page_url) from the given locations, skipping - locations that have errors. - """ - seen = set() # type: Set[Link] - for location in locations: - if location in seen: - continue - seen.add(location) - - page = _get_html_page(location, session=self.session) - if page is None: - continue - - yield page - - _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$') - - def _sort_links(self, links): - # type: (Iterable[Link]) -> List[Link] - """ - Returns elements of links in order, non-egg links first, egg links - second, while eliminating duplicates - """ - eggs, no_eggs = [], [] - seen = set() # type: Set[Link] - for link in links: - if link not in seen: - seen.add(link) - if link.egg_fragment: - eggs.append(link) - else: - no_eggs.append(link) - return no_eggs + eggs - - def _package_versions( - self, - links, # type: Iterable[Link] - search # type: Search - ): - # type: (...) 
-> List[Optional[InstallationCandidate]] - result = [] - for link in self._sort_links(links): - v = self._link_package_versions(link, search) - if v is not None: - result.append(v) - return result - - def _log_skipped_link(self, link, reason): - # type: (Link, str) -> None - if link not in self.logged_links: - logger.debug('Skipping link %s; %s', link, reason) - self.logged_links.add(link) - - def _link_package_versions(self, link, search): - # type: (Link, Search) -> Optional[InstallationCandidate] - """Return an InstallationCandidate or None""" - version = None - if link.egg_fragment: - egg_info = link.egg_fragment - ext = link.ext - else: - egg_info, ext = link.splitext() - if not ext: - self._log_skipped_link(link, 'not a file') - return None - if ext not in SUPPORTED_EXTENSIONS: - self._log_skipped_link( - link, 'unsupported archive format: %s' % ext, - ) - return None - if "binary" not in search.formats and ext == WHEEL_EXTENSION: - self._log_skipped_link( - link, 'No binaries permitted for %s' % search.supplied, - ) - return None - if "macosx10" in link.path and ext == '.zip': - self._log_skipped_link(link, 'macosx10 one') - return None - if ext == WHEEL_EXTENSION: - try: - wheel = Wheel(link.filename) - except InvalidWheelFilename: - self._log_skipped_link(link, 'invalid wheel filename') - return None - if canonicalize_name(wheel.name) != search.canonical: - self._log_skipped_link( - link, 'wrong project name (not %s)' % search.supplied) - return None - - if not wheel.supported(self.valid_tags): - self._log_skipped_link( - link, 'it is not compatible with this Python') - return None - - version = wheel.version - - # This should be up by the search.ok_binary check, but see issue 2700. 
- if "source" not in search.formats and ext != WHEEL_EXTENSION: - self._log_skipped_link( - link, 'No sources permitted for %s' % search.supplied, - ) - return None - - if not version: - version = _egg_info_matches(egg_info, search.canonical) - if not version: - self._log_skipped_link( - link, 'Missing project version for %s' % search.supplied) - return None - - match = self._py_version_re.search(version) - if match: - version = version[:match.start()] - py_version = match.group(1) - if py_version != sys.version[:3]: - self._log_skipped_link( - link, 'Python version is incorrect') - return None - try: - support_this_python = check_requires_python(link.requires_python) - except specifiers.InvalidSpecifier: - logger.debug("Package %s has an invalid Requires-Python entry: %s", - link.filename, link.requires_python) - support_this_python = True - - if not support_this_python: - logger.debug("The package %s is incompatible with the python " - "version in use. Acceptable python versions are: %s", - link, link.requires_python) - return None - logger.debug('Found link %s, version: %s', link, version) - - return InstallationCandidate(search.supplied, version, link) - - -def _find_name_version_sep(egg_info, canonical_name): - # type: (str, str) -> int - """Find the separator's index based on the package's canonical name. - - `egg_info` must be an egg info string for the given package, and - `canonical_name` must be the package's canonical name. - - This function is needed since the canonicalized name does not necessarily - have the same length as the egg info's name part. An example:: - - >>> egg_info = 'foo__bar-1.0' - >>> canonical_name = 'foo-bar' - >>> _find_name_version_sep(egg_info, canonical_name) - 8 - """ - # Project name and version must be separated by one single dash. Find all - # occurrences of dashes; if the string in front of it matches the canonical - # name, this is the one separating the name and version parts. 
- for i, c in enumerate(egg_info): - if c != "-": - continue - if canonicalize_name(egg_info[:i]) == canonical_name: - return i - raise ValueError("{} does not match {}".format(egg_info, canonical_name)) - - -def _egg_info_matches(egg_info, canonical_name): - # type: (str, str) -> Optional[str] - """Pull the version part out of a string. - - :param egg_info: The string to parse. E.g. foo-2.1 - :param canonical_name: The canonicalized name of the package this - belongs to. - """ - try: - version_start = _find_name_version_sep(egg_info, canonical_name) + 1 - except ValueError: - return None - version = egg_info[version_start:] - if not version: - return None - return version - - -def _determine_base_url(document, page_url): - """Determine the HTML document's base URL. - - This looks for a ``<base>`` tag in the HTML document. If present, its href - attribute denotes the base URL of anchor tags in the document. If there is - no such tag (or if it does not have a valid href attribute), the HTML - file's URL is used as the base URL. - - :param document: An HTML document representation. The current - implementation expects the result of ``html5lib.parse()``. - :param page_url: The URL of the HTML document. - """ - for base in document.findall(".//base"): - href = base.get("href") - if href is not None: - return href - return page_url - - -def _get_encoding_from_headers(headers): - """Determine if we have any encoding information in our headers. - """ - if headers and "Content-Type" in headers: - content_type, params = cgi.parse_header(headers["Content-Type"]) - if "charset" in params: - return params['charset'] - return None - - -_CLEAN_LINK_RE = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) - - -def _clean_link(url): - # type: (str) -> str - """Makes sure a link is fully encoded. 
That is, if a ' ' shows up in - the link, it will be rewritten to %20 (while not over-quoting - % or other characters).""" - return _CLEAN_LINK_RE.sub(lambda match: '%%%2x' % ord(match.group(0)), url) - - -class HTMLPage(object): - """Represents one page, along with its URL""" - - def __init__(self, content, url, headers=None): - # type: (bytes, str, MutableMapping[str, str]) -> None - self.content = content - self.url = url - self.headers = headers - - def __str__(self): - return redact_password_from_url(self.url) - - def iter_links(self): - # type: () -> Iterable[Link] - """Yields all links in the page""" - document = html5lib.parse( - self.content, - transport_encoding=_get_encoding_from_headers(self.headers), - namespaceHTMLElements=False, - ) - base_url = _determine_base_url(document, self.url) - for anchor in document.findall(".//a"): - if anchor.get("href"): - href = anchor.get("href") - url = _clean_link(urllib_parse.urljoin(base_url, href)) - pyrequire = anchor.get('data-requires-python') - pyrequire = unescape(pyrequire) if pyrequire else None - yield Link(url, self.url, requires_python=pyrequire) - - -Search = namedtuple('Search', 'supplied canonical formats') -"""Capture key aspects of a search. - -:attribute supplied: The user supplied package. -:attribute canonical: The canonical package name. -:attribute formats: The formats allowed for this package. Should be a set - with 'binary' or 'source' or both in it. 
-""" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/index/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/index/__init__.py new file mode 100644 index 00000000..7a17b7b3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/index/__init__.py @@ -0,0 +1,2 @@ +"""Index interaction code +""" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/index/collector.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/index/collector.py new file mode 100644 index 00000000..6c35fc66 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/index/collector.py @@ -0,0 +1,692 @@ +""" +The main purpose of this module is to expose LinkCollector.collect_links(). +""" + +import cgi +import functools +import itertools +import logging +import mimetypes +import os +import re +from collections import OrderedDict + +from pip._vendor import html5lib, requests +from pip._vendor.distlib.compat import unescape +from pip._vendor.requests.exceptions import RetryError, SSLError +from pip._vendor.six.moves.urllib import parse as urllib_parse +from pip._vendor.six.moves.urllib import request as urllib_request + +from pip._internal.exceptions import NetworkConnectionError +from pip._internal.models.link import Link +from pip._internal.models.search_scope import SearchScope +from pip._internal.network.utils import raise_for_status +from pip._internal.utils.filetypes import ARCHIVE_EXTENSIONS +from pip._internal.utils.misc import pairwise, redact_auth_from_url +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import path_to_url, url_to_path +from pip._internal.vcs import is_url, vcs + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import ( + Callable, Iterable, List, MutableMapping, Optional, + Protocol, Sequence, Tuple, TypeVar, Union, + 
) + import xml.etree.ElementTree + + from pip._vendor.requests import Response + + from pip._internal.network.session import PipSession + + HTMLElement = xml.etree.ElementTree.Element + ResponseHeaders = MutableMapping[str, str] + + # Used in the @lru_cache polyfill. + F = TypeVar('F') + + class LruCache(Protocol): + def __call__(self, maxsize=None): + # type: (Optional[int]) -> Callable[[F], F] + raise NotImplementedError + + +logger = logging.getLogger(__name__) + + +# Fallback to noop_lru_cache in Python 2 +# TODO: this can be removed when python 2 support is dropped! +def noop_lru_cache(maxsize=None): + # type: (Optional[int]) -> Callable[[F], F] + def _wrapper(f): + # type: (F) -> F + return f + return _wrapper + + +_lru_cache = getattr(functools, "lru_cache", noop_lru_cache) # type: LruCache + + +def _match_vcs_scheme(url): + # type: (str) -> Optional[str] + """Look for VCS schemes in the URL. + + Returns the matched VCS scheme, or None if there's no match. + """ + for scheme in vcs.schemes: + if url.lower().startswith(scheme) and url[len(scheme)] in '+:': + return scheme + return None + + +def _is_url_like_archive(url): + # type: (str) -> bool + """Return whether the URL looks like an archive. + """ + filename = Link(url).filename + for bad_ext in ARCHIVE_EXTENSIONS: + if filename.endswith(bad_ext): + return True + return False + + +class _NotHTML(Exception): + def __init__(self, content_type, request_desc): + # type: (str, str) -> None + super(_NotHTML, self).__init__(content_type, request_desc) + self.content_type = content_type + self.request_desc = request_desc + + +def _ensure_html_header(response): + # type: (Response) -> None + """Check the Content-Type header to ensure the response contains HTML. + + Raises `_NotHTML` if the content type is not text/html. 
+ """ + content_type = response.headers.get("Content-Type", "") + if not content_type.lower().startswith("text/html"): + raise _NotHTML(content_type, response.request.method) + + +class _NotHTTP(Exception): + pass + + +def _ensure_html_response(url, session): + # type: (str, PipSession) -> None + """Send a HEAD request to the URL, and ensure the response contains HTML. + + Raises `_NotHTTP` if the URL is not available for a HEAD request, or + `_NotHTML` if the content type is not text/html. + """ + scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url) + if scheme not in {'http', 'https'}: + raise _NotHTTP() + + resp = session.head(url, allow_redirects=True) + raise_for_status(resp) + + _ensure_html_header(resp) + + +def _get_html_response(url, session): + # type: (str, PipSession) -> Response + """Access an HTML page with GET, and return the response. + + This consists of three parts: + + 1. If the URL looks suspiciously like an archive, send a HEAD first to + check the Content-Type is HTML, to avoid downloading a large file. + Raise `_NotHTTP` if the content type cannot be determined, or + `_NotHTML` if it is not HTML. + 2. Actually perform the request. Raise HTTP exceptions on network failures. + 3. Check the Content-Type header to make sure we got HTML, and raise + `_NotHTML` otherwise. + """ + if _is_url_like_archive(url): + _ensure_html_response(url, session=session) + + logger.debug('Getting page %s', redact_auth_from_url(url)) + + resp = session.get( + url, + headers={ + "Accept": "text/html", + # We don't want to blindly returned cached data for + # /simple/, because authors generally expecting that + # twine upload && pip install will function, but if + # they've done a pip install in the last ~10 minutes + # it won't. 
Thus by setting this to zero we will not + # blindly use any cached data, however the benefit of + # using max-age=0 instead of no-cache, is that we will + # still support conditional requests, so we will still + # minimize traffic sent in cases where the page hasn't + # changed at all, we will just always incur the round + # trip for the conditional GET now instead of only + # once per 10 minutes. + # For more information, please see pypa/pip#5670. + "Cache-Control": "max-age=0", + }, + ) + raise_for_status(resp) + + # The check for archives above only works if the url ends with + # something that looks like an archive. However that is not a + # requirement of an url. Unless we issue a HEAD request on every + # url we cannot know ahead of time for sure if something is HTML + # or not. However we can check after we've downloaded it. + _ensure_html_header(resp) + + return resp + + +def _get_encoding_from_headers(headers): + # type: (ResponseHeaders) -> Optional[str] + """Determine if we have any encoding information in our headers. + """ + if headers and "Content-Type" in headers: + content_type, params = cgi.parse_header(headers["Content-Type"]) + if "charset" in params: + return params['charset'] + return None + + +def _determine_base_url(document, page_url): + # type: (HTMLElement, str) -> str + """Determine the HTML document's base URL. + + This looks for a ``<base>`` tag in the HTML document. If present, its href + attribute denotes the base URL of anchor tags in the document. If there is + no such tag (or if it does not have a valid href attribute), the HTML + file's URL is used as the base URL. + + :param document: An HTML document representation. The current + implementation expects the result of ``html5lib.parse()``. + :param page_url: The URL of the HTML document. 
+ """ + for base in document.findall(".//base"): + href = base.get("href") + if href is not None: + return href + return page_url + + +def _clean_url_path_part(part): + # type: (str) -> str + """ + Clean a "part" of a URL path (i.e. after splitting on "@" characters). + """ + # We unquote prior to quoting to make sure nothing is double quoted. + return urllib_parse.quote(urllib_parse.unquote(part)) + + +def _clean_file_url_path(part): + # type: (str) -> str + """ + Clean the first part of a URL path that corresponds to a local + filesystem path (i.e. the first part after splitting on "@" characters). + """ + # We unquote prior to quoting to make sure nothing is double quoted. + # Also, on Windows the path part might contain a drive letter which + # should not be quoted. On Linux where drive letters do not + # exist, the colon should be quoted. We rely on urllib.request + # to do the right thing here. + return urllib_request.pathname2url(urllib_request.url2pathname(part)) + + +# percent-encoded: / +_reserved_chars_re = re.compile('(@|%2F)', re.IGNORECASE) + + +def _clean_url_path(path, is_local_path): + # type: (str, bool) -> str + """ + Clean the path portion of a URL. + """ + if is_local_path: + clean_func = _clean_file_url_path + else: + clean_func = _clean_url_path_part + + # Split on the reserved characters prior to cleaning so that + # revision strings in VCS URLs are properly preserved. + parts = _reserved_chars_re.split(path) + + cleaned_parts = [] + for to_clean, reserved in pairwise(itertools.chain(parts, [''])): + cleaned_parts.append(clean_func(to_clean)) + # Normalize %xx escapes (e.g. %2f -> %2F) + cleaned_parts.append(reserved.upper()) + + return ''.join(cleaned_parts) + + +def _clean_link(url): + # type: (str) -> str + """ + Make sure a link is fully quoted. + For example, if ' ' occurs in the URL, it will be replaced with "%20", + and without double-quoting other characters. 
+ """ + # Split the URL into parts according to the general structure + # `scheme://netloc/path;parameters?query#fragment`. + result = urllib_parse.urlparse(url) + # If the netloc is empty, then the URL refers to a local filesystem path. + is_local_path = not result.netloc + path = _clean_url_path(result.path, is_local_path=is_local_path) + return urllib_parse.urlunparse(result._replace(path=path)) + + +def _create_link_from_element( + anchor, # type: HTMLElement + page_url, # type: str + base_url, # type: str +): + # type: (...) -> Optional[Link] + """ + Convert an anchor element in a simple repository page to a Link. + """ + href = anchor.get("href") + if not href: + return None + + url = _clean_link(urllib_parse.urljoin(base_url, href)) + pyrequire = anchor.get('data-requires-python') + pyrequire = unescape(pyrequire) if pyrequire else None + + yanked_reason = anchor.get('data-yanked') + if yanked_reason: + # This is a unicode string in Python 2 (and 3). + yanked_reason = unescape(yanked_reason) + + link = Link( + url, + comes_from=page_url, + requires_python=pyrequire, + yanked_reason=yanked_reason, + ) + + return link + + +class CacheablePageContent(object): + def __init__(self, page): + # type: (HTMLPage) -> None + assert page.cache_link_parsing + self.page = page + + def __eq__(self, other): + # type: (object) -> bool + return (isinstance(other, type(self)) and + self.page.url == other.page.url) + + def __hash__(self): + # type: () -> int + return hash(self.page.url) + + +def with_cached_html_pages( + fn, # type: Callable[[HTMLPage], Iterable[Link]] +): + # type: (...) -> Callable[[HTMLPage], List[Link]] + """ + Given a function that parses an Iterable[Link] from an HTMLPage, cache the + function's result (keyed by CacheablePageContent), unless the HTMLPage + `page` has `page.cache_link_parsing == False`. 
+ """ + + @_lru_cache(maxsize=None) + def wrapper(cacheable_page): + # type: (CacheablePageContent) -> List[Link] + return list(fn(cacheable_page.page)) + + @functools.wraps(fn) + def wrapper_wrapper(page): + # type: (HTMLPage) -> List[Link] + if page.cache_link_parsing: + return wrapper(CacheablePageContent(page)) + return list(fn(page)) + + return wrapper_wrapper + + +@with_cached_html_pages +def parse_links(page): + # type: (HTMLPage) -> Iterable[Link] + """ + Parse an HTML document, and yield its anchor elements as Link objects. + """ + document = html5lib.parse( + page.content, + transport_encoding=page.encoding, + namespaceHTMLElements=False, + ) + + url = page.url + base_url = _determine_base_url(document, url) + for anchor in document.findall(".//a"): + link = _create_link_from_element( + anchor, + page_url=url, + base_url=base_url, + ) + if link is None: + continue + yield link + + +class HTMLPage(object): + """Represents one page, along with its URL""" + + def __init__( + self, + content, # type: bytes + encoding, # type: Optional[str] + url, # type: str + cache_link_parsing=True, # type: bool + ): + # type: (...) -> None + """ + :param encoding: the encoding to decode the given content. + :param url: the URL from which the HTML was downloaded. + :param cache_link_parsing: whether links parsed from this page's url + should be cached. PyPI index urls should + have this set to False, for example. + """ + self.content = content + self.encoding = encoding + self.url = url + self.cache_link_parsing = cache_link_parsing + + def __str__(self): + # type: () -> str + return redact_auth_from_url(self.url) + + +def _handle_get_page_fail( + link, # type: Link + reason, # type: Union[str, Exception] + meth=None # type: Optional[Callable[..., None]] +): + # type: (...) 
-> None + if meth is None: + meth = logger.debug + meth("Could not fetch URL %s: %s - skipping", link, reason) + + +def _make_html_page(response, cache_link_parsing=True): + # type: (Response, bool) -> HTMLPage + encoding = _get_encoding_from_headers(response.headers) + return HTMLPage( + response.content, + encoding=encoding, + url=response.url, + cache_link_parsing=cache_link_parsing) + + +def _get_html_page(link, session=None): + # type: (Link, Optional[PipSession]) -> Optional[HTMLPage] + if session is None: + raise TypeError( + "_get_html_page() missing 1 required keyword argument: 'session'" + ) + + url = link.url.split('#', 1)[0] + + # Check for VCS schemes that do not support lookup as web pages. + vcs_scheme = _match_vcs_scheme(url) + if vcs_scheme: + logger.warning('Cannot look at %s URL %s because it does not support ' + 'lookup as web pages.', vcs_scheme, link) + return None + + # Tack index.html onto file:// URLs that point to directories + scheme, _, path, _, _, _ = urllib_parse.urlparse(url) + if (scheme == 'file' and os.path.isdir(urllib_request.url2pathname(path))): + # add trailing slash if not present so urljoin doesn't trim + # final segment + if not url.endswith('/'): + url += '/' + url = urllib_parse.urljoin(url, 'index.html') + logger.debug(' file: URL is directory, getting %s', url) + + try: + resp = _get_html_response(url, session=session) + except _NotHTTP: + logger.warning( + 'Skipping page %s because it looks like an archive, and cannot ' + 'be checked by a HTTP HEAD request.', link, + ) + except _NotHTML as exc: + logger.warning( + 'Skipping page %s because the %s request got Content-Type: %s.' 
+ 'The only supported Content-Type is text/html', + link, exc.request_desc, exc.content_type, + ) + except NetworkConnectionError as exc: + _handle_get_page_fail(link, exc) + except RetryError as exc: + _handle_get_page_fail(link, exc) + except SSLError as exc: + reason = "There was a problem confirming the ssl certificate: " + reason += str(exc) + _handle_get_page_fail(link, reason, meth=logger.info) + except requests.ConnectionError as exc: + _handle_get_page_fail(link, "connection error: {}".format(exc)) + except requests.Timeout: + _handle_get_page_fail(link, "timed out") + else: + return _make_html_page(resp, + cache_link_parsing=link.cache_link_parsing) + return None + + +def _remove_duplicate_links(links): + # type: (Iterable[Link]) -> List[Link] + """ + Return a list of links, with duplicates removed and ordering preserved. + """ + # We preserve the ordering when removing duplicates because we can. + return list(OrderedDict.fromkeys(links)) + + +def group_locations(locations, expand_dir=False): + # type: (Sequence[str], bool) -> Tuple[List[str], List[str]] + """ + Divide a list of locations into two groups: "files" (archives) and "urls." + + :return: A pair of lists (files, urls). 
+ """ + files = [] + urls = [] + + # puts the url for the given file path into the appropriate list + def sort_path(path): + # type: (str) -> None + url = path_to_url(path) + if mimetypes.guess_type(url, strict=False)[0] == 'text/html': + urls.append(url) + else: + files.append(url) + + for url in locations: + + is_local_path = os.path.exists(url) + is_file_url = url.startswith('file:') + + if is_local_path or is_file_url: + if is_local_path: + path = url + else: + path = url_to_path(url) + if os.path.isdir(path): + if expand_dir: + path = os.path.realpath(path) + for item in os.listdir(path): + sort_path(os.path.join(path, item)) + elif is_file_url: + urls.append(url) + else: + logger.warning( + "Path '%s' is ignored: it is a directory.", path, + ) + elif os.path.isfile(path): + sort_path(path) + else: + logger.warning( + "Url '%s' is ignored: it is neither a file " + "nor a directory.", url, + ) + elif is_url(url): + # Only add url with clear scheme + urls.append(url) + else: + logger.warning( + "Url '%s' is ignored. It is either a non-existing " + "path or lacks a specific scheme.", url, + ) + + return files, urls + + +class CollectedLinks(object): + + """ + Encapsulates the return value of a call to LinkCollector.collect_links(). + + The return value includes both URLs to project pages containing package + links, as well as individual package Link objects collected from other + sources. + + This info is stored separately as: + + (1) links from the configured file locations, + (2) links from the configured find_links, and + (3) urls to HTML project pages, as described by the PEP 503 simple + repository API. + """ + + def __init__( + self, + files, # type: List[Link] + find_links, # type: List[Link] + project_urls, # type: List[Link] + ): + # type: (...) -> None + """ + :param files: Links from file locations. + :param find_links: Links from find_links. + :param project_urls: URLs to HTML project pages, as described by + the PEP 503 simple repository API. 
+ """ + self.files = files + self.find_links = find_links + self.project_urls = project_urls + + +class LinkCollector(object): + + """ + Responsible for collecting Link objects from all configured locations, + making network requests as needed. + + The class's main method is its collect_links() method. + """ + + def __init__( + self, + session, # type: PipSession + search_scope, # type: SearchScope + ): + # type: (...) -> None + self.search_scope = search_scope + self.session = session + + @classmethod + def create(cls, session, options, suppress_no_index=False): + # type: (PipSession, Values, bool) -> LinkCollector + """ + :param session: The Session to use to make requests. + :param suppress_no_index: Whether to ignore the --no-index option + when constructing the SearchScope object. + """ + index_urls = [options.index_url] + options.extra_index_urls + if options.no_index and not suppress_no_index: + logger.debug( + 'Ignoring indexes: %s', + ','.join(redact_auth_from_url(url) for url in index_urls), + ) + index_urls = [] + + # Make sure find_links is a list before passing to create(). + find_links = options.find_links or [] + + search_scope = SearchScope.create( + find_links=find_links, index_urls=index_urls, + ) + link_collector = LinkCollector( + session=session, search_scope=search_scope, + ) + return link_collector + + @property + def find_links(self): + # type: () -> List[str] + return self.search_scope.find_links + + def fetch_page(self, location): + # type: (Link) -> Optional[HTMLPage] + """ + Fetch an HTML page containing package links. + """ + return _get_html_page(location, session=self.session) + + def collect_links(self, project_name): + # type: (str) -> CollectedLinks + """Find all available links for the given project name. + + :return: All the Link objects (unfiltered), as a CollectedLinks object. 
+ """ + search_scope = self.search_scope + index_locations = search_scope.get_index_urls_locations(project_name) + index_file_loc, index_url_loc = group_locations(index_locations) + fl_file_loc, fl_url_loc = group_locations( + self.find_links, expand_dir=True, + ) + + file_links = [ + Link(url) for url in itertools.chain(index_file_loc, fl_file_loc) + ] + + # We trust every directly linked archive in find_links + find_link_links = [Link(url, '-f') for url in self.find_links] + + # We trust every url that the user has given us whether it was given + # via --index-url or --find-links. + # We want to filter out anything that does not have a secure origin. + url_locations = [ + link for link in itertools.chain( + # Mark PyPI indices as "cache_link_parsing == False" -- this + # will avoid caching the result of parsing the page for links. + (Link(url, cache_link_parsing=False) for url in index_url_loc), + (Link(url) for url in fl_url_loc), + ) + if self.session.is_secure_origin(link) + ] + + url_locations = _remove_duplicate_links(url_locations) + lines = [ + '{} location(s) to search for versions of {}:'.format( + len(url_locations), project_name, + ), + ] + for link in url_locations: + lines.append('* {}'.format(link)) + logger.debug('\n'.join(lines)) + + return CollectedLinks( + files=file_links, + find_links=find_link_links, + project_urls=url_locations, + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/index/package_finder.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/index/package_finder.py new file mode 100644 index 00000000..84115783 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/index/package_finder.py @@ -0,0 +1,1014 @@ +"""Routines related to PyPI, indexes""" + +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False + +from __future__ import absolute_import + +import logging +import re + +from pip._vendor.packaging import specifiers +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.exceptions import ( + BestVersionAlreadyInstalled, + DistributionNotFound, + InvalidWheelFilename, + UnsupportedWheel, +) +from pip._internal.index.collector import parse_links +from pip._internal.models.candidate import InstallationCandidate +from pip._internal.models.format_control import FormatControl +from pip._internal.models.link import Link +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.models.target_python import TargetPython +from pip._internal.models.wheel import Wheel +from pip._internal.utils.filetypes import WHEEL_EXTENSION +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import build_netloc +from pip._internal.utils.packaging import check_requires_python +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS +from pip._internal.utils.urls import url_to_path + +if MYPY_CHECK_RUNNING: + from typing import ( + FrozenSet, Iterable, List, Optional, Set, Text, Tuple, Union, + ) + + from pip._vendor.packaging.tags import Tag + from pip._vendor.packaging.version import _BaseVersion + + from pip._internal.index.collector import LinkCollector + from pip._internal.models.search_scope import SearchScope + from pip._internal.req import InstallRequirement + from pip._internal.utils.hashes import Hashes + + BuildTag = Union[Tuple[()], Tuple[int, str]] + CandidateSortingKey = ( + Tuple[int, int, int, _BaseVersion, BuildTag, Optional[int]] + ) + + +__all__ = ['FormatControl', 'BestCandidateResult', 'PackageFinder'] + + +logger = logging.getLogger(__name__) + + +def _check_link_requires_python( + link, # type: Link + version_info, # 
type: Tuple[int, int, int] + ignore_requires_python=False, # type: bool +): + # type: (...) -> bool + """ + Return whether the given Python version is compatible with a link's + "Requires-Python" value. + + :param version_info: A 3-tuple of ints representing the Python + major-minor-micro version to check. + :param ignore_requires_python: Whether to ignore the "Requires-Python" + value if the given Python version isn't compatible. + """ + try: + is_compatible = check_requires_python( + link.requires_python, version_info=version_info, + ) + except specifiers.InvalidSpecifier: + logger.debug( + "Ignoring invalid Requires-Python (%r) for link: %s", + link.requires_python, link, + ) + else: + if not is_compatible: + version = '.'.join(map(str, version_info)) + if not ignore_requires_python: + logger.debug( + 'Link requires a different Python (%s not in: %r): %s', + version, link.requires_python, link, + ) + return False + + logger.debug( + 'Ignoring failed Requires-Python check (%s not in: %r) ' + 'for link: %s', + version, link.requires_python, link, + ) + + return True + + +class LinkEvaluator(object): + + """ + Responsible for evaluating links for a particular project. + """ + + _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$') + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. + def __init__( + self, + project_name, # type: str + canonical_name, # type: str + formats, # type: FrozenSet[str] + target_python, # type: TargetPython + allow_yanked, # type: bool + ignore_requires_python=None, # type: Optional[bool] + ): + # type: (...) -> None + """ + :param project_name: The user supplied package name. + :param canonical_name: The canonical package name. + :param formats: The formats allowed for this package. Should be a set + with 'binary' or 'source' or both in it. 
+ :param target_python: The target Python interpreter to use when + evaluating link compatibility. This is used, for example, to + check wheel compatibility, as well as when checking the Python + version, e.g. the Python version embedded in a link filename + (or egg fragment) and against an HTML link's optional PEP 503 + "data-requires-python" attribute. + :param allow_yanked: Whether files marked as yanked (in the sense + of PEP 592) are permitted to be candidates for install. + :param ignore_requires_python: Whether to ignore incompatible + PEP 503 "data-requires-python" values in HTML links. Defaults + to False. + """ + if ignore_requires_python is None: + ignore_requires_python = False + + self._allow_yanked = allow_yanked + self._canonical_name = canonical_name + self._ignore_requires_python = ignore_requires_python + self._formats = formats + self._target_python = target_python + + self.project_name = project_name + + def evaluate_link(self, link): + # type: (Link) -> Tuple[bool, Optional[Text]] + """ + Determine whether a link is a candidate for installation. + + :return: A tuple (is_candidate, result), where `result` is (1) a + version string if `is_candidate` is True, and (2) if + `is_candidate` is False, an optional string to log the reason + the link fails to qualify. + """ + version = None + if link.is_yanked and not self._allow_yanked: + reason = link.yanked_reason or '<none given>' + # Mark this as a unicode string to prevent "UnicodeEncodeError: + # 'ascii' codec can't encode character" in Python 2 when + # the reason contains non-ascii characters. 
+ return (False, u'yanked for reason: {}'.format(reason)) + + if link.egg_fragment: + egg_info = link.egg_fragment + ext = link.ext + else: + egg_info, ext = link.splitext() + if not ext: + return (False, 'not a file') + if ext not in SUPPORTED_EXTENSIONS: + return (False, 'unsupported archive format: {}'.format(ext)) + if "binary" not in self._formats and ext == WHEEL_EXTENSION: + reason = 'No binaries permitted for {}'.format( + self.project_name) + return (False, reason) + if "macosx10" in link.path and ext == '.zip': + return (False, 'macosx10 one') + if ext == WHEEL_EXTENSION: + try: + wheel = Wheel(link.filename) + except InvalidWheelFilename: + return (False, 'invalid wheel filename') + if canonicalize_name(wheel.name) != self._canonical_name: + reason = 'wrong project name (not {})'.format( + self.project_name) + return (False, reason) + + supported_tags = self._target_python.get_tags() + if not wheel.supported(supported_tags): + # Include the wheel's tags in the reason string to + # simplify troubleshooting compatibility issues. + file_tags = wheel.get_formatted_file_tags() + reason = ( + "none of the wheel's tags match: {}".format( + ', '.join(file_tags) + ) + ) + return (False, reason) + + version = wheel.version + + # This should be up by the self.ok_binary check, but see issue 2700. 
+ if "source" not in self._formats and ext != WHEEL_EXTENSION: + reason = 'No sources permitted for {}'.format(self.project_name) + return (False, reason) + + if not version: + version = _extract_version_from_fragment( + egg_info, self._canonical_name, + ) + if not version: + reason = 'Missing project version for {}'.format(self.project_name) + return (False, reason) + + match = self._py_version_re.search(version) + if match: + version = version[:match.start()] + py_version = match.group(1) + if py_version != self._target_python.py_version: + return (False, 'Python version is incorrect') + + supports_python = _check_link_requires_python( + link, version_info=self._target_python.py_version_info, + ignore_requires_python=self._ignore_requires_python, + ) + if not supports_python: + # Return None for the reason text to suppress calling + # _log_skipped_link(). + return (False, None) + + logger.debug('Found link %s, version: %s', link, version) + + return (True, version) + + +def filter_unallowed_hashes( + candidates, # type: List[InstallationCandidate] + hashes, # type: Hashes + project_name, # type: str +): + # type: (...) -> List[InstallationCandidate] + """ + Filter out candidates whose hashes aren't allowed, and return a new + list of candidates. + + If at least one candidate has an allowed hash, then all candidates with + either an allowed hash or no hash specified are returned. Otherwise, + the given candidates are returned. + + Including the candidates with no hash specified when there is a match + allows a warning to be logged if there is a more preferred candidate + with no hash specified. Returning all candidates in the case of no + matches lets pip report the hash of the candidate that would otherwise + have been installed (e.g. permitting the user to more easily update + their requirements file with the desired hash). 
+ """ + if not hashes: + logger.debug( + 'Given no hashes to check %s links for project %r: ' + 'discarding no candidates', + len(candidates), + project_name, + ) + # Make sure we're not returning back the given value. + return list(candidates) + + matches_or_no_digest = [] + # Collect the non-matches for logging purposes. + non_matches = [] + match_count = 0 + for candidate in candidates: + link = candidate.link + if not link.has_hash: + pass + elif link.is_hash_allowed(hashes=hashes): + match_count += 1 + else: + non_matches.append(candidate) + continue + + matches_or_no_digest.append(candidate) + + if match_count: + filtered = matches_or_no_digest + else: + # Make sure we're not returning back the given value. + filtered = list(candidates) + + if len(filtered) == len(candidates): + discard_message = 'discarding no candidates' + else: + discard_message = 'discarding {} non-matches:\n {}'.format( + len(non_matches), + '\n '.join(str(candidate.link) for candidate in non_matches) + ) + + logger.debug( + 'Checked %s links for project %r against %s hashes ' + '(%s matches, %s no digest): %s', + len(candidates), + project_name, + hashes.digest_count, + match_count, + len(matches_or_no_digest) - match_count, + discard_message + ) + + return filtered + + +class CandidatePreferences(object): + + """ + Encapsulates some of the preferences for filtering and sorting + InstallationCandidate objects. + """ + + def __init__( + self, + prefer_binary=False, # type: bool + allow_all_prereleases=False, # type: bool + ): + # type: (...) -> None + """ + :param allow_all_prereleases: Whether to allow all pre-releases. + """ + self.allow_all_prereleases = allow_all_prereleases + self.prefer_binary = prefer_binary + + +class BestCandidateResult(object): + """A collection of candidates, returned by `PackageFinder.find_best_candidate`. + + This class is only intended to be instantiated by CandidateEvaluator's + `compute_best_candidate()` method. 
+ """ + + def __init__( + self, + candidates, # type: List[InstallationCandidate] + applicable_candidates, # type: List[InstallationCandidate] + best_candidate, # type: Optional[InstallationCandidate] + ): + # type: (...) -> None + """ + :param candidates: A sequence of all available candidates found. + :param applicable_candidates: The applicable candidates. + :param best_candidate: The most preferred candidate found, or None + if no applicable candidates were found. + """ + assert set(applicable_candidates) <= set(candidates) + + if best_candidate is None: + assert not applicable_candidates + else: + assert best_candidate in applicable_candidates + + self._applicable_candidates = applicable_candidates + self._candidates = candidates + + self.best_candidate = best_candidate + + def iter_all(self): + # type: () -> Iterable[InstallationCandidate] + """Iterate through all candidates. + """ + return iter(self._candidates) + + def iter_applicable(self): + # type: () -> Iterable[InstallationCandidate] + """Iterate through the applicable candidates. + """ + return iter(self._applicable_candidates) + + +class CandidateEvaluator(object): + + """ + Responsible for filtering and sorting candidates for installation based + on what tags are valid. + """ + + @classmethod + def create( + cls, + project_name, # type: str + target_python=None, # type: Optional[TargetPython] + prefer_binary=False, # type: bool + allow_all_prereleases=False, # type: bool + specifier=None, # type: Optional[specifiers.BaseSpecifier] + hashes=None, # type: Optional[Hashes] + ): + # type: (...) -> CandidateEvaluator + """Create a CandidateEvaluator object. + + :param target_python: The target Python interpreter to use when + checking compatibility. If None (the default), a TargetPython + object will be constructed from the running Python. + :param specifier: An optional object implementing `filter` + (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable + versions. 
+ :param hashes: An optional collection of allowed hashes. + """ + if target_python is None: + target_python = TargetPython() + if specifier is None: + specifier = specifiers.SpecifierSet() + + supported_tags = target_python.get_tags() + + return cls( + project_name=project_name, + supported_tags=supported_tags, + specifier=specifier, + prefer_binary=prefer_binary, + allow_all_prereleases=allow_all_prereleases, + hashes=hashes, + ) + + def __init__( + self, + project_name, # type: str + supported_tags, # type: List[Tag] + specifier, # type: specifiers.BaseSpecifier + prefer_binary=False, # type: bool + allow_all_prereleases=False, # type: bool + hashes=None, # type: Optional[Hashes] + ): + # type: (...) -> None + """ + :param supported_tags: The PEP 425 tags supported by the target + Python in order of preference (most preferred first). + """ + self._allow_all_prereleases = allow_all_prereleases + self._hashes = hashes + self._prefer_binary = prefer_binary + self._project_name = project_name + self._specifier = specifier + self._supported_tags = supported_tags + + def get_applicable_candidates( + self, + candidates, # type: List[InstallationCandidate] + ): + # type: (...) -> List[InstallationCandidate] + """ + Return the applicable candidates from a list of candidates. + """ + # Using None infers from the specifier instead. + allow_prereleases = self._allow_all_prereleases or None + specifier = self._specifier + versions = { + str(v) for v in specifier.filter( + # We turn the version object into a str here because otherwise + # when we're debundled but setuptools isn't, Python will see + # packaging.version.Version and + # pkg_resources._vendor.packaging.version.Version as different + # types. This way we'll use a str as a common data interchange + # format. If we stop using the pkg_resources provided specifier + # and start using our own, we can drop the cast to str(). 
+ (str(c.version) for c in candidates), + prereleases=allow_prereleases, + ) + } + + # Again, converting version to str to deal with debundling. + applicable_candidates = [ + c for c in candidates if str(c.version) in versions + ] + + filtered_applicable_candidates = filter_unallowed_hashes( + candidates=applicable_candidates, + hashes=self._hashes, + project_name=self._project_name, + ) + + return sorted(filtered_applicable_candidates, key=self._sort_key) + + def _sort_key(self, candidate): + # type: (InstallationCandidate) -> CandidateSortingKey + """ + Function to pass as the `key` argument to a call to sorted() to sort + InstallationCandidates by preference. + + Returns a tuple such that tuples sorting as greater using Python's + default comparison operator are more preferred. + + The preference is as follows: + + First and foremost, candidates with allowed (matching) hashes are + always preferred over candidates without matching hashes. This is + because e.g. if the only candidate with an allowed hash is yanked, + we still want to use that candidate. + + Second, excepting hash considerations, candidates that have been + yanked (in the sense of PEP 592) are always less preferred than + candidates that haven't been yanked. Then: + + If not finding wheels, they are sorted by version only. + If finding wheels, then the sort order is by version, then: + 1. existing installs + 2. wheels ordered via Wheel.support_index_min(self._supported_tags) + 3. source archives + If prefer_binary was set, then all wheels are sorted above sources. 
+ + Note: it was considered to embed this logic into the Link + comparison operators, but then different sdist links + with the same version, would have to be considered equal + """ + valid_tags = self._supported_tags + support_num = len(valid_tags) + build_tag = () # type: BuildTag + binary_preference = 0 + link = candidate.link + if link.is_wheel: + # can raise InvalidWheelFilename + wheel = Wheel(link.filename) + if not wheel.supported(valid_tags): + raise UnsupportedWheel( + "{} is not a supported wheel for this platform. It " + "can't be sorted.".format(wheel.filename) + ) + if self._prefer_binary: + binary_preference = 1 + pri = -(wheel.support_index_min(valid_tags)) + if wheel.build_tag is not None: + match = re.match(r'^(\d+)(.*)$', wheel.build_tag) + build_tag_groups = match.groups() + build_tag = (int(build_tag_groups[0]), build_tag_groups[1]) + else: # sdist + pri = -(support_num) + has_allowed_hash = int(link.is_hash_allowed(self._hashes)) + yank_value = -1 * int(link.is_yanked) # -1 for yanked. + return ( + has_allowed_hash, yank_value, binary_preference, candidate.version, + build_tag, pri, + ) + + def sort_best_candidate( + self, + candidates, # type: List[InstallationCandidate] + ): + # type: (...) -> Optional[InstallationCandidate] + """ + Return the best candidate per the instance's sort order, or None if + no candidate is acceptable. + """ + if not candidates: + return None + best_candidate = max(candidates, key=self._sort_key) + return best_candidate + + def compute_best_candidate( + self, + candidates, # type: List[InstallationCandidate] + ): + # type: (...) -> BestCandidateResult + """ + Compute and return a `BestCandidateResult` instance. 
+ """ + applicable_candidates = self.get_applicable_candidates(candidates) + + best_candidate = self.sort_best_candidate(applicable_candidates) + + return BestCandidateResult( + candidates, + applicable_candidates=applicable_candidates, + best_candidate=best_candidate, + ) + + +class PackageFinder(object): + """This finds packages. + + This is meant to match easy_install's technique for looking for + packages, by reading pages and looking for appropriate links. + """ + + def __init__( + self, + link_collector, # type: LinkCollector + target_python, # type: TargetPython + allow_yanked, # type: bool + format_control=None, # type: Optional[FormatControl] + candidate_prefs=None, # type: CandidatePreferences + ignore_requires_python=None, # type: Optional[bool] + ): + # type: (...) -> None + """ + This constructor is primarily meant to be used by the create() class + method and from tests. + + :param format_control: A FormatControl object, used to control + the selection of source packages / binary packages when consulting + the index and links. + :param candidate_prefs: Options to use when creating a + CandidateEvaluator object. + """ + if candidate_prefs is None: + candidate_prefs = CandidatePreferences() + + format_control = format_control or FormatControl(set(), set()) + + self._allow_yanked = allow_yanked + self._candidate_prefs = candidate_prefs + self._ignore_requires_python = ignore_requires_python + self._link_collector = link_collector + self._target_python = target_python + + self.format_control = format_control + + # These are boring links that have already been logged somehow. + self._logged_links = set() # type: Set[Link] + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. 
+ @classmethod + def create( + cls, + link_collector, # type: LinkCollector + selection_prefs, # type: SelectionPreferences + target_python=None, # type: Optional[TargetPython] + ): + # type: (...) -> PackageFinder + """Create a PackageFinder. + + :param selection_prefs: The candidate selection preferences, as a + SelectionPreferences object. + :param target_python: The target Python interpreter to use when + checking compatibility. If None (the default), a TargetPython + object will be constructed from the running Python. + """ + if target_python is None: + target_python = TargetPython() + + candidate_prefs = CandidatePreferences( + prefer_binary=selection_prefs.prefer_binary, + allow_all_prereleases=selection_prefs.allow_all_prereleases, + ) + + return cls( + candidate_prefs=candidate_prefs, + link_collector=link_collector, + target_python=target_python, + allow_yanked=selection_prefs.allow_yanked, + format_control=selection_prefs.format_control, + ignore_requires_python=selection_prefs.ignore_requires_python, + ) + + @property + def target_python(self): + # type: () -> TargetPython + return self._target_python + + @property + def search_scope(self): + # type: () -> SearchScope + return self._link_collector.search_scope + + @search_scope.setter + def search_scope(self, search_scope): + # type: (SearchScope) -> None + self._link_collector.search_scope = search_scope + + @property + def find_links(self): + # type: () -> List[str] + return self._link_collector.find_links + + @property + def index_urls(self): + # type: () -> List[str] + return self.search_scope.index_urls + + @property + def trusted_hosts(self): + # type: () -> Iterable[str] + for host_port in self._link_collector.session.pip_trusted_origins: + yield build_netloc(*host_port) + + @property + def allow_all_prereleases(self): + # type: () -> bool + return self._candidate_prefs.allow_all_prereleases + + def set_allow_all_prereleases(self): + # type: () -> None + 
self._candidate_prefs.allow_all_prereleases = True + + @property + def prefer_binary(self): + # type: () -> bool + return self._candidate_prefs.prefer_binary + + def set_prefer_binary(self): + # type: () -> None + self._candidate_prefs.prefer_binary = True + + def make_link_evaluator(self, project_name): + # type: (str) -> LinkEvaluator + canonical_name = canonicalize_name(project_name) + formats = self.format_control.get_allowed_formats(canonical_name) + + return LinkEvaluator( + project_name=project_name, + canonical_name=canonical_name, + formats=formats, + target_python=self._target_python, + allow_yanked=self._allow_yanked, + ignore_requires_python=self._ignore_requires_python, + ) + + def _sort_links(self, links): + # type: (Iterable[Link]) -> List[Link] + """ + Returns elements of links in order, non-egg links first, egg links + second, while eliminating duplicates + """ + eggs, no_eggs = [], [] + seen = set() # type: Set[Link] + for link in links: + if link not in seen: + seen.add(link) + if link.egg_fragment: + eggs.append(link) + else: + no_eggs.append(link) + return no_eggs + eggs + + def _log_skipped_link(self, link, reason): + # type: (Link, Text) -> None + if link not in self._logged_links: + # Mark this as a unicode string to prevent "UnicodeEncodeError: + # 'ascii' codec can't encode character" in Python 2 when + # the reason contains non-ascii characters. + # Also, put the link at the end so the reason is more visible + # and because the link string is usually very long. + logger.debug(u'Skipping link: %s: %s', reason, link) + self._logged_links.add(link) + + def get_install_candidate(self, link_evaluator, link): + # type: (LinkEvaluator, Link) -> Optional[InstallationCandidate] + """ + If the link is a candidate for install, convert it to an + InstallationCandidate and return it. Otherwise, return None. 
+ """ + is_candidate, result = link_evaluator.evaluate_link(link) + if not is_candidate: + if result: + self._log_skipped_link(link, reason=result) + return None + + return InstallationCandidate( + name=link_evaluator.project_name, + link=link, + # Convert the Text result to str since InstallationCandidate + # accepts str. + version=str(result), + ) + + def evaluate_links(self, link_evaluator, links): + # type: (LinkEvaluator, Iterable[Link]) -> List[InstallationCandidate] + """ + Convert links that are candidates to InstallationCandidate objects. + """ + candidates = [] + for link in self._sort_links(links): + candidate = self.get_install_candidate(link_evaluator, link) + if candidate is not None: + candidates.append(candidate) + + return candidates + + def process_project_url(self, project_url, link_evaluator): + # type: (Link, LinkEvaluator) -> List[InstallationCandidate] + logger.debug( + 'Fetching project page and analyzing links: %s', project_url, + ) + html_page = self._link_collector.fetch_page(project_url) + if html_page is None: + return [] + + page_links = list(parse_links(html_page)) + + with indent_log(): + package_links = self.evaluate_links( + link_evaluator, + links=page_links, + ) + + return package_links + + def find_all_candidates(self, project_name): + # type: (str) -> List[InstallationCandidate] + """Find all available InstallationCandidate for project_name + + This checks index_urls and find_links. + All versions found are returned as an InstallationCandidate list. + + See LinkEvaluator.evaluate_link() for details on which files + are accepted. 
+ """ + collected_links = self._link_collector.collect_links(project_name) + + link_evaluator = self.make_link_evaluator(project_name) + + find_links_versions = self.evaluate_links( + link_evaluator, + links=collected_links.find_links, + ) + + page_versions = [] + for project_url in collected_links.project_urls: + package_links = self.process_project_url( + project_url, link_evaluator=link_evaluator, + ) + page_versions.extend(package_links) + + file_versions = self.evaluate_links( + link_evaluator, + links=collected_links.files, + ) + if file_versions: + file_versions.sort(reverse=True) + logger.debug( + 'Local files found: %s', + ', '.join([ + url_to_path(candidate.link.url) + for candidate in file_versions + ]) + ) + + # This is an intentional priority ordering + return file_versions + find_links_versions + page_versions + + def make_candidate_evaluator( + self, + project_name, # type: str + specifier=None, # type: Optional[specifiers.BaseSpecifier] + hashes=None, # type: Optional[Hashes] + ): + # type: (...) -> CandidateEvaluator + """Create a CandidateEvaluator object to use. + """ + candidate_prefs = self._candidate_prefs + return CandidateEvaluator.create( + project_name=project_name, + target_python=self._target_python, + prefer_binary=candidate_prefs.prefer_binary, + allow_all_prereleases=candidate_prefs.allow_all_prereleases, + specifier=specifier, + hashes=hashes, + ) + + def find_best_candidate( + self, + project_name, # type: str + specifier=None, # type: Optional[specifiers.BaseSpecifier] + hashes=None, # type: Optional[Hashes] + ): + # type: (...) -> BestCandidateResult + """Find matches for the given project and specifier. + + :param specifier: An optional object implementing `filter` + (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable + versions. + + :return: A `BestCandidateResult` instance. 
+ """ + candidates = self.find_all_candidates(project_name) + candidate_evaluator = self.make_candidate_evaluator( + project_name=project_name, + specifier=specifier, + hashes=hashes, + ) + return candidate_evaluator.compute_best_candidate(candidates) + + def find_requirement(self, req, upgrade): + # type: (InstallRequirement, bool) -> Optional[InstallationCandidate] + """Try to find a Link matching req + + Expects req, an InstallRequirement and upgrade, a boolean + Returns a InstallationCandidate if found, + Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise + """ + hashes = req.hashes(trust_internet=False) + best_candidate_result = self.find_best_candidate( + req.name, specifier=req.specifier, hashes=hashes, + ) + best_candidate = best_candidate_result.best_candidate + + installed_version = None # type: Optional[_BaseVersion] + if req.satisfied_by is not None: + installed_version = parse_version(req.satisfied_by.version) + + def _format_versions(cand_iter): + # type: (Iterable[InstallationCandidate]) -> str + # This repeated parse_version and str() conversion is needed to + # handle different vendoring sources from pip and pkg_resources. + # If we stop using the pkg_resources provided specifier and start + # using our own, we can drop the cast to str(). 
+ return ", ".join(sorted( + {str(c.version) for c in cand_iter}, + key=parse_version, + )) or "none" + + if installed_version is None and best_candidate is None: + logger.critical( + 'Could not find a version that satisfies the requirement %s ' + '(from versions: %s)', + req, + _format_versions(best_candidate_result.iter_all()), + ) + + raise DistributionNotFound( + 'No matching distribution found for {}'.format( + req) + ) + + best_installed = False + if installed_version and ( + best_candidate is None or + best_candidate.version <= installed_version): + best_installed = True + + if not upgrade and installed_version is not None: + if best_installed: + logger.debug( + 'Existing installed version (%s) is most up-to-date and ' + 'satisfies requirement', + installed_version, + ) + else: + logger.debug( + 'Existing installed version (%s) satisfies requirement ' + '(most up-to-date version is %s)', + installed_version, + best_candidate.version, + ) + return None + + if best_installed: + # We have an existing version, and its the best version + logger.debug( + 'Installed version (%s) is most up-to-date (past versions: ' + '%s)', + installed_version, + _format_versions(best_candidate_result.iter_applicable()), + ) + raise BestVersionAlreadyInstalled + + logger.debug( + 'Using version %s (newest of versions: %s)', + best_candidate.version, + _format_versions(best_candidate_result.iter_applicable()), + ) + return best_candidate + + +def _find_name_version_sep(fragment, canonical_name): + # type: (str, str) -> int + """Find the separator's index based on the package's canonical name. + + :param fragment: A <package>+<version> filename "fragment" (stem) or + egg fragment. + :param canonical_name: The package's canonical name. + + This function is needed since the canonicalized name does not necessarily + have the same length as the egg info's name part. 
An example:: + + >>> fragment = 'foo__bar-1.0' + >>> canonical_name = 'foo-bar' + >>> _find_name_version_sep(fragment, canonical_name) + 8 + """ + # Project name and version must be separated by one single dash. Find all + # occurrences of dashes; if the string in front of it matches the canonical + # name, this is the one separating the name and version parts. + for i, c in enumerate(fragment): + if c != "-": + continue + if canonicalize_name(fragment[:i]) == canonical_name: + return i + raise ValueError("{} does not match {}".format(fragment, canonical_name)) + + +def _extract_version_from_fragment(fragment, canonical_name): + # type: (str, str) -> Optional[str] + """Parse the version string from a <package>+<version> filename + "fragment" (stem) or egg fragment. + + :param fragment: The string to parse. E.g. foo-2.1 + :param canonical_name: The canonicalized name of the package this + belongs to. + """ + try: + version_start = _find_name_version_sep(fragment, canonical_name) + 1 + except ValueError: + return None + version = fragment[version_start:] + if not version: + return None + return version diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/locations.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/locations.py index c6e2a3e4..0c123548 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/locations.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/locations.py @@ -1,4 +1,8 @@ """Locations where we look for configs, install stuff, etc""" + +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False + from __future__ import absolute_import import os @@ -9,84 +13,51 @@ import sys import sysconfig from distutils import sysconfig as distutils_sysconfig from distutils.command.install import SCHEME_KEYS # type: ignore +from distutils.command.install import install as distutils_install_command +from pip._internal.models.scheme import Scheme from pip._internal.utils import appdirs -from pip._internal.utils.compat import WINDOWS, expanduser -from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.typing import MYPY_CHECK_RUNNING, cast +from pip._internal.utils.virtualenv import running_under_virtualenv if MYPY_CHECK_RUNNING: - from typing import Any, Union, Dict, List, Optional # noqa: F401 + from typing import Dict, List, Optional, Union + + from distutils.cmd import Command as DistutilsCommand # Application Directories USER_CACHE_DIR = appdirs.user_cache_dir("pip") -DELETE_MARKER_MESSAGE = '''\ -This file is placed here by pip to indicate the source was put -here by pip. - -Once this package is successfully installed this source code will be -deleted (unless you remove this file). -''' -PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt' - - -def write_delete_marker_file(directory): - # type: (str) -> None +def get_major_minor_version(): + # type: () -> str """ - Write the pip delete marker file into this directory. - """ - filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME) - with open(filepath, 'w') as marker_fp: - marker_fp.write(DELETE_MARKER_MESSAGE) - - -def running_under_virtualenv(): - # type: () -> bool - """ - Return True if we're running inside a virtualenv, False otherwise. - + Return the major-minor version of the current Python as a string, e.g. + "3.7" or "3.10". 
""" - if hasattr(sys, 'real_prefix'): - return True - elif sys.prefix != getattr(sys, "base_prefix", sys.prefix): - return True + return '{}.{}'.format(*sys.version_info) - return False - -def virtualenv_no_global(): - # type: () -> bool - """ - Return True if in a venv and no system site packages. - """ - # this mirrors the logic in virtualenv.py for locating the - # no-global-site-packages.txt file - site_mod_dir = os.path.dirname(os.path.abspath(site.__file__)) - no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt') - if running_under_virtualenv() and os.path.isfile(no_global_file): - return True +def get_src_prefix(): + # type: () -> str + if running_under_virtualenv(): + src_prefix = os.path.join(sys.prefix, 'src') else: - return False - + # FIXME: keep src in cwd for now (it is not a temporary folder) + try: + src_prefix = os.path.join(os.getcwd(), 'src') + except OSError: + # In case the current working directory has been renamed or deleted + sys.exit( + "The folder you are executing pip from can no longer be found." + ) -if running_under_virtualenv(): - src_prefix = os.path.join(sys.prefix, 'src') -else: - # FIXME: keep src in cwd for now (it is not a temporary folder) - try: - src_prefix = os.path.join(os.getcwd(), 'src') - except OSError: - # In case the current working directory has been renamed or deleted - sys.exit( - "The folder you are executing pip from can no longer be found." - ) + # under macOS + virtualenv sys.prefix is not properly resolved + # it is something like /path/to/python/bin/.. + return os.path.abspath(src_prefix) -# under macOS + virtualenv sys.prefix is not properly resolved -# it is something like /path/to/python/bin/.. 
-# Note: using realpath due to tmp dirs on OSX being symlinks -src_prefix = os.path.abspath(src_prefix) # FIXME doesn't account for venv linked to global site-packages @@ -103,7 +74,7 @@ try: user_site = site.getusersitepackages() except AttributeError: user_site = site.USER_SITE -user_dir = expanduser('~') + if WINDOWS: bin_py = os.path.join(sys.prefix, 'Scripts') bin_user = os.path.join(user_site, 'Scripts') @@ -111,73 +82,49 @@ if WINDOWS: if not os.path.exists(bin_py): bin_py = os.path.join(sys.prefix, 'bin') bin_user = os.path.join(user_site, 'bin') - - config_basename = 'pip.ini' - - legacy_storage_dir = os.path.join(user_dir, 'pip') - legacy_config_file = os.path.join( - legacy_storage_dir, - config_basename, - ) else: bin_py = os.path.join(sys.prefix, 'bin') bin_user = os.path.join(user_site, 'bin') - config_basename = 'pip.conf' - - legacy_storage_dir = os.path.join(user_dir, '.pip') - legacy_config_file = os.path.join( - legacy_storage_dir, - config_basename, - ) # Forcing to use /usr/local/bin for standard macOS framework installs # Also log to ~/Library/Logs/ for use with the Console.app log viewer if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/': bin_py = '/usr/local/bin' -site_config_files = [ - os.path.join(path, config_basename) - for path in appdirs.site_config_dirs('pip') -] - -venv_config_file = os.path.join(sys.prefix, config_basename) -new_config_file = os.path.join(appdirs.user_config_dir("pip"), config_basename) - -def distutils_scheme(dist_name, user=False, home=None, root=None, - isolated=False, prefix=None): - # type:(str, bool, str, str, bool, str) -> dict +def distutils_scheme( + dist_name, user=False, home=None, root=None, isolated=False, prefix=None +): + # type:(str, bool, str, str, bool, str) -> Dict[str, str] """ Return a distutils install scheme """ from distutils.dist import Distribution - scheme = {} - - if isolated: - extra_dist_args = {"script_args": ["--no-user-cfg"]} - else: - extra_dist_args = {} 
dist_args = {'name': dist_name} # type: Dict[str, Union[str, List[str]]] - dist_args.update(extra_dist_args) + if isolated: + dist_args["script_args"] = ["--no-user-cfg"] d = Distribution(dist_args) - # Ignoring, typeshed issue reported python/typeshed/issues/2567 d.parse_config_files() - # NOTE: Ignoring type since mypy can't find attributes on 'Command' - i = d.get_command_obj('install', create=True) # type: Any - assert i is not None + obj = None # type: Optional[DistutilsCommand] + obj = d.get_command_obj('install', create=True) + assert obj is not None + i = cast(distutils_install_command, obj) # NOTE: setting user or home has the side-effect of creating the home dir # or user base for installations during finalize_options() # ideally, we'd prefer a scheme class that has no side-effects. assert not (user and prefix), "user={} prefix={}".format(user, prefix) + assert not (home and prefix), "home={} prefix={}".format(home, prefix) i.user = user or i.user - if user: + if user or home: i.prefix = "" i.prefix = prefix or i.prefix i.home = home or i.home i.root = root or i.root i.finalize_options() + + scheme = {} for key in SCHEME_KEYS: scheme[key] = getattr(i, 'install_' + key) @@ -186,17 +133,15 @@ def distutils_scheme(dist_name, user=False, home=None, root=None, # platlib). 
Note, i.install_lib is *always* set after # finalize_options(); we only want to override here if the user # has explicitly requested it hence going back to the config - - # Ignoring, typeshed issue reported python/typeshed/issues/2567 - if 'install_lib' in d.get_option_dict('install'): # type: ignore + if 'install_lib' in d.get_option_dict('install'): scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib)) if running_under_virtualenv(): scheme['headers'] = os.path.join( - sys.prefix, + i.prefix, 'include', 'site', - 'python' + sys.version[:3], + 'python{}'.format(get_major_minor_version()), dist_name, ) @@ -209,3 +154,41 @@ def distutils_scheme(dist_name, user=False, home=None, root=None, ) return scheme + + +def get_scheme( + dist_name, # type: str + user=False, # type: bool + home=None, # type: Optional[str] + root=None, # type: Optional[str] + isolated=False, # type: bool + prefix=None, # type: Optional[str] +): + # type: (...) -> Scheme + """ + Get the "scheme" corresponding to the input parameters. The distutils + documentation provides the context for the available schemes: + https://docs.python.org/3/install/index.html#alternate-installation + + :param dist_name: the name of the package to retrieve the scheme for, used + in the headers scheme path + :param user: indicates to use the "user" scheme + :param home: indicates to use the "home" scheme and provides the base + directory for the same + :param root: root under which other directories are re-based + :param isolated: equivalent to --no-user-cfg, i.e. 
do not consider + ~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for + scheme paths + :param prefix: indicates to use the "prefix" scheme and provides the + base directory for the same + """ + scheme = distutils_scheme( + dist_name, user, home, root, isolated, prefix + ) + return Scheme( + platlib=scheme["platlib"], + purelib=scheme["purelib"], + headers=scheme["headers"], + scripts=scheme["scripts"], + data=scheme["data"], + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/main.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/main.py new file mode 100644 index 00000000..3208d5b8 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/main.py @@ -0,0 +1,16 @@ +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional, List + + +def main(args=None): + # type: (Optional[List[str]]) -> int + """This is preserved for old console scripts that may still be referencing + it. + + For additional details, see https://github.com/pypa/pip/issues/7498. 
+ """ + from pip._internal.utils.entrypoints import _wrapper + + return _wrapper(args) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/candidate.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/candidate.py index 4475458a..9149e0fc 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/candidate.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/candidate.py @@ -4,28 +4,35 @@ from pip._internal.utils.models import KeyBasedCompareMixin from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from pip._vendor.packaging.version import _BaseVersion # noqa: F401 - from pip._internal.models.link import Link # noqa: F401 - from typing import Any, Union # noqa: F401 + from pip._vendor.packaging.version import _BaseVersion + from pip._internal.models.link import Link class InstallationCandidate(KeyBasedCompareMixin): """Represents a potential "candidate" for installation. 
""" - def __init__(self, project, version, location): - # type: (Any, str, Link) -> None - self.project = project + __slots__ = ["name", "version", "link"] + + def __init__(self, name, version, link): + # type: (str, str, Link) -> None + self.name = name self.version = parse_version(version) # type: _BaseVersion - self.location = location + self.link = link super(InstallationCandidate, self).__init__( - key=(self.project, self.version, self.location), + key=(self.name, self.version, self.link), defining_class=InstallationCandidate ) def __repr__(self): # type: () -> str return "<InstallationCandidate({!r}, {!r}, {!r})>".format( - self.project, self.version, self.location, + self.name, self.version, self.link, + ) + + def __str__(self): + # type: () -> str + return '{!r} candidate (version {} at {})'.format( + self.name, self.version, self.link, ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/direct_url.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/direct_url.py new file mode 100644 index 00000000..87bd9fe4 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/direct_url.py @@ -0,0 +1,245 @@ +""" PEP 610 """ +import json +import re + +from pip._vendor import six +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import ( + Any, Dict, Iterable, Optional, Type, TypeVar, Union + ) + + T = TypeVar("T") + + +DIRECT_URL_METADATA_NAME = "direct_url.json" +ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$") + +__all__ = [ + "DirectUrl", + "DirectUrlValidationError", + "DirInfo", + "ArchiveInfo", + "VcsInfo", +] + + +class DirectUrlValidationError(Exception): + pass + + +def _get(d, expected_type, key, default=None): + # type: (Dict[str, Any], Type[T], str, Optional[T]) -> Optional[T] + """Get value from 
dictionary and verify expected type.""" + if key not in d: + return default + value = d[key] + if six.PY2 and expected_type is str: + expected_type = six.string_types # type: ignore + if not isinstance(value, expected_type): + raise DirectUrlValidationError( + "{!r} has unexpected type for {} (expected {})".format( + value, key, expected_type + ) + ) + return value + + +def _get_required(d, expected_type, key, default=None): + # type: (Dict[str, Any], Type[T], str, Optional[T]) -> T + value = _get(d, expected_type, key, default) + if value is None: + raise DirectUrlValidationError("{} must have a value".format(key)) + return value + + +def _exactly_one_of(infos): + # type: (Iterable[Optional[InfoType]]) -> InfoType + infos = [info for info in infos if info is not None] + if not infos: + raise DirectUrlValidationError( + "missing one of archive_info, dir_info, vcs_info" + ) + if len(infos) > 1: + raise DirectUrlValidationError( + "more than one of archive_info, dir_info, vcs_info" + ) + assert infos[0] is not None + return infos[0] + + +def _filter_none(**kwargs): + # type: (Any) -> Dict[str, Any] + """Make dict excluding None values.""" + return {k: v for k, v in kwargs.items() if v is not None} + + +class VcsInfo(object): + name = "vcs_info" + + def __init__( + self, + vcs, # type: str + commit_id, # type: str + requested_revision=None, # type: Optional[str] + resolved_revision=None, # type: Optional[str] + resolved_revision_type=None, # type: Optional[str] + ): + self.vcs = vcs + self.requested_revision = requested_revision + self.commit_id = commit_id + self.resolved_revision = resolved_revision + self.resolved_revision_type = resolved_revision_type + + @classmethod + def _from_dict(cls, d): + # type: (Optional[Dict[str, Any]]) -> Optional[VcsInfo] + if d is None: + return None + return cls( + vcs=_get_required(d, str, "vcs"), + commit_id=_get_required(d, str, "commit_id"), + requested_revision=_get(d, str, "requested_revision"), + resolved_revision=_get(d, str, 
"resolved_revision"), + resolved_revision_type=_get(d, str, "resolved_revision_type"), + ) + + def _to_dict(self): + # type: () -> Dict[str, Any] + return _filter_none( + vcs=self.vcs, + requested_revision=self.requested_revision, + commit_id=self.commit_id, + resolved_revision=self.resolved_revision, + resolved_revision_type=self.resolved_revision_type, + ) + + +class ArchiveInfo(object): + name = "archive_info" + + def __init__( + self, + hash=None, # type: Optional[str] + ): + self.hash = hash + + @classmethod + def _from_dict(cls, d): + # type: (Optional[Dict[str, Any]]) -> Optional[ArchiveInfo] + if d is None: + return None + return cls(hash=_get(d, str, "hash")) + + def _to_dict(self): + # type: () -> Dict[str, Any] + return _filter_none(hash=self.hash) + + +class DirInfo(object): + name = "dir_info" + + def __init__( + self, + editable=False, # type: bool + ): + self.editable = editable + + @classmethod + def _from_dict(cls, d): + # type: (Optional[Dict[str, Any]]) -> Optional[DirInfo] + if d is None: + return None + return cls( + editable=_get_required(d, bool, "editable", default=False) + ) + + def _to_dict(self): + # type: () -> Dict[str, Any] + return _filter_none(editable=self.editable or None) + + +if MYPY_CHECK_RUNNING: + InfoType = Union[ArchiveInfo, DirInfo, VcsInfo] + + +class DirectUrl(object): + + def __init__( + self, + url, # type: str + info, # type: InfoType + subdirectory=None, # type: Optional[str] + ): + self.url = url + self.info = info + self.subdirectory = subdirectory + + def _remove_auth_from_netloc(self, netloc): + # type: (str) -> str + if "@" not in netloc: + return netloc + user_pass, netloc_no_user_pass = netloc.split("@", 1) + if ( + isinstance(self.info, VcsInfo) and + self.info.vcs == "git" and + user_pass == "git" + ): + return netloc + if ENV_VAR_RE.match(user_pass): + return netloc + return netloc_no_user_pass + + @property + def redacted_url(self): + # type: () -> str + """url with user:password part removed unless it is 
formed with + environment variables as specified in PEP 610, or it is ``git`` + in the case of a git URL. + """ + purl = urllib_parse.urlsplit(self.url) + netloc = self._remove_auth_from_netloc(purl.netloc) + surl = urllib_parse.urlunsplit( + (purl.scheme, netloc, purl.path, purl.query, purl.fragment) + ) + return surl + + def validate(self): + # type: () -> None + self.from_dict(self.to_dict()) + + @classmethod + def from_dict(cls, d): + # type: (Dict[str, Any]) -> DirectUrl + return DirectUrl( + url=_get_required(d, str, "url"), + subdirectory=_get(d, str, "subdirectory"), + info=_exactly_one_of( + [ + ArchiveInfo._from_dict(_get(d, dict, "archive_info")), + DirInfo._from_dict(_get(d, dict, "dir_info")), + VcsInfo._from_dict(_get(d, dict, "vcs_info")), + ] + ), + ) + + def to_dict(self): + # type: () -> Dict[str, Any] + res = _filter_none( + url=self.redacted_url, + subdirectory=self.subdirectory, + ) + res[self.info.name] = self.info._to_dict() + return res + + @classmethod + def from_json(cls, s): + # type: (str) -> DirectUrl + return cls.from_dict(json.loads(s)) + + def to_json(self): + # type: () -> str + return json.dumps(self.to_dict(), sort_keys=True) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/format_control.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/format_control.py index 971a3914..c6275e72 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/format_control.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/format_control.py @@ -1,17 +1,20 @@ from pip._vendor.packaging.utils import canonicalize_name +from pip._internal.exceptions import CommandError from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Optional, Set, FrozenSet # noqa: F401 + from typing import Optional, Set, FrozenSet class FormatControl(object): """Helper for managing formats 
from which a package can be installed. """ + __slots__ = ["no_binary", "only_binary"] + def __init__(self, no_binary=None, only_binary=None): - # type: (Optional[Set], Optional[Set]) -> None + # type: (Optional[Set[str]], Optional[Set[str]]) -> None if no_binary is None: no_binary = set() if only_binary is None: @@ -21,12 +24,24 @@ class FormatControl(object): self.only_binary = only_binary def __eq__(self, other): - return self.__dict__ == other.__dict__ + # type: (object) -> bool + if not isinstance(other, self.__class__): + return NotImplemented + + if self.__slots__ != other.__slots__: + return False + + return all( + getattr(self, k) == getattr(other, k) + for k in self.__slots__ + ) def __ne__(self, other): + # type: (object) -> bool return not self.__eq__(other) def __repr__(self): + # type: () -> str return "{}({}, {})".format( self.__class__.__name__, self.no_binary, @@ -35,7 +50,11 @@ class FormatControl(object): @staticmethod def handle_mutual_excludes(value, target, other): - # type: (str, Optional[Set], Optional[Set]) -> None + # type: (str, Set[str], Set[str]) -> None + if value.startswith('-'): + raise CommandError( + "--no-binary / --only-binary option requires 1 argument." 
+ ) new = value.split(',') while ':all:' in new: other.clear() @@ -54,7 +73,7 @@ class FormatControl(object): target.add(name) def get_allowed_formats(self, canonical_name): - # type: (str) -> FrozenSet + # type: (str) -> FrozenSet[str] result = {"binary", "source"} if canonical_name in self.only_binary: result.discard('source') diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/index.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/index.py index ead1efbd..5b4a1fe2 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/index.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/index.py @@ -5,6 +5,9 @@ class PackageIndex(object): """Represents a Package Index and provides easier access to endpoints """ + __slots__ = ['url', 'netloc', 'simple_url', 'pypi_url', + 'file_storage_domain'] + def __init__(self, url, file_storage_domain): # type: (str, str) -> None super(PackageIndex, self).__init__() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/link.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/link.py index ad2f93e1..c0d278ad 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/link.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/link.py @@ -1,87 +1,144 @@ +import os import posixpath import re from pip._vendor.six.moves.urllib import parse as urllib_parse -from pip._internal.download import path_to_url +from pip._internal.utils.filetypes import WHEEL_EXTENSION from pip._internal.utils.misc import ( - WHEEL_EXTENSION, redact_password_from_url, splitext, + redact_auth_from_url, + split_auth_from_netloc, + splitext, ) from pip._internal.utils.models import KeyBasedCompareMixin from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls 
import path_to_url, url_to_path if MYPY_CHECK_RUNNING: - from typing import Optional, Tuple, Union, Text # noqa: F401 - from pip._internal.index import HTMLPage # noqa: F401 + from typing import Optional, Text, Tuple, Union + from pip._internal.index.collector import HTMLPage + from pip._internal.utils.hashes import Hashes class Link(KeyBasedCompareMixin): """Represents a parsed link from a Package Index's simple URL """ - def __init__(self, url, comes_from=None, requires_python=None): - # type: (str, Optional[Union[str, HTMLPage]], Optional[str]) -> None + __slots__ = [ + "_parsed_url", + "_url", + "comes_from", + "requires_python", + "yanked_reason", + "cache_link_parsing", + ] + + def __init__( + self, + url, # type: str + comes_from=None, # type: Optional[Union[str, HTMLPage]] + requires_python=None, # type: Optional[str] + yanked_reason=None, # type: Optional[Text] + cache_link_parsing=True, # type: bool + ): + # type: (...) -> None """ - url: - url of the resource pointed to (href of the link) - comes_from: - instance of HTMLPage where the link was found, or string. - requires_python: - String containing the `Requires-Python` metadata field, specified - in PEP 345. This may be specified by a data-requires-python - attribute in the HTML link tag, as described in PEP 503. + :param url: url of the resource pointed to (href of the link) + :param comes_from: instance of HTMLPage where the link was found, + or string. + :param requires_python: String containing the `Requires-Python` + metadata field, specified in PEP 345. This may be specified by + a data-requires-python attribute in the HTML link tag, as + described in PEP 503. + :param yanked_reason: the reason the file has been yanked, if the + file has been yanked, or None if the file hasn't been yanked. + This is the value of the "data-yanked" attribute, if present, in + a simple repository HTML link. If the file has been yanked but + no reason was provided, this should be the empty string. 
See + PEP 592 for more information and the specification. + :param cache_link_parsing: A flag that is used elsewhere to determine + whether resources retrieved from this link + should be cached. PyPI index urls should + generally have this set to False, for + example. """ # url can be a UNC windows share if url.startswith('\\\\'): url = path_to_url(url) - self.url = url + self._parsed_url = urllib_parse.urlsplit(url) + # Store the url as a private attribute to prevent accidentally + # trying to set a new value. + self._url = url + self.comes_from = comes_from self.requires_python = requires_python if requires_python else None + self.yanked_reason = yanked_reason + + super(Link, self).__init__(key=url, defining_class=Link) - super(Link, self).__init__( - key=(self.url), - defining_class=Link - ) + self.cache_link_parsing = cache_link_parsing def __str__(self): + # type: () -> str if self.requires_python: - rp = ' (requires-python:%s)' % self.requires_python + rp = ' (requires-python:{})'.format(self.requires_python) else: rp = '' if self.comes_from: - return '%s (from %s)%s' % (redact_password_from_url(self.url), - self.comes_from, rp) + return '{} (from {}){}'.format( + redact_auth_from_url(self._url), self.comes_from, rp) else: - return redact_password_from_url(str(self.url)) + return redact_auth_from_url(str(self._url)) def __repr__(self): - return '<Link %s>' % self + # type: () -> str + return '<Link {}>'.format(self) + + @property + def url(self): + # type: () -> str + return self._url @property def filename(self): # type: () -> str - _, netloc, path, _, _ = urllib_parse.urlsplit(self.url) - name = posixpath.basename(path.rstrip('/')) or netloc + path = self.path.rstrip('/') + name = posixpath.basename(path) + if not name: + # Make sure we don't leak auth information if the netloc + # includes a username and password. 
+ netloc, user_pass = split_auth_from_netloc(self.netloc) + return netloc + name = urllib_parse.unquote(name) - assert name, ('URL %r produced no filename' % self.url) + assert name, ( + 'URL {self._url!r} produced no filename'.format(**locals())) return name + @property + def file_path(self): + # type: () -> str + return url_to_path(self.url) + @property def scheme(self): # type: () -> str - return urllib_parse.urlsplit(self.url)[0] + return self._parsed_url.scheme @property def netloc(self): # type: () -> str - return urllib_parse.urlsplit(self.url)[1] + """ + This can contain auth information. + """ + return self._parsed_url.netloc @property def path(self): # type: () -> str - return urllib_parse.unquote(urllib_parse.urlsplit(self.url)[2]) + return urllib_parse.unquote(self._parsed_url.path) def splitext(self): # type: () -> Tuple[str, str] @@ -95,7 +152,7 @@ class Link(KeyBasedCompareMixin): @property def url_without_fragment(self): # type: () -> str - scheme, netloc, path, query, fragment = urllib_parse.urlsplit(self.url) + scheme, netloc, path, query, fragment = self._parsed_url return urllib_parse.urlunsplit((scheme, netloc, path, query, None)) _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)') @@ -103,7 +160,7 @@ class Link(KeyBasedCompareMixin): @property def egg_fragment(self): # type: () -> Optional[str] - match = self._egg_fragment_re.search(self.url) + match = self._egg_fragment_re.search(self._url) if not match: return None return match.group(1) @@ -113,7 +170,7 @@ class Link(KeyBasedCompareMixin): @property def subdirectory_fragment(self): # type: () -> Optional[str] - match = self._subdirectory_fragment_re.search(self.url) + match = self._subdirectory_fragment_re.search(self._url) if not match: return None return match.group(1) @@ -125,7 +182,7 @@ class Link(KeyBasedCompareMixin): @property def hash(self): # type: () -> Optional[str] - match = self._hash_re.search(self.url) + match = self._hash_re.search(self._url) if match: return match.group(2) 
return None @@ -133,15 +190,24 @@ class Link(KeyBasedCompareMixin): @property def hash_name(self): # type: () -> Optional[str] - match = self._hash_re.search(self.url) + match = self._hash_re.search(self._url) if match: return match.group(1) return None @property def show_url(self): - # type: () -> Optional[str] - return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0]) + # type: () -> str + return posixpath.basename(self._url.split('#', 1)[0].split('?', 1)[0]) + + @property + def is_file(self): + # type: () -> bool + return self.scheme == 'file' + + def is_existing_dir(self): + # type: () -> bool + return self.is_file and os.path.isdir(self.file_path) @property def is_wheel(self): @@ -149,15 +215,31 @@ class Link(KeyBasedCompareMixin): return self.ext == WHEEL_EXTENSION @property - def is_artifact(self): + def is_vcs(self): # type: () -> bool - """ - Determines if this points to an actual artifact (e.g. a tarball) or if - it points to an "abstract" thing like a path or a VCS location. - """ from pip._internal.vcs import vcs - if self.scheme in vcs.all_schemes: + return self.scheme in vcs.all_schemes + + @property + def is_yanked(self): + # type: () -> bool + return self.yanked_reason is not None + + @property + def has_hash(self): + # type: () -> bool + return self.hash_name is not None + + def is_hash_allowed(self, hashes): + # type: (Optional[Hashes]) -> bool + """ + Return True if the link has a hash and it is allowed. + """ + if hashes is None or not self.has_hash: return False + # Assert non-None so mypy knows self.hash_name and self.hash are str. 
+ assert self.hash_name is not None + assert self.hash is not None - return True + return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/scheme.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/scheme.py new file mode 100644 index 00000000..5040551e --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/scheme.py @@ -0,0 +1,31 @@ +""" +For types associated with installation schemes. + +For a general overview of available schemes and their context, see +https://docs.python.org/3/install/index.html#alternate-installation. +""" + + +SCHEME_KEYS = ['platlib', 'purelib', 'headers', 'scripts', 'data'] + + +class Scheme(object): + """A Scheme holds paths which are used as the base directories for + artifacts associated with a Python package. + """ + + __slots__ = SCHEME_KEYS + + def __init__( + self, + platlib, # type: str + purelib, # type: str + headers, # type: str + scripts, # type: str + data, # type: str + ): + self.platlib = platlib + self.purelib = purelib + self.headers = headers + self.scripts = scripts + self.data = data diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/search_scope.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/search_scope.py new file mode 100644 index 00000000..d732504e --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/search_scope.py @@ -0,0 +1,135 @@ +import itertools +import logging +import os +import posixpath + +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip._internal.models.index import PyPI +from pip._internal.utils.compat import has_tls +from pip._internal.utils.misc import normalize_path, redact_auth_from_url +from pip._internal.utils.typing 
import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List + + +logger = logging.getLogger(__name__) + + +class SearchScope(object): + + """ + Encapsulates the locations that pip is configured to search. + """ + + __slots__ = ["find_links", "index_urls"] + + @classmethod + def create( + cls, + find_links, # type: List[str] + index_urls, # type: List[str] + ): + # type: (...) -> SearchScope + """ + Create a SearchScope object after normalizing the `find_links`. + """ + # Build find_links. If an argument starts with ~, it may be + # a local file relative to a home directory. So try normalizing + # it and if it exists, use the normalized version. + # This is deliberately conservative - it might be fine just to + # blindly normalize anything starting with a ~... + built_find_links = [] # type: List[str] + for link in find_links: + if link.startswith('~'): + new_link = normalize_path(link) + if os.path.exists(new_link): + link = new_link + built_find_links.append(link) + + # If we don't have TLS enabled, then WARN if anyplace we're looking + # relies on TLS. + if not has_tls(): + for link in itertools.chain(index_urls, built_find_links): + parsed = urllib_parse.urlparse(link) + if parsed.scheme == 'https': + logger.warning( + 'pip is configured with locations that require ' + 'TLS/SSL, however the ssl module in Python is not ' + 'available.' + ) + break + + return cls( + find_links=built_find_links, + index_urls=index_urls, + ) + + def __init__( + self, + find_links, # type: List[str] + index_urls, # type: List[str] + ): + # type: (...) 
-> None + self.find_links = find_links + self.index_urls = index_urls + + def get_formatted_locations(self): + # type: () -> str + lines = [] + redacted_index_urls = [] + if self.index_urls and self.index_urls != [PyPI.simple_url]: + for url in self.index_urls: + + redacted_index_url = redact_auth_from_url(url) + + # Parse the URL + purl = urllib_parse.urlsplit(redacted_index_url) + + # URL is generally invalid if scheme and netloc is missing + # there are issues with Python and URL parsing, so this test + # is a bit crude. See bpo-20271, bpo-23505. Python doesn't + # always parse invalid URLs correctly - it should raise + # exceptions for malformed URLs + if not purl.scheme and not purl.netloc: + logger.warning( + 'The index url "%s" seems invalid, ' + 'please provide a scheme.', redacted_index_url) + + redacted_index_urls.append(redacted_index_url) + + lines.append('Looking in indexes: {}'.format( + ', '.join(redacted_index_urls))) + + if self.find_links: + lines.append( + 'Looking in links: {}'.format(', '.join( + redact_auth_from_url(url) for url in self.find_links)) + ) + return '\n'.join(lines) + + def get_index_urls_locations(self, project_name): + # type: (str) -> List[str] + """Returns the locations found via self.index_urls + + Checks the url_name on the main (first in the list) index and + use this url_name to produce all locations + """ + + def mkurl_pypi_url(url): + # type: (str) -> str + loc = posixpath.join( + url, + urllib_parse.quote(canonicalize_name(project_name))) + # For maximum compatibility with easy_install, ensure the path + # ends in a trailing slash. Although this isn't in the spec + # (and PyPI can handle it without the slash) some other index + # implementations might break if they relied on easy_install's + # behavior. 
+ if not loc.endswith('/'): + loc = loc + '/' + return loc + + return [mkurl_pypi_url(url) for url in self.index_urls] diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/selection_prefs.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/selection_prefs.py new file mode 100644 index 00000000..5db3ca91 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/selection_prefs.py @@ -0,0 +1,49 @@ +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional + from pip._internal.models.format_control import FormatControl + + +class SelectionPreferences(object): + """ + Encapsulates the candidate selection preferences for downloading + and installing files. + """ + + __slots__ = ['allow_yanked', 'allow_all_prereleases', 'format_control', + 'prefer_binary', 'ignore_requires_python'] + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. + def __init__( + self, + allow_yanked, # type: bool + allow_all_prereleases=False, # type: bool + format_control=None, # type: Optional[FormatControl] + prefer_binary=False, # type: bool + ignore_requires_python=None, # type: Optional[bool] + ): + # type: (...) -> None + """Create a SelectionPreferences object. + + :param allow_yanked: Whether files marked as yanked (in the sense + of PEP 592) are permitted to be candidates for install. + :param format_control: A FormatControl object or None. Used to control + the selection of source packages / binary packages when consulting + the index and links. + :param prefer_binary: Whether to prefer an old, but valid, binary + dist over a new source dist. 
+ :param ignore_requires_python: Whether to ignore incompatible + "Requires-Python" values in links. Defaults to False. + """ + if ignore_requires_python is None: + ignore_requires_python = False + + self.allow_yanked = allow_yanked + self.allow_all_prereleases = allow_all_prereleases + self.format_control = format_control + self.prefer_binary = prefer_binary + self.ignore_requires_python = ignore_requires_python diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/target_python.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/target_python.py new file mode 100644 index 00000000..6d1ca796 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/target_python.py @@ -0,0 +1,120 @@ +import sys + +from pip._internal.utils.compatibility_tags import ( + get_supported, + version_info_to_nodot, +) +from pip._internal.utils.misc import normalize_version_info +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional, Tuple + + from pip._vendor.packaging.tags import Tag + + +class TargetPython(object): + + """ + Encapsulates the properties of a Python interpreter one is targeting + for a package install, download, etc. + """ + + __slots__ = [ + "_given_py_version_info", + "abi", + "implementation", + "platform", + "py_version", + "py_version_info", + "_valid_tags", + ] + + def __init__( + self, + platform=None, # type: Optional[str] + py_version_info=None, # type: Optional[Tuple[int, ...]] + abi=None, # type: Optional[str] + implementation=None, # type: Optional[str] + ): + # type: (...) -> None + """ + :param platform: A string or None. If None, searches for packages + that are supported by the current system. Otherwise, will find + packages that can be built on the platform passed in. These + packages will only be downloaded for distribution: they will + not be built locally. 
+ :param py_version_info: An optional tuple of ints representing the + Python version information to use (e.g. `sys.version_info[:3]`). + This can have length 1, 2, or 3 when provided. + :param abi: A string or None. This is passed to compatibility_tags.py's + get_supported() function as is. + :param implementation: A string or None. This is passed to + compatibility_tags.py's get_supported() function as is. + """ + # Store the given py_version_info for when we call get_supported(). + self._given_py_version_info = py_version_info + + if py_version_info is None: + py_version_info = sys.version_info[:3] + else: + py_version_info = normalize_version_info(py_version_info) + + py_version = '.'.join(map(str, py_version_info[:2])) + + self.abi = abi + self.implementation = implementation + self.platform = platform + self.py_version = py_version + self.py_version_info = py_version_info + + # This is used to cache the return value of get_tags(). + self._valid_tags = None # type: Optional[List[Tag]] + + def format_given(self): + # type: () -> str + """ + Format the given, non-None attributes for display. + """ + display_version = None + if self._given_py_version_info is not None: + display_version = '.'.join( + str(part) for part in self._given_py_version_info + ) + + key_values = [ + ('platform', self.platform), + ('version_info', display_version), + ('abi', self.abi), + ('implementation', self.implementation), + ] + return ' '.join( + '{}={!r}'.format(key, value) for key, value in key_values + if value is not None + ) + + def get_tags(self): + # type: () -> List[Tag] + """ + Return the supported PEP 425 tags to check wheel candidates against. + + The tags are returned in order of preference (most preferred first). + """ + if self._valid_tags is None: + # Pass versions=None if no py_version_info was given since + # versions=None uses special default logic. 
+ py_version_info = self._given_py_version_info + if py_version_info is None: + version = None + else: + version = version_info_to_nodot(py_version_info) + + tags = get_supported( + version=version, + platform=self.platform, + abi=self.abi, + impl=self.implementation, + ) + self._valid_tags = tags + + return self._valid_tags diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/wheel.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/wheel.py new file mode 100644 index 00000000..4d4068f3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/models/wheel.py @@ -0,0 +1,78 @@ +"""Represents a wheel file and provides access to the various parts of the +name that have meaning. +""" +import re + +from pip._vendor.packaging.tags import Tag + +from pip._internal.exceptions import InvalidWheelFilename +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List + + +class Wheel(object): + """A wheel file""" + + wheel_file_re = re.compile( + r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?)) + ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?) 
+ \.whl|\.dist-info)$""", + re.VERBOSE + ) + + def __init__(self, filename): + # type: (str) -> None + """ + :raises InvalidWheelFilename: when the filename is invalid for a wheel + """ + wheel_info = self.wheel_file_re.match(filename) + if not wheel_info: + raise InvalidWheelFilename( + "{} is not a valid wheel filename.".format(filename) + ) + self.filename = filename + self.name = wheel_info.group('name').replace('_', '-') + # we'll assume "_" means "-" due to wheel naming scheme + # (https://github.com/pypa/pip/issues/1150) + self.version = wheel_info.group('ver').replace('_', '-') + self.build_tag = wheel_info.group('build') + self.pyversions = wheel_info.group('pyver').split('.') + self.abis = wheel_info.group('abi').split('.') + self.plats = wheel_info.group('plat').split('.') + + # All the tag combinations from this file + self.file_tags = { + Tag(x, y, z) for x in self.pyversions + for y in self.abis for z in self.plats + } + + def get_formatted_file_tags(self): + # type: () -> List[str] + """Return the wheel's tags as a sorted list of strings.""" + return sorted(str(tag) for tag in self.file_tags) + + def support_index_min(self, tags): + # type: (List[Tag]) -> int + """Return the lowest index that one of the wheel's file_tag combinations + achieves in the given list of supported tags. + + For example, if there are 8 supported tags and one of the file tags + is first in the list, then return 0. + + :param tags: the PEP 425 tags to check the wheel against, in order + with most preferred first. + + :raises ValueError: If none of the wheel's file tags match one of + the supported tags. + """ + return min(tags.index(tag) for tag in self.file_tags if tag in tags) + + def supported(self, tags): + # type: (List[Tag]) -> bool + """Return whether the wheel is compatible with one of the given tags. + + :param tags: the PEP 425 tags to check the wheel against. 
+ """ + return not self.file_tags.isdisjoint(tags) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/__init__.py new file mode 100644 index 00000000..b51bde91 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/__init__.py @@ -0,0 +1,2 @@ +"""Contains purely network-related utilities. +""" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/auth.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/auth.py new file mode 100644 index 00000000..ca729fcd --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/auth.py @@ -0,0 +1,308 @@ +"""Network Authentication Helpers + +Contains interface (MultiDomainBasicAuth) and associated glue code for +providing credentials in the context of network requests. 
+""" + +import logging + +from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth +from pip._vendor.requests.utils import get_netrc_auth +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip._internal.utils.misc import ( + ask, + ask_input, + ask_password, + remove_auth_from_url, + split_auth_netloc_from_url, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Dict, Optional, Tuple, List, Any + + from pip._internal.vcs.versioncontrol import AuthInfo + + from pip._vendor.requests.models import Response, Request + + Credentials = Tuple[str, str, str] + +logger = logging.getLogger(__name__) + +try: + import keyring # noqa +except ImportError: + keyring = None +except Exception as exc: + logger.warning( + "Keyring is skipped due to an exception: %s", str(exc), + ) + keyring = None + + +def get_keyring_auth(url, username): + # type: (str, str) -> Optional[AuthInfo] + """Return the tuple auth for a given url from keyring.""" + if not url or not keyring: + return None + + try: + try: + get_credential = keyring.get_credential + except AttributeError: + pass + else: + logger.debug("Getting credentials from keyring for %s", url) + cred = get_credential(url, username) + if cred is not None: + return cred.username, cred.password + return None + + if username: + logger.debug("Getting password from keyring for %s", url) + password = keyring.get_password(url, username) + if password: + return username, password + + except Exception as exc: + logger.warning( + "Keyring is skipped due to an exception: %s", str(exc), + ) + return None + + +class MultiDomainBasicAuth(AuthBase): + + def __init__(self, prompting=True, index_urls=None): + # type: (bool, Optional[List[str]]) -> None + self.prompting = prompting + self.index_urls = index_urls + self.passwords = {} # type: Dict[str, AuthInfo] + # When the user is prompted to enter credentials and keyring is + # available, we will offer to save them. 
If the user accepts, + # this value is set to the credentials they entered. After the + # request authenticates, the caller should call + # ``save_credentials`` to save these. + self._credentials_to_save = None # type: Optional[Credentials] + + def _get_index_url(self, url): + # type: (str) -> Optional[str] + """Return the original index URL matching the requested URL. + + Cached or dynamically generated credentials may work against + the original index URL rather than just the netloc. + + The provided url should have had its username and password + removed already. If the original index url had credentials then + they will be included in the return value. + + Returns None if no matching index was found, or if --no-index + was specified by the user. + """ + if not url or not self.index_urls: + return None + + for u in self.index_urls: + prefix = remove_auth_from_url(u).rstrip("/") + "/" + if url.startswith(prefix): + return u + return None + + def _get_new_credentials(self, original_url, allow_netrc=True, + allow_keyring=True): + # type: (str, bool, bool) -> AuthInfo + """Find and return credentials for the specified URL.""" + # Split the credentials and netloc from the url. + url, netloc, url_user_password = split_auth_netloc_from_url( + original_url, + ) + + # Start with the credentials embedded in the url + username, password = url_user_password + if username is not None and password is not None: + logger.debug("Found credentials in url for %s", netloc) + return url_user_password + + # Find a matching index url for this request + index_url = self._get_index_url(url) + if index_url: + # Split the credentials from the url. 
+ index_info = split_auth_netloc_from_url(index_url) + if index_info: + index_url, _, index_url_user_password = index_info + logger.debug("Found index url %s", index_url) + + # If an index URL was found, try its embedded credentials + if index_url and index_url_user_password[0] is not None: + username, password = index_url_user_password + if username is not None and password is not None: + logger.debug("Found credentials in index url for %s", netloc) + return index_url_user_password + + # Get creds from netrc if we still don't have them + if allow_netrc: + netrc_auth = get_netrc_auth(original_url) + if netrc_auth: + logger.debug("Found credentials in netrc for %s", netloc) + return netrc_auth + + # If we don't have a password and keyring is available, use it. + if allow_keyring: + # The index url is more specific than the netloc, so try it first + kr_auth = ( + get_keyring_auth(index_url, username) or + get_keyring_auth(netloc, username) + ) + if kr_auth: + logger.debug("Found credentials in keyring for %s", netloc) + return kr_auth + + return username, password + + def _get_url_and_credentials(self, original_url): + # type: (str) -> Tuple[str, Optional[str], Optional[str]] + """Return the credentials to use for the provided URL. + + If allowed, netrc and keyring may be used to obtain the + correct credentials. + + Returns (url_without_credentials, username, password). Note + that even if the original URL contains credentials, this + function may return a different username and password. + """ + url, netloc, _ = split_auth_netloc_from_url(original_url) + + # Use any stored credentials that we have for this netloc + username, password = self.passwords.get(netloc, (None, None)) + + if username is None and password is None: + # No stored credentials. Acquire new credentials without prompting + # the user. (e.g. 
from netrc, keyring, or the URL itself) + username, password = self._get_new_credentials(original_url) + + if username is not None or password is not None: + # Convert the username and password if they're None, so that + # this netloc will show up as "cached" in the conditional above. + # Further, HTTPBasicAuth doesn't accept None, so it makes sense to + # cache the value that is going to be used. + username = username or "" + password = password or "" + + # Store any acquired credentials. + self.passwords[netloc] = (username, password) + + assert ( + # Credentials were found + (username is not None and password is not None) or + # Credentials were not found + (username is None and password is None) + ), "Could not load credentials from url: {}".format(original_url) + + return url, username, password + + def __call__(self, req): + # type: (Request) -> Request + # Get credentials for this request + url, username, password = self._get_url_and_credentials(req.url) + + # Set the url of the request to the url without any credentials + req.url = url + + if username is not None and password is not None: + # Send the basic auth with this request + req = HTTPBasicAuth(username, password)(req) + + # Attach a hook to handle 401 responses + req.register_hook("response", self.handle_401) + + return req + + # Factored out to allow for easy patching in tests + def _prompt_for_password(self, netloc): + # type: (str) -> Tuple[Optional[str], Optional[str], bool] + username = ask_input("User for {}: ".format(netloc)) + if not username: + return None, None, False + auth = get_keyring_auth(netloc, username) + if auth and auth[0] is not None and auth[1] is not None: + return auth[0], auth[1], False + password = ask_password("Password: ") + return username, password, True + + # Factored out to allow for easy patching in tests + def _should_save_password_to_keyring(self): + # type: () -> bool + if not keyring: + return False + return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) 
== "y" + + def handle_401(self, resp, **kwargs): + # type: (Response, **Any) -> Response + # We only care about 401 responses, anything else we want to just + # pass through the actual response + if resp.status_code != 401: + return resp + + # We are not able to prompt the user so simply return the response + if not self.prompting: + return resp + + parsed = urllib_parse.urlparse(resp.url) + + # Prompt the user for a new username and password + username, password, save = self._prompt_for_password(parsed.netloc) + + # Store the new username and password to use for future requests + self._credentials_to_save = None + if username is not None and password is not None: + self.passwords[parsed.netloc] = (username, password) + + # Prompt to save the password to keyring + if save and self._should_save_password_to_keyring(): + self._credentials_to_save = (parsed.netloc, username, password) + + # Consume content and release the original connection to allow our new + # request to reuse the same one. + resp.content + resp.raw.release_conn() + + # Add our new username and password to the request + req = HTTPBasicAuth(username or "", password or "")(resp.request) + req.register_hook("response", self.warn_on_401) + + # On successful request, save the credentials that were used to + # keyring. (Note that if the user responded "no" above, this member + # is not set and nothing will be saved.) 
+ if self._credentials_to_save: + req.register_hook("response", self.save_credentials) + + # Send our new request + new_resp = resp.connection.send(req, **kwargs) + new_resp.history.append(resp) + + return new_resp + + def warn_on_401(self, resp, **kwargs): + # type: (Response, **Any) -> None + """Response callback to warn about incorrect credentials.""" + if resp.status_code == 401: + logger.warning( + '401 Error, Credentials not correct for %s', resp.request.url, + ) + + def save_credentials(self, resp, **kwargs): + # type: (Response, **Any) -> None + """Response callback to save credentials on success.""" + assert keyring is not None, "should never reach here without keyring" + if not keyring: + return + + creds = self._credentials_to_save + self._credentials_to_save = None + if creds and resp.status_code < 400: + try: + logger.info('Saving credentials to keyring') + keyring.set_password(*creds) + except Exception: + logger.exception('Failed to save credentials') diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/cache.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/cache.py new file mode 100644 index 00000000..a0d55b5e --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/cache.py @@ -0,0 +1,79 @@ +"""HTTP cache implementation. 
+""" + +import os +from contextlib import contextmanager + +from pip._vendor.cachecontrol.cache import BaseCache +from pip._vendor.cachecontrol.caches import FileCache +from pip._vendor.requests.models import Response + +from pip._internal.utils.filesystem import adjacent_tmp_file, replace +from pip._internal.utils.misc import ensure_dir +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional, Iterator + + +def is_from_cache(response): + # type: (Response) -> bool + return getattr(response, "from_cache", False) + + +@contextmanager +def suppressed_cache_errors(): + # type: () -> Iterator[None] + """If we can't access the cache then we can just skip caching and process + requests as if caching wasn't enabled. + """ + try: + yield + except (OSError, IOError): + pass + + +class SafeFileCache(BaseCache): + """ + A file based cache which is safe to use even when the target directory may + not be accessible or writable. + """ + + def __init__(self, directory): + # type: (str) -> None + assert directory is not None, "Cache directory must not be None." + super(SafeFileCache, self).__init__() + self.directory = directory + + def _get_cache_path(self, name): + # type: (str) -> str + # From cachecontrol.caches.file_cache.FileCache._fn, brought into our + # class for backwards-compatibility and to avoid using a non-public + # method. 
+ hashed = FileCache.encode(name) + parts = list(hashed[:5]) + [hashed] + return os.path.join(self.directory, *parts) + + def get(self, key): + # type: (str) -> Optional[bytes] + path = self._get_cache_path(key) + with suppressed_cache_errors(): + with open(path, 'rb') as f: + return f.read() + + def set(self, key, value): + # type: (str, bytes) -> None + path = self._get_cache_path(key) + with suppressed_cache_errors(): + ensure_dir(os.path.dirname(path)) + + with adjacent_tmp_file(path) as f: + f.write(value) + + replace(f.name, path) + + def delete(self, key): + # type: (str) -> None + path = self._get_cache_path(key) + with suppressed_cache_errors(): + os.remove(path) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/download.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/download.py new file mode 100644 index 00000000..44f9985a --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/download.py @@ -0,0 +1,182 @@ +"""Download files with progress indicators. 
+""" +import cgi +import logging +import mimetypes +import os + +from pip._vendor.requests.models import CONTENT_CHUNK_SIZE + +from pip._internal.cli.progress_bars import DownloadProgressProvider +from pip._internal.exceptions import NetworkConnectionError +from pip._internal.models.index import PyPI +from pip._internal.network.cache import is_from_cache +from pip._internal.network.utils import ( + HEADERS, + raise_for_status, + response_chunks, +) +from pip._internal.utils.misc import ( + format_size, + redact_auth_from_url, + splitext, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Iterable, Optional + + from pip._vendor.requests.models import Response + + from pip._internal.models.link import Link + from pip._internal.network.session import PipSession + +logger = logging.getLogger(__name__) + + +def _get_http_response_size(resp): + # type: (Response) -> Optional[int] + try: + return int(resp.headers['content-length']) + except (ValueError, KeyError, TypeError): + return None + + +def _prepare_download( + resp, # type: Response + link, # type: Link + progress_bar # type: str +): + # type: (...) 
-> Iterable[bytes] + total_length = _get_http_response_size(resp) + + if link.netloc == PyPI.file_storage_domain: + url = link.show_url + else: + url = link.url_without_fragment + + logged_url = redact_auth_from_url(url) + + if total_length: + logged_url = '{} ({})'.format(logged_url, format_size(total_length)) + + if is_from_cache(resp): + logger.info("Using cached %s", logged_url) + else: + logger.info("Downloading %s", logged_url) + + if logger.getEffectiveLevel() > logging.INFO: + show_progress = False + elif is_from_cache(resp): + show_progress = False + elif not total_length: + show_progress = True + elif total_length > (40 * 1000): + show_progress = True + else: + show_progress = False + + chunks = response_chunks(resp, CONTENT_CHUNK_SIZE) + + if not show_progress: + return chunks + + return DownloadProgressProvider( + progress_bar, max=total_length + )(chunks) + + +def sanitize_content_filename(filename): + # type: (str) -> str + """ + Sanitize the "filename" value from a Content-Disposition header. + """ + return os.path.basename(filename) + + +def parse_content_disposition(content_disposition, default_filename): + # type: (str, str) -> str + """ + Parse the "filename" value from a Content-Disposition header, and + return the default filename if the result is empty. + """ + _type, params = cgi.parse_header(content_disposition) + filename = params.get('filename') + if filename: + # We need to sanitize the filename to prevent directory traversal + # in case the filename contains ".." path parts. + filename = sanitize_content_filename(filename) + return filename or default_filename + + +def _get_http_response_filename(resp, link): + # type: (Response, Link) -> str + """Get an ideal filename from the given HTTP response, falling back to + the link filename if not provided. 
+ """ + filename = link.filename # fallback + # Have a look at the Content-Disposition header for a better guess + content_disposition = resp.headers.get('content-disposition') + if content_disposition: + filename = parse_content_disposition(content_disposition, filename) + ext = splitext(filename)[1] # type: Optional[str] + if not ext: + ext = mimetypes.guess_extension( + resp.headers.get('content-type', '') + ) + if ext: + filename += ext + if not ext and link.url != resp.url: + ext = os.path.splitext(resp.url)[1] + if ext: + filename += ext + return filename + + +def _http_get_download(session, link): + # type: (PipSession, Link) -> Response + target_url = link.url.split('#', 1)[0] + resp = session.get(target_url, headers=HEADERS, stream=True) + raise_for_status(resp) + return resp + + +class Download(object): + def __init__( + self, + response, # type: Response + filename, # type: str + chunks, # type: Iterable[bytes] + ): + # type: (...) -> None + self.response = response + self.filename = filename + self.chunks = chunks + + +class Downloader(object): + def __init__( + self, + session, # type: PipSession + progress_bar, # type: str + ): + # type: (...) 
-> None + self._session = session + self._progress_bar = progress_bar + + def __call__(self, link): + # type: (Link) -> Download + try: + resp = _http_get_download(self._session, link) + except NetworkConnectionError as e: + assert e.response is not None + logger.critical( + "HTTP error %s while getting %s", e.response.status_code, link + ) + raise + + return Download( + resp, + _get_http_response_filename(resp, link), + _prepare_download(resp, link, self._progress_bar), + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/lazy_wheel.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/lazy_wheel.py new file mode 100644 index 00000000..c2371bf5 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/lazy_wheel.py @@ -0,0 +1,231 @@ +"""Lazy ZIP over HTTP""" + +__all__ = ['HTTPRangeRequestUnsupported', 'dist_from_wheel_url'] + +from bisect import bisect_left, bisect_right +from contextlib import contextmanager +from tempfile import NamedTemporaryFile +from zipfile import BadZipfile, ZipFile + +from pip._vendor.requests.models import CONTENT_CHUNK_SIZE +from pip._vendor.six.moves import range + +from pip._internal.network.utils import ( + HEADERS, + raise_for_status, + response_chunks, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel + +if MYPY_CHECK_RUNNING: + from typing import Any, Dict, Iterator, List, Optional, Tuple + + from pip._vendor.pkg_resources import Distribution + from pip._vendor.requests.models import Response + + from pip._internal.network.session import PipSession + + +class HTTPRangeRequestUnsupported(Exception): + pass + + +def dist_from_wheel_url(name, url, session): + # type: (str, str, PipSession) -> Distribution + """Return a pkg_resources.Distribution from the given wheel URL. 
+ + This uses HTTP range requests to only fetch the potion of the wheel + containing metadata, just enough for the object to be constructed. + If such requests are not supported, HTTPRangeRequestUnsupported + is raised. + """ + with LazyZipOverHTTP(url, session) as wheel: + # For read-only ZIP files, ZipFile only needs methods read, + # seek, seekable and tell, not the whole IO protocol. + zip_file = ZipFile(wheel) # type: ignore + # After context manager exit, wheel.name + # is an invalid file by intention. + return pkg_resources_distribution_for_wheel(zip_file, name, wheel.name) + + +class LazyZipOverHTTP(object): + """File-like object mapped to a ZIP file over HTTP. + + This uses HTTP range requests to lazily fetch the file's content, + which is supposed to be fed to ZipFile. If such requests are not + supported by the server, raise HTTPRangeRequestUnsupported + during initialization. + """ + + def __init__(self, url, session, chunk_size=CONTENT_CHUNK_SIZE): + # type: (str, PipSession, int) -> None + head = session.head(url, headers=HEADERS) + raise_for_status(head) + assert head.status_code == 200 + self._session, self._url, self._chunk_size = session, url, chunk_size + self._length = int(head.headers['Content-Length']) + self._file = NamedTemporaryFile() + self.truncate(self._length) + self._left = [] # type: List[int] + self._right = [] # type: List[int] + if 'bytes' not in head.headers.get('Accept-Ranges', 'none'): + raise HTTPRangeRequestUnsupported('range request is not supported') + self._check_zip() + + @property + def mode(self): + # type: () -> str + """Opening mode, which is always rb.""" + return 'rb' + + @property + def name(self): + # type: () -> str + """Path to the underlying file.""" + return self._file.name + + def seekable(self): + # type: () -> bool + """Return whether random access is supported, which is True.""" + return True + + def close(self): + # type: () -> None + """Close the file.""" + self._file.close() + + @property + def 
closed(self): + # type: () -> bool + """Whether the file is closed.""" + return self._file.closed + + def read(self, size=-1): + # type: (int) -> bytes + """Read up to size bytes from the object and return them. + + As a convenience, if size is unspecified or -1, + all bytes until EOF are returned. Fewer than + size bytes may be returned if EOF is reached. + """ + start, length = self.tell(), self._length + stop = start + size if 0 <= size <= length-start else length + self._download(start, stop-1) + return self._file.read(size) + + def readable(self): + # type: () -> bool + """Return whether the file is readable, which is True.""" + return True + + def seek(self, offset, whence=0): + # type: (int, int) -> int + """Change stream position and return the new absolute position. + + Seek to offset relative position indicated by whence: + * 0: Start of stream (the default). pos should be >= 0; + * 1: Current position - pos may be negative; + * 2: End of stream - pos usually negative. + """ + return self._file.seek(offset, whence) + + def tell(self): + # type: () -> int + """Return the current possition.""" + return self._file.tell() + + def truncate(self, size=None): + # type: (Optional[int]) -> int + """Resize the stream to the given size in bytes. + + If size is unspecified resize to the current position. + The current stream position isn't changed. + + Return the new file size. + """ + return self._file.truncate(size) + + def writable(self): + # type: () -> bool + """Return False.""" + return False + + def __enter__(self): + # type: () -> LazyZipOverHTTP + self._file.__enter__() + return self + + def __exit__(self, *exc): + # type: (*Any) -> Optional[bool] + return self._file.__exit__(*exc) + + @contextmanager + def _stay(self): + # type: ()-> Iterator[None] + """Return a context manager keeping the position. + + At the end of the block, seek back to original position. 
+ """ + pos = self.tell() + try: + yield + finally: + self.seek(pos) + + def _check_zip(self): + # type: () -> None + """Check and download until the file is a valid ZIP.""" + end = self._length - 1 + for start in reversed(range(0, end, self._chunk_size)): + self._download(start, end) + with self._stay(): + try: + # For read-only ZIP files, ZipFile only needs + # methods read, seek, seekable and tell. + ZipFile(self) # type: ignore + except BadZipfile: + pass + else: + break + + def _stream_response(self, start, end, base_headers=HEADERS): + # type: (int, int, Dict[str, str]) -> Response + """Return HTTP response to a range request from start to end.""" + headers = {'Range': 'bytes={}-{}'.format(start, end)} + headers.update(base_headers) + return self._session.get(self._url, headers=headers, stream=True) + + def _merge(self, start, end, left, right): + # type: (int, int, int, int) -> Iterator[Tuple[int, int]] + """Return an iterator of intervals to be fetched. + + Args: + start (int): Start of needed interval + end (int): End of needed interval + left (int): Index of first overlapping downloaded data + right (int): Index after last overlapping downloaded data + """ + lslice, rslice = self._left[left:right], self._right[left:right] + i = start = min([start]+lslice[:1]) + end = max([end]+rslice[-1:]) + for j, k in zip(lslice, rslice): + if j > i: + yield i, j-1 + i = k + 1 + if i <= end: + yield i, end + self._left[left:right], self._right[left:right] = [start], [end] + + def _download(self, start, end): + # type: (int, int) -> None + """Download bytes from start to end inclusively.""" + with self._stay(): + left = bisect_left(self._right, start) + right = bisect_right(self._left, end) + for start, end in self._merge(start, end, left, right): + response = self._stream_response(start, end) + response.raise_for_status() + self.seek(start) + for chunk in response_chunks(response, self._chunk_size): + self._file.write(chunk) diff --git 
a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/session.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/session.py new file mode 100644 index 00000000..39a4a546 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/session.py @@ -0,0 +1,421 @@ +"""PipSession and supporting code, containing all pip-specific +network request configuration and behavior. +""" + +# The following comment should be removed at some point in the future. +# mypy: disallow-untyped-defs=False + +import email.utils +import json +import logging +import mimetypes +import os +import platform +import sys +import warnings + +from pip._vendor import requests, six, urllib3 +from pip._vendor.cachecontrol import CacheControlAdapter +from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter +from pip._vendor.requests.models import Response +from pip._vendor.requests.structures import CaseInsensitiveDict +from pip._vendor.six.moves.urllib import parse as urllib_parse +from pip._vendor.urllib3.exceptions import InsecureRequestWarning + +from pip import __version__ +from pip._internal.network.auth import MultiDomainBasicAuth +from pip._internal.network.cache import SafeFileCache +# Import ssl from compat so the initial import occurs in only one place. +from pip._internal.utils.compat import has_tls, ipaddress +from pip._internal.utils.glibc import libc_ver +from pip._internal.utils.misc import ( + build_url_from_netloc, + get_installed_version, + parse_netloc, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import url_to_path + +if MYPY_CHECK_RUNNING: + from typing import ( + Iterator, List, Optional, Tuple, Union, + ) + + from pip._internal.models.link import Link + + SecureOrigin = Tuple[str, str, Optional[Union[int, str]]] + + +logger = logging.getLogger(__name__) + + +# Ignore warning raised when using --trusted-host. 
+warnings.filterwarnings("ignore", category=InsecureRequestWarning) + + +SECURE_ORIGINS = [ + # protocol, hostname, port + # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC) + ("https", "*", "*"), + ("*", "localhost", "*"), + ("*", "127.0.0.0/8", "*"), + ("*", "::1/128", "*"), + ("file", "*", None), + # ssh is always secure. + ("ssh", "*", "*"), +] # type: List[SecureOrigin] + + +# These are environment variables present when running under various +# CI systems. For each variable, some CI systems that use the variable +# are indicated. The collection was chosen so that for each of a number +# of popular systems, at least one of the environment variables is used. +# This list is used to provide some indication of and lower bound for +# CI traffic to PyPI. Thus, it is okay if the list is not comprehensive. +# For more background, see: https://github.com/pypa/pip/issues/5499 +CI_ENVIRONMENT_VARIABLES = ( + # Azure Pipelines + 'BUILD_BUILDID', + # Jenkins + 'BUILD_ID', + # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI + 'CI', + # Explicit environment variable. + 'PIP_IS_CI', +) + + +def looks_like_ci(): + # type: () -> bool + """ + Return whether it looks like pip is running under CI. + """ + # We don't use the method of checking for a tty (e.g. using isatty()) + # because some CI systems mimic a tty (e.g. Travis CI). Thus that + # method doesn't provide definitive information in either direction. + return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES) + + +def user_agent(): + """ + Return a string representing the user agent. 
+ """ + data = { + "installer": {"name": "pip", "version": __version__}, + "python": platform.python_version(), + "implementation": { + "name": platform.python_implementation(), + }, + } + + if data["implementation"]["name"] == 'CPython': + data["implementation"]["version"] = platform.python_version() + elif data["implementation"]["name"] == 'PyPy': + if sys.pypy_version_info.releaselevel == 'final': + pypy_version_info = sys.pypy_version_info[:3] + else: + pypy_version_info = sys.pypy_version_info + data["implementation"]["version"] = ".".join( + [str(x) for x in pypy_version_info] + ) + elif data["implementation"]["name"] == 'Jython': + # Complete Guess + data["implementation"]["version"] = platform.python_version() + elif data["implementation"]["name"] == 'IronPython': + # Complete Guess + data["implementation"]["version"] = platform.python_version() + + if sys.platform.startswith("linux"): + from pip._vendor import distro + distro_infos = dict(filter( + lambda x: x[1], + zip(["name", "version", "id"], distro.linux_distribution()), + )) + libc = dict(filter( + lambda x: x[1], + zip(["lib", "version"], libc_ver()), + )) + if libc: + distro_infos["libc"] = libc + if distro_infos: + data["distro"] = distro_infos + + if sys.platform.startswith("darwin") and platform.mac_ver()[0]: + data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]} + + if platform.system(): + data.setdefault("system", {})["name"] = platform.system() + + if platform.release(): + data.setdefault("system", {})["release"] = platform.release() + + if platform.machine(): + data["cpu"] = platform.machine() + + if has_tls(): + import _ssl as ssl + data["openssl_version"] = ssl.OPENSSL_VERSION + + setuptools_version = get_installed_version("setuptools") + if setuptools_version is not None: + data["setuptools_version"] = setuptools_version + + # Use None rather than False so as not to give the impression that + # pip knows it is not being run under CI. 
Rather, it is a null or + # inconclusive result. Also, we include some value rather than no + # value to make it easier to know that the check has been run. + data["ci"] = True if looks_like_ci() else None + + user_data = os.environ.get("PIP_USER_AGENT_USER_DATA") + if user_data is not None: + data["user_data"] = user_data + + return "{data[installer][name]}/{data[installer][version]} {json}".format( + data=data, + json=json.dumps(data, separators=(",", ":"), sort_keys=True), + ) + + +class LocalFSAdapter(BaseAdapter): + + def send(self, request, stream=None, timeout=None, verify=None, cert=None, + proxies=None): + pathname = url_to_path(request.url) + + resp = Response() + resp.status_code = 200 + resp.url = request.url + + try: + stats = os.stat(pathname) + except OSError as exc: + resp.status_code = 404 + resp.raw = exc + else: + modified = email.utils.formatdate(stats.st_mtime, usegmt=True) + content_type = mimetypes.guess_type(pathname)[0] or "text/plain" + resp.headers = CaseInsensitiveDict({ + "Content-Type": content_type, + "Content-Length": stats.st_size, + "Last-Modified": modified, + }) + + resp.raw = open(pathname, "rb") + resp.close = resp.raw.close + + return resp + + def close(self): + pass + + +class InsecureHTTPAdapter(HTTPAdapter): + + def cert_verify(self, conn, url, verify, cert): + super(InsecureHTTPAdapter, self).cert_verify( + conn=conn, url=url, verify=False, cert=cert + ) + + +class InsecureCacheControlAdapter(CacheControlAdapter): + + def cert_verify(self, conn, url, verify, cert): + super(InsecureCacheControlAdapter, self).cert_verify( + conn=conn, url=url, verify=False, cert=cert + ) + + +class PipSession(requests.Session): + + timeout = None # type: Optional[int] + + def __init__(self, *args, **kwargs): + """ + :param trusted_hosts: Domains not to emit warnings for when not using + HTTPS. 
+ """ + retries = kwargs.pop("retries", 0) + cache = kwargs.pop("cache", None) + trusted_hosts = kwargs.pop("trusted_hosts", []) # type: List[str] + index_urls = kwargs.pop("index_urls", None) + + super(PipSession, self).__init__(*args, **kwargs) + + # Namespace the attribute with "pip_" just in case to prevent + # possible conflicts with the base class. + self.pip_trusted_origins = [] # type: List[Tuple[str, Optional[int]]] + + # Attach our User Agent to the request + self.headers["User-Agent"] = user_agent() + + # Attach our Authentication handler to the session + self.auth = MultiDomainBasicAuth(index_urls=index_urls) + + # Create our urllib3.Retry instance which will allow us to customize + # how we handle retries. + retries = urllib3.Retry( + # Set the total number of retries that a particular request can + # have. + total=retries, + + # A 503 error from PyPI typically means that the Fastly -> Origin + # connection got interrupted in some way. A 503 error in general + # is typically considered a transient error so we'll go ahead and + # retry it. + # A 500 may indicate transient error in Amazon S3 + # A 520 or 527 - may indicate transient error in CloudFlare + status_forcelist=[500, 503, 520, 527], + + # Add a small amount of back off between failed requests in + # order to prevent hammering the service. + backoff_factor=0.25, + ) + + # Our Insecure HTTPAdapter disables HTTPS validation. It does not + # support caching so we'll use it for all http:// URLs. + # If caching is disabled, we will also use it for + # https:// hosts that we've marked as ignoring + # TLS errors for (trusted-hosts). + insecure_adapter = InsecureHTTPAdapter(max_retries=retries) + + # We want to _only_ cache responses on securely fetched origins or when + # the host is specified as trusted. 
We do this because + # we can't validate the response of an insecurely/untrusted fetched + # origin, and we don't want someone to be able to poison the cache and + # require manual eviction from the cache to fix it. + if cache: + secure_adapter = CacheControlAdapter( + cache=SafeFileCache(cache), + max_retries=retries, + ) + self._trusted_host_adapter = InsecureCacheControlAdapter( + cache=SafeFileCache(cache), + max_retries=retries, + ) + else: + secure_adapter = HTTPAdapter(max_retries=retries) + self._trusted_host_adapter = insecure_adapter + + self.mount("https://", secure_adapter) + self.mount("http://", insecure_adapter) + + # Enable file:// urls + self.mount("file://", LocalFSAdapter()) + + for host in trusted_hosts: + self.add_trusted_host(host, suppress_logging=True) + + def add_trusted_host(self, host, source=None, suppress_logging=False): + # type: (str, Optional[str], bool) -> None + """ + :param host: It is okay to provide a host that has previously been + added. + :param source: An optional source string, for logging where the host + string came from. + """ + if not suppress_logging: + msg = 'adding trusted host: {!r}'.format(host) + if source is not None: + msg += ' (from {})'.format(source) + logger.info(msg) + + host_port = parse_netloc(host) + if host_port not in self.pip_trusted_origins: + self.pip_trusted_origins.append(host_port) + + self.mount( + build_url_from_netloc(host) + '/', + self._trusted_host_adapter + ) + if not host_port[1]: + # Mount wildcard ports for the same host. 
+ self.mount( + build_url_from_netloc(host) + ':', + self._trusted_host_adapter + ) + + def iter_secure_origins(self): + # type: () -> Iterator[SecureOrigin] + for secure_origin in SECURE_ORIGINS: + yield secure_origin + for host, port in self.pip_trusted_origins: + yield ('*', host, '*' if port is None else port) + + def is_secure_origin(self, location): + # type: (Link) -> bool + # Determine if this url used a secure transport mechanism + parsed = urllib_parse.urlparse(str(location)) + origin_protocol, origin_host, origin_port = ( + parsed.scheme, parsed.hostname, parsed.port, + ) + + # The protocol to use to see if the protocol matches. + # Don't count the repository type as part of the protocol: in + # cases such as "git+ssh", only use "ssh". (I.e., Only verify against + # the last scheme.) + origin_protocol = origin_protocol.rsplit('+', 1)[-1] + + # Determine if our origin is a secure origin by looking through our + # hardcoded list of secure origins, as well as any additional ones + # configured on this PackageFinder instance. + for secure_origin in self.iter_secure_origins(): + secure_protocol, secure_host, secure_port = secure_origin + if origin_protocol != secure_protocol and secure_protocol != "*": + continue + + try: + addr = ipaddress.ip_address( + None + if origin_host is None + else six.ensure_text(origin_host) + ) + network = ipaddress.ip_network( + six.ensure_text(secure_host) + ) + except ValueError: + # We don't have both a valid address or a valid network, so + # we'll check this origin against hostnames. + if ( + origin_host and + origin_host.lower() != secure_host.lower() and + secure_host != "*" + ): + continue + else: + # We have a valid address and network, so see if the address + # is contained within the network. + if addr not in network: + continue + + # Check to see if the port matches. 
+ if ( + origin_port != secure_port and + secure_port != "*" and + secure_port is not None + ): + continue + + # If we've gotten here, then this origin matches the current + # secure origin and we should return True + return True + + # If we've gotten to this point, then the origin isn't secure and we + # will not accept it as a valid location to search. We will however + # log a warning that we are ignoring it. + logger.warning( + "The repository located at %s is not a trusted or secure host and " + "is being ignored. If this repository is available via HTTPS we " + "recommend you use HTTPS instead, otherwise you may silence " + "this warning and allow it anyway with '--trusted-host %s'.", + origin_host, + origin_host, + ) + + return False + + def request(self, method, url, *args, **kwargs): + # Allow setting a default timeout on a session + kwargs.setdefault("timeout", self.timeout) + + # Dispatch the actual request + return super(PipSession, self).request(method, url, *args, **kwargs) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/utils.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/utils.py new file mode 100644 index 00000000..907b3fed --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/utils.py @@ -0,0 +1,97 @@ +from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response + +from pip._internal.exceptions import NetworkConnectionError +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Dict, Iterator + +# The following comments and HTTP headers were originally added by +# Donald Stufft in git commit 22c562429a61bb77172039e480873fb239dd8c03. +# +# We use Accept-Encoding: identity here because requests defaults to +# accepting compressed responses. This breaks in a variety of ways +# depending on how the server is configured. 
+# - Some servers will notice that the file isn't a compressible file +# and will leave the file alone and with an empty Content-Encoding +# - Some servers will notice that the file is already compressed and +# will leave the file alone, adding a Content-Encoding: gzip header +# - Some servers won't notice anything at all and will take a file +# that's already been compressed and compress it again, and set +# the Content-Encoding: gzip header +# By setting this to request only the identity encoding we're hoping +# to eliminate the third case. Hopefully there does not exist a server +# which when given a file will notice it is already compressed and that +# you're not asking for a compressed file and will then decompress it +# before sending because if that's the case I don't think it'll ever be +# possible to make this work. +HEADERS = {'Accept-Encoding': 'identity'} # type: Dict[str, str] + + +def raise_for_status(resp): + # type: (Response) -> None + http_error_msg = u'' + if isinstance(resp.reason, bytes): + # We attempt to decode utf-8 first because some servers + # choose to localize their reason strings. If the string + # isn't utf-8, we fall back to iso-8859-1 for all other + # encodings. + try: + reason = resp.reason.decode('utf-8') + except UnicodeDecodeError: + reason = resp.reason.decode('iso-8859-1') + else: + reason = resp.reason + + if 400 <= resp.status_code < 500: + http_error_msg = u'%s Client Error: %s for url: %s' % ( + resp.status_code, reason, resp.url) + + elif 500 <= resp.status_code < 600: + http_error_msg = u'%s Server Error: %s for url: %s' % ( + resp.status_code, reason, resp.url) + + if http_error_msg: + raise NetworkConnectionError(http_error_msg, response=resp) + + +def response_chunks(response, chunk_size=CONTENT_CHUNK_SIZE): + # type: (Response, int) -> Iterator[bytes] + """Given a requests Response, provide the data chunks. + """ + try: + # Special case for urllib3. 
+ for chunk in response.raw.stream( + chunk_size, + # We use decode_content=False here because we don't + # want urllib3 to mess with the raw bytes we get + # from the server. If we decompress inside of + # urllib3 then we cannot verify the checksum + # because the checksum will be of the compressed + # file. This breakage will only occur if the + # server adds a Content-Encoding header, which + # depends on how the server was configured: + # - Some servers will notice that the file isn't a + # compressible file and will leave the file alone + # and with an empty Content-Encoding + # - Some servers will notice that the file is + # already compressed and will leave the file + # alone and will add a Content-Encoding: gzip + # header + # - Some servers won't notice anything at all and + # will take a file that's already been compressed + # and compress it again and set the + # Content-Encoding: gzip header + # + # By setting this not to decode automatically we + # hope to eliminate problems with the second case. + decode_content=False, + ): + yield chunk + except AttributeError: + # Standard file-like object. 
+ while True: + chunk = response.raw.read(chunk_size) + if not chunk: + break + yield chunk diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/xmlrpc.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/xmlrpc.py new file mode 100644 index 00000000..e6112624 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/network/xmlrpc.py @@ -0,0 +1,52 @@ +"""xmlrpclib.Transport implementation +""" + +import logging + +# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is +# why we ignore the type on this import +from pip._vendor.six.moves import xmlrpc_client # type: ignore +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip._internal.exceptions import NetworkConnectionError +from pip._internal.network.utils import raise_for_status +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Dict + from pip._internal.network.session import PipSession + + +logger = logging.getLogger(__name__) + + +class PipXmlrpcTransport(xmlrpc_client.Transport): + """Provide a `xmlrpclib.Transport` implementation via a `PipSession` + object. 
+ """ + + def __init__(self, index_url, session, use_datetime=False): + # type: (str, PipSession, bool) -> None + xmlrpc_client.Transport.__init__(self, use_datetime) + index_parts = urllib_parse.urlparse(index_url) + self._scheme = index_parts.scheme + self._session = session + + def request(self, host, handler, request_body, verbose=False): + # type: (str, str, Dict[str, str], bool) -> None + parts = (self._scheme, host, handler, None, None, None) + url = urllib_parse.urlunparse(parts) + try: + headers = {'Content-Type': 'text/xml'} + response = self._session.post(url, data=request_body, + headers=headers, stream=True) + raise_for_status(response) + self.verbose = verbose + return self.parse_response(response.raw) + except NetworkConnectionError as exc: + assert exc.response + logger.critical( + "HTTP error %s while getting %s", + exc.response.status_code, url, + ) + raise diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/build/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/build/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/build/metadata.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/build/metadata.py new file mode 100644 index 00000000..cf52f8d8 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/build/metadata.py @@ -0,0 +1,37 @@ +"""Metadata generation logic for source distributions. 
+""" + +import os + +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from pip._internal.build_env import BuildEnvironment + from pip._vendor.pep517.wrappers import Pep517HookCaller + + +def generate_metadata(build_env, backend): + # type: (BuildEnvironment, Pep517HookCaller) -> str + """Generate metadata using mechanisms described in PEP 517. + + Returns the generated metadata directory. + """ + metadata_tmpdir = TempDirectory( + kind="modern-metadata", globally_managed=True + ) + + metadata_dir = metadata_tmpdir.path + + with build_env: + # Note that Pep517HookCaller implements a fallback for + # prepare_metadata_for_build_wheel, so we don't have to + # consider the possibility that this hook doesn't exist. + runner = runner_with_spinner_message("Preparing wheel metadata") + with backend.subprocess_runner(runner): + distinfo_dir = backend.prepare_metadata_for_build_wheel( + metadata_dir + ) + + return os.path.join(metadata_dir, distinfo_dir) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/build/metadata_legacy.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/build/metadata_legacy.py new file mode 100644 index 00000000..14762aef --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/build/metadata_legacy.py @@ -0,0 +1,77 @@ +"""Metadata generation logic for legacy source distributions. 
+""" + +import logging +import os + +from pip._internal.exceptions import InstallationError +from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from pip._internal.build_env import BuildEnvironment + +logger = logging.getLogger(__name__) + + +def _find_egg_info(directory): + # type: (str) -> str + """Find an .egg-info subdirectory in `directory`. + """ + filenames = [ + f for f in os.listdir(directory) if f.endswith(".egg-info") + ] + + if not filenames: + raise InstallationError( + "No .egg-info directory found in {}".format(directory) + ) + + if len(filenames) > 1: + raise InstallationError( + "More than one .egg-info directory found in {}".format( + directory + ) + ) + + return os.path.join(directory, filenames[0]) + + +def generate_metadata( + build_env, # type: BuildEnvironment + setup_py_path, # type: str + source_dir, # type: str + isolated, # type: bool + details, # type: str +): + # type: (...) -> str + """Generate metadata using setup.py-based defacto mechanisms. + + Returns the generated metadata directory. + """ + logger.debug( + 'Running setup.py (path:%s) egg_info for package %s', + setup_py_path, details, + ) + + egg_info_dir = TempDirectory( + kind="pip-egg-info", globally_managed=True + ).path + + args = make_setuptools_egg_info_args( + setup_py_path, + egg_info_dir=egg_info_dir, + no_user_config=isolated, + ) + + with build_env: + call_subprocess( + args, + cwd=source_dir, + command_desc='python setup.py egg_info', + ) + + # Return the .egg-info directory. 
+ return _find_egg_info(egg_info_dir) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/build/wheel.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/build/wheel.py new file mode 100644 index 00000000..0c28c498 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/build/wheel.py @@ -0,0 +1,46 @@ +import logging +import os + +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional + from pip._vendor.pep517.wrappers import Pep517HookCaller + +logger = logging.getLogger(__name__) + + +def build_wheel_pep517( + name, # type: str + backend, # type: Pep517HookCaller + metadata_directory, # type: str + build_options, # type: List[str] + tempd, # type: str +): + # type: (...) -> Optional[str] + """Build one InstallRequirement using the PEP 517 build process. + + Returns path to wheel if successfully built. Otherwise, returns None. 
+ """ + assert metadata_directory is not None + if build_options: + # PEP 517 does not support --build-options + logger.error('Cannot build wheel for %s using PEP 517 when ' + '--build-option is present', name) + return None + try: + logger.debug('Destination directory: %s', tempd) + + runner = runner_with_spinner_message( + 'Building wheel for {} (PEP 517)'.format(name) + ) + with backend.subprocess_runner(runner): + wheel_name = backend.build_wheel( + tempd, + metadata_directory=metadata_directory, + ) + except Exception: + logger.error('Failed building wheel for %s', name) + return None + return os.path.join(tempd, wheel_name) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/build/wheel_legacy.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/build/wheel_legacy.py new file mode 100644 index 00000000..37dc876a --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/build/wheel_legacy.py @@ -0,0 +1,115 @@ +import logging +import os.path + +from pip._internal.cli.spinners import open_spinner +from pip._internal.utils.setuptools_build import ( + make_setuptools_bdist_wheel_args, +) +from pip._internal.utils.subprocess import ( + LOG_DIVIDER, + call_subprocess, + format_command_args, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional, Text + +logger = logging.getLogger(__name__) + + +def format_command_result( + command_args, # type: List[str] + command_output, # type: Text +): + # type: (...) 
-> str + """Format command information for logging.""" + command_desc = format_command_args(command_args) + text = 'Command arguments: {}\n'.format(command_desc) + + if not command_output: + text += 'Command output: None' + elif logger.getEffectiveLevel() > logging.DEBUG: + text += 'Command output: [use --verbose to show]' + else: + if not command_output.endswith('\n'): + command_output += '\n' + text += 'Command output:\n{}{}'.format(command_output, LOG_DIVIDER) + + return text + + +def get_legacy_build_wheel_path( + names, # type: List[str] + temp_dir, # type: str + name, # type: str + command_args, # type: List[str] + command_output, # type: Text +): + # type: (...) -> Optional[str] + """Return the path to the wheel in the temporary build directory.""" + # Sort for determinism. + names = sorted(names) + if not names: + msg = ( + 'Legacy build of wheel for {!r} created no files.\n' + ).format(name) + msg += format_command_result(command_args, command_output) + logger.warning(msg) + return None + + if len(names) > 1: + msg = ( + 'Legacy build of wheel for {!r} created more than one file.\n' + 'Filenames (choosing first): {}\n' + ).format(name, names) + msg += format_command_result(command_args, command_output) + logger.warning(msg) + + return os.path.join(temp_dir, names[0]) + + +def build_wheel_legacy( + name, # type: str + setup_py_path, # type: str + source_dir, # type: str + global_options, # type: List[str] + build_options, # type: List[str] + tempd, # type: str +): + # type: (...) -> Optional[str] + """Build one unpacked package using the "legacy" build process. + + Returns path to wheel if successfully built. Otherwise, returns None. 
+ """ + wheel_args = make_setuptools_bdist_wheel_args( + setup_py_path, + global_options=global_options, + build_options=build_options, + destination_dir=tempd, + ) + + spin_message = 'Building wheel for {} (setup.py)'.format(name) + with open_spinner(spin_message) as spinner: + logger.debug('Destination directory: %s', tempd) + + try: + output = call_subprocess( + wheel_args, + cwd=source_dir, + spinner=spinner, + ) + except Exception: + spinner.finish("error") + logger.error('Failed building wheel for %s', name) + return None + + names = os.listdir(tempd) + wheel_path = get_legacy_build_wheel_path( + names=names, + temp_dir=tempd, + name=name, + command_args=wheel_args, + command_output=output, + ) + return wheel_path diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/check.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/check.py index 0b56eda4..5714915b 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/check.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/check.py @@ -7,15 +7,17 @@ from collections import namedtuple from pip._vendor.packaging.utils import canonicalize_name from pip._vendor.pkg_resources import RequirementParseError -from pip._internal.operations.prepare import make_abstract_dist +from pip._internal.distributions import ( + make_distribution_for_install_requirement, +) from pip._internal.utils.misc import get_installed_distributions from pip._internal.utils.typing import MYPY_CHECK_RUNNING logger = logging.getLogger(__name__) if MYPY_CHECK_RUNNING: - from pip._internal.req.req_install import InstallRequirement # noqa: F401 - from typing import ( # noqa: F401 + from pip._internal.req.req_install import InstallRequirement + from typing import ( Any, Callable, Dict, Optional, Set, Tuple, List ) @@ -27,6 +29,7 @@ if MYPY_CHECK_RUNNING: MissingDict = Dict[str, List[Missing]] 
ConflictingDict = Dict[str, List[Conflicting]] CheckResult = Tuple[MissingDict, ConflictingDict] + ConflictDetails = Tuple[PackageSet, CheckResult] PackageDetails = namedtuple('PackageDetails', ['version', 'requires']) @@ -47,7 +50,7 @@ def create_package_set_from_installed(**kwargs): package_set[name] = PackageDetails(dist.version, dist.requires()) except RequirementParseError as e: # Don't crash on broken metadata - logging.warning("Error parsing requirements for %s: %s", name, e) + logger.warning("Error parsing requirements for %s: %s", name, e) problems = True return package_set, problems @@ -59,19 +62,16 @@ def check_package_set(package_set, should_ignore=None): If should_ignore is passed, it should be a callable that takes a package name and returns a boolean. """ - if should_ignore is None: - def should_ignore(name): - return False - missing = dict() - conflicting = dict() + missing = {} + conflicting = {} for package_name in package_set: # Info about dependencies of package_name missing_deps = set() # type: Set[Missing] conflicting_deps = set() # type: Set[Conflicting] - if should_ignore(package_name): + if should_ignore and should_ignore(package_name): continue for req in package_set[package_name].requires: @@ -100,7 +100,7 @@ def check_package_set(package_set, should_ignore=None): def check_install_conflicts(to_install): - # type: (List[InstallRequirement]) -> Tuple[PackageSet, CheckResult] + # type: (List[InstallRequirement]) -> ConflictDetails """For checking if the dependency graph would be consistent after \ installing given requirements """ @@ -130,7 +130,10 @@ def _simulate_installation_of(to_install, package_set): # Modify it as installing requirement_set would (assuming no errors) for inst_req in to_install: - dist = make_abstract_dist(inst_req).dist() + abstract_dist = make_distribution_for_install_requirement(inst_req) + dist = abstract_dist.get_pkg_resources_distribution() + + assert dist is not None name = canonicalize_name(dist.key) 
package_set[name] = PackageDetails(dist.version, dist.requires()) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/freeze.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/freeze.py index 388bb73a..ddb9cb23 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/freeze.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/freeze.py @@ -3,7 +3,6 @@ from __future__ import absolute_import import collections import logging import os -import re from pip._vendor import six from pip._vendor.packaging.utils import canonicalize_name @@ -11,20 +10,26 @@ from pip._vendor.pkg_resources import RequirementParseError from pip._internal.exceptions import BadCommand, InstallationError from pip._internal.req.constructors import ( - install_req_from_editable, install_req_from_line, + install_req_from_editable, + install_req_from_line, ) from pip._internal.req.req_file import COMMENT_RE +from pip._internal.utils.direct_url_helpers import ( + direct_url_as_pep440_direct_reference, + dist_get_direct_url, +) from pip._internal.utils.misc import ( - dist_is_editable, get_installed_distributions, + dist_is_editable, + get_installed_distributions, ) from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 + from typing import ( Iterator, Optional, List, Container, Set, Dict, Tuple, Iterable, Union ) - from pip._internal.cache import WheelCache # noqa: F401 - from pip._vendor.pkg_resources import ( # noqa: F401 + from pip._internal.cache import WheelCache + from pip._vendor.pkg_resources import ( Distribution, Requirement ) @@ -37,9 +42,9 @@ logger = logging.getLogger(__name__) def freeze( requirement=None, # type: Optional[List[str]] find_links=None, # type: Optional[List[str]] - local_only=None, # type: Optional[bool] - user_only=None, # type: Optional[bool] - 
skip_regex=None, # type: Optional[str] + local_only=False, # type: bool + user_only=False, # type: bool + paths=None, # type: Optional[List[str]] isolated=False, # type: bool wheel_cache=None, # type: Optional[WheelCache] exclude_editable=False, # type: bool @@ -47,28 +52,32 @@ def freeze( ): # type: (...) -> Iterator[str] find_links = find_links or [] - skip_match = None - - if skip_regex: - skip_match = re.compile(skip_regex).search for link in find_links: - yield '-f %s' % link + yield '-f {}'.format(link) installations = {} # type: Dict[str, FrozenRequirement] - for dist in get_installed_distributions(local_only=local_only, - skip=(), - user_only=user_only): + + for dist in get_installed_distributions( + local_only=local_only, + skip=(), + user_only=user_only, + paths=paths + ): try: req = FrozenRequirement.from_dist(dist) - except RequirementParseError: + except RequirementParseError as exc: + # We include dist rather than dist.project_name because the + # dist string includes more information, like the version and + # location. We also include the exception message to aid + # troubleshooting. 
logger.warning( - "Could not parse requirement: %s", - dist.project_name + 'Could not generate requirement for distribution %r: %s', + dist, exc ) continue if exclude_editable and req.editable: continue - installations[req.name] = req + installations[req.canonical_name] = req if requirement: # the options that don't get turned into an InstallRequirement @@ -84,16 +93,15 @@ def freeze( for line in req_file: if (not line.strip() or line.strip().startswith('#') or - (skip_match and skip_match(line)) or line.startswith(( '-r', '--requirement', - '-Z', '--always-unzip', '-f', '--find-links', '-i', '--index-url', '--pre', '--trusted-host', '--process-dependency-links', - '--extra-index-url'))): + '--extra-index-url', + '--use-feature'))): line = line.rstrip() if line not in emitted_options: emitted_options.add(line) @@ -108,13 +116,11 @@ def freeze( line_req = install_req_from_editable( line, isolated=isolated, - wheel_cache=wheel_cache, ) else: line_req = install_req_from_line( COMMENT_RE.sub('', line).strip(), isolated=isolated, - wheel_cache=wheel_cache, ) if not line_req.name: @@ -127,22 +133,27 @@ def freeze( " (add #egg=PackageName to the URL to avoid" " this warning)" ) - elif line_req.name not in installations: - # either it's not installed, or it is installed - # but has been processed already - if not req_files[line_req.name]: - logger.warning( - "Requirement file [%s] contains %s, but " - "package %r is not installed", - req_file_path, - COMMENT_RE.sub('', line).strip(), line_req.name - ) + else: + line_req_canonical_name = canonicalize_name( + line_req.name) + if line_req_canonical_name not in installations: + # either it's not installed, or it is installed + # but has been processed already + if not req_files[line_req.name]: + logger.warning( + "Requirement file [%s] contains %s, but " + "package %r is not installed", + req_file_path, + COMMENT_RE.sub('', line).strip(), + line_req.name + ) + else: + req_files[line_req.name].append(req_file_path) else: + 
yield str(installations[ + line_req_canonical_name]).rstrip() + del installations[line_req_canonical_name] req_files[line_req.name].append(req_file_path) - else: - yield str(installations[line_req.name]).rstrip() - del installations[line_req.name] - req_files[line_req.name].append(req_file_path) # Warn about requirements that were included multiple times (in a # single requirements file or in different requirements files). @@ -157,7 +168,7 @@ def freeze( ) for installation in sorted( installations.values(), key=lambda x: x.name.lower()): - if canonicalize_name(installation.name) not in skip: + if installation.canonical_name not in skip: yield str(installation).rstrip() @@ -173,12 +184,12 @@ def get_requirement_info(dist): location = os.path.normcase(os.path.abspath(dist.location)) from pip._internal.vcs import vcs, RemoteNotFoundError - vc_type = vcs.get_backend_type(location) + vcs_backend = vcs.get_backend_for_dir(location) - if not vc_type: + if vcs_backend is None: req = dist.as_requirement() logger.debug( - 'No VCS found for editable requirement {!r} in: {!r}', req, + 'No VCS found for editable requirement "%s" in: %r', req, location, ) comments = [ @@ -187,12 +198,12 @@ def get_requirement_info(dist): return (location, True, comments) try: - req = vc_type.get_src_requirement(location, dist.project_name) + req = vcs_backend.get_src_requirement(location, dist.project_name) except RemoteNotFoundError: req = dist.as_requirement() comments = [ '# Editable {} install with no remote ({})'.format( - vc_type.__name__, req, + type(vcs_backend).__name__, req, ) ] return (location, True, comments) @@ -202,7 +213,7 @@ def get_requirement_info(dist): 'cannot determine version of editable source in %s ' '(%s command not found in path)', location, - vc_type.name, + vcs_backend.name, ) return (None, True, []) @@ -227,6 +238,7 @@ class FrozenRequirement(object): def __init__(self, name, req, editable, comments=()): # type: (str, Union[str, Requirement], bool, Iterable[str]) -> 
None self.name = name + self.canonical_name = canonicalize_name(name) self.req = req self.editable = editable self.comments = comments @@ -234,14 +246,27 @@ class FrozenRequirement(object): @classmethod def from_dist(cls, dist): # type: (Distribution) -> FrozenRequirement + # TODO `get_requirement_info` is taking care of editable requirements. + # TODO This should be refactored when we will add detection of + # editable that provide .dist-info metadata. req, editable, comments = get_requirement_info(dist) + if req is None and not editable: + # if PEP 610 metadata is present, attempt to use it + direct_url = dist_get_direct_url(dist) + if direct_url: + req = direct_url_as_pep440_direct_reference( + direct_url, dist.project_name + ) + comments = [] if req is None: + # name==version requirement req = dist.as_requirement() return cls(dist.project_name, req, editable, comments=comments) def __str__(self): + # type: () -> str req = self.req if self.editable: - req = '-e %s' % req + req = '-e {}'.format(req) return '\n'.join(list(self.comments) + [str(req)]) + '\n' diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/install/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/install/__init__.py new file mode 100644 index 00000000..24d6a5dd --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/install/__init__.py @@ -0,0 +1,2 @@ +"""For modules related to installing packages. 
+""" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/install/editable_legacy.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/install/editable_legacy.py new file mode 100644 index 00000000..a668a61d --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/install/editable_legacy.py @@ -0,0 +1,52 @@ +"""Legacy editable installation process, i.e. `setup.py develop`. +""" +import logging + +from pip._internal.utils.logging import indent_log +from pip._internal.utils.setuptools_build import make_setuptools_develop_args +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional, Sequence + + from pip._internal.build_env import BuildEnvironment + + +logger = logging.getLogger(__name__) + + +def install_editable( + install_options, # type: List[str] + global_options, # type: Sequence[str] + prefix, # type: Optional[str] + home, # type: Optional[str] + use_user_site, # type: bool + name, # type: str + setup_py_path, # type: str + isolated, # type: bool + build_env, # type: BuildEnvironment + unpacked_source_directory, # type: str +): + # type: (...) -> None + """Install a package in editable mode. Most arguments are pass-through + to setuptools. 
+ """ + logger.info('Running setup.py develop for %s', name) + + args = make_setuptools_develop_args( + setup_py_path, + global_options=global_options, + install_options=install_options, + no_user_config=isolated, + prefix=prefix, + home=home, + use_user_site=use_user_site, + ) + + with indent_log(): + with build_env: + call_subprocess( + args, + cwd=unpacked_source_directory, + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/install/legacy.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/install/legacy.py new file mode 100644 index 00000000..87227d5f --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/install/legacy.py @@ -0,0 +1,130 @@ +"""Legacy installation process, i.e. `setup.py install`. +""" + +import logging +import os +import sys +from distutils.util import change_root + +from pip._internal.exceptions import InstallationError +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ensure_dir +from pip._internal.utils.setuptools_build import make_setuptools_install_args +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional, Sequence + + from pip._internal.build_env import BuildEnvironment + from pip._internal.models.scheme import Scheme + + +logger = logging.getLogger(__name__) + + +class LegacyInstallFailure(Exception): + def __init__(self): + # type: () -> None + self.parent = sys.exc_info() + + +def install( + install_options, # type: List[str] + global_options, # type: Sequence[str] + root, # type: Optional[str] + home, # type: Optional[str] + prefix, # type: Optional[str] + use_user_site, # type: bool + pycompile, # type: bool + scheme, # type: Scheme + setup_py_path, # type: str + 
isolated, # type: bool + req_name, # type: str + build_env, # type: BuildEnvironment + unpacked_source_directory, # type: str + req_description, # type: str +): + # type: (...) -> bool + + header_dir = scheme.headers + + with TempDirectory(kind="record") as temp_dir: + try: + record_filename = os.path.join(temp_dir.path, 'install-record.txt') + install_args = make_setuptools_install_args( + setup_py_path, + global_options=global_options, + install_options=install_options, + record_filename=record_filename, + root=root, + prefix=prefix, + header_dir=header_dir, + home=home, + use_user_site=use_user_site, + no_user_config=isolated, + pycompile=pycompile, + ) + + runner = runner_with_spinner_message( + "Running setup.py install for {}".format(req_name) + ) + with indent_log(), build_env: + runner( + cmd=install_args, + cwd=unpacked_source_directory, + ) + + if not os.path.exists(record_filename): + logger.debug('Record file %s not found', record_filename) + # Signal to the caller that we didn't install the new package + return False + + except Exception: + # Signal to the caller that we didn't install the new package + raise LegacyInstallFailure + + # At this point, we have successfully installed the requirement. + + # We intentionally do not use any encoding to read the file because + # setuptools writes the file using distutils.file_util.write_file, + # which does not specify an encoding. + with open(record_filename) as f: + record_lines = f.read().splitlines() + + def prepend_root(path): + # type: (str) -> str + if root is None or not os.path.isabs(path): + return path + else: + return change_root(root, path) + + for line in record_lines: + directory = os.path.dirname(line) + if directory.endswith('.egg-info'): + egg_info_dir = prepend_root(directory) + break + else: + message = ( + "{} did not indicate that it installed an " + ".egg-info directory. Only setup.py projects " + "generating .egg-info directories are supported." 
+ ).format(req_description) + raise InstallationError(message) + + new_lines = [] + for line in record_lines: + filename = line.strip() + if os.path.isdir(filename): + filename += os.path.sep + new_lines.append( + os.path.relpath(prepend_root(filename), egg_info_dir) + ) + new_lines.sort() + ensure_dir(egg_info_dir) + inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt') + with open(inst_files_path, 'w') as f: + f.write('\n'.join(new_lines) + '\n') + + return True diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/install/wheel.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/install/wheel.py new file mode 100644 index 00000000..8f73a88b --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/install/wheel.py @@ -0,0 +1,830 @@ +"""Support for installing and building the "wheel" binary package format. +""" + +from __future__ import absolute_import + +import collections +import compileall +import contextlib +import csv +import importlib +import logging +import os.path +import re +import shutil +import sys +import warnings +from base64 import urlsafe_b64encode +from itertools import chain, starmap +from zipfile import ZipFile + +from pip._vendor import pkg_resources +from pip._vendor.distlib.scripts import ScriptMaker +from pip._vendor.distlib.util import get_export_entry +from pip._vendor.six import ( + PY2, + ensure_str, + ensure_text, + itervalues, + reraise, + text_type, +) +from pip._vendor.six.moves import filterfalse, map + +from pip._internal.exceptions import InstallationError +from pip._internal.locations import get_major_minor_version +from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl +from pip._internal.models.scheme import SCHEME_KEYS +from pip._internal.utils.filesystem import adjacent_tmp_file, replace +from pip._internal.utils.misc import ( + captured_stdout, + ensure_dir, + 
hash_file, + partition, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.unpacking import ( + current_umask, + is_within_directory, + set_extracted_file_to_default_mode_plus_executable, + zip_item_is_executable, +) +from pip._internal.utils.wheel import ( + parse_wheel, + pkg_resources_distribution_for_wheel, +) + +# Use the custom cast function at runtime to make cast work, +# and import typing.cast when performing pre-commit and type +# checks +if not MYPY_CHECK_RUNNING: + from pip._internal.utils.typing import cast +else: + from email.message import Message + from typing import ( + Any, + Callable, + Dict, + IO, + Iterable, + Iterator, + List, + NewType, + Optional, + Protocol, + Sequence, + Set, + Tuple, + Union, + cast, + ) + + from pip._vendor.pkg_resources import Distribution + + from pip._internal.models.scheme import Scheme + from pip._internal.utils.filesystem import NamedTemporaryFileResult + + RecordPath = NewType('RecordPath', text_type) + InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]] + + class File(Protocol): + src_record_path = None # type: RecordPath + dest_path = None # type: text_type + changed = None # type: bool + + def save(self): + # type: () -> None + pass + + +logger = logging.getLogger(__name__) + + +def rehash(path, blocksize=1 << 20): + # type: (text_type, int) -> Tuple[str, str] + """Return (encoded_digest, length) for path using hashlib.sha256()""" + h, length = hash_file(path, blocksize) + digest = 'sha256=' + urlsafe_b64encode( + h.digest() + ).decode('latin1').rstrip('=') + # unicode/str python2 issues + return (digest, str(length)) # type: ignore + + +def csv_io_kwargs(mode): + # type: (str) -> Dict[str, Any] + """Return keyword arguments to properly open a CSV file + in the given mode. 
+ """ + if PY2: + return {'mode': '{}b'.format(mode)} + else: + return {'mode': mode, 'newline': '', 'encoding': 'utf-8'} + + +def fix_script(path): + # type: (text_type) -> bool + """Replace #!python with #!/path/to/python + Return True if file was changed. + """ + # XXX RECORD hashes will need to be updated + assert os.path.isfile(path) + + with open(path, 'rb') as script: + firstline = script.readline() + if not firstline.startswith(b'#!python'): + return False + exename = sys.executable.encode(sys.getfilesystemencoding()) + firstline = b'#!' + exename + os.linesep.encode("ascii") + rest = script.read() + with open(path, 'wb') as script: + script.write(firstline) + script.write(rest) + return True + + +def wheel_root_is_purelib(metadata): + # type: (Message) -> bool + return metadata.get("Root-Is-Purelib", "").lower() == "true" + + +def get_entrypoints(distribution): + # type: (Distribution) -> Tuple[Dict[str, str], Dict[str, str]] + # get the entry points and then the script names + try: + console = distribution.get_entry_map('console_scripts') + gui = distribution.get_entry_map('gui_scripts') + except KeyError: + # Our dict-based Distribution raises KeyError if entry_points.txt + # doesn't exist. + return {}, {} + + def _split_ep(s): + # type: (pkg_resources.EntryPoint) -> Tuple[str, str] + """get the string representation of EntryPoint, + remove space and split on '=' + """ + split_parts = str(s).replace(" ", "").split("=") + return split_parts[0], split_parts[1] + + # convert the EntryPoint objects into strings with module:function + console = dict(_split_ep(v) for v in console.values()) + gui = dict(_split_ep(v) for v in gui.values()) + return console, gui + + +def message_about_scripts_not_on_PATH(scripts): + # type: (Sequence[str]) -> Optional[str] + """Determine if any scripts are not on PATH and format a warning. + Returns a warning message if one or more scripts are not on PATH, + otherwise None. 
+ """ + if not scripts: + return None + + # Group scripts by the path they were installed in + grouped_by_dir = collections.defaultdict(set) # type: Dict[str, Set[str]] + for destfile in scripts: + parent_dir = os.path.dirname(destfile) + script_name = os.path.basename(destfile) + grouped_by_dir[parent_dir].add(script_name) + + # We don't want to warn for directories that are on PATH. + not_warn_dirs = [ + os.path.normcase(i).rstrip(os.sep) for i in + os.environ.get("PATH", "").split(os.pathsep) + ] + # If an executable sits with sys.executable, we don't warn for it. + # This covers the case of venv invocations without activating the venv. + not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable))) + warn_for = { + parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items() + if os.path.normcase(parent_dir) not in not_warn_dirs + } # type: Dict[str, Set[str]] + if not warn_for: + return None + + # Format a message + msg_lines = [] + for parent_dir, dir_scripts in warn_for.items(): + sorted_scripts = sorted(dir_scripts) # type: List[str] + if len(sorted_scripts) == 1: + start_text = "script {} is".format(sorted_scripts[0]) + else: + start_text = "scripts {} are".format( + ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1] + ) + + msg_lines.append( + "The {} installed in '{}' which is not on PATH." + .format(start_text, parent_dir) + ) + + last_line_fmt = ( + "Consider adding {} to PATH or, if you prefer " + "to suppress this warning, use --no-warn-script-location." + ) + if len(msg_lines) == 1: + msg_lines.append(last_line_fmt.format("this directory")) + else: + msg_lines.append(last_line_fmt.format("these directories")) + + # Add a note if any directory starts with ~ + warn_for_tilde = any( + i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i + ) + if warn_for_tilde: + tilde_warning_msg = ( + "NOTE: The current PATH contains path(s) starting with `~`, " + "which may not be expanded by all applications." 
+ ) + msg_lines.append(tilde_warning_msg) + + # Returns the formatted multiline message + return "\n".join(msg_lines) + + +def _normalized_outrows(outrows): + # type: (Iterable[InstalledCSVRow]) -> List[Tuple[str, str, str]] + """Normalize the given rows of a RECORD file. + + Items in each row are converted into str. Rows are then sorted to make + the value more predictable for tests. + + Each row is a 3-tuple (path, hash, size) and corresponds to a record of + a RECORD file (see PEP 376 and PEP 427 for details). For the rows + passed to this function, the size can be an integer as an int or string, + or the empty string. + """ + # Normally, there should only be one row per path, in which case the + # second and third elements don't come into play when sorting. + # However, in cases in the wild where a path might happen to occur twice, + # we don't want the sort operation to trigger an error (but still want + # determinism). Since the third element can be an int or string, we + # coerce each element to a string to avoid a TypeError in this case. 
+ # For additional background, see-- + # https://github.com/pypa/pip/issues/5868 + return sorted( + (ensure_str(record_path, encoding='utf-8'), hash_, str(size)) + for record_path, hash_, size in outrows + ) + + +def _record_to_fs_path(record_path): + # type: (RecordPath) -> text_type + return record_path + + +def _fs_to_record_path(path, relative_to=None): + # type: (text_type, Optional[text_type]) -> RecordPath + if relative_to is not None: + # On Windows, do not handle relative paths if they belong to different + # logical disks + if os.path.splitdrive(path)[0].lower() == \ + os.path.splitdrive(relative_to)[0].lower(): + path = os.path.relpath(path, relative_to) + path = path.replace(os.path.sep, '/') + return cast('RecordPath', path) + + +def _parse_record_path(record_column): + # type: (str) -> RecordPath + p = ensure_text(record_column, encoding='utf-8') + return cast('RecordPath', p) + + +def get_csv_rows_for_installed( + old_csv_rows, # type: List[List[str]] + installed, # type: Dict[RecordPath, RecordPath] + changed, # type: Set[RecordPath] + generated, # type: List[str] + lib_dir, # type: str +): + # type: (...) -> List[InstalledCSVRow] + """ + :param installed: A map from archive RECORD path to installation RECORD + path. 
+ """ + installed_rows = [] # type: List[InstalledCSVRow] + for row in old_csv_rows: + if len(row) > 3: + logger.warning('RECORD line has more than three elements: %s', row) + old_record_path = _parse_record_path(row[0]) + new_record_path = installed.pop(old_record_path, old_record_path) + if new_record_path in changed: + digest, length = rehash(_record_to_fs_path(new_record_path)) + else: + digest = row[1] if len(row) > 1 else '' + length = row[2] if len(row) > 2 else '' + installed_rows.append((new_record_path, digest, length)) + for f in generated: + path = _fs_to_record_path(f, lib_dir) + digest, length = rehash(f) + installed_rows.append((path, digest, length)) + for installed_record_path in itervalues(installed): + installed_rows.append((installed_record_path, '', '')) + return installed_rows + + +def get_console_script_specs(console): + # type: (Dict[str, str]) -> List[str] + """ + Given the mapping from entrypoint name to callable, return the relevant + console script specs. + """ + # Don't mutate caller's version + console = console.copy() + + scripts_to_generate = [] + + # Special case pip and setuptools to generate versioned wrappers + # + # The issue is that some projects (specifically, pip and setuptools) use + # code in setup.py to create "versioned" entry points - pip2.7 on Python + # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into + # the wheel metadata at build time, and so if the wheel is installed with + # a *different* version of Python the entry points will be wrong. The + # correct fix for this is to enhance the metadata to be able to describe + # such versioned entry points, but that won't happen till Metadata 2.0 is + # available. + # In the meantime, projects using versioned entry points will either have + # incorrect versioned entry points, or they will not be able to distribute + # "universal" wheels (i.e., they will need a wheel per Python version). 
+ # + # Because setuptools and pip are bundled with _ensurepip and virtualenv, + # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we + # override the versioned entry points in the wheel and generate the + # correct ones. This code is purely a short-term measure until Metadata 2.0 + # is available. + # + # To add the level of hack in this section of code, in order to support + # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment + # variable which will control which version scripts get installed. + # + # ENSUREPIP_OPTIONS=altinstall + # - Only pipX.Y and easy_install-X.Y will be generated and installed + # ENSUREPIP_OPTIONS=install + # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note + # that this option is technically if ENSUREPIP_OPTIONS is set and is + # not altinstall + # DEFAULT + # - The default behavior is to install pip, pipX, pipX.Y, easy_install + # and easy_install-X.Y. + pip_script = console.pop('pip', None) + if pip_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + scripts_to_generate.append('pip = ' + pip_script) + + if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": + scripts_to_generate.append( + 'pip{} = {}'.format(sys.version_info[0], pip_script) + ) + + scripts_to_generate.append( + 'pip{} = {}'.format(get_major_minor_version(), pip_script) + ) + # Delete any other versioned pip entry points + pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)] + for k in pip_ep: + del console[k] + easy_install_script = console.pop('easy_install', None) + if easy_install_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + scripts_to_generate.append( + 'easy_install = ' + easy_install_script + ) + + scripts_to_generate.append( + 'easy_install-{} = {}'.format( + get_major_minor_version(), easy_install_script + ) + ) + # Delete any other versioned easy_install entry points + easy_install_ep = [ + k for k in console if re.match(r'easy_install(-\d\.\d)?$', k) + ] + for k in 
easy_install_ep: + del console[k] + + # Generate the console entry points specified in the wheel + scripts_to_generate.extend(starmap('{} = {}'.format, console.items())) + + return scripts_to_generate + + +class ZipBackedFile(object): + def __init__(self, src_record_path, dest_path, zip_file): + # type: (RecordPath, text_type, ZipFile) -> None + self.src_record_path = src_record_path + self.dest_path = dest_path + self._zip_file = zip_file + self.changed = False + + def save(self): + # type: () -> None + # directory creation is lazy and after file filtering + # to ensure we don't install empty dirs; empty dirs can't be + # uninstalled. + parent_dir = os.path.dirname(self.dest_path) + ensure_dir(parent_dir) + + # When we open the output file below, any existing file is truncated + # before we start writing the new contents. This is fine in most + # cases, but can cause a segfault if pip has loaded a shared + # object (e.g. from pyopenssl through its vendored urllib3) + # Since the shared object is mmap'd an attempt to call a + # symbol in it will then cause a segfault. Unlinking the file + # allows writing of new contents while allowing the process to + # continue to use the old copy. 
+ if os.path.exists(self.dest_path): + os.unlink(self.dest_path) + + with self._zip_file.open(self.src_record_path) as f: + with open(self.dest_path, "wb") as dest: + shutil.copyfileobj(f, dest) + + zipinfo = self._zip_file.getinfo(self.src_record_path) + if zip_item_is_executable(zipinfo): + set_extracted_file_to_default_mode_plus_executable(self.dest_path) + + +class ScriptFile(object): + def __init__(self, file): + # type: (File) -> None + self._file = file + self.src_record_path = self._file.src_record_path + self.dest_path = self._file.dest_path + self.changed = False + + def save(self): + # type: () -> None + self._file.save() + self.changed = fix_script(self.dest_path) + + +class MissingCallableSuffix(InstallationError): + def __init__(self, entry_point): + # type: (str) -> None + super(MissingCallableSuffix, self).__init__( + "Invalid script entry point: {} - A callable " + "suffix is required. Cf https://packaging.python.org/" + "specifications/entry-points/#use-for-scripts for more " + "information.".format(entry_point) + ) + + +def _raise_for_invalid_entrypoint(specification): + # type: (str) -> None + entry = get_export_entry(specification) + if entry is not None and entry.suffix is None: + raise MissingCallableSuffix(str(entry)) + + +class PipScriptMaker(ScriptMaker): + def make(self, specification, options=None): + # type: (str, Dict[str, Any]) -> List[str] + _raise_for_invalid_entrypoint(specification) + return super(PipScriptMaker, self).make(specification, options) + + +def _install_wheel( + name, # type: str + wheel_zip, # type: ZipFile + wheel_path, # type: str + scheme, # type: Scheme + pycompile=True, # type: bool + warn_script_location=True, # type: bool + direct_url=None, # type: Optional[DirectUrl] + requested=False, # type: bool +): + # type: (...) -> None + """Install a wheel. 
+ + :param name: Name of the project to install + :param wheel_zip: open ZipFile for wheel being installed + :param scheme: Distutils scheme dictating the install directories + :param req_description: String used in place of the requirement, for + logging + :param pycompile: Whether to byte-compile installed Python files + :param warn_script_location: Whether to check that scripts are installed + into a directory on PATH + :raises UnsupportedWheel: + * when the directory holds an unpacked wheel with incompatible + Wheel-Version + * when the .dist-info dir does not match the wheel + """ + info_dir, metadata = parse_wheel(wheel_zip, name) + + if wheel_root_is_purelib(metadata): + lib_dir = scheme.purelib + else: + lib_dir = scheme.platlib + + # Record details of the files moved + # installed = files copied from the wheel to the destination + # changed = files changed while installing (scripts #! line typically) + # generated = files newly generated during the install (script wrappers) + installed = {} # type: Dict[RecordPath, RecordPath] + changed = set() # type: Set[RecordPath] + generated = [] # type: List[str] + + def record_installed(srcfile, destfile, modified=False): + # type: (RecordPath, text_type, bool) -> None + """Map archive RECORD paths to installation RECORD paths.""" + newpath = _fs_to_record_path(destfile, lib_dir) + installed[srcfile] = newpath + if modified: + changed.add(_fs_to_record_path(destfile)) + + def all_paths(): + # type: () -> Iterable[RecordPath] + names = wheel_zip.namelist() + # If a flag is set, names may be unicode in Python 2. We convert to + # text explicitly so these are valid for lookup in RECORD. 
+ decoded_names = map(ensure_text, names) + for name in decoded_names: + yield cast("RecordPath", name) + + def is_dir_path(path): + # type: (RecordPath) -> bool + return path.endswith("/") + + def assert_no_path_traversal(dest_dir_path, target_path): + # type: (text_type, text_type) -> None + if not is_within_directory(dest_dir_path, target_path): + message = ( + "The wheel {!r} has a file {!r} trying to install" + " outside the target directory {!r}" + ) + raise InstallationError( + message.format(wheel_path, target_path, dest_dir_path) + ) + + def root_scheme_file_maker(zip_file, dest): + # type: (ZipFile, text_type) -> Callable[[RecordPath], File] + def make_root_scheme_file(record_path): + # type: (RecordPath) -> File + normed_path = os.path.normpath(record_path) + dest_path = os.path.join(dest, normed_path) + assert_no_path_traversal(dest, dest_path) + return ZipBackedFile(record_path, dest_path, zip_file) + + return make_root_scheme_file + + def data_scheme_file_maker(zip_file, scheme): + # type: (ZipFile, Scheme) -> Callable[[RecordPath], File] + scheme_paths = {} + for key in SCHEME_KEYS: + encoded_key = ensure_text(key) + scheme_paths[encoded_key] = ensure_text( + getattr(scheme, key), encoding=sys.getfilesystemencoding() + ) + + def make_data_scheme_file(record_path): + # type: (RecordPath) -> File + normed_path = os.path.normpath(record_path) + _, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2) + scheme_path = scheme_paths[scheme_key] + dest_path = os.path.join(scheme_path, dest_subpath) + assert_no_path_traversal(scheme_path, dest_path) + return ZipBackedFile(record_path, dest_path, zip_file) + + return make_data_scheme_file + + def is_data_scheme_path(path): + # type: (RecordPath) -> bool + return path.split("/", 1)[0].endswith(".data") + + paths = all_paths() + file_paths = filterfalse(is_dir_path, paths) + root_scheme_paths, data_scheme_paths = partition( + is_data_scheme_path, file_paths + ) + + make_root_scheme_file = 
root_scheme_file_maker( + wheel_zip, + ensure_text(lib_dir, encoding=sys.getfilesystemencoding()), + ) + files = map(make_root_scheme_file, root_scheme_paths) + + def is_script_scheme_path(path): + # type: (RecordPath) -> bool + parts = path.split("/", 2) + return ( + len(parts) > 2 and + parts[0].endswith(".data") and + parts[1] == "scripts" + ) + + other_scheme_paths, script_scheme_paths = partition( + is_script_scheme_path, data_scheme_paths + ) + + make_data_scheme_file = data_scheme_file_maker(wheel_zip, scheme) + other_scheme_files = map(make_data_scheme_file, other_scheme_paths) + files = chain(files, other_scheme_files) + + # Get the defined entry points + distribution = pkg_resources_distribution_for_wheel( + wheel_zip, name, wheel_path + ) + console, gui = get_entrypoints(distribution) + + def is_entrypoint_wrapper(file): + # type: (File) -> bool + # EP, EP.exe and EP-script.py are scripts generated for + # entry point EP by setuptools + path = file.dest_path + name = os.path.basename(path) + if name.lower().endswith('.exe'): + matchname = name[:-4] + elif name.lower().endswith('-script.py'): + matchname = name[:-10] + elif name.lower().endswith(".pya"): + matchname = name[:-4] + else: + matchname = name + # Ignore setuptools-generated scripts + return (matchname in console or matchname in gui) + + script_scheme_files = map(make_data_scheme_file, script_scheme_paths) + script_scheme_files = filterfalse( + is_entrypoint_wrapper, script_scheme_files + ) + script_scheme_files = map(ScriptFile, script_scheme_files) + files = chain(files, script_scheme_files) + + for file in files: + file.save() + record_installed(file.src_record_path, file.dest_path, file.changed) + + def pyc_source_file_paths(): + # type: () -> Iterator[text_type] + # We de-duplicate installation paths, since there can be overlap (e.g. + # file in .data maps to same location as file in wheel root). 
+ # Sorting installation paths makes it easier to reproduce and debug + # issues related to permissions on existing files. + for installed_path in sorted(set(installed.values())): + full_installed_path = os.path.join(lib_dir, installed_path) + if not os.path.isfile(full_installed_path): + continue + if not full_installed_path.endswith('.py'): + continue + yield full_installed_path + + def pyc_output_path(path): + # type: (text_type) -> text_type + """Return the path the pyc file would have been written to. + """ + if PY2: + if sys.flags.optimize: + return path + 'o' + else: + return path + 'c' + else: + return importlib.util.cache_from_source(path) + + # Compile all of the pyc files for the installed files + if pycompile: + with captured_stdout() as stdout: + with warnings.catch_warnings(): + warnings.filterwarnings('ignore') + for path in pyc_source_file_paths(): + # Python 2's `compileall.compile_file` requires a str in + # error cases, so we must convert to the native type. + path_arg = ensure_str( + path, encoding=sys.getfilesystemencoding() + ) + success = compileall.compile_file( + path_arg, force=True, quiet=True + ) + if success: + pyc_path = pyc_output_path(path) + assert os.path.exists(pyc_path) + pyc_record_path = cast( + "RecordPath", pyc_path.replace(os.path.sep, "/") + ) + record_installed(pyc_record_path, pyc_path) + logger.debug(stdout.getvalue()) + + maker = PipScriptMaker(None, scheme.scripts) + + # Ensure old scripts are overwritten. + # See https://github.com/pypa/pip/issues/1800 + maker.clobber = True + + # Ensure we don't generate any variants for scripts because this is almost + # never what somebody wants. + # See https://bitbucket.org/pypa/distlib/issue/35/ + maker.variants = {''} + + # This is required because otherwise distlib creates scripts that are not + # executable. 
+ # See https://bitbucket.org/pypa/distlib/issue/32/ + maker.set_mode = True + + # Generate the console and GUI entry points specified in the wheel + scripts_to_generate = get_console_script_specs(console) + + gui_scripts_to_generate = list(starmap('{} = {}'.format, gui.items())) + + generated_console_scripts = maker.make_multiple(scripts_to_generate) + generated.extend(generated_console_scripts) + + generated.extend( + maker.make_multiple(gui_scripts_to_generate, {'gui': True}) + ) + + if warn_script_location: + msg = message_about_scripts_not_on_PATH(generated_console_scripts) + if msg is not None: + logger.warning(msg) + + generated_file_mode = 0o666 & ~current_umask() + + @contextlib.contextmanager + def _generate_file(path, **kwargs): + # type: (str, **Any) -> Iterator[NamedTemporaryFileResult] + with adjacent_tmp_file(path, **kwargs) as f: + yield f + os.chmod(f.name, generated_file_mode) + replace(f.name, path) + + dest_info_dir = os.path.join(lib_dir, info_dir) + + # Record pip as the installer + installer_path = os.path.join(dest_info_dir, 'INSTALLER') + with _generate_file(installer_path) as installer_file: + installer_file.write(b'pip\n') + generated.append(installer_path) + + # Record the PEP 610 direct URL reference + if direct_url is not None: + direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME) + with _generate_file(direct_url_path) as direct_url_file: + direct_url_file.write(direct_url.to_json().encode("utf-8")) + generated.append(direct_url_path) + + # Record the REQUESTED file + if requested: + requested_path = os.path.join(dest_info_dir, 'REQUESTED') + with open(requested_path, "w"): + pass + generated.append(requested_path) + + record_text = distribution.get_metadata('RECORD') + record_rows = list(csv.reader(record_text.splitlines())) + + rows = get_csv_rows_for_installed( + record_rows, + installed=installed, + changed=changed, + generated=generated, + lib_dir=lib_dir) + + # Record details of all files installed + 
record_path = os.path.join(dest_info_dir, 'RECORD') + + with _generate_file(record_path, **csv_io_kwargs('w')) as record_file: + # The type mypy infers for record_file is different for Python 3 + # (typing.IO[Any]) and Python 2 (typing.BinaryIO). We explicitly + # cast to typing.IO[str] as a workaround. + writer = csv.writer(cast('IO[str]', record_file)) + writer.writerows(_normalized_outrows(rows)) + + +@contextlib.contextmanager +def req_error_context(req_description): + # type: (str) -> Iterator[None] + try: + yield + except InstallationError as e: + message = "For req: {}. {}".format(req_description, e.args[0]) + reraise( + InstallationError, InstallationError(message), sys.exc_info()[2] + ) + + +def install_wheel( + name, # type: str + wheel_path, # type: str + scheme, # type: Scheme + req_description, # type: str + pycompile=True, # type: bool + warn_script_location=True, # type: bool + direct_url=None, # type: Optional[DirectUrl] + requested=False, # type: bool +): + # type: (...) -> None + with ZipFile(wheel_path, allowZip64=True) as z: + with req_error_context(req_description): + _install_wheel( + name=name, + wheel_zip=z, + wheel_path=wheel_path, + scheme=scheme, + pycompile=pycompile, + warn_script_location=warn_script_location, + direct_url=direct_url, + requested=requested, + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/prepare.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/prepare.py index 4f31dd5a..a5455fcc 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/prepare.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/operations/prepare.py @@ -1,173 +1,317 @@ """Prepares a distribution for installation """ +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False + import logging +import mimetypes import os +import shutil -from pip._vendor import pkg_resources, requests +from pip._vendor.six import PY2 -from pip._internal.build_env import BuildEnvironment -from pip._internal.download import ( - is_dir_url, is_file_url, is_vcs_url, unpack_url, url_to_path, +from pip._internal.distributions import ( + make_distribution_for_install_requirement, ) +from pip._internal.distributions.installed import InstalledDistribution from pip._internal.exceptions import ( - DirectoryUrlHashUnsupported, HashUnpinned, InstallationError, - PreviousBuildDirError, VcsHashUnsupported, + DirectoryUrlHashUnsupported, + HashMismatch, + HashUnpinned, + InstallationError, + NetworkConnectionError, + PreviousBuildDirError, + VcsHashUnsupported, ) -from pip._internal.utils.compat import expanduser +from pip._internal.utils.filesystem import copy2_fixed from pip._internal.utils.hashes import MissingHashes from pip._internal.utils.logging import indent_log -from pip._internal.utils.misc import display_path, normalize_path +from pip._internal.utils.misc import ( + display_path, + hide_url, + path_to_display, + rmtree, +) +from pip._internal.utils.temp_dir import TempDirectory from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.unpacking import unpack_file from pip._internal.vcs import vcs if MYPY_CHECK_RUNNING: - from typing import Any, Optional # noqa: F401 - from pip._internal.req.req_install import InstallRequirement # noqa: F401 - from pip._internal.index import PackageFinder # noqa: F401 - from pip._internal.download import PipSession # noqa: F401 - from pip._internal.req.req_tracker import RequirementTracker # noqa: F401 + from typing import ( + Callable, List, Optional, Tuple, + ) + + from mypy_extensions import TypedDict + + from pip._internal.distributions import AbstractDistribution + from pip._internal.index.package_finder import PackageFinder + from pip._internal.models.link import 
Link + from pip._internal.network.download import Downloader + from pip._internal.req.req_install import InstallRequirement + from pip._internal.req.req_tracker import RequirementTracker + from pip._internal.utils.hashes import Hashes + + if PY2: + CopytreeKwargs = TypedDict( + 'CopytreeKwargs', + { + 'ignore': Callable[[str, List[str]], List[str]], + 'symlinks': bool, + }, + total=False, + ) + else: + CopytreeKwargs = TypedDict( + 'CopytreeKwargs', + { + 'copy_function': Callable[[str, str], None], + 'ignore': Callable[[str, List[str]], List[str]], + 'ignore_dangling_symlinks': bool, + 'symlinks': bool, + }, + total=False, + ) logger = logging.getLogger(__name__) -def make_abstract_dist(req): - # type: (InstallRequirement) -> DistAbstraction - """Factory to make an abstract dist object. - - Preconditions: Either an editable req with a source_dir, or satisfied_by or - a wheel link, or a non-editable req with a source_dir. - - :return: A concrete DistAbstraction. - """ - if req.editable: - return IsSDist(req) - elif req.link and req.link.is_wheel: - return IsWheel(req) - else: - return IsSDist(req) - - -class DistAbstraction(object): - """Abstracts out the wheel vs non-wheel Resolver.resolve() logic. - - The requirements for anything installable are as follows: - - we must be able to determine the requirement name - (or we can't correctly handle the non-upgrade case). - - we must be able to generate a list of run-time dependencies - without installing any additional packages (or we would - have to either burn time by doing temporary isolated installs - or alternatively violate pips 'don't start installing unless - all requirements are available' rule - neither of which are - desirable). - - for packages with setup requirements, we must also be able - to determine their requirements without installing additional - packages (for the same reason as run-time dependencies) - - we must be able to create a Distribution object exposing the - above metadata. 
+def _get_prepared_distribution( + req, # type: InstallRequirement + req_tracker, # type: RequirementTracker + finder, # type: PackageFinder + build_isolation # type: bool +): + # type: (...) -> AbstractDistribution + """Prepare a distribution for installation. """ + abstract_dist = make_distribution_for_install_requirement(req) + with req_tracker.track(req): + abstract_dist.prepare_distribution_metadata(finder, build_isolation) + return abstract_dist + + +def unpack_vcs_link(link, location): + # type: (Link, str) -> None + vcs_backend = vcs.get_backend_for_scheme(link.scheme) + assert vcs_backend is not None + vcs_backend.unpack(location, url=hide_url(link.url)) + + +class File(object): + def __init__(self, path, content_type): + # type: (str, str) -> None + self.path = path + self.content_type = content_type + + +def get_http_url( + link, # type: Link + downloader, # type: Downloader + download_dir=None, # type: Optional[str] + hashes=None, # type: Optional[Hashes] +): + # type: (...) -> File + temp_dir = TempDirectory(kind="unpack", globally_managed=True) + # If a download dir is specified, is the file already downloaded there? 
+ already_downloaded_path = None + if download_dir: + already_downloaded_path = _check_download_dir( + link, download_dir, hashes + ) - def __init__(self, req): - # type: (InstallRequirement) -> None - self.req = req # type: InstallRequirement - - def dist(self): - # type: () -> Any - """Return a setuptools Dist object.""" - raise NotImplementedError - - def prep_for_dist(self, finder, build_isolation): - # type: (PackageFinder, bool) -> Any - """Ensure that we can get a Dist for this requirement.""" - raise NotImplementedError - + if already_downloaded_path: + from_path = already_downloaded_path + content_type = mimetypes.guess_type(from_path)[0] + else: + # let's download to a tmp dir + from_path, content_type = _download_http_url( + link, downloader, temp_dir.path, hashes + ) -class IsWheel(DistAbstraction): + return File(from_path, content_type) - def dist(self): - # type: () -> pkg_resources.Distribution - return list(pkg_resources.find_distributions( - self.req.source_dir))[0] - def prep_for_dist(self, finder, build_isolation): - # type: (PackageFinder, bool) -> Any - # FIXME:https://github.com/pypa/pip/issues/1112 - pass +def _copy2_ignoring_special_files(src, dest): + # type: (str, str) -> None + """Copying special files is not supported, but as a convenience to users + we skip errors copying them. This supports tools that may create e.g. + socket files in the project source directory. + """ + try: + copy2_fixed(src, dest) + except shutil.SpecialFileError as e: + # SpecialFileError may be raised due to either the source or + # destination. If the destination was the cause then we would actually + # care, but since the destination directory is deleted prior to + # copy we ignore all of them assuming it is caused by the source. 
+ logger.warning( + "Ignoring special file error '%s' encountered copying %s to %s.", + str(e), + path_to_display(src), + path_to_display(dest), + ) -class IsSDist(DistAbstraction): +def _copy_source_tree(source, target): + # type: (str, str) -> None + target_abspath = os.path.abspath(target) + target_basename = os.path.basename(target_abspath) + target_dirname = os.path.dirname(target_abspath) + + def ignore(d, names): + # type: (str, List[str]) -> List[str] + skipped = [] # type: List[str] + if d == source: + # Pulling in those directories can potentially be very slow, + # exclude the following directories if they appear in the top + # level dir (and only it). + # See discussion at https://github.com/pypa/pip/pull/6770 + skipped += ['.tox', '.nox'] + if os.path.abspath(d) == target_dirname: + # Prevent an infinite recursion if the target is in source. + # This can happen when TMPDIR is set to ${PWD}/... + # and we copy PWD to TMPDIR. + skipped += [target_basename] + return skipped + + kwargs = dict(ignore=ignore, symlinks=True) # type: CopytreeKwargs + + if not PY2: + # Python 2 does not support copy_function, so we only ignore + # errors on special file copy in Python 3. + kwargs['copy_function'] = _copy2_ignoring_special_files + + shutil.copytree(source, target, **kwargs) + + +def get_file_url( + link, # type: Link + download_dir=None, # type: Optional[str] + hashes=None # type: Optional[Hashes] +): + # type: (...) -> File + """Get file and optionally check its hash. + """ + # If a download dir is specified, is the file already there and valid? + already_downloaded_path = None + if download_dir: + already_downloaded_path = _check_download_dir( + link, download_dir, hashes + ) - def dist(self): - return self.req.get_dist() + if already_downloaded_path: + from_path = already_downloaded_path + else: + from_path = link.file_path + + # If --require-hashes is off, `hashes` is either empty, the + # link's embedded hash, or MissingHashes; it is required to + # match. 
If --require-hashes is on, we are satisfied by any + # hash in `hashes` matching: a URL-based or an option-based + # one; no internet-sourced hash will be in `hashes`. + if hashes: + hashes.check_against_path(from_path) + + content_type = mimetypes.guess_type(from_path)[0] + + return File(from_path, content_type) + + +def unpack_url( + link, # type: Link + location, # type: str + downloader, # type: Downloader + download_dir=None, # type: Optional[str] + hashes=None, # type: Optional[Hashes] +): + # type: (...) -> Optional[File] + """Unpack link into location, downloading if required. + + :param hashes: A Hashes object, one of whose embedded hashes must match, + or HashMismatch will be raised. If the Hashes is empty, no matches are + required, and unhashable types of requirements (like VCS ones, which + would ordinarily raise HashUnsupported) are allowed. + """ + # non-editable vcs urls + if link.is_vcs: + unpack_vcs_link(link, location) + return None + + # If it's a url to a local directory + if link.is_existing_dir(): + if os.path.isdir(location): + rmtree(location) + _copy_source_tree(link.file_path, location) + return None + + # file urls + if link.is_file: + file = get_file_url(link, download_dir, hashes=hashes) + + # http urls + else: + file = get_http_url( + link, + downloader, + download_dir, + hashes=hashes, + ) - def prep_for_dist(self, finder, build_isolation): - # type: (PackageFinder, bool) -> None - # Prepare for building. We need to: - # 1. Load pyproject.toml (if it exists) - # 2. Set up the build environment + # unpack the archive to the build dir location. 
even when only downloading + # archives, they have to be unpacked to parse dependencies, except wheels + if not link.is_wheel: + unpack_file(file.path, location, file.content_type) - self.req.load_pyproject_toml() - should_isolate = self.req.use_pep517 and build_isolation + return file - def _raise_conflicts(conflicting_with, conflicting_reqs): - raise InstallationError( - "Some build dependencies for %s conflict with %s: %s." % ( - self.req, conflicting_with, ', '.join( - '%s is incompatible with %s' % (installed, wanted) - for installed, wanted in sorted(conflicting)))) - if should_isolate: - # Isolate in a BuildEnvironment and install the build-time - # requirements. - self.req.build_env = BuildEnvironment() - self.req.build_env.install_requirements( - finder, self.req.pyproject_requires, 'overlay', - "Installing build dependencies" - ) - conflicting, missing = self.req.build_env.check_requirements( - self.req.requirements_to_check - ) - if conflicting: - _raise_conflicts("PEP 517/518 supported requirements", - conflicting) - if missing: - logger.warning( - "Missing build requirements in pyproject.toml for %s.", - self.req, - ) - logger.warning( - "The project does not specify a build backend, and " - "pip cannot fall back to setuptools without %s.", - " and ".join(map(repr, sorted(missing))) - ) - # Install any extra build dependencies that the backend requests. - # This must be done in a second pass, as the pyproject.toml - # dependencies must be installed before we can call the backend. - with self.req.build_env: - # We need to have the env active when calling the hook. 
- self.req.spin_message = "Getting requirements to build wheel" - reqs = self.req.pep517_backend.get_requires_for_build_wheel() - conflicting, missing = self.req.build_env.check_requirements(reqs) - if conflicting: - _raise_conflicts("the backend dependencies", conflicting) - self.req.build_env.install_requirements( - finder, missing, 'normal', - "Installing backend dependencies" - ) +def _download_http_url( + link, # type: Link + downloader, # type: Downloader + temp_dir, # type: str + hashes, # type: Optional[Hashes] +): + # type: (...) -> Tuple[str, str] + """Download link url into temp_dir using provided session""" + download = downloader(link) - self.req.prepare_metadata() - self.req.assert_source_matches_version() + file_path = os.path.join(temp_dir, download.filename) + with open(file_path, 'wb') as content_file: + for chunk in download.chunks: + content_file.write(chunk) + if hashes: + hashes.check_against_path(file_path) -class Installed(DistAbstraction): + return file_path, download.response.headers.get('content-type', '') - def dist(self): - # type: () -> pkg_resources.Distribution - return self.req.satisfied_by - def prep_for_dist(self, finder, build_isolation): - # type: (PackageFinder, bool) -> Any - pass +def _check_download_dir(link, download_dir, hashes): + # type: (Link, str, Optional[Hashes]) -> Optional[str] + """ Check download_dir for previously downloaded file with correct hash + If a correct file is found return its path else None + """ + download_path = os.path.join(download_dir, link.filename) + + if not os.path.exists(download_path): + return None + + # If already downloaded, does its hash match? + logger.info('File was already downloaded %s', download_path) + if hashes: + try: + hashes.check_against_path(download_path) + except HashMismatch: + logger.warning( + 'Previously-downloaded file %s has bad hash. 
' + 'Re-downloading.', + download_path + ) + os.unlink(download_path) + return None + return download_path class RequirementPreparer(object): @@ -180,9 +324,12 @@ class RequirementPreparer(object): download_dir, # type: Optional[str] src_dir, # type: str wheel_download_dir, # type: Optional[str] - progress_bar, # type: str build_isolation, # type: bool - req_tracker # type: RequirementTracker + req_tracker, # type: RequirementTracker + downloader, # type: Downloader + finder, # type: PackageFinder + require_hashes, # type: bool + use_user_site, # type: bool ): # type: (...) -> None super(RequirementPreparer, self).__init__() @@ -190,16 +337,16 @@ class RequirementPreparer(object): self.src_dir = src_dir self.build_dir = build_dir self.req_tracker = req_tracker + self.downloader = downloader + self.finder = finder - # Where still packed archives should be written to. If None, they are + # Where still-packed archives should be written to. If None, they are # not saved, and are deleted immediately after unpacking. self.download_dir = download_dir # Where still-packed .whl files should be written to. If None, they are # written to the download_dir parameter. Separate to download_dir to # permit only keeping wheel archives for pip wheel. - if wheel_download_dir: - wheel_download_dir = normalize_path(wheel_download_dir) self.wheel_download_dir = wheel_download_dir # NOTE @@ -207,160 +354,158 @@ class RequirementPreparer(object): # be combined if we're willing to have non-wheel archives present in # the wheelhouse output by 'pip wheel'. - self.progress_bar = progress_bar - # Is build isolation allowed? self.build_isolation = build_isolation + # Should hash-checking be required? + self.require_hashes = require_hashes + + # Should install in user site-packages? 
+ self.use_user_site = use_user_site + @property def _download_should_save(self): # type: () -> bool - # TODO: Modify to reduce indentation needed - if self.download_dir: - self.download_dir = expanduser(self.download_dir) - if os.path.exists(self.download_dir): - return True - else: - logger.critical('Could not find download directory') - raise InstallationError( - "Could not find or access download directory '%s'" - % display_path(self.download_dir)) - return False + if not self.download_dir: + return False - def prepare_linked_requirement( - self, - req, # type: InstallRequirement - session, # type: PipSession - finder, # type: PackageFinder - upgrade_allowed, # type: bool - require_hashes # type: bool - ): - # type: (...) -> DistAbstraction - """Prepare a requirement that would be obtained from req.link - """ - # TODO: Breakup into smaller functions - if req.link and req.link.scheme == 'file': - path = url_to_path(req.link.url) + if os.path.exists(self.download_dir): + return True + + logger.critical('Could not find download directory') + raise InstallationError( + "Could not find or access download directory '{}'" + .format(self.download_dir)) + + def _log_preparing_link(self, req): + # type: (InstallRequirement) -> None + """Log the way the link prepared.""" + if req.link.is_file: + path = req.link.file_path logger.info('Processing %s', display_path(path)) else: - logger.info('Collecting %s', req) + logger.info('Collecting %s', req.req or req) + + def _ensure_link_req_src_dir(self, req, download_dir, parallel_builds): + # type: (InstallRequirement, Optional[str], bool) -> None + """Ensure source_dir of a linked InstallRequirement.""" + # Since source_dir is only set for editable requirements. + if req.link.is_wheel: + # We don't need to unpack wheels, so no need for a source + # directory. + return + assert req.source_dir is None + # We always delete unpacked sdists after pip runs. 
+ req.ensure_has_source_dir( + self.build_dir, + autodelete=True, + parallel_builds=parallel_builds, + ) - with indent_log(): - # @@ if filesystem packages are not marked - # editable in a req, a non deterministic error - # occurs when the script attempts to unpack the - # build directory - req.ensure_has_source_dir(self.build_dir) - # If a checkout exists, it's unwise to keep going. version - # inconsistencies are logged later, but do not fail the - # installation. - # FIXME: this won't upgrade when there's an existing - # package unpacked in `req.source_dir` - # package unpacked in `req.source_dir` - if os.path.exists(os.path.join(req.source_dir, 'setup.py')): - raise PreviousBuildDirError( - "pip can't proceed with requirements '%s' due to a" - " pre-existing build directory (%s). This is " - "likely due to a previous installation that failed" - ". pip is being responsible and not assuming it " - "can delete this. Please delete it and try again." - % (req, req.source_dir) - ) - req.populate_link(finder, upgrade_allowed, require_hashes) - - # We can't hit this spot and have populate_link return None. - # req.satisfied_by is None here (because we're - # guarded) and upgrade has no impact except when satisfied_by - # is not None. - # Then inside find_requirement existing_applicable -> False - # If no new versions are found, DistributionNotFound is raised, - # otherwise a result is guaranteed. - assert req.link - link = req.link - - # Now that we have the real link, we can tell what kind of - # requirements we have and raise some more informative errors - # than otherwise. (For example, we can raise VcsHashUnsupported - # for a VCS URL rather than HashMissing.) - if require_hashes: - # We could check these first 2 conditions inside - # unpack_url and save repetition of conditions, but then - # we would report less-useful error messages for - # unhashable requirements, complaining that there's no - # hash provided. 
- if is_vcs_url(link): - raise VcsHashUnsupported() - elif is_file_url(link) and is_dir_url(link): - raise DirectoryUrlHashUnsupported() - if not req.original_link and not req.is_pinned: - # Unpinned packages are asking for trouble when a new - # version is uploaded. This isn't a security check, but - # it saves users a surprising hash mismatch in the - # future. - # - # file:/// URLs aren't pinnable, so don't complain - # about them not being pinned. - raise HashUnpinned() - - hashes = req.hashes(trust_internet=not require_hashes) - if require_hashes and not hashes: - # Known-good hashes are missing for this requirement, so - # shim it with a facade object that will provoke hash - # computation and then raise a HashMissing exception - # showing the user what the hash should be. - hashes = MissingHashes() + # If a checkout exists, it's unwise to keep going. version + # inconsistencies are logged later, but do not fail the + # installation. + # FIXME: this won't upgrade when there's an existing + # package unpacked in `req.source_dir` + if os.path.exists(os.path.join(req.source_dir, 'setup.py')): + raise PreviousBuildDirError( + "pip can't proceed with requirements '{}' due to a" + "pre-existing build directory ({}). This is likely " + "due to a previous installation that failed . pip is " + "being responsible and not assuming it can delete this. " + "Please delete it and try again.".format(req, req.source_dir) + ) + + def _get_linked_req_hashes(self, req): + # type: (InstallRequirement) -> Hashes + # By the time this is called, the requirement's link should have + # been checked so we can tell what kind of requirements req is + # and raise some more informative errors than otherwise. + # (For example, we can raise VcsHashUnsupported for a VCS URL + # rather than HashMissing.) 
+ if not self.require_hashes: + return req.hashes(trust_internet=True) + + # We could check these first 2 conditions inside unpack_url + # and save repetition of conditions, but then we would + # report less-useful error messages for unhashable + # requirements, complaining that there's no hash provided. + if req.link.is_vcs: + raise VcsHashUnsupported() + if req.link.is_existing_dir(): + raise DirectoryUrlHashUnsupported() + + # Unpinned packages are asking for trouble when a new version + # is uploaded. This isn't a security check, but it saves users + # a surprising hash mismatch in the future. + # file:/// URLs aren't pinnable, so don't complain about them + # not being pinned. + if req.original_link is None and not req.is_pinned: + raise HashUnpinned() + + # If known-good hashes are missing for this requirement, + # shim it with a facade object that will provoke hash + # computation and then raise a HashMissing exception + # showing the user what the hash should be. + return req.hashes(trust_internet=False) or MissingHashes() + + def prepare_linked_requirement(self, req, parallel_builds=False): + # type: (InstallRequirement, bool) -> AbstractDistribution + """Prepare a requirement to be obtained from req.link.""" + assert req.link + link = req.link + self._log_preparing_link(req) + if link.is_wheel and self.wheel_download_dir: + # Download wheels to a dedicated dir when doing `pip wheel`. + download_dir = self.wheel_download_dir + else: + download_dir = self.download_dir + with indent_log(): + self._ensure_link_req_src_dir(req, download_dir, parallel_builds) try: - download_dir = self.download_dir - # We always delete unpacked sdists after pip ran. - autodelete_unpacked = True - if req.link.is_wheel and self.wheel_download_dir: - # when doing 'pip wheel` we download wheels to a - # dedicated dir. - download_dir = self.wheel_download_dir - if req.link.is_wheel: - if download_dir: - # When downloading, we only unpack wheels to get - # metadata. 
- autodelete_unpacked = True - else: - # When installing a wheel, we use the unpacked - # wheel. - autodelete_unpacked = False - unpack_url( - req.link, req.source_dir, - download_dir, autodelete_unpacked, - session=session, hashes=hashes, - progress_bar=self.progress_bar - ) - except requests.HTTPError as exc: - logger.critical( - 'Could not install requirement %s because of error %s', - req, - exc, + local_file = unpack_url( + link, req.source_dir, self.downloader, download_dir, + hashes=self._get_linked_req_hashes(req) ) + except NetworkConnectionError as exc: raise InstallationError( - 'Could not install requirement %s because of HTTP ' - 'error %s for URL %s' % - (req, exc, req.link) + 'Could not install requirement {} because of HTTP ' + 'error {} for URL {}'.format(req, exc, link) ) - abstract_dist = make_abstract_dist(req) - with self.req_tracker.track(req): - abstract_dist.prep_for_dist(finder, self.build_isolation) + + # For use in later processing, preserve the file path on the + # requirement. + if local_file: + req.local_file_path = local_file.path + + abstract_dist = _get_prepared_distribution( + req, self.req_tracker, self.finder, self.build_isolation, + ) + + if download_dir: + if link.is_existing_dir(): + logger.info('Link is a directory, ignoring download_dir') + elif local_file: + download_location = os.path.join( + download_dir, link.filename + ) + if not os.path.exists(download_location): + shutil.copy(local_file.path, download_location) + download_path = display_path(download_location) + logger.info('Saved %s', download_path) + if self._download_should_save: # Make a .zip of the source_dir we already created. - if req.link.scheme in vcs.all_schemes: + if link.is_vcs: req.archive(self.download_dir) return abstract_dist def prepare_editable_requirement( self, req, # type: InstallRequirement - require_hashes, # type: bool - use_user_site, # type: bool - finder # type: PackageFinder ): - # type: (...) -> DistAbstraction + # type: (...) 
-> AbstractDistribution """Prepare an editable requirement """ assert req.editable, "cannot prepare a non-editable req as editable" @@ -368,46 +513,50 @@ class RequirementPreparer(object): logger.info('Obtaining %s', req) with indent_log(): - if require_hashes: + if self.require_hashes: raise InstallationError( - 'The editable requirement %s cannot be installed when ' + 'The editable requirement {} cannot be installed when ' 'requiring hashes, because there is no single file to ' - 'hash.' % req + 'hash.'.format(req) ) req.ensure_has_source_dir(self.src_dir) req.update_editable(not self._download_should_save) - abstract_dist = make_abstract_dist(req) - with self.req_tracker.track(req): - abstract_dist.prep_for_dist(finder, self.build_isolation) + abstract_dist = _get_prepared_distribution( + req, self.req_tracker, self.finder, self.build_isolation, + ) if self._download_should_save: req.archive(self.download_dir) - req.check_if_exists(use_user_site) + req.check_if_exists(self.use_user_site) return abstract_dist - def prepare_installed_requirement(self, req, require_hashes, skip_reason): - # type: (InstallRequirement, bool, Optional[str]) -> DistAbstraction + def prepare_installed_requirement( + self, + req, # type: InstallRequirement + skip_reason # type: str + ): + # type: (...) -> AbstractDistribution """Prepare an already-installed requirement """ assert req.satisfied_by, "req should have been satisfied but isn't" assert skip_reason is not None, ( "did not get skip reason skipped but req.satisfied_by " - "is set to %r" % (req.satisfied_by,) + "is set to {}".format(req.satisfied_by) ) logger.info( 'Requirement %s: %s (%s)', skip_reason, req, req.satisfied_by.version ) with indent_log(): - if require_hashes: + if self.require_hashes: logger.debug( 'Since it is already installed, we are trusting this ' 'package without checking its hash. To ensure a ' 'completely repeatable environment, install into an ' 'empty virtualenv.' 
) - abstract_dist = Installed(req) + abstract_dist = InstalledDistribution(req) return abstract_dist diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/pep425tags.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/pep425tags.py deleted file mode 100644 index 1e782d1a..00000000 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/pep425tags.py +++ /dev/null @@ -1,381 +0,0 @@ -"""Generate and work with PEP 425 Compatibility Tags.""" -from __future__ import absolute_import - -import distutils.util -import logging -import platform -import re -import sys -import sysconfig -import warnings -from collections import OrderedDict - -import pip._internal.utils.glibc -from pip._internal.utils.compat import get_extension_suffixes -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 - Tuple, Callable, List, Optional, Union, Dict - ) - - Pep425Tag = Tuple[str, str, str] - -logger = logging.getLogger(__name__) - -_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)') - - -def get_config_var(var): - # type: (str) -> Optional[str] - try: - return sysconfig.get_config_var(var) - except IOError as e: # Issue #1074 - warnings.warn("{}".format(e), RuntimeWarning) - return None - - -def get_abbr_impl(): - # type: () -> str - """Return abbreviated implementation name.""" - if hasattr(sys, 'pypy_version_info'): - pyimpl = 'pp' - elif sys.platform.startswith('java'): - pyimpl = 'jy' - elif sys.platform == 'cli': - pyimpl = 'ip' - else: - pyimpl = 'cp' - return pyimpl - - -def get_impl_ver(): - # type: () -> str - """Return implementation version.""" - impl_ver = get_config_var("py_version_nodot") - if not impl_ver or get_abbr_impl() == 'pp': - impl_ver = ''.join(map(str, get_impl_version_info())) - return impl_ver - - -def get_impl_version_info(): - # type: () -> Tuple[int, ...] 
- """Return sys.version_info-like tuple for use in decrementing the minor - version.""" - if get_abbr_impl() == 'pp': - # as per https://github.com/pypa/pip/issues/2882 - # attrs exist only on pypy - return (sys.version_info[0], - sys.pypy_version_info.major, # type: ignore - sys.pypy_version_info.minor) # type: ignore - else: - return sys.version_info[0], sys.version_info[1] - - -def get_impl_tag(): - # type: () -> str - """ - Returns the Tag for this specific implementation. - """ - return "{}{}".format(get_abbr_impl(), get_impl_ver()) - - -def get_flag(var, fallback, expected=True, warn=True): - # type: (str, Callable[..., bool], Union[bool, int], bool) -> bool - """Use a fallback method for determining SOABI flags if the needed config - var is unset or unavailable.""" - val = get_config_var(var) - if val is None: - if warn: - logger.debug("Config variable '%s' is unset, Python ABI tag may " - "be incorrect", var) - return fallback() - return val == expected - - -def get_abi_tag(): - # type: () -> Optional[str] - """Return the ABI tag based on SOABI (if available) or emulate SOABI - (CPython 2, PyPy).""" - soabi = get_config_var('SOABI') - impl = get_abbr_impl() - if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'): - d = '' - m = '' - u = '' - if get_flag('Py_DEBUG', - lambda: hasattr(sys, 'gettotalrefcount'), - warn=(impl == 'cp')): - d = 'd' - if get_flag('WITH_PYMALLOC', - lambda: impl == 'cp', - warn=(impl == 'cp')): - m = 'm' - if get_flag('Py_UNICODE_SIZE', - lambda: sys.maxunicode == 0x10ffff, - expected=4, - warn=(impl == 'cp' and - sys.version_info < (3, 3))) \ - and sys.version_info < (3, 3): - u = 'u' - abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u) - elif soabi and soabi.startswith('cpython-'): - abi = 'cp' + soabi.split('-')[1] - elif soabi: - abi = soabi.replace('.', '_').replace('-', '_') - else: - abi = None - return abi - - -def _is_running_32bit(): - # type: () -> bool - return sys.maxsize == 2147483647 - - -def 
get_platform(): - # type: () -> str - """Return our platform name 'win32', 'linux_x86_64'""" - if sys.platform == 'darwin': - # distutils.util.get_platform() returns the release based on the value - # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may - # be significantly older than the user's current machine. - release, _, machine = platform.mac_ver() - split_ver = release.split('.') - - if machine == "x86_64" and _is_running_32bit(): - machine = "i386" - elif machine == "ppc64" and _is_running_32bit(): - machine = "ppc" - - return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine) - - # XXX remove distutils dependency - result = distutils.util.get_platform().replace('.', '_').replace('-', '_') - if result == "linux_x86_64" and _is_running_32bit(): - # 32 bit Python program (running on a 64 bit Linux): pip should only - # install and run 32 bit compiled extensions in that case. - result = "linux_i686" - - return result - - -def is_manylinux1_compatible(): - # type: () -> bool - # Only Linux, and only x86-64 / i686 - if get_platform() not in {"linux_x86_64", "linux_i686"}: - return False - - # Check for presence of _manylinux module - try: - import _manylinux - return bool(_manylinux.manylinux1_compatible) - except (ImportError, AttributeError): - # Fall through to heuristic check below - pass - - # Check glibc version. CentOS 5 uses glibc 2.5. - return pip._internal.utils.glibc.have_compatible_glibc(2, 5) - - -def is_manylinux2010_compatible(): - # type: () -> bool - # Only Linux, and only x86-64 / i686 - if get_platform() not in {"linux_x86_64", "linux_i686"}: - return False - - # Check for presence of _manylinux module - try: - import _manylinux - return bool(_manylinux.manylinux2010_compatible) - except (ImportError, AttributeError): - # Fall through to heuristic check below - pass - - # Check glibc version. CentOS 6 uses glibc 2.12. 
- return pip._internal.utils.glibc.have_compatible_glibc(2, 12) - - -def get_darwin_arches(major, minor, machine): - # type: (int, int, str) -> List[str] - """Return a list of supported arches (including group arches) for - the given major, minor and machine architecture of an macOS machine. - """ - arches = [] - - def _supports_arch(major, minor, arch): - # type: (int, int, str) -> bool - # Looking at the application support for macOS versions in the chart - # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears - # our timeline looks roughly like: - # - # 10.0 - Introduces ppc support. - # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64 - # and x86_64 support is CLI only, and cannot be used for GUI - # applications. - # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications. - # 10.6 - Drops support for ppc64 - # 10.7 - Drops support for ppc - # - # Given that we do not know if we're installing a CLI or a GUI - # application, we must be conservative and assume it might be a GUI - # application and behave as if ppc64 and x86_64 support did not occur - # until 10.5. - # - # Note: The above information is taken from the "Application support" - # column in the chart not the "Processor support" since I believe - # that we care about what instruction sets an application can use - # not which processors the OS supports. 
- if arch == 'ppc': - return (major, minor) <= (10, 5) - if arch == 'ppc64': - return (major, minor) == (10, 5) - if arch == 'i386': - return (major, minor) >= (10, 4) - if arch == 'x86_64': - return (major, minor) >= (10, 5) - if arch in groups: - for garch in groups[arch]: - if _supports_arch(major, minor, garch): - return True - return False - - groups = OrderedDict([ - ("fat", ("i386", "ppc")), - ("intel", ("x86_64", "i386")), - ("fat64", ("x86_64", "ppc64")), - ("fat32", ("x86_64", "i386", "ppc")), - ]) # type: Dict[str, Tuple[str, ...]] - - if _supports_arch(major, minor, machine): - arches.append(machine) - - for garch in groups: - if machine in groups[garch] and _supports_arch(major, minor, garch): - arches.append(garch) - - arches.append('universal') - - return arches - - -def get_all_minor_versions_as_strings(version_info): - # type: (Tuple[int, ...]) -> List[str] - versions = [] - major = version_info[:-1] - # Support all previous minor Python versions. - for minor in range(version_info[-1], -1, -1): - versions.append(''.join(map(str, major + (minor,)))) - return versions - - -def get_supported( - versions=None, # type: Optional[List[str]] - noarch=False, # type: bool - platform=None, # type: Optional[str] - impl=None, # type: Optional[str] - abi=None # type: Optional[str] -): - # type: (...) -> List[Pep425Tag] - """Return a list of supported tags for each version specified in - `versions`. - - :param versions: a list of string versions, of the form ["33", "32"], - or None. The first version will be assumed to support our ABI. - :param platform: specify the exact platform you want valid - tags for, or None. If None, use the local system platform. - :param impl: specify the exact implementation you want valid - tags for, or None. If None, use the local interpreter impl. - :param abi: specify the exact abi you want valid - tags for, or None. If None, use the local interpreter abi. 
- """ - supported = [] - - # Versions must be given with respect to the preference - if versions is None: - version_info = get_impl_version_info() - versions = get_all_minor_versions_as_strings(version_info) - - impl = impl or get_abbr_impl() - - abis = [] # type: List[str] - - abi = abi or get_abi_tag() - if abi: - abis[0:0] = [abi] - - abi3s = set() - for suffix in get_extension_suffixes(): - if suffix.startswith('.abi'): - abi3s.add(suffix.split('.', 2)[1]) - - abis.extend(sorted(list(abi3s))) - - abis.append('none') - - if not noarch: - arch = platform or get_platform() - arch_prefix, arch_sep, arch_suffix = arch.partition('_') - if arch.startswith('macosx'): - # support macosx-10.6-intel on macosx-10.9-x86_64 - match = _osx_arch_pat.match(arch) - if match: - name, major, minor, actual_arch = match.groups() - tpl = '{}_{}_%i_%s'.format(name, major) - arches = [] - for m in reversed(range(int(minor) + 1)): - for a in get_darwin_arches(int(major), m, actual_arch): - arches.append(tpl % (m, a)) - else: - # arch pattern didn't match (?!) - arches = [arch] - elif arch_prefix == 'manylinux2010': - # manylinux1 wheels run on most manylinux2010 systems with the - # exception of wheels depending on ncurses. 
PEP 571 states - # manylinux1 wheels should be considered manylinux2010 wheels: - # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels - arches = [arch, 'manylinux1' + arch_sep + arch_suffix] - elif platform is None: - arches = [] - if is_manylinux2010_compatible(): - arches.append('manylinux2010' + arch_sep + arch_suffix) - if is_manylinux1_compatible(): - arches.append('manylinux1' + arch_sep + arch_suffix) - arches.append(arch) - else: - arches = [arch] - - # Current version, current API (built specifically for our Python): - for abi in abis: - for arch in arches: - supported.append(('%s%s' % (impl, versions[0]), abi, arch)) - - # abi3 modules compatible with older version of Python - for version in versions[1:]: - # abi3 was introduced in Python 3.2 - if version in {'31', '30'}: - break - for abi in abi3s: # empty set if not Python 3 - for arch in arches: - supported.append(("%s%s" % (impl, version), abi, arch)) - - # Has binaries, does not use the Python API: - for arch in arches: - supported.append(('py%s' % (versions[0][0]), 'none', arch)) - - # No abi / arch, but requires our implementation: - supported.append(('%s%s' % (impl, versions[0]), 'none', 'any')) - # Tagged specifically as being cross-version compatible - # (with just the major version specified) - supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any')) - - # No abi / arch, generic Python - for i, version in enumerate(versions): - supported.append(('py%s' % (version,), 'none', 'any')) - if i == 0: - supported.append(('py%s' % (version[0]), 'none', 'any')) - - return supported - - -implementation_tag = get_impl_tag() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/pyproject.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/pyproject.py index 8d739a6c..6b4faf7a 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/pyproject.py +++ 
b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/pyproject.py @@ -3,14 +3,16 @@ from __future__ import absolute_import import io import os import sys +from collections import namedtuple -from pip._vendor import pytoml, six +from pip._vendor import six, toml +from pip._vendor.packaging.requirements import InvalidRequirement, Requirement from pip._internal.exceptions import InstallationError from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Any, Tuple, Optional, List # noqa: F401 + from typing import Any, Optional, List def _is_list_of_str(obj): @@ -21,9 +23,9 @@ def _is_list_of_str(obj): ) -def make_pyproject_path(setup_py_dir): +def make_pyproject_path(unpacked_source_directory): # type: (str) -> str - path = os.path.join(setup_py_dir, 'pyproject.toml') + path = os.path.join(unpacked_source_directory, 'pyproject.toml') # Python2 __file__ should not be unicode if six.PY2 and isinstance(path, six.text_type): @@ -32,13 +34,18 @@ def make_pyproject_path(setup_py_dir): return path +BuildSystemDetails = namedtuple('BuildSystemDetails', [ + 'requires', 'backend', 'check', 'backend_path' +]) + + def load_pyproject_toml( use_pep517, # type: Optional[bool] pyproject_toml, # type: str setup_py, # type: str req_name # type: str ): - # type: (...) -> Optional[Tuple[List[str], str, List[str]]] + # type: (...) -> Optional[BuildSystemDetails] """Load the pyproject.toml file. Parameters: @@ -56,6 +63,8 @@ def load_pyproject_toml( name of PEP 517 backend, requirements we should check are installed after setting up the build environment + directory paths to import the backend from (backend-path), + relative to the project root. 
) """ has_pyproject = os.path.isfile(pyproject_toml) @@ -63,7 +72,7 @@ def load_pyproject_toml( if has_pyproject: with io.open(pyproject_toml, encoding="utf-8") as f: - pp_toml = pytoml.load(f) + pp_toml = toml.load(f) build_system = pp_toml.get("build-system") else: build_system = None @@ -150,7 +159,23 @@ def load_pyproject_toml( reason="'build-system.requires' is not a list of strings.", )) + # Each requirement must be valid as per PEP 508 + for requirement in requires: + try: + Requirement(requirement) + except InvalidRequirement: + raise InstallationError( + error_template.format( + package=req_name, + reason=( + "'build-system.requires' contains an invalid " + "requirement: {!r}".format(requirement) + ), + ) + ) + backend = build_system.get("build-backend") + backend_path = build_system.get("backend-path", []) check = [] # type: List[str] if backend is None: # If the user didn't specify a backend, we assume they want to use @@ -168,4 +193,4 @@ def load_pyproject_toml( backend = "setuptools.build_meta:__legacy__" check = ["setuptools>=40.8.0", "wheel"] - return (requires, backend, check) + return BuildSystemDetails(requires, backend, check, backend_path) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/__init__.py index 5e4eb92f..8568d3f8 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/__init__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/__init__.py @@ -1,15 +1,17 @@ from __future__ import absolute_import +import collections import logging -from .req_install import InstallRequirement -from .req_set import RequirementSet -from .req_file import parse_requirements from pip._internal.utils.logging import indent_log from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from .req_file import parse_requirements +from .req_install import 
InstallRequirement +from .req_set import RequirementSet + if MYPY_CHECK_RUNNING: - from typing import List, Sequence # noqa: F401 + from typing import Iterator, List, Optional, Sequence, Tuple __all__ = [ "RequirementSet", "InstallRequirement", @@ -19,59 +21,83 @@ __all__ = [ logger = logging.getLogger(__name__) +class InstallationResult(object): + def __init__(self, name): + # type: (str) -> None + self.name = name + + def __repr__(self): + # type: () -> str + return "InstallationResult(name={!r})".format(self.name) + + +def _validate_requirements( + requirements, # type: List[InstallRequirement] +): + # type: (...) -> Iterator[Tuple[str, InstallRequirement]] + for req in requirements: + assert req.name, "invalid to-be-installed requirement: {}".format(req) + yield req.name, req + + def install_given_reqs( - to_install, # type: List[InstallRequirement] + requirements, # type: List[InstallRequirement] install_options, # type: List[str] - global_options=(), # type: Sequence[str] - *args, **kwargs + global_options, # type: Sequence[str] + root, # type: Optional[str] + home, # type: Optional[str] + prefix, # type: Optional[str] + warn_script_location, # type: bool + use_user_site, # type: bool + pycompile, # type: bool ): - # type: (...) -> List[InstallRequirement] + # type: (...) -> List[InstallationResult] """ Install everything in the given list. 
(to be called after having downloaded and unpacked the packages) """ + to_install = collections.OrderedDict(_validate_requirements(requirements)) if to_install: logger.info( 'Installing collected packages: %s', - ', '.join([req.name for req in to_install]), + ', '.join(to_install.keys()), ) + installed = [] + with indent_log(): - for requirement in to_install: - if requirement.conflicts_with: - logger.info( - 'Found existing installation: %s', - requirement.conflicts_with, - ) + for req_name, requirement in to_install.items(): + if requirement.should_reinstall: + logger.info('Attempting uninstall: %s', req_name) with indent_log(): uninstalled_pathset = requirement.uninstall( auto_confirm=True ) + else: + uninstalled_pathset = None + try: requirement.install( install_options, global_options, - *args, - **kwargs + root=root, + home=home, + prefix=prefix, + warn_script_location=warn_script_location, + use_user_site=use_user_site, + pycompile=pycompile, ) except Exception: - should_rollback = ( - requirement.conflicts_with and - not requirement.install_succeeded - ) # if install did not succeed, rollback previous uninstall - if should_rollback: + if uninstalled_pathset and not requirement.install_succeeded: uninstalled_pathset.rollback() raise else: - should_commit = ( - requirement.conflicts_with and - requirement.install_succeeded - ) - if should_commit: + if uninstalled_pathset and requirement.install_succeeded: uninstalled_pathset.commit() - requirement.remove_temporary_source() - return to_install + installed.append(InstallationResult(req_name)) + + return installed diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/constructors.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/constructors.py index 1eed1dd3..7a4641ef 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/constructors.py +++ 
b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/constructors.py @@ -17,24 +17,24 @@ from pip._vendor.packaging.requirements import InvalidRequirement, Requirement from pip._vendor.packaging.specifiers import Specifier from pip._vendor.pkg_resources import RequirementParseError, parse_requirements -from pip._internal.download import ( - is_archive_file, is_url, path_to_url, url_to_path, -) from pip._internal.exceptions import InstallationError from pip._internal.models.index import PyPI, TestPyPI from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel from pip._internal.pyproject import make_pyproject_path from pip._internal.req.req_install import InstallRequirement -from pip._internal.utils.misc import is_installable_dir +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.filetypes import ARCHIVE_EXTENSIONS +from pip._internal.utils.misc import is_installable_dir, splitext from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.vcs import vcs -from pip._internal.wheel import Wheel +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs import is_url, vcs if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 - Optional, Tuple, Set, Any, Union, Text, Dict, + from typing import ( + Any, Dict, Optional, Set, Tuple, Union, ) - from pip._internal.cache import WheelCache # noqa: F401 + from pip._internal.req.req_file import ParsedRequirement __all__ = [ @@ -46,6 +46,15 @@ logger = logging.getLogger(__name__) operators = Specifier._operators.keys() +def is_archive_file(name): + # type: (str) -> bool + """Return True if `name` is a considered as an archive file.""" + ext = splitext(name)[1].lower() + if ext in ARCHIVE_EXTENSIONS: + return True + return False + + def _strip_extras(path): # type: (str) -> Tuple[str, Optional[str]] m = re.match(r'^(.+)(\[[^\]]+\])$', path) @@ -59,8 +68,15 @@ def _strip_extras(path): return path_no_extras, 
extras +def convert_extras(extras): + # type: (Optional[str]) -> Set[str] + if not extras: + return set() + return Requirement("placeholder" + extras.lower()).extras + + def parse_editable(editable_req): - # type: (str) -> Tuple[Optional[str], str, Optional[Set[str]]] + # type: (str) -> Tuple[Optional[str], str, Set[str]] """Parses an editable requirement into: - a requirement name - an URL @@ -102,35 +118,36 @@ def parse_editable(editable_req): Requirement("placeholder" + extras.lower()).extras, ) else: - return package_name, url_no_extras, None + return package_name, url_no_extras, set() for version_control in vcs: - if url.lower().startswith('%s:' % version_control): - url = '%s+%s' % (version_control, url) + if url.lower().startswith('{}:'.format(version_control)): + url = '{}+{}'.format(version_control, url) break if '+' not in url: raise InstallationError( - '%s should either be a path to a local project or a VCS url ' - 'beginning with svn+, git+, hg+, or bzr+' % - editable_req + '{} is not a valid editable requirement. 
' + 'It should either be a path to a local project or a VCS URL ' + '(beginning with svn+, git+, hg+, or bzr+).'.format(editable_req) ) vc_type = url.split('+', 1)[0].lower() if not vcs.get_backend(vc_type): - error_message = 'For --editable=%s only ' % editable_req + \ - ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \ - ' is currently supported' + backends = ", ".join([bends.name + '+URL' for bends in vcs.backends]) + error_message = "For --editable={}, " \ + "only {} are currently supported".format( + editable_req, backends) raise InstallationError(error_message) package_name = Link(url).egg_fragment if not package_name: raise InstallationError( - "Could not detect requirement name for '%s', please specify one " - "with #egg=your_package_name" % editable_req + "Could not detect requirement name for '{}', please specify one " + "with #egg=your_package_name".format(editable_req) ) - return package_name, url, None + return package_name, url, set() def deduce_helpful_msg(req): @@ -148,71 +165,141 @@ def deduce_helpful_msg(req): with open(req, 'r') as fp: # parse first line only next(parse_requirements(fp.read())) - msg += " The argument you provided " + \ - "(%s) appears to be a" % (req) + \ - " requirements file. If that is the" + \ - " case, use the '-r' flag to install" + \ + msg += ( + "The argument you provided " + "({}) appears to be a" + " requirements file. If that is the" + " case, use the '-r' flag to install" " the packages specified within it." + ).format(req) except RequirementParseError: - logger.debug("Cannot parse '%s' as requirements \ - file" % (req), exc_info=True) + logger.debug( + "Cannot parse '%s' as requirements file", req, exc_info=True + ) else: - msg += " File '%s' does not exist." 
% (req) + msg += " File '{}' does not exist.".format(req) return msg +class RequirementParts(object): + def __init__( + self, + requirement, # type: Optional[Requirement] + link, # type: Optional[Link] + markers, # type: Optional[Marker] + extras, # type: Set[str] + ): + self.requirement = requirement + self.link = link + self.markers = markers + self.extras = extras + + +def parse_req_from_editable(editable_req): + # type: (str) -> RequirementParts + name, url, extras_override = parse_editable(editable_req) + + if name is not None: + try: + req = Requirement(name) + except InvalidRequirement: + raise InstallationError("Invalid requirement: '{}'".format(name)) + else: + req = None + + link = Link(url) + + return RequirementParts(req, link, None, extras_override) + + # ---- The actual constructors follow ---- def install_req_from_editable( editable_req, # type: str - comes_from=None, # type: Optional[str] + comes_from=None, # type: Optional[Union[InstallRequirement, str]] use_pep517=None, # type: Optional[bool] isolated=False, # type: bool options=None, # type: Optional[Dict[str, Any]] - wheel_cache=None, # type: Optional[WheelCache] - constraint=False # type: bool + constraint=False, # type: bool + user_supplied=False, # type: bool ): # type: (...) 
-> InstallRequirement - name, url, extras_override = parse_editable(editable_req) - if url.startswith('file:'): - source_dir = url_to_path(url) - else: - source_dir = None - if name is not None: - try: - req = Requirement(name) - except InvalidRequirement: - raise InstallationError("Invalid requirement: '%s'" % name) - else: - req = None + parts = parse_req_from_editable(editable_req) + return InstallRequirement( - req, comes_from, source_dir=source_dir, + parts.requirement, + comes_from=comes_from, + user_supplied=user_supplied, editable=True, - link=Link(url), + link=parts.link, constraint=constraint, use_pep517=use_pep517, isolated=isolated, - options=options if options else {}, - wheel_cache=wheel_cache, - extras=extras_override or (), + install_options=options.get("install_options", []) if options else [], + global_options=options.get("global_options", []) if options else [], + hash_options=options.get("hashes", {}) if options else {}, + extras=parts.extras, ) -def install_req_from_line( - name, # type: str - comes_from=None, # type: Optional[Union[str, InstallRequirement]] - use_pep517=None, # type: Optional[bool] - isolated=False, # type: bool - options=None, # type: Optional[Dict[str, Any]] - wheel_cache=None, # type: Optional[WheelCache] - constraint=False # type: bool -): - # type: (...) -> InstallRequirement - """Creates an InstallRequirement from a name, which might be a - requirement, directory containing 'setup.py', filename, or URL. +def _looks_like_path(name): + # type: (str) -> bool + """Checks whether the string "looks like" a path on the filesystem. + + This does not check whether the target actually exists, only judge from the + appearance. + + Returns true if any of the following conditions is true: + * a path separator is found (either os.path.sep or os.path.altsep); + * a dot is found (which represents the current directory). 
+ """ + if os.path.sep in name: + return True + if os.path.altsep is not None and os.path.altsep in name: + return True + if name.startswith("."): + return True + return False + + +def _get_url_from_path(path, name): + # type: (str, str) -> Optional[str] """ + First, it checks whether a provided path is an installable directory + (e.g. it has a setup.py). If it is, returns the path. + + If false, check if the path is an archive file (such as a .whl). + The function checks if the path is a file. If false, if the path has + an @, it will treat it as a PEP 440 URL requirement and return the path. + """ + if _looks_like_path(name) and os.path.isdir(path): + if is_installable_dir(path): + return path_to_url(path) + raise InstallationError( + "Directory {name!r} is not installable. Neither 'setup.py' " + "nor 'pyproject.toml' found.".format(**locals()) + ) + if not is_archive_file(path): + return None + if os.path.isfile(path): + return path_to_url(path) + urlreq_parts = name.split('@', 1) + if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]): + # If the path contains '@' and the part before it does not look + # like a path, try to treat it as a PEP 440 URL req instead. + return None + logger.warning( + 'Requirement %r looks like a filename, but the ' + 'file does not exist', + name + ) + return path_to_url(path) + + +def parse_req_from_line(name, line_source): + # type: (str, Optional[str]) -> RequirementParts if is_url(name): marker_sep = '; ' else: @@ -236,26 +323,9 @@ def install_req_from_line( link = Link(name) else: p, extras_as_string = _strip_extras(path) - looks_like_dir = os.path.isdir(p) and ( - os.path.sep in name or - (os.path.altsep is not None and os.path.altsep in name) or - name.startswith('.') - ) - if looks_like_dir: - if not is_installable_dir(p): - raise InstallationError( - "Directory %r is not installable. Neither 'setup.py' " - "nor 'pyproject.toml' found." 
% name - ) - link = Link(path_to_url(p)) - elif is_archive_file(p): - if not os.path.isfile(p): - logger.warning( - 'Requirement %r looks like a filename, but the ' - 'file does not exist', - name - ) - link = Link(path_to_url(p)) + url = _get_url_from_path(p, name) + if url is not None: + link = Link(url) # it's a local file, dir, or url if link: @@ -266,7 +336,7 @@ def install_req_from_line( # wheel file if link.is_wheel: wheel = Wheel(link.filename) # can raise InvalidWheelFilename - req_as_string = "%s==%s" % (wheel.name, wheel.version) + req_as_string = "{wheel.name}=={wheel.version}".format(**locals()) else: # set the req to the egg fragment. when it's not there, this # will become an 'unnamed' requirement @@ -276,10 +346,14 @@ def install_req_from_line( else: req_as_string = name - if extras_as_string: - extras = Requirement("placeholder" + extras_as_string.lower()).extras - else: - extras = () + extras = convert_extras(extras_as_string) + + def with_source(text): + # type: (str) -> str + if not line_source: + return text + return '{} (from {})'.format(text, line_source) + if req_as_string is not None: try: req = Requirement(req_as_string) @@ -291,20 +365,58 @@ def install_req_from_line( not any(op in req_as_string for op in operators)): add_msg = "= is not a valid operator. Did you mean == ?" 
else: - add_msg = "" - raise InstallationError( - "Invalid requirement: '%s'\n%s" % (req_as_string, add_msg) + add_msg = '' + msg = with_source( + 'Invalid requirement: {!r}'.format(req_as_string) ) + if add_msg: + msg += '\nHint: {}'.format(add_msg) + raise InstallationError(msg) + else: + # Deprecate extras after specifiers: "name>=1.0[extras]" + # This currently works by accident because _strip_extras() parses + # any extras in the end of the string and those are saved in + # RequirementParts + for spec in req.specifier: + spec_str = str(spec) + if spec_str.endswith(']'): + msg = "Extras after version '{}'.".format(spec_str) + replace = "moving the extras before version specifiers" + deprecated(msg, replacement=replace, gone_in="21.0") else: req = None + return RequirementParts(req, link, markers, extras) + + +def install_req_from_line( + name, # type: str + comes_from=None, # type: Optional[Union[str, InstallRequirement]] + use_pep517=None, # type: Optional[bool] + isolated=False, # type: bool + options=None, # type: Optional[Dict[str, Any]] + constraint=False, # type: bool + line_source=None, # type: Optional[str] + user_supplied=False, # type: bool +): + # type: (...) -> InstallRequirement + """Creates an InstallRequirement from a name, which might be a + requirement, directory containing 'setup.py', filename, or URL. + + :param line_source: An optional string describing where the line is from, + for logging purposes in case of an error. 
+ """ + parts = parse_req_from_line(name, line_source) + return InstallRequirement( - req, comes_from, link=link, markers=markers, + parts.requirement, comes_from, link=parts.link, markers=parts.markers, use_pep517=use_pep517, isolated=isolated, - options=options if options else {}, - wheel_cache=wheel_cache, + install_options=options.get("install_options", []) if options else [], + global_options=options.get("global_options", []) if options else [], + hash_options=options.get("hashes", {}) if options else {}, constraint=constraint, - extras=extras, + extras=parts.extras, + user_supplied=user_supplied, ) @@ -312,28 +424,63 @@ def install_req_from_req_string( req_string, # type: str comes_from=None, # type: Optional[InstallRequirement] isolated=False, # type: bool - wheel_cache=None, # type: Optional[WheelCache] - use_pep517=None # type: Optional[bool] + use_pep517=None, # type: Optional[bool] + user_supplied=False, # type: bool ): # type: (...) -> InstallRequirement try: req = Requirement(req_string) except InvalidRequirement: - raise InstallationError("Invalid requirement: '%s'" % req) + raise InstallationError("Invalid requirement: '{}'".format(req_string)) domains_not_allowed = [ PyPI.file_storage_domain, TestPyPI.file_storage_domain, ] - if req.url and comes_from.link.netloc in domains_not_allowed: + if (req.url and comes_from and comes_from.link and + comes_from.link.netloc in domains_not_allowed): # Explicitly disallow pypi packages that depend on external urls raise InstallationError( "Packages installed from PyPI cannot depend on packages " "which are not also hosted on PyPI.\n" - "%s depends on %s " % (comes_from.name, req) + "{} depends on {} ".format(comes_from.name, req) ) return InstallRequirement( - req, comes_from, isolated=isolated, wheel_cache=wheel_cache, - use_pep517=use_pep517 + req, + comes_from, + isolated=isolated, + use_pep517=use_pep517, + user_supplied=user_supplied, ) + + +def install_req_from_parsed_requirement( + parsed_req, # type: 
ParsedRequirement + isolated=False, # type: bool + use_pep517=None, # type: Optional[bool] + user_supplied=False, # type: bool +): + # type: (...) -> InstallRequirement + if parsed_req.is_editable: + req = install_req_from_editable( + parsed_req.requirement, + comes_from=parsed_req.comes_from, + use_pep517=use_pep517, + constraint=parsed_req.constraint, + isolated=isolated, + user_supplied=user_supplied, + ) + + else: + req = install_req_from_line( + parsed_req.requirement, + comes_from=parsed_req.comes_from, + use_pep517=use_pep517, + isolated=isolated, + options=parsed_req.options, + constraint=parsed_req.constraint, + line_source=parsed_req.line_source, + user_supplied=user_supplied, + ) + return req diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/req_file.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/req_file.py index 726f2f6a..10505822 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/req_file.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/req_file.py @@ -10,32 +10,37 @@ import re import shlex import sys -from pip._vendor.six.moves import filterfalse from pip._vendor.six.moves.urllib import parse as urllib_parse from pip._internal.cli import cmdoptions -from pip._internal.download import get_file_content -from pip._internal.exceptions import RequirementsFileParseError -from pip._internal.req.constructors import ( - install_req_from_editable, install_req_from_line, +from pip._internal.exceptions import ( + InstallationError, + RequirementsFileParseError, ) +from pip._internal.models.search_scope import SearchScope +from pip._internal.network.utils import raise_for_status +from pip._internal.utils.encoding import auto_decode from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import get_url_scheme if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 - Iterator, Tuple, 
Optional, List, Callable, Text + from optparse import Values + from typing import ( + Any, Callable, Dict, Iterator, List, NoReturn, Optional, Text, Tuple, ) - from pip._internal.req import InstallRequirement # noqa: F401 - from pip._internal.cache import WheelCache # noqa: F401 - from pip._internal.index import PackageFinder # noqa: F401 - from pip._internal.download import PipSession # noqa: F401 + + from pip._internal.index.package_finder import PackageFinder + from pip._internal.network.session import PipSession ReqFileLines = Iterator[Tuple[int, Text]] + LineParser = Callable[[Text], Tuple[str, Values]] + + __all__ = ['parse_requirements'] SCHEME_RE = re.compile(r'^(http|https|file):', re.I) -COMMENT_RE = re.compile(r'(^|\s)+#.*$') +COMMENT_RE = re.compile(r'(^|\s+)#.*$') # Matches environment variable-style values in '${MY_VARIABLE_1}' with the # variable name consisting of only uppercase letters, digits or the '_' @@ -44,19 +49,20 @@ COMMENT_RE = re.compile(r'(^|\s)+#.*$') ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})') SUPPORTED_OPTIONS = [ + cmdoptions.index_url, + cmdoptions.extra_index_url, + cmdoptions.no_index, cmdoptions.constraints, - cmdoptions.editable, cmdoptions.requirements, - cmdoptions.no_index, - cmdoptions.index_url, + cmdoptions.editable, cmdoptions.find_links, - cmdoptions.extra_index_url, - cmdoptions.always_unzip, cmdoptions.no_binary, cmdoptions.only_binary, + cmdoptions.prefer_binary, + cmdoptions.require_hashes, cmdoptions.pre, cmdoptions.trusted_host, - cmdoptions.require_hashes, + cmdoptions.use_new_feature, ] # type: List[Callable[..., optparse.Option]] # options to be passed to requirements @@ -70,174 +76,175 @@ SUPPORTED_OPTIONS_REQ = [ SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ] +class ParsedRequirement(object): + def __init__( + self, + requirement, # type:str + is_editable, # type: bool + comes_from, # type: str + constraint, # type: bool + options=None, # type: 
Optional[Dict[str, Any]] + line_source=None, # type: Optional[str] + ): + # type: (...) -> None + self.requirement = requirement + self.is_editable = is_editable + self.comes_from = comes_from + self.options = options + self.constraint = constraint + self.line_source = line_source + + +class ParsedLine(object): + def __init__( + self, + filename, # type: str + lineno, # type: int + comes_from, # type: Optional[str] + args, # type: str + opts, # type: Values + constraint, # type: bool + ): + # type: (...) -> None + self.filename = filename + self.lineno = lineno + self.comes_from = comes_from + self.opts = opts + self.constraint = constraint + + if args: + self.is_requirement = True + self.is_editable = False + self.requirement = args + elif opts.editables: + self.is_requirement = True + self.is_editable = True + # We don't support multiple -e on one line + self.requirement = opts.editables[0] + else: + self.is_requirement = False + + def parse_requirements( filename, # type: str + session, # type: PipSession finder=None, # type: Optional[PackageFinder] comes_from=None, # type: Optional[str] options=None, # type: Optional[optparse.Values] - session=None, # type: Optional[PipSession] constraint=False, # type: bool - wheel_cache=None, # type: Optional[WheelCache] - use_pep517=None # type: Optional[bool] ): - # type: (...) -> Iterator[InstallRequirement] - """Parse a requirements file and yield InstallRequirement instances. + # type: (...) -> Iterator[ParsedRequirement] + """Parse a requirements file and yield ParsedRequirement instances. :param filename: Path or url of requirements file. + :param session: PipSession instance. :param finder: Instance of pip.index.PackageFinder. :param comes_from: Origin description of requirements. :param options: cli options. - :param session: Instance of pip.download.PipSession. :param constraint: If true, parsing a constraint file rather than requirements file. 
- :param wheel_cache: Instance of pip.wheel.WheelCache - :param use_pep517: Value of the --use-pep517 option. """ - if session is None: - raise TypeError( - "parse_requirements() missing 1 required keyword argument: " - "'session'" + line_parser = get_line_parser(finder) + parser = RequirementsFileParser(session, line_parser, comes_from) + + for parsed_line in parser.parse(filename, constraint): + parsed_req = handle_line( + parsed_line, + options=options, + finder=finder, + session=session ) + if parsed_req is not None: + yield parsed_req - _, content = get_file_content( - filename, comes_from=comes_from, session=session - ) - - lines_enum = preprocess(content, options) - - for line_number, line in lines_enum: - req_iter = process_line(line, filename, line_number, finder, - comes_from, options, session, wheel_cache, - use_pep517=use_pep517, constraint=constraint) - for req in req_iter: - yield req - -def preprocess(content, options): - # type: (Text, Optional[optparse.Values]) -> ReqFileLines +def preprocess(content): + # type: (Text) -> ReqFileLines """Split, filter, and join lines, and return a line iterator :param content: the content of the requirements file - :param options: cli options """ lines_enum = enumerate(content.splitlines(), start=1) # type: ReqFileLines lines_enum = join_lines(lines_enum) lines_enum = ignore_comments(lines_enum) - lines_enum = skip_regex(lines_enum, options) lines_enum = expand_env_variables(lines_enum) return lines_enum -def process_line( - line, # type: Text - filename, # type: str - line_number, # type: int - finder=None, # type: Optional[PackageFinder] - comes_from=None, # type: Optional[str] +def handle_requirement_line( + line, # type: ParsedLine options=None, # type: Optional[optparse.Values] - session=None, # type: Optional[PipSession] - wheel_cache=None, # type: Optional[WheelCache] - use_pep517=None, # type: Optional[bool] - constraint=False # type: bool ): - # type: (...) 
-> Iterator[InstallRequirement] - """Process a single requirements line; This can result in creating/yielding - requirements, or updating the finder. - - For lines that contain requirements, the only options that have an effect - are from SUPPORTED_OPTIONS_REQ, and they are scoped to the - requirement. Other options from SUPPORTED_OPTIONS may be present, but are - ignored. - - For lines that do not contain requirements, the only options that have an - effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may - be present, but are ignored. These lines may contain multiple options - (although our docs imply only one is supported), and all our parsed and - affect the finder. - - :param constraint: If True, parsing a constraints file. - :param options: OptionParser options that we may update - """ - parser = build_parser(line) - defaults = parser.get_default_values() - defaults.index_url = None - if finder: - defaults.format_control = finder.format_control - args_str, options_str = break_args_options(line) - # Prior to 2.7.3, shlex cannot deal with unicode entries - if sys.version_info < (2, 7, 3): - # https://github.com/python/mypy/issues/1174 - options_str = options_str.encode('utf8') # type: ignore - # https://github.com/python/mypy/issues/1174 - opts, _ = parser.parse_args( - shlex.split(options_str), defaults) # type: ignore + # type: (...) -> ParsedRequirement # preserve for the nested code path - line_comes_from = '%s %s (line %s)' % ( - '-c' if constraint else '-r', filename, line_number, + line_comes_from = '{} {} (line {})'.format( + '-c' if line.constraint else '-r', line.filename, line.lineno, ) - # yield a line requirement - if args_str: - isolated = options.isolated_mode if options else False + assert line.is_requirement + + if line.is_editable: + # For editable requirements, we don't support per-requirement + # options, so just return the parsed requirement. 
+ return ParsedRequirement( + requirement=line.requirement, + is_editable=line.is_editable, + comes_from=line_comes_from, + constraint=line.constraint, + ) + else: if options: - cmdoptions.check_install_build_global(options, opts) + # Disable wheels if the user has specified build options + cmdoptions.check_install_build_global(options, line.opts) + # get the options that apply to requirements req_options = {} for dest in SUPPORTED_OPTIONS_REQ_DEST: - if dest in opts.__dict__ and opts.__dict__[dest]: - req_options[dest] = opts.__dict__[dest] - yield install_req_from_line( - args_str, line_comes_from, constraint=constraint, - use_pep517=use_pep517, - isolated=isolated, options=req_options, wheel_cache=wheel_cache + if dest in line.opts.__dict__ and line.opts.__dict__[dest]: + req_options[dest] = line.opts.__dict__[dest] + + line_source = 'line {} of {}'.format(line.lineno, line.filename) + return ParsedRequirement( + requirement=line.requirement, + is_editable=line.is_editable, + comes_from=line_comes_from, + constraint=line.constraint, + options=req_options, + line_source=line_source, ) - # yield an editable requirement - elif opts.editables: - isolated = options.isolated_mode if options else False - yield install_req_from_editable( - opts.editables[0], comes_from=line_comes_from, - use_pep517=use_pep517, - constraint=constraint, isolated=isolated, wheel_cache=wheel_cache - ) - # parse a nested requirements file - elif opts.requirements or opts.constraints: - if opts.requirements: - req_path = opts.requirements[0] - nested_constraint = False - else: - req_path = opts.constraints[0] - nested_constraint = True - # original file is over http - if SCHEME_RE.search(filename): - # do a url join so relative paths work - req_path = urllib_parse.urljoin(filename, req_path) - # original file and nested file are paths - elif not SCHEME_RE.search(req_path): - # do a join so relative paths work - req_path = os.path.join(os.path.dirname(filename), req_path) - # TODO: Why not use 
`comes_from='-r {} (line {})'` here as well? - parsed_reqs = parse_requirements( - req_path, finder, comes_from, options, session, - constraint=nested_constraint, wheel_cache=wheel_cache - ) - for req in parsed_reqs: - yield req - - # percolate hash-checking option upward - elif opts.require_hashes: - options.require_hashes = opts.require_hashes +def handle_option_line( + opts, # type: Values + filename, # type: str + lineno, # type: int + finder=None, # type: Optional[PackageFinder] + options=None, # type: Optional[optparse.Values] + session=None, # type: Optional[PipSession] +): + # type: (...) -> None + + if options: + # percolate options upward + if opts.require_hashes: + options.require_hashes = opts.require_hashes + if opts.features_enabled: + options.features_enabled.extend( + f for f in opts.features_enabled + if f not in options.features_enabled + ) # set finder options - elif finder: + if finder: + find_links = finder.find_links + index_urls = finder.index_urls if opts.index_url: - finder.index_urls = [opts.index_url] + index_urls = [opts.index_url] if opts.no_index is True: - finder.index_urls = [] + index_urls = [] if opts.extra_index_urls: - finder.index_urls.extend(opts.extra_index_urls) + index_urls.extend(opts.extra_index_urls) if opts.find_links: # FIXME: it would be nice to keep track of the source # of the find_links: support a find-links local path @@ -247,12 +254,174 @@ def process_line( relative_to_reqs_file = os.path.join(req_dir, value) if os.path.exists(relative_to_reqs_file): value = relative_to_reqs_file - finder.find_links.append(value) + find_links.append(value) + + search_scope = SearchScope( + find_links=find_links, + index_urls=index_urls, + ) + finder.search_scope = search_scope + if opts.pre: - finder.allow_all_prereleases = True - if opts.trusted_hosts: - finder.secure_origins.extend( - ("*", host, "*") for host in opts.trusted_hosts) + finder.set_allow_all_prereleases() + + if opts.prefer_binary: + finder.set_prefer_binary() + + 
if session: + for host in opts.trusted_hosts or []: + source = 'line {} of {}'.format(lineno, filename) + session.add_trusted_host(host, source=source) + + +def handle_line( + line, # type: ParsedLine + options=None, # type: Optional[optparse.Values] + finder=None, # type: Optional[PackageFinder] + session=None, # type: Optional[PipSession] +): + # type: (...) -> Optional[ParsedRequirement] + """Handle a single parsed requirements line; This can result in + creating/yielding requirements, or updating the finder. + + :param line: The parsed line to be processed. + :param options: CLI options. + :param finder: The finder - updated by non-requirement lines. + :param session: The session - updated by non-requirement lines. + + Returns a ParsedRequirement object if the line is a requirement line, + otherwise returns None. + + For lines that contain requirements, the only options that have an effect + are from SUPPORTED_OPTIONS_REQ, and they are scoped to the + requirement. Other options from SUPPORTED_OPTIONS may be present, but are + ignored. + + For lines that do not contain requirements, the only options that have an + effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may + be present, but are ignored. These lines may contain multiple options + (although our docs imply only one is supported), and all our parsed and + affect the finder. + """ + + if line.is_requirement: + parsed_req = handle_requirement_line(line, options) + return parsed_req + else: + handle_option_line( + line.opts, + line.filename, + line.lineno, + finder, + options, + session, + ) + return None + + +class RequirementsFileParser(object): + def __init__( + self, + session, # type: PipSession + line_parser, # type: LineParser + comes_from, # type: Optional[str] + ): + # type: (...) 
-> None + self._session = session + self._line_parser = line_parser + self._comes_from = comes_from + + def parse(self, filename, constraint): + # type: (str, bool) -> Iterator[ParsedLine] + """Parse a given file, yielding parsed lines. + """ + for line in self._parse_and_recurse(filename, constraint): + yield line + + def _parse_and_recurse(self, filename, constraint): + # type: (str, bool) -> Iterator[ParsedLine] + for line in self._parse_file(filename, constraint): + if ( + not line.is_requirement and + (line.opts.requirements or line.opts.constraints) + ): + # parse a nested requirements file + if line.opts.requirements: + req_path = line.opts.requirements[0] + nested_constraint = False + else: + req_path = line.opts.constraints[0] + nested_constraint = True + + # original file is over http + if SCHEME_RE.search(filename): + # do a url join so relative paths work + req_path = urllib_parse.urljoin(filename, req_path) + # original file and nested file are paths + elif not SCHEME_RE.search(req_path): + # do a join so relative paths work + req_path = os.path.join( + os.path.dirname(filename), req_path, + ) + + for inner_line in self._parse_and_recurse( + req_path, nested_constraint, + ): + yield inner_line + else: + yield line + + def _parse_file(self, filename, constraint): + # type: (str, bool) -> Iterator[ParsedLine] + _, content = get_file_content( + filename, self._session, comes_from=self._comes_from + ) + + lines_enum = preprocess(content) + + for line_number, line in lines_enum: + try: + args_str, opts = self._line_parser(line) + except OptionParsingError as e: + # add offending line + msg = 'Invalid requirement: {}\n{}'.format(line, e.msg) + raise RequirementsFileParseError(msg) + + yield ParsedLine( + filename, + line_number, + self._comes_from, + args_str, + opts, + constraint, + ) + + +def get_line_parser(finder): + # type: (Optional[PackageFinder]) -> LineParser + def parse_line(line): + # type: (Text) -> Tuple[str, Values] + # Build new parser for 
each line since it accumulates appendable + # options. + parser = build_parser() + defaults = parser.get_default_values() + defaults.index_url = None + if finder: + defaults.format_control = finder.format_control + + args_str, options_str = break_args_options(line) + # Prior to 2.7.3, shlex cannot deal with unicode entries + if sys.version_info < (2, 7, 3): + # https://github.com/python/mypy/issues/1174 + options_str = options_str.encode('utf8') # type: ignore + + # https://github.com/python/mypy/issues/1174 + opts, _ = parser.parse_args( + shlex.split(options_str), defaults) # type: ignore + + return args_str, opts + + return parse_line def break_args_options(line): @@ -273,8 +442,14 @@ def break_args_options(line): return ' '.join(args), ' '.join(options) # type: ignore -def build_parser(line): - # type: (Text) -> optparse.OptionParser +class OptionParsingError(Exception): + def __init__(self, msg): + # type: (str) -> None + self.msg = msg + + +def build_parser(): + # type: () -> optparse.OptionParser """ Return a parser for parsing requirement lines """ @@ -288,9 +463,8 @@ def build_parser(line): # By default optparse sys.exits on parsing errors. We want to wrap # that in our own exception. def parser_exit(self, msg): - # add offending line - msg = 'Invalid requirement: %s\n%s' % (line, msg) - raise RequirementsFileParseError(msg) + # type: (Any, str) -> NoReturn + raise OptionParsingError(msg) # NOTE: mypy disallows assigning to a method # https://github.com/python/mypy/issues/2427 parser.exit = parser_exit # type: ignore @@ -312,6 +486,7 @@ def join_lines(lines_enum): line = ' ' + line if new_line: new_line.append(line) + assert primary_line_number is not None yield primary_line_number, ''.join(new_line) new_line = [] else: @@ -323,6 +498,7 @@ def join_lines(lines_enum): # last line contains \ if new_line: + assert primary_line_number is not None yield primary_line_number, ''.join(new_line) # TODO: handle space after '\'. 
@@ -340,20 +516,6 @@ def ignore_comments(lines_enum): yield line_number, line -def skip_regex(lines_enum, options): - # type: (ReqFileLines, Optional[optparse.Values]) -> ReqFileLines - """ - Skip lines that match '--skip-requirements-regex' pattern - - Note: the regex pattern is only built once - """ - skip_regex = options.skip_requirements_regex if options else None - if skip_regex: - pattern = re.compile(skip_regex) - lines_enum = filterfalse(lambda e: pattern.search(e[1]), lines_enum) - return lines_enum - - def expand_env_variables(lines_enum): # type: (ReqFileLines) -> ReqFileLines """Replace all environment variables that can be retrieved via `os.getenv`. @@ -364,7 +526,7 @@ def expand_env_variables(lines_enum): 1. Strings that contain a `$` aren't accidentally (partially) expanded. 2. Ensure consistency across platforms for requirement files. - These points are the result of a discusssion on the `github pull + These points are the result of a discussion on the `github pull request #3514 <https://github.com/pypa/pip/pull/3514>`_. Valid characters in variable names follow the `POSIX standard @@ -380,3 +542,51 @@ def expand_env_variables(lines_enum): line = line.replace(env_var, value) yield line_number, line + + +def get_file_content(url, session, comes_from=None): + # type: (str, PipSession, Optional[str]) -> Tuple[str, Text] + """Gets the content of a file; it may be a filename, file: URL, or + http: URL. Returns (location, content). Content is unicode. + Respects # -*- coding: declarations on the retrieved files. + + :param url: File path or url. + :param session: PipSession instance. + :param comes_from: Origin description of requirements. 
+ """ + scheme = get_url_scheme(url) + + if scheme in ['http', 'https']: + # FIXME: catch some errors + resp = session.get(url) + raise_for_status(resp) + return resp.url, resp.text + + elif scheme == 'file': + if comes_from and comes_from.startswith('http'): + raise InstallationError( + 'Requirements file {} references URL {}, ' + 'which is local'.format(comes_from, url) + ) + + path = url.split(':', 1)[1] + path = path.replace('\\', '/') + match = _url_slash_drive_re.match(path) + if match: + path = match.group(1) + ':' + path.split('|', 1)[1] + path = urllib_parse.unquote(path) + if path.startswith('/'): + path = '/' + path.lstrip('/') + url = path + + try: + with open(url, 'rb') as f: + content = auto_decode(f.read()) + except IOError as exc: + raise InstallationError( + 'Could not open requirements file: {}'.format(exc) + ) + return url, content + + +_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/req_install.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/req_install.py index a4834b00..644930a1 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/req_install.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/req_install.py @@ -1,12 +1,14 @@ +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False + from __future__ import absolute_import import logging import os import shutil import sys -import sysconfig +import uuid import zipfile -from distutils.util import change_root from pip._vendor import pkg_resources, six from pip._vendor.packaging.requirements import Requirement @@ -15,50 +17,85 @@ from pip._vendor.packaging.version import Version from pip._vendor.packaging.version import parse as parse_version from pip._vendor.pep517.wrappers import Pep517HookCaller -from pip._internal import wheel from pip._internal.build_env import NoOpBuildEnvironment from pip._internal.exceptions import InstallationError -from pip._internal.locations import ( - PIP_DELETE_MARKER_FILENAME, running_under_virtualenv, -) +from pip._internal.locations import get_scheme from pip._internal.models.link import Link +from pip._internal.operations.build.metadata import generate_metadata +from pip._internal.operations.build.metadata_legacy import \ + generate_metadata as generate_metadata_legacy +from pip._internal.operations.install.editable_legacy import \ + install_editable as install_editable_legacy +from pip._internal.operations.install.legacy import LegacyInstallFailure +from pip._internal.operations.install.legacy import install as install_legacy +from pip._internal.operations.install.wheel import install_wheel from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path from pip._internal.req.req_uninstall import UninstallPathSet -from pip._internal.utils.compat import native_str +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.direct_url_helpers import direct_url_from_link from pip._internal.utils.hashes import Hashes from pip._internal.utils.logging import indent_log from pip._internal.utils.misc import ( - _make_build_dir, ask_path_exists, backup_dir, call_subprocess, - display_path, dist_in_site_packages, dist_in_usersite, ensure_dir, - get_installed_version, redact_password_from_url, rmtree, + 
ask_path_exists, + backup_dir, + display_path, + dist_in_site_packages, + dist_in_usersite, + get_distribution, + get_installed_version, + hide_url, + redact_auth_from_url, ) from pip._internal.utils.packaging import get_metadata -from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM -from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.utils.ui import open_spinner +from pip._internal.utils.virtualenv import running_under_virtualenv from pip._internal.vcs import vcs -from pip._internal.wheel import move_wheel_files if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 - Optional, Iterable, List, Union, Any, Text, Sequence, Dict + from typing import ( + Any, Dict, Iterable, List, Optional, Sequence, Union, ) - from pip._internal.build_env import BuildEnvironment # noqa: F401 - from pip._internal.cache import WheelCache # noqa: F401 - from pip._internal.index import PackageFinder # noqa: F401 - from pip._vendor.pkg_resources import Distribution # noqa: F401 - from pip._vendor.packaging.specifiers import SpecifierSet # noqa: F401 - from pip._vendor.packaging.markers import Marker # noqa: F401 + from pip._internal.build_env import BuildEnvironment + from pip._vendor.pkg_resources import Distribution + from pip._vendor.packaging.specifiers import SpecifierSet + from pip._vendor.packaging.markers import Marker logger = logging.getLogger(__name__) +def _get_dist(metadata_directory): + # type: (str) -> Distribution + """Return a pkg_resources.Distribution for the provided + metadata directory. + """ + dist_dir = metadata_directory.rstrip(os.sep) + + # Build a PathMetadata object, from path to metadata. :wink: + base_dir, dist_dir_name = os.path.split(dist_dir) + metadata = pkg_resources.PathMetadata(base_dir, dist_dir) + + # Determine the correct Distribution object type. 
+ if dist_dir.endswith(".egg-info"): + dist_cls = pkg_resources.Distribution + dist_name = os.path.splitext(dist_dir_name)[0] + else: + assert dist_dir.endswith(".dist-info") + dist_cls = pkg_resources.DistInfoDistribution + dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0] + + return dist_cls( + base_dir, + project_name=dist_name, + metadata=metadata, + ) + + class InstallRequirement(object): """ Represents something that may be installed later on, may have information - about where to fetch the relavant requirement and also contains logic for + about where to fetch the relevant requirement and also contains logic for installing the said requirement. """ @@ -66,34 +103,48 @@ class InstallRequirement(object): self, req, # type: Optional[Requirement] comes_from, # type: Optional[Union[str, InstallRequirement]] - source_dir=None, # type: Optional[str] editable=False, # type: bool link=None, # type: Optional[Link] - update=True, # type: bool markers=None, # type: Optional[Marker] use_pep517=None, # type: Optional[bool] isolated=False, # type: bool - options=None, # type: Optional[Dict[str, Any]] - wheel_cache=None, # type: Optional[WheelCache] + install_options=None, # type: Optional[List[str]] + global_options=None, # type: Optional[List[str]] + hash_options=None, # type: Optional[Dict[str, List[str]]] constraint=False, # type: bool - extras=() # type: Iterable[str] + extras=(), # type: Iterable[str] + user_supplied=False, # type: bool ): # type: (...) -> None assert req is None or isinstance(req, Requirement), req self.req = req self.comes_from = comes_from self.constraint = constraint - if source_dir is not None: - self.source_dir = os.path.normpath(os.path.abspath(source_dir)) - else: - self.source_dir = None self.editable = editable - self._wheel_cache = wheel_cache + # source_dir is the local directory where the linked requirement is + # located, or unpacked. 
In case unpacking is needed, creating and + # populating source_dir is done by the RequirementPreparer. Note this + # is not necessarily the directory where pyproject.toml or setup.py is + # located - that one is obtained via unpacked_source_directory. + self.source_dir = None # type: Optional[str] + if self.editable: + assert link + if link.is_file: + self.source_dir = os.path.normpath( + os.path.abspath(link.file_path) + ) + if link is None and req and req.url: # PEP 508 URL requirement link = Link(req.url) self.link = self.original_link = link + self.original_link_is_in_wheel_cache = False + + # Path to any downloaded or already-existing package. + self.local_file_path = None # type: Optional[str] + if self.link and self.link.is_file: + self.local_file_path = self.link.file_path if extras: self.extras = extras @@ -107,28 +158,35 @@ class InstallRequirement(object): markers = req.marker self.markers = markers - self._egg_info_path = None # type: Optional[str] # This holds the pkg_resources.Distribution object if this requirement # is already available: - self.satisfied_by = None - # This hold the pkg_resources.Distribution object if this requirement - # conflicts with another installed distribution: - self.conflicts_with = None + self.satisfied_by = None # type: Optional[Distribution] + # Whether the installation process should try to uninstall an existing + # distribution before installing this requirement. + self.should_reinstall = False # Temporary build location - self._temp_build_dir = TempDirectory(kind="req-build") - # Used to store the global directory where the _temp_build_dir should - # have been created. Cf _correct_build_location method. 
- self._ideal_build_dir = None # type: Optional[str] - # True if the editable should be updated: - self.update = update + self._temp_build_dir = None # type: Optional[TempDirectory] # Set to True after successful installation self.install_succeeded = None # type: Optional[bool] - # UninstallPathSet of uninstalled distribution (for possible rollback) - self.uninstalled_pathset = None - self.options = options if options else {} + # Supplied options + self.install_options = install_options if install_options else [] + self.global_options = global_options if global_options else [] + self.hash_options = hash_options if hash_options else {} # Set to True after successful preparation of this requirement self.prepared = False - self.is_direct = False + # User supplied requirement are explicitly requested for installation + # by the user via CLI arguments or requirements files, as opposed to, + # e.g. dependencies, extras or constraints. + self.user_supplied = user_supplied + + # Set by the legacy resolver when the requirement has been downloaded + # TODO: This introduces a strong coupling between the resolver and the + # requirement (the coupling was previously between the resolver + # and the requirement set). This should be refactored to allow + # the requirement to decide for itself when it has been + # successfully downloaded - but that is more tricky to get right, + # se we are making the change in stages. 
+ self.successfully_downloaded = False self.isolated = isolated self.build_env = NoOpBuildEnvironment() # type: BuildEnvironment @@ -156,49 +214,45 @@ class InstallRequirement(object): self.use_pep517 = use_pep517 def __str__(self): + # type: () -> str if self.req: s = str(self.req) if self.link: - s += ' from %s' % redact_password_from_url(self.link.url) + s += ' from {}'.format(redact_auth_from_url(self.link.url)) elif self.link: - s = redact_password_from_url(self.link.url) + s = redact_auth_from_url(self.link.url) else: s = '<InstallRequirement>' if self.satisfied_by is not None: - s += ' in %s' % display_path(self.satisfied_by.location) + s += ' in {}'.format(display_path(self.satisfied_by.location)) if self.comes_from: if isinstance(self.comes_from, six.string_types): - comes_from = self.comes_from + comes_from = self.comes_from # type: Optional[str] else: comes_from = self.comes_from.from_path() if comes_from: - s += ' (from %s)' % comes_from + s += ' (from {})'.format(comes_from) return s def __repr__(self): - return '<%s object: %s editable=%r>' % ( + # type: () -> str + return '<{} object: {} editable={!r}>'.format( self.__class__.__name__, str(self), self.editable) - def populate_link(self, finder, upgrade, require_hashes): - # type: (PackageFinder, bool, bool) -> None - """Ensure that if a link can be found for this, that it is found. - - Note that self.link may still be None - if Upgrade is False and the - requirement is already installed. - - If require_hashes is True, don't use the wheel cache, because cached - wheels, always built locally, have different hashes than the files - downloaded from the index server and thus throw false hash mismatches. - Furthermore, cached wheels at present have undeterministic contents due - to file modification times. + def format_debug(self): + # type: () -> str + """An un-tested helper for getting state, for debugging. 
""" - if self.link is None: - self.link = finder.find_requirement(self, upgrade) - if self._wheel_cache is not None and not require_hashes: - old_link = self.link - self.link = self._wheel_cache.get(self.link, self.name) - if old_link != self.link: - logger.debug('Using cached wheel link: %s', self.link) + attributes = vars(self) + names = sorted(attributes) + + state = ( + "{}={!r}".format(attr, attributes[attr]) for attr in sorted(names) + ) + return '<{name} object: {{{state}}}>'.format( + name=self.__class__.__name__, + state=", ".join(state), + ) # Things that are valid for all kinds of requirements? @property @@ -206,7 +260,7 @@ class InstallRequirement(object): # type: () -> Optional[str] if self.req is None: return None - return native_str(pkg_resources.safe_name(self.req.name)) + return six.ensure_str(pkg_resources.safe_name(self.req.name)) @property def specifier(self): @@ -226,6 +280,7 @@ class InstallRequirement(object): @property def installed_version(self): + # type: () -> Optional[str] return get_installed_version(self.name) def match_markers(self, extras_requested=None): @@ -250,7 +305,7 @@ class InstallRequirement(object): URL do not. 
""" - return bool(self.options.get('hashes', {})) + return bool(self.hash_options) def hashes(self, trust_internet=True): # type: (bool) -> Hashes @@ -268,7 +323,7 @@ class InstallRequirement(object): downloaded from the internet, as by populate_link() """ - good_hashes = self.options.get('hashes', {}).copy() + good_hashes = self.hash_options.copy() link = self.link if trust_internet else self.original_link if link and link.hash: good_hashes.setdefault(link.hash_name, []).append(link.hash) @@ -290,130 +345,127 @@ class InstallRequirement(object): s += '->' + comes_from return s - def build_location(self, build_dir): - # type: (str) -> Optional[str] + def ensure_build_location(self, build_dir, autodelete, parallel_builds): + # type: (str, bool, bool) -> str assert build_dir is not None - if self._temp_build_dir.path is not None: + if self._temp_build_dir is not None: + assert self._temp_build_dir.path return self._temp_build_dir.path if self.req is None: - # for requirement via a path to a directory: the name of the - # package is not available yet so we create a temp directory - # Once run_egg_info will have run, we'll be able - # to fix it via _correct_build_location # Some systems have /tmp as a symlink which confuses custom # builds (such as numpy). Thus, we ensure that the real path # is returned. - self._temp_build_dir.create() - self._ideal_build_dir = build_dir + self._temp_build_dir = TempDirectory( + kind=tempdir_kinds.REQ_BUILD, globally_managed=True + ) return self._temp_build_dir.path - if self.editable: - name = self.name.lower() - else: - name = self.name + + # When parallel builds are enabled, add a UUID to the build directory + # name so multiple builds do not interfere with each other. + dir_name = canonicalize_name(self.name) + if parallel_builds: + dir_name = "{}_{}".format(dir_name, uuid.uuid4().hex) + # FIXME: Is there a better place to create the build_dir? 
(hg and bzr # need this) if not os.path.exists(build_dir): logger.debug('Creating directory %s', build_dir) - _make_build_dir(build_dir) - return os.path.join(build_dir, name) - - def _correct_build_location(self): + os.makedirs(build_dir) + actual_build_dir = os.path.join(build_dir, dir_name) + # `None` indicates that we respect the globally-configured deletion + # settings, which is what we actually want when auto-deleting. + delete_arg = None if autodelete else False + return TempDirectory( + path=actual_build_dir, + delete=delete_arg, + kind=tempdir_kinds.REQ_BUILD, + globally_managed=True, + ).path + + def _set_requirement(self): # type: () -> None - """Move self._temp_build_dir to self._ideal_build_dir/self.req.name + """Set requirement after generating metadata. + """ + assert self.req is None + assert self.metadata is not None + assert self.source_dir is not None - For some requirements (e.g. a path to a directory), the name of the - package is not available until we run egg_info, so the build_location - will return a temporary directory and store the _ideal_build_dir. + # Construct a Requirement object from the generated metadata + if isinstance(parse_version(self.metadata["Version"]), Version): + op = "==" + else: + op = "===" + + self.req = Requirement( + "".join([ + self.metadata["Name"], + op, + self.metadata["Version"], + ]) + ) - This is only called by self.run_egg_info to fix the temporary build - directory. - """ - if self.source_dir is not None: + def warn_on_mismatching_name(self): + # type: () -> None + metadata_name = canonicalize_name(self.metadata["Name"]) + if canonicalize_name(self.req.name) == metadata_name: + # Everything is fine. 
return - assert self.req is not None - assert self._temp_build_dir.path - assert (self._ideal_build_dir is not None and - self._ideal_build_dir.path) # type: ignore - old_location = self._temp_build_dir.path - self._temp_build_dir.path = None - - new_location = self.build_location(self._ideal_build_dir) - if os.path.exists(new_location): - raise InstallationError( - 'A package already exists in %s; please remove it to continue' - % display_path(new_location)) - logger.debug( - 'Moving package %s from %s to new location %s', - self, display_path(old_location), display_path(new_location), + + # If we're here, there's a mismatch. Log a warning about it. + logger.warning( + 'Generating metadata for package %s ' + 'produced metadata for project name %s. Fix your ' + '#egg=%s fragments.', + self.name, metadata_name, self.name ) - shutil.move(old_location, new_location) - self._temp_build_dir.path = new_location - self._ideal_build_dir = None - self.source_dir = os.path.normpath(os.path.abspath(new_location)) - self._egg_info_path = None - - # Correct the metadata directory, if it exists - if self.metadata_directory: - old_meta = self.metadata_directory - rel = os.path.relpath(old_meta, start=old_location) - new_meta = os.path.join(new_location, rel) - new_meta = os.path.normpath(os.path.abspath(new_meta)) - self.metadata_directory = new_meta - - def remove_temporary_source(self): - # type: () -> None - """Remove the source files from this requirement, if they are marked - for deletion""" - if self.source_dir and os.path.exists( - os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)): - logger.debug('Removing source in %s', self.source_dir) - rmtree(self.source_dir) - self.source_dir = None - self._temp_build_dir.cleanup() - self.build_env.cleanup() + self.req = Requirement(metadata_name) def check_if_exists(self, use_user_site): - # type: (bool) -> bool + # type: (bool) -> None """Find an installed distribution that satisfies or conflicts with this requirement, and 
set self.satisfied_by or - self.conflicts_with appropriately. + self.should_reinstall appropriately. """ if self.req is None: - return False + return + # get_distribution() will resolve the entire list of requirements + # anyway, and we've already determined that we need the requirement + # in question, so strip the marker so that we don't try to + # evaluate it. + no_marker = Requirement(str(self.req)) + no_marker.marker = None + + # pkg_resources uses the canonical name to look up packages, but + # the name passed passed to get_distribution is not canonicalized + # so we have to explicitly convert it to a canonical name + no_marker.name = canonicalize_name(no_marker.name) try: - # get_distribution() will resolve the entire list of requirements - # anyway, and we've already determined that we need the requirement - # in question, so strip the marker so that we don't try to - # evaluate it. - no_marker = Requirement(str(self.req)) - no_marker.marker = None self.satisfied_by = pkg_resources.get_distribution(str(no_marker)) - if self.editable and self.satisfied_by: - self.conflicts_with = self.satisfied_by - # when installing editables, nothing pre-existing should ever - # satisfy - self.satisfied_by = None - return True except pkg_resources.DistributionNotFound: - return False + return except pkg_resources.VersionConflict: - existing_dist = pkg_resources.get_distribution( + existing_dist = get_distribution( self.req.name ) if use_user_site: if dist_in_usersite(existing_dist): - self.conflicts_with = existing_dist + self.should_reinstall = True elif (running_under_virtualenv() and dist_in_site_packages(existing_dist)): raise InstallationError( "Will not install to the user site because it will " - "lack sys.path precedence to %s in %s" % - (existing_dist.project_name, existing_dist.location) + "lack sys.path precedence to {} in {}".format( + existing_dist.project_name, existing_dist.location) ) else: - self.conflicts_with = existing_dist - return True + 
self.should_reinstall = True + else: + if self.editable and self.satisfied_by: + self.should_reinstall = True + # when installing editables, nothing pre-existing should ever + # satisfy + self.satisfied_by = None # Things valid for wheels @property @@ -423,42 +475,19 @@ class InstallRequirement(object): return False return self.link.is_wheel - def move_wheel_files( - self, - wheeldir, # type: str - root=None, # type: Optional[str] - home=None, # type: Optional[str] - prefix=None, # type: Optional[str] - warn_script_location=True, # type: bool - use_user_site=False, # type: bool - pycompile=True # type: bool - ): - # type: (...) -> None - move_wheel_files( - self.name, self.req, wheeldir, - user=use_user_site, - home=home, - root=root, - prefix=prefix, - pycompile=pycompile, - isolated=self.isolated, - warn_script_location=warn_script_location, - ) - # Things valid for sdists @property - def setup_py_dir(self): + def unpacked_source_directory(self): # type: () -> str return os.path.join( self.source_dir, self.link and self.link.subdirectory_fragment or '') @property - def setup_py(self): + def setup_py_path(self): # type: () -> str - assert self.source_dir, "No source dir for %s" % self - - setup_py = os.path.join(self.setup_py_dir, 'setup.py') + assert self.source_dir, "No source dir for {}".format(self) + setup_py = os.path.join(self.unpacked_source_directory, 'setup.py') # Python2 __file__ should not be unicode if six.PY2 and isinstance(setup_py, six.text_type): @@ -467,11 +496,10 @@ class InstallRequirement(object): return setup_py @property - def pyproject_toml(self): + def pyproject_toml_path(self): # type: () -> str - assert self.source_dir, "No source dir for %s" % self - - return make_pyproject_path(self.setup_py_dir) + assert self.source_dir, "No source dir for {}".format(self) + return make_pyproject_path(self.unpacked_source_directory) def load_pyproject_toml(self): # type: () -> None @@ -482,37 +510,46 @@ class InstallRequirement(object): use_pep517 
attribute can be used to determine whether we should follow the PEP 517 or legacy (setup.py) code path. """ - pep517_data = load_pyproject_toml( + pyproject_toml_data = load_pyproject_toml( self.use_pep517, - self.pyproject_toml, - self.setup_py, + self.pyproject_toml_path, + self.setup_py_path, str(self) ) - if pep517_data is None: + if pyproject_toml_data is None: self.use_pep517 = False - else: - self.use_pep517 = True - requires, backend, check = pep517_data - self.requirements_to_check = check - self.pyproject_requires = requires - self.pep517_backend = Pep517HookCaller(self.setup_py_dir, backend) - - # Use a custom function to call subprocesses - self.spin_message = "" - - def runner(cmd, cwd=None, extra_environ=None): - with open_spinner(self.spin_message) as spinner: - call_subprocess( - cmd, - cwd=cwd, - extra_environ=extra_environ, - show_stdout=False, - spinner=spinner - ) - self.spin_message = "" + return + + self.use_pep517 = True + requires, backend, check, backend_path = pyproject_toml_data + self.requirements_to_check = check + self.pyproject_requires = requires + self.pep517_backend = Pep517HookCaller( + self.unpacked_source_directory, backend, backend_path=backend_path, + ) + + def _generate_metadata(self): + # type: () -> str + """Invokes metadata generator functions, with the required arguments. 
+ """ + if not self.use_pep517: + assert self.unpacked_source_directory + + return generate_metadata_legacy( + build_env=self.build_env, + setup_py_path=self.setup_py_path, + source_dir=self.unpacked_source_directory, + isolated=self.isolated, + details=self.name or "from {}".format(self.link) + ) + + assert self.pep517_backend is not None - self.pep517_backend._subprocess_runner = runner + return generate_metadata( + build_env=self.build_env, + backend=self.pep517_backend, + ) def prepare_metadata(self): # type: () -> None @@ -524,145 +561,19 @@ class InstallRequirement(object): assert self.source_dir with indent_log(): - if self.use_pep517: - self.prepare_pep517_metadata() - else: - self.run_egg_info() + self.metadata_directory = self._generate_metadata() - if not self.req: - if isinstance(parse_version(self.metadata["Version"]), Version): - op = "==" - else: - op = "===" - self.req = Requirement( - "".join([ - self.metadata["Name"], - op, - self.metadata["Version"], - ]) - ) - self._correct_build_location() + # Act on the newly generated metadata, based on the name and version. + if not self.name: + self._set_requirement() else: - metadata_name = canonicalize_name(self.metadata["Name"]) - if canonicalize_name(self.req.name) != metadata_name: - logger.warning( - 'Generating metadata for package %s ' - 'produced metadata for project name %s. Fix your ' - '#egg=%s fragments.', - self.name, metadata_name, self.name - ) - self.req = Requirement(metadata_name) - - def prepare_pep517_metadata(self): - # type: () -> None - assert self.pep517_backend is not None + self.warn_on_mismatching_name() - metadata_dir = os.path.join( - self.setup_py_dir, - 'pip-wheel-metadata' - ) - ensure_dir(metadata_dir) - - with self.build_env: - # Note that Pep517HookCaller implements a fallback for - # prepare_metadata_for_build_wheel, so we don't have to - # consider the possibility that this hook doesn't exist. 
- backend = self.pep517_backend - self.spin_message = "Preparing wheel metadata" - distinfo_dir = backend.prepare_metadata_for_build_wheel( - metadata_dir - ) - - self.metadata_directory = os.path.join(metadata_dir, distinfo_dir) - - def run_egg_info(self): - # type: () -> None - if self.name: - logger.debug( - 'Running setup.py (path:%s) egg_info for package %s', - self.setup_py, self.name, - ) - else: - logger.debug( - 'Running setup.py (path:%s) egg_info for package from %s', - self.setup_py, self.link, - ) - script = SETUPTOOLS_SHIM % self.setup_py - base_cmd = [sys.executable, '-c', script] - if self.isolated: - base_cmd += ["--no-user-cfg"] - egg_info_cmd = base_cmd + ['egg_info'] - # We can't put the .egg-info files at the root, because then the - # source code will be mistaken for an installed egg, causing - # problems - if self.editable: - egg_base_option = [] # type: List[str] - else: - egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info') - ensure_dir(egg_info_dir) - egg_base_option = ['--egg-base', 'pip-egg-info'] - with self.build_env: - call_subprocess( - egg_info_cmd + egg_base_option, - cwd=self.setup_py_dir, - show_stdout=False, - command_desc='python setup.py egg_info') - - @property - def egg_info_path(self): - # type: () -> str - if self._egg_info_path is None: - if self.editable: - base = self.source_dir - else: - base = os.path.join(self.setup_py_dir, 'pip-egg-info') - filenames = os.listdir(base) - if self.editable: - filenames = [] - for root, dirs, files in os.walk(base): - for dir in vcs.dirnames: - if dir in dirs: - dirs.remove(dir) - # Iterate over a copy of ``dirs``, since mutating - # a list while iterating over it can cause trouble. - # (See https://github.com/pypa/pip/pull/462.) 
- for dir in list(dirs): - # Don't search in anything that looks like a virtualenv - # environment - if ( - os.path.lexists( - os.path.join(root, dir, 'bin', 'python') - ) or - os.path.exists( - os.path.join( - root, dir, 'Scripts', 'Python.exe' - ) - )): - dirs.remove(dir) - # Also don't search through tests - elif dir == 'test' or dir == 'tests': - dirs.remove(dir) - filenames.extend([os.path.join(root, dir) - for dir in dirs]) - filenames = [f for f in filenames if f.endswith('.egg-info')] - - if not filenames: - raise InstallationError( - "Files/directories not found in %s" % base - ) - # if we have more than one match, we pick the toplevel one. This - # can easily be the case if there is a dist folder which contains - # an extracted tarball for testing purposes. - if len(filenames) > 1: - filenames.sort( - key=lambda x: x.count(os.path.sep) + - (os.path.altsep and x.count(os.path.altsep) or 0) - ) - self._egg_info_path = os.path.join(base, filenames[0]) - return self._egg_info_path + self.assert_source_matches_version() @property def metadata(self): + # type: () -> Any if not hasattr(self, '_metadata'): self._metadata = get_metadata(self.get_dist()) @@ -670,27 +581,7 @@ class InstallRequirement(object): def get_dist(self): # type: () -> Distribution - """Return a pkg_resources.Distribution for this requirement""" - if self.metadata_directory: - base_dir, distinfo = os.path.split(self.metadata_directory) - metadata = pkg_resources.PathMetadata( - base_dir, self.metadata_directory - ) - dist_name = os.path.splitext(distinfo)[0] - typ = pkg_resources.DistInfoDistribution - else: - egg_info = self.egg_info_path.rstrip(os.path.sep) - base_dir = os.path.dirname(egg_info) - metadata = pkg_resources.PathMetadata(base_dir, egg_info) - dist_name = os.path.splitext(os.path.basename(egg_info))[0] - # https://github.com/python/mypy/issues/1174 - typ = pkg_resources.Distribution # type: ignore - - return typ( - base_dir, - project_name=dist_name, - metadata=metadata, - ) + 
return _get_dist(self.metadata_directory) def assert_source_matches_version(self): # type: () -> None @@ -711,8 +602,13 @@ class InstallRequirement(object): ) # For both source distributions and editables - def ensure_has_source_dir(self, parent_dir): - # type: (str) -> str + def ensure_has_source_dir( + self, + parent_dir, + autodelete=False, + parallel_builds=False, + ): + # type: (str, bool, bool) -> None """Ensure that a source_dir is set. This will create a temporary build dir if the name of the requirement @@ -723,45 +619,13 @@ class InstallRequirement(object): :return: self.source_dir """ if self.source_dir is None: - self.source_dir = self.build_location(parent_dir) - return self.source_dir + self.source_dir = self.ensure_build_location( + parent_dir, + autodelete=autodelete, + parallel_builds=parallel_builds, + ) # For editable installations - def install_editable( - self, - install_options, # type: List[str] - global_options=(), # type: Sequence[str] - prefix=None # type: Optional[str] - ): - # type: (...) -> None - logger.info('Running setup.py develop for %s', self.name) - - if self.isolated: - global_options = list(global_options) + ["--no-user-cfg"] - - if prefix: - prefix_param = ['--prefix={}'.format(prefix)] - install_options = list(install_options) + prefix_param - - with indent_log(): - # FIXME: should we do --install-headers here too? 
- with self.build_env: - call_subprocess( - [ - sys.executable, - '-c', - SETUPTOOLS_SHIM % self.setup_py - ] + - list(global_options) + - ['develop', '--no-deps'] + - list(install_options), - - cwd=self.setup_py_dir, - show_stdout=False, - ) - - self.install_succeeded = True - def update_editable(self, obtain=True): # type: (bool) -> None if not self.link: @@ -776,26 +640,38 @@ class InstallRequirement(object): if self.link.scheme == 'file': # Static paths don't get updated return - assert '+' in self.link.url, "bad url: %r" % self.link.url - if not self.update: - return + assert '+' in self.link.url, \ + "bad url: {self.link.url!r}".format(**locals()) vc_type, url = self.link.url.split('+', 1) - backend = vcs.get_backend(vc_type) - if backend: - vcs_backend = backend(self.link.url) + vcs_backend = vcs.get_backend(vc_type) + if vcs_backend: + if not self.link.is_vcs: + reason = ( + "This form of VCS requirement is being deprecated: {}." + ).format( + self.link.url + ) + replacement = None + if self.link.url.startswith("git+git@"): + replacement = ( + "git+https://git@example.com/..., " + "git+ssh://git@example.com/..., " + "or the insecure git+git://git@example.com/..." 
+ ) + deprecated(reason, replacement, gone_in="21.0", issue=7554) + hidden_url = hide_url(self.link.url) if obtain: - vcs_backend.obtain(self.source_dir) + vcs_backend.obtain(self.source_dir, url=hidden_url) else: - vcs_backend.export(self.source_dir) + vcs_backend.export(self.source_dir, url=hidden_url) else: assert 0, ( - 'Unexpected version control type (in %s): %s' - % (self.link, vc_type)) + 'Unexpected version control type (in {}): {}'.format( + self.link, vc_type)) # Top-level Actions - def uninstall(self, auto_confirm=False, verbose=False, - use_user_site=False): - # type: (bool, bool, bool) -> Optional[UninstallPathSet] + def uninstall(self, auto_confirm=False, verbose=False): + # type: (bool, bool) -> Optional[UninstallPathSet] """ Uninstall the distribution currently satisfying this requirement. @@ -808,41 +684,52 @@ class InstallRequirement(object): linked to global site-packages. """ - if not self.check_if_exists(use_user_site): + assert self.req + dist = get_distribution(self.req.name) + if not dist: logger.warning("Skipping %s as it is not installed.", self.name) return None - dist = self.satisfied_by or self.conflicts_with + logger.info('Found existing installation: %s', dist) uninstalled_pathset = UninstallPathSet.from_dist(dist) uninstalled_pathset.remove(auto_confirm, verbose) return uninstalled_pathset - def _clean_zip_name(self, name, prefix): # only used by archive. 
- assert name.startswith(prefix + os.path.sep), ( - "name %r doesn't start with prefix %r" % (name, prefix) - ) - name = name[len(prefix) + 1:] - name = name.replace(os.path.sep, '/') - return name - def _get_archive_name(self, path, parentdir, rootdir): # type: (str, str, str) -> str + + def _clean_zip_name(name, prefix): + # type: (str, str) -> str + assert name.startswith(prefix + os.path.sep), ( + "name {name!r} doesn't start with prefix {prefix!r}" + .format(**locals()) + ) + name = name[len(prefix) + 1:] + name = name.replace(os.path.sep, '/') + return name + path = os.path.join(parentdir, path) - name = self._clean_zip_name(path, rootdir) + name = _clean_zip_name(path, rootdir) return self.name + '/' + name - # TODO: Investigate if this should be kept in InstallRequirement - # Seems to be used only when VCS + downloads def archive(self, build_dir): # type: (str) -> None + """Saves archive to provided build_dir. + + Used for saving downloaded VCS requirements as part of `pip download`. + """ assert self.source_dir + create_archive = True - archive_name = '%s-%s.zip' % (self.name, self.metadata["version"]) + archive_name = '{}-{}.zip'.format(self.name, self.metadata["version"]) archive_path = os.path.join(build_dir, archive_name) + if os.path.exists(archive_path): response = ask_path_exists( - 'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' % - display_path(archive_path), ('i', 'w', 'b', 'a')) + 'The file {} exists. 
(i)gnore, (w)ipe, ' + '(b)ackup, (a)bort '.format( + display_path(archive_path)), + ('i', 'w', 'b', 'a')) if response == 'i': create_archive = False elif response == 'w': @@ -858,32 +745,33 @@ class InstallRequirement(object): shutil.move(archive_path, dest_file) elif response == 'a': sys.exit(-1) - if create_archive: - zip = zipfile.ZipFile( - archive_path, 'w', zipfile.ZIP_DEFLATED, - allowZip64=True + + if not create_archive: + return + + zip_output = zipfile.ZipFile( + archive_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True, + ) + with zip_output: + dir = os.path.normcase( + os.path.abspath(self.unpacked_source_directory) ) - dir = os.path.normcase(os.path.abspath(self.setup_py_dir)) for dirpath, dirnames, filenames in os.walk(dir): - if 'pip-egg-info' in dirnames: - dirnames.remove('pip-egg-info') for dirname in dirnames: - dir_arcname = self._get_archive_name(dirname, - parentdir=dirpath, - rootdir=dir) + dir_arcname = self._get_archive_name( + dirname, parentdir=dirpath, rootdir=dir, + ) zipdir = zipfile.ZipInfo(dir_arcname + '/') zipdir.external_attr = 0x1ED << 16 # 0o755 - zip.writestr(zipdir, '') + zip_output.writestr(zipdir, '') for filename in filenames: - if filename == PIP_DELETE_MARKER_FILENAME: - continue - file_arcname = self._get_archive_name(filename, - parentdir=dirpath, - rootdir=dir) + file_arcname = self._get_archive_name( + filename, parentdir=dirpath, rootdir=dir, + ) filename = os.path.join(dirpath, filename) - zip.write(filename, file_arcname) - zip.close() - logger.info('Saved %s', display_path(archive_path)) + zip_output.write(filename, file_arcname) + + logger.info('Saved %s', display_path(archive_path)) def install( self, @@ -897,125 +785,118 @@ class InstallRequirement(object): pycompile=True # type: bool ): # type: (...) 
-> None + scheme = get_scheme( + self.name, + user=use_user_site, + home=home, + root=root, + isolated=self.isolated, + prefix=prefix, + ) + global_options = global_options if global_options is not None else [] if self.editable: - self.install_editable( - install_options, global_options, prefix=prefix, + install_editable_legacy( + install_options, + global_options, + prefix=prefix, + home=home, + use_user_site=use_user_site, + name=self.name, + setup_py_path=self.setup_py_path, + isolated=self.isolated, + build_env=self.build_env, + unpacked_source_directory=self.unpacked_source_directory, ) + self.install_succeeded = True return - if self.is_wheel: - version = wheel.wheel_version(self.source_dir) - wheel.check_compatibility(version, self.name) - self.move_wheel_files( - self.source_dir, root=root, prefix=prefix, home=home, + if self.is_wheel: + assert self.local_file_path + direct_url = None + if self.original_link: + direct_url = direct_url_from_link( + self.original_link, + self.source_dir, + self.original_link_is_in_wheel_cache, + ) + install_wheel( + self.name, + self.local_file_path, + scheme=scheme, + req_description=str(self.req), + pycompile=pycompile, warn_script_location=warn_script_location, - use_user_site=use_user_site, pycompile=pycompile, + direct_url=direct_url, + requested=self.user_supplied, ) self.install_succeeded = True return + # TODO: Why don't we do this for editable installs? + # Extend the list of global and install options passed on to # the setup.py call with the ones from the requirements file. # Options specified in requirements file override those # specified on the command line, since the last option given # to setup.py is the one that is used. 
- global_options = list(global_options) + \ - self.options.get('global_options', []) - install_options = list(install_options) + \ - self.options.get('install_options', []) - - if self.isolated: - # https://github.com/python/mypy/issues/1174 - global_options = global_options + ["--no-user-cfg"] # type: ignore - - with TempDirectory(kind="record") as temp_dir: - record_filename = os.path.join(temp_dir.path, 'install-record.txt') - install_args = self.get_install_args( - global_options, record_filename, root, prefix, pycompile, + global_options = list(global_options) + self.global_options + install_options = list(install_options) + self.install_options + + try: + success = install_legacy( + install_options=install_options, + global_options=global_options, + root=root, + home=home, + prefix=prefix, + use_user_site=use_user_site, + pycompile=pycompile, + scheme=scheme, + setup_py_path=self.setup_py_path, + isolated=self.isolated, + req_name=self.name, + build_env=self.build_env, + unpacked_source_directory=self.unpacked_source_directory, + req_description=str(self.req), ) - msg = 'Running setup.py install for %s' % (self.name,) - with open_spinner(msg) as spinner: - with indent_log(): - with self.build_env: - call_subprocess( - install_args + install_options, - cwd=self.setup_py_dir, - show_stdout=False, - spinner=spinner, - ) - - if not os.path.exists(record_filename): - logger.debug('Record file %s not found', record_filename) - return + except LegacyInstallFailure as exc: + self.install_succeeded = False + six.reraise(*exc.parent) + except Exception: self.install_succeeded = True + raise + + self.install_succeeded = success + + +def check_invalid_constraint_type(req): + # type: (InstallRequirement) -> str + + # Check for unsupported forms + problem = "" + if not req.name: + problem = "Unnamed requirements are not allowed as constraints" + elif req.link: + problem = "Links are not allowed as constraints" + elif req.extras: + problem = "Constraints cannot have extras" 
+ + if problem: + deprecated( + reason=( + "Constraints are only allowed to take the form of a package " + "name and a version specifier. Other forms were originally " + "permitted as an accident of the implementation, but were " + "undocumented. The new implementation of the resolver no " + "longer supports these forms." + ), + replacement=( + "replacing the constraint with a requirement." + ), + # No plan yet for when the new resolver becomes default + gone_in=None, + issue=8210 + ) - def prepend_root(path): - if root is None or not os.path.isabs(path): - return path - else: - return change_root(root, path) - - with open(record_filename) as f: - for line in f: - directory = os.path.dirname(line) - if directory.endswith('.egg-info'): - egg_info_dir = prepend_root(directory) - break - else: - logger.warning( - 'Could not find .egg-info directory in install record' - ' for %s', - self, - ) - # FIXME: put the record somewhere - # FIXME: should this be an error? - return - new_lines = [] - with open(record_filename) as f: - for line in f: - filename = line.strip() - if os.path.isdir(filename): - filename += os.path.sep - new_lines.append( - os.path.relpath(prepend_root(filename), egg_info_dir) - ) - new_lines.sort() - ensure_dir(egg_info_dir) - inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt') - with open(inst_files_path, 'w') as f: - f.write('\n'.join(new_lines) + '\n') - - def get_install_args( - self, - global_options, # type: Sequence[str] - record_filename, # type: str - root, # type: Optional[str] - prefix, # type: Optional[str] - pycompile # type: bool - ): - # type: (...) 
-> List[str] - install_args = [sys.executable, "-u"] - install_args.append('-c') - install_args.append(SETUPTOOLS_SHIM % self.setup_py) - install_args += list(global_options) + \ - ['install', '--record', record_filename] - install_args += ['--single-version-externally-managed'] - - if root is not None: - install_args += ['--root', root] - if prefix is not None: - install_args += ['--prefix', prefix] - - if pycompile: - install_args += ["--compile"] - else: - install_args += ["--no-compile"] - - if running_under_virtualenv(): - py_ver_str = 'python' + sysconfig.get_python_version() - install_args += ['--install-headers', - os.path.join(sys.prefix, 'include', 'site', - py_ver_str, self.name)] - - return install_args + return problem diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/req_set.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/req_set.py index d1410e93..ab4b6f84 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/req_set.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/req_set.py @@ -3,14 +3,16 @@ from __future__ import absolute_import import logging from collections import OrderedDict +from pip._vendor.packaging.utils import canonicalize_name + from pip._internal.exceptions import InstallationError -from pip._internal.utils.logging import indent_log +from pip._internal.models.wheel import Wheel +from pip._internal.utils import compatibility_tags from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.wheel import Wheel if MYPY_CHECK_RUNNING: - from typing import Optional, List, Tuple, Dict, Iterable # noqa: F401 - from pip._internal.req.req_install import InstallRequirement # noqa: F401 + from typing import Dict, Iterable, List, Optional, Tuple + from pip._internal.req.req_install import InstallRequirement logger = logging.getLogger(__name__) @@ -18,33 +20,49 @@ logger = 
logging.getLogger(__name__) class RequirementSet(object): - def __init__(self, require_hashes=False, check_supported_wheels=True): - # type: (bool, bool) -> None + def __init__(self, check_supported_wheels=True): + # type: (bool) -> None """Create a RequirementSet. """ self.requirements = OrderedDict() # type: Dict[str, InstallRequirement] # noqa: E501 - self.require_hashes = require_hashes self.check_supported_wheels = check_supported_wheels - # Mapping of alias: real_name - self.requirement_aliases = {} # type: Dict[str, str] self.unnamed_requirements = [] # type: List[InstallRequirement] - self.successfully_downloaded = [] # type: List[InstallRequirement] - self.reqs_to_cleanup = [] # type: List[InstallRequirement] def __str__(self): - reqs = [req for req in self.requirements.values() - if not req.comes_from] - reqs.sort(key=lambda req: req.name.lower()) - return ' '.join([str(req.req) for req in reqs]) + # type: () -> str + requirements = sorted( + (req for req in self.requirements.values() if not req.comes_from), + key=lambda req: canonicalize_name(req.name), + ) + return ' '.join(str(req.req) for req in requirements) def __repr__(self): - reqs = [req for req in self.requirements.values()] - reqs.sort(key=lambda req: req.name.lower()) - reqs_str = ', '.join([str(req.req) for req in reqs]) - return ('<%s object; %d requirement(s): %s>' - % (self.__class__.__name__, len(reqs), reqs_str)) + # type: () -> str + requirements = sorted( + self.requirements.values(), + key=lambda req: canonicalize_name(req.name), + ) + + format_string = '<{classname} object; {count} requirement(s): {reqs}>' + return format_string.format( + classname=self.__class__.__name__, + count=len(requirements), + reqs=', '.join(str(req.req) for req in requirements), + ) + + def add_unnamed_requirement(self, install_req): + # type: (InstallRequirement) -> None + assert not install_req.name + self.unnamed_requirements.append(install_req) + + def add_named_requirement(self, install_req): + # type: 
(InstallRequirement) -> None + assert install_req.name + + project_name = canonicalize_name(install_req.name) + self.requirements[project_name] = install_req def add_requirement( self, @@ -67,13 +85,11 @@ class RequirementSet(object): the requirement is not applicable, or [install_req] if the requirement is applicable and has just been added. """ - name = install_req.name - # If the markers do not match, ignore this requirement. if not install_req.match_markers(extras_requested): logger.info( "Ignoring %s: markers '%s' don't match your environment", - name, install_req.markers, + install_req.name, install_req.markers, ) return [], None @@ -83,27 +99,27 @@ class RequirementSet(object): # single requirements file. if install_req.link and install_req.link.is_wheel: wheel = Wheel(install_req.link.filename) - if self.check_supported_wheels and not wheel.supported(): + tags = compatibility_tags.get_supported() + if (self.check_supported_wheels and not wheel.supported(tags)): raise InstallationError( - "%s is not a supported wheel on this platform." % - wheel.filename + "{} is not a supported wheel on this platform.".format( + wheel.filename) ) # This next bit is really a sanity check. - assert install_req.is_direct == (parent_req_name is None), ( - "a direct req shouldn't have a parent and also, " - "a non direct req should have a parent" + assert not install_req.user_supplied or parent_req_name is None, ( + "a user supplied req shouldn't have a parent" ) # Unnamed requirements are scanned again and the requirement won't be # added as a dependency until after scanning. 
- if not name: - # url or path requirement w/o an egg fragment - self.unnamed_requirements.append(install_req) + if not install_req.name: + self.add_unnamed_requirement(install_req) return [install_req], None try: - existing_req = self.get_requirement(name) + existing_req = self.get_requirement( + install_req.name) # type: Optional[InstallRequirement] except KeyError: existing_req = None @@ -116,18 +132,15 @@ class RequirementSet(object): ) if has_conflicting_requirement: raise InstallationError( - "Double requirement given: %s (already in %s, name=%r)" - % (install_req, existing_req, name) + "Double requirement given: {} (already in {}, name={!r})" + .format(install_req, existing_req, install_req.name) ) # When no existing requirement exists, add the requirement as a # dependency and it will be scanned again after. if not existing_req: - self.requirements[name] = install_req - # FIXME: what about other normalizations? E.g., _ vs. -? - if name.lower() != name: - self.requirement_aliases[name.lower()] = name - # We'd want to rescan this requirements later + self.add_named_requirement(install_req) + # We'd want to rescan this requirement later return [install_req], install_req # Assume there's no need to scan, and that we've already @@ -143,15 +156,18 @@ class RequirementSet(object): ) ) if does_not_satisfy_constraint: - self.reqs_to_cleanup.append(install_req) raise InstallationError( - "Could not satisfy constraints for '%s': " + "Could not satisfy constraints for '{}': " "installation from path or url cannot be " - "constrained to a version" % name, + "constrained to a version".format(install_req.name) ) # If we're now installing a constraint, mark the existing # object for real installation. existing_req.constraint = False + # If we're now installing a user supplied requirement, + # mark the existing object as such. 
+ if install_req.user_supplied: + existing_req.user_supplied = True existing_req.extras = tuple(sorted( set(existing_req.extras) | set(install_req.extras) )) @@ -163,35 +179,25 @@ class RequirementSet(object): # scanning again. return [existing_req], existing_req - def has_requirement(self, project_name): + def has_requirement(self, name): # type: (str) -> bool - name = project_name.lower() - if (name in self.requirements and - not self.requirements[name].constraint or - name in self.requirement_aliases and - not self.requirements[self.requirement_aliases[name]].constraint): - return True - return False + project_name = canonicalize_name(name) - @property - def has_requirements(self): - # type: () -> List[InstallRequirement] - return list(req for req in self.requirements.values() if not - req.constraint) or self.unnamed_requirements + return ( + project_name in self.requirements and + not self.requirements[project_name].constraint + ) - def get_requirement(self, project_name): + def get_requirement(self, name): # type: (str) -> InstallRequirement - for name in project_name, project_name.lower(): - if name in self.requirements: - return self.requirements[name] - if name in self.requirement_aliases: - return self.requirements[self.requirement_aliases[name]] - raise KeyError("No project with the name %r" % project_name) - - def cleanup_files(self): - # type: () -> None - """Clean up files, remove builds.""" - logger.debug('Cleaning up...') - with indent_log(): - for req in self.reqs_to_cleanup: - req.remove_temporary_source() + project_name = canonicalize_name(name) + + if project_name in self.requirements: + return self.requirements[project_name] + + raise KeyError("No project with the name {name!r}".format(**locals())) + + @property + def all_requirements(self): + # type: () -> List[InstallRequirement] + return self.unnamed_requirements + list(self.requirements.values()) diff --git 
a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/req_tracker.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/req_tracker.py index 82e084a4..13fb2456 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/req_tracker.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/req_tracker.py @@ -6,36 +6,83 @@ import hashlib import logging import os +from pip._vendor import contextlib2 + from pip._internal.utils.temp_dir import TempDirectory from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Set, Iterator # noqa: F401 - from pip._internal.req.req_install import InstallRequirement # noqa: F401 - from pip._internal.models.link import Link # noqa: F401 + from types import TracebackType + from typing import Dict, Iterator, Optional, Set, Type, Union + from pip._internal.req.req_install import InstallRequirement + from pip._internal.models.link import Link logger = logging.getLogger(__name__) +@contextlib.contextmanager +def update_env_context_manager(**changes): + # type: (str) -> Iterator[None] + target = os.environ + + # Save values from the target and change them. + non_existent_marker = object() + saved_values = {} # type: Dict[str, Union[object, str]] + for name, new_value in changes.items(): + try: + saved_values[name] = target[name] + except KeyError: + saved_values[name] = non_existent_marker + target[name] = new_value + + try: + yield + finally: + # Restore original values in the target. 
+ for name, original_value in saved_values.items(): + if original_value is non_existent_marker: + del target[name] + else: + assert isinstance(original_value, str) # for mypy + target[name] = original_value + + +@contextlib.contextmanager +def get_requirement_tracker(): + # type: () -> Iterator[RequirementTracker] + root = os.environ.get('PIP_REQ_TRACKER') + with contextlib2.ExitStack() as ctx: + if root is None: + root = ctx.enter_context( + TempDirectory(kind='req-tracker') + ).path + ctx.enter_context(update_env_context_manager(PIP_REQ_TRACKER=root)) + logger.debug("Initialized build tracking at %s", root) + + with RequirementTracker(root) as tracker: + yield tracker + + class RequirementTracker(object): - def __init__(self): - # type: () -> None - self._root = os.environ.get('PIP_REQ_TRACKER') - if self._root is None: - self._temp_dir = TempDirectory(delete=False, kind='req-tracker') - self._temp_dir.create() - self._root = os.environ['PIP_REQ_TRACKER'] = self._temp_dir.path - logger.debug('Created requirements tracker %r', self._root) - else: - self._temp_dir = None - logger.debug('Re-using requirements tracker %r', self._root) + def __init__(self, root): + # type: (str) -> None + self._root = root self._entries = set() # type: Set[InstallRequirement] + logger.debug("Created build tracker: %s", self._root) def __enter__(self): + # type: () -> RequirementTracker + logger.debug("Entered build tracker: %s", self._root) return self - def __exit__(self, exc_type, exc_val, exc_tb): + def __exit__( + self, + exc_type, # type: Optional[Type[BaseException]] + exc_val, # type: Optional[BaseException] + exc_tb # type: Optional[TracebackType] + ): + # type: (...) -> None self.cleanup() def _entry_path(self, link): @@ -45,40 +92,55 @@ class RequirementTracker(object): def add(self, req): # type: (InstallRequirement) -> None - link = req.link - info = str(req) - entry_path = self._entry_path(link) + """Add an InstallRequirement to build tracking. 
+ """ + + assert req.link + # Get the file to write information about this requirement. + entry_path = self._entry_path(req.link) + + # Try reading from the file. If it exists and can be read from, a build + # is already in progress, so a LookupError is raised. try: with open(entry_path) as fp: - # Error, these's already a build in progress. - raise LookupError('%s is already being built: %s' - % (link, fp.read())) + contents = fp.read() except IOError as e: + # if the error is anything other than "file does not exist", raise. if e.errno != errno.ENOENT: raise - assert req not in self._entries - with open(entry_path, 'w') as fp: - fp.write(info) - self._entries.add(req) - logger.debug('Added %s to build tracker %r', req, self._root) + else: + message = '{} is already being built: {}'.format( + req.link, contents) + raise LookupError(message) + + # If we're here, req should really not be building already. + assert req not in self._entries + + # Start tracking this requirement. + with open(entry_path, 'w') as fp: + fp.write(str(req)) + self._entries.add(req) + + logger.debug('Added %s to build tracker %r', req, self._root) def remove(self, req): # type: (InstallRequirement) -> None - link = req.link + """Remove an InstallRequirement from build tracking. + """ + + assert req.link + # Delete the created file and the corresponding entries. 
+ os.unlink(self._entry_path(req.link)) self._entries.remove(req) - os.unlink(self._entry_path(link)) + logger.debug('Removed %s from build tracker %r', req, self._root) def cleanup(self): # type: () -> None for req in set(self._entries): self.remove(req) - remove = self._temp_dir is not None - if remove: - self._temp_dir.cleanup() - logger.debug('%s build tracker %r', - 'Removed' if remove else 'Cleaned', - self._root) + + logger.debug("Removed build tracker: %r", self._root) @contextlib.contextmanager def track(self, req): diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/req_uninstall.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/req_uninstall.py index c80959e4..69719d33 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/req_uninstall.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/req/req_uninstall.py @@ -14,15 +14,30 @@ from pip._internal.locations import bin_py, bin_user from pip._internal.utils.compat import WINDOWS, cache_from_source, uses_pycache from pip._internal.utils.logging import indent_log from pip._internal.utils.misc import ( - FakeFile, ask, dist_in_usersite, dist_is_local, egg_link_path, is_local, - normalize_path, renames, rmtree, + FakeFile, + ask, + dist_in_usersite, + dist_is_local, + egg_link_path, + is_local, + normalize_path, + renames, + rmtree, ) from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import ( + Any, Callable, Dict, Iterable, Iterator, List, Optional, Set, Tuple, + ) + from pip._vendor.pkg_resources import Distribution logger = logging.getLogger(__name__) def _script_names(dist, script_name, is_gui): + # type: (Distribution, str, bool) -> List[str] """Create the fully qualified name of the files created by {console,gui}_scripts for the given ``dist``. 
Returns the list of file names @@ -44,9 +59,11 @@ def _script_names(dist, script_name, is_gui): def _unique(fn): + # type: (Callable[..., Iterator[Any]]) -> Callable[..., Iterator[Any]] @functools.wraps(fn) def unique(*args, **kw): - seen = set() + # type: (Any, Any) -> Iterator[Any] + seen = set() # type: Set[Any] for item in fn(*args, **kw): if item not in seen: seen.add(item) @@ -56,6 +73,7 @@ def _unique(fn): @_unique def uninstallation_paths(dist): + # type: (Distribution) -> Iterator[str] """ Yield all the uninstallation paths for dist based on RECORD-without-.py[co] @@ -78,13 +96,14 @@ def uninstallation_paths(dist): def compact(paths): + # type: (Iterable[str]) -> Set[str] """Compact a path set to contain the minimal number of paths necessary to contain all paths in the set. If /a/path/ and /a/path/to/a/file.txt are both in the set, leave only the shorter path.""" sep = os.path.sep - short_paths = set() + short_paths = set() # type: Set[str] for path in sorted(paths, key=len): should_skip = any( path.startswith(shortpath.rstrip("*")) and @@ -97,6 +116,7 @@ def compact(paths): def compress_for_rename(paths): + # type: (Iterable[str]) -> Set[str] """Returns a set containing the paths that need to be renamed. This set may include directories when the original sequence of paths @@ -106,9 +126,10 @@ def compress_for_rename(paths): remaining = set(case_map) unchecked = sorted(set(os.path.split(p)[0] for p in case_map.values()), key=len) - wildcards = set() + wildcards = set() # type: Set[str] def norm_join(*a): + # type: (str) -> str return os.path.normcase(os.path.join(*a)) for root in unchecked: @@ -117,8 +138,8 @@ def compress_for_rename(paths): # This directory has already been handled. 
continue - all_files = set() - all_subdirs = set() + all_files = set() # type: Set[str] + all_subdirs = set() # type: Set[str] for dirname, subdirs, files in os.walk(root): all_subdirs.update(norm_join(root, dirname, d) for d in subdirs) @@ -135,6 +156,7 @@ def compress_for_rename(paths): def compress_for_output_listing(paths): + # type: (Iterable[str]) -> Tuple[Set[str], Set[str]] """Returns a tuple of 2 sets of which paths to display to user The first set contains paths that would be deleted. Files of a package @@ -145,7 +167,7 @@ def compress_for_output_listing(paths): folders. """ - will_remove = list(paths) + will_remove = set(paths) will_skip = set() # Determine folders and files @@ -158,7 +180,8 @@ def compress_for_output_listing(paths): folders.add(os.path.dirname(path)) files.add(path) - _normcased_files = set(map(os.path.normcase, files)) + # probably this one https://github.com/python/mypy/issues/390 + _normcased_files = set(map(os.path.normcase, files)) # type: ignore folders = compact(folders) @@ -187,30 +210,31 @@ class StashedUninstallPathSet(object): """A set of file rename operations to stash files while tentatively uninstalling them.""" def __init__(self): + # type: () -> None # Mapping from source file root to [Adjacent]TempDirectory # for files under that directory. - self._save_dirs = {} + self._save_dirs = {} # type: Dict[str, TempDirectory] # (old path, new path) tuples for each move that may need # to be undone. - self._moves = [] + self._moves = [] # type: List[Tuple[str, str]] def _get_directory_stash(self, path): + # type: (str) -> str """Stashes a directory. 
Directories are stashed adjacent to their original location if possible, or else moved/copied into the user's temp dir.""" try: - save_dir = AdjacentTempDirectory(path) - save_dir.create() + save_dir = AdjacentTempDirectory(path) # type: TempDirectory except OSError: save_dir = TempDirectory(kind="uninstall") - save_dir.create() self._save_dirs[os.path.normcase(path)] = save_dir return save_dir.path def _get_file_stash(self, path): + # type: (str) -> str """Stashes a file. If no root has been provided, one will be created for the directory @@ -230,7 +254,6 @@ class StashedUninstallPathSet(object): # Did not find any suitable root head = os.path.dirname(path) save_dir = TempDirectory(kind='uninstall') - save_dir.create() self._save_dirs[head] = save_dir relpath = os.path.relpath(path, head) @@ -239,15 +262,18 @@ class StashedUninstallPathSet(object): return save_dir.path def stash(self, path): + # type: (str) -> str """Stashes the directory or file and returns its new location. + Handle symlinks as files to avoid modifying the symlink targets. """ - if os.path.isdir(path): + path_is_dir = os.path.isdir(path) and not os.path.islink(path) + if path_is_dir: new_path = self._get_directory_stash(path) else: new_path = self._get_file_stash(path) self._moves.append((path, new_path)) - if os.path.isdir(path) and os.path.isdir(new_path): + if (path_is_dir and os.path.isdir(new_path)): # If we're moving a directory, we need to # remove the destination first or else it will be # moved to inside the existing directory. 
@@ -258,6 +284,7 @@ class StashedUninstallPathSet(object): return new_path def commit(self): + # type: () -> None """Commits the uninstall by removing stashed files.""" for _, save_dir in self._save_dirs.items(): save_dir.cleanup() @@ -265,14 +292,15 @@ class StashedUninstallPathSet(object): self._save_dirs = {} def rollback(self): + # type: () -> None """Undoes the uninstall by moving stashed files back.""" for p in self._moves: - logging.info("Moving to %s\n from %s", *p) + logger.info("Moving to %s\n from %s", *p) for new_path, path in self._moves: try: logger.debug('Replacing %s from %s', new_path, path) - if os.path.isfile(new_path): + if os.path.isfile(new_path) or os.path.islink(new_path): os.unlink(new_path) elif os.path.isdir(new_path): rmtree(new_path) @@ -285,6 +313,7 @@ class StashedUninstallPathSet(object): @property def can_rollback(self): + # type: () -> bool return bool(self._moves) @@ -292,13 +321,15 @@ class UninstallPathSet(object): """A set of file paths to be removed in the uninstallation of a requirement.""" def __init__(self, dist): - self.paths = set() - self._refuse = set() - self.pth = {} + # type: (Distribution) -> None + self.paths = set() # type: Set[str] + self._refuse = set() # type: Set[str] + self.pth = {} # type: Dict[str, UninstallPthEntries] self.dist = dist self._moved_paths = StashedUninstallPathSet() def _permitted(self, path): + # type: (str) -> bool """ Return True if the given path is one we are permitted to remove/modify, False otherwise. 
@@ -307,6 +338,7 @@ class UninstallPathSet(object): return is_local(path) def add(self, path): + # type: (str) -> None head, tail = os.path.split(path) # we normalize the head to resolve parent directory symlinks, but not @@ -326,6 +358,7 @@ class UninstallPathSet(object): self.add(cache_from_source(path)) def add_pth(self, pth_file, entry): + # type: (str, str) -> None pth_file = normalize_path(pth_file) if self._permitted(pth_file): if pth_file not in self.pth: @@ -335,6 +368,7 @@ class UninstallPathSet(object): self._refuse.add(pth_file) def remove(self, auto_confirm=False, verbose=False): + # type: (bool, bool) -> None """Remove paths in ``self.paths`` with confirmation (unless ``auto_confirm`` is True).""" @@ -366,10 +400,12 @@ class UninstallPathSet(object): logger.info('Successfully uninstalled %s', dist_name_version) def _allowed_to_proceed(self, verbose): + # type: (bool) -> bool """Display which files would be deleted and prompt for confirmation """ def _display(msg, paths): + # type: (str, Iterable[str]) -> None if not paths: return @@ -383,7 +419,7 @@ class UninstallPathSet(object): else: # In verbose mode, display all the files that are going to be # deleted. - will_remove = list(self.paths) + will_remove = set(self.paths) will_skip = set() _display('Would remove:', will_remove) @@ -395,24 +431,27 @@ class UninstallPathSet(object): return ask('Proceed (y/n)? 
', ('y', 'n')) == 'y' def rollback(self): + # type: () -> None """Rollback the changes previously made by remove().""" if not self._moved_paths.can_rollback: logger.error( "Can't roll back %s; was not uninstalled", self.dist.project_name, ) - return False + return logger.info('Rolling back uninstall of %s', self.dist.project_name) self._moved_paths.rollback() for pth in self.pth.values(): pth.rollback() def commit(self): + # type: () -> None """Remove temporary save dir: rollback will no longer be possible.""" self._moved_paths.commit() @classmethod def from_dist(cls, dist): + # type: (Distribution) -> UninstallPathSet dist_path = normalize_path(dist.location) if not dist_is_local(dist): logger.info( @@ -501,8 +540,9 @@ class UninstallPathSet(object): with open(develop_egg_link, 'r') as fh: link_pointer = os.path.normcase(fh.readline().strip()) assert (link_pointer == dist.location), ( - 'Egg-link %s does not match installed location of %s ' - '(at %s)' % (link_pointer, dist.project_name, dist.location) + 'Egg-link {} does not match installed location of {} ' + '(at {})'.format( + link_pointer, dist.project_name, dist.location) ) paths_to_remove.add(develop_egg_link) easy_install_pth = os.path.join(os.path.dirname(develop_egg_link), @@ -544,26 +584,37 @@ class UninstallPathSet(object): class UninstallPthEntries(object): def __init__(self, pth_file): - if not os.path.isfile(pth_file): - raise UninstallationError( - "Cannot remove entries from nonexistent file %s" % pth_file - ) + # type: (str) -> None self.file = pth_file - self.entries = set() - self._saved_lines = None + self.entries = set() # type: Set[str] + self._saved_lines = None # type: Optional[List[bytes]] def add(self, entry): + # type: (str) -> None entry = os.path.normcase(entry) # On Windows, os.path.normcase converts the entry to use # backslashes. This is correct for entries that describe absolute # paths outside of site-packages, but all the others use forward # slashes. 
+ # os.path.splitdrive is used instead of os.path.isabs because isabs + # treats non-absolute paths with drive letter markings like c:foo\bar + # as absolute paths. It also does not recognize UNC paths if they don't + # have more than "\\sever\share". Valid examples: "\\server\share\" or + # "\\server\share\folder". Python 2.7.8+ support UNC in splitdrive. if WINDOWS and not os.path.splitdrive(entry)[0]: entry = entry.replace('\\', '/') self.entries.add(entry) def remove(self): + # type: () -> None logger.debug('Removing pth entries from %s:', self.file) + + # If the file doesn't exist, log a warning and return + if not os.path.isfile(self.file): + logger.warning( + "Cannot remove entries from nonexistent file %s", self.file + ) + return with open(self.file, 'rb') as fh: # windows uses '\r\n' with py3k, but uses '\n' with py2.x lines = fh.readlines() @@ -585,6 +636,7 @@ class UninstallPthEntries(object): fh.writelines(lines) def rollback(self): + # type: () -> bool if self._saved_lines is None: logger.error( 'Cannot roll back changes to %s, none were made', self.file diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/base.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/base.py new file mode 100644 index 00000000..2fa118bd --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/base.py @@ -0,0 +1,20 @@ +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Callable, List + from pip._internal.req.req_install import InstallRequirement + from pip._internal.req.req_set import RequirementSet + + InstallRequirementProvider = Callable[ + [str, 
InstallRequirement], InstallRequirement + ] + + +class BaseResolver(object): + def resolve(self, root_reqs, check_supported_wheels): + # type: (List[InstallRequirement], bool) -> RequirementSet + raise NotImplementedError() + + def get_installation_order(self, req_set): + # type: (RequirementSet) -> List[InstallRequirement] + raise NotImplementedError() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/legacy/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/legacy/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolve.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/legacy/resolver.py similarity index 60% rename from Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolve.py rename to Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/legacy/resolver.py index 33f572f1..c9b4c661 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolve.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/legacy/resolver.py @@ -10,35 +10,102 @@ for sub-dependencies a. "first found, wins" (where the order is breadth first) """ +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False +# mypy: disallow-untyped-defs=False + import logging +import sys from collections import defaultdict from itertools import chain +from pip._vendor.packaging import specifiers + from pip._internal.exceptions import ( - BestVersionAlreadyInstalled, DistributionNotFound, HashError, HashErrors, + BestVersionAlreadyInstalled, + DistributionNotFound, + HashError, + HashErrors, UnsupportedPythonVersion, ) -from pip._internal.req.constructors import install_req_from_req_string +from pip._internal.req.req_install import check_invalid_constraint_type +from pip._internal.req.req_set import RequirementSet +from pip._internal.resolution.base import BaseResolver +from pip._internal.utils.compatibility_tags import get_supported from pip._internal.utils.logging import indent_log -from pip._internal.utils.misc import dist_in_usersite, ensure_dir -from pip._internal.utils.packaging import check_dist_requires_python +from pip._internal.utils.misc import dist_in_usersite, normalize_version_info +from pip._internal.utils.packaging import ( + check_requires_python, + get_requires_python, +) from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Optional, DefaultDict, List, Set # noqa: F401 - from pip._internal.download import PipSession # noqa: F401 - from pip._internal.req.req_install import InstallRequirement # noqa: F401 - from pip._internal.index import PackageFinder # noqa: F401 - from pip._internal.req.req_set import RequirementSet # noqa: F401 - from pip._internal.operations.prepare import ( # noqa: F401 - DistAbstraction, RequirementPreparer - ) - from pip._internal.cache import WheelCache # noqa: F401 + from typing import DefaultDict, List, Optional, Set, Tuple + from pip._vendor import pkg_resources + + from pip._internal.cache import WheelCache + from pip._internal.distributions import AbstractDistribution + from pip._internal.index.package_finder import PackageFinder + from 
pip._internal.models.link import Link + from pip._internal.operations.prepare import RequirementPreparer + from pip._internal.req.req_install import InstallRequirement + from pip._internal.resolution.base import InstallRequirementProvider + + DiscoveredDependencies = DefaultDict[str, List[InstallRequirement]] logger = logging.getLogger(__name__) -class Resolver(object): +def _check_dist_requires_python( + dist, # type: pkg_resources.Distribution + version_info, # type: Tuple[int, int, int] + ignore_requires_python=False, # type: bool +): + # type: (...) -> None + """ + Check whether the given Python version is compatible with a distribution's + "Requires-Python" value. + + :param version_info: A 3-tuple of ints representing the Python + major-minor-micro version to check. + :param ignore_requires_python: Whether to ignore the "Requires-Python" + value if the given Python version isn't compatible. + + :raises UnsupportedPythonVersion: When the given Python version isn't + compatible. + """ + requires_python = get_requires_python(dist) + try: + is_compatible = check_requires_python( + requires_python, version_info=version_info, + ) + except specifiers.InvalidSpecifier as exc: + logger.warning( + "Package %r has an invalid Requires-Python: %s", + dist.project_name, exc, + ) + return + + if is_compatible: + return + + version = '.'.join(map(str, version_info)) + if ignore_requires_python: + logger.debug( + 'Ignoring failed Requires-Python check for package %r: ' + '%s not in %r', + dist.project_name, version, requires_python, + ) + return + + raise UnsupportedPythonVersion( + 'Package {!r} requires a different Python: {} not in {!r}'.format( + dist.project_name, version, requires_python, + )) + + +class Resolver(BaseResolver): """Resolves which packages need to be installed/uninstalled to perform \ the requested operation without breaking the requirements of any package. 
""" @@ -48,47 +115,45 @@ class Resolver(object): def __init__( self, preparer, # type: RequirementPreparer - session, # type: PipSession finder, # type: PackageFinder wheel_cache, # type: Optional[WheelCache] + make_install_req, # type: InstallRequirementProvider use_user_site, # type: bool ignore_dependencies, # type: bool ignore_installed, # type: bool ignore_requires_python, # type: bool force_reinstall, # type: bool - isolated, # type: bool upgrade_strategy, # type: str - use_pep517=None # type: Optional[bool] + py_version_info=None, # type: Optional[Tuple[int, ...]] ): # type: (...) -> None super(Resolver, self).__init__() assert upgrade_strategy in self._allowed_strategies + if py_version_info is None: + py_version_info = sys.version_info[:3] + else: + py_version_info = normalize_version_info(py_version_info) + + self._py_version_info = py_version_info + self.preparer = preparer self.finder = finder - self.session = session - - # NOTE: This would eventually be replaced with a cache that can give - # information about both sdist and wheels transparently. 
self.wheel_cache = wheel_cache - # This is set in resolve - self.require_hashes = None # type: Optional[bool] - self.upgrade_strategy = upgrade_strategy self.force_reinstall = force_reinstall - self.isolated = isolated self.ignore_dependencies = ignore_dependencies self.ignore_installed = ignore_installed self.ignore_requires_python = ignore_requires_python self.use_user_site = use_user_site - self.use_pep517 = use_pep517 + self._make_install_req = make_install_req self._discovered_dependencies = \ - defaultdict(list) # type: DefaultDict[str, List] + defaultdict(list) # type: DiscoveredDependencies - def resolve(self, requirement_set): - # type: (RequirementSet) -> None + def resolve(self, root_reqs, check_supported_wheels): + # type: (List[InstallRequirement], bool) -> RequirementSet """Resolve what operations need to be done As a side-effect of this method, the packages (and their dependencies) @@ -99,37 +164,23 @@ class Resolver(object): possible to move the preparation to become a step separated from dependency resolution. """ - # make the wheelhouse - if self.preparer.wheel_download_dir: - ensure_dir(self.preparer.wheel_download_dir) - - # If any top-level requirement has a hash specified, enter - # hash-checking mode, which requires hashes from all. - root_reqs = ( - requirement_set.unnamed_requirements + - list(requirement_set.requirements.values()) - ) - self.require_hashes = ( - requirement_set.require_hashes or - any(req.has_hash_options for req in root_reqs) + requirement_set = RequirementSet( + check_supported_wheels=check_supported_wheels ) - - # Display where finder is looking for packages - locations = self.finder.get_formatted_locations() - if locations: - logger.info(locations) + for req in root_reqs: + if req.constraint: + check_invalid_constraint_type(req) + requirement_set.add_requirement(req) # Actually prepare the files, and collect any exceptions. 
Most hash # exceptions cannot be checked ahead of time, because - # req.populate_link() needs to be called before we can make decisions + # _populate_link() needs to be called before we can make decisions # based on link type. discovered_reqs = [] # type: List[InstallRequirement] hash_errors = HashErrors() - for req in chain(root_reqs, discovered_reqs): + for req in chain(requirement_set.all_requirements, discovered_reqs): try: - discovered_reqs.extend( - self._resolve_one(requirement_set, req) - ) + discovered_reqs.extend(self._resolve_one(requirement_set, req)) except HashError as exc: exc.req = req hash_errors.append(exc) @@ -137,6 +188,8 @@ class Resolver(object): if hash_errors: raise hash_errors + return requirement_set + def _is_upgrade_allowed(self, req): # type: (InstallRequirement) -> bool if self.upgrade_strategy == "to-satisfy-only": @@ -145,7 +198,7 @@ class Resolver(object): return True else: assert self.upgrade_strategy == "only-if-needed" - return req.is_direct + return req.user_supplied or req.constraint def _set_req_to_reinstall(self, req): # type: (InstallRequirement) -> None @@ -155,10 +208,9 @@ class Resolver(object): # Don't uninstall the conflict if doing a user install and the # conflict is not a user install. if not self.use_user_site or dist_in_usersite(req.satisfied_by): - req.conflicts_with = req.satisfied_by + req.should_reinstall = True req.satisfied_by = None - # XXX: Stop passing requirement_set for options def _check_skip_installed(self, req_to_install): # type: (InstallRequirement) -> Optional[str] """Check if req_to_install should be skipped. 
@@ -212,19 +264,66 @@ class Resolver(object): self._set_req_to_reinstall(req_to_install) return None + def _find_requirement_link(self, req): + # type: (InstallRequirement) -> Optional[Link] + upgrade = self._is_upgrade_allowed(req) + best_candidate = self.finder.find_requirement(req, upgrade) + if not best_candidate: + return None + + # Log a warning per PEP 592 if necessary before returning. + link = best_candidate.link + if link.is_yanked: + reason = link.yanked_reason or '<none given>' + msg = ( + # Mark this as a unicode string to prevent + # "UnicodeEncodeError: 'ascii' codec can't encode character" + # in Python 2 when the reason contains non-ascii characters. + u'The candidate selected for download or install is a ' + 'yanked version: {candidate}\n' + 'Reason for being yanked: {reason}' + ).format(candidate=best_candidate, reason=reason) + logger.warning(msg) + + return link + + def _populate_link(self, req): + # type: (InstallRequirement) -> None + """Ensure that if a link can be found for this, that it is found. + + Note that req.link may still be None - if the requirement is already + installed and not needed to be upgraded based on the return value of + _is_upgrade_allowed(). + + If preparer.require_hashes is True, don't use the wheel cache, because + cached wheels, always built locally, have different hashes than the + files downloaded from the index server and thus throw false hash + mismatches. Furthermore, cached wheels at present have undeterministic + contents due to file modification times. 
+ """ + if req.link is None: + req.link = self._find_requirement_link(req) + + if self.wheel_cache is None or self.preparer.require_hashes: + return + cache_entry = self.wheel_cache.get_cache_entry( + link=req.link, + package_name=req.name, + supported_tags=get_supported(), + ) + if cache_entry is not None: + logger.debug('Using cached wheel link: %s', cache_entry.link) + if req.link is req.original_link and cache_entry.persistent: + req.original_link_is_in_wheel_cache = True + req.link = cache_entry.link + def _get_abstract_dist_for(self, req): - # type: (InstallRequirement) -> DistAbstraction + # type: (InstallRequirement) -> AbstractDistribution """Takes a InstallRequirement and returns a single AbstractDist \ representing a prepared variant of the same. """ - assert self.require_hashes is not None, ( - "require_hashes should have been set in Resolver.resolve()" - ) - if req.editable: - return self.preparer.prepare_editable_requirement( - req, self.require_hashes, self.use_user_site, self.finder, - ) + return self.preparer.prepare_editable_requirement(req) # satisfied_by is only evaluated by calling _check_skip_installed, # so it must be None here. @@ -233,14 +332,12 @@ class Resolver(object): if req.satisfied_by: return self.preparer.prepare_installed_requirement( - req, self.require_hashes, skip_reason + req, skip_reason ) - upgrade_allowed = self._is_upgrade_allowed(req) - abstract_dist = self.preparer.prepare_linked_requirement( - req, self.session, self.finder, upgrade_allowed, - self.require_hashes - ) + # We eagerly populate the link, since that's our "legacy" behavior. + self._populate_link(req) + abstract_dist = self.preparer.prepare_linked_requirement(req) # NOTE # The following portion is for determining if a certain package is @@ -273,7 +370,7 @@ class Resolver(object): def _resolve_one( self, requirement_set, # type: RequirementSet - req_to_install # type: InstallRequirement + req_to_install, # type: InstallRequirement ): # type: (...) 
-> List[InstallRequirement] """Prepare a single requirements file. @@ -288,30 +385,23 @@ class Resolver(object): req_to_install.prepared = True - # register tmp src for cleanup in case something goes wrong - requirement_set.reqs_to_cleanup.append(req_to_install) - abstract_dist = self._get_abstract_dist_for(req_to_install) # Parse and return dependencies - dist = abstract_dist.dist() - try: - check_dist_requires_python(dist) - except UnsupportedPythonVersion as err: - if self.ignore_requires_python: - logger.warning(err.args[0]) - else: - raise + dist = abstract_dist.get_pkg_resources_distribution() + # This will raise UnsupportedPythonVersion if the given Python + # version isn't compatible with the distribution's Requires-Python. + _check_dist_requires_python( + dist, version_info=self._py_version_info, + ignore_requires_python=self.ignore_requires_python, + ) more_reqs = [] # type: List[InstallRequirement] def add_req(subreq, extras_requested): - sub_install_req = install_req_from_req_string( + sub_install_req = self._make_install_req( str(subreq), req_to_install, - isolated=self.isolated, - wheel_cache=self.wheel_cache, - use_pep517=self.use_pep517 ) parent_req_name = req_to_install.name to_scan_again, add_to_parent = requirement_set.add_requirement( @@ -330,7 +420,9 @@ class Resolver(object): # can refer to it when adding dependencies. if not requirement_set.has_requirement(req_to_install.name): # 'unnamed' requirements will get added here - req_to_install.is_direct = True + # 'unnamed' requirements can only come from being directly + # provided by the user. 
+ assert req_to_install.user_supplied requirement_set.add_requirement( req_to_install, parent_req_name=None, ) @@ -346,7 +438,7 @@ class Resolver(object): ) for missing in missing_requested: logger.warning( - '%s does not provide the extra \'%s\'', + "%s does not provide the extra '%s'", dist, missing ) @@ -360,7 +452,7 @@ class Resolver(object): # XXX: --no-install leads this to report 'Successfully # downloaded' for only non-editable reqs, even though we took # action on them. - requirement_set.successfully_downloaded.append(req_to_install) + req_to_install.successfully_downloaded = True return more_reqs diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/base.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/base.py new file mode 100644 index 00000000..a155a110 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/base.py @@ -0,0 +1,82 @@ +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import FrozenSet, Iterable, Optional, Tuple + + from pip._vendor.packaging.version import _BaseVersion + + from pip._internal.models.link import Link + from pip._internal.req.req_install import InstallRequirement + + CandidateLookup = Tuple[ + Optional["Candidate"], + Optional[InstallRequirement], + ] + + +def format_name(project, extras): + # type: (str, FrozenSet[str]) -> str + if not extras: + return project + canonical_extras = sorted(canonicalize_name(e) for e in extras) + return "{}[{}]".format(project, ",".join(canonical_extras)) + + 
+class Requirement(object): + @property + def name(self): + # type: () -> str + raise NotImplementedError("Subclass should override") + + def is_satisfied_by(self, candidate): + # type: (Candidate) -> bool + return False + + def get_candidate_lookup(self): + # type: () -> CandidateLookup + raise NotImplementedError("Subclass should override") + + def format_for_error(self): + # type: () -> str + raise NotImplementedError("Subclass should override") + + +class Candidate(object): + @property + def name(self): + # type: () -> str + raise NotImplementedError("Override in subclass") + + @property + def version(self): + # type: () -> _BaseVersion + raise NotImplementedError("Override in subclass") + + @property + def is_installed(self): + # type: () -> bool + raise NotImplementedError("Override in subclass") + + @property + def is_editable(self): + # type: () -> bool + raise NotImplementedError("Override in subclass") + + @property + def source_link(self): + # type: () -> Optional[Link] + raise NotImplementedError("Override in subclass") + + def iter_dependencies(self): + # type: () -> Iterable[Optional[Requirement]] + raise NotImplementedError("Override in subclass") + + def get_install_requirement(self): + # type: () -> Optional[InstallRequirement] + raise NotImplementedError("Override in subclass") + + def format_for_error(self): + # type: () -> str + raise NotImplementedError("Subclass should override") diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/candidates.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/candidates.py new file mode 100644 index 00000000..c289bb58 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/candidates.py @@ -0,0 +1,594 @@ +import logging +import sys + +from pip._vendor.contextlib2 import suppress +from pip._vendor.packaging.specifiers import InvalidSpecifier, 
SpecifierSet +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import Version + +from pip._internal.exceptions import HashError, MetadataInconsistent +from pip._internal.network.lazy_wheel import ( + HTTPRangeRequestUnsupported, + dist_from_wheel_url, +) +from pip._internal.req.constructors import ( + install_req_from_editable, + install_req_from_line, +) +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import dist_is_editable, normalize_version_info +from pip._internal.utils.packaging import get_requires_python +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +from .base import Candidate, format_name + +if MYPY_CHECK_RUNNING: + from typing import Any, FrozenSet, Iterable, Optional, Tuple, Union + + from pip._vendor.packaging.version import _BaseVersion + from pip._vendor.pkg_resources import Distribution + + from pip._internal.distributions import AbstractDistribution + from pip._internal.models.link import Link + + from .base import Requirement + from .factory import Factory + + BaseCandidate = Union[ + "AlreadyInstalledCandidate", + "EditableCandidate", + "LinkCandidate", + ] + + +logger = logging.getLogger(__name__) + + +def make_install_req_from_link(link, template): + # type: (Link, InstallRequirement) -> InstallRequirement + assert not template.editable, "template is editable" + if template.req: + line = str(template.req) + else: + line = link.url + ireq = install_req_from_line( + line, + user_supplied=template.user_supplied, + comes_from=template.comes_from, + use_pep517=template.use_pep517, + isolated=template.isolated, + constraint=template.constraint, + options=dict( + install_options=template.install_options, + global_options=template.global_options, + hashes=template.hash_options + ), + ) + ireq.original_link = template.original_link + ireq.link = link + return ireq + + +def 
make_install_req_from_editable(link, template): + # type: (Link, InstallRequirement) -> InstallRequirement + assert template.editable, "template not editable" + return install_req_from_editable( + link.url, + user_supplied=template.user_supplied, + comes_from=template.comes_from, + use_pep517=template.use_pep517, + isolated=template.isolated, + constraint=template.constraint, + options=dict( + install_options=template.install_options, + global_options=template.global_options, + hashes=template.hash_options + ), + ) + + +def make_install_req_from_dist(dist, template): + # type: (Distribution, InstallRequirement) -> InstallRequirement + project_name = canonicalize_name(dist.project_name) + if template.req: + line = str(template.req) + elif template.link: + line = "{} @ {}".format(project_name, template.link.url) + else: + line = "{}=={}".format(project_name, dist.parsed_version) + ireq = install_req_from_line( + line, + user_supplied=template.user_supplied, + comes_from=template.comes_from, + use_pep517=template.use_pep517, + isolated=template.isolated, + constraint=template.constraint, + options=dict( + install_options=template.install_options, + global_options=template.global_options, + hashes=template.hash_options + ), + ) + ireq.satisfied_by = dist + return ireq + + +class _InstallRequirementBackedCandidate(Candidate): + """A candidate backed by an ``InstallRequirement``. + + This represents a package request with the target not being already + in the environment, and needs to be fetched and installed. The backing + ``InstallRequirement`` is responsible for most of the leg work; this + class exposes appropriate information to the resolver. + + :param link: The link passed to the ``InstallRequirement``. The backing + ``InstallRequirement`` will use this link to fetch the distribution. + :param source_link: The link this candidate "originates" from. This is + different from ``link`` when the link is found in the wheel cache. 
+ ``link`` would point to the wheel cache, while this points to the + found remote link (e.g. from pypi.org). + """ + is_installed = False + + def __init__( + self, + link, # type: Link + source_link, # type: Link + ireq, # type: InstallRequirement + factory, # type: Factory + name=None, # type: Optional[str] + version=None, # type: Optional[_BaseVersion] + ): + # type: (...) -> None + self._link = link + self._source_link = source_link + self._factory = factory + self._ireq = ireq + self._name = name + self._version = version + self._dist = None # type: Optional[Distribution] + self._prepared = False + + def __repr__(self): + # type: () -> str + return "{class_name}({link!r})".format( + class_name=self.__class__.__name__, + link=str(self._link), + ) + + def __hash__(self): + # type: () -> int + return hash((self.__class__, self._link)) + + def __eq__(self, other): + # type: (Any) -> bool + if isinstance(other, self.__class__): + return self._link == other._link + return False + + # Needed for Python 2, which does not implement this by default + def __ne__(self, other): + # type: (Any) -> bool + return not self.__eq__(other) + + @property + def source_link(self): + # type: () -> Optional[Link] + return self._source_link + + @property + def name(self): + # type: () -> str + """The normalised name of the project the candidate refers to""" + if self._name is None: + self._name = canonicalize_name(self.dist.project_name) + return self._name + + @property + def version(self): + # type: () -> _BaseVersion + if self._version is None: + self._version = self.dist.parsed_version + return self._version + + def format_for_error(self): + # type: () -> str + return "{} {} (from {})".format( + self.name, + self.version, + self._link.file_path if self._link.is_file else self._link + ) + + def _prepare_abstract_distribution(self): + # type: () -> AbstractDistribution + raise NotImplementedError("Override in subclass") + + def _check_metadata_consistency(self): + # type: () -> None 
+ """Check for consistency of project name and version of dist.""" + # TODO: (Longer term) Rather than abort, reject this candidate + # and backtrack. This would need resolvelib support. + dist = self._dist # type: Distribution + name = canonicalize_name(dist.project_name) + if self._name is not None and self._name != name: + raise MetadataInconsistent(self._ireq, "name", dist.project_name) + version = dist.parsed_version + if self._version is not None and self._version != version: + raise MetadataInconsistent(self._ireq, "version", dist.version) + + def _prepare(self): + # type: () -> None + if self._prepared: + return + try: + abstract_dist = self._prepare_abstract_distribution() + except HashError as e: + e.req = self._ireq + raise + + self._dist = abstract_dist.get_pkg_resources_distribution() + assert self._dist is not None, "Distribution already installed" + self._check_metadata_consistency() + self._prepared = True + + def _fetch_metadata(self): + # type: () -> None + """Fetch metadata, using lazy wheel if possible.""" + preparer = self._factory.preparer + use_lazy_wheel = self._factory.use_lazy_wheel + remote_wheel = self._link.is_wheel and not self._link.is_file + if use_lazy_wheel and remote_wheel and not preparer.require_hashes: + assert self._name is not None + logger.info('Collecting %s', self._ireq.req or self._ireq) + # If HTTPRangeRequestUnsupported is raised, fallback silently. 
+ with indent_log(), suppress(HTTPRangeRequestUnsupported): + logger.info( + 'Obtaining dependency information from %s %s', + self._name, self._version, + ) + url = self._link.url.split('#', 1)[0] + session = preparer.downloader._session + self._dist = dist_from_wheel_url(self._name, url, session) + self._check_metadata_consistency() + if self._dist is None: + self._prepare() + + @property + def dist(self): + # type: () -> Distribution + if self._dist is None: + self._fetch_metadata() + return self._dist + + def _get_requires_python_specifier(self): + # type: () -> Optional[SpecifierSet] + requires_python = get_requires_python(self.dist) + if requires_python is None: + return None + try: + spec = SpecifierSet(requires_python) + except InvalidSpecifier as e: + logger.warning( + "Package %r has an invalid Requires-Python: %s", self.name, e, + ) + return None + return spec + + def iter_dependencies(self): + # type: () -> Iterable[Optional[Requirement]] + for r in self.dist.requires(): + yield self._factory.make_requirement_from_spec(str(r), self._ireq) + python_dep = self._factory.make_requires_python_requirement( + self._get_requires_python_specifier(), + ) + if python_dep: + yield python_dep + + def get_install_requirement(self): + # type: () -> Optional[InstallRequirement] + self._prepare() + return self._ireq + + +class LinkCandidate(_InstallRequirementBackedCandidate): + is_editable = False + + def __init__( + self, + link, # type: Link + template, # type: InstallRequirement + factory, # type: Factory + name=None, # type: Optional[str] + version=None, # type: Optional[_BaseVersion] + ): + # type: (...) 
-> None + source_link = link + cache_entry = factory.get_wheel_cache_entry(link, name) + if cache_entry is not None: + logger.debug("Using cached wheel link: %s", cache_entry.link) + link = cache_entry.link + ireq = make_install_req_from_link(link, template) + + if (cache_entry is not None and + cache_entry.persistent and + template.link is template.original_link): + ireq.original_link_is_in_wheel_cache = True + + super(LinkCandidate, self).__init__( + link=link, + source_link=source_link, + ireq=ireq, + factory=factory, + name=name, + version=version, + ) + + def _prepare_abstract_distribution(self): + # type: () -> AbstractDistribution + return self._factory.preparer.prepare_linked_requirement( + self._ireq, parallel_builds=True, + ) + + +class EditableCandidate(_InstallRequirementBackedCandidate): + is_editable = True + + def __init__( + self, + link, # type: Link + template, # type: InstallRequirement + factory, # type: Factory + name=None, # type: Optional[str] + version=None, # type: Optional[_BaseVersion] + ): + # type: (...) -> None + super(EditableCandidate, self).__init__( + link=link, + source_link=link, + ireq=make_install_req_from_editable(link, template), + factory=factory, + name=name, + version=version, + ) + + def _prepare_abstract_distribution(self): + # type: () -> AbstractDistribution + return self._factory.preparer.prepare_editable_requirement(self._ireq) + + +class AlreadyInstalledCandidate(Candidate): + is_installed = True + source_link = None + + def __init__( + self, + dist, # type: Distribution + template, # type: InstallRequirement + factory, # type: Factory + ): + # type: (...) -> None + self.dist = dist + self._ireq = make_install_req_from_dist(dist, template) + self._factory = factory + + # This is just logging some messages, so we can do it eagerly. + # The returned dist would be exactly the same as self.dist because we + # set satisfied_by in make_install_req_from_dist. 
+ # TODO: Supply reason based on force_reinstall and upgrade_strategy. + skip_reason = "already satisfied" + factory.preparer.prepare_installed_requirement(self._ireq, skip_reason) + + def __repr__(self): + # type: () -> str + return "{class_name}({distribution!r})".format( + class_name=self.__class__.__name__, + distribution=self.dist, + ) + + def __hash__(self): + # type: () -> int + return hash((self.__class__, self.name, self.version)) + + def __eq__(self, other): + # type: (Any) -> bool + if isinstance(other, self.__class__): + return self.name == other.name and self.version == other.version + return False + + # Needed for Python 2, which does not implement this by default + def __ne__(self, other): + # type: (Any) -> bool + return not self.__eq__(other) + + @property + def name(self): + # type: () -> str + return canonicalize_name(self.dist.project_name) + + @property + def version(self): + # type: () -> _BaseVersion + return self.dist.parsed_version + + @property + def is_editable(self): + # type: () -> bool + return dist_is_editable(self.dist) + + def format_for_error(self): + # type: () -> str + return "{} {} (Installed)".format(self.name, self.version) + + def iter_dependencies(self): + # type: () -> Iterable[Optional[Requirement]] + for r in self.dist.requires(): + yield self._factory.make_requirement_from_spec(str(r), self._ireq) + + def get_install_requirement(self): + # type: () -> Optional[InstallRequirement] + return None + + +class ExtrasCandidate(Candidate): + """A candidate that has 'extras', indicating additional dependencies. + + Requirements can be for a project with dependencies, something like + foo[extra]. The extras don't affect the project/version being installed + directly, but indicate that we need additional dependencies. We model that + by having an artificial ExtrasCandidate that wraps the "base" candidate. + + The ExtrasCandidate differs from the base in the following ways: + + 1. It has a unique name, of the form foo[extra]. 
This causes the resolver + to treat it as a separate node in the dependency graph. + 2. When we're getting the candidate's dependencies, + a) We specify that we want the extra dependencies as well. + b) We add a dependency on the base candidate. + See below for why this is needed. + 3. We return None for the underlying InstallRequirement, as the base + candidate will provide it, and we don't want to end up with duplicates. + + The dependency on the base candidate is needed so that the resolver can't + decide that it should recommend foo[extra1] version 1.0 and foo[extra2] + version 2.0. Having those candidates depend on foo=1.0 and foo=2.0 + respectively forces the resolver to recognise that this is a conflict. + """ + def __init__( + self, + base, # type: BaseCandidate + extras, # type: FrozenSet[str] + ): + # type: (...) -> None + self.base = base + self.extras = extras + + def __repr__(self): + # type: () -> str + return "{class_name}(base={base!r}, extras={extras!r})".format( + class_name=self.__class__.__name__, + base=self.base, + extras=self.extras, + ) + + def __hash__(self): + # type: () -> int + return hash((self.base, self.extras)) + + def __eq__(self, other): + # type: (Any) -> bool + if isinstance(other, self.__class__): + return self.base == other.base and self.extras == other.extras + return False + + # Needed for Python 2, which does not implement this by default + def __ne__(self, other): + # type: (Any) -> bool + return not self.__eq__(other) + + @property + def name(self): + # type: () -> str + """The normalised name of the project the candidate refers to""" + return format_name(self.base.name, self.extras) + + @property + def version(self): + # type: () -> _BaseVersion + return self.base.version + + def format_for_error(self): + # type: () -> str + return "{} [{}]".format( + self.base.format_for_error(), + ", ".join(sorted(self.extras)) + ) + + @property + def is_installed(self): + # type: () -> bool + return self.base.is_installed + + @property 
+ def is_editable(self): + # type: () -> bool + return self.base.is_editable + + @property + def source_link(self): + # type: () -> Optional[Link] + return self.base.source_link + + def iter_dependencies(self): + # type: () -> Iterable[Optional[Requirement]] + factory = self.base._factory + + # The user may have specified extras that the candidate doesn't + # support. We ignore any unsupported extras here. + valid_extras = self.extras.intersection(self.base.dist.extras) + invalid_extras = self.extras.difference(self.base.dist.extras) + for extra in sorted(invalid_extras): + logger.warning( + "%s %s does not provide the extra '%s'", + self.base.name, + self.version, + extra + ) + + # Add a dependency on the exact base + # (See note 2b in the class docstring) + yield factory.make_requirement_from_candidate(self.base) + + for r in self.base.dist.requires(valid_extras): + requirement = factory.make_requirement_from_spec( + str(r), self.base._ireq, valid_extras, + ) + if requirement: + yield requirement + + def get_install_requirement(self): + # type: () -> Optional[InstallRequirement] + # We don't return anything here, because we always + # depend on the base candidate, and we'll get the + # install requirement from that. + return None + + +class RequiresPythonCandidate(Candidate): + is_installed = False + source_link = None + + def __init__(self, py_version_info): + # type: (Optional[Tuple[int, ...]]) -> None + if py_version_info is not None: + version_info = normalize_version_info(py_version_info) + else: + version_info = sys.version_info[:3] + self._version = Version(".".join(str(c) for c in version_info)) + + # We don't need to implement __eq__() and __ne__() since there is always + # only one RequiresPythonCandidate in a resolution, i.e. the host Python. + # The built-in object.__eq__() and object.__ne__() do exactly what we want. + + @property + def name(self): + # type: () -> str + # Avoid conflicting with the PyPI package "Python". 
+ return "<Python from Requires-Python>" + + @property + def version(self): + # type: () -> _BaseVersion + return self._version + + def format_for_error(self): + # type: () -> str + return "Python {}".format(self.version) + + def iter_dependencies(self): + # type: () -> Iterable[Optional[Requirement]] + return () + + def get_install_requirement(self): + # type: () -> Optional[InstallRequirement] + return None diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/factory.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/factory.py new file mode 100644 index 00000000..bd7e3efd --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/factory.py @@ -0,0 +1,459 @@ +import collections +import logging + +from pip._vendor import six +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.exceptions import ( + DistributionNotFound, + InstallationError, + UnsupportedPythonVersion, + UnsupportedWheel, +) +from pip._internal.models.wheel import Wheel +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.compatibility_tags import get_supported +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.misc import ( + dist_in_site_packages, + dist_in_usersite, + get_installed_distributions, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.virtualenv import running_under_virtualenv + +from .candidates import ( + AlreadyInstalledCandidate, + EditableCandidate, + ExtrasCandidate, + LinkCandidate, + RequiresPythonCandidate, +) +from .requirements import ( + ExplicitRequirement, + RequiresPythonRequirement, + SpecifierRequirement, +) + +if MYPY_CHECK_RUNNING: + from typing import ( + FrozenSet, + Dict, + Iterable, + List, + Optional, + Sequence, + Set, + Tuple, + TypeVar, + ) + + from pip._vendor.packaging.specifiers 
import SpecifierSet + from pip._vendor.packaging.version import _BaseVersion + from pip._vendor.pkg_resources import Distribution + from pip._vendor.resolvelib import ResolutionImpossible + + from pip._internal.cache import CacheEntry, WheelCache + from pip._internal.index.package_finder import PackageFinder + from pip._internal.models.link import Link + from pip._internal.operations.prepare import RequirementPreparer + from pip._internal.resolution.base import InstallRequirementProvider + + from .base import Candidate, Requirement + from .candidates import BaseCandidate + + C = TypeVar("C") + Cache = Dict[Link, C] + VersionCandidates = Dict[_BaseVersion, Candidate] + + +logger = logging.getLogger(__name__) + + +class Factory(object): + def __init__( + self, + finder, # type: PackageFinder + preparer, # type: RequirementPreparer + make_install_req, # type: InstallRequirementProvider + wheel_cache, # type: Optional[WheelCache] + use_user_site, # type: bool + force_reinstall, # type: bool + ignore_installed, # type: bool + ignore_requires_python, # type: bool + py_version_info=None, # type: Optional[Tuple[int, ...]] + lazy_wheel=False, # type: bool + ): + # type: (...) 
-> None + self._finder = finder + self.preparer = preparer + self._wheel_cache = wheel_cache + self._python_candidate = RequiresPythonCandidate(py_version_info) + self._make_install_req_from_spec = make_install_req + self._use_user_site = use_user_site + self._force_reinstall = force_reinstall + self._ignore_requires_python = ignore_requires_python + self.use_lazy_wheel = lazy_wheel + + self._link_candidate_cache = {} # type: Cache[LinkCandidate] + self._editable_candidate_cache = {} # type: Cache[EditableCandidate] + + if not ignore_installed: + self._installed_dists = { + canonicalize_name(dist.project_name): dist + for dist in get_installed_distributions() + } + else: + self._installed_dists = {} + + @property + def force_reinstall(self): + # type: () -> bool + return self._force_reinstall + + def _make_candidate_from_dist( + self, + dist, # type: Distribution + extras, # type: FrozenSet[str] + template, # type: InstallRequirement + ): + # type: (...) -> Candidate + base = AlreadyInstalledCandidate(dist, template, factory=self) + if extras: + return ExtrasCandidate(base, extras) + return base + + def _make_candidate_from_link( + self, + link, # type: Link + extras, # type: FrozenSet[str] + template, # type: InstallRequirement + name, # type: Optional[str] + version, # type: Optional[_BaseVersion] + ): + # type: (...) -> Candidate + # TODO: Check already installed candidate, and use it if the link and + # editable flag match. 
+ if template.editable: + if link not in self._editable_candidate_cache: + self._editable_candidate_cache[link] = EditableCandidate( + link, template, factory=self, name=name, version=version, + ) + base = self._editable_candidate_cache[link] # type: BaseCandidate + else: + if link not in self._link_candidate_cache: + self._link_candidate_cache[link] = LinkCandidate( + link, template, factory=self, name=name, version=version, + ) + base = self._link_candidate_cache[link] + if extras: + return ExtrasCandidate(base, extras) + return base + + def _iter_found_candidates( + self, + ireqs, # type: Sequence[InstallRequirement] + specifier, # type: SpecifierSet + ): + # type: (...) -> Iterable[Candidate] + if not ireqs: + return () + + # The InstallRequirement implementation requires us to give it a + # "template". Here we just choose the first requirement to represent + # all of them. + # Hopefully the Project model can correct this mismatch in the future. + template = ireqs[0] + name = canonicalize_name(template.req.name) + + hashes = Hashes() + extras = frozenset() # type: FrozenSet[str] + for ireq in ireqs: + specifier &= ireq.req.specifier + hashes |= ireq.hashes(trust_internet=False) + extras |= frozenset(ireq.extras) + + # We use this to ensure that we only yield a single candidate for + # each version (the finder's preferred one for that version). The + # requirement needs to return only one candidate per version, so we + # implement that logic here so that requirements using this helper + # don't all have to do the same thing later. + candidates = collections.OrderedDict() # type: VersionCandidates + + # Get the installed version, if it matches, unless the user + # specified `--force-reinstall`, when we want the version from + # the index instead. 
+ installed_version = None + installed_candidate = None + if not self._force_reinstall and name in self._installed_dists: + installed_dist = self._installed_dists[name] + installed_version = installed_dist.parsed_version + if specifier.contains(installed_version, prereleases=True): + installed_candidate = self._make_candidate_from_dist( + dist=installed_dist, + extras=extras, + template=template, + ) + + found = self._finder.find_best_candidate( + project_name=name, + specifier=specifier, + hashes=hashes, + ) + for ican in found.iter_applicable(): + if ican.version == installed_version and installed_candidate: + candidate = installed_candidate + else: + candidate = self._make_candidate_from_link( + link=ican.link, + extras=extras, + template=template, + name=name, + version=ican.version, + ) + candidates[ican.version] = candidate + + # Yield the installed version even if it is not found on the index. + if installed_version and installed_candidate: + candidates[installed_version] = installed_candidate + + return six.itervalues(candidates) + + def find_candidates(self, requirements, constraint): + # type: (Sequence[Requirement], SpecifierSet) -> Iterable[Candidate] + explicit_candidates = set() # type: Set[Candidate] + ireqs = [] # type: List[InstallRequirement] + for req in requirements: + cand, ireq = req.get_candidate_lookup() + if cand is not None: + explicit_candidates.add(cand) + if ireq is not None: + ireqs.append(ireq) + + # If none of the requirements want an explicit candidate, we can ask + # the finder for candidates. 
+ if not explicit_candidates: + return self._iter_found_candidates(ireqs, constraint) + + if constraint: + name = explicit_candidates.pop().name + raise InstallationError( + "Could not satisfy constraints for {!r}: installation from " + "path or url cannot be constrained to a version".format(name) + ) + + return ( + c for c in explicit_candidates + if all(req.is_satisfied_by(c) for req in requirements) + ) + + def make_requirement_from_install_req(self, ireq, requested_extras): + # type: (InstallRequirement, Iterable[str]) -> Optional[Requirement] + if not ireq.match_markers(requested_extras): + logger.info( + "Ignoring %s: markers '%s' don't match your environment", + ireq.name, ireq.markers, + ) + return None + if not ireq.link: + return SpecifierRequirement(ireq) + if ireq.link.is_wheel: + wheel = Wheel(ireq.link.filename) + if not wheel.supported(self._finder.target_python.get_tags()): + msg = "{} is not a supported wheel on this platform.".format( + wheel.filename, + ) + raise UnsupportedWheel(msg) + cand = self._make_candidate_from_link( + ireq.link, + extras=frozenset(ireq.extras), + template=ireq, + name=canonicalize_name(ireq.name) if ireq.name else None, + version=None, + ) + return self.make_requirement_from_candidate(cand) + + def make_requirement_from_candidate(self, candidate): + # type: (Candidate) -> ExplicitRequirement + return ExplicitRequirement(candidate) + + def make_requirement_from_spec( + self, + specifier, # type: str + comes_from, # type: InstallRequirement + requested_extras=(), # type: Iterable[str] + ): + # type: (...) 
-> Optional[Requirement] + ireq = self._make_install_req_from_spec(specifier, comes_from) + return self.make_requirement_from_install_req(ireq, requested_extras) + + def make_requires_python_requirement(self, specifier): + # type: (Optional[SpecifierSet]) -> Optional[Requirement] + if self._ignore_requires_python or specifier is None: + return None + return RequiresPythonRequirement(specifier, self._python_candidate) + + def get_wheel_cache_entry(self, link, name): + # type: (Link, Optional[str]) -> Optional[CacheEntry] + """Look up the link in the wheel cache. + + If ``preparer.require_hashes`` is True, don't use the wheel cache, + because cached wheels, always built locally, have different hashes + than the files downloaded from the index server and thus throw false + hash mismatches. Furthermore, cached wheels at present have + nondeterministic contents due to file modification times. + """ + if self._wheel_cache is None or self.preparer.require_hashes: + return None + return self._wheel_cache.get_cache_entry( + link=link, + package_name=name, + supported_tags=get_supported(), + ) + + def get_dist_to_uninstall(self, candidate): + # type: (Candidate) -> Optional[Distribution] + # TODO: Are there more cases this needs to return True? Editable? + dist = self._installed_dists.get(candidate.name) + if dist is None: # Not installed, no uninstallation required. + return None + + # We're installing into global site. The current installation must + # be uninstalled, no matter it's in global or user site, because the + # user site installation has precedence over global. + if not self._use_user_site: + return dist + + # We're installing into user site. Remove the user site installation. + if dist_in_usersite(dist): + return dist + + # We're installing into user site, but the installed incompatible + # package is in global site. We can't uninstall that, and would let + # the new user installation to "shadow" it. 
But shadowing won't work + # in virtual environments, so we error out. + if running_under_virtualenv() and dist_in_site_packages(dist): + raise InstallationError( + "Will not install to the user site because it will " + "lack sys.path precedence to {} in {}".format( + dist.project_name, dist.location, + ) + ) + return None + + def _report_requires_python_error( + self, + requirement, # type: RequiresPythonRequirement + template, # type: Candidate + ): + # type: (...) -> UnsupportedPythonVersion + message_format = ( + "Package {package!r} requires a different Python: " + "{version} not in {specifier!r}" + ) + message = message_format.format( + package=template.name, + version=self._python_candidate.version, + specifier=str(requirement.specifier), + ) + return UnsupportedPythonVersion(message) + + def get_installation_error(self, e): + # type: (ResolutionImpossible) -> InstallationError + + assert e.causes, "Installation error reported with no cause" + + # If one of the things we can't solve is "we need Python X.Y", + # that is what we report. + for cause in e.causes: + if isinstance(cause.requirement, RequiresPythonRequirement): + return self._report_requires_python_error( + cause.requirement, + cause.parent, + ) + + # Otherwise, we have a set of causes which can't all be satisfied + # at once. + + # The simplest case is when we have *one* cause that can't be + # satisfied. We just report that case. + if len(e.causes) == 1: + req, parent = e.causes[0] + if parent is None: + req_disp = str(req) + else: + req_disp = '{} (from {})'.format(req, parent.name) + logger.critical( + "Could not find a version that satisfies the requirement %s", + req_disp, + ) + return DistributionNotFound( + 'No matching distribution found for {}'.format(req) + ) + + # OK, we now have a list of requirements that can't all be + # satisfied at once. 
+ + # A couple of formatting helpers + def text_join(parts): + # type: (List[str]) -> str + if len(parts) == 1: + return parts[0] + + return ", ".join(parts[:-1]) + " and " + parts[-1] + + def readable_form(cand): + # type: (Candidate) -> str + return "{} {}".format(cand.name, cand.version) + + def describe_trigger(parent): + # type: (Candidate) -> str + ireq = parent.get_install_requirement() + if not ireq or not ireq.comes_from: + return "{} {}".format(parent.name, parent.version) + if isinstance(ireq.comes_from, InstallRequirement): + return str(ireq.comes_from.name) + return str(ireq.comes_from) + + triggers = [] + for req, parent in e.causes: + if parent is None: + # This is a root requirement, so we can report it directly + trigger = req.format_for_error() + else: + trigger = describe_trigger(parent) + triggers.append(trigger) + + if triggers: + info = text_join(triggers) + else: + info = "the requested packages" + + msg = "Cannot install {} because these package versions " \ + "have conflicting dependencies.".format(info) + logger.critical(msg) + msg = "\nThe conflict is caused by:" + for req, parent in e.causes: + msg = msg + "\n " + if parent: + msg = msg + "{} {} depends on ".format( + parent.name, + parent.version + ) + else: + msg = msg + "The user requested " + msg = msg + req.format_for_error() + + msg = msg + "\n\n" + \ + "To fix this you could try to:\n" + \ + "1. loosen the range of package versions you've specified\n" + \ + "2. 
remove package versions to allow pip attempt to solve " + \ + "the dependency conflict\n" + + logger.info(msg) + + return DistributionNotFound( + "ResolutionImpossible For help visit: " + "https://pip.pypa.io/en/stable/user_guide/" + "#fixing-conflicting-dependencies" + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/provider.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/provider.py new file mode 100644 index 00000000..72f16205 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/provider.py @@ -0,0 +1,150 @@ +from pip._vendor.packaging.specifiers import SpecifierSet +from pip._vendor.resolvelib.providers import AbstractProvider + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import ( + Any, + Dict, + Iterable, + Optional, + Sequence, + Set, + Tuple, + Union, + ) + + from .base import Requirement, Candidate + from .factory import Factory + +# Notes on the relationship between the provider, the factory, and the +# candidate and requirement classes. +# +# The provider is a direct implementation of the resolvelib class. Its role +# is to deliver the API that resolvelib expects. +# +# Rather than work with completely abstract "requirement" and "candidate" +# concepts as resolvelib does, pip has concrete classes implementing these two +# ideas. The API of Requirement and Candidate objects are defined in the base +# classes, but essentially map fairly directly to the equivalent provider +# methods. In particular, `find_matches` and `is_satisfied_by` are +# requirement methods, and `get_dependencies` is a candidate method. +# +# The factory is the interface to pip's internal mechanisms. It is stateless, +# and is created by the resolver and held as a property of the provider. 
It is +# responsible for creating Requirement and Candidate objects, and provides +# services to those objects (access to pip's finder and preparer). + + +class PipProvider(AbstractProvider): + def __init__( + self, + factory, # type: Factory + constraints, # type: Dict[str, SpecifierSet] + ignore_dependencies, # type: bool + upgrade_strategy, # type: str + user_requested, # type: Set[str] + ): + # type: (...) -> None + self._factory = factory + self._constraints = constraints + self._ignore_dependencies = ignore_dependencies + self._upgrade_strategy = upgrade_strategy + self.user_requested = user_requested + + def _sort_matches(self, matches): + # type: (Iterable[Candidate]) -> Sequence[Candidate] + + # The requirement is responsible for returning a sequence of potential + # candidates, one per version. The provider handles the logic of + # deciding the order in which these candidates should be passed to + # the resolver. + + # The `matches` argument is a sequence of candidates, one per version, + # which are potential options to be installed. The requirement will + # have already sorted out whether to give us an already-installed + # candidate or a version from PyPI (i.e., it will deal with options + # like --force-reinstall and --ignore-installed). + + # We now work out the correct order. + # + # 1. If no other considerations apply, later versions take priority. + # 2. An already installed distribution is preferred over any other, + # unless the user has requested an upgrade. + # Upgrades are allowed when: + # * The --upgrade flag is set, and + # - The project was specified on the command line, or + # - The project is a dependency and the "eager" upgrade strategy + # was requested. + def _eligible_for_upgrade(name): + # type: (str) -> bool + """Are upgrades allowed for this project? 
+ + This checks the upgrade strategy, and whether the project was one + that the user specified in the command line, in order to decide + whether we should upgrade if there's a newer version available. + + (Note that we don't need access to the `--upgrade` flag, because + an upgrade strategy of "to-satisfy-only" means that `--upgrade` + was not specified). + """ + if self._upgrade_strategy == "eager": + return True + elif self._upgrade_strategy == "only-if-needed": + return (name in self.user_requested) + return False + + def sort_key(c): + # type: (Candidate) -> int + """Return a sort key for the matches. + + The highest priority should be given to installed candidates that + are not eligible for upgrade. We use the integer value in the first + part of the key to sort these before other candidates. + + We only pull the installed candidate to the bottom (i.e. most + preferred), but otherwise keep the ordering returned by the + requirement. The requirement is responsible for returning a list + otherwise sorted for the resolver, taking account for versions + and binary preferences as specified by the user. + """ + if c.is_installed and not _eligible_for_upgrade(c.name): + return 1 + return 0 + + return sorted(matches, key=sort_key) + + def identify(self, dependency): + # type: (Union[Requirement, Candidate]) -> str + return dependency.name + + def get_preference( + self, + resolution, # type: Optional[Candidate] + candidates, # type: Sequence[Candidate] + information # type: Sequence[Tuple[Requirement, Candidate]] + ): + # type: (...) 
-> Any + # Use the "usual" value for now + return len(candidates) + + def find_matches(self, requirements): + # type: (Sequence[Requirement]) -> Iterable[Candidate] + if not requirements: + return [] + constraint = self._constraints.get( + requirements[0].name, SpecifierSet(), + ) + candidates = self._factory.find_candidates(requirements, constraint) + return reversed(self._sort_matches(candidates)) + + def is_satisfied_by(self, requirement, candidate): + # type: (Requirement, Candidate) -> bool + return requirement.is_satisfied_by(candidate) + + def get_dependencies(self, candidate): + # type: (Candidate) -> Sequence[Requirement] + if self._ignore_dependencies: + return [] + return [r for r in candidate.iter_dependencies() if r is not None] diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/requirements.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/requirements.py new file mode 100644 index 00000000..bc1061f4 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/requirements.py @@ -0,0 +1,137 @@ +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +from .base import Requirement, format_name + +if MYPY_CHECK_RUNNING: + from pip._vendor.packaging.specifiers import SpecifierSet + + from pip._internal.req.req_install import InstallRequirement + + from .base import Candidate, CandidateLookup + + +class ExplicitRequirement(Requirement): + def __init__(self, candidate): + # type: (Candidate) -> None + self.candidate = candidate + + def __repr__(self): + # type: () -> str + return "{class_name}({candidate!r})".format( + class_name=self.__class__.__name__, + candidate=self.candidate, + ) + + @property + def name(self): + # type: () -> str + # No need to canonicalise - the candidate did this + return self.candidate.name + + def 
format_for_error(self): + # type: () -> str + return self.candidate.format_for_error() + + def get_candidate_lookup(self): + # type: () -> CandidateLookup + return self.candidate, None + + def is_satisfied_by(self, candidate): + # type: (Candidate) -> bool + return candidate == self.candidate + + +class SpecifierRequirement(Requirement): + def __init__(self, ireq): + # type: (InstallRequirement) -> None + assert ireq.link is None, "This is a link, not a specifier" + self._ireq = ireq + self._extras = frozenset(ireq.extras) + + def __str__(self): + # type: () -> str + return str(self._ireq.req) + + def __repr__(self): + # type: () -> str + return "{class_name}({requirement!r})".format( + class_name=self.__class__.__name__, + requirement=str(self._ireq.req), + ) + + @property + def name(self): + # type: () -> str + canonical_name = canonicalize_name(self._ireq.req.name) + return format_name(canonical_name, self._extras) + + def format_for_error(self): + # type: () -> str + + # Convert comma-separated specifiers into "A, B, ..., F and G" + # This makes the specifier a bit more "human readable", without + # risking a change in meaning. (Hopefully! Not all edge cases have + # been checked) + parts = [s.strip() for s in str(self).split(",")] + if len(parts) == 0: + return "" + elif len(parts) == 1: + return parts[0] + + return ", ".join(parts[:-1]) + " and " + parts[-1] + + def get_candidate_lookup(self): + # type: () -> CandidateLookup + return None, self._ireq + + def is_satisfied_by(self, candidate): + # type: (Candidate) -> bool + assert candidate.name == self.name, \ + "Internal issue: Candidate is not for this requirement " \ + " {} vs {}".format(candidate.name, self.name) + # We can safely always allow prereleases here since PackageFinder + # already implements the prerelease logic, and would have filtered out + # prerelease candidates if the user does not expect them. 
+ spec = self._ireq.req.specifier + return spec.contains(candidate.version, prereleases=True) + + +class RequiresPythonRequirement(Requirement): + """A requirement representing Requires-Python metadata. + """ + def __init__(self, specifier, match): + # type: (SpecifierSet, Candidate) -> None + self.specifier = specifier + self._candidate = match + + def __repr__(self): + # type: () -> str + return "{class_name}({specifier!r})".format( + class_name=self.__class__.__name__, + specifier=str(self.specifier), + ) + + @property + def name(self): + # type: () -> str + return self._candidate.name + + def format_for_error(self): + # type: () -> str + return "Python " + str(self.specifier) + + def get_candidate_lookup(self): + # type: () -> CandidateLookup + if self.specifier.contains(self._candidate.version, prereleases=True): + return self._candidate, None + return None, None + + def is_satisfied_by(self, candidate): + # type: (Candidate) -> bool + assert candidate.name == self._candidate.name, "Not Python candidate" + # We can safely always allow prereleases here since PackageFinder + # already implements the prerelease logic, and would have filtered out + # prerelease candidates if the user does not expect them. 
+ return self.specifier.contains(candidate.version, prereleases=True) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/resolver.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/resolver.py new file mode 100644 index 00000000..43ea2486 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/resolution/resolvelib/resolver.py @@ -0,0 +1,258 @@ +import functools +import logging + +from pip._vendor import six +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.resolvelib import BaseReporter, ResolutionImpossible +from pip._vendor.resolvelib import Resolver as RLResolver + +from pip._internal.exceptions import InstallationError +from pip._internal.req.req_install import check_invalid_constraint_type +from pip._internal.req.req_set import RequirementSet +from pip._internal.resolution.base import BaseResolver +from pip._internal.resolution.resolvelib.provider import PipProvider +from pip._internal.utils.misc import dist_is_editable +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +from .factory import Factory + +if MYPY_CHECK_RUNNING: + from typing import Dict, List, Optional, Set, Tuple + + from pip._vendor.packaging.specifiers import SpecifierSet + from pip._vendor.resolvelib.resolvers import Result + from pip._vendor.resolvelib.structs import Graph + + from pip._internal.cache import WheelCache + from pip._internal.index.package_finder import PackageFinder + from pip._internal.operations.prepare import RequirementPreparer + from pip._internal.req.req_install import InstallRequirement + from pip._internal.resolution.base import InstallRequirementProvider + + +logger = logging.getLogger(__name__) + + +class Resolver(BaseResolver): + _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"} + + def __init__( + self, + preparer, # type: RequirementPreparer + finder, # type: 
PackageFinder + wheel_cache, # type: Optional[WheelCache] + make_install_req, # type: InstallRequirementProvider + use_user_site, # type: bool + ignore_dependencies, # type: bool + ignore_installed, # type: bool + ignore_requires_python, # type: bool + force_reinstall, # type: bool + upgrade_strategy, # type: str + py_version_info=None, # type: Optional[Tuple[int, ...]] + lazy_wheel=False, # type: bool + ): + super(Resolver, self).__init__() + if lazy_wheel: + logger.warning( + 'pip is using lazily downloaded wheels using HTTP ' + 'range requests to obtain dependency information. ' + 'This experimental feature is enabled through ' + '--use-feature=fast-deps and it is not ready for production.' + ) + + assert upgrade_strategy in self._allowed_strategies + + self.factory = Factory( + finder=finder, + preparer=preparer, + make_install_req=make_install_req, + wheel_cache=wheel_cache, + use_user_site=use_user_site, + force_reinstall=force_reinstall, + ignore_installed=ignore_installed, + ignore_requires_python=ignore_requires_python, + py_version_info=py_version_info, + lazy_wheel=lazy_wheel, + ) + self.ignore_dependencies = ignore_dependencies + self.upgrade_strategy = upgrade_strategy + self._result = None # type: Optional[Result] + + def resolve(self, root_reqs, check_supported_wheels): + # type: (List[InstallRequirement], bool) -> RequirementSet + + constraints = {} # type: Dict[str, SpecifierSet] + user_requested = set() # type: Set[str] + requirements = [] + for req in root_reqs: + if req.constraint: + # Ensure we only accept valid constraints + problem = check_invalid_constraint_type(req) + if problem: + raise InstallationError(problem) + + name = canonicalize_name(req.name) + if name in constraints: + constraints[name] = constraints[name] & req.specifier + else: + constraints[name] = req.specifier + else: + if req.user_supplied and req.name: + user_requested.add(canonicalize_name(req.name)) + r = self.factory.make_requirement_from_install_req( + req, 
requested_extras=(), + ) + if r is not None: + requirements.append(r) + + provider = PipProvider( + factory=self.factory, + constraints=constraints, + ignore_dependencies=self.ignore_dependencies, + upgrade_strategy=self.upgrade_strategy, + user_requested=user_requested, + ) + reporter = BaseReporter() + resolver = RLResolver(provider, reporter) + + try: + try_to_avoid_resolution_too_deep = 2000000 + self._result = resolver.resolve( + requirements, max_rounds=try_to_avoid_resolution_too_deep, + ) + + except ResolutionImpossible as e: + error = self.factory.get_installation_error(e) + six.raise_from(error, e) + + req_set = RequirementSet(check_supported_wheels=check_supported_wheels) + for candidate in self._result.mapping.values(): + ireq = candidate.get_install_requirement() + if ireq is None: + continue + + # Check if there is already an installation under the same name, + # and set a flag for later stages to uninstall it, if needed. + # * There isn't, good -- no uninstalltion needed. + # * The --force-reinstall flag is set. Always reinstall. + # * The installation is different in version or editable-ness, so + # we need to uninstall it to install the new distribution. + # * The installed version is the same as the pending distribution. + # Skip this distrubiton altogether to save work. + installed_dist = self.factory.get_dist_to_uninstall(candidate) + if installed_dist is None: + ireq.should_reinstall = False + elif self.factory.force_reinstall: + ireq.should_reinstall = True + elif installed_dist.parsed_version != candidate.version: + ireq.should_reinstall = True + elif dist_is_editable(installed_dist) != candidate.is_editable: + ireq.should_reinstall = True + else: + continue + + link = candidate.source_link + if link and link.is_yanked: + # The reason can contain non-ASCII characters, Unicode + # is required for Python 2. 
+ msg = ( + u'The candidate selected for download or install is a ' + u'yanked version: {name!r} candidate (version {version} ' + u'at {link})\nReason for being yanked: {reason}' + ).format( + name=candidate.name, + version=candidate.version, + link=link, + reason=link.yanked_reason or u'<none given>', + ) + logger.warning(msg) + + req_set.add_named_requirement(ireq) + + return req_set + + def get_installation_order(self, req_set): + # type: (RequirementSet) -> List[InstallRequirement] + """Get order for installation of requirements in RequirementSet. + + The returned list contains a requirement before another that depends on + it. This helps ensure that the environment is kept consistent as they + get installed one-by-one. + + The current implementation creates a topological ordering of the + dependency graph, while breaking any cycles in the graph at arbitrary + points. We make no guarantees about where the cycle would be broken, + other than they would be broken. + """ + assert self._result is not None, "must call resolve() first" + + graph = self._result.graph + weights = get_topological_weights(graph) + + sorted_items = sorted( + req_set.requirements.items(), + key=functools.partial(_req_set_item_sorter, weights=weights), + reverse=True, + ) + return [ireq for _, ireq in sorted_items] + + +def get_topological_weights(graph): + # type: (Graph) -> Dict[Optional[str], int] + """Assign weights to each node based on how "deep" they are. + + This implementation may change at any point in the future without prior + notice. + + We take the length for the longest path to any node from root, ignoring any + paths that contain a single node twice (i.e. cycles). This is done through + a depth-first search through the graph, while keeping track of the path to + the node. + + Cycles in the graph result would result in node being revisited while also + being it's own path. In this case, take no action. This helps ensure we + don't get stuck in a cycle. 
+ + When assigning weight, the longer path (i.e. larger length) is preferred. + """ + path = set() # type: Set[Optional[str]] + weights = {} # type: Dict[Optional[str], int] + + def visit(node): + # type: (Optional[str]) -> None + if node in path: + # We hit a cycle, so we'll break it here. + return + + # Time to visit the children! + path.add(node) + for child in graph.iter_children(node): + visit(child) + path.remove(node) + + last_known_parent_count = weights.get(node, 0) + weights[node] = max(last_known_parent_count, len(path)) + + # `None` is guaranteed to be the root node by resolvelib. + visit(None) + + # Sanity checks + assert weights[None] == 0 + assert len(weights) == len(graph) + + return weights + + +def _req_set_item_sorter( + item, # type: Tuple[str, InstallRequirement] + weights, # type: Dict[Optional[str], int] +): + # type: (...) -> Tuple[int, str] + """Key function used to sort install requirements for installation. + + Based on the "weight" mapping calculated in ``get_installation_order()``. + The canonical package name is returned as the second member as a tie- + breaker to ensure the result is predictable, which is useful in tests. 
+ """ + name = canonicalize_name(item[0]) + return weights[name], name diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/outdated.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/self_outdated_check.py similarity index 50% rename from Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/outdated.py rename to Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/self_outdated_check.py index 37c47a4a..fbd9dfd4 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pip/_internal/utils/outdated.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/self_outdated_check.py @@ -1,24 +1,36 @@ from __future__ import absolute_import import datetime +import hashlib import json import logging import os.path import sys -from pip._vendor import lockfile, pkg_resources from pip._vendor.packaging import version as packaging_version - -from pip._internal.index import PackageFinder -from pip._internal.utils.compat import WINDOWS -from pip._internal.utils.filesystem import check_path_owner -from pip._internal.utils.misc import ensure_dir, get_installed_version +from pip._vendor.six import ensure_binary + +from pip._internal.index.collector import LinkCollector +from pip._internal.index.package_finder import PackageFinder +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.utils.filesystem import ( + adjacent_tmp_file, + check_path_owner, + replace, +) +from pip._internal.utils.misc import ( + ensure_dir, + get_distribution, + get_installed_version, +) +from pip._internal.utils.packaging import get_installer from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - import optparse # noqa: F401 - from typing import Any, Dict # noqa: F401 - from pip._internal.download import PipSession # noqa: F401 + import optparse + from typing import Any, Dict, Text, Union + + from 
pip._internal.network.session import PipSession SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" @@ -27,6 +39,13 @@ SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" logger = logging.getLogger(__name__) +def _get_statefile_name(key): + # type: (Union[str, Text]) -> str + key_bytes = ensure_binary(key) + name = hashlib.sha224(key_bytes).hexdigest() + return name + + class SelfCheckState(object): def __init__(self, cache_dir): # type: (str) -> None @@ -35,15 +54,22 @@ class SelfCheckState(object): # Try to load the existing state if cache_dir: - self.statefile_path = os.path.join(cache_dir, "selfcheck.json") + self.statefile_path = os.path.join( + cache_dir, "selfcheck", _get_statefile_name(self.key) + ) try: with open(self.statefile_path) as statefile: - self.state = json.load(statefile)[sys.prefix] + self.state = json.load(statefile) except (IOError, ValueError, KeyError): # Explicitly suppressing exceptions, since we don't want to # error out if the cache file is invalid. pass + @property + def key(self): + # type: () -> str + return sys.prefix + def save(self, pypi_version, current_time): # type: (str, datetime.datetime) -> None # If we do not have a path to cache in, don't bother saving. @@ -58,22 +84,26 @@ class SelfCheckState(object): # ahead and make sure that all our directories are created. ensure_dir(os.path.dirname(self.statefile_path)) - # Attempt to write out our version check file - with lockfile.LockFile(self.statefile_path): - if os.path.exists(self.statefile_path): - with open(self.statefile_path) as statefile: - state = json.load(statefile) - else: - state = {} + state = { + # Include the key so it's easy to tell which pip wrote the + # file. 
+ "key": self.key, + "last_check": current_time.strftime(SELFCHECK_DATE_FMT), + "pypi_version": pypi_version, + } + + text = json.dumps(state, sort_keys=True, separators=(",", ":")) - state[sys.prefix] = { - "last_check": current_time.strftime(SELFCHECK_DATE_FMT), - "pypi_version": pypi_version, - } + with adjacent_tmp_file(self.statefile_path) as f: + f.write(ensure_binary(text)) - with open(self.statefile_path, "w") as statefile: - json.dump(state, statefile, sort_keys=True, - separators=(",", ":")) + try: + # Since we have a prefix-specific state file, we can just + # overwrite whatever is there, no need to check. + replace(f.name, self.statefile_path) + except OSError: + # Best effort. + pass def was_installed_by_pip(pkg): @@ -83,15 +113,13 @@ def was_installed_by_pip(pkg): This is used not to display the upgrade message when pip is in fact installed by system package manager, such as dnf on Fedora. """ - try: - dist = pkg_resources.get_distribution(pkg) - return (dist.has_metadata('INSTALLER') and - 'pip' in dist.get_metadata_lines('INSTALLER')) - except pkg_resources.DistributionNotFound: + dist = get_distribution(pkg) + if not dist: return False + return "pip" == get_installer(dist) -def pip_version_check(session, options): +def pip_self_version_check(session, options): # type: (PipSession, optparse.Values) -> None """Check for an update for pip. @@ -122,41 +150,54 @@ def pip_version_check(session, options): # Refresh the version if we need to or just see if we need to warn if pypi_version is None: # Lets use PackageFinder to see what the latest pip version is - finder = PackageFinder( - find_links=options.find_links, - index_urls=[options.index_url] + options.extra_index_urls, + link_collector = LinkCollector.create( + session, + options=options, + suppress_no_index=True, + ) + + # Pass allow_yanked=False so we don't suggest upgrading to a + # yanked version. 
+ selection_prefs = SelectionPreferences( + allow_yanked=False, allow_all_prereleases=False, # Explicitly set to False - trusted_hosts=options.trusted_hosts, - session=session, ) - all_candidates = finder.find_all_candidates("pip") - if not all_candidates: - return - pypi_version = str( - max(all_candidates, key=lambda c: c.version).version + + finder = PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, ) + best_candidate = finder.find_best_candidate("pip").best_candidate + if best_candidate is None: + return + pypi_version = str(best_candidate.version) # save that we've performed a check state.save(pypi_version, current_time) remote_version = packaging_version.parse(pypi_version) + local_version_is_older = ( + pip_version < remote_version and + pip_version.base_version != remote_version.base_version and + was_installed_by_pip('pip') + ) + # Determine if our pypi_version is older - if (pip_version < remote_version and - pip_version.base_version != remote_version.base_version and - was_installed_by_pip('pip')): - # Advise "python -m pip" on Windows to avoid issues - # with overwriting pip.exe. - if WINDOWS: - pip_cmd = "python -m pip" - else: - pip_cmd = "pip" - logger.warning( - "You are using pip version %s, however version %s is " - "available.\nYou should consider upgrading via the " - "'%s install --upgrade pip' command.", - pip_version, pypi_version, pip_cmd - ) + if not local_version_is_older: + return + + # We cannot tell how the current pip is available in the current + # command context, so be pragmatic here and suggest the command + # that's always available. This does not accommodate spaces in + # `sys.executable`. 
+ pip_cmd = "{} -m pip".format(sys.executable) + logger.warning( + "You are using pip version %s; however, version %s is " + "available.\nYou should consider upgrading via the " + "'%s install --upgrade pip' command.", + pip_version, pypi_version, pip_cmd + ) except Exception: logger.debug( "There was an error checking the latest version of pip", diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/appdirs.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/appdirs.py index 9af9fa7b..3989ed31 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/appdirs.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/appdirs.py @@ -1,270 +1,44 @@ """ -This code was taken from https://github.com/ActiveState/appdirs and modified -to suit our purposes. +This code wraps the vendored appdirs module to so the return values are +compatible for the current pip code base. + +The intention is to rewrite current usages gradually, keeping the tests pass, +and eventually drop this after all usages are changed. """ + from __future__ import absolute_import import os -import sys -from pip._vendor.six import PY2, text_type +from pip._vendor import appdirs as _appdirs -from pip._internal.utils.compat import WINDOWS, expanduser from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 - List, Union - ) + from typing import List def user_cache_dir(appname): # type: (str) -> str - r""" - Return full path to the user-specific cache dir for this application. - - "appname" is the name of application. - - Typical user cache directories are: - macOS: ~/Library/Caches/<AppName> - Unix: ~/.cache/<AppName> (XDG default) - Windows: C:\Users\<username>\AppData\Local\<AppName>\Cache - - On Windows the only suggestion in the MSDN docs is that local settings go - in the `CSIDL_LOCAL_APPDATA` directory. 
This is identical to the - non-roaming app data dir (the default returned by `user_data_dir`). Apps - typically put cache data somewhere *under* the given dir here. Some - examples: - ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache - ...\Acme\SuperApp\Cache\1.0 - - OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. - """ - if WINDOWS: - # Get the base path - path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) - - # When using Python 2, return paths as bytes on Windows like we do on - # other operating systems. See helper function docs for more details. - if PY2 and isinstance(path, text_type): - path = _win_path_to_bytes(path) - - # Add our app name and Cache directory to it - path = os.path.join(path, appname, "Cache") - elif sys.platform == "darwin": - # Get the base path - path = expanduser("~/Library/Caches") - - # Add our app name to it - path = os.path.join(path, appname) - else: - # Get the base path - path = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache")) - - # Add our app name to it - path = os.path.join(path, appname) - - return path - - -def user_data_dir(appname, roaming=False): - # type: (str, bool) -> str - r""" - Return full path to the user-specific data dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "roaming" (boolean, default False) can be set True to use the Windows - roaming appdata directory. That means that for users on a Windows - network setup for roaming profiles, this user data will be - sync'd on login. See - <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> - for a discussion of issues. - - Typical user data directories are: - macOS: ~/Library/Application Support/<AppName> - if it exists, else ~/.config/<AppName> - Unix: ~/.local/share/<AppName> # or in - $XDG_DATA_HOME, if defined - Win XP (not roaming): C:\Documents and Settings\<username>\ ... 
- ...Application Data\<AppName> - Win XP (roaming): C:\Documents and Settings\<username>\Local ... - ...Settings\Application Data\<AppName> - Win 7 (not roaming): C:\\Users\<username>\AppData\Local\<AppName> - Win 7 (roaming): C:\\Users\<username>\AppData\Roaming\<AppName> - - For Unix, we follow the XDG spec and support $XDG_DATA_HOME. - That means, by default "~/.local/share/<AppName>". - """ - if WINDOWS: - const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" - path = os.path.join(os.path.normpath(_get_win_folder(const)), appname) - elif sys.platform == "darwin": - path = os.path.join( - expanduser('~/Library/Application Support/'), - appname, - ) if os.path.isdir(os.path.join( - expanduser('~/Library/Application Support/'), - appname, - ) - ) else os.path.join( - expanduser('~/.config/'), - appname, - ) - else: - path = os.path.join( - os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")), - appname, - ) - - return path + return _appdirs.user_cache_dir(appname, appauthor=False) def user_config_dir(appname, roaming=True): # type: (str, bool) -> str - """Return full path to the user-specific config dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "roaming" (boolean, default True) can be set False to not use the - Windows roaming appdata directory. That means that for users on a - Windows network setup for roaming profiles, this user data will be - sync'd on login. See - <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> - for a discussion of issues. - - Typical user data directories are: - macOS: same as user_data_dir - Unix: ~/.config/<AppName> - Win *: same as user_data_dir - - For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. - That means, by default "~/.config/<AppName>". 
- """ - if WINDOWS: - path = user_data_dir(appname, roaming=roaming) - elif sys.platform == "darwin": - path = user_data_dir(appname) - else: - path = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config")) - path = os.path.join(path, appname) - + path = _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming) + if _appdirs.system == "darwin" and not os.path.isdir(path): + path = os.path.expanduser('~/.config/') + if appname: + path = os.path.join(path, appname) return path -# for the discussion regarding site_config_dirs locations +# for the discussion regarding site_config_dir locations # see <https://github.com/pypa/pip/issues/1733> def site_config_dirs(appname): # type: (str) -> List[str] - r"""Return a list of potential user-shared config dirs for this application. - - "appname" is the name of application. - - Typical user config directories are: - macOS: /Library/Application Support/<AppName>/ - Unix: /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in - $XDG_CONFIG_DIRS - Win XP: C:\Documents and Settings\All Users\Application ... - ...Data\<AppName>\ - Vista: (Fail! "C:\ProgramData" is a hidden *system* directory - on Vista.) 
- Win 7: Hidden, but writeable on Win 7: - C:\ProgramData\<AppName>\ - """ - if WINDOWS: - path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) - pathlist = [os.path.join(path, appname)] - elif sys.platform == 'darwin': - pathlist = [os.path.join('/Library/Application Support', appname)] - else: - # try looking in $XDG_CONFIG_DIRS - xdg_config_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') - if xdg_config_dirs: - pathlist = [ - os.path.join(expanduser(x), appname) - for x in xdg_config_dirs.split(os.pathsep) - ] - else: - pathlist = [] - + dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True) + if _appdirs.system not in ["win32", "darwin"]: # always look in /etc directly as well - pathlist.append('/etc') - - return pathlist - - -# -- Windows support functions -- - -def _get_win_folder_from_registry(csidl_name): - # type: (str) -> str - """ - This is a fallback technique at best. I'm not sure if using the - registry for this guarantees us the correct answer for all CSIDL_* - names. - """ - import _winreg - - shell_folder_name = { - "CSIDL_APPDATA": "AppData", - "CSIDL_COMMON_APPDATA": "Common AppData", - "CSIDL_LOCAL_APPDATA": "Local AppData", - }[csidl_name] - - key = _winreg.OpenKey( - _winreg.HKEY_CURRENT_USER, - r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" - ) - directory, _type = _winreg.QueryValueEx(key, shell_folder_name) - return directory - - -def _get_win_folder_with_ctypes(csidl_name): - # type: (str) -> str - csidl_const = { - "CSIDL_APPDATA": 26, - "CSIDL_COMMON_APPDATA": 35, - "CSIDL_LOCAL_APPDATA": 28, - }[csidl_name] - - buf = ctypes.create_unicode_buffer(1024) - ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) - - # Downgrade to short path name if have highbit chars. See - # <http://bugs.activestate.com/show_bug.cgi?id=85099>. 
- has_high_char = False - for c in buf: - if ord(c) > 255: - has_high_char = True - break - if has_high_char: - buf2 = ctypes.create_unicode_buffer(1024) - if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): - buf = buf2 - - return buf.value - - -if WINDOWS: - try: - import ctypes - _get_win_folder = _get_win_folder_with_ctypes - except ImportError: - _get_win_folder = _get_win_folder_from_registry - - -def _win_path_to_bytes(path): - """Encode Windows paths to bytes. Only used on Python 2. - - Motivation is to be consistent with other operating systems where paths - are also returned as bytes. This avoids problems mixing bytes and Unicode - elsewhere in the codebase. For more details and discussion see - <https://github.com/pypa/pip/issues/3463>. - - If encoding using ASCII and MBCS fails, return the original Unicode path. - """ - for encoding in ('ASCII', 'MBCS'): - try: - return path.encode(encoding) - except (UnicodeEncodeError, LookupError): - pass - return path + return dirval.split(os.pathsep) + ['/etc'] + return [dirval] diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/compat.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/compat.py index 2d8b3bf0..89c5169a 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/compat.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/compat.py @@ -1,5 +1,9 @@ """Stuff that differs in different Python versions and platform distributions.""" + +# The following comment should be removed at some point in the future. 
+# mypy: disallow-untyped-defs=False + from __future__ import absolute_import, division import codecs @@ -9,12 +13,12 @@ import os import shutil import sys -from pip._vendor.six import text_type +from pip._vendor.six import PY2, text_type from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Tuple, Text # noqa: F401 + from typing import Optional, Text, Tuple, Union try: import ipaddress @@ -28,18 +32,14 @@ except ImportError: __all__ = [ - "ipaddress", "uses_pycache", "console_to_str", "native_str", + "ipaddress", "uses_pycache", "console_to_str", "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile", "get_terminal_size", - "get_extension_suffixes", ] logger = logging.getLogger(__name__) -if sys.version_info >= (3, 4): - uses_pycache = True - from importlib.util import cache_from_source -else: +if PY2: import imp try: @@ -49,41 +49,66 @@ else: cache_from_source = None uses_pycache = cache_from_source is not None +else: + uses_pycache = True + from importlib.util import cache_from_source -if sys.version_info >= (3, 5): - backslashreplace_decode = "backslashreplace" -else: - # In version 3.4 and older, backslashreplace exists +if PY2: + # In Python 2.7, backslashreplace exists # but does not support use for decoding. # We implement our own replace handler for this # situation, so that we can consistently use # backslash replacement for all versions. 
def backslashreplace_decode_fn(err): raw_bytes = (err.object[i] for i in range(err.start, err.end)) - if sys.version_info[0] == 2: - # Python 2 gave us characters - convert to numeric bytes - raw_bytes = (ord(b) for b in raw_bytes) - return u"".join(u"\\x%x" % c for c in raw_bytes), err.end + # Python 2 gave us characters - convert to numeric bytes + raw_bytes = (ord(b) for b in raw_bytes) + return u"".join(map(u"\\x{:x}".format, raw_bytes)), err.end codecs.register_error( "backslashreplace_decode", backslashreplace_decode_fn, ) backslashreplace_decode = "backslashreplace_decode" +else: + backslashreplace_decode = "backslashreplace" -def console_to_str(data): - # type: (bytes) -> Text - """Return a string, safe for output, of subprocess output. +def has_tls(): + # type: () -> bool + try: + import _ssl # noqa: F401 # ignore unused + return True + except ImportError: + pass + + from pip._vendor.urllib3.util import IS_PYOPENSSL + return IS_PYOPENSSL - We assume the data is in the locale preferred encoding. - If it won't decode properly, we warn the user but decode as - best we can. - We also ensure that the output can be safely written to - standard output without encoding errors. +def str_to_display(data, desc=None): + # type: (Union[bytes, Text], Optional[str]) -> Text """ + For display or logging purposes, convert a bytes object (or text) to + text (e.g. unicode in Python 2) safe for output. + :param desc: An optional phrase describing the input data, for use in + the log message if a warning is logged. Defaults to "Bytes object". + + This function should never error out and so can take a best effort + approach. It is okay to be lossy if needed since the return value is + just for display. + + We assume the data is in the locale preferred encoding. If it won't + decode properly, we warn the user but decode as best we can. + + We also ensure that the output can be safely written to standard output + without encoding errors. 
+ """ + if isinstance(data, text_type): + return data + + # Otherwise, data is a bytes object (str in Python 2). # First, get the encoding we assume. This is the preferred # encoding for the locale, unless that is not found, or # it is ASCII, in which case assume UTF-8 @@ -97,7 +122,8 @@ def console_to_str(data): decoded_data = data.decode(encoding) except UnicodeDecodeError: logger.warning( - "Subprocess output does not appear to be encoded as %s", + '%s does not appear to be encoded as %s', + desc or 'Bytes object', encoding, ) decoded_data = data.decode(encoding, errors=backslashreplace_decode) @@ -127,20 +153,11 @@ def console_to_str(data): return decoded_data -if sys.version_info >= (3,): - def native_str(s, replace=False): - # type: (str, bool) -> str - if isinstance(s, bytes): - return s.decode('utf-8', 'replace' if replace else 'strict') - return s - -else: - def native_str(s, replace=False): - # type: (str, bool) -> str - # Replace is ignored -- unicode to UTF-8 can't fail - if isinstance(s, text_type): - return s.encode('utf-8') - return s +def console_to_str(data): + # type: (bytes) -> Text + """Return a string, safe for output, of subprocess output. 
+ """ + return str_to_display(data, desc='Subprocess output') def get_path_uid(path): @@ -168,23 +185,12 @@ def get_path_uid(path): else: # raise OSError for parity with os.O_NOFOLLOW above raise OSError( - "%s is a symlink; Will not return uid for symlinks" % path + "{} is a symlink; Will not return uid for symlinks".format( + path) ) return file_uid -if sys.version_info >= (3, 4): - from importlib.machinery import EXTENSION_SUFFIXES - - def get_extension_suffixes(): - return EXTENSION_SUFFIXES -else: - from imp import get_suffixes - - def get_extension_suffixes(): - return [suffix[0] for suffix in get_suffixes()] - - def expanduser(path): # type: (str) -> str """ @@ -253,12 +259,13 @@ else: return cr cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2) if not cr: - try: - fd = os.open(os.ctermid(), os.O_RDONLY) - cr = ioctl_GWINSZ(fd) - os.close(fd) - except Exception: - pass + if sys.platform != "win32": + try: + fd = os.open(os.ctermid(), os.O_RDONLY) + cr = ioctl_GWINSZ(fd) + os.close(fd) + except Exception: + pass if not cr: cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80)) return int(cr[1]), int(cr[0]) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/compatibility_tags.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/compatibility_tags.py new file mode 100644 index 00000000..4f21874e --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/compatibility_tags.py @@ -0,0 +1,166 @@ +"""Generate and work with PEP 425 Compatibility Tags. 
+""" + +from __future__ import absolute_import + +import re + +from pip._vendor.packaging.tags import ( + Tag, + compatible_tags, + cpython_tags, + generic_tags, + interpreter_name, + interpreter_version, + mac_platforms, +) + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional, Tuple + + from pip._vendor.packaging.tags import PythonVersion + +_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)') + + +def version_info_to_nodot(version_info): + # type: (Tuple[int, ...]) -> str + # Only use up to the first two numbers. + return ''.join(map(str, version_info[:2])) + + +def _mac_platforms(arch): + # type: (str) -> List[str] + match = _osx_arch_pat.match(arch) + if match: + name, major, minor, actual_arch = match.groups() + mac_version = (int(major), int(minor)) + arches = [ + # Since we have always only checked that the platform starts + # with "macosx", for backwards-compatibility we extract the + # actual prefix provided by the user in case they provided + # something like "macosxcustom_". It may be good to remove + # this as undocumented or deprecate it in the future. + '{}_{}'.format(name, arch[len('macosx_'):]) + for arch in mac_platforms(mac_version, actual_arch) + ] + else: + # arch pattern didn't match (?!) + arches = [arch] + return arches + + +def _custom_manylinux_platforms(arch): + # type: (str) -> List[str] + arches = [arch] + arch_prefix, arch_sep, arch_suffix = arch.partition('_') + if arch_prefix == 'manylinux2014': + # manylinux1/manylinux2010 wheels run on most manylinux2014 systems + # with the exception of wheels depending on ncurses. 
PEP 599 states + # manylinux1/manylinux2010 wheels should be considered + # manylinux2014 wheels: + # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels + if arch_suffix in {'i686', 'x86_64'}: + arches.append('manylinux2010' + arch_sep + arch_suffix) + arches.append('manylinux1' + arch_sep + arch_suffix) + elif arch_prefix == 'manylinux2010': + # manylinux1 wheels run on most manylinux2010 systems with the + # exception of wheels depending on ncurses. PEP 571 states + # manylinux1 wheels should be considered manylinux2010 wheels: + # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels + arches.append('manylinux1' + arch_sep + arch_suffix) + return arches + + +def _get_custom_platforms(arch): + # type: (str) -> List[str] + arch_prefix, arch_sep, arch_suffix = arch.partition('_') + if arch.startswith('macosx'): + arches = _mac_platforms(arch) + elif arch_prefix in ['manylinux2014', 'manylinux2010']: + arches = _custom_manylinux_platforms(arch) + else: + arches = [arch] + return arches + + +def _get_python_version(version): + # type: (str) -> PythonVersion + if len(version) > 1: + return int(version[0]), int(version[1:]) + else: + return (int(version[0]),) + + +def _get_custom_interpreter(implementation=None, version=None): + # type: (Optional[str], Optional[str]) -> str + if implementation is None: + implementation = interpreter_name() + if version is None: + version = interpreter_version() + return "{}{}".format(implementation, version) + + +def get_supported( + version=None, # type: Optional[str] + platform=None, # type: Optional[str] + impl=None, # type: Optional[str] + abi=None # type: Optional[str] +): + # type: (...) -> List[Tag] + """Return a list of supported tags for each version specified in + `versions`. + + :param version: a string version, of the form "33" or "32", + or None. The version will be assumed to support our ABI. 
+ :param platform: specify the exact platform you want valid + tags for, or None. If None, use the local system platform. + :param impl: specify the exact implementation you want valid + tags for, or None. If None, use the local interpreter impl. + :param abi: specify the exact abi you want valid + tags for, or None. If None, use the local interpreter abi. + """ + supported = [] # type: List[Tag] + + python_version = None # type: Optional[PythonVersion] + if version is not None: + python_version = _get_python_version(version) + + interpreter = _get_custom_interpreter(impl, version) + + abis = None # type: Optional[List[str]] + if abi is not None: + abis = [abi] + + platforms = None # type: Optional[List[str]] + if platform is not None: + platforms = _get_custom_platforms(platform) + + is_cpython = (impl or interpreter_name()) == "cp" + if is_cpython: + supported.extend( + cpython_tags( + python_version=python_version, + abis=abis, + platforms=platforms, + ) + ) + else: + supported.extend( + generic_tags( + interpreter=interpreter, + abis=abis, + platforms=platforms, + ) + ) + supported.extend( + compatible_tags( + python_version=python_version, + interpreter=interpreter, + platforms=platforms, + ) + ) + + return supported diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/datetime.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/datetime.py new file mode 100644 index 00000000..4d0503c2 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/datetime.py @@ -0,0 +1,14 @@ +"""For when pip wants to check the date or time. 
+""" + +from __future__ import absolute_import + +import datetime + + +def today_is_later_than(year, month, day): + # type: (int, int, int) -> bool + today = datetime.date.today() + given = datetime.date(year, month, day) + + return today > given diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/deprecation.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/deprecation.py index 0beaf74b..2f20cfd4 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/deprecation.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/deprecation.py @@ -1,6 +1,10 @@ """ A module that implements tooling to enable easy warnings about deprecations. """ + +# The following comment should be removed at some point in the future. +# mypy: disallow-untyped-defs=False + from __future__ import absolute_import import logging @@ -12,7 +16,10 @@ from pip import __version__ as current_version from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Any, Optional # noqa: F401 + from typing import Any, Optional + + +DEPRECATION_MSG_PREFIX = "DEPRECATION: " class PipDeprecationWarning(Warning): @@ -75,16 +82,23 @@ def deprecated(reason, replacement, gone_in, issue=None): """ # Construct a nice message. - # This is purposely eagerly formatted as we want it to appear as if someone - # typed this entire message out. - message = "DEPRECATION: " + reason - if replacement is not None: - message += " A possible replacement is {}.".format(replacement) - if issue is not None: - url = "https://github.com/pypa/pip/issues/" + str(issue) - message += " You can find discussion regarding this at {}.".format(url) + # This is eagerly formatted as we want it to get logged as if someone + # typed this entire message out. 
+ sentences = [ + (reason, DEPRECATION_MSG_PREFIX + "{}"), + (gone_in, "pip {} will remove support for this functionality."), + (replacement, "A possible replacement is {}."), + (issue, ( + "You can find discussion regarding this at " + "https://github.com/pypa/pip/issues/{}." + )), + ] + message = " ".join( + template.format(val) for val, template in sentences if val is not None + ) # Raise as an error if it has to be removed. if gone_in is not None and parse(current_version) >= parse(gone_in): raise PipDeprecationWarning(message) + warnings.warn(message, category=PipDeprecationWarning, stacklevel=2) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/direct_url_helpers.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/direct_url_helpers.py new file mode 100644 index 00000000..f1fe209e --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/direct_url_helpers.py @@ -0,0 +1,130 @@ +import logging + +from pip._internal.models.direct_url import ( + DIRECT_URL_METADATA_NAME, + ArchiveInfo, + DirectUrl, + DirectUrlValidationError, + DirInfo, + VcsInfo, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.vcs import vcs + +try: + from json import JSONDecodeError +except ImportError: + # PY2 + JSONDecodeError = ValueError # type: ignore + +if MYPY_CHECK_RUNNING: + from typing import Optional + + from pip._internal.models.link import Link + + from pip._vendor.pkg_resources import Distribution + +logger = logging.getLogger(__name__) + + +def direct_url_as_pep440_direct_reference(direct_url, name): + # type: (DirectUrl, str) -> str + """Convert a DirectUrl to a pip requirement string.""" + direct_url.validate() # if invalid, this is a pip bug + requirement = name + " @ " + fragments = [] + if isinstance(direct_url.info, VcsInfo): + requirement += "{}+{}@{}".format( + direct_url.info.vcs, direct_url.url, direct_url.info.commit_id + ) 
+ elif isinstance(direct_url.info, ArchiveInfo): + requirement += direct_url.url + if direct_url.info.hash: + fragments.append(direct_url.info.hash) + else: + assert isinstance(direct_url.info, DirInfo) + # pip should never reach this point for editables, since + # pip freeze inspects the editable project location to produce + # the requirement string + assert not direct_url.info.editable + requirement += direct_url.url + if direct_url.subdirectory: + fragments.append("subdirectory=" + direct_url.subdirectory) + if fragments: + requirement += "#" + "&".join(fragments) + return requirement + + +def direct_url_from_link(link, source_dir=None, link_is_in_wheel_cache=False): + # type: (Link, Optional[str], bool) -> DirectUrl + if link.is_vcs: + vcs_backend = vcs.get_backend_for_scheme(link.scheme) + assert vcs_backend + url, requested_revision, _ = ( + vcs_backend.get_url_rev_and_auth(link.url_without_fragment) + ) + # For VCS links, we need to find out and add commit_id. + if link_is_in_wheel_cache: + # If the requested VCS link corresponds to a cached + # wheel, it means the requested revision was an + # immutable commit hash, otherwise it would not have + # been cached. In that case we don't have a source_dir + # with the VCS checkout. + assert requested_revision + commit_id = requested_revision + else: + # If the wheel was not in cache, it means we have + # had to checkout from VCS to build and we have a source_dir + # which we can inspect to find out the commit id. 
+ assert source_dir + commit_id = vcs_backend.get_revision(source_dir) + return DirectUrl( + url=url, + info=VcsInfo( + vcs=vcs_backend.name, + commit_id=commit_id, + requested_revision=requested_revision, + ), + subdirectory=link.subdirectory_fragment, + ) + elif link.is_existing_dir(): + return DirectUrl( + url=link.url_without_fragment, + info=DirInfo(), + subdirectory=link.subdirectory_fragment, + ) + else: + hash = None + hash_name = link.hash_name + if hash_name: + hash = "{}={}".format(hash_name, link.hash) + return DirectUrl( + url=link.url_without_fragment, + info=ArchiveInfo(hash=hash), + subdirectory=link.subdirectory_fragment, + ) + + +def dist_get_direct_url(dist): + # type: (Distribution) -> Optional[DirectUrl] + """Obtain a DirectUrl from a pkg_resource.Distribution. + + Returns None if the distribution has no `direct_url.json` metadata, + or if `direct_url.json` is invalid. + """ + if not dist.has_metadata(DIRECT_URL_METADATA_NAME): + return None + try: + return DirectUrl.from_json(dist.get_metadata(DIRECT_URL_METADATA_NAME)) + except ( + DirectUrlValidationError, + JSONDecodeError, + UnicodeDecodeError + ) as e: + logger.warning( + "Error parsing %s for %s: %s", + DIRECT_URL_METADATA_NAME, + dist.project_name, + e, + ) + return None diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/distutils_args.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/distutils_args.py new file mode 100644 index 00000000..e38e402d --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/distutils_args.py @@ -0,0 +1,48 @@ +from distutils.errors import DistutilsArgError +from distutils.fancy_getopt import FancyGetopt + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Dict, List + + +_options = [ + ("exec-prefix=", None, ""), + ("home=", None, ""), + ("install-base=", None, ""), + ("install-data=", None, 
""), + ("install-headers=", None, ""), + ("install-lib=", None, ""), + ("install-platlib=", None, ""), + ("install-purelib=", None, ""), + ("install-scripts=", None, ""), + ("prefix=", None, ""), + ("root=", None, ""), + ("user", None, ""), +] + + +# typeshed doesn't permit Tuple[str, None, str], see python/typeshed#3469. +_distutils_getopt = FancyGetopt(_options) # type: ignore + + +def parse_distutils_args(args): + # type: (List[str]) -> Dict[str, str] + """Parse provided arguments, returning an object that has the + matched arguments. + + Any unknown arguments are ignored. + """ + result = {} + for arg in args: + try: + _, match = _distutils_getopt.getopt(args=[arg]) + except DistutilsArgError: + # We don't care about any other options, which here may be + # considered unrecognized since our option list is not + # exhaustive. + pass + else: + result.update(match.__dict__) + return result diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/encoding.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/encoding.py index d36defad..5b83d61b 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/encoding.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/encoding.py @@ -6,16 +6,16 @@ import sys from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import List, Tuple, Text # noqa: F401 + from typing import List, Tuple, Text BOMS = [ - (codecs.BOM_UTF8, 'utf8'), - (codecs.BOM_UTF16, 'utf16'), - (codecs.BOM_UTF16_BE, 'utf16-be'), - (codecs.BOM_UTF16_LE, 'utf16-le'), - (codecs.BOM_UTF32, 'utf32'), - (codecs.BOM_UTF32_BE, 'utf32-be'), - (codecs.BOM_UTF32_LE, 'utf32-le'), + (codecs.BOM_UTF8, 'utf-8'), + (codecs.BOM_UTF16, 'utf-16'), + (codecs.BOM_UTF16_BE, 'utf-16-be'), + (codecs.BOM_UTF16_LE, 'utf-16-le'), + (codecs.BOM_UTF32, 'utf-32'), + (codecs.BOM_UTF32_BE, 'utf-32-be'), + 
(codecs.BOM_UTF32_LE, 'utf-32-le'), ] # type: List[Tuple[bytes, Text]] ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)') @@ -32,7 +32,9 @@ def auto_decode(data): # Lets check the first two lines as in PEP263 for line in data.split(b'\n')[:2]: if line[0:1] == b'#' and ENCODING_RE.search(line): - encoding = ENCODING_RE.search(line).groups()[0].decode('ascii') + result = ENCODING_RE.search(line) + assert result is not None + encoding = result.groups()[0].decode('ascii') return data.decode(encoding) return data.decode( locale.getpreferredencoding(False) or sys.getdefaultencoding(), diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/entrypoints.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/entrypoints.py new file mode 100644 index 00000000..befd01c8 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/entrypoints.py @@ -0,0 +1,31 @@ +import sys + +from pip._internal.cli.main import main +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional, List + + +def _wrapper(args=None): + # type: (Optional[List[str]]) -> int + """Central wrapper for all old entrypoints. + + Historically pip has had several entrypoints defined. Because of issues + arising from PATH, sys.path, multiple Pythons, their interactions, and most + of them having a pip installed, users suffer every time an entrypoint gets + moved. + + To alleviate this pain, and provide a mechanism for warning users and + directing them to an appropriate place for help, we now define all of + our old entrypoints as wrappers for the current one. + """ + sys.stderr.write( + "WARNING: pip is being invoked by an old script wrapper. 
This will " + "fail in a future version of pip.\n" + "Please see https://github.com/pypa/pip/issues/5599 for advice on " + "fixing the underlying issue.\n" + "To avoid this problem you can invoke Python with '-m pip' instead of " + "running pip directly.\n" + ) + return main(args) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/filesystem.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/filesystem.py index 1e6b0338..303243fd 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/filesystem.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/filesystem.py @@ -1,16 +1,42 @@ +import errno +import fnmatch import os import os.path +import random +import shutil +import stat +import sys +from contextlib import contextmanager +from tempfile import NamedTemporaryFile + +# NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is +# why we ignore the type on this import. +from pip._vendor.retrying import retry # type: ignore +from pip._vendor.six import PY2 from pip._internal.utils.compat import get_path_uid +from pip._internal.utils.misc import format_size +from pip._internal.utils.typing import MYPY_CHECK_RUNNING, cast + +if MYPY_CHECK_RUNNING: + from typing import Any, BinaryIO, Iterator, List, Union + + class NamedTemporaryFileResult(BinaryIO): + @property + def file(self): + # type: () -> BinaryIO + pass def check_path_owner(path): # type: (str) -> bool # If we don't have a way to check the effective uid of this process, then # we'll just assume that we own the directory. 
- if not hasattr(os, "geteuid"): + if sys.platform == "win32" or not hasattr(os, "geteuid"): return True + assert os.path.isabs(path) + previous = None while path != previous: if os.path.lexists(path): @@ -28,3 +54,171 @@ def check_path_owner(path): else: previous, path = path, os.path.dirname(path) return False # assume we don't own the path + + +def copy2_fixed(src, dest): + # type: (str, str) -> None + """Wrap shutil.copy2() but map errors copying socket files to + SpecialFileError as expected. + + See also https://bugs.python.org/issue37700. + """ + try: + shutil.copy2(src, dest) + except (OSError, IOError): + for f in [src, dest]: + try: + is_socket_file = is_socket(f) + except OSError: + # An error has already occurred. Another error here is not + # a problem and we can ignore it. + pass + else: + if is_socket_file: + raise shutil.SpecialFileError( + "`{f}` is a socket".format(**locals())) + + raise + + +def is_socket(path): + # type: (str) -> bool + return stat.S_ISSOCK(os.lstat(path).st_mode) + + +@contextmanager +def adjacent_tmp_file(path, **kwargs): + # type: (str, **Any) -> Iterator[NamedTemporaryFileResult] + """Return a file-like object pointing to a tmp file next to path. + + The file is created securely and is ensured to be written to disk + after the context reaches its end. + + kwargs will be passed to tempfile.NamedTemporaryFile to control + the way the temporary file will be opened. 
+ """ + with NamedTemporaryFile( + delete=False, + dir=os.path.dirname(path), + prefix=os.path.basename(path), + suffix='.tmp', + **kwargs + ) as f: + result = cast('NamedTemporaryFileResult', f) + try: + yield result + finally: + result.file.flush() + os.fsync(result.file.fileno()) + + +_replace_retry = retry(stop_max_delay=1000, wait_fixed=250) + +if PY2: + @_replace_retry + def replace(src, dest): + # type: (str, str) -> None + try: + os.rename(src, dest) + except OSError: + os.remove(dest) + os.rename(src, dest) + +else: + replace = _replace_retry(os.replace) + + +# test_writable_dir and _test_writable_dir_win are copied from Flit, +# with the author's agreement to also place them under pip's license. +def test_writable_dir(path): + # type: (str) -> bool + """Check if a directory is writable. + + Uses os.access() on POSIX, tries creating files on Windows. + """ + # If the directory doesn't exist, find the closest parent that does. + while not os.path.isdir(path): + parent = os.path.dirname(path) + if parent == path: + break # Should never get here, but infinite loops are bad + path = parent + + if os.name == 'posix': + return os.access(path, os.W_OK) + + return _test_writable_dir_win(path) + + +def _test_writable_dir_win(path): + # type: (str) -> bool + # os.access doesn't work on Windows: http://bugs.python.org/issue2528 + # and we can't use tempfile: http://bugs.python.org/issue22107 + basename = 'accesstest_deleteme_fishfingers_custard_' + alphabet = 'abcdefghijklmnopqrstuvwxyz0123456789' + for _ in range(10): + name = basename + ''.join(random.choice(alphabet) for _ in range(6)) + file = os.path.join(path, name) + try: + fd = os.open(file, os.O_RDWR | os.O_CREAT | os.O_EXCL) + # Python 2 doesn't support FileExistsError and PermissionError. 
+ except OSError as e: + # exception FileExistsError + if e.errno == errno.EEXIST: + continue + # exception PermissionError + if e.errno == errno.EPERM or e.errno == errno.EACCES: + # This could be because there's a directory with the same name. + # But it's highly unlikely there's a directory called that, + # so we'll assume it's because the parent dir is not writable. + # This could as well be because the parent dir is not readable, + # due to non-privileged user access. + return False + raise + else: + os.close(fd) + os.unlink(file) + return True + + # This should never be reached + raise EnvironmentError( + 'Unexpected condition testing for writable directory' + ) + + +def find_files(path, pattern): + # type: (str, str) -> List[str] + """Returns a list of absolute paths of files beneath path, recursively, + with filenames which match the UNIX-style shell glob pattern.""" + result = [] # type: List[str] + for root, _, files in os.walk(path): + matches = fnmatch.filter(files, pattern) + result.extend(os.path.join(root, f) for f in matches) + return result + + +def file_size(path): + # type: (str) -> Union[int, float] + # If it's a symlink, return 0. 
+ if os.path.islink(path): + return 0 + return os.path.getsize(path) + + +def format_file_size(path): + # type: (str) -> str + return format_size(file_size(path)) + + +def directory_size(path): + # type: (str) -> Union[int, float] + size = 0.0 + for root, _dirs, files in os.walk(path): + for filename in files: + file_path = os.path.join(root, filename) + size += file_size(file_path) + return size + + +def format_directory_size(path): + # type: (str) -> str + return format_size(directory_size(path)) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/filetypes.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/filetypes.py new file mode 100644 index 00000000..daa0ca77 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/filetypes.py @@ -0,0 +1,16 @@ +"""Filetype information. +""" +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Tuple + +WHEEL_EXTENSION = '.whl' +BZ2_EXTENSIONS = ('.tar.bz2', '.tbz') # type: Tuple[str, ...] +XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', + '.tar.lz', '.tar.lzma') # type: Tuple[str, ...] +ZIP_EXTENSIONS = ('.zip', WHEEL_EXTENSION) # type: Tuple[str, ...] +TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar') # type: Tuple[str, ...] +ARCHIVE_EXTENSIONS = ( + ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS +) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/glibc.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/glibc.py index 8a51f695..36104244 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/glibc.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/glibc.py @@ -1,18 +1,49 @@ +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False + from __future__ import absolute_import -import ctypes -import re -import warnings +import os +import sys from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Optional, Tuple # noqa: F401 + from typing import Optional, Tuple def glibc_version_string(): # type: () -> Optional[str] "Returns glibc version string, or None if not using glibc." + return glibc_version_string_confstr() or glibc_version_string_ctypes() + + +def glibc_version_string_confstr(): + # type: () -> Optional[str] + "Primary implementation of glibc_version_string using os.confstr." + # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely + # to be broken or missing. This strategy is used in the standard library + # platform module: + # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183 + if sys.platform == "win32": + return None + try: + # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17": + _, version = os.confstr("CS_GNU_LIBC_VERSION").split() + except (AttributeError, OSError, ValueError): + # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... + return None + return version + + +def glibc_version_string_ctypes(): + # type: () -> Optional[str] + "Fallback implementation of glibc_version_string using ctypes." + + try: + import ctypes + except ImportError: + return None # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen # manpage says, "If filename is NULL, then the returned handle is for the @@ -36,32 +67,6 @@ def glibc_version_string(): return version_str -# Separated out from have_compatible_glibc for easier unit testing -def check_glibc_version(version_str, required_major, minimum_minor): - # type: (str, int, int) -> bool - # Parse string and check against requested version. 
- # - # We use a regexp instead of str.split because we want to discard any - # random junk that might come after the minor version -- this might happen - # in patched/forked versions of glibc (e.g. Linaro's version of glibc - # uses version strings like "2.20-2014.11"). See gh-3588. - m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str) - if not m: - warnings.warn("Expected glibc version with 2 components major.minor," - " got: %s" % version_str, RuntimeWarning) - return False - return (int(m.group("major")) == required_major and - int(m.group("minor")) >= minimum_minor) - - -def have_compatible_glibc(required_major, minimum_minor): - # type: (int, int) -> bool - version_str = glibc_version_string() # type: Optional[str] - if version_str is None: - return False - return check_glibc_version(version_str, required_major, minimum_minor) - - # platform.libc_ver regularly returns completely nonsensical glibc # versions. E.g. on my computer, platform says: # diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/hashes.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/hashes.py index c6df7a18..d1b062fe 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/hashes.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/hashes.py @@ -5,20 +5,22 @@ import hashlib from pip._vendor.six import iteritems, iterkeys, itervalues from pip._internal.exceptions import ( - HashMismatch, HashMissing, InstallationError, + HashMismatch, + HashMissing, + InstallationError, ) from pip._internal.utils.misc import read_chunks from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 + from typing import ( Dict, List, BinaryIO, NoReturn, Iterator ) from pip._vendor.six import PY3 if PY3: - from hashlib import _Hash # noqa: F401 + from hashlib import _Hash else: - from hashlib import 
_hash as _Hash # noqa: F401 + from hashlib import _hash as _Hash # The recommended hash algo of the moment. Change this whenever the state of @@ -44,6 +46,32 @@ class Hashes(object): """ self._allowed = {} if hashes is None else hashes + def __or__(self, other): + # type: (Hashes) -> Hashes + if not isinstance(other, Hashes): + return NotImplemented + new = self._allowed.copy() + for alg, values in iteritems(other._allowed): + try: + new[alg] += values + except KeyError: + new[alg] = values + return Hashes(new) + + @property + def digest_count(self): + # type: () -> int + return sum(len(digests) for digests in self._allowed.values()) + + def is_hash_allowed( + self, + hash_name, # type: str + hex_digest, # type: str + ): + # type: (...) -> bool + """Return whether the given hex digest is allowed.""" + return hex_digest in self._allowed.get(hash_name, []) + def check_against_chunks(self, chunks): # type: (Iterator[bytes]) -> None """Check good hashes against ones built from iterable of chunks of @@ -57,7 +85,9 @@ class Hashes(object): try: gots[hash_name] = hashlib.new(hash_name) except (ValueError, TypeError): - raise InstallationError('Unknown hash name: %s' % hash_name) + raise InstallationError( + 'Unknown hash name: {}'.format(hash_name) + ) for chunk in chunks: for hash in itervalues(gots): diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/inject_securetransport.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/inject_securetransport.py new file mode 100644 index 00000000..5b93b1d6 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/inject_securetransport.py @@ -0,0 +1,36 @@ +"""A helper module that injects SecureTransport, on import. + +The import should be done as early as possible, to ensure all requests and +sessions (or whatever) are created after injecting SecureTransport. 
+ +Note that we only do the injection on macOS, when the linked OpenSSL is too +old to handle TLSv1.2. +""" + +import sys + + +def inject_securetransport(): + # type: () -> None + # Only relevant on macOS + if sys.platform != "darwin": + return + + try: + import ssl + except ImportError: + return + + # Checks for OpenSSL 1.0.1 + if ssl.OPENSSL_VERSION_NUMBER >= 0x1000100f: + return + + try: + from pip._vendor.urllib3.contrib import securetransport + except (ImportError, OSError): + return + + securetransport.inject_into_urllib3() + + +inject_securetransport() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/logging.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/logging.py index 579d6962..9a017cf7 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/logging.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/logging.py @@ -1,3 +1,6 @@ +# The following comment should be removed at some point in the future. 
+# mypy: disallow-untyped-defs=False + from __future__ import absolute_import import contextlib @@ -6,10 +9,12 @@ import logging import logging.handlers import os import sys +from logging import Filter, getLogger from pip._vendor.six import PY2 from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX from pip._internal.utils.misc import ensure_dir try: @@ -19,15 +24,35 @@ except ImportError: try: - from pip._vendor import colorama + # Use "import as" and set colorama in the else clause to avoid mypy + # errors and get the following correct revealed type for colorama: + # `Union[_importlib_modulespec.ModuleType, None]` + # Otherwise, we get an error like the following in the except block: + # > Incompatible types in assignment (expression has type "None", + # variable has type Module) + # TODO: eliminate the need to use "import as" once mypy addresses some + # of its issues with conditional imports. Here is an umbrella issue: + # https://github.com/python/mypy/issues/1297 + from pip._vendor import colorama as _colorama # Lots of different errors can come from this, including SystemError and # ImportError. except Exception: colorama = None +else: + # Import Fore explicitly rather than accessing below as colorama.Fore + # to avoid the following error running mypy: + # > Module has no attribute "Fore" + # TODO: eliminate the need to import Fore once mypy addresses some of its + # issues with conditional imports. 
This particular case could be an + # instance of the following issue (but also see the umbrella issue above): + # https://github.com/python/mypy/issues/3500 + from pip._vendor.colorama import Fore + + colorama = _colorama _log_state = threading.local() -_log_state.indentation = 0 +subprocess_logger = getLogger('pip.subprocessor') class BrokenStdoutLoggingError(Exception): @@ -78,6 +103,8 @@ def indent_log(num=2): A context manager which will cause the log output to be indented for any log messages emitted inside it. """ + # For thread-safety + _log_state.indentation = get_indentation() _log_state.indentation += num try: yield @@ -90,9 +117,10 @@ def get_indentation(): class IndentingFormatter(logging.Formatter): + def __init__(self, *args, **kwargs): """ - A logging.Formatter obeying containing indent_log contexts. + A logging.Formatter that obeys the indent_log() context manager. :param add_timestamp: A bool indicating output lines should be prefixed with their record's timestamp. @@ -100,15 +128,36 @@ class IndentingFormatter(logging.Formatter): self.add_timestamp = kwargs.pop("add_timestamp", False) super(IndentingFormatter, self).__init__(*args, **kwargs) + def get_message_start(self, formatted, levelno): + """ + Return the start of the formatted log message (not counting the + prefix to add to each line). + """ + if levelno < logging.WARNING: + return '' + if formatted.startswith(DEPRECATION_MSG_PREFIX): + # Then the message already has a prefix. We don't want it to + # look like "WARNING: DEPRECATION: ...." + return '' + if levelno < logging.ERROR: + return 'WARNING: ' + + return 'ERROR: ' + def format(self, record): """ - Calls the standard formatter, but will indent all of the log messages - by our current indentation level. + Calls the standard formatter, but will indent all of the log message + lines by our current indentation level. 
""" formatted = super(IndentingFormatter, self).format(record) + message_start = self.get_message_start(formatted, record.levelno) + formatted = message_start + formatted + prefix = '' if self.add_timestamp: - prefix = self.formatTime(record, "%Y-%m-%dT%H:%M:%S ") + # TODO: Use Formatter.default_time_format after dropping PY2. + t = self.formatTime(record, "%Y-%m-%dT%H:%M:%S") + prefix = '{t},{record.msecs:03.0f} '.format(**locals()) prefix += " " * get_indentation() formatted = "".join([ prefix + line @@ -129,8 +178,8 @@ class ColorizedStreamHandler(logging.StreamHandler): if colorama: COLORS = [ # This needs to be in order from highest logging level to lowest. - (logging.ERROR, _color_wrap(colorama.Fore.RED)), - (logging.WARNING, _color_wrap(colorama.Fore.YELLOW)), + (logging.ERROR, _color_wrap(Fore.RED)), + (logging.WARNING, _color_wrap(Fore.YELLOW)), ] else: COLORS = [] @@ -205,7 +254,7 @@ class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler): return logging.handlers.RotatingFileHandler._open(self) -class MaxLevelFilter(logging.Filter): +class MaxLevelFilter(Filter): def __init__(self, level): self.level = level @@ -214,6 +263,18 @@ class MaxLevelFilter(logging.Filter): return record.levelno < self.level +class ExcludeLoggerFilter(Filter): + + """ + A logging Filter that excludes records from a logger (or its children). + """ + + def filter(self, record): + # The base Filter class allows only records from a logger (or its + # children). 
+ return not super(ExcludeLoggerFilter, self).filter(record) + + def setup_logging(verbosity, no_color, user_log_file): """Configures and sets up all of the logging @@ -257,6 +318,9 @@ def setup_logging(verbosity, no_color, user_log_file): "stream": "pip._internal.utils.logging.ColorizedStreamHandler", "file": "pip._internal.utils.logging.BetterRotatingFileHandler", } + handlers = ["console", "console_errors", "console_subprocess"] + ( + ["user_log"] if include_user_log else [] + ) logging.config.dictConfig({ "version": 1, @@ -266,6 +330,14 @@ def setup_logging(verbosity, no_color, user_log_file): "()": "pip._internal.utils.logging.MaxLevelFilter", "level": logging.WARNING, }, + "restrict_to_subprocess": { + "()": "logging.Filter", + "name": subprocess_logger.name, + }, + "exclude_subprocess": { + "()": "pip._internal.utils.logging.ExcludeLoggerFilter", + "name": subprocess_logger.name, + }, }, "formatters": { "indent": { @@ -284,7 +356,7 @@ def setup_logging(verbosity, no_color, user_log_file): "class": handler_classes["stream"], "no_color": no_color, "stream": log_streams["stdout"], - "filters": ["exclude_warnings"], + "filters": ["exclude_subprocess", "exclude_warnings"], "formatter": "indent", }, "console_errors": { @@ -292,6 +364,17 @@ def setup_logging(verbosity, no_color, user_log_file): "class": handler_classes["stream"], "no_color": no_color, "stream": log_streams["stderr"], + "filters": ["exclude_subprocess"], + "formatter": "indent", + }, + # A handler responsible for logging to the console messages + # from the "subprocessor" logger. 
+ "console_subprocess": { + "level": level, + "class": handler_classes["stream"], + "no_color": no_color, + "stream": log_streams["stderr"], + "filters": ["restrict_to_subprocess"], "formatter": "indent", }, "user_log": { @@ -304,9 +387,7 @@ def setup_logging(verbosity, no_color, user_log_file): }, "root": { "level": root_level, - "handlers": ["console", "console_errors"] + ( - ["user_log"] if include_user_log else [] - ), + "handlers": handlers, }, "loggers": { "pip._vendor": { diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/misc.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/misc.py index 84605ee3..24a74556 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/misc.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/misc.py @@ -1,41 +1,51 @@ +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False +# mypy: disallow-untyped-defs=False + from __future__ import absolute_import import contextlib import errno +import getpass +import hashlib import io -import locale -# we have a submodule named 'logging' which would shadow this if we used the -# regular name: -import logging as std_logging +import logging import os import posixpath -import re import shutil import stat -import subprocess import sys -import tarfile -import zipfile from collections import deque +from itertools import tee from pip._vendor import pkg_resources +from pip._vendor.packaging.utils import canonicalize_name # NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is # why we ignore the type on this import. 
from pip._vendor.retrying import retry # type: ignore -from pip._vendor.six import PY2 -from pip._vendor.six.moves import input +from pip._vendor.six import PY2, text_type +from pip._vendor.six.moves import filter, filterfalse, input, map, zip_longest from pip._vendor.six.moves.urllib import parse as urllib_parse from pip._vendor.six.moves.urllib.parse import unquote as urllib_unquote -from pip._internal.exceptions import CommandError, InstallationError +from pip import __version__ +from pip._internal.exceptions import CommandError from pip._internal.locations import ( - running_under_virtualenv, site_packages, user_site, virtualenv_no_global, - write_delete_marker_file, + get_major_minor_version, + site_packages, + user_site, ) from pip._internal.utils.compat import ( - WINDOWS, console_to_str, expanduser, stdlib_pkgs, + WINDOWS, + expanduser, + stdlib_pkgs, + str_to_display, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING, cast +from pip._internal.utils.virtualenv import ( + running_under_virtualenv, + virtualenv_no_global, ) -from pip._internal.utils.typing import MYPY_CHECK_RUNNING if PY2: from io import BytesIO as StringIO @@ -43,51 +53,58 @@ else: from io import StringIO if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 - Optional, Tuple, Iterable, List, Match, Union, Any, Mapping, Text, - AnyStr, Container + from typing import ( + Any, AnyStr, Callable, Container, Iterable, Iterator, List, Optional, + Text, Tuple, TypeVar, Union, ) - from pip._vendor.pkg_resources import Distribution # noqa: F401 - from pip._internal.models.link import Link # noqa: F401 - from pip._internal.utils.ui import SpinnerInterface # noqa: F401 + from pip._vendor.pkg_resources import Distribution + + VersionInfo = Tuple[int, int, int] + T = TypeVar("T") __all__ = ['rmtree', 'display_path', 'backup_dir', 'ask', 'splitext', 'format_size', 'is_installable_dir', - 'is_svn_page', 'file_contents', - 'split_leading_dir', 'has_leading_dir', 'normalize_path', 'renames', 
'get_prog', - 'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess', 'captured_stdout', 'ensure_dir', - 'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS', 'WHEEL_EXTENSION', 'get_installed_version', 'remove_auth_from_url'] -logger = std_logging.getLogger(__name__) +logger = logging.getLogger(__name__) + + +def get_pip_version(): + # type: () -> str + pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..") + pip_pkg_dir = os.path.abspath(pip_pkg_dir) + + return ( + 'pip {} from {} (python {})'.format( + __version__, pip_pkg_dir, get_major_minor_version(), + ) + ) + + +def normalize_version_info(py_version_info): + # type: (Tuple[int, ...]) -> Tuple[int, int, int] + """ + Convert a tuple of ints representing a Python version to one of length + three. -WHEEL_EXTENSION = '.whl' -BZ2_EXTENSIONS = ('.tar.bz2', '.tbz') -XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma') -ZIP_EXTENSIONS = ('.zip', WHEEL_EXTENSION) -TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar') -ARCHIVE_EXTENSIONS = ( - ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS) -SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS + :param py_version_info: a tuple of ints representing a Python version, + or None to specify no version. The tuple can have any length. -try: - import bz2 # noqa - SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS -except ImportError: - logger.debug('bz2 module is not available') + :return: a tuple of length three if `py_version_info` is non-None. + Otherwise, return `py_version_info` unchanged (i.e. None). 
+ """ + if len(py_version_info) < 3: + py_version_info += (3 - len(py_version_info)) * (0,) + elif len(py_version_info) > 3: + py_version_info = py_version_info[:3] -try: - # Only for Python 3.3+ - import lzma # noqa - SUPPORTED_EXTENSIONS += XZ_EXTENSIONS -except ImportError: - logger.debug('lzma module is not available') + return cast('VersionInfo', py_version_info) def ensure_dir(path): @@ -96,7 +113,8 @@ def ensure_dir(path): try: os.makedirs(path) except OSError as e: - if e.errno != errno.EEXIST: + # Windows can raise spurious ENOTEMPTY errors. See #6426. + if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY: raise @@ -105,7 +123,7 @@ def get_prog(): try: prog = os.path.basename(sys.argv[0]) if prog in ('__main__.py', '-c'): - return "%s -m pip" % sys.executable + return "{} -m pip".format(sys.executable) else: return prog except (AttributeError, TypeError, IndexError): @@ -116,7 +134,7 @@ def get_prog(): # Retry every half second for up to 3 seconds @retry(stop_max_delay=3000, wait_fixed=500) def rmtree(dir, ignore_errors=False): - # type: (str, bool) -> None + # type: (Text, bool) -> None shutil.rmtree(dir, ignore_errors=ignore_errors, onerror=rmtree_errorhandler) @@ -125,8 +143,13 @@ def rmtree_errorhandler(func, path, exc_info): """On Windows, the files in .svn are read-only, so when rmtree() tries to remove them, an exception is thrown. 
We catch that here, remove the read-only attribute, and hopefully continue without problems.""" - # if file type currently read only - if os.stat(path).st_mode & stat.S_IREAD: + try: + has_attr_readonly = not (os.stat(path).st_mode & stat.S_IWRITE) + except (IOError, OSError): + # it's equivalent to os.path.exists + return + + if has_attr_readonly: # convert to read/write os.chmod(path, stat.S_IWRITE) # use the original function to repeat the operation @@ -136,6 +159,40 @@ def rmtree_errorhandler(func, path, exc_info): raise +def path_to_display(path): + # type: (Optional[Union[str, Text]]) -> Optional[Text] + """ + Convert a bytes (or text) path to text (unicode in Python 2) for display + and logging purposes. + + This function should never error out. Also, this function is mainly needed + for Python 2 since in Python 3 str paths are already text. + """ + if path is None: + return None + if isinstance(path, text_type): + return path + # Otherwise, path is a bytes object (str in Python 2). + try: + display_path = path.decode(sys.getfilesystemencoding(), 'strict') + except UnicodeDecodeError: + # Include the full bytes to make troubleshooting easier, even though + # it may not be very human readable. + if PY2: + # Convert the bytes to a readable str representation using + # repr(), and then convert the str to unicode. + # Also, we add the prefix "b" to the repr() return value both + # to make the Python 2 output look like the Python 3 output, and + # to signal to the user that this is a bytes representation. + display_path = str_to_display('b{!r}'.format(path)) + else: + # Silence the "F821 undefined name 'ascii'" flake8 error since + # in Python 3 ascii() is a built-in. 
+ display_path = ascii(path) # noqa: F821 + + return display_path + + def display_path(path): # type: (Union[str, Text]) -> str """Gives the display value for a given path, making it relative to cwd @@ -169,36 +226,71 @@ def ask_path_exists(message, options): return ask(message, options) +def _check_no_input(message): + # type: (str) -> None + """Raise an error if no input is allowed.""" + if os.environ.get('PIP_NO_INPUT'): + raise Exception( + 'No input was expected ($PIP_NO_INPUT set); question: {}'.format( + message) + ) + + def ask(message, options): # type: (str, Iterable[str]) -> str """Ask the message interactively, with the given possible responses""" while 1: - if os.environ.get('PIP_NO_INPUT'): - raise Exception( - 'No input was expected ($PIP_NO_INPUT set); question: %s' % - message - ) + _check_no_input(message) response = input(message) response = response.strip().lower() if response not in options: print( - 'Your response (%r) was not one of the expected responses: ' - '%s' % (response, ', '.join(options)) + 'Your response ({!r}) was not one of the expected responses: ' + '{}'.format(response, ', '.join(options)) ) else: return response +def ask_input(message): + # type: (str) -> str + """Ask for input interactively.""" + _check_no_input(message) + return input(message) + + +def ask_password(message): + # type: (str) -> str + """Ask for a password interactively.""" + _check_no_input(message) + return getpass.getpass(message) + + def format_size(bytes): # type: (float) -> str if bytes > 1000 * 1000: - return '%.1fMB' % (bytes / 1000.0 / 1000) + return '{:.1f} MB'.format(bytes / 1000.0 / 1000) elif bytes > 10 * 1000: - return '%ikB' % (bytes / 1000) + return '{} kB'.format(int(bytes / 1000)) elif bytes > 1000: - return '%.1fkB' % (bytes / 1000.0) + return '{:.1f} kB'.format(bytes / 1000.0) else: - return '%ibytes' % bytes + return '{} bytes'.format(int(bytes)) + + +def tabulate(rows): + # type: (Iterable[Iterable[Any]]) -> Tuple[List[str], List[int]] + 
"""Return a list of formatted rows and a list of column sizes. + + For example:: + + >>> tabulate([['foobar', 2000], [0xdeadbeef]]) + (['foobar 2000', '3735928559'], [10, 4]) + """ + rows = [tuple(map(str, row)) for row in rows] + sizes = [max(map(len, col)) for col in zip_longest(*rows, fillvalue='')] + table = [" ".join(map(str.ljust, row, sizes)).rstrip() for row in rows] + return table, sizes def is_installable_dir(path): @@ -216,21 +308,6 @@ def is_installable_dir(path): return False -def is_svn_page(html): - # type: (Union[str, Text]) -> Optional[Match[Union[str, Text]]] - """ - Returns true if the page appears to be the index page of an svn repository - """ - return (re.search(r'<title>[^<]*Revision \d+:', html) and - re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I)) - - -def file_contents(filename): - # type: (str) -> Text - with open(filename, 'rb') as fp: - return fp.read().decode('utf-8') - - def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE): """Yield pieces of data from a file-like object until EOF.""" while True: @@ -240,34 +317,6 @@ def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE): yield chunk -def split_leading_dir(path): - # type: (Union[str, Text]) -> List[Union[str, Text]] - path = path.lstrip('/').lstrip('\\') - if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or - '\\' not in path): - return path.split('/', 1) - elif '\\' in path: - return path.split('\\', 1) - else: - return [path, ''] - - -def has_leading_dir(paths): - # type: (Iterable[Union[str, Text]]) -> bool - """Returns true if all the paths have the same leading path name - (i.e., everything is in one subdirectory in an archive)""" - common_prefix = None - for path in paths: - prefix, rest = split_leading_dir(path) - if not prefix: - return False - elif common_prefix is None: - common_prefix = prefix - elif prefix != common_prefix: - return False - return True - - def normalize_path(path, resolve_symlinks=True): # type: (str, bool) -> str """ @@ 
-317,10 +366,12 @@ def is_local(path): If we're not in a virtualenv, all paths are considered "local." + Caution: this function assumes the head of path has been normalized + with normalize_path. """ if not running_under_virtualenv(): return True - return normalize_path(path).startswith(normalize_path(sys.prefix)) + return path.startswith(normalize_path(sys.prefix)) def dist_is_local(dist): @@ -340,8 +391,7 @@ def dist_in_usersite(dist): """ Return True if given Distribution is installed in user site. """ - norm_path = normalize_path(dist_location(dist)) - return norm_path.startswith(normalize_path(user_site)) + return dist_location(dist).startswith(normalize_path(user_site)) def dist_in_site_packages(dist): @@ -350,9 +400,7 @@ def dist_in_site_packages(dist): Return True if given Distribution is installed in sysconfig.get_python_lib(). """ - return normalize_path( - dist_location(dist) - ).startswith(normalize_path(site_packages)) + return dist_location(dist).startswith(normalize_path(site_packages)) def dist_is_editable(dist): @@ -367,12 +415,15 @@ def dist_is_editable(dist): return False -def get_installed_distributions(local_only=True, - skip=stdlib_pkgs, - include_editables=True, - editables_only=False, - user_only=False): - # type: (bool, Container[str], bool, bool, bool) -> List[Distribution] +def get_installed_distributions( + local_only=True, # type: bool + skip=stdlib_pkgs, # type: Container[str] + include_editables=True, # type: bool + editables_only=False, # type: bool + user_only=False, # type: bool + paths=None # type: Optional[List[str]] +): + # type: (...) -> List[Distribution] """ Return a list of installed Distribution objects. @@ -389,7 +440,14 @@ def get_installed_distributions(local_only=True, If ``user_only`` is True , only report installations in the user site directory. + If ``paths`` is set, only report the distributions present at the + specified list of locations. 
""" + if paths: + working_set = pkg_resources.WorkingSet(paths) + else: + working_set = pkg_resources.working_set + if local_only: local_test = dist_is_local else: @@ -416,8 +474,7 @@ def get_installed_distributions(local_only=True, def user_test(d): return True - # because of pkg_resources vendoring, mypy cannot find stub in typeshed - return [d for d in pkg_resources.working_set # type: ignore + return [d for d in working_set if local_test(d) and d.key not in skip and editable_test(d) and @@ -426,6 +483,40 @@ def get_installed_distributions(local_only=True, ] +def search_distribution(req_name): + + # Canonicalize the name before searching in the list of + # installed distributions and also while creating the package + # dictionary to get the Distribution object + req_name = canonicalize_name(req_name) + packages = get_installed_distributions(skip=()) + pkg_dict = {canonicalize_name(p.key): p for p in packages} + return pkg_dict.get(req_name) + + +def get_distribution(req_name): + """Given a requirement name, return the installed Distribution object""" + + # Search the distribution by looking through the working set + dist = search_distribution(req_name) + + # If distribution could not be found, call working_set.require + # to update the working set, and try to find the distribution + # again. + # This might happen for e.g. when you install a package + # twice, once using setup.py develop and again using setup.py install. + # Now when run pip uninstall twice, the package gets removed + # from the working set in the first uninstall, so we have to populate + # the working set again so that pip knows about it and the packages + # gets picked up and is successfully uninstalled the second time too. 
+ if not dist: + try: + pkg_resources.working_set.require(req_name) + except pkg_resources.DistributionNotFound: + return None + return search_distribution(req_name) + + def egg_link_path(dist): # type: (Distribution) -> Optional[str] """ @@ -447,12 +538,9 @@ def egg_link_path(dist): """ sites = [] if running_under_virtualenv(): - if virtualenv_no_global(): - sites.append(site_packages) - else: - sites.append(site_packages) - if user_site: - sites.append(user_site) + sites.append(site_packages) + if not virtualenv_no_global() and user_site: + sites.append(user_site) else: if user_site: sites.append(user_site) @@ -473,350 +561,28 @@ def dist_location(dist): packages, where dist.location is the source code location, and we want to know where the egg-link file is. + The returned location is normalized (in particular, with symlinks removed). """ egg_link = egg_link_path(dist) if egg_link: - return egg_link - return dist.location - - -def current_umask(): - """Get the current umask which involves having to set it temporarily.""" - mask = os.umask(0) - os.umask(mask) - return mask - - -def unzip_file(filename, location, flatten=True): - # type: (str, str, bool) -> None - """ - Unzip the file (with path `filename`) to the destination `location`. All - files are written based on system defaults and umask (i.e. permissions are - not preserved), except that regular file members with any execute - permissions (user, group, or world) have "chmod +x" applied after being - written. Note that for windows, any execute changes using os.chmod are - no-ops per the python docs. 
- """ - ensure_dir(location) - zipfp = open(filename, 'rb') - try: - zip = zipfile.ZipFile(zipfp, allowZip64=True) - leading = has_leading_dir(zip.namelist()) and flatten - for info in zip.infolist(): - name = info.filename - fn = name - if leading: - fn = split_leading_dir(name)[1] - fn = os.path.join(location, fn) - dir = os.path.dirname(fn) - if fn.endswith('/') or fn.endswith('\\'): - # A directory - ensure_dir(fn) - else: - ensure_dir(dir) - # Don't use read() to avoid allocating an arbitrarily large - # chunk of memory for the file's content - fp = zip.open(name) - try: - with open(fn, 'wb') as destfp: - shutil.copyfileobj(fp, destfp) - finally: - fp.close() - mode = info.external_attr >> 16 - # if mode and regular file and any execute permissions for - # user/group/world? - if mode and stat.S_ISREG(mode) and mode & 0o111: - # make dest file have execute for user/group/world - # (chmod +x) no-op on windows per python docs - os.chmod(fn, (0o777 - current_umask() | 0o111)) - finally: - zipfp.close() - - -def untar_file(filename, location): - # type: (str, str) -> None - """ - Untar the file (with path `filename`) to the destination `location`. - All files are written based on system defaults and umask (i.e. permissions - are not preserved), except that regular file members with any execute - permissions (user, group, or world) have "chmod +x" applied after being - written. Note that for windows, any execute changes using os.chmod are - no-ops per the python docs. 
- """ - ensure_dir(location) - if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'): - mode = 'r:gz' - elif filename.lower().endswith(BZ2_EXTENSIONS): - mode = 'r:bz2' - elif filename.lower().endswith(XZ_EXTENSIONS): - mode = 'r:xz' - elif filename.lower().endswith('.tar'): - mode = 'r' - else: - logger.warning( - 'Cannot determine compression type for file %s', filename, - ) - mode = 'r:*' - tar = tarfile.open(filename, mode) - try: - leading = has_leading_dir([ - member.name for member in tar.getmembers() - ]) - for member in tar.getmembers(): - fn = member.name - if leading: - # https://github.com/python/mypy/issues/1174 - fn = split_leading_dir(fn)[1] # type: ignore - path = os.path.join(location, fn) - if member.isdir(): - ensure_dir(path) - elif member.issym(): - try: - # https://github.com/python/typeshed/issues/2673 - tar._extract_member(member, path) # type: ignore - except Exception as exc: - # Some corrupt tar files seem to produce this - # (specifically bad symlinks) - logger.warning( - 'In the tar file %s the member %s is invalid: %s', - filename, member.name, exc, - ) - continue - else: - try: - fp = tar.extractfile(member) - except (KeyError, AttributeError) as exc: - # Some corrupt tar files seem to produce this - # (specifically bad symlinks) - logger.warning( - 'In the tar file %s the member %s is invalid: %s', - filename, member.name, exc, - ) - continue - ensure_dir(os.path.dirname(path)) - with open(path, 'wb') as destfp: - shutil.copyfileobj(fp, destfp) - fp.close() - # Update the timestamp (useful for cython compiled files) - # https://github.com/python/typeshed/issues/2673 - tar.utime(member, path) # type: ignore - # member have any execute permissions for user/group/world? 
- if member.mode & 0o111: - # make dest file have execute for user/group/world - # no-op on windows per python docs - os.chmod(path, (0o777 - current_umask() | 0o111)) - finally: - tar.close() - - -def unpack_file( - filename, # type: str - location, # type: str - content_type, # type: Optional[str] - link # type: Optional[Link] -): - # type: (...) -> None - filename = os.path.realpath(filename) - if (content_type == 'application/zip' or - filename.lower().endswith(ZIP_EXTENSIONS) or - zipfile.is_zipfile(filename)): - unzip_file( - filename, - location, - flatten=not filename.endswith('.whl') - ) - elif (content_type == 'application/x-gzip' or - tarfile.is_tarfile(filename) or - filename.lower().endswith( - TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)): - untar_file(filename, location) - elif (content_type and content_type.startswith('text/html') and - is_svn_page(file_contents(filename))): - # We don't really care about this - from pip._internal.vcs.subversion import Subversion - Subversion('svn+' + link.url).unpack(location) - else: - # FIXME: handle? - # FIXME: magic signatures? - logger.critical( - 'Cannot unpack file %s (downloaded from %s, content-type: %s); ' - 'cannot detect archive format', - filename, location, content_type, - ) - raise InstallationError( - 'Cannot determine archive format of %s' % location - ) - - -def call_subprocess( - cmd, # type: List[str] - show_stdout=True, # type: bool - cwd=None, # type: Optional[str] - on_returncode='raise', # type: str - extra_ok_returncodes=None, # type: Optional[Iterable[int]] - command_desc=None, # type: Optional[str] - extra_environ=None, # type: Optional[Mapping[str, Any]] - unset_environ=None, # type: Optional[Iterable[str]] - spinner=None # type: Optional[SpinnerInterface] -): - # type: (...) -> Optional[Text] - """ - Args: - extra_ok_returncodes: an iterable of integer return codes that are - acceptable, in addition to 0. Defaults to None, which means []. 
- unset_environ: an iterable of environment variable names to unset - prior to calling subprocess.Popen(). - """ - if extra_ok_returncodes is None: - extra_ok_returncodes = [] - if unset_environ is None: - unset_environ = [] - # This function's handling of subprocess output is confusing and I - # previously broke it terribly, so as penance I will write a long comment - # explaining things. - # - # The obvious thing that affects output is the show_stdout= - # kwarg. show_stdout=True means, let the subprocess write directly to our - # stdout. Even though it is nominally the default, it is almost never used - # inside pip (and should not be used in new code without a very good - # reason); as of 2016-02-22 it is only used in a few places inside the VCS - # wrapper code. Ideally we should get rid of it entirely, because it - # creates a lot of complexity here for a rarely used feature. - # - # Most places in pip set show_stdout=False. What this means is: - # - We connect the child stdout to a pipe, which we read. - # - By default, we hide the output but show a spinner -- unless the - # subprocess exits with an error, in which case we show the output. - # - If the --verbose option was passed (= loglevel is DEBUG), then we show - # the output unconditionally. (But in this case we don't want to show - # the output a second time if it turns out that there was an error.) - # - # stderr is always merged with stdout (even if show_stdout=True). 
- if show_stdout: - stdout = None - else: - stdout = subprocess.PIPE - if command_desc is None: - cmd_parts = [] - for part in cmd: - if ' ' in part or '\n' in part or '"' in part or "'" in part: - part = '"%s"' % part.replace('"', '\\"') - cmd_parts.append(part) - command_desc = ' '.join(cmd_parts) - logger.debug("Running command %s", command_desc) - env = os.environ.copy() - if extra_environ: - env.update(extra_environ) - for name in unset_environ: - env.pop(name, None) - try: - proc = subprocess.Popen( - cmd, stderr=subprocess.STDOUT, stdin=subprocess.PIPE, - stdout=stdout, cwd=cwd, env=env, - ) - proc.stdin.close() - except Exception as exc: - logger.critical( - "Error %s while executing command %s", exc, command_desc, - ) - raise - all_output = [] - if stdout is not None: - while True: - line = console_to_str(proc.stdout.readline()) - if not line: - break - line = line.rstrip() - all_output.append(line + '\n') - if logger.getEffectiveLevel() <= std_logging.DEBUG: - # Show the line immediately - logger.debug(line) - else: - # Update the spinner - if spinner is not None: - spinner.spin() - try: - proc.wait() - finally: - if proc.stdout: - proc.stdout.close() - if spinner is not None: - if proc.returncode: - spinner.finish("error") - else: - spinner.finish("done") - if proc.returncode and proc.returncode not in extra_ok_returncodes: - if on_returncode == 'raise': - if (logger.getEffectiveLevel() > std_logging.DEBUG and - not show_stdout): - logger.info( - 'Complete output from command %s:', command_desc, - ) - logger.info( - ''.join(all_output) + - '\n----------------------------------------' - ) - raise InstallationError( - 'Command "%s" failed with error code %s in %s' - % (command_desc, proc.returncode, cwd)) - elif on_returncode == 'warn': - logger.warning( - 'Command "%s" had error code %s in %s', - command_desc, proc.returncode, cwd, - ) - elif on_returncode == 'ignore': - pass - else: - raise ValueError('Invalid value: on_returncode=%s' % - 
repr(on_returncode)) - if not show_stdout: - return ''.join(all_output) - return None - + return normalize_path(egg_link) + return normalize_path(dist.location) -def read_text_file(filename): - # type: (str) -> str - """Return the contents of *filename*. - - Try to decode the file contents with utf-8, the preferred system encoding - (e.g., cp1252 on some Windows machines), and latin1, in that order. - Decoding a byte string with latin1 will never raise an error. In the worst - case, the returned string will contain some garbage characters. - - """ - with open(filename, 'rb') as fp: - data = fp.read() - - encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1'] - for enc in encodings: - try: - # https://github.com/python/mypy/issues/1174 - data = data.decode(enc) # type: ignore - except UnicodeDecodeError: - continue - break - assert not isinstance(data, bytes) # Latin1 should have worked. - return data - - -def _make_build_dir(build_dir): - os.makedirs(build_dir) - write_delete_marker_file(build_dir) +def write_output(msg, *args): + # type: (Any, Any) -> None + logger.info(msg, *args) class FakeFile(object): """Wrap a list of lines in an object with readline() to make ConfigParser happy.""" def __init__(self, lines): - self._gen = (l for l in lines) + self._gen = iter(lines) def readline(self): try: - try: - return next(self._gen) - except NameError: - return self._gen.next() + return next(self._gen) except StopIteration: return '' @@ -871,26 +637,6 @@ def captured_stderr(): return captured_output('stderr') -class cached_property(object): - """A property that is only computed once per instance and then replaces - itself with an ordinary attribute. Deleting the attribute resets the - property. 
- - Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175 - """ - - def __init__(self, func): - self.__doc__ = getattr(func, '__doc__') - self.func = func - - def __get__(self, obj, cls): - if obj is None: - # We're being accessed from the class itself, not from an object - return self - value = obj.__dict__[self.func.__name__] = self.func(obj) - return value - - def get_installed_version(dist_name, working_set=None): """Get the installed version of dist_name avoiding pkg_resources cache""" # Create a requirement that we'll look for inside of setuptools. @@ -922,20 +668,38 @@ def enum(*sequential, **named): return type('Enum', (), enums) -def make_vcs_requirement_url(repo_url, rev, project_name, subdir=None): +def build_netloc(host, port): + # type: (str, Optional[int]) -> str """ - Return the URL for a VCS requirement. + Build a netloc from a host-port pair + """ + if port is None: + return host + if ':' in host: + # Only wrap host with square brackets when it is IPv6 + host = '[{}]'.format(host) + return '{}:{}'.format(host, port) + - Args: - repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+"). - project_name: the (unescaped) project name. +def build_url_from_netloc(netloc, scheme='https'): + # type: (str, str) -> str """ - egg_project_name = pkg_resources.to_filename(project_name) - req = '{}@{}#egg={}'.format(repo_url, rev, egg_project_name) - if subdir: - req += '&subdirectory={}'.format(subdir) + Build a full URL from a netloc. + """ + if netloc.count(':') >= 2 and '@' not in netloc and '[' not in netloc: + # It must be a bare IPv6 address, so wrap it with brackets. + netloc = '[{}]'.format(netloc) + return '{}://{}'.format(scheme, netloc) - return req + +def parse_netloc(netloc): + # type: (str) -> Tuple[str, Optional[int]] + """ + Return the host-port pair from a netloc. 
+ """ + url = build_url_from_netloc(netloc) + parsed = urllib_parse.urlparse(url) + return parsed.hostname, parsed.port def split_auth_from_netloc(netloc): @@ -969,49 +733,127 @@ def split_auth_from_netloc(netloc): def redact_netloc(netloc): # type: (str) -> str """ - Replace the password in a netloc with "****", if it exists. + Replace the sensitive data in a netloc with "****", if it exists. - For example, "user:pass@example.com" returns "user:****@example.com". + For example: + - "user:pass@example.com" returns "user:****@example.com" + - "accesstoken@example.com" returns "****@example.com" """ netloc, (user, password) = split_auth_from_netloc(netloc) if user is None: return netloc - password = '' if password is None else ':****' - return '{user}{password}@{netloc}'.format(user=urllib_parse.quote(user), + if password is None: + user = '****' + password = '' + else: + user = urllib_parse.quote(user) + password = ':****' + return '{user}{password}@{netloc}'.format(user=user, password=password, netloc=netloc) def _transform_url(url, transform_netloc): + """Transform and replace netloc in a url. + + transform_netloc is a function taking the netloc and returning a + tuple. The first element of this tuple is the new netloc. The + entire tuple is returned. + + Returns a tuple containing the transformed url as item 0 and the + original tuple returned by transform_netloc as item 1. 
+ """ purl = urllib_parse.urlsplit(url) - netloc = transform_netloc(purl.netloc) + netloc_tuple = transform_netloc(purl.netloc) # stripped url url_pieces = ( - purl.scheme, netloc, purl.path, purl.query, purl.fragment + purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment ) surl = urllib_parse.urlunsplit(url_pieces) - return surl + return surl, netloc_tuple def _get_netloc(netloc): - return split_auth_from_netloc(netloc)[0] + return split_auth_from_netloc(netloc) + + +def _redact_netloc(netloc): + return (redact_netloc(netloc),) + + +def split_auth_netloc_from_url(url): + # type: (str) -> Tuple[str, str, Tuple[str, str]] + """ + Parse a url into separate netloc, auth, and url with no auth. + + Returns: (url_without_auth, netloc, (username, password)) + """ + url_without_auth, (netloc, auth) = _transform_url(url, _get_netloc) + return url_without_auth, netloc, auth def remove_auth_from_url(url): # type: (str) -> str - # Return a copy of url with 'username:password@' removed. + """Return a copy of url with 'username:password@' removed.""" # username/pass params are passed to subversion through flags # and are not recognized in the url. - return _transform_url(url, _get_netloc) + return _transform_url(url, _get_netloc)[0] -def redact_password_from_url(url): +def redact_auth_from_url(url): # type: (str) -> str """Replace the password in a given url with ****.""" - return _transform_url(url, redact_netloc) + return _transform_url(url, _redact_netloc)[0] + + +class HiddenText(object): + def __init__( + self, + secret, # type: str + redacted, # type: str + ): + # type: (...) -> None + self.secret = secret + self.redacted = redacted + + def __repr__(self): + # type: (...) -> str + return '<HiddenText {!r}>'.format(str(self)) + + def __str__(self): + # type: (...) -> str + return self.redacted + + # This is useful for testing. 
+ def __eq__(self, other): + # type: (Any) -> bool + if type(self) != type(other): + return False + + # The string being used for redaction doesn't also have to match, + # just the raw, original string. + return (self.secret == other.secret) + + # We need to provide an explicit __ne__ implementation for Python 2. + # TODO: remove this when we drop PY2 support. + def __ne__(self, other): + # type: (Any) -> bool + return not self == other + + +def hide_value(value): + # type: (str) -> HiddenText + return HiddenText(value, redacted='****') + + +def hide_url(url): + # type: (str) -> HiddenText + redacted = redact_auth_from_url(url) + return HiddenText(url, redacted=redacted) def protect_pip_from_modification_on_windows(modifying_pip): + # type: (bool) -> None """Protection of pip.exe from modification on Windows On Windows, any operation modifying pip should be run as: @@ -1038,3 +880,63 @@ def protect_pip_from_modification_on_windows(modifying_pip): 'To modify pip, please run the following command:\n{}' .format(" ".join(new_command)) ) + + +def is_console_interactive(): + # type: () -> bool + """Is this console interactive? + """ + return sys.stdin is not None and sys.stdin.isatty() + + +def hash_file(path, blocksize=1 << 20): + # type: (Text, int) -> Tuple[Any, int] + """Return (hash, length) for path using hashlib.sha256() + """ + + h = hashlib.sha256() + length = 0 + with open(path, 'rb') as f: + for block in read_chunks(f, size=blocksize): + length += len(block) + h.update(block) + return h, length + + +def is_wheel_installed(): + """ + Return whether the wheel package is installed. + """ + try: + import wheel # noqa: F401 + except ImportError: + return False + + return True + + +def pairwise(iterable): + # type: (Iterable[Any]) -> Iterator[Tuple[Any, Any]] + """ + Return paired elements. + + For example: + s -> (s0, s1), (s2, s3), (s4, s5), ... 
+ """ + iterable = iter(iterable) + return zip_longest(iterable, iterable) + + +def partition( + pred, # type: Callable[[T], bool] + iterable, # type: Iterable[T] +): + # type: (...) -> Tuple[Iterable[T], Iterable[T]] + """ + Use a predicate to partition entries into false entries and true entries, + like + + partition(is_odd, range(10)) --> 0 2 4 6 8 and 1 3 5 7 9 + """ + t1, t2 = tee(iterable) + return filterfalse(pred, t1), filter(pred, t2) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/models.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/models.py index d5cb80a7..d1c2f226 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/models.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/models.py @@ -1,13 +1,17 @@ """Utilities for defining models """ +# The following comment should be removed at some point in the future. +# mypy: disallow-untyped-defs=False import operator class KeyBasedCompareMixin(object): - """Provides comparision capabilities that is based on a key + """Provides comparison capabilities that is based on a key """ + __slots__ = ['_compare_key', '_defining_class'] + def __init__(self, key, defining_class): self._compare_key = key self._defining_class = defining_class diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/packaging.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/packaging.py index 7aaf7b5e..68aa86ed 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/packaging.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/packaging.py @@ -1,79 +1,88 @@ from __future__ import absolute_import import logging -import sys from email.parser import FeedParser from pip._vendor import pkg_resources from pip._vendor.packaging import specifiers, version -from 
pip._internal import exceptions +from pip._internal.exceptions import NoneMetadataError from pip._internal.utils.misc import display_path from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Optional # noqa: F401 - from email.message import Message # noqa: F401 - from pip._vendor.pkg_resources import Distribution # noqa: F401 + from typing import Optional, Tuple + from email.message import Message + from pip._vendor.pkg_resources import Distribution logger = logging.getLogger(__name__) -def check_requires_python(requires_python): - # type: (Optional[str]) -> bool +def check_requires_python(requires_python, version_info): + # type: (Optional[str], Tuple[int, ...]) -> bool """ - Check if the python version in use match the `requires_python` specifier. + Check if the given Python version matches a "Requires-Python" specifier. - Returns `True` if the version of python in use matches the requirement. - Returns `False` if the version of python in use does not matches the - requirement. + :param version_info: A 3-tuple of ints representing a Python + major-minor-micro version to check (e.g. `sys.version_info[:3]`). - Raises an InvalidSpecifier if `requires_python` have an invalid format. + :return: `True` if the given Python version satisfies the requirement. + Otherwise, return `False`. + + :raises InvalidSpecifier: If `requires_python` has an invalid format. """ if requires_python is None: # The package provides no information return True requires_python_specifier = specifiers.SpecifierSet(requires_python) - # We only use major.minor.micro - python_version = version.parse('.'.join(map(str, sys.version_info[:3]))) + python_version = version.parse('.'.join(map(str, version_info))) return python_version in requires_python_specifier def get_metadata(dist): # type: (Distribution) -> Message + """ + :raises NoneMetadataError: if the distribution reports `has_metadata()` + True but `get_metadata()` returns None. 
+ """ + metadata_name = 'METADATA' if (isinstance(dist, pkg_resources.DistInfoDistribution) and - dist.has_metadata('METADATA')): - metadata = dist.get_metadata('METADATA') + dist.has_metadata(metadata_name)): + metadata = dist.get_metadata(metadata_name) elif dist.has_metadata('PKG-INFO'): - metadata = dist.get_metadata('PKG-INFO') + metadata_name = 'PKG-INFO' + metadata = dist.get_metadata(metadata_name) else: logger.warning("No metadata found in %s", display_path(dist.location)) metadata = '' + if metadata is None: + raise NoneMetadataError(dist, metadata_name) + feed_parser = FeedParser() + # The following line errors out if with a "NoneType" TypeError if + # passed metadata=None. feed_parser.feed(metadata) return feed_parser.close() -def check_dist_requires_python(dist): +def get_requires_python(dist): + # type: (pkg_resources.Distribution) -> Optional[str] + """ + Return the "Requires-Python" metadata for a distribution, or None + if not present. + """ pkg_info_dict = get_metadata(dist) requires_python = pkg_info_dict.get('Requires-Python') - try: - if not check_requires_python(requires_python): - raise exceptions.UnsupportedPythonVersion( - "%s requires Python '%s' but the running Python is %s" % ( - dist.project_name, - requires_python, - '.'.join(map(str, sys.version_info[:3])),) - ) - except specifiers.InvalidSpecifier as e: - logger.warning( - "Package %s has an invalid Requires-Python entry %s - %s", - dist.project_name, requires_python, e, - ) - return + + if requires_python is not None: + # Convert to a str to satisfy the type checker, since requires_python + # can be a Header object. 
+ requires_python = str(requires_python) + + return requires_python def get_installer(dist): diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/parallel.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/parallel.py new file mode 100644 index 00000000..9fe1fe8b --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/parallel.py @@ -0,0 +1,107 @@ +"""Convenient parallelization of higher order functions. + +This module provides two helper functions, with appropriate fallbacks on +Python 2 and on systems lacking support for synchronization mechanisms: + +- map_multiprocess +- map_multithread + +These helpers work like Python 3's map, with two differences: + +- They don't guarantee the order of processing of + the elements of the iterable. +- The underlying process/thread pools chop the iterable into + a number of chunks, so that for very long iterables using + a large value for chunksize can make the job complete much faster + than using the default value of 1. +""" + +__all__ = ['map_multiprocess', 'map_multithread'] + +from contextlib import contextmanager +from multiprocessing import Pool as ProcessPool +from multiprocessing.dummy import Pool as ThreadPool + +from pip._vendor.requests.adapters import DEFAULT_POOLSIZE +from pip._vendor.six import PY2 +from pip._vendor.six.moves import map + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Callable, Iterable, Iterator, Union, TypeVar + from multiprocessing import pool + + Pool = Union[pool.Pool, pool.ThreadPool] + S = TypeVar('S') + T = TypeVar('T') + +# On platforms without sem_open, multiprocessing[.dummy] Pool +# cannot be created. +try: + import multiprocessing.synchronize # noqa +except ImportError: + LACK_SEM_OPEN = True +else: + LACK_SEM_OPEN = False + +# Incredibly large timeout to work around bpo-8296 on Python 2. 
+TIMEOUT = 2000000 + + +@contextmanager +def closing(pool): + # type: (Pool) -> Iterator[Pool] + """Return a context manager making sure the pool closes properly.""" + try: + yield pool + finally: + # For Pool.imap*, close and join are needed + # for the returned iterator to begin yielding. + pool.close() + pool.join() + pool.terminate() + + +def _map_fallback(func, iterable, chunksize=1): + # type: (Callable[[S], T], Iterable[S], int) -> Iterator[T] + """Make an iterator applying func to each element in iterable. + + This function is the sequential fallback either on Python 2 + where Pool.imap* doesn't react to KeyboardInterrupt + or when sem_open is unavailable. + """ + return map(func, iterable) + + +def _map_multiprocess(func, iterable, chunksize=1): + # type: (Callable[[S], T], Iterable[S], int) -> Iterator[T] + """Chop iterable into chunks and submit them to a process pool. + + For very long iterables using a large value for chunksize can make + the job complete much faster than using the default value of 1. + + Return an unordered iterator of the results. + """ + with closing(ProcessPool()) as pool: + return pool.imap_unordered(func, iterable, chunksize) + + +def _map_multithread(func, iterable, chunksize=1): + # type: (Callable[[S], T], Iterable[S], int) -> Iterator[T] + """Chop iterable into chunks and submit them to a thread pool. + + For very long iterables using a large value for chunksize can make + the job complete much faster than using the default value of 1. + + Return an unordered iterator of the results. 
+ """ + with closing(ThreadPool(DEFAULT_POOLSIZE)) as pool: + return pool.imap_unordered(func, iterable, chunksize) + + +if LACK_SEM_OPEN or PY2: + map_multiprocess = map_multithread = _map_fallback +else: + map_multiprocess = _map_multiprocess + map_multithread = _map_multithread diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/pkg_resources.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/pkg_resources.py new file mode 100644 index 00000000..0bc129ac --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/pkg_resources.py @@ -0,0 +1,44 @@ +from pip._vendor.pkg_resources import yield_lines +from pip._vendor.six import ensure_str + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Dict, Iterable, List + + +class DictMetadata(object): + """IMetadataProvider that reads metadata files from a dictionary. + """ + def __init__(self, metadata): + # type: (Dict[str, bytes]) -> None + self._metadata = metadata + + def has_metadata(self, name): + # type: (str) -> bool + return name in self._metadata + + def get_metadata(self, name): + # type: (str) -> str + try: + return ensure_str(self._metadata[name]) + except UnicodeDecodeError as e: + # Mirrors handling done in pkg_resources.NullProvider. 
+ e.reason += " in {} file".format(name) + raise + + def get_metadata_lines(self, name): + # type: (str) -> Iterable[str] + return yield_lines(self.get_metadata(name)) + + def metadata_isdir(self, name): + # type: (str) -> bool + return False + + def metadata_listdir(self, name): + # type: (str) -> List[str] + return [] + + def run_script(self, script_name, namespace): + # type: (str, str) -> None + pass diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/setuptools_build.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/setuptools_build.py index 03973e97..2a664b00 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/setuptools_build.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/setuptools_build.py @@ -1,8 +1,181 @@ +import sys + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional, Sequence + # Shim to wrap setup.py invocation with setuptools -SETUPTOOLS_SHIM = ( - "import setuptools, tokenize;__file__=%r;" +# +# We set sys.argv[0] to the path to the underlying setup.py file so +# setuptools / distutils don't take the path to the setup.py to be "-c" when +# invoking via the shim. This avoids e.g. the following manifest_maker +# warning: "warning: manifest_maker: standard file '-c' not found". +_SETUPTOOLS_SHIM = ( + "import sys, setuptools, tokenize; sys.argv[0] = {0!r}; __file__={0!r};" "f=getattr(tokenize, 'open', open)(__file__);" "code=f.read().replace('\\r\\n', '\\n');" "f.close();" "exec(compile(code, __file__, 'exec'))" ) + + +def make_setuptools_shim_args( + setup_py_path, # type: str + global_options=None, # type: Sequence[str] + no_user_config=False, # type: bool + unbuffered_output=False # type: bool +): + # type: (...) -> List[str] + """ + Get setuptools command arguments with shim wrapped setup file invocation. 
+ + :param setup_py_path: The path to setup.py to be wrapped. + :param global_options: Additional global options. + :param no_user_config: If True, disables personal user configuration. + :param unbuffered_output: If True, adds the unbuffered switch to the + argument list. + """ + args = [sys.executable] + if unbuffered_output: + args += ["-u"] + args += ["-c", _SETUPTOOLS_SHIM.format(setup_py_path)] + if global_options: + args += global_options + if no_user_config: + args += ["--no-user-cfg"] + return args + + +def make_setuptools_bdist_wheel_args( + setup_py_path, # type: str + global_options, # type: Sequence[str] + build_options, # type: Sequence[str] + destination_dir, # type: str +): + # type: (...) -> List[str] + # NOTE: Eventually, we'd want to also -S to the flags here, when we're + # isolating. Currently, it breaks Python in virtualenvs, because it + # relies on site.py to find parts of the standard library outside the + # virtualenv. + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + unbuffered_output=True + ) + args += ["bdist_wheel", "-d", destination_dir] + args += build_options + return args + + +def make_setuptools_clean_args( + setup_py_path, # type: str + global_options, # type: Sequence[str] +): + # type: (...) -> List[str] + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + unbuffered_output=True + ) + args += ["clean", "--all"] + return args + + +def make_setuptools_develop_args( + setup_py_path, # type: str + global_options, # type: Sequence[str] + install_options, # type: Sequence[str] + no_user_config, # type: bool + prefix, # type: Optional[str] + home, # type: Optional[str] + use_user_site, # type: bool +): + # type: (...) 
-> List[str] + assert not (use_user_site and prefix) + + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + no_user_config=no_user_config, + ) + + args += ["develop", "--no-deps"] + + args += install_options + + if prefix: + args += ["--prefix", prefix] + if home is not None: + args += ["--home", home] + + if use_user_site: + args += ["--user", "--prefix="] + + return args + + +def make_setuptools_egg_info_args( + setup_py_path, # type: str + egg_info_dir, # type: Optional[str] + no_user_config, # type: bool +): + # type: (...) -> List[str] + args = make_setuptools_shim_args( + setup_py_path, no_user_config=no_user_config + ) + + args += ["egg_info"] + + if egg_info_dir: + args += ["--egg-base", egg_info_dir] + + return args + + +def make_setuptools_install_args( + setup_py_path, # type: str + global_options, # type: Sequence[str] + install_options, # type: Sequence[str] + record_filename, # type: str + root, # type: Optional[str] + prefix, # type: Optional[str] + header_dir, # type: Optional[str] + home, # type: Optional[str] + use_user_site, # type: bool + no_user_config, # type: bool + pycompile # type: bool +): + # type: (...) 
-> List[str] + assert not (use_user_site and prefix) + assert not (use_user_site and root) + + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + no_user_config=no_user_config, + unbuffered_output=True + ) + args += ["install", "--record", record_filename] + args += ["--single-version-externally-managed"] + + if root is not None: + args += ["--root", root] + if prefix is not None: + args += ["--prefix", prefix] + if home is not None: + args += ["--home", home] + if use_user_site: + args += ["--user", "--prefix="] + + if pycompile: + args += ["--compile"] + else: + args += ["--no-compile"] + + if header_dir: + args += ["--install-headers", header_dir] + + args += install_options + + return args diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/subprocess.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/subprocess.py new file mode 100644 index 00000000..d398e68d --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/subprocess.py @@ -0,0 +1,280 @@ +from __future__ import absolute_import + +import logging +import os +import subprocess + +from pip._vendor.six.moves import shlex_quote + +from pip._internal.cli.spinners import SpinnerInterface, open_spinner +from pip._internal.exceptions import InstallationError +from pip._internal.utils.compat import console_to_str, str_to_display +from pip._internal.utils.logging import subprocess_logger +from pip._internal.utils.misc import HiddenText, path_to_display +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import ( + Any, Callable, Iterable, List, Mapping, Optional, Text, Union, + ) + + CommandArgs = List[Union[str, HiddenText]] + + +LOG_DIVIDER = '----------------------------------------' + + +def make_command(*args): + # type: (Union[str, HiddenText, CommandArgs]) -> CommandArgs + """ + Create a CommandArgs object. 
+ """ + command_args = [] # type: CommandArgs + for arg in args: + # Check for list instead of CommandArgs since CommandArgs is + # only known during type-checking. + if isinstance(arg, list): + command_args.extend(arg) + else: + # Otherwise, arg is str or HiddenText. + command_args.append(arg) + + return command_args + + +def format_command_args(args): + # type: (Union[List[str], CommandArgs]) -> str + """ + Format command arguments for display. + """ + # For HiddenText arguments, display the redacted form by calling str(). + # Also, we don't apply str() to arguments that aren't HiddenText since + # this can trigger a UnicodeDecodeError in Python 2 if the argument + # has type unicode and includes a non-ascii character. (The type + # checker doesn't ensure the annotations are correct in all cases.) + return ' '.join( + shlex_quote(str(arg)) if isinstance(arg, HiddenText) + else shlex_quote(arg) for arg in args + ) + + +def reveal_command_args(args): + # type: (Union[List[str], CommandArgs]) -> List[str] + """ + Return the arguments in their raw, unredacted form. + """ + return [ + arg.secret if isinstance(arg, HiddenText) else arg for arg in args + ] + + +def make_subprocess_output_error( + cmd_args, # type: Union[List[str], CommandArgs] + cwd, # type: Optional[str] + lines, # type: List[Text] + exit_status, # type: int +): + # type: (...) -> Text + """ + Create and return the error message to use to log a subprocess error + with command output. + + :param lines: A list of lines, each ending with a newline. + """ + command = format_command_args(cmd_args) + # Convert `command` and `cwd` to text (unicode in Python 2) so we can use + # them as arguments in the unicode format string below. This avoids + # "UnicodeDecodeError: 'ascii' codec can't decode byte ..." in Python 2 + # if either contains a non-ascii character. 
+ command_display = str_to_display(command, desc='command bytes') + cwd_display = path_to_display(cwd) + + # We know the joined output value ends in a newline. + output = ''.join(lines) + msg = ( + # Use a unicode string to avoid "UnicodeEncodeError: 'ascii' + # codec can't encode character ..." in Python 2 when a format + # argument (e.g. `output`) has a non-ascii character. + u'Command errored out with exit status {exit_status}:\n' + ' command: {command_display}\n' + ' cwd: {cwd_display}\n' + 'Complete output ({line_count} lines):\n{output}{divider}' + ).format( + exit_status=exit_status, + command_display=command_display, + cwd_display=cwd_display, + line_count=len(lines), + output=output, + divider=LOG_DIVIDER, + ) + return msg + + +def call_subprocess( + cmd, # type: Union[List[str], CommandArgs] + show_stdout=False, # type: bool + cwd=None, # type: Optional[str] + on_returncode='raise', # type: str + extra_ok_returncodes=None, # type: Optional[Iterable[int]] + command_desc=None, # type: Optional[str] + extra_environ=None, # type: Optional[Mapping[str, Any]] + unset_environ=None, # type: Optional[Iterable[str]] + spinner=None, # type: Optional[SpinnerInterface] + log_failed_cmd=True # type: Optional[bool] +): + # type: (...) -> Text + """ + Args: + show_stdout: if true, use INFO to log the subprocess's stderr and + stdout streams. Otherwise, use DEBUG. Defaults to False. + extra_ok_returncodes: an iterable of integer return codes that are + acceptable, in addition to 0. Defaults to None, which means []. + unset_environ: an iterable of environment variable names to unset + prior to calling subprocess.Popen(). + log_failed_cmd: if false, failed commands are not logged, only raised. + """ + if extra_ok_returncodes is None: + extra_ok_returncodes = [] + if unset_environ is None: + unset_environ = [] + # Most places in pip use show_stdout=False. 
What this means is-- + # + # - We connect the child's output (combined stderr and stdout) to a + # single pipe, which we read. + # - We log this output to stderr at DEBUG level as it is received. + # - If DEBUG logging isn't enabled (e.g. if --verbose logging wasn't + # requested), then we show a spinner so the user can still see the + # subprocess is in progress. + # - If the subprocess exits with an error, we log the output to stderr + # at ERROR level if it hasn't already been displayed to the console + # (e.g. if --verbose logging wasn't enabled). This way we don't log + # the output to the console twice. + # + # If show_stdout=True, then the above is still done, but with DEBUG + # replaced by INFO. + if show_stdout: + # Then log the subprocess output at INFO level. + log_subprocess = subprocess_logger.info + used_level = logging.INFO + else: + # Then log the subprocess output using DEBUG. This also ensures + # it will be logged to the log file (aka user_log), if enabled. + log_subprocess = subprocess_logger.debug + used_level = logging.DEBUG + + # Whether the subprocess will be visible in the console. + showing_subprocess = subprocess_logger.getEffectiveLevel() <= used_level + + # Only use the spinner if we're not showing the subprocess output + # and we have a spinner. + use_spinner = not showing_subprocess and spinner is not None + + if command_desc is None: + command_desc = format_command_args(cmd) + + log_subprocess("Running command %s", command_desc) + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + for name in unset_environ: + env.pop(name, None) + try: + proc = subprocess.Popen( + # Convert HiddenText objects to the underlying str. 
+ reveal_command_args(cmd), + stderr=subprocess.STDOUT, stdin=subprocess.PIPE, + stdout=subprocess.PIPE, cwd=cwd, env=env, + ) + assert proc.stdin + assert proc.stdout + proc.stdin.close() + except Exception as exc: + if log_failed_cmd: + subprocess_logger.critical( + "Error %s while executing command %s", exc, command_desc, + ) + raise + all_output = [] + while True: + # The "line" value is a unicode string in Python 2. + line = console_to_str(proc.stdout.readline()) + if not line: + break + line = line.rstrip() + all_output.append(line + '\n') + + # Show the line immediately. + log_subprocess(line) + # Update the spinner. + if use_spinner: + assert spinner + spinner.spin() + try: + proc.wait() + finally: + if proc.stdout: + proc.stdout.close() + proc_had_error = ( + proc.returncode and proc.returncode not in extra_ok_returncodes + ) + if use_spinner: + assert spinner + if proc_had_error: + spinner.finish("error") + else: + spinner.finish("done") + if proc_had_error: + if on_returncode == 'raise': + if not showing_subprocess and log_failed_cmd: + # Then the subprocess streams haven't been logged to the + # console yet. + msg = make_subprocess_output_error( + cmd_args=cmd, + cwd=cwd, + lines=all_output, + exit_status=proc.returncode, + ) + subprocess_logger.error(msg) + exc_msg = ( + 'Command errored out with exit status {}: {} ' + 'Check the logs for full command output.' + ).format(proc.returncode, command_desc) + raise InstallationError(exc_msg) + elif on_returncode == 'warn': + subprocess_logger.warning( + 'Command "%s" had error code %s in %s', + command_desc, + proc.returncode, + cwd, + ) + elif on_returncode == 'ignore': + pass + else: + raise ValueError('Invalid value: on_returncode={!r}'.format( + on_returncode)) + return ''.join(all_output) + + +def runner_with_spinner_message(message): + # type: (str) -> Callable[..., None] + """Provide a subprocess_runner that shows a spinner message. + + Intended for use with for pep517's Pep517HookCaller. 
Thus, the runner has + an API that matches what's expected by Pep517HookCaller.subprocess_runner. + """ + + def runner( + cmd, # type: List[str] + cwd=None, # type: Optional[str] + extra_environ=None # type: Optional[Mapping[str, Any]] + ): + # type: (...) -> None + with open_spinner(message) as spinner: + call_subprocess( + cmd, + cwd=cwd, + extra_environ=extra_environ, + spinner=spinner, + ) + + return runner diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/temp_dir.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/temp_dir.py index 2c81ad55..03aa8286 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/temp_dir.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/temp_dir.py @@ -5,12 +5,95 @@ import itertools import logging import os.path import tempfile +from contextlib import contextmanager + +from pip._vendor.contextlib2 import ExitStack +from pip._vendor.six import ensure_text + +from pip._internal.utils.misc import enum, rmtree +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Any, Dict, Iterator, Optional, TypeVar, Union + + _T = TypeVar('_T', bound='TempDirectory') -from pip._internal.utils.misc import rmtree logger = logging.getLogger(__name__) +# Kinds of temporary directories. Only needed for ones that are +# globally-managed. 
+tempdir_kinds = enum( + BUILD_ENV="build-env", + EPHEM_WHEEL_CACHE="ephem-wheel-cache", + REQ_BUILD="req-build", +) + + +_tempdir_manager = None # type: Optional[ExitStack] + + +@contextmanager +def global_tempdir_manager(): + # type: () -> Iterator[None] + global _tempdir_manager + with ExitStack() as stack: + old_tempdir_manager, _tempdir_manager = _tempdir_manager, stack + try: + yield + finally: + _tempdir_manager = old_tempdir_manager + + +class TempDirectoryTypeRegistry(object): + """Manages temp directory behavior + """ + + def __init__(self): + # type: () -> None + self._should_delete = {} # type: Dict[str, bool] + + def set_delete(self, kind, value): + # type: (str, bool) -> None + """Indicate whether a TempDirectory of the given kind should be + auto-deleted. + """ + self._should_delete[kind] = value + + def get_delete(self, kind): + # type: (str) -> bool + """Get configured auto-delete flag for a given TempDirectory type, + default True. + """ + return self._should_delete.get(kind, True) + + +_tempdir_registry = None # type: Optional[TempDirectoryTypeRegistry] + + +@contextmanager +def tempdir_registry(): + # type: () -> Iterator[TempDirectoryTypeRegistry] + """Provides a scoped global tempdir registry that can be used to dictate + whether directories should be deleted. + """ + global _tempdir_registry + old_tempdir_registry = _tempdir_registry + _tempdir_registry = TempDirectoryTypeRegistry() + try: + yield _tempdir_registry + finally: + _tempdir_registry = old_tempdir_registry + + +class _Default(object): + pass + + +_default = _Default() + + class TempDirectory(object): """Helper class that owns and cleans up a temporary directory. 
@@ -19,69 +102,101 @@ class TempDirectory(object): Attributes: path - Location to the created temporary directory or None + Location to the created temporary directory delete Whether the directory should be deleted when exiting (when used as a contextmanager) Methods: - create() - Creates a temporary directory and stores its path in the path - attribute. cleanup() - Deletes the temporary directory and sets path attribute to None + Deletes the temporary directory - When used as a context manager, a temporary directory is created on - entering the context and, if the delete attribute is True, on exiting the - context the created directory is deleted. + When used as a context manager, if the delete attribute is True, on + exiting the context the temporary directory is deleted. """ - def __init__(self, path=None, delete=None, kind="temp"): + def __init__( + self, + path=None, # type: Optional[str] + delete=_default, # type: Union[bool, None, _Default] + kind="temp", # type: str + globally_managed=False, # type: bool + ): super(TempDirectory, self).__init__() - if path is None and delete is None: - # If we were not given an explicit directory, and we were not given - # an explicit delete option, then we'll default to deleting. - delete = True + if delete is _default: + if path is not None: + # If we were given an explicit directory, resolve delete option + # now. + delete = False + else: + # Otherwise, we wait until cleanup and see what + # tempdir_registry says. 
+ delete = None + + if path is None: + path = self._create(kind) - self.path = path + self._path = path + self._deleted = False self.delete = delete self.kind = kind + if globally_managed: + assert _tempdir_manager is not None + _tempdir_manager.enter_context(self) + + @property + def path(self): + # type: () -> str + assert not self._deleted, ( + "Attempted to access deleted path: {}".format(self._path) + ) + return self._path + def __repr__(self): + # type: () -> str return "<{} {!r}>".format(self.__class__.__name__, self.path) def __enter__(self): - self.create() + # type: (_T) -> _T return self def __exit__(self, exc, value, tb): - if self.delete: + # type: (Any, Any, Any) -> None + if self.delete is not None: + delete = self.delete + elif _tempdir_registry: + delete = _tempdir_registry.get_delete(self.kind) + else: + delete = True + + if delete: self.cleanup() - def create(self): + def _create(self, kind): + # type: (str) -> str """Create a temporary directory and store its path in self.path """ - if self.path is not None: - logger.debug( - "Skipped creation of temporary directory: {}".format(self.path) - ) - return # We realpath here because some systems have their default tmpdir # symlinked to another directory. This tends to confuse build # scripts, so we canonicalize the path by traversing potential # symlinks here. 
- self.path = os.path.realpath( - tempfile.mkdtemp(prefix="pip-{}-".format(self.kind)) + path = os.path.realpath( + tempfile.mkdtemp(prefix="pip-{}-".format(kind)) ) - logger.debug("Created temporary directory: {}".format(self.path)) + logger.debug("Created temporary directory: %s", path) + return path def cleanup(self): + # type: () -> None """Remove the temporary directory created and reset state """ - if self.path is not None and os.path.exists(self.path): - rmtree(self.path) - self.path = None + self._deleted = True + if os.path.exists(self._path): + # Make sure to pass unicode on Python 2 to make the contents also + # use unicode, ensuring non-ASCII names and can be represented. + rmtree(ensure_text(self._path)) class AdjacentTempDirectory(TempDirectory): @@ -106,11 +221,13 @@ class AdjacentTempDirectory(TempDirectory): LEADING_CHARS = "-~.=%0123456789" def __init__(self, original, delete=None): - super(AdjacentTempDirectory, self).__init__(delete=delete) + # type: (str, Optional[bool]) -> None self.original = original.rstrip('/\\') + super(AdjacentTempDirectory, self).__init__(delete=delete) @classmethod def _generate_names(cls, name): + # type: (str) -> Iterator[str] """Generates a series of temporary names. The algorithm replaces the leading characters in the name @@ -133,7 +250,8 @@ class AdjacentTempDirectory(TempDirectory): if new_name != name: yield new_name - def create(self): + def _create(self, kind): + # type: (str) -> str root, name = os.path.split(self.original) for candidate in self._generate_names(name): path = os.path.join(root, candidate) @@ -144,12 +262,13 @@ class AdjacentTempDirectory(TempDirectory): if ex.errno != errno.EEXIST: raise else: - self.path = os.path.realpath(path) + path = os.path.realpath(path) break - - if not self.path: + else: # Final fallback on the default behavior. 
- self.path = os.path.realpath( - tempfile.mkdtemp(prefix="pip-{}-".format(self.kind)) + path = os.path.realpath( + tempfile.mkdtemp(prefix="pip-{}-".format(kind)) ) - logger.debug("Created temporary directory: {}".format(self.path)) + + logger.debug("Created temporary directory: %s", path) + return path diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/typing.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/typing.py index e085cdfe..8505a29b 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/typing.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/typing.py @@ -21,9 +21,18 @@ In pip, all static-typing related imports should be guarded as follows: from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import ... # noqa: F401 + from typing import ... Ref: https://github.com/python/mypy/issues/3216 """ MYPY_CHECK_RUNNING = False + + +if MYPY_CHECK_RUNNING: + from typing import cast +else: + # typing's cast() is needed at runtime, but we don't want to import typing. + # Thus, we use a dummy no-op version, which we tell mypy to ignore. 
+ def cast(type_, value): # type: ignore + return value diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/ui.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/ui.py deleted file mode 100644 index 433675d7..00000000 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/ui.py +++ /dev/null @@ -1,441 +0,0 @@ -from __future__ import absolute_import, division - -import contextlib -import itertools -import logging -import sys -import time -from signal import SIGINT, default_int_handler, signal - -from pip._vendor import six -from pip._vendor.progress.bar import ( - Bar, ChargingBar, FillingCirclesBar, FillingSquaresBar, IncrementalBar, - ShadyBar, -) -from pip._vendor.progress.helpers import HIDE_CURSOR, SHOW_CURSOR, WritelnMixin -from pip._vendor.progress.spinner import Spinner - -from pip._internal.utils.compat import WINDOWS -from pip._internal.utils.logging import get_indentation -from pip._internal.utils.misc import format_size -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import Any, Iterator, IO # noqa: F401 - -try: - from pip._vendor import colorama -# Lots of different errors can come from this, including SystemError and -# ImportError. -except Exception: - colorama = None - -logger = logging.getLogger(__name__) - - -def _select_progress_class(preferred, fallback): - encoding = getattr(preferred.file, "encoding", None) - - # If we don't know what encoding this file is in, then we'll just assume - # that it doesn't support unicode and use the ASCII bar. - if not encoding: - return fallback - - # Collect all of the possible characters we want to use with the preferred - # bar. 
- characters = [ - getattr(preferred, "empty_fill", six.text_type()), - getattr(preferred, "fill", six.text_type()), - ] - characters += list(getattr(preferred, "phases", [])) - - # Try to decode the characters we're using for the bar using the encoding - # of the given file, if this works then we'll assume that we can use the - # fancier bar and if not we'll fall back to the plaintext bar. - try: - six.text_type().join(characters).encode(encoding) - except UnicodeEncodeError: - return fallback - else: - return preferred - - -_BaseBar = _select_progress_class(IncrementalBar, Bar) # type: Any - - -class InterruptibleMixin(object): - """ - Helper to ensure that self.finish() gets called on keyboard interrupt. - - This allows downloads to be interrupted without leaving temporary state - (like hidden cursors) behind. - - This class is similar to the progress library's existing SigIntMixin - helper, but as of version 1.2, that helper has the following problems: - - 1. It calls sys.exit(). - 2. It discards the existing SIGINT handler completely. - 3. It leaves its own handler in place even after an uninterrupted finish, - which will have unexpected delayed effects if the user triggers an - unrelated keyboard interrupt some time after a progress-displaying - download has already completed, for example. - """ - - def __init__(self, *args, **kwargs): - """ - Save the original SIGINT handler for later. - """ - super(InterruptibleMixin, self).__init__(*args, **kwargs) - - self.original_handler = signal(SIGINT, self.handle_sigint) - - # If signal() returns None, the previous handler was not installed from - # Python, and we cannot restore it. This probably should not happen, - # but if it does, we must restore something sensible instead, at least. - # The least bad option should be Python's default SIGINT handler, which - # just raises KeyboardInterrupt. 
- if self.original_handler is None: - self.original_handler = default_int_handler - - def finish(self): - """ - Restore the original SIGINT handler after finishing. - - This should happen regardless of whether the progress display finishes - normally, or gets interrupted. - """ - super(InterruptibleMixin, self).finish() - signal(SIGINT, self.original_handler) - - def handle_sigint(self, signum, frame): - """ - Call self.finish() before delegating to the original SIGINT handler. - - This handler should only be in place while the progress display is - active. - """ - self.finish() - self.original_handler(signum, frame) - - -class SilentBar(Bar): - - def update(self): - pass - - -class BlueEmojiBar(IncrementalBar): - - suffix = "%(percent)d%%" - bar_prefix = " " - bar_suffix = " " - phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535") # type: Any - - -class DownloadProgressMixin(object): - - def __init__(self, *args, **kwargs): - super(DownloadProgressMixin, self).__init__(*args, **kwargs) - self.message = (" " * (get_indentation() + 2)) + self.message - - @property - def downloaded(self): - return format_size(self.index) - - @property - def download_speed(self): - # Avoid zero division errors... - if self.avg == 0.0: - return "..." - return format_size(1 / self.avg) + "/s" - - @property - def pretty_eta(self): - if self.eta: - return "eta %s" % self.eta_td - return "" - - def iter(self, it, n=1): - for x in it: - yield x - self.next(n) - self.finish() - - -class WindowsMixin(object): - - def __init__(self, *args, **kwargs): - # The Windows terminal does not support the hide/show cursor ANSI codes - # even with colorama. So we'll ensure that hide_cursor is False on - # Windows. - # This call neds to go before the super() call, so that hide_cursor - # is set in time. The base progress bar class writes the "hide cursor" - # code to the terminal in its init, so if we don't set this soon - # enough, we get a "hide" with no corresponding "show"... 
- if WINDOWS and self.hide_cursor: - self.hide_cursor = False - - super(WindowsMixin, self).__init__(*args, **kwargs) - - # Check if we are running on Windows and we have the colorama module, - # if we do then wrap our file with it. - if WINDOWS and colorama: - self.file = colorama.AnsiToWin32(self.file) - # The progress code expects to be able to call self.file.isatty() - # but the colorama.AnsiToWin32() object doesn't have that, so we'll - # add it. - self.file.isatty = lambda: self.file.wrapped.isatty() - # The progress code expects to be able to call self.file.flush() - # but the colorama.AnsiToWin32() object doesn't have that, so we'll - # add it. - self.file.flush = lambda: self.file.wrapped.flush() - - -class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, - DownloadProgressMixin): - - file = sys.stdout - message = "%(percent)d%%" - suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s" - -# NOTE: The "type: ignore" comments on the following classes are there to -# work around https://github.com/python/typing/issues/241 - - -class DefaultDownloadProgressBar(BaseDownloadProgressBar, - _BaseBar): - pass - - -class DownloadSilentBar(BaseDownloadProgressBar, SilentBar): # type: ignore - pass - - -class DownloadIncrementalBar(BaseDownloadProgressBar, # type: ignore - IncrementalBar): - pass - - -class DownloadChargingBar(BaseDownloadProgressBar, # type: ignore - ChargingBar): - pass - - -class DownloadShadyBar(BaseDownloadProgressBar, ShadyBar): # type: ignore - pass - - -class DownloadFillingSquaresBar(BaseDownloadProgressBar, # type: ignore - FillingSquaresBar): - pass - - -class DownloadFillingCirclesBar(BaseDownloadProgressBar, # type: ignore - FillingCirclesBar): - pass - - -class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, # type: ignore - BlueEmojiBar): - pass - - -class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin, - DownloadProgressMixin, WritelnMixin, Spinner): - - file = sys.stdout - suffix = "%(downloaded)s 
%(download_speed)s" - - def next_phase(self): - if not hasattr(self, "_phaser"): - self._phaser = itertools.cycle(self.phases) - return next(self._phaser) - - def update(self): - message = self.message % self - phase = self.next_phase() - suffix = self.suffix % self - line = ''.join([ - message, - " " if message else "", - phase, - " " if suffix else "", - suffix, - ]) - - self.writeln(line) - - -BAR_TYPES = { - "off": (DownloadSilentBar, DownloadSilentBar), - "on": (DefaultDownloadProgressBar, DownloadProgressSpinner), - "ascii": (DownloadIncrementalBar, DownloadProgressSpinner), - "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner), - "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner) -} - - -def DownloadProgressProvider(progress_bar, max=None): - if max is None or max == 0: - return BAR_TYPES[progress_bar][1]().iter - else: - return BAR_TYPES[progress_bar][0](max=max).iter - - -################################################################ -# Generic "something is happening" spinners -# -# We don't even try using progress.spinner.Spinner here because it's actually -# simpler to reimplement from scratch than to coerce their code into doing -# what we need. -################################################################ - -@contextlib.contextmanager -def hidden_cursor(file): - # type: (IO) -> Iterator[None] - # The Windows terminal does not support the hide/show cursor ANSI codes, - # even via colorama. So don't even try. - if WINDOWS: - yield - # We don't want to clutter the output with control characters if we're - # writing to a file, or if the user is running with --quiet. 
- # See https://github.com/pypa/pip/issues/3418 - elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO: - yield - else: - file.write(HIDE_CURSOR) - try: - yield - finally: - file.write(SHOW_CURSOR) - - -class RateLimiter(object): - def __init__(self, min_update_interval_seconds): - # type: (float) -> None - self._min_update_interval_seconds = min_update_interval_seconds - self._last_update = 0 # type: float - - def ready(self): - # type: () -> bool - now = time.time() - delta = now - self._last_update - return delta >= self._min_update_interval_seconds - - def reset(self): - # type: () -> None - self._last_update = time.time() - - -class SpinnerInterface(object): - def spin(self): - # type: () -> None - raise NotImplementedError() - - def finish(self, final_status): - # type: (str) -> None - raise NotImplementedError() - - -class InteractiveSpinner(SpinnerInterface): - def __init__(self, message, file=None, spin_chars="-\\|/", - # Empirically, 8 updates/second looks nice - min_update_interval_seconds=0.125): - self._message = message - if file is None: - file = sys.stdout - self._file = file - self._rate_limiter = RateLimiter(min_update_interval_seconds) - self._finished = False - - self._spin_cycle = itertools.cycle(spin_chars) - - self._file.write(" " * get_indentation() + self._message + " ... 
") - self._width = 0 - - def _write(self, status): - assert not self._finished - # Erase what we wrote before by backspacing to the beginning, writing - # spaces to overwrite the old text, and then backspacing again - backup = "\b" * self._width - self._file.write(backup + " " * self._width + backup) - # Now we have a blank slate to add our status - self._file.write(status) - self._width = len(status) - self._file.flush() - self._rate_limiter.reset() - - def spin(self): - # type: () -> None - if self._finished: - return - if not self._rate_limiter.ready(): - return - self._write(next(self._spin_cycle)) - - def finish(self, final_status): - # type: (str) -> None - if self._finished: - return - self._write(final_status) - self._file.write("\n") - self._file.flush() - self._finished = True - - -# Used for dumb terminals, non-interactive installs (no tty), etc. -# We still print updates occasionally (once every 60 seconds by default) to -# act as a keep-alive for systems like Travis-CI that take lack-of-output as -# an indication that a task has frozen. 
-class NonInteractiveSpinner(SpinnerInterface): - def __init__(self, message, min_update_interval_seconds=60): - # type: (str, float) -> None - self._message = message - self._finished = False - self._rate_limiter = RateLimiter(min_update_interval_seconds) - self._update("started") - - def _update(self, status): - assert not self._finished - self._rate_limiter.reset() - logger.info("%s: %s", self._message, status) - - def spin(self): - # type: () -> None - if self._finished: - return - if not self._rate_limiter.ready(): - return - self._update("still running...") - - def finish(self, final_status): - # type: (str) -> None - if self._finished: - return - self._update("finished with status '%s'" % (final_status,)) - self._finished = True - - -@contextlib.contextmanager -def open_spinner(message): - # type: (str) -> Iterator[SpinnerInterface] - # Interactive spinner goes directly to sys.stdout rather than being routed - # through the logging system, but it acts like it has level INFO, - # i.e. it's only displayed if we're at level INFO or better. - # Non-interactive spinner goes through the logging system, so it is always - # in sync with logging configuration. - if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO: - spinner = InteractiveSpinner(message) # type: SpinnerInterface - else: - spinner = NonInteractiveSpinner(message) - try: - with hidden_cursor(sys.stdout): - yield spinner - except KeyboardInterrupt: - spinner.finish("canceled") - raise - except Exception: - spinner.finish("error") - raise - else: - spinner.finish("done") diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/unpacking.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/unpacking.py new file mode 100644 index 00000000..620f31eb --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/unpacking.py @@ -0,0 +1,281 @@ +"""Utilities related archives. 
+""" + +from __future__ import absolute_import + +import logging +import os +import shutil +import stat +import tarfile +import zipfile + +from pip._internal.exceptions import InstallationError +from pip._internal.utils.filetypes import ( + BZ2_EXTENSIONS, + TAR_EXTENSIONS, + XZ_EXTENSIONS, + ZIP_EXTENSIONS, +) +from pip._internal.utils.misc import ensure_dir +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Iterable, List, Optional, Text, Union + from zipfile import ZipInfo + + +logger = logging.getLogger(__name__) + + +SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS + +try: + import bz2 # noqa + SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS +except ImportError: + logger.debug('bz2 module is not available') + +try: + # Only for Python 3.3+ + import lzma # noqa + SUPPORTED_EXTENSIONS += XZ_EXTENSIONS +except ImportError: + logger.debug('lzma module is not available') + + +def current_umask(): + # type: () -> int + """Get the current umask which involves having to set it temporarily.""" + mask = os.umask(0) + os.umask(mask) + return mask + + +def split_leading_dir(path): + # type: (Union[str, Text]) -> List[Union[str, Text]] + path = path.lstrip('/').lstrip('\\') + if ( + '/' in path and ( + ('\\' in path and path.find('/') < path.find('\\')) or + '\\' not in path + ) + ): + return path.split('/', 1) + elif '\\' in path: + return path.split('\\', 1) + else: + return [path, ''] + + +def has_leading_dir(paths): + # type: (Iterable[Union[str, Text]]) -> bool + """Returns true if all the paths have the same leading path name + (i.e., everything is in one subdirectory in an archive)""" + common_prefix = None + for path in paths: + prefix, rest = split_leading_dir(path) + if not prefix: + return False + elif common_prefix is None: + common_prefix = prefix + elif prefix != common_prefix: + return False + return True + + +def is_within_directory(directory, target): + # type: ((Union[str, Text]), (Union[str, Text])) -> 
bool + """ + Return true if the absolute path of target is within the directory + """ + abs_directory = os.path.abspath(directory) + abs_target = os.path.abspath(target) + + prefix = os.path.commonprefix([abs_directory, abs_target]) + return prefix == abs_directory + + +def set_extracted_file_to_default_mode_plus_executable(path): + # type: (Union[str, Text]) -> None + """ + Make file present at path have execute for user/group/world + (chmod +x) is no-op on windows per python docs + """ + os.chmod(path, (0o777 & ~current_umask() | 0o111)) + + +def zip_item_is_executable(info): + # type: (ZipInfo) -> bool + mode = info.external_attr >> 16 + # if mode and regular file and any execute permissions for + # user/group/world? + return bool(mode and stat.S_ISREG(mode) and mode & 0o111) + + +def unzip_file(filename, location, flatten=True): + # type: (str, str, bool) -> None + """ + Unzip the file (with path `filename`) to the destination `location`. All + files are written based on system defaults and umask (i.e. permissions are + not preserved), except that regular file members with any execute + permissions (user, group, or world) have "chmod +x" applied after being + written. Note that for windows, any execute changes using os.chmod are + no-ops per the python docs. 
+ """ + ensure_dir(location) + zipfp = open(filename, 'rb') + try: + zip = zipfile.ZipFile(zipfp, allowZip64=True) + leading = has_leading_dir(zip.namelist()) and flatten + for info in zip.infolist(): + name = info.filename + fn = name + if leading: + fn = split_leading_dir(name)[1] + fn = os.path.join(location, fn) + dir = os.path.dirname(fn) + if not is_within_directory(location, fn): + message = ( + 'The zip file ({}) has a file ({}) trying to install ' + 'outside target directory ({})' + ) + raise InstallationError(message.format(filename, fn, location)) + if fn.endswith('/') or fn.endswith('\\'): + # A directory + ensure_dir(fn) + else: + ensure_dir(dir) + # Don't use read() to avoid allocating an arbitrarily large + # chunk of memory for the file's content + fp = zip.open(name) + try: + with open(fn, 'wb') as destfp: + shutil.copyfileobj(fp, destfp) + finally: + fp.close() + if zip_item_is_executable(info): + set_extracted_file_to_default_mode_plus_executable(fn) + finally: + zipfp.close() + + +def untar_file(filename, location): + # type: (str, str) -> None + """ + Untar the file (with path `filename`) to the destination `location`. + All files are written based on system defaults and umask (i.e. permissions + are not preserved), except that regular file members with any execute + permissions (user, group, or world) have "chmod +x" applied after being + written. Note that for windows, any execute changes using os.chmod are + no-ops per the python docs. 
+ """ + ensure_dir(location) + if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'): + mode = 'r:gz' + elif filename.lower().endswith(BZ2_EXTENSIONS): + mode = 'r:bz2' + elif filename.lower().endswith(XZ_EXTENSIONS): + mode = 'r:xz' + elif filename.lower().endswith('.tar'): + mode = 'r' + else: + logger.warning( + 'Cannot determine compression type for file %s', filename, + ) + mode = 'r:*' + tar = tarfile.open(filename, mode) + try: + leading = has_leading_dir([ + member.name for member in tar.getmembers() + ]) + for member in tar.getmembers(): + fn = member.name + if leading: + # https://github.com/python/mypy/issues/1174 + fn = split_leading_dir(fn)[1] # type: ignore + path = os.path.join(location, fn) + if not is_within_directory(location, path): + message = ( + 'The tar file ({}) has a file ({}) trying to install ' + 'outside target directory ({})' + ) + raise InstallationError( + message.format(filename, path, location) + ) + if member.isdir(): + ensure_dir(path) + elif member.issym(): + try: + # https://github.com/python/typeshed/issues/2673 + tar._extract_member(member, path) # type: ignore + except Exception as exc: + # Some corrupt tar files seem to produce this + # (specifically bad symlinks) + logger.warning( + 'In the tar file %s the member %s is invalid: %s', + filename, member.name, exc, + ) + continue + else: + try: + fp = tar.extractfile(member) + except (KeyError, AttributeError) as exc: + # Some corrupt tar files seem to produce this + # (specifically bad symlinks) + logger.warning( + 'In the tar file %s the member %s is invalid: %s', + filename, member.name, exc, + ) + continue + ensure_dir(os.path.dirname(path)) + assert fp is not None + with open(path, 'wb') as destfp: + shutil.copyfileobj(fp, destfp) + fp.close() + # Update the timestamp (useful for cython compiled files) + # https://github.com/python/typeshed/issues/2673 + tar.utime(member, path) # type: ignore + # member have any execute permissions for user/group/world? 
+ if member.mode & 0o111: + set_extracted_file_to_default_mode_plus_executable(path) + finally: + tar.close() + + +def unpack_file( + filename, # type: str + location, # type: str + content_type=None, # type: Optional[str] +): + # type: (...) -> None + filename = os.path.realpath(filename) + if ( + content_type == 'application/zip' or + filename.lower().endswith(ZIP_EXTENSIONS) or + zipfile.is_zipfile(filename) + ): + unzip_file( + filename, + location, + flatten=not filename.endswith('.whl') + ) + elif ( + content_type == 'application/x-gzip' or + tarfile.is_tarfile(filename) or + filename.lower().endswith( + TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS + ) + ): + untar_file(filename, location) + else: + # FIXME: handle? + # FIXME: magic signatures? + logger.critical( + 'Cannot unpack file %s (downloaded from %s, content-type: %s); ' + 'cannot detect archive format', + filename, location, content_type, + ) + raise InstallationError( + 'Cannot determine archive format of {}'.format(location) + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/urls.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/urls.py new file mode 100644 index 00000000..f37bc8f9 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/urls.py @@ -0,0 +1,55 @@ +import os +import sys + +from pip._vendor.six.moves.urllib import parse as urllib_parse +from pip._vendor.six.moves.urllib import request as urllib_request + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional, Text, Union + + +def get_url_scheme(url): + # type: (Union[str, Text]) -> Optional[Text] + if ':' not in url: + return None + return url.split(':', 1)[0].lower() + + +def path_to_url(path): + # type: (Union[str, Text]) -> str + """ + Convert a path to a file: URL. The path will be made absolute and have + quoted path parts. 
+ """ + path = os.path.normpath(os.path.abspath(path)) + url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path)) + return url + + +def url_to_path(url): + # type: (str) -> str + """ + Convert a file: URL to a path. + """ + assert url.startswith('file:'), ( + "You can only turn file: urls into filenames (not {url!r})" + .format(**locals())) + + _, netloc, path, _, _ = urllib_parse.urlsplit(url) + + if not netloc or netloc == 'localhost': + # According to RFC 8089, same as empty authority. + netloc = '' + elif sys.platform == 'win32': + # If we have a UNC path, prepend UNC share notation. + netloc = '\\\\' + netloc + else: + raise ValueError( + 'non-local file URIs are not supported on this platform: {url!r}' + .format(**locals()) + ) + + path = urllib_request.url2pathname(netloc + path) + return path diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/virtualenv.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/virtualenv.py new file mode 100644 index 00000000..596a69a7 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/virtualenv.py @@ -0,0 +1,116 @@ +from __future__ import absolute_import + +import logging +import os +import re +import site +import sys + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional + +logger = logging.getLogger(__name__) +_INCLUDE_SYSTEM_SITE_PACKAGES_REGEX = re.compile( + r"include-system-site-packages\s*=\s*(?P<value>true|false)" +) + + +def _running_under_venv(): + # type: () -> bool + """Checks if sys.base_prefix and sys.prefix match. + + This handles PEP 405 compliant virtual environments. + """ + return sys.prefix != getattr(sys, "base_prefix", sys.prefix) + + +def _running_under_regular_virtualenv(): + # type: () -> bool + """Checks if sys.real_prefix is set. + + This handles virtual environments created with pypa's virtualenv. 
+ """ + # pypa/virtualenv case + return hasattr(sys, 'real_prefix') + + +def running_under_virtualenv(): + # type: () -> bool + """Return True if we're running inside a virtualenv, False otherwise. + """ + return _running_under_venv() or _running_under_regular_virtualenv() + + +def _get_pyvenv_cfg_lines(): + # type: () -> Optional[List[str]] + """Reads {sys.prefix}/pyvenv.cfg and returns its contents as list of lines + + Returns None, if it could not read/access the file. + """ + pyvenv_cfg_file = os.path.join(sys.prefix, 'pyvenv.cfg') + try: + with open(pyvenv_cfg_file) as f: + return f.read().splitlines() # avoids trailing newlines + except IOError: + return None + + +def _no_global_under_venv(): + # type: () -> bool + """Check `{sys.prefix}/pyvenv.cfg` for system site-packages inclusion + + PEP 405 specifies that when system site-packages are not supposed to be + visible from a virtual environment, `pyvenv.cfg` must contain the following + line: + + include-system-site-packages = false + + Additionally, log a warning if accessing the file fails. + """ + cfg_lines = _get_pyvenv_cfg_lines() + if cfg_lines is None: + # We're not in a "sane" venv, so assume there is no system + # site-packages access (since that's PEP 405's default state). + logger.warning( + "Could not access 'pyvenv.cfg' despite a virtual environment " + "being active. Assuming global site-packages is not accessible " + "in this environment." + ) + return True + + for line in cfg_lines: + match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line) + if match is not None and match.group('value') == 'false': + return True + return False + + +def _no_global_under_regular_virtualenv(): + # type: () -> bool + """Check if "no-global-site-packages.txt" exists beside site.py + + This mirrors logic in pypa/virtualenv for determining whether system + site-packages are visible in the virtual environment. 
+ """ + site_mod_dir = os.path.dirname(os.path.abspath(site.__file__)) + no_global_site_packages_file = os.path.join( + site_mod_dir, 'no-global-site-packages.txt', + ) + return os.path.exists(no_global_site_packages_file) + + +def virtualenv_no_global(): + # type: () -> bool + """Returns a boolean, whether running in venv with no system site-packages. + """ + # PEP 405 compliance needs to be checked first since virtualenv >=20 would + # return True for both checks, but is only able to use the PEP 405 config. + if _running_under_venv(): + return _no_global_under_venv() + + if _running_under_regular_virtualenv(): + return _no_global_under_regular_virtualenv() + + return False diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/wheel.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/wheel.py new file mode 100644 index 00000000..9ce371c7 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/utils/wheel.py @@ -0,0 +1,225 @@ +"""Support functions for working with wheel files. 
+""" + +from __future__ import absolute_import + +import logging +from email.parser import Parser +from zipfile import ZipFile + +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.pkg_resources import DistInfoDistribution +from pip._vendor.six import PY2, ensure_str + +from pip._internal.exceptions import UnsupportedWheel +from pip._internal.utils.pkg_resources import DictMetadata +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from email.message import Message + from typing import Dict, Tuple + + from pip._vendor.pkg_resources import Distribution + +if PY2: + from zipfile import BadZipfile as BadZipFile +else: + from zipfile import BadZipFile + + +VERSION_COMPATIBLE = (1, 0) + + +logger = logging.getLogger(__name__) + + +class WheelMetadata(DictMetadata): + """Metadata provider that maps metadata decoding exceptions to our + internal exception type. + """ + def __init__(self, metadata, wheel_name): + # type: (Dict[str, bytes], str) -> None + super(WheelMetadata, self).__init__(metadata) + self._wheel_name = wheel_name + + def get_metadata(self, name): + # type: (str) -> str + try: + return super(WheelMetadata, self).get_metadata(name) + except UnicodeDecodeError as e: + # Augment the default error with the origin of the file. + raise UnsupportedWheel( + "Error decoding metadata for {}: {}".format( + self._wheel_name, e + ) + ) + + +def pkg_resources_distribution_for_wheel(wheel_zip, name, location): + # type: (ZipFile, str, str) -> Distribution + """Get a pkg_resources distribution given a wheel. + + :raises UnsupportedWheel: on any errors + """ + info_dir, _ = parse_wheel(wheel_zip, name) + + metadata_files = [ + p for p in wheel_zip.namelist() if p.startswith("{}/".format(info_dir)) + ] + + metadata_text = {} # type: Dict[str, bytes] + for path in metadata_files: + # If a flag is set, namelist entries may be unicode in Python 2. 
+ # We coerce them to native str type to match the types used in the rest + # of the code. This cannot fail because unicode can always be encoded + # with UTF-8. + full_path = ensure_str(path) + _, metadata_name = full_path.split("/", 1) + + try: + metadata_text[metadata_name] = read_wheel_metadata_file( + wheel_zip, full_path + ) + except UnsupportedWheel as e: + raise UnsupportedWheel( + "{} has an invalid wheel, {}".format(name, str(e)) + ) + + metadata = WheelMetadata(metadata_text, location) + + return DistInfoDistribution( + location=location, metadata=metadata, project_name=name + ) + + +def parse_wheel(wheel_zip, name): + # type: (ZipFile, str) -> Tuple[str, Message] + """Extract information from the provided wheel, ensuring it meets basic + standards. + + Returns the name of the .dist-info directory and the parsed WHEEL metadata. + """ + try: + info_dir = wheel_dist_info_dir(wheel_zip, name) + metadata = wheel_metadata(wheel_zip, info_dir) + version = wheel_version(metadata) + except UnsupportedWheel as e: + raise UnsupportedWheel( + "{} has an invalid wheel, {}".format(name, str(e)) + ) + + check_compatibility(version, name) + + return info_dir, metadata + + +def wheel_dist_info_dir(source, name): + # type: (ZipFile, str) -> str + """Returns the name of the contained .dist-info directory. + + Raises AssertionError or UnsupportedWheel if not found, >1 found, or + it doesn't match the provided name. 
+ """ + # Zip file path separators must be / + subdirs = set(p.split("/", 1)[0] for p in source.namelist()) + + info_dirs = [s for s in subdirs if s.endswith('.dist-info')] + + if not info_dirs: + raise UnsupportedWheel(".dist-info directory not found") + + if len(info_dirs) > 1: + raise UnsupportedWheel( + "multiple .dist-info directories found: {}".format( + ", ".join(info_dirs) + ) + ) + + info_dir = info_dirs[0] + + info_dir_name = canonicalize_name(info_dir) + canonical_name = canonicalize_name(name) + if not info_dir_name.startswith(canonical_name): + raise UnsupportedWheel( + ".dist-info directory {!r} does not start with {!r}".format( + info_dir, canonical_name + ) + ) + + # Zip file paths can be unicode or str depending on the zip entry flags, + # so normalize it. + return ensure_str(info_dir) + + +def read_wheel_metadata_file(source, path): + # type: (ZipFile, str) -> bytes + try: + return source.read(path) + # BadZipFile for general corruption, KeyError for missing entry, + # and RuntimeError for password-protected files + except (BadZipFile, KeyError, RuntimeError) as e: + raise UnsupportedWheel( + "could not read {!r} file: {!r}".format(path, e) + ) + + +def wheel_metadata(source, dist_info_dir): + # type: (ZipFile, str) -> Message + """Return the WHEEL metadata of an extracted wheel, if possible. + Otherwise, raise UnsupportedWheel. + """ + path = "{}/WHEEL".format(dist_info_dir) + # Zip file path separators must be / + wheel_contents = read_wheel_metadata_file(source, path) + + try: + wheel_text = ensure_str(wheel_contents) + except UnicodeDecodeError as e: + raise UnsupportedWheel("error decoding {!r}: {!r}".format(path, e)) + + # FeedParser (used by Parser) does not raise any exceptions. The returned + # message may have .defects populated, but for backwards-compatibility we + # currently ignore them. + return Parser().parsestr(wheel_text) + + +def wheel_version(wheel_data): + # type: (Message) -> Tuple[int, ...] 
+ """Given WHEEL metadata, return the parsed Wheel-Version. + Otherwise, raise UnsupportedWheel. + """ + version_text = wheel_data["Wheel-Version"] + if version_text is None: + raise UnsupportedWheel("WHEEL is missing Wheel-Version") + + version = version_text.strip() + + try: + return tuple(map(int, version.split('.'))) + except ValueError: + raise UnsupportedWheel("invalid Wheel-Version: {!r}".format(version)) + + +def check_compatibility(version, name): + # type: (Tuple[int, ...], str) -> None + """Raises errors or warns if called with an incompatible Wheel-Version. + + pip should refuse to install a Wheel-Version that's a major series + ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when + installing a version only minor version ahead (e.g 1.2 > 1.1). + + version: a 2-tuple representing a Wheel-Version (Major, Minor) + name: name of wheel or package to raise exception about + + :raises UnsupportedWheel: when an incompatible Wheel-Version is given + """ + if version[0] > VERSION_COMPATIBLE[0]: + raise UnsupportedWheel( + "{}'s Wheel-Version ({}) is not compatible with this version " + "of pip".format(name, '.'.join(map(str, version))) + ) + elif version > VERSION_COMPATIBLE: + logger.warning( + 'Installing from a newer Wheel-Version (%s)', + '.'.join(map(str, version)), + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/__init__.py index 9cba7646..2a4eb137 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/__init__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/__init__.py @@ -1,534 +1,15 @@ -"""Handles all VCS (version control) support""" -from __future__ import absolute_import - -import errno -import logging -import os -import shutil -import sys - -from pip._vendor.six.moves.urllib import parse as urllib_parse - -from pip._internal.exceptions 
import BadCommand -from pip._internal.utils.misc import ( - display_path, backup_dir, call_subprocess, rmtree, ask_path_exists, +# Expose a limited set of classes and functions so callers outside of +# the vcs package don't need to import deeper than `pip._internal.vcs`. +# (The test directory and imports protected by MYPY_CHECK_RUNNING may +# still need to import from a vcs sub-package.) +# Import all vcs modules to register each VCS in the VcsSupport object. +import pip._internal.vcs.bazaar +import pip._internal.vcs.git +import pip._internal.vcs.mercurial +import pip._internal.vcs.subversion # noqa: F401 +from pip._internal.vcs.versioncontrol import ( # noqa: F401 + RemoteNotFoundError, + is_url, + make_vcs_requirement_url, + vcs, ) -from pip._internal.utils.typing import MYPY_CHECK_RUNNING - -if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 - Any, Dict, Iterable, List, Mapping, Optional, Text, Tuple, Type - ) - from pip._internal.utils.ui import SpinnerInterface # noqa: F401 - - AuthInfo = Tuple[Optional[str], Optional[str]] - -__all__ = ['vcs'] - - -logger = logging.getLogger(__name__) - - -class RemoteNotFoundError(Exception): - pass - - -class RevOptions(object): - - """ - Encapsulates a VCS-specific revision to install, along with any VCS - install options. - - Instances of this class should be treated as if immutable. - """ - - def __init__(self, vcs, rev=None, extra_args=None): - # type: (VersionControl, Optional[str], Optional[List[str]]) -> None - """ - Args: - vcs: a VersionControl object. - rev: the name of the revision to install. - extra_args: a list of extra options. 
- """ - if extra_args is None: - extra_args = [] - - self.extra_args = extra_args - self.rev = rev - self.vcs = vcs - - def __repr__(self): - return '<RevOptions {}: rev={!r}>'.format(self.vcs.name, self.rev) - - @property - def arg_rev(self): - # type: () -> Optional[str] - if self.rev is None: - return self.vcs.default_arg_rev - - return self.rev - - def to_args(self): - # type: () -> List[str] - """ - Return the VCS-specific command arguments. - """ - args = [] # type: List[str] - rev = self.arg_rev - if rev is not None: - args += self.vcs.get_base_rev_args(rev) - args += self.extra_args - - return args - - def to_display(self): - # type: () -> str - if not self.rev: - return '' - - return ' (to revision {})'.format(self.rev) - - def make_new(self, rev): - # type: (str) -> RevOptions - """ - Make a copy of the current instance, but with a new rev. - - Args: - rev: the name of the revision for the new object. - """ - return self.vcs.make_rev_options(rev, extra_args=self.extra_args) - - -class VcsSupport(object): - _registry = {} # type: Dict[str, Type[VersionControl]] - schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn'] - - def __init__(self): - # type: () -> None - # Register more schemes with urlparse for various version control - # systems - urllib_parse.uses_netloc.extend(self.schemes) - # Python >= 2.7.4, 3.3 doesn't have uses_fragment - if getattr(urllib_parse, 'uses_fragment', None): - urllib_parse.uses_fragment.extend(self.schemes) - super(VcsSupport, self).__init__() - - def __iter__(self): - return self._registry.__iter__() - - @property - def backends(self): - # type: () -> List[Type[VersionControl]] - return list(self._registry.values()) - - @property - def dirnames(self): - # type: () -> List[str] - return [backend.dirname for backend in self.backends] - - @property - def all_schemes(self): - # type: () -> List[str] - schemes = [] # type: List[str] - for backend in self.backends: - schemes.extend(backend.schemes) - return schemes - - def 
register(self, cls): - # type: (Type[VersionControl]) -> None - if not hasattr(cls, 'name'): - logger.warning('Cannot register VCS %s', cls.__name__) - return - if cls.name not in self._registry: - self._registry[cls.name] = cls - logger.debug('Registered VCS backend: %s', cls.name) - - def unregister(self, cls=None, name=None): - # type: (Optional[Type[VersionControl]], Optional[str]) -> None - if name in self._registry: - del self._registry[name] - elif cls in self._registry.values(): - del self._registry[cls.name] - else: - logger.warning('Cannot unregister because no class or name given') - - def get_backend_type(self, location): - # type: (str) -> Optional[Type[VersionControl]] - """ - Return the type of the version control backend if found at given - location, e.g. vcs.get_backend_type('/path/to/vcs/checkout') - """ - for vc_type in self._registry.values(): - if vc_type.controls_location(location): - logger.debug('Determine that %s uses VCS: %s', - location, vc_type.name) - return vc_type - return None - - def get_backend(self, name): - # type: (str) -> Optional[Type[VersionControl]] - name = name.lower() - if name in self._registry: - return self._registry[name] - return None - - -vcs = VcsSupport() - - -class VersionControl(object): - name = '' - dirname = '' - repo_name = '' - # List of supported schemes for this Version Control - schemes = () # type: Tuple[str, ...] - # Iterable of environment variable names to pass to call_subprocess(). - unset_environ = () # type: Tuple[str, ...] - default_arg_rev = None # type: Optional[str] - - def __init__(self, url=None, *args, **kwargs): - self.url = url - super(VersionControl, self).__init__(*args, **kwargs) - - def get_base_rev_args(self, rev): - """ - Return the base revision arguments for a vcs command. - - Args: - rev: the name of a revision to install. Cannot be None. 
- """ - raise NotImplementedError - - def make_rev_options(self, rev=None, extra_args=None): - # type: (Optional[str], Optional[List[str]]) -> RevOptions - """ - Return a RevOptions object. - - Args: - rev: the name of a revision to install. - extra_args: a list of extra options. - """ - return RevOptions(self, rev, extra_args=extra_args) - - @classmethod - def _is_local_repository(cls, repo): - # type: (str) -> bool - """ - posix absolute paths start with os.path.sep, - win32 ones start with drive (like c:\\folder) - """ - drive, tail = os.path.splitdrive(repo) - return repo.startswith(os.path.sep) or bool(drive) - - def export(self, location): - """ - Export the repository at the url to the destination location - i.e. only download the files, without vcs informations - """ - raise NotImplementedError - - def get_netloc_and_auth(self, netloc, scheme): - """ - Parse the repository URL's netloc, and return the new netloc to use - along with auth information. - - Args: - netloc: the original repository URL netloc. - scheme: the repository URL's scheme without the vcs prefix. - - This is mainly for the Subversion class to override, so that auth - information can be provided via the --username and --password options - instead of through the URL. For other subclasses like Git without - such an option, auth information must stay in the URL. - - Returns: (netloc, (username, password)). - """ - return netloc, (None, None) - - def get_url_rev_and_auth(self, url): - # type: (str) -> Tuple[str, Optional[str], AuthInfo] - """ - Parse the repository URL to use, and return the URL, revision, - and auth info to use. - - Returns: (url, rev, (username, password)). - """ - scheme, netloc, path, query, frag = urllib_parse.urlsplit(url) - if '+' not in scheme: - raise ValueError( - "Sorry, {!r} is a malformed VCS url. " - "The format is <vcs>+<protocol>://<url>, " - "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url) - ) - # Remove the vcs prefix. 
- scheme = scheme.split('+', 1)[1] - netloc, user_pass = self.get_netloc_and_auth(netloc, scheme) - rev = None - if '@' in path: - path, rev = path.rsplit('@', 1) - url = urllib_parse.urlunsplit((scheme, netloc, path, query, '')) - return url, rev, user_pass - - def make_rev_args(self, username, password): - """ - Return the RevOptions "extra arguments" to use in obtain(). - """ - return [] - - def get_url_rev_options(self, url): - # type: (str) -> Tuple[str, RevOptions] - """ - Return the URL and RevOptions object to use in obtain() and in - some cases export(), as a tuple (url, rev_options). - """ - url, rev, user_pass = self.get_url_rev_and_auth(url) - username, password = user_pass - extra_args = self.make_rev_args(username, password) - rev_options = self.make_rev_options(rev, extra_args=extra_args) - - return url, rev_options - - def normalize_url(self, url): - # type: (str) -> str - """ - Normalize a URL for comparison by unquoting it and removing any - trailing slash. - """ - return urllib_parse.unquote(url).rstrip('/') - - def compare_urls(self, url1, url2): - # type: (str, str) -> bool - """ - Compare two repo URLs for identity, ignoring incidental differences. - """ - return (self.normalize_url(url1) == self.normalize_url(url2)) - - def fetch_new(self, dest, url, rev_options): - """ - Fetch a revision from a repository, in the case that this is the - first fetch from the repository. - - Args: - dest: the directory to fetch the repository to. - rev_options: a RevOptions object. - """ - raise NotImplementedError - - def switch(self, dest, url, rev_options): - """ - Switch the repo at ``dest`` to point to ``URL``. - - Args: - rev_options: a RevOptions object. - """ - raise NotImplementedError - - def update(self, dest, url, rev_options): - """ - Update an already-existing repo to the given ``rev_options``. - - Args: - rev_options: a RevOptions object. 
- """ - raise NotImplementedError - - def is_commit_id_equal(self, dest, name): - """ - Return whether the id of the current commit equals the given name. - - Args: - dest: the repository directory. - name: a string name. - """ - raise NotImplementedError - - def obtain(self, dest): - # type: (str) -> None - """ - Install or update in editable mode the package represented by this - VersionControl object. - - Args: - dest: the repository directory in which to install or update. - """ - url, rev_options = self.get_url_rev_options(self.url) - - if not os.path.exists(dest): - self.fetch_new(dest, url, rev_options) - return - - rev_display = rev_options.to_display() - if self.is_repository_directory(dest): - existing_url = self.get_remote_url(dest) - if self.compare_urls(existing_url, url): - logger.debug( - '%s in %s exists, and has correct URL (%s)', - self.repo_name.title(), - display_path(dest), - url, - ) - if not self.is_commit_id_equal(dest, rev_options.rev): - logger.info( - 'Updating %s %s%s', - display_path(dest), - self.repo_name, - rev_display, - ) - self.update(dest, url, rev_options) - else: - logger.info('Skipping because already up-to-date.') - return - - logger.warning( - '%s %s in %s exists with URL %s', - self.name, - self.repo_name, - display_path(dest), - existing_url, - ) - prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', - ('s', 'i', 'w', 'b')) - else: - logger.warning( - 'Directory %s already exists, and is not a %s %s.', - dest, - self.name, - self.repo_name, - ) - # https://github.com/python/mypy/issues/1174 - prompt = ('(i)gnore, (w)ipe, (b)ackup ', # type: ignore - ('i', 'w', 'b')) - - logger.warning( - 'The plan is to install the %s repository %s', - self.name, - url, - ) - response = ask_path_exists('What to do? 
%s' % prompt[0], prompt[1]) - - if response == 'a': - sys.exit(-1) - - if response == 'w': - logger.warning('Deleting %s', display_path(dest)) - rmtree(dest) - self.fetch_new(dest, url, rev_options) - return - - if response == 'b': - dest_dir = backup_dir(dest) - logger.warning( - 'Backing up %s to %s', display_path(dest), dest_dir, - ) - shutil.move(dest, dest_dir) - self.fetch_new(dest, url, rev_options) - return - - # Do nothing if the response is "i". - if response == 's': - logger.info( - 'Switching %s %s to %s%s', - self.repo_name, - display_path(dest), - url, - rev_display, - ) - self.switch(dest, url, rev_options) - - def unpack(self, location): - # type: (str) -> None - """ - Clean up current location and download the url repository - (and vcs infos) into location - """ - if os.path.exists(location): - rmtree(location) - self.obtain(location) - - @classmethod - def get_src_requirement(cls, location, project_name): - """ - Return a string representing the requirement needed to - redownload the files currently present in location, something - like: - {repository_url}@{revision}#egg={project_name}-{version_identifier} - """ - raise NotImplementedError - - @classmethod - def get_remote_url(cls, location): - """ - Return the url used at location - - Raises RemoteNotFoundError if the repository does not have a remote - url configured. - """ - raise NotImplementedError - - @classmethod - def get_revision(cls, location): - """ - Return the current commit id of the files at the given location. - """ - raise NotImplementedError - - @classmethod - def run_command( - cls, - cmd, # type: List[str] - show_stdout=True, # type: bool - cwd=None, # type: Optional[str] - on_returncode='raise', # type: str - extra_ok_returncodes=None, # type: Optional[Iterable[int]] - command_desc=None, # type: Optional[str] - extra_environ=None, # type: Optional[Mapping[str, Any]] - spinner=None # type: Optional[SpinnerInterface] - ): - # type: (...) 
-> Optional[Text] - """ - Run a VCS subcommand - This is simply a wrapper around call_subprocess that adds the VCS - command name, and checks that the VCS is available - """ - cmd = [cls.name] + cmd - try: - return call_subprocess(cmd, show_stdout, cwd, - on_returncode=on_returncode, - extra_ok_returncodes=extra_ok_returncodes, - command_desc=command_desc, - extra_environ=extra_environ, - unset_environ=cls.unset_environ, - spinner=spinner) - except OSError as e: - # errno.ENOENT = no such file or directory - # In other words, the VCS executable isn't available - if e.errno == errno.ENOENT: - raise BadCommand( - 'Cannot find command %r - do you have ' - '%r installed and in your ' - 'PATH?' % (cls.name, cls.name)) - else: - raise # re-raise exception if a different error occurred - - @classmethod - def is_repository_directory(cls, path): - # type: (str) -> bool - """ - Return whether a directory path is a repository directory. - """ - logger.debug('Checking in %s for %s (%s)...', - path, cls.dirname, cls.name) - return os.path.exists(os.path.join(path, cls.dirname)) - - @classmethod - def controls_location(cls, location): - # type: (str) -> bool - """ - Check if a location is controlled by the vcs. - It is meant to be overridden to implement smarter detection - mechanisms for specific vcs. - - This can do more than is_repository_directory() alone. For example, - the Git override checks that Git is actually available. - """ - return cls.is_repository_directory(location) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/bazaar.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/bazaar.py index 4c6ac79d..94408c52 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/bazaar.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/bazaar.py @@ -1,3 +1,6 @@ +# The following comment should be removed at some point in the future. 
+# mypy: disallow-untyped-defs=False + from __future__ import absolute_import import logging @@ -5,12 +8,17 @@ import os from pip._vendor.six.moves.urllib import parse as urllib_parse -from pip._internal.download import path_to_url -from pip._internal.utils.misc import ( - display_path, make_vcs_requirement_url, rmtree, -) -from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.vcs import VersionControl, vcs +from pip._internal.utils.misc import display_path, rmtree +from pip._internal.utils.subprocess import make_command +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs.versioncontrol import VersionControl, vcs + +if MYPY_CHECK_RUNNING: + from typing import Optional, Tuple + from pip._internal.utils.misc import HiddenText + from pip._internal.vcs.versioncontrol import AuthInfo, RevOptions + logger = logging.getLogger(__name__) @@ -24,17 +32,19 @@ class Bazaar(VersionControl): 'bzr+lp', ) - def __init__(self, url=None, *args, **kwargs): - super(Bazaar, self).__init__(url, *args, **kwargs) + def __init__(self, *args, **kwargs): + super(Bazaar, self).__init__(*args, **kwargs) # This is only needed for python <2.7.5 # Register lp but do not expose as a scheme to support bzr+lp. 
if getattr(urllib_parse, 'uses_fragment', None): urllib_parse.uses_fragment.extend(['lp']) - def get_base_rev_args(self, rev): + @staticmethod + def get_base_rev_args(rev): return ['-r', rev] - def export(self, location): + def export(self, location, url): + # type: (str, HiddenText) -> None """ Export the Bazaar repository at the url to the destination location """ @@ -42,15 +52,13 @@ class Bazaar(VersionControl): if os.path.exists(location): rmtree(location) - with TempDirectory(kind="export") as temp_dir: - self.unpack(temp_dir.path) - - self.run_command( - ['export', location], - cwd=temp_dir.path, show_stdout=False, - ) + url, rev_options = self.get_url_rev_options(url) + self.run_command( + make_command('export', location, url, rev_options.to_args()) + ) def fetch_new(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None rev_display = rev_options.to_display() logger.info( 'Checking out %s%s to %s', @@ -58,26 +66,32 @@ class Bazaar(VersionControl): rev_display, display_path(dest), ) - cmd_args = ['branch', '-q'] + rev_options.to_args() + [url, dest] + cmd_args = ( + make_command('branch', '-q', rev_options.to_args(), url, dest) + ) self.run_command(cmd_args) def switch(self, dest, url, rev_options): - self.run_command(['switch', url], cwd=dest) + # type: (str, HiddenText, RevOptions) -> None + self.run_command(make_command('switch', url), cwd=dest) def update(self, dest, url, rev_options): - cmd_args = ['pull', '-q'] + rev_options.to_args() + # type: (str, HiddenText, RevOptions) -> None + cmd_args = make_command('pull', '-q', rev_options.to_args()) self.run_command(cmd_args, cwd=dest) - def get_url_rev_and_auth(self, url): + @classmethod + def get_url_rev_and_auth(cls, url): + # type: (str) -> Tuple[str, Optional[str], AuthInfo] # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it - url, rev, user_pass = super(Bazaar, self).get_url_rev_and_auth(url) + url, rev, user_pass = super(Bazaar, cls).get_url_rev_and_auth(url) 
if url.startswith('ssh://'): url = 'bzr+' + url return url, rev, user_pass @classmethod def get_remote_url(cls, location): - urls = cls.run_command(['info'], show_stdout=False, cwd=location) + urls = cls.run_command(['info'], cwd=location) for line in urls.splitlines(): line = line.strip() for x in ('checkout of branch: ', @@ -92,21 +106,12 @@ class Bazaar(VersionControl): @classmethod def get_revision(cls, location): revision = cls.run_command( - ['revno'], show_stdout=False, cwd=location, + ['revno'], cwd=location, ) return revision.splitlines()[-1] @classmethod - def get_src_requirement(cls, location, project_name): - repo = cls.get_remote_url(location) - if not repo: - return None - if not repo.lower().startswith('bzr:'): - repo = 'bzr+' + repo - current_rev = cls.get_revision(location) - return make_vcs_requirement_url(repo, current_rev, project_name) - - def is_commit_id_equal(self, dest, name): + def is_commit_id_equal(cls, dest, name): """Always assume the versions don't match""" return False diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/git.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/git.py index dd2bd61e..a9c7fb66 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/git.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/git.py @@ -1,3 +1,6 @@ +# The following comment should be removed at some point in the future. 
+# mypy: disallow-untyped-defs=False + from __future__ import absolute_import import logging @@ -8,13 +11,23 @@ from pip._vendor.packaging.version import parse as parse_version from pip._vendor.six.moves.urllib import parse as urllib_parse from pip._vendor.six.moves.urllib import request as urllib_request -from pip._internal.exceptions import BadCommand -from pip._internal.utils.compat import samefile -from pip._internal.utils.misc import ( - display_path, make_vcs_requirement_url, redact_password_from_url, -) +from pip._internal.exceptions import BadCommand, SubProcessError +from pip._internal.utils.misc import display_path, hide_url +from pip._internal.utils.subprocess import make_command from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.vcs import RemoteNotFoundError, VersionControl, vcs +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.vcs.versioncontrol import ( + RemoteNotFoundError, + VersionControl, + find_path_to_setup_from_repo_root, + vcs, +) + +if MYPY_CHECK_RUNNING: + from typing import Optional, Tuple + from pip._internal.utils.misc import HiddenText + from pip._internal.vcs.versioncontrol import AuthInfo, RevOptions + urlsplit = urllib_parse.urlsplit urlunsplit = urllib_parse.urlunsplit @@ -23,7 +36,7 @@ urlunsplit = urllib_parse.urlunsplit logger = logging.getLogger(__name__) -HASH_REGEX = re.compile('[a-fA-F0-9]{40}') +HASH_REGEX = re.compile('^[a-fA-F0-9]{40}$') def looks_like_hash(sha): @@ -42,44 +55,42 @@ class Git(VersionControl): unset_environ = ('GIT_DIR', 'GIT_WORK_TREE') default_arg_rev = 'HEAD' - def __init__(self, url=None, *args, **kwargs): - - # Works around an apparent Git bug - # (see https://article.gmane.org/gmane.comp.version-control.git/146500) - if url: - scheme, netloc, path, query, fragment = urlsplit(url) - if scheme.endswith('file'): - initial_slashes = path[:-len(path.lstrip('/'))] - newpath = ( - initial_slashes + - urllib_request.url2pathname(path) - .replace('\\', 
'/').lstrip('/') - ) - url = urlunsplit((scheme, netloc, newpath, query, fragment)) - after_plus = scheme.find('+') + 1 - url = scheme[:after_plus] + urlunsplit( - (scheme[after_plus:], netloc, newpath, query, fragment), - ) - - super(Git, self).__init__(url, *args, **kwargs) - - def get_base_rev_args(self, rev): + @staticmethod + def get_base_rev_args(rev): return [rev] + def is_immutable_rev_checkout(self, url, dest): + # type: (str, str) -> bool + _, rev_options = self.get_url_rev_options(hide_url(url)) + if not rev_options.rev: + return False + if not self.is_commit_id_equal(dest, rev_options.rev): + # the current commit is different from rev, + # which means rev was something else than a commit hash + return False + # return False in the rare case rev is both a commit hash + # and a tag or a branch; we don't want to cache in that case + # because that branch/tag could point to something else in the future + is_tag_or_branch = bool( + self.get_revision_sha(dest, rev_options.rev)[0] + ) + return not is_tag_or_branch + def get_git_version(self): VERSION_PFX = 'git version ' - version = self.run_command(['version'], show_stdout=False) + version = self.run_command(['version']) if version.startswith(VERSION_PFX): version = version[len(VERSION_PFX):].split()[0] else: version = '' - # get first 3 positions of the git version becasue + # get first 3 positions of the git version because # on windows it is x.y.z.windows.t, and this parses as # LegacyVersion which always smaller than a Version. version = '.'.join(version.split('.')[:3]) return parse_version(version) - def get_current_branch(self, location): + @classmethod + def get_current_branch(cls, location): """ Return the current branch, or None if HEAD isn't at a branch (e.g. detached HEAD). @@ -89,8 +100,8 @@ class Git(VersionControl): # command to exit with status code 1 instead of 128 in this case # and to suppress the message to stderr. 
args = ['symbolic-ref', '-q', 'HEAD'] - output = self.run_command( - args, extra_ok_returncodes=(1, ), show_stdout=False, cwd=location, + output = cls.run_command( + args, extra_ok_returncodes=(1, ), cwd=location, ) ref = output.strip() @@ -99,19 +110,21 @@ class Git(VersionControl): return None - def export(self, location): + def export(self, location, url): + # type: (str, HiddenText) -> None """Export the Git repository at the url to the destination location""" if not location.endswith('/'): location = location + '/' with TempDirectory(kind="export") as temp_dir: - self.unpack(temp_dir.path) + self.unpack(temp_dir.path, url=url) self.run_command( ['checkout-index', '-a', '-f', '--prefix', location], - show_stdout=False, cwd=temp_dir.path + cwd=temp_dir.path ) - def get_revision_sha(self, dest, rev): + @classmethod + def get_revision_sha(cls, dest, rev): """ Return (sha_or_none, is_branch), where sha_or_none is a commit hash if the revision names a remote branch or tag, otherwise None. @@ -121,8 +134,13 @@ class Git(VersionControl): rev: the revision name. """ # Pass rev to pre-filter the list. - output = self.run_command(['show-ref', rev], cwd=dest, - show_stdout=False, on_returncode='ignore') + + output = '' + try: + output = cls.run_command(['show-ref', rev], cwd=dest) + except SubProcessError: + pass + refs = {} for line in output.strip().splitlines(): try: @@ -145,7 +163,9 @@ class Git(VersionControl): return (sha, False) - def resolve_revision(self, dest, url, rev_options): + @classmethod + def resolve_revision(cls, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> RevOptions """ Resolve a revision to a new RevOptions object with the SHA1 of the branch, tag, or ref if found. @@ -154,7 +174,11 @@ class Git(VersionControl): rev_options: a RevOptions object. 
""" rev = rev_options.arg_rev - sha, is_branch = self.get_revision_sha(dest, rev) + # The arg_rev property's implementation for Git ensures that the + # rev return value is always non-None. + assert rev is not None + + sha, is_branch = cls.get_revision_sha(dest, rev) if sha is not None: rev_options = rev_options.make_new(sha) @@ -174,17 +198,18 @@ class Git(VersionControl): return rev_options # If it looks like a ref, we have to fetch it explicitly. - self.run_command( - ['fetch', '-q', url] + rev_options.to_args(), + cls.run_command( + make_command('fetch', '-q', url, rev_options.to_args()), cwd=dest, ) # Change the revision to the SHA of the ref we fetched - sha = self.get_revision(dest, rev='FETCH_HEAD') + sha = cls.get_revision(dest, rev='FETCH_HEAD') rev_options = rev_options.make_new(sha) return rev_options - def is_commit_id_equal(self, dest, name): + @classmethod + def is_commit_id_equal(cls, dest, name): """ Return whether the current commit hash equals the given name. @@ -196,15 +221,13 @@ class Git(VersionControl): # Then avoid an unnecessary subprocess call. return False - return self.get_revision(dest) == name + return cls.get_revision(dest) == name def fetch_new(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None rev_display = rev_options.to_display() - logger.info( - 'Cloning %s%s to %s', redact_password_from_url(url), - rev_display, display_path(dest), - ) - self.run_command(['clone', '-q', url, dest]) + logger.info('Cloning %s%s to %s', url, rev_display, display_path(dest)) + self.run_command(make_command('clone', '-q', url, dest)) if rev_options.rev: # Then a specific revision was requested. @@ -214,7 +237,9 @@ class Git(VersionControl): # Only do a checkout if the current commit id doesn't match # the requested revision. 
if not self.is_commit_id_equal(dest, rev_options.rev): - cmd_args = ['checkout', '-q'] + rev_options.to_args() + cmd_args = make_command( + 'checkout', '-q', rev_options.to_args(), + ) self.run_command(cmd_args, cwd=dest) elif self.get_current_branch(dest) != branch_name: # Then a specific branch was requested, and that branch @@ -229,13 +254,18 @@ class Git(VersionControl): self.update_submodules(dest) def switch(self, dest, url, rev_options): - self.run_command(['config', 'remote.origin.url', url], cwd=dest) - cmd_args = ['checkout', '-q'] + rev_options.to_args() + # type: (str, HiddenText, RevOptions) -> None + self.run_command( + make_command('config', 'remote.origin.url', url), + cwd=dest, + ) + cmd_args = make_command('checkout', '-q', rev_options.to_args()) self.run_command(cmd_args, cwd=dest) self.update_submodules(dest) def update(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None # First fetch changes from the default remote if self.get_git_version() >= parse_version('1.9.0'): # fetch tags in addition to everything else @@ -244,7 +274,7 @@ class Git(VersionControl): self.run_command(['fetch', '-q'], cwd=dest) # Then reset to wanted revision (maybe even origin/master) rev_options = self.resolve_revision(dest, url, rev_options) - cmd_args = ['reset', '--hard', '-q'] + rev_options.to_args() + cmd_args = make_command('reset', '--hard', '-q', rev_options.to_args()) self.run_command(cmd_args, cwd=dest) #: update submodules self.update_submodules(dest) @@ -261,7 +291,7 @@ class Git(VersionControl): # exits with return code 1 if there are no matching lines. 
stdout = cls.run_command( ['config', '--get-regexp', r'remote\..*\.url'], - extra_ok_returncodes=(1, ), show_stdout=False, cwd=location, + extra_ok_returncodes=(1, ), cwd=location, ) remotes = stdout.splitlines() try: @@ -281,89 +311,87 @@ class Git(VersionControl): if rev is None: rev = 'HEAD' current_rev = cls.run_command( - ['rev-parse', rev], show_stdout=False, cwd=location, + ['rev-parse', rev], cwd=location, ) return current_rev.strip() @classmethod - def _get_subdirectory(cls, location): - """Return the relative path of setup.py to the git repo root.""" + def get_subdirectory(cls, location): + """ + Return the path to setup.py, relative to the repo root. + Return None if setup.py is in the repo root. + """ # find the repo root - git_dir = cls.run_command(['rev-parse', '--git-dir'], - show_stdout=False, cwd=location).strip() + git_dir = cls.run_command( + ['rev-parse', '--git-dir'], + cwd=location).strip() if not os.path.isabs(git_dir): git_dir = os.path.join(location, git_dir) - root_dir = os.path.join(git_dir, '..') - # find setup.py - orig_location = location - while not os.path.exists(os.path.join(location, 'setup.py')): - last_location = location - location = os.path.dirname(location) - if location == last_location: - # We've traversed up to the root of the filesystem without - # finding setup.py - logger.warning( - "Could not find setup.py for directory %s (tried all " - "parent directories)", - orig_location, - ) - return None - # relative path of setup.py to repo root - if samefile(root_dir, location): - return None - return os.path.relpath(location, root_dir) + repo_root = os.path.abspath(os.path.join(git_dir, '..')) + return find_path_to_setup_from_repo_root(location, repo_root) @classmethod - def get_src_requirement(cls, location, project_name): - repo = cls.get_remote_url(location) - if not repo.lower().startswith('git:'): - repo = 'git+' + repo - current_rev = cls.get_revision(location) - subdir = cls._get_subdirectory(location) - req = 
make_vcs_requirement_url(repo, current_rev, project_name, - subdir=subdir) - - return req - - def get_url_rev_and_auth(self, url): + def get_url_rev_and_auth(cls, url): + # type: (str) -> Tuple[str, Optional[str], AuthInfo] """ Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'. That's required because although they use SSH they sometimes don't work with a ssh:// scheme (e.g. GitHub). But we need a scheme for parsing. Hence we remove it again afterwards and return it as a stub. """ + # Works around an apparent Git bug + # (see https://article.gmane.org/gmane.comp.version-control.git/146500) + scheme, netloc, path, query, fragment = urlsplit(url) + if scheme.endswith('file'): + initial_slashes = path[:-len(path.lstrip('/'))] + newpath = ( + initial_slashes + + urllib_request.url2pathname(path) + .replace('\\', '/').lstrip('/') + ) + url = urlunsplit((scheme, netloc, newpath, query, fragment)) + after_plus = scheme.find('+') + 1 + url = scheme[:after_plus] + urlunsplit( + (scheme[after_plus:], netloc, newpath, query, fragment), + ) + if '://' not in url: assert 'file:' not in url url = url.replace('git+', 'git+ssh://') - url, rev, user_pass = super(Git, self).get_url_rev_and_auth(url) + url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url) url = url.replace('ssh://', '') else: - url, rev, user_pass = super(Git, self).get_url_rev_and_auth(url) + url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url) return url, rev, user_pass - def update_submodules(self, location): + @classmethod + def update_submodules(cls, location): if not os.path.exists(os.path.join(location, '.gitmodules')): return - self.run_command( + cls.run_command( ['submodule', 'update', '--init', '--recursive', '-q'], cwd=location, ) @classmethod - def controls_location(cls, location): - if super(Git, cls).controls_location(location): - return True + def get_repository_root(cls, location): + loc = super(Git, cls).get_repository_root(location) + if loc: + return loc try: 
- r = cls.run_command(['rev-parse'], - cwd=location, - show_stdout=False, - on_returncode='ignore') - return not r + r = cls.run_command( + ['rev-parse', '--show-toplevel'], + cwd=location, + log_failed_cmd=False, + ) except BadCommand: logger.debug("could not determine if %s is under git control " "because git is not available", location) - return False + return None + except SubProcessError: + return None + return os.path.normpath(r.rstrip('\r\n')) vcs.register(Git) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/mercurial.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/mercurial.py index 26e75dee..69763fea 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/mercurial.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/mercurial.py @@ -1,3 +1,6 @@ +# The following comment should be removed at some point in the future. +# mypy: disallow-untyped-defs=False + from __future__ import absolute_import import logging @@ -5,10 +8,22 @@ import os from pip._vendor.six.moves import configparser -from pip._internal.download import path_to_url -from pip._internal.utils.misc import display_path, make_vcs_requirement_url +from pip._internal.exceptions import BadCommand, SubProcessError +from pip._internal.utils.misc import display_path +from pip._internal.utils.subprocess import make_command from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.vcs import VersionControl, vcs +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs.versioncontrol import ( + VersionControl, + find_path_to_setup_from_repo_root, + vcs, +) + +if MYPY_CHECK_RUNNING: + from pip._internal.utils.misc import HiddenText + from pip._internal.vcs.versioncontrol import RevOptions + logger = logging.getLogger(__name__) @@ -17,21 +32,26 @@ class Mercurial(VersionControl): 
name = 'hg' dirname = '.hg' repo_name = 'clone' - schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http') + schemes = ( + 'hg', 'hg+file', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http', + ) - def get_base_rev_args(self, rev): + @staticmethod + def get_base_rev_args(rev): return [rev] - def export(self, location): + def export(self, location, url): + # type: (str, HiddenText) -> None """Export the Hg repository at the url to the destination location""" with TempDirectory(kind="export") as temp_dir: - self.unpack(temp_dir.path) + self.unpack(temp_dir.path, url=url) self.run_command( - ['archive', location], show_stdout=False, cwd=temp_dir.path + ['archive', location], cwd=temp_dir.path ) def fetch_new(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None rev_display = rev_options.to_display() logger.info( 'Cloning hg %s%s to %s', @@ -39,16 +59,19 @@ class Mercurial(VersionControl): rev_display, display_path(dest), ) - self.run_command(['clone', '--noupdate', '-q', url, dest]) - cmd_args = ['update', '-q'] + rev_options.to_args() - self.run_command(cmd_args, cwd=dest) + self.run_command(make_command('clone', '--noupdate', '-q', url, dest)) + self.run_command( + make_command('update', '-q', rev_options.to_args()), + cwd=dest, + ) def switch(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None repo_config = os.path.join(dest, self.dirname, 'hgrc') - config = configparser.SafeConfigParser() + config = configparser.RawConfigParser() try: config.read(repo_config) - config.set('paths', 'default', url) + config.set('paths', 'default', url.secret) with open(repo_config, 'w') as config_file: config.write(config_file) except (OSError, configparser.NoSectionError) as exc: @@ -56,48 +79,80 @@ class Mercurial(VersionControl): 'Could not switch Mercurial repository to %s: %s', url, exc, ) else: - cmd_args = ['update', '-q'] + rev_options.to_args() + cmd_args = make_command('update', '-q', rev_options.to_args()) 
self.run_command(cmd_args, cwd=dest) def update(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None self.run_command(['pull', '-q'], cwd=dest) - cmd_args = ['update', '-q'] + rev_options.to_args() + cmd_args = make_command('update', '-q', rev_options.to_args()) self.run_command(cmd_args, cwd=dest) @classmethod def get_remote_url(cls, location): url = cls.run_command( ['showconfig', 'paths.default'], - show_stdout=False, cwd=location).strip() + cwd=location).strip() if cls._is_local_repository(url): url = path_to_url(url) return url.strip() @classmethod def get_revision(cls, location): + """ + Return the repository-local changeset revision number, as an integer. + """ current_revision = cls.run_command( - ['parents', '--template={rev}'], - show_stdout=False, cwd=location).strip() + ['parents', '--template={rev}'], cwd=location).strip() return current_revision @classmethod - def get_revision_hash(cls, location): + def get_requirement_revision(cls, location): + """ + Return the changeset identification hash, as a 40-character + hexadecimal string + """ current_rev_hash = cls.run_command( ['parents', '--template={node}'], - show_stdout=False, cwd=location).strip() + cwd=location).strip() return current_rev_hash @classmethod - def get_src_requirement(cls, location, project_name): - repo = cls.get_remote_url(location) - if not repo.lower().startswith('hg:'): - repo = 'hg+' + repo - current_rev_hash = cls.get_revision_hash(location) - return make_vcs_requirement_url(repo, current_rev_hash, project_name) - - def is_commit_id_equal(self, dest, name): + def is_commit_id_equal(cls, dest, name): """Always assume the versions don't match""" return False + @classmethod + def get_subdirectory(cls, location): + """ + Return the path to setup.py, relative to the repo root. + Return None if setup.py is in the repo root. 
+ """ + # find the repo root + repo_root = cls.run_command( + ['root'], cwd=location).strip() + if not os.path.isabs(repo_root): + repo_root = os.path.abspath(os.path.join(location, repo_root)) + return find_path_to_setup_from_repo_root(location, repo_root) + + @classmethod + def get_repository_root(cls, location): + loc = super(Mercurial, cls).get_repository_root(location) + if loc: + return loc + try: + r = cls.run_command( + ['root'], + cwd=location, + log_failed_cmd=False, + ) + except BadCommand: + logger.debug("could not determine if %s is under hg control " + "because hg is not available", location) + return None + except SubProcessError: + return None + return os.path.normpath(r.rstrip('\r\n')) + vcs.register(Mercurial) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/subversion.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/subversion.py index 42ac5ac3..14825f79 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/subversion.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/subversion.py @@ -1,3 +1,6 @@ +# The following comment should be removed at some point in the future. 
+# mypy: disallow-untyped-defs=False + from __future__ import absolute_import import logging @@ -6,9 +9,14 @@ import re from pip._internal.utils.logging import indent_log from pip._internal.utils.misc import ( - display_path, make_vcs_requirement_url, rmtree, split_auth_from_netloc, + display_path, + is_console_interactive, + rmtree, + split_auth_from_netloc, ) -from pip._internal.vcs import VersionControl, vcs +from pip._internal.utils.subprocess import make_command +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.vcs.versioncontrol import VersionControl, vcs _svn_xml_url_re = re.compile('url="([^"]+)"') _svn_rev_re = re.compile(r'committed-rev="(\d+)"') @@ -16,6 +24,13 @@ _svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"') _svn_info_xml_url_re = re.compile(r'<url>(.*)</url>') +if MYPY_CHECK_RUNNING: + from typing import Optional, Tuple + from pip._internal.utils.subprocess import CommandArgs + from pip._internal.utils.misc import HiddenText + from pip._internal.vcs.versioncontrol import AuthInfo, RevOptions + + logger = logging.getLogger(__name__) @@ -25,40 +40,13 @@ class Subversion(VersionControl): repo_name = 'checkout' schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn') - def get_base_rev_args(self, rev): - return ['-r', rev] - - def export(self, location): - """Export the svn repository at the url to the destination location""" - url, rev_options = self.get_url_rev_options(self.url) - - logger.info('Exporting svn repository %s to %s', url, location) - with indent_log(): - if os.path.exists(location): - # Subversion doesn't like to check out over an existing - # directory --force fixes this, but was only added in svn 1.5 - rmtree(location) - cmd_args = ['export'] + rev_options.to_args() + [url, location] - self.run_command(cmd_args, show_stdout=False) - - def fetch_new(self, dest, url, rev_options): - rev_display = rev_options.to_display() - logger.info( - 'Checking out %s%s to %s', - url, - rev_display, - 
display_path(dest), - ) - cmd_args = ['checkout', '-q'] + rev_options.to_args() + [url, dest] - self.run_command(cmd_args) - - def switch(self, dest, url, rev_options): - cmd_args = ['switch'] + rev_options.to_args() + [url, dest] - self.run_command(cmd_args) + @classmethod + def should_add_vcs_url_prefix(cls, remote_url): + return True - def update(self, dest, url, rev_options): - cmd_args = ['update'] + rev_options.to_args() + [dest] - self.run_command(cmd_args) + @staticmethod + def get_base_rev_args(rev): + return ['-r', rev] @classmethod def get_revision(cls, location): @@ -68,7 +56,7 @@ class Subversion(VersionControl): # Note: taken from setuptools.command.egg_info revision = 0 - for base, dirs, files in os.walk(location): + for base, dirs, _ in os.walk(location): if cls.dirname not in dirs: dirs[:] = [] continue # no sense walking uncontrolled subdirs @@ -88,7 +76,8 @@ class Subversion(VersionControl): revision = max(revision, localrev) return revision - def get_netloc_and_auth(self, netloc, scheme): + @classmethod + def get_netloc_and_auth(cls, netloc, scheme): """ This override allows the auth information to be passed to svn via the --username and --password options instead of via the URL. @@ -96,20 +85,23 @@ class Subversion(VersionControl): if scheme == 'ssh': # The --username and --password options can't be used for # svn+ssh URLs, so keep the auth information in the URL. 
- return super(Subversion, self).get_netloc_and_auth( - netloc, scheme) + return super(Subversion, cls).get_netloc_and_auth(netloc, scheme) return split_auth_from_netloc(netloc) - def get_url_rev_and_auth(self, url): + @classmethod + def get_url_rev_and_auth(cls, url): + # type: (str) -> Tuple[str, Optional[str], AuthInfo] # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it - url, rev, user_pass = super(Subversion, self).get_url_rev_and_auth(url) + url, rev, user_pass = super(Subversion, cls).get_url_rev_and_auth(url) if url.startswith('ssh://'): url = 'svn+' + url return url, rev, user_pass - def make_rev_args(self, username, password): - extra_args = [] + @staticmethod + def make_rev_args(username, password): + # type: (Optional[str], Optional[HiddenText]) -> CommandArgs + extra_args = [] # type: CommandArgs if username: extra_args += ['--username', username] if password: @@ -140,7 +132,7 @@ class Subversion(VersionControl): @classmethod def _get_svn_url_rev(cls, location): - from pip._internal.exceptions import InstallationError + from pip._internal.exceptions import SubProcessError entries_path = os.path.join(location, cls.dirname, 'entries') if os.path.exists(entries_path): @@ -159,21 +151,26 @@ class Subversion(VersionControl): elif data.startswith('<?xml'): match = _svn_xml_url_re.search(data) if not match: - raise ValueError('Badly formatted data: %r' % data) + raise ValueError( + 'Badly formatted data: {data!r}'.format(**locals())) url = match.group(1) # get repository URL revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0] else: try: # subversion >= 1.7 + # Note that using get_remote_call_options is not necessary here + # because `svn info` is being run against a local directory. + # We don't need to worry about making sure interactive mode + # is being used to prompt for passwords, because passwords + # are only potentially needed for remote server requests. 
xml = cls.run_command( ['info', '--xml', location], - show_stdout=False, ) url = _svn_info_xml_url_re.search(xml).group(1) revs = [ int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml) ] - except InstallationError: + except SubProcessError: url, revs = None, [] if revs: @@ -184,17 +181,154 @@ class Subversion(VersionControl): return url, rev @classmethod - def get_src_requirement(cls, location, project_name): - repo = cls.get_remote_url(location) - if repo is None: - return None - repo = 'svn+' + repo - rev = cls.get_revision(location) - return make_vcs_requirement_url(repo, rev, project_name) - - def is_commit_id_equal(self, dest, name): + def is_commit_id_equal(cls, dest, name): """Always assume the versions don't match""" return False + def __init__(self, use_interactive=None): + # type: (bool) -> None + if use_interactive is None: + use_interactive = is_console_interactive() + self.use_interactive = use_interactive + + # This member is used to cache the fetched version of the current + # ``svn`` client. + # Special value definitions: + # None: Not evaluated yet. + # Empty tuple: Could not parse version. + self._vcs_version = None # type: Optional[Tuple[int, ...]] + + super(Subversion, self).__init__() + + def call_vcs_version(self): + # type: () -> Tuple[int, ...] + """Query the version of the currently installed Subversion client. + + :return: A tuple containing the parts of the version information or + ``()`` if the version returned from ``svn`` could not be parsed. + :raises: BadCommand: If ``svn`` is not installed. 
+ """ + # Example versions: + # svn, version 1.10.3 (r1842928) + # compiled Feb 25 2019, 14:20:39 on x86_64-apple-darwin17.0.0 + # svn, version 1.7.14 (r1542130) + # compiled Mar 28 2018, 08:49:13 on x86_64-pc-linux-gnu + version_prefix = 'svn, version ' + version = self.run_command(['--version']) + + if not version.startswith(version_prefix): + return () + + version = version[len(version_prefix):].split()[0] + version_list = version.split('.') + try: + parsed_version = tuple(map(int, version_list)) + except ValueError: + return () + + return parsed_version + + def get_vcs_version(self): + # type: () -> Tuple[int, ...] + """Return the version of the currently installed Subversion client. + + If the version of the Subversion client has already been queried, + a cached value will be used. + + :return: A tuple containing the parts of the version information or + ``()`` if the version returned from ``svn`` could not be parsed. + :raises: BadCommand: If ``svn`` is not installed. + """ + if self._vcs_version is not None: + # Use cached version, if available. + # If parsing the version failed previously (empty tuple), + # do not attempt to parse it again. + return self._vcs_version + + vcs_version = self.call_vcs_version() + self._vcs_version = vcs_version + return vcs_version + + def get_remote_call_options(self): + # type: () -> CommandArgs + """Return options to be used on calls to Subversion that contact the server. + + These options are applicable for the following ``svn`` subcommands used + in this class. + + - checkout + - export + - switch + - update + + :return: A list of command line arguments to pass to ``svn``. + """ + if not self.use_interactive: + # --non-interactive switch is available since Subversion 0.14.4. + # Subversion < 1.8 runs in interactive mode by default. + return ['--non-interactive'] + + svn_version = self.get_vcs_version() + # By default, Subversion >= 1.8 runs in non-interactive mode if + # stdin is not a TTY. 
Since that is how pip invokes SVN, in + # call_subprocess(), pip must pass --force-interactive to ensure + # the user can be prompted for a password, if required. + # SVN added the --force-interactive option in SVN 1.8. Since + # e.g. RHEL/CentOS 7, which is supported until 2024, ships with + # SVN 1.7, pip should continue to support SVN 1.7. Therefore, pip + # can't safely add the option if the SVN version is < 1.8 (or unknown). + if svn_version >= (1, 8): + return ['--force-interactive'] + + return [] + + def export(self, location, url): + # type: (str, HiddenText) -> None + """Export the svn repository at the url to the destination location""" + url, rev_options = self.get_url_rev_options(url) + + logger.info('Exporting svn repository %s to %s', url, location) + with indent_log(): + if os.path.exists(location): + # Subversion doesn't like to check out over an existing + # directory --force fixes this, but was only added in svn 1.5 + rmtree(location) + cmd_args = make_command( + 'export', self.get_remote_call_options(), + rev_options.to_args(), url, location, + ) + self.run_command(cmd_args) + + def fetch_new(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + rev_display = rev_options.to_display() + logger.info( + 'Checking out %s%s to %s', + url, + rev_display, + display_path(dest), + ) + cmd_args = make_command( + 'checkout', '-q', self.get_remote_call_options(), + rev_options.to_args(), url, dest, + ) + self.run_command(cmd_args) + + def switch(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + cmd_args = make_command( + 'switch', self.get_remote_call_options(), rev_options.to_args(), + url, dest, + ) + self.run_command(cmd_args) + + def update(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + cmd_args = make_command( + 'update', self.get_remote_call_options(), rev_options.to_args(), + dest, + ) + self.run_command(cmd_args) + vcs.register(Subversion) diff --git 
a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/versioncontrol.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/versioncontrol.py new file mode 100644 index 00000000..96f830f9 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/vcs/versioncontrol.py @@ -0,0 +1,811 @@ +"""Handles all VCS (version control) support""" + +from __future__ import absolute_import + +import errno +import logging +import os +import shutil +import subprocess +import sys + +from pip._vendor import pkg_resources +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip._internal.exceptions import ( + BadCommand, + InstallationError, + SubProcessError, +) +from pip._internal.utils.compat import console_to_str, samefile +from pip._internal.utils.logging import subprocess_logger +from pip._internal.utils.misc import ( + ask_path_exists, + backup_dir, + display_path, + hide_url, + hide_value, + rmtree, +) +from pip._internal.utils.subprocess import ( + format_command_args, + make_command, + make_subprocess_output_error, + reveal_command_args, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import get_url_scheme + +if MYPY_CHECK_RUNNING: + from typing import ( + Dict, Iterable, Iterator, List, Optional, Text, Tuple, + Type, Union, Mapping, Any + ) + from pip._internal.utils.misc import HiddenText + from pip._internal.utils.subprocess import CommandArgs + + AuthInfo = Tuple[Optional[str], Optional[str]] + + +__all__ = ['vcs'] + + +logger = logging.getLogger(__name__) + + +def is_url(name): + # type: (Union[str, Text]) -> bool + """ + Return true if the name looks like a URL. 
+ """ + scheme = get_url_scheme(name) + if scheme is None: + return False + return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes + + +def make_vcs_requirement_url(repo_url, rev, project_name, subdir=None): + # type: (str, str, str, Optional[str]) -> str + """ + Return the URL for a VCS requirement. + + Args: + repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+"). + project_name: the (unescaped) project name. + """ + egg_project_name = pkg_resources.to_filename(project_name) + req = '{}@{}#egg={}'.format(repo_url, rev, egg_project_name) + if subdir: + req += '&subdirectory={}'.format(subdir) + + return req + + +def call_subprocess( + cmd, # type: Union[List[str], CommandArgs] + cwd=None, # type: Optional[str] + extra_environ=None, # type: Optional[Mapping[str, Any]] + extra_ok_returncodes=None, # type: Optional[Iterable[int]] + log_failed_cmd=True # type: Optional[bool] +): + # type: (...) -> Text + """ + Args: + extra_ok_returncodes: an iterable of integer return codes that are + acceptable, in addition to 0. Defaults to None, which means []. + log_failed_cmd: if false, failed commands are not logged, + only raised. + """ + if extra_ok_returncodes is None: + extra_ok_returncodes = [] + + # log the subprocess output at DEBUG level. + log_subprocess = subprocess_logger.debug + + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + + # Whether the subprocess will be visible in the console. + showing_subprocess = True + + command_desc = format_command_args(cmd) + try: + proc = subprocess.Popen( + # Convert HiddenText objects to the underlying str. + reveal_command_args(cmd), + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=cwd + ) + if proc.stdin: + proc.stdin.close() + except Exception as exc: + if log_failed_cmd: + subprocess_logger.critical( + "Error %s while executing command %s", exc, command_desc, + ) + raise + all_output = [] + while True: + # The "line" value is a unicode string in Python 2. 
+ line = None + if proc.stdout: + line = console_to_str(proc.stdout.readline()) + if not line: + break + line = line.rstrip() + all_output.append(line + '\n') + + # Show the line immediately. + log_subprocess(line) + try: + proc.wait() + finally: + if proc.stdout: + proc.stdout.close() + + proc_had_error = ( + proc.returncode and proc.returncode not in extra_ok_returncodes + ) + if proc_had_error: + if not showing_subprocess and log_failed_cmd: + # Then the subprocess streams haven't been logged to the + # console yet. + msg = make_subprocess_output_error( + cmd_args=cmd, + cwd=cwd, + lines=all_output, + exit_status=proc.returncode, + ) + subprocess_logger.error(msg) + exc_msg = ( + 'Command errored out with exit status {}: {} ' + 'Check the logs for full command output.' + ).format(proc.returncode, command_desc) + raise SubProcessError(exc_msg) + return ''.join(all_output) + + +def find_path_to_setup_from_repo_root(location, repo_root): + # type: (str, str) -> Optional[str] + """ + Find the path to `setup.py` by searching up the filesystem from `location`. + Return the path to `setup.py` relative to `repo_root`. + Return None if `setup.py` is in `repo_root` or cannot be found. + """ + # find setup.py + orig_location = location + while not os.path.exists(os.path.join(location, 'setup.py')): + last_location = location + location = os.path.dirname(location) + if location == last_location: + # We've traversed up to the root of the filesystem without + # finding setup.py + logger.warning( + "Could not find setup.py for directory %s (tried all " + "parent directories)", + orig_location, + ) + return None + + if samefile(repo_root, location): + return None + + return os.path.relpath(location, repo_root) + + +class RemoteNotFoundError(Exception): + pass + + +class RevOptions(object): + + """ + Encapsulates a VCS-specific revision to install, along with any VCS + install options. + + Instances of this class should be treated as if immutable. 
+ """ + + def __init__( + self, + vc_class, # type: Type[VersionControl] + rev=None, # type: Optional[str] + extra_args=None, # type: Optional[CommandArgs] + ): + # type: (...) -> None + """ + Args: + vc_class: a VersionControl subclass. + rev: the name of the revision to install. + extra_args: a list of extra options. + """ + if extra_args is None: + extra_args = [] + + self.extra_args = extra_args + self.rev = rev + self.vc_class = vc_class + self.branch_name = None # type: Optional[str] + + def __repr__(self): + # type: () -> str + return '<RevOptions {}: rev={!r}>'.format(self.vc_class.name, self.rev) + + @property + def arg_rev(self): + # type: () -> Optional[str] + if self.rev is None: + return self.vc_class.default_arg_rev + + return self.rev + + def to_args(self): + # type: () -> CommandArgs + """ + Return the VCS-specific command arguments. + """ + args = [] # type: CommandArgs + rev = self.arg_rev + if rev is not None: + args += self.vc_class.get_base_rev_args(rev) + args += self.extra_args + + return args + + def to_display(self): + # type: () -> str + if not self.rev: + return '' + + return ' (to revision {})'.format(self.rev) + + def make_new(self, rev): + # type: (str) -> RevOptions + """ + Make a copy of the current instance, but with a new rev. + + Args: + rev: the name of the revision for the new object. 
+ """ + return self.vc_class.make_rev_options(rev, extra_args=self.extra_args) + + +class VcsSupport(object): + _registry = {} # type: Dict[str, VersionControl] + schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn'] + + def __init__(self): + # type: () -> None + # Register more schemes with urlparse for various version control + # systems + urllib_parse.uses_netloc.extend(self.schemes) + # Python >= 2.7.4, 3.3 doesn't have uses_fragment + if getattr(urllib_parse, 'uses_fragment', None): + urllib_parse.uses_fragment.extend(self.schemes) + super(VcsSupport, self).__init__() + + def __iter__(self): + # type: () -> Iterator[str] + return self._registry.__iter__() + + @property + def backends(self): + # type: () -> List[VersionControl] + return list(self._registry.values()) + + @property + def dirnames(self): + # type: () -> List[str] + return [backend.dirname for backend in self.backends] + + @property + def all_schemes(self): + # type: () -> List[str] + schemes = [] # type: List[str] + for backend in self.backends: + schemes.extend(backend.schemes) + return schemes + + def register(self, cls): + # type: (Type[VersionControl]) -> None + if not hasattr(cls, 'name'): + logger.warning('Cannot register VCS %s', cls.__name__) + return + if cls.name not in self._registry: + self._registry[cls.name] = cls() + logger.debug('Registered VCS backend: %s', cls.name) + + def unregister(self, name): + # type: (str) -> None + if name in self._registry: + del self._registry[name] + + def get_backend_for_dir(self, location): + # type: (str) -> Optional[VersionControl] + """ + Return a VersionControl object if a repository of that type is found + at the given directory. 
+ """ + vcs_backends = {} + for vcs_backend in self._registry.values(): + repo_path = vcs_backend.get_repository_root(location) + if not repo_path: + continue + logger.debug('Determine that %s uses VCS: %s', + location, vcs_backend.name) + vcs_backends[repo_path] = vcs_backend + + if not vcs_backends: + return None + + # Choose the VCS in the inner-most directory. Since all repository + # roots found here would be either `location` or one of its + # parents, the longest path should have the most path components, + # i.e. the backend representing the inner-most repository. + inner_most_repo_path = max(vcs_backends, key=len) + return vcs_backends[inner_most_repo_path] + + def get_backend_for_scheme(self, scheme): + # type: (str) -> Optional[VersionControl] + """ + Return a VersionControl object or None. + """ + for vcs_backend in self._registry.values(): + if scheme in vcs_backend.schemes: + return vcs_backend + return None + + def get_backend(self, name): + # type: (str) -> Optional[VersionControl] + """ + Return a VersionControl object or None. + """ + name = name.lower() + return self._registry.get(name) + + +vcs = VcsSupport() + + +class VersionControl(object): + name = '' + dirname = '' + repo_name = '' + # List of supported schemes for this Version Control + schemes = () # type: Tuple[str, ...] + # Iterable of environment variable names to pass to call_subprocess(). + unset_environ = () # type: Tuple[str, ...] + default_arg_rev = None # type: Optional[str] + + @classmethod + def should_add_vcs_url_prefix(cls, remote_url): + # type: (str) -> bool + """ + Return whether the vcs prefix (e.g. "git+") should be added to a + repository's remote url when used in a requirement. + """ + return not remote_url.lower().startswith('{}:'.format(cls.name)) + + @classmethod + def get_subdirectory(cls, location): + # type: (str) -> Optional[str] + """ + Return the path to setup.py, relative to the repo root. + Return None if setup.py is in the repo root. 
+ """ + return None + + @classmethod + def get_requirement_revision(cls, repo_dir): + # type: (str) -> str + """ + Return the revision string that should be used in a requirement. + """ + return cls.get_revision(repo_dir) + + @classmethod + def get_src_requirement(cls, repo_dir, project_name): + # type: (str, str) -> Optional[str] + """ + Return the requirement string to use to redownload the files + currently at the given repository directory. + + Args: + project_name: the (unescaped) project name. + + The return value has a form similar to the following: + + {repository_url}@{revision}#egg={project_name} + """ + repo_url = cls.get_remote_url(repo_dir) + if repo_url is None: + return None + + if cls.should_add_vcs_url_prefix(repo_url): + repo_url = '{}+{}'.format(cls.name, repo_url) + + revision = cls.get_requirement_revision(repo_dir) + subdir = cls.get_subdirectory(repo_dir) + req = make_vcs_requirement_url(repo_url, revision, project_name, + subdir=subdir) + + return req + + @staticmethod + def get_base_rev_args(rev): + # type: (str) -> List[str] + """ + Return the base revision arguments for a vcs command. + + Args: + rev: the name of a revision to install. Cannot be None. + """ + raise NotImplementedError + + def is_immutable_rev_checkout(self, url, dest): + # type: (str, str) -> bool + """ + Return true if the commit hash checked out at dest matches + the revision in url. + + Always return False, if the VCS does not support immutable commit + hashes. + + This method does not check if there are local uncommitted changes + in dest after checkout, as pip currently has no use case for that. + """ + return False + + @classmethod + def make_rev_options(cls, rev=None, extra_args=None): + # type: (Optional[str], Optional[CommandArgs]) -> RevOptions + """ + Return a RevOptions object. + + Args: + rev: the name of a revision to install. + extra_args: a list of extra options. 
+ """ + return RevOptions(cls, rev, extra_args=extra_args) + + @classmethod + def _is_local_repository(cls, repo): + # type: (str) -> bool + """ + posix absolute paths start with os.path.sep, + win32 ones start with drive (like c:\\folder) + """ + drive, tail = os.path.splitdrive(repo) + return repo.startswith(os.path.sep) or bool(drive) + + def export(self, location, url): + # type: (str, HiddenText) -> None + """ + Export the repository at the url to the destination location + i.e. only download the files, without vcs informations + + :param url: the repository URL starting with a vcs prefix. + """ + raise NotImplementedError + + @classmethod + def get_netloc_and_auth(cls, netloc, scheme): + # type: (str, str) -> Tuple[str, Tuple[Optional[str], Optional[str]]] + """ + Parse the repository URL's netloc, and return the new netloc to use + along with auth information. + + Args: + netloc: the original repository URL netloc. + scheme: the repository URL's scheme without the vcs prefix. + + This is mainly for the Subversion class to override, so that auth + information can be provided via the --username and --password options + instead of through the URL. For other subclasses like Git without + such an option, auth information must stay in the URL. + + Returns: (netloc, (username, password)). + """ + return netloc, (None, None) + + @classmethod + def get_url_rev_and_auth(cls, url): + # type: (str) -> Tuple[str, Optional[str], AuthInfo] + """ + Parse the repository URL to use, and return the URL, revision, + and auth info to use. + + Returns: (url, rev, (username, password)). + """ + scheme, netloc, path, query, frag = urllib_parse.urlsplit(url) + if '+' not in scheme: + raise ValueError( + "Sorry, {!r} is a malformed VCS url. " + "The format is <vcs>+<protocol>://<url>, " + "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url) + ) + # Remove the vcs prefix. 
+ scheme = scheme.split('+', 1)[1] + netloc, user_pass = cls.get_netloc_and_auth(netloc, scheme) + rev = None + if '@' in path: + path, rev = path.rsplit('@', 1) + if not rev: + raise InstallationError( + "The URL {!r} has an empty revision (after @) " + "which is not supported. Include a revision after @ " + "or remove @ from the URL.".format(url) + ) + url = urllib_parse.urlunsplit((scheme, netloc, path, query, '')) + return url, rev, user_pass + + @staticmethod + def make_rev_args(username, password): + # type: (Optional[str], Optional[HiddenText]) -> CommandArgs + """ + Return the RevOptions "extra arguments" to use in obtain(). + """ + return [] + + def get_url_rev_options(self, url): + # type: (HiddenText) -> Tuple[HiddenText, RevOptions] + """ + Return the URL and RevOptions object to use in obtain() and in + some cases export(), as a tuple (url, rev_options). + """ + secret_url, rev, user_pass = self.get_url_rev_and_auth(url.secret) + username, secret_password = user_pass + password = None # type: Optional[HiddenText] + if secret_password is not None: + password = hide_value(secret_password) + extra_args = self.make_rev_args(username, password) + rev_options = self.make_rev_options(rev, extra_args=extra_args) + + return hide_url(secret_url), rev_options + + @staticmethod + def normalize_url(url): + # type: (str) -> str + """ + Normalize a URL for comparison by unquoting it and removing any + trailing slash. + """ + return urllib_parse.unquote(url).rstrip('/') + + @classmethod + def compare_urls(cls, url1, url2): + # type: (str, str) -> bool + """ + Compare two repo URLs for identity, ignoring incidental differences. + """ + return (cls.normalize_url(url1) == cls.normalize_url(url2)) + + def fetch_new(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + """ + Fetch a revision from a repository, in the case that this is the + first fetch from the repository. + + Args: + dest: the directory to fetch the repository to. 
+ rev_options: a RevOptions object. + """ + raise NotImplementedError + + def switch(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + """ + Switch the repo at ``dest`` to point to ``URL``. + + Args: + rev_options: a RevOptions object. + """ + raise NotImplementedError + + def update(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + """ + Update an already-existing repo to the given ``rev_options``. + + Args: + rev_options: a RevOptions object. + """ + raise NotImplementedError + + @classmethod + def is_commit_id_equal(cls, dest, name): + # type: (str, Optional[str]) -> bool + """ + Return whether the id of the current commit equals the given name. + + Args: + dest: the repository directory. + name: a string name. + """ + raise NotImplementedError + + def obtain(self, dest, url): + # type: (str, HiddenText) -> None + """ + Install or update in editable mode the package represented by this + VersionControl object. + + :param dest: the repository directory in which to install or update. + :param url: the repository URL starting with a vcs prefix. 
+ """ + url, rev_options = self.get_url_rev_options(url) + + if not os.path.exists(dest): + self.fetch_new(dest, url, rev_options) + return + + rev_display = rev_options.to_display() + if self.is_repository_directory(dest): + existing_url = self.get_remote_url(dest) + if self.compare_urls(existing_url, url.secret): + logger.debug( + '%s in %s exists, and has correct URL (%s)', + self.repo_name.title(), + display_path(dest), + url, + ) + if not self.is_commit_id_equal(dest, rev_options.rev): + logger.info( + 'Updating %s %s%s', + display_path(dest), + self.repo_name, + rev_display, + ) + self.update(dest, url, rev_options) + else: + logger.info('Skipping because already up-to-date.') + return + + logger.warning( + '%s %s in %s exists with URL %s', + self.name, + self.repo_name, + display_path(dest), + existing_url, + ) + prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', + ('s', 'i', 'w', 'b')) + else: + logger.warning( + 'Directory %s already exists, and is not a %s %s.', + dest, + self.name, + self.repo_name, + ) + # https://github.com/python/mypy/issues/1174 + prompt = ('(i)gnore, (w)ipe, (b)ackup ', # type: ignore + ('i', 'w', 'b')) + + logger.warning( + 'The plan is to install the %s repository %s', + self.name, + url, + ) + response = ask_path_exists('What to do? {}'.format( + prompt[0]), prompt[1]) + + if response == 'a': + sys.exit(-1) + + if response == 'w': + logger.warning('Deleting %s', display_path(dest)) + rmtree(dest) + self.fetch_new(dest, url, rev_options) + return + + if response == 'b': + dest_dir = backup_dir(dest) + logger.warning( + 'Backing up %s to %s', display_path(dest), dest_dir, + ) + shutil.move(dest, dest_dir) + self.fetch_new(dest, url, rev_options) + return + + # Do nothing if the response is "i". 
+ if response == 's': + logger.info( + 'Switching %s %s to %s%s', + self.repo_name, + display_path(dest), + url, + rev_display, + ) + self.switch(dest, url, rev_options) + + def unpack(self, location, url): + # type: (str, HiddenText) -> None + """ + Clean up current location and download the url repository + (and vcs infos) into location + + :param url: the repository URL starting with a vcs prefix. + """ + if os.path.exists(location): + rmtree(location) + self.obtain(location, url=url) + + @classmethod + def get_remote_url(cls, location): + # type: (str) -> str + """ + Return the url used at location + + Raises RemoteNotFoundError if the repository does not have a remote + url configured. + """ + raise NotImplementedError + + @classmethod + def get_revision(cls, location): + # type: (str) -> str + """ + Return the current commit id of the files at the given location. + """ + raise NotImplementedError + + @classmethod + def run_command( + cls, + cmd, # type: Union[List[str], CommandArgs] + cwd=None, # type: Optional[str] + extra_environ=None, # type: Optional[Mapping[str, Any]] + extra_ok_returncodes=None, # type: Optional[Iterable[int]] + log_failed_cmd=True # type: bool + ): + # type: (...) 
-> Text + """ + Run a VCS subcommand + This is simply a wrapper around call_subprocess that adds the VCS + command name, and checks that the VCS is available + """ + cmd = make_command(cls.name, *cmd) + try: + return call_subprocess(cmd, cwd, + extra_environ=extra_environ, + extra_ok_returncodes=extra_ok_returncodes, + log_failed_cmd=log_failed_cmd) + except OSError as e: + # errno.ENOENT = no such file or directory + # In other words, the VCS executable isn't available + if e.errno == errno.ENOENT: + raise BadCommand( + 'Cannot find command {cls.name!r} - do you have ' + '{cls.name!r} installed and in your ' + 'PATH?'.format(**locals())) + else: + raise # re-raise exception if a different error occurred + + @classmethod + def is_repository_directory(cls, path): + # type: (str) -> bool + """ + Return whether a directory path is a repository directory. + """ + logger.debug('Checking in %s for %s (%s)...', + path, cls.dirname, cls.name) + return os.path.exists(os.path.join(path, cls.dirname)) + + @classmethod + def get_repository_root(cls, location): + # type: (str) -> Optional[str] + """ + Return the "root" (top-level) directory controlled by the vcs, + or `None` if the directory is not in any. + + It is meant to be overridden to implement smarter detection + mechanisms for specific vcs. + + This can do more than is_repository_directory() alone. For + example, the Git override checks that Git is actually available. + """ + if cls.is_repository_directory(location): + return location + return None diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/wheel.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/wheel.py deleted file mode 100644 index 67bcc7f7..00000000 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/wheel.py +++ /dev/null @@ -1,1095 +0,0 @@ -""" -Support for installing and building the "wheel" binary package format. 
-""" -from __future__ import absolute_import - -import collections -import compileall -import csv -import hashlib -import logging -import os.path -import re -import shutil -import stat -import sys -import warnings -from base64 import urlsafe_b64encode -from email.parser import Parser - -from pip._vendor import pkg_resources -from pip._vendor.distlib.scripts import ScriptMaker -from pip._vendor.packaging.utils import canonicalize_name -from pip._vendor.six import StringIO - -from pip._internal import pep425tags -from pip._internal.download import path_to_url, unpack_url -from pip._internal.exceptions import ( - InstallationError, InvalidWheelFilename, UnsupportedWheel, -) -from pip._internal.locations import ( - PIP_DELETE_MARKER_FILENAME, distutils_scheme, -) -from pip._internal.models.link import Link -from pip._internal.utils.logging import indent_log -from pip._internal.utils.misc import ( - call_subprocess, captured_stdout, ensure_dir, read_chunks, -) -from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM -from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.utils.typing import MYPY_CHECK_RUNNING -from pip._internal.utils.ui import open_spinner - -if MYPY_CHECK_RUNNING: - from typing import ( # noqa: F401 - Dict, List, Optional, Sequence, Mapping, Tuple, IO, Text, Any, - Union, Iterable - ) - from pip._vendor.packaging.requirements import Requirement # noqa: F401 - from pip._internal.req.req_install import InstallRequirement # noqa: F401 - from pip._internal.download import PipSession # noqa: F401 - from pip._internal.index import FormatControl, PackageFinder # noqa: F401 - from pip._internal.operations.prepare import ( # noqa: F401 - RequirementPreparer - ) - from pip._internal.cache import WheelCache # noqa: F401 - from pip._internal.pep425tags import Pep425Tag # noqa: F401 - - InstalledCSVRow = Tuple[str, ...] 
- - -VERSION_COMPATIBLE = (1, 0) - - -logger = logging.getLogger(__name__) - - -def normpath(src, p): - return os.path.relpath(src, p).replace(os.path.sep, '/') - - -def rehash(path, blocksize=1 << 20): - # type: (str, int) -> Tuple[str, str] - """Return (hash, length) for path using hashlib.sha256()""" - h = hashlib.sha256() - length = 0 - with open(path, 'rb') as f: - for block in read_chunks(f, size=blocksize): - length += len(block) - h.update(block) - digest = 'sha256=' + urlsafe_b64encode( - h.digest() - ).decode('latin1').rstrip('=') - # unicode/str python2 issues - return (digest, str(length)) # type: ignore - - -def open_for_csv(name, mode): - # type: (str, Text) -> IO - if sys.version_info[0] < 3: - nl = {} # type: Dict[str, Any] - bin = 'b' - else: - nl = {'newline': ''} # type: Dict[str, Any] - bin = '' - return open(name, mode + bin, **nl) - - -def replace_python_tag(wheelname, new_tag): - # type: (str, str) -> str - """Replace the Python tag in a wheel file name with a new value. - """ - parts = wheelname.split('-') - parts[-3] = new_tag - return '-'.join(parts) - - -def fix_script(path): - # type: (str) -> Optional[bool] - """Replace #!python with #!/path/to/python - Return True if file was changed.""" - # XXX RECORD hashes will need to be updated - if os.path.isfile(path): - with open(path, 'rb') as script: - firstline = script.readline() - if not firstline.startswith(b'#!python'): - return False - exename = sys.executable.encode(sys.getfilesystemencoding()) - firstline = b'#!' + exename + os.linesep.encode("ascii") - rest = script.read() - with open(path, 'wb') as script: - script.write(firstline) - script.write(rest) - return True - return None - - -dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>.+?))?) - \.dist-info$""", re.VERBOSE) - - -def root_is_purelib(name, wheeldir): - # type: (str, str) -> bool - """ - Return True if the extracted wheel in wheeldir should go into purelib. 
- """ - name_folded = name.replace("-", "_") - for item in os.listdir(wheeldir): - match = dist_info_re.match(item) - if match and match.group('name') == name_folded: - with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel: - for line in wheel: - line = line.lower().rstrip() - if line == "root-is-purelib: true": - return True - return False - - -def get_entrypoints(filename): - # type: (str) -> Tuple[Dict[str, str], Dict[str, str]] - if not os.path.exists(filename): - return {}, {} - - # This is done because you can pass a string to entry_points wrappers which - # means that they may or may not be valid INI files. The attempt here is to - # strip leading and trailing whitespace in order to make them valid INI - # files. - with open(filename) as fp: - data = StringIO() - for line in fp: - data.write(line.strip()) - data.write("\n") - data.seek(0) - - # get the entry points and then the script names - entry_points = pkg_resources.EntryPoint.parse_map(data) - console = entry_points.get('console_scripts', {}) - gui = entry_points.get('gui_scripts', {}) - - def _split_ep(s): - """get the string representation of EntryPoint, remove space and split - on '='""" - return str(s).replace(" ", "").split("=") - - # convert the EntryPoint objects into strings with module:function - console = dict(_split_ep(v) for v in console.values()) - gui = dict(_split_ep(v) for v in gui.values()) - return console, gui - - -def message_about_scripts_not_on_PATH(scripts): - # type: (Sequence[str]) -> Optional[str] - """Determine if any scripts are not on PATH and format a warning. - - Returns a warning message if one or more scripts are not on PATH, - otherwise None. 
- """ - if not scripts: - return None - - # Group scripts by the path they were installed in - grouped_by_dir = collections.defaultdict(set) # type: Dict[str, set] - for destfile in scripts: - parent_dir = os.path.dirname(destfile) - script_name = os.path.basename(destfile) - grouped_by_dir[parent_dir].add(script_name) - - # We don't want to warn for directories that are on PATH. - not_warn_dirs = [ - os.path.normcase(i).rstrip(os.sep) for i in - os.environ.get("PATH", "").split(os.pathsep) - ] - # If an executable sits with sys.executable, we don't warn for it. - # This covers the case of venv invocations without activating the venv. - not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable))) - warn_for = { - parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items() - if os.path.normcase(parent_dir) not in not_warn_dirs - } - if not warn_for: - return None - - # Format a message - msg_lines = [] - for parent_dir, scripts in warn_for.items(): - scripts = sorted(scripts) - if len(scripts) == 1: - start_text = "script {} is".format(scripts[0]) - else: - start_text = "scripts {} are".format( - ", ".join(scripts[:-1]) + " and " + scripts[-1] - ) - - msg_lines.append( - "The {} installed in '{}' which is not on PATH." - .format(start_text, parent_dir) - ) - - last_line_fmt = ( - "Consider adding {} to PATH or, if you prefer " - "to suppress this warning, use --no-warn-script-location." - ) - if len(msg_lines) == 1: - msg_lines.append(last_line_fmt.format("this directory")) - else: - msg_lines.append(last_line_fmt.format("these directories")) - - # Returns the formatted multiline message - return "\n".join(msg_lines) - - -def sorted_outrows(outrows): - # type: (Iterable[InstalledCSVRow]) -> List[InstalledCSVRow] - """ - Return the given rows of a RECORD file in sorted order. - - Each row is a 3-tuple (path, hash, size) and corresponds to a record of - a RECORD file (see PEP 376 and PEP 427 for details). 
For the rows - passed to this function, the size can be an integer as an int or string, - or the empty string. - """ - # Normally, there should only be one row per path, in which case the - # second and third elements don't come into play when sorting. - # However, in cases in the wild where a path might happen to occur twice, - # we don't want the sort operation to trigger an error (but still want - # determinism). Since the third element can be an int or string, we - # coerce each element to a string to avoid a TypeError in this case. - # For additional background, see-- - # https://github.com/pypa/pip/issues/5868 - return sorted(outrows, key=lambda row: tuple(str(x) for x in row)) - - -def get_csv_rows_for_installed( - old_csv_rows, # type: Iterable[List[str]] - installed, # type: Dict[str, str] - changed, # type: set - generated, # type: List[str] - lib_dir, # type: str -): - # type: (...) -> List[InstalledCSVRow] - """ - :param installed: A map from archive RECORD path to installation RECORD - path. - """ - installed_rows = [] # type: List[InstalledCSVRow] - for row in old_csv_rows: - if len(row) > 3: - logger.warning( - 'RECORD line has more than three elements: {}'.format(row) - ) - # Make a copy because we are mutating the row. 
- row = list(row) - old_path = row[0] - new_path = installed.pop(old_path, old_path) - row[0] = new_path - if new_path in changed: - digest, length = rehash(new_path) - row[1] = digest - row[2] = length - installed_rows.append(tuple(row)) - for f in generated: - digest, length = rehash(f) - installed_rows.append((normpath(f, lib_dir), digest, str(length))) - for f in installed: - installed_rows.append((installed[f], '', '')) - return installed_rows - - -def move_wheel_files( - name, # type: str - req, # type: Requirement - wheeldir, # type: str - user=False, # type: bool - home=None, # type: Optional[str] - root=None, # type: Optional[str] - pycompile=True, # type: bool - scheme=None, # type: Optional[Mapping[str, str]] - isolated=False, # type: bool - prefix=None, # type: Optional[str] - warn_script_location=True # type: bool -): - # type: (...) -> None - """Install a wheel""" - # TODO: Investigate and break this up. - # TODO: Look into moving this into a dedicated class for representing an - # installation. - - if not scheme: - scheme = distutils_scheme( - name, user=user, home=home, root=root, isolated=isolated, - prefix=prefix, - ) - - if root_is_purelib(name, wheeldir): - lib_dir = scheme['purelib'] - else: - lib_dir = scheme['platlib'] - - info_dir = [] # type: List[str] - data_dirs = [] - source = wheeldir.rstrip(os.path.sep) + os.path.sep - - # Record details of the files moved - # installed = files copied from the wheel to the destination - # changed = files changed while installing (scripts #! 
line typically) - # generated = files newly generated during the install (script wrappers) - installed = {} # type: Dict[str, str] - changed = set() - generated = [] # type: List[str] - - # Compile all of the pyc files that we're going to be installing - if pycompile: - with captured_stdout() as stdout: - with warnings.catch_warnings(): - warnings.filterwarnings('ignore') - compileall.compile_dir(source, force=True, quiet=True) - logger.debug(stdout.getvalue()) - - def record_installed(srcfile, destfile, modified=False): - """Map archive RECORD paths to installation RECORD paths.""" - oldpath = normpath(srcfile, wheeldir) - newpath = normpath(destfile, lib_dir) - installed[oldpath] = newpath - if modified: - changed.add(destfile) - - def clobber(source, dest, is_base, fixer=None, filter=None): - ensure_dir(dest) # common for the 'include' path - - for dir, subdirs, files in os.walk(source): - basedir = dir[len(source):].lstrip(os.path.sep) - destdir = os.path.join(dest, basedir) - if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'): - continue - for s in subdirs: - destsubdir = os.path.join(dest, basedir, s) - if is_base and basedir == '' and destsubdir.endswith('.data'): - data_dirs.append(s) - continue - elif (is_base and - s.endswith('.dist-info') and - canonicalize_name(s).startswith( - canonicalize_name(req.name))): - assert not info_dir, ('Multiple .dist-info directories: ' + - destsubdir + ', ' + - ', '.join(info_dir)) - info_dir.append(destsubdir) - for f in files: - # Skip unwanted files - if filter and filter(f): - continue - srcfile = os.path.join(dir, f) - destfile = os.path.join(dest, basedir, f) - # directory creation is lazy and after the file filtering above - # to ensure we don't install empty dirs; empty dirs can't be - # uninstalled. - ensure_dir(destdir) - - # copyfile (called below) truncates the destination if it - # exists and then writes the new contents. 
This is fine in most - # cases, but can cause a segfault if pip has loaded a shared - # object (e.g. from pyopenssl through its vendored urllib3) - # Since the shared object is mmap'd an attempt to call a - # symbol in it will then cause a segfault. Unlinking the file - # allows writing of new contents while allowing the process to - # continue to use the old copy. - if os.path.exists(destfile): - os.unlink(destfile) - - # We use copyfile (not move, copy, or copy2) to be extra sure - # that we are not moving directories over (copyfile fails for - # directories) as well as to ensure that we are not copying - # over any metadata because we want more control over what - # metadata we actually copy over. - shutil.copyfile(srcfile, destfile) - - # Copy over the metadata for the file, currently this only - # includes the atime and mtime. - st = os.stat(srcfile) - if hasattr(os, "utime"): - os.utime(destfile, (st.st_atime, st.st_mtime)) - - # If our file is executable, then make our destination file - # executable. 
- if os.access(srcfile, os.X_OK): - st = os.stat(srcfile) - permissions = ( - st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH - ) - os.chmod(destfile, permissions) - - changed = False - if fixer: - changed = fixer(destfile) - record_installed(srcfile, destfile, changed) - - clobber(source, lib_dir, True) - - assert info_dir, "%s .dist-info directory not found" % req - - # Get the defined entry points - ep_file = os.path.join(info_dir[0], 'entry_points.txt') - console, gui = get_entrypoints(ep_file) - - def is_entrypoint_wrapper(name): - # EP, EP.exe and EP-script.py are scripts generated for - # entry point EP by setuptools - if name.lower().endswith('.exe'): - matchname = name[:-4] - elif name.lower().endswith('-script.py'): - matchname = name[:-10] - elif name.lower().endswith(".pya"): - matchname = name[:-4] - else: - matchname = name - # Ignore setuptools-generated scripts - return (matchname in console or matchname in gui) - - for datadir in data_dirs: - fixer = None - filter = None - for subdir in os.listdir(os.path.join(wheeldir, datadir)): - fixer = None - if subdir == 'scripts': - fixer = fix_script - filter = is_entrypoint_wrapper - source = os.path.join(wheeldir, datadir, subdir) - dest = scheme[subdir] - clobber(source, dest, False, fixer=fixer, filter=filter) - - maker = ScriptMaker(None, scheme['scripts']) - - # Ensure old scripts are overwritten. - # See https://github.com/pypa/pip/issues/1800 - maker.clobber = True - - # Ensure we don't generate any variants for scripts because this is almost - # never what somebody wants. - # See https://bitbucket.org/pypa/distlib/issue/35/ - maker.variants = {''} - - # This is required because otherwise distlib creates scripts that are not - # executable. - # See https://bitbucket.org/pypa/distlib/issue/32/ - maker.set_mode = True - - # Simplify the script and fix the fact that the default script swallows - # every single stack trace. 
- # See https://bitbucket.org/pypa/distlib/issue/34/ - # See https://bitbucket.org/pypa/distlib/issue/33/ - def _get_script_text(entry): - if entry.suffix is None: - raise InstallationError( - "Invalid script entry point: %s for req: %s - A callable " - "suffix is required. Cf https://packaging.python.org/en/" - "latest/distributing.html#console-scripts for more " - "information." % (entry, req) - ) - return maker.script_template % { - "module": entry.prefix, - "import_name": entry.suffix.split(".")[0], - "func": entry.suffix, - } - # ignore type, because mypy disallows assigning to a method, - # see https://github.com/python/mypy/issues/2427 - maker._get_script_text = _get_script_text # type: ignore - maker.script_template = r"""# -*- coding: utf-8 -*- -import re -import sys - -from %(module)s import %(import_name)s - -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit(%(func)s()) -""" - - # Special case pip and setuptools to generate versioned wrappers - # - # The issue is that some projects (specifically, pip and setuptools) use - # code in setup.py to create "versioned" entry points - pip2.7 on Python - # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into - # the wheel metadata at build time, and so if the wheel is installed with - # a *different* version of Python the entry points will be wrong. The - # correct fix for this is to enhance the metadata to be able to describe - # such versioned entry points, but that won't happen till Metadata 2.0 is - # available. - # In the meantime, projects using versioned entry points will either have - # incorrect versioned entry points, or they will not be able to distribute - # "universal" wheels (i.e., they will need a wheel per Python version). - # - # Because setuptools and pip are bundled with _ensurepip and virtualenv, - # we need to use universal wheels. 
So, as a stopgap until Metadata 2.0, we - # override the versioned entry points in the wheel and generate the - # correct ones. This code is purely a short-term measure until Metadata 2.0 - # is available. - # - # To add the level of hack in this section of code, in order to support - # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment - # variable which will control which version scripts get installed. - # - # ENSUREPIP_OPTIONS=altinstall - # - Only pipX.Y and easy_install-X.Y will be generated and installed - # ENSUREPIP_OPTIONS=install - # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note - # that this option is technically if ENSUREPIP_OPTIONS is set and is - # not altinstall - # DEFAULT - # - The default behavior is to install pip, pipX, pipX.Y, easy_install - # and easy_install-X.Y. - pip_script = console.pop('pip', None) - if pip_script: - if "ENSUREPIP_OPTIONS" not in os.environ: - spec = 'pip = ' + pip_script - generated.extend(maker.make(spec)) - - if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": - spec = 'pip%s = %s' % (sys.version[:1], pip_script) - generated.extend(maker.make(spec)) - - spec = 'pip%s = %s' % (sys.version[:3], pip_script) - generated.extend(maker.make(spec)) - # Delete any other versioned pip entry points - pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)] - for k in pip_ep: - del console[k] - easy_install_script = console.pop('easy_install', None) - if easy_install_script: - if "ENSUREPIP_OPTIONS" not in os.environ: - spec = 'easy_install = ' + easy_install_script - generated.extend(maker.make(spec)) - - spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script) - generated.extend(maker.make(spec)) - # Delete any other versioned easy_install entry points - easy_install_ep = [ - k for k in console if re.match(r'easy_install(-\d\.\d)?$', k) - ] - for k in easy_install_ep: - del console[k] - - # Generate the console and GUI entry points specified in the wheel 
- if len(console) > 0: - generated_console_scripts = maker.make_multiple( - ['%s = %s' % kv for kv in console.items()] - ) - generated.extend(generated_console_scripts) - - if warn_script_location: - msg = message_about_scripts_not_on_PATH(generated_console_scripts) - if msg is not None: - logger.warning(msg) - - if len(gui) > 0: - generated.extend( - maker.make_multiple( - ['%s = %s' % kv for kv in gui.items()], - {'gui': True} - ) - ) - - # Record pip as the installer - installer = os.path.join(info_dir[0], 'INSTALLER') - temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip') - with open(temp_installer, 'wb') as installer_file: - installer_file.write(b'pip\n') - shutil.move(temp_installer, installer) - generated.append(installer) - - # Record details of all files installed - record = os.path.join(info_dir[0], 'RECORD') - temp_record = os.path.join(info_dir[0], 'RECORD.pip') - with open_for_csv(record, 'r') as record_in: - with open_for_csv(temp_record, 'w+') as record_out: - reader = csv.reader(record_in) - outrows = get_csv_rows_for_installed( - reader, installed=installed, changed=changed, - generated=generated, lib_dir=lib_dir, - ) - writer = csv.writer(record_out) - # Sort to simplify testing. - for row in sorted_outrows(outrows): - writer.writerow(row) - shutil.move(temp_record, record) - - -def wheel_version(source_dir): - # type: (Optional[str]) -> Optional[Tuple[int, ...]] - """ - Return the Wheel-Version of an extracted wheel, if possible. - - Otherwise, return None if we couldn't parse / extract it. 
- """ - try: - dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0] - - wheel_data = dist.get_metadata('WHEEL') - wheel_data = Parser().parsestr(wheel_data) - - version = wheel_data['Wheel-Version'].strip() - version = tuple(map(int, version.split('.'))) - return version - except Exception: - return None - - -def check_compatibility(version, name): - # type: (Optional[Tuple[int, ...]], str) -> None - """ - Raises errors or warns if called with an incompatible Wheel-Version. - - Pip should refuse to install a Wheel-Version that's a major series - ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when - installing a version only minor version ahead (e.g 1.2 > 1.1). - - version: a 2-tuple representing a Wheel-Version (Major, Minor) - name: name of wheel or package to raise exception about - - :raises UnsupportedWheel: when an incompatible Wheel-Version is given - """ - if not version: - raise UnsupportedWheel( - "%s is in an unsupported or invalid wheel" % name - ) - if version[0] > VERSION_COMPATIBLE[0]: - raise UnsupportedWheel( - "%s's Wheel-Version (%s) is not compatible with this version " - "of pip" % (name, '.'.join(map(str, version))) - ) - elif version > VERSION_COMPATIBLE: - logger.warning( - 'Installing from a newer Wheel-Version (%s)', - '.'.join(map(str, version)), - ) - - -class Wheel(object): - """A wheel file""" - - # TODO: Maybe move the class into the models sub-package - # TODO: Maybe move the install code into this class - - wheel_file_re = re.compile( - r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?)) - ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?) - \.whl|\.dist-info)$""", - re.VERBOSE - ) - - def __init__(self, filename): - # type: (str) -> None - """ - :raises InvalidWheelFilename: when the filename is invalid for a wheel - """ - wheel_info = self.wheel_file_re.match(filename) - if not wheel_info: - raise InvalidWheelFilename( - "%s is not a valid wheel filename." 
% filename - ) - self.filename = filename - self.name = wheel_info.group('name').replace('_', '-') - # we'll assume "_" means "-" due to wheel naming scheme - # (https://github.com/pypa/pip/issues/1150) - self.version = wheel_info.group('ver').replace('_', '-') - self.build_tag = wheel_info.group('build') - self.pyversions = wheel_info.group('pyver').split('.') - self.abis = wheel_info.group('abi').split('.') - self.plats = wheel_info.group('plat').split('.') - - # All the tag combinations from this file - self.file_tags = { - (x, y, z) for x in self.pyversions - for y in self.abis for z in self.plats - } - - def support_index_min(self, tags=None): - # type: (Optional[List[Pep425Tag]]) -> Optional[int] - """ - Return the lowest index that one of the wheel's file_tag combinations - achieves in the supported_tags list e.g. if there are 8 supported tags, - and one of the file tags is first in the list, then return 0. Returns - None is the wheel is not supported. - """ - if tags is None: # for mock - tags = pep425tags.get_supported() - indexes = [tags.index(c) for c in self.file_tags if c in tags] - return min(indexes) if indexes else None - - def supported(self, tags=None): - # type: (Optional[List[Pep425Tag]]) -> bool - """Is this wheel supported on this system?""" - if tags is None: # for mock - tags = pep425tags.get_supported() - return bool(set(tags).intersection(self.file_tags)) - - -def _contains_egg_info( - s, _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)): - """Determine whether the string looks like an egg_info. - - :param s: The string to parse. E.g. foo-2.1 - """ - return bool(_egg_info_re.search(s)) - - -def should_use_ephemeral_cache( - req, # type: InstallRequirement - format_control, # type: FormatControl - autobuilding, # type: bool - cache_available # type: bool -): - # type: (...) -> Optional[bool] - """ - Return whether to build an InstallRequirement object using the - ephemeral cache. 
- - :param cache_available: whether a cache directory is available for the - autobuilding=True case. - - :return: True or False to build the requirement with ephem_cache=True - or False, respectively; or None not to build the requirement. - """ - if req.constraint: - return None - if req.is_wheel: - if not autobuilding: - logger.info( - 'Skipping %s, due to already being wheel.', req.name, - ) - return None - if not autobuilding: - return False - - if req.editable or not req.source_dir: - return None - - if req.link and not req.link.is_artifact: - # VCS checkout. Build wheel just for this run. - return True - - if "binary" not in format_control.get_allowed_formats( - canonicalize_name(req.name)): - logger.info( - "Skipping bdist_wheel for %s, due to binaries " - "being disabled for it.", req.name, - ) - return None - - link = req.link - base, ext = link.splitext() - if cache_available and _contains_egg_info(base): - return False - - # Otherwise, build the wheel just for this run using the ephemeral - # cache since we are either in the case of e.g. a local directory, or - # no cache directory is available to use. - return True - - -def format_command( - command_args, # type: List[str] - command_output, # type: str -): - # type: (...) -> str - """ - Format command information for logging. - """ - text = 'Command arguments: {}\n'.format(command_args) - - if not command_output: - text += 'Command output: None' - elif logger.getEffectiveLevel() > logging.DEBUG: - text += 'Command output: [use --verbose to show]' - else: - if not command_output.endswith('\n'): - command_output += '\n' - text += ( - 'Command output:\n{}' - '-----------------------------------------' - ).format(command_output) - - return text - - -def get_legacy_build_wheel_path( - names, # type: List[str] - temp_dir, # type: str - req, # type: InstallRequirement - command_args, # type: List[str] - command_output, # type: str -): - # type: (...) 
-> Optional[str] - """ - Return the path to the wheel in the temporary build directory. - """ - # Sort for determinism. - names = sorted(names) - if not names: - msg = ( - 'Legacy build of wheel for {!r} created no files.\n' - ).format(req.name) - msg += format_command(command_args, command_output) - logger.warning(msg) - return None - - if len(names) > 1: - msg = ( - 'Legacy build of wheel for {!r} created more than one file.\n' - 'Filenames (choosing first): {}\n' - ).format(req.name, names) - msg += format_command(command_args, command_output) - logger.warning(msg) - - return os.path.join(temp_dir, names[0]) - - -class WheelBuilder(object): - """Build wheels from a RequirementSet.""" - - def __init__( - self, - finder, # type: PackageFinder - preparer, # type: RequirementPreparer - wheel_cache, # type: WheelCache - build_options=None, # type: Optional[List[str]] - global_options=None, # type: Optional[List[str]] - no_clean=False # type: bool - ): - # type: (...) -> None - self.finder = finder - self.preparer = preparer - self.wheel_cache = wheel_cache - - self._wheel_dir = preparer.wheel_download_dir - - self.build_options = build_options or [] - self.global_options = global_options or [] - self.no_clean = no_clean - - def _build_one(self, req, output_dir, python_tag=None): - """Build one wheel. - - :return: The filename of the built wheel, or None if the build failed. 
- """ - # Install build deps into temporary directory (PEP 518) - with req.build_env: - return self._build_one_inside_env(req, output_dir, - python_tag=python_tag) - - def _build_one_inside_env(self, req, output_dir, python_tag=None): - with TempDirectory(kind="wheel") as temp_dir: - if req.use_pep517: - builder = self._build_one_pep517 - else: - builder = self._build_one_legacy - wheel_path = builder(req, temp_dir.path, python_tag=python_tag) - if wheel_path is not None: - wheel_name = os.path.basename(wheel_path) - dest_path = os.path.join(output_dir, wheel_name) - try: - shutil.move(wheel_path, dest_path) - logger.info('Stored in directory: %s', output_dir) - return dest_path - except Exception: - pass - # Ignore return, we can't do anything else useful. - self._clean_one(req) - return None - - def _base_setup_args(self, req): - # NOTE: Eventually, we'd want to also -S to the flags here, when we're - # isolating. Currently, it breaks Python in virtualenvs, because it - # relies on site.py to find parts of the standard library outside the - # virtualenv. - return [ - sys.executable, '-u', '-c', - SETUPTOOLS_SHIM % req.setup_py - ] + list(self.global_options) - - def _build_one_pep517(self, req, tempd, python_tag=None): - """Build one InstallRequirement using the PEP 517 build process. - - Returns path to wheel if successfully built. Otherwise, returns None. - """ - assert req.metadata_directory is not None - try: - req.spin_message = 'Building wheel for %s (PEP 517)' % (req.name,) - logger.debug('Destination directory: %s', tempd) - wheel_name = req.pep517_backend.build_wheel( - tempd, - metadata_directory=req.metadata_directory - ) - if python_tag: - # General PEP 517 backends don't necessarily support - # a "--python-tag" option, so we rename the wheel - # file directly. 
- new_name = replace_python_tag(wheel_name, python_tag) - os.rename( - os.path.join(tempd, wheel_name), - os.path.join(tempd, new_name) - ) - # Reassign to simplify the return at the end of function - wheel_name = new_name - except Exception: - logger.error('Failed building wheel for %s', req.name) - return None - return os.path.join(tempd, wheel_name) - - def _build_one_legacy(self, req, tempd, python_tag=None): - """Build one InstallRequirement using the "legacy" build process. - - Returns path to wheel if successfully built. Otherwise, returns None. - """ - base_args = self._base_setup_args(req) - - spin_message = 'Building wheel for %s (setup.py)' % (req.name,) - with open_spinner(spin_message) as spinner: - logger.debug('Destination directory: %s', tempd) - wheel_args = base_args + ['bdist_wheel', '-d', tempd] \ - + self.build_options - - if python_tag is not None: - wheel_args += ["--python-tag", python_tag] - - try: - output = call_subprocess(wheel_args, cwd=req.setup_py_dir, - show_stdout=False, spinner=spinner) - except Exception: - spinner.finish("error") - logger.error('Failed building wheel for %s', req.name) - return None - names = os.listdir(tempd) - wheel_path = get_legacy_build_wheel_path( - names=names, - temp_dir=tempd, - req=req, - command_args=wheel_args, - command_output=output, - ) - return wheel_path - - def _clean_one(self, req): - base_args = self._base_setup_args(req) - - logger.info('Running setup.py clean for %s', req.name) - clean_args = base_args + ['clean', '--all'] - try: - call_subprocess(clean_args, cwd=req.source_dir, show_stdout=False) - return True - except Exception: - logger.error('Failed cleaning build dir for %s', req.name) - return False - - def build( - self, - requirements, # type: Iterable[InstallRequirement] - session, # type: PipSession - autobuilding=False # type: bool - ): - # type: (...) -> List[InstallRequirement] - """Build wheels. 
- - :param unpack: If True, replace the sdist we built from with the - newly built wheel, in preparation for installation. - :return: True if all the wheels built correctly. - """ - buildset = [] - format_control = self.finder.format_control - # Whether a cache directory is available for autobuilding=True. - cache_available = bool(self._wheel_dir or self.wheel_cache.cache_dir) - - for req in requirements: - ephem_cache = should_use_ephemeral_cache( - req, format_control=format_control, autobuilding=autobuilding, - cache_available=cache_available, - ) - if ephem_cache is None: - continue - - buildset.append((req, ephem_cache)) - - if not buildset: - return [] - - # Is any wheel build not using the ephemeral cache? - if any(not ephem_cache for _, ephem_cache in buildset): - have_directory_for_build = self._wheel_dir or ( - autobuilding and self.wheel_cache.cache_dir - ) - assert have_directory_for_build - - # TODO by @pradyunsg - # Should break up this method into 2 separate methods. - - # Build the wheels. - logger.info( - 'Building wheels for collected packages: %s', - ', '.join([req.name for (req, _) in buildset]), - ) - _cache = self.wheel_cache # shorter name - with indent_log(): - build_success, build_failure = [], [] - for req, ephem in buildset: - python_tag = None - if autobuilding: - python_tag = pep425tags.implementation_tag - if ephem: - output_dir = _cache.get_ephem_path_for_link(req.link) - else: - output_dir = _cache.get_path_for_link(req.link) - try: - ensure_dir(output_dir) - except OSError as e: - logger.warning("Building wheel for %s failed: %s", - req.name, e) - build_failure.append(req) - continue - else: - output_dir = self._wheel_dir - wheel_file = self._build_one( - req, output_dir, - python_tag=python_tag, - ) - if wheel_file: - build_success.append(req) - if autobuilding: - # XXX: This is mildly duplicative with prepare_files, - # but not close enough to pull out to a single common - # method. 
- # The code below assumes temporary source dirs - - # prevent it doing bad things. - if req.source_dir and not os.path.exists(os.path.join( - req.source_dir, PIP_DELETE_MARKER_FILENAME)): - raise AssertionError( - "bad source dir - missing marker") - # Delete the source we built the wheel from - req.remove_temporary_source() - # set the build directory again - name is known from - # the work prepare_files did. - req.source_dir = req.build_location( - self.preparer.build_dir - ) - # Update the link for this. - req.link = Link(path_to_url(wheel_file)) - assert req.link.is_wheel - # extract the wheel into the dir - unpack_url( - req.link, req.source_dir, None, False, - session=session, - ) - else: - build_failure.append(req) - - # notify success/failure - if build_success: - logger.info( - 'Successfully built %s', - ' '.join([req.name for req in build_success]), - ) - if build_failure: - logger.info( - 'Failed to build %s', - ' '.join([req.name for req in build_failure]), - ) - # Return a list of requirements that failed to build - return build_failure diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/wheel_builder.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/wheel_builder.py new file mode 100644 index 00000000..fa08016b --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_internal/wheel_builder.py @@ -0,0 +1,308 @@ +"""Orchestrator for building wheels from InstallRequirements. 
+""" + +import logging +import os.path +import re +import shutil + +from pip._internal.models.link import Link +from pip._internal.operations.build.wheel import build_wheel_pep517 +from pip._internal.operations.build.wheel_legacy import build_wheel_legacy +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ensure_dir, hash_file, is_wheel_installed +from pip._internal.utils.setuptools_build import make_setuptools_clean_args +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs import vcs + +if MYPY_CHECK_RUNNING: + from typing import ( + Any, Callable, Iterable, List, Optional, Tuple, + ) + + from pip._internal.cache import WheelCache + from pip._internal.req.req_install import InstallRequirement + + BinaryAllowedPredicate = Callable[[InstallRequirement], bool] + BuildResult = Tuple[List[InstallRequirement], List[InstallRequirement]] + +logger = logging.getLogger(__name__) + +_egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.IGNORECASE) + + +def _contains_egg_info(s): + # type: (str) -> bool + """Determine whether the string looks like an egg_info. + + :param s: The string to parse. E.g. foo-2.1 + """ + return bool(_egg_info_re.search(s)) + + +def _should_build( + req, # type: InstallRequirement + need_wheel, # type: bool + check_binary_allowed, # type: BinaryAllowedPredicate +): + # type: (...) -> bool + """Return whether an InstallRequirement should be built into a wheel.""" + if req.constraint: + # never build requirements that are merely constraints + return False + if req.is_wheel: + if need_wheel: + logger.info( + 'Skipping %s, due to already being wheel.', req.name, + ) + return False + + if need_wheel: + # i.e. 
pip wheel, not pip install + return True + + # From this point, this concerns the pip install command only + # (need_wheel=False). + + if req.editable or not req.source_dir: + return False + + if not check_binary_allowed(req): + logger.info( + "Skipping wheel build for %s, due to binaries " + "being disabled for it.", req.name, + ) + return False + + if not req.use_pep517 and not is_wheel_installed(): + # we don't build legacy requirements if wheel is not installed + logger.info( + "Using legacy 'setup.py install' for %s, " + "since package 'wheel' is not installed.", req.name, + ) + return False + + return True + + +def should_build_for_wheel_command( + req, # type: InstallRequirement +): + # type: (...) -> bool + return _should_build( + req, need_wheel=True, check_binary_allowed=_always_true + ) + + +def should_build_for_install_command( + req, # type: InstallRequirement + check_binary_allowed, # type: BinaryAllowedPredicate +): + # type: (...) -> bool + return _should_build( + req, need_wheel=False, check_binary_allowed=check_binary_allowed + ) + + +def _should_cache( + req, # type: InstallRequirement +): + # type: (...) -> Optional[bool] + """ + Return whether a built InstallRequirement can be stored in the persistent + wheel cache, assuming the wheel cache is available, and _should_build() + has determined a wheel needs to be built. + """ + if req.editable or not req.source_dir: + # never cache editable requirements + return False + + if req.link and req.link.is_vcs: + # VCS checkout. Do not cache + # unless it points to an immutable commit hash. + assert not req.editable + assert req.source_dir + vcs_backend = vcs.get_backend_for_scheme(req.link.scheme) + assert vcs_backend + if vcs_backend.is_immutable_rev_checkout(req.link.url, req.source_dir): + return True + return False + + assert req.link + base, ext = req.link.splitext() + if _contains_egg_info(base): + return True + + # Otherwise, do not cache. 
+ return False + + +def _get_cache_dir( + req, # type: InstallRequirement + wheel_cache, # type: WheelCache +): + # type: (...) -> str + """Return the persistent or temporary cache directory where the built + wheel need to be stored. + """ + cache_available = bool(wheel_cache.cache_dir) + assert req.link + if cache_available and _should_cache(req): + cache_dir = wheel_cache.get_path_for_link(req.link) + else: + cache_dir = wheel_cache.get_ephem_path_for_link(req.link) + return cache_dir + + +def _always_true(_): + # type: (Any) -> bool + return True + + +def _build_one( + req, # type: InstallRequirement + output_dir, # type: str + build_options, # type: List[str] + global_options, # type: List[str] +): + # type: (...) -> Optional[str] + """Build one wheel. + + :return: The filename of the built wheel, or None if the build failed. + """ + try: + ensure_dir(output_dir) + except OSError as e: + logger.warning( + "Building wheel for %s failed: %s", + req.name, e, + ) + return None + + # Install build deps into temporary directory (PEP 518) + with req.build_env: + return _build_one_inside_env( + req, output_dir, build_options, global_options + ) + + +def _build_one_inside_env( + req, # type: InstallRequirement + output_dir, # type: str + build_options, # type: List[str] + global_options, # type: List[str] +): + # type: (...) 
-> Optional[str] + with TempDirectory(kind="wheel") as temp_dir: + assert req.name + if req.use_pep517: + assert req.metadata_directory + wheel_path = build_wheel_pep517( + name=req.name, + backend=req.pep517_backend, + metadata_directory=req.metadata_directory, + build_options=build_options, + tempd=temp_dir.path, + ) + else: + wheel_path = build_wheel_legacy( + name=req.name, + setup_py_path=req.setup_py_path, + source_dir=req.unpacked_source_directory, + global_options=global_options, + build_options=build_options, + tempd=temp_dir.path, + ) + + if wheel_path is not None: + wheel_name = os.path.basename(wheel_path) + dest_path = os.path.join(output_dir, wheel_name) + try: + wheel_hash, length = hash_file(wheel_path) + shutil.move(wheel_path, dest_path) + logger.info('Created wheel for %s: ' + 'filename=%s size=%d sha256=%s', + req.name, wheel_name, length, + wheel_hash.hexdigest()) + logger.info('Stored in directory: %s', output_dir) + return dest_path + except Exception as e: + logger.warning( + "Building wheel for %s failed: %s", + req.name, e, + ) + # Ignore return, we can't do anything else useful. + if not req.use_pep517: + _clean_one_legacy(req, global_options) + return None + + +def _clean_one_legacy(req, global_options): + # type: (InstallRequirement, List[str]) -> bool + clean_args = make_setuptools_clean_args( + req.setup_py_path, + global_options=global_options, + ) + + logger.info('Running setup.py clean for %s', req.name) + try: + call_subprocess(clean_args, cwd=req.source_dir) + return True + except Exception: + logger.error('Failed cleaning build dir for %s', req.name) + return False + + +def build( + requirements, # type: Iterable[InstallRequirement] + wheel_cache, # type: WheelCache + build_options, # type: List[str] + global_options, # type: List[str] +): + # type: (...) -> BuildResult + """Build wheels. + + :return: The list of InstallRequirement that succeeded to build and + the list of InstallRequirement that failed to build. 
+ """ + if not requirements: + return [], [] + + # Build the wheels. + logger.info( + 'Building wheels for collected packages: %s', + ', '.join(req.name for req in requirements), # type: ignore + ) + + with indent_log(): + build_successes, build_failures = [], [] + for req in requirements: + cache_dir = _get_cache_dir(req, wheel_cache) + wheel_file = _build_one( + req, cache_dir, build_options, global_options + ) + if wheel_file: + # Update the link for this. + req.link = Link(path_to_url(wheel_file)) + req.local_file_path = req.link.file_path + assert req.link.is_wheel + build_successes.append(req) + else: + build_failures.append(req) + + # notify success/failure + if build_successes: + logger.info( + 'Successfully built %s', + ' '.join([req.name for req in build_successes]), # type: ignore + ) + if build_failures: + logger.info( + 'Failed to build %s', + ' '.join([req.name for req in build_failures]), # type: ignore + ) + # Return a list of requirements that failed to build + return build_successes, build_failures diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/__init__.py index b919b540..581db54c 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/__init__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/__init__.py @@ -30,24 +30,17 @@ def vendored(modulename): vendored_name = "{0}.{1}".format(__name__, modulename) try: - __import__(vendored_name, globals(), locals(), level=0) + __import__(modulename, globals(), locals(), level=0) except ImportError: - try: - __import__(modulename, globals(), locals(), level=0) - except ImportError: - # We can just silently allow import failures to pass here. If we - # got to this point it means that ``import pip._vendor.whatever`` - # failed and so did ``import whatever``. 
Since we're importing this - # upfront in an attempt to alias imports, not erroring here will - # just mean we get a regular import error whenever pip *actually* - # tries to import one of these modules to use it, which actually - # gives us a better error message than we would have otherwise - # gotten. - pass - else: - sys.modules[vendored_name] = sys.modules[modulename] - base, head = vendored_name.rsplit(".", 1) - setattr(sys.modules[base], head, sys.modules[modulename]) + # This error used to be silenced in earlier variants of this file, to instead + # raise the error when pip actually tries to use the missing module. + # Based on inputs in #5354, this was changed to explicitly raise the error. + # Re-raising the exception without modifying it is an intentional choice. + raise + else: + sys.modules[vendored_name] = sys.modules[modulename] + base, head = vendored_name.rsplit(".", 1) + setattr(sys.modules[base], head, sys.modules[modulename]) # If we're operating in a debundled setup, then we want to go ahead and trigger @@ -61,12 +54,14 @@ if DEBUNDLED: sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path # Actually alias all of our vendored dependencies. 
+ vendored("appdirs") vendored("cachecontrol") + vendored("certifi") vendored("colorama") + vendored("contextlib2") vendored("distlib") vendored("distro") vendored("html5lib") - vendored("lockfile") vendored("six") vendored("six.moves") vendored("six.moves.urllib") @@ -77,9 +72,9 @@ if DEBUNDLED: vendored("pep517") vendored("pkg_resources") vendored("progress") - vendored("pytoml") vendored("retrying") vendored("requests") + vendored("requests.exceptions") vendored("requests.packages") vendored("requests.packages.urllib3") vendored("requests.packages.urllib3._collections") @@ -108,4 +103,8 @@ if DEBUNDLED: vendored("requests.packages.urllib3.util.ssl_") vendored("requests.packages.urllib3.util.timeout") vendored("requests.packages.urllib3.util.url") + vendored("resolvelib") + vendored("toml") + vendored("toml.encoder") + vendored("toml.decoder") vendored("urllib3") diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/appdirs.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/appdirs.py index 2bd39110..33a3b774 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/appdirs.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/appdirs.py @@ -13,8 +13,8 @@ See <http://github.com/ActiveState/appdirs> for details and usage. # - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html # - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html -__version_info__ = (1, 4, 3) -__version__ = '.'.join(map(str, __version_info__)) +__version__ = "1.4.4" +__version_info__ = tuple(int(segment) for segment in __version__.split(".")) import sys @@ -37,6 +37,10 @@ if sys.platform.startswith('java'): # are actually checked for and the rest of the module expects # *sys.platform* style strings. 
system = 'linux2' +elif sys.platform == 'cli' and os.name == 'nt': + # Detect Windows in IronPython to match pip._internal.utils.compat.WINDOWS + # Discussion: <https://github.com/pypa/pip/pull/7501> + system = 'win32' else: system = sys.platform @@ -64,7 +68,7 @@ def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): for a discussion of issues. Typical user data directories are: - Mac OS X: ~/Library/Application Support/<AppName> + Mac OS X: ~/Library/Application Support/<AppName> # or ~/.config/<AppName>, if the other does not exist Unix: ~/.local/share/<AppName> # or in $XDG_DATA_HOME, if defined Win XP (not roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName> Win XP (roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName> @@ -150,7 +154,7 @@ def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): if appname: if version: appname = os.path.join(appname, version) - pathlist = [os.sep.join([x, appname]) for x in pathlist] + pathlist = [os.path.join(x, appname) for x in pathlist] if multipath: path = os.pathsep.join(pathlist) @@ -203,6 +207,8 @@ def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): return path +# for the discussion regarding site_config_dir locations +# see <https://github.com/pypa/pip/issues/1733> def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): r"""Return full path to the user-shared data dir for this application. 
@@ -238,14 +244,15 @@ def site_config_dir(appname=None, appauthor=None, version=None, multipath=False) if appname and version: path = os.path.join(path, version) else: - # XDG default for $XDG_CONFIG_DIRS + # XDG default for $XDG_CONFIG_DIRS (missing or empty) + # see <https://github.com/pypa/pip/pull/7501#discussion_r360624829> # only first, if multipath is False - path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') - pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] + path = os.getenv('XDG_CONFIG_DIRS') or '/etc/xdg' + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep) if x] if appname: if version: appname = os.path.join(appname, version) - pathlist = [os.sep.join([x, appname]) for x in pathlist] + pathlist = [os.path.join(x, appname) for x in pathlist] if multipath: path = os.pathsep.join(pathlist) @@ -291,6 +298,10 @@ def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): if appauthor is None: appauthor = appname path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) + # When using Python 2, return paths as bytes on Windows like we do on + # other operating systems. See helper function docs for more details. + if not PY3 and isinstance(path, unicode): + path = _win_path_to_bytes(path) if appname: if appauthor is not False: path = os.path.join(path, appauthor, appname) @@ -567,6 +578,24 @@ if system == "win32": _get_win_folder = _get_win_folder_from_registry +def _win_path_to_bytes(path): + """Encode Windows paths to bytes. Only used on Python 2. + + Motivation is to be consistent with other operating systems where paths + are also returned as bytes. This avoids problems mixing bytes and Unicode + elsewhere in the codebase. For more details and discussion see + <https://github.com/pypa/pip/issues/3463>. + + If encoding using ASCII and MBCS fails, return the original Unicode path. 
+ """ + for encoding in ('ASCII', 'MBCS'): + try: + return path.encode(encoding) + except (UnicodeEncodeError, LookupError): + pass + return path + + #---- self test code if __name__ == "__main__": diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/__init__.py index 8fdee66f..a1bbbbe3 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/__init__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/__init__.py @@ -4,7 +4,7 @@ Make it easy to import from cachecontrol without long namespaces. """ __author__ = "Eric Larson" __email__ = "eric@ionrock.org" -__version__ = "0.12.5" +__version__ = "0.12.6" from .wrapper import CacheControl from .adapter import CacheControlAdapter diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/adapter.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/adapter.py index 780eb288..815650e8 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/adapter.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/adapter.py @@ -24,7 +24,7 @@ class CacheControlAdapter(HTTPAdapter): **kw ): super(CacheControlAdapter, self).__init__(*args, **kw) - self.cache = cache or DictCache() + self.cache = DictCache() if cache is None else cache self.heuristic = heuristic self.cacheable_methods = cacheable_methods or ("GET",) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py index 1ba00806..607b9452 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py 
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py @@ -69,8 +69,8 @@ class FileCache(BaseCache): raise ValueError("Cannot use use_dir_lock and lock_class together") try: - from pip._vendor.lockfile import LockFile - from pip._vendor.lockfile.mkdirlockfile import MkdirLockFile + from lockfile import LockFile + from lockfile.mkdirlockfile import MkdirLockFile except ImportError: notice = dedent( """ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/controller.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/controller.py index 1b2b943c..dafe55ca 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/controller.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/controller.py @@ -34,7 +34,7 @@ class CacheController(object): def __init__( self, cache=None, cache_etags=True, serializer=None, status_codes=None ): - self.cache = cache or DictCache() + self.cache = DictCache() if cache is None else cache self.cache_etags = cache_etags self.serializer = serializer or Serializer() self.cacheable_status_codes = status_codes or (200, 203, 300, 301) @@ -293,6 +293,15 @@ class CacheController(object): if no_store: return + # https://tools.ietf.org/html/rfc7234#section-4.1: + # A Vary header field-value of "*" always fails to match. + # Storing such a response leads to a deserialization warning + # during cache lookup and is not allowed to ever be served, + # so storing it can be avoided. 
+ if "*" in response_headers.get("vary", ""): + logger.debug('Response header has "Vary: *"') + return + # If we've been given an etag, then keep the response if self.cache_etags and "etag" in response_headers: logger.debug("Caching due to etag") diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/serialize.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/serialize.py index ec43ff27..3b6ec2de 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/serialize.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/serialize.py @@ -107,6 +107,8 @@ class Serializer(object): """ # Special case the '*' Vary value as it means we cannot actually # determine if the cached response is suitable for this request. + # This case is also handled in the controller code when creating + # a cache entry, but is left here for backwards compatibility. 
if "*" in cached.get("vary", {}): return @@ -179,7 +181,7 @@ class Serializer(object): def _loads_v4(self, request, data): try: - cached = msgpack.loads(data, encoding="utf-8") + cached = msgpack.loads(data, raw=False) except ValueError: return diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/wrapper.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/wrapper.py index 265bfc8b..d8e6fc6a 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/wrapper.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/cachecontrol/wrapper.py @@ -13,7 +13,7 @@ def CacheControl( cacheable_methods=None, ): - cache = cache or DictCache() + cache = DictCache() if cache is None else cache adapter_class = adapter_class or CacheControlAdapter adapter = adapter_class( cache, diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/certifi/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/certifi/__init__.py index ef71f3af..5d52a62e 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/certifi/__init__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/certifi/__init__.py @@ -1,3 +1,3 @@ -from .core import where +from .core import contents, where -__version__ = "2018.11.29" +__version__ = "2020.06.20" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/certifi/__main__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/certifi/__main__.py index ae2aff5c..00376349 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/certifi/__main__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/certifi/__main__.py @@ -1,2 +1,12 @@ -from pip._vendor.certifi import where -print(where()) +import argparse + +from pip._vendor.certifi 
import contents, where + +parser = argparse.ArgumentParser() +parser.add_argument("-c", "--contents", action="store_true") +args = parser.parse_args() + +if args.contents: + print(contents()) +else: + print(where()) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/certifi/cacert.pem b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/certifi/cacert.pem index db68797e..0fd855f4 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/certifi/cacert.pem +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/certifi/cacert.pem @@ -58,38 +58,6 @@ AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== -----END CERTIFICATE----- -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. 
- For authorized use only -# Label: "Verisign Class 3 Public Primary Certification Authority - G3" -# Serial: 206684696279472310254277870180966723415 -# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 -# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 -# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 ------BEGIN CERTIFICATE----- -MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl -cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu -LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT -aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD -VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT -aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ -bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu -IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b -N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t -KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu -kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm -CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ -Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu -imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te -2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe -DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC -/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p -F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt -TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== ------END CERTIFICATE----- - # Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net 
OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited # Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited # Label: "Entrust.net Premium 2048 Secure Server CA" @@ -152,39 +120,6 @@ ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp -----END CERTIFICATE----- -# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Label: "AddTrust External Root" -# Serial: 1 -# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f -# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 -# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 ------BEGIN CERTIFICATE----- -MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs -IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 -MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux -FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h -bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v -dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt -H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 -uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX -mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX -a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN -E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 -WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD -VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 -Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU 
-cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx -IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN -AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH -YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 -6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC -Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX -c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a -mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= ------END CERTIFICATE----- - # Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. # Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. # Label: "Entrust Root Certification Authority" @@ -771,36 +706,6 @@ vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep +OkuE6N36B9K -----END CERTIFICATE----- -# Issuer: CN=Class 2 Primary CA O=Certplus -# Subject: CN=Class 2 Primary CA O=Certplus -# Label: "Certplus Class 2 Primary CA" -# Serial: 177770208045934040241468760488327595043 -# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b -# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb -# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb ------BEGIN CERTIFICATE----- -MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw -PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz -cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9 -MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz -IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ -ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR -VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL -kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd 
-EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas -H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0 -HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud -DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4 -QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu -Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/ -AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8 -yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR -FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA -ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB -kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7 -l7+ijrRU ------END CERTIFICATE----- - # Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. # Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. # Label: "DST Root CA X3" @@ -1219,36 +1124,6 @@ t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== -----END CERTIFICATE----- -# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center -# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center -# Label: "Deutsche Telekom Root CA 2" -# Serial: 38 -# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08 -# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf -# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3 ------BEGIN CERTIFICATE----- -MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc -MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj -IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB -IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE -RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl -U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290 
-IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU -ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC -QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr -rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S -NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc -QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH -txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP -BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC -AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp -tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa -IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl -6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+ -xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU -Cm26OWMohpLzGITY+9HPBVZkVw== ------END CERTIFICATE----- - # Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc # Subject: CN=Cybertrust Global Root O=Cybertrust, Inc # Label: "Cybertrust Global Root" @@ -1559,47 +1434,6 @@ uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= -----END CERTIFICATE----- -# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden -# Label: "Staat der Nederlanden Root CA - G2" -# Serial: 10000012 -# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a -# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16 -# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f ------BEGIN CERTIFICATE----- -MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh -dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX 
-DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl -ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv -b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291 -qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp -uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU -Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE -pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp -5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M -UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN -GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy -5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv -6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK -eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6 -B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/ -BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov -L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV -HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG -SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS -CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen -5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897 -IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK -gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL -+63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL -vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm -bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk -N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC -Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z -ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ== ------END CERTIFICATE----- - # Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post # Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post # Label: 
"Hongkong Post Root CA 1" @@ -2200,6 +2034,45 @@ t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 -----END CERTIFICATE----- +# Issuer: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes +# Subject: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes +# Label: "EC-ACC" +# Serial: -23701579247955709139626555126524820479 +# MD5 Fingerprint: eb:f5:9d:29:0d:61:f9:42:1f:7c:c2:ba:6d:e3:15:09 +# SHA1 Fingerprint: 28:90:3a:63:5b:52:80:fa:e6:77:4c:0b:6d:a7:d6:ba:a6:4a:f2:e8 +# SHA256 Fingerprint: 88:49:7f:01:60:2f:31:54:24:6a:e2:8c:4d:5a:ef:10:f1:d8:7e:bb:76:62:6f:4a:e0:b7:f9:5b:a7:96:87:99 +-----BEGIN CERTIFICATE----- +MIIFVjCCBD6gAwIBAgIQ7is969Qh3hSoYqwE893EATANBgkqhkiG9w0BAQUFADCB +8zELMAkGA1UEBhMCRVMxOzA5BgNVBAoTMkFnZW5jaWEgQ2F0YWxhbmEgZGUgQ2Vy +dGlmaWNhY2lvIChOSUYgUS0wODAxMTc2LUkpMSgwJgYDVQQLEx9TZXJ2ZWlzIFB1 +YmxpY3MgZGUgQ2VydGlmaWNhY2lvMTUwMwYDVQQLEyxWZWdldSBodHRwczovL3d3 +dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAoYykwMzE1MDMGA1UECxMsSmVyYXJxdWlh +IEVudGl0YXRzIGRlIENlcnRpZmljYWNpbyBDYXRhbGFuZXMxDzANBgNVBAMTBkVD +LUFDQzAeFw0wMzAxMDcyMzAwMDBaFw0zMTAxMDcyMjU5NTlaMIHzMQswCQYDVQQG +EwJFUzE7MDkGA1UEChMyQWdlbmNpYSBDYXRhbGFuYSBkZSBDZXJ0aWZpY2FjaW8g +KE5JRiBRLTA4MDExNzYtSSkxKDAmBgNVBAsTH1NlcnZlaXMgUHVibGljcyBkZSBD +ZXJ0aWZpY2FjaW8xNTAzBgNVBAsTLFZlZ2V1IGh0dHBzOi8vd3d3LmNhdGNlcnQu +bmV0L3ZlcmFycmVsIChjKTAzMTUwMwYDVQQLEyxKZXJhcnF1aWEgRW50aXRhdHMg +ZGUgQ2VydGlmaWNhY2lvIENhdGFsYW5lczEPMA0GA1UEAxMGRUMtQUNDMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsyLHT+KXQpWIR4NA9h0X84NzJB5R +85iKw5K4/0CQBXCHYMkAqbWUZRkiFRfCQ2xmRJoNBD45b6VLeqpjt4pEndljkYRm +4CgPukLjbo73FCeTae6RDqNfDrHrZqJyTxIThmV6PttPB/SnCWDaOkKZx7J/sxaV +HMf5NLWUhdWZXqBIoH7nF2W4onW4HvPlQn2v7fOKSGRdghST2MDk/7NQcvJ29rNd 
+QlB50JQ+awwAvthrDk4q7D7SzIKiGGUzE3eeml0aE9jD2z3Il3rucO2n5nzbcc8t +lGLfbdb1OL4/pYUKGbio2Al1QnDE6u/LDsg0qBIimAy4E5S2S+zw0JDnJwIDAQAB +o4HjMIHgMB0GA1UdEQQWMBSBEmVjX2FjY0BjYXRjZXJ0Lm5ldDAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUoMOLRKo3pUW/l4Ba0fF4 +opvpXY0wfwYDVR0gBHgwdjB0BgsrBgEEAfV4AQMBCjBlMCwGCCsGAQUFBwIBFiBo +dHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbDA1BggrBgEFBQcCAjApGidW +ZWdldSBodHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAwDQYJKoZIhvcN +AQEFBQADggEBAKBIW4IB9k1IuDlVNZyAelOZ1Vr/sXE7zDkJlF7W2u++AVtd0x7Y +/X1PzaBB4DSTv8vihpw3kpBWHNzrKQXlxJ7HNd+KDM3FIUPpqojlNcAZQmNaAl6k +SBg6hW/cnbw/nZzBh7h6YQjpdwt/cKt63dmXLGQehb+8dJahw3oS7AwaboMMPOhy +Rp/7SNVel+axofjk70YllJyJ22k4vuxcDlbHZVHlUIiIv0LVKz3l+bqeLrPK9HOS +Agu+TGbrIP65y7WZf+a2E/rKS03Z7lNGBjvGTq2TWoF+bCpLagVFjPIhpDGQh2xl +nJ2lYJU6Un/10asIbvPuW/mIPX64b24D5EI= +-----END CERTIFICATE----- + # Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority # Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. 
Authority # Label: "Hellenic Academic and Research Institutions RootCA 2011" @@ -3453,46 +3326,6 @@ AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ 5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su -----END CERTIFICATE----- -# Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 -# Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 -# Label: "Certinomis - Root CA" -# Serial: 1 -# MD5 Fingerprint: 14:0a:fd:8d:a8:28:b5:38:69:db:56:7e:61:22:03:3f -# SHA1 Fingerprint: 9d:70:bb:01:a5:a4:a0:18:11:2e:f7:1c:01:b9:32:c5:34:e7:88:a8 -# SHA256 Fingerprint: 2a:99:f5:bc:11:74:b7:3c:bb:1d:62:08:84:e0:1c:34:e5:1c:cb:39:78:da:12:5f:0e:33:26:88:83:bf:41:58 ------BEGIN CERTIFICATE----- -MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjET -MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAb -BgNVBAMTFENlcnRpbm9taXMgLSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMz -MTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMx -FzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRDZXJ0aW5vbWlzIC0g -Um9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQosP5L2 -fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJfl -LieY6pOod5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQV -WZUKxkd8aRi5pwP5ynapz8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDF -TKWrteoB4owuZH9kb/2jJZOLyKIOSY008B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb -5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09xRLWtwHkziOC/7aOgFLSc -CbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE6OXWk6Ri -wsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJ -wx3tFvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SG -m/lg0h9tkQPTYKbVPZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4 -F2iw4lNVYC2vPsKD2NkJK/DAZNuHi5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZng -WVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB -BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I6tNxIqSSaHh0 -2TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsF 
-AAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/ -0KGRHCwPT5iVWVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWw -F6YSjNRieOpWauwK0kDDPAUwPk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZS -g081lLMSVX3l4kLr5JyTCcBMWwerx20RoFAXlCOotQqSD7J6wWAsOMwaplv/8gzj -qh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJy29SWwNyhlCVCNSN -h4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9Iff/ -ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8V -btaw5BngDwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwj -Y/M50n92Uaf0yKHxDHYiI0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ -8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nMcyrDflOR1m749fPH0FFNjkulW+YZFzvW -gQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVrhkIGuUE= ------END CERTIFICATE----- - # Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed # Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed # Label: "OISTE WISeKey Global Root GB CA" @@ -3849,47 +3682,6 @@ CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW 1KyLa2tJElMzrdfkviT8tQp21KW8EA== -----END CERTIFICATE----- -# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A. -# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A. 
-# Label: "LuxTrust Global Root 2" -# Serial: 59914338225734147123941058376788110305822489521 -# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c -# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f -# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5 ------BEGIN CERTIFICATE----- -MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL -BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV -BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw -MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B -LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F -ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem -hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1 -EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn -Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4 -zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ -96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m -j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g -DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+ -8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j -X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH -hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB -KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0 -Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT -+Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL -BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9 -BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO -jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9 -loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c 
-qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+ -2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/ -JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre -zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf -LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+ -x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6 -oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr ------END CERTIFICATE----- - # Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM # Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM # Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" @@ -4510,3 +4302,319 @@ Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw 3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= -----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign Root CA - G1" +# Serial: 235931866688319308814040 +# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac +# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c +# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 +-----BEGIN CERTIFICATE----- +MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD +VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU +ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH +MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO +MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv 
+Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz +f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO +8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq +d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM +tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt +Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB +o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x +PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM +wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d +GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH +6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby +RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx +iN66zB+Afko= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign ECC Root CA - G3" +# Serial: 287880440101571086945156 +# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 +# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 +# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b +-----BEGIN CERTIFICATE----- +MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG +EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo +bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g +RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ +TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s +b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 
+WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS +fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB +zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq +hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB +CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD ++JbNR6iC8hZVdyR+EhCVBCyj +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Label: "emSign Root CA - C1" +# Serial: 825510296613316004955058 +# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 +# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 +# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f +-----BEGIN CERTIFICATE----- +MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG +A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg +SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v +dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ +BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ +HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH +3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH +GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c +xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 +aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq +TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 +/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 +kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG +YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT 
++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo +WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Label: "emSign ECC Root CA - C3" +# Serial: 582948710642506000014504 +# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 +# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 +# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 +-----BEGIN CERTIFICATE----- +MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG +EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx +IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND +IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci +MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti +sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O +BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB +Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c +3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J +0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Label: "Hongkong Post Root CA 3" +# Serial: 46170865288971385588281144162979347873371282084 +# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 +# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 +# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 +-----BEGIN CERTIFICATE----- +MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL 
+BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ +SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n +a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 +NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT +CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u +Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO +dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI +VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV +9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY +2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY +vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt +bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb +x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ +l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK +TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj +Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e +i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw +DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG +7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk +MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr +gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk +GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS +3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm +Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ +l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c +JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP +L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa +LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG +mpv0 +-----END 
CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G4" +# Serial: 289383649854506086828220374796556676440 +# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 +# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 +# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw +gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL +Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg +MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw +BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 +MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 +c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ +bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg +Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ +2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E +T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j +5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM +C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T +DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX +wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A +2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm +nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 
+dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl +N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj +c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS +5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS +Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr +hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ +B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI +AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw +H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ +b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk +2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol +IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk +5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY +n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD +VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV +UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq 
+hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm +gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 
+W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC +gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB +RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. 
+# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF 
+dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ +d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk +PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/certifi/core.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/certifi/core.py index 2d02ea44..8987449f 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/certifi/core.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/certifi/core.py @@ -1,20 +1,60 @@ -#!/usr/bin/env 
python # -*- coding: utf-8 -*- """ certifi.py ~~~~~~~~~~ -This module returns the installation location of cacert.pem. +This module returns the installation location of cacert.pem or its contents. """ import os +try: + from importlib.resources import path as get_path, read_text -def where(): - f = os.path.dirname(__file__) + _CACERT_CTX = None + _CACERT_PATH = None - return os.path.join(f, 'cacert.pem') + def where(): + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = get_path("pip._vendor.certifi", "cacert.pem") + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + return _CACERT_PATH -if __name__ == '__main__': - print(where()) + +except ImportError: + # This fallback will work for Python versions prior to 3.7 that lack the + # importlib.resources module but relies on the existing `where` function + # so won't address issues with environments like PyOxidizer that don't set + # __file__ on modules. 
+ def read_text(_module, _path, encoding="ascii"): + with open(where(), "r", encoding=encoding) as data: + return data.read() + + # If we don't have importlib.resources, then we will just do the old logic + # of assuming we're on the filesystem and munge the path directly. + def where(): + f = os.path.dirname(__file__) + + return os.path.join(f, "cacert.pem") + + +def contents(): + return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/colorama/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/colorama/__init__.py index 2a3bf471..34c263cc 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/colorama/__init__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/colorama/__init__.py @@ -3,4 +3,4 @@ from .initialise import init, deinit, reinit, colorama_text from .ansi import Fore, Back, Style, Cursor from .ansitowin32 import AnsiToWin32 -__version__ = '0.4.1' +__version__ = '0.4.3' diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/contextlib2.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/contextlib2.py new file mode 100644 index 00000000..3aae8f41 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/contextlib2.py @@ -0,0 +1,518 @@ +"""contextlib2 - backports and enhancements to the contextlib module""" + +import abc +import sys +import warnings +from collections import deque +from functools import wraps + +__all__ = ["contextmanager", "closing", "nullcontext", + "AbstractContextManager", + "ContextDecorator", "ExitStack", + "redirect_stdout", "redirect_stderr", "suppress"] + +# Backwards compatibility +__all__ += ["ContextStack"] + + +# Backport abc.ABC +if sys.version_info[:2] >= (3, 4): + _abc_ABC = abc.ABC +else: + _abc_ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()}) + + +# Backport 
classic class MRO +def _classic_mro(C, result): + if C in result: + return + result.append(C) + for B in C.__bases__: + _classic_mro(B, result) + return result + + +# Backport _collections_abc._check_methods +def _check_methods(C, *methods): + try: + mro = C.__mro__ + except AttributeError: + mro = tuple(_classic_mro(C, [])) + + for method in methods: + for B in mro: + if method in B.__dict__: + if B.__dict__[method] is None: + return NotImplemented + break + else: + return NotImplemented + return True + + +class AbstractContextManager(_abc_ABC): + """An abstract base class for context managers.""" + + def __enter__(self): + """Return `self` upon entering the runtime context.""" + return self + + @abc.abstractmethod + def __exit__(self, exc_type, exc_value, traceback): + """Raise any exception triggered within the runtime context.""" + return None + + @classmethod + def __subclasshook__(cls, C): + """Check whether subclass is considered a subclass of this ABC.""" + if cls is AbstractContextManager: + return _check_methods(C, "__enter__", "__exit__") + return NotImplemented + + +class ContextDecorator(object): + """A base class or mixin that enables context managers to work as decorators.""" + + def refresh_cm(self): + """Returns the context manager used to actually wrap the call to the + decorated function. + + The default implementation just returns *self*. + + Overriding this method allows otherwise one-shot context managers + like _GeneratorContextManager to support use as decorators via + implicit recreation. + + DEPRECATED: refresh_cm was never added to the standard library's + ContextDecorator API + """ + warnings.warn("refresh_cm was never added to the standard library", + DeprecationWarning) + return self._recreate_cm() + + def _recreate_cm(self): + """Return a recreated instance of self. + + Allows an otherwise one-shot context manager like + _GeneratorContextManager to support use as + a decorator via implicit recreation. 
+ + This is a private interface just for _GeneratorContextManager. + See issue #11647 for details. + """ + return self + + def __call__(self, func): + @wraps(func) + def inner(*args, **kwds): + with self._recreate_cm(): + return func(*args, **kwds) + return inner + + +class _GeneratorContextManager(ContextDecorator): + """Helper for @contextmanager decorator.""" + + def __init__(self, func, args, kwds): + self.gen = func(*args, **kwds) + self.func, self.args, self.kwds = func, args, kwds + # Issue 19330: ensure context manager instances have good docstrings + doc = getattr(func, "__doc__", None) + if doc is None: + doc = type(self).__doc__ + self.__doc__ = doc + # Unfortunately, this still doesn't provide good help output when + # inspecting the created context manager instances, since pydoc + # currently bypasses the instance docstring and shows the docstring + # for the class instead. + # See http://bugs.python.org/issue19404 for more details. + + def _recreate_cm(self): + # _GCM instances are one-shot context managers, so the + # CM must be recreated each time a decorated function is + # called + return self.__class__(self.func, self.args, self.kwds) + + def __enter__(self): + try: + return next(self.gen) + except StopIteration: + raise RuntimeError("generator didn't yield") + + def __exit__(self, type, value, traceback): + if type is None: + try: + next(self.gen) + except StopIteration: + return + else: + raise RuntimeError("generator didn't stop") + else: + if value is None: + # Need to force instantiation so we can reliably + # tell if we get the same exception back + value = type() + try: + self.gen.throw(type, value, traceback) + raise RuntimeError("generator didn't stop after throw()") + except StopIteration as exc: + # Suppress StopIteration *unless* it's the same exception that + # was passed to throw(). This prevents a StopIteration + # raised inside the "with" statement from being suppressed. 
+ return exc is not value + except RuntimeError as exc: + # Don't re-raise the passed in exception + if exc is value: + return False + # Likewise, avoid suppressing if a StopIteration exception + # was passed to throw() and later wrapped into a RuntimeError + # (see PEP 479). + if _HAVE_EXCEPTION_CHAINING and exc.__cause__ is value: + return False + raise + except: + # only re-raise if it's *not* the exception that was + # passed to throw(), because __exit__() must not raise + # an exception unless __exit__() itself failed. But throw() + # has to raise the exception to signal propagation, so this + # fixes the impedance mismatch between the throw() protocol + # and the __exit__() protocol. + # + if sys.exc_info()[1] is not value: + raise + + +def contextmanager(func): + """@contextmanager decorator. + + Typical usage: + + @contextmanager + def some_generator(<arguments>): + <setup> + try: + yield <value> + finally: + <cleanup> + + This makes this: + + with some_generator(<arguments>) as <variable>: + <body> + + equivalent to this: + + <setup> + try: + <variable> = <value> + <body> + finally: + <cleanup> + + """ + @wraps(func) + def helper(*args, **kwds): + return _GeneratorContextManager(func, args, kwds) + return helper + + +class closing(object): + """Context to automatically close something at the end of a block. 
+ + Code like this: + + with closing(<module>.open(<arguments>)) as f: + <block> + + is equivalent to this: + + f = <module>.open(<arguments>) + try: + <block> + finally: + f.close() + + """ + def __init__(self, thing): + self.thing = thing + + def __enter__(self): + return self.thing + + def __exit__(self, *exc_info): + self.thing.close() + + +class _RedirectStream(object): + + _stream = None + + def __init__(self, new_target): + self._new_target = new_target + # We use a list of old targets to make this CM re-entrant + self._old_targets = [] + + def __enter__(self): + self._old_targets.append(getattr(sys, self._stream)) + setattr(sys, self._stream, self._new_target) + return self._new_target + + def __exit__(self, exctype, excinst, exctb): + setattr(sys, self._stream, self._old_targets.pop()) + + +class redirect_stdout(_RedirectStream): + """Context manager for temporarily redirecting stdout to another file. + + # How to send help() to stderr + with redirect_stdout(sys.stderr): + help(dir) + + # How to write help() to a file + with open('help.txt', 'w') as f: + with redirect_stdout(f): + help(pow) + """ + + _stream = "stdout" + + +class redirect_stderr(_RedirectStream): + """Context manager for temporarily redirecting stderr to another file.""" + + _stream = "stderr" + + +class suppress(object): + """Context manager to suppress specified exceptions + + After the exception is suppressed, execution proceeds with the next + statement following the with statement. + + with suppress(FileNotFoundError): + os.remove(somefile) + # Execution still resumes here if the file was already removed + """ + + def __init__(self, *exceptions): + self._exceptions = exceptions + + def __enter__(self): + pass + + def __exit__(self, exctype, excinst, exctb): + # Unlike isinstance and issubclass, CPython exception handling + # currently only looks at the concrete type hierarchy (ignoring + # the instance and subclass checking hooks). 
While Guido considers + # that a bug rather than a feature, it's a fairly hard one to fix + # due to various internal implementation details. suppress provides + # the simpler issubclass based semantics, rather than trying to + # exactly reproduce the limitations of the CPython interpreter. + # + # See http://bugs.python.org/issue12029 for more details + return exctype is not None and issubclass(exctype, self._exceptions) + + +# Context manipulation is Python 3 only +_HAVE_EXCEPTION_CHAINING = sys.version_info[0] >= 3 +if _HAVE_EXCEPTION_CHAINING: + def _make_context_fixer(frame_exc): + def _fix_exception_context(new_exc, old_exc): + # Context may not be correct, so find the end of the chain + while 1: + exc_context = new_exc.__context__ + if exc_context is old_exc: + # Context is already set correctly (see issue 20317) + return + if exc_context is None or exc_context is frame_exc: + break + new_exc = exc_context + # Change the end of the chain to point to the exception + # we expect it to reference + new_exc.__context__ = old_exc + return _fix_exception_context + + def _reraise_with_existing_context(exc_details): + try: + # bare "raise exc_details[1]" replaces our carefully + # set-up context + fixed_ctx = exc_details[1].__context__ + raise exc_details[1] + except BaseException: + exc_details[1].__context__ = fixed_ctx + raise +else: + # No exception context in Python 2 + def _make_context_fixer(frame_exc): + return lambda new_exc, old_exc: None + + # Use 3 argument raise in Python 2, + # but use exec to avoid SyntaxError in Python 3 + def _reraise_with_existing_context(exc_details): + exc_type, exc_value, exc_tb = exc_details + exec("raise exc_type, exc_value, exc_tb") + +# Handle old-style classes if they exist +try: + from types import InstanceType +except ImportError: + # Python 3 doesn't have old-style classes + _get_type = type +else: + # Need to handle old-style context managers on Python 2 + def _get_type(obj): + obj_type = type(obj) + if obj_type is 
InstanceType: + return obj.__class__ # Old-style class + return obj_type # New-style class + + +# Inspired by discussions on http://bugs.python.org/issue13585 +class ExitStack(object): + """Context manager for dynamic management of a stack of exit callbacks + + For example: + + with ExitStack() as stack: + files = [stack.enter_context(open(fname)) for fname in filenames] + # All opened files will automatically be closed at the end of + # the with statement, even if attempts to open files later + # in the list raise an exception + + """ + def __init__(self): + self._exit_callbacks = deque() + + def pop_all(self): + """Preserve the context stack by transferring it to a new instance""" + new_stack = type(self)() + new_stack._exit_callbacks = self._exit_callbacks + self._exit_callbacks = deque() + return new_stack + + def _push_cm_exit(self, cm, cm_exit): + """Helper to correctly register callbacks to __exit__ methods""" + def _exit_wrapper(*exc_details): + return cm_exit(cm, *exc_details) + _exit_wrapper.__self__ = cm + self.push(_exit_wrapper) + + def push(self, exit): + """Registers a callback with the standard __exit__ method signature + + Can suppress exceptions the same way __exit__ methods can. + + Also accepts any object with an __exit__ method (registering a call + to the method instead of the object itself) + """ + # We use an unbound method rather than a bound method to follow + # the standard lookup behaviour for special methods + _cb_type = _get_type(exit) + try: + exit_method = _cb_type.__exit__ + except AttributeError: + # Not a context manager, so assume its a callable + self._exit_callbacks.append(exit) + else: + self._push_cm_exit(exit, exit_method) + return exit # Allow use as a decorator + + def callback(self, callback, *args, **kwds): + """Registers an arbitrary callback and arguments. + + Cannot suppress exceptions. 
+ """ + def _exit_wrapper(exc_type, exc, tb): + callback(*args, **kwds) + # We changed the signature, so using @wraps is not appropriate, but + # setting __wrapped__ may still help with introspection + _exit_wrapper.__wrapped__ = callback + self.push(_exit_wrapper) + return callback # Allow use as a decorator + + def enter_context(self, cm): + """Enters the supplied context manager + + If successful, also pushes its __exit__ method as a callback and + returns the result of the __enter__ method. + """ + # We look up the special methods on the type to match the with statement + _cm_type = _get_type(cm) + _exit = _cm_type.__exit__ + result = _cm_type.__enter__(cm) + self._push_cm_exit(cm, _exit) + return result + + def close(self): + """Immediately unwind the context stack""" + self.__exit__(None, None, None) + + def __enter__(self): + return self + + def __exit__(self, *exc_details): + received_exc = exc_details[0] is not None + + # We manipulate the exception state so it behaves as though + # we were actually nesting multiple with statements + frame_exc = sys.exc_info()[1] + _fix_exception_context = _make_context_fixer(frame_exc) + + # Callbacks are invoked in LIFO order to match the behaviour of + # nested context managers + suppressed_exc = False + pending_raise = False + while self._exit_callbacks: + cb = self._exit_callbacks.pop() + try: + if cb(*exc_details): + suppressed_exc = True + pending_raise = False + exc_details = (None, None, None) + except: + new_exc_details = sys.exc_info() + # simulate the stack of exceptions by setting the context + _fix_exception_context(new_exc_details[1], exc_details[1]) + pending_raise = True + exc_details = new_exc_details + if pending_raise: + _reraise_with_existing_context(exc_details) + return received_exc and suppressed_exc + + +# Preserve backwards compatibility +class ContextStack(ExitStack): + """Backwards compatibility alias for ExitStack""" + + def __init__(self): + warnings.warn("ContextStack has been renamed to 
ExitStack", + DeprecationWarning) + super(ContextStack, self).__init__() + + def register_exit(self, callback): + return self.push(callback) + + def register(self, callback, *args, **kwds): + return self.callback(callback, *args, **kwds) + + def preserve(self): + return self.pop_all() + + +class nullcontext(AbstractContextManager): + """Context manager that does no additional processing. + Used as a stand-in for a normal context manager, when a particular + block of code is only sometimes used with a normal context manager: + cm = optional_cm if condition else nullcontext() + with cm: + # Perform operation, using optional_cm if condition is True + """ + + def __init__(self, enter_result=None): + self.enter_result = enter_result + + def __enter__(self): + return self.enter_result + + def __exit__(self, *excinfo): + pass diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/__init__.py index a786b4d3..63d916e3 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/__init__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/__init__.py @@ -1,12 +1,12 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2012-2017 Vinay Sajip. +# Copyright (C) 2012-2019 Vinay Sajip. # Licensed to the Python Software Foundation under a contributor agreement. # See LICENSE.txt and CONTRIBUTORS.txt. 
# import logging -__version__ = '0.2.8' +__version__ = '0.3.1' class DistlibException(Exception): pass diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/_backport/shutil.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/_backport/shutil.py index 159e49ee..10ed3625 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/_backport/shutil.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/_backport/shutil.py @@ -14,7 +14,10 @@ import sys import stat from os.path import abspath import fnmatch -import collections +try: + from collections.abc import Callable +except ImportError: + from collections import Callable import errno from . import tarfile @@ -528,7 +531,7 @@ def register_archive_format(name, function, extra_args=None, description=''): """ if extra_args is None: extra_args = [] - if not isinstance(function, collections.Callable): + if not isinstance(function, Callable): raise TypeError('The %s object is not callable' % function) if not isinstance(extra_args, (tuple, list)): raise TypeError('extra_args needs to be a sequence') @@ -621,7 +624,7 @@ def _check_unpack_options(extensions, function, extra_args): raise RegistryError(msg % (extension, existing_extensions[extension])) - if not isinstance(function, collections.Callable): + if not isinstance(function, Callable): raise TypeError('The registered function must be a callable') diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.py index 1df3aba1..b470a373 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.py @@ -119,11 +119,9 @@ def 
_expand_globals(config): #_expand_globals(_SCHEMES) - # FIXME don't rely on sys.version here, its format is an implementation detail - # of CPython, use sys.version_info or sys.hexversion -_PY_VERSION = sys.version.split()[0] -_PY_VERSION_SHORT = sys.version[:3] -_PY_VERSION_SHORT_NO_DOT = _PY_VERSION[0] + _PY_VERSION[2] +_PY_VERSION = '%s.%s.%s' % sys.version_info[:3] +_PY_VERSION_SHORT = '%s.%s' % sys.version_info[:2] +_PY_VERSION_SHORT_NO_DOT = '%s%s' % sys.version_info[:2] _PREFIX = os.path.normpath(sys.prefix) _EXEC_PREFIX = os.path.normpath(sys.exec_prefix) _CONFIG_VARS = None diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/compat.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/compat.py index ff328c8e..c316fd97 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/compat.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/compat.py @@ -319,7 +319,7 @@ except ImportError: # pragma: no cover try: callable = callable except NameError: # pragma: no cover - from collections import Callable + from collections.abc import Callable def callable(obj): return isinstance(obj, Callable) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/database.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/database.py index b13cdac9..0a90c300 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/database.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/database.py @@ -550,7 +550,7 @@ class InstalledDistribution(BaseInstalledDistribution): r = finder.find(WHEEL_METADATA_FILENAME) # Temporary - for legacy support if r is None: - r = finder.find('METADATA') + r = finder.find(LEGACY_METADATA_FILENAME) if r is None: raise ValueError('no %s found in %s' % (METADATA_FILENAME, path)) @@ -567,7 
+567,7 @@ class InstalledDistribution(BaseInstalledDistribution): p = os.path.join(path, 'top_level.txt') if os.path.exists(p): with open(p, 'rb') as f: - data = f.read() + data = f.read().decode('utf-8') self.modules = data.splitlines() def __repr__(self): diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/index.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/index.py index 2406be21..7a87cdcf 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/index.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/index.py @@ -22,7 +22,7 @@ from .util import cached_property, zip_dir, ServerProxy logger = logging.getLogger(__name__) -DEFAULT_INDEX = 'https://pypi.python.org/pypi' +DEFAULT_INDEX = 'https://pypi.org/pypi' DEFAULT_REALM = 'pypi' class PackageIndex(object): diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/locators.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/locators.py index 5c655c3e..12a1d063 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/locators.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/locators.py @@ -36,7 +36,7 @@ logger = logging.getLogger(__name__) HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)') CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I) HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml') -DEFAULT_INDEX = 'https://pypi.python.org/pypi' +DEFAULT_INDEX = 'https://pypi.org/pypi' def get_all_distribution_names(url=None): """ @@ -197,7 +197,7 @@ class Locator(object): is_downloadable = basename.endswith(self.downloadable_extensions) if is_wheel: compatible = is_compatible(Wheel(basename), self.wheel_tags) - return (t.scheme == 'https', 'pypi.python.org' in t.netloc, + return (t.scheme == 'https', 'pypi.org' in t.netloc, 
is_downloadable, is_wheel, compatible, basename) def prefer_url(self, url1, url2): @@ -304,18 +304,25 @@ class Locator(object): def _get_digest(self, info): """ - Get a digest from a dictionary by looking at keys of the form - 'algo_digest'. + Get a digest from a dictionary by looking at a "digests" dictionary + or keys of the form 'algo_digest'. Returns a 2-tuple (algo, digest) if found, else None. Currently looks only for SHA256, then MD5. """ result = None - for algo in ('sha256', 'md5'): - key = '%s_digest' % algo - if key in info: - result = (algo, info[key]) - break + if 'digests' in info: + digests = info['digests'] + for algo in ('sha256', 'md5'): + if algo in digests: + result = (algo, digests[algo]) + break + if not result: + for algo in ('sha256', 'md5'): + key = '%s_digest' % algo + if key in info: + result = (algo, info[key]) + break return result def _update_version_data(self, result, info): @@ -1049,7 +1056,7 @@ class AggregatingLocator(Locator): # versions which don't conform to PEP 426 / PEP 440. default_locator = AggregatingLocator( JSONLocator(), - SimpleScrapingLocator('https://pypi.python.org/simple/', + SimpleScrapingLocator('https://pypi.org/simple/', timeout=3.0), scheme='legacy') diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/metadata.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/metadata.py index 77eed7f9..6d5e2360 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/metadata.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/metadata.py @@ -5,7 +5,7 @@ # """Implementation of the Metadata for Python packages PEPs. -Supports all metadata formats (1.0, 1.1, 1.2, and 2.0 experimental). +Supports all metadata formats (1.0, 1.1, 1.2, 1.3/2.1 and withdrawn 2.0). 
""" from __future__ import unicode_literals @@ -91,9 +91,11 @@ _426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', _426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension') -# See issue #106: Sometimes 'Requires' occurs wrongly in the metadata. Include -# it in the tuple literal below to allow it (for now) -_566_FIELDS = _426_FIELDS + ('Description-Content-Type', 'Requires') +# See issue #106: Sometimes 'Requires' and 'Provides' occur wrongly in +# the metadata. Include them in the tuple literal below to allow them +# (for now). +_566_FIELDS = _426_FIELDS + ('Description-Content-Type', + 'Requires', 'Provides') _566_MARKERS = ('Description-Content-Type',) @@ -192,38 +194,12 @@ def _best_version(fields): return '2.0' +# This follows the rules about transforming keys as described in +# https://www.python.org/dev/peps/pep-0566/#id17 _ATTR2FIELD = { - 'metadata_version': 'Metadata-Version', - 'name': 'Name', - 'version': 'Version', - 'platform': 'Platform', - 'supported_platform': 'Supported-Platform', - 'summary': 'Summary', - 'description': 'Description', - 'keywords': 'Keywords', - 'home_page': 'Home-page', - 'author': 'Author', - 'author_email': 'Author-email', - 'maintainer': 'Maintainer', - 'maintainer_email': 'Maintainer-email', - 'license': 'License', - 'classifier': 'Classifier', - 'download_url': 'Download-URL', - 'obsoletes_dist': 'Obsoletes-Dist', - 'provides_dist': 'Provides-Dist', - 'requires_dist': 'Requires-Dist', - 'setup_requires_dist': 'Setup-Requires-Dist', - 'requires_python': 'Requires-Python', - 'requires_external': 'Requires-External', - 'requires': 'Requires', - 'provides': 'Provides', - 'obsoletes': 'Obsoletes', - 'project_url': 'Project-URL', - 'private_version': 'Private-Version', - 'obsoleted_by': 'Obsoleted-By', - 'extension': 'Extension', - 'provides_extra': 'Provides-Extra', + name.lower().replace("-", "_"): name for name in _ALL_FIELDS } +_FIELD2ATTR = {field: attr for attr, 
field in _ATTR2FIELD.items()} _PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist') _VERSIONS_FIELDS = ('Requires-Python',) @@ -260,7 +236,7 @@ def _get_name_and_version(name, version, for_filename=False): class LegacyMetadata(object): """The legacy metadata of a release. - Supports versions 1.0, 1.1 and 1.2 (auto-detected). You can + Supports versions 1.0, 1.1, 1.2, 2.0 and 1.3/2.1 (auto-detected). You can instantiate the class with one of these arguments (or none): - *path*, the path to a metadata file - *fileobj* give a file-like object with metadata as content @@ -379,6 +355,11 @@ class LegacyMetadata(object): value = msg[field] if value is not None and value != 'UNKNOWN': self.set(field, value) + + # PEP 566 specifies that the body be used for the description, if + # available + body = msg.get_payload() + self["Description"] = body if body else self["Description"] # logger.debug('Attempting to set metadata for %s', self) # self.set_metadata_version() @@ -565,57 +546,21 @@ class LegacyMetadata(object): Field names will be converted to use the underscore-lowercase style instead of hyphen-mixed case (i.e. home_page instead of Home-page). + This is as per https://www.python.org/dev/peps/pep-0566/#id17. 
""" self.set_metadata_version() - mapping_1_0 = ( - ('metadata_version', 'Metadata-Version'), - ('name', 'Name'), - ('version', 'Version'), - ('summary', 'Summary'), - ('home_page', 'Home-page'), - ('author', 'Author'), - ('author_email', 'Author-email'), - ('license', 'License'), - ('description', 'Description'), - ('keywords', 'Keywords'), - ('platform', 'Platform'), - ('classifiers', 'Classifier'), - ('download_url', 'Download-URL'), - ) + fields = _version2fieldlist(self['Metadata-Version']) data = {} - for key, field_name in mapping_1_0: + + for field_name in fields: if not skip_missing or field_name in self._fields: - data[key] = self[field_name] - - if self['Metadata-Version'] == '1.2': - mapping_1_2 = ( - ('requires_dist', 'Requires-Dist'), - ('requires_python', 'Requires-Python'), - ('requires_external', 'Requires-External'), - ('provides_dist', 'Provides-Dist'), - ('obsoletes_dist', 'Obsoletes-Dist'), - ('project_url', 'Project-URL'), - ('maintainer', 'Maintainer'), - ('maintainer_email', 'Maintainer-email'), - ) - for key, field_name in mapping_1_2: - if not skip_missing or field_name in self._fields: - if key != 'project_url': - data[key] = self[field_name] - else: - data[key] = [','.join(u) for u in self[field_name]] - - elif self['Metadata-Version'] == '1.1': - mapping_1_1 = ( - ('provides', 'Provides'), - ('requires', 'Requires'), - ('obsoletes', 'Obsoletes'), - ) - for key, field_name in mapping_1_1: - if not skip_missing or field_name in self._fields: + key = _FIELD2ATTR[field_name] + if key != 'project_url': data[key] = self[field_name] + else: + data[key] = [','.join(u) for u in self[field_name]] return data @@ -1001,10 +946,14 @@ class Metadata(object): LEGACY_MAPPING = { 'name': 'Name', 'version': 'Version', - 'license': 'License', + ('extensions', 'python.details', 'license'): 'License', 'summary': 'Summary', 'description': 'Description', - 'classifiers': 'Classifier', + ('extensions', 'python.project', 'project_urls', 'Home'): 'Home-page', + 
('extensions', 'python.project', 'contacts', 0, 'name'): 'Author', + ('extensions', 'python.project', 'contacts', 0, 'email'): 'Author-email', + 'source_url': 'Download-URL', + ('extensions', 'python.details', 'classifiers'): 'Classifier', } def _to_legacy(self): @@ -1032,16 +981,29 @@ class Metadata(object): assert self._data and not self._legacy result = LegacyMetadata() nmd = self._data + # import pdb; pdb.set_trace() for nk, ok in self.LEGACY_MAPPING.items(): - if nk in nmd: - result[ok] = nmd[nk] + if not isinstance(nk, tuple): + if nk in nmd: + result[ok] = nmd[nk] + else: + d = nmd + found = True + for k in nk: + try: + d = d[k] + except (KeyError, IndexError): + found = False + break + if found: + result[ok] = d r1 = process_entries(self.run_requires + self.meta_requires) r2 = process_entries(self.build_requires + self.dev_requires) if self.extras: result['Provides-Extra'] = sorted(self.extras) result['Requires-Dist'] = sorted(r1) result['Setup-Requires-Dist'] = sorted(r2) - # TODO: other fields such as contacts + # TODO: any other fields wanted return result def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True): diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/scripts.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/scripts.py index f598a413..03f8f21e 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/scripts.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/scripts.py @@ -39,31 +39,16 @@ _DEFAULT_MANIFEST = ''' # check if Python is called on the first line with this expression FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$') SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*- +import re +import sys +from %(module)s import %(import_name)s if __name__ == '__main__': - import sys, re - - def _resolve(module, func): - __import__(module) - mod = sys.modules[module] - parts = 
func.split('.') - result = getattr(mod, parts.pop(0)) - for p in parts: - result = getattr(result, p) - return result - - try: - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - - func = _resolve('%(module)s', '%(func)s') - rc = func() # None interpreted as 0 - except Exception as e: # only supporting Python >= 2.6 - sys.stderr.write('%%s\n' %% e) - rc = 1 - sys.exit(rc) + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(%(func)s()) ''' -def _enquote_executable(executable): +def enquote_executable(executable): if ' ' in executable: # make sure we quote only the executable in case of env # for example /usr/bin/env "/dir with spaces/bin/jython" @@ -78,6 +63,8 @@ def _enquote_executable(executable): executable = '"%s"' % executable return executable +# Keep the old name around (for now), as there is at least one project using it! +_enquote_executable = enquote_executable class ScriptMaker(object): """ @@ -103,6 +90,7 @@ class ScriptMaker(object): self._is_nt = os.name == 'nt' or ( os.name == 'java' and os._name == 'nt') + self.version_info = sys.version_info def _get_alternate_executable(self, executable, options): if options.get('gui', False) and self._is_nt: # pragma: no cover @@ -173,7 +161,7 @@ class ScriptMaker(object): executable = self.executable enquote = False # assume this will be taken care of elif not sysconfig.is_python_build(): - executable = os.path.join(os.path.basename(get_executable())) + executable = get_executable() elif in_venv(): # pragma: no cover executable = os.path.join(sysconfig.get_path('scripts'), 'python%s' % sysconfig.get_config_var('EXE')) @@ -187,12 +175,20 @@ class ScriptMaker(object): if sys.platform.startswith('java'): # pragma: no cover executable = self._fix_jython_executable(executable) - # Normalise case for Windows - executable = os.path.normcase(executable) + + # Normalise case for Windows - COMMENTED OUT + # executable = os.path.normcase(executable) + # N.B. 
The normalising operation above has been commented out: See + # issue #124. Although paths in Windows are generally case-insensitive, + # they aren't always. For example, a path containing a ẞ (which is a + # LATIN CAPITAL LETTER SHARP S - U+1E9E) is normcased to ß (which is a + # LATIN SMALL LETTER SHARP S' - U+00DF). The two are not considered by + # Windows as equivalent in path names. + # If the user didn't specify an executable, it may be necessary to # cater for executable paths with spaces (not uncommon on Windows) if enquote: - executable = _enquote_executable(executable) + executable = enquote_executable(executable) # Issue #51: don't use fsencode, since we later try to # check that the shebang is decodable using utf-8. executable = executable.encode('utf-8') @@ -225,6 +221,7 @@ class ScriptMaker(object): def _get_script_text(self, entry): return self.script_template % dict(module=entry.prefix, + import_name=entry.suffix.split('.')[0], func=entry.suffix) manifest = _DEFAULT_MANIFEST @@ -299,9 +296,10 @@ class ScriptMaker(object): if '' in self.variants: scriptnames.add(name) if 'X' in self.variants: - scriptnames.add('%s%s' % (name, sys.version[0])) + scriptnames.add('%s%s' % (name, self.version_info[0])) if 'X.Y' in self.variants: - scriptnames.add('%s-%s' % (name, sys.version[:3])) + scriptnames.add('%s-%s.%s' % (name, self.version_info[0], + self.version_info[1])) if options and options.get('gui', False): ext = 'pyw' else: @@ -381,8 +379,12 @@ class ScriptMaker(object): # Issue 31: don't hardcode an absolute package name, but # determine it relative to the current package distlib_package = __name__.rsplit('.', 1)[0] - result = finder(distlib_package).find(name).bytes - return result + resource = finder(distlib_package).find(name) + if not resource: + msg = ('Unable to find resource %s in package %s' % (name, + distlib_package)) + raise ValueError(msg) + return resource.bytes # Public API follows diff --git 
a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/t32.exe b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/t32.exe index a09d926872d84ae22a617dfe9ebb560d420b37de..8932a18e4596952373a38c60b81b7116d4ef9ee8 100644 GIT binary patch delta 28766 zcmd?Sdtg(=_AkDZz5=AAKnsPIw$K*JJ8hCSY1%XhEd^T)6k4i01!{~ahzX*Hl5)~2 zaZ?4y2R={`P*jcwdHVnhMG6Xn$Wav3dgS2LaI^?Q)yn>T)+8-@&i&r|`{Vw1i)+uC zHEXYVtywd(W_GMO9Z_>7Vs)OXbxr;l@2WSCEje=XrSuNC?S<U*qlkY}+9kb3gn8*r zBGjb6iSWRd%=F_3kDT0?{*<6=L;U7YSRlg5cg}T^OzA)V9EFlpqKc5bgMXinq*RgI zEU8u9R9z+MQ$*Jbk`$JOFb2@dhN_agoK#6txFqGYv&pJK-Sx35DUbMOtE5eiH3NES z8&uNDE_nHpBqgpHkkH$qQ%QE>==f`uq-m-(1NyO6l_tC2X^=@cWeBl<{7Zq<k&EZe zanF&Y?!%FSH>Ht)FW5+Ra@Q0Tgx=JsScN*N3ko1jMl_x+SNH3Fp*v6%MR2%wysW-e zwMMUbJUr5?k)#c^14m1S_x*+KzG{u+DXcGyaVQ>beO>!CMR6)&lH{baIQSijlH@xS z8%%IG^SX|fxJw6GgWbXRb)!VBOQpm~t#(N+Z8sd}ccbl&Hcpi<uDBQu5&xYNO9<~X z<E8QrxxXoj!?~2?N*!_!#{DnkhIPo5OEnb+rM{@$q1zwPFmklyn8a708BmhC26w_+ z4I|lO;i>lW3g=o#tuJghD9+hdu=%Uo{nf4fP6#p_wKnOVIN)3lj!8UOQ52WDLh-co zO9_%h;gxuHDLmn3(Ap?^C`fjOqdi@!GJYaflFH;(Zo!+rLn+oKcOSWRSYudWn=9g( z%KGZlmpA(hx%^XL`F?ajcsZXQ&E`d<c;^J=l%vT!SNJ`Uf)H{(y{;&7tD(VqP4{Gj z^Ieq6@44vUQ;A%7aaiFMR|I5Ng87*n!nqSQ@f{j2m!1zRZFWgRO1a{}f9<}nBz~=* zsK|R%;tDFV3YmFP?;{Bf`<s!$wKL@o{`2pOQYClW+YW;7P+FB$<B9OkXbnt}3wcC4 za(_T=XmCl;jduQWP@)jFtCu9nZ76S3LW2;r6@nn3er&Q5=w2nqyhReLtUcUO#n#6A zeh7s53tJVBx1A^V$D4(%m9u6FCGlO<mb*qO9*yCu-_yqb1>LNyL^t*?cRKS&2eq&q z$>mC%1}cUWcTwQp3a8YKy7r0#-Rt0KqE)ZRqN|<}st{|v?ipWS*yeaI2AtHjfs}IR zot-jKqZ=|kvBX(X&S#>Hphniip3{La!_^|Hb|i*!<t+Jdu)O7Rs}dKBlK2KmF11VL zlFCVxLy0S<5ztz9)uUD7?5!+0Dt=nwXew|QYQMNdBpVv+GiYS^&bO_&c&>989`e_w zok2`4)RdJ&wJR$8W786qI5SB+Q$IE>#^G9oV5!5^rA&#_x3b5g;-cbFF9+|%UXSYU z1*6j};sFdN*VvimU^iAcc-9Q6P^{mbmLZ$+z~;KDeCj~rJbnee+Z3KQO}w@fuT@sM zehwx&`DejIw@7?Cn0O_axPZSMOk6DzA4FmtUR62&(aSD1jZK>_LF1ITv8`;nCV@(3 zZSv$(lh~z-x5%hWs(&Jl!!}Kx<Ul{7x0JXvB-YoaHG}F<+-dR7ZIE5TU;LGnDuyWX z*tE2=jZU<v5|;vQ(9l99z+6fYk0%+_R>5cnqR#NaT56<&kW*O+dQ8wz9YY^XLQ~86 
z#h2{TI~m&1><3MndR-3|+hy2nBi<?FbI>7%qje3i5@k*YNRxOr#MJ*KUbiR8cPPFJ zJ=@Eraq!VSLMj`9?hRnLlsTUexkic7Ykf^>yg}^UiM?kJ_F|Vl6MjLxNzpwT-(hu? zCDQYqZcikoH-{438bd>sKN6`Gd{n!llwCXHJO;T5ic6Zd;yY4gm<k7f?D4LByizn( z^WI++CHT@IE#C7rs*$%#=PX8Q8NVw^=r+ocT;=;q(m;1Sczw2kWn^)jLVz9up#CTc z6?YI7$JS9C;`T=dL>-275W688aI?R3?d{b={taV~)>Y!d(FavN^FVjFD<u%&8t7{X zubkxIIZy&ocWqPM={7})MP~=n>KCa4Jt)h!F%1YC5R#6vc)s)RBF>;S+fUZK+yzY- zE;7ic__Tp;jeplUFa@HU>L|P&R3BH%Pg2Biw0>PP!YaS^ro+KmgMW#*e8$jFcb1x9 zuV{k)pjFoU$Z8s{R>~h*&#=3?CFh+Ki5D<AR#sNj*Ij|Na~zJ8hFvZ%b7`C{zmpOh zQ5dq}hO^eoB(Pn)v;7)7+%3)UH_XPOQo;UC@_Z*+Kh>Gh9Ub`uI?^SCyVSFSZM>{I z>#yzWoz{_@Ba(w@B^|=LhFzo1vNn2necc%p-BIpeM<FkR%8-GsQ>ZAbAjQG2TqXUm z2u9~#5=;v86{s%VZDkcgPxyD}92j!6`4Y^UURc?>ix}c!=5z4s;LIGP!K#&D{8ah8 zNdrB@Trs!Rk4te4ba1U2whRqXcNH06h>Lbuifmwsgcj9@&vJxB)YqMd(m|2~lIr(S zL~g2gsH<d+uR&eqtvl-ZdDy{^+vHYRl)JtdNimdEcM2&7K5p~ZouqygUH;#x@)23+ zm=GB+-a8wV7<MGAp5An+>)n}}az2(y-Kh;%M2j7|`DaY<LiH4(TgonK-3n)h4jNcq z#xpTF_zvmI_#hPgSj%f@b!#-Otvu?>jV(j^V6~ui#>8N8V^3l$qI*Ywg6XeG>J3B3 zm-J_wq6dtB3e&2;Py;I&&7Tpe@kk9E#cHL>V)dl(+Rj&8*Xs*g9Zr(ZcO;A99N2*A zCk7>Xst@blJ$}enU~a^O#OFjn;TmNHrmkG}XfQAJ;)9Xs>OIr1=N~4rTe_#IuO%{1 z_mN(dQBP%zMY^E@+y+G~L{O$U{u)+*p%RzUV(^F(xEWehtwrZz-Sf2{6Dc>-kVc8i zdCwGTMp{o|($$u8)c~sAd6e)8r1%acHWkvkLg8pR%y{Sd`9yE1$aj#CV3BE6>K=i5 zV3eCuvdHSs#_)nx#+UQHC<aZ#G^IHBQB3}o{>Yg#f^Sap&2N{=X<2~msUhybx>;_e z#o<*?yrr<+-HV^WD==_f`3WF$DT~u3PFQM2Ig2!M@dAH$x!CES7%q3WMmvX%mfSt% z=#(Lt%8M6yqWIu!@I}f+;ns+Z3?%lJ@lt2HhCeit{Y6(4-5-)et=7<weWY95b40Er z>Grfz4`M!W@cy}Ma*Qp*fyznco(IsRg)zF?71S!CyZUegG$SPA)4ptP%nC2Hgyf0m zt8D1VB>rh0*69k~;1?`tgvz?J?r^ygD;F%zZ#3LIvP*4sif52ZJJS-e(~MF_m+{q* zK@-rcfhb<u7nRcUA`!?<vt|Zj%lJZK;8~zGNm^uLXz<=Pqj9g4GFoapcI?=OhH|pP zu`?kfYOjRR6Nt1Pu8xuqZ=@7gIOZ}tm1^UrB3cJ|l3d~CeiNnfravV_xcW_-*=RWR z7Ksi-Y#@HWR_!~G;yW0=;+Gnz`?Bv{*Jm1foO&w;EEV#>18os<cwJ2)E9{wC1=6yQ zW*Hi0HuAY+B&jBU88wqTf{#L=m`EIt!N~l>a1gbywk{sH;`(fezrUaS(Xb;2eu?lM zh?&GUjFqGn6SYzeN+0OzJDh^Wr?ZuP)U(etH<IK+?TX@HDs-+;>uX4X{c@s}Ln?@M 
zoT!TRz~bb5AwGV^Y^~x|lwk)@a9=}=gHJ|5D`zwY-(Aqk@_VK9N&Pd8<e{C9u|>TO z+is^NOoxh3`r|w+UoxDnuwM4W@@kls;P4&*tYx?gwWq<EP{`b|lZ|Vs>_@P4@t3w) zKlJqBWQLlgq3BxvGwix7wY&k`QmVPHhcMIp>p1pnY<ku@dhw98(Vf^N9Xp18?&07q zv?c_pOOy2Fml)R#8o9JhK2vuVR%~S)n-teK`3kCo+%V93l!<ce4sI9a)_|5f;+Q`! zsrOLKYDYsu{$dO)S$%0MFK7GWCaa%~Vm*56qo2Z(JTwR_v20@R!KzZWr1wJg^b+=6 z?_0ZkaWmS{!M~iqCiLm2=3`h@AA6S)%2>;X_F{kUlM&@0)8^>KzUq^n3T{d{-m_Ea zBA|u_KDC!HLh*b)8x)_d+Q4SSrw?63Z*C@rv;G<9VHvA6*5AnNHDU~!Z^$^?gKdpZ z=-G%lpwd61{pvWa-LtTQkDb6ik5AHI%^HY!Yw0ki#1HU_$lj$eD_8AlHSSpcGM0~_ z`i%$bD?P7RW}Nq=R`68n6WEIw2bxb?!MmgIu%y_gY!<Wde_7I>@xHMAOQ|3sbGTQ5 zfvB<+YCPeFhNJAozP<X6z|6lVd~~2cSxJ|wzEFdw61S+8eb_fOx&@27P=Qv(u;2P7 zMmOA`%#C4b{oXJ{+@NfsP5Tk0Wj?g6981+=Ds(0Ga+X=))nWcw;n;hY^KYlJh=hDq zAsdsBqn@E-%Mx-E(G-~X<gf!5BCn3shu63tz=jGM+s1oOWv3Dh>Tjm7sKkk*R#PvW z!jgwfqz26oTN1|?fDkIC4k%4a8D#FGSZ!ij_oE<*rYOO(qmnHwNztipys#-+lGqC+ z@wzX7goIQR?fsHj1FB(ED=U3%ihH0^dVXn?T-dxaKG37Su-Q?@XCsjnmX6RR-FWi= zj3Zl7I&$(iK!Va*WExZ-Bd&N76;Hh4iM9T=WT<Q&Q&#Rwudz#f4dy@$4qqF#seB6< zu}t>N;4c94o3Z_luOLRIn%LjFIFve=J1I3jkW|<3K!3y0Dt~u+J%&#Lxk(yGSzhTT znqt;WSZXo%BXergSPZSueE4%P=TJJW4df#279Zqv5prM@L~GvRYyQz!9iw7jB}T=Z zz;IW=)Xk5PD$DsTCiZipzDq0FD+k{>iH+>vd-OKyrZ18IpStOO5JD~7FIcuff$WK? 
zyXx-hyRLFa_^zuxUr@iPF0;D+L%ift5*=s1+^NI9{2G{Wsa!TKfMF*+4%}aX*?gqM zG!F##9sHI#lC+VJL;>!cO;h^=UHIp-2=Uw;d<prD7HbMC<@FNMD&@97hJ!y%sI_TH zKRL3>m$#ISNa~_~e<m|0rRq;og$~2T;)>!t8*JpFXb)O}>tqEE!{$z+bg`uRnaN4| zoL^^3QvD*8OI^+*e-OJVe-u_zO)$5=VZ{@iNqH+!nFezpp$uLatHgdvO3#}DS&bF+ z$Gd?6*_N7+ZLlM$qflvglw7UFTpq_)5sS!K4~*S1AuGCsNCPYV8u?P-T{bFt#CTs8 zwObocLj#rbVzi=zPeBu5SB+MR@SRWsJ>{ZA*ywH!{_;f>xGGVq^i^pkS_RB##LBRh znjx8OPBwQL2KkNUJinD)NKTvhNb4VZ=pq_ljVaeQLQFdR;b9$1lczIK2TWbASZ7QR zi25sStgl*3>(Qs4Zsl-=VR~KK%BH21cr&o{MW<<8iF^juh?V?!dWaCiO9Wx+d<+42 zWt8EfpEa%ozGf&gP~zOJc4_1DG}k{n587~^wBa9fLtGku7jf~4Xa`hW<Lbh1C4x)E zGpT&m!)`3eyyJ+rfs|^a6Ay=CnvOs*O~aF?wgmgLu-W1{?}>Eq6GJfKuHq3G2`8Pb zk-PXy&%qhFo^%S`HlNFd=lq4uzUuQ5@$Vx3f8Y^X0>iTK`><<n<i}9rmM5=cg&+p) zii;Uxs6rdoFaGE&^o(nzXa9@zG|{@opMM_Ds#S3@dJ3Y$MbN)l5F^fsApML8Mw~|Q z{9_Rsu*ikd#&$&I!tjj};;{W2wM4IL#sbOP6j6*s75;6Y)Q#<E#HNT=JiCNT4NVbO zfT>^~#QMWucLwilSc&zMzwR7RC2l44fgK{yu1}&)m9RZ=wIDj*#r95#s{!iECk6?f ztm5|)kCS8zr&=62%9erW#*$z|ymiN}f$ylI4nY8a!^Q4T8&-4-csaj2*dNdrI10M1 z_>jEkfzjBGpApFu|Ck&@$!aMu&=uvpD3V9DFrGF}efmz8nchn^k`<;87^NruoQ9@1 zoHZ!^>hmb!eWOr9%;_}to2_E?s;mEinqX5Adoq2Hci~7dz)e*9M34w|F>Wj8tG|c( zR*I#p9KP+UUF&me9p(HxK`sick=r0YFxO?~y-DZfXtC%Ge-lfpiO%(yQ#)P_t`JwD zQR*55&C?4R@ZbLh`H1&H<0ozlBp{O@C#^$4XOjjQZ}VYM-YAQD_%JR*mITYwjj>SZ z&<qe%wm+;W<r$g^-*1XLs*LBsR8;yllWs_^671E1z?;cExvY86n-=PB7xosxK86PZ z$>>4qW%No6%#=1HXq>RPrR}o}$8Y27|H(cW+&=+aq3)g;nyXAyEG6!VR;JF#Qw?MV z83{Sit{IIHW1(HH?v>=cPCbCd=8Q&8n^EKvxwkD98xqhOd2R=(J(aD=7&Vwy&-in$ zw%Db43T!E?E<A+(1!*z$fMLz>Qla9kIin1@8HWt$sr?2+aH1A_y08-lt(mtsvpa^| z+tc41B$o?vE`OkzeLp010L@D76z5tpI-kHW`3{A{lIrBC(Cx`e+}a{Ga%ewD9zQg_ z=jE^eEaKa*S<TR7B(5Kt-hG?3vw&2dPllH0UaFF$w`i=%V>Enc6&pEhq&nhjHgniK zuY#<|&15)gIAu_j3)teLA}(pNvpmlZKjr>cBuT6QrpgP_G?OR6@fpi^iD_Ru<Df?B zbPAO77mPF`oP<E(oLa^QehVYlkYa5W)2xp76mdJ^v$3mGBAQ2zPL-||X|*VtXJT1J zeO_HWI?Y!gy+f{CPf8hoJr&c@^A%H!De2fpRcRHP2dm=AWA3ZcNR=Hsbqor-JJPol zJN<sxtZVor6odV^wV!vz#iijPBO}hJgkxOxXtB|et7$1yU%v$l5?CF2?rWy!x_8C1 
zO*~JE=N0ihBc6N3^E95^hMwTpv7hqSJ&(9$9ZB&F<pGeyQpGFrM}eEykyqT^Rj%8H z&^LS0@3hXn(p8eIm))myd%7D=`PXqI`PY$$%)jmsJ?qcoS!MlDSNkS7UFzGyF8lTo zm*n5ofIzqE8PMxTH&^-Errod8?Kz;^Uy~uB37d7~J=X27^&Tun+!b}8E&Nu0-Fm!a zJ*ay)1+OV-b$!R%(fPi5svK&t7J<J`)UHm{x=z%$u1(OXzAfK%A;-f3A5U>l@bNHr zwvTJv>BAb`afs>ajv(1rPyCvecLnaHNO;S(&IDD<Cg3XsQw>zI<w-=ltRuFTM}SuO zei`Z+?rRDU_J!<i_wS>^!1?HRbxH}pwvLD`(?F3$Vee|_s@rq4eyrvsoOqO!Pt2RW zR5z)Gd}~7Tpg&8dRFd37NEYqT5+P&~{zsV<Z}}eU4XS?Icz1=b&FL=kwJmUC;eEfZ z?gWy8wUU6fG~fY`_yVt|PuS%U9XcKCx0a1a4YpHE%hR2BN{b8}PFP=Cwg-;&<!YlR z-gh`W^g3EA^!hw{rOgoJ$qMpprWRDZ=s}?xEq1{(&#SA=5!ju&+K~cTsH;sEh(}kO zD3GPP+Fk;grmO8r$Y#<Lv@0p|r21W?aZPon@gy6<m%%c(e1o-yXo-2c$3|gl(A75J z>1!*~J(d72_i`T}?T#7N=yg{@J2b-@b#-4yfZ;$&lh8Ag6x8ovJJ@~usD-8ab?pd* z3&_8&6{ughjCJ*oLwr*m@kEl=TtxJm&`do_49x>EC@H1oUEm#Q%=xC)yN+JN?BR)c zhuR1#a8<mT@&_HGymYnKU@I`yblcF3jjg|qh`!A<A|-hjMd03lGzRr}B(fIMbR@8# z`Oxidz+proEkWqSQB_o=h;mzKIL}a)ooqP^$v%!2{uw1lbBH;9*g;+GRTxuhp0~k3 z$3acuI4tZa-!5wPkX5SN1l)bo%DN*6>+81Q>5%KTuu0pKyZ`leNEhpHoqyL}wtRb9 z6iLFH?A7fVUXnsB-f{^G65nQuN-f_3|3f_;e9;^fzvU#cv@Oy-)`mB9was|?+UDz4 z?HBYrbdR>s@od|j?t6W0_qa2BZ3}gEFCi^h<eH{BGFs>p$vr2exN0w!wNvGs&Y{PD zm`(Gou68aOFjQ5ywo0xeO{0?`O%U}*HVJbiihhL}P?!ySFb~);Nfv}_2c#M*>nmWC zqvVQkUz4Wg6GS|{t9<*;|2A4t?(obAY7fZ@OtJ_~S{~^a;R>O98<Da+N8Uxsvf}Uz zFqZRsunMU3*BxT2ow43Cfv!Or#EE=NPhi=Bto(JW@rpdI-Cy?vkWkksyVfG2+b`Nf z5mJL4=}>l&M%9bA%CIkp$4XfFR5>Z{qcIG(_KNRN9m(B4G$wGQOug+@;Jh)Vg_tWV zMb)Tg@zB^Hi{MD<<fyTZ@bn0E0;)46IHJm0BB64@novLapL`eD2GV@|K3(md7~G_t zt7wWRll~4~Y^o!UX@kN1r(T9ZL#r<Ut4lwv%3pU3T(aQxHB3VMBgAorglEe*Frl09 zhOVxZWMd2-NO8wgldu<Nr$*Amd-VX@wCfJl8K&JmAeGeUnaPfdGCq1Xdj5dkzit&s z@)`M8-!GQskzw`kGRN+WxwA0d;3%#!>)lh7dK#p%7`MRzcv#X0^2LZg-#Ma#={GD| zpzF;zS(MySIMRXB@uy+peL4-Ie)8?x%f8s1<E3hl^2lO38n%zphV3J&A1bCOTeP0E zKvK|Xx>ABx^A`RS4t;`iwo>U1uT#8V;n)Y<^1kQkDb@@1qTz6gMe5J<aCi`GWld$J zdsGk&_skh2i);e@T>7AJx$zeM#D9war*EGa-JZK-v684eyL^tkk6Pu1#Xv;awiQ*W z7bc~`aWjvYf#uSrJ#o?Jh*Ma6p7As5wRgBqP=ZCfD+gQ?_xAQuegC_z?_K%Ny88AV 
z0;lBerR*YvU;6|cn+2^>4lZH#U<{HtIwc+@KS?u>ELLIik(RqqJFbRnAmsJg{G+1O z?A!SpW;v;)2NEel^8c`|3R(_aADT6j_6P5MOitm|ui5R=6E?ea7?4_Emlgq*0U7}x zBF%-{A<#|QVm^;SEe`9SJg||@NaO>-9R;om)g5&wxS9>8n0|N{^&LMl>+rOm!2=~a z7r_Q1%a^~?%<dh2pL)Y!c42t3Dvd>s$cx9mL##cq#I5644LX!1;Y!^4R(9)%QuSAZ z*sCLksr$`hpN>dUAD_Xlj!0E+nZXi9Zc04+5tSZ}69?@kGyaX{c!pLdZ5vXCu(KnR z)%Jh0_L1=xIy}mTL#H3yA6tS&(XF^HrGXq(&PSc3{T-b!`NuhBlR!M@!MdrHm5v%- zc01Dj{nKdaef_#8sVRa`QgtP68-$r>w7U!8k@wA6^m`!Bcl~!y&p)zk1xw(p?|QqZ zC+_0N_HasM$3~@MKhioXTb02E>kV-`MX7YP<&axn$Z?=Y^<SuuQ=^R@(ig_Qor-#u zo`*;MuA_SFkL%yGXRxpJiFm(VUyNsA=G}v+V7j^~WIv85oqc@jL(xjH&PCKp$=|ax znfG`@&Co+Ga^?pc!oj1Cqapk=on><VCX`3{>}t^>wXW`aRP|~E9m}b7bzdUl!s#j8 z;+1s#Cyxo=Xe2clquFQ{p>|^)LPG;wZoamN>X<6IWK0b10u*L|Y!7geQjxDhO{=3H zDTz;9xiV!CrL>5Yd}`Y2m`7H{_Xo0aWyWAiyP$}E>FmB<qQ5#zKJgfpynjXpOEe6+ zMf7VKk0&CZ#5=}EAlx^<8Sa>7{uyYEzWi0Rsr2-8{m#qLWR*=3L_nq}d{C#$;0J{g ztTEiI8p6Ibl&ZJ1vf;*YB4S{#7?aiJk?e?ZVA1^_z-H{Gg%$5RofbLrLpu?HhoO%p z-qXv$%Wp6|b{jD$ySEXy?FQZO0n?k3y*)vZA3BD__iFGM*5u&IQ6ilT#V#SH>~27e zUjT|_jr`DQ#PT!HL+%92TFRO7e6j*6x|@>tJ}_2R)-QK76>4aGxWe0(M%+E*D3q}q z$_SU-T4GAI%xcFe?6~k7^c6(+HN_~U8c$O>Uwo8ZG8L$XvdrxGUaLu^UV(#z$E%^B ztu*kaXQ#!z4H5<>e~NDGke(<e=P>qU_DJt)XjY>Vw->>|aO{uE71JUd(I|26f+8+; zB`Bt`P!)1c5T%o$fue_JHf~q|emXShjm1GpC#Q1K7fmV329ihV-Qn{%gqM8LB8Qf3 z@b#3#X|;_C{rY*_v?%!*Y94ol1bZ}1Mbo!w@Fq6S24$Rod8~i=qQei-%6ZJ;hi-;m zjA<%z(pZ81KLu+Ttk)8!FcfQZ(pAIQ{+tB$ob&8#&HxRam@B3Q!&ta^h?R<ED*i)J zVa;(^P|n*(@!(NCNyVHPu3&N3rJP4Jfoix4BXF#hIn8O_GgRVv@DGUdYF%v`3MCie z<_Nh`_Z|kfTr@lwdo36<24l|$V|l^YqrsRx7^?}!#s*`HgR$aZY+f)nJ)c^{9f4fR z4zn0b-`Gy`sRXS?1s`;n<y%ILLKWSyE0onk5o+P1@I+Pcp_qLtJ1!w4q0Hs{qxV7L z8~(|jvkdV5q50|1ybCvC?QCe6)rfI-8ZF{niT<fjK6NgmaHE|X@Kfg^5fLi#snaPU zqG>;cyAvFeY$8qBNSfj9?aL=doWlF^iAlmDqrsE5fs}1yHYIH!4QYxHvdipn`4XFW zIjE!?L)xUPJ%9?3YO;HCV=IYYHjy%8X)5$JRJ>q6ZwwYMsGl2y#S=pLg>JOAch8mc zNxhv7Q%Oxk?w=_qH$q>&%1utbp)VkUJArg%IM$v)U15pV9QDdWY^v3$Zu*GH)^yeV z>`iM%e8yEs6Z(vo;+*k_Vho&aWtXf2RIRL6-r#<Ze5okqJaq<jMtAWHgF!0i7jI=# 
z@(d0#iSEA6)d>0HMNo1G>I#pM5=W#;K6wHT8<xVY>zs`T-vC=gTRP8_@(Sl0Pl0kA zGWSkH+WzTyRI-9aab92k6C`)mOm~8lFrE>TtaZ`NP8#Z<8X9nNk>Zqkg5k5TuxQrs zU$BW0!we2FOGGW&TvvMr-LSC+=p%(OFVXb_U$qvq-g?nHD<LPi@lZaM&-iDY#61mX zxMHd;Vhi)l-H=Lj5#OGl)@2!rxT5;}%1i9){P-@bM67xBrN?bU)U{b`m~D`{<x5s( zTROPmx*f|XyRWVH;vcA-a`^j)K<f1CwyT5_js0XB;w`|r6b?02y4omEU15AA5zt1^ z!5XY}knjTvyW)B~p1Rs>Fge>5v}kM?jQ&d0sDeLtiYz-WS?lWdf=gTp+y6F_%FC)Y z9yFpF;zHg#zrpH4a<cOn1mN&@?kU9^e9^5nG&tywPd-aMep)X{b`&db$<T7%gh+jO zAt|YRWY|Zpu#bOn9)g5npO-oK4$NS<J_fP-obQ6TpUQ>v=0SpvGI=&DyTW0(K<n8_ zTjPvLQAy54N+gCVf9zJ4Y>!jtwX$q`uKj)LZY<z6c*%i<)fs4h=kg?apIDxR@k5YB z?dFa{o9&?p)r{JVZL_DxZNp`nP<tyTiU{xZ_x23+`BoM?Iz5UGxlQguW*<ESr{Av8 zWBY?reb;-n2wZko3O|OH7XF7xz6kJrIJ&4Cl9s&`h?M&yo_N!ce$6NuG(KOx=DUzA zH{l8ij?BZ1bp2)4O^NDNt*rW{R7l@&)5zF7szsZRoDOx0+Xidh+{(VaX*hB97&DN{ zqr{zSWw~P#(pK+>?QW}tksEA@*o(q1Nk%(dgoOHsIGWcAB70zrsn7qE_Wqc(q1!59 z+}cnB#WUPesCEBuMD-dw*sHyj*hz~7f6V`eALR+7C1We5P0zm2d1Q<;r6-1>`!m>N zx|D$<nl&gY9nrYLX8Eg|t@(7rsCtTgO`Mik61QO?96Eu~IkI^SmgnMhKTN?|P>EfZ zA^1AaPdKVCD0Is5;M-uP)07{_#;XmVu`Y#qU0%dJ0-So!pUox~&K{7Bn1g>j8?<X@ zDia#Wg-hqAu}(XcGviZsqOkuclt@?a!<Z40>%+HM%<h>ou7KcJs2s;aE1_<cm4;I{ z`KPv1b_qojy<`^XBx#LEYwVb@dO`|LvQ@Y`=MKa9B7gNCw5m~Y+8kCAl1%0gYeifo zzxrMSy49EW^SdzKDo_9V@Spbr^P9yHlcM)E#Vc{avn98-$ag($*(We+vt~B>(!7Eb z_AhWrR?6i5)Gp=w>Fo;H2G#22TOO`*M^`AHRPc$oWCHJgrSGt2#q~48p}yw?=PWrA zdfSCKJz);zlN;$5X5JRsp%rQr<NG95iCcgy#F4WTP1l3hpGF^J%~O5eaMsd4C5tT` zmx|@g^W*xe1D~)x<5C9)OYZ4Y!K?S6&U`nvs=*U>Xhxu$-xk1pfBPP$D(;my2+|r| zUHOnu>)_A?#|q{3lsVHEC#iA6%Tb&bJzU73OA!3TDYl}xUv$I`%9c}XOL3Wcw~GB< zJTlq?dl=f~#-C!|(iAprd`wSsC$;5al9F+X9Vm?(_S+%ML;GNBLbpD~aRu94y7f`X ze+GtI9}aQr<F`ZX?ePiP4@0Tx{J_WToAJG(w}KG5<#Ef$Of_K?V-wQUt*z{>34^`C zgRbCnQhiC9X4nzmH@dI`2}ipfsBF3QPP%m;qDFO4$AT)3f+pc^0d8uhVwdVK#Mzg& za?4+FU*QC{m`xIhSOjWQ=<H(@Y}FRKJDTqlNdu7-__oY&b_*-J*^n^-eISmG+*)oE zI2{{B;}DRavOqUsII*$Kb+I>Y9;qIFEBoqZLp;n;AR=@fDm=v<>r$7MP2&G>vLPh{ z)%z-0X-P(Jy7_?H9&!AE6P@jyc!J$K(X39rlf6E14oTrX;N12#Cs!V5jtq;coF$~P 
zv|H}&dK(73F9jEHKVmQ5(ys)(i<5aKi2iuq2SKok{B7(rsg4uj-jol+##X@C;&iJ9 zFUV0axQ~?R;r>$L&c_w7;^i-nvk1qWqDO+UMF>L5&<kiFTR!+P_m{@t5@49Wkgm4z zOYpVGXJD#Y;jv2MHt~j*bfoAw+v@12etJGT;&@qorIjt3v{QQp!vSs9ReVmOpKMeA zIF~J&d|zLTFUidx>80EKfo{LQ8Btu4j^^Kfz}hF5sN?3c31vwcM!f7Vyn^?Q9T#O+ zViQ;CE4`>-%Ws9H#`-zt4Ys=M?LX&KKBY{(a}Ilb%J{ytU2sLr#6_1r{L3SqP4%xM zEP85s;z}}bzk!ScW8ZJrJqdg(BGlAtj<AVS`wXF`?#JswMAr=g(Le*I<VLizXkqr~ z)U<e8h%Z&}(EtqWDdYR*vJa-FNB@A$N2sH&HnH}peTUu-wnpS!#v|sD;S-vS5sezh z@z({r7{n8sSV4K3_CZjBw`G!=*uwI@wyp)VpwI@Ipr3R@O2acq)A8j*b&TYG$Kui6 z*PZL>t<Z&<?XKR_VA+FXj5U-G=z{uT8`5%h9{axB!MaWxs9yCm8#yfzPL!?B_G9y> zSqI$o8hOL8Z-JZncN|4r^$g>evD*k~%2V&N1Jlwq5ikptZ{vpGEzkAGJbw3c>3C*U z&K&;dbHM|h$AWNguYx%1fzD&Wsmph~TN$TT57M(+UPxd+-1bk^2=?*xdsL4y`;3R8 zg&(yUuQ>Q!!`UY@7OTG+#>UP3TAltK%b1m^#*zDNvqr1qN3s`Y4OOR&WFO2*R;P^I z5t!vtsjm%X6?5)~yc2UiRX2=duiWlYy~+B|9i{F&j!m39TpcK4%jVvqUiSh!K6g@q z?|I?A`_W^Uez?yrCAs0Y0~`T-0_Y3+_$Br~o<cu06+K9^JZ|~C&1TH&JGTv*Czdg^ z$R{<Vc@=Ys*1_{=PR2;Zod+7G;u<jKxC_Y`qxE!;Fy;5owZaxs(OkUHsiJ|mf+~9Q zE%wIR4i!BVBEE_+q-7;Q|D>Wntc_PCv8?%nqHnpuRq+<-qP7u~pe|NTW#(toSlZwE zq>jhD?JLx%eKw`iqw^IE^~AUe<VKzedL?ZSyeHY3XJey6N8Rj!zfB$R2%3&hz-$^k znilF}MKq)?U+rN({_VbmZCy~`;Aycee6)8pbkXgu)t~G#npM9sK>fbFWBnWb)ano3 zV()Idh<<zZO+)ya0q_Gizqva6crp<8_NBT`-@1dpgMBdjZTs!vd4$Z_@wob<&)LsA zgkEIq6nb&jPN5f1?@S@Rcx&f!Rk++A%+}pi$l;~ko7MR{*!Vru)n9LCuk0xbci{3m zywfS_`?kX+eTTa1cD8J9iu$HC?1jDahQvKaI#NGgn(nhp(^uG~M*%GW(_ih<Xu!jO z<w#?X&QF?qdTVDB+z%8DqIg>_V7Su6g+}Vfx8LBH+R4!fj*9n)+4}X8IR5Ajo=l)E zk0F^3VQ}O4jnzz7HMjeu60!ievFu6Y>aDD%%5-z^dcLl14VcBJvi<np*E>sd8_-~B zYLPCNYT<4E8mnr>Yw^S^iRJY-SoN<ze#fK3=Vf6_iFIqc99aQpmP5`bLtR$p!YNRh z@|nM|4b;&twS&KTioJcu0JT)YzPw{lciW>-nm_=HI@!cWvL4RNguz&6U^an=k#0p| zMv0(7F<+$5aK?MDV7-9UFpQef<rVmrbUpNuc5j~XzQ*u+<8mBzOP==?$ipCcp&qH} zgi5|txE|=nr3!U2G*sS(^&p%^$b>HyT!e}%qPP+r+|B*A1z*3A+bmAPb`{_L9JR75 z&U9Nhlp(moh(=mF4eF*@x6N$CbrmiRpvKk)E_7Z;!OdpR*q|kFJ2qf<Q*mdKcLFFb z6|OLjrX*a|bSIW`6X9?olt3aXB&J9*7_bu9RjwI>zW6WOP<a@}0SCoyT%|sT@(LRQ 
zt0JyB#yF4kMEx#<-f1c}A>u2<bmj*VS_ol*P>WWnJG)e4#r8~0aMi>Iw%a?|cm&Y@ zv>pRKL$QMH>8PtHips4m<NqMbPxnA|JZ1}vc{WkI3a<uj$*8gHSy!T3mf2p{Ed##7 zY73iuxF?;1){sM`7Ip9N377M&jXN^#8mo#yw@ID|{xOUUROS3gwroMXcSR56T<?xl z(Gj@kdsq0l3Vr~q58Ai+^GPSsEFTg(J`@ZX%DDqC)Y$SX>!+#Q1L@;o25#_1L7585 zu^pe9C^`>qKMtpYVV7&5N=FnSfEV~ll}MK48%(M+>OXafa^!sl8=-&JrRO)Xo9?bu z_ZrXE-TiI)$YK;7xkf({iQ%ObYFDa?lqjH&M2bitns+N^o9~%044I(fx4aYjR&9Sw zE;%Mv_K&sF-Xa`&xq)tnVmF?}`Yjx*KKucjy)aEfS3CW<=E+tq?5CD4u`LS+sWq3_ z$%XyZIUlee7p9ENdWibS(~eJXg$5V42k0|g!by8>qsxlrd!?As(tbQx{-Pv}h9Y<% zlE}P^G*}U=Tr@BnewM&|e4Eq;)s3ftg6qWizC11#xS=)ZaHTLt#OX^RB~E*ZH80AD z#jDpo^_%Fj<;+>g(wDvn&VhT!sLzaJbMC#VcPmD>Ki=0};hz8tu5|8S&)&NCSnuwg zNta&iNP6-`_T1tj>M2?5@Zy_IUs}-zAHvUz4;uz>H@KkB{v=)jtVxQ8!t)ZK_&{W~ zf&iEH^Aj(zqWfI*Il~`k-s>G}&97cSoAK><(lzI{ma1^Fwaw&>5F>W1oc-S;G&p;Q zWWdMH*OC<Rni%L;J%8h~Kx&erQKZ0nUmF|cj_-A^8N)qNK8e-;fnOu!*e78*ZgnSP z3H6|RM9h8UpeBcd^*2vn{?$6R$DOLqKf%tq<6}>e?jfboeGooBNy+B1-|o<Ox~N9{ zXrz>wZU^1Ra2*>7UxAQ%5F{%O+8@1+b_}a8Jnult+2hQ%WRm(`BU`himwLL9y}V?2 z?i<gAS{ffhw9HcBAVj&2ftgU@Qsb4OxMe8v)zu!vn(T1lb#=fdvWfYIY41-`-;vIS z-#-vFxaIx<c;0)z8PC`5Pgj?;vXAdi@+Lw-&{v&H`&XVtB_r{151QbY2w7WCyB6$( z>c^_WYf`Qw%Xr*=tZdov8~C#2k8Uspd(R)<T}CfW{T1IC(;r{;3=BG)7`!K0eMlGg zVd>kzczhYi?pYe8?FMR4&;I@lTd{Pu+VU*>c4?{(Ip4^3r-{WqFpt$du(q$J7e40~ zH!(M$_auk!B7N;n*Bvt-e7&oA18f==;pc8*7akm>_B_D4|HaVV4r=J)CNbXfmnG>$ z^2ErKwDM%FWfFd_U@ux%YAl=dPAe8Pfo=j}-~VNx7kWzHN9g$Cr#kF6c}eRUV7uXJ zgYE9gD>{?-uTR2efA!yOsN0W5^flW<Za&||7!;-BaF`Y#SBtdv3SAw!Ow0KM>?kYW zq3?Ur>9l8LIkyl61#u57=Och)%~~GXSMvTJu?4Rt4{ayw8e(q^;)4A=@ISJjKF)T5 zea9czU-<+3bN|6EA7|HJO;e9Ev9#B+)xS2f>938N_&mlsjsiQ*+BYttg_v*_DasxR zMjK9bj8Xr?gu}M`l#Im}umTX3_8n0fj#?rTVN&s3JWko!zgIrWcE4s)|MPzK@7Lz4 zjje3*>n3$K#{38nYd~;j3A^_CSZ3e)Y-F(YSkGnSDDk#s<9l~)fJ*+y$7SXBv$vO- z$+vN38P-DYvDwve>Sva)`>Sux*Fg1QQ7X9o0bv}j@Ejn}-pcy<=V6UeGsAIy_TFH% z;N?M<P%K*IMY)+$ZG$bgEgwMH&skp3yZiqC&i+;p+r2zV{Y4Y|booHzB5IAowldyz zX-7U6JM(#C(f=Z!FBY@m-m%_(i(xF6Xn0Q%YI#=?#&ER=WBIQrqp~EP|3qOn4~S?A 
z|5k))q6*5A4E~9T59c3>P|q7hXyosRFp=*Np_#uf!aTkaq3@7g@h-9RwSqR5uNGl3 zUn#;8UL!&WUn;^We31yJai<8U^VuSt&8LZQ9(Rb)$%{p}fZIj5h?_;|=6Vq><ryMe z##0b_sSvwsFgSTnBJZY!6OR(;N<!PwcFOKmgkBQpYC<mv^a(=G3Un=@Ck6UEq3;WH zJ)v(4bQ7U}7w8s3HwpAzTws&<GXifQ^ihExBGf0)BZMv$=rKa?1}e&Yl47$&0-ba4 zDFQu1=y-viBh)6)W<m`DJx^$cKsit^?=SF+MD8ij(}YF|w3X2Ih13F92)!iGHbO55 zw4Kni0+nD{_(_3=6Z*bDHH5wmRFqjuv8^H@hR_WQy(Bx9@TUYho={n!iG<b&G=)%) zK+_0y2{ePySppqS=oEqK2^}v`BcV2dnh7-sG>^~>f!bAQf8Jl<V~N~Tpv8nn0TqQV zq1Z3?P(dAp{vgmPgnlc~X@s5;=yXDl3v@Q22Z3U*29HeV{sEV1D&<IPgc+909p!cK z7r}!=Xnd-jCrA!nAEaOcu!ksr2~zad2xo|Lr=Tn`T1^WCrJPp~<<B|kYUu_Fe5KZi zv7z@egOXyb5yeFD!oQXrB*j`IN<vK0K}x(e!V#kUcDE>GqBUYlh|&_Iq*x=Sg(#l| zDQVV-=^@JdL5kTLu_#2@5v1f{ZWEL#4!$8svRfmThL|1;QpQ>%mW3$QK}xYTq9#PS zOHh`SSWUHpQqC)hB79%WQae{W3@i8G%zt7f+fqAJ{qst8y7n^zrc_)Tt0?z<r}JIV zS+CEW<%wu<>T$8RDGZI}i9OJw_Wd)=_lsunkjtBuv?rzQ?5n!oNwIHY?oX?a*>0Cs zBYXnU)oFM*0PZ(g&xZ!9!D)Rcje<E3C8kT@gzw-Q($53V0Zs#s0S*D)1?+s2z4TCV zEOE*TNsb4k0nFRk^@rv~Kfi+#u6>UDN_Mb?5BKf89*Kxi!s>0nPXpSwvlkvtRzJUk zz4vei1uYNj(}@GQI}l6*%mz3CivUXjH2}7Q*;W?E1~b_T!cM>;z?mKF*_Gw2#y>`z zf%OnSUkiuN)D7%}Kc(mxKGEU7gV5--??hR|JiIXjb8V+{bK`KtHtOkNq=y-g&;X!S zIX(a!%}kFBkE6ukRTVnCE~cE!wQR{FL;H8ZW`MpuI)n4KSn_w%XxmR>Hw?G3eUA*9 z@5bR_$luflXyC2=By9sd0=@TB-W%u<=(?YB-$0M1!t;<qr}}h#0AIY}1BL2l94UVa zs->{m6NSUm^PBOl1rDt+EFSwiE0!%I?C@jZi#a)ZERe8hidss~Rri%k&)s!YD_2bS zkL{sR3eN?;<cHU>H{_A(lsDK{@^IUCf2BP#{j>#Yz0r;m+-QW2V{b(98$ZEw_X@>C zhwyYO0*CI!s|Kj+hOxU=4fHCnLbK^)qIdlmEbSND@F7Y&ZwXP|qYfx)N4;@xs4s*J zd@q|Xu2_6Vzs?h0u6(w!7P3(i@=<o|2VMBd8$MOYq1!^izNGgBGkjJNuJ~y)2R7o^ zjDENP*9D!_ORHBvAGY8EgSeiA^>8~|lh{YI734<#@CH^|pXe29nzS2>P~=>OHped! 
zlvlu&z+15Q@Xf!75ABN46zC4_sdnx}<KmZ-E<x+@k-n~$CJ8zJG`h>V1xfHd(cSJ} z$)<@5Bo5qZSxX5UNr>_p#cM^ptfZ6R6a+^{eB(4=*sI4l9YK+JY2y^oHqb*Z<-yLd z9&nGdL=&&O037(b(#mE&YRM+K)<W(c0wSagR8IN}F9PYifpVY}T`)zXcrJ=WTx+qi z<Bz6h)XwQ#hS6!!pT3j52NPk?TJ`j@_{WOWiC#AQF^l$_<sJ08<!tj~KK10~Z0PDi zDX@7n8aL4Qto~?v;#0FeU=0n4x2xHr)xEuzdC;2~xDq7%k2vGPN5v>ZC5}r=D*}4- zud0ZCwhWuf(9f5oQPb0fqK=>7u$S|Hy@Xd#!~VYO%}YKk=l`HMUt3MNY0(uy=M;Y! z1Q~7t@`K~olMw5TA3O>ClP~;JLUJ`rSd%^=_YbcJM(h4w8iOCYx@4K5ad)F)ZXd+v ztVvBdyNjkzEqTg2J{8}49xhv+g-evQ*V?qkIBeH4I2FwNIRv5V+kleQkRzce!o_yI ztUi%$B7{9|Fjr7&ip2A7yzup3;A!m-?yF<XIN;OK%^JEl^h~VSV3+e%xRl54dAyIh zVJ&<3@&4-Jm)OgXC#f?ovBQtw9e#o%HHQrfFC%0QO9>xENF5tRPMY0pu=-3Z`x?J~ zh`{{BZ1vVu_UseG)ZSFq^u(7$eEG>U6x2NR5xwAedKAT0JRPgHx3Z_7E}+<%r~6av z+S7v)XJ9uJIsljt)UOW|D7(g@g#6AW%(6Buy1!t6^Go9COPFhIa(bYGRL>RTTq&$9 z(mNf9EuLU=eB}AVS;N5)@fC!jtR?{cPj8_*u?N#UlOA1qgKesZ4ja4&KnXes=k>r= zj<ViDS<eGKlyxtz?tG1UYXi#r2bX0}{;j(+^M`;2zPND~hk<%I=oFBnu=S0^)>q~J zsEkjGL1wT@Hk^iD1zF|JEaPW!d6E3EdMG!HXt$t_l0OsITF=8_ejfL>xF1>Iny_EP z7r2FwlY<bff*gbM5=*@EZ&(9jyKYlF-RY!6Cx5uDfKUD#^FKRKoTg13uoLaf-S>&3 zk>5inY4Z^cDW>K=Ht@N0&HZq=|JO+xd+zo>otf0X3jx716Ahi2#5|v>)_uIg^nAQJ zy!uP1*Q6Kvsu#Y>7QB$6E`60f`oey3me!Y5y*O3<ukLK$i+?&$JMcMktiPv=`E!&C zHgp1;ICCItS-(fU{0a8b262Y+j}4)-wSmLvhZ6t%O-RkYA3^K<B&35OWh4vCO4(8O zQn4z0@)?xs<Bdzi8Qc(d@1|wyi!ZSsH{GUw>?KyRdAzz?7xwh#RqEXvSo+J!>T5T% zqL=3m`EWd`CYCBsAHhxf!?;Ng7}#W&?g2aqXaM|#H1_q&Mcxf+jK~xCQI{r94?gvd z4h4~$<j(>V#5qa(B>XrBdYcaC=%k_vpCSvR1QXW9!N=j9%D{>L$*=c2_UQ6dT=B<f ziFF<!l^u&W$nQAgoPUNF6PWPVB=U<@!Ey63!r%~*8|6=eT71KK(~lnt@x9u~$4h_) zK9=dHeCqRr7Yx^j#L=Lv)9o+G4EKGbavyf^BvkfKzW79zx}|^gxEqpgsbZyDk_PSu zMf@Jct@K84(ILKq4SsyU*VnW9Ew+*66>uw62Hi^cO%nNwj5`XU!{MYaue;(giK}CC z-bzh0(u5G@PmDA)+!pA~r_b-GET1=@t$!<JIF{4*mC5vrLGmTk;g5F_g69X>zC69+ z)(eRj-%9cZCHl0HxUL$A<DX)V3R#%1fWkX`M7d$@L#-Xp4<e0>_A8z2;QN!CtRH_N z#N+Ga;e&t%{t>Kc(BD{0-p6MAmWu|);kX=*kF>F@;ndL^z>FWy5qH<bEw+veY@KUI zT=AKy@tANy=Y`em<F|X$eYKXihm(W3eymvwt#d-_==Z{Ihpa}!S%?Gk?8l&UolC+0 
zcHY4gN$-Yk7R5U?u_(>ixu`LIXNP>6=Um-*772m*(vYp3gM|@j%|!E8vnSr^(F<k3 zPubPJ1uKe=9OUqQnC|wzla%x{@fgmI!Ev5%IetYqLENgUFQOb#{44LI=1`%p7NI>7 zaAH8E@Ex`Xd~)>}+0%^T&wP}M@2fs1c`h6V8~pdfj3j>IyD9x#k023;L(LdobLzDq zV@aYCw**(dOTVRa_}xKW*WHfRKzX5uSKmDtWrk91Tu{i~+h<Q`g|j}m@Oc85d^msw zNj&+%dKR*N`!nJmhmRLa0KOTip`U?zb~dZnpQL?|-W*836PGlbt=L~G4n+1R(k%JI z{$zF2TBdu?(t8FKz7jvoSBT#aS>6pR=XgH;Q8x3vM0MUJw(Pz1?sQNXNUJ~!O#cq{ z_xA?%B+`L4^|dp!ojyD)VnG)B?!95LhaW--TTxq2JJwG0^Bhep*q{bmv^<jv*PABX zku%x74Sm)3jb^JF62_4rY<}_dbmiHYJ~j%<Vb4<W`yIHy;Wpq`Jx1{rJCP>NIG@`= z7Wx7Fx{5&D6M5K7_CrI;sJc4TWFs9#;f*i+dQ0$!W_r28xl1GkMZ4eg>t&p_vit+1 z)MH1pdkzfFux-byVT}%+3r0FmvtD=Kiai8vBL;@jD)^&iY~O()L#9Ikpl3Q?=@o^y zPw!~<3jU@YQY@Y}H!MUu-)LvLgCqM;rd$uP=p>q0Ou<UBDF;VHiL{HgY~{hEzJ#}Y zejNvX^v9pN4SbMtxq!WQFiBl%XI~%mBqdyd1t{d=2*W@1A~~BY>9Q4djdGbSZp=!V zg3>{A=OMuFLZh60xEZoSKUp*4Hg>4dpw@oHerX(;9O9h~Uc=dzhp2mw+=`N#BRt*s z>$kF^L+R@9%k1t$iKC{0=h_+inc(7Q@FT#f2mMp8&|QxqyaaCq&vN(_pur_B+j}TU z{mf77%%Krb@yPezw3#bARp9J2Qt4?>o#^S<6Zr8PsiB=;=nk7>4OO$_4qIQ|K8?NF zl%W248av!HHe+!Cir9c3H^2?GDuH+2Q2X@&dH{9C4Yk{UVp&H<smuP!<{mNh#i0A+ z4Yjy3TE9->zqPWz9~r7X`wcsLBu(Q|^K}nGz>ZJfk5GB(o;1d+#?_mz16p}CVm<Z| zYIhI(NG?{_c5H@UnIgIsSQfn3I69|I9sLq_9Iq_ztZw^MaFH?o(@w&H2JNV|FMk_E zU&p~Gxg0l8#K-0~_0!tLip>4bD%g4U`;or3eYlAf`eCA@7E)C@fcnjy4AD4@k1G-7 zK?8)#O#yB25J-*e#g29cjt-8x(}s7YJtUnyZKRAEgV{lov~;`?1>M2%)Rm(*g{^J1 z?-@N;^_|LlC{87v0-Oi@1km<YNlAb#Krvt*pa$?bU<+VB;0r)2AiR%CiUp(r@&FS7 zKjB05X240nZon4Mb^;mzX8^4LH9VcMfT8j5LE3=a3RntQ1=tBV1UL<72K)f{70|OU zUH}*YcEDu7Ouz!bgMeDVTEG^-e!xk<IlvEqc0l)jDrpd4B)|rk0GQs-tCE%gSq*p@ z@Gjs}zy&}%pf_re1~39{0!#+X11trs1Z)Ca2hd*)YUTp00{rLSzGS=fqQfrD-D8)Y z{|sCDHTED2zEqGJzQuws#m)|>`zM8d>0ZKGMk)oY)uC3u;AiF0oyDVeY(K7!_RhX; zr(Z*mGzjVP=eFPN63KBQ#2!G3L5Piqw5{DP^&~pNUPMP2k8u5OxZ{P;fe;%?srgsC zlz?y%LfSZ`AWS0qFLo&zVLZYVgxlKebR}e%)NmD!G!WJ!d;noB!UqvL5w;_ojZi^o zzq(`Q*%MvO(j3VRSO6*WrN#KS5Rf9>DcytT0;G6=&Bc0+QgcB|m+qq)?HF<)M>Q3K zq^V$-Bi#<>`ABv{Knh-1j2MX~+9Dx#F7W&CF9jm+6B*nAd@uwqKzwjWIB-_?-7W0Q 
zcWwRi{;L49q<MIAv1F5`?>Npg)F}f;ORYNizfg|JWn;@GUv7ALk84Eninb|Jvfq*3 zR<XE%mD*YSt+t?{aDr#wux3DSwkQxEwl=}e?h5o*Kc2`Q2_&lqvDfe%%svPttA~n| zu(w78!{MWXocHM2gp2(m-_Nv5pJy`H#iX#y*};@2O4w_NRyl&v@5|Wt7n5{x_!<ba z3pKfvHr394yx6z<XXQ6&-<C7mzx%0|-bS+2V{RuIy5n>0qQLZcX0Dwr4D?ky<`ai1 zmZ^S7R`2i%_U~5{yE^|xWYsS+upVO96WVPyT%hv+_+thC?i*IRJ6Na>_K5d(HHzTT zL!!hDr$l0T>_5Z{oy9#8Dt1TlKmRnCPEzB5FQD_0(BE_#Sgn$@6K1O8?|IOQFQKXb zUpHj%693;fWbCKbgE2EzMGF_-J;z--|GxX?+%dm`ZNK!zC|PpMaXTl^zkA`5`9*gw zm|wPN{@e<wP*vt$jQ_@0NEm9wF=pZ2x8Hfs{2fpJbXNpBsc!Gu01qquH5BdW+Lo+Q zr9C-Rb=_P1vt1esumkb{W`Gf(2V?+J0Pz4VKmxQ~u}c>L9B>}c3^)fk12_#h2{;Bg z0yqR{0K5y>3D^Qy4_FHz{?%9PUTGze8h{(H05BUc1>gXb0PKL_fD{0J4NdCMz<9)C z02+V<XuE8eS^*qz25<zh7qAJi8c+jR0GJ1u4wwQcxjY}W2a*Rc0)_+90EvKDfELjH z6NCcJ0Zsyl{}95xfGvP00JVTcfa!oKfD(Wi5CdqxWPkd-Fjd^s>(#39qlW}LfjWZz z=&Rm8{{5fhPY(@K4a}l*ypF#us?m}@a<sH9YP3Y(6w@c27~JA-YgiDk>556t#%g{I z-_sDLN>O`pl~mFL=;hbIloG*cX6pPie1qld{}1SR0YCL7{_iV#6fDL1e+0AS4gDGM zLK<H9pMYvgeNGu5-1Bs#>Yr-w?J9|5raljapug66Dk%f09r5@oEJrX?cf=d+!KZE! zH{uy?q^IAApL_=Km>cn>uYn)3JKk@8199Zj5my)geb295R9zk3GoW_WkJjI+ck5r* zAJU)KU(+XN4#^yysmXF?J&^Tbme!DH7-A?h%rY!AY&N`YIBfXZaM7SPCL6PjrN%`@ zkMU{aR^w6Q1>?`gFjKN=h^fd_VY<iUHZ3<jVtU5(f@zy+zp2sGCwpjiWp-osN7+AS zN9X8s{5enO?DyvUnA4V{<iwgsna7wPG{0be!+hC1$g;rVwLEUwXgOrzmhQRF=f0i$ zS?+ha^Q^t{-pJdTcR0_UUz|T9-<j{XWfjaRxT9c6!LEY+1!oGrE4WgC8l_0m6a<n! 
zM&Cz2NS~!I(9hL-^gjJs{Y&~I`rq_jGkay`WL9N*3$s2m{Wbe)_V3xrIfHWE%sHAf z+Oo?sHMf6WQQn075Asjtf1Cefew1yF4eBP*Wde=9M!#9VUH_i`L;Yv^FZDm7#nhSO zGbd(F%X~cZ>C6{0H)ZzCO3q5p8kUurm6K)5D$FX$D$BYxYYtj;QP$F|nygh>Pi8%v zwLa@LZ`O{i{aGJmoyht+>wMPtS*=--hG;bS=f-ZPp{DVst)>&EOQv>HR`yNV3$m-T zAIV;my&-!?_TFqwPQRRAbHdF%%(s}!&2!CnnqM{_H2=%oX3n%sw7g_lmRpN<+mqYf zYO;>DmZ04l@~ZQzY#tl^I=E!Vc`gF)-!faXa*ZpDe>0viZp!X!>2DceNw*BKjId-{ zOqQ`0hvg~D^Olz_*4(AJkL4=Xn7j#j6?t>=7UnI@`)l6ryaRb3=AF;`Ay1W`o<BW* zP5!3*SX-v;L0gMWQ&3&tFL=J-je?^E9~XRH&{`llB&ir$-bJ6TAE`I$^Yu4*^^^2V z^&8NfhxI)(=VZQ``FmzV)(=@#h8n{f!v}_64Ju=_u@{sr-8j-{H0B$}8*edI7-tyo zG%hqQGd^ToWqiuG*Ld7`*=R6XOmj_lniiVsOzWU<`Ps9xUD+>Xzm|PD`<HB8PHawM z&cK`@Iiqs2bMkV==G>g)$nj3gnUiyO&f=W=bE<Pz<UE|SD(7+N<;ywm<{ZrVAm>ER zXF2C`&gZn`v_eV4&0WpC%m%Z?Jjonm>1|2246+nk_F3MyoUwchMg7$hp4%liJ$F=Y zVXh<BmFvl^&aKa7xtnsogz`pPW34068MCeGytur<dH3W!l(*WO_j=xrytne2@_x(H z<oC~?n15IPp8S91Yi)gP$+q#fh3KyrZRc$Pn^Z8aU{b-df+q^L7kpYE&6cE9=)XvP zyuL_(r+%US8EEz!`hV$v(EqA8WaecSXVzstn)z(zj?B}UUuK49^~ma-m6oMs4K$1} zI1DcuE*So8xMuKnF{X%ytuW3s-fxtRYmCoA7d|%jFwI9Z9>!Reb6U;U%n`YXx!ZHU z&2{D_<!9ud%Maw=j>3&Cm{{;b0hZ8GF$yzWKU4pZzB%)OtVBbuVK#op@Py%egOdGo zj^A?3(q`$K`@40jt<p9FqiMeFPTSqKd%d=oZ98nA*#2!xM*XB3>@6TDQ=gb=#BiEp z+-z#jPR+R`=iZ!KE$>^8T8~>lw*JHVx%EryH&!j$=^8Y8qiwtGXS7e(f`o!W1veEe zFW6epQgE5<z-nwU5OjmGRp}qq-<i2E^I+D;SzQdh3{zmx?lgEGG}IXiP35MUrg=hj z&YHe4{ccLnz88aS6$ab(>^<3EW=lEIIel{mKzU3#Gjpm?oCM2rmdM<`x#rvnxifMX z=KekRMDC{i!}&`7AX|~mVY|b&%Jz5LbSlJpY>*H*^>^#N`gQs@^?UTgGuLFU%Z$q! 
z<;^m~$UdL7JL|KoU$Sm8$c8-QO~zv5G~-<3lcuojq?|i)B=br01k2sX?oxi3EzUOB zX0gq(-D~sNp0vGUd&~Br?MvH_HdR5dg7ktM%5a+`HdAlwFY3cHyTQ`MXC`K*z(g&~ ztj~NRb8Y7Hnd@Pswq$P2^lr=iHB-t8&(dURvleGnXRXb8F>6!SmaMH=$Fr_w4Ke5q zMuXXqXRsT_8j1}ihNleA8`c{(8MYY47#}p&80(D8xYoGN_>ytE@f+iJ#vhH@CacL~ zdeF4O^pL6E^tfrgNtN9-J0^Q=_A6-1{n>}JvvaH%eq}jJa{l7Ysljk-&FN)MGN+l1 z7+Mp|lg-o2x0{{j#pb2vznW$9Q|9N)n=miDWxitWZppB0&E1jvR<6z(XB}XjW?f`e ztTSPL2ITj!<=U7{YLKL{Xatr1tp4@PbD2M78nPy5{g^etFy7DzosTw-!L$`+dd+mi z^n=Me875+N_7=;Fx%#~O@?M0I+(_o}BsS0pbZERjnF*Q6nW>qBGlylqh$eeK^V`hi ztZ`7IR7195oMD0C70imK3_lqPjd#N!y#!0zjtoquF{TpJD$IXhm=u#X+n8OIeNXn* z?DaXDb9S3gn{T)HE&IHd<Ce#ApUmBm`&#b4-1c0JwU0H``jIs~FE{Vbyhrm^<+FU9 ztv7o61=~g2t^%nUYc3S*sQ#pWa@I5GtiNTwo%MOvHp6biJB9-&MsFkSbZ9uv%XDTg z$o&6SU|3?<VAx`~$MAsR5xX;XSM2Tp%ab>DU+n(avDi=FlE^5QlVYC(N;kmv3217% z$9{r6g8(B#1dy#^;9-zq&|$CwSZls8;4oA%)Bu(&8HOFe)N=w@vV1W7V<-Vk!x2U) zz-n!Q(E+0eMhw9EO97l@5`d-n0$?%lz?cD8%;=Z^4OEz}m&AC(iouS>j>C?}PQXsY zPQp&cPQgya&ce>d&cV*b&cn_Jn1Mp<BJ5)9GVF5f3hYYkD(q_P8thu^I_yBBPZewc D1!_B- delta 25633 zcmd_Sdt8*&_CNm2!);U$R8Z~=2#SI_%rL_Yb74$CG|@p6q!i5yO=M|0V^-Q|1LZhU z>ejT5njW-c9W~2)siBx)Y1&P#%&5$^ji=~jNJ>7x_ueyLo%8*Czpvlx_5JUQu4k{k z_Imbp?X}lldq1<}M$nQ^gH{@4?W;^9rmg>d+|WzwlNWcwxbue=e}#Bc(DKE9@bKNm zpYYJL_@4+rYk6z&=Lj#YS1f*x(_irOzvB-N^Kkq_Gi)r+Rmst-TIJ*4e)!Y-1pi-) zAj*2kx(h-x2%CJwmxmyX0JN~U#fOW65G2SLp(_*`5Dydt%`arMY(USs0W!hJ(ycOK zt94a!pU~Yh!O~3-R(>N0@vD;K`X0Q)vtvBIzZO9lBU_am+k2>e)<QdIxhL^pMK_aM zvc#U<Gk~@LG6AQ_05Pe11j=L#?E)DmCT5{5VLYO8gn#<8prE1iX3d^6Ll9o?i3C)I zzjoiAP2zpB)q%mmsQ8we-ef`Gfb7o-1i^J8vaYt{wj^0?nYSzgeJl*L28)$uMzX}v zYMB%zv{(dBafkX06}pjgfl-l(!v$*z^`Zc9)Hc}T4mBi_-vj&SmD|Q3le&TqwPc%? 
z0WSCA4)@|#`XIVPeag@jzUG*10ys*jLXxCvv0QR=(6VSjVByq$WO7iPv$PD<c19TJ zOL!*`jVMJ$9CE7Z!+7PMR;oq@t`iDFlRd%HS{+nTxA@avw|N%JGnxVl+F$atx+h-0 zwiG>Z6de#)MvwIn1b4v=PXP@Qgx1>??t*qh)8YX4;`Ywc4)x>JaF(%>ZqU=cS@Z@< zdjF0jd0N#ChMVDQ8*Fn?eYfMfm5yTMg6kOtH>!hBWCfUC@>PPBP0z>$0o8Gx$joqD z$#Aq}{Q88bZmSl`Sj)!x%0i>c=?oOavp*TvaPy+gxdsJT>GeNY^HS^b%~Iw4-kw#S zh(o)PP-W<44=FM<&Hu?8;4Wxoc~!>X{esqtsiztzdak>!hn7_jMHi{t+>Umdj9OGw zV2BdRY_=i81sfAwR6$-QMaeO&NU;}sA1k-*Mdoz{H>`Eef(|Qf;`O`X;lprGI2RMc z9mlA;f_Ce>C&0=E&a3!Sp4LC->4;>E<z+Mk!vzsBoOFEZ4N$igvij~1L6xR@PFC{z zmU&vGs7TbrH9)8qt7X+rp^W8am7?;Q*tOQSIYOl<BjacfcE}3x;uPM{#d5*Xdy7po zoHe3bwSYI`!KH4n_|mVmFTH*V0sv_rE~rnqYf>*F<|&Xvz|>NCxjR2KUW&?Qg<P!5 zPmQov&qc71DCNP<Oi+<)sB2Mb#9T)Z^7M-82HR=0+)BrHNYc_3smqX?pk&?4U$k4R zyOl~&p?K4Fo%0O_Uv++E8FI}ox6&`i@@{n7Q`0>f9oVYdr1MrJ&ZIXWN=<>Op|@G~ zzuuPOBxL%VFV#jb_)^n(>UnZb-q)GW@k5xlV3gT1`&fjg{M5-zQAtr2<c(~jS>Bn} z^73*>S-E6KC8VebqydezP2)K{jvPGT!We-XUC(+GjZpZw&^<-0o)Xf4NBi=Puu3W( zPet6ejOU}|H33_MPg|UZB$l9`J*NC`EW$fGZd=I7ZmFUpf?V&G5ql8tlu`@iKz*vV z0Xm`7b`hi!x|L*gkBW_fp5rpx+3%&c_zu=H4l=EKRNRjcN0#jPp~{sfcy8Kb!Vd&j z%dPaIaI&&{Kj%Szx?QersB#CpPKxEU5<^jn$u$A_#7Wh{q@_Qw*?{?MrGNa+W?Dp* z%aGz2fLC1Rl+y4u4Yp;x%&(E^%jZNWvT`+wt|o|rj|=MH9OpU&F4O|uSd|318NjNO z)me{d{=T;-BgTi5X+2W!O%3Rj%lF&`9Zcgz1)?{rT)Gr{V70;<RGs2#2&^cv(j(yH ztqpCeZ9yxVAzI$ly1Ama7t3#FPja?Ll#`|S)Z|g7cFU?E2I)LQ6N{(V<?ekHJl?RT zS{6<R(M3hiS{6~Cs&a?kQ&-yx+<G#ofoXR-3oY`rs~c)1bP{A!@dq1$`gTZM-E&MK ztJ^9(XASN1qn7ttLZ*er8g-mwHsh!$t*gBO1!g@NEM(j)E3cN@uKmG8Q;9k;U8nxi z@GDcMGx<BZcCtS-RlN~Q4<C0qI=V`q==g#6bAQ`PaA8EOwDl$zcyX$)GiQg9u&@|s zLuc|4p6p9Ac4pQ+;3h_<%FyVbb+y-76U*EiSx6to!(Dp?Pj!QSBMWUeSXN9Z<{>La zWsEk(9Y1w4s2=`?>qK;2kqE6E(B!N#eBx-!IQCJyr`5wt=uGKKaMxZ!J|5on|CQ)z z_VAIdvm(FffTpV2FOl8xkJ<y81M1j&<#qNfc^N%|PVX8K+ho+qN>8^*Qn9p;4Zp+( zSlY*GEbZg1SqYx9vU0i!i-7BdvYh^iK|Osf6(eH^V_MN6{@QqL2p0P4P}^JytG_9Z z{M55=*o9a@XcGEDztJaSNw4tau`gpma2Lo8O^z_Sk*7u@)q4u-dzIecP|#&vuT<Zu zD`>UaSpIZ(JS#cWy9FveqO6p@%#Df&vL(6CpfB-8BQ!nz7)w$`qep_yDDybvQ22dl 
z1`@0LR=Sn+Yz#RZo+{psA>W1%RdFsCRKI-)UWJ%zqEm)LuqHsTpc10X=xZ_PCafuJ z#jw(f81i96V&b3#Mnp}Jd9=Qu!`_G5z=x^YolcD*-FqoU=MF*(3+0}o+3ud6BAa_` zpr@xH%oaCXu=n<aDPpk96wP&n&}#$07wjnvGz6tbBeAasFLkBMX;Bif_bLqAiT?3- zp*5N`^_tiF=fQ#yexQ|gPWXWkEB$&fxwp4jEJ`7s-Z5c~=wLs8RX_4(@1;)GFu@T` z#|}eRB+~V;PHhq!lI_R{^W;l=pr;Y5DvY(e{V<75u=ZNXF`zoMQXjM<9W@LqrQ?wY zQ<<-ML+Do+SM=+79(bCjR(c~#X%=Ik$3bfnLQ#miA@u$!jR%E@;X>n<hC<TNC&pGd zT&NDT(rsC+`1-9{w(vR<s{_m2ng*oN6@N(xs*ash*{J^b5X-|Gw1x4zLq*pyh3k0W z(%+XrJQn>}eWwx}5#T8I96#0`<O!@@q9Ma0`#V8e^x;%>LuDgP&lZFwCOhj6dl0>W zQX~z_jrM`4|4DFoZ4IsSQkLGChTI>I^?aDoeC+oi*RhBaTBQ+$rDH>dC8)f&yX&L^ z2GC@57K{llQ?LtM?U9T7x*8&^RIe$6ycC2kE%Lnpo`O(Ug90W7d*|*eym5wO^O8)w z#OS5dLM5jpWgJ6WcuxIL_lhZvzIUg!5Pg)Q4^{t##_}U`qfVM1x`pQW^DZgznJvEM z!Sjpy%W}i7jz}8GCM%y82Vq)W%Y;r~h8G>FCHB7KvrnLvo@OW~cX7Mn14lo)4qQzF z1O~$c)FWKm1zcY&pWlnGN_0UVa;tBe>Ii%BgrU(M-z1zqjhWpG+LkTqAa!dJzWNID zxIykIZueY-dJ3aSeMm{am?Q;WLur^ty-ImKcA&kOW}a4WDE*-~araB?%e3+-Unje+ zM?eqM6}QqU<Ve5qVrdTP6|D>#0t?tL`{#O*vC#u%7P26EjyOG!{1AO#w-z({%Syj9 zk<l@+;!j#q6=Ug^$IGF|BFGyt=^-nb@@k17mt)fUgBvxVGhs#eGYtW%ZlK>IfWnEU z50L?}TG=)-B{psFZuX{zF?{Ksaz)($lihF|O90l3XnGb)xId41;bcc_T<<1m&<gjI zj<%7Z7RQ`&I@&}&k4==rZ1M&jS{O&9*kmV<99#&^(Pl|qVvnSzyV<m`JyY=&Nk8R^ zqklQ|n$Yb~2@zIwUuZeKjLN$UuE27Bgf}C;sh|VapG2{Lf<RuOwaAJq%|)ytzFsXk zC_yh&=?mnJKy||@@=9Evm;@L%2LgwC6R;PvR?F%N<akO^Gh4_9as9)Vpceji_3ue; z$Hj-0-=*9NBdPJb)ElsX`g!tzx|*eHvmk9)PBaEq=?ZKw^iu<|g_;^zEh{afdq$C< z1d~idMkM5jH$usxgxq*^1y&7Dz_F{rZTZT;CH5y^<3nQG>D40gae`WWvxtNwjvaQ6 zjly&&9;~?qa&$j5OB4+N!C!?}v3)}oyy<Mg=s{`{Q&TFC%DcjfMet8*z*88y-Q|d& zul6A46mb!sVYQQC_RfkBD#~bLI=QV--qWGuBDH)ZN<0N=akazDIaW7Zf;zxj*vhIi zA4UG9I*}mwt8<_`c_cYr?C~4%BoA_yB7xUr^lx1?ahaAtMG0jd%}gjRHZb1D%dz?l z)oUh$V1JO_87&C)6vh^f23FhfL^rx@6hj>M`d+GSvk&J>oq;vF1ZfqXc5k|s7BbY( zv>?_KT;(z@>|t{B!hQ_apuLEgQ~LLe5RmEQ?$8t=^uQo8C#7GHpCCX!y|J3CN>S## zRwM{@b7j?H8ST@`?B7s#h+4w(-jA*at8WpYbwE8#AiZ&=WmcM#LE2N&qJIRn5!;IL za=IOs1qz8zM}->6`u;;k?Mq{gZl~IN5z+%%0gG@I+6N08O$>6KmAt(@h4IiBJ*>3v 
z&nUVoUZ`+Yg$isb;=QkA)sH4UQ+3@^Sy5&5^?#Fbsi|Xc{`*hedmTL|Vm+9v;3|+F zO6*)8PsO1&P^8t7HY>~ODO6J3rq^u7fH-=TVSf+ZKf8q-OC9694?~9@msiKrDVS#! zRKmK=2@!M*Cp<U~Qxe<OER=&5$gAUMV*(10qHI_S>Nl{3XvbI#<rOxRKNS19<a8tB zqSMe1{Iza$1tV0;=nPgr!$~`A0Ou2owuMPV{WLtRl4b$|Nn^#6CSCW9OhJp@am5jA zr802SbUeI6;d$`LJ>A^Jm*9zcYZP+96qrRl1()0fEw05^1jhdZ%r6=Oe!m*=6tvLU z*mgD2%c$|SwRf;q@zrnX^>pkr>f4hrU&C&&XH+YDCR}Gv4P{T$6+Ej}L`5iA5EjUT zgceQ=y2OLDi#!<eDS{0z1j)hTDG=&A5cL!U)(eP36V-<@I=6Y9O+iISl;PhNmRjF| zPHYNl#j{(W(9jfg0~oZ9ZxipXy@(Zm%Q7tPK8sq4TExb{f-KOl|3I56*v@Amh_)Tr zcuG-rpe_?*5HQFx>S8=LmN)l0L@}?yT=3jopteM3>`5H>jx1y^GN26;NK1M~;bGuq z^eZnP4~Rz(*U9cnKVV)!?{I8xntAfkKPN}9WKr;@REO9u@#I&$q-4;@zE?}xWQfEB zU^+qJDTtJUS`x{wLCM4B3K;Z~07&bXYRSF$3hKKp1@*;x!@M~y25x7C*S*g=WUH3k z88pDTB^eCZMAxkG5g|O;yUXYw5S@w&ZghD9cehzWUG`9G8J+MiMlSTO^0Xs=?~H04 zEn*Vr3FGEj;G0<P$J#byk?DNZXSpmv7u7a!nxhX2ppQ^wgm^y;Mf`4W918JqGW!U0 z^7i2D5sO5KnuoVzA7+nVN5j&pk4zcfX+`-&?@!i_XNtVsbz8EBl+v-INmfQ*@$xV- zD&tKj8|!LVx4r?c<HCU9V?&G~iGa}?>VL0-%?Efar>f7~Pd9w;vp+EJkM_gBmsN}O zJuIrwdC&uqc-t=nmA>Uv(y-b|QOkI~_JAo;3ycQIL7vDQS_;v#_ZjZ`eaO6D>Wcp5 z`n}iVJ*&ee!DtISvj%BX8iVqoHL8177@Ff6`X7VMG^LR`(VwVc@WJ*zFsDIlq_aCo z9k9PLhbe3djYXI#!JLA|g)_=MJ2j#XkkJC?0Q1*chm^{CldVINdxu}aq#YZIZAZXa zb*PR?-;oPL9_zj0J0H1>lXK~!@5p^a`zNzS$gZ%hXKLzKs4~}yK!{$rXCh|9cqwYV zmTVdti<}P)P3ztE?O(IF`YmZ4nuNrNVQD=rIb9WG^%*g2O!#rDARJ<|(=$R&Kedug z!-k6TZ^?(lW;&CQ#J2hBQ|gc5slAH59U4+CjJI7yJ|%SQCPA25i7m!N&+JtB_!8!m zqWgTKGUd1&<7{JoYc`u#VjQ@iXzj6jwkPr&^IPo2^4e4Oxat=5$CGaUVkiimA5JyU z4{Yrs3r&f=eLF+8BSPn^T&5lAWV>m;cySO(Fek|(iOFn?#+a$Oja^<>OJRRor3Haf z)S4Eu%3Lf)4I(Y(3~@vW3Cl|q+s2V0dHuzY#*y)PTb<i3GpT~Du#!#MpJ9#Bm#|oQ z+PCnn;>fx71ebHRVDIO7+=gXeUag?7W6Uc&Cus=EX{-*T@z{Y@xMp<-U~xOH7&Lr8 zsl~D%3&Gsrt9SU)J3=IcE(9jw*KE!)Cilp2n}@UGJ@Bq?AvKoFgw0@ZC&Z(VTzBp` z5}Se;%Em{E+S)?SSo$U1C2U6a-Y;EuIvl;>JoZ=u88-<S-XFs~aJW_$O)7`0Gq>|f zgx8cIm%0LKg`aATpOgcVO~&XW;$a);29P7e3$lK}^6x3Wg1PnZDJ4`^-J2FxU&j@1 z#_~d0O@n<<_<nQv{stHU1ue^$WJVM9Jw+&N(LIk0V3iB6oxp^kSz@!dl26|+c}GXu 
zcC(JY+D5`hJgWA0PA^ZP&E)GID=oN$?r~SzN<F6asH1eDt#GawUi%~3-xj2ZA-hM! z+>?nSAguLD9){LuBUCrQz;U$)Esm)2j2RIDUs!<xWQ&*b6ccOX;;`joqT`n>Q}iW0 z^5cd;>sh>4B|ZXOBh`lwO;>eRpxzbkg!E`qnm=G1AHq^v#)vctW}?rK^ai?SwP0;* zp_@Q!beUGL`ReFh{Rb^WKU6dYF#;-pTKc5E6c#0hyE*?}Sq%ASftrjghz(@jK|ahc z;H2@QJdsERDTVt!hbr2C7%$U%*s@pQd3G-%Q1Sir(T+Y=y5=s!LD(%771I9QjC;{t z`eUDy2Md#&R!}_8VqYk{y9AGnCMz{@(m8)@7=NV!F`5b#)}H6tPZ2Yv!*ODd)30YG zvoeoDHB0+h61{y9Ev%?}+S*hg4+deWvptn@_wt0Gjy+IESV-p?Q-A%`4tQZk2HvH= z%jmAA2&q`^Xey(-FOu|;d6}`knKGaCou{diIqj!%oAMkJo=0In(r6J9+KO39$jH>F zr66Gz(-(XB=c1&s#gWq^hdOJ}r;Sq74g|*o1&>rFY35=}Cq?ZBg&#}ANg5*r#kW#; zTDutbGKQ0tjaz1eA3F<sUzm0IU7XT6R~So?wy^wIdQU7s2l0|CjF)h2Hxz{eW<@Kj z^t@l-fi!;4jG4nMV~4z{aPsDM3wfzX(HCL`Me-svC08WU0vlHbXN%HCB5!x$Y-z!4 z`VQO2cWxx3SkI>;&qlR05}D!kN~V`sCrsoBnu=*|L|Kw%b{sK`8WhHAtFic7Kpq<v z8GC|NcWj{K4)%5{qa)6v1a%u;YiJ>_j7oJz%TV(SY%I7xFTAE5rFsfevGQO@+dgBX zQ<&+CQC}?E7yH5&Gx}nueKCtKcGMTk_r-SmVnx2#-+i&kCiIM8SD}<ujDpt5S);g? zecDzl*<(>jzxD`UD*1FrIi2|#v5y|s4ehi?E|pfsk{zQHouMoTYQr9d2>|b^SFjU? 
z83R0&5+5#=|9Nzn>#ZBj#-0<B*NC~=jE=G`#PF3%=WVlDSg&IpdEPdSN4Nx?w@u;^ z-o@u#CdMH+JsKuS^-OZ?eO)HT31=RxS&m>CccgA%!dTBzwlD!|3Sw_I-(!Kz&bauy zJ{hj}3tM>2F^q*za;?&NW;yaXzYR)8(y-S(8YcSE`C=cFzq-Eh=S4~9ed1j2t1d5u zUgaAp`wWkX*-bWgJ`*3_(hJhLddN}Kn53k;as(m<E)K_`G?RJpu$?;5LQ2Qvh$lZG zE5>AtcfKOW$E3-=AaqQ6bo5o^#>F2mMcL$tV#+kPko4kYSqm9mJTP|BkCIeIzZl1M zo$vAI+fZp`w7!^liq*y342rR>WM<G4CJZ7n>W)1#%nOhz=wT~Xv)QoIZ4>aITcA1l zsmp}2a$ET#P-?(%a17FpTE8JZ#>P2Wj#De`ake!q&v1X9q1EgZ6P0CDbpv#d!e(KU zVG}k7)pGiKcfT~kZL1GjgzgTnxrhO&UkG%0LB!wf2wjUqu@G+N<FpL<cr>g}rSqPP z?kVTs_q9oy8kv^7I#$;Msf-SSmRuW~I(RN>xODNAMF{5ig2TuI+s7_K<rG{A^j>W$ zXj#G;T57%_IpYS2i&Dt+aRbENKa%C+77pC~6Xs5Li>tlwyq{RzJ%R7-1<CJ{Tzb6& zGQw=XLDoTuF(s@a?XvKi5KyZFsGJcXlMv7)y~$i_U+49o(VdY2Y|TKU%4sv&4#NN< zS9=g`v$0d8qi-RpthB2B?NqdcpIN<gTjb_)lI<`ufMaQ8=X>^0b%T}tQ3bXiWpwXA zTz5bvj0*;r(SIOP7g)gNf~Ps-!|DLrN59+lB8QC6ORcmHBEffEjw7}mARcA)f-g(p zbkxbQP1+ZTWdpS9SS)!x9jj~+3YlP=h8N1|L&coztt@4K*PeigRNs)IbS>nOl3dFo zI8-XEb+vNjVTDO{8Y9+a!(2GcZI}SM3B|Eqvqz!FR<Q_5PU1cGLrGfHcDU>Py)9{2 zgsm+kd3?Hfv4z|_J}rd#(=>Jsd3^jJING+1&rbkl@x$-dAgH#$#ypKa<!5Lmt|0J< zrG-6^wCL~NU{3<#rKN+!Pg}^-rTvmX_7t?kyA$jw=vXc+D-7iOzPiFtOsyp?WN&GI z6mhY1XrzucrQL;7;CrAz%}}7oB6dQi2(F?DDXdab)VUV&*o3&$56?j7w^u+54%7$j zMhg}sp)*>LL*26!U2Xu8?4F?M_dlg|Oh_HPd<<5Lb~HnBWa<k-?f;vsMo%0#>MCbv zBgXk7{x|%nPXIG6Te0jqj`%%J5w>Y;h8nJrw<pFujFXJT6FfKhURNK#+g{MQe-5|O z%U?h(uoXK%g2^aFE=uQYU7O!aXeXzzE%L!rVCFIGkVksAbgsbIC7|c8&|BXTOPR6T zCAN55X<jL*Et}??kC>JI6F!29n-?qM8khycyke2IC9IA=V~56F1MbC_;2pR}1Rsvz z{^A{QA%9+MUEprL&R*$(8yegHGHm}FA%_(e>W}YnPweQdxu+<!?U>6xOg~ZX=tX9h z8@n@|!h8s)Pm|Zn6UDjTlK0Ewod+?PF5_>9p~Gd4gu1}r4gz!QxX?&SS5q{T8n)H4 zhZef-q%Qgd8g6Q3qbrp&LKk{}@kphf1lC7oN7>8e9y8>rkL&3`nLVsr`lOt$XoBfD z05jyIeCeHw@etfgoO7xtn9Q2gCx=n4R^IR5{}srk?DtqkQq(kL%I&=Etd}Ny!+L2X zJRIsT^$F1_<c&%F<H4s-#F1H(_mS#o`Zs11dD;!P9pUs)3;AhMf1|JVj(+9zos&qU zH7&lqB;0>fu6TR6&EAl5x|t>Y=Scpmg%nrB$l}O@6^Y{H7V=z0YW6Mc#r)3VjX)uE z*H*HDUmSrm&QQZ`dkoHVqUa}g*%o|FzOE=0!$gvO-_WoTcN4B&CUfpn_73i%Tzd{V 
z(YDLv&HJJ<Hoc0~=?JtA&S6~bcN`<>nahaiQXCbu5tv)&wR)n<%WRuoCBNJkm-&)E zHI2T&Qlq`_#Xb>-gL?LI1bq_3CLx6tkZ_sk?jM%K&Sc@awug8Yvl$jehk(WBOaAC9 zNg@^fRbl@EQk_!amV18j9wyW8A0%FBA$9i;be7bONtI_byS@nzI2L!R$FYiQKi)40 zUDS|H>ZhP`Ph<(}o-*1E=7hTdPL9xu?a#m|U5Emj1U7YCV?!0JH-|yfZMWFNXa-MW zHTHg2s{V4j%-+{s8Mr+nyyiK9<V_x!o`o@kbIQ<a52a}ww-{O-VQ?4G#3`6SP{G(; zzdxBgKY6J5#u##RvN{?%%^T!D!wFQ_Bdf*I(h@rBe$s79if9=_hD=E(t14o(Ut>e( zALeI(vWl?{*WF=`$1q3JgLm2P2TDSdrUqX51IV1Dn7}56tVkLX)&ce6&+L^iFy;z! zH7TO^yIs_49yTm*e8I>4;X@erRd>3^eH48bjBMPe&_%#7?i2X9KYWN3DH2Mp{?viA zkfla@6EW@y?CwWT-^dRG*+23hvn^b+#2qwzRG19FixEY;b0!=U3v}n_<l!kg@k|BT zHst}<VYF8WhNmT|;#f;?KuE<@-hm{s@-g#uY}Q-~fqsP0KZ@w6#X-2%$8Lfwk{ht7 z4HxDm(c|E7N7L<HjP%}%<V<D1I3#;QaE!Ekj54t8m<&guwLaE|FA}Np0cV^qCgU&i z`fa^{`z~6~@sFsxI09$10qz2J`bhU;%jdZWr)n!sG85@pu=$TWE^rj51^l$@(LmTW zGT3wMgq03wC&J<G{GRF9+qx(8%=Cl@eAqTTu*lPBe{%UK^7T}=NLxttw7sFUg?pU3 zvrQBk{D4_}X#%N!V15j=mtd#M`-Ja*Km4e>1<~45I7+pDM%o@2Q}izP3rApO$e7al z5ygdxP@D<2!Vz}&>I^P0LRhmqFE_-n4T`JyI<6tM!vDqgR=wUK%csBn*D6?Nl!}$( z$+j7zVvML-bx<Yl<MpGDe$qJ@bipSiWM<mrW~MxAL52&~b^DGZj+zi*1DDdp%Ore1 zdZ_lxg>nN-3XEDT?dxZ|{c%^8n}B*h_B7%kyw1oD^;g#PC7WjUj6RD5mgQc~%SOt3 zpp>4SNlwg6>*c(g!E>CCoU4f%d>v>b>QqYm&BA=`Qvc|SC~EA=hkpGL(a%bq(gPHq zetP30wB@9?$h9~ETPVFFbY3sqeS{Rq>5ZX2Ypx0X>>J?}HtskIWW4WycYaTL4DWZU zwYRGKPQpq|8fGPTL#uHW;abls^5ZP)^sX6FMlWGL;uwyZ!fc1Jh(7Q(TFZ0~KLHG< zB|Ogb&or!$5<LIIg`0(;p-^Uyv8CncD*0fnvw6Zh%>C(5l+qp8IFL`36JnC#yI)!r zD*c2UtD@O`NmrFzsHmJm^i}=EOaCU7Rq=4%JykU^`HB5-beuYisZ_fL$^|nwgT9TZ zU%5;<OMa<JlN*p!@iq>v6CX@~sb_pJ4bSNhR%S-GS}>2pne7pNwC0!&(dv+6?SU2U z+7)=ma4h^e1#EIDwKi?;p6d@riGjVWf<Sg1jv<NF-^vn*?BPdc=gHiMpAEw~w7a$i zFIj0^1__uwPb?op9-sZSn0SRudL&DPdFy#(xVS8leE7&<aZMuG($KH_R>of{X;vog ziF>qKCjKy*JU8cY<SWfNA4qagl(Vrf`FZXX+3%$Iv0>tMEqVH}O!26e?09UP`04B9 zkH<>#3SVP3_sVx4gt0%%BK!mxQ)Lm>1L^_S05LX;Z_#1ut&Coth+$)fVFc}Sg4E56 zabCxC;g&DkD6+Z3x`bIcRvK`E%{`n*$9Yb6UR=SzT}^_Hs$)mpia)x{OUmZet;Rpi ztG5tglVBafy9#m5f6S{?>hbf<?c$?-fd0qCik4N8>GKEl>U)<fk-x}hkHH1hH+y!m 
z2Kr`?>v}|~XG#P#V2HZmS}??qoisaQ>G9)TVlwMEiC7XfeDht(2%rZgNpfCmL{-b* zWvJon1yOX-U7jSM*Iq!fgj>MHtu*vCQnh48&xT2CcHyRoBc8S%BNvuv6h+vo31#86 ztJuWzmHz6nuF^*xBbiGR`#yD-k_Z&Dc`62r9P2%i>|3hLoQyo!&5aIEa5<cVR*y-E zVJa=Hh5(gH7uYTDM*Hw;5w}CXB)y(V7UxVN+0P8<x#3j|pVtdtmq$YriS3!Jf{|F5 zu`a-8%TDakGeK;4XR&k`1DY}_LvwNO(^OHnP-aiT<#if~SMWjrL}GYZxt0F?8*=%X z=&|6Z9osO%Y3x<O)gO)-b{hb_3_G~uewf7)zZO8xb1qgMzukcQC%*mdwWGMG$9#@a zH(E%(E7P2Tb=SAA3Inl~$z{{j`zsr9s6t2jFWFt~Qbf2vas@MzZ&j>YpnP|^5}k)Y zsg~hrem6_P{Q-M?8QscoZ1{?hBgTfWSxn|3=-6NjNd0f|t~d$#7GS#L!pi%oD4zz* z)5<m~5jMGq=6?Zt*DA6a5mykz{qdv3TGMZkZ_#*|Q9-mK^vwbK>yiJKZL!cwUav_I z?;IhI9E=a04{o0{Z?ckKYU0If^(4G@T=G5~|HEeiPo3@T0_H7Rh!%A^&DmaNkFz#k z#=SO<Ao>f;K#V*)vd3L;=*MWbt}#J0n(VTXWponyw4%-)EL$F2)8P2AI&fq;{RUo6 zwk>s=nAD;DANX=8^{UJ0K)kSo$;Kom8?sWmhxtS++#!&SyfSzoHsFGvi`?^EpL`}B z8@Dj=VC4(0f&VM<U`5c!ITu6!Djv(<CNDi#AwDya+<5LgapMH?`Lbm3$qA%m+2{<& z85%eT>dEI|iNJbNR=O-95?6kusCM+q7ItUBt(l!d9&_i5S3e;K+^MPT!hxG#KEO_m z?U7;pbsdU_mii=7at{zYTFAiV31aytq;R=n=*mrKC9X%{(l{5&f(|dc1<!CM5Il>r zgVQ7^VLZs&%j3J39kj719*Cx5#rk@AiWcW^-dSbrs{vf0{{|B1=uqP$kSIBDbt`iY z`ocgSXLl%Z(HuT3PkJO?y?NfPVYdP*r=mbt+FT8p?-?PEoInnG?&*6Oi=jK()l;I! 
zfcXrAS^G)Eiqm~N_H`zG4a}G1+DHDqVvsl{g(TG7s~H2gj_U)sxp5yXnNA0nOCh)u zX#ub%AsPZhbAjSs-85(i9Bq3{5#NxFb=B+^*q>K}JDo<`>AmPNx*1P)v!?x8RUlK9 zEzGIHr(nKE``=T}x1#v-z)=HzmgT}<<1@F-v6B{q)FgzVO5V+`cJl1=(S2I-F@J+S z=U|i{yA|Y#Tnkn3=JQEd=0AFVNW{zVY-4r9=NY#hG4%8v(({G>LtloX@j1U@t6YmO z#ya|VnqC@jm9%uY&n*UhdYl`@T59-t&(s&X$%eEk(McyFdK|Z(#-$*zeh)(b;~*It z!)u<ugRTvzE4ack(S1(Vt}N-du=9=Zn$hSt-&-@3<d2n^;)XX##;SN@3CtZ<H{@^} z_Q1)4JEGUjbI<XmcwcTRxbpKm@51?bzo!vLQg!(zUd98-#I-5~jeK=gGM>$=ba={E zr-?;y7_Uw&ihvAZIJ?Z~wY$*|!MHYwUiv-A6I!<fV~0IVUA`=EiQ*1w{TwD>#kDQB z!Kd|n*vUkmUmcV6<Qve`aL=TRn6Fcj6L-(x9-$+ee=me9Rek91VDd@V#y7~t)zd`9 zZjwv-hohXkh4jCN%qBAl?(fAEAv52NfGrqK!RorsK3ZX}*QIOz*_}QM{RR_u%s8T5 zJ3ySYnpCb;_iP2#e}RB8zOr^f8Y4L(JSCx?@uB(>xg+SH47oLymb}vnyUg2zBP935 zlwg<x>?OtDMvBT7^3dN#_0@fd#{S2J(1+KM=D+FK)?VCD9rHAE^>x6LVQUIuS4Le; za&N4a?jodi!@Z^sNHfH{oQ{2wOX>|;00bV#in>`>pcs7LlP!9I8EJgoLKV;`W--LC zB4HcNPz+->CbR0y*_fv_!gKYX)tR#zIYS)(bUziBQ|)ip1?J&KbSb45$d!#L*)x4L zDW%bCJ1cO#s{-Sm|6f$#=NCxsrhMlxR8CqTrvrHyN|SgPLHqGAl7{mznuf4YO9dWP z&~{`gEl8!UJWQvQhncj6he~>phuQQT594Vw4|TMGhemo3q3eW2axSpYot&0WxACxu zZsg$@x|WA<Yx8gdt>xh)x|oNPse^~p=p#IwNoVoUMyK#_Hl4`BxwM#vc3Qy0h1A5u zMO1^($ttmb2_%LNWaRxd42|dLGKTiz=n94gb95y`Z`UziX3El5j;?3u_Z;28&@VW; znW1Mnx|N|PIJ%vohd6rJ$?#nqZ(wLWM^7+xJx7}vTE|g{pFp1h%IkcN#q2zR9W>B~ zIC_zxl^ngq&{B@JFtmuHR~TyIC<W@IDvn=g<bfRJ$JX&2ZDrJ69KFHNV2-vk^!5tY z104))<*0yZN5AJNJM*AlaFl(zNzVf1b!Iz2dW<JTFm(S4C(9+0;s501Xoha)XgovL za#X?4<s40AsFS1V47GEVT@j}baa76BN{(hTw3MSdh8A(u$WRkUEi&{!RdGC@kq2_L zh@tU7d8Nm&SWli{WoQ6LCor_#!>T`tq5tCOWQKmr(P<3502CVn*o<9fE1ZlKo?t_e z9A=R{#A&5_!Gn`xd}<NONwBbe6uBWN!cST0qbLnQIzQ!UPFav`&=@(TjLv41zm^nU z^CJcb{zXF&`<#{eP6~ZFMHqsj8N~@wwvQBP2#WVJrT8e(h9HHX65*r78-h~(lsn6L zwH5e6#80{Iqof*wGX0b<d=#A_$l|9o`zS_35VH{NA>*xdzmH@w1Qq$2w)!afhM+Ni z%4#2_$Pi@pQ<iYbf-wfo1WqZVk1`6{Pe#?x2>rOeb1_;_PY%`(7B|$BEA<!DSd4JD zt-Q?jL%8c|xZzIaR7cRYJ{Cc^*%W}Tazq}xCc3^2aQ!ZyH|W=GQtH~))#RtGeG>y$ zV>wE#6Np7viLeIIm63Q@05eyUlx+h=aNfHul?9J&i%)AsLft6g2Lzu2P6JK=4g>ZA 
zb^x}nCU0#kie#J~iRCN@L;}(Y>G|r+uoY`q!p+Z7o^cIvz8ceaB@z*12`e`PKLNN- z$gWqD#1(7E*;mt9@cXOEG{%9_EeMJLR=^~{G=L2-7vNe$#{Q!y(pSg^5VivL0-D#5 z*Z)yQ=C1dU+y5vEwPBg2H(&wNO}ogr?TW%5j1cUwSFl5|HLOK_qS#4#eJU*PE}vC> zI%4&i?4e|jY&`r^fLV`W#@{Vu-}cNXmgu`#!j6!av66~5k+$uF6UOhvGKTA$7vYSL zWZM=t-;T0ux1?tf(`y4}c{X<KqK5+Y9y-c$+rl2+zDHTvTiC<f{U|Ga3wywnLN4r7 zogFpf3bDGuwYUYRyXQgG7qmD+a9}xPk0j{}uCTds@i1wAO+Q5WJD7_vdBU=R1cb4u zzWCDO7*Fw~hfjri$|t+?d&#AOOWv<&_&>?e*N2J^y+bNr&otlMz_xMhlP|Qo-iR99 z?Sy*E-6%dH^__l~OB!|*&h8%IsJr>~WN~vY@xGqoEII($W<LDBbt7PJ&NIVv+Cf!* zs(sioNwn5wAM}p}&ja6&oA@1v3+(gIz%uDVeGT$PO_+bDbJtRh4>9Q`Y#DHI3YU9) zW69p<%-AyqRYy;%OlgE8j(w(ttAN|tC~f{5<l&cHP;R){$IP@2vMRBk{4mIk^xQ6T z=AZFS_5reVU@m5-FI)F>40kmJx}@}ZSK%J;TSTca%KR4;QMpnK3%)}tE605`8V>iC zYu0sKZq7$<VZ^9ov282*A6Hso-?Gni!fV({;xRR2L~ZMl1cwN_?{SN*tiD?wxB+5Y z#!~88F46@SxAS;WS9}tWkLirpPXLBZZG_F@%M$O@TS42x9-d<A8!y8#`$&Bl<F)63 z1NYilYCF5EP)kniP{y+Y3<cCa2&u>}MLOp$xDF&{%a{xju`^oC&me<$_809>lW{v^ zL#MygskMS%B95JfV#G`2$WDFeXX`uZPpl{6E|*xio;<f}fD`>WrEv@UNWdM&p16kF z1FU`z=&p6>y=KWTbKLpVgPjL$T!P`Af_pm-_N5?>h~zZ*X^^ldE-v>fL6a4-ujk=0 z_J4R7%{rUi`sw`I&QeBu?3Ttz%>SF<y3?}YgED&i4dg*rpwP~*>zJ>EJ_UjYn_D9W z$VxL1tH6D#Z|RJ`(EF?-3*Sf+hpr>9ypbTjt|3R?=&#Vd$5zi!=3>MpS}raJui%?W znuiNs%#@ey&dzuZ7PnhRUqIk*w+bjz86MoX^X)S4l-9*F_x!ZoYUjNyRUuI2-4{NA z&3r8Z+-><foP&k4%Ps8E&a07p%U4Dx_QlqSN!3dtvJ7a;4+&fnUb6xbdKJzsk7c&{ zl)ebRF`@V#pGdDcX7vO}*&&`^@R2!s4!;R;EWBnKxDCJ9dxh%i@)%c*6$E_E{|)K? 
z<|BbWM8n~`=FI_t4>RQGn~K1Z4EgrWVaylN=dFR_=@w#qtDFUE-<l@wRFRu+Wr#~u zBz4bMj2N)@A`8CS`yqQ_=e}Xl>~aQ%Cp2JRI?6L#c7EXHzcD~=?8{5`Cq{W{S)yxO zKAv!5>k#<4ChQ*&U$T;QDm##*6+qoeZ=SS|vC=6k$&33_!+LRsc*YR3k~Hm4n)Rif z-vWuSH8VYthV(8E*;m**xX|;*5&qBqV7lMW_O~uJY6kkB?qG3MGMRNCt=I6oY@_&_ z%pW{;7f_!E?ek=e9dp@HR-qEm{T1p%+hEY&t;iO%;!g)Yz3GLnWcnmf?-zeM=m|d< zU3f-;ocX1@khc6_obP9qQY)ijfl0-<LtXdq$vykH2$x{5zyo|mu*dKn3@#A6<#gKf z+~M~b^7mC&NG5O3PbG&AMu}^WkaGtkMduN6^<WA=^oS+>4;hl9(Z$pTQ`}v<0Zs$V z*9=;Ls9*d$RuJc*H2D`eg!!+7kJdxe<2FO1`42ukb%&9`!wucS_t(f9uqZ$Bc7O4$ zxAv@lJ6a4}3oi>h?hrQ|BBu^3#3v6C@8P5T;3I}K9+@cKmqcQZ{^j&z6D2#3J{ou# z5<`;Sy)16sLZo;3`G=~(fBuoueFutU2Os<#BuQMENa7w<?D@8#NESFpju(d>U%*d6 z29XbsFA`(+kTH$-i-+GLuQiSmm&A}8jVt<p_a-Fi$R%VX*#m1O=hv<yn-PI!C+%Yh zz28RaPt4H1Q^kZ4hEVS779nT5MR*AC9-!B279jyJ1F#lpr~mv%hP_wl?1m3X@f{aD z$c|prxU^GHJWZYp-qGCa-%)~3{4mtlG0&I_9zGD?Xk*fKv(lk>r^0*oe{ysE!R{qZ z#Px67uaC6J10b8Jc!Rm&rd)DQabgj~x4L(5knjoAHJdgE5&ERn)9Cr+8W-0FoKNBk z6hEK*r=M?S7avsv^?u}G-v`$jxyJ{Wg81o;Cp`S<n5;n8H!}N4D}5H<rgllP5|SKG z=ATRm8+tb<?Gm!_WMayTpzx0XkFz&?D%p3w$G>Ethh8RMoiyjMiigvU&dv(6dv>*V z>)M+>)0ITifO;?g#W6)Ty%;m9(<Xpz<Oppm47HSp*J`oU!|T5TJ^K#(^pxFF8d^p_ zVFZ+EPbs5Kz;RSn#?NEXsRwqfUY9iZMIQ&}Uj8TUr~iZdQE>MqQ`dDHT>PguhX3ge z&40X+wu7u*mn!}mNe-^lin}(G)^)=MbwDG+pz6Hc|Ne9)hTP0?HOsJ61WkfnfOWz{ zeqWy>4xLF<FU??1pmi^4#2*%s4-p_1^s+`wok5H*=aZ*iekBSQoGkWW*FyX(KrGBZ zIb}b+LMCU$lE(R}SgdA}t3WQg%u<CT+?6-j5mBAzX{1j}B;3xvcTn3WCg1x2=EmQi z<eCG1KCU@dAnMl~!=5Cs+2;p9L69UzQ~*;5IgVtp$9sG3bHs{bzYenC@#~nIZ)p0G zCl)3I2H!#jUS7B|@Sb)cL!Vd}&QC(wC)HN^+h?Hddt!QE-c2BJPp%eAULbEg$+f|^ zPjYRL_7p!2EqO`-EwE?)Q%}nRgWACS+oCFzGI{Yfarti~;OWU?=5OSIrwaq$`!^E* z`Lsf8xIxZ5{h(NWgJ_)!F`$f0a?YGOeS&|o|NXE<`1{+~cL9dIV-cPLJO?-fIDxcI zi@dBHGFjo9Kl4lYEaB6mbE<^nl+$JGDLFL!?&7GX{_*L|5DIa}2df+I_x7b<KH4ea z^t(sNvL;0prsDik5BoCS^9yF0Ukdr9jy4=9$?RH}Q7PG#EJuFFEaL-v#3;Lbl%r2+ z^$oAQwE8!1Kq-DSC(2ipD>N9Fti4fm;@zUAJW6^t$BrBh3eyV%x>C5Y_osOlOB4J& zViynH_6X+NXTH`lFEE+Y-0$9Wrhj6<gfGKAO|14eeR?7w8+KFI&IvcoD&-DMb$2=r 
z>ka#4kHusDdZ+TE=FDWgQkSn|mn<hkJF<&_U!d4V^_R#WZ}(b35>G`}gUjcEW{m#Z zZPB(OR+mz+vrovGqxn*G5BeSRKVU(XGsEryr~%MctzdOvzrA(QC0WAj3?O^5kFfgI zwuRRm!g`L6b3B2dkj<wO#iA9Y;Z!WU(0S=p|72FNwnB_Z9FB5W;jWVwugkOeqQ}vK zDu#QA;{7Q2)V1#`V&k4eN%%Fb7O3_I>Oy%DZW+IPI`@6{HTbUg2Xv2`jTWL>CCka> z_m78ULR9K!Ysf!7u*A)&?zG?g0K@Sv%<E|81(uZ%@pO9BlQ7z0=+acA`hUnErkdoP zP7MA1A-tKwej;S!L*${;#mu-ncsgD*wvh9ulf*aHk(;OW*^jZ_!3NG#fO~FF_kebY zrc*eLeP@MA<KqncYzV&<5(?FU->CV9%si9UlO4)>Q_HzJ`Qufx?#zJRjC8DBym^ti zl1^p>%}gO@&tybC?n1>|(Fu+Ys112_H@fg?((^;Jv%9TJf9!k^&A|99LC=@sS{CHP zaM?bq+Ehw^D#q(LRy5qPPc(dOFTw9J=pPsHdpB4)x3D4&K{h-c-KiFete#U&Iu`Pe zhH*{BuEwXt!)WjANTb;3(9C~8eKkB$PJ6IJZnSVLeda-O{zJtu?K9|$dUl+MSH5s7 z^}cV@`&0+gIG*Io+y1Uw>A@j5Njp1CR2P%7vjfx5Zo{hqjaKT#O6;%geb{KTz4jh| zS~;D)jBGnQXi&D#!V7m5-;n52Ff@Dvhuf25kc-~YZinXRpsUA_n`ejiV};Uh*aFY* zhSz9(tt7^CLqd4k#-(KLxx^TTU;7+~NoDMhM$G~ra{2aN@{e<gqHzpqI_D7gd`HY5 z#ft0j@#9A-g%z@k`7;Rter5@Cq#r#nlT{}a!$5b;Bs)G*i|fviZ$28D<ma6LUiFvP zo@ERp!RyimIeO5+Gs)18(~38ID@puZ3#)5*O+G5X7Mq>Fp;ahGhi2cr$bQ7F=q3C% zTmR$ki8t7Fl0o!?8LUutG)F%H>f7^?%^xR<S>KX|kB4BI{PgjgNe8eP`q!KLfa1at z<ScA57DP9G%WBFV!)HM<=aNP~sPgK*^ckQPZ9$tc>Y}|Deg_PDKMVXc^;3Cg0-N?Y zzefN!3*X~HS7Mp-7tubAi{un$wA|g#jzRlzj4#p(yy`b&N?fMhDB}*!m1hQv*kA5W zB!|x@WE%N6+rO=XLFPZj@n7Fu{yY}auIrmIv}qdo>XSI}^=ahRC;90+N1<ePDGC=d zw{X1cLgvdYC=~;R3z>y2r0UaQ;^1>+-KXjpXs$nB$c!pw`_K~l^=0zir-Q|}&XVxY zQZcXDccxIqQ}|C|cyTF%4bWJ)==?u+aIk=hFFRMfZBIfPu21|5!;OocgIx{|#ohkW zQo6K~f5F2x*PZq+F<!vWp50;|7IuPI&$^@$UE#B}@Dmbu;22{!DafZ6?g?1YV%d{? 
zu}S7^hs!@WNG3!B1^_I8@qjA8BEWjUUcmc+7C;A}H!kE10B8Y)fJ(qzz)#&}!X>~~ z;JX2b0iOb{1MUDK@GZswKrWyRU=M*8e;JV7fCj)hz$L(sfHpu+96}5LC;>*mSipS% z8(;xo31AIiE8uOwX~0FmkAQYS_ii#F1uz(p3n&Ck0?Y%f0Q|k1Qzje)@-g6BKs%r} zTA%<Z0cOBhz%+mzPy^TucnfeEa0|fxR-kqN`FAV<eyu!<@ZEZNXHQ}0^0=jorR6h= zFfS4DRsH)VcK+}uL{}(gEE2$rpMk{K6(#LC{-qe^wEhGqGsx}#vhZE~W`x}lpM)?1 zp@6U#g3Z@00t|d%5<={Q1OXx35yDC@?vo+3AcP-AIQJudX%68ugv<#Lfe=?zgmc#{ zLJ~qbLIuLj)FLF~i%MblRg16?gp~-NKxjw!B*IAuI}lnCvU)48?s@du+3tIuxS^J% zO?>o$>9c1k>~j<|=FNJ*K1(ro-kce;=Fe9+=0Ei4gNlJ^v*!;Khzy@DuWXAV548QT z$96MKRKUKn;toatvKwETUn=&Wo7B?yO~LDFdwZ-8{$)!r+4;MnyF!E3;Iduov#XN( zl89T;Vn!?(eJe?pN~-ZpBg=0kiRl*7<5ohzo<Y8FKn4%RpiI*Jc5LwEA#kV<A%kxx z1}s<kQl=P4HKN1ve9=Akkk@Y~hF>0G5un*w`-Fduu#h)y$MoD>aF@2DfZVwqD-Ivc z@)AQ!SRUaoj<+y={x^NRg@oUV5o0EBj@aoekh#c~jwF#RVC>@68Wa^9{I_Q<!mei- z+M@+N`YQnZ5$SeJz?pTvN{xPrXEJFU53*k6ug~2}-oKSJv5B*HRkqn*>CVdk((FrT zc@6=-BjB#wKFwsf1sw{F(D}C>wES<-!T+!8iK72odV(C4j;~!UyV$KjR%)Mz|Dz7P zB$GWH)V%?&d-m6$J<we)OOneX*A0%4-Er!^w+Pt)B_I=!4oC$k0MUR5fE>_q#Uiu< zDBu#{BH&ZNIlyT^GvEZE0dN>_5U>}p8?Xbg6|f$#60i)w_?KL<IE95k<^pB{CIhU1 ze1HXD1SkRVfCvD7xkTuc9y#Iyp#3{k3_t-bfQx`;z(K%vz#703z+Av&zy!b;Kt90u z-7K^oNG2c^5D$n1gaQHq9pB;{4R8@~8qfe>{Cg2@2doFw0PKKifH8o4fDw=m5CGS| zv8;Qyk1UEPFBaXyKJotZ-~a#ky1{*9DJphI-1)cu7YtXMMc8~3g(Clzu{g}||84jc zdJ}(3x|8IK=5<TD%K{HH^pPn<r?M4q0odCwgG&)am<I3q8w!kpfRf@f{?Fj_{{@0y z0sj99Sj}aEpyQbX>H4#Lf<ga3!+~{uW#5X<@iKv8MY{rtV1KO>WI{S}>x@U^HyhZ` zGj+xrDzU8Ni;K>9`b?y!-Ho4fBaW4>GkxL9;78t_@s>@9qnysTSoF$)TQRckR%Z*S z-Ic?Y_bH!L)+zTYk1EeAU(f2U>aUuiny*@-dR?_&byC%#>ZMLlYt<vv73u}*I`vj{ zllrv!Cv`x!BHNTbCi}kZ1=-H*wb>i8U(J3i`&jnL?2Fk~v%T4Qn(>;Y8n32J(@$&C zzNp=*{XpBH4ay14NzO6ll;$jV=IqKjniHVY>Xz!(=?3eI_0{?(^*8nVa^J~4lRMHd z&M?z3$FS0<H`z>&niiW5n~s|<nY^ZUQ=mE09A_SE)|y9{A2dH<e$KqX{Hpo1Su*#` z>z8N9tIk8q6oRk@L4opu`kCyu>_4)TGy^nmYEEf}=j_XwsLR(oPv}qUztUgR%X06} zeZ|nvXf%#8mKmoTA2r&I&l;aMzGQ4MesBEQILI{Aq%j#xyG{FfC7Mkinm$8SzBB!3 z`UTaIOd;lQb3b!`bEY}VoMRqt9&Mgze!yI1USNL8ycE^jXx?hxV@7S1P-6(@D*vuL 
z=2W&QM`TrHEz2TVr?NiJ3Q$F=l2z#{oocjdoaz<To2pgnzp1yYud4s0zNHq>*%jG~ zvR^<KU(Qx(LbZ|FOzlYRChdFLkF=L^!gMjZsk&LZIl4goaQ!5GbMEK4eGCf2V1v#u z+;FeKX;^MpW7ugpV0ho~lOfG`pV9e<@hjsk;|SARrjJa8=JDoP=Eu#e%`cn(Za!rG z*!+|Eruh$Za9)qR*u3HB(x>uX$on!+um}QFzA#uhPkB=5&N`4aOf^b%pNgm&Rqw0L zt1hXi>L=Ar)gP*0b#HaFTA?1GR;hLB0`)lcMD<klQuT}K!|D#Tn4Rg&R%Pq5$7WY$ zugDJ84AQ7I4{PRY4r`8UE^5Bgd=Jt2x29bqX@az2Xlsl%Svy#()f%)GZJ~Cw)~cPT zy<a;`J4aiq^=OH9y>_#9n|7!6E$ty~v-XVk6YV94o0t=u^LftKIW*_zoL;(Tb<gXz z=yvGd)HUkP=$s$xe$ut;dg^2JYQ0%ss4vq`)<2?uRli$*9&&M0e@8zk_k-NaxvjZ5 z5XK6F-LS~;jA5nWgyAE@Rme-Wafxx4@iXJMMr!PB(wWQ<)Safirc<T}bF8_*JO!iw ziutVh8*_M`Do>X;J@4VX$MbgO9ms3U>&O$FRv3#Ah9G4x<yd98a)xq|a-;H9<r(D{ z%CD9GQZ{FKv)Zy^R5sNcn1~Ikk5$)r$12nV)fV*x^?m9Yn2;~4zfccFFTS8Ts5zjG z(Dl=u*Gu{yh7`j=!!^UBrd0D#^S9<-%(grSn-$ZT{t|@I$_Yy64W%k;X4Z<V7S#gv zGWDqJ>)G8kO3g~mE1I`8XLaxCD-0V9&B)}832)35gslkDl!KH*lvzrRQm-^YcPvo4 zl=aFZ%Ab|}vi7N-S8rB_Lnqvyy(#;(>^<2hvd=;j4Ae~2tk!JM?8UH#Yjd={bE0z| z);;Oey`?*>`$H$!_ty8<57qyaE9Hh5!VJ9)(S{mG>#K$j4Y9@nMvXDoc)xKPrsQkJ z{l??Q_l+MLFB?Nl;ig#A33Gnl8+pg`zRCNQwQdQ%PDc=|j8;xkE>sq)=V1PvRsXF1 zT^*6F$)1!Qq>0uhXoqQwwGU{W4?;wS=M?9>#|0!pSE2j2?zrBVJ2kgG_dD}%=8(LY zyo|h(ycy`a=ki|4`)A&fytBTdR$?Q7SrDY`hyHjy>r&Q@tlOBta#g4*LZw$tRV`N4 zsFtZ#s8*`hsMf1Cs5Yy9RJE#ZsM?`Vr>h@9f4|^VuTig8Z%`jm|DcY6GEL1+&(6$N zW@lqo7_*<ntXP@7CVPE$qvn=I&<1JyXj$D;v?|p7LCF1n?K|4vw70afoZy_WoZdN# zoTWL-b5`a=>jvs_bY@+lZolpw-3i?%x;EWyouKchpQCr^pVAZke*H0hliqnje@Xu@ z{crl)`k>q%xly@sxoNpWbM?9A+>yB@xs!9(<-Q5|9|xT{+4z?6pz(z9wy}@t?<N_v z*$MOBysuc7Zx)1n1X|_Rtfy6D)lT(V^?CJg>eU$g>)BP1rrnyWnpVy0+Txstb5`a2 z21TXT-D|kV_=(fh*L;t822+c>1z`+=LgibK^tV+<RmZXLys!FDHBLQQ?NAep;Z60@ z?C-PXniS0l%`D9>%}vb~?GCLKE6C&e>A6qlZp!_K$;<L;5QJq21}Gm=u2mjTZdUcs zglqa}`f1`cNt*teff|EmtY)gFM&o=}^RebfO?T}SsE|$Cx3xcLBXg1<5BKNH%-Nc= zKj+^${dB3iF}kOrI8Nve>W}C@%)Orbq+yHUGsE|We;9Wf4?!G1Ge(*ereRngu9{5H zqD!$9ZH24|=eW8T1}cXt-OBySz^pS_|59bEbJZp4O7&v(+96mn&uPwUzSMjJO{dig zEl{nf`J}ALS<^5vZCUoLg;_hYc4zH1A2zT1_Ay!dOeD7o0?zP-8|HTNvb^ni&3P2V 
zAmjZfSEehAlrxn!<uc`VWwVkh<ynzg$}D5nB;>R{Yim|RR!f$kQm8Dd$*O6pMOfrF zL$kD~1So2YdYXE+T8LP;V4mz^AGtZy9D#K<+8l3Im{ZN^=FD{u%$N0voNKn57n&ED bmzZnJ%gigxE6r=n2Ug9O%?XB?Sn~e?VB|SA diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/t64.exe b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/t64.exe index 9da9b40de922fb203df6b9a1d0ad4139af536850..325b8057c08cf7113d4fd889991fa5638d443793 100644 GIT binary patch delta 32032 zcmeFad0Z4n*Dv1FFzmx1GRW?*DGDlz3kWEqgEk6^`-X}EagCx5ibgXe1BsK)JRz0| zYTUB#iHUg>%_6t~uEZq9#HeI%9FrJL6xaEEPj^G|yl>v$y?@-#=l*jiY0jxrr?yj7 zr%s)!ZYy?rRD9%7ouQGQ&Kz!iad6?vpJ({Tw?7ucv++M6eD~<f@keF&QvB00td9Q` z@YmRF@!tXdJVPJ9Pp0ow`8TSN$#DFFqWP33r~dM@Ij;CI4{lxedj;*Zt6WTH&X@P$ zxGNyNqmUa1vM^0|?*N|T+&QkSijC3?O6slQGAMedhI=U=uv)`K_$vI$U!Gg6;kaZs z(An>NFLw=B%Re0*7TDV|x7-4jUoYcN0G=z%hmZ1?22nDkNCmjc2J&6Kvk(eGD1pu5 z!~EskQ3~e=QU-er3@Do$kC4FL0mCRapqx@kZp-eod{52O$zB`WyL9#fi>Q4yMZ*ax zhXs3OT(TGI=^mA2<TxockmGV4=D1|RAruM@LyMn=lUzEE+lS0Km!bMMjyrMa#EA{= zk$g%D>*}7+{gFbXJKX6%)4fIMhKF{fi};N#aqrS|_b?PH-WJ5)Q_km#O}UO?clDn% z=ZH;GRcB-%HVRdBmPk=64rA}S_tki@Z{3a7?#K&ODIHHBFEu;mhg|#IRH+axH;E^z z>JaA#7X8M0srkOkC5Yfb6Zn)fQUKaSVQFcEg5(xmH90=H3$`kkNiW>ZPN@^Wlv=w| z;goD`oaPhs4Kn^RM?7g3k4jo=2*&{z#AfLyIs0=|$*!nM3l$SY5tCj=_C>Co#Cw9< zoz}-8st?hd?&5LjIlWRaN<~NBsY3Ox{#EcYwP3S&C`bHK5Kl=pU5Nh_Wm=BX&_FBN zH^vujM@z+X`e*8;EBDY!6fcOMnQCcBNM~P@%cq2i_fZ8Y55kcfB4%He1(H(dmVHG? 
zY(s1BMJcV#6_27&G);;{p(qIj>6C(~l*dqtNxwIH4p)+{4FtVRn<xJ0ETQZiHL?iL zpe3I}gwmV;Ez3`X+q3-8=kImzi(JPz3KCs;=@*uV>Gx`DeKf4vqf0`nR31%Zs30yY zM?9P(&PdJ?R{(uT$PvFc%{I+4%`_FvJamGc@`&)>fGH_BdW{^<u6snM_Q&k3s?%3i zLKFvRoI2O#I<!$#u47`{e8G_$H&wcWDheV6MFhoj(#1fQ>)E}}7fwoC7M6n1XpSp{ z3_kP5*nNQd0raCzt(>KtN~-o@FL?Il9ZvRzXHWkoBo)tr$<JkY+sSlZ@%|puG+b4a z#Vh5E%h1osvb;ijycJh*6aDqbl?rdFfwl&^LY{b4rSeR?m2xJf$z@1zvbA2Z{PoN1 zd9MiTHZRoShubdKBY9W3WRgpEi)mjq+O{9LAe-YZ!vd%bRCfbd#YSl_m{ZPBTT9K3 zVe_RIXJPW)aJgI!=D5=NNaHeWk`qj-6TrTahO)y@rX*g2#QXjPv&*o8m?k+UH4@iS zg{xHInnYZG$XsKTteS~wl)|(~VM+#*;Ltq};(qFjhU~XUkGTwqm)X<a8P=*+S>D+$ z!+BtWtvD_jt;j1i?IU$+7VA>ZnC-c72?G8Y_mN6;8CHWc7d)u&4NncGYC}qs6miSt z63x_~6XT{rAa<?K6b+colr8$W&}Dd9MW@=1p=IF|>Q22+!y+FwB7(zk7g?eiCdx0D z=!YbZW13Z+ji!mF$%5^QQ4lYK?^o~%;w_h<7y2Dq$n*&-^@+5emG(h*VHAXd=Y8UF zjP`OFu9AdA(iLSVgsSrv-yFA#f_Ss_G<BcDuo;Y|T2kJEcv>p*Zl5jBgI-mK7Lqz! z-$t@r6{6qkUxHE9$t0eYa#5J9jFGXMsviE0L`Wh{BYxXaUgh^A{z#>Ak|16zF{gF2 zgzESD5?!m%eFWPf9ucu+KdTW}umnKCd6k&$u@p&K$#_RrMX2|q)Ihm{P-LVgtUqFO zm)M5Jl=UdFrN=00!~rfefa-LVqT92wHSknF<g*(brdlXRx&3o76HunH#Ac0(v4sAW z(Fc^#Gjc}jkP)g#f{3REWGnTwhEncMJxKb!jZ(5g^`n~W_qvrpN<RwrnJ5K{MGzOq z34(Z35G`@}g6*&oTBTt2Awk44IMqwAmj(#-@{mdB_DQD6rYVBswNwlWOx>NHR8Q^L zp6ye&+JnZW^(T(A9*99BO43p-TF2P?PEY;b(}JzeUBCBJnj2d%rc-X19VK2g5WI68 zz2;+iamMC4=JU?ZCh>Es^Exb^XuW@q?FdgAm!n#L4<UJtA|cUwL>l4&lva|Kc|8Zi zwb_Ea21r0hIDLVA3rw(GhGZS3Solj!_WqNPYS*V}xO^!Vg2Te#Fzok``%Ya&lA#;~ zhmCR<#QQ8k*VPIxEKm-^Q(&aNy$+R-wSJHQ#h9OXCs#a#3h!&ZN7M8))JlS!)^3h7 zx(pxv2wiweUW^~1Vv5=*gZGg^iQTd`U%<+mUy=r~TQHi@S)WTwFtyQYZvlg%ByCWc zRARLajg>1}O66nxTacW3R+iFGg&#fJfS&b~dlus^T28S<O)>26DRlpkf~G#k!D+#6 ziOZ5MVO-1UOQ_CP8rLmU@4L`)x#HTmEb&yHERj`|KO(+I#E0^zb?DBc`SQa3xm2yC zwv?^KvXdzwCx@dDV%<;jkuqREs05ecH2Tmnp>v5TZDL$hS<f7(YolZOuhI+{YGM&} z(hwxVSjoef6lEDMLy;x+K|;|X!SM`bi_9u8tnwW0W|!e{WSHxCnFuL$IkqNOj(89D zlp~)nx%O_3=n`&zn(Huu+9ZeXvZwufS@)wf6!oE2E(+yj>7O}@X@2oKnr&YkmzoRd zB<DJGpMnG34nqSX%eAkKOUShs#~GzG9~v~Lr5q@n6VQ+e(nEKF{nM|cMBlX%GXx3J 
zb)R;F8PO?7sILzwxmoXWmGRdP3H40@ttUfSo_>ICO)M4+4yFSqdB(D*^znt&u#iZN zVwC066w*lpWgZBXa~^==>RlEi#dah^s}!9Ixk_(=TwR?~=j;m0GMfY{n^-etTd)mF zhBV!S1qJl=--{{yA(d(mk%q^x2?23@RSa7lkYpvzkPodOc;(u&LUQebal+&&%93y^ zrB0}7(mzv&S{bC?$U?38htV8oYjinB6T=yq!E#A(VC+jR=%#8CoExepp34<a!Fr+v z3|i^5beo7U*S<u}_E6o0k)UQ{Y(Zr~Tg{#|1Ex1tlP1fc2~>$(v0e~$1%ovl*&A<t zD-Xgh(u=47WactV2L(!KbTs=iAgb%>-ijzu-|#eZT$aTk6%r>Vp|3PKngw<0&QHr= znVq7B+=Go`2^K3FA(JZY0jRw)FDYX%nC))RgYWdfz)2hKWeFF=;Q``$nN7Ee*u16e zzHD=+g}S-M4uM?j$^rtH1^$K|66QY5K%;)|Iq6acV}Y}yMx-Lysyx(U4O&(fCe1~c ztD?LX#R57r>w|-6pf;5mrJsjk^@AnHN$awJxd=Oid?5w_Ok^2A65B2k<tmMrq%`f% znP`kZb@!i0jdFC(H>Kssq(R<1N%J~TEDg>;=`^^c@g2mJIu;XT&@YSBaEG#Fv5VRt zX0j1M(boEzu+r{g<0#m*&*aIIsVvTs??u)~<VzHJNr^01BRy-wP-ExHlqL*RH{>YT zgEWHeV6tGV_Yz#kp$$dtXfQi}qJiNs+=<X|*$%-iM{LY>7=FikkR#R!^=(?)w_S7C zPeJLK={X!%ky$uU!*Ld^baI3&i!-^%+IBdhbq*BV38mJ8_!aE3smM@B|7!UV!J+-8 zE1PBL+5gHY^vnK5A$l5PGoWQgqdLF@SNs>>V7u3z{A=5-pmm*X^#RiUQS2i_Xvm8c zi}C0%G=)>H$CeL3u6bSAFNUO^Gf*$IDjB&HfREDZ^!t~G=#fF|5Q=n+qR64&u;IbO z2V9Oq4k<?jdp1m~8+jt^m1|UE^m?##B>b=WISzR1QnXe|$zvY|_X&F;7aFjr1${kK zT272TA!1DAl5pnUC6n(S&W3l%=6B|@C%UBZFNd)sT@oX=j+O^PAjXRzo~5Dj3ii0- zIj5gAGmPm%;xzMEzYviZz1V?}99<zcvg(NZC4?D5hx2v0Y<lRU{121a#n5FbIg?=! zibr!{R?N>4&tl4jPoe^pEs3qoWm$w?OO#qy!g!{cUYW}t4IAk>7S)ipbYab5Wr3fe z7o;fcI@E&R3}#cpcj%TwEvXcjfw@KW8CEh8(nOX$ZEg20@j?-7@MA*#EiKlbhlXo7 z+jm`sMQ5hwxCJ$$yF=qJmO23m03wK33$&AmGfPD8%%g^mj3)rKopM#(DC<d;+l3;< zb9v$kRX4%7J}n(bsqMO{s0+IkVTujtjx}*_lYXze;Or)wq`6qmvS@xMo5gyUVdNKV zcx1BfPi#@uCOjI%Dk6`D#r#Ei7iK7wW$Se6(ZS<cVN@tTZx(wfDuz!qu^mx){Myd! z*Qjy)^D~*LYd`+MnXIg9PrgSMd%o*r-7;8vYS#BVF?VAMYZ%uzbQev!Dpy&EbOV-^ z8gB#2KAyTV`jO#^{GtUhyV;d@uH1cnh%-QNOlroaq8O2E;<RUV`(ZoeI!0(9x;``6 z&1pSiD?zcP#^|e_B&D+wA$v$`1>p;UgYWwSHhg+oW`e?3)?bPOX0{uhJ*(<g_kfk! 
z2R&96DS3k2z+t`MoW9a+7!zs@9|y|zO*gXNCj|M{(5A|EI28(?B^%?C)_3sH(z4Lj z@>|L;c2kJS)~12ZjUqedo*UMV<I?stR$)x>pO!~vT7x_qpP9z$jk%ep{X5Dk07@D4 z!p_9Uc1L5;+V1dW7o^JHU9M^vH8d$sOTVHLSbKrFUt^=9yIYTCqMHOUvIt!dCyu_- zhE`yAQ<fs6MGgVcQYPJ50Fqss9}V@lcRo#n)!x!i^9U)Mg0$T!Es7m2RNb(QpxwpW zh=dU$h`zM6rAXT-oZhf96qFKM#_G&6y)seVq~A)|_2_XiFQsF3OgRtBr0RyU0(X`w zF@xI|@c<nwiRrWOQVDfpp@!@oD9#hoJ5ET|?m-JfkkkVS)FmJas2Ji5%M*{}h@X?; z4y8gB4#qgLxaHcr@52s&lXxtF;vuI&uouJ`vs0QdLumIdonfw+XcqFKF<>jqkM>$5 z0B46MCh5sl^`YG12Vspgu&%Lv`xOqPMjsd3q%$NiblebV0F>-fWTv&9b~!r>;^+ht zijS0VnLQd?U|9GoI*7OjK$Vcpj!VD&%C5)825-S0*W#XX-kB;CX`2whkbLYION#4j z?E-5Fjft^k8lh%+tP{QZVD*fkCLKy@Uq`)rAh@`DJL{iVo`{tvRt#BA+SI{%F#*A* zrRW5y2+`<bpM6lhoD>Zfm*My^w8*aQ55q)ycL`^uz$RoTz4@TZ>_yCfCQ_ygRgyQ6 zz5_{CB~l@3gSFZd5;+NCuBaUniv`}U9SR_b1|xu|3&vk5^CuDo?L#mUDM2x1yb6Du zUBouqT5DVu>=)3dfpkJzioeyQ_zPfm*$yYqKBSnii_ji~6Ixed*kf6P1Hub?zxAjW zMENx850?S8Ow_KW)d~epl~?JBa2t_ZLHeSO+D(?K4ci^gViK@RO6Azs@nO~sq^CF- zZE!N=!gB$=ij){VHY9V#2x4WNT$WwyMcF&lpsfx&Q|&X9T-5$5=c1iWt2p?pscv>{ zA*c=7fgpOjb9D{cWMG)6lt4$$QxrRMJ_6oqpwwj;*aZEh-3dlT-2P}hZ8C#FM{8#v zr+(BRV9Q(sCa%m!;rnR`MT9hxBMN0<s<;ei8d-C<eFIm*0OKrCNKN*dv<aN(K>z(p zO-{NDlR*|oyPIl>k|TbR<91qllpW~a!{cfLI?T5FSGaols2;oTCOoGJSPhz~)=h54 zXOh*zUhUa;a+XZg%EaRm|0Ddkb!Ftk@X_!8Ezmgzwn#6Tg$5W+j!QpuumpgmlQR)4 zF*1v%%JR_<taswj0NV<xufFOlcqm=kKZDu2#ED(s>`RKqg`JNvJ#+kUEO?G9l&t6` z#2J&qt|lf7TAM5<tlTB%XK3w5eG_yIT5}BsX090dB`7(f`}G@HAvTL2T)T+6^b;GK z6g*e#N!4tG?-==Qr<8WTHk$C0$adPlQ%};Ke25Ati-M8+kes>SEy?FwIqOA~wJq)r z=Zr%JGnLpjO6xKlI7rhbQs~EyCH3Pg`mp;+p?qr?3+)x_mZ8<Kp}j&vbLOLmi{nbs z=eco~lryH<jcA4R{s30mD?Bt?&f#S^_7IPKGgK%BymX_Mz0@m}-&4yj_8Ktw<$4-9 zVjIj8V3d#GGE@LXl>@K}``PZeFpZB(#q+4G4%!0=DJP^!7g%2J0bM57VJLM`h7!#_ zn1sGa4F6j&d$+e|r%|Esfrww_kgmIi30Th>de3*8<ECN#`iz{n1NJm&b8i@)E3k8R zy2JSrK=~Y!x24v<#ZPIE50c|mTmCC)#@~2SI(T3~4nqX2?r?VvYw6RC@yXrz<$GAq z<k42Be@x1?aRH`@%4YB@DJWCc-XF`OUSC;5V%*e1i+EkKkbbVr9h$$h^q4{^B+4pz z9=Nn`bkcA)hlgNpa%Dxij`)6EFbs;?a%>rQxw1sr1uA)T!;G=J-=(&O2yE-z2i->} 
zKuS7TxsCNPZf7_R`=vh7YB7r~nC%6!Iy3{LxnGKBs+9SS%O$PF;Le7d*q&WW%XX5q z2n5ysXu}phJLSANb>?dpUqQ^?Y1@fu&fO#PEXs$*Qg#Jx(~f8B_a>OcYi2u!M7DU_ zB%aB&Puf+IUG<AqU-_Lk_43TxeN&MZPF*a$X2-O~99tcqYtOF0dcFPtSm5V6QesLx zRFYo5$^exScUqz8KgsoKv3;v|!nb)$C^{$<VXH)@td<9d{*xfS{<b>`UMwEN-nudz z^W4>>uZ%^OfY!=WD&Zj&mZ&gHg>4=(V^t5e4*QBx&GdTxQ(ma3zA_H_#@1%iR~|)g z*xH8cE8)%olC7`&Kt@LDE8mon3Hr+QNNQ`Fs;^vvw6?bCmM&IX+Z=u6vob@Gz7j*1 zYi$Nsj_8qNI|?5H`5hs`bU}o_5k^qI1HDL0`u#unt_aZ|;ILjyMIvr<;=xbb%wk(B zjU%)EKq?7)YO5U~m{1FJq+*Usb4tk(eI{3Ib3SOgc{th2iXr5d?fX5JA2WPDQMFKI zR=pWxNi96P@_T*dBPaodqw55{IWkht?0S{!;cn*CU`w=qk3WpzYH2Q31Z0ldx(TkM zCRYRgn$ultzs4>nORlyVPOMUW)k>&i(~ZMf`pRr8<~B9POK<Z7T7oH3C?B5y5n2-| zma3`N5@SFXOM9bb@swb&7NeS$LF;Abes;Ba{lvHhxkVK;L2wFCO{xfoI(f~~aF`Qv z6<7Nsk5UaR4QPN8dv>$nnAvQSOKzP288l?CC#jZXyE*qk3vnjfi7c5#{JqMhw~t5v znGRrNz+8vqTAz{obFTi$E=VgUNUwkR_`U^Tz)-1lLteJFCHl&G5cWY#GIcv(maVPS zGRxMsLSMNSM#R1{HSR1imMej(Lx^#h-gRc9589hf9kByvZ0z!puGaNnI1m7Ci1LcQ zmewf<#bT1JFw@60rLrr3SiREZ$brYaZk%J>nGzT)g(SKMoJFR65U|WKMrHQ>(V3GY z4(S*L($|Yr!Raf1#?~R*G2KWa*TT=}<YVoF6lr4cK7si_!ZPiH0^pSF$X&%gIJz#t z*JS(QrYZfNNk8@;28D%XHNcy}KK&rYdKw}kQ6?S|T!%1@v#Wm8SKft!sj90@Mot#5 zR5yL469jlYFt*mawO&M^tW$|Oy|v7wcHQ$(PNwu*rUNfh;rg++C`(hi>*4N?G|WT^ zLeXids4Jj7dl#mazOobv;Q11J-892{Yj(PsMN9(^ESw*E(?Y4v0+aredqU9-B&%~7 zVryC7#<X4)U~n0}+l$;V`Z2p)&2rDn6$-;T7K~gb$V?yaWi^e_)?*MN%>|4=nyH^d zkeCJ<i@p{HCdNKa6I*{!h`gn9t+AcxDF2qGgVP&T6lAg)*%ajBaH2dr@uoTPCbID> z3zT=$Vk7n>blj0K3^fzWYSle<Zd^biV&Kt)PBBU=5X;6k1zT0_`zG=S!^=0d40_r1 z@Ek$BV-jJsWLGOwC^xQHol@{Ox=u+a=gT7^w@9k0dpMpf({lY@VuJ5KOIn5Oa@_9G zT)Sc2FW1!BLk930paNNNvC`f@&q|g364Fzubpq*-h@@0?qdW%fa<@dtocl-|YGucR zOBE!!Q1VS5QY7lCbZR!nJ8j86`wV72dKDScdZTc)CL)#=6I)x3CEv!%k98RkDwFnK z@om)ugRk97UX03cz*2_;mi<@!T5o9NaUIb53lJ#G))sd7ShV#Mgi1`d)ReMdfO5W{ zoW>n#{?NBRkH}hDE|4RZiVYMbm7oM|6eL}s1T7S#>F*G}S19v9ahQxsfZQFxy!4g% zz(~0-hRm&T<+8ldo|EmGK{`FOLprVJp&3xIdp*l~n3VY}w@g-?G&e|@k>!uQBL!Rk zpbGdMAo)1f3e7U@JZL8IHkD~^JtbF|GE6dw9_-@rPQ3~+k?ijhlkJ52&2t#W1+6;~ 
z#EzxF;a4H&Wky~^x3P#5$=2!M^m0xlerFE;YMjGK%AU>)1bF8_)W&R|w4O>Vr9TDq zN5~exqxuj&)fq?~i$S-Tj10wUkXuHk1{oIms=hd`GN(?GO_RKxw&xpBrF(Rg=kRHU zk|K+8<Y{){MC2$k{9vZ;r?cafCde@vqX2Wed^gza+DD*fX$|h-GRy)c!(&AlX7(ab z<%=(-eORSre|qX}_DOO-e(g1OExCK>9Ulz`*Df3pF2mUOkrOPjUbf8VI<bCzXUB^1 zG|<lrj&%Vh$7l}->5$nz(=XRP)4k;Sn}Q>A)D8AQ-#L-9(IW!gx~VEfF>s-XIqtNc z;new?_gKGv`I;@v)-Qu^TFC1A_2OTc573#pr5O1K=d<{f7x*uZv!;{*eD7Nu`u9z{ za}%0$7}*iwS6#l)P!4VFb;>pu7kJVev3VbNk`$u$3>XFb<F#lRLWgg%h5biFl_Sm; zKi8AS%bojW@eSI^z!8G1^512r`bR}2br7O3BAlPw?%XZIg%|8%8?*tQ%x6Fv-}iTx zJ)kFl@&;Qrz}#o)JIY?@Cn^c;GK!~NhG8o4h)f(4LE8)s%8}C84+Hk`RxR5xFw(!! zNXz@g2k|Fz@t^G2KpfyUvZn^E=d~p)G&Rn8;5Xc$ae0?@62yN|OLEdrPz#&TLi=_i zTy?qKc#Oscudlida6|hc#I4a+{er+JI{bmN>CX__{SSAyYhT1VE)MsyYq!gYpIy6| zkT2mQb$`R<!nicrZfn;WgrZ~mz2?zg0XS8y6w0Tu!>LBiNcLlDbPpJZt_CO0^|cX@ z`&E}0RZbTGT&di{ja0cv*EQBBEjVcJuSEaGLgxe0ctlBt%WPWO0K+6Q6w8A4VfAuJ z-k9SJ+M%KB#k6UB|Lg32+E)JAYi#S_63uPqmfllXb(H#4OdtuaSV%o8h5)BsW3Vj^ z=WEiWzj6Es6$Op};?DdVz3)+0mL6l(DI~IT4^%)xLgH!Z))Bb?(o7V*tOS!Ohl>hM zHYx_&{NDjE&cbZ_=wOnHUN9FVQ6vWGJz{n|H1!j*tiaxj*waQ{wFC;HUiNC)JW*e* zWIw_pGeY>KN7$f@D1Ph_HYLNv7hh*vGLrcRuCfyuefimcFjq!D<L+0KBL_@zQP&N< zYrEyL=v;>Dpxbmof3R_xG5qUwtSqw+KlL(uC36^Wxy~+U4i4~o8QrfNjIN=r>g|_V z{~^(X^zv+Jb{Wnh7QF(s5R7YMSJ4G7!`DQ!;Vy^W&FLw{9m4&Z^E90Izs#N-GSPbY zAW3PoKn>AKdz8>rCG?sSny-YmD4|j%^rR9hS3(cVAyF2+oY8{{VW%RpFF!#Y<e5AP z^aJu3J}pJU?agKn4eMl+sM_jm_mCnEvL}Wn`Mv@<Tf!jl%&V+^Xe?h>&wd)3=r#N$ za{1)_&O)<{T@$G<M6~=GY%4T73Y$WVN<SKch8r)ksae^)SkHE3jpPGQGAS!fvy{b{ z(jxo5PU^u8iAaail@~oIvK#297g)I|p4UImc9;frA9#txb6lD%kI>`N2o-)UdjXDP z3vQgz`u03_&s1SuLZegs$`XQ&U=eU>`$)M`UUbcAJP~vgPzY}Syg)$L03bb#nnDLp zNBDX+!pEopN91!dOXgMpCl3AGsG7X187?9(A^p<wUj$pGClyCG_;mO6$h&WlQktH; zxlJY<ox2P_=~=*VQ=ehCNPkq4vTD%wrU1Idygoh_Xw`Xr)#ubpqOKU(xC~2nvWnrs zUPI(S{!X@i_>iOk^dk&X$xQc1?4B;1qwB8t6ZefBaty+s%i*7PFfVi88Mf!C6}n;o zX)jr`4nu`PT1KRoK&sBJ`oW@?jeIG<59?0V4`tmNw2^H}^^}O4zO>4pmJY$H26P$1 zwzG|9D?jZWX2{Ow!@gw&+1(-^g$auj1bt<h28O~IH)$CN5US3Wd8&o&$PTgocmw%- 
z6oeHPYH<@50YN-Tw>K^{V>&dEzbptzaj)f9VwCTyl-LKfqAqaoxeWf>K*imUpOI-# z<j@T{D0dFGA_H`d;V&<`Vlt+F1LrGUO8kr*<OWxrU~9`*Ig%&`K|vnp-(jOiL{2>c z#r97bw46r<V5`QeBRlg?N})1qnF&`JREnoU!6Y$6fX`GLV5BYeN$IB&RyU%{Ks#mf zU#n*MJ5j0a93rWA;zGAF-N-EdbUqt9GBnT~Ef>Tm<Fa@_7$<SHY|Y4|@L4Dr61Q~O z2a$5pSfw73w#>IKvigxRq2FTRQjeMG_*2w1gMeB39J@PmIG??hnS|~E#a^hamO~ed zC+)f+UaVZ`6CO`Q+d*7z8+B8OgyL9!MAZG^$qowX{Fk20B?JfUz>+RF)>C2(NVhNS z28So>kz)=VNfEXNuk_E>u6KHfk%K&$H77CP%obV9*+S887?(kwY=2H~{*TS<a!xFN zz=P>WrG{SEL`{{@Q7%g!%{M{T2T@yC;ix`*-e&gns1&cOxJu$Oys?>`7&Wxl5!fRs zzrg%~fe0;eN-Eq(+Tt7;RS7t8i4);2F5ZmxWp262Qb(7C#UjPZ7#he_n%S{Kg8a<s zk-j%s!{}+f;^tBn<n(>$$fjqSb{hsHQk-%brpPI_t0}5(u)N&DUW@*Fihzz3$7-mI z_7rV@vafUV3bnx+Zrxm==sK4BujuNGE#oR@$rnVwt2a#W>;_1qaq2=s@NhN2w2iaf z30m71%x@7hxD>Z+6!J#D)+of^rEVX{p4`W^?D4#50Uvdy*a-o2bk`^aE@oHrmU?G` zh|rqFZ2p++Kr3dP-cf<!lUwvXvLEZt4vY!*`E0LzGNkR}&Mu7^rgOrU86^gN2NisG zFY7&aIzRC{_QY7Dz8|f54!=ghu6G&YH?n<Wr;I-FEegQyFj1)b#S(6N$V+1}U^ci6 zMH{I)C#4ndp?_?#X!D@}!F8(ab7<NfNQ4EjjTSOrTe_yq!(~X>$Y$hs5139-ZcS)( zBmB94Y+xJmWBA!lwkJQH*L=$^<VW*M&TJUhW6<smvcBm;E5*`XuzvcTA%7&a8m`Sb zbvgEedz{llnusXu$#rqa*JbFoffbHR;`3YBzsB|E>z-zxj2qk~PKzS3O*cpjKkaa7 z-fU+2@egQx+4Awv>WZOp)Fa)h1<Wv^hp{&(@{#W2G#8Bs*+ZNUOI?6qtY<7>vnHet zK6_ebDoc@00HgEV9ysqxIO>UcOvkx<K<zlreO(R&Q+5MSv*rmq_+D?aWfNmJOv>R8 zJjF&&N@uI9{n%rZ$^-Ay^c@`{6kWYh|7ckk>64QzU~(A$!bz4i*_71%T@twLI=1*r zp-M=08~b9qM8nyPsUjPaZ!c$?C->!_En+7o$Mwp9y1?BKx*{ShYMLsz`5kn6qA{O; z4SU$+Wwdm`cKj4>2=h|*G8Q={fe$a*J#tDPO?dmYCUhr+C>-w);upu*z3GGawWVy} zjAH)&O}2GL4FAMARySk2^){?;9FL?*p8{#%ywUL1BQW<;T(}yw{v}}P+WVMRdfO)8 zxRV24CGM4dzux7_sds5@KXn}~K6%Ty1QTS@jj2zV`+B{G)o3wzAGw3|-MX@iRD+yR zK&e~+Nw#%1qRryZSb*7uf+=B@S7`!WCEXD&-Go`K9_sovu`x5dM4w(vtA@@GMdpg< zMvIklak=(_<SZq?9-En>S;F?uG!8&Lp2$s3GYxLEjKi)fZ@o`Dzsc=u!F4d(QP^f) zZw5!jJxy6}HNAdTaBpgX9DfU(#4@<OVeq$ngA;Ykg}!uTV00O}+S!CzaXQW87*1~A zA(J-a5>`2D11iyFc2q|Cdbx9bT!wv*$yiC+x^FCVu%z{P%;i$pHQWtzvX@{9t5`SH z$I{ijJ{HAMHp^!B=(h!y5Ydt1vnm`4>z$rJ8-aok{<z%q2POTZevq#d;+IQ5F}pBi 
z9FFgQ(C_gbrvIc;Q;HA-+F81BgqaItQMLJn5nbd~9$Ihin`XQ1R@UhWH?`E~3XS~g z1?=O(ZvF06Qs=ZFt-)pRs*)AgBiIPoj=NfqX)qoRi8EVZn?6|Sa*a|7H_X|Vgq%rG z@V%WQNttS~Y{Fr8gjZ#@9XTOSf0rSml3kn=>{DrLmj$~&#~6Y1sFwE=*<UGDl7ne# z>jpoNG~^WQ&iy{f6r%7z+LE+M=PbcazjoTSM?q8XDMLVo`bUpo7vPLhvv}!&ehFKi zM>Fo+H|fpa;1a+zm!Xr57IQ6z-AL>Xl;JNG9Aft$2#uKoN^26#HJtl|Nht_n5c~bb zw)r4SD>8Mv2(v!rJPw=w4Md$lu=%-^^E82ALvZPt-~GmR78&^Oe`9-#dWNh5Blz=W zewq)?DX{Y{EMT{aqWed6<RYcWd=NWgM`J{w%3%`4BQf2*RI6KA&&Cud=nl#hS#gc6 zXSU+hVe3F?zscYY9O*OY_$N?E|7^KVz7={_^2$K@$8*Jl*fr6mS-R74L`c80!cWSq zXZMP`x>ucfI2Siqg4oqMug+Odv?@0_vYt5UIxeM6!#>8hKAx?8H;RAf5KEdjJT`I% zR)K=L_G7A@@NVFkYBUJyG1a*PY{R@BV<uT~9lYvgw4c_FLFg;kasj7c%cVD166kJJ zY;I8<CVmnaaedBYYk^k0rOjJZH#N_#vWaw#(+4#Ni?@||&+kE_`^K=;3QLl-@jyp$ z(Fa)0{8f=Hfw)@r-;Sj)RW|x?o!N!?mwon~R;uz|KlbT@Y<~G^7P4@VW*^I6*uXD1 z#kTC86DWJ&rN-W}Q<VJGZ!z5mLv|lq6s6(kf5#U6J0hHX)kjaz&_C)0H}ze`O<n6U z^nHj$KbV#9IO1G})-@>0F{uJgcQu<T?#?S~_fXH4<D1qnpFI)o=TQ>txo1^$HN6-h zX7B899^(SSQSu$blOuH>4=&S^FwG=(VNXAP&0N-LZ=(C_<3K&Iq^J8sge=(`t$Be} z@9n~W_bf|T+Ldow#d4M=@#042RU5-LFWtjGoyf+O2D|@zEZFbYc6WaQ$nIxxm0Q<; zPlq(?|IJ=s7O(3$6ZMCsR2CrJ{5LzlEUZ`fDvZ!JBH`VMf5CC;<WuS}+Pq=+xc-fa zcmiz|phnTj-?NzIWAZ9MwHY~SDxf-Xp2BFTYARomo+*CEaVr)H;-y^ix19QWxEwd2 z8YJ8IxEyzW>S)pZR~$5kc-g+ySh6uccCn9_N2gwcC8o{_PbG6@_C%q!82#bW?_i)J zoBm>)43xd|VFY`nF4(;>4o$AB>#5-%*u$>WkBmL}GzPQZ?)Jf4s|@Bl?{~;!{rhb4 zK@0!Hzu1X`vHaYB?f(5>uEuZgL1iet8xL9L93I9mILI7_qugId{_Neu3*G555sN%> zm=CXK-yd1;J}w#sFFjhJ;lKEt{oW84(dU3Ngk;a~e;ju_^Pqg(aSckwWq`Mh#?k+D z+;K!^>hmBScerPu4E9muMfbr`K(;r%z-QQZ4?DJ*=Qmfv?;54cd99;aXJ2JrC;PCW zE24D2z1l%t`zl+wB9?#XG4|AoMZVjyrH4lEF6F$+w3dR<4^ic^a<V?cq)V@KBwG9m zTVr{FHy&V7WpTmp&qalc+AuK6aV<^0*CpLJ$)=V?2R?)-&<E2Ege<6|z2li!HqiT! 
z4XW#i^Xv)s>!>h~WSpK_YT4zoAt7<HYqSj>;V?9lwk;naGOCtkt&9)(`2;46-IJ*0 zva0)nP9QpMV(V6hg&o<ebkC2E!lbar??DhZ#2U1>K4b5#ObIS{5qm;R<HC<&1u}(a z;>YavN+bX1To$paPuwFgu9W-9mQK0i5&2$vqx2l)innyoE|;PET()#oFa5buAkmIu z8ti<xPuZKRjMmpdlkeh?UtudTq#v?GVGvgnPWN#J70Hr?0bu}!Tdh1fuB5{`sHb=d z`I8<r(OhsCSTiN^JC=uOG3=;0)C+zMiyt)VbhLtQ2x1!2O{rvZ*x*$eI~!FVGRB7@ zF%iCdf%-|d`W{i{-rV#^1F~E(69-O$cwMN!kGU5g29oV(%stmsT!i@pb1zk%dxKEW z;8AQ>d06j9JF>!Y0mysM8nFSB&W;EwYfcA)^y~91;N2kRw>pfURlyQgkK>))*+U53 zoWp8X$MS<e+Wpb$V;XBFR5JM&&tf*CZx6t2pMoXod|5c%Y4`w<8JTbn;VRg1X;csd z;V5v^JmFSZSN5}Gq-MVt7=)rm$=X24X{5}p1qEXQB8gmv+$z%L4IunxNL`Vh=_u7$ zdJ9FrDtDg3v2ij}T4f-u_-i1oiDAaIArTSCg330p1CuS@4?%QycleqnpTbZIVT;!W z2Y<3n8BXtGm`Q)Gl{sJD%QmgG@^i+r&~@ke$}#N5I&si2sD5hgd8ikhI22%1x4%4u z5q4BN1``Z({x0cWF>(O71$*Wz|7NuhPvJj4#5!4%{hg4B$#p!X&TNl7{{b6o?aznK zWL4Hloz4`1oel>r<7hwl{Xyom_DrY&6>}K3MNUaYt7)D6Lms~Kh#Qyb5rM%xt$<}# zBzcFg1JRKgw2r+|(JjzR*>5-^udkKKSJr~uhks}rJ6{nwym%W{fR!|ragnKE2IYn^ z=rVi&)GVIMNjdK_1pE!(NG#w^ZI66t21|b=G3wW?(1SNW%e6;d0j9hJCJ*Ejws>6w zt9;~He)&`u@#wSsuw3@RqvjCHCNk7)zAkALuy*_BYz+&*+f_YxvAed3vGa$*k?KZk zRkRdP)6W0Xvose%AY<wLvus7>06u9dd!uqf@Rw6ivOT*+UhoDBMfASbbBEaUs_>+X zlQ75|r71`%1>%pG+WMS3)f2Ng5Xm6h(Mp1ea)PKDIRPfVKez=kuf=893KE9&>(YnM zFjf`sSD20r<U%eVWQVKf=sho?irI}+-%x4rGpzq(-FtisPX#jXQes>8d01J1P&BDg z^p(O#kUi_N92R}a7kWl|<Kphh$9y!M_YXpz==&4x5w8zo&p#f@Kib4TdVB~!p0T@+ z8~K0>ENXoOf9)HVvA%nk9|y{5^p)3Po3*EF9>~_LPvp~kuvgdb=8uhI1@<IEE>;3r zI0JA<2WtZo`Tc3O%^uDBq_R5uAU>y>-Ll8|_rYq~-XkAYv&1JJ3=P5{r|m9nbw&4s z6~$g&_eH(*Z7O^32`jz3nP9thA1@6BR<|oE=P+0Zz0-99wDXE?^7SIaMBsv0|Mz_} zGSh!z_VTi|WZ{*N&z@>eE)_n-mVDHee|H0W>Z7Q%y|jopdR1e%Ld|{kks`kV81~Xu zI$nbrnm~c4<i+N+P;^eHzoV5ZPO`L*BM0*ON?g7{(Wj-!GPlbjC(ZeL(v1I1+KZAV zB)X~m@GqT~ZeX>dqHqWBwA4yV7j(Q@)Ym82lOOly-yh6A`Z$Xh>e&5{Ct3HIs8l*P zfM?QuAtfk?E2Y;X%c1A9@KWR~T#BVzZtb(wx&sG9KBS*SUzZ_t3=q5uL{4?r=`!cv zRtQ|26?OJ%Xxci#)}#^AbaA_||KC`L9dP?=mTb!>)A%Ec*q@)2^Mi8P(gT@%{*&yT z13CQLquH$k@%)yB%vdMz`2<qg_$_-_vIw7pgH066`CU2el$gt}`H)3C8N%;c$Ob-{ 
z!gtPLi=K?<Eu+|mCr9u}quAF^X7CU1XCaOVe%Asv(9xgYlEW4|hV*{|%OJc3j?q^i zNxN`C)|g{Us29inMHm7Coh|2i2`ML~gkpBlF@(32u&94+;=4V;YX6nWKRJS3``5^b z8MtC4B(`AXfH1LizSrP(0-6<9{hpr3COy@Y*Y0PJJQY5;C7Wim#emNY6yhakdUc;v zqX{z(E6d+2Golz}V@b+`+xVoE`~b>MVtjQVzqXhKSI^)5SoK6V>)=A_v(W)kUj+$~ z5){NJbybiADOf?0C7q1;N?L`GCEcAZrxv8^3X(5fR*<REw+d1yol}tc(rE=rm5wS% zsr2z|tDL@E`amX$6{PfDQ*byhIBBbbV-#>wwSvP3%Sn|AzLW5B1%Hk3#R`sr!byb+ zj+K^^CMtLx;evuU5^l{<0?iaiR`7F#$0>LV;UNkx5$>npR|)47yp8ZbW>HH-I@ggT z1&14*lfF@KIw+Ew6+DFS!wPQ2X#pqIDuD#T-%{{o!gnfoD&ZRyJd5!43N8@7R>AWL zFIDhT!iyEWobag%UO{-Sg0Ck$YnD~6bu|V0D-oLrPf+mrghwd&YlH_V_%6cT6}*=4 zwwZE1b%bA0@J7O06ug=6&lP+p;f)G@PGd#jBPGy65xW#zBK#!<ze@Nf1#ct#Nd>3d zYf^=R)16DnqTuvB6=}YLhY((%;6}pp6+D4(GjJ>VjBa&EsY*mD;YkWk2QHFP!3DyD z6g;1BF9n}U_}v1z^@W6ARq*+Q|DfQdgr8IJa>7q2cm?n&Qk@c5PZ1v|cs1d#DflMB zw<`Ef!mAbhHNq<ud>7&63SLY2;sW+YP2{+2OdFgikC0<V<J3}@u2A|blmJlrDwG(7 zG8L403MD|Hz*DT7sZj3EU?H1~)+^W?BiAoxP@_j15&xY+Iiye$Ksl#S-cu;JLZUmR zP<AMkR8S5p6o*2|0%gBKc}SrMpzKm83lz$HP(HCLq$vui6r>LnO145N2jvZgk|I;Y z3d~Osw<*LZg-F^3#A=1;tq{>pBH9$<AJfr7I$g%AQ|haWIG|tyH|zat*8XKc#I9u% zHYf74*0PGt-|minZmY)X`wYoueKTc;B<`+Hot!kJ4?=XGTz*Lep7c*$_!$xhgKXC> zCaY48r58qYb17)ol_+4I0_FqA@9m1ZNknnQ-$<id479%D)nZFr$Q1dD11w|PP-`@- zXw}j6PC48jSuiUuJcm@G?kqs-lc*d>Un``~0GxR1JpP~({*c0prAkH(kv-yGw9{FR z`6r0gJE>vfvD_l<potupHqG7A8Ann9@*AZs?t&Xm3EAy!-9pStW|;u(mv;+sB%aDr zw@3JI+>GrAPT0Kg;)?JJdtm!`I-3_irO$PU+Hn(D{r0#npg2F19)!JyOY3pq_5}Ak zFR{DZ=cV@80&ze{GE_Q9-f1~Tg-DS*sN#*-*}4pNvN`SAHLu8E(JSn^9RrQ7jmkk# z3aBnaCRp&sK4cRvhkFfTzwhYArzNq_oiY56+gaw$FrjHX^2E}cA=JCv1V;qkBY<`N zDp{v^E9qtI59o}rMh+vMzTNSJ61hT-yhz(;Y3N3_V`rK*Yr>?-N^g;InU7uh{&HkQ zH@eJmf=QHe#cR1@t27k`Bd&v`MY2tZ#*6$_Uy$$}x)rc*T?QSsP3|ClC8N%97)BvN zuGzSDyLpY=Yp83(0GT6X9KA^;o<g;UAXj`chB)QMC=4z66sGuYa=cf2{Nqae&W?D0 zIbI?%rJn}`XCp|crRqXhF{sfrh23SSYv&tkCd{rqSA+fyK;usnf?F2aqd@SkNdo}h z+_OB8n|{dx7Xb1Ok`wFRrWo<6%iunMjej9WFv*;!E&W}Fcls-3_P{L-`3M(RGelj2 z%Fx+maHtG)+@<h{x?9hZ1XlEC-@g#UyT&rl7yJ5eUGdlSPFTT4yeRM^R<P<9NBiGe 
z{#Q)Za@P7{Ilp2#D|#t9W_}e~5G`s80qwf!6zVLcgI%43jmy|8FC|!mCtxlv#OwTJ zo>ISj;01#1cFU^as3>~^H$Bl+klT4&<Hk$<cn3X~Zmujyk=qP;mMu-w#y$-Nj1hBO z>WjS_WSl72?zF7@m?&LAkzQCvrTQVKc9#n-!={odo89Y>RE1c1US#x@cSyYu#>J#B zq{ZzKu}VbHGPdUB0et*2_TJ0Ee0?|8^m0%B>6h4_FAwlr2p<YrtD^R`6>Q)uy?Q)d z>T*S=N(%wh2H#wYy##bdy$AIuKKk)J4r|`n#-4m7FXU!t6n$L!{1Vk)UwI6R79z8^ zvEN_moOt&MdC23^UjdY8ojlyN;n*0ZO$!<&`nBYUetP}JL%2QJm-Tryluzo*M!lNM z@88PSzS__I;qT!`egD;N?(T$q^J=8~_um1z_i848yAR8FEj&`uf_``pUj2ekA?1kA zI^fGFeVFC7LA?Dr_V#Phyhk6_{Ms!3*;tnRdNjWxmQ8p)f#0`<t$RH_^jZ>HaIW<V zYK|RGZmUyi!b4lwhp#`zU*F8)-;D8D+KX&;ePtgP8~0`yv%j&0?<B|Yu*7rTJjF*s z!Mv3ic~u_Me)t9hu6k3o1bEWC%!x-`hO$I9=dI4Xu_s&k)_}qu*kP%T<sm@v-f-<L zT0}nWLA7x<QA31ioDIsE;7xM*?#%U`N;P)h#fAV^#qnzAOOSp9a%e3_dMY*_DkZUo z`Mw?2^W6nhNCf0A9R_MMa;=%*N~DXm*iJZ%VdyPv_x5J9v2UBk6e~PZhMY`x;bHw$ z-RDa&yZ(#&+yc`7<UZdRz%IUROg%flqnS>isDa`?=R2X9<UjvHp-ON6#W)x!{O7?e z_nk27k`8(+#trJCUylii6R>U1pg!y1&c_#&RF~olAgT3HU5bi3pAN$g>T$=Ds5i_Z zxXO?SQaVVoKH3kNV?x3?TGw5cKD0abZ3d~+{6y*+>GHhxR;b?p0J-*^rB6E|q*?Q1 z2jtGWFgC2D3;)z+HlyS;zh?p4`tI=9_-st(V(cJsQC+ZS?^K-r6F^X>Sj$|-@9HsT zX8hkzu!g>b9(5Q(EOdf(^gm9p`h)R*ae~z;o~_>X_Y<tGb32ObF_-PzwaT~)uW|l$ zf>n<zAayxKb%I;t2>qT)@8$@W|6ZHVt}+tSap|>lZ2bG#{DLyJ`TarsE<GE!(7=9v z|2@9Yf|IOy?gu`1xpr$mDAw?^*W=vE*yZEpbl}kNpJ!GH(ejzq!-#VkT8zm0AJ42N z%kgzaI<w->e9jW~tnT_at=eMt&VM|yifUtZV9njmF7D~eFY~1XD}LuqHe_!Szx*a! 
zyf>ELyp27*H<<q_gT21j$RCJg$M>f5%Ln7M>g%wTpF&gJkTo!_oR=zB?w>dzZTXYk zt?j`t{d0HseO?+}C`@U!$6lGiGWN&m0&wR@c2sqeF3eyL?(dp-7xN7(e=0fGuu?r3 zPJ>H6wE7OVsS`I~jw^>&*VnVX`}3^DpkiJ~8Gx$ee1kIUR;Fhzg>}1P$$uPLSwQ-y zSx1hZjnG}ybd;rx@<4p85VzY9L2n6Qsc?3YzJM*PDxS+gZ3#xyUp^7$s419~5z-lo zK$YJ3i+9j;_U4Dt`n7aABz~p5f%w}a?EHsA*z>CmhOgm4o0qPefm<^xW(!5P-~gNV zI2(I1ivOvAEj`(-`znY5+UZZ&3K5<-{|=Vkp4L&3gFs0&G`d*fyAdq89<Lpo^!4BU z7;;-66yYN!y$(FaZk~+i2NkfWQwheSq_{-$JyhM#jgHfUr8`qQvJ05TN>0TkTn2?& zaj+xCnTKljP1m(Sg{<fy6nzR+>HZjd|5S`&r^;O3!7OQ~ver|5`7Ko};&h~``(rRO zZEI8fxH4Q3%uNYU-^5)J!f`jxg`bJN?J}qM$?rr-cW{vb7OsA@==T$NLjzofD_z){ z(+S2vlq`q`>5Cllb(}Ngqr}9^4Zt0$__w;SgQpXtL`q(V*Vd1=?tvA7xi_Z@h2xyU zp?$NG`8MZ;coD(&5GoT5`?S3-le@6e=CHwMZBR5-zmyeI6kfd##yy|mP<nn!_z=QQ zyykvNW*T}FQGz`McOb6W*x}|O{QEZM{;AQJ{4j<L1~PnDb(R%E5nX~`Yr~YbyEogk z(`?NA={)}5kFvKvo!2#M(qG2ZiHTU74mpbjaZ(mu$!n1=rLdGU{Q`HahYq}q$OaCp z7`}ruk*zwD?DsY(j@dELvgs4q?la5ss}1sm>1<n<!0D?}uqcuf?RgkB+gygvU~bsj z*66FkW%><VJ%FHb^#BGIojAPsI12d{mYMu*1!THr0<(NpnB@+6ktJDVrmq9y6-2z! 
zgp&wZm!~C(#C_gkL^C7M-UPFV+Y3j<FX+2<%IzH9z=A%Xt^0O7RUXpgq$T57#pe-x zMl##}c~rorMRWv#m)<ru2<ZbhEMg}<PcRg&p(3%hP&Qo-!CN}Af_Z<@Z%8QE=_?iL zRR!&>g=n$Ml0$+vOYgo-a@V@gMKO!sqLHwAwY23;LM)}yg>h{47Xvc8t(KXdLP|=7 z%~OIP4ycxcH_D94g<p-rv_@sxt}rcH$edsF<I~?@UC$=-+J$V~*&cjgAGY?aIeg;+ zIw10*7Cu9Xk@Z8)<;eM=1?>FUaeV*PtoN6ZS>9kXr@=F9DWgwlY<S(}`UIK>y4=qL z@1<Q{rL-KWCsF)z#OJ}&3lFp-$V2iS&crWc@^@hQ#VGfO@{yh!+deF)Lyq*K@$16( z5s|nSfjoO}@lfjv_h|iW|G)-)l*}ki9)qnXu|3_3-TN|;mwK_D=Qf8puF=gd<@#y9 z*)b9ye#PKeb&Z`r7n&mYqmc*bRUpR{>>uzMFucDr0Ytj6iW>Emh9Fn<##jnakz@G} zhqA1%;%CHOfKI`2h@Y@YNW|UJ^ACqR&r7%RKuy(_`AVmOHE?t%U%;6qZbhS5h7dkL z1)_Yg+en|IdKDq(q}4NVrXq-6fN8_;tnRCj?8TtpJST+XBW;)9Mi(%X%i^Vyj$EaO zfXX(1AUMh04uhf6ACmz&!9+w(1Ab?bUq|t)*Rag5Pg+Oic8u9CN4s3LG<uyrqIUXH zB-fBbUiM=sZ?7+o3!$wny}xc2zom}|iC;^pFyh4t@}<OskK(wf4cRCQh;4L;vDW2+ z3nCS2>c+t=i%$Alj*X#GT2fp$*n{8nPnz8eb?&&hI8F%_();doadFy*vbIA{9G8B) z%1(aMk3ab>(_Dz_wW1u8{U0ta`r?kzf4R80_BS^ELJ~jbTUK!)&bp5VW2?tvj1A>G z9<F9(F#GJ2XD^4<{)RShCSD7&1S#L1*bl9fWpW6-<Qj>$bg>k2(&j~U%!&gex`7ai zkFj)!@^}ulsF9pX35S$Vt~EY@P1+eE2t8c!*S+PJQs^CAFXWJlO`%jkg91lSe$qbV z4$9dAcJ6$4-r>tM--N|k;sWSoC0Y5zl_gh7ei7eGRi>4off|uq-P!322IH;_SzuZL zXW=3AwNxYCGi#p~(g=aMUre@E$J33Xru`@>%M7h%bQzxUL8*4_uQR9~hXvJpCF(B7 z1iUokN7chkWy6Qi0CLk@@d;>%E54`vx4+V;j!*h!;foYkjPxZ`F)vGsN+LfDwx;+Z zZx(y=qK<b@V(+zd_CzdB6B5~P7Y)X|WT`pvZ+w&kT6}q*CkcF9871f@h-mJoqL;Ml zmH)y@g~=Bkq>wVEyA&FE&+<<kv|*OMvk(X0U}3W^Me(m%*u$4%c$<ZNd@0hmZ6$R; z&@st3k^OinnoqS*?fUBCCeu{yhRSrbq`i&Q#I)c3DovIG6I${^{#c<5Qye&AgHfj( zIQmHSY*$NU@*+4O{##y>z4#xfBqj^H*Am7@uVD9ELV1rBsCsB8Tngr-pWuIJ&u8zP zo|-_7byHt*F>u@~Dy&uEQ5CkR@R|xe`YHSoDoj=3L=~2(aIFeAsPG*X9#-Kw1zI^t zCA6t90N)R%UyKS*(AQt^+poemR9LOTwJP6w6>e1FTPi%P!m}!rRCrg0IB*E*uM{v) zg`-qBONHesd{Tv5RCrK@XH@u|3folZF+j;TNQFif_Eh0Of+(L8RKjEx7OQZH3LjSC zMiuT<;rl8)s=_lW{6U4+ROmiXsYrwh<5bumkjm$>RKj=_E>_`VD%`5V_f>d8h2N;~ znhO2YE(ud%PZg%BkiMiyzv(JmGDwpw6CPFx=Ts;Y@n@piS3^`-n4-Z)?&Bk?W4J8J z>fB@nRyQfIZ9Cx8(Vca5Hx)u@EFh9}@7DhLeNu&acy`8UxSfGYeOe}H<m@UY?jHQ( 
z5}ma$KnarxBmdMeG?)C_saN8m3gutE8ip|-|E~Hg@o-PbziKrcsD`uDFw9W-*P>JS zu?e7`l`B;Vp$dUBs$pzc<linoB>}cW@-Isblaq#iSACUuT>O%Muc_g#YPiKmi8rd@ zQZ*bchbQ66k(a`NwNw5Ts^QgYI7<z$QNu<xd{+&_J0eTerH0!)HM{Rz@8+Aq&EYI~ z7IQ{!F1H;2EyZKx7H~`OcQInhfE97`<=DZX#d9k-RnEI3U86Nv|CHY(FwWr~K%%)| zvmhfQ5-vxGiXd94oP81S75LAHtXIed%>&*Y881e-JEs&vILAHnq^6)IRio)tqtR%7 zj?DOPHJZ&8Ba7wS5UybFj~b1h?<Bha^8Tgb_2zB2ue;BF;L%4lHClJgiiruS^1qS5 zZHVN!f8*)jrqp^r@Kb*acM5ao!ukc58VZAwDUAkf^c~Xgsq54<8XunX>F1GLv(sH; z<PR^aIq0q#71+*Q+k@lC-xt<a6Q$JzhqTkndve^s#J@_esq@!#sVUNGIvLto=Jw{e zrFh0rK{ZX@n(*XKK^|OCu@@KA-;)dK7f{;HYUsytad>1_aBDnx&SN~EUz6*p39%9v zO{^X%99M^DzFPBBz`y@nSmWc)`Skao)|7VAyK{P!qVMP1QA(cy@ap1eFU4Et_vU=Y zdvx%)59GKP@z_uXL5&|@GuA_s*CCLYOpY5igyWuXt9i#m6BU1W8OOC~!H7>Vxx>Dq zpT?8tJT=eB;wF)6YI^u+qHDg>X@Wh7zuCR!vWF(D8{wr$+exm7r(E%Nkt`p_aZPwW zYOCq#sR^Tw8R4mkl7&Hep2Iw<gemZmDvVAfvVMF=HxHlAanta8*&hEl;j|0X2Ub-O zo@**k58(ND0&h_(P_NGJTxSTeb3eT-Aaue2ZWrdr1w8GEegocZ=3j|4f$m&je;pUN z!8c#-M^vgN!b=m^A;KMn9QXDdj_Y-+X1tdshMKn0OY=ZS{E~$n_Z=RH__CKKDyF@< zJr{G_5<KJ7>iv#A>f;DMa;v5%Mpw<ZKAND}iWu@c8N4`y1y8XjXTX>+^y}2zF4}oZ zIPPUUTh)9t;XYirC4dVr)^p+g{kiZBLCt}60r?ue2d6Lg#yIlg^y7U?NhCF|d28Zp ze)QJ#>8NhS!yNZM9_#IzcQ8e23U!)<j>xw~j=O{hk^l11^sIrh4oT?L$%E_E--qke zPgmz#>QmTWvDob#Hxkc&wPH1Y_-OKj+hZSp6)|{P5WD}?n(4lpuqb8Z_;5bU{~E$4 z--U|C!{4siqEklB9-XF}MpLuRR}(AGa9_>9?lkyleN<YhM=Sj~{rgHQ<w5JubN>B& zY7+f4p*6XF8n2Fu{Bo4ze#b*xzaOnx=%)$mXm3>`$8~PvxY4(3p7qm2^=_~H4bVP1 z#&HX7lN32FGzv1^;M?Ne>{+KRbuV<w=aVrGzVp+>SgB^la11}{!~VY1cP;wn{}ffz zmmGHok5$d@e-i0)=OBMP?II0L_T{?vFXp;xrgH&T{agH+ed@eQJqtbZIgZM0<hjKE z|AK$S2R!g71=rlyX?k`v{uK!?T;thvyJoPzCMuO0Pm1eEEB=8TJiFD(E(Wf1a@=QX zSYyBd?H|Ay-uExn75e4ZyymaT3=j0;0w?-lE&=xQuk$PQE!5ycKHg9=&+0WsSxm{e zb{U$)b76>)I}}DmXrd<<X6dg9>z|?t(<Ezr)uxqtqs#m}I6sU$zwt_=HC_F<u9i+* z*Wv((#h>fCJmhL{i#q&6e7KNeJQKZP#d>ie{eoIBElNWN>9}r|yZKzV;yYZo{<pbq znm@R|k|}bm#{NWBiNi2pd(%i!e#x?}rQym|gP*9Spl#w|S%v<;{lfp-FZ|!!FZ7WI z>Hq$Ip=NoY=2$?nX2jCv56-dV&0Vo#&b+x(Y6f@KeBDRn^5<CQkDvSC(v@>ZELc2u 
zLh0P1DO|Q@f@L}W&zZu(HYASWOCNk-!IHUqBZD-|J+PuwF>RIE0#mtkZ(x{askbJ3 z@1wCAe~ne}R2a-E%u->73R6|sUxmpkOi-awg&`{RQem5ilJ6B2wy5wM6`oU|mHS*J zG^_B03L8~;ScP>e{78khD*Ql&yHxm^3OA|nNflPAus#2^au_>GiyE<5g@r1dszO19 zsVYoXVUh}sD)dren^tK#J`5`_Y3-uNtN&S~xFi*}5TqULIhD|?!bTO=sc@GHcdD>j zh2<(NRAIgfQ&pI(!UPo>RT!ee06;6%$V(+~Dr|FCYI;?Lk_uZ?cus|lD%_<)Sp0Ge zt5rw^Q2rHaxKxFODiqu`!(@h3m0(n%mkRH?DH;5s!WI?Qs&FU48ly!MvB7PuwsI6g zPe+IU?;tg(gL_Ruf+j+Hb-bLcbHBnMrL?Od3M>s%U^V57X5{B75T0jwr&t`k_h5qN z9XD%Rpb{aeusx#(f)#qg4P}4-e+ps~#L*+y9Hs-;LQMdnyy*W7w5q=!)O^>f`TjG6 zmiy-jW&Bf3sd62^nr*!_i>w@0aTv+mPk65O!Ut#o$9Celf*$zSdH|p9jQ5hjvk`Dz z5XaF!E?5ER#M1))D}Vt87)PKJ9MT10;8}pVp&U0AIKjpUSOmZcp20)>S0f-aJvO!h zu}BQ~3!Wh0R{@7dA%pH3t{Cu7Jo%uv0S@R2T>(55a5Wy{Sqm6xRB$8Uz-Vk?@ehEc z0yg8J!U#SVqcjYzAP!z0q{YM|qy`T$Z34U_M`*aafW6|BOltvO>;?g&;Fkb@!}AhV z2r#TW$L#_h0XU%tu3rJ42zUoiGw{2Bq1acShzAegs6=!@9HavHz7>C{H6H-3NKz_c z0qlwHN&r(5U?m<iaDv;>%ZC%-g#!E~8JlL%uLAn_RSF0IJcws4cn$*&!S=fXJXwHu z@KCqj1-y?9_Abz^Tz`&Rgonhq81N@NR}morj=`?_F7SN72JD)RXh9?3*LX-;-vD-l zF>D5%jN<sg$cJ!1Z6+!Q+zW8_5Hte#e8Am9Iqogs9{~P16y;M!5&~1QFb>dXQvp}v zA#v3Kw%{Qif}Bag368)+l`aIV#B&b()qt<zX#u_q@I5>~P(Fb5cqHJ50iPKL%N_Wp zVOEZ#e_`b=0?mMj;pKDh1}4C;F-l<(fP3(eRB8duV-@id>^~0O1b%|Q;t_yf1*{m4 z!q6Cke@^7M#h|wVo}J8b>w&`ui54S&D_WL3mE&HYru5BQfNs;3mbnA2##4&~>jB@! zLz379I2Zf*bD+-$yn=@+M=+p3NlS3}O#FkKkdPVh2p*DFBjC1K@ZJL7326Nae<<P` zz-ub*Hyb(t57F}hZFnXEuLS%U4;5AixU^8gO95STIIa*psSj}6EIgF<Nx;@(oM-^Q z0@!D+(tuPz`r-|EtlVNCL*{eb2Vlwq`~*)k@H)WZ3l!W8*rek0rKO2*BS_#Ocpndk z(atS`wD8afUkliXhblqv1Gp0sz*7fkeh>o_IKkr&T9v;10fA#nP&-7N01Q~Fw2a^l zJT&Fn0JD~%LMVXXEIc$w3jq(|p~7gh_AMUjrXK)J%Q@~Fq^$rvh36{pX26*iIO>3x zT7gV2LxIS&0PvlaN(*)Ye)j(rb#5V1L}47Cl}amveW|tx>Y>PPAS7ia;bT$M7D5t1 zJw=ew8a~){4GJVzvIsOQ+s;_{QdwErg|f0MWrAJSh+3qJ-MnQt3+ngZGotYK_dnl! 
zb7tnund9)ygF9Ibi9dy1yI6Yh-Ea&itpfz$ailAb!XA`|?}O2OXeMzur^M!D!m$x1 zCpiJQ9EEk(LCguHovY!(Qailau(HBF5l3OuVV+%b#N0}I5(RMH5h{4Zvta&FJ3<BU zH_}4%7d9NTBPwpMqJQ#tRPp>@prMnfgWr*^jFP<LKW@DkkJ^qzzd<}&;v3hll5zOF znpKr}3_dz(*MD)yDSKcs%kyFvS~{Kezlh??O_a;dO)&2)f5GwM!dhl8J{xX9y5CXw z59vgmbNKU2Fmjv=3^mfz5rlW@7zyGnaP&pH9t7Z8&xc?je2E=M3c&UT$LYhrh1)JW z&V(5Z5qx=tdyjtwS6^lJ<3q6eIx`tB9=yeg;}1dSw*8wTj%c#+RJaLgq>Ets9WIjm za5g6SE|Y@BB=`&!<74m-is1jkarf*MFNM1vFb|0H1;uGa>hvjm{gByCJPv=N#I1zR zV-_MNqqr9-zZkA+=KiaZ+f1^=kcu0h*cqD#dp+L=b6#-2=}4R%qa%C<9NEUz;8WpB zq&{=tN2GP)3%vT)J`va8ig)%l<-FrjnfsoZ&CZ#y1Zn38ocF;_x-9q}b(7Np*L2tu z6@4GMI^Ke2!i>)hD_;Edg%=FGnEKVu8}U5S`cVfr_b}>Vl0ulzNUu{)FP(owcyWCn zlMBBAmZQW7!Kpteyb;fU1xRu6wC8K#@ShxvoK(0QX(8GJZ=e`)zIQp{c7ADReK#17 z%19FR?V*O5EH3lnqN?=D5milBj(Eb0iz;3#M^vF&UQ}&aUL5s1;lqil^eo$;bZpDO zN>Vv^s~6u5Rn}EpRM}5nZ1ueAr%C-bE-pbjIZ@?F<%_ED$%_^J(`=)HhRS%_AU1nm zrMyn-`5^eUK4rcy-xoGage2Z9hH&%w>8|FLhY=iT2Tu8%nv}tI;~9o)=wLgoj_p$h z+bwOh{e$hoPvq10K!5R0+8k$~owTT{zJGx)IXICQ`T!eROw7bhhjH8#H|XZNj-UHR zynJ%Rmo_ZL1dNkizAWNvT^!`KlMNwLXo^g+DKix&YHCcaX)ulEzEMXVrrZ2Vbme#X iA3|=Lo9<@1ixM5Lb2qplx6mze>+?%}`;!i&C;tabtURCq delta 28445 zcmeIbc~lfvw?155(9F<G4b9wOqX?)74k%(kF)0)kXH--aoKaC5W1K)660sy99u39} z8i#12Q4>uZVhlths6>q#=K+T|DQ%4;PB<j}dv;Z!dGCGi`hDM8-&)^4mzCqQ&p!Li z(?0v0>J;yDC|=`G(M8knZgTgcFUC|nzh(FLwk9{H_qc5m{C|EtXj`p}1Gjx7W6rjE zVEx=F+wK5w+1a*zCG#^?`8*YKW$ZsOcLJr!{g=PY9vqkVrUSQq^66m+3X+2xxCqXb zcj34z@NQLN%xlZMH7%Sup5yE}&T%R0rs)_PqT#wwaD|3jp9w71aP^4x;;%u&<!d<3 z^FGG~vuk{;wt?f;@b88PvIdP)hmn^d=m|&JAklyR896Se^^CmH=FuFtvI9a8lS_oV z&RX+fZR3!tFMNT(fBdCzTvh7=C6-(e6o_+!Kx?)Du~1B|KYX44Tdb7UGsfmm%jLKf zJA|Xya(R~SIL1e7mf8C|p>%?%-PnvV`>3J~eK}6LspGhe@-$PNP%h*M<@&1UVCMRB z+#X8Yrmwiladijk>V&dvlao+pHhB&aEO(5ewnc&lX+0IhKN2oxh=()EyFJ(KKanmT zmNs}HNwHQatulv*T3bBH9ZOBpr0deor`xL2V<4Q?={f7H4b*tDL0V%*5;8^JH@iW! 
zymwrWgqs;<W8)-WSB^^+>q@H-W`u}td9`%nZ<|dJA%imW2`8oX=xXvy?*u9YtJeB8 zpAT+^EH%%hNfpng?zTsD(#4;o7Jf1c;t5G&Hm#w^x&-lZZRs(833ktJEkdQgMQ_@R zN2TxFl@wFU_oFSJl_KAURGsj=M?#f&R&rFcND~jFi)T?oX^)=dk5STdP)d{><<5Hs zCTbv_)2*nMe*X)lFYjG1h^JC_Q^!k1`AYs2G3IX+L7Ik<7c;+sDit&T&cb_C@|W4Z zi)vEMnrd{GDwTqqmFz<QExTf5*QY!?#@0LO@{B;;E^QEk$E9|%GrRPTxeDgYKL=Xv zdW$#Y7Q7csLm}Igk}e)h7l$XNi!(va71G5UsiRUyrjAI>9?>)s1`VcR5SM^9`J;SD zYoTnODMxzG8Fejh<=UO?atu#wyu@*(Rl1TARP%cpOxF1s<=RD0NoJsFf>54e%95_i zy+&RUUh$mN$BTJ7wFr1$j$9a+Kx4&}gNE#gh|)<t%XISPIcGM`sV(2{5_{h%+M^V* z;<?hQLN}X!;wAR8Q)JI0=^C!|u-Pf$q)q?mBFF8f`~}Owr~@>PJ*l{cK|*=HsaRNe zhb9+h#mF#>zJ!B9S&srie3Ec7;jm4A`yzYQxf%cUMK;+v*wOI`iqQEPd)qmrC;_RW zRL>iccdg|9%w|hCNmVLHEAKWzn%@T#=#kB4t4T8zOhDc?{V+MgAdG}sX%!9Pa{W~$ z@`<N5+n$FIw&~j|LbW7Rs0dw9gf^4V16jxuLT!ceK_^HmQ<3^bk$MwSLb>}!F!xfG zYkF+qfHwWz3v9Scm!iHG<Wg5aIzy~M^iV`d6SJxP2b!`3@n~9Eh>M^>j5HEOr{vi5 zWh$9vIf60<HTkF;Gi~~2N}72}34VHFvmqAv2hL;Vc(HKgQ>(M>#cl>cyp6gKhB(HK zO&^Kg!-P-$jup6u6#XKN^`>D{7yy|-k}2=yWYbI3F5)>%q{2F(^rG1{-R`m={(b8> zb!)j^1HshYv_R54)HyZH1~2eSt1`qxv~u{OWJ37@gKn2@9L9!Ms`!i4h^0zi=#a1- zW>66li$o+uk^a>Pc}oqi%Da*LVaP*N62z<H(mJ&;2k3UW68}qu?;=<Z@CayD_?sHA zQVuXXjY}(QMuCEOT*}5^QD*C(k5OW|fP&aa>qF6WWIE2$HKMTPI7?E5BJ~%fPys4a ze+q6&%5sRO^1T!^q2x$~1Y1H7MWBjCQUSe`as*Ihs}3C9u3G6iHZ3gDZd5ScF1vB1 zRS|{%5=65pQz#pOro>Vpi1{W#5Nj}ZjI;oZI>0>N3k=4j!3Kj#Qy8R9ni5k1I5z$I zv+OPRup%2g=+*t`3aVkDb4Gcqn1@&cA~MP+@K$f}3>BcaQUH&1%a44jZdVTuMJT+P zQQkf_NyB|d3tz(Fbn#4@_@hn###t;~oszu~BAzI8g^)8y!E&1y%9omQu%D0XQDz!^ zNV_ds!$oCEXC61Ta-7o2>Joe%(^4q6DEq;4*2g2Ph?*Izd%518S{mK39x`c*TL@Ts zGGm@)hzG<odv5*3acSc5vV2pNP?lyg+Voe>$m2}jo}NBJJ#*zg9FNFo=Dc~CLis#X zCWb?vDbq~DPCO%Z!*+`%$3Q{HVFVRQ<19t5G*deIS0q9swK-C-dMHigtwcZ{(MWWp z2KsfJ<&|(H#!WTG11uj+G3q^<di7-tnyTo_;~8anrWDEHc~jF|qGD&z_9Rigw;~zp z{}j1J9-&3<PzDHDj*x_^bjx8IcKCGhukuWO+@ISp1%<~yXOyQBSN8wKdV97iia>85 zat)<Ui8rR|Rvbi{QY4beKocfrK$D2eD0im<pnb6)qI9HQpmFm}MrjRgC73HIn6y|# z>af{FNj})l$n)ZF+&EnrZ7+~ba(~yL;R?Nk>S|BVagVEQwnF~?0ipV^=dDyf=I+(r 
z{r51ehnydTrMY3OuUBM_eK2Kmj=D3G2C2LBxh!LVjP-kTPwT=6sq#fM)JR!kGEl%^ zBh<x;geq$oRGl8;K|#?0enwo>7YnI$NXt%pwe^TW{BJ4OGJ~`$l=*m@_$Q&PlXq-9 z=4QtMq)cUq)q?20CRW4IekGlf*BF0kbEsTAoBmU9uvQNUWidXXVW*Iwx)ug?N;9RH z_0j~A#ISOeMuxHpJ}vkWt=PLhq2c|)HJsT`EUrcNrCJ}TxrI(rN(_X{>@a!xV0OaC z*ZEE`_|8iR}q$$d)5i)MNn%<_Dvd*5$`F|bQVW5}r6bxyj`iv8w0Ds&0vS>fxw zk?kCz^l)LIGzHD6Ht^FRmZxV$U$(>yMVL{tCSajODLH9jTTnKlYLG7Ky%a*&jHv48 zPY|bp`XaWIHU`?U*@K$$31TB3ck5<pP7q~6{UA+yK?>p_tG9FtQqr&&%!Dd7%TKTS zQK#V!q{!_oY7;ZrD!=d|eWr$!xQ^5g+FP$s8LJ9S<(W>%t1P6kF9L6LkOS+~K*!yI z$k940;V=fl5Tq!S`Dz5q4{?H}+DWh-H9Mm@S3}tPE3`w%^o2~1a=|WLtj#Fb_ej=2 z`K%JE8?}~eVd<>1f0F+zopH`cULS=;&a9PwroxEs&0k?l{S6*lJdi`u`59e>a&4S9 z`^rC>Z%ATS`~w2Ku;bM!wGqS%mM6aReJs_U(*7joX$UY}J*I5A<@yjgg(EZCBZW`A zSgIj5dK5B5apMGA<Vwg<TzB2x83r8^xYdmU%X?Db++(cL(4Ft<#V#3w3_oE&Hx1vR zpl?k>fu(g_nJ%DBpm$fy-rRb$-Ye1^5{yPGVVqAju&jV&zKwxZ2K3<VyRs_*o%k>O zSwLV+h+DGUm)_`dK|Djf`*s)7->h!ZSbsJ;(4=|B-V79-YEr1U2NT&J2Ygwppf>Ik zFp1RR^-#|y26g96UD?*4Mf}d5EIxR;Z$M9MIpU!VtX~tbX<))w1-2_Vgddo~&IG^a zxC7ZpYkiq1q|mz~(aoWNs|oJ#VVgr%yU+1?!Ey9qLqgkh>mZ<|8g1riOXJ_>NFf(G z*Ab!mi58md@%9?day?9#d@?KD&bJn7*Azbt051@EAOsN$qxSRmtTwcD@-ZkiYF5KR zT8`OD9~DN+$^bGD&-E7TXxu0|*l{TVRlzh6&!Tm7=`0~EmCp-cv%<o7OAV_Gi*x@A z+qs(F5-)Z->`?pYmz-VLRV}r)Tf?ZQTwaA|t`l|Be0xi-6P6e(#86T-((wW8xG{n+ zN?`Ymz4;PP)-Ajr?>vC52#@D~>CX;_NAvUJ*~9R`?r&fdQnPc=v8;#$<{udn8~lQc z+^mx<_4g9)mZcg}EsYv1C!b@n7XPwCEsjd9Td|zT1dp$~(-vAI_f1(p_DN($axOM2 zHRT2zw<w`j(C=I<&opMO<r!aiS@ML|TY*y+4VL55?@(>AWP_@Bz@nSCDEb`dOEh*! 
z0#-37l)4fN#vpXW!UUmHhzFSFLTU0Ku*$S?=oYEPKpKB<I7@>E5K%Y;aUYWwgKT>V zrH{<7k`5VyKzSW^#rkDVkle|iR5L3;5M9Svy1bEGs8a^8oiwUHJJq~jM7uWF5E3q8 zFDredTTz9SeIx^_)HHe4yRlbWw3*Zw2ltHf9L-*AkysPzq|U!%&n<JHCCFF$4hx&Q zx_t#wG*|<Bi$A7|r%BbX!paFLKj<oEyNt3HdmNF3v<$1Lc)+TsgN?CA!eNXfI=e{Y z*wq%{EI%qPJ{z+Vy&yJ9Td5t;9}cOAQ!8CmnnG(iZnOFbVy`HwtBYj3%Z^25>&N_o zb|>lfICUsn+HZfbS6Vjn`}MfZX0}hbXiXGywfYmJPr08w&Stl4TjYaMql^)j<a{3u z$75w{*$nGFb_P!R-q^JAYq7^j6R=V%qN%Fr-w_bN;N_&{FGMdOs<dG^+EyBcl+bf7 zdk~S6LLg$(+rCGQ%CyC8(6iz&u82H0z2SLls47evsPqT+OvyZ0WkI*#q$^;_s=t(t z(qMOSM2j2-GegwQfhv(vrkx8AM13|ubnlOUrJrw6Q;_<H0Y(vWDB+p-$LcROTINNW zQn1B>KM1TkX)^w;pmi=AO0?x*+^7SJBJ<jCc04+``DGN;rXTe^b&9qYjX;`6mb`%s z#F>{)#L}MK%qyl%(PD_oYG(!=t};Z((Q1){jvl|8)M`O2F)c>DQ2&9HQn~82RKaSa zy++YRZD%<T?Pf3Xm6$3N`7H>n(Jlhh*`BMa(ar;fIYkkMYLsk8;C!eZq<RW$`bqn- z&TH>LP-(*3!$?)P>3f2Y!5~&q-?WFvl6(k4T%n61_XFf?`n>NrZZGA9QEk&Z>}SET zdpi68^@lTuBK96ok{Ocd6c70rD~LbY^lQNud)cS%rn04rzogq8mliPBRxKS&NKY!- z(YckIhMyF=Lumbh#&g3yIuhE+)i@;$&tdo5v>lu;Gqp1FsPyzVn+*ey4s9;Fz4yGW zeXwy4l|^XK(9l2oQp6J?Ue*|h^pQnq@{!2S-`JeES3F0JqVnoW&tuuMX;a<U;kbce z8zX5=u)*43OiKPZQN!uV?@+Yd$7s%}QLIbbsO;G$Ibz9nIX!)QC<KD&`zIE-85nFC zVn{P^(nb6Gk5UX4oYlk=2s(=(s}@?Ib)K!})*HxjPZr`!Z-r8!YGKbp2De+Vs<wV9 zOE5lA%}}TpH7zJ{w<i*RQ%-sp{OK0cGtO#){AWsNBPj_s{rz1u6+)J`V1DuOA@eZz z1k%&xGoktAa4vIi@5#o-H*2@}DI~~zQd$oyW2)FF{wbcfJh5R3h{QT$$ukvTXk?ho z*noB~r^0$fvwiVF0dL3|ZE!-j%EuHeikJe@h3%{%K9OIwokb_K?|l9n8en3hO<x0w z(h+R>ogh&&&r(~Vo8_4eGx(^KA}`$KR0biTPP$gl-cM-nzxHd4IDcitkr@bT&MSz_ zrdRkG;so&)+U7lIOsV4ebXw{D#H{>=`Lvs0_eCSe7PNb9{0G>lXo+@)KJ`7!KvsLf zQsqfW9gvSEbN>;(t|{DC4p(*0^U{!i$P~Shfx6Tc0nif1KjBzx`{rzL`v`v6RyL=7 zuXLPx&=z~ix}0zrwxl@PHEjCrJ5ic4ZM7S^6O*K+P5&M^T^wcxq9U8Yl`Xodd(a5- zk)mKH^XL}O7yr$=b!!oj1OX_!*uQN0w{|G?YGp~j=*<>&8`aE;G20{IqEJ5HGqt>z z1CHef(#l4-Wt5GuA9sJVP@cT%F|+SJI%L~JYGLZ{0%~jV4>8?dwFvp{V2iqEYM!!F z-MjE@M=-CnR=mq_fDapxX5@bv#tPHc@?$TufFAAn1y5OKkG7q9p+U^uXcG=<u(@KB zE7v+v4p{Eugrr)GqTZM2aH@@fpiuS}1~me;zE9ZR9<PR)5oU>$$B^aO--VGi+V?P? 
zXrZv_7jI{wuZD)^zhDf;RI{G8JbPZ~D_|it)@U=G*sxbS@eBWCD_)J}LmFB2t7&aE zZc`@auapy=eZ=E7{T!9~qs;6ROoJKAA}77i+P$`i|K5S!do9GH%19kLa0>nj+4qPA z3J3VrXW3O@F<+R)CZ?N;bbr92Y;(qOReYOjlAcsYH9SYEz{kXBu-QF&vlaxeE5*d( z9%)@4ahyh1dKaGkP%AhnDGv>_y>z!P)82qNL+tKUroAE)Ze`l@M4W{NI`CVYtqi5N zJk`$E3%N&hyV80&d5%jhD-kB7GvA&@O-I(gXLw7jPPQ8B6|`yo1Ipit3g?~!S}EL| zvsAc{z7JSYPd{JB2I5~b(~(K)gCHs35&NuXd;Nw$v__%t9$E$^XDDkm+IIrk{hmYl zk_W6$uMhcO@3T9-#zDy+kP$7+{Dt~djG`92kxe}+Vw=UFK5X{0bi#37>h}+cL5hSY zNq71Oztu17$Bc*~XN4so`1@kCkPvfRdU#sS0Ovi~`>W!`!IG0MD0CJDm1}qZ3BWiD zwCu?Nhq69D1T0Y`0qF}8E}xsV-+>Y?*Ul&LPOmkXy#-Mxt4$V1lvgX2o@P^e8~Dkm z*;~Cs`Cg~l#@?y?&Ijyv?>PReKUi>|w*0n-tZ$!qqjjTlSi>9_-DhE}(_-mv)5mNA z>7MtHebOg_KVQv$?9+zdWMz-~bmI>`V4eGR_RRbg-S55xT|*0E{6<#NH@xF0c(FOy z^d3kTy@GYY5BDPt=mMMGn|KyjQp@bDj*{^tj5!x+IRE)6yVQ4JQPt1XO1%WCh*tVa z@ntE#^@?wT;(Jf=6)3*Nif^{!n<M)~x#{JECMt|IN|QNOp$zg&?g#$&@)$lYh2S{E zwq*u-ow`NER%g3|WcZm~$c%M;gmyLuqQT4mU|#*2@%v7&4*g=B=58SU<D);>#D2!G zxzra_^Js8U?NC_aF)H0?2<lsJV4M2&;NLpI?)7_(_qfCo`*+e@WHb7A3K{)5Ee}cS z5gW0{;QR_WvQ84Zp4Ik`<VUS#_xg8i(b-DPb5weB1O*?JW~t}`wG@7=^2Xt}E)Qe9 z2NW0WCv`_WZ#F=Y*#TNw@QPe0C-Ep$7-F;^iCE?O)|79#yKNmt*8-Fjo69ixPbI_u zlrks}IfXDRxTgVD*g@>5n7r)-^c5!|>FSKTf+cwfWoHgU#6Fa^m#>nWj!prGQ(>jE z={tC_oPnur=;B4AE1|`zMmv=}=$5p_k+~pCFX~E9Q!k0`J2AUpar%Uv80hEpw(ME_ z3HyCu*I3~b5;!NNksZCv(FTj1dly-twGslqN%n_+!m<Xn9sccFD!%)N0BILG1C`#b zus$Q!dax>bl-@M!_K><IdE)2*Z(;Kq?Fg*#R6Hf%v3w19Tsk0|Eiq~*t!6g|74c=; z*x126_(|8<mch+KZlhUo^3;_SB4&9XJLx<aXjFGga_Dt-Z?K_=4Ew43eC=p$*4APZ z5X7HS#nVzXw%>%qq`dheDh$=FNl?Bq99P!9CE5qtrcFQiV{l>7{SApmBL&(<MEUxy z7zv<j^e<0eWXUv2T_o5m>3(iUYh<RkRSA~HF0)=E&RTGghU<3r!H|%wcIfZ_l)z1> zpLGzc!mcA90)LVgVhuzAQeiYzInxvlCW$@+a;dvLjilxMB=s4_JhS{elv5)AwP>^d z$y7=^mRRZ))98=bh^!P|H-N3q3h?fNnhWAmQwk4+Qnj;&9m$FfI)seT;%5InXi{{s zQjQQy^7E_AYiLA3_;aqw@+OGqF*-cKz%2cUWen}k&;N+MJG6!8PG=NW%b|<KpUT|d zc4oCh+XNL7)AB<WEK-k^Kr9?HeiYr)oSFBqB))|+>pRTP_a3%%p?on##(=c@rA&X> ziM=r_&AXBUEHzF^r{*oTI*1`}IkDrzVmw_wl$*JSkb4i~GS7**3~$ZHe!w~pZ^rAK 
z*r?%&0g<bysuDWNX6{Y%4VND{qYVClZ6Dr-FI~m14Nq`NailqNeH9DNex;QbV>RI} z^q*8q9af2BQg$t^7V=$H6i(_UI=g&`WX0qQjcjptVPGzX*Q^Lye)V9dm9Li2erZV| zy&9SSh@q{PW>FF3_-*K@AKgiN`LP`1m`(rA`xN72HO7yR*+(OCTAljuF*0Ao;QuW~ z;$s#)vUkoPe+@T(tdM&j+kH#aMzC~g;LMpg8#Fvh#WBNE>U#zn6&k_8R)gb<$@0v1 zURwx1cYrXYlrzHMl~h#s-xTH_Qp5rISaH^YogF#UbFm+V4)7$CPpvd(6iXR3&Dj}D z_^yv)yGQl#KCY)TQ89*3MlLR)wX3yEm*eMh{E%#V&=zS~o1AX$Nm>opQ}n%#rMK}A zTaYu1Uw?yL$T8|>(4JTBRx6b0Z2A!^nDgi%y@GEb1Dvm7gwng_Aeg>1W<6$uP5;3P zD$Y;Rv?}zEr5Wmcz*DdtD?E*r_TF_0>(xjbnX4s9Q|MsRPhP<`k8a`lIR)7rMx|?U zYKUFVu8)r3SKna{V<P!}*IBDE;rxs%EOSiDj#eg5O@G=VCVvn0r^6MfKeDYl;gQun zy)64L&gvj#A_#UBcQX>U=|?iQeM~IB<{G;)rZwNVjCtgC_Ak*QPn>G>($ve81lo*~ zo?T+2a>r;UvmbKbbDsc(H>3Pj4aV{4Y&JHpr7@mV;KT@B>02ahg0IYAT_6R4fRG`Z zZOu#UeBq)jRG1*00F_o|w6=srCF+nPCi35fA<|xOYw&6Y8;+~=h3t{@?tGCM$FAnT z+smrQHd_{D^xSvBW^1x+37}bARIIL;m~!MFa4}NDZj9~8ixsTRxFj}f+-&bjSSH0@ z1|hfMQT3uie`(D5W#a?+SI@KH@u{%~zN22Lg06qg>>+)v_;TPYbj6%18!I=htZlX& z%gV;L<v$+G_Ki2SihYJf=vo+=)`iK3vjjW0AFPh3@WG2vnp2&OX8(kf;xX7ed8s;= zxlf4Vn+@L4aY7pn3(RdDa1PoLHMBwc0pne5Xd3$Lmxx*RpFIcBw^@MwQX=x3z32WW zR7|zmkB?_MQ_E(V;K<Fsl$yO8d}W4P#YA@?b+p{wGM)`LbxM8!1rl<F?WEhFsO6*4 z@-1;u*hXpkQ((Vn`a`luZu$Wr+0P~;b8hR-CQWp4FMb*3ewr<u*pXkF!}d<h;}<<* z#z_&p$61y(>0{UUQ&943Wr>pWDRy^KfN2plOr2#V^jF-lWQdKpd>dbBv!z$tw3c7P zdWlP)7)P?`$wAJ?p&cUHrjxAC<VpQ^-=!%nU&#bXZ4jiIC(~q-Htt*X7_DM&A>pKK zRlbUC0BKjeNAb?}5=)RSS<|xPQWOt!$xqOXWL@)(_;*ykUj~9x?JfwmA9|GMG^Q<1 zi|dqH{FkP%byM6W8F8IcJJ*^+a5xND?9#+mB0ND<iql!GL3O?_FTTaj=9}D0SD>No zu0y^NRdbrM4EcR9CDgR>O?fc6z(Q3jQ{y_#zhoYbefEpxHk&Gbd5S;8A5Li*|M@Un z)XdLvLHH1an{`%4kQpG;7B>@Y2QreTNV-LCP~og-Mmev{sX2yJSe|d{zH#lQ+h3w7 zfDgO2kJS1&tDV{mDc_kI?2n2g{^GQ@oh(o73cZ%tp$=5kuxUoVY$!{e);#_V3)XwF z9&z<HJ(;$#cJ4Wbp>))C>j<=!qyyrJdZ@>LEVJ2qDw)4O?IWb$u^?=MONm@P51W4F zBBFGq@1eg?SzSpv2-Ly}(UdZ4xJPMmPJ%gUuM2|4bsBWeoI;NUmh?a$QmHf?3hKP1 z$N7JFJE`^EUl1E{1*auBE|OjHF0Q~S2|ue_6K~eL%x!uQN)|OeKKIBMsKT?qQ*~*V zV7ne_)0ZzKT>{Suq@p?+W3q{MxlCeoako;TELs|;6xxedtzV-J-bc`_STM2B1WJ?P 
z!;npT8TIF*?7Qiy$$sD@T!e|`?$M@HZ|z5V2^UKb>lS|mDk*u~?=&&q1)U-OmLWb) zul^g?JIUjJXGt@n4cibA@fXYSX+m0u!2b4R5}Q9GyxrgzsY;V&Idls`$4^E-9YDD# z9Fg0+6x(-%otzQn{uB>s)Yay#Bg}DTqQ}?ZG%cv9M_BKf{vlgpklWD=@dq5^D*mK} z>XG?JA?evnH>vOlTR1bUNPUzy&*YgpkS>y;W=V+{h-je8gXJDCsYH>bJ6m5Yu7&n! zF(2uI!jTu)*LPzeaW-xLlN|TBy8+fAt}r*FJo02c){5N5bW4}#Tw%-}GD1ls|Ai%^ z%>Fr4qe27g-eGFUI;f2ca}73JV_0%c*i~s~&Gp1TI2AFMpAvY>J@(2^$N5!N%=4Iu zbuouE`}#BNj?ZgswjMaDmGxLd*EO+{6%4geX4kL<=9Ya@m*N#m=?0XJ_SDr_g`wm= zKLRB;sm2^dS8~lVa;vb~7$AtN)>KP9c7i9`-jl1cdfSy8rcrBkK?_3Up~xPZThg3+ z)Ge{t94obZnO!xs3bP;URXCcj;@Q^10KT4QhYN4JY^YPRE{|j1&FaCAtYhv!2eQ!D zLo~^(%j-4#prg#=<Y=d^>6SxI(u4jxrp_Lx;q9-mtEYm49I(gFKv^j)Rk#1SkoyRA z-EGrrXEXOXDNYFpv+2KkosB$gaB`JBAHB{NoDR0%6^k?R$ERPn=Tbp<&JD0{gXcV~ z)!ab)A5*~THMb4_cOIKJH^%-;ymDci=SFKR?CLM3ByR-M`wePWvB!2&V0BPnr8%F- zZJkjzXuCMilt%Xl6X^ON^;>!fi`MzNgtea+%kNsv=ADUPrSrZC_^vOe$s;5XLl+GN zWKm2>T~a4~$EKZ)wqN$pX4|o7zJtd7G`4EB*G7NGRxF4dxD`79DbR(UQl%V7lbLSz z`s4i!4yJUG2c7!fOq8{e<eY5!88a}}`GhLu=Zx~MMm%dBovmSWMwee@9z`&jfNL>w zQscfClldJaU3z%-%%tSkuX5bX$#|HRA>K=`{tHvM{k*H}?V><UJN8*oc!y;#B1s!T zA{}M2u0?4vS%ai^6|OY#rQoA)mlYe>hfCYK`^ZdrLtgVOyRftwU-=QUEuHN8sciQz zY#~kgmKBy~2Lv6)>>+~)9z@uE`=V(o_OjdMYx&|@_V(K*zrbl|q1;APb~bJ@<*Pu+ z?+mMbJKX!mRGPQu&Sc_F!NmW5BzyLD2dAs6FspyGo?#)rK@PB3huo}Y!`|s?NFyZ! 
zChIcooEoHwnEMd;b~W4nPNcz+0?QnUJ6nEa^9zjy{S|iioxs5M)ymM;Amxm*$ma0k zPO(Ni@iGgqNboD(h0`ELaMdBq`fcyaGCL2kNfk!^!c?}jqK)YjC>Y9P4YOB<_@n#) zrdEmxpk!f2#POzRD!W?IN>`7iNIp;vg@!-#0t<TASo9@$@|6%-6xNeK(gAZQ*3Okg z)9_fGPYqFsU3xw!+;s3{vXbfs(K>w<>C;3{wcH59_^`V`k=%~-o_sN@+o7}oCs;Jh zZdfuFpcXXpFtX_C0?#!L8b{#`DBJt4p-*cH#Bg!|Dm%`th01Umn-o%d9>ZybAU+es z`+~e9L@oi#@*6JvZCSXFd7#^$k?3T(rZIO!M%@N8<FdfkpMKMn6z){P-jCXdH5go9 zAb|2JeIX!uf5Rp$Yt7H8WGk2T<CB^(>#{(;?_}o4n(@O9?ucbaG(}&WLxEsbe1wsY zer>Z4a}xW&#lj%e>efyKc1eaw2seyJrO!jr2<6#!nk9B6RfWHmzoyws>Y0#>>4Xbg zWt99#y^isK<_Nj=Efkw1`Pg?<%17+tq~!7fjk&du`@3@O7KGzc9>rA#(ae{F$R&hT ztS|&GMG};E4zwY`5;+fEbae|@m=2xAFfy=9EBySff2j;5Z{#g`DU#Z1=JZ|>KXEuK zd+#E@bQl{~DR%to7p1n(_9C_N<T{LK#SD{x<JZ|k+WoZ%x9JCtr%XdOKw2nEcKDt} ztsKH%KFSuZjPvM)-96QIG@%ObSd;d%?^m|tr;lR~R}S)u9tZJU33cx$pjviE+3@$H zqwbBBN0}=f+>0t`6L}yH-S5GtS49ZGK>l(p`{Mmr=ZmFa!X8)30#`Nnc9I{$<M6mc zVQ(RJ8{YaU>$NJR`{_@ypv|ILjJvcjIW>$wo8Ap%ns_cf;i64HA@3!92Q*11HU*x_ zW1CmSgpNcy>5orkl!au2Qtsd6L0o5voPUBnSoI!%eL7qE!FznqEN1#J&9HI{=|UD) zn=}<vlS=V)Gb{e^Hos&K%l~L#vw~hQpW4yV8!olm+$8S;s)jS5jV0UX?8Zm!`I6Br z@Z$k~ZAK&KvL5yFqTX4^rN@`9Kd~bp2gSaM<qvDh+|TK_i+{w#TOZg{IWdDL<;ZQ@ zONnr>jv|EGMp6V!`glm=*?GN9Z%<3;#lp{|tDiBu)sb!!UPA(MCd2AjyVaw0q7_B# zQA_0wkVbyS)~{~S@+YiqSRVYxS>~S(Ec6s|2i1zMQnUV~M%v5=MOU(bo|IZ!cRX0_ zqVf4A9%-WQ>zV>K#51o?0{E$?S?niW`B@uT{wGF$`7QSLC&B!zTWrfGE&Q+Hy#?av zO77#7*c7j>ExY|m41d7H{MYW_<A<}lwXyp3O*>wDo4y{$1zKf4W$x?3dG9n9x2_{U zWj&j?&g3y*!^<AIx}JToZc0E2205Ky>Fg@n|4^Lg<n*_wlWwK4@byK^@OcD3<3rZv z^Vxhw7W?M&jNloE(QGkL`RCz5C#*wTY8>mJq(G%hdy9o`PT=nkVFNZt@@YfaoXxND z&$HMMo4fF{YL{&Z=F{F}p<CMVld{;rEnV9UgVhKJyYgNQ3p-uDEH5)R7SY7(`;u`I z4Dk@U$w^4~Ny=Qv_HT`4e{S*Soh;1Zi<P{``)th@iG2PbcHxWHf(O8CBbSX<TCBid z*4U|Ki!5QuTci2EYS_fBL7l4x(!en5@f|}>8lJ`BV+fjMURc_(mHo3o7mATTR=3`; zJN_hP6(Rqwyyrsp_tpvQvZvFIaoYyk6&;#KBdM3C^qoQ&q@4<3lr}3wl(beM;-vRw z!c}@#VWdcl6+)1T6e3far4U)t6otr<@)Tl%G+ZGPrTz+0Af?YMl5?LerN}I?nC4GA zg<ec_ltNb!9jwqWo59|#&})dcSLjVdH_oL5MOc@ixGVH-qU#m9is;h{P3xglRI7MS 
zkY}GlpCfv^Le~?$UZEwTS1NP^(MuJ&k?3NDCd<2IR%qB!@lZ#hVcO@UY=x#vIw@13 zi(qc%q%_47MRcM<#}OT?(1}DF6*`4zUxgNkc2ej}qMy%^YhOTggF??H`ldn`6Mash z7ZY7KM{a+yf;?49z)GTbD)a=RH!1WcqCZsV?L=27^lqX{6uOG&*$Q1tbiP8LAUa2( z*AP8Wq0ecG;1Lv0Jq2`8Xo=`Jg>E3)q|l8-8x)$9Z^=!eorvZX+LP!9c-Db&U?5sj zXd}^=6grCN6QGMAMDGHmgGxXm(YqCzEcVhCg%*fjqtKZ|FIVU+q8BT44$<=zdIHe} z3SB^So<h$iI!mF8K@X8K6whJ`NKxntqT4C-N}{6_dJWOR3cZPFPleu2w7o*_Cc1Gp zTk=gvzh6Ga^_EsTIa{7&xb|{Cs&J|ljwd+P3TLar$pYuG!ue3);E?NnSK%y~%^JTk z7R^(9qm^(Y!e=O)z6vJ_oCyjiS>eDg={{27#3-CZa0V)z0ELqRPEUp7sBi>uQWVa^ z*X0^a0H;rp!n&cb3cyNJI6o_#+2C|mIN!@0c~JqgmBRd7VbbaXrcq(8P?)sLfa#+! zi-}1GaeTL>z8Z#woB8e5dBng#*Q6cnTEP-_$MAoyU^%<5vFV>}bZUBHzawtX7n-7r zI6eyEVp?IyLT+#S*@~CeUE!k#F!Gx=SUPd5iwoBT0qinu$>&X>^a+sreDapLFHoRJ zfnop(M#qdP#IZ&Gjjdn#WC&jinN5ZvsRL67rLydAUnzPBo059YTm;gZ0t>~%&k##= z{~d5^DGCSHp9;$gu+D@3g5p0#{(NcR8mfqF&2%CSdIlz=AXcoQiit-ua<v`Dz(6+C z-t2=54o~^XcD=n|_k)nc0{1mHWGjgc0(_5{9DVg>)BpY|%i0(0k@qRoT<pkBc#~(q z3z&WV-A7|X`?(CC3y9i&quI56CVz0Or=;YMWh;sa(vFZ`@FffWZhT_uCNu{cNk*l& zWDUN&C+t_y&7f4_v}Du2+XJO7)2@R04-j{<Z@=qcJdQ4+TZIAO+Vnjjk|EZ^pCtR+ z_h4T8oAZN`Sf~9F{MVb=i2Z@W`;;d3;Vwe8%}yu}#=|FQZQIi1WOw6XP`Zd4r`56_ z;q)EWG9{47fmi5ACh0z5-|z1fBAcR`G~)5^p@CfFMs>C6Tc)v)jX^~z1MtzH+@FJC ztHNEAcI}(Uik=XqnS@kP$`Jp|5N}BhACS!sk9TLv3U8yXl#T@$UrEptZTibp1i1@! 
zC5*bST<?tlx$NN-MjG233ZU9lc9$jUN7B<!@fg}G6sg+ujl?H`*A;<HBgs$U4s!Up zrtl$3_=FeXzi*)UyPLv0f`FYH5&wk%YR(9hC|nW8=d(?6x-_E7w7Vf?^+e^j5rJEM z?WZ8{&~76|hgoY(M$%O?4xT96&2CgE_i_pm8*KX1-B{<UboX#s^0>L3O~0TUtElpA z;iPhWa0^CpPzt&#!BYWxD(!xLJr%ioH+Hltf^U(}?p3w*NQA|#Y5E@&S?g+nzb>-b z)xA7ch%ZCJMfP*`Y+fg_UO$9KWa5I$8ZK%DV3~V2^7%+-m%Nylc}v*aKSYiDd@82r zB%$<Vp`&zYGU#l<^0fZ-?kLG*Xg;*tqKFp-UD`SMyN?Wd8qq0Lt|W?HIIWZR*~eJ+ zF+`3^hu4yx4<nT2S^cc<h_eeEsp}FJd>|+$9GS~=BKQ?q>ponDlY*V~7s;t9z)lI+ zUdF~8XdhUOaP>y6%9@<6+lFgad?qSwXwSYp5Y3l;$u1vg?>1%|QiYB!YAY79(1Wd7 zwnDV<M9D#p?T6w^Y_V)uxK-^yorh1vZomk?Y%?<->|He32W2@bwbPT9t1CHz?F@mf zHgmYdw69Nnl;-GEtSi|8wzRxb9_x5rmW}nbN*?E0J4kjK>f2Lvt4|l*bh_mSaDqfN zD6#p71<l$yrR0&Wgmz_{{&`o93!i6#P2w^pQg#oP@?!+Q_H&l^W4!&R2AB(1{n$J* z76cAPx{|MGX4no`gOzu*^x+j*nj}|}{MV1kVMmhH8wXuUA9)txy|IATn?|a{kDsyB zLjinzGRr>Hk)Qq<TX86yKhuTnJ~WblGoBf0!ui~ImQ)kPf3k^<t%(dc*%@td?$#ZY z6!w>lTUHQACwCKDSMw%+aU*j-9KpYm#QN6m<DcNj1mB3xtnJ}r{M}AWb0mg8w}G`j z65!vXlYC@wwZqfXw4}d_c3ZI;afxi?k@kT<;*g@=7+e5}`?uXsXeauzBm3e=aK8hI zO4IFlj<du(lYn>Y>(M>CA$n^z1nCVRKGk#gk*Aq`wcJ4|X4ll>`COo`BpxDCFFA`~ z>iL{SEbwR$e|0qLcr-PhLx9Bym2_cK8E(BaNe;oh`!6QR?Db@l<bA{0p`*qQGhW1! 
z-U5k)6r1Fta5NX$B&RD}DgCA3nPRd@wyO)|T^6#;I)8oXI^;4RhED<4-m0Z)9h+Ad zSk%pk_km=pdX#3sHFylnXWxdQ{b_Ak(vez)%xi1fQy0-%j)A3@zQB<ScNT8~ltxvG z^fA&xk$j{nF9M{eP`$B>l2HOf1RscIj~aQPuJq)B40$(TpUrH}|NISGSlog&&J5+F zXX65(d$S&+(V$Ba_1}#DbHPxzcc&s);{#d6F<qaZyZ`e7pv_M7R=IxZBDw%L^lujc zn;`gqxd15b&h8%j=LNvQ1uwGfw?MuC=ze^(u>$uWmTNjIJ_kM`<W}Q(K~;Kgm0(x< zs_vT-=k0Fn;PFN`QwcRA-k9Nq5Bq+Wo?SlCn!jwp?LbF<pOL-#^CAB9B3uoOx1Wh^ zaK{@b^EA9=1+E2*{2vRM=CluA(nh`n_-Y|d;Vy0DOMoS^C%6q=0`MJfvJX$cp($eS zbA#=-VnrhRzb1_dbL08>!DRgB<F>J{eu?GzZLI#6W`XZM#`yjZw*MztEaZ%lPw2v4 zIg`XU9|8;ig}`XX|7_il{(=>pZOPlV?D*iUlg2%@@I@0HGqdgIOzy$Hs5x0;yd-TQ zyLK)tCMyQJL7FL%nAm{Mx0GipS@B&E+X~a2YQ-O42`j!IOFiE)eyLfRHhDnR^tud_ z3zqy@Ny#^$J<Oc?Z<c)NJxm)vR(0N<U*p3Loexj^Yi3hS$y-TGPzI_S6%ZpeSb7Q( znwuwIijJDe!Y+jC8s?JBdF4vW=N;DfLf4j-J5iP=f8!g_J}<<r-B6&`N$w|Bpg_%q zT>AL$(h7EDeVDuMg^={&4EAV!7~gR+^WPBK>D!GoMq3$XVP7bZ9F3T07jt`<5v4T< zqC2Z7@-UKv5j*#sQ2k6Rm270y8$y`4!8K->ET+ck{m(cl{~o6;#fge>RORtJ=eYC~ zN+{)l_x#7DyYT!Yk1L-t-%nff_zi`$PgD5%O7`ZbgF1c}LQO%>F7dFdE|Vf;n+oW& z@7b8rDaw?_&YeCH1Exx_9M%Y(+y|Dio*O6fZTqvYH_qnonOK`m$^5}-Z0e?TeyoY@ z*%Zk;^kem#1b$y8paXOJDbT@B+WicdMv2AI8vepqmbdV=W`Fd-Qjj>MX(`xEmhD)n z1sLj5urr?(FZE+L7mnnwC$qFg&G_$<cT8NAp>b=23nQ9={%0^Vsx942*QOKWeyL6O zc7P3c^ND`C5>F7iZ#?i@2^G?4WX@WYOtPPe1)Z%ZImojDHoSDPy>lZ<^;>DNhW~33 zoA{QAA3u$~^Ok{s7|eo_J2^$;t_GhP1heeq5U0OzJA+49!E9l2xIMj$W$hMUvH$Y{ z2*0wme9o&o_LjZR^ZmN9jj16XQsI~vHQ~pgCT-a23oaf9uz1WICFDNAI5}Rzc3lYN z@l@~Zh2~*3*o3hiJ%pkd?1+0>Kj|<QOLcF%GMWWnZ0`|7u_p?-=MlR_2^)1Wl3zWP zy?HUp=%>W~3-i;>jxNUiB>$JuV@I=N7fn$P;LxPFlGBv1wKqn2+a&iK3leTA<o=Ak zFyF!gFGcA0L5PHxzYvzfb6CGiZF6`C)7=X$TzZM^ew9x{mU#(oTw%NNzRuDs*cK}k zKi(x~BzW#!qobBP0G;UZxuBEHp4!q9-1g8BLGoD$4G+&MdWmkoLN>q?>!F@qzZAvK zS;X8f2lLN;S@h+Y&~?SK#n(|hbn6=^Cg}S&5EGx*K%ekg#AaUZU1TJNWiCn)j`K=W zrk`R(z*gs)Rz5+KM&IvS?QGrE%zwwW)1&lmVIIljw=E$0s5|ybH^uJ*zZIWu9g~G# zIfNjgECDYZhGD7h6%*Vb>|Q6f-YcyYTMLh^?3F7qeBeTsf5m8&EkalpVF;}<&lGa$ zDgMo3oT<v}Pgu0o#ca!!@%&E<nEvYcu<Q{pHEt;ztM~zHo*)iNp<jxT?ssJ$T#fhk 
zFU3@GfM5+5O|AAUn_ayc=XMz4<)b37VGqe>LG{!5dT;i2eGs4E#WvT6`UI6AmJ4L$ zXIYSXZ#cVDpTjR+$huw&<ntG@Vb_d&e{VML+9>zGhP@cAONKG&S}=bzi8)*k^=z0x zD&b$Ly5%)OQv2HzSi9>{`q{H+t-zU4IZc-f&eG*+Z2I;1uG1k-A3>@Y_u78&ql43_ zCDNpgpCDeDHh&B%k-LVv^o=*9WvgW^zd(92lwG^tKDjG)4ha1=o>HnZ<5Y}Av%MsH z9~ByvcfkRQ&`DM3V@2q|csAfhJfH9}d+SCVKVm%FbE75SyfeFbBQ5C8IJ!}CqW1fO zA|vU!))`3o)Hv4tX1`Gx3R71i{Eg6D{9J;m7C%=|>(sjpKXIcSjp%8euritp>C4R} zAK7gCc_aoM!2!SRz+)ex7D1|mD0~vad($ri92d%c9VEy3?EcM&mfw5GqtOGqm9%R} zQ-9KOeLj)Kq76T8g2;7=tn0smU1to2QGxs$64}^)#qiG)*|LAVpSSQDy{=N8sb;2? zzlL8z!Ufi3aOI2EWI?<v#NcdmQTKK=3TV@Baz)L*rzafcLvS?2FBahApLJl;V^>U$ zw$d=<q`vLy3sU4*Rb>F%^lN0qx;olZY?u=(l|&Z=SuaX@2w4)fg|3oy5WDxQq3AEn zX89htmOkZw44-w-8zXQHBZ$ACaBtQth0~P`#HtL&7g#lARrEtQS`>sJoACImU^^~Y z@Y@ZM(xO3B9{Nf^sst&Y@@61W{Bi*VbR`~01&QNnHqvTYZwr&Ft}A(ru1l!8MIdXT z8KlF7Tzj2u`0b}|<B+$ywcH<Iv+bs{oYh6t{*aG^HMGAq*|Ve@)kpwc>9R*Y@ZE&> zk+xqp%e<{G+SLyq+}TmldQYUgP3R7E;~yYBLZWK=7LWpI$CRY<3lIB7V-Gz^44As^ zVHm>pwTA}Kt$0eNZX8TSPDm^f`)&Pj^bIK_Zc;i#z&WOHY=w^O-0d*m`DhCi?>xTe z!A%f8l!qmaRZ5|4a55DK$geDaScD?Q$whKTE|y*f<8GS`*(GBAeDp(#*<JD;z-~&Z zvF~)G!Zy9WT&VbRnqFTHx+<@{WKxhGUSa)i#q$$Su+m$mR*%g{_CLJ7d}q*qdwsd| zGCOlCmcM<R+21xXH>)8!suI6aq|6^dgLOpmNkA!-3DjCBAFPqDn`@;lvsfQ%Fh3@f z<y+(O&qdJpX@~csLMdrjV2w8Yzo5)wdJY~+O?FUF4U(dJ51E0lYTT$L@swE~gyXv0 zA-2fbAZ?Lz<bO->FIhZO1}U$oZsW!&Q|jE2dEPOE7||LPh!lq(zmf&w@C!zJkR`rL zp3YK#_kj@yzdvT}poke+@g050yQ@@RQzRorl1Fp@>FlRF0m&Vw|Bn*E{|KUDy)4%r zOcmUb6tHG@BSOv<{HGMqwVd=_0ekIki0dMB4?ZS6BBgd<)9!{FXZ)ua7+PBg&>FGQ zo^8LYch8iD-+PfTJ9jtJPy*FTy`#iwvP!w5bdhecl;1<*HcxvY{IZ<VVQ380XvTlm zU+kSCeaOmx59CKrV{iW+5c}#s;{KP4VrjxJH2fnRzq_}#sMtu=<G8m~T%+Pv6>C+z zpyKZ;+J`Igj4F0fvA>EFR4h?(m5N(c+^=8}cT8pcs$!#x^O`FeOj0p6Lh&c6n5JU3 zie?qxRB@$>TU4x7@mCd}%2>p49?g`5qg3poVow!ksJKYQWh!n`@f#Hns(3=h3o8Dq zVxx)<CM7>#Lgdeds*Khurm2{z;usZYsklhR<tnaKai@xPDxOyHriu+J@{vjbf`ODj z*Fj}us5nN&c`7bdajlB`R6L^M1r;R~8&z~syDn74)+(l`IIOt_3O4>0s|?wLe|#e} z7@UzIU5s1`m346s1q(K-{!-w(;XdwJw-rWkI1o|7nyWuo$<Zp<8?FH#aW&W}6|V2D 
zk&`P<+cE6^RQIA3PVtlaLVxu(B{f>w>91JzV_%T};#5EOeEILh--^7K>Mv0J-m2fI z`eD74|Em5{<dsUJ?K4$IfGWVL{y^1V@mz@zr26rbjdBIZY({@4o+<uN#mg;L{b8!V z>ZuZLRQ;K%KV0@>N6Bea1ZJxODXRYs)o)b&b5uX4`k$-*#>Yy0o9aLJnA;V^Ynr?6 zTIjBMO=B7~b@Z70u}1SWWA2Qxqs?QD1v94Qj-5HvSU7Xy)bYj^k@+)QRGM^}?8+*g zCZO_No#wY)D?K%Cyw^6<s>{E(V!haB-M@CJUh$f?@@sF+%&1FvbwXWx5>4_DuHv?m z^Hb0sXqF~_<vbsakvHd5e(a;^*_mR|5Xfu6ar5CmS7UhvYq{WVhAO`TeU5J&$2G~B zqVRK~a22;JBYZWjGn@EcvG^fWxT{U^eH}R8JSWb#og?QP?^)0!*S;0hTDT@Tjn0A7 z<vDV?cEEVog31#<8bc9DQ(e+qW9Y#hR8##Ov|XE*elMLpVj?zTHsx>w;dkMha&UIB z=Um!3b1wZIUdZf5jfUXG%@d?ch-T&7G+vy>=*AiIyebFiHL2cB37&!V@=+qkZLwB< zs@H@TnR8GmElP(!jU&%FYCe#gkeZQPKpST+)ciFUnzxe+ZMTC9)okZHGjYd|!*K<O z=Oq`&k#mWES)hm%j(Y>n-^$5<0bx#@XFE^MbB5aqmnx?M#~g=DijQJwxR-52*)2}v zxOP2WM)h*wIfr;YuBrI<1&(u0=eT$^X(fAy7ui!g??w1QxZbMFdC*)>j+>(THFA-a zI9@*XoKHJ^fb0{mlN*#0$ms=TAFe5Z)u7M7y-2{@p7U<!&Ur6$&8!^guQ9!7-M)P} zZUkCqv9)rEzb1kP=2m~rnEa;VheEGf2DinEZ1Cq5;KBu%-MD}}S1zC(=>GZ!pA%kH zBu^!|FbvzqaC~>F%6|fXNBAGBevRIV)0^Qi==2zL`gpIZ$_j%f_(h5QM{wLAIPW`^ z`wf~fe|rsQKL+hCk5p%jHz!WlA0xHFpo#Q1xNwF%H_kB7l{26;hW@@MysJD5Dt!Yq zar&k@JfDVN(=Fh*xH~9d<t@D?yz<jvjUUx@f3PO-Mb6a=IPN}N+?~o#12oZHyu2JZ zFQn=f?_T9v;F8mn-VT^$&cfZJ^zf(g#YCgYHV~5yb)pGZ=^3cW9NHAa|84XqTqa@= zYD_Misa*hP`ZKV>Q15rbx5~S~E5|caLxZP3pQ#KI7tUqI%P~3vrqW`#8FDTD2-GyA z3it(SIuP~=()h`{sK$k-u@$HB;5m<YmpC~|j7Pbk_@~kwq%rz673`-?9QQLEjpUy< zRem1ykLDQrIhq6R+dGwKf;6G6o8p|=%yA!X;kff^^#elDBg<UtoliJcX$$Og>@xW{ zlqe!t6X8UY#u$WLivu*iDNQL{+rx1_d({p>&i`XWL*_ADQ$ttg2WX;Rbl0F79OU58 zU0VY+fz%2|0yWJe0^(e`uy&tvVVVy)&jyeBms40-1Wwpj{vND}evwYsiySu!&gpJt zbciN2kxEHJdBi1-D}ifv7asiCgWhnN<GQPUjUK}ni-i6wj{^4`x6H~D0UASPF}@cF z!Z)qXG-kb>IPZZjn0ml?k1DqU*PP08AsS-@wM(2m7OO}{&fnaQ^KX~H`D@}h*QSL^ zQyCqqG3M#Guo;F1zj}FCX9mQ(b4K&)SR-d~#({;Lv7MPSY6`f3djAvpDs_p&Sye-G zrW=+?{0d;xp95)?4Ojw=FL_4rD*nW8TmP@J{lCig|DVdXzdTC+yRu!mBwTZ(a$JPw zoO2J&0P_s|m%eLCGYzxfRgkF3)OZ)?D2j5^C@;?0<sGG&=B#PIYf)Q`ho&e>)dNi` z8dVHd(V(KQik>PusmQ6=_(;(wB^A%9Sf^r@iu+XDtzZ$iQ)O&daf^zZR9vs(8WlfO 
zaixmORjg2Pv5K=*%vUi_#isPLWj{_)1J!^G6;o79R540LgNmLix~a&iC_PkK;GBy1 zQ7?JhXlnWsD!*36DndHN?N%AvRotZF8Wk&4ELO2V#Vi$5RE$&6prWUWPAYOLHa<`a z+5jw~#ZFQg^(vlI@q~)CDpskuTg6Q(R;XC4Vu6a30i};0&XF-w#S|5z9&p{{0E5cl zRFv*35iY5CTE!|ASE^V{SZQ3O31$s^Uu{We4EA@!gZ`fom7YEt`^xMNnqcjkZgMoA z_!N6(vYc>IFw;fB3Q8CC$V^rc)>ir0l;*JOhYp&pc17oSC7@Qtri8jVD100XIC64M zzzWI{m_OkE6D;D2)W79w!Ubx=|8J2BMFl1N=iip!ln(jNh||n}4)`DVl^-Q(CKnm7 z`I<dB?jqby{21RQApJJzK=4lh_u%lI26`W`1@4}lKt};H;8ubr><W8y6zCKnJ?q(y zU$rE3he}ldz9%pcj^yKjec^KO6MvaOIK#IJ#<>!nIG_d2gr93#54;BF3;HH7GDP7= z0S~Km9q>91?uG!!0Oy4%84$)BmCAGh28Szg3A@8lA<}>wWj<!!CJ^UkkA};OK<P{v zB2+ry2XM8Z*8>m1od#VCv^B?J7c_^%@J6@?pf>@3gsZ?WS=0g_;Q(wxi#7sBwp1#U z0}P8+3Sk7EDT+Z;Lg*YYE|%jafF`^SS8qVgfFp4dUJ8B=FbLy<x+xeKiXJ}*+6Y_) zcLG0M@gb13XnXu{j6HBJ&aF|P=K<e_quN&hFF<?D0AD^tpKpi$hUXG+Pe=3_=zTz+ zPRJN^FmQZlWDI%&@O?N+=tH1u5^9fvx&fo%sP<;yayXJ9+^N!pPvNMGJ-gsQ+6Dc8 z4vD6~lLc1~ItMr!?k4C0;B+_%v>A8=juNc{j!RZ*JOTIsj#`WG<|`a`2l9j;r{I55 zf>v1rj7vrL;a61&H*|*|{7v19IPOgxi!<PXk&N38M;X+=%5g&lrKz)lJK(6)JAo~6 z?41YsMBoTGlFtF^d*Z+WzJU-<?harT9K|KP-wWdgG7o@-8Q|k%P;NGe-o5d^_`tNu z1eU>3gr&f>D!mVAQ)#XbE^Xj~A>R%-1&%T+0EYEdXd`f2Ci)&S640Zcl4cj+`vcJ0 z;C~1#x{80O1`;r6pdu6u{CzNL384ny)FF@oT>!kV(hq=dWGVc4Kzp3NY2FiVg{uX< z9oTZ1(u%Rb*>F@b!o1<o^Zl^%7JzsF7l(mEI5%7A%jLj1BhWSA&jVJ$Q7s8S9f`!i zr#G?<qcG_3qq>A1Im!U^1kQw`%*?=d;i!_!fi{)TjYjv4M*puwLKW~_f!hSS9ylcz zwE<lWoRo*!fX)ZLIabMdG4PjhxS|37958x<T1#NxX&8jyX96b|aNKi>3tR>#7XlbG zU6Bti0&y2E7NfTT*kT4gpadNSoC-%ZC;)0_D$VNz+&LdhGGqvQE>IH60LGv)6TpuJ z4lKqL1WhQxQO5QQv3kO70>2KZv7rB{>Fhz2!O;|23KU9|rY3YPg=z%88?ZYZ6(|i@ z3P*`92Ch+Q!h*MybO_fhR@Mf>X-kxat^hbr#6E&$dxD7hpS@IB!wHweQ6Cf5sWf3x zImQPfc)rbX{ouNQ9teDn$4uFv36H&p0So#Bkgrt6K^*Yu`zQ#>0GF*oSAt#+{PP2j zI~RcU{{e^@A9C{l@~8rMa5bhPYDxIjI!rUrJAuaa${J2czw1W)SRg%yrdC-4q~F1d z1DykO+6YS=Xe019xD?RbCXV|WP5`|dc(DloWWaL?_ymrcH1;#B`J17afDem3w*!tc z*a_Uf1&a&lD&Sc-l0OIZ_(EwpPoQBNS`RXWAHm6m15Vqn&;`Kla1^(Q2+NnsASwa6 ze5KHC!2CTJJV=PJ*Iq28SUxj=&Avs=K%0Pta5<o717AC&3`_xd8IHQL9(bSzhknQ$ 
z1p3!1g)jhLuSNe;DQ1)B2>J;^6+mqr$L#~{2^<7R>vk4!wMwr6o`s{DoddS~Nhw4u z@DLm=M76+(^H^>X*980-jt2N?;DQV2ZVde53z#Yo;GsegzJC#Yj{y3up1RBEGth+e z|C}lYO}HM8b~(bH^`ODe06N}8_kwl;THr`iS_zE%6^j@6gnz?b0?qx#ac$sM7C|Tu zL;-m~6Mm-ByMbO3786A91+Ke=oelJQV8Ct2qY%NsA#fbx;xdPuLwwL=$0sX&8e|B` zHg3kqCnTFU(S${0+$JGHvMUo!NQPjd3CUhdG~oo5CY$YIl_n(1E6EU&Rh4MMdn!$q zRqs0rpOCDakja;cbTxnsk7PKc$b{d*QAr8Ojz@e#va}IR=yX@13CRXVe8R3OO?I%6 z3cYJ|qNb&0my@8Gt|?lEJ&#A@+N1IC*RULmE;^>kZ+H*ei_=Sg)yG(`v@iWp>k%IL z(tqMG{GKoUvoAm%X*Z=;e+7Qz)8yxctGk}2YaBD1HDJafVE5XpwY6(ctd-U_tc_cj zhy@3`JNTQ{>|Rr8T&0QPxwy(ct2C!PQZU{SP_TCP+Tyi~E8Bjc@rkNOyvDWMI;VA> u>kR9R>!Q}ht;<}OwJvAfgmne$X0Iz=w|HH}x|Qp8P5(gi4*zbP<NpEm9Ieg( diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/util.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/util.py index 9d4bfd3b..01324eae 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/util.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/util.py @@ -703,7 +703,7 @@ class ExportEntry(object): ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.+])+) \s*=\s*(?P<callable>(\w+)([:\.]\w+)*) - \s*(\[\s*(?P<flags>\w+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])? + \s*(\[\s*(?P<flags>[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])? 
''', re.VERBOSE) def get_export_entry(specification): @@ -804,11 +804,15 @@ def ensure_slash(s): def parse_credentials(netloc): username = password = None if '@' in netloc: - prefix, netloc = netloc.split('@', 1) + prefix, netloc = netloc.rsplit('@', 1) if ':' not in prefix: username = prefix else: username, password = prefix.split(':', 1) + if username: + username = unquote(username) + if password: + password = unquote(password) return username, password, netloc @@ -1434,7 +1438,8 @@ if ssl: ca_certs=self.ca_certs) else: # pragma: no cover context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) - context.options |= ssl.OP_NO_SSLv2 + if hasattr(ssl, 'OP_NO_SSLv2'): + context.options |= ssl.OP_NO_SSLv2 if self.cert_file: context.load_cert_chain(self.cert_file, self.key_file) kwargs = {} diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/w32.exe b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/w32.exe index 732215a9d34ccb7b417d637a7646d9b843ecafa8..e6439e9e45897365d5ac6a85a46864c158a225fd 100644 GIT binary patch delta 19564 zcmd^mZ9tSo_xH7zAPX*n2*{g&h>D1~@9g`o3W_GSiY}0fnnLSpO1f)mDbz|yEBP{Z zXKAHrYNdwZd#0c!SXNZtW*SOmIff;cDZb=BzqxkB?)(4$@O*hbJ=WPdbLPyMnKNh3 z%$&KX*dJI?7r4?OYFlN@E`KqsMD@#5^#j<l?Y3HpeC9Kssw0KetJVqWXX>Fy4R8LT zmLgUCGDYnW=wJHz@A}hwLOOQFG&_^Y{ntNh4#$lW1#(aK`+FiQieo=5953o33gNgz z$ZTK~jvFc`DWXjN^>@iwQD0tigDeuI1WJQZPBw~?qRXyvoJC;t|1@#j6w#_)aimQY ztU3r{IDp9u@H6<6WbkFqoj%n$6)YJbq9T_Gs3VztKhf{RhTr>S2_M(>U{{bgh<u`~ zwOjbdM60CY)d7J9!><<38#SuQ!g<;@I{OUfD(hUal{cMgdoD_B-l$v@ZSoa3<Lvvp zaGZm)hjSv%zJUx5h&S2n{~=<*KSfOZhX`N6e~BoAh(wW?ObJL(-ST?v-cB56XXCWe zEAbpxU3m{wD=qPQz3$^N4v{yCV?E8gr}&&sf|gH`!T?3@d&33RyNtE={{O-n5JGka z#PhOl<g<W57F&URKRQ;Ob5G&5+v5jw)~dz#sus6V2P7&^>Knq=)Z6R8F@{Rfu!Aq~ zy6(|e&>xnrjUn2=_(wo%XIZ<CY!85P9in_%9>sC_o;Io#x}2<UaQ5)D4LH^*r`-|w zT2b}l(|^8Gl|wy$v@SV}*$A-FLt$ilV6rsOC&z{{x^B=!NI?oYXD}X5o1#vCE9{jz z`}gQomFudNmXdpcG3A0J+9=^T_wfu*-la}?Ee@`K9`(BL|2_8wj;@1M0zJaISHSwW 
zD^eI^V|?9VuGPZbI%~7i&S<05(`A1G2=3#lrFyX|uxe6U>o`vi4ZMfKg}kE9!CBdO zNBRT`vI<Ke%+rp}HN@8ml0N>MX+O0(JIUMHwbT=SfQc;CcXI~i>W|I)y|q(SP8*YQ zECCgB+KMI@pFB3+bJcxSVsm7AU1G(}Dpx!Gg>2}YmXVCA)j2n;m?tZ36q>!^5p+JH z8yj$>{=REObxynWh_sd5=-fY=M{nH6CHfPt)2*EpH;1u?O1lt6P~XsTf_+!sbV<BX z*`zcmD@Dw48w0q(oZ@)Z($urad2+=0wgQE>pr9%{HNhKYAV-3t6P2h~S?83vpG^%f za|J5uEUnOmU4y<^X)5_MsMmx-0lx?9b7U9Ws<Ts-1y;Ixw9v&WXKI>9Wq}aKP45{i z3a8T>P-sIy>H?wKC#Y6b<oL^1Xs6%$3fBmQ+sGVoba{<{dwh%{dr72)YsgNWh!u#1 zYlUb?jbWlQtOW&_mx2PX6MgVTMWRgK;4)&!zzPnqbv34XO)7;i7(2#VU}uiS8<oaH zEKO|z)ry)i_9iq~K<n?YsSNkVf#k_fP0im_hNbO|N(DREprJx?O&phzfxJESD$~a| zs8Upf!f@X@J%y<q1EqS5)~_twN4ZytD!30nxEq-mJRoiys^rspVaZk2Vaes&%Rm}K zhm(!LQE@+`Z$(8={R5rn+jk3%#E_=osQA79V%OzTx0&<Ya>j=3>+BF)=^NpzLwb}i zeNbDhs4K1N>^{L4&_VaS-uzo<?N=ct-s|8BpSxVvb1;WxWu+T#3xh3n>r-5PQN?Xa z$q!poXHWi(jq7=o`s5s!K$QC!?{457VRk!8^XX^7g4v;~Nu};*5>uQp3awuPR_roC z!eBwZ1br+8UtiYIAH)0uj1fe)c4frR$?lLoyflcM4@oQ6V7{0NT#|;$)9qfb67$lU zT3yDsc4NH74+L~UpL~*Cm#g+LPTz7r%zT^8fjN_6j!NA|mQQhttM+^kp4QNYN|ri6 zbW`5*3d<-?R=?4Lw6sbxr2Ir+T~|0q+6;o{tfH>+Jaa*3g#l%PR#fd~Zbfmk$n%;0 z?7XO#x>ei>bxJDkggAo+>l^1@Yvn!HQSBzH)+SWD1$SOlR8U=c1CGLaqBA$(Pg}l2 zY`^k1vqTNL$$XvSy#CLyS3>IS3BNP-AG<}mN>a<mboDDtjPaIg6J3{uiRx|71s5jE zN}my=JNU^yWhOtU5Yp)u(~tg`iwaeG*%v&hk}Fj4@wbKg8~6u*b>&%%)M~4Gqnsf$ zA4bGHUZs}}YP-<CK*&(kX$4OEStgm8)U}(?ZP<kT0>Mn^@E~R-shG|A!k$xIc@-;K z*fy-R7u-EOmyc%kMvcRADRq}8rnm+;!Y5P@Pj;kO%V{VNF9r1o3N7&4W)+%&FBEL3 zI$*NZFQdBh5=;)F3L&ccW0vtWR9pE{kJw$umsXy1{Waj|SM8oQkI?PvT$F?hC10bY z{;T$?$|g4Lf{6&F<^NIaY4iv-DTs^_DxUX=?9|w)npJi1)lQk%MmNKXA6if3H8!LF z{tLUKV5wfg7QK6zatrLaD7D%0=?}t^lIGLC==Ul59+ZB{pbusK+Qt8Q?8@0_Y_<}+ zw9UfNw~}Cj-m1ly6m_0SV*N!ZA5JflW`W+ofsv!<63K6&y?Wol7F(4gMrkPhJCT*f zptSWQcK=eX-jz&gB9V2C%#Y|L%tq@*Y<S`5%^k486ISJ1YRF-`p_ih2*e@<oBicBI zZHvwVOVKfp7Y@(nafz|fbf+FP;fOA-lF~tmWNp_}enKKS+*R4*6lTO5H4C;5R}f`C zge>MMmjs9P*I!RzavWkBCLkWKjqd5gX5B{TBehYAG1%w?0Usv(>U%RvYH#vnnAvi7 z0O~_*2s-7B`d&SlE22u!>=kImZpRPZhye&*WgN~yFXn<$lg@Gbr=aZcbaFK;J$;p5 
zh{VA;c;-G@AV*O*>6jzgZe=Q$_Qc4<^ygm060Q=Jk@?{nQH7}-rwwxcroTJa<jwM) z@tmbOsbpWcrvFk%es1xll}P4wgVsE3+`PBRopULm^_Pa6mNf!H%UTlCO_lUFR9eJT zDjX#ihjUIbn!3!W$LNp*GOJsD$ifVG3$PqbCnvl0jdKdBbj8?cBDCXa^X+$5`b8WG zjz~<jgWJ=F<%L;^(dOK9cBjoaO)xFd({UsxBAG`|pNQzor>V(`h(!LyO!7g*u+S7R z`3>lkSi*Oo8*xy@abf$~s&m?`Vf%ut^gR`Mp?guNCbom|d<?nTy(;)ve<&!ChNh7v zk*S3X&_7RmrPtNhAt}}drp68CoT2$NHd&Y}*SoDjbS&%#hVqDzcp4@bw?^jEb~b35 z1zH0qfyF86CQLeZ>ft|fEwVurLH0%Uj+6IzU~6Y!@mDg~*}#RxcSc2SM2%JEqvM`N zoS>`n+V!8gdeA-KYTz*M9DNU{hr6;9xHeBbubbdW=$dG<D7ue4k5xRbKjutm;7*;w zV(Dh3ufcT}ffU@pH8*2r)QLTL?Vhuh=drAe(d4V>*rX4XY{kSt#oh9Se!Pcm9y8!+ zYn9NzXcF8bG5Q%eu9Lnc<Gvb>l~$eCMmrN@kFosAoyn>m($GiX75x+NdnEayM_*Ap z`Kw16FENrQVjdH}WPp0Cw3?iZi5IOUKgU=?8bCrb^kR21C^jvqj2SBzNs41r`#AhX z&v)+-+6Snjjt1h`h(#Pj>m_7stcw3zLN3Hk7ClQ$ajBv2MRc&|MUXjh38BwDq@+fW zwQ)P+PvAJ}@8=A5GHvu;DZvET3TP{t8Xqlsmn?{%&nI>xZSmto-;<mKrD!Wzl<=r@ zCF|eIII3Xx$PvSFVRwk413~bMYr+-}ajj|0+UEq5+X=n9R-#z&IlJM7ipZQtlEaRI z&0nJgD0%adn6PyZX(mSd<dMm}w3(<;<ZkymQ@nYX76f^6T9(DMcB{^5vF6hR6q1}# z1H&$2+w@C&9w>QkROZ;XJ7`xt%xG34?{y_WYhG8R{_gyAk7j7T%|6<W0j+|I#N@l% zaXhCp!06VbxF*rbz^XL3Q^XW7Mk31VDOLDO`_cZaG^RDNvTk8d#mUmDuB>`EjR3iU zgD>Rh9_-E}a70r4Uq1Ph9=co4Fv&JIxIZPvo(bV!VqX>INZcjiT#=174<j>rN(XO; zA;VY}X`$?sFcByr?3McJ!QNuHHHp23&BSYH;Xh_V1A<=@=gGOAJw-D}d(ZTcg&+$v zu#IFSW<-5ElH)eTgbn8C^%2aiEDWYoN028I<NBl{U*u_TO|#M@hUy#U$9X!Jx(y4A zu5P$M;t-dYOSUKW?%KrKe;$k65k#}Pk><o6T_RXc*gp0WZ=y8o`CN{xE)zL;8$H^_ zG%TqKQh2eM^`M8kuzoTZL3aW5V3)hp8lR7wot`75Nqr3AC_9GBK|ujMg_Vd#*25<L zv0)Y9#vkNaEP<1XqIr<vYxz2u$(RxH&`?H7DXJ2CuD2c{o00~O=#~u&_q5aP7`=@K zC^&5E-(!&2D4~Kt_m|$*2+yztxPUHJ`uH_;xHN$)a+gXt=H3LQx3NK=C$Y&I(Pc6= zdGJ7)a0eFCMR2HgFB{ICR<>DzyU-qf9o!~#s5)mY_RD3YE;&-Pl>9q6ul&5=&Rt?h z0?lW;8y$rujkP3pgwr7cp$$&13fIA1a$$zVj(Ga(Z{YMsRWu>IaU2^QeR?$JF_O*W zhk1T3F+I+>=zvH3gkW0B2o4c_iS<l>!ijC5e1pKW4Vt)_b6n)?9JVj7v9*&Uq-c!Q zW@o`2T6V?jt;%W9x-PjoTWMeL(>(mNW_oh2dc>aKs=V*5V`u_163j{o_2hgHcK6~- z9OIWT{tW#1&mUN~w0AJ3lukgOuaM-F`s}rA9A)S|`j!f65!<-efM~z;6+3UN1nM?2 
z1`c`=QI7H0g9EraZ&VD*pwi`}Pw&k5F07zv)8@WHC@9epV5?HqG5!3JM`rhS##cc? zg;C4|IRoiq;P$E*Go9W_{_dR~wG5{6+-M0`0Wqy1?i0y~A+0Bd)R_1QDDmV-p3C(W zVgX~7)+M9m^<++JW>+mH7jx|+Z6NQb_Uan)AEerIsh^30u3|iN{5Z0`?*Q=);5KUi zm0axGL(F~6G9!K^_mPPdGU>mPp8baN&*YQReyS)bGogHhm=x!Q6C>1s*i%hEi|B81 zy`P1rxkQoHJAE_MQCYW<4a(KstE{2zzhJI~d{shCYrH3*=0UeB(sYs~JZ8eP_!6Xb zBPY^SanV?*MecW6aBAfoqi_-jg6Ih?OD9qNcWK#7IdCBGO%E;@D25K$1i@y*u>>XV zyIyBdKJA6LRuO_-lNFy&poSlaw}0=*<zOrJxjwHdH-g>B%ZNICZd4Qu0`)uZZ|^ji zgA&EZC?8BdPFIBZPYm`7J^3?zb;OQ~IM<Jsc!r5PeW{RW=o=Tws{@{jNcWR%0$E4n zE|R#6r4ef`__!aCmtG(>89Dq0Be|7P7{))!aR;#1ah{=K8u=(G9+=5*X(lfZoSrfn zmcg!aij#`36<+TzI2t1q;NZsEhnd(l|K;T*GBa1SnM}$|O{~GFY=^mk!CZmIROxkg zcP|MLIYSG)-xSb~TF6_OIbAk@MQ|I|>*QKy|Fnu`OiB?B+ttdySVe83Gr-f(`lKU< zUW9GJ@9OWm!ssMo8I;=3H>Iu~1@u=8vxuf)kMMcSs!5kxyHuH4g9_-E7l~(3cGOJ9 z;Z?&bnX1vG*j{#>TpE-fQ8J3-Xd<c}vxiyOqS{81r83cJGFGY#DFZh<A+NknR!Zke zUdPSFzjqi(SD6gQWQ#10-`-3n%Cf^7sfELu_k^AT(kZl&)X4UOV}?A@wvEa0M(sy( zG=Remz^h6PWYc^I6i*utvGHD2V|QWV1xg1)9m}35KDIF#{A`zxwt7`fK2FjskIg&| z26w0+?aF)%10$<pOX319u=Z3e7~i!7I&By>QA1J{YEdJZpePY#lX^w+$oWRha~tjs zH=Q~(%3ATNPIdIe+XQDH<G>PZ#}17+P*f9ZZ*+jSs&i|wjcU$=s<?^3)G0DR85?@@ z9P+Hc=*C$xMww9FDo}=HKn~n$voELg<-YXg48B~wFK6-PYJItEU+x`WF4vcP)t8%S z)Iifr><f(G2&G@#LFz8K$9b06VR3QjSJY{A7;7l(Ij<FibHXNy&JmHy#*5C8B2~Yl zf9Qyv=Yy#dXJJL%<YP9Eip9z8Q=r5G?-~0zmTo%EbaTd@D`bQzJ7c#989{4j+(yR1 zc~o`eJ5{f~Y=Sp2wvB9}8Uk72p&>97LCKqZGr6B6t9yr*pYeLVD#qYZZ6j8-p&v6p z`{HcOp+NS2>uuWfzz(wQk?>LQPg``PRy{z}L@ugRU~YVtj6au4R9PvSQ93Lc7Hf*G z9vZGcHrFHsd)0obb5OmPw^l3n!{k{fPh&s8@x^1@Mpk6?mAs8p6_#QfJpDFOpOq}( z{(x0={u8^_C#T78S!w*{)1-$cF(kPSmrV9kK-$8hx=m(|%(g#`QQ40`C5trOMBB)6 z&Gw*Vh+$sKtUVanb`H}QVsW=e&;6a<MLhwZ+#sPkLw5t)G~Ml@u%$t8M2=2$zz8x? 
z*DEsPdsL2uvp;ecYAT@HE_uCVl`g4V+Jso-T_^;jO|suRj~JefmZ20!D%gxfQD<fU zXfh5hHaa5%8#v?}=BL}}L}aQ1a@H`tHxBs1(dnz(_8Sl|;JbV)t%0{jBnDDHwqFJD zFl!u_fZhTfqFwKJ_XONmLTUjG>maVOhbObg6x1l7%X63vNo#RF`*&p_X4!GW8)YHW z^m^4!9Jz`R;1OeaS)uE*7zo=PirMz?^q1^9h2ht;O#}LHMj^9%IQd2|$EBmQVSs2R zk>hs(DK;$OH$;;QhPk2^GR8QXZ;m1xjRSbe2=aw7kv}q={BG>czdoFFH*N0n%qc;x z#BTY695to!)+Tb*l#$V~Qtta!ij8JAVLfeJ5ImTx8gBQf1jKa--E1V;=79-QQC!tC z0C!*a{rj%OhCqh0ox>YNNwqmebcxiM-_E#-F(Def)P|z!9BRd?Z1Iz_K(dP%eP{xX zwp@3z#FE1wh#;R>?sl1bQs`!+eaau?v%y(>_#hH6M3?OEW;f3;yV2KCymow!j#d@h z^F78!Qat3<7(Y{Yq4OPVd8bI|P-RdLjK7GEC%Hq-aE8l<z8=8X$%>&1$y?b&ht2;K zQ?&OG#zDIb6IQ_syOF{5xQ8~z)!j<RKV*0glMA=Fw}x>WAJXNYlC+$pf^MLAUO0uq zBNg}=&|szaF)GgsP5xXPa^Agl$kCsH;-u|);WTo_1tD-&@!GYlW!C14gQ3>FtcV^u zN#4xqo%SmDi?oyPIU_v71JJoH=p1}u1tYr~!o7K7SA&f{b&_1kDda2uAVY@@<UegB zbBC4icN@uf!<77zCK5Kh5C4~lsE1n=eShlM-A?|9uHeeM63uxQha>zY(cg8(>i@(| z!izHhCipgaclbE|eG%~v&kUXOkRI_PDIb+g9?K1nkatk8yvX{V@*}Aq6*VB|O$$dq z_Feket#CNK{R5iBrLu@#1BOebRk&2<yh#q_#t-<!U)qO$#7bjYaS2%%k65$sO1T9@ z|CMqC;YUcxD<e`xZRFsHzU4S5jj0}$Djv}2{voVWef-HT^+i_>TG^|KpBnUlx*1d< z+&6{|*d`put8#Ftk`!%w9#^u{EvywB$*=)Rl9_%6!E(1*oT2o9P?UnA)}Qhf=eLQR z(N)C(+rq=h#F6P~x6fj1b;<fRr-c3{;1+$GD-?%9dZY;U12+<6qce-h&XJk?6NTi$ z$Pv8CO8P&N!nYTYyhqZa{Q<}*y5wSq_Iq6<PvvR&afRglykA46;7sRE=4j3Z^493s z_@#XYb90lZ8q}&7nt;UDb9$tid^5U#@NJy5P;jxC1U@=-*a~0Hg~UI#F+f|@o}*vU zs=RQ0{aifqv3L$`L}b!)wg@kYhz%vup+XG_o7qR2$@WL%_*<h$<D*+dH^?08o-Q|p zW$p@L4|IdZnE6K^A!TD;;^{eZZ_Fs(HIj@Ro7hhSrmCD9P=fM7l&_lY=kC0#Uc4vj zb0lnw4<{?ee)Jy#$LEjZBS(<;^GC#HLxdx+7_Wpq=#Nmaf2_ZrC86W`By9Kt#&#EE z#FE{2@4MpZE;b2J*1yh@(c_{M1ZBn1m;BT@4<$sLB^Bf3F+=?&X+jC^om}g_eCo9k z%WE-lt8pr1H+S66d(c0>?P&e(x8$v&*!0Iy=NMYfr^5>{*S=#u9wsr~C|V=1Gl4Y! 
zTVfucDk%rWcTw;CEtxYu*5v5#+ddj#F1Y_8Xp{zaH0Wl5)Q_|-$-J&q#cKKQ@*g^) z3vtBt-4ICK_~hVcVGOv~UD1#$<EMl!W6H8Pl8bG0Xis9ZCE><B-!{7^)57AJM>Zp2 z#dK2wb8Xl?$G8KR5>dghE{x;Zw_7`VdM+hn3(`gR$l?MOPW!b5`Q0s$>S=d-z0M&x zd70qj?~&NTfhZkbm=eTldZXG&Sz+AJFUCSHj<T?(;in&Hi$%op5IzZKLzLj^#Niew z$YVioHoxqRmTl!$nsu6dUYI_*j!ml+5;~^!mv4O2`WaIHLd7Co19erNUm(^CBb*yY zkNDX(cd*gn-(a3o1M;bDtT3lH1=r-C(~%GP#(pEr=>brDbNVJ5jBieFS|k={XZHdz z=ZwQiOHB8VLZl_^&5OOp?8B)k`fIGX%Jn#+<G4S71|jQTYwvwU!rqz}TXV(2xm&{6 zI6ZrggKuVCaM<3Z(k8O*t=|0a^J?qfGIR=9Gm_(0Z(149gbOKIv&kLsHoOjr-~4#M z=Qt#g=Qghnm=yt}$2)^WWwpcK3FiY=V-u^Lu=PVxK>kpabbI%GK6pDh{O<Jri?^~J zwt56tITKs-3=6jp5aGZT3E%+K0JfrxG!{w4pZ|_ki4gqi$EHfB1GmzFxfuPevqkZB ztk>*b6=`o_(_%Y;Gc<d^sapI!&ZwO}BdZ?E<R3pm>K~iN&pbl<PDl*-_7f-(g-@O& zqbCfQ7K*H2AvMRn-u>7Y%dh-`xns^4DvP77Ap1F9?%+5Ej)EhMS$}bU6m59O;{bYP z1&R^QWO3*p-zDEqnASB@IHe(u?Mk5f<0NlluIBo&4#8`H`kG#fnw~sK<w@to!%V!^ z6+S<bo_NSQ;~4pT;s%k1%$}s<5t4jwQeS@VR`T_visaAt3N8U{jBp<ds8%k)<1^>_ z#H$iQieGObbBj|`w{HPcpLRUQ3h`jPb@|Sy%6hzvu{&av_$zX-Sl1_jEro{36N-=F zvQ5`?_xXcgI$ov<Y_!!(Vkh@k)tPZx79oPS4@ePWlg<PieV^f2QH5AU=n-?qz!1XZ z6*vnAnaR_Wlcz?Z(IU18F^@thFWfHv6BF4F^p5q_T<2R~N0B~UUrbT|vA*{D+17Qi zQB$3NedU=+@|2$Zh&r<BgM{=taQN2IJFJtwb;R~MA^Ph~+$MI$dFPAik<FxX%A>um zV@7dLz!lp5<09OOgjkrO&R>o0XsrEfid7UYf;04}So{xW6L@$K8UJ{1z9pM1db|(6 z^$gkgc#>#7sefE*&Vec`Ova+>LXk6teMn;tU}~!hf}b(laQu%;5_Ao%4R<#Lbo#Rb zv)JfW2?52(w~2IGYTWM|*(|pGk6E05h|Hc=#IG7kPEPxY-#&!woZgGC8bZFBZjJ2& zrzN}+A5O!8*_OYoX9gbZy-^aPD_P59)=!u86@`<#C6?Hrw=ge=a^`!Z1T)CF*BUI~ zWL)Wx{>jWTT=x|CY$+-MxT4K>L1$@<kZ12&_-ce4Db=S}zzHL&B|K;rC~h(rbzlQj z)n0^qZ$E9I6rGr#1iMQmWlzKr(-X0=TiN-won4~-Cx@)(mzXrvX4Nya*rxxdjI&R~ zB@VDKE}smrh5r{@xIMLd8Z)&wu<jJ+5xap;$orTKw+|S!cn7meL_Iorb1u2Pk<X?H z5xb6HT@4Ibps<MEruAf_Juze=#4xw=$4BI(eJI~Hm~@>nBzgo~a8-=ED`Kv|d>d@j zUQ#;aRP+;AtiGa=z<fo&?;*L4`O0~CVQ_zjSO-2c=tV!@Bba|xBCP@DdugWc0L4?= z6u1a%>K<bn`NOd|nXNEvR46N=T?CG*oc1fFFfXsGg>8*1W~TNOmLXn&ukQw%Ko^$i zP1g>(e>Z8GnaT&r$<3L?;a^?!&jO-fu{3KpnLev`=Bj_AH|%!niYs+5J{#xi?rC^! 
ztktWgzv>06#g*VFUVAxE=3cDbGb>mWu?c$xt{7d8I#1!<00pZ_)046Mpwp!F$uZG= z9+ZYH9f{%iIKs);*#r5Y56N@06ZmD#<gM8;{Oo3OV0NPDCOJEMv#5|Po|6=j25W<o zI!<uhF0yk@GQTsEG|d?(`}qU7H0%wzY@Scm`1c0vbFLWS8#ugpbf?on_i0~TLo&;z z@YOYBWto&8IfxuBD^7m+M(T??j%Q=DFY~|;z>r6oiTbI&{N5F$_$dXy(nH8o&$Fb* z+`)r*=oc$m?1-iTOw~67ld)WRXE%q)j^{`V?$pZdf--?kT}@t`t5EumOx+7??#p5B zU&8eFizmARuRN;ZxLXYf0dTI!`YXKq$4>6s;<^3*e8-#miuGmk``qZn>DA~^YW0+t zE!;|^myliQ#Lou6@oJJXuOAQ2N9Lun<f(ZHeXQWb4m=!*5|9B%1tb6>0TMvK%jCm( zxsi<1<7JZV04HGC%cQGwdg#>^tl-vnXm9-rQtph6ZbKn*tY9UFpH#r86=bI~iNCsn zeCbSM$!({!592`V8<1=R)ByGa>Hv*^CcsZC$Y@t?q_2^CAOx&L*8piN$$PH+jPmz7 zcFz=`*hD6=moyljii3OeMrFV@a5UiQpD&aC^LLj&zYAO?SMhYAfycqwpSl*?4LJRH zqjqE6uxID6rQsrY4Wkf%lcFf!`z;G0++mxtip8s}2*M?^f=x___gj{i2zf(Cz765Q z{7sez`AxaND(eEA;fPI6c{&EsvV~7?OS`^|9fBV2rvH34GjDrflRs@GBNhzff7?n{ zEzn9n#KFmDtG2D=%mO#xdn=i+P#a3Ir~8j6o-JhK!Xg|G?k*e|-2;c4|9*3yR!t@> z(%_o9X3<Q3(s3esCW_y(oWwlyh%p=*gTpAGZ&VAe;Ram;#FNurR9$jua`6P;$FfzG z9UT{5+r{jZ6ZsO^@XQo6D|*%v9gow>e>eMthZH=U$X`B1o_;n(`7Bh4mh$Nn%O5mw zwWEQFOaGS!E-xdWKbu|N4_aeebCQrsXb&L`r(r@GNrQwmhI(HFpPII_WPUPj6Vg;l zg*1(}2x$gAE2L7|B&15(D5MFrPDnL$zmOVe4N~`Ui?@8fg}x)uvgsQ_noD01(owWp zNUgL|NXOB|LRv^&LOPK?DWp?qiI7gGlZ4bx#|!B!nkS@XG)G9C)F`A2s7gqmr2~<c zvrg=-fH~46M&8R#K{Qf8moYS0K$kQ0{tJv@B}4xdP<A7sR6y4<v{^vcGxTc#-N4WW z0o}~dLjt;up}Pe1P&vc52zVVs*9quxhOQLQMut`j=qZLS0xI;miRESq1^XE~RY1=& z)Fz<cGxQMwZDFWIKrb;=C7={&IZYSvtBjl^ph9daQb5}nHCRAzF!Vkw)w{Qyp??bK zJ%&;NWdTFlETHVmL;AIViW%AfROqvW<qil1%rDTLmE}xeB*Qle<QRso70?8RRtqSC zuy}$O&{T##Eud)(oh6_d44o>VQij?DRLRgs1XRONi+~y!suEC(XbSLj0ncXSBmvE3 zXe3af)1z3fvru4V=v}NP@7{3?Z4=N!hW;#|6B&9=K&LSD%cW#yMX!<DK`HVK3(yA| zuw$_>$m>3eSRZKdQ<nKCQhlJ^Pnj=J<}3B8SpvmIr!OUEDpJc$KGtx3V6LAv!$*nK z2afVn;(U}CeW2A(5&I|!`oM91%1yV>tz>;*p`U`o8S7}OK5(L+^0kkm(Fd0KDRmQk zB!fQC=_l><Q7rnv1%AqUA0=BK_^hAul8=(B53KN076_F2qx7n!0>ws4-Q@Ihc8TT9 z2VVKo8zlGnbUx(GTIcg!MSSN&WU2dfug9LlJv?c>a1a06e-95w*6*=v*O7%w`|v%F zlMPF^i)u(-We+^7lvMWNi{7hUUim^Nz9h9avg#)hfA>7O{Su!4Y6p1gJMo@G^83p> 
z_%~eSjTO`SMLgxdQ~$8h!d-aF!i8_JaI*mW0mlI$pc~#M{Z@`1>h6m3*q8X`puyFR z-dVwV5{#AMY4Cj8I#Rf{-W!9zNx%u7g+t({*+I#7f5U+$*h;aB?_W8f(9X_Pmr$P_ zRwsR5HK_$>Q5G6apsN@)nWOU`N<ZfL=E_DiQ(1?gNFWQ%ASg1!&o{b*k6wS7=vVdS zxvpf|s)3<bUw+U%dgEpC#;U|FHn0lc%ow_o6RSo>mV={8LrVlkA*LmMnWU~B8Py9E z)`!jyOWv&}?$tVe?5pI+>YgGcX<nTql9InyU&FoUH<BhgOQP2l;&;xPsr>O6^7Wbl z{Hrm<yQUem=2yPM@4kP13Emf9mG)q7CM+`4X;0eEUa$T_`Dd-dp9FmIss(&uYZLL- znYp%K0^73uk-<SgtE8=F@1AT_p^9bXskN!)m*KM;IE1}8dJQPL%x=UIPo)M-11hEX zWm_+v?u+RdJDX-lxP2MZ{oi2kxF*;7>e97>I5wdW^FPB-%l&LmJzxu98;Jj&+`BX4 zP<=vsJSbz^3=2J_ya0;t$~?1k$CcR|m>n^g6TbbY!^n=;a?)Oc^Yx9DJ*@p>rX%Q_ zjv^ZI0*QM)d(fnvSiK)LhWcKZGi`JfBfx%~DK=^WUf&*Qt2R__fT>XFPE!4PQhJz= z#p?unr+={D!j<ix?3Z_tGhjafvT&0F`#~QrNZ<Ki>~HQMN$XO1WdzAtr{+U9lf~-> zbxV(6*Yb{Mv4Rrv@w!L(Ei*{h_0w?DnY&)ipLUb?)~k?dL1w+3#JusN1Z%)nfT0(` z$jQX_auYOIM4q6kgwFWtvNf6}J^w&QwBPfj?mZR=ZFz5GxsS~)>5O1QYZQGAUc+xT z8-e1g*yzO($^WgIf1$<DN|Z4pcXY4|C*gmnK8{ZH^9<|Yp;sypX#cmTvHo_TM^eo+ zOx}GzDrg(Cs>#ohPu@@GuT_%k??)q&D&C&eV?B(WeWsiY5yuqgJ&VPT5V~tAv2E|8 z^nXqfV{c@&UxQ?K-X$Rp_r0SFbu&MX>!O%h<5cW2pvfx92iu4Dq^wABerVOCd+sG- z&J}Op5QaCoVJr~4nRMTg+KV-Na~Qtgh)48+wc|cvX?1%RpY^y}(18!!q;Q9t_kKcF z?udpOx9%tjnSed|7}~i0Jc-_Ubc_MY-^8v}2-slRw6QL#mvk(f?)1CG5A@7BC^)Ex zQQHW5u%nnx0@{i<gvqgF$OnDWEs%_Hb@4ey7KaaTmt5l5msZYLIt9%5GpZ^v9lDqW zo%n-`$p;@KD~_zgnDM~v$-!IFk}inY#?bc#8ZH2F@F)1v0^c7aB#nSH?lX{1_!>FC zVn0#WOpCm66k_q=1Y$FYGKn3*^ph!MM~$i9<!9J11o!t6P&SqdxPW{-XF<LC3(gXU zF`v#P5xa5{Hy`kNeV-W~M$`gNS_L#yK%ArL<Y$O$S8~tVdUSIWi!b5Bv~yKbt%^kq zUELi4bZtHPaMxhbI&x!ITG~{EHHtbNvr;=Mu%L|ozVk8swG{h<sT_Y<0qsABWPjMd zza0|@jfJ`MZg^?oBG%z_^s#`7Cqs(X)$Z)d>fI?O|N2nD7uS=|J{-cw)swK=fx&+) zz%d4eBS}te5{$B>)=@qb;SYS}<&V_jVM|Q6PXY<+C$FGY3I&OVL6iTu6HaGl%)YmH z@bR?8Mi;<f*?U7z_x+anpWzcQmZw|M-h*$YS!9y_3xtOsgbzb5x(HvR{79GlM?vNS zGJSU+e#8Re+1-<0_c?iQcW=?p<cr;DiTf^N8RyX6N7-KhjK9hPOhpbppMBO$*Ig!& zd*sOz(LWgf0wh&A_n@mDbm7wvIzneZO-lDDcsC{M_GBjcd7a=@oWJq{6iI`@>(&Ii zy3nih2|oAdH~d1v_9lp?kc_>tk=65=q)^8A3Q%D%q-bvvulj{7+Iul{p^KFZCY0c+ 
zE!{Up6c_J&aI`rx4>eA-LY*<}zNaBy?e8D8AEy`ugJ=^9{ojCpGLM8ENR+$-iX(;n z#lnbrM0+5Q?U@I9Vn=-EK(gpJa^iqSR6@K5dS~PctH}9dDLk70I_!T3{rBrkT@T(t zW9bWX$>@Xe{EWF|=E3Z=Ee<s8f82asz&jo{XR+@op*uWo#<Y@e4-VqH|4PIkjpSQ? zAmctt=hs~!6(8xdk75t-e+A3inDJsHcy3O@bRgcnx<nWn#IR4nB*I4&)KbPG!Gag| zzjNLG=;_`Wzp@wt3q@_J!6G@fk^QpS#izmdae8Kd-bM}$?X<ngQd^I|qb=X|7su@d zGy={5+5q<f;Ws%h6`%vy0CNFL0j~pU0H**y0&W62-{QDvKpH>;umT<j{C=0?E&!SU zdjW?)YXY<Yt^(R`aphbH?nDUyIUpNQ2yg;c1GWGT1I_^M1G?VE-y;L$fFXd<fQbMH zzzJ9mSP%FR@G0O7;1Zw>a0k#C!IKz38bApcb_bWpaX?A|6@b?PHGmU<7C;-IGkTs3 zNCT(=LjYp|(*aJvO2B3S$8L1^w+16y0@wif?|(%K3)egx5vhC&_r)Our6QiObnsMs zij%R@vZ_~9^n*XX=24gj*nbUhvG%<qy5RDa=PaBUG!0UAvTFGb)sU`58jjS0v>T(J z#X}aOBkj)UNMn#TeQV)jkyap$LmG}W9_fZN2)QA(BE{eMaN$T1{O5B2ZQ+uUww$(b zy*PNuwy!PR0uW9iU5K;>=^~_SkTM-CN9sj7tG4>&k*-7XO6JX*`b5c)ISa<kc(P=U z%UJ-$b5l7dU=}3jaC7nB96&NRgPV=tQ7CZ%n}+jT3Fx&W&t{6oLn1dG)T!L#s8<3$ zCu${wYc6t39;1~BvZeu_hyRixXPzMS3E=&>KBzelsSY^-CvSZBC{dp4MnvZ>_cWmN zKU<#6O-JpyoQa!Q`}4U0e9!PUeAtMNjLpx^PdfeA_9fR1Ih8YS7yYuXs9r<{HV+M{ z&_k8@q;REYRj+8WvN?wDpFrMgPV5wxWFemBo}JeA^QAwe`_kVA`qC+bgp_|<N*ovB zItR)zO>*+mg~U!&<16{bOg=@)APc;#g~{X-hLDhpiD6$4wQzW@WlHCM7-}J57h}WL z<UFKp$gz+W&9QuTE)x*cIs!!_d@?H^B?m7ib}GpCWzXAKi+sQ|A=&W+D-2uVvM|ZQ zzX>jgx)956StM|TxETlk-Ah6ezFp{VORXSm(6ECRq0WP@HTt{wAm0Tm;K9G)9|>~% zZ~xfB<+6y0|6dtfMNP2czaCIvaG+Vqsvmm%|L(me{=f8I<mQh};l-k1bLKud)#+cv z1?2co_Xc@5>r|(GY{`>z=9dhcF{>oMtYlgNmm|t|&c**Du)cKy$Iv-XK0af1N$tr? 
zS%I}pe1ho1U(*8nIagY^{|g}X{N&o5*FFsxy!|2~^MK8O4S@B4wSYB%<$$GtX8~n^ z>41p<D_|5L7m!Wfz0t2A2ABj80JwJn@du<gkX{8`0-Oaj0geOq19k&y0Gk0T0ZRe+ zy8=$+x5H<VcV4iNL4QT{Gyu;4Bml&K8_kdjXaY2nsecU`ItAD$fCZogYy+eL5&#lF zJ3i~AfYX3Fz<xjtU_HPInDRZA$M2Jf=4M$B=?^IX*}`cq!w=&dxt1=2Ipt3lD6F1T z+_H3K<?^54gYmB-1mqzkynSWhbUY1=t9_?^a(;O@1`#OjB8`wHN&89%NG(#Uv{*V_ zI#aqpS}t8GeNFnV^pNzfG)gv3=9IlC`#^R;_LXd{yj>own4nm%*seIB_(^e1;Z>-V zBb7Gg9Objh)yfZ)CzQ=fP8F!?r!uIDRP$ApDx&&8bwG7Sby;;^6`<~_j#KwnXO^oi z>e1?a^;Y#k^%Zq~R&iE&)*D%MS+Saan$ena%`(jj&1TIJ%{Q75ZKl?wb!+!(?`gwy zDLRF2oNj^cO<jh5q~5Myp#Mw1)v&{`*YL67qG6nIqH&IKk+Iyk%DB$B!Fb4c!uY-M zvQcD`m?BN7rgW3u<TNcYZ8nv^Z~DY^%5=;0rg^h@ubE5cI0I%?D%D8yq=nLjvK09k zd4arCUa9Dj^>fzWSuX7rZIy0}VUb~p@xHOMsfVdIguP;7l{k(CY7Nre((j~H+9v&5 z8Z3*F#mffDR5A;6w_Nrw*=1Rge6qYmK2!db{Au};a(ShExtz#fmv5ABlh?@i%j@K) z<o}kRlefr!mH#QfFAq_KDUuYaigblkF-(!C7^iql;Z(R4D+T4(Dvm0?V1MkP=%!3k zDwQVXbmgncEtr6Bl)S2&>I*fOm6T=5+L(1b>zAzSS^YFwnyH$((CiDEe`&U9%6Dk~ z)`V%VX>V&g>#}u^=n8ekx^=pJx^ueUbiMQ@{c8OzgVV6wu+?zi(8nk<8cZ`xWu`@@ z<)%%hU8a4ebEY3mcT8Q)k>;Lesd=1vjhUJ`3(jKDgcw#eLi(t5ob++&6Vh4Ir=-tG zYovRnhoynC5Lvctgv?ql8!wwEn<|?lYm$8@yC{1?J|7xcEnh3&CqFFzRNf^2UjC!J zReoK5N8VW>QA8@@6nzyL3aw(OVuT`J;ZnS$s8O^jZYq+MeU%x?A<D;;70Nrxo-n-0 zswY%6s=caHs;^YvsV=I1QC(I2sk*Hasm1D0b(A_$ovtn)q*khRYLhx!JwpAcdYpQi zdZD^p{i1rgdX4%u^+xp;^$ztRb-lVl-K1_)-&A+al4WVKELk;Kd$JB^eV)~p^;g!N ztZteTjYH$ntkmq$e59$@*tJh;%e8N4cWL)&`|Aw4(YjZ3JD}CCq1P7O@46KIAbqyp zrC(jHzo3^GREEKZ7Y)^hR}C4)8O91@n@MEWn{&-$%~Q-X%=64!%^#W%ng4CRX#T?t zL$h)?+u)oH6MbI#3Y3{5n<+adyDgh7UnH-Vza>8?KMLLb0G&m{cKRqX6$-@=#SF!2 z#U{l*MJMH0<?G7dmAjym@&ff!>gUxTsp~brYF^N`=q~I2LWkZqd}AoYtj3s6na`WK zDIEKs8!MeGRmx1V3$g_HXiQX-LaUstEK#mdo>rb$wkfkzgH^LsOI2^HPN;6G#A>y= zNIhA-0UG>VeOVowH8|@rO^$Yw_BriKhA)i4=27Ma<>uGfh%g*>jC9L>SNwswyra0U z;FX<~A<BB?CFM0`glej)QN1y%7T@tt*38!I(j3y9(0r@;QFC9T)7rJ~Xg|<4Xxp?Y zx{<m*`T_c-hE;~w4b6tDhE!vw;0cx*Ux6Q(Vt&fJ&|Gd_YJSOl(|pg&Rg~i}i-u$1 z_m)f7O1DeDlzuO)-F9h!f89=z&5*fd+hu!X^|Cv%PVzAMDEWB#RH%BLe6##B`B`~0 
z40Inx|F&w6>NL#kwyKNz5%o;9Mf<pRo%X2qN9|Q@CtVs=SG(?6{SW%z^d*LJ!!pA` zLrA&NU>s+hV4P;O8)q5IjISAw7@LikjMRA5*k-(8Y&YIBa;A|ct7)96&@|C>)bzQD zny#5{nA%PEOg+uRF`f#HXPJ4qc_kL#TJw7IFXpS}HuDX0JJak+obHj>rL(1xvMkvU zS+1;5Hc2*HHedFv>;+jl^GWMvTj19Z%8tkyWnashWifI&#@M9zR?(~w!K?qItW^zE zTj8Kj>wdu)Li7pxA^LIpQn=lh^y~FI^>zBM^_TQF^<qPuVGKGKVb-7{yUpj!+y-1X z(1AbXR}_m>AFHmYLe#%y73n7Frs_&`Gs<;O>YmcMbk}tEbWwVRevbZCeY&9v>tGaI zO^NBSiL1eN0s=Cmon>caLGqsRV)<fuoMIHb;2#Q`a-otacPq~*lT>z9gt{k|>Y%Lg z8oOqp=1t9anyZ=sZL0QZ?OWRY+S6K*E>ow~J%RR?=nm_ebP|2J3_h+@zgE8i3-N2i zw}u}KU5o>aQ;jo?ca13~o2k^4X3jM0%tOsK^DE|!<{B(S^rjA%AtX=0mwhAUW&LD- z$?l?aCb>;+Q4CicQ+%rEgJrfsIYU*E^(MT;-&qM7rB<&k)6LgCqkB%bT6abV0kKU! zL9=9K*j^W4PL^StxFn;pS7+yo(n{Ary<ltMgg<`Wj^?>Gob*7-mD;7tq}!y8FjuiG zRhEXCwab>G{x(eGX<3`>9wt&L*T@Uy3*b-p!%*Ah0g7aWMKMKDq1b>wU29QXRfv@w Mx34{4bSdcn0N`!UFaQ7m delta 18962 zcmd_SdsviJ7dL#*4akVYK?US2ASepzoM)Ib3ZBtX3?vmTPt&A@I%8TQ=!i)ZKIpd6 zvhvttX5~5a6pD$am6n<%8W~S*0u#-Yl*jpg`<?-Ndf)5%{`vm<yjru@UVH7+T5GSp z_u4m=RR@$E45%`QT2~vhr}PM&p<3{ix*gur{-stTKcMLgb-0jv)H)$Os~(Kh5OPN? zMXFjbRsD=W|Juj@(3iFe>9}X6*;t#alcH9)h?5ULd+|xm_phGgMO{Qe9CrwWcf7=y z0VGB=fWQAK86oN&GW!pXGq3`Sh}&viozz{jQN&3)b6nL0j*DBJ6dQT|cNBW7dVkMz z+<4LIq?m3495WU=K>Oh!e!>8U$OBPANJctPFmV}xW286VNAw#R&G+g10QtZDv!LAp z^JdI`ej3NE3PAyS7MdBg_Z|L8(Hj2%L&k3w&J!{0yoGbOY;vS#apl#{=<<6GwM~nX zd{0C+ine$Q95J?oT{zCp*}^yxXB$Pb{9{Q-z(DbmY>uleZ+l3Z{iAtNXEHS)mhTlt zN&*J@71*YeT>%k&Y?!d(jJ`H>?Qz>|v^<u|JRXmoFYq|q=&~q|W2w6bX%2`j&C3V1 zl@ap11&94H?mWB5;}p|5;fz=8ZlMY^;5wP8uXV(`TkHXal_eMd*yb(~r`P)Bw61rz zR6Nn#xD=B<hB^D^(+?qlik!Rd92&rJEe{JTa$5DZC4Lnpt?j-2r-v|%(tOs_UFt9N zbQe8+@yO$Gw<xOh_d{Q+wynhQE1b<%`XnRgG^gj>wFjWfLR4Ds9R%x}ehOJ(IIfc! 
z>CT39XMOq~i+II7JA{gvz3A-=lPaKZbq6DB{#0!B{VTT7JJEra{`bFZdMTCJCQs4C zrjVKLuuo5;V5YvSBXF3$cK+{8eib<_tgTm~Q9q}paLSpQC)~}hW=Xz%07Rs?SK(}> z$z*k4TE;oltjxJ<t;}h&(m#a0@45vMzX-E(oebAMat^P|X|;Y<_Z#_7VBbgu#^gFF z(Vuc&Z1Pjw8^Ze897IyYy@RcSv6SC)N<0x3QXtkOL&%%_xhzg`vSNA473ADGVklAJ zDJZDOPKonGXvr>dWP%hG%c~s{*W#40VrPJ&+S0@;s%;Izbf=L9aneNpCN_0_8%!g+ zD8Dj0MOk2_Bv%+>g(D@+tumky`#sNPD+;I6yHG)`e@co_%^%eY3+;b;3vKi}Z=ppf z{FO}Y9BH2<;9oox7)3TmcMDgWoiYjLj8!lSO{3K))^vuopr8QCDeyQj22Vsd%7BL3 z778t_L<??kK}4WNZ7Ige$@4^{c)2j+@gy`TCL;}b+eV>Ame41d!#oij$mgJ{u@I8m zIO>9hJAAT<JQ0-2Zwe!egVJNpqe>plfLl?VDX+$w$+NY9G?wls$Acnba-g%qLWr_m zhIuw+GZWkj(h?LA8|f={{yNiT=G^xk(V?GovWsC-p(L?OkJ5%vUv;sfdS*o@*D1b$ z7D0A-FuvJnC)Q)<CM^B6I6jMmy<2I>17?X~GhO;*XKz$-nUeED*H+s$3VpWx<?(pi zIVnL?t{UD|3$=Q=uw1CX^#!;v3dn0_0tVij4QF!uIHH95O(BdpjNIyy%BT2|NJ(1K zF+cA}os!z}dW^6hrqYyBS<E+eWxU^qkSUUgQdZ)15yxz?SzNvyy3BxxXS$3mpX?A< z?5E&q3a%|@sU1WY<=wBbjN;793dx|#@)qFMQvuZ>S&-uljMm+%s4n+F0I)CZO(xxa ztuUL)mzgtC+$(f{t#6$l@k-Y+(jtj5SOty}#!;A8S$-Fb*LtcGmwrFLz%I5m{+HR8 z5@TZPTya(Z2XiEgg*wAq$&la_#hKuCr9g-?GZUP@3Nq|v+YT;B+e$@3<92?ES5WJM z$=kuvrB~XEzq-%*_LkY&8w>Kg52?@8*Enfqc{7`0e#P4?rO)E0qPzjWit5a_S!$E~ zj-HspU1*9?8KX_xPMrg)Td3hW8C5xqhez_OEuE?V)_E`e_=Q$?i(BZSy`-a{qP!06 zxCPPwABFDIZb7q^R<sx8S39%(Dw;cSq1ErSUnQ$oQ0b6~^XdEk@G9*Zu{kg%E4_Bl z;~DMQ&(!Y`6ztj0M(o)y%w{`y^79Mm<<87|NDJt1Q0h6F0?nM$2Qzo&<o~E?oDMH< zm)NRqSvZ;!Pf9~04K;BbSIb4hkI|Vx71d2IDXQI0iQxXR-CT!eGEI;a@Neh}c;AW~ zvA)(BOe^BZh0yM4j*bTGk1BIotTxsgeIr3|sZE=#9%op79$gVhBw<l~Pk_0G?QE?P zuz(u32aX}%?G(eocBeg1XpbzakkZ4kBsVOD|23A(4^#Fy0-xcDm<of!Vv4ZsMAnME znj8!3t6z}F+Odo2+MeD9^Qo>Eq}P&9AHr4VQwK(zPwNHztngcnDv)s^N$zU4l=Ve5 zG#i9Lc_Ql6SzICgG>zkI4)kK1gde&IQf3awIX=I@R*(&f3JeFwqwLrOvZ-r7KC&<Q zvTIDpE+*FfF_?sy+Ix~eyQ)OfNqV=8h-*n4ml^1|sed@n<k5I8yRXnMlF00CTJb_O zvb3aOIoa7QTD(%o)W1r;?566uEFJ@6k~+$^#*#43Ax5*a8MTJiMw6uOc|ngbc*Zg` zjV$TjJH{qR$QhMSci<@DZo!TP3byVja;$qo!cB0yTi}!+?5NC~Hb-~*4EWgO>C`Cl zpnD>ZQN@M#PX15<Eq8Jc@z)0|?3~3pBHcs8?%?7e9GHd`qd!RonI1kQ_#<qsKGPZ7 
zgM1P`ubUYaLqBO@W)S*Gpp_0(5Pd{p@JX1Lk8xHc*%VRHc~~DvDS@s@C5p(D+}X@m zw3d6Ez3q~s%z%^_^ckE--(i!5+~02sq}O3DFpTen#9ce3s3|;;mM{id16nO7fdwn7 zCr+**A4k@T;>im=dc~Y!<}Mf%J4iKVu#L}&^>s!;_VpO29D>ohPh)*o<hJV1I(yJj z;Hu^Nv9*u|)Xg=11UxfWJin{pv}t-c(M6@ojjZBHeT^fomOFQj<C?l!=~k$z5Twqv z+_h`4{c5p0x7B?GD?FH9>`qokMfcn+XKNw~0`Hn9jAMX6khoi#By>Y}awaMvas`(8 z8SlDqHHTsCROYtOuH?T_<M?ltWNfrF_(r!5VU~9ztD<|0PLuu7#rzEe=@av$c&Y(1 zvC?UzA|_V!9N89Q33>x0sGTn8N^ZrZ1)gVSTGf>##HOU`d_~b+I~qNL4OLN1Ujqdz zHHuCMBC}#u{MsP$PV5v>F?kf55}XAdpF$f#$-ua{;Hbxx!=YqC+()r&!}IZQKwYOj zjWeJCIH}_#DLzuPh-l*%@cTo^uK4kyog^SZDSCnE62?ed*!U7c*eEh{#PB>Yfe3ms zxTCpQK+#-N8ngXQf#lPKUbzwA7aYsTK(<0U9E*F1WV_25Mp+&2XjsizuKHV8Q=*C? z5@$$0y#(EfawM)9VO(K8oopZjlcZTIHG&|mdJN<Y#OG?o{&>7H%TqMD20k>b1*05@ zR{v*|mtcmzNiQTDlj234$g!k;L80IiMwdhGBxQ^*uyEX#s8G1cxn{Ni7j>pTnHb_6 z=~lI3Q^uVn%T*%g9C|jz9Vjbww>G6&>AMWo*Di>0cbe%kEHpa1;yjPT!IwrdFS(ag z$_5pMG1vp?m+WA~j>T>8=OtqOBj&?TIm7A4>{=tNiahdpvQ)#P&B|htozJJ=-eHFy zNkyQ-0~g<e7J}8gwCHGdz=!W|Xo}6tx6;r1l8j!dQSX6TgTrG%0euDsICLwAt_y3( zxnBJv?`A^X?pFFQOgEnnR&nq}pP5N$N{_h9x6u*yn?l!23CEVRAX6(F{zWn<MJuW& zFQsG+>KE&EaP#5BZ63BRpZlFTV{lKf6*F!PdacY^hy7sjc?(V|nX1$9;m$7kxYyu9 zw~#za$t}GIpA6xO?Qt}ZIdhtW)h!Uh=pcd62oD8G(chGUKE(D|TGRqgPlN-_ZaIlI z>65tlZ3$;udu_0fOH8$li$Y~gjbeLe+J_PBBKqhiQvE3hykKd#z~qJ)?&X{mIXZ=Y zl6$(z&mL4b)|zi)*J0Ywgab-WeWtU)*~v=ZyoEE%ar`t5z?I7_c6YAGt+S4$9;SU5 z_YCUJsRO&Kq=94nWsH9#erj{ta1Fpct)9kphGwV<V{RnFdmqn!mCd6VqsQ0+K`mql z$CV)3E?!{U>vEtjBV*toC=vY`b1bxV_UEcS5m6|Egv-e2J_GpQ2a_dzdhnM9lQ;S} zVrOHOlo`dWnj?T3(XdCgwKrsR<tFK!)=#vQj7{qvQ-ubgVqsO{y5wpNFNt(jhgs=( zvLr29R7KXM4G57#mC&rKIuU)XB^T3@Lj3-RwAZiiSy5mehD)_0$fbViLmGhR)4FEC zYN60Bu2Vrv&vKkU*OY~mXp2yM^zX%CteEGTlI?*u|8H3BcbdtN^r3vwaPqHoRYX+_ zS|5dzxZ<k9Q&G|YNjwN7-=|wd_ee~Jg+FT|lQMesTZajiS8rl7b$0hCchT96P`8k; zh}+c^>-OK}oktaUKSL*Zfrm2OLTf?fR)#9(FC*sa+Qv?EoPAWKaL}}4dAy%NO#OFf zGG*DZC3-bfS$+k45DL=~W*LT^QsR2(aRla3f7XyYw{42z%ZYT?&m?U?ukbRk6?uj2 zQ4Q;s1?^5JQwGe72!TzYey4-2epws@Ev}&t#pIU(3O}1wLwXHd(=F`=tY@?Y`@P@S 
z3W=6R-5|9C=XTrnlb4(?kahI6pU4R5@@}F3AbaXby)=g}&Ln+g6GCsm7(Ru^=iGzE z^lmm;B^$se)sthg>B(jkv8#dNjN%(aHh#e=5Vu1+H_jHRV>iEJt4Owdm}niTl&2(Y z#dPwIasF9cf!kE>addYr@fSIQ3q0QzQ1#E`Yk5wWwO|oE$gURBQ_(l=;g8TpA<mVR z%0F4f%mjzOyS8bLJ&K-TZr$Cgf9MRQ(@C)+rH@ya&K?DH24-JK2jcSTb$S((8=AUQ zn3@6$=!hHSup&F+7tFZOqxMAjEE-03zJ-J<`*q71#9`M$)f(H6dbXf85UWxqI!Tr( zl|j=nz8ZU?t)+!jE9Xfz;c)KTC9;W0CBreXSQW$fsVA?hvcnEi3y1aY4n6|JFZdv- zR~-b$dUZnn2G))zVkeRl{+!#B?@^@$F#YZV#odB~U93lSy1SrpfzrXSk7Z94)oe-x zKejSQq}8K3@8u-Hn&<%s!QcuOT5DW@X<%kyx3Lzk<051W$4c>@z#-GxA(PU`c#T?g zgt#>`L>6*elQ?3k0(x%2G5MZDhfY~99@V*yk$BGI)Dwkp5N(BnDuiD>$@&}Z@2Tk2 z)H$E-y#}h{9%2vGWTG}YcrMGc@nRTSvQ!&aI#Zww$v``BsS~_8r8hUin=^QG25-*d z%?<G8vc0)PZ*G`3*Ug)oWK4skS+gQWum{tx5CkZ6&1mD?OKh;X7>q08GzN^t6S~x6 z#pE2YiHmhamYL5lt|JwheG303BX*SSOqKs-7t1L%`EFG<>e{RjVu9zfEt{oVPBPhC zwrPcopt8$0sgMz*cG+cQ9GqLVk95{0^=1m*!q_%3Mb!qd!dFc&6hX*ayqereth!#o z53XPfP%#F#Y6B_L8Tv5uv*lW#LxJr1&U1cCyB%cP!r`3Y=BlLRXI(lBHdLPsbCc_3 z{2>#Wp-<MTrC2iTARbvcI80wN&xAW&8%^|49RrVhcx$EdAWWVO@*?(1oJriq4dgR@ zZ^^3zS!LYE*owHVPn1k$R@Lbb>{yoXNW3A9@A(}WYDfqyyM_By*)1~L&@*r)!&fpq zf$TDL5p5tx3_AnMP?I^T=Z%NL>wkd8f-J7q$a%jjs<E~Bf8IchCPVi)oTv(2t)kH7 zf$%+cKib7iUNt3!$JL>7I2`+DS0I@Jx&a49a@^FjGy{V^{17t01bf<=uOb+gPp6}l z-INf(Q&d~oKiUWTYd+0O#(s_V4Ga3^(;>)I`sb`=qCTDemEG^cU$zD`kp6w1l~%zs zBi4vkkJ{=$JjOc5>DN!7qiN?h&wl?$O0-%)Z+Xw*Ag;EBB_iHjg&GBPHV&T{MpIE9 z``5S#inZVML>LG$>s70Cn5nh0T#OZEg?P_lB5WrpVq3wT3+x_&>2DHtnpTk`0-3c& z;%AYI&J(pIT{Mq8jo;a1m1PNEA4Nj4=7}znrCFo-_$YEAE1hpPkw;kx{JTbyFsK** zqLB<9w6)8<uY`6bwvD&Rok3~**>6bP;Earp#qr3yIB>uI21{rYyFOM7wYgOS;%vY$ zj}XV;{&96Eu88-?P0#h{ku#w-fT3)&z$Kg<9h@w>Lh1)^&Zx(n5EERUkD|&PYQ;jl z=p$)BvWXada2yV#mI(4ub`JknIQcXCVV9d{g<*!<s&A8rIU4>!3Q-QxCHjWh)jh;! 
z^bXWYmwt_bRutLt+{Pnh)sQ!$d`#Vi!FRBo{)!lfDg)PG{)KcBSvb@T|95ccoBoWQ zd^U6u`FhykA-B&#MF)<slWG%m2g6^n4;kDGdt^pAyIbk`#|%#i44wnK827=)^bgOG z3B!99yb6kY1#XwzXJz<FueH)e0_nUj_pDH*8aX-@D1w&m6&H~+F2qIM5u3S=_00O5 zg)Pr>fE7{AIr7!;UTNLHUzj<i&C$(0)E|TEg2BQ4J%rQ>UA1AJT(Pq@pEjQ*(IY1C z^|#6V5&ij`vt;v#8T^8?r1MB6|NA$@JTjG^z>}FHEsFj>cWh|qv4^s|UKP4?mxLj> zBhf!}MC<>+9j?=3w)e^RBggYycrtL*0LcQ>@$HAv<e5>jahV;2#+TW!`y%gg#^E~Z zafa(3F6igh4$jNB{fV0eJqq*3t=iT4$T^g9taoj)vyk4yPF<mGvbv)R7$a#O6+7S@ zXsk4qo?xX>O>sE1$0BmcDu>YzL9FGlra5W_O3ozq?Ejayb>G>Qg{lg$#Ra-cB=GSO zbSn6~cPW2Xqq3JgIFXT(3oFF#+f7Hv<lMfZ2J&)l@6xfAqf*4_r(HjV`W=rw)8%+! zqizVtbx;G_sVSfe2j#I$cKNh3u2>a0IFd;Uw=ct;?4^1(N}S}deM%CUs0U&ZY_~Xq z=~SVJO`z%LJjK=RB1dFJk^lCv(B&_4WZ>vNX&bRM;9RP2aY*P|0k`N|oWVFwQu7#? zEBsGjK5ZIJmW&?2|C&oa8a<rfpF{o{oy;%HCGlg@NJ~PDI{e2Db$xOj4Ppq);vZ`} z4%&{QH&N!(Y86mo={Y5-0efO0+^5Hp?8$9d7>Jw7%BZHUl{xj+JX(uFQr)wEsO)jg zi`bD#Th9UG?gH_)*63y$YC!(~ViN8hFp1TVFe<M}45x>|$do>r?gj>v7%!NFdk0yZ z7@t?}EA2yHVx>_{2-q)*XL0fnuTe|~*=H1ijQ6x>N)e(=9RUWMxgw}pU=o6;H|og5 zF<Sojk>s^8znPNiP$d!j9a5jtAgvNS%0WN+KNgGis<PB9ZeCA%|0?UAUPIz#q-9sh zQET7Mhr9(FuaaBVr%J8foLXQLI{f|z+!NLA6BlSjZkYb~JUo=KxDlP*11h;vSTVT; zF{lLUFVsM2fTI!t6$53ed(NM2|90FrSgc{S1+_vevP06~Pb$WTWVl29zPgv?Kg?a@ zc=46t<m<5&B1#I!?e9VbKkN)*?_$w;X1>uxp3HlNfBy>kC2!QwP+V1+N6KkPpWJeR z;=~|9%&P^@sgIur@IVQJQ?`dGN5dcsUAfJ;zP9LdB<wtvLl%uc{69UJpBT@7WFW6S zF+7?DH|+sMh-vhoTP}Bq?SsqY{u8OAxjQg~{UEz1i(L;NIb-QOWSA}rI(XUi4?qm% zR~L%)@q!{^=tv)%w1bU)dx@ByOChWBL!$hBrHz+ZhfD!YpXAY7`K0RKQRG~HaPU5% zfGtmY<P!PU#OQu;s8WMb<WWrl)>y|a7;}%Hb359h;g?9?f|SV?P`oas;UY$Vs%e-I z{f*3YO6GONb1IZ8rvCBXgF!9i>Qx+D^(;!oBE^TnJ3oZpL;ZG7w8tJf0XJ~6t01xS zLRcJb>x~1*_XSgfS=_{Ku_qSg)0<&r$b_DvGBR_*bAmWZp8aeqm{~E+4P*NmPDEo} z0cGU&gdts+z!q<A>g0|uBZCV2iCV~<LKQA;TMF~K8_=q|)#dRx2H^m0!oJ-?IzQPT zrJ5&`16fT^L^GNCWX#}`<IpZV0!%gphk}k((v*wPRv4R{1Xtg??d`EJJ<~jUO=ZV3 zD}8i<9DTCi=tQRQ794vz6#gMHOyO&gb~M|#_MZx0;bVKYgN=q<fWmw8qsB3{#@rL! 
ziG;<zdeE+qxuP#H{e~?9#jD@7Y$`P*#3mK@eF$T=(vxI}?I~$Bt`pe3$D{QAmBQF+ zHg*>A3b$F<pl6@QMS%qyDoW~b5Rbk@zOxPBOV5zNXQuHh&yd;ABm~{WDWwL5u5ZcO zXVM4uL)Pb8zr=~;AbeqI<1d&Q+cV&jVyO5r#|K}L-=FCb-vCBmotJ?&{tN8bPFa9? z{sEF~pBD0B9<w0?sGV_i!dHaYhXsc`uC)70LhT8WKY!WL%nG0|&?(pz$?k|J$lPZK ziIimDvpT+S9%*^DH}AKPM9nVazx$26IXjM@wwD~7ox&UUlIyc0C1Y{eILEHA5pCz! z%+aN`G7Z;GnOIbVPyl_UyVt;f>2S0K`BXECOqtVHB^kv$76OZYKnf8Kb;RY<`y+wF zP{yMfOn|v1)?h1S&5rFO$<aB9PsxP?4g$bXfDm&Gv)PyijG%Xz&4VwzX7eM`$7Zwq zpJwx|kL_Rw8=d$CGn=IKY$(F*G4Nit6tN0P9=!w`!dDm#O`&w=7sS0gPI4Lbyk>KI zB>C{UINrLOoPBOgQXmwE8x8LFwhgm!WfOvQifZ4ELu<a-tAE}q3hRMmhFiq~lTdfS zv7Y3u;$D2lSW;b_${(#KzZUnz(LQjlRHP@`xrw4MQaCr3-}Vt%FgGQp^drnLwS`43 z;R2f)+OIg|6xlbokiRyDgwFe!ZyinU&P(DijV3+jTch>ZlWpuZ_*fcF>iKyq<1_HG zjOTq)Hh&%8cAmsIdW-x?mctUg?gKVgXB)l|5R{tJ*3?<R$(xQreZwz6EeJ~EqnC&< z;EEP+Fo2~JA<v$VJP~3N;MDh9%zTw^?gff_%=eA>5HoKr#QCdFW`GofSkMzj6h-V# z1%LD+dDj^!Vwo6n-q|BQ7A68_#+{LgP350eVo1V*82*J(M7JQ;SFLzKQ1_M0D)yd$ zCaxj~gmpKqmQ*fC=imH<99obNx)r7+1p8R%Px(IibwNVVJaDrWDn3O*Ul`0AMw9U` z42qn2oNeo_5Dy&=%<I>0R+F7CoQqsu-Ci^cn761)HCeTAf$}q^nX`zE;4M3e{`Q$* zkQE8^OJKDe9_=`K0w^A+reftIqUtur5$&RqL}v4ds-morrU@MQ@~3eows?=Tp84QQ zi&El+g@Fgs5XL6Zg++7E`60b@j6}Ye!snTY;l-k`KFv(h*i{fMi>Bj`ksUAg;&1*$ zzJ4((Ts=%!ptwAo5Y7&2db(-v!xuY?`me`Ugss2J3CB6S3wYFYB?!GvfTXVpUA_tn z!R}X?(_lr3yPn7vkBy9NuMxU@1m@$d)rqWI++UY+#3vej-qSb*=4bRg!mTE@z|N!o zbwpj+@X+pw|9U*9avt$bCZW%hjl{D!LDWnVO13K5^w>s%hskqxKb%FH-8pwt(Nifq zmv|!LV0Msn$4MmqFb<uoN}@^Vk^ugJjzli$k6;Mg6+HHKW*Ik-5j-|~Dv$C#mE#3_ zDd=8BrXC`WB~$q=hsfn6QvP*45tkN4dc&MgFnQUuC0N_A<_#msiqhWv*bU^PQUz}* zAwQQcW67+ttbxhLJRa<EVtXV_0a|!3AQ5YpcXYLjY<Tpv;6^|3fFJ>2ZSRop$`nfP z=H9&^-}P&#>z7ddqoOI!fW~bIxZbZt0D^OdA8+JcH~d_GiRbnGW1A=CHLi@9mPRIo zmtsIEm0@KTt_tZ|WUEr}V+A-$$-Jd~cyPY8G=(L{m&T=X;H(_Z-A8!?pbl^ma1L-1 za0IZgl<>=jg)>gKhqW99NC9ZeNYS$C!5fycg8Sd2zfnuc*UO?KH=+<ZR#3GI_(ecl z8F{p<C%<7S>FG*i$#9o6m2seVE0PI-sQ??G7_bmf1|UnxJFa2j-cELcun%w&aAhfZ z;L6K*Z(qkwHXJBi;bitm2TM_L@Nu4qMA!wkX`Zfni!55cuXHo)!Zo8A?<88d9o)*O 
zv&5Fsh!IC@XPXUPdqbCp!ARXkA>x;aqCC%cEP%m2Rzx_CEl))K!dSr;)`;gjmTyD+ zF>i~ZBVRA%E$#U&!+@1n`#ZvHr^&AJD1_(Ely6Ua^|g+-;{W4fI&)9EB?j#w>sJin z!}pM%R%A-P-`!3x*-d)C?BYl4CLg?<85|0_?{KhZ7rFg%AvW@nD@R1Wf!*kzPu-8! zlMhyEajk7w`7D3!D>Aksg3nq{Y!xGo{*V|vP66%zrr=xd(i9+Wd@oozqX8Eu#HSQr z(v&qF0~a2-#q137@$00eVk){F`-&y<Eq1c_#|t~Zj{N(T1b*oEr1q6$WidpFp7QAZ zb?qHAcXV*S>i^Qgve!twJG=D6S}gnpVtPnOC3KIFhS42D8csJ0X%u~vrD{rqY$A0F zX$o}-X&QY=NHgesA(hf+g;YtW327XiD5P4NC!_{C5~=H?#Z$V#LM;L<n`(u07?ld? zD4HguR+=cJ<7t$TPM~2zI*E#fbSmY9bUJM%Y$`U|BBZk^71Cl_FQg87MMxLY^Fq3q zo<>^A263Q_Z6Svk`2aiE(2oUlB}2Cg=&KA}FQ8QnT_vFG>`Yw(x{jd>1#|;L=LqOV zhCVHzTNzp)pxYTbN<fd4GCWJbs~M^m(31>J7tqrTO%%{`42=LPjQKpv1quZR8QKOb z@*KFr(Axr9$IxE{w4R|?1+;;o=LM7kEv26ecrzm(5>O#B_OXDrFzQwTz01(`0@}*Z zRRY?^P?vzRa3x(RpzK^k=Lo2np-%%9#w=mE@j?Of3v~FJQa0#thMNR(6hjpP8pqHy z0YwOyqwxZo!q9F4n#RyT0nK1&+iKRBl%cl;RLRg^1XRn=s{(3Z=y?IPh^7MnT)?v# z`H+APW9Y{~g+Y&ExlKZWm7%W-=y-;@1#|*K%LH^1L!AOTm7(_4<nXGb5ko;K6h0s_ zV8>!td6kzU)(2R8lwMwnR3BjTQMwA01xmeYwm`|Jo+{G&Un!;EV?Cp}Fnz!<A8W0b z60Q#z<)iHPQlj($Rv%@HmlCHB81JL3_EHk{0TX<bQZFS%A27*Bnd7Bs^#R2`%EXCY zl0hHf@R5dlDHePn<)di4lx%&#Vjm^dOBtpQDDzRe3zP+;^s40oC7<HyJEh7dnYF#$ zD|gvW)>ZZ6W!v|jtO^nFD^8J%t1l+~S&8S8A@AY5SiS-09Gv*msRmh}$Npg(IkP5} zcbz9KYj%n@lDA0@Q8n35Qu)ps_STUVe*9tS-sx}rEaC&|$nf=eZ`}LR`s055PyLAL z?GJhFGII0nX?;E{VNMf!>S|))R;{*h)quD)*jE7#z#%|0Auqf$df4~)qQdnxK5(dY zcBM&h@!pG)yVm_((+J^G*)|rRwc{AiF7CqD=XfU<u4FjSbhgs&Y=3C_np|HLHS@pN zAKXvi;Q|j9;kMIvEFASmF*^WHuB(__FMKiZ2zSzh>p@NA=-Wtr9oD$NZQO)4Sze70 zPJl0jQ|04xcJR@t^~C*dZ$3YU?0vU?aQDXrvFpjrcN4lS07dvhXH5)=+&CiqJ?ttK zTKXyypO6QxBaV$DB8Gv&#?b$9$@w?Pw;Of*yiFu@Q@kjR^xxD|ltOYh-NJpyxH(O9 zj?COV0l%MZeu|GtCCOXT`I{+Z%$94Q_22qEegn6C4c_mzNqewI92VB_+n@Fyk4JyK z^lX#x<$1z;7Vu4ZF9GkZOWx}fHyU2RD*~Div_jfs_Uz9_75d~Fa{RrN(hz|mj=8Hy zpcpc{>Pg(?S}>)TOYzHgV|+JIM4KH<^Y$<sWwK{(r{gmG1C}J4?q^6JUxwK}@IOJb zy*{>e?QH%W)vp$UD2uuwh>{v?YA@mFkku@1lKdVh-V5;YZn!XeFL|DT>~2|{js4*U zwUYn-J11>77LB)O_RjY=J$Iw8cNEdgDq`QBJ+S5w%<AyzVDB6EfPA`#5fI6BB<Iuj 
zfFExS$geb%Z^X5dzJ7>Y+upOE)63#<fc=@ju}}U7d+s69`~9B%GXBP%_&4_Of3W)> zBG12{!oQM0UVC57SAImke1BlqvIG{fc>GE?mjv$^!)GlZMLVY9=yPI+ntxbH9_&yd zBmF?lYn;UP!HtM}7Kz2t$+Zw)gvH>>BBts1AcTA~@yDbo^P%!xZoxAk<Ro~8P00F; zHrq{(&x#<wPAZDth%=|FK9q$%-1|?0_u1;sp$C>rt4Y!1UVQq2y{^dyKmWi7Xt-u- zmH*&YAO%mk{3E^ra{Z~N{Vn(sfMh?t#=j9aS@Ofv1K~!3riJnT0rxzfy$Lh+i2P68 zLCM9LAMibFWa+HwI`=f6OF8i&zF7Fc!rcXo*=gb40{k0r6VQON_Ai6-3!vaaEM$_u zb68vkPLuAyinvbru(K(WwqWn{*?em$Ikb~SDu38H!pG*4bVA&uDT3M`v&}0dmJeex z^Fd)otnVlh&ia3|r5Gyr@$~56p@)_b@?mPA_zx_KzNO@|5Bo)biijpc>TQ`tVrR5% zV<=u9hp_PEI>LXX=~0Eni-<S<Z3!$}aTQ<P*@Ngy%ZT-(ROMQf;VuznOJmJV1BWZO zLAXTL*}Kr(WoQ>+D=`Mv_!Kr$&>k)$?|(Ejeh({BTpf&FT}#Bc`u<P1!5^n2v2O1T zL7XWTQ5M#Z>y)L*<u1A6cGhD6i<XjcAFHGHeu{Pwf~tpmd8$%!lr4b@5sT$+`glfA zDx7}}Iz3!Q!gig=l|u4c*i{mNC-?;`8#A}K!!LBFJ6>wn1sk#8fgcCayHR&XF&*#{ z>A8DQKP6hk1iN@Q0Tv1Kcg<+v*e7g`XetB0JGY@iOv7Jd(J_AcOXU6CiIL54%k1j` zcTQY+^^z{I{U~~6J^68WoJdRBcFXy}JBf79wD9e?2D8sk5Ou;5VBg^dJw^V#$JFQG zqK>sN5hxpM8Jt%h-hd!b{dLC-yD^XMB9gs12@?=l@_v>%ZzsEWFDjrn5XyqQMpNk` z^8DUJe%5}ndT%s8YCqYzHwzyr{=PRYErNx+{A#Q;Za)TrSd;#d<4JrW!9J4sYlpY2 zfd2YAG41Qy*WeXHs4MrbhxT2_2G<YcDWJJiz?tc6b%d~b<|(9VpMvkUpM1V=5dRgv z0omWbbIk$|p1j)VzleE%Pf;V8vfm!Q5&<B5H0KNZ;w4W^Z<A~L6Q)_%+F>7}w0}@- z$)|&0bu5A(?|M|f;A}pXvpijhX4}7?W<gOp3xvmCkM}^X5MSxspkw}r?s1Z$1F1<d zf2-Y;75Z^*82-%XcIITUqYE2M&T+E!KrhiPa{548!c$k_r*fze3&J0bv1h<SJD<ls zhNd}JNysPi#0fB8tf$#XDstLjFg@r9XZ!f5*-55;qTu`2k=H&M(9_3j1Fzz0;|hq8 z{(zm#r44X)p+_C$#wV%#h#yGjgK=Xff#?1e_7`Wv*5eP!dY!0v;x2o0=u3-Gqm~P1 zm1hENpEo{#ceeC$-gQ0lgf3?f02pMm$4S{xfBc>}4*nQC0<PERW^c~hJN47CqL}8n z?dP~EU<i%ly(WrXZcE77LwzGoY{2O7A@(K|`o1%NeJ<&II6<<2RZM1o`VcdhC=Tc1 zG_dq=Jofr`4=0N5lj_4-(G>F6;a;L_5`QFm;D6p^f^2^ij;000tikp-;e+_7kZUx; ziwLj6x?(cpNOsx_HgxWL`+ZixJKlaHe-!M3z5SlJPA(i7$RE8%_@g8ES1ys!NBi+P z*T{=U^`-@zJRaYN!b9_!sSA-oM^DHE5qm4ns31UP>{C05@Wl%4`Wy>p3ooYRucI&Y ziu;WPOIW;Z%Pt&&Yc{c8W(WAZ`Vp=P>~G1)vB7>D&s+9B^?M&tX&bI#L4V?JM*;l+ z*?@6?>41fRRe&9UQ-C_a9YDxEj!OY(0mA?j0CvEOfHi=vfS`vQ_W(heE5MrpZ2<9o 
zjtd9$2514J?{lTxR3HvO72qSl3BV74JAfEm4Eq7pfYE>=fE}<9umVs8*bX=d_zLg~ z;0}QMi{m7KNI)_`3CIRm0Z#*p0m}dz{=#kf0FZNl8-O-|1fxy{7y#LTaeyMgbAU2H z6<{Zz8t@IE84!orvVYZ><v;&P2U@rf2P1+u2GO=d7Vi3NO9x9?|I-%kCouf;U;AGL z^C-v#MBM^h+k5D^u5)SG4;D@gS{hPjlIOp-a6w2{BE_+elOpZP=vORUH%3R=ozam- zA+7$-!bKx3MjC^ZLmG>u>N2h)NG(WljN&+?J&|ZHS-3=`=Pz2gBq)Td{{}HY5RM>S zgmfd)7m+SU%I@!rkz$2#llH!S_Op<^8-K_Ur9Sc8Q%}#Hk?43naoW5YPdR2J7SDTr z+Kl=06P@#)d2Uu>pVZm&`*0+wZVcH~7eSWR{knJMwKP7y_!d6VL{sDPvhxhjP2HB& zq#a$R>GfUn?mN9ZkuQH79AwpCCYcy@$?Bv?BCU_&9Wi7?eS+VU@fMO^AMbZ1)te^u z^`?FLdDA!3g_K{EL4tmY=~UO>!nO1#{eDXD`%LLA*`g<Qlz8+QgMqclHyg>0pAteh zn=Kq%7Mmlt+iW2p{}dfMJ?k;8ILkr?)<^T<*{p%U!#Ni2s~m5ePmCl_-bnE4Gsc_! zU>xg_k9kT+j?Q3(p~GicSj)n{h*@arr)YlaY=PsXgK_X=v5-{1EsVEno6y+6Q3owT zo%W%f_6@N;-}R8UoH6BoDzw{o&tVI<{STp<@Uw?m@Oh5A3%}6*?;L2Oh^(Q<|KGiZ z_5W*cK{o&AJX!wBgU&gkJjXozH+=7hziI;Z{GJxjCuJEz`Ty@gemXW~??-=p;V;|p zqlH@sSPQ5Eyb4$eSPobWZ~$flrUJ$TMv+fj`;^83ivolL!T@4G+jZO|0L_35<QtII z0WJbg1F8Z00NVju0UH5p0c8LO0DslMf_K8d*~m`^OahDti~?i>41kR5mTp`e5DB35 z8ae`8B+vXcaHI{`RDcy=07wBTfOUW<fCSK1hf_1)3g9GQ7hpSJBj8oQbbz(aLjHAU zGSNOL?veNt%Ku~G(td^i!Z&>NU9z~8pDmE!aZ>is62i)pe}?bEzp@~Z2a&M0s(|sh z*k$kC);1-tlr4$?X%}fXX-{cyX}Z)RwMwT+H%m`QFG<^^on#5JJF;=|XXPKr56e%> z@5mp?LloJHLd6Wl5`|l_NpVbZNztqbRfa3o%8^Q&vRt`VxmkHkS*yIJY*7BLe4y;C z3Q;Ah2B;LOmsM}7&Zrb>lX{A}Tzy^LtnOZ_$=2j*#%rF@lxyD7e51Li>7*U0U7|gs z{ZTtF^R>*qnKhXf-5p)4PNWy>`|01+|66}Z|GEAPeVzUneUm=W(ACh#AUBLM6d0x$ zo;NrRD-53*&KPbP?is?2J&c2ljmBF>k?AFq%e2n4#dOll9RGGqdX97hrdhgM`njw? 
zm8v$X_iOs1+k=@}okcfRH(BS@eXZ+ma2s}^!CJ#bL%pHdaL3?pl%Uxn<22(O<1wSu zq&68$gG|FrqfJkkCYh$0?54S<g{D%|%cg&s-av;tO}kB>n2wviFkLeJWctN)&-BnF zG6$OD%_-(|v#iuS()^@(mU+I}Wq!rH-n`Ac$9&mL&G*b0q7(q5>mrSi>ZQ*~OQb8L zJEY%BsdStCJNZ3%gkquMEyYpAX~iQ&h*GN@uFO|XR?bnDDwiuSDt}R)Qk_>_Q^l&2 z)dSTk^*r?|^=|b^^=);C=4p*Vo2{LseL-9LmG)1qC{v=##H>f?Ch6wumg-*C?a=Mk zeWAOqYtY@)arzYfbp1j775xo;w4tXV1>)8i42CSjSVI}ay~=RO@Pi@T7;Q{6rW!Mh zGNakJ(fGdcW24421X6k0WP>zTncg&Qgf#Y;4hzz_Y^pOgn0_mTK-x^5&E3t>=3eF; z$fC?#Z9ZjgfdtyjoCU`+NT8dvAI$O#=}qZ<=|GuARwi2^+aTK{dtdgk?11cutVVW5 zc0qPUc1`vl+3&IkGLOt(9wZNuhs&eoJ>@BKg?zYtjJ!ZTNj_cvjC`(qfxJZis(iKl z4SDHCdA0nM{JLDE5G%qIWr}jeD#hFQLzB-HUnqW3sFXV8MCDXiwOhGbsZ|+OV^nig zOJT`>sU+$I^$c~1`epTI^$zte^)dAwwMQMU$$@x3($r}Jv|Y6Y+R55k+BR)wrZux# zcR|-hAEi&$OZA!h!TJ~U%k;07>bL1X)}Mea#~a2OW<Zp840_`Z;~AsLG{Q95^t|a6 z(;A3wHw@?((=F4Vrbi|}b2oFQ`Dxh9%jR#Hoxq7_BT1IprN^X8A(KAxEcrOOTfSO( zNcpKMOw&ViS?keu(Ix8+>Kb*=8B&Y`j5mz8jW$!M(=@_7-h9{0O~r8ujmV|brLRis zWy9sq%boIf<qh&CIj0z^7^_&USgY8hxUA@;j8YC!KBIhIxl4HztNgyQuWGF7CG|V% z_todrv6^MtiJ5aU>oTKtX}V{13v^|=)w&Jn_^vKlKSlqHeyM&37O%hIN5g*%SSTY+ z<FQcQGhH`PQ(v>vY%-656<{RAI0Yk#k)_KNvP_vtmMz-_ZGQv(4#DhZ$}8lr%Qwq+ z$oI&P$$ypKl0T5gD0(RdDrPF4SG=Uyhm~;#gAG;2DwWD7ltobD8s!_xZOXY=1Cun* zYgTDCYD)KNK8L1KwGVW0`X}^@^&9l>>5szF0}Oi%8OGJdw~Y~|fhH~1^&wNU$=}=s zrW$3AGbfsdo1ZsVn%A1wnKzg>nzx#_n|GRbnID+BGOvT-r2bN|R3e>+g}YAru5_z( zyL6}YOPFw9nN+5fX=MhPr4+LoCL1Mt9rnCYwpF%W79~%RpTvCb$Xn%NMX(}4(OZ$B zP%8|IA&OCoe8m)n9rIhLSf;4N%D<s#Q<T9RD^<Hy4^%`wO><4dY1eC)8(ueTHykpY z6I@oLG0mtmj)L1NHZC)+HNIy&X#B#+^j(UNT{scOb^>l?lx(`}71?IxdR0Ho9L;*o zA<f&FoejZou@Q!7L%bo$kZO3>u+#9l;b%in<51%d#!Rf@Hd8ZG*G8`qM!-?k$qvX@ zC_Yhq3AfZ=`GvAgnWEaP>ZI<g9;}|OUZMV49i=&;`BF1i`(vs04c#*RoBCt=S`!-C z<u$@C(jl@6*?!q~vT8*-yq7|)QR~%aR3ECIsD4ges@|&p5z8$|(@#^P*`_(B`9Tw? 
zRcg)JIog-B%e5!9m$lt8HJRC&voqhxd@r*h^NQ}8?pIw`y-Yt#KUXg@q!}g|W*Y_= zOO-~8akz1k@h#)OjR%Y;jB`vUP2Zcu=Kkh~Y*y9Y|2dIJ8Z6D0PQtKq;cCvy(&ZE6 z@5!H6tWa%L-BbC&#p*O!nqut&biY))2Hm6XdE6zDOodc!ve~j?*<v`Zi*Qp7x@N3~ zHl4p-^2TcgqNJz6Rga4#5{WqshIAS`05UB&q#_YZ)1<?sHtBNdN|?}TX#-RxmZiaj xtg=b6g|fA>b+UtKwiULc#lWV^%jC?h)nnl%DlD)Xo8ptW38IET(sC^2{{bnq7zqFX diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/w64.exe b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/w64.exe index c41bd0a011fd760ce20ba795d9e535e0d2c39876..46139dbf9400b7bc0b64e6756ce17b4eb5fd7436 100644 GIT binary patch delta 26888 zcmeIbd016d`!~MNhQoX~pd4i8FewTsGAIZdDCkB(aW+R3BsEhWJecKh&;urJPhv~c z%FJxCwA8f3p+rT&8HW<JlCohFYLuopp7(R_y`krMzVGj!-}PSC`_FrNmY;jw>t6G8 zuY0Y{Q@X;Tbd^J8szzGVH^baXSi5$9=h`;snDlk+I`~&NTWi0NaaHYaGFH{T4Sd`E zi`q57we!2wo|5@1RQZD{9*}Xw%={UYCa3;#GdZqcxdT_y<;lc0UJDn>b#UcfIPMZy zpDFCJ-po(a-Og&qvmu(qm<2%^E|o$mHQf8Tz)}r&2{GD#EgG&s!*OYMI4*$w_<OXr zh2!e@H6eZ-x|^miFhQ>7BL4A!yTn}hAott7K|+X2f@@&C_%0d`HktQxmy<>EoErkd z*>VKLvd{4EvfbbE-85@ronEsK?C8XCf~fs5Si=eNH9|#Mc&rnPwGYls3_*ZZckeOB z*$hdIB-H@H4|5<`UR@F`lwUIq7OaLR5D>&uHiH|<RA|dp0auy%%5{!w4$HP`Z?kvo zV_vfl(->H|eN=AAa5?pEa_X}oq@;c!`@d56ZA(3kWGb|~Q~@RR13###k3j03{ho*S z^wHBo#l&zYp~4jIF;=kLG>Y0sJ=t}8ACI@aI4)bPL+MR{qV{4>=BVwdabn3@qj|?8 zDvqH#9obuRk}~6e&aRlAB;}#y(#6x|)d*V)5&hc3(t@Wpn;=5Qni0;&UyxdZsqi^T z(%1ln$j)y`w|@N?V&%4Uz3@0QzFNF2orhSq9Bom!=4*TOO1gMCOFWYy9+v`LeK`)a zAl6I2vLCfRW`l^d@@-jBPXxP1O|PK;4xr2J#Zyvk2PGF|fnI-9j<U4;lgqPeE*av{ zEb+1+o|Sg^ko;Lnno5q^pf2A&1QWFq8}#cAOKYE?M%J80LA;n=g*M_O^Xn_+{3&A8 zQxrjZNzZX|sm06|x%TnZc9}PYs8&?%mmsBT+2V0zih4*X$W+N`ostt}(;eBQ>-T1I zg(=!M!7kC}h`%%!Qfk)qa&uikMJ}Par03ZGNV2I@Tap9*qwrTA+iXLv6QgW7Dc=|S z>Gx{W5L}p^Y-e)GiMkHvyuX_5a>N^Q7v*70XIm$Rr)7yXS>l9P=IPieTp2H;$IxJj zjYoU_X&u{Ls8|%9C%uM=DOh9Hr?VlBA-&74aa?(|zN`#QbO;^DH7~}%{_p|Gj0&G2 zShK^&NjFe(K_ssTuh<~nWA8ae_K1+<miWb=$i}$ILmx~)JZUg$MYHL6vsO+~jwBt_ zv4@U5`CqQG7^iOTJrPxGfRLNbpuNVXJ4NJ<5j0%+36oR&1)E{zRgSBo(g>EC$fGp& 
zJ*e<LqXp~s@Y4I}+NC#3ovBdTE`I7HFE5g3{e$=m@h5DC(yQ!}Qy4$wDtq7*;1JP@ za;;+i&OzpVR}jr=@B>Gzlcqeh+2SuyEsHX&{bxu6hH>1!dp4V`HY2=f1`1#^w8#-g zV_?-uYiXWfdAKh}4n^dBe?!=2IHd^Hk<eU4s8tbq@&H12WuXrt)KjpIt|zI%iqsuN zssU11nBE0*Kh?W7b2|sL8Q2xp<eX~$zJa=_B-3Vyr<^PW;jySiPEq<k&Yt5k#OnA9 z85P;#kpliP?xTg&W;l;H*^ohj=OdSFu{QpMG`rPi6EmnMM}?2WYF42gKTreVjS!}e z%Ci|BsN^`yNz`l}`MOZQ(}2je87#!G8Wt%9Z=&?FiQf@?=J|$q;22t<1o3wW&4G|0 zKCl_A=uj*`>0h%tmq7Ek(z_ky`uIX7j%2JuoNR_%2+@*>ik_yG*fq=U2SL2wbdCms z)!+-k^eS5S1@WA;T3N=x4*|ct8r7qfEEeSvtS|cL_qrEiG<l|r-%1I{Y#+5M61Kw> zHX~9ZA|Z;TA$iMjUX^zvd96}7Nf3W1%t-ER^40HkCH^feQ%J%^upH$P5LWV=8gNMt zFgX=wRD@9=tx?8DZ3`1m6_g8zWsMdMvlSz_(DF=ZN!LP4N~p4^=pcm(P?_dXa9dKA zTAs>>BF94_y$WlVT>mw$R2Y-7&|(e>HTnLNPy!{iPEIHS389G85D`>?7^R$MUrN1M z50-v!o#dl%-KgaHy>^AD;x9tQB;<lcMG$9)3xarD5KZB^n2pBt$>|fP92G=ttK+aC z6=7m7@EM&xDt&bNnDnuNb!!q0<)t?_J5o8d*9~pcv(kYE<+*P#N)EaOc6k%yHvV3Q zwa|(B+d11B({e}I_-63U9n-~2RJ7@yOrz^K=LTv&E?I-x(;h?9dq0vXOzt}XOtIdC z*ojFXn_WTl1R+>{M~hjD>}c7nNEmZmTOX<6a-}dd2$p}Vq0c?JPt?t#0h3s;S}1Wr ze9B67!DdKd8?qYo5TrhP3nFPNdJ9<Sa-$w)i$_tyeN9hj?w!M05h=&5$Ot#u3|=Uk zcvjwDHIz+$D<v=<2^3bC7Uc@oMd7)H$!Ja!MkzYsk~9YM6@BR}ptB2;dzVOsX3H~S zazR`ER(9wBM5i8;o282)j~=T<k2%UchVc|4$GEM=_#KN*dyKy^Y>>l$#Gv_%{yj%p zMVfRS<5iCTE8+`g8n2C1?)Q;Qwzw!fO+1?;x5#};9|0l)4#=a?s+(|^8Uow%CFvkm zVRYys>>}3Qq<L8L(5(BZeWY+7B&{%Ph92K?oONVJY-^*!gG#z(VGTA~$KRGF@Myk# z>ZGqRATT&`FxEu54SkViVMT8t|EOSHN68|wQjDV<t9^#e@Y^@E-hDuf`06an30s!< zBuhMD&E*UK+L0yNg!|{St?A&Nko}KYlY7kQU$vBqA%!YU%0p|Ke%*0Q-{p5u?TXpq zN!e(f*leqAI3&>R&?nGj+1QLCvnvY1jnZ(;Lt2lru#lX_$`>gue9Y$R!~Ldq&~PQ5 z!r{Xnh4&BJY$g1iqr%}69!*z#*{AwM-J20u<vDN6W~oaA`$Hd*w=;xNY7j7ra%oQJ zq%pD#8jWi{gaxO2NvJfSJsL`_tc7Sd=``4tmGRZh!B7S>sljALX|cR<w_=@_9D<n1 zqo@1jaHw6BYlV-LAI^4rg!3PSvnw9G0&_0W{M>O~ZU_^t%u(QAtFjqJoM+LV5qWRb zLlr%JdLMe2YwF&GJPIA!ndr2UwD3zg-Q-xPiR2)Yr=-{9nw^qvccq|H(hn*YBMtaz zS((t3*@f-$EH%gFqF==;CLe5vT|i5-ddNf9iPUj#Vyx?qSXRSp7YKCu0BH&KSG3r8 z_@6<j^m`121@@Lj`X&LI-38GOt!m<J^;qbfgp}*Ww**VyQ^Sy*DHsutfRdhgAv&b= 
z(AeTnq+{&Z44({RH62n$QVU!9-V4=m8hx1tv1+wHkq3>Du_EF!$ntCYax@vLq>Dsi zHp83e$kReS8jHO3W$!7Wyd1jd92?QGTUsP!k{wI<=3j-Br^QnQQExM($`Ko+XbP-w zv|-HYb`GT^LzNKtH^}}D=h(rHJtqWHw^?-@K-w#-zJ>sWr6bn+U{z+8|7_CB9VQip zVLLAWxukQg_I!v^2PI%E0@5*>k3s_l+6*6_VS?9WC-edOttf_l=G8f@sE39lEv@y8 zJk>i%>y1>;2Ag3yIM8jLF|zw!!9lSymxi)`az=QX$smm)K{T(cG}y=z43U~HY?2|U z-{C+FXYv+H>(D&qT8l3xj+0cB0@(^X>><TISVxivIGOwfae#-oLKf3aBr#{{r8u_F zFiS^=1a&(78pL9~i#_{bncAzTMb4<-+aT5UVY|Jj1dWeDtQT`ohL?r%6D5AqvzQ`n z5d&FlCuUw6jqDI+ly*czaX>gHWnv`nvk^6cwl`5X5kjO<p&6P^QaxL!p3%wLxd<>} zu$_|1(U!<Z*St+C45UnGx=It;N$jf49i%Cck_NUj<EvSJpWMW0&L~6tF`*(8YSX<O z5xU4<f_R0B<t?4Tp{#8VKMT5PE(W1=D3#Uv^ziGRighZ#5ruh1dYJ@Ka+>UO0+@$y zU%p2G8|ItIN2apZeUtf5{n>fnsK7`xHA>n66%xd6sqFJnR<WVkO`7h{eEoK59N8(q zEZq#h_O|-lmxcHb;DuCH=)X)en>G0tcRZhoQ6(PFhN>_lOZ*lC;XO7dAc%KMWv>Pd za=eTlk+%4-?*d8!CJ@~jE1TN%yE|cu_Mn_gJlG3?M!wmDtqy#FALq@U1_txBH4MW^ z7Xv9Z!hUZyG3dDOcr0Zq=Qjh#S?X*}<Eb(JVkyCXnmcSva2U^G*}~u)evg5j3?8nD zVFAW?%>$NejN;eyV2h1ol0WupPox(}B+~3BwW0+osX&g<$wheyU93Z+poJ!eC()8M z<tTF-ljJ+27maCKiI3F%pL7mDg=CY)%nwOUK!U9{Q-~mD*4uI#7TB-wY4)H)0F?0n z1hTthUf2EAfwr!J8cg8>nQZ;ou3-nku_T4+%U98$`3$}apQcixw}6J++)Q?3Y;xZ` zMXn@48Ure$!r0ucy!xfCQ1E(KTA!8#O3#2@3mXpF*Hwy>J+%Bwhdhz@v*V0`mYYce zP6T<hN=4Gi!pKSM>2PAHNjl0l&Gu%-5D!f?>lqU09z;sQ6FZ!iPf5pzv56t^?w<W< z>#vo&JET8*J0v^85lO3^vKE>OwPy^}c^AtgjY(^H#Fu<83C_$t)Z4iZXE@qZC$e23 z9a&na&OECxx=#=T?|5JWh=1zKEGR4v110f7a^P7oO(oKxQD9YQojkE%+l{25`;xO1 zH-d<gv532u;v&N#Lis(@KvKpZ45viViLRJIrg*7|{3*5beZeWTq`uU*M6Zl}J1J#2 zI~O`!)5t=?I=Ua!|1;ABJ?kIV!yJXE+0beBLrsBw<FxeGZM0Q|15JM~X(}S96MYOw z(WlujM?40-h>m>Ohyf{Yj24q!c14$cP?0&QA6iR1+H9axt}!$I1noRH!b&dg@&J@| za1-g^P#d#F))MB%@+yoTmk`{x2;_lg<3>d5HxN_q>m$$wYT2W(o^dOXE&5Arm4c|< z&~x`N`B0o8O4DgA=WNX#1#w6uwT+83`xcuV{(8960qg_^a>gTyweOu1bRtO(YzCU_ z<F7UM6!NuxCz($~f_V`Z8PqD&()XASTS<vl>I}|4I8Hj~+qP_8kZd+75F<|!eNz>E z<PQOi5l&juF8UFP{(z}oDW#&M=v|k65W}wCOh@gc{U}m}b}7_+so+^|AbI*CIVo?P zD(pnUUqe{R8LhG~&NyisSo>(dmbx~PzF=}hE4)r}ACF?5orBG(ztU7EEgl1o0=rOP 
zAN<qoC$?G+hVMjV>`e&q1@pAj1OHailCUwn3L(PMn$3VIN1-Ljs^B}%f(a8w(+Er` z@NhbJ!r5pA$y#+0D7DS7dLKNZ*27)njjCjXkHY|<T`Nzqo9I4f$2#{g*Sld%Q^~UF zw4IF!JC2U<c^D`MwqkCF3SwEfoNk47BhnSDs@GDDQ<}Cbu|(}Mf(q>&SMrsGS5i4D zw0Dtqt@b3C&h}h&t+oafH4`GN!C{iJoiqa;tLiC&W`pWhXcG`oY0&<tD4ok*1|K!M zhCZ?xro&_D>kc8V#6^+wgPhG!uoou+%1hQRdsVUeF8g|Y-V1W3AjJL{loTM36_dMc z4iUsNHp6RRi$m<wWy4UG_)V7GIq7wFD6*@=D5NK?XSXB&)F(nsKdNY=qE<*`x4VX< zMI<6UVXR!&2jQqX#$dQL*9mi>BG8Efqm;n!)Ii57KUBYYe*6jOnYDM>ux{P~I}q0V ztK2t+LwB*!Sp~Z+u`b(cSQpBcb`$!(Gl=6#`$juaEwqyFIBGv#FG`UHz>gSOs)kJa zxK$T`G=!8Z#r*`UcJqDa5Y<gnz<Ngc4Sui(%Cu4gLA+{t;I+7;<*<ho=K_6Z|MNb2 z+IlA705mvOrUY9=Eycj-vKp*(VoEt#oQOm{@3VDL(cLa$HVBr(v5+f;9Hpnz?=SYz zBhjWWDUedKMda^c%~4_eGVk3U(Qj!yeuQj#m7P2aE=p`NI}p=z%vPDHm6@j`cT@>2 zN#+n2{r*2XG!I1y*2yAB7PUVmHUH8s;sFuQ<|v4)ltpOmBaymaSpV+Nc&wj9GgV)H z1)8c&t94;(x{nIp0h2*xybXsfV@lutu^LWqy-CsX?_)G}Ff#idkzu7M4yo2OH3iXd zg>3$!*I&?K9iYQzi-EPlY*>$O)AQvmpbiFT#C?^dD%@IW#T!knvwt&IYn$Oqq=G61 zq352L6LNbXdH*CQGXefAOZX$MIUEV(D50$s*JhZDXvirknElWrPE*D_WBvGlIkA}7 zu;JTpqgZqYQQnAiX}Z`d{v}?qJg{Mi%*;i16oeO{2eQLW@fXso)}jtl98PFt&xPGC z6N*BPM>aMFfhHl0Z0mNgcVi8_-#e@-Hi^HwiTxFu*yr!hX!41zHp2~2l$u~O)PqD- zJfNAlSsvN2)}4~dhEW??sU$-DX({m<o7yw6)2p9iHg-~GBaKxIh6#uaTV6+2-P6VM zf&(ls;*~7gKmW2B9(~HL^_;=4X<?ahf;kHPNb|`VBksLFF=^}tOSK0jb5vddUH>io zm9}s%Ib5BkSES$2&24Gqx643hJ&OQLup!_f++?3^&GkSvQZPi)D_|C@F*3J!7v>({ zjemV3>k~hX-`v7B#T)fM*rR`~ZgoNh4pU3FvorByhxoK$*TcYx63TCx{4EQeG$w<* zt^{vK@-Xl({GODaFwB;t9)j&`$t9d1oByD&A@V?RwWMfD9BhU^wz0VhT|7RdAiEQS zh-_>To42tK6GFQ#loz{B(snl(H--jE8JM-oxncYjIyXEL>})V~<@~|^Oz?6)H-u7` z4W4>w7VDVUt%H7>+-;p`YP|RzcAxHSSfaNc6qHe25%RPdE_}jFiP5<mZ&LxT36|9B zFf>SiV~asYI9UZx>HBMxncH>JRpn)33W_-U6UdW+VgmKQxT2nB3?_iR?R+yQEgQpH z5)ID`BcTdyH8mi0BCRVGx_#uW(2*4fa6o~B06ARLeM}r%#LACZMz4`+zd%b=^}k|} z&1u=zh@K=N>be1%Rv}}sx+|<MfM%GBBD@v9-kx3V)tNtal-YXq?X|&9j<*JIi1sA9 z!oW7Ol_s2&WqG10ag48SdW?{q@>?Y?>DC-O>G2g-+<TJytt060^JEYgwZ9x;-}esV z4<2EB(&Da*d6XO#`44Pca<8D6vmszJbpD8KO^PtS@{zK<qUl84mRq!j+Dtl|VLkX3 
zUF9uyD=Cz}Rn0t;d+-}?vHr>Z`Ko(td2%1~o)1t0-IrLiXmW1)keXfYZNVyN_SZp! zl>RlSjcJM(bs|J@tAz^6vZpZUy>T7ag08h0mJrVZLwSW=v!nF>W!zj`qa*rcS(dbD zwXfCvPh!vX@zh*q!}~;=A&g-xP<LykUW#v=;)_&#GZden;ww^oZi;V#;(Pinr6QK1 zu@%NGg|V3!R;~XbKuR4l_y+t#I&iTHufrutC41b**R!@6;m}@UNytIk_$}+467Bl? zUF?^Bh`#g=o0Jl6mXmBhVYL~RzPW&@@bi6HYs<s#xl3-5-`*!J<izjj2OO~kxkVK{ z!<tEXQCpjOin@;=Avf#mhyYd)u6!`R+-R>cya}G#tk5i8OAQO?DMzl8EgBINTA>XJ zqmi%Ghp}f;dk3FaW+T~rY=*4&Xe>5j)Etui$?vgssh#=Ek?gC~Q2s#%`!Ur|_&x*4 zAm>!!u+2`e24E?LU2OoOBGpvH%M#DxZb$YboNhxpD}kNlz#p3V_-j(|c{ZT$KxeTA z(X4&TYS_uXJ)H9u-$dEx!6e+{xlo6Qt~Nu?7UtY<7azBi9qgCFuiMTZ^vmSGyw1{| z>1=*Fikc0!tP;#C>rgxC78qDSbe6ueu5)DMnuvg<Hkw=dGE%y5Yq<0gHnD?Vb~J&t zm#}eQ;Uo+8`5PFGCujrrf*@A7l_V&e>kBIq%b(Cu=^LA&_#JRz|NgDKx?@Q<Bry$1 zDA%i%NP*?1d4(1H2~CMETstNpd0Vw$X-%C!h!{K=h|}p))*~&D7e=z7Y2Icx5(oG6 zcFF_itTWO&<tUgAbFIp;C>#tk!#K#LS9us|Svez3!yep(Sqdvl+rf*>;YR6;79{)M z%+3GKROH;P#8WT$7H?vIrlrM14Wn|>Kvym7*<!UUjVmrRfFQ04Ps7}&EjacRo17l) ze--6JUzj@WL(6i~DW$l9mcGLG?1S{)aSfXwUZI_817H+&wHdw!C)Ht|ABNo!aOnaz zHhrHxVv|W~!zSk5KTea%()&mHU3-fD&2_>7*JgO`Z8?RK7)#$jo^s66e{$F<PpbVj z!MfNZ-8#epx7kNCDkiyQS4^@myt57C?CK3>KVWL$>T6VF(qxK|4bJ9bmOWJvT1o2L zY}tTZ{;CJNG$4h4b~N+Mh~eW#0y?sh8Akre2v(Buo~D-hW+w7q|H^VRdnSv&VBYtq zW4eEf%@v1Yt5*1t(p47-trU2gmK*If2nrRi;4Fba?WkYa{>(7VDfUBV7ztP+GBMIP z%cG|y5w+T0pfUb1D|f!hf(8Z!yR|cxyb0yxlI79k5-&1$7;CjH4>>krU=lyIfvp?Z zji2;0J3KI>$GJCX7U;`<rToZvNK5P%m3d5N4h<k>jZDDOODti~K7Psr_Q#+=_swq9 z5u;wfKY@>bVtzu6<|w-^Oy?_mvw>M5{PY2AW>z>q?0dF4%P^)6wX4`f)oQWX-CJG< zg4dTn1l-dO`H3cAIrbXvYk3_#EA5kyngi@Bw3hprGXtC|v`b{dtwQ?}5tlIu?!M0+ zW)%i+Z=^(>aQh(YHiOPeIgM3$a4qE+xVn*<2YYuM^d0fPGtntX3PO<d4cj`nmtiqY z;1aKWv^Gl4SY~Ur&v~&sgP-MV8rjex8~M(S?B<Z^{K8+@$n2;=$K)*mmusAKU^w-V z2xG0hJ$PG^Ni7NaH;EWU0_GV`a{f2|lNxp`JIq`orzIftP$_CIM4gk4DZE@t6W%Ws zFHQoS^oc@`qadrc^#=g<6+g?qJa8!IUD;rXA_+)~N!Yq@+}942uvPmNi6;+gF?E2w zoi;NUOETU)X{FIM?1da(O$~bmDt-<7C?}o2+sJ;;iRJ(Lo&^l;$^Y>q8#Xiu9Xn@e z0{(q(sDQzEb7(Se<5`zs)4h#aqzt9oApMGigQ^D2V_Syh!-(pT`?{|2N1P`VTi>n` 
z?Bm?7#!tYJZGEqxaMDKjDtwxkNDKtFx)~$bU%5%iBNU;Mcu4?7hQ6+_j6@!H#KJ*# zzBq8(?R*ikN4E2Q&hm%9tJ%-ABf{9)5n256ud?eS2C&W}7j$^)ieWRvN62rvcX(Mz zC#m0cwsWK(A9|gg9+@5;Pdl-t8oPCa$z6(2e0lJdxWb~gxIo@4<k1sYz>-Jx<X!UF z>`~z{H(N0se+(85w`zswPmB}n+>SImq8{PjV`Qg08BLwyFNkOH+Jcu##<Q<RMe?hs z?fz|44~@V4vI)-x&>v{NI_SJ=^Ns$Fy*;ise{(APc3c7f**%u}Y$$*AD$9FzQ}WAD zb#X*Yl7@q*<;c*}bPT;5Z!15cU-3Su6zvfh1@x8;pkb%a7F%(h$JVpn<GVR`LL|g( zxyojZpXK*mBqp$8$@iCDyh`a|O5^3oRd#!PCubY1Nr-yzb>=?d27m4qc4tC3ziS=y zop>;ydAZykE;hrz%4MoBdGU9qshE>j*0Mh*X8D|iwXJmVI2Uj%YhgvxH#>q{u$GOO zRLRf3%zmEqBLDFSHfeI_ZeLi)q}qtR*kChM!vlMh2AY`Vl&$F`MsvzhaZ)1&aUf%# zPCn)~v`o&>-DbG7j4hes%_qFZUY}AF`V@QHeo_UH&zEapz)K$8VDfHuYvXNM!y@yB zMt0tVlpj4!*JpeOGvu>2!-=J|8EH%WrP@nvO)7nNiEYaB4gC?Erf94UFfaN^yWvCY zt@=mI_Y%93m)?0YIPuqTf=g@<YWf-_iN8jjf<1^!>HFbrmNwPEv)gR!)NY;bU!+kl z)dQ4HZypQf@f)^bYG}eHnJJs_cFSUD2m}tuL(3dRF(@2yU;IbR{vx|QHB#4K=E&3F z+yxdkEvf$#=#gz!(+bdtpGucO9fXvN#X9+Z_gg7423y6cZ1D(G@5gsB3t&b*CZs%? z=O%r1fxSJgzvrK3G%YMVoOM<mw21y8tYw;yN8?QlFfB*3>P&_1>`msA-^2f1Vp@*i z^~~V=N+6c0;$xz&>?WI*pAtP1;c`->jx0CQ;!$myAsTv6JnITd4L5*x-&7a^Z?Y5l z867S+Q-ZZlDHj*5Xm)^oqnQO4M0qSnkq|Re$o~VAt+JVoFX+y1Ucy!ugz+iOY)3(o z@6^SJiK`@1mUAc@LDoVqEO~8u5B|Yo)^mEiQz=ws94VHt+0*-t8ajcpYPFSkNbj9% z8wzC3AAwg7Fon@w5!6`6C8%*L$w6%`UWCTu<ExRpGsf*U#A`l{Jw||?-)gwo#GXzc z8W#<&#mGta?Ws1cMr)RzSTHZ8@4_aIoA<mRUe6Z)$U6K)i#Hv!o7j><Kg}ifMqx;= zO=sJRBJBo=bd>SXAEiZq`b%#sT&d(ALH;Z=eir0+ufI=Pme@e=AYS&sa-VMFn!Tl5 zD89QBc6Zb8OZKt6J%hr^w`1?Cf_9OKgF;1S72Q8XOCN!uE=UQd*~vX7e*QX^_C**! 
zeBJKpUu0|CcAup7Jtc`)M%Y(h_UEUcWSYG}_75=iS=ioL_O8(D*qXgHe5Vs^Y}E?; zcQHeE->53p*gtDP$aDKc1N;wQTeci`qGt!W>H4pq2>JI=gle0?)5Jd5pB(dpY+9|K zhpeol>4M5uzwbG;U13H_Q;8oo9&c>=yJVql^O);_5PP3+q#b+U2m6&dAUYh}!ry&w z_m>CX;Q5G+Z1|xd-QS1XE4J(~TXZOl>7VPW&%TY3FOKw}%bhyOn+<xd$ob4IOqn`~ z&t=umB@AdeigQr>wRBt{(A`B!pcx8_-dc*%War<2vNjA${$NXLBWId1SoCT{>sw5d z(Li(e$p<KHkMCLU8U2Hrp^6~<CxmQ3i0x=o5&l&Ua>I_W^)sgFt{rMG;*3MgVP+UV z?;RF3^LbYux;7TdE>hwl_U_DyzO`qt_tSY5SG%XaZjUtXYvw&`3t#Xx+dnIu|8^E@ zoR!$&omujf=|dK?G|YAXp{&dEz4!wg*_7ua_{Oi;^5+Hj$zP$RF_;hHVrk`9?7Qbv zdL8>zwzLeVSt#Tm$8m0e{2Xi#M0%1qD)RkX^nN5F6kgo))M|5@SkCPDK;ZyR3+SNt z4`N#p6`2j7o9;o=k}YmKXFKqp&0<xvGu^vj<-!{@ye_dBM$TfcFU06m2Z2TN_gOS= z@V9Kx3&xo`@Z`qAfmPy2Amykj2n*!hffOGT^!XWbBVlt&28DZF93~MSiSTUkI?@*o zKhUOIZr~Y&VaGQV$?as$^Vi1pKc3<1gl7#VH`Hk00apJ)_qcoem8p9csBX8|FKFsc zPYKLK<av|G%q89t4nM`zjp+3ai=E@gZyLz5=lFG>(;f?h28sHixMD5lY>Ap|PP>3~ zbst+hr#nC94OTN}c$bb}q2+Kh(q@!`H3sY4E({c$o1eqhq6D$cv%OgETnGO1>fPh! z9?^7q^JfeQt>juyqd9OkeAqd?USvK0)s+v*W)uI_-_rml$t=fe1eqOs*^YntYkp^^ z{uS;QvlkK2{nZGV3c56fGv!hTZ)eWMF};ez<dz-MHWcg{AW*Uw!fq51a@FgQs3qd@ zm&ik~biqYS_Zt`rKCG<R+q?T0%5ZoYq0+w;$rrw4JB!VH)?n6i-Zg%97IQa=z5RYv ziYQ_Y!jUZcXxr%;R}|vq<JzDagxd_;r%|SXXBq(&edp|9^`;8<-Mb-dI~8A@Q4#pw zZuVSB0)JvEd%I*bzicYITap|(XsTQZJU>4xU5BEOCEk_CTqp49kpcoR0-dL_(etCt zr#4YN<oBnC;<j9mc_Rw`lv*sQ_v>*)-Gkuf`81<o7ym7f5`b9>stL#pu^}t|n$2MR zH+m0=Ksu={@GUU1>54rYXVNO_UK^cn!y1IDcx3n5i|#@FkVn@2i`f-{sZ<yUr;#lf z*KU@Go~K##i@`B#4r4^z!XY)?avFVlR$9xbZ^<B35{c{AaJo>6Mpk0q!ozIAi_b-_ zscskOmaP4wn&ak&Qnf#rf`VJ$k?a5|ftRb9<AP^nqBAL9Ikg^in~NATl^C$%1j{it zsWFCEGwXu9n0fy*#&?J0JP_lx$&^Q1jG9Bt@1<wV;^$Z-tyAojr8N+1<l^Z<{vFKw zKTjcdOKOWe65U$vrDIw3kW${o#Kg+rV5^0ZG~DvYYf(=K*O0L2)}q{0I7}j+ZimAB zQyTxOe7W%s{J7k>!`Y5DuT~ab-fb7V`jUy~2eI6jGko$sBUR7hYLj|_YEz+(e9E@J z{5wDAQ?_B@sIb$?Fvr=E9uOiu#ulo`UjS9h`EVHFwcD9~Q6ir=fel<V(%Y4^i;B!f zS;Ocf<kRELy~o*(MgGw|t_iWv3_w)r={D>+NlkCrQ^~QI6^>V@oE)J@ju3PJ%9vP( z_3t611u>@)7a(9^-~L$oXdCOXIKr(a5edj~9FDWR#Z&czNcYdIqw@Mnowu?2#a+7Q 
zU<tqw=~QT0e95oGL&zUpC%Q`4(nuq=nQBB=`F39V{pxPx5*JMedZ0(E!|AqwOqOc) z<8Pl~<IMf|ejl?9W+R__h3z*7@NrjIgSm@l>1(B4crRlpjn??H{-t~Pr=wVFX|$nq z43eU<B;qkP6cSYb`;S=i(hz=59GkebH=p<+Temda-T$M1n*XB@S?$snd}A@#>BL2+ zEz$l+X@Qf|Q&BHv#<QGdWl0A<MvI_~F8i>Z;7T6ed)kN(U~MpYDR-&+p?svJTLoS+ zzlVJnBb}4JD#Ig2!)|YjVAp5PmiB%05eAt64NzW&v&F+0OFp|W)h-YNHvs-P;nNdA z9%(d<SjsjmzhJ&<Ma{@?lo2kZi&D1uSGL$BtwlXB!;=F?B0jzEEXUq~r9gLQCaq)! z?CZ*-Dt#H#P=Mhi0^||kITQrhXkMlO8WWd^V>N7$CC*|Qdb*Pmk)ZGhW)j$|2;{X3 z{3;97kpP9SSHg|$;b-OWt!?2iB0L)tzCwEp0o0tK$nOevQ!Fm{+MrEt7^PF8-2frH z@)5PGcXDy^+e7Vguy9BC3Ps0qIW0Tox(Pca@?I-P*1bR>VvEgCmc!m$k)_kflIKhb zHbeIu*0RE@i?oB{o-=i{8FUl}rJ(aS9yFq4qHZ68ZH7D9EUF@uU;PprR?*Wv`<;JA zOT#AiYK6ex+r%0whPZ#c>7S6qO|0k21$@{hwryod=muKCLqu&QutK+zd>y6cx7)|S zg16c2mC<~+l`KG9!21kluZ!6M-LO3iQH}JtIgM7t?X`}lPs=fHE@4l_cz$6Pi(eJN zUmDEDuNug&8qBt=O69vBWR0r=_?wH_(^U!l!z`9y?U(Q{3!}dUM(~!U$=`o3@66)M zNZmT@AI8gI(4+JmCn5fflvKhtt&V0FtR48BFSB2*8~EePS?Q}u{M~_U|Eq%nT%lCR zg`*o1s5852?bO1ZS;zuccjLb}z_M2R_Yt5Fp-~O^rYR5KRM59Wv<2(~8<(H|UZ7<Q zkU!pi<X~i<k<#WN|9*Vc0`|jdllkixk()TgL;6A?e5CCPVU*reh)C&8g@~2b$b_p@ zp)k^<QiTwt7ZoB`nxhcor0EKgCrwm{8PZ6FNRkFCM3I#KqFK&;fs`b(#8R5g(F(nS zXrn?`678kX8;EvN=*>hwo==I0TZwK_XpBZq`dOi?h;C5mYNAgobRE&=YQ<Aeo;?cP zK=f9HZX|l6LQ6zfDs&6cWeVL&^a6z@gRV4Nq3Lmjl&8?L6>*e8lg&{Q6xs~aJ}0Fr z9@#J%tI)F5FkGRNNXAE@(};FcXfi%ZoI;b)QMy|qw?q-ql0q*a`a6X#CAwarR}fuO zBDcR-NuDYtU<1+H6?z8In-zL1(Q6fYC($bux{ByU3SCWfkwVuIU7*nQM2}PG%|vG_ zbc4nWPnzOsq<{p4mWYm2=oX>_6uOmY4~6EiY)keE?L>5|Nv^#I(Ki*^hv-IyHWGbF zp(BZ|18v5@i6zfLB_N6DoeE9Y9@6^?EfBpyp>v5|rO@MuE>-9}qD=}tgXkFwT}1Rm zg<e2(u0ofBHcJ_bX9Wc$DRd>#(F(nRXrn@JCfZA(w-W87&^w8KJWsBD711q3L&=<g z8*j1~7s`*_=_!xsETrhZXSmJKN+%(O2ZuxOyj*utVO7bA4DkTxgu?ky;fw>vrf}9O zoIG&;RyfP%vC6eZbD83MRtYyE{AGnRNZ~|+Q>1Vb6%Nd`x<Z8$rf`zLnW%6)6;2vB zBNUFUn5u}2;S`;qaBeA_8Q_dDE39h@s|c)Yg>ypTEC8p!!r3cx<kbeuM1{FUVbZb# zW~9QjDok2`!1Py`3y7&UuX+)WMrNO_)4S{0KO5Csud!3>qWEuL+ugcu7tinBxqI7& zFbzL{A*+95K-khbP#5<=qgBi{n-#Oodtj($o7h)buQ$6!eLonVMU_JX!hU;7(&I>u 
zy*$Z^Q&P%tQzXpkrm*b%YFK*r3}!3e4B^)dW}m(pue)h&Pv|8pyY=Ra!PR)}Vfj(t zjE~Ar3i*fe;-oq&zgn=X8>s)X%(;<S-g?02u3}3z=J3v|*qM#J`J~Rw;qBu*C+<G` z_BoB_6}IV}5I$!yJN!;ZUg*WXeJ9PyeKBq0TrU>1*~jVEMdWEr+}&@pR>MCCVVUo~ z&L<YK@831@&kSI;cT@N+dAn1$bm4U~%i4=~&%%n{57#y5QA1KGJ*Aj3_U`+^QLEyx zrDudE5ffU(-?8$3M|Qe6#BRn}Qnk~~+XOqEH`9F3J8q9fIq<Inst5kp&~!NP&rj)V z2kl_qod2-Z-F+LjI&W6`fjvJ_&z64>k`&P%Q|hB6CMXuWIw~Mcb^%hTn#sduZJ9`~ zEMwO{=<Rk7$|vpJqBeXk3;i&r>)Q|sNs{KvlCnx`2NF7I(_sfvvG8%$4IJ@*-N+Vv zn4h+EBiWYh8`JM)I69T;%XZUbURxpSp19qch<7m6G!U%Xv5-uD)@!in)|e%_>Gf-m z+6-^SGXIZ!`3148*GIAZy|>tmkG}3Rum>Gw8%Y7hCm5&z9%L%-rjd&8Tf}k0yFm4e z$vjF7U*y;fuXbm7TYK}z-ehlV4dLUuvx8eF^S%67z{eqcM?dz=$C3QsZ?KskNASI( zS>?wDe(xLXqmP&KJ>FoR+d^I5il)xem-Vo*zT5m*$tQ2}IU6X1-xtLax1Hr@MltR7 zsJQ6$atWZC<54#i16E!)r;MoMHp9E!<o<TG!+YY4l&9vZW+yzx?8>HXPxMP%)ZX%g zLE^P})dL*84Ti34*Y*HDxEnjS-OwizK}`z~K&j_KTYv6#%FX_;+`efh*YwiGoS(id z4kFTlg_L9fnpdh{$bvrg=ieL6Qa(+OD?)(9h#gxNaE8FYnST$h`yb}to$JW_%lkO8 zlb;%U{RG`l8Hmz-kVr@wP0Jmjrjh;kM};fx`$ur#%dr0%-8Ys}X|Kp1$V!*l(9Z;8 zuXds4{utBUQ*=Amq5z$Re0u!3buIhyvtaZ6m)f%|1KDPVq9@xv0=^P6jv1Ri3U8~@ zFiZy&A`j)b6HE4HL42s+`=`FVTplYYr3oL>dJ|K&0E5*U5rfR=XqgBbi<E{ST7DwQ z6Hjt5&E+=;4u>CUrSK2gr#k|B@%l16;zcdaNe@3DaW(EO*`vm7gh!1_QO|#XsF6`0 zsq!!joRi*E<?)*FoMin^`6VB`wzE6$-jT)bOyh5EV8uH}_x_~=WlERxc(&j)g3_lq ziugQq0T%8wd=xqvf2BawjV@4ZaVIoFvd&`#d%ANbA2XKC+_iu|6~=Dt>dSAR&q6=X z;?u*}3!g{u{22Db=e^j;&pYr{qh*pTc-s#-ckhBNV%`)X{{dPn7&JZww)}>B#izJe zwBe+i@i$pP9Bm~jmcBnsWj79W4(=UJViZsjfOlcA?I%K;Q3uQ55*AsV=x$z(WM>Nb zc!V4C;%fGMbp+pIDtoOu(zrZKj{OA5y4leUm$zhrI-o}1U}9IR!y}8qp|Lx-J)}7Y z<ECeduI5!F+*Qawk7;r6Ro3}%s3B4n=GuiN>pV8;a8G{3tE}>HAisM)+i}>t>x2-F zyWik{A?%?oBi?PF3r(3<P;wlk^g~3qUtuBh^{^SjL)h)Zk^D(3^Enb=|FaS1{CZr@ zXCE;Hbt36%JX$~A^d*J|_O)B!;PEWhq<kwgAITX>nr5YCp&M5cg0qZ=`sB4AQb9!5 z4C@R{28MEEvz={#T7+pRb|#+H6_C8mU<c8A1MqIrP4PbpX2C~0@t>?>Nk{wfvsSSM zM~%ixvZG;-p%30n^MrhQmXNRt=A{bzdW$w<6{|Y>9G^aqh14W>zXv<Nx~060D%0Iy zIWVQ~Oc71F7~2Z^T(u*Q7ukxMUZE?4<>JDWT^fjL%BF0b=nT>uNDG&M#YL>Krjzd3 
z_LQYx=Q8_a8N5d&8+^=<zq^_h9y9VMgV>5=Q@YKZO9hZOk$X@cn&|aKwiuFA{<FU9 z2I&2L@eh6E@jmlB9_+DTF0K4e()DX=g_OkNxh(B?q@kKlzVtR${sIioFP)`7bJ*hJ zas3|5!FVda*QRP{ufUHU&7zjckcNCt4W)HMnvx^z*tpGqN$Rze2vd<XeGa>IJhAWc zgR;<EIi+e79xY;RNGwtkE-@+}?M+pL#;Zb`6rmw=*p%8h{_<yx)yDF_yugmtcGY~y z{;19LH@!gSV<*&2`kUe+$A!(sNO-~vY*^j!DQgv`zD&TagpiNVUBm0}F>IYvP6hsm zgtlomnjskz-&8_v)BQ7>?Q0&1LH}{USLTCBp+bWo)zK90j_@4%B=wwN^>UCVOk%dW z(8#&Ia!0yj?Uur3Q#W99l9#n-htR0A;q!k)&SqIB0$qNX1v@!;BUsUiXw4$__6ciY z{~V|+vxM>sC63Z$sC3N}1<S+67YASwy7*Gt-;XH2Cg?L7<S+HIVJb0n^`T7Uu9};y zy$m_UoPP@g1_h21ERP!Je?@xXHE^UOvsmRx|ELd;Ka4D})pWw3tCMoz$1BWlq*ZMJ z3zdL`S?t@BiGG=2D{lv|iuBFW?|`Wof7(LYRm&nyb>n-kV#81M_nG+%8DEwA*xU^3 zApGSOSgJ?<!nU3A?Xd@=GW`(!B?s$RSb6YKFP?8*29s`ZF=2a3vu3tsB2Age?wsmB zVE|s_(}v(CY!IT5<+UaL&DW#|$XSxKC9cw=85BcD?TqUeyxT)|(7&N!IwRq1@fh-< z=VMXuHIII25`FC;egmO7KeNiyKA8<L6y9$T{P8LEb-{K{fYMeHA-&d~s`L>^xt-=i z65nn@Kwst#H3BK&0U%QH{F&W49mIdLjOost@tgk~6$rWELoSe2L^ZpJ+Am*Zch8Ip zassc-)V#J37MG*V1%fy_4X@%GrQNLXY<!2`R-l(YN0>5xq>l>OwzIKrCE!@6gknLk zD`dZ&{TIK%i;X|$&tL4oUOX4nvH2Cm`f@r_4@HiU>OY-rJ(udWXgW2`gQq2KQWi!u zM$)Y5to2+MKG}<f)c1(piQ5rzr2Jy$$TC=sTM8&+QeZ4Ye2loQfF_fK<@er^G?Pxc zP{2y-)1z-pK%=xjr1?hi<+VMeiTPb#xY5O@q%%$IR(%}5PhuhG!(-lBqL_96`ytK6 zY5(IPO?V?KJ|E5Zk=W+*sU3~j&E?P2=$SDFyYyRs_Q!cc#HdknR$2i!Za#{}`p*kT zDRi2A!Nz_Z(UGNI@G)<PzKLMmBy(vQSkR3ygn6P(Dzty00NC>d?DrPj!il;cWd>eF z<6iy2Q`|rpuIpuy$fLH1Q5eFu2>KHTwun6Z3-hr2OT&j3%HIhQb8@AXXqHMk0Zux4 zS`J}(7ww}gUzH+z&g7~rbT6a1|3fI2{>%uBbyD^O_Vj{JO5A^^LIV4s7vd8iHEwKM z21Yq9+ULCJBu(AQhF|pM|H=n+#C?~p8%*#v!zw*nc`<<hlGt93G;yT2@>%*fzK(80 zFO<c3DVeoi%;NnODLI78XH&m1*yl>@7Ny8mUUG32aInLtDJP}-&$4$eh434vu_Kpa zeSd#h>1J5(vqbx9#d_}|*={n|Z+!WrX>!)GMi4j+SqJe?rXuZ5no9Q4H-pTj_<Wmw zud29N#m`i%Q}J6BB^4j5*a6Rr>DOJw!75Hvu~@|wD!#Aceg)0kS(WjFinmqd@M4;N z9x6tv*hj_FU6k;HDt@No+BQ_>wyIdAVvUORGQ!yPlghZOqC>cnu%C+b2^RfEsW?ML z{K+br|GJ8ws92?9y^4~G9ubPXQN<)e<j)BzV~mOgD$Y@HiHd7g+^phu6_2P`ui_6X z{;uL(6}iqz0XnD{3Z(qG?kXcq#i1(Zsra&rYgPP2#cCC=s`$H#oZ1C`Du${Ut758( 
z*<Cbgls`9FWz11=nTkKCC=21A*=iS!QE@?-27e7MBJgG(E{*CIo}-{NLBa1TfNMfJ z>h?4$jMFJVMCsz)_9yDFN^?1I_4sQW^$&OVxiZw*oc36eCIyFnjgJ&Rt<&@?RsA%n z=ohQ{u{g=U`iF`<Ry+Aur226Jl7B|k-%<5fKTzbMTF}qT<*E#*ILc4;`>Ec_RwaVJ z>Nl!>Ix5qz{=O0(q<Fa%sy|ruSKm{@jjBIa^@qs*ksOzIR}sJ&Q~sr?eyBV0yb z^e8{o|5)|5-cjP)RDZ+nD>uyg%$(`-=1qNWdjGi#M$LR-`rH!JSS5TQH<dHN&E||; zCRdFA=E51dncN)wn~jhXQ2E>pZaP=Id-R<Q%}DsCa?=oFIwVYxHzK?kK1zvrMRF?n zpy%N~BT|?rr}rG_E?fj+&I9&=PoZ%ZI(T;?oAI|N>-qO@fvFJqk8HUqTmcd&=K66H zcN^~MdC%3KzB#h|L~Q>rvy!XamSldWVTrBtLVr%e%_yqXAB{g9uBB0_<1EliWIsFE z8W&4q8iaHV;kb}cj&u1@5!3j(aK0uT=Ud>$`Hphsd=ua=HdM2K2S(l}pY?k%IJqru zX%xrN^NH?iTu;3{r%!O?^l`35Z8A@~bDU!jEHWg+G*1J1*lRd@?6;m?4xCqk6X%5p zUU44n5k7^9?;xBUf#MWB@G-Yl>To>AjZ5ITX=;vzpcnp|U*lrWxg<DnE^!V;|GU!E zCUV?)xVFTdby`lB(23J&yf}v<j%zFRj$T-Ad*j6d@&U3#4~O?^3;XLCj_Z)lac`+* z@a^u*1s2qBfeA;sK+V^jd$n7UYc9w6<YVS)*|bOf+oPu7BJL%)FV(2d)E52!$@T%l zp1^5P?<WJ<)kl7E&B?n);J9F1EM8Khdj4BY+ghh^Fvk_a{iVv3gWe9;;U^@3pJzvV zt|OYeW1L=YZXP_hY*-E9+TsiWy$nu`!{$BqZ7&18zsnuUaWQIo|6TmrVI21mE*Ig1 zzA>(xf5C9he^f5#pD>K`*9_*o>N`|>prxsG(og{39?o2l=?gBXU=J6Ru$v3QwXuiV zdPQ0^g#(|-ImGd~N`)Qeu5IgzWn(z*3S6;TAm3mo&LhEt^C)(!cd2$Na?EqcrHnAv z<#80(7Ev0{acl8>qb;JR15cT>&x(;#IBq7K^(Xe`lWy$0CtX?K(;gk#qSw#G9783x z{=_Cf?d8+PK5ybUyAonkYjfQC4H`{=0~Mg&o_+GvuYGR*z6_Jb!f}UH!CmkVg8$E- z*so8!jZ)?y=3W8j-Y93xK_||4grVBg(}DBEeDsXdRl63s<hAANSix~oaNBMu`D(l{ ztq|7>alH`NYlKG;+x#HFL!)=#^aZY%q%NF(gliGYvxWE3__%OBkoJMJ52StKls@v5 zg`FYnBn!9Y=Dn8VR>HmYBm2-6)D0cPIgDuQ9~aK0*r9zc?}ADC5}cb{F!qzJGnSBb zE*cr)G`+|-OB3wi!c(`ddqtBew^kj`mOS-tZ|$sVOhmY?KdpDrXhLNXqd$psZ!43v z4&PkBwY9u&h!fXgl%DI5;Ldd@mS<$1PyYI1jixheeBibIphgqfp8oA~c=cJ&aiedn zzo*dz4W>kSv?TrIp#-=hwORL&%msYWt@{7Jy5k09x?H60SU;K9tT)T;h*c5m60M9H zHwVrQ9pE-X8KAy#_MA~u!TA>X#L`%a#X}*5=d5YxK^6Gl^iw0vC0~5P>}N{Q_$9<^ z{4}wewza1S>VhWNjhho}^5lXGJh)&qMsTrDi+7{k!MVP@bzEoDW3F?-Bd&A8L#StW zxqm{HDO!oMhelncs_<8G+kQeA=oW0$sv1uG72s|C3x5>%|0(tVPpSXEsnlbG#_#{X zQosIX2Tg-Vfo9;`;uofxRHbq3`dmlN;~pZHJJmE}#Pk>D&YwPT=IrSsi>Bv~<uWxR 
zO~v?M78J@pvc!P7FHD;`XZqgJPMU@GU%G2HJLcrRt0<-aM-YdIse6ZdYksmf7x5bT z^ns7QWt^d6fr@!5PE>K6in%HZDyFFztD;dwFBP3sv{#W+FqC_&Q8I2-@ve$3D&Abb z#$SU!0i$}WRotWEb`{&Ay|4N=s<>9g6)KjhSfpZ}isMuqrJ|r>tb#meT)#9x(<#|U z<$I{;q#~zcE2mVaMMX))hV_RLz2|1tyF$eUD(0y;PQ_dmh4l{tG{ej-Hl_C)RIF36 zO2s5qe5>l;pyCP@i&V^0QBW~f#c&k^RCH3Y@u{4z16N1HdgBaD0BiYu*!ponnuZw8 zUa4xOlY$;D3dZ(To8TD*?Hzy^a!1!2jhZx<wrIS=-dRRXXN~g$H$`s7`Zq!}bIm{F z@8O;HfC7XQXg%l+KpWgn&|C+OOZVcqXzV5#z!+FstHF;2z6F;AdL!}RiXcOHyA$j% zpj&{CaVTwt4B-M?p$Om;E`uZa&4D<;9)c&WBO(Le#-S$z^k(24xKhww!8mop5uFPx zROwRSQI)O%-iKQO8TxzTJ&jOg3>q#0ctfS>3Ts{{bV<l;2VQ_H?P$hdj|bryhQo?K zG6Rl=qs+zuAE@+WV223&>3HzHfZO4y287c(E3H)ud_I!nzJttc;9fX;eBoLR{0Ytj z^v^(l9K(D-2LO+n@lPx~HNYR?Hj)r<aI{jOY@ln5l9?Ou2+nAYzK{n7^x(KWN(k61 z77CD$hD!pL_vE-T@K*pogrhXK0>|U1RuG`!CIY{Odmnsr0|<tr8uj;D;Ohx6n<8KX z&@NGF347oYIBL>T;ID8#pe5i^9KBLOmjPdgqgL4f{0W*_0r;e${n!Wg7|^i2a4@UL ztxI?(1^rJE?!q%RRc&@)g-W*qzj+4zipUMX9_c7G=p>+}KN1672K)k!(y0P=82~<7 zC=xgjj%r^C+zCfAg!L+2Kfug!-Wf{9vA|JqEl4yExB#vdbSZEt++)y{z%_83H(Cqm zlZieCP52HRHE$Jg6HbOc;BN;03`Z?T=r{-i9DFBW1YD6BB^n1}CC-Zl2zVcO8IA_v z&p>B7Cql*zco=RiWNLuI5G7H<*>EIJ_zup4b(AqM1ZP0v!_>t+r_wWk#W)u}hKvbl z7V!^dunJfMM<peEtkQ&W!xVlsP?L-GpcB>rpmhXpazIxCFTznemw??yDs(jPI2@%} z2OKp@X(7T4oCS+I;RPWPPvJHoA#N;GcDRXXMnWyleh0yK0{%Jy|L+W-VX)&oCMp@E zO~U6$la;}H5ZG~wQe!V*1)LLR#46y_JdTS5T>u<BRhcQ-z_ruN9JdOCW$iSM`y7r2 z%N}5-d?g`L@)O~xW=X(xa2qgF>VZK8I6fm$Bk<Gdn699!fu9sY9`ttL)8~}J!5qbf z&Bg5k_~F23ijXhpG&6`oILcrK@DDheiY>q+^H6&T)d0tv6#hhDN(ov5{8ZpPIO<~) z@EDv4i&!0S+I*}leyB0<>!lc^sAM%Td6|+<DsTmynIaQmvY?3|R0`Z$#&I)16Q-6c zg&PM9T+VTez&8R%zoN9<IN<kiH1ZpP8&@d(O*o|j(-rc0!1ZvH*9PDfII1vgu4eA8 z>LIKVm4pcUu2K?80~T48IZgP?YGnmX1NN$<O$J>9j9#NG&xF_E+)#T7Soj*IBIp^w zW|h7P{1+oW)_)U-$8gkKgdeWO^a8yVn7a{~gC-1p8-o*cIB*Ocjg>NB4II^`5$N?U zE`uRMm;*=rBB0L}?2_OU8sA48(1ahD@eei2R^W>tDD(ng6&yt-T(%W8Muak;-^Uo$ z=;Hw3%by@&(2IZ%;nF}q2LApT3J&@vaNG`MA0eE!Q{fi?>C*zr*W3uAau+rdMA!^` z?sN1#=u%+fZWID^5^xC|MJNTHhogaZ2{?0)(sHwbDTgqqz)uC9hogCa3Anc!J0|#5 
zK%Y7cUeJX1>(Kv?;c10u$O$EbY~VIHs{MANPhntUaM8y_$!D-|fX)C`z|lIf3V2ba zF9DaHRmM;mu=_bI<B*93z7I!f5@yw-px~S8%^1-a(A@A4j=YF9AM_~TLpWL+9s~DY zM!#WNRsrK16wOBf`d?9|Qvh&_O1}?Gy@oLac|w=(P$1B5z!W$tL@JQ`0ZX?T0YrGg z(SRa+0d5iaML^qi?ByiWh>sKCXz^J9+zq!C^d8{1aFl2ZkZi-1SS1O`)=M-YSy5MD z3=xt=miUC`#cBZITWSCy**!^!@RCXs{-x4{x}Ox84nQ(Al04x6l_ujNneB*AXjN%K zGXH>|Clh2`BO$^jmHrb*#wg+wj)tQe5R#dP_=KWLlc|TSYQ!h3QRuyg-86}skDR(| oMtC%Ck+!sKY2D)Sp3i$L-m83XeL$%u(oXucb?<;u%~y8+3#e=V=>Px# delta 26060 zcmeIad3;UR_cwmdNp57ixe2+MWVo4#Ig%hmNJuy#A?7G5#E=@=G;Y+K=uKMbafxnI zi&pU|ZB-Q|REY)&f>1*gmA2H0w3^Zq<9*)ioD=$ezTfBf`{(z1{r-9Kvc1<{d+mAc zz1LpnoLjKep<szaNl(6EMSN2JRB`1>hgKC$p_zE7Vin@c*I6sRQE+L+Z3Rm!HUKv) z`lez9aHT`*ib{pQM3X<L;kOD7eJv}S%H;mHzw~5|oAizY_x1D(uTYX7{nx;Sa4v!~ z$DK!H2XQ!VK@@AwceHcg%d@_Gmo{qyc&;b$OL%TeI<SD}4kE{kzXqP0#B-d9eZofz zJB1a&eyo9a>{@sZ67C38wEYWdg)nhkX2;w~6U-Ah?od~-8W3U;F0qb6>o#sE?q7ci z99PycN0nA;MhiGs2!yajLTkPw`xO5d>^>v3<=?jtXok8(N%u2ruy2>%ISmo3{?b#9 zvl%06DWwL9AEoh}SX7qRUM#w59w1tc#~>g|r))-hk}1|L)C61<=DRmIu0Ax?s@uyx zw(k%p7LN^c6pPJa?xRG@?<PrC5ryKU$D;IC^wm`9M5?vVQ^TR^6zPOKx&_CjN|mU~ z93bgFj$%LPI`i&K=U_^0K~-9fOZy<NH7znZ`ethJ<Ve}yh2s*Xs-iNaH9p}u!^$J_ zoaZ*1C_%=W6edKUmm~bCwP}%Zi<T<E-gNM4#e<uwNG%BC6Q#?ErS=%76zQ~l)muSP zs+MK;m4i=y=tAVnvZb_pCfYrzdmBUfE#}HzIwco*s3j&^4`EiG6rtXRM1%M=Il4@` zEK8`5sz{Q`Q>4qHbVi<uqJdQOJhT!mN4tBX-RLsL+OX<~y!shhZ%wNer5_SYJyDnZ z`j!Ps{gkoYb2LHj=EiaD7B*0)+AaKj@t@T4m(?1zYMXj=nL72imoiYO?!n`KR~Lcm z23g0pv!%sd&-F9x)7@vhqi<@YJSzZ=w~h@<NRf`FNMmACq*);6iz(8tiQ^MrO&pgv zc3gQ?l`^{{M$*(qMx*<GSx0pgix-4t${#tQ9&3l)eb`jT;BFhQaa>WEp|B7=I!p>% zzYrSo3bn;LJS<zZriNw6S!lT^QB*=ys*weD*fGL4QOTR<7Y$uW3d=+v49F*s^m^aP zQ>b)hUQV3_{u)bhYU`E^QK<&H>S{CkUSo@$!UyE0@LbUevt#smoAJ}D99K%Ui<YAi z<<xd}YOK!)(K;inKwSJgtrci|zm`z5=%ZqBa*ilHiasBG!e;#NDtq7*Dono0S~~kX z+-O7#eqr&>f%#W|L>{a00yt8o{LN#VE&4q5Cnw3;CtE%?kmL40u-R-CNnttJsM=<9 zMk_??2+Vk;{2?i;)woa1JO`Qg{{vy0@s?8Rh)NR5Q-yY^Lc>VtZ$+pKLY+}mHA$tb zQeUc49U+CaycNs?)Z>ce?Htf%{PIWU;?gtUc13A*Hl$Of3S>Y2$Yx8D#?s&q56i$x 
zPAU%A)t^U>zeyA`l4&!hP^)NiEGN;%Q(8g>&0)ID_=8$zPqhVkC=jCHPY1tB(-pCJ z1`RqZc-PfOl<uJK9U%_Iu^F#pT(GbczhfrXfc%ScU31czJYUG%dq}Mv=x8%;q`||s zgoG;Ek6cphu8Y$1y0bK^R^t^2CYI86N9*mfx;=nj2Yyjms&t%oh=YwZHj8`=`wUZ{ z8Xk$#MR@@#+fM_9g6*&{^O5NpG9im>C3#DQpvk+Ee45%gS(I)}N$TF(>}%NPLi{c& z-&wSj3rGmfyR9YoD+y-DDM`hllqgDP<*`qiHi47WL8XDB)JXe4zAq}BV(Ar<*Jg?( zE=1ibE<y?&pf){zh~%cCEXM_E-%G)#B)CcuJO@EEfjSya4P1pi3mdF2Wv(}XW7t<I z7pP2EYM5c4-ISuTki1)>WDZLgi^pLou@8vSj4)A@DzJ1+wED-Fvz*0#{#Y}1G}yXv z-rAZe49fw>W=y@zy151A(+PuFJ>*Dpg28o4wRW)k2OB_0sx@1vZ!TS;2Asp$hgP|z zSdIyahJDE#nvi!h)!OAk51#vuHn`{$Dbl4R>6p#f<}$}k=^lRoDN=Qw3xvJ~DO&Cb zqIGFlrhwGs;;@m&b%WdTTtvEj2_vA6pQAd;+E)7>mZfO5sQdm?_8g5RG3?@2<FyCM z0LrP5Nm|lc#Kx1}?n$atE?wGR_l)C`q_a@u2(dUR%w#hzyY%uHI**)UmNJJ1WX3R0 zT97VU7lfrl9VUgPn@R1YOY(7p(qkwJCJq{uCr`2D_YPGXdWforrk=rOQ17M=(P}jV z=7==084b{HQ!KrM)f@-49P9s;W77YSgO)1h@@#7Hq_70p;c3&*?4f3-(lI2H`i(>} zH2(?8h&;fE9Hk17(i17sWhs^uHXPY0(lcwiFy-D?Sc2libE(!u;wtfH?3sIq(RXwx z)@uBiN~gjb6Ai15qD=WTcCb_oVQeZ)gUD2?o*IDh#kq&pk-dPyoe^e|hhq)XT1mim zf=#3fCryOh;u$-#!q}#&YRVKt5S=NIiPOL7$#Z!g;*lfnQyw0%+46+@<>HYO?sY$V zvF|*(=r@I68|1toD|ZTEHjnVk(m*P^hGsFHbW<<Cs>t|2rv5NyrDI-*Jn%&}bVS`* zQqis|uuDp!%j$z*vLs7KMfHd;YDq;lV*ixgTC&3Co!xE(!(gCVi+$wj!K}P_nDAvV zt83mhApR1qldV4>2dzjH-x3_`<u>ChKd_!H!ZW`(uk4)rY4Ox`45NrjhlCQHFpNwp zIjyyl3q6q?6!Mh(q0+Nc^4(UHbV~k7!(5bsI4$Bab;)7uNQ;8}K0~p*q$_40oEP0d z%hTE`s_953UMpg(YKL4_<9inf^kV?>A{;;%u~!i9ja2zgTS^P)q=>{v0_xpF$&NZE z*s8IwIEryMa{mx5@mGhSI&%;*o(CmA^F($?8DKO;pGbr{*^CE<unV4X!)Syp@ebiU z#~TWH<f_o!3qu5hk+dZIHprr@h9XpjF6m=Yn9cY;>}VKaG^3HI+fcY!O>M5En$NL` z#<mIJkm=qmPpJP*jC-CtUX%<rV~moqMs81u#m#KcLVZ~}73rs@AihnBJD+1!#?E8B zY1*v%M$C}BPchnzo+?WRkhg$Ul3aAtY)~dl&M{#dEV`N3szUd3sM-fLVH6S)q2J}o zQao!j?mf#gyk2$09AMsZ+Ok7ltwIYr^Bfs44LIg7>fZ9lFwZdG*m}Uh>D@noHS-P% z?4WQ-m0NaC3QI5><*_7);dPOR2CyXW2tJI>^bYKGE{NyMUQ$6NhNno!VDIEP$~m!+ zEw;mE{<a@B)b4#9%`HW#ue-EF5!25gF(-L$Cw9Vnntp=+i{*5;B}?$h_2`BjVxNI_ z7?WXNja<=<mHUhjoYo1s-bzCo=7~io^8DlpSR&dwyz9pjd|CdAj;Ic4Ci$y2IEJCS 
zoSY0z-ftsn3|2lye+GnLs?ad}Tt)qCpnkURuFFS)8OjEG9Ak-k^z~oJulrFYv|Qz^ z7o;d1uWv4ohm<_%1v9#gU8-_t6a5mp<hsxqb_{kOj;sf15>EGfqI89t<|SYBeYuV& zfUbK3iYT}1!7lrC^82tmcCV~jG^n@yHVI-m(1M@s%S<ifg>JrVe9L5EZFlx*%kIKA zKJ4d~?E+R}xY63?=#(g3qV{`%Bh}Qq$}@df8~?BQp6sfBiay8t#dtmQVx0r}3Z>oI z?0~m<kv$5?ZSf_ps>#yvRP5H-I3l48`D{^OpdiPwj{^HQ>yI+!&yDQ&z&!s`m?62N z@kMdpdSaP&qM8@#*}@=`a9__p4w@-s`!F|CkZ|}o>tu@5cY%~PU*ET2Gfc;QC$@OO z`4eW1rP5aS3Jt~?HZa(aKg*^BhYFjcm=v5Q9Bj^h3?9NevGyU+{6&@-(oXoKGqZ+_ z?EbaKi$Z#VL?QKlGKafz2{ou03|Dm<Edq;1Xlz(4ZCkKAwuOiB!`bQZ0JqPv%A{v@ zxC)(;KTl?j;n8k$6X<lVPzItniFI$4>befbh1P)xwxX4{?=|eFCAgN+sGpVZQdPL< z!X9HsT18|#;F1aT3RrH0Hb{RN3N4sR*qV9KV)uY)V4Cyfb^~as=(Zpx-xg^e=Q_z= z3XwMod52M6n5P59q6g-FWQ$(!Ple)OaDh6Rqvg~6*^t&gLQsD;z4efg85qWr=&LyY ziXIqNl|heM$muw~n+}d^ZtSPlorW1uNUAlHKOmq<DL6(K{e`tr>_98QQ~n3{LQUuI zfRudd{nDgkDbfYH9?a=Tt<3X+ePgyuEpENP8H$iky0gp(FWpCaRL@Qg4rIDEom$rP zp{Aaa8s*oin;6@Z@yaf<E`<$lV-zMeuqka;hczC?&h>5DD-f~9KRH9TPWOQOK(l@H z)%wn2mM-KhGqsJ$H)E$j)ghMn)6KE<<>}-=aP}i`@*XS}b(x>6x7lPP4oX#YlO}o} zyTyL;jL5|=L_a3cOIQ{qvO8La!FS#dG2F=R(%mTn4a4LV_rm1t8O-&gNHU6(cMj5o zG5?&r7sB#}ff@_fRZiXvmU0V_gMX*<&D;zlw~XW-4Pd_Qf}&&UNG-|ggFaAV5G6+8 zzj{BZ(Q+_sCyK#diWCnptK`;q+4OdS!k>eg)Xu+E2eiUw%s)h<s`F_99YIkUIQeMP z%v5ex@|7>x>2{rj8V~loop1iTUy!LRN`+xdP%x<+iV&?@)KUi5imo%UB;9)i#kwc% z6e|oXQRSa{@HjWmgXv_?l~w4fK+y?`3?DqD5|il4r(31bIX2^a-(l<0^+dk?JT!rm z-yclox6L>W2{`h~F}yb82t+LLUXbGQoYgU#205E?#zBrdKy@jGOsnr$P5b>_55z&v z9EjXsgOWX!X*RnZP+M{u*K4q)f%b{Y^*lwom|}NUUdAdqv~fs6dGd1hpu=B=_{SVq zt{Od(F1-hP&@ni_18jeJEqShrbvz7yS_)y-bVsOdaezA|&Q}v}wowzCm167X>Ssrv zfJt-q5oPq`Ma%b*qUDIAXgg(g!f@<^aQ$z}#270elWZ03Ql!dMt8qhX9xJa*JkqGM z{2Y|RR(Fbv{}W1B5FewbKI-Jxpk+9(dQpAwGt^yy&S<6@9VS+N7*!f~CAXJo)opvq z+#=gXkHscuI56GEpjc+*xa*=hu+c<Yc-?-OdTOUd=|{^W&xI{4N8IJMY<i@h&pt|o zQEWA?YNSF>=5|4$zE9bP$o6edLCw*rNEDI-Im)Ux9LV)CAb;HsO0*83#9{l{{m4+^ zy_UOuI<Mp1Pe3-Y6n8lE;s^QH;jBEW^T-l~32PqAQ?diPgmI=@uCw95pUvwBp#=*S z5n5U#@~iwp#2q3Y_3a>%tBBCfLn37|8xY;w{q`_gf`+0i*vxG@y@7oYJv?Zu18s;l 
zcxOy;@dLW>oWc4#Wy^Yq+3^c!t}zjzbI=^J@o3G7lJRF4grekm4;wk%Yf`0vvtevh zOk2ZlW$Uek4-a`)hq00vuY^y(p}cv4n5bDwKG#RG+btzm7UC(EuqRx7801H&sf{#{ zHsg3?L%|7Q?6;VxfCctcG43qN{n(8AvE^vw_;Pr~^mFyBOPA2-J@+9&o}{8$$R<jS z(mm;l<&h0)vcDguWKviTW+64q9DP2qbS3&Bf5@aR{=P+fsGk2gLZg%$8}@oAqx{<^ ztgK6{@aZS)ahEPVZtbRZDK*0L0*Z<eZN@T?=$LzvEzi~R#D*R0lzaguf+$(331W1W z+~FtoTGuY#t9N0cdaDbS6cw5<5SiJV@3vs+1@_*eREN>|5jIhxbR~t3u6s6P{Vw)v z*KFZ~e^^>KaY`trk`|m3l<tcMSXK6-rOcfQD_8DmcK=FW-jwdCq-)FgihKhz-BiZ( z7c#J&jw1mJZrB6%q+2L!6&oT<`<M-goha=5hwX?p8SXk^zOAm6VlnLZxnHyEv7-h? zz_TDhMcRo)f0$cZ<~#CcqbOm2@&1~I;Iury76+y!6zihgU9_FayMRl}lfNl#U?UyO zE|xex&%tKAy_4m4Z|%O9lI%{15~{I<uieS^b`NPIDVv_RydCDHbx?qugvG0#VXs^x zQ}u~xXM>kx+~17v;px^|IZYLhl|jyBp*`9*Zw8IUs9}Up$#31j0n(L?@8M+t1=~ko zfI4l)lV7pI9_`b&-=_v#6)in);BG7b16u_-9j&5=eElb?%=HGTpR!wwM-wM~1$nw6 zk5P1UNj0qv^vm9MuAY;BOnpDj*t<W;6zi&K{Aj8a-<<vgMT_+(RXC+W6+lUs^!tfp z3x9hD8y+_-;Sad3G@EK3$ohm-Yj`3_NctXtx}~TWtX?Xs8=!sx;;qzpkQ2Kd*Gl-d zk{No&cijdxN4`%0if@?w$2Mc`m$U@r6w5O{uUT|i9S$fit>}+D=R|9oo&59~d%Nd2 zx9b&{?{metkW0FY73`0mp~ChG);xY;8*4MP9R2w(*tCsaL3IltU^518XQlDsru^;d zK8lf(^t<lSW@$6)ZN}x`Tl5?6v1joi!u4Y;xK}4(`#m<aS0ADB30vK(NB+Uj(E|PP zzwkd6;%Xe%<WayJiB^$6Pzgg#K7r#wWtyWU{W^%^2>?2%$ex0g;f2Q(4VYP*aW?TR zaC8>i)i;y3{0LvmRSB2Ek&3Lc)2LIe9fL9(+}nfykxl5`J|Du+Vv%NBCwEn28EPy- zjb*DbKQ)%4#$46d95wd*3Y8)iXlkRA|4<nliDA{b90kNxqJoRyABNe;VY^-R7@Esm z6MQ``H&EwNp^p%9kk?;fT@%{7Jbr|O(GS^IG_siqVfjjtFYNgyH^#+_SP3`btiy<8 zlBJ-G$c@j)8oBZ(=AqGM7L^fI&5c`eR2Pl4xwE9-2MJ|ZmmvdKf$+j(dAZV|VtgMw zZCGJoF!#hz|6(O~rE-Z5pyXm*#|T;hx}XR)II&yM1$8yv#(8fuCT=5TtcBW~RpOnu zvHv8t5>}<K(}^L%kT~{NqMz7>ibT~t#UnO5(dv&K6Rw;%WJRf|$d@9Wu^HPbaio(w zJ5Wt*tt9?bFGOFJgD$XPefl}oS0bA={$?fnu}>$bay52PiMen715Z(~>7c_dHe>Qu z=G*scq0cw0s&AZ7^fhxzN*0daWa&w*@&^s1afACL4=0IrkewV429#58iJy0qBfrX6 zBrLYk;xZJH@r7qLD_|fd9`u9-fM+yEaY|t0Oq4FjZ{m^?eS(f^INFfb;X@Ks9-K`n z?y^dt4qmhw=WW4B2Z!hFqB4)XwkTpKijb7YP?62qtI1xWGjMCEqQ_Fx+oEt=nP_S3 zIlDhGUI7C+n(bn}lLLgcDI}inN#fuh#2&8PW=_kuM5P;v@EB^GA5;#s%qT)GvDDo} 
zyUJ;K7tYzb|Db=#(I?2qgr_;=UNp(yHK5r4ow@DbOjXWp74fu3E=8Le-!GxVe4MDM zIB+9h_)}pLQmHnhi0+Hh(y#<9eL8I;&#>3~wQt!rMVSe+_kOah<d%qH9s(@!8NaY^ z`gMz{-VE_#-JAAkHFT&A96{jpbeQD_r5gh-J^YGH+;5NE<O3<$%mVvI@kwk*{|LW& zJD!8h3r_+}lualIMR%~o+w6G8`o9_)hlxv$zA9Q5x+hu(I>5nRo>V-}HMMx0{gnG( zK+EpP%u}2Y@Nq3QnXHu@RD-*(lwwaE#50_jP3(O!T^QiOZi;cjX9HPqN(bTX6hI4> zm0}XoQdm*SX8t5=GoXubrk-UE=-hqX9W47kbPaCVU~|FM)2hq(jmp*b2b~;nmUbIm zE(FEmcT3SV#3tQgCkKS`XV~2Vp(J1lA3--3%aiAM;T5`mpb^8G-1af+I505C{{>_A z$FM*ySe`u1^Asgq(JEjc@@)FRSmDj<>^}qB3N!22xq(TYDn6pIHx!~RP<ME%Xovk! zV;)nOgZ#-LBcG#O$C6U_3vWDUw$uPOckFnn*5NbpUx2I3+NK@lkF!71CJS;0HgZs~ z@J)9%cTkux{ulP~AmhkW=w0zg)U5`a-Gg_qhB?7dgi8$fKsV+#Ex#hHWbTl%jh>PB z<62uH^|de7F|0?auVb-pnL@bYGE2l|ECTz_ne*T&K?iS9p^k8OOZuIlQ{t+yDG$J7 zIk7n4<6CU`V6PUVekT4kGu?&cKqSf6S?S=eMyoISmFKyiwno_ryKIH-EnjArK3ceN zi@ln@Ug&y@Jx!l1NOxG)kaqnml@kGbBPY9Hk>Pak0Zqq+m!&%yCBgraSlC~Yz$KHD zo&LptR>3X~3C*ui$`X-!xBxvD+ntq<sl0S56VbhDG?EJVT%|K8$*Oa_3Ba-9XW5?# z4%N)}1xu1hKwe0~*7+HSU^bHY>0bfe`!|@I!@*0fb+&YO;NW~1QNb1t_2nzrnxR4X zf8WqV!TvY)cxa>`{L0!7>nzxQW8;SfVqzB!i^2c9hlx<er^C7n)9qNd;gh{epjU9C z<Qe6=zr3)5=CfVHv-r1|*ND}6`Da`tto?W#VtfkQKcbCk7dV#K5JS<sXq>zSv0|V4 zMe;fj5Goa!Ffz9HE2>alw44qqsn}HC2Cj{W<IS*zKhF!0dw^SkcTk|^E^-GYf_pS4 zAAn5@_Zc%;yjjV}LjF5u92Lq;YfbJwk<#Qss)TvTDkIt(IASWLe2>Q$6|CE+0m8%g znPpU8R^nmLj*pttd>Xlr2KtCu4G)gI4R`aUpICZ^pK#zOHX|dk{V5#Ou=8=o*O=Yp z@6=c(VtFobbd7^U2-}aMbk9b!D;b@II~mM%bXbR_&#;Pr4x+2GIQ>M1Xy^KUeKU0b z=+9UXiH;_-cl3Gb3?7>c@`!XcZghn3M#k=UM|a{`tI&?VHBeL<bsxDatZdEpEWe6Q z%?zC#{26Y1WFQ2KQgXE|t!9q>5}$f^dNv+?LSBkQW*@d%{}{H8HZhyWvaMllLXU!@ zObi*U@?OM>ed-E`{v0$KH)<@qAJ#oSTb0X;kzWNxbJG@c(*}?CI;oS>AM6*C(?f|U zlT!g>hrJeVVppQ|?7%C|`oF=@TEF-TJNHUAA#((C9y3Y!rIF=~2@%|`vz22$a{06d zwx6vyR$f@c;>Y?L?>@u{ZCMNDhUYs~YQ!V6iVxWIv2C6DLK1TRHSD9Y(|Tn1V~0|G z<1OXQm#HpS>Q1RbI)bdLh5kxuV)S_oH80P?Nrb{0*0Mh1eiiDAS?gEBgj;J^+N%fs zn-wc_>1;D@fZ}3Sr*vO<%{&3ykbVsdAD`mW3(LJ=VTLm}`&PsAXQ*!ma{Frb&iE2x z(*@>}`IZpSkFCgT)%NEia?IAE5TnibJ0kFrb%2p&Ic2Lmi3J>2E{&_jx*f8L{hWC$ 
zY}z|Yg>E(@2U#*YR-^5x-cV=-fm*UxvXc{XLUNOEjdfO8cNn~o-AB}zz3N??c$XL( zJ8@9ffIX<{$@4@*(lx2PezeW_kA)6oU0zH1{y7>YClnz&o+lFH!__5Xd$csp%(Itn z)PIAv>XB5}9!xA6KlujoV9-lnW=%fF^jV4V?|~D271!4ZFOuru@8hDc7M(CG*#Igo z{yeN?gJlKiROxoA^f2Yfb6u)6zM_FiS#7;Tsc0YhX?2sH)Q^IVeVSEeg~Xgxm`YP` zfGqaG=pivK6T?}KW>GrwZhk2iS<ONwMd)8wIEqF-Im<>(igjxPPSf7-%~`f#l6Sy* z6SkI9snYkjkCfb_<@UgQOpJRn%T=yB%g#;e(_Q~I20H@{w@#~sZFR_#42uV_yUF5k zqz3`h1!!oo&Xde>9p{2}oi5ftzROZ4cM>k%WeX>J3p?+!)sy4ezk63JOGHTmN+P=_ zTQZITU$icvym%64cd^)*a+f`uoYdTo5-b&taX&0rQtu!I-2a0Oo6^pG(_+#ObF!HA zCv7Hwuw_#^3I_|=p(&xl8-K80r^NbN^N|yeb7+T1qiRIOG{0NGVy1Qyj0J4+)M&@; zzmruluYhfw+DqSW4As?W%X62{R<Zi2ejP#!(3eJH!EXO(5k^XgE<;gH(2dJgxB^$< zm?dwJ)oA*iMP(0)$^h45;^au6s$X@>F?2;I=FEzV-&)Ucv!>(qkyPo=lq1hD|6MlL zv)2#$@m1_}c5v5|Fdj6eya5vBs8iho4Z~EoluxK!`IDD|9Z#{^gFdV*FVax`7}F;W zbEiikm9iVN<>fdH0S}QyW7(Y9F`;Wu!`g_xnrJy|!;4U+xPWdjF$`Ap5>K*zg&o%H zcXoERmvd0LauL$qu4jMF?i08f?H(X`{sNOiM2fAvE(iasH?oJP*x<J&>ZKD+nk)O9 zV29oc6<Tg*zrHoyWf9sAJ!vi9s$`?*jP=dGhEtp_o_H!aAI5^VEjm`RGjlcx{-;^) z+%Vytxoqv+F3kteRc5&dxw#UcpKD%WH|BN~vOZ(p^TLI-Rjk)M(aoz0vxYf2h(|De zs@TeTaf1Fgc5a?uNZl#Y06Zexh7+JT{6|Fb5Trt<YlInwF5@YndVJcmi23ebjYnbf z!?YR;IXPO*lYWW~nr{*Y&1E_BlUoE~Es4@kcx+@d-k(EsIotVKfS>Y~qnPywtFm+$ z*ral!F`AceGTVGpdNFv)VBz8^FCc-qa&sVrbBLz<V#~LbLBgI}Hycx3LzhCNZy8Fv z#SPRa9(klowET)E=*Fox*&7SIg$GC3k_8>}KSiTS-xdKieZ$fqeVZH?@DI$PS>wn# zCEXX5{U-b_SeDyC;&fX^ie&${;ZSO<qvdDb+zsR8hvt+J_d!xDoM<j!Rp0=Y8tdKU zz!&91caX3evQkC8cS|{3Yr!*UhNislm9vBjPnNmRPk8<xwqW59;oIZvEMh<8vHFFf z!Ye0tyD$2l_a1s5x}cMHE53n%FvM^cY%gMU`E7&`Gni*VpAM(*p*N7}sLD(|j%mXl zbv3UgZ9IFB*fSn)8{k>tDZHC!DG2ksbqvKoAIhK)z7Q{n<IL%lyoDVp=)g~7e-`-q zA0;91vNvJLSi<)pifL<&rzXGs#ljYM^!n+Lsti#`k$qLE@MA1{alUYDB&%C|74`}{ z{<hTZJT^UQVDu$29pZ1EqkAZ>Zn!n$^^oJbOXrb}H>2L93ImF0i!6?BRnC@JiruDv z4`JJ>=rX)_H|Bd5R2U<?mcvFAju1j}*!IHi5x3t^^a3y0o{``E$c7hf{#KN4ANcfO z8wpUw&)#7AqV|I2OBPoY(09g{*rsOFl7a8~T}2SSLYvVHGD)gQiM|Sl@#`<?;Uog- z*rvp|H`uzOb|DY8<0kR&ht%SLzd@<~ZakiVtJ@O(^N;9MIWx`ki8M*^DR4MO|ADJx 
zqNNHO&>6YsM%swTOOqFYM~`9TjIBbIQhZ=Fi+pElM0dKgr~+-f>$aZdxY;4plVoa_ z^<&u%5ErogEIa(p=nmV4Q1wb_onQ=_kt0~m0nf_enUv!rEr-)tmi%sJhd=*&j+JMW zI*=pkzvNhOhMj-6cYgFiY<Sl3cIvKp5qHytlf|t2Si_&8tMJ4&C`#no@F3C0Vs@A7 zKPN-MW_GYuz;hX9dE&XCGlUnAu;kjHJ~;SvI>q!;Rpwu^uugf_FbZ)zYq-zZ%IlY_ zo2ano0DFIlSy(iTITk1RxR;WlVR5m^zwLW@VCU{*lZx*Mtq!vE_lAdx{oo_BqoXNU z-oCd<{%uedoDXJ4erqqQc(04VO=HjB8|L-JYiLVxa;<W{^$@e@6}mSrvK>;(_PeHH zW-8_T-{5Ar1OG{}b?X#Y05&EEWWy{DR5L^<83Ly(8L*dq4rx(JtF;*mz`~;ZOdj<O zyDf#gCiFpdN+pM25H6iyICUFMOs=H%`pTE~uq8`d3+Jw~ua|nZ7?nmwnazAua#4IW z^6R&E*DQ7BI}Qp*Ay~;(O$qVDfT*Ex1?@{NF!d|dr&e<AkIa2pFX8SkmcGnnx`3xT zikuO0NlRvC)Gfo1G!?V=7AslS+G}o@QoNz?zEYT4_at_DS$n<(d%SFqup^7@T;ASj znyl8E*9FTsOBpNc*X*z5!NQ$h%(JAMaO*3URublRYUj(j8ihv*B{O|x=qcTP=;kBY ze_t@k(eb%tkdO3cx)p`qS-UZO=%4d`#YUTi=mXvL-E89uqg$shalm33aPs68Hrwt~ zD~8$iXtn*tak={oWW^4s`0`VwBT%y%JfemFQ+=pYK3?%{?-x))ov@5$tU90n;d=Cg zJUvNaVxlCcO7~KwIyvcUvR(0(LyIyb<3-e>i^v0`is68>8NVAxTzc)sP{^>W;Q*SZ zoP1D@6Jrslgn5)e=U6^TTaB@b#2J``htMdzz)OPpBrskT;9dxPst6R20Hya<)5~xk zVk1ET%aruYrt~1Br(!QC))gUv1}_Bl%~i#1#@HsgheQ<XG9ZK-p`;spn2S>0PSf=R z3s3X<Q}(zD^TgD+8)h7lsJo|<S>J(Dqz0R@{aE(OniTyJSn^`rS#yldcxepVxyG~g zHjUH5X8eXsMzn%%zIf#gEtB+fk!&-r8^h|?gb2OgVTQGx-RwVmsT(ER*r2tduwxrr zyLO=41Q;4kCix#**<Wkt2v@hV=^q4#WY9()jECO9V*LP$wU9S&eW3%9TiK2e+6#Bq zvTGmA5vnuT$PZKf2ardkT`j$Qnn0W7_KIdzFp!XQy~GZE7%fC+u=^i|3kyawzjghD zn9*$Fx}L(I<80$Pe_{Q5?9jRxVN(YCdtI-XO&QSj2KcNS7I(jXT{-09-bLxw!7mwY z!H|w)4{#KtPs{RR_UcFN+57983z_e-PuH&%-dV*GHpB|sN3eMt`un$osh~8D9$vvk z_;rPy*0_aB*!2x<g^S0S(?>0Pa3g5fF&ptEMP?G5toSa1PKOC#<B{aQ8x&Q9`f>QD zL9<WG4f&|Qmylh=KKjU<|9T;6lLorWlU2e;9;*^2d6-H>$OBX&QchF|7rDF2NRT5{ zLX=ypM7kWP5*e~lB{F4KmB^OuR3cV>Y*FgWk^i#fE7TlWR#;L2R+W5BrI!$0t<oh# zA64nKM3<`cMxwW?^fsb5s`O5xSE_U=(MwdijOYa_T}gC)jvB$?%*m5fx`yZsm98Z^ zRi$O36I8l^=opo5BsxN+$z+!ORT@w2IN4pL6}O+gN|TdNZhV`1F6Emj^1B*QJc+d` zt+@FvsB|pJRH}3W(FawUyoK^kl_oEtyhWvRh+eDGbBJE5(gj2psPqz|&2KB?FO^Uv zTTNI?^jMY7COTcEw-KGB(mRQcRq0Zq+pBaL(I%CyB-&G@tBH10>5W7`U99xJhR;W& 
zL5<W>!cCQyiLO!U2BND}x{>HImFBQ*%X?JXk?3tI?N0Q1mG&XJM5Rqc7pimw(Q`oO zV-6xIGDA&>B|1~3={bcwT%|>#MU_q`x~EEK5FM$~nM8-FbT-jGDxE{Lt4hxy8jqD| z?h8QY%YPRrktLKMt8@v`*Hn5f(bX!wk?5l;y^ZKnmEKA8c9kwAdLz;FmJEG}1PAXF z<#lL!$3}9(>);zUDX-(qgB0Gt8;BQ>^-N`DD}@Yn2d7x&j8r)p;2c&tNh$|!P5n12 zC#ry@Z#Ly8sWB%t-GuZWD(9iP$|JyuR5`a)4jiTWRx0NQl@kk&zsfnHauUF4u5!Lo zIU+cYDrbYr$p*(OUu8)uD+erBl`~i6%mGJGIa3vmvfF_9&nL=o3{{!5?SLt(%y^Ya z`wy5`Rb~V+wc*tsoMVvL>st(N73*IfRKwOYX-hj{+4|k3TfP>A9}euEzBQB=y1vIs zw)G9|JRg?ijI}mfGVGD!<c)M)YcG!hL%ZzDYuJNrZQ5-Zjn7ny{zDDGRcsxdT==AK z9qvSN$ITINv6@3uv&!J7G>>KjJ_{CxjAk=Gi`H*h^P-^6YuJ~cy%n_4m4}&SsK-Y< zC&jEIctNBrC96!dtL$gkTj*5A`hNaMa9_uIeUT=d_>ft@=q4PDV#mKYE|h(+d)fB0 zJina1x+7T7FJ+5&v=ADS*y<e#j%Oq~#<wN0pLX~-eyT*4B<+5_L&pmXJ2KtQ)xxb@ zwqd79unl2{cg6`rXYPKwv$dcPUiPASJFF~ncbI;xKYB=(uZMig$|md%Y8R7;BRwfB zmYA?2zU-}>cjQ&NV}(}<H%HB@6#XUpVs~293~((bY*Ik&mj6D64Y&N+aq-{cdN%9z z|Ke5JvmIU~FP5;!UZ`!!`t1phy}q<5r+g123KLb|QYtkL&Lw~ON0l${dMVm_Df@U& zH&-thqjYXdx|<8xFMB$)83B>tSh=GjsTi-{N+=XJ@*@spUg4{mUvZWH^a~dIO;$qU z7vw&&uT6ZA)XcHKP`F#bLQaC)mL5+I4;w2=OUg(ItU7N<b|39IKys~3kz5UimE|^L z-`?!%H@<?YH+%3+q_FpM7P0qGkGt`7fvqLua}ZA*z(;2y-mF5mthy_o5yus8?%}I7 zsv(mK!}l6CW7l|Q+Sg4e`iza<7cBhPlP%cys&JqcyN1}BR?N09LilDIYf~C7-0i{A zOO3)?+t}37cZ5H;vUB@GocqSnL>US@+1RuFeyq#>4+XuFB24JP{@H&<@aVyg9cUNz zJCqtNfLV@@7pWQWJ-Rw3wL6Z_>AEX(?P7;l$CBcn=a<$y+Km3OEaclRet*E4(Of9P zJLJ@@D}7|cs|=^Ru{Xc<7b;@e@^6hjY9UoO2MJU<J~Ps{R%)|9qKt2X*(I@HA?If( zjDm<fODPg_3`3!a{q${1Vc10W<lDq3T+9`f;$GDGFGuBDTmGA)GG_}pDuwke*n0;} zT|b1uqqX!akWeR8o1Ed&RXmj+s9gEYmx9kN@Kl=I#xAF^xw4-=eU@)9x9>#LU04rl zY4v@e(;egV*_+XTR$>;tubI7>&HgSZ|Em{5a!-)aWy;-0`5M(n`D|$9sKnuT4JrX@ zYL|yP)T>V9gFZfBJvQw7%TQFL=*mg?_Ac6MIwZXdWwnO?u}Qsvc7ZoVu0j%RJrNW+ zOmgtPWz~odN1o{9ox9k~Ljhd{Lm|EQNfPfU;x@CAckaLQ&Ul%ZvW}yy5$&Qid3X!X z%B?hcymNR~_WiH&&bwIIp^n1)L9F3Wg5bKDMI9c|?X@7Pl^&(zC4<sesc~b&a_BR? 
zIT(!ue4+O$-b9r2##y)vVt`KYVt0M@YIf-GYr^#`)~0NZV2NUz%i@Je1?-oy6yZ^4 z*5OFFuxldw=tx)g-jU`)wo3MV&ELKhFL~`A`&wIG`1B37WLp2w4VloWnuSdpd#Spy zSAd~y?86tbd(+^?7|61xhYF(y?zT=(<z259P^(YL_wit#-I~!yuokd3GXw1>pnNuP z<}~|R&{Vd6=24+{KFfc7iM>-JDt39JfVV#ZzYJTR6XM?&{<9RzaYy=eEIrY1=$V-H z0B1{?%^2L4ozCg*=l}tmv9=9!do#fCN)AQ7ZNsA947M+Tvz%qT`IG%Ue1FDz<ZcpN zGj{)!`+*?Dr?a_vff%h99oUr1w&aC|Dc`G_!+cc7uJj|6a%!A@4EEev<KgFp>Wq1x z*^UGXev??x_pMrwX+=Fu`UiJTf4sL=?<K#M+oaO+B9OE|@U(X4Yawje_bzU`SD>WV z#4NlS++p_$w)OjPVR{xj^L>Qr^Khl4XV{)y?dY+Sm%Q)I7e!UQ$-K(LB0dENCCZCl zq|~ROiJjy0jX2I{wGp$<VOwY{VH3+kjAJ$7m>0tGksS6xd1ql+39Bj(NW6jWmsl1= zyK;GWlO{FV{lG>84PSmJ$33iRc|P>9Eh*Y{KRs@fccU-3RT>6LuD?On*B$S^gtE3r zBTNpcS(LuF{GvVUJx?AF9L1^bc;_<QF_h&VZ5P;=vX|i{`r~zbv4EfxzEn9px>nuC z%h|U_(|pPxQev5p#suT0*VLAYA<T5lug5a<AsO!jPNF26v2_R@BKL)>+EtAQAzqIc za?dD2y^kYFERM!w!{3%M^RZsSr_0!$V<wX)9t)Be1b5If^DHroUa`o5gD9ogzS^Q2 zy^Q^RY^pFbhmAYly<^+gn+Csb0|tPe=M@3t;x||^(hG6wig@=_^{ubAvZKel_S+Fm zJA59T`FpTjRcAgfyGHr+Oxn}sPXX#~r|y}7c)o}|vxRJZks$lcga<t-VVjkfc#W8) zys(c~B#X)x!RHG{!&!ML(on1keeg$TB9bIL#XBxtq^~#W6>6LDu8AG37_Z+jgX&XG z>9#XiWTih}&C)7^-1lI`BuUQ_4J#|exGp>1WD6=IjN}edK3O4`5505hBnJ#)`zxb* zb%8j2siGNdIwRsxg%ldZB>C)rXiRk?n6k8mW_j*h`M_o(%sKK0(^;DnUE-bLA%oBt zSe=v$uWDkK?Xq2oKFBkvAIIJLN)cME34Npry_>_<orn^~Y+`3mL<+$<j6d0iKhN5o zOm4aJ4RX0Vl6uai3Q+R=`dk#e<_#vD93uGaWIvq@NC<~mQg`f><~;g}X4fY++aUp& zU@bb}+uSodkQLqq<Q_rk?;|~pKJh#&T0I@)*2OIDRLI~l0m|IEVdmsfuhF=+r6MpL zMFUASZTL7HndcyqR@_l4uiJ#lefho_ef?^Zn`h(M0*MV_+fTLUx3MdytW#c3gW)$# zEIOapOnz?w=&_>ZaqU}uv6~wfH2s7@(N)oqRHJ+mmr7r1hAYOKGF5MM*R_D0`l!Pf zT8oaf6D?0_XMaa~Z!>V@2UA&9Rm*l)rczh&Az`I2^s`c4j5yvsy(pJ8C48kOWKU(y zPj~T~3%2@30rsQ#6vJ2W#pBl%<PP7m38&i%r;C~ObRXeX4Lf(*bHLde6b`%A94|`O z#dZ+AYFKszgJd(xKA5P(bnaV6!C{6kXz}XW6ENv9Ewrt({Lz%Anq)SGbvn~$%;76^ z9NVzM*Q$jrYFU3(9t}B5tS-+*?xf_<(<I=*6y8|G;|HeH7LS5crDLdvUY#3^SpA5( z<KX2LrHc@{bD340@oDc0{=*uvrA>TzL$sY0VOivb%f}~EJE&CoDoACpKKEwsXT1gM zW!C0wpx{x&QqG?C`+O1!Dr>77$Wo%}oh9A9xh%4JcwkTPnw)!;S;{h>BuXO^=+{1A 
z`)sO?Zf=30C;x^tRh8tcS?qRor0ZwkSjUH8yXcn1LeIUKeh@`tN?TYKMsS9rXqbiM zLHh*rw!vm>W+V${o}s9v!heAGpP-6*MNyPzM=<i_I}_0u?2yVgiWAw*b3Hvv;kFko zkDlkb$|+b2(6vvevY7L&g_&M#-1$z;OIM)H!<2V|zJD8z`!E;>=lunTiR|F{#P;5! zX{wvv335_nnN9BmeRiF6P<<yT<vMHgLzK|v8XNaRScl66Xzl;-PLN^pe|slr;}2}d z5AB7$SJ|~6dWu_cg*>3HS&@g2R&LRkCP!Jc9YyO%Uip~3Qa0h9j;eL|>4JJ6^;+<9 z4ay_Ov&;*TGb%eOuZJGMfFvYg1I31Uy#>Yq?*yYAFsDSAQkLJbL+Jw)2Hp<Ghxm`4 z<DrkSMRP?YqTCif2c#{0p7MVo{>Pj3RS6@I(?q{@A*H3u-)FED7hANme20Exg}SKw z+~*T;(9vBRzp0|gN2>Cfa$yGRb<rnoTE_n%u)i6KkjH3wBb)N7bzw}-Nse;eI@ISY zgiN4naa`!9jilwb$Ag``=r8=13ASf5MY&I9vc;Euo4Hb%Ul6~AAiu_9FQo_rRVgJU zGLwCB*=V0Gula*S*vU)IE=%%BT|7_9okp=6mx6_=@l3cJ>DzXpI<<J<fd|QD>H`mF z+3pWE@UpLvJzl9=u_V&Qqv}AR!+4bK&97wpFZX8+LPGu$6AT0Vt=4dxhI=)v*6=3{ z8#J^JR`dC3*j2-!8fI&_NW%{`+^OMF74x|Z8snyh4>WWLQ7Z`4FjB+ytyKO}4JT>* zIT|j}aHWPDHQc4)Q4KFCh+(~>F&yyCIQ@lb7^`8bhEp`0r{NL}H)>d_VXcM@8ajrl z<@#vYO2ZgJ)X(+O7=tvNq+yPR`1K^EVef0WQNtY?9?<ZphSeJWq9J|IPk+xfbjR2J z^cM)E`ngDrF+jsf8qU+OM8nTDJfLB<hIcf)ry-|JjjM+K8g|sMmxe=I@ex!%H%VhG z(r|}{N+SMC4dL0(LU_Q<E?fe&-BVPtW~7Q?^MNaZTj=Nfq%w|n1tLq3SJSVH(KeQb zV8R!irpDc`Tpi?;Z+faqlPyDkWlz+&lN#kRwKy#x`g7OfI8l|qosU&{4=tXq#jz`? 
ze_9;dobtEvkt&ZfkpA+yNR5HxU-_$TR0aIBc#altsl_=hPW}b@+xbwH4^*REwiXZ4 z;u{~R=_V~6sl|hp_%Q6mf2#s>Gy#(qpR2_=Ej~|+*WOp<pK9?MEpF4|rMn;9$~WvT z`zMhPeSN~j8Iw)sH%wW%lP8!bn{smB$eKKBmML%6Yp+i=wGN*#t2G<`a4c(h=*x~g zyuG`!(N*yHY}P-cx^`qKyCzQCHKt9;EFHV`cvi@@XuQ>pDx%Qw5ePefQtNvK^y^BT z&3h7MA~8}AJM`bf#BnDzG2U6=oTD5f*|jGo!SPl0{K){HCfS@eIG_>kYO-r;dEP(Y z!(h)DVw!P=D3_cj$v@h|bU|>up;pd&I&z+q5QaD7JYyU<&nWjC4^JTRkeA|3c|CCW znGjy&H8^mFNhlxrs1Dvi;2eeu=^nn$obOQ=*6pcBKINrOhj-$*g9ss7y(dB6_*eXY zYU|-hjyr+SRG*Vx$LV9dIX&;mIplC$Q@gfy#^Q?NxOkLJ$h!)hEAPSaCRg^xv*hMY za(8-goO4f(8*_sle-;?x_$tTI8?NdQIAaK$&}DRcsCy1?&vW(@*_~&7lpfH0v`gf; z*AQlFJ#ZqCJ}+BQhqONsc*qiZcsq06liWD(7(M4bv_*qQt)W_9_Hrf`_r+UucqMhE zR-T8mJ?D&(#2hOlPNOz5nH$JuCUe{_O(+ladW7>$nOfL$Eig?jq6{xGJ;h5NZTi2= z69+mM;rtEu;CW!zrqP%uV&$Zut^bGq>>Gen1;M?RkPWZ}lGaVL1&TCqIT$###m!!{ zn0lT&1apQEg<SOK8|28j$GCIuxvtgDWsW(`G9A(>e=O2i<Wrv>P0gvnbJ{|f98DP& zWr~6_ky1qokxg=aU*WjX2x+yeLio08mh!yMs#LxWD|+g)Dv$5<VzSrI<hU~k*|n?o z@?D2EW#5p?ac2>-YpHeU1L>;SmGhnC!uiI49%^i8QSDJi^2%a0W5y8{*DB)Nqgfoc z9PzJf*G%O3w&@-o4x9(_c|_6NI%hW3GZ>rAEQIMltND0kt+-&VICF-fSSw1sden=$ z@zX_0-MPlHHLe057UH2uJ3-n}k!~vW)gnwI!k8OtrU`stpgIH2oO7<KGc*H_tQ_YL z$3#`_nq30l3X`_xM-2r#zAMFh*zrMYCfM<Pd(c!=qM!V~l+VNH$Qff?IAc_k%D(JY z@z)&p9YS{Pn)7yi$N<W3!p8ppl;`w*7?KE0L+%^w$Tc5s;F`y{am{nx%3O0?GSR4B z_I#@_-$)lODCScxh+ogSH@MZlR2pmpsH+<97_FIO&qusimb*@K+z$vwZJ^Z&b;H17 zaAPpIxyIUmPw2Tb9M^~tsmad)?OV-p=}r0n4->i-GW7`BgsutF@$auWqT^dSQoACl zU6BIslkXeejPo|fVA)4=-h3qIlCHKq2Xn&*xpF~f4=!kuJ9=s0f^vNtylOqGo0qw# z`$p+Glld*qG-)<x8lK0QV$7V0&*6M)y{nC7S{Zn^N7<RM?bP50M4SG4k@45SadS+( zp^3+3fVuG=emC_0TF3v_I{yFDI&Q6~<o~;Mye8L)uURwJnSbn*%nvi?;$O<XA{TzX z{l1;<d?dfuo!{6jt>k^xc>DhZVO&qxH^_(o#ePD%z$eg(O4U%*FiFD%4SQ-Bt05M> zk}pC-lZNgZavJ`<=3D^ZDgOemidSn`rD3IpM>Q<d@SukH#89bbr-mCfT&W?>K_$JZ zeEbZW5?`R9S;anFw#LZRFkQn04P!No(J(?ocMZ8UivxM@HjSKGeuIXxhP4{jXjr}G z9K?gSY0<SB7HF8QVa6KYAbxOuqfMRM1`TBmYc;IauvEis8m`sQM=NfL7N4VGwub2% zCTJL;p}UH$ID3up^tsv)S;I07w-K%}P2>IHcNn}T!^GEgsI^mvBF9l15+@ZS<F$Bi 
z6>sW*(75t7rVu{CxhdN(x_#3^_*T4AhN~(kuK6I8e<R->8+?^J9?5y&JjZv`F~C+W zprGJK06p;3^lyKj^91H2EC5{q+=5U78N$`@|3!jc3oOKi%?C8$WE|PqptFIw2qa$u z{EBGYEqAuWz0Qg|$x1{@fWIPCgSLk&J05}P1mH-G&H?V$=sm#O2sMzAfx&?scfpV6 z!hk<$^aWs+iQ{fUW<BtG1S<0=kPpV)xJ5oH1CfkC2_mpgqkjhqc+Mo^^Hh7_dIahL z;m|O3v~qy-fXyGjoirA>6(I@qPT+ZjbkG-oF1R&ifOZA$M#urZ2Y4<Y|Bc0ub6o)T zXiF`^4|W5eAW&sbfxB>#+DI9It{rf}p@M*i5Nbh}0ef`BG*MpQLj;=2M&M&yequmB z1<sCCM`#Z4Cj>M2`L!VC;x@Aukp;kI2+BMGe~iXGAAGV4uXKS42O4e#4juz_7=iT& zB>y{ba5t5o4qU9!4ZzbqFa^k44Q!2@5r+;(0_VqL{v!}s5YKU6B2Ym)fkC}6?BJV# zy%1=GmH@XQkPKm^MiaXARx36ElMu2YpH7I713CxzCW0As0dO(G9MFZm^I`HNK&ufU zT#i6Px(&E25tV^o0=z&0%0>7P;UM^pKqE{fYLEz=mxNmy_$z@G2&7LJfR7MJ<|**& zWK5wKrV<$24`t?~P$I@6PzJ)~xJ4yEs08SN>l5+e#o<P2bOvx@3VH*XNx=CCRKWt^ z9t3JA;qMwv7&1WR?*#sZumyQd1F@tRreQu2DFBuuP(eq5euGrnAGjBR3M~aD4OT~p zFlGpRv&c($7s1h+=Nf<oLy;Fh&Pe#raE@yaek1VGNK7kexT4S-rF{5xGC1zD(W=I7 z2im=&_Shac7oh+d766Bg!72bf9N1$l$885quSFM*LkB<;ZbBek*#d0#s#*@&&EW{t zu?Qg3_$z^by^8t2h8-<(JjX3Us6{~qK>Y;t473U8JQ1z+!IA^E%2F$i0On1?Pve6> z2Y7O_+JP$IpEE#%-vD%Z9iv2ffl&z5h#26v2(&~G=7U)FCcJtODgn02RTCnB&1RwZ z;5z~rBT$AyU^NBk1mQt58U&e2U_&0}95i9!Y_+nDz{zi+1K7l}fg9$iqqPwjJ{R*( z+8zPoCIZbd;nsP$k0Zl2;0*+-tQHtNUo8|aM6Rbs6S^)`b%L-6fy!9|EM257W5Ra} z)J<#&aM5CDKgusyjP-ARTis#_TU$6T6A6*P9SAc(?*v8{s&ou+zeXPfCKaiN4dGpc zLdZ7&*IF?upb4ifLytkv0Dgo(`cVaRT~3|B{%-;?{C&(fgb1w&G*3H$84SY;K4Io6 zY_XsTZz0gA$iRK8Rk{?|h(LJ>zyAQEh><u7eC0#*81z`+fpzFG=!3u>`RlPmA<`4r zb|dB+bbH`h1Ud@{H+`b=w*YmU)XGeR2umPe3*5RH6@xAV=50ZK&~t!?5U8PLK#%|6 z5Ch*6_!$Bn)A`#$%-;z&Arck<J-$}w&lC6%p%(l`V8(t78P+smw*xruLB|3m1gc;u zupWVW|2xqKG1QQ;{|?Le5Vl*;g}@UCv^P`%TO7vxQ^fNy#|=cFf>MDS5U8aafj!F9 zmi7d$K82n^el0M$N>w@$cn{$q_<sYlPpgd})SrQa0{mEDLN&)d1x@IE4hoEA?{h96 zLxc!zAZ5V$7qJwPumJcFfvg?ZOIQU6wESv;Q!cA?Hjul*aZ4dXcm`n`=xQMSoIQ;M z_an-_iqQq1um*t?kg)SLoUZvuhyhW49VZ$I0lWXCZc06YZz22+KAiR3hX{&d0?A!F z2aP5qcP!C_<iu=`h7yt!l=y_?v?M;^7>!SuPku%cA|xLn(S+m*B%1KFMw2^`{CLDC zw5wHVcVLu86Q*f2VYWsSF4kzm&s4e}TwLTtQVSw9{;al?FcN_(BOIvF=|J+15TCG+ 
z=zZrp^4)kC%RXBjBl)48m7A(J)oiNW)VPV;oV__`^OEgrx3BSen~&I+^frIc?tcNP CAY45F diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/wheel.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/wheel.py index b04bfaef..1e2c7a02 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/wheel.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distlib/wheel.py @@ -26,7 +26,8 @@ import zipfile from . import __version__, DistlibException from .compat import sysconfig, ZipFile, fsdecode, text_type, filter from .database import InstalledDistribution -from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME +from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME, + LEGACY_METADATA_FILENAME) from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, cached_property, get_cache_base, read_exports, tempdir) from .version import NormalizedVersion, UnsupportedVersionError @@ -221,10 +222,12 @@ class Wheel(object): wheel_metadata = self.get_wheel_metadata(zf) wv = wheel_metadata['Wheel-Version'].split('.', 1) file_version = tuple([int(i) for i in wv]) - if file_version < (1, 1): - fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, 'METADATA'] - else: - fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME] + # if file_version < (1, 1): + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, + # LEGACY_METADATA_FILENAME] + # else: + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME] + fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME] result = None for fn in fns: try: @@ -299,10 +302,9 @@ class Wheel(object): return hash_kind, result def write_record(self, records, record_path, base): - records = list(records) # make a copy for sorting + records = list(records) # make a copy, as mutated p = to_posix(os.path.relpath(record_path, base)) records.append((p, '', '')) - records.sort() with 
CSVWriter(record_path) as writer: for row in records: writer.writerow(row) @@ -425,6 +427,18 @@ class Wheel(object): ap = to_posix(os.path.join(info_dir, 'WHEEL')) archive_paths.append((ap, p)) + # sort the entries by archive path. Not needed by any spec, but it + # keeps the archive listing and RECORD tidier than they would otherwise + # be. Use the number of path segments to keep directory entries together, + # and keep the dist-info stuff at the end. + def sorter(t): + ap = t[0] + n = ap.count('/') + if '.dist-info' in ap: + n += 10000 + return (n, ap) + archive_paths = sorted(archive_paths, key=sorter) + # Now, at last, RECORD. # Paths in here are archive paths - nothing else makes sense. self.write_records((distinfo, info_dir), libdir, archive_paths) @@ -433,6 +447,22 @@ class Wheel(object): self.build_zip(pathname, archive_paths) return pathname + def skip_entry(self, arcname): + """ + Determine whether an archive entry should be skipped when verifying + or installing. + """ + # The signature file won't be in RECORD, + # and we don't currently don't do anything with it + # We also skip directories, as they won't be in RECORD + # either. See: + # + # https://github.com/pypa/wheel/issues/294 + # https://github.com/pypa/wheel/issues/287 + # https://github.com/pypa/wheel/pull/289 + # + return arcname.endswith(('/', '/RECORD.jws')) + def install(self, paths, maker, **kwargs): """ Install a wheel to the specified paths. 
If kwarg ``warner`` is @@ -460,7 +490,7 @@ class Wheel(object): data_dir = '%s.data' % name_ver info_dir = '%s.dist-info' % name_ver - metadata_name = posixpath.join(info_dir, METADATA_FILENAME) + metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') record_name = posixpath.join(info_dir, 'RECORD') @@ -514,9 +544,7 @@ class Wheel(object): u_arcname = arcname else: u_arcname = arcname.decode('utf-8') - # The signature file won't be in RECORD, - # and we don't currently don't do anything with it - if u_arcname.endswith('/RECORD.jws'): + if self.skip_entry(u_arcname): continue row = records[u_arcname] if row[2] and str(zinfo.file_size) != row[2]: @@ -605,7 +633,7 @@ class Wheel(object): for v in epdata[k].values(): s = '%s:%s' % (v.prefix, v.suffix) if v.flags: - s += ' %s' % v.flags + s += ' [%s]' % ','.join(v.flags) d[v.name] = s except Exception: logger.warning('Unable to read legacy script ' @@ -670,7 +698,7 @@ class Wheel(object): if cache is None: # Use native string to avoid issues on 2.x: see Python #20140. base = os.path.join(get_cache_base(), str('dylib-cache'), - sys.version[:3]) + '%s.%s' % sys.version_info[:2]) cache = Cache(base) return cache @@ -759,7 +787,7 @@ class Wheel(object): data_dir = '%s.data' % name_ver info_dir = '%s.dist-info' % name_ver - metadata_name = posixpath.join(info_dir, METADATA_FILENAME) + metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') record_name = posixpath.join(info_dir, 'RECORD') @@ -786,13 +814,15 @@ class Wheel(object): u_arcname = arcname else: u_arcname = arcname.decode('utf-8') - if '..' in u_arcname: + # See issue #115: some wheels have .. in their entries, but + # in the filename ... e.g. __main__..py ! So the check is + # updated to look for .. in the directory portions + p = u_arcname.split('/') + if '..' 
in p: raise DistlibException('invalid entry in ' 'wheel: %r' % u_arcname) - # The signature file won't be in RECORD, - # and we don't currently don't do anything with it - if u_arcname.endswith('/RECORD.jws'): + if self.skip_entry(u_arcname): continue row = records[u_arcname] if row[2] and str(zinfo.file_size) != row[2]: @@ -826,7 +856,7 @@ class Wheel(object): def get_version(path_map, info_dir): version = path = None - key = '%s/%s' % (info_dir, METADATA_FILENAME) + key = '%s/%s' % (info_dir, LEGACY_METADATA_FILENAME) if key not in path_map: key = '%s/PKG-INFO' % info_dir if key in path_map: @@ -852,7 +882,7 @@ class Wheel(object): if updated: md = Metadata(path=path) md.version = updated - legacy = not path.endswith(METADATA_FILENAME) + legacy = path.endswith(LEGACY_METADATA_FILENAME) md.write(path=path, legacy=legacy) logger.debug('Version updated from %r to %r', version, updated) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distro.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distro.py index aa4defc3..0611b62a 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distro.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/distro.py @@ -17,12 +17,12 @@ The ``distro`` package (``distro`` stands for Linux Distribution) provides information about the Linux distribution it runs on, such as a reliable machine-readable distro ID, or version information. -It is a renewed alternative implementation for Python's original +It is the recommended replacement for Python's original :py:func:`platform.linux_distribution` function, but it provides much more functionality. An alternative implementation became necessary because Python -3.5 deprecated this function, and Python 3.7 is expected to remove it -altogether. Its predecessor function :py:func:`platform.dist` was already -deprecated since Python 2.6 and is also expected to be removed in Python 3.7. 
+3.5 deprecated this function, and Python 3.8 will remove it altogether. +Its predecessor function :py:func:`platform.dist` was already +deprecated since Python 2.6 and will also be removed in Python 3.8. Still, there are many cases in which access to OS distribution information is needed. See `Python issue 1322 <https://bugs.python.org/issue1322>`_ for more information. @@ -48,7 +48,9 @@ _OS_RELEASE_BASENAME = 'os-release' #: with blanks translated to underscores. #: #: * Value: Normalized value. -NORMALIZED_OS_ID = {} +NORMALIZED_OS_ID = { + 'ol': 'oracle', # Oracle Linux +} #: Translation table for normalizing the "Distributor ID" attribute returned by #: the lsb_release command, for use by the :func:`distro.id` method. @@ -58,9 +60,11 @@ NORMALIZED_OS_ID = {} #: #: * Value: Normalized value. NORMALIZED_LSB_ID = { - 'enterpriseenterprise': 'oracle', # Oracle Enterprise Linux + 'enterpriseenterpriseas': 'oracle', # Oracle Enterprise Linux 4 + 'enterpriseenterpriseserver': 'oracle', # Oracle Linux 5 'redhatenterpriseworkstation': 'rhel', # RHEL 6, 7 Workstation 'redhatenterpriseserver': 'rhel', # RHEL 6, 7 Server + 'redhatenterprisecomputenode': 'rhel', # RHEL 6 ComputeNode } #: Translation table for normalizing the distro ID derived from the file name @@ -88,7 +92,8 @@ _DISTRO_RELEASE_IGNORE_BASENAMES = ( 'lsb-release', 'oem-release', _OS_RELEASE_BASENAME, - 'system-release' + 'system-release', + 'plesk-release', ) @@ -161,6 +166,7 @@ def id(): "openbsd" OpenBSD "netbsd" NetBSD "freebsd" FreeBSD + "midnightbsd" MidnightBSD ============== ========================================= If you have a need to get distros for reliable IDs added into this set, @@ -607,7 +613,7 @@ class LinuxDistribution(object): distro release file can be found, the data source for the distro release file will be empty. - * ``include_name`` (bool): Controls whether uname command output is + * ``include_uname`` (bool): Controls whether uname command output is included as a data source. 
If the uname command is not available in the program execution path the data source for the uname command will be empty. @@ -755,7 +761,7 @@ class LinuxDistribution(object): version = v break if pretty and version and self.codename(): - version = u'{0} ({1})'.format(version, self.codename()) + version = '{0} ({1})'.format(version, self.codename()) return version def version_parts(self, best=False): @@ -812,10 +818,14 @@ class LinuxDistribution(object): For details, see :func:`distro.codename`. """ - return self.os_release_attr('codename') \ - or self.lsb_release_attr('codename') \ - or self.distro_release_attr('codename') \ - or '' + try: + # Handle os_release specially since distros might purposefully set + # this to empty string to have no codename + return self._os_release_info['codename'] + except KeyError: + return self.lsb_release_attr('codename') \ + or self.distro_release_attr('codename') \ + or '' def info(self, pretty=False, best=False): """ @@ -872,6 +882,7 @@ class LinuxDistribution(object): For details, see :func:`distro.uname_info`. """ + return self._uname_info def os_release_attr(self, attribute): """ @@ -960,26 +971,31 @@ class LinuxDistribution(object): # * commands or their arguments (not allowed in os-release) if '=' in token: k, v = token.split('=', 1) - if isinstance(v, bytes): - v = v.decode('utf-8') props[k.lower()] = v - if k == 'VERSION': - # this handles cases in which the codename is in - # the `(CODENAME)` (rhel, centos, fedora) format - # or in the `, CODENAME` format (Ubuntu). - codename = re.search(r'(\(\D+\))|,(\s+)?\D+', v) - if codename: - codename = codename.group() - codename = codename.strip('()') - codename = codename.strip(',') - codename = codename.strip() - # codename appears within paranthese. - props['codename'] = codename - else: - props['codename'] = '' else: # Ignore any tokens that are not variable assignments pass + + if 'version_codename' in props: + # os-release added a version_codename field. 
Use that in + # preference to anything else Note that some distros purposefully + # do not have code names. They should be setting + # version_codename="" + props['codename'] = props['version_codename'] + elif 'ubuntu_codename' in props: + # Same as above but a non-standard field name used on older Ubuntus + props['codename'] = props['ubuntu_codename'] + elif 'version' in props: + # If there is no version_codename, parse it from the version + codename = re.search(r'(\(\D+\))|,(\s+)?\D+', props['version']) + if codename: + codename = codename.group() + codename = codename.strip('()') + codename = codename.strip(',') + codename = codename.strip() + # codename appears within paranthese. + props['codename'] = codename + return props @cached_property @@ -998,7 +1014,7 @@ class LinuxDistribution(object): stdout = subprocess.check_output(cmd, stderr=devnull) except OSError: # Command not found return {} - content = stdout.decode(sys.getfilesystemencoding()).splitlines() + content = self._to_str(stdout).splitlines() return self._parse_lsb_release_content(content) @staticmethod @@ -1033,7 +1049,7 @@ class LinuxDistribution(object): stdout = subprocess.check_output(cmd, stderr=devnull) except OSError: return {} - content = stdout.decode(sys.getfilesystemencoding()).splitlines() + content = self._to_str(stdout).splitlines() return self._parse_uname_content(content) @staticmethod @@ -1053,6 +1069,20 @@ class LinuxDistribution(object): props['release'] = version return props + @staticmethod + def _to_str(text): + encoding = sys.getfilesystemencoding() + encoding = 'utf-8' if encoding == 'ascii' else encoding + + if sys.version_info[0] >= 3: + if isinstance(text, bytes): + return text.decode(encoding) + else: + if isinstance(text, unicode): # noqa + return text.encode(encoding) + + return text + @cached_property def _distro_release_info(self): """ @@ -1072,7 +1102,10 @@ class LinuxDistribution(object): # file), because we want to use what was specified as best as # possible. 
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) - if match: + if 'name' in distro_info \ + and 'cloudlinux' in distro_info['name'].lower(): + distro_info['id'] = 'cloudlinux' + elif match: distro_info['id'] = match.group(1) return distro_info else: @@ -1113,6 +1146,8 @@ class LinuxDistribution(object): # The name is always present if the pattern matches self.distro_release_file = filepath distro_info['id'] = match.group(1) + if 'cloudlinux' in distro_info['name'].lower(): + distro_info['id'] = 'cloudlinux' return distro_info return {} @@ -1150,8 +1185,6 @@ class LinuxDistribution(object): Returns: A dictionary containing all information items. """ - if isinstance(line, bytes): - line = line.decode('utf-8') matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match( line.strip()[::-1]) distro_info = {} diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/__init__.py index 04912349..d1d82f15 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/__init__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/__init__.py @@ -32,4 +32,4 @@ __all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder", # this has to be at the top level, see how setup.py parses this #: Distribution version number. 
-__version__ = "1.0.1" +__version__ = "1.1" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_ihatexml.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_ihatexml.py index 4c77717b..3ff803c1 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_ihatexml.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_ihatexml.py @@ -136,6 +136,7 @@ def normaliseCharList(charList): i += j return rv + # We don't really support characters above the BMP :( max_unicode = int("FFFF", 16) @@ -254,7 +255,7 @@ class InfosetFilter(object): nameRest = name[1:] m = nonXmlNameFirstBMPRegexp.match(nameFirst) if m: - warnings.warn("Coercing non-XML name", DataLossWarning) + warnings.warn("Coercing non-XML name: %s" % name, DataLossWarning) nameFirstOutput = self.getReplacementCharacter(nameFirst) else: nameFirstOutput = nameFirst @@ -262,7 +263,7 @@ class InfosetFilter(object): nameRestOutput = nameRest replaceChars = set(nonXmlNameBMPRegexp.findall(nameRest)) for char in replaceChars: - warnings.warn("Coercing non-XML name", DataLossWarning) + warnings.warn("Coercing non-XML name: %s" % name, DataLossWarning) replacement = self.getReplacementCharacter(char) nameRestOutput = nameRestOutput.replace(char, replacement) return nameFirstOutput + nameRestOutput diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_inputstream.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_inputstream.py index a65e55f6..e0bb3760 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_inputstream.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_inputstream.py @@ -1,10 +1,11 @@ from __future__ import absolute_import, division, unicode_literals -from pip._vendor.six import text_type, binary_type +from pip._vendor.six 
import text_type from pip._vendor.six.moves import http_client, urllib import codecs import re +from io import BytesIO, StringIO from pip._vendor import webencodings @@ -12,13 +13,6 @@ from .constants import EOF, spaceCharacters, asciiLetters, asciiUppercase from .constants import _ReparseException from . import _utils -from io import StringIO - -try: - from io import BytesIO -except ImportError: - BytesIO = StringIO - # Non-unicode versions of constants for use in the pre-parser spaceCharactersBytes = frozenset([item.encode("ascii") for item in spaceCharacters]) asciiLettersBytes = frozenset([item.encode("ascii") for item in asciiLetters]) @@ -40,13 +34,13 @@ if _utils.supports_lone_surrogates: else: invalid_unicode_re = re.compile(invalid_unicode_no_surrogate) -non_bmp_invalid_codepoints = set([0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE, - 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF, - 0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE, - 0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF, - 0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE, - 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF, - 0x10FFFE, 0x10FFFF]) +non_bmp_invalid_codepoints = {0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE, + 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF, + 0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE, + 0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF, + 0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE, + 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF, + 0x10FFFE, 0x10FFFF} ascii_punctuation_re = re.compile("[\u0009-\u000D\u0020-\u002F\u003A-\u0040\u005C\u005B-\u0060\u007B-\u007E]") @@ -367,7 +361,7 @@ class HTMLUnicodeInputStream(object): def unget(self, char): # Only one character is allowed to be ungotten at once - it must # be consumed again before any further call to unget - if char is not None: + if char is not EOF: if self.chunkOffset == 0: # unget is called quite rarely, so it's a good idea to do # more work here if it saves a bit of work in the frequently @@ -449,7 +443,7 @@ class 
HTMLBinaryInputStream(HTMLUnicodeInputStream): try: stream.seek(stream.tell()) - except: # pylint:disable=bare-except + except Exception: stream = BufferedStream(stream) return stream @@ -461,7 +455,7 @@ class HTMLBinaryInputStream(HTMLUnicodeInputStream): if charEncoding[0] is not None: return charEncoding - # If we've been overriden, we've been overriden + # If we've been overridden, we've been overridden charEncoding = lookupEncoding(self.override_encoding), "certain" if charEncoding[0] is not None: return charEncoding @@ -664,9 +658,7 @@ class EncodingBytes(bytes): """Look for a sequence of bytes at the start of a string. If the bytes are found return True and advance the position to the byte after the match. Otherwise return False and leave the position alone""" - p = self.position - data = self[p:p + len(bytes)] - rv = data.startswith(bytes) + rv = self.startswith(bytes, self.position) if rv: self.position += len(bytes) return rv @@ -674,15 +666,11 @@ class EncodingBytes(bytes): def jumpTo(self, bytes): """Look for the next sequence of bytes matching a given sequence. If a match is found advance the position to the last byte of the match""" - newPosition = self[self.position:].find(bytes) - if newPosition > -1: - # XXX: This is ugly, but I can't see a nicer way to fix this. 
- if self._position == -1: - self._position = 0 - self._position += (newPosition + len(bytes) - 1) - return True - else: + try: + self._position = self.index(bytes, self.position) + len(bytes) - 1 + except ValueError: raise StopIteration + return True class EncodingParser(object): @@ -694,6 +682,9 @@ class EncodingParser(object): self.encoding = None def getEncoding(self): + if b"<meta" not in self.data: + return None + methodDispatch = ( (b"<!--", self.handleComment), (b"<meta", self.handleMeta), @@ -703,6 +694,10 @@ class EncodingParser(object): (b"<", self.handlePossibleStartTag)) for _ in self.data: keepParsing = True + try: + self.data.jumpTo(b"<") + except StopIteration: + break for key, method in methodDispatch: if self.data.matchBytes(key): try: @@ -908,7 +903,7 @@ class ContentAttrParser(object): def lookupEncoding(encoding): """Return the python codec name corresponding to an encoding or None if the string doesn't correspond to a valid encoding.""" - if isinstance(encoding, binary_type): + if isinstance(encoding, bytes): try: encoding = encoding.decode("ascii") except UnicodeDecodeError: diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_tokenizer.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_tokenizer.py index 178f6e7f..5f00253e 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_tokenizer.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_tokenizer.py @@ -2,7 +2,8 @@ from __future__ import absolute_import, division, unicode_literals from pip._vendor.six import unichr as chr -from collections import deque +from collections import deque, OrderedDict +from sys import version_info from .constants import spaceCharacters from .constants import entities @@ -17,6 +18,11 @@ from ._trie import Trie entitiesTrie = Trie(entities) +if version_info >= (3, 7): + attributeMap = dict +else: + attributeMap = 
OrderedDict + class HTMLTokenizer(object): """ This class takes care of tokenizing HTML. @@ -228,6 +234,14 @@ class HTMLTokenizer(object): # Add token to the queue to be yielded if (token["type"] in tagTokenTypes): token["name"] = token["name"].translate(asciiUpper2Lower) + if token["type"] == tokenTypes["StartTag"]: + raw = token["data"] + data = attributeMap(raw) + if len(raw) > len(data): + # we had some duplicated attribute, fix so first wins + data.update(raw[::-1]) + token["data"] = data + if token["type"] == tokenTypes["EndTag"]: if token["data"]: self.tokenQueue.append({"type": tokenTypes["ParseError"], diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_trie/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_trie/__init__.py index a5ba4bf1..07bad5d3 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_trie/__init__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_trie/__init__.py @@ -1,14 +1,5 @@ from __future__ import absolute_import, division, unicode_literals -from .py import Trie as PyTrie +from .py import Trie -Trie = PyTrie - -# pylint:disable=wrong-import-position -try: - from .datrie import Trie as DATrie -except ImportError: - pass -else: - Trie = DATrie -# pylint:enable=wrong-import-position +__all__ = ["Trie"] diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_trie/_base.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_trie/_base.py index a1158bbb..6b71975f 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_trie/_base.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_trie/_base.py @@ -1,6 +1,9 @@ from __future__ import absolute_import, division, unicode_literals -from collections import Mapping +try: + from collections.abc 
import Mapping +except ImportError: # Python 2.7 + from collections import Mapping class Trie(Mapping): diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_trie/datrie.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_trie/datrie.py deleted file mode 100644 index e2e5f866..00000000 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_trie/datrie.py +++ /dev/null @@ -1,44 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals - -from datrie import Trie as DATrie -from pip._vendor.six import text_type - -from ._base import Trie as ABCTrie - - -class Trie(ABCTrie): - def __init__(self, data): - chars = set() - for key in data.keys(): - if not isinstance(key, text_type): - raise TypeError("All keys must be strings") - for char in key: - chars.add(char) - - self._data = DATrie("".join(chars)) - for key, value in data.items(): - self._data[key] = value - - def __contains__(self, key): - return key in self._data - - def __len__(self): - return len(self._data) - - def __iter__(self): - raise NotImplementedError() - - def __getitem__(self, key): - return self._data[key] - - def keys(self, prefix=None): - return self._data.keys(prefix) - - def has_keys_with_prefix(self, prefix): - return self._data.has_keys_with_prefix(prefix) - - def longest_prefix(self, prefix): - return self._data.longest_prefix(prefix) - - def longest_prefix_item(self, prefix): - return self._data.longest_prefix_item(prefix) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_utils.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_utils.py index 0703afb3..d7c4926a 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_utils.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/_utils.py @@ -2,12 +2,20 @@ from __future__ import 
absolute_import, division, unicode_literals from types import ModuleType -from pip._vendor.six import text_type - try: - import xml.etree.cElementTree as default_etree + from collections.abc import Mapping except ImportError: + from collections import Mapping + +from pip._vendor.six import text_type, PY3 + +if PY3: import xml.etree.ElementTree as default_etree +else: + try: + import xml.etree.cElementTree as default_etree + except ImportError: + import xml.etree.ElementTree as default_etree __all__ = ["default_etree", "MethodDispatcher", "isSurrogatePair", @@ -27,7 +35,7 @@ try: # We need this with u"" because of http://bugs.jython.org/issue2039 _x = eval('u"\\uD800"') # pylint:disable=eval-used assert isinstance(_x, text_type) -except: # pylint:disable=bare-except +except Exception: supports_lone_surrogates = False else: supports_lone_surrogates = True @@ -47,9 +55,6 @@ class MethodDispatcher(dict): """ def __init__(self, items=()): - # Using _dictEntries instead of directly assigning to self is about - # twice as fast. Please do careful performance testing before changing - # anything here. 
_dictEntries = [] for name, value in items: if isinstance(name, (list, tuple, frozenset, set)): @@ -64,6 +69,36 @@ class MethodDispatcher(dict): def __getitem__(self, key): return dict.get(self, key, self.default) + def __get__(self, instance, owner=None): + return BoundMethodDispatcher(instance, self) + + +class BoundMethodDispatcher(Mapping): + """Wraps a MethodDispatcher, binding its return values to `instance`""" + def __init__(self, instance, dispatcher): + self.instance = instance + self.dispatcher = dispatcher + + def __getitem__(self, key): + # see https://docs.python.org/3/reference/datamodel.html#object.__get__ + # on a function, __get__ is used to bind a function to an instance as a bound method + return self.dispatcher[key].__get__(self.instance) + + def get(self, key, default): + if key in self.dispatcher: + return self[key] + else: + return default + + def __iter__(self): + return iter(self.dispatcher) + + def __len__(self): + return len(self.dispatcher) + + def __contains__(self, key): + return key in self.dispatcher + # Some utility functions to deal with weirdness around UCS2 vs UCS4 # python builds diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/constants.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/constants.py index 1ff80419..fe3e237c 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/constants.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/constants.py @@ -519,8 +519,8 @@ adjustForeignAttributes = { "xmlns:xlink": ("xmlns", "xlink", namespaces["xmlns"]) } -unadjustForeignAttributes = dict([((ns, local), qname) for qname, (prefix, local, ns) in - adjustForeignAttributes.items()]) +unadjustForeignAttributes = {(ns, local): qname for qname, (prefix, local, ns) in + adjustForeignAttributes.items()} spaceCharacters = frozenset([ "\t", @@ -544,8 +544,7 @@ asciiLetters = 
frozenset(string.ascii_letters) digits = frozenset(string.digits) hexDigits = frozenset(string.hexdigits) -asciiUpper2Lower = dict([(ord(c), ord(c.lower())) - for c in string.ascii_uppercase]) +asciiUpper2Lower = {ord(c): ord(c.lower()) for c in string.ascii_uppercase} # Heading elements need to be ordered headingElements = ( @@ -2934,7 +2933,7 @@ tagTokenTypes = frozenset([tokenTypes["StartTag"], tokenTypes["EndTag"], tokenTypes["EmptyTag"]]) -prefixes = dict([(v, k) for k, v in namespaces.items()]) +prefixes = {v: k for k, v in namespaces.items()} prefixes["http://www.w3.org/1998/Math/MathML"] = "math" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/filters/sanitizer.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/filters/sanitizer.py index af8e77b8..aa7431d1 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/filters/sanitizer.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/filters/sanitizer.py @@ -1,6 +1,15 @@ +"""Deprecated from html5lib 1.1. + +See `here <https://github.com/html5lib/html5lib-python/issues/443>`_ for +information about its deprecation; `Bleach <https://github.com/mozilla/bleach>`_ +is recommended as a replacement. Please let us know in the aforementioned issue +if Bleach is unsuitable for your needs. 
+ +""" from __future__ import absolute_import, division, unicode_literals import re +import warnings from xml.sax.saxutils import escape, unescape from pip._vendor.six.moves import urllib_parse as urlparse @@ -11,6 +20,14 @@ from ..constants import namespaces, prefixes __all__ = ["Filter"] +_deprecation_msg = ( + "html5lib's sanitizer is deprecated; see " + + "https://github.com/html5lib/html5lib-python/issues/443 and please let " + + "us know if Bleach is unsuitable for your needs" +) + +warnings.warn(_deprecation_msg, DeprecationWarning) + allowed_elements = frozenset(( (namespaces['html'], 'a'), (namespaces['html'], 'abbr'), @@ -750,6 +767,9 @@ class Filter(base.Filter): """ super(Filter, self).__init__(source) + + warnings.warn(_deprecation_msg, DeprecationWarning) + self.allowed_elements = allowed_elements self.allowed_attributes = allowed_attributes self.allowed_css_properties = allowed_css_properties diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/html5parser.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/html5parser.py index ae41a133..d06784f3 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/html5parser.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/html5parser.py @@ -2,7 +2,6 @@ from __future__ import absolute_import, division, unicode_literals from pip._vendor.six import with_metaclass, viewkeys import types -from collections import OrderedDict from . import _inputstream from . 
import _tokenizer @@ -119,8 +118,8 @@ class HTMLParser(object): self.tree = tree(namespaceHTMLElements) self.errors = [] - self.phases = dict([(name, cls(self, self.tree)) for name, cls in - getPhases(debug).items()]) + self.phases = {name: cls(self, self.tree) for name, cls in + getPhases(debug).items()} def _parse(self, stream, innerHTML=False, container="div", scripting=False, **kwargs): @@ -202,7 +201,7 @@ class HTMLParser(object): DoctypeToken = tokenTypes["Doctype"] ParseErrorToken = tokenTypes["ParseError"] - for token in self.normalizedTokens(): + for token in self.tokenizer: prev_token = None new_token = token while new_token is not None: @@ -260,10 +259,6 @@ class HTMLParser(object): if reprocess: assert self.phase not in phases - def normalizedTokens(self): - for token in self.tokenizer: - yield self.normalizeToken(token) - def parse(self, stream, *args, **kwargs): """Parse a HTML document into a well-formed tree @@ -325,17 +320,6 @@ class HTMLParser(object): if self.strict: raise ParseError(E[errorcode] % datavars) - def normalizeToken(self, token): - # HTML5 specific normalizations to the token stream - if token["type"] == tokenTypes["StartTag"]: - raw = token["data"] - token["data"] = OrderedDict(raw) - if len(raw) > len(token["data"]): - # we had some duplicated attribute, fix so first wins - token["data"].update(raw[::-1]) - - return token - def adjustMathMLAttributes(self, token): adjust_attributes(token, adjustMathMLAttributes) @@ -413,16 +397,12 @@ class HTMLParser(object): def getPhases(debug): def log(function): """Logger that records which phase processes each token""" - type_names = dict((value, key) for key, value in - tokenTypes.items()) + type_names = {value: key for key, value in tokenTypes.items()} def wrapped(self, *args, **kwargs): if function.__name__.startswith("process") and len(args) > 0: token = args[0] - try: - info = {"type": type_names[token['type']]} - except: - raise + info = {"type": type_names[token['type']]} if 
token['type'] in tagTokenTypes: info["name"] = token['name'] @@ -446,10 +426,13 @@ def getPhases(debug): class Phase(with_metaclass(getMetaclass(debug, log))): """Base class for helper object that implements each phase of processing """ + __slots__ = ("parser", "tree", "__startTagCache", "__endTagCache") def __init__(self, parser, tree): self.parser = parser self.tree = tree + self.__startTagCache = {} + self.__endTagCache = {} def processEOF(self): raise NotImplementedError @@ -469,7 +452,21 @@ def getPhases(debug): self.tree.insertText(token["data"]) def processStartTag(self, token): - return self.startTagHandler[token["name"]](token) + # Note the caching is done here rather than BoundMethodDispatcher as doing it there + # requires a circular reference to the Phase, and this ends up with a significant + # (CPython 2.7, 3.8) GC cost when parsing many short inputs + name = token["name"] + # In Py2, using `in` is quicker in general than try/except KeyError + # In Py3, `in` is quicker when there are few cache hits (typically short inputs) + if name in self.__startTagCache: + func = self.__startTagCache[name] + else: + func = self.__startTagCache[name] = self.startTagHandler[name] + # bound the cache size in case we get loads of unknown tags + while len(self.__startTagCache) > len(self.startTagHandler) * 1.1: + # this makes the eviction policy random on Py < 3.7 and FIFO >= 3.7 + self.__startTagCache.pop(next(iter(self.__startTagCache))) + return func(token) def startTagHtml(self, token): if not self.parser.firstStartTag and token["name"] == "html": @@ -482,9 +479,25 @@ def getPhases(debug): self.parser.firstStartTag = False def processEndTag(self, token): - return self.endTagHandler[token["name"]](token) + # Note the caching is done here rather than BoundMethodDispatcher as doing it there + # requires a circular reference to the Phase, and this ends up with a significant + # (CPython 2.7, 3.8) GC cost when parsing many short inputs + name = token["name"] + # In Py2, 
using `in` is quicker in general than try/except KeyError + # In Py3, `in` is quicker when there are few cache hits (typically short inputs) + if name in self.__endTagCache: + func = self.__endTagCache[name] + else: + func = self.__endTagCache[name] = self.endTagHandler[name] + # bound the cache size in case we get loads of unknown tags + while len(self.__endTagCache) > len(self.endTagHandler) * 1.1: + # this makes the eviction policy random on Py < 3.7 and FIFO >= 3.7 + self.__endTagCache.pop(next(iter(self.__endTagCache))) + return func(token) class InitialPhase(Phase): + __slots__ = tuple() + def processSpaceCharacters(self, token): pass @@ -613,6 +626,8 @@ def getPhases(debug): return True class BeforeHtmlPhase(Phase): + __slots__ = tuple() + # helper methods def insertHtmlElement(self): self.tree.insertRoot(impliedTagToken("html", "StartTag")) @@ -648,19 +663,7 @@ def getPhases(debug): return token class BeforeHeadPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("head", self.startTagHead) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - (("head", "body", "html", "br"), self.endTagImplyHead) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() def processEOF(self): self.startTagHead(impliedTagToken("head", "StartTag")) @@ -693,28 +696,19 @@ def getPhases(debug): self.parser.parseError("end-tag-after-implied-root", {"name": token["name"]}) + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml), + ("head", startTagHead) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + (("head", "body", "html", "br"), endTagImplyHead) + ]) + endTagHandler.default = endTagOther + class InHeadPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = 
_utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("title", self.startTagTitle), - (("noframes", "style"), self.startTagNoFramesStyle), - ("noscript", self.startTagNoscript), - ("script", self.startTagScript), - (("base", "basefont", "bgsound", "command", "link"), - self.startTagBaseLinkCommand), - ("meta", self.startTagMeta), - ("head", self.startTagHead) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("head", self.endTagHead), - (("br", "html", "body"), self.endTagHtmlBodyBr) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() # the real thing def processEOF(self): @@ -796,22 +790,27 @@ def getPhases(debug): def anythingElse(self): self.endTagHead(impliedTagToken("head")) - class InHeadNoscriptPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml), + ("title", startTagTitle), + (("noframes", "style"), startTagNoFramesStyle), + ("noscript", startTagNoscript), + ("script", startTagScript), + (("base", "basefont", "bgsound", "command", "link"), + startTagBaseLinkCommand), + ("meta", startTagMeta), + ("head", startTagHead) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("head", endTagHead), + (("br", "html", "body"), endTagHtmlBodyBr) + ]) + endTagHandler.default = endTagOther - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - (("basefont", "bgsound", "link", "meta", "noframes", "style"), self.startTagBaseLinkCommand), - (("head", "noscript"), self.startTagHeadNoscript), - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("noscript", self.endTagNoscript), - ("br", self.endTagBr), - ]) - self.endTagHandler.default = self.endTagOther + class InHeadNoscriptPhase(Phase): + __slots__ = tuple() def processEOF(self): 
self.parser.parseError("eof-in-head-noscript") @@ -860,23 +859,21 @@ def getPhases(debug): # Caller must raise parse error first! self.endTagNoscript(impliedTagToken("noscript")) + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml), + (("basefont", "bgsound", "link", "meta", "noframes", "style"), startTagBaseLinkCommand), + (("head", "noscript"), startTagHeadNoscript), + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("noscript", endTagNoscript), + ("br", endTagBr), + ]) + endTagHandler.default = endTagOther + class AfterHeadPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("body", self.startTagBody), - ("frameset", self.startTagFrameset), - (("base", "basefont", "bgsound", "link", "meta", "noframes", "script", - "style", "title"), - self.startTagFromHead), - ("head", self.startTagHead) - ]) - self.startTagHandler.default = self.startTagOther - self.endTagHandler = _utils.MethodDispatcher([(("body", "html", "br"), - self.endTagHtmlBodyBr)]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() def processEOF(self): self.anythingElse() @@ -927,80 +924,30 @@ def getPhases(debug): self.parser.phase = self.parser.phases["inBody"] self.parser.framesetOK = True + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml), + ("body", startTagBody), + ("frameset", startTagFrameset), + (("base", "basefont", "bgsound", "link", "meta", "noframes", "script", + "style", "title"), + startTagFromHead), + ("head", startTagHead) + ]) + startTagHandler.default = startTagOther + endTagHandler = _utils.MethodDispatcher([(("body", "html", "br"), + endTagHtmlBodyBr)]) + endTagHandler.default = endTagOther + class InBodyPhase(Phase): # http://www.whatwg.org/specs/web-apps/current-work/#parsing-main-inbody # the really-really-really-very crazy mode - def __init__(self, parser, 
tree): - Phase.__init__(self, parser, tree) + __slots__ = ("processSpaceCharacters",) + def __init__(self, *args, **kwargs): + super(InBodyPhase, self).__init__(*args, **kwargs) # Set this to the default handler self.processSpaceCharacters = self.processSpaceCharactersNonPre - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - (("base", "basefont", "bgsound", "command", "link", "meta", - "script", "style", "title"), - self.startTagProcessInHead), - ("body", self.startTagBody), - ("frameset", self.startTagFrameset), - (("address", "article", "aside", "blockquote", "center", "details", - "dir", "div", "dl", "fieldset", "figcaption", "figure", - "footer", "header", "hgroup", "main", "menu", "nav", "ol", "p", - "section", "summary", "ul"), - self.startTagCloseP), - (headingElements, self.startTagHeading), - (("pre", "listing"), self.startTagPreListing), - ("form", self.startTagForm), - (("li", "dd", "dt"), self.startTagListItem), - ("plaintext", self.startTagPlaintext), - ("a", self.startTagA), - (("b", "big", "code", "em", "font", "i", "s", "small", "strike", - "strong", "tt", "u"), self.startTagFormatting), - ("nobr", self.startTagNobr), - ("button", self.startTagButton), - (("applet", "marquee", "object"), self.startTagAppletMarqueeObject), - ("xmp", self.startTagXmp), - ("table", self.startTagTable), - (("area", "br", "embed", "img", "keygen", "wbr"), - self.startTagVoidFormatting), - (("param", "source", "track"), self.startTagParamSource), - ("input", self.startTagInput), - ("hr", self.startTagHr), - ("image", self.startTagImage), - ("isindex", self.startTagIsIndex), - ("textarea", self.startTagTextarea), - ("iframe", self.startTagIFrame), - ("noscript", self.startTagNoscript), - (("noembed", "noframes"), self.startTagRawtext), - ("select", self.startTagSelect), - (("rp", "rt"), self.startTagRpRt), - (("option", "optgroup"), self.startTagOpt), - (("math"), self.startTagMath), - (("svg"), self.startTagSvg), - (("caption", "col", 
"colgroup", "frame", "head", - "tbody", "td", "tfoot", "th", "thead", - "tr"), self.startTagMisplaced) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("body", self.endTagBody), - ("html", self.endTagHtml), - (("address", "article", "aside", "blockquote", "button", "center", - "details", "dialog", "dir", "div", "dl", "fieldset", "figcaption", "figure", - "footer", "header", "hgroup", "listing", "main", "menu", "nav", "ol", "pre", - "section", "summary", "ul"), self.endTagBlock), - ("form", self.endTagForm), - ("p", self.endTagP), - (("dd", "dt", "li"), self.endTagListItem), - (headingElements, self.endTagHeading), - (("a", "b", "big", "code", "em", "font", "i", "nobr", "s", "small", - "strike", "strong", "tt", "u"), self.endTagFormatting), - (("applet", "marquee", "object"), self.endTagAppletMarqueeObject), - ("br", self.endTagBr), - ]) - self.endTagHandler.default = self.endTagOther - def isMatchingFormattingElement(self, node1, node2): return (node1.name == node2.name and node1.namespace == node2.namespace and @@ -1650,14 +1597,73 @@ def getPhases(debug): self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) break + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + (("base", "basefont", "bgsound", "command", "link", "meta", + "script", "style", "title"), + startTagProcessInHead), + ("body", startTagBody), + ("frameset", startTagFrameset), + (("address", "article", "aside", "blockquote", "center", "details", + "dir", "div", "dl", "fieldset", "figcaption", "figure", + "footer", "header", "hgroup", "main", "menu", "nav", "ol", "p", + "section", "summary", "ul"), + startTagCloseP), + (headingElements, startTagHeading), + (("pre", "listing"), startTagPreListing), + ("form", startTagForm), + (("li", "dd", "dt"), startTagListItem), + ("plaintext", startTagPlaintext), + ("a", startTagA), + (("b", "big", "code", "em", "font", "i", "s", "small", "strike", + "strong", 
"tt", "u"), startTagFormatting), + ("nobr", startTagNobr), + ("button", startTagButton), + (("applet", "marquee", "object"), startTagAppletMarqueeObject), + ("xmp", startTagXmp), + ("table", startTagTable), + (("area", "br", "embed", "img", "keygen", "wbr"), + startTagVoidFormatting), + (("param", "source", "track"), startTagParamSource), + ("input", startTagInput), + ("hr", startTagHr), + ("image", startTagImage), + ("isindex", startTagIsIndex), + ("textarea", startTagTextarea), + ("iframe", startTagIFrame), + ("noscript", startTagNoscript), + (("noembed", "noframes"), startTagRawtext), + ("select", startTagSelect), + (("rp", "rt"), startTagRpRt), + (("option", "optgroup"), startTagOpt), + (("math"), startTagMath), + (("svg"), startTagSvg), + (("caption", "col", "colgroup", "frame", "head", + "tbody", "td", "tfoot", "th", "thead", + "tr"), startTagMisplaced) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("body", endTagBody), + ("html", endTagHtml), + (("address", "article", "aside", "blockquote", "button", "center", + "details", "dialog", "dir", "div", "dl", "fieldset", "figcaption", "figure", + "footer", "header", "hgroup", "listing", "main", "menu", "nav", "ol", "pre", + "section", "summary", "ul"), endTagBlock), + ("form", endTagForm), + ("p", endTagP), + (("dd", "dt", "li"), endTagListItem), + (headingElements, endTagHeading), + (("a", "b", "big", "code", "em", "font", "i", "nobr", "s", "small", + "strike", "strong", "tt", "u"), endTagFormatting), + (("applet", "marquee", "object"), endTagAppletMarqueeObject), + ("br", endTagBr), + ]) + endTagHandler.default = endTagOther + class TextPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - self.startTagHandler = _utils.MethodDispatcher([]) - self.startTagHandler.default = self.startTagOther - self.endTagHandler = _utils.MethodDispatcher([ - ("script", self.endTagScript)]) - self.endTagHandler.default = self.endTagOther + __slots__ 
= tuple() def processCharacters(self, token): self.tree.insertText(token["data"]) @@ -1683,30 +1689,15 @@ def getPhases(debug): self.tree.openElements.pop() self.parser.phase = self.parser.originalPhase + startTagHandler = _utils.MethodDispatcher([]) + startTagHandler.default = startTagOther + endTagHandler = _utils.MethodDispatcher([ + ("script", endTagScript)]) + endTagHandler.default = endTagOther + class InTablePhase(Phase): # http://www.whatwg.org/specs/web-apps/current-work/#in-table - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("caption", self.startTagCaption), - ("colgroup", self.startTagColgroup), - ("col", self.startTagCol), - (("tbody", "tfoot", "thead"), self.startTagRowGroup), - (("td", "th", "tr"), self.startTagImplyTbody), - ("table", self.startTagTable), - (("style", "script"), self.startTagStyleScript), - ("input", self.startTagInput), - ("form", self.startTagForm) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("table", self.endTagTable), - (("body", "caption", "col", "colgroup", "html", "tbody", "td", - "tfoot", "th", "thead", "tr"), self.endTagIgnore) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() # helper methods def clearStackToTableContext(self): @@ -1828,9 +1819,32 @@ def getPhases(debug): self.parser.phases["inBody"].processEndTag(token) self.tree.insertFromTable = False + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + ("caption", startTagCaption), + ("colgroup", startTagColgroup), + ("col", startTagCol), + (("tbody", "tfoot", "thead"), startTagRowGroup), + (("td", "th", "tr"), startTagImplyTbody), + ("table", startTagTable), + (("style", "script"), startTagStyleScript), + ("input", startTagInput), + ("form", startTagForm) + ]) + startTagHandler.default = startTagOther + + endTagHandler = 
_utils.MethodDispatcher([ + ("table", endTagTable), + (("body", "caption", "col", "colgroup", "html", "tbody", "td", + "tfoot", "th", "thead", "tr"), endTagIgnore) + ]) + endTagHandler.default = endTagOther + class InTableTextPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) + __slots__ = ("originalPhase", "characterTokens") + + def __init__(self, *args, **kwargs): + super(InTableTextPhase, self).__init__(*args, **kwargs) self.originalPhase = None self.characterTokens = [] @@ -1875,23 +1889,7 @@ def getPhases(debug): class InCaptionPhase(Phase): # http://www.whatwg.org/specs/web-apps/current-work/#in-caption - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th", - "thead", "tr"), self.startTagTableElement) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("caption", self.endTagCaption), - ("table", self.endTagTable), - (("body", "col", "colgroup", "html", "tbody", "td", "tfoot", "th", - "thead", "tr"), self.endTagIgnore) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() def ignoreEndTagCaption(self): return not self.tree.elementInScope("caption", variant="table") @@ -1944,23 +1942,24 @@ def getPhases(debug): def endTagOther(self, token): return self.parser.phases["inBody"].processEndTag(token) + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th", + "thead", "tr"), startTagTableElement) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("caption", endTagCaption), + ("table", endTagTable), + (("body", "col", "colgroup", "html", "tbody", "td", "tfoot", "th", + "thead", "tr"), endTagIgnore) + ]) + endTagHandler.default = endTagOther + class 
InColumnGroupPhase(Phase): # http://www.whatwg.org/specs/web-apps/current-work/#in-column - - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("col", self.startTagCol) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("colgroup", self.endTagColgroup), - ("col", self.endTagCol) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() def ignoreEndTagColgroup(self): return self.tree.openElements[-1].name == "html" @@ -2010,26 +2009,21 @@ def getPhases(debug): if not ignoreEndTag: return token + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + ("col", startTagCol) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("colgroup", endTagColgroup), + ("col", endTagCol) + ]) + endTagHandler.default = endTagOther + class InTableBodyPhase(Phase): # http://www.whatwg.org/specs/web-apps/current-work/#in-table0 - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("tr", self.startTagTr), - (("td", "th"), self.startTagTableCell), - (("caption", "col", "colgroup", "tbody", "tfoot", "thead"), - self.startTagTableOther) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - (("tbody", "tfoot", "thead"), self.endTagTableRowGroup), - ("table", self.endTagTable), - (("body", "caption", "col", "colgroup", "html", "td", "th", - "tr"), self.endTagIgnore) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() # helper methods def clearStackToTableBodyContext(self): @@ -2108,26 +2102,26 @@ def getPhases(debug): def endTagOther(self, token): return self.parser.phases["inTable"].processEndTag(token) + startTagHandler = _utils.MethodDispatcher([ + ("html", 
Phase.startTagHtml), + ("tr", startTagTr), + (("td", "th"), startTagTableCell), + (("caption", "col", "colgroup", "tbody", "tfoot", "thead"), + startTagTableOther) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + (("tbody", "tfoot", "thead"), endTagTableRowGroup), + ("table", endTagTable), + (("body", "caption", "col", "colgroup", "html", "td", "th", + "tr"), endTagIgnore) + ]) + endTagHandler.default = endTagOther + class InRowPhase(Phase): # http://www.whatwg.org/specs/web-apps/current-work/#in-row - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - (("td", "th"), self.startTagTableCell), - (("caption", "col", "colgroup", "tbody", "tfoot", "thead", - "tr"), self.startTagTableOther) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("tr", self.endTagTr), - ("table", self.endTagTable), - (("tbody", "tfoot", "thead"), self.endTagTableRowGroup), - (("body", "caption", "col", "colgroup", "html", "td", "th"), - self.endTagIgnore) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() # helper methods (XXX unify this with other table helper methods) def clearStackToTableRowContext(self): @@ -2197,23 +2191,26 @@ def getPhases(debug): def endTagOther(self, token): return self.parser.phases["inTable"].processEndTag(token) + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + (("td", "th"), startTagTableCell), + (("caption", "col", "colgroup", "tbody", "tfoot", "thead", + "tr"), startTagTableOther) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("tr", endTagTr), + ("table", endTagTable), + (("tbody", "tfoot", "thead"), endTagTableRowGroup), + (("body", "caption", "col", "colgroup", "html", "td", "th"), + endTagIgnore) + ]) + endTagHandler.default = endTagOther + class 
InCellPhase(Phase): # http://www.whatwg.org/specs/web-apps/current-work/#in-cell - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th", - "thead", "tr"), self.startTagTableOther) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - (("td", "th"), self.endTagTableCell), - (("body", "caption", "col", "colgroup", "html"), self.endTagIgnore), - (("table", "tbody", "tfoot", "thead", "tr"), self.endTagImply) - ]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() # helper def closeCell(self): @@ -2273,26 +2270,22 @@ def getPhases(debug): def endTagOther(self, token): return self.parser.phases["inBody"].processEndTag(token) + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th", + "thead", "tr"), startTagTableOther) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + (("td", "th"), endTagTableCell), + (("body", "caption", "col", "colgroup", "html"), endTagIgnore), + (("table", "tbody", "tfoot", "thead", "tr"), endTagImply) + ]) + endTagHandler.default = endTagOther + class InSelectPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("option", self.startTagOption), - ("optgroup", self.startTagOptgroup), - ("select", self.startTagSelect), - (("input", "keygen", "textarea"), self.startTagInput), - ("script", self.startTagScript) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("option", self.endTagOption), - ("optgroup", self.endTagOptgroup), - ("select", self.endTagSelect) - ]) - self.endTagHandler.default = self.endTagOther + 
__slots__ = tuple() # http://www.whatwg.org/specs/web-apps/current-work/#in-select def processEOF(self): @@ -2373,21 +2366,25 @@ def getPhases(debug): self.parser.parseError("unexpected-end-tag-in-select", {"name": token["name"]}) - class InSelectInTablePhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - (("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"), - self.startTagTable) - ]) - self.startTagHandler.default = self.startTagOther + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + ("option", startTagOption), + ("optgroup", startTagOptgroup), + ("select", startTagSelect), + (("input", "keygen", "textarea"), startTagInput), + ("script", startTagScript) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("option", endTagOption), + ("optgroup", endTagOptgroup), + ("select", endTagSelect) + ]) + endTagHandler.default = endTagOther - self.endTagHandler = _utils.MethodDispatcher([ - (("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"), - self.endTagTable) - ]) - self.endTagHandler.default = self.endTagOther + class InSelectInTablePhase(Phase): + __slots__ = tuple() def processEOF(self): self.parser.phases["inSelect"].processEOF() @@ -2412,7 +2409,21 @@ def getPhases(debug): def endTagOther(self, token): return self.parser.phases["inSelect"].processEndTag(token) + startTagHandler = _utils.MethodDispatcher([ + (("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"), + startTagTable) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + (("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"), + endTagTable) + ]) + endTagHandler.default = endTagOther + class InForeignContentPhase(Phase): + __slots__ = tuple() + breakoutElements = frozenset(["b", "big", "blockquote", "body", "br", "center", "code", "dd", "div", "dl", "dt", 
"em", "embed", "h1", "h2", "h3", @@ -2422,9 +2433,6 @@ def getPhases(debug): "span", "strong", "strike", "sub", "sup", "table", "tt", "u", "ul", "var"]) - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - def adjustSVGTagNames(self, token): replacements = {"altglyph": "altGlyph", "altglyphdef": "altGlyphDef", @@ -2478,7 +2486,7 @@ def getPhases(debug): currentNode = self.tree.openElements[-1] if (token["name"] in self.breakoutElements or (token["name"] == "font" and - set(token["data"].keys()) & set(["color", "face", "size"]))): + set(token["data"].keys()) & {"color", "face", "size"})): self.parser.parseError("unexpected-html-element-in-foreign-content", {"name": token["name"]}) while (self.tree.openElements[-1].namespace != @@ -2528,16 +2536,7 @@ def getPhases(debug): return new_token class AfterBodyPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([("html", self.endTagHtml)]) - self.endTagHandler.default = self.endTagOther + __slots__ = tuple() def processEOF(self): # Stop parsing @@ -2574,23 +2573,17 @@ def getPhases(debug): self.parser.phase = self.parser.phases["inBody"] return token - class InFramesetPhase(Phase): - # http://www.whatwg.org/specs/web-apps/current-work/#in-frameset - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml) + ]) + startTagHandler.default = startTagOther - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("frameset", self.startTagFrameset), - ("frame", self.startTagFrame), - ("noframes", self.startTagNoframes) - ]) - self.startTagHandler.default = self.startTagOther + endTagHandler = _utils.MethodDispatcher([("html", endTagHtml)]) + endTagHandler.default = endTagOther - 
self.endTagHandler = _utils.MethodDispatcher([ - ("frameset", self.endTagFrameset) - ]) - self.endTagHandler.default = self.endTagOther + class InFramesetPhase(Phase): + # http://www.whatwg.org/specs/web-apps/current-work/#in-frameset + __slots__ = tuple() def processEOF(self): if self.tree.openElements[-1].name != "html": @@ -2631,21 +2624,22 @@ def getPhases(debug): self.parser.parseError("unexpected-end-tag-in-frameset", {"name": token["name"]}) - class AfterFramesetPhase(Phase): - # http://www.whatwg.org/specs/web-apps/current-work/#after3 - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + ("frameset", startTagFrameset), + ("frame", startTagFrame), + ("noframes", startTagNoframes) + ]) + startTagHandler.default = startTagOther - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("noframes", self.startTagNoframes) - ]) - self.startTagHandler.default = self.startTagOther + endTagHandler = _utils.MethodDispatcher([ + ("frameset", endTagFrameset) + ]) + endTagHandler.default = endTagOther - self.endTagHandler = _utils.MethodDispatcher([ - ("html", self.endTagHtml) - ]) - self.endTagHandler.default = self.endTagOther + class AfterFramesetPhase(Phase): + # http://www.whatwg.org/specs/web-apps/current-work/#after3 + __slots__ = tuple() def processEOF(self): # Stop parsing @@ -2668,14 +2662,19 @@ def getPhases(debug): self.parser.parseError("unexpected-end-tag-after-frameset", {"name": token["name"]}) - class AfterAfterBodyPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) + startTagHandler = _utils.MethodDispatcher([ + ("html", Phase.startTagHtml), + ("noframes", startTagNoframes) + ]) + startTagHandler.default = startTagOther - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml) - ]) - self.startTagHandler.default = self.startTagOther + endTagHandler = 
_utils.MethodDispatcher([ + ("html", endTagHtml) + ]) + endTagHandler.default = endTagOther + + class AfterAfterBodyPhase(Phase): + __slots__ = tuple() def processEOF(self): pass @@ -2706,15 +2705,13 @@ def getPhases(debug): self.parser.phase = self.parser.phases["inBody"] return token - class AfterAfterFramesetPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml) + ]) + startTagHandler.default = startTagOther - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("noframes", self.startTagNoFrames) - ]) - self.startTagHandler.default = self.startTagOther + class AfterAfterFramesetPhase(Phase): + __slots__ = tuple() def processEOF(self): pass @@ -2741,6 +2738,13 @@ def getPhases(debug): def processEndTag(self, token): self.parser.parseError("expected-eof-but-got-end-tag", {"name": token["name"]}) + + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml), + ("noframes", startTagNoFrames) + ]) + startTagHandler.default = startTagOther + # pylint:enable=unused-argument return { @@ -2774,8 +2778,8 @@ def getPhases(debug): def adjust_attributes(token, replacements): needs_adjustment = viewkeys(token['data']) & viewkeys(replacements) if needs_adjustment: - token['data'] = OrderedDict((replacements.get(k, k), v) - for k, v in token['data'].items()) + token['data'] = type(token['data'])((replacements.get(k, k), v) + for k, v in token['data'].items()) def impliedTagToken(name, type="EndTag", attributes=None, diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/serializer.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/serializer.py index 53f4d44c..d5669d8c 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/serializer.py +++ 
b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/serializer.py @@ -274,7 +274,7 @@ class HTMLSerializer(object): if token["systemId"]: if token["systemId"].find('"') >= 0: if token["systemId"].find("'") >= 0: - self.serializeError("System identifer contains both single and double quote characters") + self.serializeError("System identifier contains both single and double quote characters") quote_char = "'" else: quote_char = '"' diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treebuilders/base.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treebuilders/base.py index 73973db5..965fce29 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treebuilders/base.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treebuilders/base.py @@ -10,9 +10,9 @@ Marker = None listElementsMap = { None: (frozenset(scopingElements), False), - "button": (frozenset(scopingElements | set([(namespaces["html"], "button")])), False), - "list": (frozenset(scopingElements | set([(namespaces["html"], "ol"), - (namespaces["html"], "ul")])), False), + "button": (frozenset(scopingElements | {(namespaces["html"], "button")}), False), + "list": (frozenset(scopingElements | {(namespaces["html"], "ol"), + (namespaces["html"], "ul")}), False), "table": (frozenset([(namespaces["html"], "html"), (namespaces["html"], "table")]), False), "select": (frozenset([(namespaces["html"], "optgroup"), @@ -28,7 +28,7 @@ class Node(object): :arg name: The tag name associated with the node """ - # The tag name assocaited with the node + # The tag name associated with the node self.name = name # The parent of the current node (or None for the document node) self.parent = None diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treebuilders/dom.py 
b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treebuilders/dom.py index dcfac220..d8b53004 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treebuilders/dom.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treebuilders/dom.py @@ -1,7 +1,10 @@ from __future__ import absolute_import, division, unicode_literals -from collections import MutableMapping +try: + from collections.abc import MutableMapping +except ImportError: # Python 2.7 + from collections import MutableMapping from xml.dom import minidom, Node import weakref diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree.py index 0dedf441..ea92dc30 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree.py @@ -5,6 +5,8 @@ from pip._vendor.six import text_type import re +from copy import copy + from . import base from .. import _ihatexml from .. import constants @@ -61,16 +63,17 @@ def getETreeBuilder(ElementTreeImplementation, fullTree=False): return self._element.attrib def _setAttributes(self, attributes): - # Delete existing attributes first - # XXX - there may be a better way to do this... 
- for key in list(self._element.attrib.keys()): - del self._element.attrib[key] - for key, value in attributes.items(): - if isinstance(key, tuple): - name = "{%s}%s" % (key[2], key[1]) - else: - name = key - self._element.set(name, value) + el_attrib = self._element.attrib + el_attrib.clear() + if attributes: + # calling .items _always_ allocates, and the above truthy check is cheaper than the + # allocation on average + for key, value in attributes.items(): + if isinstance(key, tuple): + name = "{%s}%s" % (key[2], key[1]) + else: + name = key + el_attrib[name] = value attributes = property(_getAttributes, _setAttributes) @@ -129,8 +132,8 @@ def getETreeBuilder(ElementTreeImplementation, fullTree=False): def cloneNode(self): element = type(self)(self.name, self.namespace) - for name, value in self.attributes.items(): - element.attributes[name] = value + if self._element.attrib: + element._element.attrib = copy(self._element.attrib) return element def reparentChildren(self, newParent): diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py index ca12a99c..f037759f 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py @@ -16,6 +16,11 @@ import warnings import re import sys +try: + from collections.abc import MutableMapping +except ImportError: + from collections import MutableMapping + from . import base from ..constants import DataLossWarning from .. import constants @@ -23,6 +28,7 @@ from . import etree as etree_builders from .. 
import _ihatexml import lxml.etree as etree +from pip._vendor.six import PY3, binary_type fullTree = True @@ -44,7 +50,11 @@ class Document(object): self._childNodes = [] def appendChild(self, element): - self._elementTree.getroot().addnext(element._element) + last = self._elementTree.getroot() + for last in self._elementTree.getroot().itersiblings(): + pass + + last.addnext(element._element) def _getChildNodes(self): return self._childNodes @@ -185,26 +195,37 @@ class TreeBuilder(base.TreeBuilder): infosetFilter = self.infosetFilter = _ihatexml.InfosetFilter(preventDoubleDashComments=True) self.namespaceHTMLElements = namespaceHTMLElements - class Attributes(dict): - def __init__(self, element, value=None): - if value is None: - value = {} + class Attributes(MutableMapping): + def __init__(self, element): self._element = element - dict.__init__(self, value) # pylint:disable=non-parent-init-called - for key, value in self.items(): - if isinstance(key, tuple): - name = "{%s}%s" % (key[2], infosetFilter.coerceAttribute(key[1])) - else: - name = infosetFilter.coerceAttribute(key) - self._element._element.attrib[name] = value - def __setitem__(self, key, value): - dict.__setitem__(self, key, value) + def _coerceKey(self, key): if isinstance(key, tuple): name = "{%s}%s" % (key[2], infosetFilter.coerceAttribute(key[1])) else: name = infosetFilter.coerceAttribute(key) - self._element._element.attrib[name] = value + return name + + def __getitem__(self, key): + value = self._element._element.attrib[self._coerceKey(key)] + if not PY3 and isinstance(value, binary_type): + value = value.decode("ascii") + return value + + def __setitem__(self, key, value): + self._element._element.attrib[self._coerceKey(key)] = value + + def __delitem__(self, key): + del self._element._element.attrib[self._coerceKey(key)] + + def __iter__(self): + return iter(self._element._element.attrib) + + def __len__(self): + return len(self._element._element.attrib) + + def clear(self): + return 
self._element._element.attrib.clear() class Element(builder.Element): def __init__(self, name, namespace): @@ -225,8 +246,10 @@ class TreeBuilder(base.TreeBuilder): def _getAttributes(self): return self._attributes - def _setAttributes(self, attributes): - self._attributes = Attributes(self, attributes) + def _setAttributes(self, value): + attributes = self.attributes + attributes.clear() + attributes.update(value) attributes = property(_getAttributes, _setAttributes) @@ -234,8 +257,11 @@ class TreeBuilder(base.TreeBuilder): data = infosetFilter.coerceCharacters(data) builder.Element.insertText(self, data, insertBefore) - def appendChild(self, child): - builder.Element.appendChild(self, child) + def cloneNode(self): + element = type(self)(self.name, self.namespace) + if self._element.attrib: + element._element.attrib.update(self._element.attrib) + return element class Comment(builder.Comment): def __init__(self, data): diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py index 9bec2076..b2d3aac3 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py @@ -2,10 +2,10 @@ tree, generating tokens identical to those produced by the tokenizer module. -To create a tree walker for a new type of tree, you need to do +To create a tree walker for a new type of tree, you need to implement a tree walker object (called TreeWalker by convention) that -implements a 'serialize' method taking a tree as sole argument and -returning an iterator generating tokens. +implements a 'serialize' method which takes a tree as sole argument and +returns an iterator which generates tokens. 
""" from __future__ import absolute_import, division, unicode_literals diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree.py index 95fc0c17..837b27ec 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree.py @@ -127,4 +127,5 @@ def getETreeBuilder(ElementTreeImplementation): return locals() + getETreeModule = moduleFactoryFactory(getETreeBuilder) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py index e81ddf33..c56af390 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py @@ -1,6 +1,8 @@ from __future__ import absolute_import, division, unicode_literals from pip._vendor.six import text_type +from collections import OrderedDict + from lxml import etree from ..treebuilders.etree import tag_regexp @@ -163,7 +165,7 @@ class TreeWalker(base.NonRecursiveTreeWalker): else: namespace = None tag = ensure_str(node.tag) - attrs = {} + attrs = OrderedDict() for name, value in list(node.attrib.items()): name = ensure_str(name) value = ensure_str(value) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/idna/core.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/idna/core.py index 104624ad..41ec5c71 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/idna/core.py +++ 
b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/idna/core.py @@ -9,7 +9,7 @@ _virama_combining_class = 9 _alabel_prefix = b'xn--' _unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') -if sys.version_info[0] == 3: +if sys.version_info[0] >= 3: unicode = str unichr = chr @@ -300,6 +300,10 @@ def ulabel(label): label = label.lower() if label.startswith(_alabel_prefix): label = label[len(_alabel_prefix):] + if not label: + raise IDNAError('Malformed A-label, no Punycode eligible content found') + if label.decode('ascii')[-1] == '-': + raise IDNAError('A-label must not end with a hyphen') else: check_label(label) return label.decode('ascii') diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/idna/idnadata.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/idna/idnadata.py index a80c959d..a284e4c8 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/idna/idnadata.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/idna/idnadata.py @@ -1,6 +1,6 @@ # This file is automatically generated by tools/idna-data -__version__ = "11.0.0" +__version__ = "13.0.0" scripts = { 'Greek': ( 0x37000000374, @@ -48,16 +48,18 @@ scripts = { 0x300700003008, 0x30210000302a, 0x30380000303c, - 0x340000004db6, - 0x4e0000009ff0, + 0x340000004dc0, + 0x4e0000009ffd, 0xf9000000fa6e, 0xfa700000fada, - 0x200000002a6d7, + 0x16ff000016ff2, + 0x200000002a6de, 0x2a7000002b735, 0x2b7400002b81e, 0x2b8200002cea2, 0x2ceb00002ebe1, 0x2f8000002fa1e, + 0x300000003134b, ), 'Hebrew': ( 0x591000005c8, @@ -74,6 +76,7 @@ scripts = { 0x304100003097, 0x309d000030a0, 0x1b0010001b11f, + 0x1b1500001b153, 0x1f2000001f201, ), 'Katakana': ( @@ -85,6 +88,7 @@ scripts = { 0xff660000ff70, 0xff710000ff9e, 0x1b0000001b001, + 0x1b1640001b168, ), } joining_types = { @@ -387,9 +391,9 @@ joining_types = { 0x853: 68, 0x854: 82, 0x855: 68, - 0x856: 85, - 0x857: 85, - 0x858: 85, + 0x856: 82, + 
0x857: 82, + 0x858: 82, 0x860: 68, 0x861: 85, 0x862: 68, @@ -430,6 +434,16 @@ joining_types = { 0x8bb: 68, 0x8bc: 68, 0x8bd: 68, + 0x8be: 68, + 0x8bf: 68, + 0x8c0: 68, + 0x8c1: 68, + 0x8c2: 68, + 0x8c3: 68, + 0x8c4: 68, + 0x8c5: 68, + 0x8c6: 68, + 0x8c7: 68, 0x8e2: 85, 0x1806: 85, 0x1807: 68, @@ -754,6 +768,34 @@ joining_types = { 0x10f52: 68, 0x10f53: 68, 0x10f54: 82, + 0x10fb0: 68, + 0x10fb1: 85, + 0x10fb2: 68, + 0x10fb3: 68, + 0x10fb4: 82, + 0x10fb5: 82, + 0x10fb6: 82, + 0x10fb7: 85, + 0x10fb8: 68, + 0x10fb9: 82, + 0x10fba: 82, + 0x10fbb: 68, + 0x10fbc: 68, + 0x10fbd: 82, + 0x10fbe: 68, + 0x10fbf: 68, + 0x10fc0: 85, + 0x10fc1: 68, + 0x10fc2: 82, + 0x10fc3: 82, + 0x10fc4: 68, + 0x10fc5: 85, + 0x10fc6: 85, + 0x10fc7: 85, + 0x10fc8: 85, + 0x10fc9: 82, + 0x10fca: 68, + 0x10fcb: 76, 0x110bd: 85, 0x110cd: 85, 0x1e900: 68, @@ -824,6 +866,7 @@ joining_types = { 0x1e941: 68, 0x1e942: 68, 0x1e943: 68, + 0x1e94b: 84, } codepoint_classes = { 'PVALID': ( @@ -1126,7 +1169,7 @@ codepoint_classes = { 0x8400000085c, 0x8600000086b, 0x8a0000008b5, - 0x8b6000008be, + 0x8b6000008c8, 0x8d3000008e2, 0x8e300000958, 0x96000000964, @@ -1185,7 +1228,7 @@ codepoint_classes = { 0xb3c00000b45, 0xb4700000b49, 0xb4b00000b4e, - 0xb5600000b58, + 0xb5500000b58, 0xb5f00000b64, 0xb6600000b70, 0xb7100000b72, @@ -1230,8 +1273,7 @@ codepoint_classes = { 0xce000000ce4, 0xce600000cf0, 0xcf100000cf3, - 0xd0000000d04, - 0xd0500000d0d, + 0xd0000000d0d, 0xd0e00000d11, 0xd1200000d45, 0xd4600000d49, @@ -1240,7 +1282,7 @@ codepoint_classes = { 0xd5f00000d64, 0xd6600000d70, 0xd7a00000d80, - 0xd8200000d84, + 0xd8100000d84, 0xd8500000d97, 0xd9a00000db2, 0xdb300000dbc, @@ -1258,18 +1300,11 @@ codepoint_classes = { 0xe5000000e5a, 0xe8100000e83, 0xe8400000e85, - 0xe8700000e89, - 0xe8a00000e8b, - 0xe8d00000e8e, - 0xe9400000e98, - 0xe9900000ea0, - 0xea100000ea4, + 0xe8600000e8b, + 0xe8c00000ea4, 0xea500000ea6, - 0xea700000ea8, - 0xeaa00000eac, - 0xead00000eb3, - 0xeb400000eba, - 0xebb00000ebe, + 0xea700000eb3, + 
0xeb400000ebe, 0xec000000ec5, 0xec600000ec7, 0xec800000ece, @@ -1362,6 +1397,7 @@ codepoint_classes = { 0x1a9000001a9a, 0x1aa700001aa8, 0x1ab000001abe, + 0x1abf00001ac1, 0x1b0000001b4c, 0x1b5000001b5a, 0x1b6b00001b74, @@ -1370,7 +1406,7 @@ codepoint_classes = { 0x1c4000001c4a, 0x1c4d00001c7e, 0x1cd000001cd3, - 0x1cd400001cfa, + 0x1cd400001cfb, 0x1d0000001d2c, 0x1d2f00001d30, 0x1d3b00001d3c, @@ -1613,10 +1649,10 @@ codepoint_classes = { 0x30a1000030fb, 0x30fc000030ff, 0x310500003130, - 0x31a0000031bb, + 0x31a0000031c0, 0x31f000003200, - 0x340000004db6, - 0x4e0000009ff0, + 0x340000004dc0, + 0x4e0000009ffd, 0xa0000000a48d, 0xa4d00000a4fe, 0xa5000000a60d, @@ -1727,8 +1763,15 @@ codepoint_classes = { 0xa7b50000a7b6, 0xa7b70000a7b8, 0xa7b90000a7ba, - 0xa7f70000a7f8, + 0xa7bb0000a7bc, + 0xa7bd0000a7be, + 0xa7bf0000a7c0, + 0xa7c30000a7c4, + 0xa7c80000a7c9, + 0xa7ca0000a7cb, + 0xa7f60000a7f8, 0xa7fa0000a828, + 0xa82c0000a82d, 0xa8400000a874, 0xa8800000a8c6, 0xa8d00000a8da, @@ -1753,7 +1796,7 @@ codepoint_classes = { 0xab200000ab27, 0xab280000ab2f, 0xab300000ab5b, - 0xab600000ab66, + 0xab600000ab6a, 0xabc00000abeb, 0xabec0000abee, 0xabf00000abfa, @@ -1827,9 +1870,14 @@ codepoint_classes = { 0x10cc000010cf3, 0x10d0000010d28, 0x10d3000010d3a, + 0x10e8000010eaa, + 0x10eab00010ead, + 0x10eb000010eb2, 0x10f0000010f1d, 0x10f2700010f28, 0x10f3000010f51, + 0x10fb000010fc5, + 0x10fe000010ff7, 0x1100000011047, 0x1106600011070, 0x1107f000110bb, @@ -1837,12 +1885,12 @@ codepoint_classes = { 0x110f0000110fa, 0x1110000011135, 0x1113600011140, - 0x1114400011147, + 0x1114400011148, 0x1115000011174, 0x1117600011177, 0x11180000111c5, 0x111c9000111cd, - 0x111d0000111db, + 0x111ce000111db, 0x111dc000111dd, 0x1120000011212, 0x1121300011238, @@ -1871,7 +1919,7 @@ codepoint_classes = { 0x1137000011375, 0x114000001144b, 0x114500001145a, - 0x1145e0001145f, + 0x1145e00011462, 0x11480000114c6, 0x114c7000114c8, 0x114d0000114da, @@ -1881,18 +1929,28 @@ codepoint_classes = { 0x1160000011641, 
0x1164400011645, 0x116500001165a, - 0x11680000116b8, + 0x11680000116b9, 0x116c0000116ca, 0x117000001171b, 0x1171d0001172c, 0x117300001173a, 0x118000001183b, 0x118c0000118ea, - 0x118ff00011900, + 0x118ff00011907, + 0x119090001190a, + 0x1190c00011914, + 0x1191500011917, + 0x1191800011936, + 0x1193700011939, + 0x1193b00011944, + 0x119500001195a, + 0x119a0000119a8, + 0x119aa000119d8, + 0x119da000119e2, + 0x119e3000119e5, 0x11a0000011a3f, 0x11a4700011a48, - 0x11a5000011a84, - 0x11a8600011a9a, + 0x11a5000011a9a, 0x11a9d00011a9e, 0x11ac000011af9, 0x11c0000011c09, @@ -1916,6 +1974,7 @@ codepoint_classes = { 0x11d9300011d99, 0x11da000011daa, 0x11ee000011ef7, + 0x11fb000011fb1, 0x120000001239a, 0x1248000012544, 0x130000001342f, @@ -1931,13 +1990,18 @@ codepoint_classes = { 0x16b6300016b78, 0x16b7d00016b90, 0x16e6000016e80, - 0x16f0000016f45, - 0x16f5000016f7f, + 0x16f0000016f4b, + 0x16f4f00016f88, 0x16f8f00016fa0, 0x16fe000016fe2, - 0x17000000187f2, - 0x1880000018af3, + 0x16fe300016fe5, + 0x16ff000016ff2, + 0x17000000187f8, + 0x1880000018cd6, + 0x18d0000018d09, 0x1b0000001b11f, + 0x1b1500001b153, + 0x1b1640001b168, 0x1b1700001b2fc, 0x1bc000001bc6b, 0x1bc700001bc7d, @@ -1955,15 +2019,22 @@ codepoint_classes = { 0x1e01b0001e022, 0x1e0230001e025, 0x1e0260001e02b, + 0x1e1000001e12d, + 0x1e1300001e13e, + 0x1e1400001e14a, + 0x1e14e0001e14f, + 0x1e2c00001e2fa, 0x1e8000001e8c5, 0x1e8d00001e8d7, - 0x1e9220001e94b, + 0x1e9220001e94c, 0x1e9500001e95a, - 0x200000002a6d7, + 0x1fbf00001fbfa, + 0x200000002a6de, 0x2a7000002b735, 0x2b7400002b81e, 0x2b8200002cea2, 0x2ceb00002ebe1, + 0x300000003134b, ), 'CONTEXTJ': ( 0x200c0000200e, diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/idna/package_data.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/idna/package_data.py index 257e8989..ce1c521d 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/idna/package_data.py +++ 
b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/idna/package_data.py @@ -1,2 +1,2 @@ -__version__ = '2.8' +__version__ = '2.10' diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/idna/uts46data.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/idna/uts46data.py index a68ed4c0..3766dd49 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/idna/uts46data.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/idna/uts46data.py @@ -4,7 +4,7 @@ """IDNA Mapping Table from UTS46.""" -__version__ = "11.0.0" +__version__ = "13.0.0" def _seg_0(): return [ (0x0, '3'), @@ -1074,7 +1074,7 @@ def _seg_10(): (0x8A0, 'V'), (0x8B5, 'X'), (0x8B6, 'V'), - (0x8BE, 'X'), + (0x8C8, 'X'), (0x8D3, 'V'), (0x8E2, 'X'), (0x8E3, 'V'), @@ -1205,7 +1205,7 @@ def _seg_11(): (0xB49, 'X'), (0xB4B, 'V'), (0xB4E, 'X'), - (0xB56, 'V'), + (0xB55, 'V'), (0xB58, 'X'), (0xB5C, 'M', u'ଡ଼'), (0xB5D, 'M', u'ଢ଼'), @@ -1272,7 +1272,7 @@ def _seg_12(): (0xC64, 'X'), (0xC66, 'V'), (0xC70, 'X'), - (0xC78, 'V'), + (0xC77, 'V'), (0xC8D, 'X'), (0xC8E, 'V'), (0xC91, 'X'), @@ -1299,8 +1299,6 @@ def _seg_12(): (0xCF1, 'V'), (0xCF3, 'X'), (0xD00, 'V'), - (0xD04, 'X'), - (0xD05, 'V'), (0xD0D, 'X'), (0xD0E, 'V'), (0xD11, 'X'), @@ -1314,7 +1312,7 @@ def _seg_12(): (0xD64, 'X'), (0xD66, 'V'), (0xD80, 'X'), - (0xD82, 'V'), + (0xD81, 'V'), (0xD84, 'X'), (0xD85, 'V'), (0xD97, 'X'), @@ -1348,33 +1346,19 @@ def _seg_12(): (0xE83, 'X'), (0xE84, 'V'), (0xE85, 'X'), - (0xE87, 'V'), - (0xE89, 'X'), - (0xE8A, 'V'), + (0xE86, 'V'), (0xE8B, 'X'), - (0xE8D, 'V'), - (0xE8E, 'X'), - (0xE94, 'V'), - ] - -def _seg_13(): - return [ - (0xE98, 'X'), - (0xE99, 'V'), - (0xEA0, 'X'), - (0xEA1, 'V'), + (0xE8C, 'V'), (0xEA4, 'X'), (0xEA5, 'V'), (0xEA6, 'X'), (0xEA7, 'V'), - (0xEA8, 'X'), - (0xEAA, 'V'), - (0xEAC, 'X'), - (0xEAD, 'V'), (0xEB3, 'M', u'ໍາ'), (0xEB4, 'V'), - (0xEBA, 'X'), - (0xEBB, 'V'), + ] + +def 
_seg_13(): + return [ (0xEBE, 'X'), (0xEC0, 'V'), (0xEC5, 'X'), @@ -1459,10 +1443,6 @@ def _seg_13(): (0x1260, 'V'), (0x1289, 'X'), (0x128A, 'V'), - ] - -def _seg_14(): - return [ (0x128E, 'X'), (0x1290, 'V'), (0x12B1, 'X'), @@ -1479,6 +1459,10 @@ def _seg_14(): (0x12D8, 'V'), (0x1311, 'X'), (0x1312, 'V'), + ] + +def _seg_14(): + return [ (0x1316, 'X'), (0x1318, 'V'), (0x135B, 'X'), @@ -1563,15 +1547,11 @@ def _seg_14(): (0x1A7F, 'V'), (0x1A8A, 'X'), (0x1A90, 'V'), - ] - -def _seg_15(): - return [ (0x1A9A, 'X'), (0x1AA0, 'V'), (0x1AAE, 'X'), (0x1AB0, 'V'), - (0x1ABF, 'X'), + (0x1AC1, 'X'), (0x1B00, 'V'), (0x1B4C, 'X'), (0x1B50, 'V'), @@ -1583,6 +1563,10 @@ def _seg_15(): (0x1C3B, 'V'), (0x1C4A, 'X'), (0x1C4D, 'V'), + ] + +def _seg_15(): + return [ (0x1C80, 'M', u'в'), (0x1C81, 'M', u'д'), (0x1C82, 'M', u'о'), @@ -1592,10 +1576,57 @@ def _seg_15(): (0x1C87, 'M', u'ѣ'), (0x1C88, 'M', u'ꙋ'), (0x1C89, 'X'), + (0x1C90, 'M', u'ა'), + (0x1C91, 'M', u'ბ'), + (0x1C92, 'M', u'გ'), + (0x1C93, 'M', u'დ'), + (0x1C94, 'M', u'ე'), + (0x1C95, 'M', u'ვ'), + (0x1C96, 'M', u'ზ'), + (0x1C97, 'M', u'თ'), + (0x1C98, 'M', u'ი'), + (0x1C99, 'M', u'კ'), + (0x1C9A, 'M', u'ლ'), + (0x1C9B, 'M', u'მ'), + (0x1C9C, 'M', u'ნ'), + (0x1C9D, 'M', u'ო'), + (0x1C9E, 'M', u'პ'), + (0x1C9F, 'M', u'ჟ'), + (0x1CA0, 'M', u'რ'), + (0x1CA1, 'M', u'ს'), + (0x1CA2, 'M', u'ტ'), + (0x1CA3, 'M', u'უ'), + (0x1CA4, 'M', u'ფ'), + (0x1CA5, 'M', u'ქ'), + (0x1CA6, 'M', u'ღ'), + (0x1CA7, 'M', u'ყ'), + (0x1CA8, 'M', u'შ'), + (0x1CA9, 'M', u'ჩ'), + (0x1CAA, 'M', u'ც'), + (0x1CAB, 'M', u'ძ'), + (0x1CAC, 'M', u'წ'), + (0x1CAD, 'M', u'ჭ'), + (0x1CAE, 'M', u'ხ'), + (0x1CAF, 'M', u'ჯ'), + (0x1CB0, 'M', u'ჰ'), + (0x1CB1, 'M', u'ჱ'), + (0x1CB2, 'M', u'ჲ'), + (0x1CB3, 'M', u'ჳ'), + (0x1CB4, 'M', u'ჴ'), + (0x1CB5, 'M', u'ჵ'), + (0x1CB6, 'M', u'ჶ'), + (0x1CB7, 'M', u'ჷ'), + (0x1CB8, 'M', u'ჸ'), + (0x1CB9, 'M', u'ჹ'), + (0x1CBA, 'M', u'ჺ'), + (0x1CBB, 'X'), + (0x1CBD, 'M', u'ჽ'), + (0x1CBE, 'M', u'ჾ'), + (0x1CBF, 'M', u'ჿ'), 
(0x1CC0, 'V'), (0x1CC8, 'X'), (0x1CD0, 'V'), - (0x1CFA, 'X'), + (0x1CFB, 'X'), (0x1D00, 'V'), (0x1D2C, 'M', u'a'), (0x1D2D, 'M', u'æ'), @@ -1636,6 +1667,10 @@ def _seg_15(): (0x1D50, 'M', u'm'), (0x1D51, 'M', u'ŋ'), (0x1D52, 'M', u'o'), + ] + +def _seg_16(): + return [ (0x1D53, 'M', u'ɔ'), (0x1D54, 'M', u'ᴖ'), (0x1D55, 'M', u'ᴗ'), @@ -1667,10 +1702,6 @@ def _seg_15(): (0x1D9C, 'M', u'c'), (0x1D9D, 'M', u'ɕ'), (0x1D9E, 'M', u'ð'), - ] - -def _seg_16(): - return [ (0x1D9F, 'M', u'ɜ'), (0x1DA0, 'M', u'f'), (0x1DA1, 'M', u'ɟ'), @@ -1740,6 +1771,10 @@ def _seg_16(): (0x1E1E, 'M', u'ḟ'), (0x1E1F, 'V'), (0x1E20, 'M', u'ḡ'), + ] + +def _seg_17(): + return [ (0x1E21, 'V'), (0x1E22, 'M', u'ḣ'), (0x1E23, 'V'), @@ -1771,10 +1806,6 @@ def _seg_16(): (0x1E3D, 'V'), (0x1E3E, 'M', u'ḿ'), (0x1E3F, 'V'), - ] - -def _seg_17(): - return [ (0x1E40, 'M', u'ṁ'), (0x1E41, 'V'), (0x1E42, 'M', u'ṃ'), @@ -1844,6 +1875,10 @@ def _seg_17(): (0x1E82, 'M', u'ẃ'), (0x1E83, 'V'), (0x1E84, 'M', u'ẅ'), + ] + +def _seg_18(): + return [ (0x1E85, 'V'), (0x1E86, 'M', u'ẇ'), (0x1E87, 'V'), @@ -1875,10 +1910,6 @@ def _seg_17(): (0x1EA6, 'M', u'ầ'), (0x1EA7, 'V'), (0x1EA8, 'M', u'ẩ'), - ] - -def _seg_18(): - return [ (0x1EA9, 'V'), (0x1EAA, 'M', u'ẫ'), (0x1EAB, 'V'), @@ -1948,6 +1979,10 @@ def _seg_18(): (0x1EEB, 'V'), (0x1EEC, 'M', u'ử'), (0x1EED, 'V'), + ] + +def _seg_19(): + return [ (0x1EEE, 'M', u'ữ'), (0x1EEF, 'V'), (0x1EF0, 'M', u'ự'), @@ -1979,10 +2014,6 @@ def _seg_18(): (0x1F18, 'M', u'ἐ'), (0x1F19, 'M', u'ἑ'), (0x1F1A, 'M', u'ἒ'), - ] - -def _seg_19(): - return [ (0x1F1B, 'M', u'ἓ'), (0x1F1C, 'M', u'ἔ'), (0x1F1D, 'M', u'ἕ'), @@ -2052,6 +2083,10 @@ def _seg_19(): (0x1F82, 'M', u'ἂι'), (0x1F83, 'M', u'ἃι'), (0x1F84, 'M', u'ἄι'), + ] + +def _seg_20(): + return [ (0x1F85, 'M', u'ἅι'), (0x1F86, 'M', u'ἆι'), (0x1F87, 'M', u'ἇι'), @@ -2083,10 +2118,6 @@ def _seg_19(): (0x1FA1, 'M', u'ὡι'), (0x1FA2, 'M', u'ὢι'), (0x1FA3, 'M', u'ὣι'), - ] - -def _seg_20(): - return [ (0x1FA4, 'M', u'ὤι'), (0x1FA5, 'M', 
u'ὥι'), (0x1FA6, 'M', u'ὦι'), @@ -2156,6 +2187,10 @@ def _seg_20(): (0x1FF0, 'X'), (0x1FF2, 'M', u'ὼι'), (0x1FF3, 'M', u'ωι'), + ] + +def _seg_21(): + return [ (0x1FF4, 'M', u'ώι'), (0x1FF5, 'X'), (0x1FF6, 'V'), @@ -2187,10 +2222,6 @@ def _seg_20(): (0x2035, 'V'), (0x2036, 'M', u'‵‵'), (0x2037, 'M', u'‵‵‵'), - ] - -def _seg_21(): - return [ (0x2038, 'V'), (0x203C, '3', u'!!'), (0x203D, 'V'), @@ -2260,6 +2291,10 @@ def _seg_21(): (0x20F1, 'X'), (0x2100, '3', u'a/c'), (0x2101, '3', u'a/s'), + ] + +def _seg_22(): + return [ (0x2102, 'M', u'c'), (0x2103, 'M', u'°c'), (0x2104, 'V'), @@ -2291,10 +2326,6 @@ def _seg_21(): (0x2127, 'V'), (0x2128, 'M', u'z'), (0x2129, 'V'), - ] - -def _seg_22(): - return [ (0x212A, 'M', u'k'), (0x212B, 'M', u'å'), (0x212C, 'M', u'b'), @@ -2364,6 +2395,10 @@ def _seg_22(): (0x2177, 'M', u'viii'), (0x2178, 'M', u'ix'), (0x2179, 'M', u'x'), + ] + +def _seg_23(): + return [ (0x217A, 'M', u'xi'), (0x217B, 'M', u'xii'), (0x217C, 'M', u'l'), @@ -2395,10 +2430,6 @@ def _seg_22(): (0x244B, 'X'), (0x2460, 'M', u'1'), (0x2461, 'M', u'2'), - ] - -def _seg_23(): - return [ (0x2462, 'M', u'3'), (0x2463, 'M', u'4'), (0x2464, 'M', u'5'), @@ -2468,6 +2499,10 @@ def _seg_23(): (0x24B7, 'M', u'b'), (0x24B8, 'M', u'c'), (0x24B9, 'M', u'd'), + ] + +def _seg_24(): + return [ (0x24BA, 'M', u'e'), (0x24BB, 'M', u'f'), (0x24BC, 'M', u'g'), @@ -2499,10 +2534,6 @@ def _seg_23(): (0x24D6, 'M', u'g'), (0x24D7, 'M', u'h'), (0x24D8, 'M', u'i'), - ] - -def _seg_24(): - return [ (0x24D9, 'M', u'j'), (0x24DA, 'M', u'k'), (0x24DB, 'M', u'l'), @@ -2533,10 +2564,7 @@ def _seg_24(): (0x2B74, 'X'), (0x2B76, 'V'), (0x2B96, 'X'), - (0x2B98, 'V'), - (0x2BC9, 'X'), - (0x2BCA, 'V'), - (0x2BFF, 'X'), + (0x2B97, 'V'), (0x2C00, 'M', u'ⰰ'), (0x2C01, 'M', u'ⰱ'), (0x2C02, 'M', u'ⰲ'), @@ -2575,6 +2603,10 @@ def _seg_24(): (0x2C23, 'M', u'ⱓ'), (0x2C24, 'M', u'ⱔ'), (0x2C25, 'M', u'ⱕ'), + ] + +def _seg_25(): + return [ (0x2C26, 'M', u'ⱖ'), (0x2C27, 'M', u'ⱗ'), (0x2C28, 'M', u'ⱘ'), @@ -2603,10 
+2635,6 @@ def _seg_24(): (0x2C6E, 'M', u'ɱ'), (0x2C6F, 'M', u'ɐ'), (0x2C70, 'M', u'ɒ'), - ] - -def _seg_25(): - return [ (0x2C71, 'V'), (0x2C72, 'M', u'ⱳ'), (0x2C73, 'V'), @@ -2679,6 +2707,10 @@ def _seg_25(): (0x2CBC, 'M', u'ⲽ'), (0x2CBD, 'V'), (0x2CBE, 'M', u'ⲿ'), + ] + +def _seg_26(): + return [ (0x2CBF, 'V'), (0x2CC0, 'M', u'ⳁ'), (0x2CC1, 'V'), @@ -2707,10 +2739,6 @@ def _seg_25(): (0x2CD8, 'M', u'ⳙ'), (0x2CD9, 'V'), (0x2CDA, 'M', u'ⳛ'), - ] - -def _seg_26(): - return [ (0x2CDB, 'V'), (0x2CDC, 'M', u'ⳝ'), (0x2CDD, 'V'), @@ -2757,7 +2785,7 @@ def _seg_26(): (0x2DD8, 'V'), (0x2DDF, 'X'), (0x2DE0, 'V'), - (0x2E4F, 'X'), + (0x2E53, 'X'), (0x2E80, 'V'), (0x2E9A, 'X'), (0x2E9B, 'V'), @@ -2783,6 +2811,10 @@ def _seg_26(): (0x2F0F, 'M', u'几'), (0x2F10, 'M', u'凵'), (0x2F11, 'M', u'刀'), + ] + +def _seg_27(): + return [ (0x2F12, 'M', u'力'), (0x2F13, 'M', u'勹'), (0x2F14, 'M', u'匕'), @@ -2811,10 +2843,6 @@ def _seg_26(): (0x2F2B, 'M', u'尸'), (0x2F2C, 'M', u'屮'), (0x2F2D, 'M', u'山'), - ] - -def _seg_27(): - return [ (0x2F2E, 'M', u'巛'), (0x2F2F, 'M', u'工'), (0x2F30, 'M', u'己'), @@ -2887,6 +2915,10 @@ def _seg_27(): (0x2F73, 'M', u'穴'), (0x2F74, 'M', u'立'), (0x2F75, 'M', u'竹'), + ] + +def _seg_28(): + return [ (0x2F76, 'M', u'米'), (0x2F77, 'M', u'糸'), (0x2F78, 'M', u'缶'), @@ -2915,10 +2947,6 @@ def _seg_27(): (0x2F8F, 'M', u'行'), (0x2F90, 'M', u'衣'), (0x2F91, 'M', u'襾'), - ] - -def _seg_28(): - return [ (0x2F92, 'M', u'見'), (0x2F93, 'M', u'角'), (0x2F94, 'M', u'言'), @@ -2991,6 +3019,10 @@ def _seg_28(): (0x3000, '3', u' '), (0x3001, 'V'), (0x3002, 'M', u'.'), + ] + +def _seg_29(): + return [ (0x3003, 'V'), (0x3036, 'M', u'〒'), (0x3037, 'V'), @@ -3019,10 +3051,6 @@ def _seg_28(): (0x3136, 'M', u'ᆭ'), (0x3137, 'M', u'ᄃ'), (0x3138, 'M', u'ᄄ'), - ] - -def _seg_29(): - return [ (0x3139, 'M', u'ᄅ'), (0x313A, 'M', u'ᆰ'), (0x313B, 'M', u'ᆱ'), @@ -3095,6 +3123,10 @@ def _seg_29(): (0x317E, 'M', u'ᄶ'), (0x317F, 'M', u'ᅀ'), (0x3180, 'M', u'ᅇ'), + ] + +def _seg_30(): + return [ (0x3181, 
'M', u'ᅌ'), (0x3182, 'M', u'ᇱ'), (0x3183, 'M', u'ᇲ'), @@ -3123,15 +3155,9 @@ def _seg_29(): (0x319B, 'M', u'丙'), (0x319C, 'M', u'丁'), (0x319D, 'M', u'天'), - ] - -def _seg_30(): - return [ (0x319E, 'M', u'地'), (0x319F, 'M', u'人'), (0x31A0, 'V'), - (0x31BB, 'X'), - (0x31C0, 'V'), (0x31E4, 'X'), (0x31F0, 'V'), (0x3200, '3', u'(ᄀ)'), @@ -3201,6 +3227,10 @@ def _seg_30(): (0x3240, '3', u'(祭)'), (0x3241, '3', u'(休)'), (0x3242, '3', u'(自)'), + ] + +def _seg_31(): + return [ (0x3243, '3', u'(至)'), (0x3244, 'M', u'問'), (0x3245, 'M', u'幼'), @@ -3227,10 +3257,6 @@ def _seg_30(): (0x3261, 'M', u'ᄂ'), (0x3262, 'M', u'ᄃ'), (0x3263, 'M', u'ᄅ'), - ] - -def _seg_31(): - return [ (0x3264, 'M', u'ᄆ'), (0x3265, 'M', u'ᄇ'), (0x3266, 'M', u'ᄉ'), @@ -3305,6 +3331,10 @@ def _seg_31(): (0x32AB, 'M', u'学'), (0x32AC, 'M', u'監'), (0x32AD, 'M', u'企'), + ] + +def _seg_32(): + return [ (0x32AE, 'M', u'資'), (0x32AF, 'M', u'協'), (0x32B0, 'M', u'夜'), @@ -3331,10 +3361,6 @@ def _seg_31(): (0x32C5, 'M', u'6月'), (0x32C6, 'M', u'7月'), (0x32C7, 'M', u'8月'), - ] - -def _seg_32(): - return [ (0x32C8, 'M', u'9月'), (0x32C9, 'M', u'10月'), (0x32CA, 'M', u'11月'), @@ -3390,7 +3416,7 @@ def _seg_32(): (0x32FC, 'M', u'ヰ'), (0x32FD, 'M', u'ヱ'), (0x32FE, 'M', u'ヲ'), - (0x32FF, 'X'), + (0x32FF, 'M', u'令和'), (0x3300, 'M', u'アパート'), (0x3301, 'M', u'アルファ'), (0x3302, 'M', u'アンペア'), @@ -3409,6 +3435,10 @@ def _seg_32(): (0x330F, 'M', u'ガンマ'), (0x3310, 'M', u'ギガ'), (0x3311, 'M', u'ギニー'), + ] + +def _seg_33(): + return [ (0x3312, 'M', u'キュリー'), (0x3313, 'M', u'ギルダー'), (0x3314, 'M', u'キロ'), @@ -3435,10 +3465,6 @@ def _seg_32(): (0x3329, 'M', u'ノット'), (0x332A, 'M', u'ハイツ'), (0x332B, 'M', u'パーセント'), - ] - -def _seg_33(): - return [ (0x332C, 'M', u'パーツ'), (0x332D, 'M', u'バーレル'), (0x332E, 'M', u'ピアストル'), @@ -3513,6 +3539,10 @@ def _seg_33(): (0x3373, 'M', u'au'), (0x3374, 'M', u'bar'), (0x3375, 'M', u'ov'), + ] + +def _seg_34(): + return [ (0x3376, 'M', u'pc'), (0x3377, 'M', u'dm'), (0x3378, 'M', u'dm2'), @@ -3539,10 +3569,6 @@ 
def _seg_33(): (0x338D, 'M', u'μg'), (0x338E, 'M', u'mg'), (0x338F, 'M', u'kg'), - ] - -def _seg_34(): - return [ (0x3390, 'M', u'hz'), (0x3391, 'M', u'khz'), (0x3392, 'M', u'mhz'), @@ -3617,6 +3643,10 @@ def _seg_34(): (0x33D7, 'M', u'ph'), (0x33D8, 'X'), (0x33D9, 'M', u'ppm'), + ] + +def _seg_35(): + return [ (0x33DA, 'M', u'pr'), (0x33DB, 'M', u'sr'), (0x33DC, 'M', u'sv'), @@ -3643,10 +3673,6 @@ def _seg_34(): (0x33F1, 'M', u'18日'), (0x33F2, 'M', u'19日'), (0x33F3, 'M', u'20日'), - ] - -def _seg_35(): - return [ (0x33F4, 'M', u'21日'), (0x33F5, 'M', u'22日'), (0x33F6, 'M', u'23日'), @@ -3660,9 +3686,7 @@ def _seg_35(): (0x33FE, 'M', u'31日'), (0x33FF, 'M', u'gal'), (0x3400, 'V'), - (0x4DB6, 'X'), - (0x4DC0, 'V'), - (0x9FF0, 'X'), + (0x9FFD, 'X'), (0xA000, 'V'), (0xA48D, 'X'), (0xA490, 'V'), @@ -3723,6 +3747,10 @@ def _seg_35(): (0xA685, 'V'), (0xA686, 'M', u'ꚇ'), (0xA687, 'V'), + ] + +def _seg_36(): + return [ (0xA688, 'M', u'ꚉ'), (0xA689, 'V'), (0xA68A, 'M', u'ꚋ'), @@ -3747,10 +3775,6 @@ def _seg_35(): (0xA69D, 'M', u'ь'), (0xA69E, 'V'), (0xA6F8, 'X'), - ] - -def _seg_36(): - return [ (0xA700, 'V'), (0xA722, 'M', u'ꜣ'), (0xA723, 'V'), @@ -3827,6 +3851,10 @@ def _seg_36(): (0xA76C, 'M', u'ꝭ'), (0xA76D, 'V'), (0xA76E, 'M', u'ꝯ'), + ] + +def _seg_37(): + return [ (0xA76F, 'V'), (0xA770, 'M', u'ꝯ'), (0xA771, 'V'), @@ -3851,10 +3879,6 @@ def _seg_36(): (0xA78E, 'V'), (0xA790, 'M', u'ꞑ'), (0xA791, 'V'), - ] - -def _seg_37(): - return [ (0xA792, 'M', u'ꞓ'), (0xA793, 'V'), (0xA796, 'M', u'ꞗ'), @@ -3891,14 +3915,31 @@ def _seg_37(): (0xA7B5, 'V'), (0xA7B6, 'M', u'ꞷ'), (0xA7B7, 'V'), - (0xA7B8, 'X'), + (0xA7B8, 'M', u'ꞹ'), (0xA7B9, 'V'), - (0xA7BA, 'X'), - (0xA7F7, 'V'), + (0xA7BA, 'M', u'ꞻ'), + (0xA7BB, 'V'), + (0xA7BC, 'M', u'ꞽ'), + (0xA7BD, 'V'), + (0xA7BE, 'M', u'ꞿ'), + (0xA7BF, 'V'), + (0xA7C0, 'X'), + (0xA7C2, 'M', u'ꟃ'), + (0xA7C3, 'V'), + (0xA7C4, 'M', u'ꞔ'), + (0xA7C5, 'M', u'ʂ'), + (0xA7C6, 'M', u'ᶎ'), + (0xA7C7, 'M', u'ꟈ'), + (0xA7C8, 'V'), + (0xA7C9, 'M', u'ꟊ'), + 
(0xA7CA, 'V'), + (0xA7CB, 'X'), + (0xA7F5, 'M', u'ꟶ'), + (0xA7F6, 'V'), (0xA7F8, 'M', u'ħ'), (0xA7F9, 'M', u'œ'), (0xA7FA, 'V'), - (0xA82C, 'X'), + (0xA82D, 'X'), (0xA830, 'V'), (0xA83A, 'X'), (0xA840, 'V'), @@ -3914,6 +3955,10 @@ def _seg_37(): (0xA980, 'V'), (0xA9CE, 'X'), (0xA9CF, 'V'), + ] + +def _seg_38(): + return [ (0xA9DA, 'X'), (0xA9DE, 'V'), (0xA9FF, 'X'), @@ -3943,7 +3988,9 @@ def _seg_37(): (0xAB5E, 'M', u'ɫ'), (0xAB5F, 'M', u'ꭒ'), (0xAB60, 'V'), - (0xAB66, 'X'), + (0xAB69, 'M', u'ʍ'), + (0xAB6A, 'V'), + (0xAB6C, 'X'), (0xAB70, 'M', u'Ꭰ'), (0xAB71, 'M', u'Ꭱ'), (0xAB72, 'M', u'Ꭲ'), @@ -3955,10 +4002,6 @@ def _seg_37(): (0xAB78, 'M', u'Ꭸ'), (0xAB79, 'M', u'Ꭹ'), (0xAB7A, 'M', u'Ꭺ'), - ] - -def _seg_38(): - return [ (0xAB7B, 'M', u'Ꭻ'), (0xAB7C, 'M', u'Ꭼ'), (0xAB7D, 'M', u'Ꭽ'), @@ -4016,6 +4059,10 @@ def _seg_38(): (0xABB1, 'M', u'Ꮱ'), (0xABB2, 'M', u'Ꮲ'), (0xABB3, 'M', u'Ꮳ'), + ] + +def _seg_39(): + return [ (0xABB4, 'M', u'Ꮴ'), (0xABB5, 'M', u'Ꮵ'), (0xABB6, 'M', u'Ꮶ'), @@ -4059,10 +4106,6 @@ def _seg_38(): (0xF913, 'M', u'邏'), (0xF914, 'M', u'樂'), (0xF915, 'M', u'洛'), - ] - -def _seg_39(): - return [ (0xF916, 'M', u'烙'), (0xF917, 'M', u'珞'), (0xF918, 'M', u'落'), @@ -4120,6 +4163,10 @@ def _seg_39(): (0xF94C, 'M', u'樓'), (0xF94D, 'M', u'淚'), (0xF94E, 'M', u'漏'), + ] + +def _seg_40(): + return [ (0xF94F, 'M', u'累'), (0xF950, 'M', u'縷'), (0xF951, 'M', u'陋'), @@ -4163,10 +4210,6 @@ def _seg_39(): (0xF977, 'M', u'亮'), (0xF978, 'M', u'兩'), (0xF979, 'M', u'凉'), - ] - -def _seg_40(): - return [ (0xF97A, 'M', u'梁'), (0xF97B, 'M', u'糧'), (0xF97C, 'M', u'良'), @@ -4224,6 +4267,10 @@ def _seg_40(): (0xF9B0, 'M', u'聆'), (0xF9B1, 'M', u'鈴'), (0xF9B2, 'M', u'零'), + ] + +def _seg_41(): + return [ (0xF9B3, 'M', u'靈'), (0xF9B4, 'M', u'領'), (0xF9B5, 'M', u'例'), @@ -4267,10 +4314,6 @@ def _seg_40(): (0xF9DB, 'M', u'率'), (0xF9DC, 'M', u'隆'), (0xF9DD, 'M', u'利'), - ] - -def _seg_41(): - return [ (0xF9DE, 'M', u'吏'), (0xF9DF, 'M', u'履'), (0xF9E0, 'M', u'易'), @@ -4328,6 +4371,10 
@@ def _seg_41(): (0xFA16, 'M', u'猪'), (0xFA17, 'M', u'益'), (0xFA18, 'M', u'礼'), + ] + +def _seg_42(): + return [ (0xFA19, 'M', u'神'), (0xFA1A, 'M', u'祥'), (0xFA1B, 'M', u'福'), @@ -4371,10 +4418,6 @@ def _seg_41(): (0xFA44, 'M', u'梅'), (0xFA45, 'M', u'海'), (0xFA46, 'M', u'渚'), - ] - -def _seg_42(): - return [ (0xFA47, 'M', u'漢'), (0xFA48, 'M', u'煮'), (0xFA49, 'M', u'爫'), @@ -4432,6 +4475,10 @@ def _seg_42(): (0xFA7F, 'M', u'奔'), (0xFA80, 'M', u'婢'), (0xFA81, 'M', u'嬨'), + ] + +def _seg_43(): + return [ (0xFA82, 'M', u'廒'), (0xFA83, 'M', u'廙'), (0xFA84, 'M', u'彩'), @@ -4475,10 +4522,6 @@ def _seg_42(): (0xFAAA, 'M', u'着'), (0xFAAB, 'M', u'磌'), (0xFAAC, 'M', u'窱'), - ] - -def _seg_43(): - return [ (0xFAAD, 'M', u'節'), (0xFAAE, 'M', u'类'), (0xFAAF, 'M', u'絛'), @@ -4536,6 +4579,10 @@ def _seg_43(): (0xFB14, 'M', u'մե'), (0xFB15, 'M', u'մի'), (0xFB16, 'M', u'վն'), + ] + +def _seg_44(): + return [ (0xFB17, 'M', u'մխ'), (0xFB18, 'X'), (0xFB1D, 'M', u'יִ'), @@ -4579,10 +4626,6 @@ def _seg_43(): (0xFB43, 'M', u'ףּ'), (0xFB44, 'M', u'פּ'), (0xFB45, 'X'), - ] - -def _seg_44(): - return [ (0xFB46, 'M', u'צּ'), (0xFB47, 'M', u'קּ'), (0xFB48, 'M', u'רּ'), @@ -4640,6 +4683,10 @@ def _seg_44(): (0xFBEE, 'M', u'ئو'), (0xFBF0, 'M', u'ئۇ'), (0xFBF2, 'M', u'ئۆ'), + ] + +def _seg_45(): + return [ (0xFBF4, 'M', u'ئۈ'), (0xFBF6, 'M', u'ئې'), (0xFBF9, 'M', u'ئى'), @@ -4683,10 +4730,6 @@ def _seg_44(): (0xFC24, 'M', u'ضخ'), (0xFC25, 'M', u'ضم'), (0xFC26, 'M', u'طح'), - ] - -def _seg_45(): - return [ (0xFC27, 'M', u'طم'), (0xFC28, 'M', u'ظم'), (0xFC29, 'M', u'عج'), @@ -4744,6 +4787,10 @@ def _seg_45(): (0xFC5D, 'M', u'ىٰ'), (0xFC5E, '3', u' ٌّ'), (0xFC5F, '3', u' ٍّ'), + ] + +def _seg_46(): + return [ (0xFC60, '3', u' َّ'), (0xFC61, '3', u' ُّ'), (0xFC62, '3', u' ِّ'), @@ -4787,10 +4834,6 @@ def _seg_45(): (0xFC88, 'M', u'ما'), (0xFC89, 'M', u'مم'), (0xFC8A, 'M', u'نر'), - ] - -def _seg_46(): - return [ (0xFC8B, 'M', u'نز'), (0xFC8C, 'M', u'نم'), (0xFC8D, 'M', u'نن'), @@ -4848,6 +4891,10 @@ 
def _seg_46(): (0xFCC1, 'M', u'فم'), (0xFCC2, 'M', u'قح'), (0xFCC3, 'M', u'قم'), + ] + +def _seg_47(): + return [ (0xFCC4, 'M', u'كج'), (0xFCC5, 'M', u'كح'), (0xFCC6, 'M', u'كخ'), @@ -4891,10 +4938,6 @@ def _seg_46(): (0xFCEC, 'M', u'كم'), (0xFCED, 'M', u'لم'), (0xFCEE, 'M', u'نم'), - ] - -def _seg_47(): - return [ (0xFCEF, 'M', u'نه'), (0xFCF0, 'M', u'يم'), (0xFCF1, 'M', u'يه'), @@ -4952,6 +4995,10 @@ def _seg_47(): (0xFD25, 'M', u'شج'), (0xFD26, 'M', u'شح'), (0xFD27, 'M', u'شخ'), + ] + +def _seg_48(): + return [ (0xFD28, 'M', u'شم'), (0xFD29, 'M', u'شر'), (0xFD2A, 'M', u'سر'), @@ -4995,10 +5042,6 @@ def _seg_47(): (0xFD66, 'M', u'صمم'), (0xFD67, 'M', u'شحم'), (0xFD69, 'M', u'شجي'), - ] - -def _seg_48(): - return [ (0xFD6A, 'M', u'شمخ'), (0xFD6C, 'M', u'شمم'), (0xFD6E, 'M', u'ضحى'), @@ -5056,6 +5099,10 @@ def _seg_48(): (0xFDAC, 'M', u'لجي'), (0xFDAD, 'M', u'لمي'), (0xFDAE, 'M', u'يحي'), + ] + +def _seg_49(): + return [ (0xFDAF, 'M', u'يجي'), (0xFDB0, 'M', u'يمي'), (0xFDB1, 'M', u'ممي'), @@ -5099,10 +5146,6 @@ def _seg_48(): (0xFDFE, 'X'), (0xFE00, 'I'), (0xFE10, '3', u','), - ] - -def _seg_49(): - return [ (0xFE11, 'M', u'、'), (0xFE12, 'X'), (0xFE13, '3', u':'), @@ -5160,6 +5203,10 @@ def _seg_49(): (0xFE64, '3', u'<'), (0xFE65, '3', u'>'), (0xFE66, '3', u'='), + ] + +def _seg_50(): + return [ (0xFE67, 'X'), (0xFE68, '3', u'\\'), (0xFE69, '3', u'$'), @@ -5203,10 +5250,6 @@ def _seg_49(): (0xFEB1, 'M', u'س'), (0xFEB5, 'M', u'ش'), (0xFEB9, 'M', u'ص'), - ] - -def _seg_50(): - return [ (0xFEBD, 'M', u'ض'), (0xFEC1, 'M', u'ط'), (0xFEC5, 'M', u'ظ'), @@ -5264,6 +5307,10 @@ def _seg_50(): (0xFF21, 'M', u'a'), (0xFF22, 'M', u'b'), (0xFF23, 'M', u'c'), + ] + +def _seg_51(): + return [ (0xFF24, 'M', u'd'), (0xFF25, 'M', u'e'), (0xFF26, 'M', u'f'), @@ -5307,10 +5354,6 @@ def _seg_50(): (0xFF4C, 'M', u'l'), (0xFF4D, 'M', u'm'), (0xFF4E, 'M', u'n'), - ] - -def _seg_51(): - return [ (0xFF4F, 'M', u'o'), (0xFF50, 'M', u'p'), (0xFF51, 'M', u'q'), @@ -5368,6 +5411,10 @@ def 
_seg_51(): (0xFF85, 'M', u'ナ'), (0xFF86, 'M', u'ニ'), (0xFF87, 'M', u'ヌ'), + ] + +def _seg_52(): + return [ (0xFF88, 'M', u'ネ'), (0xFF89, 'M', u'ノ'), (0xFF8A, 'M', u'ハ'), @@ -5411,10 +5458,6 @@ def _seg_51(): (0xFFB0, 'M', u'ᄚ'), (0xFFB1, 'M', u'ᄆ'), (0xFFB2, 'M', u'ᄇ'), - ] - -def _seg_52(): - return [ (0xFFB3, 'M', u'ᄈ'), (0xFFB4, 'M', u'ᄡ'), (0xFFB5, 'M', u'ᄉ'), @@ -5472,6 +5515,10 @@ def _seg_52(): (0x10000, 'V'), (0x1000C, 'X'), (0x1000D, 'V'), + ] + +def _seg_53(): + return [ (0x10027, 'X'), (0x10028, 'V'), (0x1003B, 'X'), @@ -5490,7 +5537,7 @@ def _seg_52(): (0x10137, 'V'), (0x1018F, 'X'), (0x10190, 'V'), - (0x1019C, 'X'), + (0x1019D, 'X'), (0x101A0, 'V'), (0x101A1, 'X'), (0x101D0, 'V'), @@ -5515,10 +5562,6 @@ def _seg_52(): (0x103D6, 'X'), (0x10400, 'M', u'𐐨'), (0x10401, 'M', u'𐐩'), - ] - -def _seg_53(): - return [ (0x10402, 'M', u'𐐪'), (0x10403, 'M', u'𐐫'), (0x10404, 'M', u'𐐬'), @@ -5576,6 +5619,10 @@ def _seg_53(): (0x104BC, 'M', u'𐓤'), (0x104BD, 'M', u'𐓥'), (0x104BE, 'M', u'𐓦'), + ] + +def _seg_54(): + return [ (0x104BF, 'M', u'𐓧'), (0x104C0, 'M', u'𐓨'), (0x104C1, 'M', u'𐓩'), @@ -5619,10 +5666,6 @@ def _seg_53(): (0x1080A, 'V'), (0x10836, 'X'), (0x10837, 'V'), - ] - -def _seg_54(): - return [ (0x10839, 'X'), (0x1083C, 'V'), (0x1083D, 'X'), @@ -5680,6 +5723,10 @@ def _seg_54(): (0x10B9D, 'X'), (0x10BA9, 'V'), (0x10BB0, 'X'), + ] + +def _seg_55(): + return [ (0x10C00, 'V'), (0x10C49, 'X'), (0x10C80, 'M', u'𐳀'), @@ -5723,10 +5770,6 @@ def _seg_54(): (0x10CA6, 'M', u'𐳦'), (0x10CA7, 'M', u'𐳧'), (0x10CA8, 'M', u'𐳨'), - ] - -def _seg_55(): - return [ (0x10CA9, 'M', u'𐳩'), (0x10CAA, 'M', u'𐳪'), (0x10CAB, 'M', u'𐳫'), @@ -5746,10 +5789,20 @@ def _seg_55(): (0x10D3A, 'X'), (0x10E60, 'V'), (0x10E7F, 'X'), + (0x10E80, 'V'), + (0x10EAA, 'X'), + (0x10EAB, 'V'), + (0x10EAE, 'X'), + (0x10EB0, 'V'), + (0x10EB2, 'X'), (0x10F00, 'V'), (0x10F28, 'X'), (0x10F30, 'V'), (0x10F5A, 'X'), + (0x10FB0, 'V'), + (0x10FCC, 'X'), + (0x10FE0, 'V'), + (0x10FF7, 'X'), (0x11000, 'V'), 
(0x1104E, 'X'), (0x11052, 'V'), @@ -5765,17 +5818,19 @@ def _seg_55(): (0x11100, 'V'), (0x11135, 'X'), (0x11136, 'V'), - (0x11147, 'X'), + (0x11148, 'X'), (0x11150, 'V'), (0x11177, 'X'), (0x11180, 'V'), - (0x111CE, 'X'), - (0x111D0, 'V'), (0x111E0, 'X'), (0x111E1, 'V'), (0x111F5, 'X'), (0x11200, 'V'), (0x11212, 'X'), + ] + +def _seg_56(): + return [ (0x11213, 'V'), (0x1123F, 'X'), (0x11280, 'V'), @@ -5823,15 +5878,9 @@ def _seg_55(): (0x11370, 'V'), (0x11375, 'X'), (0x11400, 'V'), - (0x1145A, 'X'), - (0x1145B, 'V'), (0x1145C, 'X'), (0x1145D, 'V'), - ] - -def _seg_56(): - return [ - (0x1145F, 'X'), + (0x11462, 'X'), (0x11480, 'V'), (0x114C8, 'X'), (0x114D0, 'V'), @@ -5847,7 +5896,7 @@ def _seg_56(): (0x11660, 'V'), (0x1166D, 'X'), (0x11680, 'V'), - (0x116B8, 'X'), + (0x116B9, 'X'), (0x116C0, 'V'), (0x116CA, 'X'), (0x11700, 'V'), @@ -5882,6 +5931,10 @@ def _seg_56(): (0x118B5, 'M', u'𑣕'), (0x118B6, 'M', u'𑣖'), (0x118B7, 'M', u'𑣗'), + ] + +def _seg_57(): + return [ (0x118B8, 'M', u'𑣘'), (0x118B9, 'M', u'𑣙'), (0x118BA, 'M', u'𑣚'), @@ -5893,12 +5946,30 @@ def _seg_56(): (0x118C0, 'V'), (0x118F3, 'X'), (0x118FF, 'V'), - (0x11900, 'X'), + (0x11907, 'X'), + (0x11909, 'V'), + (0x1190A, 'X'), + (0x1190C, 'V'), + (0x11914, 'X'), + (0x11915, 'V'), + (0x11917, 'X'), + (0x11918, 'V'), + (0x11936, 'X'), + (0x11937, 'V'), + (0x11939, 'X'), + (0x1193B, 'V'), + (0x11947, 'X'), + (0x11950, 'V'), + (0x1195A, 'X'), + (0x119A0, 'V'), + (0x119A8, 'X'), + (0x119AA, 'V'), + (0x119D8, 'X'), + (0x119DA, 'V'), + (0x119E5, 'X'), (0x11A00, 'V'), (0x11A48, 'X'), (0x11A50, 'V'), - (0x11A84, 'X'), - (0x11A86, 'V'), (0x11AA3, 'X'), (0x11AC0, 'V'), (0x11AF9, 'X'), @@ -5931,10 +6002,6 @@ def _seg_56(): (0x11D50, 'V'), (0x11D5A, 'X'), (0x11D60, 'V'), - ] - -def _seg_57(): - return [ (0x11D66, 'X'), (0x11D67, 'V'), (0x11D69, 'X'), @@ -5948,7 +6015,11 @@ def _seg_57(): (0x11DAA, 'X'), (0x11EE0, 'V'), (0x11EF9, 'X'), - (0x12000, 'V'), + (0x11FB0, 'V'), + (0x11FB1, 'X'), + (0x11FC0, 'V'), + (0x11FF2, 
'X'), + (0x11FFF, 'V'), (0x1239A, 'X'), (0x12400, 'V'), (0x1246F, 'X'), @@ -5964,6 +6035,10 @@ def _seg_57(): (0x16A39, 'X'), (0x16A40, 'V'), (0x16A5F, 'X'), + ] + +def _seg_58(): + return [ (0x16A60, 'V'), (0x16A6A, 'X'), (0x16A6E, 'V'), @@ -5982,22 +6057,62 @@ def _seg_57(): (0x16B78, 'X'), (0x16B7D, 'V'), (0x16B90, 'X'), + (0x16E40, 'M', u'𖹠'), + (0x16E41, 'M', u'𖹡'), + (0x16E42, 'M', u'𖹢'), + (0x16E43, 'M', u'𖹣'), + (0x16E44, 'M', u'𖹤'), + (0x16E45, 'M', u'𖹥'), + (0x16E46, 'M', u'𖹦'), + (0x16E47, 'M', u'𖹧'), + (0x16E48, 'M', u'𖹨'), + (0x16E49, 'M', u'𖹩'), + (0x16E4A, 'M', u'𖹪'), + (0x16E4B, 'M', u'𖹫'), + (0x16E4C, 'M', u'𖹬'), + (0x16E4D, 'M', u'𖹭'), + (0x16E4E, 'M', u'𖹮'), + (0x16E4F, 'M', u'𖹯'), + (0x16E50, 'M', u'𖹰'), + (0x16E51, 'M', u'𖹱'), + (0x16E52, 'M', u'𖹲'), + (0x16E53, 'M', u'𖹳'), + (0x16E54, 'M', u'𖹴'), + (0x16E55, 'M', u'𖹵'), + (0x16E56, 'M', u'𖹶'), + (0x16E57, 'M', u'𖹷'), + (0x16E58, 'M', u'𖹸'), + (0x16E59, 'M', u'𖹹'), + (0x16E5A, 'M', u'𖹺'), + (0x16E5B, 'M', u'𖹻'), + (0x16E5C, 'M', u'𖹼'), + (0x16E5D, 'M', u'𖹽'), + (0x16E5E, 'M', u'𖹾'), + (0x16E5F, 'M', u'𖹿'), (0x16E60, 'V'), (0x16E9B, 'X'), (0x16F00, 'V'), - (0x16F45, 'X'), - (0x16F50, 'V'), - (0x16F7F, 'X'), + (0x16F4B, 'X'), + (0x16F4F, 'V'), + (0x16F88, 'X'), (0x16F8F, 'V'), (0x16FA0, 'X'), (0x16FE0, 'V'), - (0x16FE2, 'X'), + (0x16FE5, 'X'), + (0x16FF0, 'V'), + (0x16FF2, 'X'), (0x17000, 'V'), - (0x187F2, 'X'), + (0x187F8, 'X'), (0x18800, 'V'), - (0x18AF3, 'X'), + (0x18CD6, 'X'), + (0x18D00, 'V'), + (0x18D09, 'X'), (0x1B000, 'V'), (0x1B11F, 'X'), + (0x1B150, 'V'), + (0x1B153, 'X'), + (0x1B164, 'V'), + (0x1B168, 'X'), (0x1B170, 'V'), (0x1B2FC, 'X'), (0x1BC00, 'V'), @@ -6024,6 +6139,10 @@ def _seg_57(): (0x1D163, 'M', u'𝅘𝅥𝅱'), (0x1D164, 'M', u'𝅘𝅥𝅲'), (0x1D165, 'V'), + ] + +def _seg_59(): + return [ (0x1D173, 'X'), (0x1D17B, 'V'), (0x1D1BB, 'M', u'𝆹𝅥'), @@ -6035,10 +6154,6 @@ def _seg_57(): (0x1D1C1, 'V'), (0x1D1E9, 'X'), (0x1D200, 'V'), - ] - -def _seg_58(): - return [ (0x1D246, 'X'), (0x1D2E0, 
'V'), (0x1D2F4, 'X'), @@ -6128,6 +6243,10 @@ def _seg_58(): (0x1D44F, 'M', u'b'), (0x1D450, 'M', u'c'), (0x1D451, 'M', u'd'), + ] + +def _seg_60(): + return [ (0x1D452, 'M', u'e'), (0x1D453, 'M', u'f'), (0x1D454, 'M', u'g'), @@ -6139,10 +6258,6 @@ def _seg_58(): (0x1D45A, 'M', u'm'), (0x1D45B, 'M', u'n'), (0x1D45C, 'M', u'o'), - ] - -def _seg_59(): - return [ (0x1D45D, 'M', u'p'), (0x1D45E, 'M', u'q'), (0x1D45F, 'M', u'r'), @@ -6232,6 +6347,10 @@ def _seg_59(): (0x1D4B6, 'M', u'a'), (0x1D4B7, 'M', u'b'), (0x1D4B8, 'M', u'c'), + ] + +def _seg_61(): + return [ (0x1D4B9, 'M', u'd'), (0x1D4BA, 'X'), (0x1D4BB, 'M', u'f'), @@ -6243,10 +6362,6 @@ def _seg_59(): (0x1D4C1, 'M', u'l'), (0x1D4C2, 'M', u'm'), (0x1D4C3, 'M', u'n'), - ] - -def _seg_60(): - return [ (0x1D4C4, 'X'), (0x1D4C5, 'M', u'p'), (0x1D4C6, 'M', u'q'), @@ -6336,6 +6451,10 @@ def _seg_60(): (0x1D51B, 'M', u'x'), (0x1D51C, 'M', u'y'), (0x1D51D, 'X'), + ] + +def _seg_62(): + return [ (0x1D51E, 'M', u'a'), (0x1D51F, 'M', u'b'), (0x1D520, 'M', u'c'), @@ -6347,10 +6466,6 @@ def _seg_60(): (0x1D526, 'M', u'i'), (0x1D527, 'M', u'j'), (0x1D528, 'M', u'k'), - ] - -def _seg_61(): - return [ (0x1D529, 'M', u'l'), (0x1D52A, 'M', u'm'), (0x1D52B, 'M', u'n'), @@ -6440,6 +6555,10 @@ def _seg_61(): (0x1D581, 'M', u'v'), (0x1D582, 'M', u'w'), (0x1D583, 'M', u'x'), + ] + +def _seg_63(): + return [ (0x1D584, 'M', u'y'), (0x1D585, 'M', u'z'), (0x1D586, 'M', u'a'), @@ -6451,10 +6570,6 @@ def _seg_61(): (0x1D58C, 'M', u'g'), (0x1D58D, 'M', u'h'), (0x1D58E, 'M', u'i'), - ] - -def _seg_62(): - return [ (0x1D58F, 'M', u'j'), (0x1D590, 'M', u'k'), (0x1D591, 'M', u'l'), @@ -6544,6 +6659,10 @@ def _seg_62(): (0x1D5E5, 'M', u'r'), (0x1D5E6, 'M', u's'), (0x1D5E7, 'M', u't'), + ] + +def _seg_64(): + return [ (0x1D5E8, 'M', u'u'), (0x1D5E9, 'M', u'v'), (0x1D5EA, 'M', u'w'), @@ -6555,10 +6674,6 @@ def _seg_62(): (0x1D5F0, 'M', u'c'), (0x1D5F1, 'M', u'd'), (0x1D5F2, 'M', u'e'), - ] - -def _seg_63(): - return [ (0x1D5F3, 'M', u'f'), (0x1D5F4, 
'M', u'g'), (0x1D5F5, 'M', u'h'), @@ -6648,6 +6763,10 @@ def _seg_63(): (0x1D649, 'M', u'n'), (0x1D64A, 'M', u'o'), (0x1D64B, 'M', u'p'), + ] + +def _seg_65(): + return [ (0x1D64C, 'M', u'q'), (0x1D64D, 'M', u'r'), (0x1D64E, 'M', u's'), @@ -6659,10 +6778,6 @@ def _seg_63(): (0x1D654, 'M', u'y'), (0x1D655, 'M', u'z'), (0x1D656, 'M', u'a'), - ] - -def _seg_64(): - return [ (0x1D657, 'M', u'b'), (0x1D658, 'M', u'c'), (0x1D659, 'M', u'd'), @@ -6752,6 +6867,10 @@ def _seg_64(): (0x1D6AE, 'M', u'η'), (0x1D6AF, 'M', u'θ'), (0x1D6B0, 'M', u'ι'), + ] + +def _seg_66(): + return [ (0x1D6B1, 'M', u'κ'), (0x1D6B2, 'M', u'λ'), (0x1D6B3, 'M', u'μ'), @@ -6763,10 +6882,6 @@ def _seg_64(): (0x1D6B9, 'M', u'θ'), (0x1D6BA, 'M', u'σ'), (0x1D6BB, 'M', u'τ'), - ] - -def _seg_65(): - return [ (0x1D6BC, 'M', u'υ'), (0x1D6BD, 'M', u'φ'), (0x1D6BE, 'M', u'χ'), @@ -6856,6 +6971,10 @@ def _seg_65(): (0x1D714, 'M', u'ω'), (0x1D715, 'M', u'∂'), (0x1D716, 'M', u'ε'), + ] + +def _seg_67(): + return [ (0x1D717, 'M', u'θ'), (0x1D718, 'M', u'κ'), (0x1D719, 'M', u'φ'), @@ -6867,10 +6986,6 @@ def _seg_65(): (0x1D71F, 'M', u'δ'), (0x1D720, 'M', u'ε'), (0x1D721, 'M', u'ζ'), - ] - -def _seg_66(): - return [ (0x1D722, 'M', u'η'), (0x1D723, 'M', u'θ'), (0x1D724, 'M', u'ι'), @@ -6960,6 +7075,10 @@ def _seg_66(): (0x1D779, 'M', u'κ'), (0x1D77A, 'M', u'λ'), (0x1D77B, 'M', u'μ'), + ] + +def _seg_68(): + return [ (0x1D77C, 'M', u'ν'), (0x1D77D, 'M', u'ξ'), (0x1D77E, 'M', u'ο'), @@ -6971,10 +7090,6 @@ def _seg_66(): (0x1D785, 'M', u'φ'), (0x1D786, 'M', u'χ'), (0x1D787, 'M', u'ψ'), - ] - -def _seg_67(): - return [ (0x1D788, 'M', u'ω'), (0x1D789, 'M', u'∂'), (0x1D78A, 'M', u'ε'), @@ -7064,6 +7179,10 @@ def _seg_67(): (0x1D7E1, 'M', u'9'), (0x1D7E2, 'M', u'0'), (0x1D7E3, 'M', u'1'), + ] + +def _seg_69(): + return [ (0x1D7E4, 'M', u'2'), (0x1D7E5, 'M', u'3'), (0x1D7E6, 'M', u'4'), @@ -7075,10 +7194,6 @@ def _seg_67(): (0x1D7EC, 'M', u'0'), (0x1D7ED, 'M', u'1'), (0x1D7EE, 'M', u'2'), - ] - -def _seg_68(): - return [ 
(0x1D7EF, 'M', u'3'), (0x1D7F0, 'M', u'4'), (0x1D7F1, 'M', u'5'), @@ -7112,6 +7227,18 @@ def _seg_68(): (0x1E025, 'X'), (0x1E026, 'V'), (0x1E02B, 'X'), + (0x1E100, 'V'), + (0x1E12D, 'X'), + (0x1E130, 'V'), + (0x1E13E, 'X'), + (0x1E140, 'V'), + (0x1E14A, 'X'), + (0x1E14E, 'V'), + (0x1E150, 'X'), + (0x1E2C0, 'V'), + (0x1E2FA, 'X'), + (0x1E2FF, 'V'), + (0x1E300, 'X'), (0x1E800, 'V'), (0x1E8C5, 'X'), (0x1E8C7, 'V'), @@ -7151,13 +7278,19 @@ def _seg_68(): (0x1E920, 'M', u'𞥂'), (0x1E921, 'M', u'𞥃'), (0x1E922, 'V'), - (0x1E94B, 'X'), + (0x1E94C, 'X'), (0x1E950, 'V'), (0x1E95A, 'X'), (0x1E95E, 'V'), (0x1E960, 'X'), + ] + +def _seg_70(): + return [ (0x1EC71, 'V'), (0x1ECB5, 'X'), + (0x1ED01, 'V'), + (0x1ED3E, 'X'), (0x1EE00, 'M', u'ا'), (0x1EE01, 'M', u'ب'), (0x1EE02, 'M', u'ج'), @@ -7179,10 +7312,6 @@ def _seg_68(): (0x1EE12, 'M', u'ق'), (0x1EE13, 'M', u'ر'), (0x1EE14, 'M', u'ش'), - ] - -def _seg_69(): - return [ (0x1EE15, 'M', u'ت'), (0x1EE16, 'M', u'ث'), (0x1EE17, 'M', u'خ'), @@ -7258,6 +7387,10 @@ def _seg_69(): (0x1EE68, 'M', u'ط'), (0x1EE69, 'M', u'ي'), (0x1EE6A, 'M', u'ك'), + ] + +def _seg_71(): + return [ (0x1EE6B, 'X'), (0x1EE6C, 'M', u'م'), (0x1EE6D, 'M', u'ن'), @@ -7283,10 +7416,6 @@ def _seg_69(): (0x1EE81, 'M', u'ب'), (0x1EE82, 'M', u'ج'), (0x1EE83, 'M', u'د'), - ] - -def _seg_70(): - return [ (0x1EE84, 'M', u'ه'), (0x1EE85, 'M', u'و'), (0x1EE86, 'M', u'ز'), @@ -7362,10 +7491,13 @@ def _seg_70(): (0x1F106, '3', u'5,'), (0x1F107, '3', u'6,'), (0x1F108, '3', u'7,'), + ] + +def _seg_72(): + return [ (0x1F109, '3', u'8,'), (0x1F10A, '3', u'9,'), (0x1F10B, 'V'), - (0x1F10D, 'X'), (0x1F110, '3', u'(a)'), (0x1F111, '3', u'(b)'), (0x1F112, '3', u'(c)'), @@ -7387,10 +7519,6 @@ def _seg_70(): (0x1F122, '3', u'(s)'), (0x1F123, '3', u'(t)'), (0x1F124, '3', u'(u)'), - ] - -def _seg_71(): - return [ (0x1F125, '3', u'(v)'), (0x1F126, '3', u'(w)'), (0x1F127, '3', u'(x)'), @@ -7437,11 +7565,11 @@ def _seg_71(): (0x1F150, 'V'), (0x1F16A, 'M', u'mc'), (0x1F16B, 'M', u'md'), - 
(0x1F16C, 'X'), - (0x1F170, 'V'), + (0x1F16C, 'M', u'mr'), + (0x1F16D, 'V'), (0x1F190, 'M', u'dj'), (0x1F191, 'V'), - (0x1F1AD, 'X'), + (0x1F1AE, 'X'), (0x1F1E6, 'V'), (0x1F200, 'M', u'ほか'), (0x1F201, 'M', u'ココ'), @@ -7467,6 +7595,10 @@ def _seg_71(): (0x1F221, 'M', u'終'), (0x1F222, 'M', u'生'), (0x1F223, 'M', u'販'), + ] + +def _seg_73(): + return [ (0x1F224, 'M', u'声'), (0x1F225, 'M', u'吹'), (0x1F226, 'M', u'演'), @@ -7491,10 +7623,6 @@ def _seg_71(): (0x1F239, 'M', u'割'), (0x1F23A, 'M', u'営'), (0x1F23B, 'M', u'配'), - ] - -def _seg_72(): - return [ (0x1F23C, 'X'), (0x1F240, 'M', u'〔本〕'), (0x1F241, 'M', u'〔三〕'), @@ -7512,15 +7640,17 @@ def _seg_72(): (0x1F260, 'V'), (0x1F266, 'X'), (0x1F300, 'V'), - (0x1F6D5, 'X'), + (0x1F6D8, 'X'), (0x1F6E0, 'V'), (0x1F6ED, 'X'), (0x1F6F0, 'V'), - (0x1F6FA, 'X'), + (0x1F6FD, 'X'), (0x1F700, 'V'), (0x1F774, 'X'), (0x1F780, 'V'), (0x1F7D9, 'X'), + (0x1F7E0, 'V'), + (0x1F7EC, 'X'), (0x1F800, 'V'), (0x1F80C, 'X'), (0x1F810, 'V'), @@ -7531,28 +7661,51 @@ def _seg_72(): (0x1F888, 'X'), (0x1F890, 'V'), (0x1F8AE, 'X'), + (0x1F8B0, 'V'), + (0x1F8B2, 'X'), (0x1F900, 'V'), - (0x1F90C, 'X'), - (0x1F910, 'V'), - (0x1F93F, 'X'), - (0x1F940, 'V'), - (0x1F971, 'X'), - (0x1F973, 'V'), - (0x1F977, 'X'), + (0x1F979, 'X'), (0x1F97A, 'V'), - (0x1F97B, 'X'), - (0x1F97C, 'V'), - (0x1F9A3, 'X'), - (0x1F9B0, 'V'), - (0x1F9BA, 'X'), - (0x1F9C0, 'V'), - (0x1F9C3, 'X'), - (0x1F9D0, 'V'), - (0x1FA00, 'X'), + (0x1F9CC, 'X'), + (0x1F9CD, 'V'), + (0x1FA54, 'X'), (0x1FA60, 'V'), (0x1FA6E, 'X'), + (0x1FA70, 'V'), + (0x1FA75, 'X'), + (0x1FA78, 'V'), + (0x1FA7B, 'X'), + (0x1FA80, 'V'), + (0x1FA87, 'X'), + (0x1FA90, 'V'), + (0x1FAA9, 'X'), + (0x1FAB0, 'V'), + (0x1FAB7, 'X'), + (0x1FAC0, 'V'), + (0x1FAC3, 'X'), + (0x1FAD0, 'V'), + (0x1FAD7, 'X'), + (0x1FB00, 'V'), + (0x1FB93, 'X'), + (0x1FB94, 'V'), + (0x1FBCB, 'X'), + (0x1FBF0, 'M', u'0'), + (0x1FBF1, 'M', u'1'), + (0x1FBF2, 'M', u'2'), + (0x1FBF3, 'M', u'3'), + (0x1FBF4, 'M', u'4'), + (0x1FBF5, 'M', u'5'), + (0x1FBF6, 
'M', u'6'), + (0x1FBF7, 'M', u'7'), + (0x1FBF8, 'M', u'8'), + (0x1FBF9, 'M', u'9'), + ] + +def _seg_74(): + return [ + (0x1FBFA, 'X'), (0x20000, 'V'), - (0x2A6D7, 'X'), + (0x2A6DE, 'X'), (0x2A700, 'V'), (0x2B735, 'X'), (0x2B740, 'V'), @@ -7595,10 +7748,6 @@ def _seg_72(): (0x2F81F, 'M', u'㓟'), (0x2F820, 'M', u'刻'), (0x2F821, 'M', u'剆'), - ] - -def _seg_73(): - return [ (0x2F822, 'M', u'割'), (0x2F823, 'M', u'剷'), (0x2F824, 'M', u'㔕'), @@ -7654,6 +7803,10 @@ def _seg_73(): (0x2F859, 'M', u'𡓤'), (0x2F85A, 'M', u'売'), (0x2F85B, 'M', u'壷'), + ] + +def _seg_75(): + return [ (0x2F85C, 'M', u'夆'), (0x2F85D, 'M', u'多'), (0x2F85E, 'M', u'夢'), @@ -7699,10 +7852,6 @@ def _seg_73(): (0x2F887, 'M', u'幩'), (0x2F888, 'M', u'㡢'), (0x2F889, 'M', u'𢆃'), - ] - -def _seg_74(): - return [ (0x2F88A, 'M', u'㡼'), (0x2F88B, 'M', u'庰'), (0x2F88C, 'M', u'庳'), @@ -7758,6 +7907,10 @@ def _seg_74(): (0x2F8C0, 'M', u'揅'), (0x2F8C1, 'M', u'掩'), (0x2F8C2, 'M', u'㨮'), + ] + +def _seg_76(): + return [ (0x2F8C3, 'M', u'摩'), (0x2F8C4, 'M', u'摾'), (0x2F8C5, 'M', u'撝'), @@ -7803,10 +7956,6 @@ def _seg_74(): (0x2F8ED, 'M', u'櫛'), (0x2F8EE, 'M', u'㰘'), (0x2F8EF, 'M', u'次'), - ] - -def _seg_75(): - return [ (0x2F8F0, 'M', u'𣢧'), (0x2F8F1, 'M', u'歔'), (0x2F8F2, 'M', u'㱎'), @@ -7862,6 +8011,10 @@ def _seg_75(): (0x2F924, 'M', u'犀'), (0x2F925, 'M', u'犕'), (0x2F926, 'M', u'𤜵'), + ] + +def _seg_77(): + return [ (0x2F927, 'M', u'𤠔'), (0x2F928, 'M', u'獺'), (0x2F929, 'M', u'王'), @@ -7907,10 +8060,6 @@ def _seg_75(): (0x2F953, 'M', u'祖'), (0x2F954, 'M', u'𥚚'), (0x2F955, 'M', u'𥛅'), - ] - -def _seg_76(): - return [ (0x2F956, 'M', u'福'), (0x2F957, 'M', u'秫'), (0x2F958, 'M', u'䄯'), @@ -7966,6 +8115,10 @@ def _seg_76(): (0x2F98B, 'M', u'舁'), (0x2F98C, 'M', u'舄'), (0x2F98D, 'M', u'辞'), + ] + +def _seg_78(): + return [ (0x2F98E, 'M', u'䑫'), (0x2F98F, 'M', u'芑'), (0x2F990, 'M', u'芋'), @@ -8011,10 +8164,6 @@ def _seg_76(): (0x2F9B8, 'M', u'蚈'), (0x2F9B9, 'M', u'蜎'), (0x2F9BA, 'M', u'蛢'), - ] - -def _seg_77(): - return [ 
(0x2F9BB, 'M', u'蝹'), (0x2F9BC, 'M', u'蜨'), (0x2F9BD, 'M', u'蝫'), @@ -8070,6 +8219,10 @@ def _seg_77(): (0x2F9EF, 'M', u'䦕'), (0x2F9F0, 'M', u'閷'), (0x2F9F1, 'M', u'𨵷'), + ] + +def _seg_79(): + return [ (0x2F9F2, 'M', u'䧦'), (0x2F9F3, 'M', u'雃'), (0x2F9F4, 'M', u'嶲'), @@ -8114,11 +8267,9 @@ def _seg_77(): (0x2FA1C, 'M', u'鼻'), (0x2FA1D, 'M', u'𪘀'), (0x2FA1E, 'X'), + (0x30000, 'V'), + (0x3134B, 'X'), (0xE0100, 'I'), - ] - -def _seg_78(): - return [ (0xE01F0, 'X'), ] @@ -8202,4 +8353,5 @@ uts46data = tuple( + _seg_76() + _seg_77() + _seg_78() + + _seg_79() ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/ipaddress.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/ipaddress.py index f2d07668..3e6f9e49 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/ipaddress.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/ipaddress.py @@ -14,7 +14,7 @@ from __future__ import unicode_literals import itertools import struct -__version__ = '1.0.22' +__version__ = '1.0.23' # Compatibility functions _compat_int_types = (int,) @@ -1103,7 +1103,8 @@ class _BaseNetwork(_IPAddressBase): try: # Always false if one is v4 and the other is v6. 
if a._version != b._version: - raise TypeError("%s and %s are not of the same version" (a, b)) + raise TypeError( + "%s and %s are not of the same version" % (a, b)) return (b.network_address <= a.network_address and b.broadcast_address >= a.broadcast_address) except AttributeError: diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/__init__.py deleted file mode 100644 index a6f44a55..00000000 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/__init__.py +++ /dev/null @@ -1,347 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -lockfile.py - Platform-independent advisory file locks. - -Requires Python 2.5 unless you apply 2.4.diff -Locking is done on a per-thread basis instead of a per-process basis. - -Usage: - ->>> lock = LockFile('somefile') ->>> try: -... lock.acquire() -... except AlreadyLocked: -... print 'somefile', 'is locked already.' -... except LockFailed: -... print 'somefile', 'can\\'t be locked.' -... else: -... print 'got lock' -got lock ->>> print lock.is_locked() -True ->>> lock.release() - ->>> lock = LockFile('somefile') ->>> print lock.is_locked() -False ->>> with lock: -... print lock.is_locked() -True ->>> print lock.is_locked() -False - ->>> lock = LockFile('somefile') ->>> # It is okay to lock twice from the same thread... ->>> with lock: -... lock.acquire() -... ->>> # Though no counter is kept, so you can't unlock multiple times... ->>> print lock.is_locked() -False - -Exceptions: - - Error - base class for other exceptions - LockError - base class for all locking exceptions - AlreadyLocked - Another thread or process already holds the lock - LockFailed - Lock failed for some other reason - UnlockError - base class for all unlocking exceptions - AlreadyUnlocked - File was not locked. 
- NotMyLock - File was locked but not by the current thread/process -""" - -from __future__ import absolute_import - -import functools -import os -import socket -import threading -import warnings - -# Work with PEP8 and non-PEP8 versions of threading module. -if not hasattr(threading, "current_thread"): - threading.current_thread = threading.currentThread -if not hasattr(threading.Thread, "get_name"): - threading.Thread.get_name = threading.Thread.getName - -__all__ = ['Error', 'LockError', 'LockTimeout', 'AlreadyLocked', - 'LockFailed', 'UnlockError', 'NotLocked', 'NotMyLock', - 'LinkFileLock', 'MkdirFileLock', 'SQLiteFileLock', - 'LockBase', 'locked'] - - -class Error(Exception): - """ - Base class for other exceptions. - - >>> try: - ... raise Error - ... except Exception: - ... pass - """ - pass - - -class LockError(Error): - """ - Base class for error arising from attempts to acquire the lock. - - >>> try: - ... raise LockError - ... except Error: - ... pass - """ - pass - - -class LockTimeout(LockError): - """Raised when lock creation fails within a user-defined period of time. - - >>> try: - ... raise LockTimeout - ... except LockError: - ... pass - """ - pass - - -class AlreadyLocked(LockError): - """Some other thread/process is locking the file. - - >>> try: - ... raise AlreadyLocked - ... except LockError: - ... pass - """ - pass - - -class LockFailed(LockError): - """Lock file creation failed for some other reason. - - >>> try: - ... raise LockFailed - ... except LockError: - ... pass - """ - pass - - -class UnlockError(Error): - """ - Base class for errors arising from attempts to release the lock. - - >>> try: - ... raise UnlockError - ... except Error: - ... pass - """ - pass - - -class NotLocked(UnlockError): - """Raised when an attempt is made to unlock an unlocked file. - - >>> try: - ... raise NotLocked - ... except UnlockError: - ... 
pass - """ - pass - - -class NotMyLock(UnlockError): - """Raised when an attempt is made to unlock a file someone else locked. - - >>> try: - ... raise NotMyLock - ... except UnlockError: - ... pass - """ - pass - - -class _SharedBase(object): - def __init__(self, path): - self.path = path - - def acquire(self, timeout=None): - """ - Acquire the lock. - - * If timeout is omitted (or None), wait forever trying to lock the - file. - - * If timeout > 0, try to acquire the lock for that many seconds. If - the lock period expires and the file is still locked, raise - LockTimeout. - - * If timeout <= 0, raise AlreadyLocked immediately if the file is - already locked. - """ - raise NotImplemented("implement in subclass") - - def release(self): - """ - Release the lock. - - If the file is not locked, raise NotLocked. - """ - raise NotImplemented("implement in subclass") - - def __enter__(self): - """ - Context manager support. - """ - self.acquire() - return self - - def __exit__(self, *_exc): - """ - Context manager support. - """ - self.release() - - def __repr__(self): - return "<%s: %r>" % (self.__class__.__name__, self.path) - - -class LockBase(_SharedBase): - """Base class for platform-specific lock classes.""" - def __init__(self, path, threaded=True, timeout=None): - """ - >>> lock = LockBase('somefile') - >>> lock = LockBase('somefile', threaded=False) - """ - super(LockBase, self).__init__(path) - self.lock_file = os.path.abspath(path) + ".lock" - self.hostname = socket.gethostname() - self.pid = os.getpid() - if threaded: - t = threading.current_thread() - # Thread objects in Python 2.4 and earlier do not have ident - # attrs. Worm around that. 
- ident = getattr(t, "ident", hash(t)) - self.tname = "-%x" % (ident & 0xffffffff) - else: - self.tname = "" - dirname = os.path.dirname(self.lock_file) - - # unique name is mostly about the current process, but must - # also contain the path -- otherwise, two adjacent locked - # files conflict (one file gets locked, creating lock-file and - # unique file, the other one gets locked, creating lock-file - # and overwriting the already existing lock-file, then one - # gets unlocked, deleting both lock-file and unique file, - # finally the last lock errors out upon releasing. - self.unique_name = os.path.join(dirname, - "%s%s.%s%s" % (self.hostname, - self.tname, - self.pid, - hash(self.path))) - self.timeout = timeout - - def is_locked(self): - """ - Tell whether or not the file is locked. - """ - raise NotImplemented("implement in subclass") - - def i_am_locking(self): - """ - Return True if this object is locking the file. - """ - raise NotImplemented("implement in subclass") - - def break_lock(self): - """ - Remove a lock. Useful if a locking thread failed to unlock. - """ - raise NotImplemented("implement in subclass") - - def __repr__(self): - return "<%s: %r -- %r>" % (self.__class__.__name__, self.unique_name, - self.path) - - -def _fl_helper(cls, mod, *args, **kwds): - warnings.warn("Import from %s module instead of lockfile package" % mod, - DeprecationWarning, stacklevel=2) - # This is a bit funky, but it's only for awhile. The way the unit tests - # are constructed this function winds up as an unbound method, so it - # actually takes three args, not two. We want to toss out self. - if not isinstance(args[0], str): - # We are testing, avoid the first arg - args = args[1:] - if len(args) == 1 and not kwds: - kwds["threaded"] = True - return cls(*args, **kwds) - - -def LinkFileLock(*args, **kwds): - """Factory function provided for backwards compatibility. - - Do not use in new code. Instead, import LinkLockFile from the - lockfile.linklockfile module. 
- """ - from . import linklockfile - return _fl_helper(linklockfile.LinkLockFile, "lockfile.linklockfile", - *args, **kwds) - - -def MkdirFileLock(*args, **kwds): - """Factory function provided for backwards compatibility. - - Do not use in new code. Instead, import MkdirLockFile from the - lockfile.mkdirlockfile module. - """ - from . import mkdirlockfile - return _fl_helper(mkdirlockfile.MkdirLockFile, "lockfile.mkdirlockfile", - *args, **kwds) - - -def SQLiteFileLock(*args, **kwds): - """Factory function provided for backwards compatibility. - - Do not use in new code. Instead, import SQLiteLockFile from the - lockfile.mkdirlockfile module. - """ - from . import sqlitelockfile - return _fl_helper(sqlitelockfile.SQLiteLockFile, "lockfile.sqlitelockfile", - *args, **kwds) - - -def locked(path, timeout=None): - """Decorator which enables locks for decorated function. - - Arguments: - - path: path for lockfile. - - timeout (optional): Timeout for acquiring lock. - - Usage: - @locked('/var/run/myname', timeout=0) - def myname(...): - ... - """ - def decor(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - lock = FileLock(path, timeout=timeout) - lock.acquire() - try: - return func(*args, **kwargs) - finally: - lock.release() - return wrapper - return decor - - -if hasattr(os, "link"): - from . import linklockfile as _llf - LockFile = _llf.LinkLockFile -else: - from . import mkdirlockfile as _mlf - LockFile = _mlf.MkdirLockFile - -FileLock = LockFile diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/linklockfile.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/linklockfile.py deleted file mode 100644 index 2ca9be04..00000000 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/linklockfile.py +++ /dev/null @@ -1,73 +0,0 @@ -from __future__ import absolute_import - -import time -import os - -from . 
import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout, - AlreadyLocked) - - -class LinkLockFile(LockBase): - """Lock access to a file using atomic property of link(2). - - >>> lock = LinkLockFile('somefile') - >>> lock = LinkLockFile('somefile', threaded=False) - """ - - def acquire(self, timeout=None): - try: - open(self.unique_name, "wb").close() - except IOError: - raise LockFailed("failed to create %s" % self.unique_name) - - timeout = timeout if timeout is not None else self.timeout - end_time = time.time() - if timeout is not None and timeout > 0: - end_time += timeout - - while True: - # Try and create a hard link to it. - try: - os.link(self.unique_name, self.lock_file) - except OSError: - # Link creation failed. Maybe we've double-locked? - nlinks = os.stat(self.unique_name).st_nlink - if nlinks == 2: - # The original link plus the one I created == 2. We're - # good to go. - return - else: - # Otherwise the lock creation failed. - if timeout is not None and time.time() > end_time: - os.unlink(self.unique_name) - if timeout > 0: - raise LockTimeout("Timeout waiting to acquire" - " lock for %s" % - self.path) - else: - raise AlreadyLocked("%s is already locked" % - self.path) - time.sleep(timeout is not None and timeout / 10 or 0.1) - else: - # Link creation succeeded. We're good to go. 
- return - - def release(self): - if not self.is_locked(): - raise NotLocked("%s is not locked" % self.path) - elif not os.path.exists(self.unique_name): - raise NotMyLock("%s is locked, but not by me" % self.path) - os.unlink(self.unique_name) - os.unlink(self.lock_file) - - def is_locked(self): - return os.path.exists(self.lock_file) - - def i_am_locking(self): - return (self.is_locked() and - os.path.exists(self.unique_name) and - os.stat(self.unique_name).st_nlink == 2) - - def break_lock(self): - if os.path.exists(self.lock_file): - os.unlink(self.lock_file) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/mkdirlockfile.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/mkdirlockfile.py deleted file mode 100644 index 05a8c96c..00000000 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/mkdirlockfile.py +++ /dev/null @@ -1,84 +0,0 @@ -from __future__ import absolute_import, division - -import time -import os -import sys -import errno - -from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout, - AlreadyLocked) - - -class MkdirLockFile(LockBase): - """Lock file by creating a directory.""" - def __init__(self, path, threaded=True, timeout=None): - """ - >>> lock = MkdirLockFile('somefile') - >>> lock = MkdirLockFile('somefile', threaded=False) - """ - LockBase.__init__(self, path, threaded, timeout) - # Lock file itself is a directory. Place the unique file name into - # it. 
- self.unique_name = os.path.join(self.lock_file, - "%s.%s%s" % (self.hostname, - self.tname, - self.pid)) - - def acquire(self, timeout=None): - timeout = timeout if timeout is not None else self.timeout - end_time = time.time() - if timeout is not None and timeout > 0: - end_time += timeout - - if timeout is None: - wait = 0.1 - else: - wait = max(0, timeout / 10) - - while True: - try: - os.mkdir(self.lock_file) - except OSError: - err = sys.exc_info()[1] - if err.errno == errno.EEXIST: - # Already locked. - if os.path.exists(self.unique_name): - # Already locked by me. - return - if timeout is not None and time.time() > end_time: - if timeout > 0: - raise LockTimeout("Timeout waiting to acquire" - " lock for %s" % - self.path) - else: - # Someone else has the lock. - raise AlreadyLocked("%s is already locked" % - self.path) - time.sleep(wait) - else: - # Couldn't create the lock for some other reason - raise LockFailed("failed to create %s" % self.lock_file) - else: - open(self.unique_name, "wb").close() - return - - def release(self): - if not self.is_locked(): - raise NotLocked("%s is not locked" % self.path) - elif not os.path.exists(self.unique_name): - raise NotMyLock("%s is locked, but not by me" % self.path) - os.unlink(self.unique_name) - os.rmdir(self.lock_file) - - def is_locked(self): - return os.path.exists(self.lock_file) - - def i_am_locking(self): - return (self.is_locked() and - os.path.exists(self.unique_name)) - - def break_lock(self): - if os.path.exists(self.lock_file): - for name in os.listdir(self.lock_file): - os.unlink(os.path.join(self.lock_file, name)) - os.rmdir(self.lock_file) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/pidlockfile.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/pidlockfile.py deleted file mode 100644 index 069e85b1..00000000 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/pidlockfile.py +++ 
/dev/null @@ -1,190 +0,0 @@ -# -*- coding: utf-8 -*- - -# pidlockfile.py -# -# Copyright © 2008–2009 Ben Finney <ben+python@benfinney.id.au> -# -# This is free software: you may copy, modify, and/or distribute this work -# under the terms of the Python Software Foundation License, version 2 or -# later as published by the Python Software Foundation. -# No warranty expressed or implied. See the file LICENSE.PSF-2 for details. - -""" Lockfile behaviour implemented via Unix PID files. - """ - -from __future__ import absolute_import - -import errno -import os -import time - -from . import (LockBase, AlreadyLocked, LockFailed, NotLocked, NotMyLock, - LockTimeout) - - -class PIDLockFile(LockBase): - """ Lockfile implemented as a Unix PID file. - - The lock file is a normal file named by the attribute `path`. - A lock's PID file contains a single line of text, containing - the process ID (PID) of the process that acquired the lock. - - >>> lock = PIDLockFile('somefile') - >>> lock = PIDLockFile('somefile') - """ - - def __init__(self, path, threaded=False, timeout=None): - # pid lockfiles don't support threaded operation, so always force - # False as the threaded arg. - LockBase.__init__(self, path, False, timeout) - self.unique_name = self.path - - def read_pid(self): - """ Get the PID from the lock file. - """ - return read_pid_from_pidfile(self.path) - - def is_locked(self): - """ Test if the lock is currently held. - - The lock is held if the PID file for this lock exists. - - """ - return os.path.exists(self.path) - - def i_am_locking(self): - """ Test if the lock is held by the current process. - - Returns ``True`` if the current process ID matches the - number stored in the PID file. - """ - return self.is_locked() and os.getpid() == self.read_pid() - - def acquire(self, timeout=None): - """ Acquire the lock. - - Creates the PID file for this lock, or raises an error if - the lock could not be acquired. 
- """ - - timeout = timeout if timeout is not None else self.timeout - end_time = time.time() - if timeout is not None and timeout > 0: - end_time += timeout - - while True: - try: - write_pid_to_pidfile(self.path) - except OSError as exc: - if exc.errno == errno.EEXIST: - # The lock creation failed. Maybe sleep a bit. - if time.time() > end_time: - if timeout is not None and timeout > 0: - raise LockTimeout("Timeout waiting to acquire" - " lock for %s" % - self.path) - else: - raise AlreadyLocked("%s is already locked" % - self.path) - time.sleep(timeout is not None and timeout / 10 or 0.1) - else: - raise LockFailed("failed to create %s" % self.path) - else: - return - - def release(self): - """ Release the lock. - - Removes the PID file to release the lock, or raises an - error if the current process does not hold the lock. - - """ - if not self.is_locked(): - raise NotLocked("%s is not locked" % self.path) - if not self.i_am_locking(): - raise NotMyLock("%s is locked, but not by me" % self.path) - remove_existing_pidfile(self.path) - - def break_lock(self): - """ Break an existing lock. - - Removes the PID file if it already exists, otherwise does - nothing. - - """ - remove_existing_pidfile(self.path) - - -def read_pid_from_pidfile(pidfile_path): - """ Read the PID recorded in the named PID file. - - Read and return the numeric PID recorded as text in the named - PID file. If the PID file cannot be read, or if the content is - not a valid PID, return ``None``. - - """ - pid = None - try: - pidfile = open(pidfile_path, 'r') - except IOError: - pass - else: - # According to the FHS 2.3 section on PID files in /var/run: - # - # The file must consist of the process identifier in - # ASCII-encoded decimal, followed by a newline character. - # - # Programs that read PID files should be somewhat flexible - # in what they accept; i.e., they should ignore extra - # whitespace, leading zeroes, absence of the trailing - # newline, or additional lines in the PID file. 
- - line = pidfile.readline().strip() - try: - pid = int(line) - except ValueError: - pass - pidfile.close() - - return pid - - -def write_pid_to_pidfile(pidfile_path): - """ Write the PID in the named PID file. - - Get the numeric process ID (“PID”) of the current process - and write it to the named file as a line of text. - - """ - open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY) - open_mode = 0o644 - pidfile_fd = os.open(pidfile_path, open_flags, open_mode) - pidfile = os.fdopen(pidfile_fd, 'w') - - # According to the FHS 2.3 section on PID files in /var/run: - # - # The file must consist of the process identifier in - # ASCII-encoded decimal, followed by a newline character. For - # example, if crond was process number 25, /var/run/crond.pid - # would contain three characters: two, five, and newline. - - pid = os.getpid() - pidfile.write("%s\n" % pid) - pidfile.close() - - -def remove_existing_pidfile(pidfile_path): - """ Remove the named PID file if it exists. - - Removing a PID file that doesn't already exist puts us in the - desired state, so we ignore the condition if the file does not - exist. - - """ - try: - os.remove(pidfile_path) - except OSError as exc: - if exc.errno == errno.ENOENT: - pass - else: - raise diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/sqlitelockfile.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/sqlitelockfile.py deleted file mode 100644 index f997e244..00000000 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/sqlitelockfile.py +++ /dev/null @@ -1,156 +0,0 @@ -from __future__ import absolute_import, division - -import time -import os - -try: - unicode -except NameError: - unicode = str - -from . import LockBase, NotLocked, NotMyLock, LockTimeout, AlreadyLocked - - -class SQLiteLockFile(LockBase): - "Demonstrate SQL-based locking." 
- - testdb = None - - def __init__(self, path, threaded=True, timeout=None): - """ - >>> lock = SQLiteLockFile('somefile') - >>> lock = SQLiteLockFile('somefile', threaded=False) - """ - LockBase.__init__(self, path, threaded, timeout) - self.lock_file = unicode(self.lock_file) - self.unique_name = unicode(self.unique_name) - - if SQLiteLockFile.testdb is None: - import tempfile - _fd, testdb = tempfile.mkstemp() - os.close(_fd) - os.unlink(testdb) - del _fd, tempfile - SQLiteLockFile.testdb = testdb - - import sqlite3 - self.connection = sqlite3.connect(SQLiteLockFile.testdb) - - c = self.connection.cursor() - try: - c.execute("create table locks" - "(" - " lock_file varchar(32)," - " unique_name varchar(32)" - ")") - except sqlite3.OperationalError: - pass - else: - self.connection.commit() - import atexit - atexit.register(os.unlink, SQLiteLockFile.testdb) - - def acquire(self, timeout=None): - timeout = timeout if timeout is not None else self.timeout - end_time = time.time() - if timeout is not None and timeout > 0: - end_time += timeout - - if timeout is None: - wait = 0.1 - elif timeout <= 0: - wait = 0 - else: - wait = timeout / 10 - - cursor = self.connection.cursor() - - while True: - if not self.is_locked(): - # Not locked. Try to lock it. - cursor.execute("insert into locks" - " (lock_file, unique_name)" - " values" - " (?, ?)", - (self.lock_file, self.unique_name)) - self.connection.commit() - - # Check to see if we are the only lock holder. - cursor.execute("select * from locks" - " where unique_name = ?", - (self.unique_name,)) - rows = cursor.fetchall() - if len(rows) > 1: - # Nope. Someone else got there. Remove our lock. - cursor.execute("delete from locks" - " where unique_name = ?", - (self.unique_name,)) - self.connection.commit() - else: - # Yup. We're done, so go home. - return - else: - # Check to see if we are the only lock holder. 
- cursor.execute("select * from locks" - " where unique_name = ?", - (self.unique_name,)) - rows = cursor.fetchall() - if len(rows) == 1: - # We're the locker, so go home. - return - - # Maybe we should wait a bit longer. - if timeout is not None and time.time() > end_time: - if timeout > 0: - # No more waiting. - raise LockTimeout("Timeout waiting to acquire" - " lock for %s" % - self.path) - else: - # Someone else has the lock and we are impatient.. - raise AlreadyLocked("%s is already locked" % self.path) - - # Well, okay. We'll give it a bit longer. - time.sleep(wait) - - def release(self): - if not self.is_locked(): - raise NotLocked("%s is not locked" % self.path) - if not self.i_am_locking(): - raise NotMyLock("%s is locked, but not by me (by %s)" % - (self.unique_name, self._who_is_locking())) - cursor = self.connection.cursor() - cursor.execute("delete from locks" - " where unique_name = ?", - (self.unique_name,)) - self.connection.commit() - - def _who_is_locking(self): - cursor = self.connection.cursor() - cursor.execute("select unique_name from locks" - " where lock_file = ?", - (self.lock_file,)) - return cursor.fetchone()[0] - - def is_locked(self): - cursor = self.connection.cursor() - cursor.execute("select * from locks" - " where lock_file = ?", - (self.lock_file,)) - rows = cursor.fetchall() - return not not rows - - def i_am_locking(self): - cursor = self.connection.cursor() - cursor.execute("select * from locks" - " where lock_file = ?" 
- " and unique_name = ?", - (self.lock_file, self.unique_name)) - return not not cursor.fetchall() - - def break_lock(self): - cursor = self.connection.cursor() - cursor.execute("delete from locks" - " where lock_file = ?", - (self.lock_file,)) - self.connection.commit() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/symlinklockfile.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/symlinklockfile.py deleted file mode 100644 index 23b41f58..00000000 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/lockfile/symlinklockfile.py +++ /dev/null @@ -1,70 +0,0 @@ -from __future__ import absolute_import - -import os -import time - -from . import (LockBase, NotLocked, NotMyLock, LockTimeout, - AlreadyLocked) - - -class SymlinkLockFile(LockBase): - """Lock access to a file using symlink(2).""" - - def __init__(self, path, threaded=True, timeout=None): - # super(SymlinkLockFile).__init(...) - LockBase.__init__(self, path, threaded, timeout) - # split it back! - self.unique_name = os.path.split(self.unique_name)[1] - - def acquire(self, timeout=None): - # Hopefully unnecessary for symlink. - # try: - # open(self.unique_name, "wb").close() - # except IOError: - # raise LockFailed("failed to create %s" % self.unique_name) - timeout = timeout if timeout is not None else self.timeout - end_time = time.time() - if timeout is not None and timeout > 0: - end_time += timeout - - while True: - # Try and create a symbolic link to it. - try: - os.symlink(self.unique_name, self.lock_file) - except OSError: - # Link creation failed. Maybe we've double-locked? - if self.i_am_locking(): - # Linked to out unique name. Proceed. - return - else: - # Otherwise the lock creation failed. 
- if timeout is not None and time.time() > end_time: - if timeout > 0: - raise LockTimeout("Timeout waiting to acquire" - " lock for %s" % - self.path) - else: - raise AlreadyLocked("%s is already locked" % - self.path) - time.sleep(timeout / 10 if timeout is not None else 0.1) - else: - # Link creation succeeded. We're good to go. - return - - def release(self): - if not self.is_locked(): - raise NotLocked("%s is not locked" % self.path) - elif not self.i_am_locking(): - raise NotMyLock("%s is locked, but not by me" % self.path) - os.unlink(self.lock_file) - - def is_locked(self): - return os.path.islink(self.lock_file) - - def i_am_locking(self): - return (os.path.islink(self.lock_file) - and os.readlink(self.lock_file) == self.unique_name) - - def break_lock(self): - if os.path.islink(self.lock_file): # exists && link - os.unlink(self.lock_file) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/msgpack/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/msgpack/__init__.py index 2afca5ad..d6705e22 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/msgpack/__init__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/msgpack/__init__.py @@ -1,31 +1,19 @@ # coding: utf-8 -from pip._vendor.msgpack._version import version -from pip._vendor.msgpack.exceptions import * +from ._version import version +from .exceptions import * +from .ext import ExtType, Timestamp -from collections import namedtuple - - -class ExtType(namedtuple('ExtType', 'code data')): - """ExtType represents ext type in msgpack.""" - def __new__(cls, code, data): - if not isinstance(code, int): - raise TypeError("code must be int") - if not isinstance(data, bytes): - raise TypeError("data must be bytes") - if not 0 <= code <= 127: - raise ValueError("code must be 0~127") - return super(ExtType, cls).__new__(cls, code, data) +import os +import sys -import os -if 
os.environ.get('MSGPACK_PUREPYTHON'): - from pip._vendor.msgpack.fallback import Packer, unpackb, Unpacker +if os.environ.get("MSGPACK_PUREPYTHON") or sys.version_info[0] == 2: + from .fallback import Packer, unpackb, Unpacker else: try: - from pip._vendor.msgpack._packer import Packer - from pip._vendor.msgpack._unpacker import unpackb, Unpacker + from ._cmsgpack import Packer, unpackb, Unpacker except ImportError: - from pip._vendor.msgpack.fallback import Packer, unpackb, Unpacker + from .fallback import Packer, unpackb, Unpacker def pack(o, stream, **kwargs): diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/msgpack/_version.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/msgpack/_version.py index d28f0deb..9f55cf50 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/msgpack/_version.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/msgpack/_version.py @@ -1 +1 @@ -version = (0, 5, 6) +version = (1, 0, 0) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/msgpack/exceptions.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/msgpack/exceptions.py index 97668814..d6d2615c 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/msgpack/exceptions.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/msgpack/exceptions.py @@ -1,5 +1,10 @@ class UnpackException(Exception): - """Deprecated. Use Exception instead to catch all exception during unpacking.""" + """Base class for some exceptions raised while unpacking. + + NOTE: unpack may raise exception other than subclass of + UnpackException. If you want to catch all error, catch + Exception instead. + """ class BufferFull(UnpackException): @@ -10,32 +15,34 @@ class OutOfData(UnpackException): pass -class UnpackValueError(UnpackException, ValueError): - """Deprecated. 
Use ValueError instead.""" +class FormatError(ValueError, UnpackException): + """Invalid msgpack format""" -class ExtraData(UnpackValueError): - def __init__(self, unpacked, extra): - self.unpacked = unpacked - self.extra = extra - - def __str__(self): - return "unpack(b) received extra data." +class StackError(ValueError, UnpackException): + """Too nested""" -class PackException(Exception): - """Deprecated. Use Exception instead to catch all exception during packing.""" +# Deprecated. Use ValueError instead +UnpackValueError = ValueError -class PackValueError(PackException, ValueError): - """PackValueError is raised when type of input data is supported but it's value is unsupported. +class ExtraData(UnpackValueError): + """ExtraData is raised when there is trailing data. - Deprecated. Use ValueError instead. + This exception is raised while only one-shot (not streaming) + unpack. """ + def __init__(self, unpacked, extra): + self.unpacked = unpacked + self.extra = extra + + def __str__(self): + return "unpack(b) received extra data." -class PackOverflowError(PackValueError, OverflowError): - """PackOverflowError is raised when integer value is out of range of msgpack support [-2**31, 2**32). - Deprecated. Use ValueError instead. - """ +# Deprecated. Use Exception instead to catch all exception during packing. 
+PackException = Exception +PackValueError = ValueError +PackOverflowError = OverflowError diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/msgpack/ext.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/msgpack/ext.py new file mode 100644 index 00000000..8341c68b --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/msgpack/ext.py @@ -0,0 +1,191 @@ +# coding: utf-8 +from collections import namedtuple +import datetime +import sys +import struct + + +PY2 = sys.version_info[0] == 2 + +if PY2: + int_types = (int, long) + _utc = None +else: + int_types = int + try: + _utc = datetime.timezone.utc + except AttributeError: + _utc = datetime.timezone(datetime.timedelta(0)) + + +class ExtType(namedtuple("ExtType", "code data")): + """ExtType represents ext type in msgpack.""" + + def __new__(cls, code, data): + if not isinstance(code, int): + raise TypeError("code must be int") + if not isinstance(data, bytes): + raise TypeError("data must be bytes") + if not 0 <= code <= 127: + raise ValueError("code must be 0~127") + return super(ExtType, cls).__new__(cls, code, data) + + +class Timestamp(object): + """Timestamp represents the Timestamp extension type in msgpack. + + When built with Cython, msgpack uses C methods to pack and unpack `Timestamp`. When using pure-Python + msgpack, :func:`to_bytes` and :func:`from_bytes` are used to pack and unpack `Timestamp`. + + This class is immutable: Do not override seconds and nanoseconds. + """ + + __slots__ = ["seconds", "nanoseconds"] + + def __init__(self, seconds, nanoseconds=0): + """Initialize a Timestamp object. + + :param int seconds: + Number of seconds since the UNIX epoch (00:00:00 UTC Jan 1 1970, minus leap seconds). + May be negative. + + :param int nanoseconds: + Number of nanoseconds to add to `seconds` to get fractional time. + Maximum is 999_999_999. Default is 0. 
+ + Note: Negative times (before the UNIX epoch) are represented as negative seconds + positive ns. + """ + if not isinstance(seconds, int_types): + raise TypeError("seconds must be an interger") + if not isinstance(nanoseconds, int_types): + raise TypeError("nanoseconds must be an integer") + if not (0 <= nanoseconds < 10 ** 9): + raise ValueError( + "nanoseconds must be a non-negative integer less than 999999999." + ) + self.seconds = seconds + self.nanoseconds = nanoseconds + + def __repr__(self): + """String representation of Timestamp.""" + return "Timestamp(seconds={0}, nanoseconds={1})".format( + self.seconds, self.nanoseconds + ) + + def __eq__(self, other): + """Check for equality with another Timestamp object""" + if type(other) is self.__class__: + return ( + self.seconds == other.seconds and self.nanoseconds == other.nanoseconds + ) + return False + + def __ne__(self, other): + """not-equals method (see :func:`__eq__()`)""" + return not self.__eq__(other) + + def __hash__(self): + return hash((self.seconds, self.nanoseconds)) + + @staticmethod + def from_bytes(b): + """Unpack bytes into a `Timestamp` object. + + Used for pure-Python msgpack unpacking. + + :param b: Payload from msgpack ext message with code -1 + :type b: bytes + + :returns: Timestamp object unpacked from msgpack ext payload + :rtype: Timestamp + """ + if len(b) == 4: + seconds = struct.unpack("!L", b)[0] + nanoseconds = 0 + elif len(b) == 8: + data64 = struct.unpack("!Q", b)[0] + seconds = data64 & 0x00000003FFFFFFFF + nanoseconds = data64 >> 34 + elif len(b) == 12: + nanoseconds, seconds = struct.unpack("!Iq", b) + else: + raise ValueError( + "Timestamp type can only be created from 32, 64, or 96-bit byte objects" + ) + return Timestamp(seconds, nanoseconds) + + def to_bytes(self): + """Pack this Timestamp object into bytes. + + Used for pure-Python msgpack packing. 
+ + :returns data: Payload for EXT message with code -1 (timestamp type) + :rtype: bytes + """ + if (self.seconds >> 34) == 0: # seconds is non-negative and fits in 34 bits + data64 = self.nanoseconds << 34 | self.seconds + if data64 & 0xFFFFFFFF00000000 == 0: + # nanoseconds is zero and seconds < 2**32, so timestamp 32 + data = struct.pack("!L", data64) + else: + # timestamp 64 + data = struct.pack("!Q", data64) + else: + # timestamp 96 + data = struct.pack("!Iq", self.nanoseconds, self.seconds) + return data + + @staticmethod + def from_unix(unix_sec): + """Create a Timestamp from posix timestamp in seconds. + + :param unix_float: Posix timestamp in seconds. + :type unix_float: int or float. + """ + seconds = int(unix_sec // 1) + nanoseconds = int((unix_sec % 1) * 10 ** 9) + return Timestamp(seconds, nanoseconds) + + def to_unix(self): + """Get the timestamp as a floating-point value. + + :returns: posix timestamp + :rtype: float + """ + return self.seconds + self.nanoseconds / 1e9 + + @staticmethod + def from_unix_nano(unix_ns): + """Create a Timestamp from posix timestamp in nanoseconds. + + :param int unix_ns: Posix timestamp in nanoseconds. + :rtype: Timestamp + """ + return Timestamp(*divmod(unix_ns, 10 ** 9)) + + def to_unix_nano(self): + """Get the timestamp as a unixtime in nanoseconds. + + :returns: posix timestamp in nanoseconds + :rtype: int + """ + return self.seconds * 10 ** 9 + self.nanoseconds + + def to_datetime(self): + """Get the timestamp as a UTC datetime. + + Python 2 is not supported. + + :rtype: datetime. + """ + return datetime.datetime.fromtimestamp(self.to_unix(), _utc) + + @staticmethod + def from_datetime(dt): + """Create a Timestamp from datetime with tzinfo. + + Python 2 is not supported. 
+ + :rtype: Timestamp + """ + return Timestamp.from_unix(dt.timestamp()) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/msgpack/fallback.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/msgpack/fallback.py index 94184218..9f6665b3 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/msgpack/fallback.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/msgpack/fallback.py @@ -1,76 +1,98 @@ """Fallback pure Python implementation of msgpack""" +from datetime import datetime as _DateTime import sys import struct -import warnings -if sys.version_info[0] == 3: - PY3 = True + +PY2 = sys.version_info[0] == 2 +if PY2: + int_types = (int, long) + + def dict_iteritems(d): + return d.iteritems() + + +else: int_types = int - Unicode = str + unicode = str xrange = range + def dict_iteritems(d): return d.items() + + +if sys.version_info < (3, 5): + # Ugly hack... + RecursionError = RuntimeError + + def _is_recursionerror(e): + return ( + len(e.args) == 1 + and isinstance(e.args[0], str) + and e.args[0].startswith("maximum recursion depth exceeded") + ) + + else: - PY3 = False - int_types = (int, long) - Unicode = unicode - def dict_iteritems(d): - return d.iteritems() + + def _is_recursionerror(e): + return True -if hasattr(sys, 'pypy_version_info'): - # cStringIO is slow on PyPy, StringIO is faster. However: PyPy's own +if hasattr(sys, "pypy_version_info"): + # StringIO is slow on PyPy, StringIO is faster. However: PyPy's own # StringBuilder is fastest. 
from __pypy__ import newlist_hint + try: from __pypy__.builders import BytesBuilder as StringBuilder except ImportError: from __pypy__.builders import StringBuilder USING_STRINGBUILDER = True + class StringIO(object): - def __init__(self, s=b''): + def __init__(self, s=b""): if s: self.builder = StringBuilder(len(s)) self.builder.append(s) else: self.builder = StringBuilder() + def write(self, s): if isinstance(s, memoryview): s = s.tobytes() elif isinstance(s, bytearray): s = bytes(s) self.builder.append(s) + def getvalue(self): return self.builder.build() + + else: USING_STRINGBUILDER = False from io import BytesIO as StringIO + newlist_hint = lambda size: [] -from pip._vendor.msgpack.exceptions import ( - BufferFull, - OutOfData, - UnpackValueError, - PackValueError, - PackOverflowError, - ExtraData) +from .exceptions import BufferFull, OutOfData, ExtraData, FormatError, StackError -from pip._vendor.msgpack import ExtType +from .ext import ExtType, Timestamp -EX_SKIP = 0 -EX_CONSTRUCT = 1 -EX_READ_ARRAY_HEADER = 2 -EX_READ_MAP_HEADER = 3 +EX_SKIP = 0 +EX_CONSTRUCT = 1 +EX_READ_ARRAY_HEADER = 2 +EX_READ_MAP_HEADER = 3 -TYPE_IMMEDIATE = 0 -TYPE_ARRAY = 1 -TYPE_MAP = 2 -TYPE_RAW = 3 -TYPE_BIN = 4 -TYPE_EXT = 5 +TYPE_IMMEDIATE = 0 +TYPE_ARRAY = 1 +TYPE_MAP = 2 +TYPE_RAW = 3 +TYPE_BIN = 4 +TYPE_EXT = 5 DEFAULT_RECURSE_LIMIT = 511 @@ -83,53 +105,54 @@ def _check_type_strict(obj, t, type=type, tuple=tuple): def _get_data_from_buffer(obj): - try: - view = memoryview(obj) - except TypeError: - # try to use legacy buffer protocol if 2.7, otherwise re-raise - if not PY3: - view = memoryview(buffer(obj)) - warnings.warn("using old buffer interface to unpack %s; " - "this leads to unpacking errors if slicing is used and " - "will be removed in a future version" % type(obj), - RuntimeWarning) - else: - raise + view = memoryview(obj) if view.itemsize != 1: raise ValueError("cannot unpack from multi-byte object") return view -def unpack(stream, **kwargs): - warnings.warn( - 
"Direct calling implementation's unpack() is deprecated, Use msgpack.unpack() or unpackb() instead.", - PendingDeprecationWarning) - data = stream.read() - return unpackb(data, **kwargs) - - def unpackb(packed, **kwargs): """ Unpack an object from `packed`. - Raises `ExtraData` when `packed` contains extra bytes. + Raises ``ExtraData`` when *packed* contains extra bytes. + Raises ``ValueError`` when *packed* is incomplete. + Raises ``FormatError`` when *packed* is not valid msgpack. + Raises ``StackError`` when *packed* contains too nested. + Other exceptions can be raised during unpacking. + See :class:`Unpacker` for options. """ - unpacker = Unpacker(None, **kwargs) + unpacker = Unpacker(None, max_buffer_size=len(packed), **kwargs) unpacker.feed(packed) try: ret = unpacker._unpack() except OutOfData: - raise UnpackValueError("Data is not enough.") + raise ValueError("Unpack failed: incomplete input") + except RecursionError as e: + if _is_recursionerror(e): + raise StackError + raise if unpacker._got_extradata(): raise ExtraData(ret, unpacker._get_extradata()) return ret +if sys.version_info < (2, 7, 6): + + def _unpack_from(f, b, o=0): + """Explicit type cast for legacy struct.unpack_from""" + return struct.unpack_from(f, bytes(b), o) + + +else: + _unpack_from = struct.unpack_from + + class Unpacker(object): """Streaming unpacker. - arguments: + Arguments: :param file_like: File-like object having `.read(n)` method. @@ -143,14 +166,19 @@ class Unpacker(object): Otherwise, unpack to Python tuple. (default: True) :param bool raw: - If true, unpack msgpack raw to Python bytes (default). - Otherwise, unpack to Python str (or unicode on Python 2) by decoding - with UTF-8 encoding (recommended). - Currently, the default is true, but it will be changed to false in - near future. So you must specify it explicitly for keeping backward - compatibility. + If true, unpack msgpack raw to Python bytes. 
+ Otherwise, unpack to Python str by decoding with UTF-8 encoding (default). + + :param int timestamp: + Control how timestamp type is unpacked: + + 0 - Timestamp + 1 - float (Seconds from the EPOCH) + 2 - int (Nanoseconds from the EPOCH) + 3 - datetime.datetime (UTC). Python 2 is not supported. - *encoding* option which is deprecated overrides this option. + :param bool strict_map_key: + If true (default), only str or bytes are accepted for map (dict) keys. :param callable object_hook: When specified, it should be callable. @@ -162,41 +190,46 @@ class Unpacker(object): Unpacker calls it with a list of key-value pairs after unpacking msgpack map. (See also simplejson) - :param str encoding: - Encoding used for decoding msgpack raw. - If it is None (default), msgpack raw is deserialized to Python bytes. - :param str unicode_errors: - (deprecated) Used for decoding msgpack raw with *encoding*. - (default: `'strict'`) + The error handler for decoding unicode. (default: 'strict') + This option should be used only when you have msgpack data which + contains invalid UTF-8 string. :param int max_buffer_size: - Limits size of data waiting unpacked. 0 means system's INT_MAX (default). + Limits size of data waiting unpacked. 0 means 2**32-1. + The default value is 100*1024*1024 (100MiB). Raises `BufferFull` exception when it is insufficient. You should set this parameter when unpacking data from untrusted source. :param int max_str_len: - Limits max length of str. (default: 2**31-1) + Deprecated, use *max_buffer_size* instead. + Limits max length of str. (default: max_buffer_size) :param int max_bin_len: - Limits max length of bin. (default: 2**31-1) + Deprecated, use *max_buffer_size* instead. + Limits max length of bin. (default: max_buffer_size) :param int max_array_len: - Limits max length of array. (default: 2**31-1) + Limits max length of array. + (default: max_buffer_size) :param int max_map_len: - Limits max length of map. 
(default: 2**31-1) + Limits max length of map. + (default: max_buffer_size//2) + :param int max_ext_len: + Deprecated, use *max_buffer_size* instead. + Limits max size of ext type. (default: max_buffer_size) - example of streaming deserialize from file-like object:: + Example of streaming deserialize from file-like object:: - unpacker = Unpacker(file_like, raw=False) + unpacker = Unpacker(file_like) for o in unpacker: process(o) - example of streaming deserialize from socket:: + Example of streaming deserialize from socket:: - unpacker = Unpacker(raw=False) + unpacker = Unpacker(max_buffer_size) while True: buf = sock.recv(1024**2) if not buf: @@ -204,25 +237,36 @@ class Unpacker(object): unpacker.feed(buf) for o in unpacker: process(o) - """ - def __init__(self, file_like=None, read_size=0, use_list=True, raw=True, - object_hook=None, object_pairs_hook=None, list_hook=None, - encoding=None, unicode_errors=None, max_buffer_size=0, - ext_hook=ExtType, - max_str_len=2147483647, # 2**32-1 - max_bin_len=2147483647, - max_array_len=2147483647, - max_map_len=2147483647, - max_ext_len=2147483647): - - if encoding is not None: - warnings.warn( - "encoding is deprecated, Use raw=False instead.", - PendingDeprecationWarning) + Raises ``ExtraData`` when *packed* contains extra bytes. + Raises ``OutOfData`` when *packed* is incomplete. + Raises ``FormatError`` when *packed* is not valid msgpack. + Raises ``StackError`` when *packed* contains too nested. + Other exceptions can be raised during unpacking. 
+ """ + def __init__( + self, + file_like=None, + read_size=0, + use_list=True, + raw=False, + timestamp=0, + strict_map_key=True, + object_hook=None, + object_pairs_hook=None, + list_hook=None, + unicode_errors=None, + max_buffer_size=100 * 1024 * 1024, + ext_hook=ExtType, + max_str_len=-1, + max_bin_len=-1, + max_array_len=-1, + max_map_len=-1, + max_ext_len=-1, + ): if unicode_errors is None: - unicode_errors = 'strict' + unicode_errors = "strict" if file_like is None: self._feeding = True @@ -234,12 +278,6 @@ class Unpacker(object): #: array of bytes fed. self._buffer = bytearray() - # Some very old pythons don't support `struct.unpack_from()` with a - # `bytearray`. So we wrap it in a `buffer()` there. - if sys.version_info < (2, 7, 6): - self._buffer_view = buffer(self._buffer) - else: - self._buffer_view = self._buffer #: Which position we currently reads self._buff_i = 0 @@ -252,14 +290,30 @@ class Unpacker(object): # state, which _buf_checkpoint records. self._buf_checkpoint = 0 - self._max_buffer_size = max_buffer_size or 2**31-1 + if not max_buffer_size: + max_buffer_size = 2 ** 31 - 1 + if max_str_len == -1: + max_str_len = max_buffer_size + if max_bin_len == -1: + max_bin_len = max_buffer_size + if max_array_len == -1: + max_array_len = max_buffer_size + if max_map_len == -1: + max_map_len = max_buffer_size // 2 + if max_ext_len == -1: + max_ext_len = max_buffer_size + + self._max_buffer_size = max_buffer_size if read_size > self._max_buffer_size: raise ValueError("read_size must be smaller than max_buffer_size") - self._read_size = read_size or min(self._max_buffer_size, 16*1024) + self._read_size = read_size or min(self._max_buffer_size, 16 * 1024) self._raw = bool(raw) - self._encoding = encoding + self._strict_map_key = bool(strict_map_key) self._unicode_errors = unicode_errors self._use_list = use_list + if not (0 <= timestamp <= 3): + raise ValueError("timestamp must be 0..3") + self._timestamp = timestamp self._list_hook = list_hook 
self._object_hook = object_hook self._object_pairs_hook = object_pairs_hook @@ -272,30 +326,32 @@ class Unpacker(object): self._stream_offset = 0 if list_hook is not None and not callable(list_hook): - raise TypeError('`list_hook` is not callable') + raise TypeError("`list_hook` is not callable") if object_hook is not None and not callable(object_hook): - raise TypeError('`object_hook` is not callable') + raise TypeError("`object_hook` is not callable") if object_pairs_hook is not None and not callable(object_pairs_hook): - raise TypeError('`object_pairs_hook` is not callable') + raise TypeError("`object_pairs_hook` is not callable") if object_hook is not None and object_pairs_hook is not None: - raise TypeError("object_pairs_hook and object_hook are mutually " - "exclusive") + raise TypeError( + "object_pairs_hook and object_hook are mutually " "exclusive" + ) if not callable(ext_hook): raise TypeError("`ext_hook` is not callable") def feed(self, next_bytes): assert self._feeding view = _get_data_from_buffer(next_bytes) - if (len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size): + if len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size: raise BufferFull # Strip buffer before checkpoint before reading file. if self._buf_checkpoint > 0: - del self._buffer[:self._buf_checkpoint] + del self._buffer[: self._buf_checkpoint] self._buff_i -= self._buf_checkpoint self._buf_checkpoint = 0 - self._buffer += view + # Use extend here: INPLACE_ADD += doesn't reliably typecast memoryview in jython + self._buffer.extend(view) def _consume(self): """ Gets rid of the used parts of the buffer. 
""" @@ -306,17 +362,19 @@ class Unpacker(object): return self._buff_i < len(self._buffer) def _get_extradata(self): - return self._buffer[self._buff_i:] + return self._buffer[self._buff_i :] def read_bytes(self, n): - return self._read(n) + ret = self._read(n) + self._consume() + return ret def _read(self, n): # (int) -> bytearray self._reserve(n) i = self._buff_i - self._buff_i = i+n - return self._buffer[i:i+n] + self._buff_i = i + n + return self._buffer[i : i + n] def _reserve(self, n): remain_bytes = len(self._buffer) - self._buff_i - n @@ -331,7 +389,7 @@ class Unpacker(object): # Strip buffer before checkpoint before reading file. if self._buf_checkpoint > 0: - del self._buffer[:self._buf_checkpoint] + del self._buffer[: self._buf_checkpoint] self._buff_i -= self._buf_checkpoint self._buf_checkpoint = 0 @@ -360,206 +418,206 @@ class Unpacker(object): if b & 0b10000000 == 0: obj = b elif b & 0b11100000 == 0b11100000: - obj = -1 - (b ^ 0xff) + obj = -1 - (b ^ 0xFF) elif b & 0b11100000 == 0b10100000: n = b & 0b00011111 typ = TYPE_RAW if n > self._max_str_len: - raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) + raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) obj = self._read(n) elif b & 0b11110000 == 0b10010000: n = b & 0b00001111 typ = TYPE_ARRAY if n > self._max_array_len: - raise UnpackValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) + raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) elif b & 0b11110000 == 0b10000000: n = b & 0b00001111 typ = TYPE_MAP if n > self._max_map_len: - raise UnpackValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) - elif b == 0xc0: + raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) + elif b == 0xC0: obj = None - elif b == 0xc2: + elif b == 0xC2: obj = False - elif b == 0xc3: + elif b == 0xC3: obj = True - elif b == 0xc4: + elif b == 0xC4: typ = TYPE_BIN self._reserve(1) n = self._buffer[self._buff_i] self._buff_i 
+= 1 if n > self._max_bin_len: - raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) + raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) obj = self._read(n) - elif b == 0xc5: + elif b == 0xC5: typ = TYPE_BIN self._reserve(2) - n = struct.unpack_from(">H", self._buffer_view, self._buff_i)[0] + n = _unpack_from(">H", self._buffer, self._buff_i)[0] self._buff_i += 2 if n > self._max_bin_len: - raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) + raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) obj = self._read(n) - elif b == 0xc6: + elif b == 0xC6: typ = TYPE_BIN self._reserve(4) - n = struct.unpack_from(">I", self._buffer_view, self._buff_i)[0] + n = _unpack_from(">I", self._buffer, self._buff_i)[0] self._buff_i += 4 if n > self._max_bin_len: - raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) + raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) obj = self._read(n) - elif b == 0xc7: # ext 8 + elif b == 0xC7: # ext 8 typ = TYPE_EXT self._reserve(2) - L, n = struct.unpack_from('Bb', self._buffer_view, self._buff_i) + L, n = _unpack_from("Bb", self._buffer, self._buff_i) self._buff_i += 2 if L > self._max_ext_len: - raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) + raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) obj = self._read(L) - elif b == 0xc8: # ext 16 + elif b == 0xC8: # ext 16 typ = TYPE_EXT self._reserve(3) - L, n = struct.unpack_from('>Hb', self._buffer_view, self._buff_i) + L, n = _unpack_from(">Hb", self._buffer, self._buff_i) self._buff_i += 3 if L > self._max_ext_len: - raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) + raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) obj = self._read(L) - elif b == 0xc9: # ext 32 + elif b == 0xC9: # ext 32 typ = TYPE_EXT self._reserve(5) - L, n = struct.unpack_from('>Ib', 
self._buffer_view, self._buff_i) + L, n = _unpack_from(">Ib", self._buffer, self._buff_i) self._buff_i += 5 if L > self._max_ext_len: - raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) + raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) obj = self._read(L) - elif b == 0xca: + elif b == 0xCA: self._reserve(4) - obj = struct.unpack_from(">f", self._buffer_view, self._buff_i)[0] + obj = _unpack_from(">f", self._buffer, self._buff_i)[0] self._buff_i += 4 - elif b == 0xcb: + elif b == 0xCB: self._reserve(8) - obj = struct.unpack_from(">d", self._buffer_view, self._buff_i)[0] + obj = _unpack_from(">d", self._buffer, self._buff_i)[0] self._buff_i += 8 - elif b == 0xcc: + elif b == 0xCC: self._reserve(1) obj = self._buffer[self._buff_i] self._buff_i += 1 - elif b == 0xcd: + elif b == 0xCD: self._reserve(2) - obj = struct.unpack_from(">H", self._buffer_view, self._buff_i)[0] + obj = _unpack_from(">H", self._buffer, self._buff_i)[0] self._buff_i += 2 - elif b == 0xce: + elif b == 0xCE: self._reserve(4) - obj = struct.unpack_from(">I", self._buffer_view, self._buff_i)[0] + obj = _unpack_from(">I", self._buffer, self._buff_i)[0] self._buff_i += 4 - elif b == 0xcf: + elif b == 0xCF: self._reserve(8) - obj = struct.unpack_from(">Q", self._buffer_view, self._buff_i)[0] + obj = _unpack_from(">Q", self._buffer, self._buff_i)[0] self._buff_i += 8 - elif b == 0xd0: + elif b == 0xD0: self._reserve(1) - obj = struct.unpack_from("b", self._buffer_view, self._buff_i)[0] + obj = _unpack_from("b", self._buffer, self._buff_i)[0] self._buff_i += 1 - elif b == 0xd1: + elif b == 0xD1: self._reserve(2) - obj = struct.unpack_from(">h", self._buffer_view, self._buff_i)[0] + obj = _unpack_from(">h", self._buffer, self._buff_i)[0] self._buff_i += 2 - elif b == 0xd2: + elif b == 0xD2: self._reserve(4) - obj = struct.unpack_from(">i", self._buffer_view, self._buff_i)[0] + obj = _unpack_from(">i", self._buffer, self._buff_i)[0] self._buff_i += 4 - 
elif b == 0xd3: + elif b == 0xD3: self._reserve(8) - obj = struct.unpack_from(">q", self._buffer_view, self._buff_i)[0] + obj = _unpack_from(">q", self._buffer, self._buff_i)[0] self._buff_i += 8 - elif b == 0xd4: # fixext 1 + elif b == 0xD4: # fixext 1 typ = TYPE_EXT if self._max_ext_len < 1: - raise UnpackValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len)) + raise ValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len)) self._reserve(2) - n, obj = struct.unpack_from("b1s", self._buffer_view, self._buff_i) + n, obj = _unpack_from("b1s", self._buffer, self._buff_i) self._buff_i += 2 - elif b == 0xd5: # fixext 2 + elif b == 0xD5: # fixext 2 typ = TYPE_EXT if self._max_ext_len < 2: - raise UnpackValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len)) + raise ValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len)) self._reserve(3) - n, obj = struct.unpack_from("b2s", self._buffer_view, self._buff_i) + n, obj = _unpack_from("b2s", self._buffer, self._buff_i) self._buff_i += 3 - elif b == 0xd6: # fixext 4 + elif b == 0xD6: # fixext 4 typ = TYPE_EXT if self._max_ext_len < 4: - raise UnpackValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len)) + raise ValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len)) self._reserve(5) - n, obj = struct.unpack_from("b4s", self._buffer_view, self._buff_i) + n, obj = _unpack_from("b4s", self._buffer, self._buff_i) self._buff_i += 5 - elif b == 0xd7: # fixext 8 + elif b == 0xD7: # fixext 8 typ = TYPE_EXT if self._max_ext_len < 8: - raise UnpackValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len)) + raise ValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len)) self._reserve(9) - n, obj = struct.unpack_from("b8s", self._buffer_view, self._buff_i) + n, obj = _unpack_from("b8s", self._buffer, self._buff_i) self._buff_i += 9 - elif b == 0xd8: # fixext 16 + elif b == 0xD8: # fixext 16 typ = TYPE_EXT if self._max_ext_len < 16: - raise UnpackValueError("%s 
exceeds max_ext_len(%s)" % (16, self._max_ext_len)) + raise ValueError("%s exceeds max_ext_len(%s)" % (16, self._max_ext_len)) self._reserve(17) - n, obj = struct.unpack_from("b16s", self._buffer_view, self._buff_i) + n, obj = _unpack_from("b16s", self._buffer, self._buff_i) self._buff_i += 17 - elif b == 0xd9: + elif b == 0xD9: typ = TYPE_RAW self._reserve(1) n = self._buffer[self._buff_i] self._buff_i += 1 if n > self._max_str_len: - raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) + raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) obj = self._read(n) - elif b == 0xda: + elif b == 0xDA: typ = TYPE_RAW self._reserve(2) - n, = struct.unpack_from(">H", self._buffer_view, self._buff_i) + (n,) = _unpack_from(">H", self._buffer, self._buff_i) self._buff_i += 2 if n > self._max_str_len: - raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) + raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) obj = self._read(n) - elif b == 0xdb: + elif b == 0xDB: typ = TYPE_RAW self._reserve(4) - n, = struct.unpack_from(">I", self._buffer_view, self._buff_i) + (n,) = _unpack_from(">I", self._buffer, self._buff_i) self._buff_i += 4 if n > self._max_str_len: - raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) + raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) obj = self._read(n) - elif b == 0xdc: + elif b == 0xDC: typ = TYPE_ARRAY self._reserve(2) - n, = struct.unpack_from(">H", self._buffer_view, self._buff_i) + (n,) = _unpack_from(">H", self._buffer, self._buff_i) self._buff_i += 2 if n > self._max_array_len: - raise UnpackValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) - elif b == 0xdd: + raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) + elif b == 0xDD: typ = TYPE_ARRAY self._reserve(4) - n, = struct.unpack_from(">I", self._buffer_view, self._buff_i) + (n,) = _unpack_from(">I", self._buffer, self._buff_i) 
self._buff_i += 4 if n > self._max_array_len: - raise UnpackValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) - elif b == 0xde: + raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) + elif b == 0xDE: self._reserve(2) - n, = struct.unpack_from(">H", self._buffer_view, self._buff_i) + (n,) = _unpack_from(">H", self._buffer, self._buff_i) self._buff_i += 2 if n > self._max_map_len: - raise UnpackValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) + raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) typ = TYPE_MAP - elif b == 0xdf: + elif b == 0xDF: self._reserve(4) - n, = struct.unpack_from(">I", self._buffer_view, self._buff_i) + (n,) = _unpack_from(">I", self._buffer, self._buff_i) self._buff_i += 4 if n > self._max_map_len: - raise UnpackValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) + raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) typ = TYPE_MAP else: - raise UnpackValueError("Unknown header: 0x%x" % b) + raise FormatError("Unknown header: 0x%x" % b) return typ, n, obj def _unpack(self, execute=EX_CONSTRUCT): @@ -567,11 +625,11 @@ class Unpacker(object): if execute == EX_READ_ARRAY_HEADER: if typ != TYPE_ARRAY: - raise UnpackValueError("Expected array") + raise ValueError("Expected array") return n if execute == EX_READ_MAP_HEADER: if typ != TYPE_MAP: - raise UnpackValueError("Expected map") + raise ValueError("Expected map") return n # TODO should we eliminate the recursion? 
if typ == TYPE_ARRAY: @@ -596,13 +654,19 @@ class Unpacker(object): return if self._object_pairs_hook is not None: ret = self._object_pairs_hook( - (self._unpack(EX_CONSTRUCT), - self._unpack(EX_CONSTRUCT)) - for _ in xrange(n)) + (self._unpack(EX_CONSTRUCT), self._unpack(EX_CONSTRUCT)) + for _ in xrange(n) + ) else: ret = {} for _ in xrange(n): key = self._unpack(EX_CONSTRUCT) + if self._strict_map_key and type(key) not in (unicode, bytes): + raise ValueError( + "%s is not allowed for map key" % str(type(key)) + ) + if not PY2 and type(key) is str: + key = sys.intern(key) ret[key] = self._unpack(EX_CONSTRUCT) if self._object_hook is not None: ret = self._object_hook(ret) @@ -610,17 +674,26 @@ class Unpacker(object): if execute == EX_SKIP: return if typ == TYPE_RAW: - if self._encoding is not None: - obj = obj.decode(self._encoding, self._unicode_errors) - elif self._raw: + if self._raw: obj = bytes(obj) else: - obj = obj.decode('utf_8') + obj = obj.decode("utf_8", self._unicode_errors) return obj - if typ == TYPE_EXT: - return self._ext_hook(n, bytes(obj)) if typ == TYPE_BIN: return bytes(obj) + if typ == TYPE_EXT: + if n == -1: # timestamp + ts = Timestamp.from_bytes(bytes(obj)) + if self._timestamp == 1: + return ts.to_unix() + elif self._timestamp == 2: + return ts.to_unix_nano() + elif self._timestamp == 3: + return ts.to_datetime() + else: + return ts + else: + return self._ext_hook(n, bytes(obj)) assert typ == TYPE_IMMEDIATE return obj @@ -635,37 +708,30 @@ class Unpacker(object): except OutOfData: self._consume() raise StopIteration + except RecursionError: + raise StackError next = __next__ - def skip(self, write_bytes=None): + def skip(self): self._unpack(EX_SKIP) - if write_bytes is not None: - warnings.warn("`write_bytes` option is deprecated. 
Use `.tell()` instead.", DeprecationWarning) - write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) self._consume() - def unpack(self, write_bytes=None): - ret = self._unpack(EX_CONSTRUCT) - if write_bytes is not None: - warnings.warn("`write_bytes` option is deprecated. Use `.tell()` instead.", DeprecationWarning) - write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) + def unpack(self): + try: + ret = self._unpack(EX_CONSTRUCT) + except RecursionError: + raise StackError self._consume() return ret - def read_array_header(self, write_bytes=None): + def read_array_header(self): ret = self._unpack(EX_READ_ARRAY_HEADER) - if write_bytes is not None: - warnings.warn("`write_bytes` option is deprecated. Use `.tell()` instead.", DeprecationWarning) - write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) self._consume() return ret - def read_map_header(self, write_bytes=None): + def read_map_header(self): ret = self._unpack(EX_READ_MAP_HEADER) - if write_bytes is not None: - warnings.warn("`write_bytes` option is deprecated. Use `.tell()` instead.", DeprecationWarning) - write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) self._consume() return ret @@ -677,7 +743,7 @@ class Packer(object): """ MessagePack Packer - usage: + Usage: packer = Packer() astream.write(packer.pack(a)) @@ -698,49 +764,58 @@ class Packer(object): :param bool use_bin_type: Use bin type introduced in msgpack spec 2.0 for bytes. - It also enables str8 type for unicode. + It also enables str8 type for unicode. (default: True) :param bool strict_types: If set to true, types will be checked to be exact. Derived classes - from serializeable types will not be serialized and will be + from serializable types will not be serialized and will be treated as unsupported type and forwarded to default. Additionally tuples will not be serialized as lists. This is useful when trying to implement accurate serialization for python types. 
- :param str encoding: - (deprecated) Convert unicode to bytes with this encoding. (default: 'utf-8') + :param bool datetime: + If set to true, datetime with tzinfo is packed into Timestamp type. + Note that the tzinfo is stripped in the timestamp. + You can get UTC datetime with `timestamp=3` option of the Unpacker. + (Python 2 is not supported). :param str unicode_errors: - Error handler for encoding unicode. (default: 'strict') + The error handler for encoding unicode. (default: 'strict') + DO NOT USE THIS!! This option is kept for very specific usage. """ - def __init__(self, default=None, encoding=None, unicode_errors=None, - use_single_float=False, autoreset=True, use_bin_type=False, - strict_types=False): - if encoding is None: - encoding = 'utf_8' - else: - warnings.warn( - "encoding is deprecated, Use raw=False instead.", - PendingDeprecationWarning) - - if unicode_errors is None: - unicode_errors = 'strict' + def __init__( + self, + default=None, + use_single_float=False, + autoreset=True, + use_bin_type=True, + strict_types=False, + datetime=False, + unicode_errors=None, + ): self._strict_types = strict_types self._use_float = use_single_float self._autoreset = autoreset self._use_bin_type = use_bin_type - self._encoding = encoding - self._unicode_errors = unicode_errors self._buffer = StringIO() + if PY2 and datetime: + raise ValueError("datetime is not supported in Python 2") + self._datetime = bool(datetime) + self._unicode_errors = unicode_errors or "strict" if default is not None: if not callable(default): raise TypeError("default must be callable") self._default = default - def _pack(self, obj, nest_limit=DEFAULT_RECURSE_LIMIT, - check=isinstance, check_type_strict=_check_type_strict): + def _pack( + self, + obj, + nest_limit=DEFAULT_RECURSE_LIMIT, + check=isinstance, + check_type_strict=_check_type_strict, + ): default_used = False if self._strict_types: check = check_type_strict @@ -749,7 +824,7 @@ class Packer(object): list_types = (list, tuple) 
while True: if nest_limit < 0: - raise PackValueError("recursion limit exceeded") + raise ValueError("recursion limit exceeded") if obj is None: return self._buffer.write(b"\xc0") if check(obj, bool): @@ -761,76 +836,76 @@ class Packer(object): return self._buffer.write(struct.pack("B", obj)) if -0x20 <= obj < 0: return self._buffer.write(struct.pack("b", obj)) - if 0x80 <= obj <= 0xff: - return self._buffer.write(struct.pack("BB", 0xcc, obj)) + if 0x80 <= obj <= 0xFF: + return self._buffer.write(struct.pack("BB", 0xCC, obj)) if -0x80 <= obj < 0: - return self._buffer.write(struct.pack(">Bb", 0xd0, obj)) - if 0xff < obj <= 0xffff: - return self._buffer.write(struct.pack(">BH", 0xcd, obj)) + return self._buffer.write(struct.pack(">Bb", 0xD0, obj)) + if 0xFF < obj <= 0xFFFF: + return self._buffer.write(struct.pack(">BH", 0xCD, obj)) if -0x8000 <= obj < -0x80: - return self._buffer.write(struct.pack(">Bh", 0xd1, obj)) - if 0xffff < obj <= 0xffffffff: - return self._buffer.write(struct.pack(">BI", 0xce, obj)) + return self._buffer.write(struct.pack(">Bh", 0xD1, obj)) + if 0xFFFF < obj <= 0xFFFFFFFF: + return self._buffer.write(struct.pack(">BI", 0xCE, obj)) if -0x80000000 <= obj < -0x8000: - return self._buffer.write(struct.pack(">Bi", 0xd2, obj)) - if 0xffffffff < obj <= 0xffffffffffffffff: - return self._buffer.write(struct.pack(">BQ", 0xcf, obj)) + return self._buffer.write(struct.pack(">Bi", 0xD2, obj)) + if 0xFFFFFFFF < obj <= 0xFFFFFFFFFFFFFFFF: + return self._buffer.write(struct.pack(">BQ", 0xCF, obj)) if -0x8000000000000000 <= obj < -0x80000000: - return self._buffer.write(struct.pack(">Bq", 0xd3, obj)) + return self._buffer.write(struct.pack(">Bq", 0xD3, obj)) if not default_used and self._default is not None: obj = self._default(obj) default_used = True continue - raise PackOverflowError("Integer value out of range") + raise OverflowError("Integer value out of range") if check(obj, (bytes, bytearray)): n = len(obj) - if n >= 2**32: - raise PackValueError("%s 
is too large" % type(obj).__name__) + if n >= 2 ** 32: + raise ValueError("%s is too large" % type(obj).__name__) self._pack_bin_header(n) return self._buffer.write(obj) - if check(obj, Unicode): - if self._encoding is None: - raise TypeError( - "Can't encode unicode string: " - "no encoding is specified") - obj = obj.encode(self._encoding, self._unicode_errors) + if check(obj, unicode): + obj = obj.encode("utf-8", self._unicode_errors) n = len(obj) - if n >= 2**32: - raise PackValueError("String is too large") + if n >= 2 ** 32: + raise ValueError("String is too large") self._pack_raw_header(n) return self._buffer.write(obj) if check(obj, memoryview): n = len(obj) * obj.itemsize - if n >= 2**32: - raise PackValueError("Memoryview is too large") + if n >= 2 ** 32: + raise ValueError("Memoryview is too large") self._pack_bin_header(n) return self._buffer.write(obj) if check(obj, float): if self._use_float: - return self._buffer.write(struct.pack(">Bf", 0xca, obj)) - return self._buffer.write(struct.pack(">Bd", 0xcb, obj)) - if check(obj, ExtType): - code = obj.code - data = obj.data + return self._buffer.write(struct.pack(">Bf", 0xCA, obj)) + return self._buffer.write(struct.pack(">Bd", 0xCB, obj)) + if check(obj, (ExtType, Timestamp)): + if check(obj, Timestamp): + code = -1 + data = obj.to_bytes() + else: + code = obj.code + data = obj.data assert isinstance(code, int) assert isinstance(data, bytes) L = len(data) if L == 1: - self._buffer.write(b'\xd4') + self._buffer.write(b"\xd4") elif L == 2: - self._buffer.write(b'\xd5') + self._buffer.write(b"\xd5") elif L == 4: - self._buffer.write(b'\xd6') + self._buffer.write(b"\xd6") elif L == 8: - self._buffer.write(b'\xd7') + self._buffer.write(b"\xd7") elif L == 16: - self._buffer.write(b'\xd8') - elif L <= 0xff: - self._buffer.write(struct.pack(">BB", 0xc7, L)) - elif L <= 0xffff: - self._buffer.write(struct.pack(">BH", 0xc8, L)) + self._buffer.write(b"\xd8") + elif L <= 0xFF: + self._buffer.write(struct.pack(">BB", 
0xC7, L)) + elif L <= 0xFFFF: + self._buffer.write(struct.pack(">BH", 0xC8, L)) else: - self._buffer.write(struct.pack(">BI", 0xc9, L)) + self._buffer.write(struct.pack(">BI", 0xC9, L)) self._buffer.write(struct.pack("b", code)) self._buffer.write(data) return @@ -841,13 +916,20 @@ class Packer(object): self._pack(obj[i], nest_limit - 1) return if check(obj, dict): - return self._pack_map_pairs(len(obj), dict_iteritems(obj), - nest_limit - 1) + return self._pack_map_pairs( + len(obj), dict_iteritems(obj), nest_limit - 1 + ) + + if self._datetime and check(obj, _DateTime): + obj = Timestamp.from_datetime(obj) + default_used = 1 + continue + if not default_used and self._default is not None: obj = self._default(obj) default_used = 1 continue - raise TypeError("Cannot serialize %r" % (obj, )) + raise TypeError("Cannot serialize %r" % (obj,)) def pack(self, obj): try: @@ -855,43 +937,35 @@ class Packer(object): except: self._buffer = StringIO() # force reset raise - ret = self._buffer.getvalue() if self._autoreset: + ret = self._buffer.getvalue() self._buffer = StringIO() - elif USING_STRINGBUILDER: - self._buffer = StringIO(ret) - return ret + return ret def pack_map_pairs(self, pairs): self._pack_map_pairs(len(pairs), pairs) - ret = self._buffer.getvalue() if self._autoreset: + ret = self._buffer.getvalue() self._buffer = StringIO() - elif USING_STRINGBUILDER: - self._buffer = StringIO(ret) - return ret + return ret def pack_array_header(self, n): - if n >= 2**32: - raise PackValueError + if n >= 2 ** 32: + raise ValueError self._pack_array_header(n) - ret = self._buffer.getvalue() if self._autoreset: + ret = self._buffer.getvalue() self._buffer = StringIO() - elif USING_STRINGBUILDER: - self._buffer = StringIO(ret) - return ret + return ret def pack_map_header(self, n): - if n >= 2**32: - raise PackValueError + if n >= 2 ** 32: + raise ValueError self._pack_map_header(n) - ret = self._buffer.getvalue() if self._autoreset: + ret = self._buffer.getvalue() self._buffer 
= StringIO() - elif USING_STRINGBUILDER: - self._buffer = StringIO(ret) - return ret + return ret def pack_ext_type(self, typecode, data): if not isinstance(typecode, int): @@ -901,44 +975,44 @@ class Packer(object): if not isinstance(data, bytes): raise TypeError("data must have bytes type") L = len(data) - if L > 0xffffffff: - raise PackValueError("Too large data") + if L > 0xFFFFFFFF: + raise ValueError("Too large data") if L == 1: - self._buffer.write(b'\xd4') + self._buffer.write(b"\xd4") elif L == 2: - self._buffer.write(b'\xd5') + self._buffer.write(b"\xd5") elif L == 4: - self._buffer.write(b'\xd6') + self._buffer.write(b"\xd6") elif L == 8: - self._buffer.write(b'\xd7') + self._buffer.write(b"\xd7") elif L == 16: - self._buffer.write(b'\xd8') - elif L <= 0xff: - self._buffer.write(b'\xc7' + struct.pack('B', L)) - elif L <= 0xffff: - self._buffer.write(b'\xc8' + struct.pack('>H', L)) + self._buffer.write(b"\xd8") + elif L <= 0xFF: + self._buffer.write(b"\xc7" + struct.pack("B", L)) + elif L <= 0xFFFF: + self._buffer.write(b"\xc8" + struct.pack(">H", L)) else: - self._buffer.write(b'\xc9' + struct.pack('>I', L)) - self._buffer.write(struct.pack('B', typecode)) + self._buffer.write(b"\xc9" + struct.pack(">I", L)) + self._buffer.write(struct.pack("B", typecode)) self._buffer.write(data) def _pack_array_header(self, n): - if n <= 0x0f: - return self._buffer.write(struct.pack('B', 0x90 + n)) - if n <= 0xffff: - return self._buffer.write(struct.pack(">BH", 0xdc, n)) - if n <= 0xffffffff: - return self._buffer.write(struct.pack(">BI", 0xdd, n)) - raise PackValueError("Array is too large") + if n <= 0x0F: + return self._buffer.write(struct.pack("B", 0x90 + n)) + if n <= 0xFFFF: + return self._buffer.write(struct.pack(">BH", 0xDC, n)) + if n <= 0xFFFFFFFF: + return self._buffer.write(struct.pack(">BI", 0xDD, n)) + raise ValueError("Array is too large") def _pack_map_header(self, n): - if n <= 0x0f: - return self._buffer.write(struct.pack('B', 0x80 + n)) - if n <= 
0xffff: - return self._buffer.write(struct.pack(">BH", 0xde, n)) - if n <= 0xffffffff: - return self._buffer.write(struct.pack(">BI", 0xdf, n)) - raise PackValueError("Dict is too large") + if n <= 0x0F: + return self._buffer.write(struct.pack("B", 0x80 + n)) + if n <= 0xFFFF: + return self._buffer.write(struct.pack(">BH", 0xDE, n)) + if n <= 0xFFFFFFFF: + return self._buffer.write(struct.pack(">BI", 0xDF, n)) + raise ValueError("Dict is too large") def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT): self._pack_map_header(n) @@ -947,31 +1021,43 @@ class Packer(object): self._pack(v, nest_limit - 1) def _pack_raw_header(self, n): - if n <= 0x1f: - self._buffer.write(struct.pack('B', 0xa0 + n)) - elif self._use_bin_type and n <= 0xff: - self._buffer.write(struct.pack('>BB', 0xd9, n)) - elif n <= 0xffff: - self._buffer.write(struct.pack(">BH", 0xda, n)) - elif n <= 0xffffffff: - self._buffer.write(struct.pack(">BI", 0xdb, n)) + if n <= 0x1F: + self._buffer.write(struct.pack("B", 0xA0 + n)) + elif self._use_bin_type and n <= 0xFF: + self._buffer.write(struct.pack(">BB", 0xD9, n)) + elif n <= 0xFFFF: + self._buffer.write(struct.pack(">BH", 0xDA, n)) + elif n <= 0xFFFFFFFF: + self._buffer.write(struct.pack(">BI", 0xDB, n)) else: - raise PackValueError('Raw is too large') + raise ValueError("Raw is too large") def _pack_bin_header(self, n): if not self._use_bin_type: return self._pack_raw_header(n) - elif n <= 0xff: - return self._buffer.write(struct.pack('>BB', 0xc4, n)) - elif n <= 0xffff: - return self._buffer.write(struct.pack(">BH", 0xc5, n)) - elif n <= 0xffffffff: - return self._buffer.write(struct.pack(">BI", 0xc6, n)) + elif n <= 0xFF: + return self._buffer.write(struct.pack(">BB", 0xC4, n)) + elif n <= 0xFFFF: + return self._buffer.write(struct.pack(">BH", 0xC5, n)) + elif n <= 0xFFFFFFFF: + return self._buffer.write(struct.pack(">BI", 0xC6, n)) else: - raise PackValueError('Bin is too large') + raise ValueError("Bin is too large") def 
bytes(self): + """Return internal buffer contents as bytes object""" return self._buffer.getvalue() def reset(self): + """Reset internal buffer. + + This method is useful only when autoreset=False. + """ self._buffer = StringIO() + + def getbuffer(self): + """Return view of internal buffer.""" + if USING_STRINGBUILDER or PY2: + return memoryview(self.bytes()) + else: + return self._buffer.getbuffer() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/__about__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/__about__.py index 7481c9e2..4d998578 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/__about__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/__about__.py @@ -18,10 +18,10 @@ __title__ = "packaging" __summary__ = "Core utilities for Python packages" __uri__ = "https://github.com/pypa/packaging" -__version__ = "19.0" +__version__ = "20.4" __author__ = "Donald Stufft and individual contributors" __email__ = "donald@stufft.io" -__license__ = "BSD or Apache License, Version 2.0" +__license__ = "BSD-2-Clause or Apache-2.0" __copyright__ = "Copyright 2014-2019 %s" % __author__ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/_compat.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/_compat.py index 25da473c..e54bd4ed 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/_compat.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/_compat.py @@ -5,6 +5,11 @@ from __future__ import absolute_import, division, print_function import sys +from ._typing import TYPE_CHECKING + +if TYPE_CHECKING: # pragma: no cover + from typing import Any, Dict, Tuple, Type + PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 @@ -18,14 +23,16 @@ else: def 
with_metaclass(meta, *bases): + # type: (Type[Any], Tuple[Type[Any], ...]) -> Any """ Create a base class with a metaclass. """ # This requires a bit of explanation: the basic idea is to make a dummy # metaclass for one level of class instantiation that replaces itself with # the actual metaclass. - class metaclass(meta): + class metaclass(meta): # type: ignore def __new__(cls, name, this_bases, d): + # type: (Type[Any], str, Tuple[Any], Dict[Any, Any]) -> Any return meta(name, bases, d) return type.__new__(metaclass, "temporary_class", (), {}) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/_structures.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/_structures.py index 68dcca63..800d5c55 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/_structures.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/_structures.py @@ -4,65 +4,83 @@ from __future__ import absolute_import, division, print_function -class Infinity(object): +class InfinityType(object): def __repr__(self): + # type: () -> str return "Infinity" def __hash__(self): + # type: () -> int return hash(repr(self)) def __lt__(self, other): + # type: (object) -> bool return False def __le__(self, other): + # type: (object) -> bool return False def __eq__(self, other): + # type: (object) -> bool return isinstance(other, self.__class__) def __ne__(self, other): + # type: (object) -> bool return not isinstance(other, self.__class__) def __gt__(self, other): + # type: (object) -> bool return True def __ge__(self, other): + # type: (object) -> bool return True def __neg__(self): + # type: (object) -> NegativeInfinityType return NegativeInfinity -Infinity = Infinity() +Infinity = InfinityType() -class NegativeInfinity(object): +class NegativeInfinityType(object): def __repr__(self): + # type: () -> str return "-Infinity" def __hash__(self): + # type: () -> 
int return hash(repr(self)) def __lt__(self, other): + # type: (object) -> bool return True def __le__(self, other): + # type: (object) -> bool return True def __eq__(self, other): + # type: (object) -> bool return isinstance(other, self.__class__) def __ne__(self, other): + # type: (object) -> bool return not isinstance(other, self.__class__) def __gt__(self, other): + # type: (object) -> bool return False def __ge__(self, other): + # type: (object) -> bool return False def __neg__(self): + # type: (object) -> InfinityType return Infinity -NegativeInfinity = NegativeInfinity() +NegativeInfinity = NegativeInfinityType() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/_typing.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/_typing.py new file mode 100644 index 00000000..2846133b --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/_typing.py @@ -0,0 +1,48 @@ +"""For neatly implementing static typing in packaging. + +`mypy` - the static type analysis tool we use - uses the `typing` module, which +provides core functionality fundamental to mypy's functioning. + +Generally, `typing` would be imported at runtime and used in that fashion - +it acts as a no-op at runtime and does not have any run-time overhead by +design. + +As it turns out, `typing` is not vendorable - it uses separate sources for +Python 2/Python 3. Thus, this codebase can not expect it to be present. +To work around this, mypy allows the typing import to be behind a False-y +optional to prevent it from running at runtime and type-comments can be used +to remove the need for the types to be accessible directly during runtime. + +This module provides the False-y guard in a nicely named fashion so that a +curious maintainer can reach here to read this. 
+ +In packaging, all static-typing related imports should be guarded as follows: + + from pip._vendor.packaging._typing import TYPE_CHECKING + + if TYPE_CHECKING: + from typing import ... + +Ref: https://github.com/python/mypy/issues/3216 +""" + +__all__ = ["TYPE_CHECKING", "cast"] + +# The TYPE_CHECKING constant defined by the typing module is False at runtime +# but True while type checking. +if False: # pragma: no cover + from typing import TYPE_CHECKING +else: + TYPE_CHECKING = False + +# typing's cast syntax requires calling typing.cast at runtime, but we don't +# want to import typing at runtime. Here, we inform the type checkers that +# we're importing `typing.cast` as `cast` and re-implement typing.cast's +# runtime behavior in a block that is ignored by type checkers. +if TYPE_CHECKING: # pragma: no cover + # not executed at runtime + from typing import cast +else: + # executed at runtime + def cast(type_, value): # noqa + return value diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/markers.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/markers.py index 54824768..ed642b01 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/markers.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/markers.py @@ -13,8 +13,14 @@ from pip._vendor.pyparsing import ZeroOrMore, Group, Forward, QuotedString from pip._vendor.pyparsing import Literal as L # noqa from ._compat import string_types +from ._typing import TYPE_CHECKING from .specifiers import Specifier, InvalidSpecifier +if TYPE_CHECKING: # pragma: no cover + from typing import Any, Callable, Dict, List, Optional, Tuple, Union + + Operator = Callable[[str, str], bool] + __all__ = [ "InvalidMarker", @@ -46,30 +52,37 @@ class UndefinedEnvironmentName(ValueError): class Node(object): def __init__(self, value): + # type: (Any) -> None self.value = value def 
__str__(self): + # type: () -> str return str(self.value) def __repr__(self): + # type: () -> str return "<{0}({1!r})>".format(self.__class__.__name__, str(self)) def serialize(self): + # type: () -> str raise NotImplementedError class Variable(Node): def serialize(self): + # type: () -> str return str(self) class Value(Node): def serialize(self): + # type: () -> str return '"{0}"'.format(self) class Op(Node): def serialize(self): + # type: () -> str return str(self) @@ -85,13 +98,13 @@ VARIABLE = ( | L("python_version") | L("sys_platform") | L("os_name") - | L("os.name") + | L("os.name") # PEP-345 | L("sys.platform") # PEP-345 | L("platform.version") # PEP-345 | L("platform.machine") # PEP-345 | L("platform.python_implementation") # PEP-345 - | L("python_implementation") # PEP-345 - | L("extra") # undocumented setuptools legacy + | L("python_implementation") # undocumented setuptools legacy + | L("extra") # PEP-508 ) ALIASES = { "os.name": "os_name", @@ -131,6 +144,7 @@ MARKER = stringStart + MARKER_EXPR + stringEnd def _coerce_parse_result(results): + # type: (Union[ParseResults, List[Any]]) -> List[Any] if isinstance(results, ParseResults): return [_coerce_parse_result(i) for i in results] else: @@ -138,6 +152,8 @@ def _coerce_parse_result(results): def _format_marker(marker, first=True): + # type: (Union[List[str], Tuple[Node, ...], str], Optional[bool]) -> str + assert isinstance(marker, (list, tuple, string_types)) # Sometimes we have a structure like [[...]] which is a single item list @@ -172,10 +188,11 @@ _operators = { "!=": operator.ne, ">=": operator.ge, ">": operator.gt, -} +} # type: Dict[str, Operator] def _eval_op(lhs, op, rhs): + # type: (str, Op, str) -> bool try: spec = Specifier("".join([op.serialize(), rhs])) except InvalidSpecifier: @@ -183,7 +200,7 @@ def _eval_op(lhs, op, rhs): else: return spec.contains(lhs) - oper = _operators.get(op.serialize()) + oper = _operators.get(op.serialize()) # type: Optional[Operator] if oper is None: raise 
UndefinedComparison( "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs) @@ -192,13 +209,18 @@ def _eval_op(lhs, op, rhs): return oper(lhs, rhs) -_undefined = object() +class Undefined(object): + pass + + +_undefined = Undefined() def _get_env(environment, name): - value = environment.get(name, _undefined) + # type: (Dict[str, str], str) -> str + value = environment.get(name, _undefined) # type: Union[str, Undefined] - if value is _undefined: + if isinstance(value, Undefined): raise UndefinedEnvironmentName( "{0!r} does not exist in evaluation environment.".format(name) ) @@ -207,7 +229,8 @@ def _get_env(environment, name): def _evaluate_markers(markers, environment): - groups = [[]] + # type: (List[Any], Dict[str, str]) -> bool + groups = [[]] # type: List[List[bool]] for marker in markers: assert isinstance(marker, (list, tuple, string_types)) @@ -234,6 +257,7 @@ def _evaluate_markers(markers, environment): def format_full_version(info): + # type: (sys._version_info) -> str version = "{0.major}.{0.minor}.{0.micro}".format(info) kind = info.releaselevel if kind != "final": @@ -242,9 +266,13 @@ def format_full_version(info): def default_environment(): + # type: () -> Dict[str, str] if hasattr(sys, "implementation"): - iver = format_full_version(sys.implementation.version) - implementation_name = sys.implementation.name + # Ignoring the `sys.implementation` reference for type checking due to + # mypy not liking that the attribute doesn't exist in Python 2.7 when + # run with the `--py27` flag. 
+ iver = format_full_version(sys.implementation.version) # type: ignore + implementation_name = sys.implementation.name # type: ignore else: iver = "0" implementation_name = "" @@ -259,13 +287,14 @@ def default_environment(): "platform_version": platform.version(), "python_full_version": platform.python_version(), "platform_python_implementation": platform.python_implementation(), - "python_version": platform.python_version()[:3], + "python_version": ".".join(platform.python_version_tuple()[:2]), "sys_platform": sys.platform, } class Marker(object): def __init__(self, marker): + # type: (str) -> None try: self._markers = _coerce_parse_result(MARKER.parseString(marker)) except ParseException as e: @@ -275,12 +304,15 @@ class Marker(object): raise InvalidMarker(err_str) def __str__(self): + # type: () -> str return _format_marker(self._markers) def __repr__(self): + # type: () -> str return "<Marker({0!r})>".format(str(self)) def evaluate(self, environment=None): + # type: (Optional[Dict[str, str]]) -> bool """Evaluate a marker. 
Return the boolean from evaluating the given marker against the diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/requirements.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/requirements.py index dbc5f11d..5e64101c 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/requirements.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/requirements.py @@ -11,9 +11,13 @@ from pip._vendor.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine from pip._vendor.pyparsing import Literal as L # noqa from pip._vendor.six.moves.urllib import parse as urlparse +from ._typing import TYPE_CHECKING from .markers import MARKER_EXPR, Marker from .specifiers import LegacySpecifier, Specifier, SpecifierSet +if TYPE_CHECKING: # pragma: no cover + from typing import List + class InvalidRequirement(ValueError): """ @@ -89,6 +93,7 @@ class Requirement(object): # TODO: Can we normalize the name and extra name? 
def __init__(self, requirement_string): + # type: (str) -> None try: req = REQUIREMENT.parseString(requirement_string) except ParseException as e: @@ -116,7 +121,8 @@ class Requirement(object): self.marker = req.marker if req.marker else None def __str__(self): - parts = [self.name] + # type: () -> str + parts = [self.name] # type: List[str] if self.extras: parts.append("[{0}]".format(",".join(sorted(self.extras)))) @@ -135,4 +141,5 @@ class Requirement(object): return "".join(parts) def __repr__(self): + # type: () -> str return "<Requirement({0!r})>".format(str(self)) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/specifiers.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/specifiers.py index 743576a0..fe09bb1d 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/specifiers.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/specifiers.py @@ -9,8 +9,27 @@ import itertools import re from ._compat import string_types, with_metaclass +from ._typing import TYPE_CHECKING +from .utils import canonicalize_version from .version import Version, LegacyVersion, parse +if TYPE_CHECKING: # pragma: no cover + from typing import ( + List, + Dict, + Union, + Iterable, + Iterator, + Optional, + Callable, + Tuple, + FrozenSet, + ) + + ParsedVersion = Union[Version, LegacyVersion] + UnparsedVersion = Union[Version, LegacyVersion, str] + CallableOperator = Callable[[ParsedVersion, str], bool] + class InvalidSpecifier(ValueError): """ @@ -18,9 +37,10 @@ class InvalidSpecifier(ValueError): """ -class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): +class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): # type: ignore @abc.abstractmethod def __str__(self): + # type: () -> str """ Returns the str representation of this Specifier like object. This should be representative of the Specifier itself. 
@@ -28,12 +48,14 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): @abc.abstractmethod def __hash__(self): + # type: () -> int """ Returns a hash value for this Specifier like object. """ @abc.abstractmethod def __eq__(self, other): + # type: (object) -> bool """ Returns a boolean representing whether or not the two Specifier like objects are equal. @@ -41,6 +63,7 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): @abc.abstractmethod def __ne__(self, other): + # type: (object) -> bool """ Returns a boolean representing whether or not the two Specifier like objects are not equal. @@ -48,6 +71,7 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): @abc.abstractproperty def prereleases(self): + # type: () -> Optional[bool] """ Returns whether or not pre-releases as a whole are allowed by this specifier. @@ -55,6 +79,7 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): @prereleases.setter def prereleases(self, value): + # type: (bool) -> None """ Sets whether or not pre-releases as a whole are allowed by this specifier. @@ -62,12 +87,14 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): @abc.abstractmethod def contains(self, item, prereleases=None): + # type: (str, Optional[bool]) -> bool """ Determines if the given item is contained within this specifier. """ @abc.abstractmethod def filter(self, iterable, prereleases=None): + # type: (Iterable[UnparsedVersion], Optional[bool]) -> Iterable[UnparsedVersion] """ Takes an iterable of items and filters them so that only items which are contained within this specifier are allowed in it. 
@@ -76,19 +103,24 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): class _IndividualSpecifier(BaseSpecifier): - _operators = {} + _operators = {} # type: Dict[str, str] def __init__(self, spec="", prereleases=None): + # type: (str, Optional[bool]) -> None match = self._regex.search(spec) if not match: raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) - self._spec = (match.group("operator").strip(), match.group("version").strip()) + self._spec = ( + match.group("operator").strip(), + match.group("version").strip(), + ) # type: Tuple[str, str] # Store whether or not this Specifier should accept prereleases self._prereleases = prereleases def __repr__(self): + # type: () -> str pre = ( ", prereleases={0!r}".format(self.prereleases) if self._prereleases is not None @@ -98,26 +130,35 @@ class _IndividualSpecifier(BaseSpecifier): return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre) def __str__(self): + # type: () -> str return "{0}{1}".format(*self._spec) + @property + def _canonical_spec(self): + # type: () -> Tuple[str, Union[Version, str]] + return self._spec[0], canonicalize_version(self._spec[1]) + def __hash__(self): - return hash(self._spec) + # type: () -> int + return hash(self._canonical_spec) def __eq__(self, other): + # type: (object) -> bool if isinstance(other, string_types): try: - other = self.__class__(other) + other = self.__class__(str(other)) except InvalidSpecifier: return NotImplemented elif not isinstance(other, self.__class__): return NotImplemented - return self._spec == other._spec + return self._canonical_spec == other._canonical_spec def __ne__(self, other): + # type: (object) -> bool if isinstance(other, string_types): try: - other = self.__class__(other) + other = self.__class__(str(other)) except InvalidSpecifier: return NotImplemented elif not isinstance(other, self.__class__): @@ -126,52 +167,67 @@ class _IndividualSpecifier(BaseSpecifier): return self._spec != other._spec def 
_get_operator(self, op): - return getattr(self, "_compare_{0}".format(self._operators[op])) + # type: (str) -> CallableOperator + operator_callable = getattr( + self, "_compare_{0}".format(self._operators[op]) + ) # type: CallableOperator + return operator_callable def _coerce_version(self, version): + # type: (UnparsedVersion) -> ParsedVersion if not isinstance(version, (LegacyVersion, Version)): version = parse(version) return version @property def operator(self): + # type: () -> str return self._spec[0] @property def version(self): + # type: () -> str return self._spec[1] @property def prereleases(self): + # type: () -> Optional[bool] return self._prereleases @prereleases.setter def prereleases(self, value): + # type: (bool) -> None self._prereleases = value def __contains__(self, item): + # type: (str) -> bool return self.contains(item) def contains(self, item, prereleases=None): + # type: (UnparsedVersion, Optional[bool]) -> bool + # Determine if prereleases are to be allowed or not. if prereleases is None: prereleases = self.prereleases # Normalize item to a Version or LegacyVersion, this allows us to have # a shortcut for ``"2.0" in Specifier(">=2") - item = self._coerce_version(item) + normalized_item = self._coerce_version(item) # Determine if we should be supporting prereleases in this specifier # or not, if we do not support prereleases than we can short circuit # logic if this version is a prereleases. - if item.is_prerelease and not prereleases: + if normalized_item.is_prerelease and not prereleases: return False # Actually do the comparison to determine if this item is contained # within this Specifier or not. 
- return self._get_operator(self.operator)(item, self.version) + operator_callable = self._get_operator(self.operator) # type: CallableOperator + return operator_callable(normalized_item, self.version) def filter(self, iterable, prereleases=None): + # type: (Iterable[UnparsedVersion], Optional[bool]) -> Iterable[UnparsedVersion] + yielded = False found_prereleases = [] @@ -230,32 +286,43 @@ class LegacySpecifier(_IndividualSpecifier): } def _coerce_version(self, version): + # type: (Union[ParsedVersion, str]) -> LegacyVersion if not isinstance(version, LegacyVersion): version = LegacyVersion(str(version)) return version def _compare_equal(self, prospective, spec): + # type: (LegacyVersion, str) -> bool return prospective == self._coerce_version(spec) def _compare_not_equal(self, prospective, spec): + # type: (LegacyVersion, str) -> bool return prospective != self._coerce_version(spec) def _compare_less_than_equal(self, prospective, spec): + # type: (LegacyVersion, str) -> bool return prospective <= self._coerce_version(spec) def _compare_greater_than_equal(self, prospective, spec): + # type: (LegacyVersion, str) -> bool return prospective >= self._coerce_version(spec) def _compare_less_than(self, prospective, spec): + # type: (LegacyVersion, str) -> bool return prospective < self._coerce_version(spec) def _compare_greater_than(self, prospective, spec): + # type: (LegacyVersion, str) -> bool return prospective > self._coerce_version(spec) -def _require_version_compare(fn): +def _require_version_compare( + fn # type: (Callable[[Specifier, ParsedVersion, str], bool]) +): + # type: (...) 
-> Callable[[Specifier, ParsedVersion, str], bool] @functools.wraps(fn) def wrapped(self, prospective, spec): + # type: (Specifier, ParsedVersion, str) -> bool if not isinstance(prospective, Version): return False return fn(self, prospective, spec) @@ -373,6 +440,8 @@ class Specifier(_IndividualSpecifier): @_require_version_compare def _compare_compatible(self, prospective, spec): + # type: (ParsedVersion, str) -> bool + # Compatible releases have an equivalent combination of >= and ==. That # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to # implement this in terms of the other specifiers instead of @@ -400,56 +469,75 @@ class Specifier(_IndividualSpecifier): @_require_version_compare def _compare_equal(self, prospective, spec): + # type: (ParsedVersion, str) -> bool + # We need special logic to handle prefix matching if spec.endswith(".*"): # In the case of prefix matching we want to ignore local segment. prospective = Version(prospective.public) # Split the spec out by dots, and pretend that there is an implicit # dot in between a release segment and a pre-release segment. - spec = _version_split(spec[:-2]) # Remove the trailing .* + split_spec = _version_split(spec[:-2]) # Remove the trailing .* # Split the prospective version out by dots, and pretend that there # is an implicit dot in between a release segment and a pre-release # segment. - prospective = _version_split(str(prospective)) + split_prospective = _version_split(str(prospective)) # Shorten the prospective version to be the same length as the spec # so that we can determine if the specifier is a prefix of the # prospective version or not. - prospective = prospective[: len(spec)] + shortened_prospective = split_prospective[: len(split_spec)] # Pad out our two sides with zeros so that they both equal the same # length. 
- spec, prospective = _pad_version(spec, prospective) + padded_spec, padded_prospective = _pad_version( + split_spec, shortened_prospective + ) + + return padded_prospective == padded_spec else: # Convert our spec string into a Version - spec = Version(spec) + spec_version = Version(spec) # If the specifier does not have a local segment, then we want to # act as if the prospective version also does not have a local # segment. - if not spec.local: + if not spec_version.local: prospective = Version(prospective.public) - return prospective == spec + return prospective == spec_version @_require_version_compare def _compare_not_equal(self, prospective, spec): + # type: (ParsedVersion, str) -> bool return not self._compare_equal(prospective, spec) @_require_version_compare def _compare_less_than_equal(self, prospective, spec): - return prospective <= Version(spec) + # type: (ParsedVersion, str) -> bool + + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. + return Version(prospective.public) <= Version(spec) @_require_version_compare def _compare_greater_than_equal(self, prospective, spec): - return prospective >= Version(spec) + # type: (ParsedVersion, str) -> bool + + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. + return Version(prospective.public) >= Version(spec) @_require_version_compare - def _compare_less_than(self, prospective, spec): + def _compare_less_than(self, prospective, spec_str): + # type: (ParsedVersion, str) -> bool + # Convert our spec to a Version instance, since we'll want to work with # it as a version. - spec = Version(spec) + spec = Version(spec_str) # Check to see if the prospective version is less than the spec # version. 
If it's not we can short circuit and just return False now @@ -471,10 +559,12 @@ class Specifier(_IndividualSpecifier): return True @_require_version_compare - def _compare_greater_than(self, prospective, spec): + def _compare_greater_than(self, prospective, spec_str): + # type: (ParsedVersion, str) -> bool + # Convert our spec to a Version instance, since we'll want to work with # it as a version. - spec = Version(spec) + spec = Version(spec_str) # Check to see if the prospective version is greater than the spec # version. If it's not we can short circuit and just return False now @@ -502,10 +592,13 @@ class Specifier(_IndividualSpecifier): return True def _compare_arbitrary(self, prospective, spec): + # type: (Version, str) -> bool return str(prospective).lower() == str(spec).lower() @property def prereleases(self): + # type: () -> bool + # If there is an explicit prereleases set for this, then we'll just # blindly use that. if self._prereleases is not None: @@ -530,6 +623,7 @@ class Specifier(_IndividualSpecifier): @prereleases.setter def prereleases(self, value): + # type: (bool) -> None self._prereleases = value @@ -537,7 +631,8 @@ _prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") def _version_split(version): - result = [] + # type: (str) -> List[str] + result = [] # type: List[str] for item in version.split("."): match = _prefix_regex.search(item) if match: @@ -548,6 +643,7 @@ def _version_split(version): def _pad_version(left, right): + # type: (List[str], List[str]) -> Tuple[List[str], List[str]] left_split, right_split = [], [] # Get the release segment of our versions @@ -567,14 +663,16 @@ def _pad_version(left, right): class SpecifierSet(BaseSpecifier): def __init__(self, specifiers="", prereleases=None): - # Split on , to break each indidivual specifier into it's own item, and + # type: (str, Optional[bool]) -> None + + # Split on , to break each individual specifier into it's own item, and # strip each item to remove leading/trailing 
whitespace. - specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] + split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] # Parsed each individual specifier, attempting first to make it a # Specifier and falling back to a LegacySpecifier. parsed = set() - for specifier in specifiers: + for specifier in split_specifiers: try: parsed.add(Specifier(specifier)) except InvalidSpecifier: @@ -588,6 +686,7 @@ class SpecifierSet(BaseSpecifier): self._prereleases = prereleases def __repr__(self): + # type: () -> str pre = ( ", prereleases={0!r}".format(self.prereleases) if self._prereleases is not None @@ -597,12 +696,15 @@ class SpecifierSet(BaseSpecifier): return "<SpecifierSet({0!r}{1})>".format(str(self), pre) def __str__(self): + # type: () -> str return ",".join(sorted(str(s) for s in self._specs)) def __hash__(self): + # type: () -> int return hash(self._specs) def __and__(self, other): + # type: (Union[SpecifierSet, str]) -> SpecifierSet if isinstance(other, string_types): other = SpecifierSet(other) elif not isinstance(other, SpecifierSet): @@ -626,9 +728,8 @@ class SpecifierSet(BaseSpecifier): return specifier def __eq__(self, other): - if isinstance(other, string_types): - other = SpecifierSet(other) - elif isinstance(other, _IndividualSpecifier): + # type: (object) -> bool + if isinstance(other, (string_types, _IndividualSpecifier)): other = SpecifierSet(str(other)) elif not isinstance(other, SpecifierSet): return NotImplemented @@ -636,9 +737,8 @@ class SpecifierSet(BaseSpecifier): return self._specs == other._specs def __ne__(self, other): - if isinstance(other, string_types): - other = SpecifierSet(other) - elif isinstance(other, _IndividualSpecifier): + # type: (object) -> bool + if isinstance(other, (string_types, _IndividualSpecifier)): other = SpecifierSet(str(other)) elif not isinstance(other, SpecifierSet): return NotImplemented @@ -646,13 +746,17 @@ class SpecifierSet(BaseSpecifier): return self._specs != 
other._specs def __len__(self): + # type: () -> int return len(self._specs) def __iter__(self): + # type: () -> Iterator[FrozenSet[_IndividualSpecifier]] return iter(self._specs) @property def prereleases(self): + # type: () -> Optional[bool] + # If we have been given an explicit prerelease modifier, then we'll # pass that through here. if self._prereleases is not None: @@ -670,12 +774,16 @@ class SpecifierSet(BaseSpecifier): @prereleases.setter def prereleases(self, value): + # type: (bool) -> None self._prereleases = value def __contains__(self, item): + # type: (Union[ParsedVersion, str]) -> bool return self.contains(item) def contains(self, item, prereleases=None): + # type: (Union[ParsedVersion, str], Optional[bool]) -> bool + # Ensure that our item is a Version or LegacyVersion instance. if not isinstance(item, (LegacyVersion, Version)): item = parse(item) @@ -701,7 +809,13 @@ class SpecifierSet(BaseSpecifier): # will always return True, this is an explicit design decision. return all(s.contains(item, prereleases=prereleases) for s in self._specs) - def filter(self, iterable, prereleases=None): + def filter( + self, + iterable, # type: Iterable[Union[ParsedVersion, str]] + prereleases=None, # type: Optional[bool] + ): + # type: (...) -> Iterable[Union[ParsedVersion, str]] + # Determine if we're forcing a prerelease or not, if we're not forcing # one for this particular filter call, then we'll use whatever the # SpecifierSet thinks for whether or not we should support prereleases. @@ -719,8 +833,8 @@ class SpecifierSet(BaseSpecifier): # which will filter out any pre-releases, unless there are no final # releases, and which will filter out LegacyVersion in general. else: - filtered = [] - found_prereleases = [] + filtered = [] # type: List[Union[ParsedVersion, str]] + found_prereleases = [] # type: List[Union[ParsedVersion, str]] for item in iterable: # Ensure that we some kind of Version class for this item. 
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/tags.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/tags.py new file mode 100644 index 00000000..9064910b --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/tags.py @@ -0,0 +1,751 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import + +import distutils.util + +try: + from importlib.machinery import EXTENSION_SUFFIXES +except ImportError: # pragma: no cover + import imp + + EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()] + del imp +import logging +import os +import platform +import re +import struct +import sys +import sysconfig +import warnings + +from ._typing import TYPE_CHECKING, cast + +if TYPE_CHECKING: # pragma: no cover + from typing import ( + Dict, + FrozenSet, + IO, + Iterable, + Iterator, + List, + Optional, + Sequence, + Tuple, + Union, + ) + + PythonVersion = Sequence[int] + MacVersion = Tuple[int, int] + GlibcVersion = Tuple[int, int] + + +logger = logging.getLogger(__name__) + +INTERPRETER_SHORT_NAMES = { + "python": "py", # Generic. + "cpython": "cp", + "pypy": "pp", + "ironpython": "ip", + "jython": "jy", +} # type: Dict[str, str] + + +_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32 + + +class Tag(object): + """ + A representation of the tag triple for a wheel. + + Instances are considered immutable and thus are hashable. Equality checking + is also supported. 
+ """ + + __slots__ = ["_interpreter", "_abi", "_platform"] + + def __init__(self, interpreter, abi, platform): + # type: (str, str, str) -> None + self._interpreter = interpreter.lower() + self._abi = abi.lower() + self._platform = platform.lower() + + @property + def interpreter(self): + # type: () -> str + return self._interpreter + + @property + def abi(self): + # type: () -> str + return self._abi + + @property + def platform(self): + # type: () -> str + return self._platform + + def __eq__(self, other): + # type: (object) -> bool + if not isinstance(other, Tag): + return NotImplemented + + return ( + (self.platform == other.platform) + and (self.abi == other.abi) + and (self.interpreter == other.interpreter) + ) + + def __hash__(self): + # type: () -> int + return hash((self._interpreter, self._abi, self._platform)) + + def __str__(self): + # type: () -> str + return "{}-{}-{}".format(self._interpreter, self._abi, self._platform) + + def __repr__(self): + # type: () -> str + return "<{self} @ {self_id}>".format(self=self, self_id=id(self)) + + +def parse_tag(tag): + # type: (str) -> FrozenSet[Tag] + """ + Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. + + Returning a set is required due to the possibility that the tag is a + compressed tag set. + """ + tags = set() + interpreters, abis, platforms = tag.split("-") + for interpreter in interpreters.split("."): + for abi in abis.split("."): + for platform_ in platforms.split("."): + tags.add(Tag(interpreter, abi, platform_)) + return frozenset(tags) + + +def _warn_keyword_parameter(func_name, kwargs): + # type: (str, Dict[str, bool]) -> bool + """ + Backwards-compatibility with Python 2.7 to allow treating 'warn' as keyword-only. 
+ """ + if not kwargs: + return False + elif len(kwargs) > 1 or "warn" not in kwargs: + kwargs.pop("warn", None) + arg = next(iter(kwargs.keys())) + raise TypeError( + "{}() got an unexpected keyword argument {!r}".format(func_name, arg) + ) + return kwargs["warn"] + + +def _get_config_var(name, warn=False): + # type: (str, bool) -> Union[int, str, None] + value = sysconfig.get_config_var(name) + if value is None and warn: + logger.debug( + "Config variable '%s' is unset, Python ABI tag may be incorrect", name + ) + return value + + +def _normalize_string(string): + # type: (str) -> str + return string.replace(".", "_").replace("-", "_") + + +def _abi3_applies(python_version): + # type: (PythonVersion) -> bool + """ + Determine if the Python version supports abi3. + + PEP 384 was first implemented in Python 3.2. + """ + return len(python_version) > 1 and tuple(python_version) >= (3, 2) + + +def _cpython_abis(py_version, warn=False): + # type: (PythonVersion, bool) -> List[str] + py_version = tuple(py_version) # To allow for version comparison. + abis = [] + version = _version_nodot(py_version[:2]) + debug = pymalloc = ucs4 = "" + with_debug = _get_config_var("Py_DEBUG", warn) + has_refcount = hasattr(sys, "gettotalrefcount") + # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled + # extension modules is the best option. + # https://github.com/pypa/pip/issues/3383#issuecomment-173267692 + has_ext = "_d.pyd" in EXTENSION_SUFFIXES + if with_debug or (with_debug is None and (has_refcount or has_ext)): + debug = "d" + if py_version < (3, 8): + with_pymalloc = _get_config_var("WITH_PYMALLOC", warn) + if with_pymalloc or with_pymalloc is None: + pymalloc = "m" + if py_version < (3, 3): + unicode_size = _get_config_var("Py_UNICODE_SIZE", warn) + if unicode_size == 4 or ( + unicode_size is None and sys.maxunicode == 0x10FFFF + ): + ucs4 = "u" + elif debug: + # Debug builds can also load "normal" extension modules. 
+ # We can also assume no UCS-4 or pymalloc requirement. + abis.append("cp{version}".format(version=version)) + abis.insert( + 0, + "cp{version}{debug}{pymalloc}{ucs4}".format( + version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4 + ), + ) + return abis + + +def cpython_tags( + python_version=None, # type: Optional[PythonVersion] + abis=None, # type: Optional[Iterable[str]] + platforms=None, # type: Optional[Iterable[str]] + **kwargs # type: bool +): + # type: (...) -> Iterator[Tag] + """ + Yields the tags for a CPython interpreter. + + The tags consist of: + - cp<python_version>-<abi>-<platform> + - cp<python_version>-abi3-<platform> + - cp<python_version>-none-<platform> + - cp<less than python_version>-abi3-<platform> # Older Python versions down to 3.2. + + If python_version only specifies a major version then user-provided ABIs and + the 'none' ABItag will be used. + + If 'abi3' or 'none' are specified in 'abis' then they will be yielded at + their normal position and not at the beginning. + """ + warn = _warn_keyword_parameter("cpython_tags", kwargs) + if not python_version: + python_version = sys.version_info[:2] + + interpreter = "cp{}".format(_version_nodot(python_version[:2])) + + if abis is None: + if len(python_version) > 1: + abis = _cpython_abis(python_version, warn) + else: + abis = [] + abis = list(abis) + # 'abi3' and 'none' are explicitly handled later. 
+ for explicit_abi in ("abi3", "none"): + try: + abis.remove(explicit_abi) + except ValueError: + pass + + platforms = list(platforms or _platform_tags()) + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + if _abi3_applies(python_version): + for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms): + yield tag + for tag in (Tag(interpreter, "none", platform_) for platform_ in platforms): + yield tag + + if _abi3_applies(python_version): + for minor_version in range(python_version[1] - 1, 1, -1): + for platform_ in platforms: + interpreter = "cp{version}".format( + version=_version_nodot((python_version[0], minor_version)) + ) + yield Tag(interpreter, "abi3", platform_) + + +def _generic_abi(): + # type: () -> Iterator[str] + abi = sysconfig.get_config_var("SOABI") + if abi: + yield _normalize_string(abi) + + +def generic_tags( + interpreter=None, # type: Optional[str] + abis=None, # type: Optional[Iterable[str]] + platforms=None, # type: Optional[Iterable[str]] + **kwargs # type: bool +): + # type: (...) -> Iterator[Tag] + """ + Yields the tags for a generic interpreter. + + The tags consist of: + - <interpreter>-<abi>-<platform> + + The "none" ABI will be added if it was not explicitly provided. + """ + warn = _warn_keyword_parameter("generic_tags", kwargs) + if not interpreter: + interp_name = interpreter_name() + interp_version = interpreter_version(warn=warn) + interpreter = "".join([interp_name, interp_version]) + if abis is None: + abis = _generic_abi() + platforms = list(platforms or _platform_tags()) + abis = list(abis) + if "none" not in abis: + abis.append("none") + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + + +def _py_interpreter_range(py_version): + # type: (PythonVersion) -> Iterator[str] + """ + Yields Python versions in descending order. 
+ + After the latest version, the major-only version will be yielded, and then + all previous versions of that major version. + """ + if len(py_version) > 1: + yield "py{version}".format(version=_version_nodot(py_version[:2])) + yield "py{major}".format(major=py_version[0]) + if len(py_version) > 1: + for minor in range(py_version[1] - 1, -1, -1): + yield "py{version}".format(version=_version_nodot((py_version[0], minor))) + + +def compatible_tags( + python_version=None, # type: Optional[PythonVersion] + interpreter=None, # type: Optional[str] + platforms=None, # type: Optional[Iterable[str]] +): + # type: (...) -> Iterator[Tag] + """ + Yields the sequence of tags that are compatible with a specific version of Python. + + The tags consist of: + - py*-none-<platform> + - <interpreter>-none-any # ... if `interpreter` is provided. + - py*-none-any + """ + if not python_version: + python_version = sys.version_info[:2] + platforms = list(platforms or _platform_tags()) + for version in _py_interpreter_range(python_version): + for platform_ in platforms: + yield Tag(version, "none", platform_) + if interpreter: + yield Tag(interpreter, "none", "any") + for version in _py_interpreter_range(python_version): + yield Tag(version, "none", "any") + + +def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER): + # type: (str, bool) -> str + if not is_32bit: + return arch + + if arch.startswith("ppc"): + return "ppc" + + return "i386" + + +def _mac_binary_formats(version, cpu_arch): + # type: (MacVersion, str) -> List[str] + formats = [cpu_arch] + if cpu_arch == "x86_64": + if version < (10, 4): + return [] + formats.extend(["intel", "fat64", "fat32"]) + + elif cpu_arch == "i386": + if version < (10, 4): + return [] + formats.extend(["intel", "fat32", "fat"]) + + elif cpu_arch == "ppc64": + # TODO: Need to care about 32-bit PPC for ppc64 through 10.2? 
+ if version > (10, 5) or version < (10, 4): + return [] + formats.append("fat64") + + elif cpu_arch == "ppc": + if version > (10, 6): + return [] + formats.extend(["fat32", "fat"]) + + formats.append("universal") + return formats + + +def mac_platforms(version=None, arch=None): + # type: (Optional[MacVersion], Optional[str]) -> Iterator[str] + """ + Yields the platform tags for a macOS system. + + The `version` parameter is a two-item tuple specifying the macOS version to + generate platform tags for. The `arch` parameter is the CPU architecture to + generate platform tags for. Both parameters default to the appropriate value + for the current system. + """ + version_str, _, cpu_arch = platform.mac_ver() # type: ignore + if version is None: + version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) + else: + version = version + if arch is None: + arch = _mac_arch(cpu_arch) + else: + arch = arch + for minor_version in range(version[1], -1, -1): + compat_version = version[0], minor_version + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield "macosx_{major}_{minor}_{binary_format}".format( + major=compat_version[0], + minor=compat_version[1], + binary_format=binary_format, + ) + + +# From PEP 513. +def _is_manylinux_compatible(name, glibc_version): + # type: (str, GlibcVersion) -> bool + # Check for presence of _manylinux module. + try: + import _manylinux # noqa + + return bool(getattr(_manylinux, name + "_compatible")) + except (ImportError, AttributeError): + # Fall through to heuristic check below. + pass + + return _have_compatible_glibc(*glibc_version) + + +def _glibc_version_string(): + # type: () -> Optional[str] + # Returns glibc version string, or None if not using glibc. 
+ return _glibc_version_string_confstr() or _glibc_version_string_ctypes() + + +def _glibc_version_string_confstr(): + # type: () -> Optional[str] + """ + Primary implementation of glibc_version_string using os.confstr. + """ + # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely + # to be broken or missing. This strategy is used in the standard library + # platform module. + # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183 + try: + # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17". + version_string = os.confstr( # type: ignore[attr-defined] # noqa: F821 + "CS_GNU_LIBC_VERSION" + ) + assert version_string is not None + _, version = version_string.split() # type: Tuple[str, str] + except (AssertionError, AttributeError, OSError, ValueError): + # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... + return None + return version + + +def _glibc_version_string_ctypes(): + # type: () -> Optional[str] + """ + Fallback implementation of glibc_version_string using ctypes. + """ + try: + import ctypes + except ImportError: + return None + + # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen + # manpage says, "If filename is NULL, then the returned handle is for the + # main program". This way we can let the linker do the work to figure out + # which libc our process is actually using. + # + # Note: typeshed is wrong here so we are ignoring this line. + process_namespace = ctypes.CDLL(None) # type: ignore + try: + gnu_get_libc_version = process_namespace.gnu_get_libc_version + except AttributeError: + # Symbol doesn't exist -> therefore, we are not linked to + # glibc. 
+ return None + + # Call gnu_get_libc_version, which returns a string like "2.5" + gnu_get_libc_version.restype = ctypes.c_char_p + version_str = gnu_get_libc_version() # type: str + # py2 / py3 compatibility: + if not isinstance(version_str, str): + version_str = version_str.decode("ascii") + + return version_str + + +# Separated out from have_compatible_glibc for easier unit testing. +def _check_glibc_version(version_str, required_major, minimum_minor): + # type: (str, int, int) -> bool + # Parse string and check against requested version. + # + # We use a regexp instead of str.split because we want to discard any + # random junk that might come after the minor version -- this might happen + # in patched/forked versions of glibc (e.g. Linaro's version of glibc + # uses version strings like "2.20-2014.11"). See gh-3588. + m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str) + if not m: + warnings.warn( + "Expected glibc version with 2 components major.minor," + " got: %s" % version_str, + RuntimeWarning, + ) + return False + return ( + int(m.group("major")) == required_major + and int(m.group("minor")) >= minimum_minor + ) + + +def _have_compatible_glibc(required_major, minimum_minor): + # type: (int, int) -> bool + version_str = _glibc_version_string() + if version_str is None: + return False + return _check_glibc_version(version_str, required_major, minimum_minor) + + +# Python does not provide platform information at sufficient granularity to +# identify the architecture of the running executable in some cases, so we +# determine it dynamically by reading the information from the running +# process. This only applies on Linux, which uses the ELF format. +class _ELFFileHeader(object): + # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header + class _InvalidELFFileHeader(ValueError): + """ + An invalid ELF file header was found. 
+ """ + + ELF_MAGIC_NUMBER = 0x7F454C46 + ELFCLASS32 = 1 + ELFCLASS64 = 2 + ELFDATA2LSB = 1 + ELFDATA2MSB = 2 + EM_386 = 3 + EM_S390 = 22 + EM_ARM = 40 + EM_X86_64 = 62 + EF_ARM_ABIMASK = 0xFF000000 + EF_ARM_ABI_VER5 = 0x05000000 + EF_ARM_ABI_FLOAT_HARD = 0x00000400 + + def __init__(self, file): + # type: (IO[bytes]) -> None + def unpack(fmt): + # type: (str) -> int + try: + (result,) = struct.unpack( + fmt, file.read(struct.calcsize(fmt)) + ) # type: (int, ) + except struct.error: + raise _ELFFileHeader._InvalidELFFileHeader() + return result + + self.e_ident_magic = unpack(">I") + if self.e_ident_magic != self.ELF_MAGIC_NUMBER: + raise _ELFFileHeader._InvalidELFFileHeader() + self.e_ident_class = unpack("B") + if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}: + raise _ELFFileHeader._InvalidELFFileHeader() + self.e_ident_data = unpack("B") + if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}: + raise _ELFFileHeader._InvalidELFFileHeader() + self.e_ident_version = unpack("B") + self.e_ident_osabi = unpack("B") + self.e_ident_abiversion = unpack("B") + self.e_ident_pad = file.read(7) + format_h = "<H" if self.e_ident_data == self.ELFDATA2LSB else ">H" + format_i = "<I" if self.e_ident_data == self.ELFDATA2LSB else ">I" + format_q = "<Q" if self.e_ident_data == self.ELFDATA2LSB else ">Q" + format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q + self.e_type = unpack(format_h) + self.e_machine = unpack(format_h) + self.e_version = unpack(format_i) + self.e_entry = unpack(format_p) + self.e_phoff = unpack(format_p) + self.e_shoff = unpack(format_p) + self.e_flags = unpack(format_i) + self.e_ehsize = unpack(format_h) + self.e_phentsize = unpack(format_h) + self.e_phnum = unpack(format_h) + self.e_shentsize = unpack(format_h) + self.e_shnum = unpack(format_h) + self.e_shstrndx = unpack(format_h) + + +def _get_elf_header(): + # type: () -> Optional[_ELFFileHeader] + try: + with open(sys.executable, "rb") as f: + elf_header 
= _ELFFileHeader(f) + except (IOError, OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader): + return None + return elf_header + + +def _is_linux_armhf(): + # type: () -> bool + # hard-float ABI can be detected from the ELF header of the running + # process + # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf + elf_header = _get_elf_header() + if elf_header is None: + return False + result = elf_header.e_ident_class == elf_header.ELFCLASS32 + result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB + result &= elf_header.e_machine == elf_header.EM_ARM + result &= ( + elf_header.e_flags & elf_header.EF_ARM_ABIMASK + ) == elf_header.EF_ARM_ABI_VER5 + result &= ( + elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD + ) == elf_header.EF_ARM_ABI_FLOAT_HARD + return result + + +def _is_linux_i686(): + # type: () -> bool + elf_header = _get_elf_header() + if elf_header is None: + return False + result = elf_header.e_ident_class == elf_header.ELFCLASS32 + result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB + result &= elf_header.e_machine == elf_header.EM_386 + return result + + +def _have_compatible_manylinux_abi(arch): + # type: (str) -> bool + if arch == "armv7l": + return _is_linux_armhf() + if arch == "i686": + return _is_linux_i686() + return True + + +def _linux_platforms(is_32bit=_32_BIT_INTERPRETER): + # type: (bool) -> Iterator[str] + linux = _normalize_string(distutils.util.get_platform()) + if is_32bit: + if linux == "linux_x86_64": + linux = "linux_i686" + elif linux == "linux_aarch64": + linux = "linux_armv7l" + manylinux_support = [] + _, arch = linux.split("_", 1) + if _have_compatible_manylinux_abi(arch): + if arch in {"x86_64", "i686", "aarch64", "armv7l", "ppc64", "ppc64le", "s390x"}: + manylinux_support.append( + ("manylinux2014", (2, 17)) + ) # CentOS 7 w/ glibc 2.17 (PEP 599) + if arch in {"x86_64", "i686"}: + manylinux_support.append( + ("manylinux2010", (2, 12)) + ) # CentOS 6 w/ glibc 2.12 (PEP 571) + manylinux_support.append( + 
("manylinux1", (2, 5)) + ) # CentOS 5 w/ glibc 2.5 (PEP 513) + manylinux_support_iter = iter(manylinux_support) + for name, glibc_version in manylinux_support_iter: + if _is_manylinux_compatible(name, glibc_version): + yield linux.replace("linux", name) + break + # Support for a later manylinux implies support for an earlier version. + for name, _ in manylinux_support_iter: + yield linux.replace("linux", name) + yield linux + + +def _generic_platforms(): + # type: () -> Iterator[str] + yield _normalize_string(distutils.util.get_platform()) + + +def _platform_tags(): + # type: () -> Iterator[str] + """ + Provides the platform tags for this installation. + """ + if platform.system() == "Darwin": + return mac_platforms() + elif platform.system() == "Linux": + return _linux_platforms() + else: + return _generic_platforms() + + +def interpreter_name(): + # type: () -> str + """ + Returns the name of the running interpreter. + """ + try: + name = sys.implementation.name # type: ignore + except AttributeError: # pragma: no cover + # Python 2.7 compatibility. + name = platform.python_implementation().lower() + return INTERPRETER_SHORT_NAMES.get(name) or name + + +def interpreter_version(**kwargs): + # type: (bool) -> str + """ + Returns the version of the running interpreter. + """ + warn = _warn_keyword_parameter("interpreter_version", kwargs) + version = _get_config_var("py_version_nodot", warn=warn) + if version: + version = str(version) + else: + version = _version_nodot(sys.version_info[:2]) + return version + + +def _version_nodot(version): + # type: (PythonVersion) -> str + if any(v >= 10 for v in version): + sep = "_" + else: + sep = "" + return sep.join(map(str, version)) + + +def sys_tags(**kwargs): + # type: (bool) -> Iterator[Tag] + """ + Returns the sequence of tag triples for the running interpreter. + + The order of the sequence corresponds to priority order for the + interpreter, from most to least important. 
+ """ + warn = _warn_keyword_parameter("sys_tags", kwargs) + + interp_name = interpreter_name() + if interp_name == "cp": + for tag in cpython_tags(warn=warn): + yield tag + else: + for tag in generic_tags(): + yield tag + + for tag in compatible_tags(): + yield tag diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/utils.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/utils.py index 88418786..19579c1a 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/utils.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/utils.py @@ -5,28 +5,36 @@ from __future__ import absolute_import, division, print_function import re +from ._typing import TYPE_CHECKING, cast from .version import InvalidVersion, Version +if TYPE_CHECKING: # pragma: no cover + from typing import NewType, Union + + NormalizedName = NewType("NormalizedName", str) _canonicalize_regex = re.compile(r"[-_.]+") def canonicalize_name(name): + # type: (str) -> NormalizedName # This is taken from PEP 503. - return _canonicalize_regex.sub("-", name).lower() + value = _canonicalize_regex.sub("-", name).lower() + return cast("NormalizedName", value) -def canonicalize_version(version): +def canonicalize_version(_version): + # type: (str) -> Union[Version, str] """ - This is very similar to Version.__str__, but has one subtle differences + This is very similar to Version.__str__, but has one subtle difference with the way it handles the release segment. 
""" try: - version = Version(version) + version = Version(_version) except InvalidVersion: # Legacy versions cannot be normalized - return version + return _version parts = [] diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/version.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/version.py index 95157a1f..00371e86 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/version.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/packaging/version.py @@ -7,8 +7,35 @@ import collections import itertools import re -from ._structures import Infinity - +from ._structures import Infinity, NegativeInfinity +from ._typing import TYPE_CHECKING + +if TYPE_CHECKING: # pragma: no cover + from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union + + from ._structures import InfinityType, NegativeInfinityType + + InfiniteTypes = Union[InfinityType, NegativeInfinityType] + PrePostDevType = Union[InfiniteTypes, Tuple[str, int]] + SubLocalType = Union[InfiniteTypes, int, str] + LocalType = Union[ + NegativeInfinityType, + Tuple[ + Union[ + SubLocalType, + Tuple[SubLocalType, str], + Tuple[NegativeInfinityType, SubLocalType], + ], + ..., + ], + ] + CmpKey = Tuple[ + int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType + ] + LegacyCmpKey = Tuple[int, Tuple[str, ...]] + VersionComparisonMethod = Callable[ + [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool + ] __all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"] @@ -19,6 +46,7 @@ _Version = collections.namedtuple( def parse(version): + # type: (str) -> Union[LegacyVersion, Version] """ Parse the given version string and return either a :class:`Version` object or a :class:`LegacyVersion` object depending on if the given version is @@ -37,28 +65,38 @@ class InvalidVersion(ValueError): class 
_BaseVersion(object): + _key = None # type: Union[CmpKey, LegacyCmpKey] + def __hash__(self): + # type: () -> int return hash(self._key) def __lt__(self, other): + # type: (_BaseVersion) -> bool return self._compare(other, lambda s, o: s < o) def __le__(self, other): + # type: (_BaseVersion) -> bool return self._compare(other, lambda s, o: s <= o) def __eq__(self, other): + # type: (object) -> bool return self._compare(other, lambda s, o: s == o) def __ge__(self, other): + # type: (_BaseVersion) -> bool return self._compare(other, lambda s, o: s >= o) def __gt__(self, other): + # type: (_BaseVersion) -> bool return self._compare(other, lambda s, o: s > o) def __ne__(self, other): + # type: (object) -> bool return self._compare(other, lambda s, o: s != o) def _compare(self, other, method): + # type: (object, VersionComparisonMethod) -> Union[bool, NotImplemented] if not isinstance(other, _BaseVersion): return NotImplemented @@ -67,57 +105,71 @@ class _BaseVersion(object): class LegacyVersion(_BaseVersion): def __init__(self, version): + # type: (str) -> None self._version = str(version) self._key = _legacy_cmpkey(self._version) def __str__(self): + # type: () -> str return self._version def __repr__(self): + # type: () -> str return "<LegacyVersion({0})>".format(repr(str(self))) @property def public(self): + # type: () -> str return self._version @property def base_version(self): + # type: () -> str return self._version @property def epoch(self): + # type: () -> int return -1 @property def release(self): + # type: () -> None return None @property def pre(self): + # type: () -> None return None @property def post(self): + # type: () -> None return None @property def dev(self): + # type: () -> None return None @property def local(self): + # type: () -> None return None @property def is_prerelease(self): + # type: () -> bool return False @property def is_postrelease(self): + # type: () -> bool return False @property def is_devrelease(self): + # type: () -> bool return 
False @@ -133,6 +185,7 @@ _legacy_version_replacement_map = { def _parse_version_parts(s): + # type: (str) -> Iterator[str] for part in _legacy_version_component_re.split(s): part = _legacy_version_replacement_map.get(part, part) @@ -150,6 +203,8 @@ def _parse_version_parts(s): def _legacy_cmpkey(version): + # type: (str) -> LegacyCmpKey + # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch # greater than or equal to 0. This will effectively put the LegacyVersion, # which uses the defacto standard originally implemented by setuptools, @@ -158,7 +213,7 @@ def _legacy_cmpkey(version): # This scheme is taken from pkg_resources.parse_version setuptools prior to # it's adoption of the packaging library. - parts = [] + parts = [] # type: List[str] for part in _parse_version_parts(version.lower()): if part.startswith("*"): # remove "-" before a prerelease tag @@ -171,9 +226,8 @@ def _legacy_cmpkey(version): parts.pop() parts.append(part) - parts = tuple(parts) - return epoch, parts + return epoch, tuple(parts) # Deliberately not anchored to the start and end of the string, to make it @@ -215,6 +269,8 @@ class Version(_BaseVersion): _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE) def __init__(self, version): + # type: (str) -> None + # Validate the version and parse it into pieces match = self._regex.search(version) if not match: @@ -243,9 +299,11 @@ class Version(_BaseVersion): ) def __repr__(self): + # type: () -> str return "<Version({0})>".format(repr(str(self))) def __str__(self): + # type: () -> str parts = [] # Epoch @@ -275,26 +333,35 @@ class Version(_BaseVersion): @property def epoch(self): - return self._version.epoch + # type: () -> int + _epoch = self._version.epoch # type: int + return _epoch @property def release(self): - return self._version.release + # type: () -> Tuple[int, ...] + _release = self._version.release # type: Tuple[int, ...] 
+ return _release @property def pre(self): - return self._version.pre + # type: () -> Optional[Tuple[str, int]] + _pre = self._version.pre # type: Optional[Tuple[str, int]] + return _pre @property def post(self): + # type: () -> Optional[Tuple[str, int]] return self._version.post[1] if self._version.post else None @property def dev(self): + # type: () -> Optional[Tuple[str, int]] return self._version.dev[1] if self._version.dev else None @property def local(self): + # type: () -> Optional[str] if self._version.local: return ".".join(str(x) for x in self._version.local) else: @@ -302,10 +369,12 @@ class Version(_BaseVersion): @property def public(self): + # type: () -> str return str(self).split("+", 1)[0] @property def base_version(self): + # type: () -> str parts = [] # Epoch @@ -319,18 +388,41 @@ class Version(_BaseVersion): @property def is_prerelease(self): + # type: () -> bool return self.dev is not None or self.pre is not None @property def is_postrelease(self): + # type: () -> bool return self.post is not None @property def is_devrelease(self): + # type: () -> bool return self.dev is not None + @property + def major(self): + # type: () -> int + return self.release[0] if len(self.release) >= 1 else 0 + + @property + def minor(self): + # type: () -> int + return self.release[1] if len(self.release) >= 2 else 0 + + @property + def micro(self): + # type: () -> int + return self.release[2] if len(self.release) >= 3 else 0 + + +def _parse_letter_version( + letter, # type: str + number, # type: Union[str, bytes, SupportsInt] +): + # type: (...) -> Optional[Tuple[str, int]] -def _parse_letter_version(letter, number): if letter: # We consider there to be an implicit 0 in a pre-release if there is # not a numeral associated with it. 
@@ -360,11 +452,14 @@ def _parse_letter_version(letter, number): return letter, int(number) + return None + _local_version_separators = re.compile(r"[\._-]") def _parse_local_version(local): + # type: (str) -> Optional[LocalType] """ Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve"). """ @@ -373,15 +468,25 @@ def _parse_local_version(local): part.lower() if not part.isdigit() else int(part) for part in _local_version_separators.split(local) ) + return None + +def _cmpkey( + epoch, # type: int + release, # type: Tuple[int, ...] + pre, # type: Optional[Tuple[str, int]] + post, # type: Optional[Tuple[str, int]] + dev, # type: Optional[Tuple[str, int]] + local, # type: Optional[Tuple[SubLocalType]] +): + # type: (...) -> CmpKey -def _cmpkey(epoch, release, pre, post, dev, local): # When we compare a release version, we want to compare it with all of the # trailing zeros removed. So we'll use a reverse the list, drop all the now # leading zeros until we come to something non zero, then take the rest # re-reverse it back into the correct order and make it a tuple and use # that for our sorting key. - release = tuple( + _release = tuple( reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release)))) ) @@ -390,23 +495,31 @@ def _cmpkey(epoch, release, pre, post, dev, local): # if there is not a pre or a post segment. If we have one of those then # the normal sorting rules will handle this case correctly. if pre is None and post is None and dev is not None: - pre = -Infinity + _pre = NegativeInfinity # type: PrePostDevType # Versions without a pre-release (except as noted above) should sort after # those with one. elif pre is None: - pre = Infinity + _pre = Infinity + else: + _pre = pre # Versions without a post segment should sort before those with one. if post is None: - post = -Infinity + _post = NegativeInfinity # type: PrePostDevType + + else: + _post = post # Versions without a development segment should sort after those with one. 
if dev is None: - dev = Infinity + _dev = Infinity # type: PrePostDevType + + else: + _dev = dev if local is None: # Versions without a local segment should sort before those with one. - local = -Infinity + _local = NegativeInfinity # type: LocalType else: # Versions with a local segment need that segment parsed to implement # the sorting rules in PEP440. @@ -415,6 +528,8 @@ def _cmpkey(epoch, release, pre, post, dev, local): # - Numeric segments sort numerically # - Shorter versions sort before longer versions when the prefixes # match exactly - local = tuple((i, "") if isinstance(i, int) else (-Infinity, i) for i in local) + _local = tuple( + (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local + ) - return epoch, release, pre, post, dev, local + return epoch, _release, _pre, _post, _dev, _local diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/__init__.py index 9c1a098f..7355b68a 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/__init__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/__init__.py @@ -1,4 +1,4 @@ """Wrappers to build Python packages using PEP 517 hooks """ -__version__ = '0.5.0' +__version__ = '0.8.2' diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/_in_process.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/_in_process.py index d6524b66..a536b03e 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/_in_process.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/_in_process.py @@ -2,7 +2,9 @@ It expects: - Command line args: hook_name, control_dir -- Environment variable: PEP517_BUILD_BACKEND=entry.point:spec +- Environment variables: + PEP517_BUILD_BACKEND=entry.point:spec + 
PEP517_BACKEND_PATH=paths (separated with os.pathsep) - control_dir/input.json: - {"kwargs": {...}} @@ -12,28 +14,86 @@ Results: """ from glob import glob from importlib import import_module +import json import os +import os.path from os.path import join as pjoin import re import shutil import sys +import traceback -# This is run as a script, not a module, so it can't do a relative import -import compat +# This file is run as a script, and `import compat` is not zip-safe, so we +# include write_json() and read_json() from compat.py. +# +# Handle reading and writing JSON in UTF-8, on Python 3 and 2. + +if sys.version_info[0] >= 3: + # Python 3 + def write_json(obj, path, **kwargs): + with open(path, 'w', encoding='utf-8') as f: + json.dump(obj, f, **kwargs) + + def read_json(path): + with open(path, 'r', encoding='utf-8') as f: + return json.load(f) + +else: + # Python 2 + def write_json(obj, path, **kwargs): + with open(path, 'wb') as f: + json.dump(obj, f, encoding='utf-8', **kwargs) + + def read_json(path): + with open(path, 'rb') as f: + return json.load(f) class BackendUnavailable(Exception): """Raised if we cannot import the backend""" + def __init__(self, traceback): + self.traceback = traceback + + +class BackendInvalid(Exception): + """Raised if the backend is invalid""" + def __init__(self, message): + self.message = message + + +class HookMissing(Exception): + """Raised if a hook is missing and we are not executing the fallback""" + + +def contained_in(filename, directory): + """Test if a file is located within the given directory.""" + filename = os.path.normcase(os.path.abspath(filename)) + directory = os.path.normcase(os.path.abspath(directory)) + return os.path.commonprefix([filename, directory]) == directory def _build_backend(): """Find and load the build backend""" + # Add in-tree backend directories to the front of sys.path. 
+ backend_path = os.environ.get('PEP517_BACKEND_PATH') + if backend_path: + extra_pathitems = backend_path.split(os.pathsep) + sys.path[:0] = extra_pathitems + ep = os.environ['PEP517_BUILD_BACKEND'] mod_path, _, obj_path = ep.partition(':') try: obj = import_module(mod_path) except ImportError: - raise BackendUnavailable + raise BackendUnavailable(traceback.format_exc()) + + if backend_path: + if not any( + contained_in(obj.__file__, path) + for path in extra_pathitems + ): + raise BackendInvalid("Backend was not loaded from backend-path") + if obj_path: for path_part in obj_path.split('.'): obj = getattr(obj, path_part) @@ -54,15 +114,19 @@ def get_requires_for_build_wheel(config_settings): return hook(config_settings) -def prepare_metadata_for_build_wheel(metadata_directory, config_settings): +def prepare_metadata_for_build_wheel( + metadata_directory, config_settings, _allow_fallback): """Invoke optional prepare_metadata_for_build_wheel - Implements a fallback by building a wheel if the hook isn't defined. + Implements a fallback by building a wheel if the hook isn't defined, + unless _allow_fallback is False in which case HookMissing is raised. 
""" backend = _build_backend() try: hook = backend.prepare_metadata_for_build_wheel except AttributeError: + if not _allow_fallback: + raise HookMissing() return _get_wheel_metadata_from_wheel(backend, metadata_directory, config_settings) else: @@ -161,6 +225,8 @@ class _DummyException(Exception): class GotUnsupportedOperation(Exception): """For internal use when backend raises UnsupportedOperation""" + def __init__(self, traceback): + self.traceback = traceback def build_sdist(sdist_directory, config_settings): @@ -169,7 +235,7 @@ def build_sdist(sdist_directory, config_settings): try: return backend.build_sdist(sdist_directory, config_settings) except getattr(backend, 'UnsupportedOperation', _DummyException): - raise GotUnsupportedOperation + raise GotUnsupportedOperation(traceback.format_exc()) HOOK_NAMES = { @@ -190,17 +256,24 @@ def main(): sys.exit("Unknown hook: %s" % hook_name) hook = globals()[hook_name] - hook_input = compat.read_json(pjoin(control_dir, 'input.json')) + hook_input = read_json(pjoin(control_dir, 'input.json')) json_out = {'unsupported': False, 'return_val': None} try: json_out['return_val'] = hook(**hook_input['kwargs']) - except BackendUnavailable: + except BackendUnavailable as e: json_out['no_backend'] = True - except GotUnsupportedOperation: + json_out['traceback'] = e.traceback + except BackendInvalid as e: + json_out['backend_invalid'] = True + json_out['backend_error'] = e.message + except GotUnsupportedOperation as e: json_out['unsupported'] = True + json_out['traceback'] = e.traceback + except HookMissing: + json_out['hook_missing'] = True - compat.write_json(json_out, pjoin(control_dir, 'output.json'), indent=2) + write_json(json_out, pjoin(control_dir, 'output.json'), indent=2) if __name__ == '__main__': diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/build.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/build.py index ac6c9495..26430144 100644 --- 
a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/build.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/build.py @@ -3,25 +3,56 @@ import argparse import logging import os -import contextlib -from pip._vendor import pytoml +from pip._vendor import toml import shutil -import errno -import tempfile from .envbuild import BuildEnvironment from .wrappers import Pep517HookCaller +from .dirtools import tempdir, mkdir_p +from .compat import FileNotFoundError log = logging.getLogger(__name__) -@contextlib.contextmanager -def tempdir(): - td = tempfile.mkdtemp() +def validate_system(system): + """ + Ensure build system has the requisite fields. + """ + required = {'requires', 'build-backend'} + if not (required <= set(system)): + message = "Missing required fields: {missing}".format( + missing=required-set(system), + ) + raise ValueError(message) + + +def load_system(source_dir): + """ + Load the build system from a source dir (pyproject.toml). + """ + pyproject = os.path.join(source_dir, 'pyproject.toml') + with open(pyproject) as f: + pyproject_data = toml.load(f) + return pyproject_data['build-system'] + + +def compat_system(source_dir): + """ + Given a source dir, attempt to get a build system backend + and requirements from pyproject.toml. Fallback to + setuptools but only if the file was not found or a build + system was not indicated. 
+ """ try: - yield td - finally: - shutil.rmtree(td) + system = load_system(source_dir) + except (FileNotFoundError, KeyError): + system = {} + system.setdefault( + 'build-backend', + 'setuptools.build_meta:__legacy__', + ) + system.setdefault('requires', ['setuptools', 'wheel']) + return system def _do_build(hooks, env, dist, dest): @@ -42,33 +73,18 @@ def _do_build(hooks, env, dist, dest): shutil.move(source, os.path.join(dest, os.path.basename(filename))) -def mkdir_p(*args, **kwargs): - """Like `mkdir`, but does not raise an exception if the - directory already exists. - """ - try: - return os.mkdir(*args, **kwargs) - except OSError as exc: - if exc.errno != errno.EEXIST: - raise - - -def build(source_dir, dist, dest=None): - pyproject = os.path.join(source_dir, 'pyproject.toml') +def build(source_dir, dist, dest=None, system=None): + system = system or load_system(source_dir) dest = os.path.join(source_dir, dest or 'dist') mkdir_p(dest) - with open(pyproject) as f: - pyproject_data = pytoml.load(f) - # Ensure the mandatory data can be loaded - buildsys = pyproject_data['build-system'] - requires = buildsys['requires'] - backend = buildsys['build-backend'] - - hooks = Pep517HookCaller(source_dir, backend) + validate_system(system) + hooks = Pep517HookCaller( + source_dir, system['build-backend'], system.get('backend-path') + ) with BuildEnvironment() as env: - env.pip_install(requires) + env.pip_install(system['requires']) _do_build(hooks, env, dist, dest) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/check.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/check.py index f4cdc6be..13e722a3 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/check.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/check.py @@ -4,7 +4,7 @@ import argparse import logging import os from os.path import isfile, join as pjoin -from 
pip._vendor.pytoml import TomlError, load as toml_load +from pip._vendor.toml import TomlDecodeError, load as toml_load import shutil from subprocess import CalledProcessError import sys @@ -147,12 +147,13 @@ def check(source_dir): buildsys = pyproject_data['build-system'] requires = buildsys['requires'] backend = buildsys['build-backend'] + backend_path = buildsys.get('backend-path') log.info('Loaded pyproject.toml') - except (TomlError, KeyError): + except (TomlDecodeError, KeyError): log.error("Invalid pyproject.toml", exc_info=True) return False - hooks = Pep517HookCaller(source_dir, backend) + hooks = Pep517HookCaller(source_dir, backend, backend_path) sdist_ok = check_build_sdist(hooks, requires) wheel_ok = check_build_wheel(hooks, requires) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/compat.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/compat.py index 01c66fc7..8432acb7 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/compat.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/compat.py @@ -1,7 +1,10 @@ -"""Handle reading and writing JSON in UTF-8, on Python 3 and 2.""" +"""Python 2/3 compatibility""" import json import sys + +# Handle reading and writing JSON in UTF-8, on Python 3 and 2. 
+ if sys.version_info[0] >= 3: # Python 3 def write_json(obj, path, **kwargs): @@ -21,3 +24,11 @@ else: def read_json(path): with open(path, 'rb') as f: return json.load(f) + + +# FileNotFoundError + +try: + FileNotFoundError = FileNotFoundError +except NameError: + FileNotFoundError = IOError diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/dirtools.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/dirtools.py new file mode 100644 index 00000000..58c6ca0c --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/dirtools.py @@ -0,0 +1,44 @@ +import os +import io +import contextlib +import tempfile +import shutil +import errno +import zipfile + + +@contextlib.contextmanager +def tempdir(): + """Create a temporary directory in a context manager.""" + td = tempfile.mkdtemp() + try: + yield td + finally: + shutil.rmtree(td) + + +def mkdir_p(*args, **kwargs): + """Like `mkdir`, but does not raise an exception if the + directory already exists. 
+ """ + try: + return os.mkdir(*args, **kwargs) + except OSError as exc: + if exc.errno != errno.EEXIST: + raise + + +def dir_to_zipfile(root): + """Construct an in-memory zip file for a directory.""" + buffer = io.BytesIO() + zip_file = zipfile.ZipFile(buffer, 'w') + for root, dirs, files in os.walk(root): + for path in dirs: + fs_path = os.path.join(root, path) + rel_path = os.path.relpath(fs_path, root) + zip_file.writestr(rel_path + '/', '') + for path in files: + fs_path = os.path.join(root, path) + rel_path = os.path.relpath(fs_path, root) + zip_file.write(fs_path, rel_path) + return zip_file diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/envbuild.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/envbuild.py index f7ac5f46..4088dcdb 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/envbuild.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/envbuild.py @@ -3,23 +3,27 @@ import os import logging -from pip._vendor import pytoml +from pip._vendor import toml import shutil from subprocess import check_call import sys from sysconfig import get_paths from tempfile import mkdtemp -from .wrappers import Pep517HookCaller +from .wrappers import Pep517HookCaller, LoggerWrapper log = logging.getLogger(__name__) def _load_pyproject(source_dir): with open(os.path.join(source_dir, 'pyproject.toml')) as f: - pyproject_data = pytoml.load(f) + pyproject_data = toml.load(f) buildsys = pyproject_data['build-system'] - return buildsys['requires'], buildsys['build-backend'] + return ( + buildsys['requires'], + buildsys['build-backend'], + buildsys.get('backend-path'), + ) class BuildEnvironment(object): @@ -90,9 +94,14 @@ class BuildEnvironment(object): if not reqs: return log.info('Calling pip to install %s', reqs) - check_call([ + cmd = [ sys.executable, '-m', 'pip', 'install', '--ignore-installed', - '--prefix', self.path] + 
list(reqs)) + '--prefix', self.path] + list(reqs) + check_call( + cmd, + stdout=LoggerWrapper(log, logging.INFO), + stderr=LoggerWrapper(log, logging.ERROR), + ) def __exit__(self, exc_type, exc_val, exc_tb): needs_cleanup = ( @@ -126,8 +135,8 @@ def build_wheel(source_dir, wheel_dir, config_settings=None): """ if config_settings is None: config_settings = {} - requires, backend = _load_pyproject(source_dir) - hooks = Pep517HookCaller(source_dir, backend) + requires, backend, backend_path = _load_pyproject(source_dir) + hooks = Pep517HookCaller(source_dir, backend, backend_path) with BuildEnvironment() as env: env.pip_install(requires) @@ -148,8 +157,8 @@ def build_sdist(source_dir, sdist_dir, config_settings=None): """ if config_settings is None: config_settings = {} - requires, backend = _load_pyproject(source_dir) - hooks = Pep517HookCaller(source_dir, backend) + requires, backend, backend_path = _load_pyproject(source_dir) + hooks = Pep517HookCaller(source_dir, backend, backend_path) with BuildEnvironment() as env: env.pip_install(requires) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/meta.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/meta.py new file mode 100644 index 00000000..d525de5c --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/meta.py @@ -0,0 +1,92 @@ +"""Build metadata for a project using PEP 517 hooks. 
+""" +import argparse +import logging +import os +import shutil +import functools + +try: + import importlib.metadata as imp_meta +except ImportError: + import importlib_metadata as imp_meta + +try: + from zipfile import Path +except ImportError: + from zipp import Path + +from .envbuild import BuildEnvironment +from .wrappers import Pep517HookCaller, quiet_subprocess_runner +from .dirtools import tempdir, mkdir_p, dir_to_zipfile +from .build import validate_system, load_system, compat_system + +log = logging.getLogger(__name__) + + +def _prep_meta(hooks, env, dest): + reqs = hooks.get_requires_for_build_wheel({}) + log.info('Got build requires: %s', reqs) + + env.pip_install(reqs) + log.info('Installed dynamic build dependencies') + + with tempdir() as td: + log.info('Trying to build metadata in %s', td) + filename = hooks.prepare_metadata_for_build_wheel(td, {}) + source = os.path.join(td, filename) + shutil.move(source, os.path.join(dest, os.path.basename(filename))) + + +def build(source_dir='.', dest=None, system=None): + system = system or load_system(source_dir) + dest = os.path.join(source_dir, dest or 'dist') + mkdir_p(dest) + validate_system(system) + hooks = Pep517HookCaller( + source_dir, system['build-backend'], system.get('backend-path') + ) + + with hooks.subprocess_runner(quiet_subprocess_runner): + with BuildEnvironment() as env: + env.pip_install(system['requires']) + _prep_meta(hooks, env, dest) + + +def build_as_zip(builder=build): + with tempdir() as out_dir: + builder(dest=out_dir) + return dir_to_zipfile(out_dir) + + +def load(root): + """ + Given a source directory (root) of a package, + return an importlib.metadata.Distribution object + with metadata build from that package. 
+ """ + root = os.path.expanduser(root) + system = compat_system(root) + builder = functools.partial(build, source_dir=root, system=system) + path = Path(build_as_zip(builder)) + return imp_meta.PathDistribution(path) + + +parser = argparse.ArgumentParser() +parser.add_argument( + 'source_dir', + help="A directory containing pyproject.toml", +) +parser.add_argument( + '--out-dir', '-o', + help="Destination in which to save the builds relative to source dir", +) + + +def main(): + args = parser.parse_args() + build(args.source_dir, args.out_dir) + + +if __name__ == '__main__': + main() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/wrappers.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/wrappers.py index b14b8991..00a3d1a7 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/wrappers.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pep517/wrappers.py @@ -1,14 +1,24 @@ +import threading from contextlib import contextmanager import os from os.path import dirname, abspath, join as pjoin import shutil -from subprocess import check_call +from subprocess import check_call, check_output, STDOUT import sys from tempfile import mkdtemp from . 
import compat -_in_proc_script = pjoin(dirname(abspath(__file__)), '_in_process.py') + +try: + import importlib.resources as resources + + def _in_proc_script_path(): + return resources.path(__package__, '_in_process.py') +except ImportError: + @contextmanager + def _in_proc_script_path(): + yield pjoin(dirname(abspath(__file__)), '_in_process.py') @contextmanager @@ -22,10 +32,29 @@ def tempdir(): class BackendUnavailable(Exception): """Will be raised if the backend cannot be imported in the hook process.""" + def __init__(self, traceback): + self.traceback = traceback + + +class BackendInvalid(Exception): + """Will be raised if the backend is invalid.""" + def __init__(self, backend_name, backend_path, message): + self.backend_name = backend_name + self.backend_path = backend_path + self.message = message + + +class HookMissing(Exception): + """Will be raised on missing hooks.""" + def __init__(self, hook_name): + super(HookMissing, self).__init__(hook_name) + self.hook_name = hook_name class UnsupportedOperation(Exception): """May be raised by build_sdist if the backend indicates that it can't.""" + def __init__(self, traceback): + self.traceback = traceback def default_subprocess_runner(cmd, cwd=None, extra_environ=None): @@ -37,25 +66,86 @@ def default_subprocess_runner(cmd, cwd=None, extra_environ=None): check_call(cmd, cwd=cwd, env=env) +def quiet_subprocess_runner(cmd, cwd=None, extra_environ=None): + """A method of calling the wrapper subprocess while suppressing output.""" + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + + check_output(cmd, cwd=cwd, env=env, stderr=STDOUT) + + +def norm_and_check(source_tree, requested): + """Normalise and check a backend path. + + Ensure that the requested backend path is specified as a relative path, + and resolves to a location under the given source tree. + + Return an absolute version of the requested path. 
+ """ + if os.path.isabs(requested): + raise ValueError("paths must be relative") + + abs_source = os.path.abspath(source_tree) + abs_requested = os.path.normpath(os.path.join(abs_source, requested)) + # We have to use commonprefix for Python 2.7 compatibility. So we + # normalise case to avoid problems because commonprefix is a character + # based comparison :-( + norm_source = os.path.normcase(abs_source) + norm_requested = os.path.normcase(abs_requested) + if os.path.commonprefix([norm_source, norm_requested]) != norm_source: + raise ValueError("paths must be inside source tree") + + return abs_requested + + class Pep517HookCaller(object): """A wrapper around a source directory to be built with a PEP 517 backend. source_dir : The path to the source directory, containing pyproject.toml. - backend : The build backend spec, as per PEP 517, from pyproject.toml. + build_backend : The build backend spec, as per PEP 517, from + pyproject.toml. + backend_path : The backend path, as per PEP 517, from pyproject.toml. + runner : A callable that invokes the wrapper subprocess. + + The 'runner', if provided, must expect the following: + cmd : a list of strings representing the command and arguments to + execute, as would be passed to e.g. 'subprocess.check_call'. + cwd : a string representing the working directory that must be + used for the subprocess. Corresponds to the provided source_dir. + extra_environ : a dict mapping environment variable names to values + which must be set for the subprocess execution. 
""" - def __init__(self, source_dir, build_backend): + def __init__( + self, + source_dir, + build_backend, + backend_path=None, + runner=None, + ): + if runner is None: + runner = default_subprocess_runner + self.source_dir = abspath(source_dir) self.build_backend = build_backend - self._subprocess_runner = default_subprocess_runner + if backend_path: + backend_path = [ + norm_and_check(self.source_dir, p) for p in backend_path + ] + self.backend_path = backend_path + self._subprocess_runner = runner - # TODO: Is this over-engineered? Maybe frontends only need to - # set this when creating the wrapper, not on every call. @contextmanager def subprocess_runner(self, runner): + """A context manager for temporarily overriding the default subprocess + runner. + """ prev = self._subprocess_runner self._subprocess_runner = runner - yield - self._subprocess_runner = prev + try: + yield + finally: + self._subprocess_runner = prev def get_requires_for_build_wheel(self, config_settings=None): """Identify packages required for building a wheel @@ -72,18 +162,21 @@ class Pep517HookCaller(object): }) def prepare_metadata_for_build_wheel( - self, metadata_directory, config_settings=None): + self, metadata_directory, config_settings=None, + _allow_fallback=True): """Prepare a *.dist-info folder with metadata for this project. Returns the name of the newly created folder. If the build backend defines a hook with this name, it will be called in a subprocess. If not, the backend will be asked to build a wheel, - and the dist-info extracted from that. + and the dist-info extracted from that (unless _allow_fallback is + False). """ return self._call_hook('prepare_metadata_for_build_wheel', { 'metadata_directory': abspath(metadata_directory), 'config_settings': config_settings, + '_allow_fallback': _allow_fallback, }) def build_wheel( @@ -139,25 +232,77 @@ class Pep517HookCaller(object): # letters, digits and _, . 
and : characters, and will be used as a # Python identifier, so non-ASCII content is wrong on Python 2 in # any case). + # For backend_path, we use sys.getfilesystemencoding. if sys.version_info[0] == 2: build_backend = self.build_backend.encode('ASCII') else: build_backend = self.build_backend + extra_environ = {'PEP517_BUILD_BACKEND': build_backend} + + if self.backend_path: + backend_path = os.pathsep.join(self.backend_path) + if sys.version_info[0] == 2: + backend_path = backend_path.encode(sys.getfilesystemencoding()) + extra_environ['PEP517_BACKEND_PATH'] = backend_path with tempdir() as td: - compat.write_json({'kwargs': kwargs}, pjoin(td, 'input.json'), + hook_input = {'kwargs': kwargs} + compat.write_json(hook_input, pjoin(td, 'input.json'), indent=2) # Run the hook in a subprocess - self._subprocess_runner( - [sys.executable, _in_proc_script, hook_name, td], - cwd=self.source_dir, - extra_environ={'PEP517_BUILD_BACKEND': build_backend} - ) + with _in_proc_script_path() as script: + self._subprocess_runner( + [sys.executable, str(script), hook_name, td], + cwd=self.source_dir, + extra_environ=extra_environ + ) data = compat.read_json(pjoin(td, 'output.json')) if data.get('unsupported'): - raise UnsupportedOperation + raise UnsupportedOperation(data.get('traceback', '')) if data.get('no_backend'): - raise BackendUnavailable + raise BackendUnavailable(data.get('traceback', '')) + if data.get('backend_invalid'): + raise BackendInvalid( + backend_name=self.build_backend, + backend_path=self.backend_path, + message=data.get('backend_error', '') + ) + if data.get('hook_missing'): + raise HookMissing(hook_name) return data['return_val'] + + +class LoggerWrapper(threading.Thread): + """ + Read messages from a pipe and redirect them + to a logger (see python's logging module). 
+ """ + + def __init__(self, logger, level): + threading.Thread.__init__(self) + self.daemon = True + + self.logger = logger + self.level = level + + # create the pipe and reader + self.fd_read, self.fd_write = os.pipe() + self.reader = os.fdopen(self.fd_read) + + self.start() + + def fileno(self): + return self.fd_write + + @staticmethod + def remove_newline(msg): + return msg[:-1] if msg.endswith(os.linesep) else msg + + def run(self): + for line in self.reader: + self._write(self.remove_newline(line)) + + def _write(self, message): + self.logger.log(self.level, message) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pkg_resources/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pkg_resources/__init__.py index 9c4fd8ea..a457ff27 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pkg_resources/__init__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pkg_resources/__init__.py @@ -39,6 +39,8 @@ import tempfile import textwrap import itertools import inspect +import ntpath +import posixpath from pkgutil import get_importer try: @@ -86,8 +88,8 @@ __import__('pip._vendor.packaging.markers') __metaclass__ = type -if (3, 0) < sys.version_info < (3, 4): - raise RuntimeError("Python 3.4 or later is required") +if (3, 0) < sys.version_info < (3, 5): + raise RuntimeError("Python 3.5 or later is required") if six.PY2: # Those builtin exceptions are only defined in Python 3 @@ -331,7 +333,7 @@ class UnknownExtra(ResolutionError): _provider_factories = {} -PY_MAJOR = sys.version[:3] +PY_MAJOR = '{}.{}'.format(*sys.version_info) EGG_DIST = 3 BINARY_DIST = 2 SOURCE_DIST = 1 @@ -1401,14 +1403,30 @@ class NullProvider: def has_resource(self, resource_name): return self._has(self._fn(self.module_path, resource_name)) + def _get_metadata_path(self, name): + return self._fn(self.egg_info, name) + def has_metadata(self, name): - return self.egg_info and 
self._has(self._fn(self.egg_info, name)) + if not self.egg_info: + return self.egg_info + + path = self._get_metadata_path(name) + return self._has(path) def get_metadata(self, name): if not self.egg_info: return "" - value = self._get(self._fn(self.egg_info, name)) - return value.decode('utf-8') if six.PY3 else value + path = self._get_metadata_path(name) + value = self._get(path) + if six.PY2: + return value + try: + return value.decode('utf-8') + except UnicodeDecodeError as exc: + # Include the path in the error message to simplify + # troubleshooting, and without changing the exception type. + exc.reason += ' in {} file at path: {}'.format(name, path) + raise def get_metadata_lines(self, name): return yield_lines(self.get_metadata(name)) @@ -1466,10 +1484,86 @@ class NullProvider: ) def _fn(self, base, resource_name): + self._validate_resource_path(resource_name) if resource_name: return os.path.join(base, *resource_name.split('/')) return base + @staticmethod + def _validate_resource_path(path): + """ + Validate the resource paths according to the docs. + https://setuptools.readthedocs.io/en/latest/pkg_resources.html#basic-resource-access + + >>> warned = getfixture('recwarn') + >>> warnings.simplefilter('always') + >>> vrp = NullProvider._validate_resource_path + >>> vrp('foo/bar.txt') + >>> bool(warned) + False + >>> vrp('../foo/bar.txt') + >>> bool(warned) + True + >>> warned.clear() + >>> vrp('/foo/bar.txt') + >>> bool(warned) + True + >>> vrp('foo/../../bar.txt') + >>> bool(warned) + True + >>> warned.clear() + >>> vrp('foo/f../bar.txt') + >>> bool(warned) + False + + Windows path separators are straight-up disallowed. + >>> vrp(r'\\foo/bar.txt') + Traceback (most recent call last): + ... + ValueError: Use of .. or absolute path in a resource path \ +is not allowed. + + >>> vrp(r'C:\\foo/bar.txt') + Traceback (most recent call last): + ... + ValueError: Use of .. or absolute path in a resource path \ +is not allowed. 
+ + Blank values are allowed + + >>> vrp('') + >>> bool(warned) + False + + Non-string values are not. + + >>> vrp(None) + Traceback (most recent call last): + ... + AttributeError: ... + """ + invalid = ( + os.path.pardir in path.split(posixpath.sep) or + posixpath.isabs(path) or + ntpath.isabs(path) + ) + if not invalid: + return + + msg = "Use of .. or absolute path in a resource path is not allowed." + + # Aggressively disallow Windows absolute paths + if ntpath.isabs(path) and not posixpath.isabs(path): + raise ValueError(msg) + + # for compatibility, warn; in future + # raise ValueError(msg) + warnings.warn( + msg[:-1] + " and will raise exceptions in a future release.", + DeprecationWarning, + stacklevel=4, + ) + def _get(self, path): if hasattr(self.loader, 'get_data'): return self.loader.get_data(path) @@ -1790,6 +1884,9 @@ class FileMetadata(EmptyProvider): def __init__(self, path): self.path = path + def _get_metadata_path(self, name): + return self.path + def has_metadata(self, name): return name == 'PKG-INFO' and os.path.isfile(self.path) @@ -1888,7 +1985,7 @@ def find_eggs_in_zip(importer, path_item, only=False): if only: # don't yield nested distros return - for subitem in metadata.resource_listdir('/'): + for subitem in metadata.resource_listdir(''): if _is_egg_path(subitem): subpath = os.path.join(path_item, subitem) dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath) @@ -2583,10 +2680,14 @@ class Distribution: try: return self._version except AttributeError: - version = _version_from_file(self._get_metadata(self.PKG_INFO)) + version = self._get_version() if version is None: - tmpl = "Missing 'Version:' header and/or %s file" - raise ValueError(tmpl % self.PKG_INFO, self) + path = self._get_metadata_path_for_display(self.PKG_INFO) + msg = ( + "Missing 'Version:' header and/or {} file at path: {}" + ).format(self.PKG_INFO, path) + raise ValueError(msg, self) + return version @property @@ -2644,11 +2745,34 @@ class Distribution: ) 
return deps + def _get_metadata_path_for_display(self, name): + """ + Return the path to the given metadata file, if available. + """ + try: + # We need to access _get_metadata_path() on the provider object + # directly rather than through this class's __getattr__() + # since _get_metadata_path() is marked private. + path = self._provider._get_metadata_path(name) + + # Handle exceptions e.g. in case the distribution's metadata + # provider doesn't support _get_metadata_path(). + except Exception: + return '[could not detect]' + + return path + def _get_metadata(self, name): if self.has_metadata(name): for line in self.get_metadata_lines(name): yield line + def _get_version(self): + lines = self._get_metadata(self.PKG_INFO) + version = _version_from_file(lines) + + return version + def activate(self, path=None, replace=False): """Ensure distribution is importable on `path` (default=sys.path)""" if path is None: @@ -2867,7 +2991,7 @@ class EggInfoDistribution(Distribution): take an extra step and try to get the version number from the metadata file itself instead of the filename. 
""" - md_version = _version_from_file(self._get_metadata(self.PKG_INFO)) + md_version = self._get_version() if md_version: self._version = md_version return self @@ -2985,6 +3109,7 @@ class Requirement(packaging.requirements.Requirement): self.extras = tuple(map(safe_extra, self.extras)) self.hashCmp = ( self.key, + self.url, self.specifier, frozenset(self.extras), str(self.marker) if self.marker else None, diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/progress/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/progress/__init__.py index a41f65dc..e434c257 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/progress/__init__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/progress/__init__.py @@ -12,31 +12,49 @@ # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. -from __future__ import division +from __future__ import division, print_function from collections import deque from datetime import timedelta from math import ceil from sys import stderr -from time import time +try: + from time import monotonic +except ImportError: + from time import time as monotonic -__version__ = '1.4' +__version__ = '1.5' + +HIDE_CURSOR = '\x1b[?25l' +SHOW_CURSOR = '\x1b[?25h' class Infinite(object): file = stderr sma_window = 10 # Simple Moving Average window + check_tty = True + hide_cursor = True - def __init__(self, *args, **kwargs): + def __init__(self, message='', **kwargs): self.index = 0 - self.start_ts = time() + self.start_ts = monotonic() self.avg = 0 + self._avg_update_ts = self.start_ts self._ts = self.start_ts self._xput = deque(maxlen=self.sma_window) for key, val in kwargs.items(): setattr(self, key, val) + self._width = 0 + self.message = message + + if self.file and self.is_tty(): + if self.hide_cursor: + print(HIDE_CURSOR, end='', file=self.file) + 
print(self.message, end='', file=self.file) + self.file.flush() + def __getitem__(self, key): if key.startswith('_'): return None @@ -44,7 +62,7 @@ class Infinite(object): @property def elapsed(self): - return int(time() - self.start_ts) + return int(monotonic() - self.start_ts) @property def elapsed_td(self): @@ -52,8 +70,14 @@ class Infinite(object): def update_avg(self, n, dt): if n > 0: + xput_len = len(self._xput) self._xput.append(dt / n) - self.avg = sum(self._xput) / len(self._xput) + now = monotonic() + # update when we're still filling _xput, then after every second + if (xput_len < self.sma_window or + now - self._avg_update_ts > 1): + self.avg = sum(self._xput) / len(self._xput) + self._avg_update_ts = now def update(self): pass @@ -61,11 +85,34 @@ class Infinite(object): def start(self): pass + def clearln(self): + if self.file and self.is_tty(): + print('\r\x1b[K', end='', file=self.file) + + def write(self, s): + if self.file and self.is_tty(): + line = self.message + s.ljust(self._width) + print('\r' + line, end='', file=self.file) + self._width = max(self._width, len(s)) + self.file.flush() + + def writeln(self, line): + if self.file and self.is_tty(): + self.clearln() + print(line, end='', file=self.file) + self.file.flush() + def finish(self): - pass + if self.file and self.is_tty(): + print(file=self.file) + if self.hide_cursor: + print(SHOW_CURSOR, end='', file=self.file) + + def is_tty(self): + return self.file.isatty() if self.check_tty else True def next(self, n=1): - now = time() + now = monotonic() dt = now - self._ts self.update_avg(n, dt) self._ts = now @@ -73,12 +120,17 @@ class Infinite(object): self.update() def iter(self, it): - try: + with self: for x in it: yield x self.next() - finally: - self.finish() + + def __enter__(self): + self.start() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.finish() class Progress(Infinite): @@ -119,9 +171,7 @@ class Progress(Infinite): except TypeError: pass - try: + with 
self: for x in it: yield x self.next() - finally: - self.finish() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/progress/bar.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/progress/bar.py index 025e61c4..8819efda 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/progress/bar.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/progress/bar.py @@ -19,18 +19,15 @@ from __future__ import unicode_literals import sys from . import Progress -from .helpers import WritelnMixin -class Bar(WritelnMixin, Progress): +class Bar(Progress): width = 32 - message = '' suffix = '%(index)d/%(max)d' bar_prefix = ' |' bar_suffix = '| ' empty_fill = ' ' fill = '#' - hide_cursor = True def update(self): filled_length = int(self.width * self.progress) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/progress/counter.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/progress/counter.py index 6b45a1ec..d955ca47 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/progress/counter.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/progress/counter.py @@ -16,27 +16,20 @@ from __future__ import unicode_literals from . 
import Infinite, Progress -from .helpers import WriteMixin -class Counter(WriteMixin, Infinite): - message = '' - hide_cursor = True - +class Counter(Infinite): def update(self): self.write(str(self.index)) -class Countdown(WriteMixin, Progress): - hide_cursor = True - +class Countdown(Progress): def update(self): self.write(str(self.remaining)) -class Stack(WriteMixin, Progress): +class Stack(Progress): phases = (' ', '▁', '▂', '▃', '▄', '▅', '▆', '▇', '█') - hide_cursor = True def update(self): nphases = len(self.phases) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/progress/helpers.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/progress/helpers.py deleted file mode 100644 index 0cde44ec..00000000 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/progress/helpers.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com> -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -from __future__ import print_function - - -HIDE_CURSOR = '\x1b[?25l' -SHOW_CURSOR = '\x1b[?25h' - - -class WriteMixin(object): - hide_cursor = False - - def __init__(self, message=None, **kwargs): - super(WriteMixin, self).__init__(**kwargs) - self._width = 0 - if message: - self.message = message - - if self.file and self.file.isatty(): - if self.hide_cursor: - print(HIDE_CURSOR, end='', file=self.file) - print(self.message, end='', file=self.file) - self.file.flush() - - def write(self, s): - if self.file and self.file.isatty(): - b = '\b' * self._width - c = s.ljust(self._width) - print(b + c, end='', file=self.file) - self._width = max(self._width, len(s)) - self.file.flush() - - def finish(self): - if self.file and self.file.isatty() and self.hide_cursor: - print(SHOW_CURSOR, end='', file=self.file) - - -class WritelnMixin(object): - hide_cursor = False - - def __init__(self, message=None, **kwargs): - super(WritelnMixin, self).__init__(**kwargs) - if message: - self.message = message - - if self.file and self.file.isatty() and self.hide_cursor: - print(HIDE_CURSOR, end='', file=self.file) - - def clearln(self): - if self.file and self.file.isatty(): - print('\r\x1b[K', end='', file=self.file) - - def writeln(self, line): - if self.file and self.file.isatty(): - self.clearln() - print(line, end='', file=self.file) - self.file.flush() - - def finish(self): - if self.file and self.file.isatty(): - print(file=self.file) - if self.hide_cursor: - print(SHOW_CURSOR, end='', file=self.file) - - -from signal import signal, SIGINT -from sys import exit - - -class SigIntMixin(object): - """Registers a signal handler that calls finish on SIGINT""" - - def __init__(self, *args, **kwargs): - super(SigIntMixin, self).__init__(*args, **kwargs) - signal(SIGINT, self._sigint_handler) - - def _sigint_handler(self, signum, frame): - self.finish() - exit(0) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/progress/spinner.py 
b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/progress/spinner.py index 464c7b27..4e100cab 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/progress/spinner.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/progress/spinner.py @@ -16,11 +16,9 @@ from __future__ import unicode_literals from . import Infinite -from .helpers import WriteMixin -class Spinner(WriteMixin, Infinite): - message = '' +class Spinner(Infinite): phases = ('-', '\\', '|', '/') hide_cursor = True @@ -40,5 +38,6 @@ class MoonSpinner(Spinner): class LineSpinner(Spinner): phases = ['⎺', '⎻', '⎼', '⎽', '⎼', '⎻'] + class PixelSpinner(Spinner): - phases = ['⣾','⣷', '⣯', '⣟', '⡿', '⢿', '⣻', '⣽'] + phases = ['⣾', '⣷', '⣯', '⣟', '⡿', '⢿', '⣻', '⣽'] diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pyparsing.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pyparsing.py index bea4d9c7..7ebc7eb9 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pyparsing.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pyparsing.py @@ -1,4 +1,4 @@ -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # module pyparsing.py # # Copyright (c) 2003-2019 Paul T. McGuire @@ -87,14 +87,16 @@ classes inherit from. 
Use the docstrings for examples of how to: more complex ones - associate names with your parsed results using :class:`ParserElement.setResultsName` + - access the parsed data, which is returned as a :class:`ParseResults` + object - find some helpful expression short-cuts like :class:`delimitedList` and :class:`oneOf` - find more useful common expressions in the :class:`pyparsing_common` namespace class """ -__version__ = "2.3.1" -__versionTime__ = "09 Jan 2019 23:26 UTC" +__version__ = "2.4.7" +__versionTime__ = "30 Mar 2020 00:43 UTC" __author__ = "Paul McGuire <ptmcg@users.sourceforge.net>" import string @@ -109,6 +111,10 @@ import pprint import traceback import types from datetime import datetime +from operator import itemgetter +import itertools +from functools import wraps +from contextlib import contextmanager try: # Python 3 @@ -124,11 +130,11 @@ except ImportError: try: # Python 3 from collections.abc import Iterable - from collections.abc import MutableMapping + from collections.abc import MutableMapping, Mapping except ImportError: # Python 2.7 from collections import Iterable - from collections import MutableMapping + from collections import MutableMapping, Mapping try: from collections import OrderedDict as _OrderedDict @@ -143,29 +149,73 @@ try: except ImportError: class SimpleNamespace: pass - -#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) ) - -__all__ = [ -'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty', -'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal', -'PrecededBy', 'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', -'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException', -'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException', -'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 
'Token', 'TokenConverter', -'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', 'Char', -'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col', -'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString', -'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums', -'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno', -'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral', -'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', -'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', -'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', -'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', -'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass', -'CloseMatch', 'tokenMap', 'pyparsing_common', 'pyparsing_unicode', 'unicode_set', -] +# version compatibility configuration +__compat__ = SimpleNamespace() +__compat__.__doc__ = """ + A cross-version compatibility configuration for pyparsing features that will be + released in a future version. By setting values in this configuration to True, + those features can be enabled in prior versions for compatibility development + and testing. 
+ + - collect_all_And_tokens - flag to enable fix for Issue #63 that fixes erroneous grouping + of results names when an And expression is nested within an Or or MatchFirst; set to + True to enable bugfix released in pyparsing 2.3.0, or False to preserve + pre-2.3.0 handling of named results +""" +__compat__.collect_all_And_tokens = True + +__diag__ = SimpleNamespace() +__diag__.__doc__ = """ +Diagnostic configuration (all default to False) + - warn_multiple_tokens_in_named_alternation - flag to enable warnings when a results + name is defined on a MatchFirst or Or expression with one or more And subexpressions + (only warns if __compat__.collect_all_And_tokens is False) + - warn_ungrouped_named_tokens_in_collection - flag to enable warnings when a results + name is defined on a containing expression with ungrouped subexpressions that also + have results names + - warn_name_set_on_empty_Forward - flag to enable warnings whan a Forward is defined + with a results name, but has no contents defined + - warn_on_multiple_string_args_to_oneof - flag to enable warnings whan oneOf is + incorrectly called with multiple str arguments + - enable_debug_on_named_expressions - flag to auto-enable debug on all subsequent + calls to ParserElement.setName() +""" +__diag__.warn_multiple_tokens_in_named_alternation = False +__diag__.warn_ungrouped_named_tokens_in_collection = False +__diag__.warn_name_set_on_empty_Forward = False +__diag__.warn_on_multiple_string_args_to_oneof = False +__diag__.enable_debug_on_named_expressions = False +__diag__._all_names = [nm for nm in vars(__diag__) if nm.startswith("enable_") or nm.startswith("warn_")] + +def _enable_all_warnings(): + __diag__.warn_multiple_tokens_in_named_alternation = True + __diag__.warn_ungrouped_named_tokens_in_collection = True + __diag__.warn_name_set_on_empty_Forward = True + __diag__.warn_on_multiple_string_args_to_oneof = True +__diag__.enable_all_warnings = _enable_all_warnings + + +__all__ = ['__version__', 
'__versionTime__', '__author__', '__compat__', '__diag__', + 'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty', + 'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal', + 'PrecededBy', 'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', + 'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException', + 'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException', + 'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', + 'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', 'Char', + 'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col', + 'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString', + 'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums', + 'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno', + 'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral', + 'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', + 'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', + 'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', + 'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', + 'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation', 'locatedExpr', 'withClass', + 'CloseMatch', 'tokenMap', 'pyparsing_common', 'pyparsing_unicode', 'unicode_set', + 'conditionAsParseAction', 're', + ] system_version = tuple(sys.version_info)[:3] PY_3 = system_version[0] == 3 @@ -190,7 +240,7 @@ else: < returns the unicode object | encodes it with the default encoding | ... >. 
""" - if isinstance(obj,unicode): + if isinstance(obj, unicode): return obj try: @@ -208,9 +258,10 @@ else: # build list of single arg builtins, tolerant of Python version, that can be used as parse actions singleArgBuiltins = [] import __builtin__ + for fname in "sum len sorted reversed list tuple set any all min max".split(): try: - singleArgBuiltins.append(getattr(__builtin__,fname)) + singleArgBuiltins.append(getattr(__builtin__, fname)) except AttributeError: continue @@ -221,23 +272,36 @@ def _xml_escape(data): # ampersand must be replaced first from_symbols = '&><"\'' - to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split()) - for from_,to_ in zip(from_symbols, to_symbols): + to_symbols = ('&' + s + ';' for s in "amp gt lt quot apos".split()) + for from_, to_ in zip(from_symbols, to_symbols): data = data.replace(from_, to_) return data -alphas = string.ascii_uppercase + string.ascii_lowercase -nums = "0123456789" -hexnums = nums + "ABCDEFabcdef" -alphanums = alphas + nums -_bslash = chr(92) +alphas = string.ascii_uppercase + string.ascii_lowercase +nums = "0123456789" +hexnums = nums + "ABCDEFabcdef" +alphanums = alphas + nums +_bslash = chr(92) printables = "".join(c for c in string.printable if c not in string.whitespace) + +def conditionAsParseAction(fn, message=None, fatal=False): + msg = message if message is not None else "failed user-defined condition" + exc_type = ParseFatalException if fatal else ParseException + fn = _trim_arity(fn) + + @wraps(fn) + def pa(s, l, t): + if not bool(fn(s, l, t)): + raise exc_type(s, l, msg) + + return pa + class ParseBaseException(Exception): """base exception class for all parsing runtime exceptions""" # Performance tuning: we construct a *lot* of these, so keep this # constructor as small and fast as possible - def __init__( self, pstr, loc=0, msg=None, elem=None ): + def __init__(self, pstr, loc=0, msg=None, elem=None): self.loc = loc if msg is None: self.msg = pstr @@ -256,27 +320,34 @@ class 
ParseBaseException(Exception): """ return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement) - def __getattr__( self, aname ): + def __getattr__(self, aname): """supported attributes by name are: - lineno - returns the line number of the exception text - col - returns the column number of the exception text - line - returns the line containing the exception text """ - if( aname == "lineno" ): - return lineno( self.loc, self.pstr ) - elif( aname in ("col", "column") ): - return col( self.loc, self.pstr ) - elif( aname == "line" ): - return line( self.loc, self.pstr ) + if aname == "lineno": + return lineno(self.loc, self.pstr) + elif aname in ("col", "column"): + return col(self.loc, self.pstr) + elif aname == "line": + return line(self.loc, self.pstr) else: raise AttributeError(aname) - def __str__( self ): - return "%s (at char %d), (line:%d, col:%d)" % \ - ( self.msg, self.loc, self.lineno, self.column ) - def __repr__( self ): + def __str__(self): + if self.pstr: + if self.loc >= len(self.pstr): + foundstr = ', found end of text' + else: + foundstr = (', found %r' % self.pstr[self.loc:self.loc + 1]).replace(r'\\', '\\') + else: + foundstr = '' + return ("%s%s (at char %d), (line:%d, col:%d)" % + (self.msg, foundstr, self.loc, self.lineno, self.column)) + def __repr__(self): return _ustr(self) - def markInputline( self, markerString = ">!<" ): + def markInputline(self, markerString=">!<"): """Extracts the exception line from the input string, and marks the location of the exception with a special symbol. 
""" @@ -350,7 +421,7 @@ class ParseException(ParseBaseException): callers = inspect.getinnerframes(exc.__traceback__, context=depth) seen = set() for i, ff in enumerate(callers[-depth:]): - frm = ff.frame + frm = ff[0] f_self = frm.f_locals.get('self', None) if isinstance(f_self, ParserElement): @@ -412,21 +483,21 @@ class RecursiveGrammarException(Exception): """exception thrown by :class:`ParserElement.validate` if the grammar could be improperly recursive """ - def __init__( self, parseElementList ): + def __init__(self, parseElementList): self.parseElementTrace = parseElementList - def __str__( self ): + def __str__(self): return "RecursiveGrammarException: %s" % self.parseElementTrace class _ParseResultsWithOffset(object): - def __init__(self,p1,p2): - self.tup = (p1,p2) - def __getitem__(self,i): + def __init__(self, p1, p2): + self.tup = (p1, p2) + def __getitem__(self, i): return self.tup[i] def __repr__(self): return repr(self.tup[0]) - def setOffset(self,i): - self.tup = (self.tup[0],i) + def setOffset(self, i): + self.tup = (self.tup[0], i) class ParseResults(object): """Structured parse results, to provide multiple means of access to @@ -471,7 +542,7 @@ class ParseResults(object): - month: 12 - year: 1999 """ - def __new__(cls, toklist=None, name=None, asList=True, modal=True ): + def __new__(cls, toklist=None, name=None, asList=True, modal=True): if isinstance(toklist, cls): return toklist retobj = object.__new__(cls) @@ -480,7 +551,7 @@ class ParseResults(object): # Performance tuning: we construct a *lot* of these, so keep this # constructor as small and fast as possible - def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ): + def __init__(self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance): if self.__doinit: self.__doinit = False self.__name = None @@ -501,85 +572,93 @@ class ParseResults(object): if name is not None and name: if not modal: self.__accumNames[name] = 0 - if 
isinstance(name,int): - name = _ustr(name) # will always return a str, but use _ustr for consistency + if isinstance(name, int): + name = _ustr(name) # will always return a str, but use _ustr for consistency self.__name = name - if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])): - if isinstance(toklist,basestring): - toklist = [ toklist ] + if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None, '', [])): + if isinstance(toklist, basestring): + toklist = [toklist] if asList: - if isinstance(toklist,ParseResults): + if isinstance(toklist, ParseResults): self[name] = _ParseResultsWithOffset(ParseResults(toklist.__toklist), 0) else: - self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0) + self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]), 0) self[name].__name = name else: try: self[name] = toklist[0] - except (KeyError,TypeError,IndexError): + except (KeyError, TypeError, IndexError): self[name] = toklist - def __getitem__( self, i ): - if isinstance( i, (int,slice) ): + def __getitem__(self, i): + if isinstance(i, (int, slice)): return self.__toklist[i] else: if i not in self.__accumNames: return self.__tokdict[i][-1][0] else: - return ParseResults([ v[0] for v in self.__tokdict[i] ]) + return ParseResults([v[0] for v in self.__tokdict[i]]) - def __setitem__( self, k, v, isinstance=isinstance ): - if isinstance(v,_ParseResultsWithOffset): - self.__tokdict[k] = self.__tokdict.get(k,list()) + [v] + def __setitem__(self, k, v, isinstance=isinstance): + if isinstance(v, _ParseResultsWithOffset): + self.__tokdict[k] = self.__tokdict.get(k, list()) + [v] sub = v[0] - elif isinstance(k,(int,slice)): + elif isinstance(k, (int, slice)): self.__toklist[k] = v sub = v else: - self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)] + self.__tokdict[k] = self.__tokdict.get(k, list()) + [_ParseResultsWithOffset(v, 0)] sub = v - if isinstance(sub,ParseResults): + if 
isinstance(sub, ParseResults): sub.__parent = wkref(self) - def __delitem__( self, i ): - if isinstance(i,(int,slice)): - mylen = len( self.__toklist ) + def __delitem__(self, i): + if isinstance(i, (int, slice)): + mylen = len(self.__toklist) del self.__toklist[i] # convert int to slice if isinstance(i, int): if i < 0: i += mylen - i = slice(i, i+1) + i = slice(i, i + 1) # get removed indices removed = list(range(*i.indices(mylen))) removed.reverse() # fixup indices in token dictionary - for name,occurrences in self.__tokdict.items(): + for name, occurrences in self.__tokdict.items(): for j in removed: for k, (value, position) in enumerate(occurrences): occurrences[k] = _ParseResultsWithOffset(value, position - (position > j)) else: del self.__tokdict[i] - def __contains__( self, k ): + def __contains__(self, k): return k in self.__tokdict - def __len__( self ): return len( self.__toklist ) - def __bool__(self): return ( not not self.__toklist ) + def __len__(self): + return len(self.__toklist) + + def __bool__(self): + return (not not self.__toklist) __nonzero__ = __bool__ - def __iter__( self ): return iter( self.__toklist ) - def __reversed__( self ): return iter( self.__toklist[::-1] ) - def _iterkeys( self ): + + def __iter__(self): + return iter(self.__toklist) + + def __reversed__(self): + return iter(self.__toklist[::-1]) + + def _iterkeys(self): if hasattr(self.__tokdict, "iterkeys"): return self.__tokdict.iterkeys() else: return iter(self.__tokdict) - def _itervalues( self ): + def _itervalues(self): return (self[k] for k in self._iterkeys()) - def _iteritems( self ): + def _iteritems(self): return ((k, self[k]) for k in self._iterkeys()) if PY_3: @@ -602,24 +681,24 @@ class ParseResults(object): iteritems = _iteritems """Returns an iterator of all named result key-value tuples (Python 2.x only).""" - def keys( self ): + def keys(self): """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x).""" return 
list(self.iterkeys()) - def values( self ): + def values(self): """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x).""" return list(self.itervalues()) - def items( self ): + def items(self): """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x).""" return list(self.iteritems()) - def haskeys( self ): + def haskeys(self): """Since keys() returns an iterator, this method is helpful in bypassing code that looks for the existence of any defined results names.""" return bool(self.__tokdict) - def pop( self, *args, **kwargs): + def pop(self, *args, **kwargs): """ Removes and returns item at specified index (default= ``last``). Supports both ``list`` and ``dict`` semantics for ``pop()``. If @@ -658,14 +737,14 @@ class ParseResults(object): """ if not args: args = [-1] - for k,v in kwargs.items(): + for k, v in kwargs.items(): if k == 'default': args = (args[0], v) else: raise TypeError("pop() got an unexpected keyword argument '%s'" % k) - if (isinstance(args[0], int) or - len(args) == 1 or - args[0] in self): + if (isinstance(args[0], int) + or len(args) == 1 + or args[0] in self): index = args[0] ret = self[index] del self[index] @@ -697,7 +776,7 @@ class ParseResults(object): else: return defaultValue - def insert( self, index, insStr ): + def insert(self, index, insStr): """ Inserts new element at location index in the list of parsed tokens. @@ -714,11 +793,11 @@ class ParseResults(object): """ self.__toklist.insert(index, insStr) # fixup indices in token dictionary - for name,occurrences in self.__tokdict.items(): + for name, occurrences in self.__tokdict.items(): for k, (value, position) in enumerate(occurrences): occurrences[k] = _ParseResultsWithOffset(value, position + (position > index)) - def append( self, item ): + def append(self, item): """ Add single element to end of ParseResults list of elements. 
@@ -733,7 +812,7 @@ class ParseResults(object): """ self.__toklist.append(item) - def extend( self, itemseq ): + def extend(self, itemseq): """ Add sequence of elements to end of ParseResults list of elements. @@ -748,78 +827,70 @@ class ParseResults(object): print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl' """ if isinstance(itemseq, ParseResults): - self += itemseq + self.__iadd__(itemseq) else: self.__toklist.extend(itemseq) - def clear( self ): + def clear(self): """ Clear all elements and results names. """ del self.__toklist[:] self.__tokdict.clear() - def __getattr__( self, name ): + def __getattr__(self, name): try: return self[name] except KeyError: return "" - if name in self.__tokdict: - if name not in self.__accumNames: - return self.__tokdict[name][-1][0] - else: - return ParseResults([ v[0] for v in self.__tokdict[name] ]) - else: - return "" - - def __add__( self, other ): + def __add__(self, other): ret = self.copy() ret += other return ret - def __iadd__( self, other ): + def __iadd__(self, other): if other.__tokdict: offset = len(self.__toklist) - addoffset = lambda a: offset if a<0 else a+offset + addoffset = lambda a: offset if a < 0 else a + offset otheritems = other.__tokdict.items() - otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) ) - for (k,vlist) in otheritems for v in vlist] - for k,v in otherdictitems: + otherdictitems = [(k, _ParseResultsWithOffset(v[0], addoffset(v[1]))) + for k, vlist in otheritems for v in vlist] + for k, v in otherdictitems: self[k] = v - if isinstance(v[0],ParseResults): + if isinstance(v[0], ParseResults): v[0].__parent = wkref(self) self.__toklist += other.__toklist - self.__accumNames.update( other.__accumNames ) + self.__accumNames.update(other.__accumNames) return self def __radd__(self, other): - if isinstance(other,int) and other == 0: + if isinstance(other, int) and other == 0: # useful for merging many ParseResults using 
sum() builtin return self.copy() else: # this may raise a TypeError - so be it return other + self - def __repr__( self ): - return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) ) + def __repr__(self): + return "(%s, %s)" % (repr(self.__toklist), repr(self.__tokdict)) - def __str__( self ): + def __str__(self): return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']' - def _asStringList( self, sep='' ): + def _asStringList(self, sep=''): out = [] for item in self.__toklist: if out and sep: out.append(sep) - if isinstance( item, ParseResults ): + if isinstance(item, ParseResults): out += item._asStringList() else: - out.append( _ustr(item) ) + out.append(_ustr(item)) return out - def asList( self ): + def asList(self): """ Returns the parse results as a nested list of matching tokens, all converted to strings. @@ -834,9 +905,9 @@ class ParseResults(object): result_list = result.asList() print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj'] """ - return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist] + return [res.asList() if isinstance(res, ParseResults) else res for res in self.__toklist] - def asDict( self ): + def asDict(self): """ Returns the named parse results as a nested dictionary. @@ -870,27 +941,27 @@ class ParseResults(object): else: return obj - return dict((k,toItem(v)) for k,v in item_fn()) + return dict((k, toItem(v)) for k, v in item_fn()) - def copy( self ): + def copy(self): """ Returns a new copy of a :class:`ParseResults` object. 
""" - ret = ParseResults( self.__toklist ) + ret = ParseResults(self.__toklist) ret.__tokdict = dict(self.__tokdict.items()) ret.__parent = self.__parent - ret.__accumNames.update( self.__accumNames ) + ret.__accumNames.update(self.__accumNames) ret.__name = self.__name return ret - def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ): + def asXML(self, doctag=None, namedItemsOnly=False, indent="", formatted=True): """ (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names. """ nl = "\n" out = [] - namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items() - for v in vlist) + namedItems = dict((v[1], k) for (k, vlist) in self.__tokdict.items() + for v in vlist) nextLevelIndent = indent + " " # collapse out indents if formatting is not desired @@ -912,20 +983,20 @@ class ParseResults(object): else: selfTag = "ITEM" - out += [ nl, indent, "<", selfTag, ">" ] + out += [nl, indent, "<", selfTag, ">"] - for i,res in enumerate(self.__toklist): - if isinstance(res,ParseResults): + for i, res in enumerate(self.__toklist): + if isinstance(res, ParseResults): if i in namedItems: - out += [ res.asXML(namedItems[i], - namedItemsOnly and doctag is None, - nextLevelIndent, - formatted)] + out += [res.asXML(namedItems[i], + namedItemsOnly and doctag is None, + nextLevelIndent, + formatted)] else: - out += [ res.asXML(None, - namedItemsOnly and doctag is None, - nextLevelIndent, - formatted)] + out += [res.asXML(None, + namedItemsOnly and doctag is None, + nextLevelIndent, + formatted)] else: # individual token, see if there is a name for it resTag = None @@ -937,16 +1008,16 @@ class ParseResults(object): else: resTag = "ITEM" xmlBodyText = _xml_escape(_ustr(res)) - out += [ nl, nextLevelIndent, "<", resTag, ">", - xmlBodyText, - "</", resTag, ">" ] + out += [nl, nextLevelIndent, "<", resTag, ">", + xmlBodyText, + "</", resTag, ">"] - out += [ nl, indent, "</", selfTag, ">" ] + out += 
[nl, indent, "</", selfTag, ">"] return "".join(out) - def __lookup(self,sub): - for k,vlist in self.__tokdict.items(): - for v,loc in vlist: + def __lookup(self, sub): + for k, vlist in self.__tokdict.items(): + for v, loc in vlist: if sub is v: return k return None @@ -984,14 +1055,14 @@ class ParseResults(object): return par.__lookup(self) else: return None - elif (len(self) == 1 and - len(self.__tokdict) == 1 and - next(iter(self.__tokdict.values()))[0][1] in (0,-1)): + elif (len(self) == 1 + and len(self.__tokdict) == 1 + and next(iter(self.__tokdict.values()))[0][1] in (0, -1)): return next(iter(self.__tokdict.keys())) else: return None - def dump(self, indent='', depth=0, full=True): + def dump(self, indent='', full=True, include_list=True, _depth=0): """ Diagnostic method for listing out the contents of a :class:`ParseResults`. Accepts an optional ``indent`` argument so @@ -1014,28 +1085,45 @@ class ParseResults(object): """ out = [] NL = '\n' - out.append( indent+_ustr(self.asList()) ) + if include_list: + out.append(indent + _ustr(self.asList())) + else: + out.append('') + if full: if self.haskeys(): - items = sorted((str(k), v) for k,v in self.items()) - for k,v in items: + items = sorted((str(k), v) for k, v in self.items()) + for k, v in items: if out: out.append(NL) - out.append( "%s%s- %s: " % (indent,(' '*depth), k) ) - if isinstance(v,ParseResults): + out.append("%s%s- %s: " % (indent, (' ' * _depth), k)) + if isinstance(v, ParseResults): if v: - out.append( v.dump(indent,depth+1) ) + out.append(v.dump(indent=indent, full=full, include_list=include_list, _depth=_depth + 1)) else: out.append(_ustr(v)) else: out.append(repr(v)) - elif any(isinstance(vv,ParseResults) for vv in self): + elif any(isinstance(vv, ParseResults) for vv in self): v = self - for i,vv in enumerate(v): - if isinstance(vv,ParseResults): - out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) )) + for i, vv in enumerate(v): + if 
isinstance(vv, ParseResults): + out.append("\n%s%s[%d]:\n%s%s%s" % (indent, + (' ' * (_depth)), + i, + indent, + (' ' * (_depth + 1)), + vv.dump(indent=indent, + full=full, + include_list=include_list, + _depth=_depth + 1))) else: - out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv))) + out.append("\n%s%s[%d]:\n%s%s%s" % (indent, + (' ' * (_depth)), + i, + indent, + (' ' * (_depth + 1)), + _ustr(vv))) return "".join(out) @@ -1068,18 +1156,15 @@ class ParseResults(object): # add support for pickle protocol def __getstate__(self): - return ( self.__toklist, - ( self.__tokdict.copy(), - self.__parent is not None and self.__parent() or None, - self.__accumNames, - self.__name ) ) + return (self.__toklist, + (self.__tokdict.copy(), + self.__parent is not None and self.__parent() or None, + self.__accumNames, + self.__name)) - def __setstate__(self,state): + def __setstate__(self, state): self.__toklist = state[0] - (self.__tokdict, - par, - inAccumNames, - self.__name) = state[1] + self.__tokdict, par, inAccumNames, self.__name = state[1] self.__accumNames = {} self.__accumNames.update(inAccumNames) if par is not None: @@ -1091,11 +1176,39 @@ class ParseResults(object): return self.__toklist, self.__name, self.__asList, self.__modal def __dir__(self): - return (dir(type(self)) + list(self.keys())) + return dir(type(self)) + list(self.keys()) + + @classmethod + def from_dict(cls, other, name=None): + """ + Helper classmethod to construct a ParseResults from a dict, preserving the + name-value relations as results names. 
If an optional 'name' argument is + given, a nested ParseResults will be returned + """ + def is_iterable(obj): + try: + iter(obj) + except Exception: + return False + else: + if PY_3: + return not isinstance(obj, (str, bytes)) + else: + return not isinstance(obj, basestring) + + ret = cls([]) + for k, v in other.items(): + if isinstance(v, Mapping): + ret += cls.from_dict(v, name=k) + else: + ret += cls([v], name=k, asList=is_iterable(v)) + if name is not None: + ret = cls([ret], name=name) + return ret MutableMapping.register(ParseResults) -def col (loc,strg): +def col (loc, strg): """Returns current column within a string, counting newlines as line separators. The first column is number 1. @@ -1107,9 +1220,9 @@ def col (loc,strg): location, and line and column positions within the parsed string. """ s = strg - return 1 if 0<loc<len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc) + return 1 if 0 < loc < len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc) -def lineno(loc,strg): +def lineno(loc, strg): """Returns current line number within a string, counting newlines as line separators. The first line is number 1. @@ -1119,26 +1232,26 @@ def lineno(loc,strg): suggested methods to maintain a consistent view of the parsed string, the parse location, and line and column positions within the parsed string. """ - return strg.count("\n",0,loc) + 1 + return strg.count("\n", 0, loc) + 1 -def line( loc, strg ): +def line(loc, strg): """Returns the line of text containing loc within a string, counting newlines as line separators. 
""" lastCR = strg.rfind("\n", 0, loc) nextCR = strg.find("\n", loc) if nextCR >= 0: - return strg[lastCR+1:nextCR] + return strg[lastCR + 1:nextCR] else: - return strg[lastCR+1:] + return strg[lastCR + 1:] -def _defaultStartDebugAction( instring, loc, expr ): - print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))) +def _defaultStartDebugAction(instring, loc, expr): + print(("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % (lineno(loc, instring), col(loc, instring)))) -def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ): - print ("Matched " + _ustr(expr) + " -> " + str(toks.asList())) +def _defaultSuccessDebugAction(instring, startloc, endloc, expr, toks): + print("Matched " + _ustr(expr) + " -> " + str(toks.asList())) -def _defaultExceptionDebugAction( instring, loc, expr, exc ): - print ("Exception raised:" + _ustr(exc)) +def _defaultExceptionDebugAction(instring, loc, expr, exc): + print("Exception raised:" + _ustr(exc)) def nullDebugAction(*args): """'Do-nothing' debug action, to suppress debugging output during parsing.""" @@ -1169,16 +1282,16 @@ def nullDebugAction(*args): 'decorator to trim function calls to match the arity of the target' def _trim_arity(func, maxargs=2): if func in singleArgBuiltins: - return lambda s,l,t: func(t) + return lambda s, l, t: func(t) limit = [0] foundArity = [False] # traceback return data structure changed in Py3.5 - normalize back to plain tuples - if system_version[:2] >= (3,5): + if system_version[:2] >= (3, 5): def extract_stack(limit=0): # special handling for Python 3.5.0 - extra deep call stack by 1 - offset = -3 if system_version == (3,5,0) else -2 - frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset] + offset = -3 if system_version == (3, 5, 0) else -2 + frame_summary = traceback.extract_stack(limit=-offset + limit - 1)[offset] return [frame_summary[:2]] def extract_tb(tb, limit=0): frames = 
traceback.extract_tb(tb, limit=limit) @@ -1195,7 +1308,7 @@ def _trim_arity(func, maxargs=2): # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!! this_line = extract_stack(limit=2)[-1] - pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF) + pa_call_line_synth = (this_line[0], this_line[1] + LINE_DIFF) def wrapper(*args): while 1: @@ -1213,7 +1326,10 @@ def _trim_arity(func, maxargs=2): if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth: raise finally: - del tb + try: + del tb + except NameError: + pass if limit[0] <= maxargs: limit[0] += 1 @@ -1231,13 +1347,14 @@ def _trim_arity(func, maxargs=2): return wrapper + class ParserElement(object): """Abstract base level parser element class.""" DEFAULT_WHITE_CHARS = " \n\t\r" verbose_stacktrace = False @staticmethod - def setDefaultWhitespaceChars( chars ): + def setDefaultWhitespaceChars(chars): r""" Overrides the default whitespace chars @@ -1274,10 +1391,16 @@ class ParserElement(object): """ ParserElement._literalStringClass = cls - def __init__( self, savelist=False ): + @classmethod + def _trim_traceback(cls, tb): + while tb.tb_next: + tb = tb.tb_next + return tb + + def __init__(self, savelist=False): self.parseAction = list() self.failAction = None - #~ self.name = "<unknown>" # don't define self.name, let subclasses try/except upcall + # ~ self.name = "<unknown>" # don't define self.name, let subclasses try/except upcall self.strRepr = None self.resultsName = None self.saveAsList = savelist @@ -1292,12 +1415,12 @@ class ParserElement(object): self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index self.errmsg = "" self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all) - self.debugActions = ( None, None, None ) #custom debug actions + self.debugActions = (None, None, None) # custom debug actions 
self.re = None self.callPreparse = True # used to avoid redundant calls to preParse self.callDuringTry = False - def copy( self ): + def copy(self): """ Make a copy of this :class:`ParserElement`. Useful for defining different parse actions for the same parsing pattern, using copies of @@ -1306,8 +1429,8 @@ class ParserElement(object): Example:: integer = Word(nums).setParseAction(lambda toks: int(toks[0])) - integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K") - integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") + integerK = integer.copy().addParseAction(lambda toks: toks[0] * 1024) + Suppress("K") + integerM = integer.copy().addParseAction(lambda toks: toks[0] * 1024 * 1024) + Suppress("M") print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M")) @@ -1317,16 +1440,16 @@ class ParserElement(object): Equivalent form of ``expr.copy()`` is just ``expr()``:: - integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") + integerM = integer().addParseAction(lambda toks: toks[0] * 1024 * 1024) + Suppress("M") """ - cpy = copy.copy( self ) + cpy = copy.copy(self) cpy.parseAction = self.parseAction[:] cpy.ignoreExprs = self.ignoreExprs[:] if self.copyDefaultWhiteChars: cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS return cpy - def setName( self, name ): + def setName(self, name): """ Define name for this expression, makes debugging and exception messages clearer. @@ -1337,11 +1460,11 @@ class ParserElement(object): """ self.name = name self.errmsg = "Expected " + self.name - if hasattr(self,"exception"): - self.exception.msg = self.errmsg + if __diag__.enable_debug_on_named_expressions: + self.setDebug() return self - def setResultsName( self, name, listAllMatches=False ): + def setResultsName(self, name, listAllMatches=False): """ Define name for referencing matching tokens as a nested attribute of the returned parse results. 
@@ -1362,15 +1485,18 @@ class ParserElement(object): # equivalent form: date_str = integer("year") + '/' + integer("month") + '/' + integer("day") """ + return self._setResultsName(name, listAllMatches) + + def _setResultsName(self, name, listAllMatches=False): newself = self.copy() if name.endswith("*"): name = name[:-1] - listAllMatches=True + listAllMatches = True newself.resultsName = name newself.modalResults = not listAllMatches return newself - def setBreak(self,breakFlag = True): + def setBreak(self, breakFlag=True): """Method to invoke the Python pdb debugger when this element is about to be parsed. Set ``breakFlag`` to True to enable, False to disable. @@ -1379,20 +1505,21 @@ class ParserElement(object): _parseMethod = self._parse def breaker(instring, loc, doActions=True, callPreParse=True): import pdb + # this call to pdb.set_trace() is intentional, not a checkin error pdb.set_trace() - return _parseMethod( instring, loc, doActions, callPreParse ) + return _parseMethod(instring, loc, doActions, callPreParse) breaker._originalParseMethod = _parseMethod self._parse = breaker else: - if hasattr(self._parse,"_originalParseMethod"): + if hasattr(self._parse, "_originalParseMethod"): self._parse = self._parse._originalParseMethod return self - def setParseAction( self, *fns, **kwargs ): + def setParseAction(self, *fns, **kwargs): """ Define one or more actions to perform when successfully matching parse element definition. - Parse action fn is a callable method with 0-3 arguments, called as ``fn(s,loc,toks)`` , - ``fn(loc,toks)`` , ``fn(toks)`` , or just ``fn()`` , where: + Parse action fn is a callable method with 0-3 arguments, called as ``fn(s, loc, toks)`` , + ``fn(loc, toks)`` , ``fn(toks)`` , or just ``fn()`` , where: - s = the original string being parsed (see note below) - loc = the location of the matching substring @@ -1402,8 +1529,11 @@ class ParserElement(object): value from fn, and the modified list of tokens will replace the original. 
Otherwise, fn does not need to return any value. + If None is passed as the parse action, all previously added parse actions for this + expression are cleared. + Optional keyword arguments: - - callDuringTry = (default= ``False`` ) indicate if parse action should be run during lookaheads and alternate testing + - callDuringTry = (default= ``False``) indicate if parse action should be run during lookaheads and alternate testing Note: the default parsing behavior is to expand tabs in the input string before starting the parsing process. See :class:`parseString for more @@ -1425,11 +1555,16 @@ class ParserElement(object): # note that integer fields are now ints, not strings date_str.parseString("1999/12/31") # -> [1999, '/', 12, '/', 31] """ - self.parseAction = list(map(_trim_arity, list(fns))) - self.callDuringTry = kwargs.get("callDuringTry", False) + if list(fns) == [None,]: + self.parseAction = [] + else: + if not all(callable(fn) for fn in fns): + raise TypeError("parse actions must be callable") + self.parseAction = list(map(_trim_arity, list(fns))) + self.callDuringTry = kwargs.get("callDuringTry", False) return self - def addParseAction( self, *fns, **kwargs ): + def addParseAction(self, *fns, **kwargs): """ Add one or more parse actions to expression's list of parse actions. See :class:`setParseAction`. 
@@ -1457,21 +1592,17 @@ class ParserElement(object): result = date_str.parseString("1999/12/31") # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1) """ - msg = kwargs.get("message", "failed user-defined condition") - exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException for fn in fns: - fn = _trim_arity(fn) - def pa(s,l,t): - if not bool(fn(s,l,t)): - raise exc_type(s,l,msg) - self.parseAction.append(pa) + self.parseAction.append(conditionAsParseAction(fn, message=kwargs.get('message'), + fatal=kwargs.get('fatal', False))) + self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) return self - def setFailAction( self, fn ): + def setFailAction(self, fn): """Define action to perform if parsing fails at this expression. Fail acton fn is a callable function that takes the arguments - ``fn(s,loc,expr,err)`` where: + ``fn(s, loc, expr, err)`` where: - s = string being parsed - loc = location where expression match was attempted and failed - expr = the parse expression that failed @@ -1481,22 +1612,22 @@ class ParserElement(object): self.failAction = fn return self - def _skipIgnorables( self, instring, loc ): + def _skipIgnorables(self, instring, loc): exprsFound = True while exprsFound: exprsFound = False for e in self.ignoreExprs: try: while 1: - loc,dummy = e._parse( instring, loc ) + loc, dummy = e._parse(instring, loc) exprsFound = True except ParseException: pass return loc - def preParse( self, instring, loc ): + def preParse(self, instring, loc): if self.ignoreExprs: - loc = self._skipIgnorables( instring, loc ) + loc = self._skipIgnorables(instring, loc) if self.skipWhitespace: wt = self.whiteChars @@ -1506,101 +1637,105 @@ class ParserElement(object): return loc - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): return loc, [] - def postParse( self, instring, loc, tokenlist ): + def postParse(self, instring, loc, tokenlist): 
return tokenlist - #~ @profile - def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ): - debugging = ( self.debug ) #and doActions ) + # ~ @profile + def _parseNoCache(self, instring, loc, doActions=True, callPreParse=True): + TRY, MATCH, FAIL = 0, 1, 2 + debugging = (self.debug) # and doActions) if debugging or self.failAction: - #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )) - if (self.debugActions[0] ): - self.debugActions[0]( instring, loc, self ) - if callPreParse and self.callPreparse: - preloc = self.preParse( instring, loc ) - else: - preloc = loc - tokensStart = preloc + # ~ print ("Match", self, "at loc", loc, "(%d, %d)" % (lineno(loc, instring), col(loc, instring))) + if self.debugActions[TRY]: + self.debugActions[TRY](instring, loc, self) try: - try: - loc,tokens = self.parseImpl( instring, preloc, doActions ) - except IndexError: - raise ParseException( instring, len(instring), self.errmsg, self ) - except ParseBaseException as err: - #~ print ("Exception raised:", err) - if self.debugActions[2]: - self.debugActions[2]( instring, tokensStart, self, err ) + if callPreParse and self.callPreparse: + preloc = self.preParse(instring, loc) + else: + preloc = loc + tokensStart = preloc + if self.mayIndexError or preloc >= len(instring): + try: + loc, tokens = self.parseImpl(instring, preloc, doActions) + except IndexError: + raise ParseException(instring, len(instring), self.errmsg, self) + else: + loc, tokens = self.parseImpl(instring, preloc, doActions) + except Exception as err: + # ~ print ("Exception raised:", err) + if self.debugActions[FAIL]: + self.debugActions[FAIL](instring, tokensStart, self, err) if self.failAction: - self.failAction( instring, tokensStart, self, err ) + self.failAction(instring, tokensStart, self, err) raise else: if callPreParse and self.callPreparse: - preloc = self.preParse( instring, loc ) + preloc = self.preParse(instring, loc) else: preloc = loc 
tokensStart = preloc if self.mayIndexError or preloc >= len(instring): try: - loc,tokens = self.parseImpl( instring, preloc, doActions ) + loc, tokens = self.parseImpl(instring, preloc, doActions) except IndexError: - raise ParseException( instring, len(instring), self.errmsg, self ) + raise ParseException(instring, len(instring), self.errmsg, self) else: - loc,tokens = self.parseImpl( instring, preloc, doActions ) + loc, tokens = self.parseImpl(instring, preloc, doActions) - tokens = self.postParse( instring, loc, tokens ) + tokens = self.postParse(instring, loc, tokens) - retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults ) + retTokens = ParseResults(tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults) if self.parseAction and (doActions or self.callDuringTry): if debugging: try: for fn in self.parseAction: try: - tokens = fn( instring, tokensStart, retTokens ) + tokens = fn(instring, tokensStart, retTokens) except IndexError as parse_action_exc: exc = ParseException("exception raised in parse action") exc.__cause__ = parse_action_exc raise exc if tokens is not None and tokens is not retTokens: - retTokens = ParseResults( tokens, + retTokens = ParseResults(tokens, self.resultsName, - asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), - modal=self.modalResults ) - except ParseBaseException as err: - #~ print "Exception raised in user parse action:", err - if (self.debugActions[2] ): - self.debugActions[2]( instring, tokensStart, self, err ) + asList=self.saveAsList and isinstance(tokens, (ParseResults, list)), + modal=self.modalResults) + except Exception as err: + # ~ print "Exception raised in user parse action:", err + if self.debugActions[FAIL]: + self.debugActions[FAIL](instring, tokensStart, self, err) raise else: for fn in self.parseAction: try: - tokens = fn( instring, tokensStart, retTokens ) + tokens = fn(instring, tokensStart, retTokens) except IndexError as 
parse_action_exc: exc = ParseException("exception raised in parse action") exc.__cause__ = parse_action_exc raise exc if tokens is not None and tokens is not retTokens: - retTokens = ParseResults( tokens, + retTokens = ParseResults(tokens, self.resultsName, - asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), - modal=self.modalResults ) + asList=self.saveAsList and isinstance(tokens, (ParseResults, list)), + modal=self.modalResults) if debugging: - #~ print ("Matched",self,"->",retTokens.asList()) - if (self.debugActions[1] ): - self.debugActions[1]( instring, tokensStart, loc, self, retTokens ) + # ~ print ("Matched", self, "->", retTokens.asList()) + if self.debugActions[MATCH]: + self.debugActions[MATCH](instring, tokensStart, loc, self, retTokens) return loc, retTokens - def tryParse( self, instring, loc ): + def tryParse(self, instring, loc): try: - return self._parse( instring, loc, doActions=False )[0] + return self._parse(instring, loc, doActions=False)[0] except ParseFatalException: - raise ParseException( instring, loc, self.errmsg, self) + raise ParseException(instring, loc, self.errmsg, self) def canParseNext(self, instring, loc): try: @@ -1697,7 +1832,7 @@ class ParserElement(object): # this method gets repeatedly called during backtracking with the same arguments - # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression - def _parseCache( self, instring, loc, doActions=True, callPreParse=True ): + def _parseCache(self, instring, loc, doActions=True, callPreParse=True): HIT, MISS = 0, 1 lookup = (self, instring, loc, callPreParse, doActions) with ParserElement.packrat_cache_lock: @@ -1718,7 +1853,7 @@ class ParserElement(object): ParserElement.packrat_cache_stats[HIT] += 1 if isinstance(value, Exception): raise value - return (value[0], value[1].copy()) + return value[0], value[1].copy() _parse = _parseNoCache @@ -1763,12 +1898,16 @@ class ParserElement(object): ParserElement.packrat_cache = 
ParserElement._FifoCache(cache_size_limit) ParserElement._parse = ParserElement._parseCache - def parseString( self, instring, parseAll=False ): + def parseString(self, instring, parseAll=False): """ Execute the parse expression with the given string. This is the main interface to the client code, once the complete expression has been built. + Returns the parsed data as a :class:`ParseResults` object, which may be + accessed as a list, or as a dict or object with attributes if the given parser + includes results names. + If you want the grammar to require that the entire input string be successfully parsed, then set ``parseAll`` to True (equivalent to ending the grammar with ``StringEnd()``). @@ -1782,7 +1921,7 @@ class ParserElement(object): - calling ``parseWithTabs`` on your grammar before calling ``parseString`` (see :class:`parseWithTabs`) - - define your parse action using the full ``(s,loc,toks)`` signature, and + - define your parse action using the full ``(s, loc, toks)`` signature, and reference the input string using the parse action's ``s`` argument - explictly expand the tabs in your input string before calling ``parseString`` @@ -1795,27 +1934,29 @@ class ParserElement(object): ParserElement.resetCache() if not self.streamlined: self.streamline() - #~ self.saveAsList = True + # ~ self.saveAsList = True for e in self.ignoreExprs: e.streamline() if not self.keepTabs: instring = instring.expandtabs() try: - loc, tokens = self._parse( instring, 0 ) + loc, tokens = self._parse(instring, 0) if parseAll: - loc = self.preParse( instring, loc ) + loc = self.preParse(instring, loc) se = Empty() + StringEnd() - se._parse( instring, loc ) + se._parse(instring, loc) except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace + # catch and re-raise exception from here, clearing out pyparsing internal stack trace + if getattr(exc, '__traceback__', None) is 
not None: + exc.__traceback__ = self._trim_traceback(exc.__traceback__) raise exc else: return tokens - def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ): + def scanString(self, instring, maxMatches=_MAX_INT, overlap=False): """ Scan the input string for expression matches. Each match will return the matching tokens, start location, and end location. May be called with optional @@ -1830,7 +1971,7 @@ class ParserElement(object): source = "sldjf123lsdjjkf345sldkjf879lkjsfd987" print(source) - for tokens,start,end in Word(alphas).scanString(source): + for tokens, start, end in Word(alphas).scanString(source): print(' '*start + '^'*(end-start)) print(' '*start + tokens[0]) @@ -1862,16 +2003,16 @@ class ParserElement(object): try: while loc <= instrlen and matches < maxMatches: try: - preloc = preparseFn( instring, loc ) - nextLoc,tokens = parseFn( instring, preloc, callPreParse=False ) + preloc = preparseFn(instring, loc) + nextLoc, tokens = parseFn(instring, preloc, callPreParse=False) except ParseException: - loc = preloc+1 + loc = preloc + 1 else: if nextLoc > loc: matches += 1 yield tokens, preloc, nextLoc if overlap: - nextloc = preparseFn( instring, loc ) + nextloc = preparseFn(instring, loc) if nextloc > loc: loc = nextLoc else: @@ -1879,15 +2020,17 @@ class ParserElement(object): else: loc = nextLoc else: - loc = preloc+1 + loc = preloc + 1 except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace + # catch and re-raise exception from here, clearing out pyparsing internal stack trace + if getattr(exc, '__traceback__', None) is not None: + exc.__traceback__ = self._trim_traceback(exc.__traceback__) raise exc - def transformString( self, instring ): + def transformString(self, instring): """ Extension to :class:`scanString`, to modify matching text with modified tokens that may be returned from a parse action. 
To use ``transformString``, define a grammar and @@ -1913,27 +2056,29 @@ class ParserElement(object): # keep string locs straight between transformString and scanString self.keepTabs = True try: - for t,s,e in self.scanString( instring ): - out.append( instring[lastE:s] ) + for t, s, e in self.scanString(instring): + out.append(instring[lastE:s]) if t: - if isinstance(t,ParseResults): + if isinstance(t, ParseResults): out += t.asList() - elif isinstance(t,list): + elif isinstance(t, list): out += t else: out.append(t) lastE = e out.append(instring[lastE:]) out = [o for o in out if o] - return "".join(map(_ustr,_flatten(out))) + return "".join(map(_ustr, _flatten(out))) except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace + # catch and re-raise exception from here, clearing out pyparsing internal stack trace + if getattr(exc, '__traceback__', None) is not None: + exc.__traceback__ = self._trim_traceback(exc.__traceback__) raise exc - def searchString( self, instring, maxMatches=_MAX_INT ): + def searchString(self, instring, maxMatches=_MAX_INT): """ Another extension to :class:`scanString`, simplifying the access to the tokens found to match the given parse expression. 
May be called with optional @@ -1955,12 +2100,14 @@ class ParserElement(object): ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity'] """ try: - return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ]) + return ParseResults([t for t, s, e in self.scanString(instring, maxMatches)]) except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace + # catch and re-raise exception from here, clearing out pyparsing internal stack trace + if getattr(exc, '__traceback__', None) is not None: + exc.__traceback__ = self._trim_traceback(exc.__traceback__) raise exc def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False): @@ -1981,14 +2128,14 @@ class ParserElement(object): """ splits = 0 last = 0 - for t,s,e in self.scanString(instring, maxMatches=maxsplit): + for t, s, e in self.scanString(instring, maxMatches=maxsplit): yield instring[last:s] if includeSeparators: yield t[0] last = e yield instring[last:] - def __add__(self, other ): + def __add__(self, other): """ Implementation of + operator - returns :class:`And`. Adding strings to a ParserElement converts them to :class:`Literal`s by default. @@ -2002,24 +2149,42 @@ class ParserElement(object): prints:: Hello, World! -> ['Hello', ',', 'World', '!'] + + ``...`` may be used as a parse expression as a short form of :class:`SkipTo`. + + Literal('start') + ... + Literal('end') + + is equivalent to: + + Literal('start') + SkipTo('end')("_skipped*") + Literal('end') + + Note that the skipped text is returned with '_skipped' as a results name, + and to support having multiple skips in the same parser, the value returned is + a list of all skipped text. 
""" - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): + if other is Ellipsis: + return _PendingSkip(self) + + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) return None - return And( [ self, other ] ) + return And([self, other]) - def __radd__(self, other ): + def __radd__(self, other): """ Implementation of + operator when left operand is not a :class:`ParserElement` """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): + if other is Ellipsis: + return SkipTo(self)("_skipped*") + self + + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) return None return other + self @@ -2027,64 +2192,70 @@ class ParserElement(object): """ Implementation of - operator, returns :class:`And` with error stop """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) return None return self + And._ErrorStop() + other - def __rsub__(self, other ): + def __rsub__(self, other): """ Implementation of - operator when left operand is not a :class:`ParserElement` """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other 
) - if not isinstance( other, ParserElement ): + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) return None return other - self - def __mul__(self,other): + def __mul__(self, other): """ Implementation of * operator, allows use of ``expr * 3`` in place of ``expr + expr + expr``. Expressions may also me multiplied by a 2-integer - tuple, similar to ``{min,max}`` multipliers in regular expressions. Tuples + tuple, similar to ``{min, max}`` multipliers in regular expressions. Tuples may also include ``None`` as in: - - ``expr*(n,None)`` or ``expr*(n,)`` is equivalent + - ``expr*(n, None)`` or ``expr*(n, )`` is equivalent to ``expr*n + ZeroOrMore(expr)`` (read as "at least n instances of ``expr``") - - ``expr*(None,n)`` is equivalent to ``expr*(0,n)`` + - ``expr*(None, n)`` is equivalent to ``expr*(0, n)`` (read as "0 to n instances of ``expr``") - - ``expr*(None,None)`` is equivalent to ``ZeroOrMore(expr)`` - - ``expr*(1,None)`` is equivalent to ``OneOrMore(expr)`` + - ``expr*(None, None)`` is equivalent to ``ZeroOrMore(expr)`` + - ``expr*(1, None)`` is equivalent to ``OneOrMore(expr)`` - Note that ``expr*(None,n)`` does not raise an exception if + Note that ``expr*(None, n)`` does not raise an exception if more than n exprs exist in the input stream; that is, - ``expr*(None,n)`` does not enforce a maximum number of expr + ``expr*(None, n)`` does not enforce a maximum number of expr occurrences. 
If this behavior is desired, then write - ``expr*(None,n) + ~expr`` + ``expr*(None, n) + ~expr`` """ - if isinstance(other,int): - minElements, optElements = other,0 - elif isinstance(other,tuple): + if other is Ellipsis: + other = (0, None) + elif isinstance(other, tuple) and other[:1] == (Ellipsis,): + other = ((0, ) + other[1:] + (None,))[:2] + + if isinstance(other, int): + minElements, optElements = other, 0 + elif isinstance(other, tuple): + other = tuple(o if o is not Ellipsis else None for o in other) other = (other + (None, None))[:2] if other[0] is None: other = (0, other[1]) - if isinstance(other[0],int) and other[1] is None: + if isinstance(other[0], int) and other[1] is None: if other[0] == 0: return ZeroOrMore(self) if other[0] == 1: return OneOrMore(self) else: - return self*other[0] + ZeroOrMore(self) - elif isinstance(other[0],int) and isinstance(other[1],int): + return self * other[0] + ZeroOrMore(self) + elif isinstance(other[0], int) and isinstance(other[1], int): minElements, optElements = other optElements -= minElements else: - raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1])) + raise TypeError("cannot multiply 'ParserElement' and ('%s', '%s') objects", type(other[0]), type(other[1])) else: raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other)) @@ -2093,108 +2264,152 @@ class ParserElement(object): if optElements < 0: raise ValueError("second tuple value must be greater or equal to first tuple value") if minElements == optElements == 0: - raise ValueError("cannot multiply ParserElement by 0 or (0,0)") + raise ValueError("cannot multiply ParserElement by 0 or (0, 0)") - if (optElements): + if optElements: def makeOptionalList(n): - if n>1: - return Optional(self + makeOptionalList(n-1)) + if n > 1: + return Optional(self + makeOptionalList(n - 1)) else: return Optional(self) if minElements: if minElements == 1: ret = self + makeOptionalList(optElements) else: - 
ret = And([self]*minElements) + makeOptionalList(optElements) + ret = And([self] * minElements) + makeOptionalList(optElements) else: ret = makeOptionalList(optElements) else: if minElements == 1: ret = self else: - ret = And([self]*minElements) + ret = And([self] * minElements) return ret def __rmul__(self, other): return self.__mul__(other) - def __or__(self, other ): + def __or__(self, other): """ Implementation of | operator - returns :class:`MatchFirst` """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): + if other is Ellipsis: + return _PendingSkip(self, must_skip=True) + + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) return None - return MatchFirst( [ self, other ] ) + return MatchFirst([self, other]) - def __ror__(self, other ): + def __ror__(self, other): """ Implementation of | operator when left operand is not a :class:`ParserElement` """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) return None return other | self - def __xor__(self, other ): + def __xor__(self, other): """ Implementation of ^ operator - returns :class:`Or` """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): warnings.warn("Cannot 
combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) return None - return Or( [ self, other ] ) + return Or([self, other]) - def __rxor__(self, other ): + def __rxor__(self, other): """ Implementation of ^ operator when left operand is not a :class:`ParserElement` """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) return None return other ^ self - def __and__(self, other ): + def __and__(self, other): """ Implementation of & operator - returns :class:`Each` """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) return None - return Each( [ self, other ] ) + return Each([self, other]) - def __rand__(self, other ): + def __rand__(self, other): """ Implementation of & operator when left operand is not a :class:`ParserElement` """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): + if isinstance(other, basestring): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) return None return other & self - def __invert__( self ): + def 
__invert__(self): """ Implementation of ~ operator - returns :class:`NotAny` """ - return NotAny( self ) + return NotAny(self) + + def __iter__(self): + # must implement __iter__ to override legacy use of sequential access to __getitem__ to + # iterate over a sequence + raise TypeError('%r object is not iterable' % self.__class__.__name__) + + def __getitem__(self, key): + """ + use ``[]`` indexing notation as a short form for expression repetition: + - ``expr[n]`` is equivalent to ``expr*n`` + - ``expr[m, n]`` is equivalent to ``expr*(m, n)`` + - ``expr[n, ...]`` or ``expr[n,]`` is equivalent + to ``expr*n + ZeroOrMore(expr)`` + (read as "at least n instances of ``expr``") + - ``expr[..., n]`` is equivalent to ``expr*(0, n)`` + (read as "0 to n instances of ``expr``") + - ``expr[...]`` and ``expr[0, ...]`` are equivalent to ``ZeroOrMore(expr)`` + - ``expr[1, ...]`` is equivalent to ``OneOrMore(expr)`` + ``None`` may be used in place of ``...``. + + Note that ``expr[..., n]`` and ``expr[m, n]``do not raise an exception + if more than ``n`` ``expr``s exist in the input stream. If this behavior is + desired, then write ``expr[..., n] + ~expr``. + """ + + # convert single arg keys to tuples + try: + if isinstance(key, str): + key = (key,) + iter(key) + except TypeError: + key = (key, key) + + if len(key) > 2: + warnings.warn("only 1 or 2 index arguments supported ({0}{1})".format(key[:5], + '... 
[{0}]'.format(len(key)) + if len(key) > 5 else '')) + + # clip to 2 elements + ret = self * tuple(key[:2]) + return ret def __call__(self, name=None): """ @@ -2208,22 +2423,22 @@ class ParserElement(object): Example:: # these are equivalent - userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") - userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") + userdata = Word(alphas).setResultsName("name") + Word(nums + "-").setResultsName("socsecno") + userdata = Word(alphas)("name") + Word(nums + "-")("socsecno") """ if name is not None: - return self.setResultsName(name) + return self._setResultsName(name) else: return self.copy() - def suppress( self ): + def suppress(self): """ Suppresses the output of this :class:`ParserElement`; useful to keep punctuation from cluttering up returned output. """ - return Suppress( self ) + return Suppress(self) - def leaveWhitespace( self ): + def leaveWhitespace(self): """ Disables the skipping of whitespace before matching the characters in the :class:`ParserElement`'s defined pattern. This is normally only used internally by @@ -2232,7 +2447,7 @@ class ParserElement(object): self.skipWhitespace = False return self - def setWhitespaceChars( self, chars ): + def setWhitespaceChars(self, chars): """ Overrides the default whitespace chars """ @@ -2241,7 +2456,7 @@ class ParserElement(object): self.copyDefaultWhiteChars = False return self - def parseWithTabs( self ): + def parseWithTabs(self): """ Overrides default behavior to expand ``<TAB>``s to spaces before parsing the input string. 
Must be called before ``parseString`` when the input grammar contains elements that @@ -2250,7 +2465,7 @@ class ParserElement(object): self.keepTabs = True return self - def ignore( self, other ): + def ignore(self, other): """ Define expression to be ignored (e.g., comments) while doing pattern matching; may be called repeatedly, to define multiple comment or other @@ -2267,14 +2482,14 @@ class ParserElement(object): if isinstance(other, basestring): other = Suppress(other) - if isinstance( other, Suppress ): + if isinstance(other, Suppress): if other not in self.ignoreExprs: self.ignoreExprs.append(other) else: - self.ignoreExprs.append( Suppress( other.copy() ) ) + self.ignoreExprs.append(Suppress(other.copy())) return self - def setDebugActions( self, startAction, successAction, exceptionAction ): + def setDebugActions(self, startAction, successAction, exceptionAction): """ Enable display of debugging messages while doing pattern matching. """ @@ -2284,7 +2499,7 @@ class ParserElement(object): self.debug = True return self - def setDebug( self, flag=True ): + def setDebug(self, flag=True): """ Enable display of debugging messages while doing pattern matching. Set ``flag`` to True to enable, False to disable. @@ -2322,32 +2537,32 @@ class ParserElement(object): name created for the :class:`Word` expression without calling ``setName`` is ``"W:(ABCD...)"``. 
""" if flag: - self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction ) + self.setDebugActions(_defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction) else: self.debug = False return self - def __str__( self ): + def __str__(self): return self.name - def __repr__( self ): + def __repr__(self): return _ustr(self) - def streamline( self ): + def streamline(self): self.streamlined = True self.strRepr = None return self - def checkRecursion( self, parseElementList ): + def checkRecursion(self, parseElementList): pass - def validate( self, validateTrace=[] ): + def validate(self, validateTrace=None): """ Check defined expressions for valid structure, check for infinite recursive definitions. """ - self.checkRecursion( [] ) + self.checkRecursion([]) - def parseFile( self, file_or_filename, parseAll=False ): + def parseFile(self, file_or_filename, parseAll=False): """ Execute the parse expression on the given file or filename. 
If a filename is specified (instead of a file object), @@ -2364,27 +2579,30 @@ class ParserElement(object): if ParserElement.verbose_stacktrace: raise else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace + # catch and re-raise exception from here, clearing out pyparsing internal stack trace + if getattr(exc, '__traceback__', None) is not None: + exc.__traceback__ = self._trim_traceback(exc.__traceback__) raise exc - def __eq__(self,other): - if isinstance(other, ParserElement): - return self is other or vars(self) == vars(other) + def __eq__(self, other): + if self is other: + return True elif isinstance(other, basestring): return self.matches(other) - else: - return super(ParserElement,self)==other + elif isinstance(other, ParserElement): + return vars(self) == vars(other) + return False - def __ne__(self,other): + def __ne__(self, other): return not (self == other) def __hash__(self): - return hash(id(self)) + return id(self) - def __req__(self,other): + def __req__(self, other): return self == other - def __rne__(self,other): + def __rne__(self, other): return not (self == other) def matches(self, testString, parseAll=True): @@ -2408,7 +2626,8 @@ class ParserElement(object): return False def runTests(self, tests, parseAll=True, comment='#', - fullDump=True, printResults=True, failureTests=False, postParse=None): + fullDump=True, printResults=True, failureTests=False, postParse=None, + file=None): """ Execute the parse expression on a series of test strings, showing each test, the parsed results or where the parse failed. 
Quick and easy way to @@ -2425,6 +2644,8 @@ class ParserElement(object): - failureTests - (default= ``False``) indicates if these tests are expected to fail parsing - postParse - (default= ``None``) optional callback for successful parse results; called as `fn(test_string, parse_results)` and returns a string to be added to the test output + - file - (default=``None``) optional file-like object to which test output will be written; + if None, will default to ``sys.stdout`` Returns: a (success, results) tuple, where success indicates that all tests succeeded (or failed if ``failureTests`` is True), and the results contain a list of lines of each @@ -2504,37 +2725,34 @@ class ParserElement(object): tests = list(map(str.strip, tests.rstrip().splitlines())) if isinstance(comment, basestring): comment = Literal(comment) + if file is None: + file = sys.stdout + print_ = file.write + allResults = [] comments = [] success = True + NL = Literal(r'\n').addParseAction(replaceWith('\n')).ignore(quotedString) + BOM = u'\ufeff' for t in tests: if comment is not None and comment.matches(t, False) or comments and not t: comments.append(t) continue if not t: continue - out = ['\n'.join(comments), t] + out = ['\n' + '\n'.join(comments) if comments else '', t] comments = [] try: # convert newline marks to actual newlines, and strip leading BOM if present - t = t.replace(r'\n','\n').lstrip('\ufeff') + t = NL.transformString(t.lstrip(BOM)) result = self.parseString(t, parseAll=parseAll) - out.append(result.dump(full=fullDump)) - success = success and not failureTests - if postParse is not None: - try: - pp_value = postParse(t, result) - if pp_value is not None: - out.append(str(pp_value)) - except Exception as e: - out.append("{0} failed: {1}: {2}".format(postParse.__name__, type(e).__name__, e)) except ParseBaseException as pe: fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else "" if '\n' in t: out.append(line(pe.loc, t)) - out.append(' '*(col(pe.loc,t)-1) + '^' + fatal) + 
out.append(' ' * (col(pe.loc, t) - 1) + '^' + fatal) else: - out.append(' '*pe.loc + '^' + fatal) + out.append(' ' * pe.loc + '^' + fatal) out.append("FAIL: " + str(pe)) success = success and failureTests result = pe @@ -2542,30 +2760,80 @@ class ParserElement(object): out.append("FAIL-EXCEPTION: " + str(exc)) success = success and failureTests result = exc + else: + success = success and not failureTests + if postParse is not None: + try: + pp_value = postParse(t, result) + if pp_value is not None: + if isinstance(pp_value, ParseResults): + out.append(pp_value.dump()) + else: + out.append(str(pp_value)) + else: + out.append(result.dump()) + except Exception as e: + out.append(result.dump(full=fullDump)) + out.append("{0} failed: {1}: {2}".format(postParse.__name__, type(e).__name__, e)) + else: + out.append(result.dump(full=fullDump)) if printResults: if fullDump: out.append('') - print('\n'.join(out)) + print_('\n'.join(out)) allResults.append((t, result)) return success, allResults +class _PendingSkip(ParserElement): + # internal placeholder class to hold a place were '...' 
is added to a parser element, + # once another ParserElement is added, this placeholder will be replaced with a SkipTo + def __init__(self, expr, must_skip=False): + super(_PendingSkip, self).__init__() + self.strRepr = str(expr + Empty()).replace('Empty', '...') + self.name = self.strRepr + self.anchor = expr + self.must_skip = must_skip + + def __add__(self, other): + skipper = SkipTo(other).setName("...")("_skipped*") + if self.must_skip: + def must_skip(t): + if not t._skipped or t._skipped.asList() == ['']: + del t[0] + t.pop("_skipped", None) + def show_skip(t): + if t._skipped.asList()[-1:] == ['']: + skipped = t.pop('_skipped') + t['_skipped'] = 'missing <' + repr(self.anchor) + '>' + return (self.anchor + skipper().addParseAction(must_skip) + | skipper().addParseAction(show_skip)) + other + + return self.anchor + skipper + other + + def __repr__(self): + return self.strRepr + + def parseImpl(self, *args): + raise Exception("use of `...` expression without following SkipTo target expression") + + class Token(ParserElement): """Abstract :class:`ParserElement` subclass, for defining atomic matching patterns. """ - def __init__( self ): - super(Token,self).__init__( savelist=False ) + def __init__(self): + super(Token, self).__init__(savelist=False) class Empty(Token): """An empty token, will always match. """ - def __init__( self ): - super(Empty,self).__init__() + def __init__(self): + super(Empty, self).__init__() self.name = "Empty" self.mayReturnEmpty = True self.mayIndexError = False @@ -2574,14 +2842,14 @@ class Empty(Token): class NoMatch(Token): """A token that will never match. 
""" - def __init__( self ): - super(NoMatch,self).__init__() + def __init__(self): + super(NoMatch, self).__init__() self.name = "NoMatch" self.mayReturnEmpty = True self.mayIndexError = False self.errmsg = "Unmatchable token" - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): raise ParseException(instring, loc, self.errmsg, self) @@ -2599,8 +2867,8 @@ class Literal(Token): For keyword matching (force word break before and after the matched string), use :class:`Keyword` or :class:`CaselessKeyword`. """ - def __init__( self, matchString ): - super(Literal,self).__init__() + def __init__(self, matchString): + super(Literal, self).__init__() self.match = matchString self.matchLen = len(matchString) try: @@ -2614,15 +2882,22 @@ class Literal(Token): self.mayReturnEmpty = False self.mayIndexError = False - # Performance tuning: this routine gets called a *lot* - # if this is a single character match string and the first character matches, - # short-circuit as quickly as possible, and avoid calling startswith - #~ @profile - def parseImpl( self, instring, loc, doActions=True ): - if (instring[loc] == self.firstMatchChar and - (self.matchLen==1 or instring.startswith(self.match,loc)) ): - return loc+self.matchLen, self.match + # Performance tuning: modify __class__ to select + # a parseImpl optimized for single-character check + if self.matchLen == 1 and type(self) is Literal: + self.__class__ = _SingleCharLiteral + + def parseImpl(self, instring, loc, doActions=True): + if instring[loc] == self.firstMatchChar and instring.startswith(self.match, loc): + return loc + self.matchLen, self.match raise ParseException(instring, loc, self.errmsg, self) + +class _SingleCharLiteral(Literal): + def parseImpl(self, instring, loc, doActions=True): + if instring[loc] == self.firstMatchChar: + return loc + 1, self.match + raise ParseException(instring, loc, self.errmsg, self) + _L = Literal ParserElement._literalStringClass = 
Literal @@ -2651,10 +2926,10 @@ class Keyword(Token): For case-insensitive matching, use :class:`CaselessKeyword`. """ - DEFAULT_KEYWORD_CHARS = alphanums+"_$" + DEFAULT_KEYWORD_CHARS = alphanums + "_$" - def __init__( self, matchString, identChars=None, caseless=False ): - super(Keyword,self).__init__() + def __init__(self, matchString, identChars=None, caseless=False): + super(Keyword, self).__init__() if identChars is None: identChars = Keyword.DEFAULT_KEYWORD_CHARS self.match = matchString @@ -2663,7 +2938,7 @@ class Keyword(Token): self.firstMatchChar = matchString[0] except IndexError: warnings.warn("null string passed to Keyword; use Empty() instead", - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) self.name = '"%s"' % self.match self.errmsg = "Expected " + self.name self.mayReturnEmpty = False @@ -2674,27 +2949,32 @@ class Keyword(Token): identChars = identChars.upper() self.identChars = set(identChars) - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): if self.caseless: - if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and - (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and - (loc == 0 or instring[loc-1].upper() not in self.identChars) ): - return loc+self.matchLen, self.match + if ((instring[loc:loc + self.matchLen].upper() == self.caselessmatch) + and (loc >= len(instring) - self.matchLen + or instring[loc + self.matchLen].upper() not in self.identChars) + and (loc == 0 + or instring[loc - 1].upper() not in self.identChars)): + return loc + self.matchLen, self.match + else: - if (instring[loc] == self.firstMatchChar and - (self.matchLen==1 or instring.startswith(self.match,loc)) and - (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and - (loc == 0 or instring[loc-1] not in self.identChars) ): - return loc+self.matchLen, self.match + if instring[loc] == 
self.firstMatchChar: + if ((self.matchLen == 1 or instring.startswith(self.match, loc)) + and (loc >= len(instring) - self.matchLen + or instring[loc + self.matchLen] not in self.identChars) + and (loc == 0 or instring[loc - 1] not in self.identChars)): + return loc + self.matchLen, self.match + raise ParseException(instring, loc, self.errmsg, self) def copy(self): - c = super(Keyword,self).copy() + c = super(Keyword, self).copy() c.identChars = Keyword.DEFAULT_KEYWORD_CHARS return c @staticmethod - def setDefaultKeywordChars( chars ): + def setDefaultKeywordChars(chars): """Overrides the default Keyword chars """ Keyword.DEFAULT_KEYWORD_CHARS = chars @@ -2710,16 +2990,16 @@ class CaselessLiteral(Literal): (Contrast with example for :class:`CaselessKeyword`.) """ - def __init__( self, matchString ): - super(CaselessLiteral,self).__init__( matchString.upper() ) + def __init__(self, matchString): + super(CaselessLiteral, self).__init__(matchString.upper()) # Preserve the defining literal. self.returnString = matchString self.name = "'%s'" % self.returnString self.errmsg = "Expected " + self.name - def parseImpl( self, instring, loc, doActions=True ): - if instring[ loc:loc+self.matchLen ].upper() == self.match: - return loc+self.matchLen, self.returnString + def parseImpl(self, instring, loc, doActions=True): + if instring[loc:loc + self.matchLen].upper() == self.match: + return loc + self.matchLen, self.returnString raise ParseException(instring, loc, self.errmsg, self) class CaselessKeyword(Keyword): @@ -2732,8 +3012,8 @@ class CaselessKeyword(Keyword): (Contrast with example for :class:`CaselessLiteral`.) 
""" - def __init__( self, matchString, identChars=None ): - super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True ) + def __init__(self, matchString, identChars=None): + super(CaselessKeyword, self).__init__(matchString, identChars, caseless=True) class CloseMatch(Token): """A variation on :class:`Literal` which matches "close" matches, @@ -2769,7 +3049,7 @@ class CloseMatch(Token): patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']}) """ def __init__(self, match_string, maxMismatches=1): - super(CloseMatch,self).__init__() + super(CloseMatch, self).__init__() self.name = match_string self.match_string = match_string self.maxMismatches = maxMismatches @@ -2777,7 +3057,7 @@ class CloseMatch(Token): self.mayIndexError = False self.mayReturnEmpty = False - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): start = loc instrlen = len(instring) maxloc = start + len(self.match_string) @@ -2788,8 +3068,8 @@ class CloseMatch(Token): mismatches = [] maxMismatches = self.maxMismatches - for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)): - src,mat = s_m + for match_stringloc, s_m in enumerate(zip(instring[loc:maxloc], match_string)): + src, mat = s_m if src != mat: mismatches.append(match_stringloc) if len(mismatches) > maxMismatches: @@ -2797,7 +3077,7 @@ class CloseMatch(Token): else: loc = match_stringloc + 1 results = ParseResults([instring[start:loc]]) - results['original'] = self.match_string + results['original'] = match_string results['mismatches'] = mismatches return loc, results @@ -2849,7 +3129,7 @@ class Word(Token): capital_word = Word(alphas.upper(), alphas.lower()) # hostnames are alphanumeric, with leading alpha, and '-' - hostname = Word(alphas, alphanums+'-') + hostname = Word(alphas, alphanums + '-') # roman numeral (not a strict parser, accepts invalid mix of characters) roman = 
Word("IVXLCDM") @@ -2857,15 +3137,16 @@ class Word(Token): # any string of non-whitespace characters, except for ',' csv_value = Word(printables, excludeChars=",") """ - def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ): - super(Word,self).__init__() + def __init__(self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None): + super(Word, self).__init__() if excludeChars: + excludeChars = set(excludeChars) initChars = ''.join(c for c in initChars if c not in excludeChars) if bodyChars: bodyChars = ''.join(c for c in bodyChars if c not in excludeChars) self.initCharsOrig = initChars self.initChars = set(initChars) - if bodyChars : + if bodyChars: self.bodyCharsOrig = bodyChars self.bodyChars = set(bodyChars) else: @@ -2893,34 +3174,28 @@ class Word(Token): self.mayIndexError = False self.asKeyword = asKeyword - if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0): + if ' ' not in self.initCharsOrig + self.bodyCharsOrig and (min == 1 and max == 0 and exact == 0): if self.bodyCharsOrig == self.initCharsOrig: self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig) elif len(self.initCharsOrig) == 1: - self.reString = "%s[%s]*" % \ - (re.escape(self.initCharsOrig), - _escapeRegexRangeChars(self.bodyCharsOrig),) + self.reString = "%s[%s]*" % (re.escape(self.initCharsOrig), + _escapeRegexRangeChars(self.bodyCharsOrig),) else: - self.reString = "[%s][%s]*" % \ - (_escapeRegexRangeChars(self.initCharsOrig), - _escapeRegexRangeChars(self.bodyCharsOrig),) + self.reString = "[%s][%s]*" % (_escapeRegexRangeChars(self.initCharsOrig), + _escapeRegexRangeChars(self.bodyCharsOrig),) if self.asKeyword: - self.reString = r"\b"+self.reString+r"\b" + self.reString = r"\b" + self.reString + r"\b" + try: - self.re = re.compile( self.reString ) + self.re = re.compile(self.reString) except Exception: self.re = None + else: + self.re_match = self.re.match 
+ self.__class__ = _WordRegex - def parseImpl( self, instring, loc, doActions=True ): - if self.re: - result = self.re.match(instring,loc) - if not result: - raise ParseException(instring, loc, self.errmsg, self) - - loc = result.end() - return loc, result.group() - - if not(instring[ loc ] in self.initChars): + def parseImpl(self, instring, loc, doActions=True): + if instring[loc] not in self.initChars: raise ParseException(instring, loc, self.errmsg, self) start = loc @@ -2928,17 +3203,18 @@ class Word(Token): instrlen = len(instring) bodychars = self.bodyChars maxloc = start + self.maxLen - maxloc = min( maxloc, instrlen ) + maxloc = min(maxloc, instrlen) while loc < maxloc and instring[loc] in bodychars: loc += 1 throwException = False if loc - start < self.minLen: throwException = True - if self.maxSpecified and loc < instrlen and instring[loc] in bodychars: + elif self.maxSpecified and loc < instrlen and instring[loc] in bodychars: throwException = True - if self.asKeyword: - if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars): + elif self.asKeyword: + if (start > 0 and instring[start - 1] in bodychars + or loc < instrlen and instring[loc] in bodychars): throwException = True if throwException: @@ -2946,38 +3222,49 @@ class Word(Token): return loc, instring[start:loc] - def __str__( self ): + def __str__(self): try: - return super(Word,self).__str__() + return super(Word, self).__str__() except Exception: pass - if self.strRepr is None: def charsAsStr(s): - if len(s)>4: - return s[:4]+"..." + if len(s) > 4: + return s[:4] + "..." 
else: return s - if ( self.initCharsOrig != self.bodyCharsOrig ): - self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) ) + if self.initCharsOrig != self.bodyCharsOrig: + self.strRepr = "W:(%s, %s)" % (charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig)) else: self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig) return self.strRepr +class _WordRegex(Word): + def parseImpl(self, instring, loc, doActions=True): + result = self.re_match(instring, loc) + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + return loc, result.group() + -class Char(Word): +class Char(_WordRegex): """A short-cut class for defining ``Word(characters, exact=1)``, when defining a match of any single character in a string of characters. """ - def __init__(self, charset): - super(Char, self).__init__(charset, exact=1) - self.reString = "[%s]" % _escapeRegexRangeChars(self.initCharsOrig) - self.re = re.compile( self.reString ) + def __init__(self, charset, asKeyword=False, excludeChars=None): + super(Char, self).__init__(charset, exact=1, asKeyword=asKeyword, excludeChars=excludeChars) + self.reString = "[%s]" % _escapeRegexRangeChars(''.join(self.initChars)) + if asKeyword: + self.reString = r"\b%s\b" % self.reString + self.re = re.compile(self.reString) + self.re_match = self.re.match class Regex(Token): @@ -2987,26 +3274,35 @@ class Regex(Token): If the given regex contains named groups (defined using ``(?P<name>...)``), these will be preserved as named parse results. 
+ If instead of the Python stdlib re module you wish to use a different RE module + (such as the `regex` module), you can replace it by either building your + Regex object with a compiled RE that was compiled using regex: + Example:: realnum = Regex(r"[+-]?\d+\.\d*") date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)') # ref: https://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression roman = Regex(r"M{0,4}(CM|CD|D?{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})") + + # use regex module instead of stdlib re module to construct a Regex using + # a compiled regular expression + import regex + parser = pp.Regex(regex.compile(r'[0-9]')) + """ - compiledREtype = type(re.compile("[A-Z]")) - def __init__( self, pattern, flags=0, asGroupList=False, asMatch=False): + def __init__(self, pattern, flags=0, asGroupList=False, asMatch=False): """The parameters ``pattern`` and ``flags`` are passed to the ``re.compile()`` function as-is. See the Python `re module <https://docs.python.org/3/library/re.html>`_ module for an explanation of the acceptable patterns and flags. 
""" - super(Regex,self).__init__() + super(Regex, self).__init__() if isinstance(pattern, basestring): if not pattern: warnings.warn("null string passed to Regex; use Empty() instead", - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) self.pattern = pattern self.flags = flags @@ -3016,46 +3312,64 @@ class Regex(Token): self.reString = self.pattern except sre_constants.error: warnings.warn("invalid pattern (%s) passed to Regex" % pattern, - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) raise - elif isinstance(pattern, Regex.compiledREtype): + elif hasattr(pattern, 'pattern') and hasattr(pattern, 'match'): self.re = pattern - self.pattern = \ - self.reString = str(pattern) + self.pattern = self.reString = pattern.pattern self.flags = flags else: - raise ValueError("Regex may only be constructed with a string or a compiled RE object") + raise TypeError("Regex may only be constructed with a string or a compiled RE object") + + self.re_match = self.re.match self.name = _ustr(self) self.errmsg = "Expected " + self.name self.mayIndexError = False - self.mayReturnEmpty = True + self.mayReturnEmpty = self.re_match("") is not None self.asGroupList = asGroupList self.asMatch = asMatch + if self.asGroupList: + self.parseImpl = self.parseImplAsGroupList + if self.asMatch: + self.parseImpl = self.parseImplAsMatch - def parseImpl( self, instring, loc, doActions=True ): - result = self.re.match(instring,loc) + def parseImpl(self, instring, loc, doActions=True): + result = self.re_match(instring, loc) if not result: raise ParseException(instring, loc, self.errmsg, self) loc = result.end() - if self.asMatch: - ret = result - elif self.asGroupList: - ret = result.groups() - else: - ret = ParseResults(result.group()) - d = result.groupdict() - if d: - for k, v in d.items(): - ret[k] = v - return loc,ret - - def __str__( self ): + ret = ParseResults(result.group()) + d = result.groupdict() + if d: + for k, v in d.items(): + ret[k] = v + return loc, ret + + def 
parseImplAsGroupList(self, instring, loc, doActions=True): + result = self.re_match(instring, loc) + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + ret = result.groups() + return loc, ret + + def parseImplAsMatch(self, instring, loc, doActions=True): + result = self.re_match(instring, loc) + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + ret = result + return loc, ret + + def __str__(self): try: - return super(Regex,self).__str__() + return super(Regex, self).__str__() except Exception: pass @@ -3065,7 +3379,7 @@ class Regex(Token): return self.strRepr def sub(self, repl): - """ + r""" Return Regex with an attached parse action to transform the parsed result as if called using `re.sub(expr, repl, string) <https://docs.python.org/3/library/re.html#re.sub>`_. @@ -3077,12 +3391,12 @@ class Regex(Token): """ if self.asGroupList: warnings.warn("cannot use sub() with Regex(asGroupList=True)", - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) raise SyntaxError() if self.asMatch and callable(repl): warnings.warn("cannot use sub() with a callable with Regex(asMatch=True)", - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) raise SyntaxError() if self.asMatch: @@ -3102,20 +3416,20 @@ class QuotedString(Token): - quoteChar - string of one or more characters defining the quote delimiting string - escChar - character to escape quotes, typically backslash - (default= ``None`` ) + (default= ``None``) - escQuote - special quote sequence to escape an embedded quote string (such as SQL's ``""`` to escape an embedded ``"``) - (default= ``None`` ) + (default= ``None``) - multiline - boolean indicating whether quotes can span - multiple lines (default= ``False`` ) + multiple lines (default= ``False``) - unquoteResults - boolean indicating whether the matched text - should be unquoted (default= ``True`` ) + should be unquoted (default= ``True``) - endQuoteChar - 
string of one or more characters defining the end of the quote delimited string (default= ``None`` => same as quoteChar) - convertWhitespaceEscapes - convert escaped whitespace (``'\t'``, ``'\n'``, etc.) to actual whitespace - (default= ``True`` ) + (default= ``True``) Example:: @@ -3132,13 +3446,14 @@ class QuotedString(Token): [['This is the "quote"']] [['This is the quote with "embedded" quotes']] """ - def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True): - super(QuotedString,self).__init__() + def __init__(self, quoteChar, escChar=None, escQuote=None, multiline=False, + unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True): + super(QuotedString, self).__init__() # remove white space from quote chars - wont work anyway quoteChar = quoteChar.strip() if not quoteChar: - warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) + warnings.warn("quoteChar cannot be the empty string", SyntaxWarning, stacklevel=2) raise SyntaxError() if endQuoteChar is None: @@ -3146,7 +3461,7 @@ class QuotedString(Token): else: endQuoteChar = endQuoteChar.strip() if not endQuoteChar: - warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) + warnings.warn("endQuoteChar cannot be the empty string", SyntaxWarning, stacklevel=2) raise SyntaxError() self.quoteChar = quoteChar @@ -3161,35 +3476,34 @@ class QuotedString(Token): if multiline: self.flags = re.MULTILINE | re.DOTALL - self.pattern = r'%s(?:[^%s%s]' % \ - ( re.escape(self.quoteChar), - _escapeRegexRangeChars(self.endQuoteChar[0]), - (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) + self.pattern = r'%s(?:[^%s%s]' % (re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + (escChar is not None and _escapeRegexRangeChars(escChar) or '')) else: self.flags = 0 - self.pattern = r'%s(?:[^%s\n\r%s]' % \ - ( re.escape(self.quoteChar), - 
_escapeRegexRangeChars(self.endQuoteChar[0]), - (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) + self.pattern = r'%s(?:[^%s\n\r%s]' % (re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + (escChar is not None and _escapeRegexRangeChars(escChar) or '')) if len(self.endQuoteChar) > 1: self.pattern += ( '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]), - _escapeRegexRangeChars(self.endQuoteChar[i])) - for i in range(len(self.endQuoteChar)-1,0,-1)) + ')' - ) + _escapeRegexRangeChars(self.endQuoteChar[i])) + for i in range(len(self.endQuoteChar) - 1, 0, -1)) + ')') + if escQuote: self.pattern += (r'|(?:%s)' % re.escape(escQuote)) if escChar: self.pattern += (r'|(?:%s.)' % re.escape(escChar)) - self.escCharReplacePattern = re.escape(self.escChar)+"(.)" + self.escCharReplacePattern = re.escape(self.escChar) + "(.)" self.pattern += (r')*%s' % re.escape(self.endQuoteChar)) try: self.re = re.compile(self.pattern, self.flags) self.reString = self.pattern + self.re_match = self.re.match except sre_constants.error: warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern, - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) raise self.name = _ustr(self) @@ -3197,8 +3511,8 @@ class QuotedString(Token): self.mayIndexError = False self.mayReturnEmpty = True - def parseImpl( self, instring, loc, doActions=True ): - result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None + def parseImpl(self, instring, loc, doActions=True): + result = instring[loc] == self.firstQuoteChar and self.re_match(instring, loc) or None if not result: raise ParseException(instring, loc, self.errmsg, self) @@ -3208,18 +3522,18 @@ class QuotedString(Token): if self.unquoteResults: # strip off quotes - ret = ret[self.quoteCharLen:-self.endQuoteCharLen] + ret = ret[self.quoteCharLen: -self.endQuoteCharLen] - if isinstance(ret,basestring): + if isinstance(ret, basestring): # replace escaped whitespace 
if '\\' in ret and self.convertWhitespaceEscapes: ws_map = { - r'\t' : '\t', - r'\n' : '\n', - r'\f' : '\f', - r'\r' : '\r', + r'\t': '\t', + r'\n': '\n', + r'\f': '\f', + r'\r': '\r', } - for wslit,wschar in ws_map.items(): + for wslit, wschar in ws_map.items(): ret = ret.replace(wslit, wschar) # replace escaped characters @@ -3232,9 +3546,9 @@ class QuotedString(Token): return loc, ret - def __str__( self ): + def __str__(self): try: - return super(QuotedString,self).__str__() + return super(QuotedString, self).__str__() except Exception: pass @@ -3264,15 +3578,14 @@ class CharsNotIn(Token): ['dkls', 'lsdkjf', 's12 34', '@!#', '213'] """ - def __init__( self, notChars, min=1, max=0, exact=0 ): - super(CharsNotIn,self).__init__() + def __init__(self, notChars, min=1, max=0, exact=0): + super(CharsNotIn, self).__init__() self.skipWhitespace = False self.notChars = notChars if min < 1: - raise ValueError( - "cannot specify a minimum length < 1; use " + - "Optional(CharsNotIn()) if zero-length char group is permitted") + raise ValueError("cannot specify a minimum length < 1; use " + "Optional(CharsNotIn()) if zero-length char group is permitted") self.minLen = min @@ -3287,19 +3600,18 @@ class CharsNotIn(Token): self.name = _ustr(self) self.errmsg = "Expected " + self.name - self.mayReturnEmpty = ( self.minLen == 0 ) + self.mayReturnEmpty = (self.minLen == 0) self.mayIndexError = False - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): if instring[loc] in self.notChars: raise ParseException(instring, loc, self.errmsg, self) start = loc loc += 1 notchars = self.notChars - maxlen = min( start+self.maxLen, len(instring) ) - while loc < maxlen and \ - (instring[loc] not in notchars): + maxlen = min(start + self.maxLen, len(instring)) + while loc < maxlen and instring[loc] not in notchars: loc += 1 if loc - start < self.minLen: @@ -3307,7 +3619,7 @@ class CharsNotIn(Token): return loc, instring[start:loc] - def 
__str__( self ): + def __str__(self): try: return super(CharsNotIn, self).__str__() except Exception: @@ -3336,30 +3648,30 @@ class White(Token): '\n': '<LF>', '\r': '<CR>', '\f': '<FF>', - 'u\00A0': '<NBSP>', - 'u\1680': '<OGHAM_SPACE_MARK>', - 'u\180E': '<MONGOLIAN_VOWEL_SEPARATOR>', - 'u\2000': '<EN_QUAD>', - 'u\2001': '<EM_QUAD>', - 'u\2002': '<EN_SPACE>', - 'u\2003': '<EM_SPACE>', - 'u\2004': '<THREE-PER-EM_SPACE>', - 'u\2005': '<FOUR-PER-EM_SPACE>', - 'u\2006': '<SIX-PER-EM_SPACE>', - 'u\2007': '<FIGURE_SPACE>', - 'u\2008': '<PUNCTUATION_SPACE>', - 'u\2009': '<THIN_SPACE>', - 'u\200A': '<HAIR_SPACE>', - 'u\200B': '<ZERO_WIDTH_SPACE>', - 'u\202F': '<NNBSP>', - 'u\205F': '<MMSP>', - 'u\3000': '<IDEOGRAPHIC_SPACE>', + u'\u00A0': '<NBSP>', + u'\u1680': '<OGHAM_SPACE_MARK>', + u'\u180E': '<MONGOLIAN_VOWEL_SEPARATOR>', + u'\u2000': '<EN_QUAD>', + u'\u2001': '<EM_QUAD>', + u'\u2002': '<EN_SPACE>', + u'\u2003': '<EM_SPACE>', + u'\u2004': '<THREE-PER-EM_SPACE>', + u'\u2005': '<FOUR-PER-EM_SPACE>', + u'\u2006': '<SIX-PER-EM_SPACE>', + u'\u2007': '<FIGURE_SPACE>', + u'\u2008': '<PUNCTUATION_SPACE>', + u'\u2009': '<THIN_SPACE>', + u'\u200A': '<HAIR_SPACE>', + u'\u200B': '<ZERO_WIDTH_SPACE>', + u'\u202F': '<NNBSP>', + u'\u205F': '<MMSP>', + u'\u3000': '<IDEOGRAPHIC_SPACE>', } def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): - super(White,self).__init__() + super(White, self).__init__() self.matchWhite = ws - self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) ) - #~ self.leaveWhitespace() + self.setWhitespaceChars("".join(c for c in self.whiteChars if c not in self.matchWhite)) + # ~ self.leaveWhitespace() self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite)) self.mayReturnEmpty = True self.errmsg = "Expected " + self.name @@ -3375,13 +3687,13 @@ class White(Token): self.maxLen = exact self.minLen = exact - def parseImpl( self, instring, loc, doActions=True ): - if not(instring[ loc ] in self.matchWhite): + def 
parseImpl(self, instring, loc, doActions=True): + if instring[loc] not in self.matchWhite: raise ParseException(instring, loc, self.errmsg, self) start = loc loc += 1 maxloc = start + self.maxLen - maxloc = min( maxloc, len(instring) ) + maxloc = min(maxloc, len(instring)) while loc < maxloc and instring[loc] in self.matchWhite: loc += 1 @@ -3392,9 +3704,9 @@ class White(Token): class _PositionToken(Token): - def __init__( self ): - super(_PositionToken,self).__init__() - self.name=self.__class__.__name__ + def __init__(self): + super(_PositionToken, self).__init__() + self.name = self.__class__.__name__ self.mayReturnEmpty = True self.mayIndexError = False @@ -3402,30 +3714,30 @@ class GoToColumn(_PositionToken): """Token to advance to a specific column of input text; useful for tabular report scraping. """ - def __init__( self, colno ): - super(GoToColumn,self).__init__() + def __init__(self, colno): + super(GoToColumn, self).__init__() self.col = colno - def preParse( self, instring, loc ): - if col(loc,instring) != self.col: + def preParse(self, instring, loc): + if col(loc, instring) != self.col: instrlen = len(instring) if self.ignoreExprs: - loc = self._skipIgnorables( instring, loc ) - while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col : + loc = self._skipIgnorables(instring, loc) + while loc < instrlen and instring[loc].isspace() and col(loc, instring) != self.col: loc += 1 return loc - def parseImpl( self, instring, loc, doActions=True ): - thiscol = col( loc, instring ) + def parseImpl(self, instring, loc, doActions=True): + thiscol = col(loc, instring) if thiscol > self.col: - raise ParseException( instring, loc, "Text not in expected column", self ) + raise ParseException(instring, loc, "Text not in expected column", self) newloc = loc + self.col - thiscol - ret = instring[ loc: newloc ] + ret = instring[loc: newloc] return newloc, ret class LineStart(_PositionToken): - """Matches if current position is at the 
beginning of a line within + r"""Matches if current position is at the beginning of a line within the parse string Example:: @@ -3446,11 +3758,11 @@ class LineStart(_PositionToken): ['AAA', ' and this line'] """ - def __init__( self ): - super(LineStart,self).__init__() + def __init__(self): + super(LineStart, self).__init__() self.errmsg = "Expected start of line" - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): if col(loc, instring) == 1: return loc, [] raise ParseException(instring, loc, self.errmsg, self) @@ -3459,19 +3771,19 @@ class LineEnd(_PositionToken): """Matches if current position is at the end of a line within the parse string """ - def __init__( self ): - super(LineEnd,self).__init__() - self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") ) + def __init__(self): + super(LineEnd, self).__init__() + self.setWhitespaceChars(ParserElement.DEFAULT_WHITE_CHARS.replace("\n", "")) self.errmsg = "Expected end of line" - def parseImpl( self, instring, loc, doActions=True ): - if loc<len(instring): + def parseImpl(self, instring, loc, doActions=True): + if loc < len(instring): if instring[loc] == "\n": - return loc+1, "\n" + return loc + 1, "\n" else: raise ParseException(instring, loc, self.errmsg, self) elif loc == len(instring): - return loc+1, [] + return loc + 1, [] else: raise ParseException(instring, loc, self.errmsg, self) @@ -3479,29 +3791,29 @@ class StringStart(_PositionToken): """Matches if current position is at the beginning of the parse string """ - def __init__( self ): - super(StringStart,self).__init__() + def __init__(self): + super(StringStart, self).__init__() self.errmsg = "Expected start of text" - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): if loc != 0: # see if entire string up to here is just whitespace and ignoreables - if loc != self.preParse( instring, 0 ): + if loc != 
self.preParse(instring, 0): raise ParseException(instring, loc, self.errmsg, self) return loc, [] class StringEnd(_PositionToken): """Matches if current position is at the end of the parse string """ - def __init__( self ): - super(StringEnd,self).__init__() + def __init__(self): + super(StringEnd, self).__init__() self.errmsg = "Expected end of text" - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): if loc < len(instring): raise ParseException(instring, loc, self.errmsg, self) elif loc == len(instring): - return loc+1, [] + return loc + 1, [] elif loc > len(instring): return loc, [] else: @@ -3516,15 +3828,15 @@ class WordStart(_PositionToken): the beginning of the string being parsed, or at the beginning of a line. """ - def __init__(self, wordChars = printables): - super(WordStart,self).__init__() + def __init__(self, wordChars=printables): + super(WordStart, self).__init__() self.wordChars = set(wordChars) self.errmsg = "Not at the start of a word" - def parseImpl(self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): if loc != 0: - if (instring[loc-1] in self.wordChars or - instring[loc] not in self.wordChars): + if (instring[loc - 1] in self.wordChars + or instring[loc] not in self.wordChars): raise ParseException(instring, loc, self.errmsg, self) return loc, [] @@ -3536,17 +3848,17 @@ class WordEnd(_PositionToken): will also match at the end of the string being parsed, or at the end of a line. 
""" - def __init__(self, wordChars = printables): - super(WordEnd,self).__init__() + def __init__(self, wordChars=printables): + super(WordEnd, self).__init__() self.wordChars = set(wordChars) self.skipWhitespace = False self.errmsg = "Not at the end of a word" - def parseImpl(self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): instrlen = len(instring) - if instrlen>0 and loc<instrlen: + if instrlen > 0 and loc < instrlen: if (instring[loc] in self.wordChars or - instring[loc-1] not in self.wordChars): + instring[loc - 1] not in self.wordChars): raise ParseException(instring, loc, self.errmsg, self) return loc, [] @@ -3555,90 +3867,89 @@ class ParseExpression(ParserElement): """Abstract subclass of ParserElement, for combining and post-processing parsed tokens. """ - def __init__( self, exprs, savelist = False ): - super(ParseExpression,self).__init__(savelist) - if isinstance( exprs, _generatorType ): + def __init__(self, exprs, savelist=False): + super(ParseExpression, self).__init__(savelist) + if isinstance(exprs, _generatorType): exprs = list(exprs) - if isinstance( exprs, basestring ): - self.exprs = [ ParserElement._literalStringClass( exprs ) ] - elif isinstance( exprs, Iterable ): + if isinstance(exprs, basestring): + self.exprs = [self._literalStringClass(exprs)] + elif isinstance(exprs, ParserElement): + self.exprs = [exprs] + elif isinstance(exprs, Iterable): exprs = list(exprs) # if sequence of strings provided, wrap with Literal - if all(isinstance(expr, basestring) for expr in exprs): - exprs = map(ParserElement._literalStringClass, exprs) + if any(isinstance(expr, basestring) for expr in exprs): + exprs = (self._literalStringClass(e) if isinstance(e, basestring) else e for e in exprs) self.exprs = list(exprs) else: try: - self.exprs = list( exprs ) + self.exprs = list(exprs) except TypeError: - self.exprs = [ exprs ] + self.exprs = [exprs] self.callPreparse = False - def __getitem__( self, i ): - return 
self.exprs[i] - - def append( self, other ): - self.exprs.append( other ) + def append(self, other): + self.exprs.append(other) self.strRepr = None return self - def leaveWhitespace( self ): + def leaveWhitespace(self): """Extends ``leaveWhitespace`` defined in base class, and also invokes ``leaveWhitespace`` on all contained expressions.""" self.skipWhitespace = False - self.exprs = [ e.copy() for e in self.exprs ] + self.exprs = [e.copy() for e in self.exprs] for e in self.exprs: e.leaveWhitespace() return self - def ignore( self, other ): - if isinstance( other, Suppress ): + def ignore(self, other): + if isinstance(other, Suppress): if other not in self.ignoreExprs: - super( ParseExpression, self).ignore( other ) + super(ParseExpression, self).ignore(other) for e in self.exprs: - e.ignore( self.ignoreExprs[-1] ) + e.ignore(self.ignoreExprs[-1]) else: - super( ParseExpression, self).ignore( other ) + super(ParseExpression, self).ignore(other) for e in self.exprs: - e.ignore( self.ignoreExprs[-1] ) + e.ignore(self.ignoreExprs[-1]) return self - def __str__( self ): + def __str__(self): try: - return super(ParseExpression,self).__str__() + return super(ParseExpression, self).__str__() except Exception: pass if self.strRepr is None: - self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.exprs) ) + self.strRepr = "%s:(%s)" % (self.__class__.__name__, _ustr(self.exprs)) return self.strRepr - def streamline( self ): - super(ParseExpression,self).streamline() + def streamline(self): + super(ParseExpression, self).streamline() for e in self.exprs: e.streamline() - # collapse nested And's of the form And( And( And( a,b), c), d) to And( a,b,c,d ) + # collapse nested And's of the form And(And(And(a, b), c), d) to And(a, b, c, d) # but only if there are no parse actions or resultsNames on the nested And's # (likewise for Or's and MatchFirst's) - if ( len(self.exprs) == 2 ): + if len(self.exprs) == 2: other = self.exprs[0] - if ( isinstance( other, self.__class__ 
) and - not(other.parseAction) and - other.resultsName is None and - not other.debug ): - self.exprs = other.exprs[:] + [ self.exprs[1] ] + if (isinstance(other, self.__class__) + and not other.parseAction + and other.resultsName is None + and not other.debug): + self.exprs = other.exprs[:] + [self.exprs[1]] self.strRepr = None self.mayReturnEmpty |= other.mayReturnEmpty self.mayIndexError |= other.mayIndexError other = self.exprs[-1] - if ( isinstance( other, self.__class__ ) and - not(other.parseAction) and - other.resultsName is None and - not other.debug ): + if (isinstance(other, self.__class__) + and not other.parseAction + and other.resultsName is None + and not other.debug): self.exprs = self.exprs[:-1] + other.exprs[:] self.strRepr = None self.mayReturnEmpty |= other.mayReturnEmpty @@ -3648,21 +3959,31 @@ class ParseExpression(ParserElement): return self - def setResultsName( self, name, listAllMatches=False ): - ret = super(ParseExpression,self).setResultsName(name,listAllMatches) - return ret - - def validate( self, validateTrace=[] ): - tmp = validateTrace[:]+[self] + def validate(self, validateTrace=None): + tmp = (validateTrace if validateTrace is not None else [])[:] + [self] for e in self.exprs: e.validate(tmp) - self.checkRecursion( [] ) + self.checkRecursion([]) def copy(self): - ret = super(ParseExpression,self).copy() + ret = super(ParseExpression, self).copy() ret.exprs = [e.copy() for e in self.exprs] return ret + def _setResultsName(self, name, listAllMatches=False): + if __diag__.warn_ungrouped_named_tokens_in_collection: + for e in self.exprs: + if isinstance(e, ParserElement) and e.resultsName: + warnings.warn("{0}: setting results name {1!r} on {2} expression " + "collides with {3!r} on contained expression".format("warn_ungrouped_named_tokens_in_collection", + name, + type(self).__name__, + e.resultsName), + stacklevel=3) + + return super(ParseExpression, self)._setResultsName(name, listAllMatches) + + class And(ParseExpression): """ 
Requires all given :class:`ParseExpression` s to be found in the given order. @@ -3676,33 +3997,59 @@ class And(ParseExpression): integer = Word(nums) name_expr = OneOrMore(Word(alphas)) - expr = And([integer("id"),name_expr("name"),integer("age")]) + expr = And([integer("id"), name_expr("name"), integer("age")]) # more easily written as: expr = integer("id") + name_expr("name") + integer("age") """ class _ErrorStop(Empty): def __init__(self, *args, **kwargs): - super(And._ErrorStop,self).__init__(*args, **kwargs) + super(And._ErrorStop, self).__init__(*args, **kwargs) self.name = '-' self.leaveWhitespace() - def __init__( self, exprs, savelist = True ): - super(And,self).__init__(exprs, savelist) + def __init__(self, exprs, savelist=True): + exprs = list(exprs) + if exprs and Ellipsis in exprs: + tmp = [] + for i, expr in enumerate(exprs): + if expr is Ellipsis: + if i < len(exprs) - 1: + skipto_arg = (Empty() + exprs[i + 1]).exprs[-1] + tmp.append(SkipTo(skipto_arg)("_skipped*")) + else: + raise Exception("cannot construct And with sequence ending in ...") + else: + tmp.append(expr) + exprs[:] = tmp + super(And, self).__init__(exprs, savelist) self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) - self.setWhitespaceChars( self.exprs[0].whiteChars ) + self.setWhitespaceChars(self.exprs[0].whiteChars) self.skipWhitespace = self.exprs[0].skipWhitespace self.callPreparse = True def streamline(self): + # collapse any _PendingSkip's + if self.exprs: + if any(isinstance(e, ParseExpression) and e.exprs and isinstance(e.exprs[-1], _PendingSkip) + for e in self.exprs[:-1]): + for i, e in enumerate(self.exprs[:-1]): + if e is None: + continue + if (isinstance(e, ParseExpression) + and e.exprs and isinstance(e.exprs[-1], _PendingSkip)): + e.exprs[-1] = e.exprs[-1] + self.exprs[i + 1] + self.exprs[i + 1] = None + self.exprs = [e for e in self.exprs if e is not None] + super(And, self).streamline() self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) 
return self - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): # pass False as last arg to _parse for first element, since we already # pre-parsed the string as part of our And pre-parsing - loc, resultlist = self.exprs[0]._parse( instring, loc, doActions, callPreParse=False ) + loc, resultlist = self.exprs[0]._parse(instring, loc, doActions, callPreParse=False) errorStop = False for e in self.exprs[1:]: if isinstance(e, And._ErrorStop): @@ -3710,7 +4057,7 @@ class And(ParseExpression): continue if errorStop: try: - loc, exprtokens = e._parse( instring, loc, doActions ) + loc, exprtokens = e._parse(instring, loc, doActions) except ParseSyntaxException: raise except ParseBaseException as pe: @@ -3719,25 +4066,25 @@ class And(ParseExpression): except IndexError: raise ParseSyntaxException(instring, len(instring), self.errmsg, self) else: - loc, exprtokens = e._parse( instring, loc, doActions ) + loc, exprtokens = e._parse(instring, loc, doActions) if exprtokens or exprtokens.haskeys(): resultlist += exprtokens return loc, resultlist - def __iadd__(self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - return self.append( other ) #And( [ self, other ] ) + def __iadd__(self, other): + if isinstance(other, basestring): + other = self._literalStringClass(other) + return self.append(other) # And([self, other]) - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] + def checkRecursion(self, parseElementList): + subRecCheckList = parseElementList[:] + [self] for e in self.exprs: - e.checkRecursion( subRecCheckList ) + e.checkRecursion(subRecCheckList) if not e.mayReturnEmpty: break - def __str__( self ): - if hasattr(self,"name"): + def __str__(self): + if hasattr(self, "name"): return self.name if self.strRepr is None: @@ -3763,8 +4110,8 @@ class Or(ParseExpression): [['123'], ['3.1416'], ['789']] """ - def __init__( 
self, exprs, savelist = False ): - super(Or,self).__init__(exprs, savelist) + def __init__(self, exprs, savelist=False): + super(Or, self).__init__(exprs, savelist) if self.exprs: self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) else: @@ -3772,16 +4119,17 @@ class Or(ParseExpression): def streamline(self): super(Or, self).streamline() - self.saveAsList = any(e.saveAsList for e in self.exprs) + if __compat__.collect_all_And_tokens: + self.saveAsList = any(e.saveAsList for e in self.exprs) return self - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): maxExcLoc = -1 maxException = None matches = [] for e in self.exprs: try: - loc2 = e.tryParse( instring, loc ) + loc2 = e.tryParse(instring, loc) except ParseException as err: err.__traceback__ = None if err.loc > maxExcLoc: @@ -3789,22 +4137,45 @@ class Or(ParseExpression): maxExcLoc = err.loc except IndexError: if len(instring) > maxExcLoc: - maxException = ParseException(instring,len(instring),e.errmsg,self) + maxException = ParseException(instring, len(instring), e.errmsg, self) maxExcLoc = len(instring) else: # save match among all matches, to retry longest to shortest matches.append((loc2, e)) if matches: - matches.sort(key=lambda x: -x[0]) - for _,e in matches: + # re-evaluate all matches in descending order of length of match, in case attached actions + # might change whether or how much they match of the input. 
+ matches.sort(key=itemgetter(0), reverse=True) + + if not doActions: + # no further conditions or parse actions to change the selection of + # alternative, so the first match will be the best match + best_expr = matches[0][1] + return best_expr._parse(instring, loc, doActions) + + longest = -1, None + for loc1, expr1 in matches: + if loc1 <= longest[0]: + # already have a longer match than this one will deliver, we are done + return longest + try: - return e._parse( instring, loc, doActions ) + loc2, toks = expr1._parse(instring, loc, doActions) except ParseException as err: err.__traceback__ = None if err.loc > maxExcLoc: maxException = err maxExcLoc = err.loc + else: + if loc2 >= loc1: + return loc2, toks + # didn't match as much as before + elif loc2 > longest[0]: + longest = loc2, toks + + if longest != (-1, None): + return longest if maxException is not None: maxException.msg = self.errmsg @@ -3813,13 +4184,13 @@ class Or(ParseExpression): raise ParseException(instring, loc, "no defined alternatives to match", self) - def __ixor__(self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - return self.append( other ) #Or( [ self, other ] ) + def __ixor__(self, other): + if isinstance(other, basestring): + other = self._literalStringClass(other) + return self.append(other) # Or([self, other]) - def __str__( self ): - if hasattr(self,"name"): + def __str__(self): + if hasattr(self, "name"): return self.name if self.strRepr is None: @@ -3827,10 +4198,22 @@ class Or(ParseExpression): return self.strRepr - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] + def checkRecursion(self, parseElementList): + subRecCheckList = parseElementList[:] + [self] for e in self.exprs: - e.checkRecursion( subRecCheckList ) + e.checkRecursion(subRecCheckList) + + def _setResultsName(self, name, listAllMatches=False): + if (not __compat__.collect_all_And_tokens + and 
__diag__.warn_multiple_tokens_in_named_alternation): + if any(isinstance(e, And) for e in self.exprs): + warnings.warn("{0}: setting results name {1!r} on {2} expression " + "may only return a single token for an And alternative, " + "in future will return the full list of tokens".format( + "warn_multiple_tokens_in_named_alternation", name, type(self).__name__), + stacklevel=3) + + return super(Or, self)._setResultsName(name, listAllMatches) class MatchFirst(ParseExpression): @@ -3850,25 +4233,25 @@ class MatchFirst(ParseExpression): number = Combine(Word(nums) + '.' + Word(nums)) | Word(nums) print(number.searchString("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']] """ - def __init__( self, exprs, savelist = False ): - super(MatchFirst,self).__init__(exprs, savelist) + def __init__(self, exprs, savelist=False): + super(MatchFirst, self).__init__(exprs, savelist) if self.exprs: self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) - # self.saveAsList = any(e.saveAsList for e in self.exprs) else: self.mayReturnEmpty = True def streamline(self): super(MatchFirst, self).streamline() - self.saveAsList = any(e.saveAsList for e in self.exprs) + if __compat__.collect_all_And_tokens: + self.saveAsList = any(e.saveAsList for e in self.exprs) return self - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): maxExcLoc = -1 maxException = None for e in self.exprs: try: - ret = e._parse( instring, loc, doActions ) + ret = e._parse(instring, loc, doActions) return ret except ParseException as err: if err.loc > maxExcLoc: @@ -3876,7 +4259,7 @@ class MatchFirst(ParseExpression): maxExcLoc = err.loc except IndexError: if len(instring) > maxExcLoc: - maxException = ParseException(instring,len(instring),e.errmsg,self) + maxException = ParseException(instring, len(instring), e.errmsg, self) maxExcLoc = len(instring) # only got here if no expression matched, raise exception for match that made it the 
furthest @@ -3887,13 +4270,13 @@ class MatchFirst(ParseExpression): else: raise ParseException(instring, loc, "no defined alternatives to match", self) - def __ior__(self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - return self.append( other ) #MatchFirst( [ self, other ] ) + def __ior__(self, other): + if isinstance(other, basestring): + other = self._literalStringClass(other) + return self.append(other) # MatchFirst([self, other]) - def __str__( self ): - if hasattr(self,"name"): + def __str__(self): + if hasattr(self, "name"): return self.name if self.strRepr is None: @@ -3901,10 +4284,22 @@ class MatchFirst(ParseExpression): return self.strRepr - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] + def checkRecursion(self, parseElementList): + subRecCheckList = parseElementList[:] + [self] for e in self.exprs: - e.checkRecursion( subRecCheckList ) + e.checkRecursion(subRecCheckList) + + def _setResultsName(self, name, listAllMatches=False): + if (not __compat__.collect_all_And_tokens + and __diag__.warn_multiple_tokens_in_named_alternation): + if any(isinstance(e, And) for e in self.exprs): + warnings.warn("{0}: setting results name {1!r} on {2} expression " + "may only return a single token for an And alternative, " + "in future will return the full list of tokens".format( + "warn_multiple_tokens_in_named_alternation", name, type(self).__name__), + stacklevel=3) + + return super(MatchFirst, self)._setResultsName(name, listAllMatches) class Each(ParseExpression): @@ -3964,8 +4359,8 @@ class Each(ParseExpression): - shape: TRIANGLE - size: 20 """ - def __init__( self, exprs, savelist = True ): - super(Each,self).__init__(exprs, savelist) + def __init__(self, exprs, savelist=True): + super(Each, self).__init__(exprs, savelist) self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) self.skipWhitespace = True self.initExprGroups = True @@ -3976,15 
+4371,15 @@ class Each(ParseExpression): self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) return self - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): if self.initExprGroups: - self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional)) - opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ] - opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)] + self.opt1map = dict((id(e.expr), e) for e in self.exprs if isinstance(e, Optional)) + opt1 = [e.expr for e in self.exprs if isinstance(e, Optional)] + opt2 = [e for e in self.exprs if e.mayReturnEmpty and not isinstance(e, (Optional, Regex))] self.optionals = opt1 + opt2 - self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ] - self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ] - self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ] + self.multioptionals = [e.expr for e in self.exprs if isinstance(e, ZeroOrMore)] + self.multirequired = [e.expr for e in self.exprs if isinstance(e, OneOrMore)] + self.required = [e for e in self.exprs if not isinstance(e, (Optional, ZeroOrMore, OneOrMore))] self.required += self.multirequired self.initExprGroups = False tmpLoc = loc @@ -3998,11 +4393,11 @@ class Each(ParseExpression): failed = [] for e in tmpExprs: try: - tmpLoc = e.tryParse( instring, tmpLoc ) + tmpLoc = e.tryParse(instring, tmpLoc) except ParseException: failed.append(e) else: - matchOrder.append(self.opt1map.get(id(e),e)) + matchOrder.append(self.opt1map.get(id(e), e)) if e in tmpReqd: tmpReqd.remove(e) elif e in tmpOpt: @@ -4012,21 +4407,21 @@ class Each(ParseExpression): if tmpReqd: missing = ", ".join(_ustr(e) for e in tmpReqd) - raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing ) + raise ParseException(instring, loc, "Missing one or more required 
elements (%s)" % missing) # add any unmatched Optionals, in case they have default values defined - matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt] + matchOrder += [e for e in self.exprs if isinstance(e, Optional) and e.expr in tmpOpt] resultlist = [] for e in matchOrder: - loc,results = e._parse(instring,loc,doActions) + loc, results = e._parse(instring, loc, doActions) resultlist.append(results) finalResults = sum(resultlist, ParseResults([])) return loc, finalResults - def __str__( self ): - if hasattr(self,"name"): + def __str__(self): + if hasattr(self, "name"): return self.name if self.strRepr is None: @@ -4034,86 +4429,88 @@ class Each(ParseExpression): return self.strRepr - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] + def checkRecursion(self, parseElementList): + subRecCheckList = parseElementList[:] + [self] for e in self.exprs: - e.checkRecursion( subRecCheckList ) + e.checkRecursion(subRecCheckList) class ParseElementEnhance(ParserElement): """Abstract subclass of :class:`ParserElement`, for combining and post-processing parsed tokens. 
""" - def __init__( self, expr, savelist=False ): - super(ParseElementEnhance,self).__init__(savelist) - if isinstance( expr, basestring ): - if issubclass(ParserElement._literalStringClass, Token): - expr = ParserElement._literalStringClass(expr) + def __init__(self, expr, savelist=False): + super(ParseElementEnhance, self).__init__(savelist) + if isinstance(expr, basestring): + if issubclass(self._literalStringClass, Token): + expr = self._literalStringClass(expr) else: - expr = ParserElement._literalStringClass(Literal(expr)) + expr = self._literalStringClass(Literal(expr)) self.expr = expr self.strRepr = None if expr is not None: self.mayIndexError = expr.mayIndexError self.mayReturnEmpty = expr.mayReturnEmpty - self.setWhitespaceChars( expr.whiteChars ) + self.setWhitespaceChars(expr.whiteChars) self.skipWhitespace = expr.skipWhitespace self.saveAsList = expr.saveAsList self.callPreparse = expr.callPreparse self.ignoreExprs.extend(expr.ignoreExprs) - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): if self.expr is not None: - return self.expr._parse( instring, loc, doActions, callPreParse=False ) + return self.expr._parse(instring, loc, doActions, callPreParse=False) else: - raise ParseException("",loc,self.errmsg,self) + raise ParseException("", loc, self.errmsg, self) - def leaveWhitespace( self ): + def leaveWhitespace(self): self.skipWhitespace = False self.expr = self.expr.copy() if self.expr is not None: self.expr.leaveWhitespace() return self - def ignore( self, other ): - if isinstance( other, Suppress ): + def ignore(self, other): + if isinstance(other, Suppress): if other not in self.ignoreExprs: - super( ParseElementEnhance, self).ignore( other ) + super(ParseElementEnhance, self).ignore(other) if self.expr is not None: - self.expr.ignore( self.ignoreExprs[-1] ) + self.expr.ignore(self.ignoreExprs[-1]) else: - super( ParseElementEnhance, self).ignore( other ) + super(ParseElementEnhance, 
self).ignore(other) if self.expr is not None: - self.expr.ignore( self.ignoreExprs[-1] ) + self.expr.ignore(self.ignoreExprs[-1]) return self - def streamline( self ): - super(ParseElementEnhance,self).streamline() + def streamline(self): + super(ParseElementEnhance, self).streamline() if self.expr is not None: self.expr.streamline() return self - def checkRecursion( self, parseElementList ): + def checkRecursion(self, parseElementList): if self in parseElementList: - raise RecursiveGrammarException( parseElementList+[self] ) - subRecCheckList = parseElementList[:] + [ self ] + raise RecursiveGrammarException(parseElementList + [self]) + subRecCheckList = parseElementList[:] + [self] if self.expr is not None: - self.expr.checkRecursion( subRecCheckList ) + self.expr.checkRecursion(subRecCheckList) - def validate( self, validateTrace=[] ): - tmp = validateTrace[:]+[self] + def validate(self, validateTrace=None): + if validateTrace is None: + validateTrace = [] + tmp = validateTrace[:] + [self] if self.expr is not None: self.expr.validate(tmp) - self.checkRecursion( [] ) + self.checkRecursion([]) - def __str__( self ): + def __str__(self): try: - return super(ParseElementEnhance,self).__str__() + return super(ParseElementEnhance, self).__str__() except Exception: pass if self.strRepr is None and self.expr is not None: - self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) ) + self.strRepr = "%s:(%s)" % (self.__class__.__name__, _ustr(self.expr)) return self.strRepr @@ -4139,13 +4536,16 @@ class FollowedBy(ParseElementEnhance): [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']] """ - def __init__( self, expr ): - super(FollowedBy,self).__init__(expr) + def __init__(self, expr): + super(FollowedBy, self).__init__(expr) self.mayReturnEmpty = True - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): + # by using self._expr.parse and deleting the contents of the returned ParseResults 
list + # we keep any named results that were defined in the FollowedBy expression _, ret = self.expr._parse(instring, loc, doActions=doActions) del ret[:] + return loc, ret @@ -4198,6 +4598,7 @@ class PrecededBy(ParseElementEnhance): self.retreat = retreat self.errmsg = "not preceded by " + str(expr) self.skipWhitespace = False + self.parseAction.append(lambda s, l, t: t.__delitem__(slice(None, None))) def parseImpl(self, instring, loc=0, doActions=True): if self.exact: @@ -4208,19 +4609,18 @@ class PrecededBy(ParseElementEnhance): else: # retreat specified a maximum lookbehind window, iterate test_expr = self.expr + StringEnd() - instring_slice = instring[:loc] + instring_slice = instring[max(0, loc - self.retreat):loc] last_expr = ParseException(instring, loc, self.errmsg) - for offset in range(1, min(loc, self.retreat+1)): + for offset in range(1, min(loc, self.retreat + 1)+1): try: - _, ret = test_expr._parse(instring_slice, loc-offset) + # print('trying', offset, instring_slice, repr(instring_slice[loc - offset:])) + _, ret = test_expr._parse(instring_slice, len(instring_slice) - offset) except ParseBaseException as pbe: last_expr = pbe else: break else: raise last_expr - # return empty list of tokens, but preserve any defined results names - del ret[:] return loc, ret @@ -4247,20 +4647,20 @@ class NotAny(ParseElementEnhance): # integers that are followed by "." 
are actually floats integer = Word(nums) + ~Char(".") """ - def __init__( self, expr ): - super(NotAny,self).__init__(expr) - #~ self.leaveWhitespace() + def __init__(self, expr): + super(NotAny, self).__init__(expr) + # ~ self.leaveWhitespace() self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs self.mayReturnEmpty = True - self.errmsg = "Found unwanted token, "+_ustr(self.expr) + self.errmsg = "Found unwanted token, " + _ustr(self.expr) - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): if self.expr.canParseNext(instring, loc): raise ParseException(instring, loc, self.errmsg, self) return loc, [] - def __str__( self ): - if hasattr(self,"name"): + def __str__(self): + if hasattr(self, "name"): return self.name if self.strRepr is None: @@ -4269,15 +4669,21 @@ class NotAny(ParseElementEnhance): return self.strRepr class _MultipleMatch(ParseElementEnhance): - def __init__( self, expr, stopOn=None): + def __init__(self, expr, stopOn=None): super(_MultipleMatch, self).__init__(expr) self.saveAsList = True ender = stopOn if isinstance(ender, basestring): - ender = ParserElement._literalStringClass(ender) + ender = self._literalStringClass(ender) + self.stopOn(ender) + + def stopOn(self, ender): + if isinstance(ender, basestring): + ender = self._literalStringClass(ender) self.not_ender = ~ender if ender is not None else None + return self - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): self_expr_parse = self.expr._parse self_skip_ignorables = self._skipIgnorables check_ender = self.not_ender is not None @@ -4288,24 +4694,38 @@ class _MultipleMatch(ParseElementEnhance): # if so, fail) if check_ender: try_not_ender(instring, loc) - loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False ) + loc, tokens = self_expr_parse(instring, loc, doActions, callPreParse=False) try: hasIgnoreExprs = 
(not not self.ignoreExprs) while 1: if check_ender: try_not_ender(instring, loc) if hasIgnoreExprs: - preloc = self_skip_ignorables( instring, loc ) + preloc = self_skip_ignorables(instring, loc) else: preloc = loc - loc, tmptokens = self_expr_parse( instring, preloc, doActions ) + loc, tmptokens = self_expr_parse(instring, preloc, doActions) if tmptokens or tmptokens.haskeys(): tokens += tmptokens - except (ParseException,IndexError): + except (ParseException, IndexError): pass return loc, tokens + def _setResultsName(self, name, listAllMatches=False): + if __diag__.warn_ungrouped_named_tokens_in_collection: + for e in [self.expr] + getattr(self.expr, 'exprs', []): + if isinstance(e, ParserElement) and e.resultsName: + warnings.warn("{0}: setting results name {1!r} on {2} expression " + "collides with {3!r} on contained expression".format("warn_ungrouped_named_tokens_in_collection", + name, + type(self).__name__, + e.resultsName), + stacklevel=3) + + return super(_MultipleMatch, self)._setResultsName(name, listAllMatches) + + class OneOrMore(_MultipleMatch): """Repetition of one or more of the given expression. 
@@ -4332,8 +4752,8 @@ class OneOrMore(_MultipleMatch): (attr_expr * (1,)).parseString(text).pprint() """ - def __str__( self ): - if hasattr(self,"name"): + def __str__(self): + if hasattr(self, "name"): return self.name if self.strRepr is None: @@ -4352,18 +4772,18 @@ class ZeroOrMore(_MultipleMatch): Example: similar to :class:`OneOrMore` """ - def __init__( self, expr, stopOn=None): - super(ZeroOrMore,self).__init__(expr, stopOn=stopOn) + def __init__(self, expr, stopOn=None): + super(ZeroOrMore, self).__init__(expr, stopOn=stopOn) self.mayReturnEmpty = True - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): try: return super(ZeroOrMore, self).parseImpl(instring, loc, doActions) - except (ParseException,IndexError): + except (ParseException, IndexError): return loc, [] - def __str__( self ): - if hasattr(self,"name"): + def __str__(self): + if hasattr(self, "name"): return self.name if self.strRepr is None: @@ -4371,6 +4791,7 @@ class ZeroOrMore(_MultipleMatch): return self.strRepr + class _NullToken(object): def __bool__(self): return False @@ -4378,7 +4799,6 @@ class _NullToken(object): def __str__(self): return "" -_optionalNotMatched = _NullToken() class Optional(ParseElementEnhance): """Optional matching of the given expression. 
@@ -4416,28 +4836,30 @@ class Optional(ParseElementEnhance): ^ FAIL: Expected end of text (at char 5), (line:1, col:6) """ - def __init__( self, expr, default=_optionalNotMatched ): - super(Optional,self).__init__( expr, savelist=False ) + __optionalNotMatched = _NullToken() + + def __init__(self, expr, default=__optionalNotMatched): + super(Optional, self).__init__(expr, savelist=False) self.saveAsList = self.expr.saveAsList self.defaultValue = default self.mayReturnEmpty = True - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): try: - loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) - except (ParseException,IndexError): - if self.defaultValue is not _optionalNotMatched: + loc, tokens = self.expr._parse(instring, loc, doActions, callPreParse=False) + except (ParseException, IndexError): + if self.defaultValue is not self.__optionalNotMatched: if self.expr.resultsName: - tokens = ParseResults([ self.defaultValue ]) + tokens = ParseResults([self.defaultValue]) tokens[self.expr.resultsName] = self.defaultValue else: - tokens = [ self.defaultValue ] + tokens = [self.defaultValue] else: tokens = [] return loc, tokens - def __str__( self ): - if hasattr(self,"name"): + def __str__(self): + if hasattr(self, "name"): return self.name if self.strRepr is None: @@ -4503,20 +4925,20 @@ class SkipTo(ParseElementEnhance): - issue_num: 79 - sev: Minor """ - def __init__( self, other, include=False, ignore=None, failOn=None ): - super( SkipTo, self ).__init__( other ) + def __init__(self, other, include=False, ignore=None, failOn=None): + super(SkipTo, self).__init__(other) self.ignoreExpr = ignore self.mayReturnEmpty = True self.mayIndexError = False self.includeMatch = include self.saveAsList = False if isinstance(failOn, basestring): - self.failOn = ParserElement._literalStringClass(failOn) + self.failOn = self._literalStringClass(failOn) else: self.failOn = failOn - self.errmsg = "No 
match found for "+_ustr(self.expr) + self.errmsg = "No match found for " + _ustr(self.expr) - def parseImpl( self, instring, loc, doActions=True ): + def parseImpl(self, instring, loc, doActions=True): startloc = loc instrlen = len(instring) expr = self.expr @@ -4558,7 +4980,7 @@ class SkipTo(ParseElementEnhance): skipresult = ParseResults(skiptext) if self.includeMatch: - loc, mat = expr_parse(instring,loc,doActions,callPreParse=False) + loc, mat = expr_parse(instring, loc, doActions, callPreParse=False) skipresult += mat return loc, skipresult @@ -4590,17 +5012,17 @@ class Forward(ParseElementEnhance): See :class:`ParseResults.pprint` for an example of a recursive parser created using ``Forward``. """ - def __init__( self, other=None ): - super(Forward,self).__init__( other, savelist=False ) + def __init__(self, other=None): + super(Forward, self).__init__(other, savelist=False) - def __lshift__( self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass(other) + def __lshift__(self, other): + if isinstance(other, basestring): + other = self._literalStringClass(other) self.expr = other self.strRepr = None self.mayIndexError = self.expr.mayIndexError self.mayReturnEmpty = self.expr.mayReturnEmpty - self.setWhitespaceChars( self.expr.whiteChars ) + self.setWhitespaceChars(self.expr.whiteChars) self.skipWhitespace = self.expr.skipWhitespace self.saveAsList = self.expr.saveAsList self.ignoreExprs.extend(self.expr.ignoreExprs) @@ -4609,59 +5031,72 @@ class Forward(ParseElementEnhance): def __ilshift__(self, other): return self << other - def leaveWhitespace( self ): + def leaveWhitespace(self): self.skipWhitespace = False return self - def streamline( self ): + def streamline(self): if not self.streamlined: self.streamlined = True if self.expr is not None: self.expr.streamline() return self - def validate( self, validateTrace=[] ): + def validate(self, validateTrace=None): + if validateTrace is None: + validateTrace = [] + if self 
not in validateTrace: - tmp = validateTrace[:]+[self] + tmp = validateTrace[:] + [self] if self.expr is not None: self.expr.validate(tmp) self.checkRecursion([]) - def __str__( self ): - if hasattr(self,"name"): + def __str__(self): + if hasattr(self, "name"): return self.name - return self.__class__.__name__ + ": ..." + if self.strRepr is not None: + return self.strRepr + + # Avoid infinite recursion by setting a temporary strRepr + self.strRepr = ": ..." - # stubbed out for now - creates awful memory and perf issues - self._revertClass = self.__class__ - self.__class__ = _ForwardNoRecurse + # Use the string representation of main expression. + retString = '...' try: if self.expr is not None: - retString = _ustr(self.expr) + retString = _ustr(self.expr)[:1000] else: retString = "None" finally: - self.__class__ = self._revertClass - return self.__class__.__name__ + ": " + retString + self.strRepr = self.__class__.__name__ + ": " + retString + return self.strRepr def copy(self): if self.expr is not None: - return super(Forward,self).copy() + return super(Forward, self).copy() else: ret = Forward() ret <<= self return ret -class _ForwardNoRecurse(Forward): - def __str__( self ): - return "..." + def _setResultsName(self, name, listAllMatches=False): + if __diag__.warn_name_set_on_empty_Forward: + if self.expr is None: + warnings.warn("{0}: setting results name {0!r} on {1} expression " + "that has no contained expression".format("warn_name_set_on_empty_Forward", + name, + type(self).__name__), + stacklevel=3) + + return super(Forward, self)._setResultsName(name, listAllMatches) class TokenConverter(ParseElementEnhance): """ Abstract subclass of :class:`ParseExpression`, for converting parsed results. 
""" - def __init__( self, expr, savelist=False ): - super(TokenConverter,self).__init__( expr )#, savelist ) + def __init__(self, expr, savelist=False): + super(TokenConverter, self).__init__(expr) # , savelist) self.saveAsList = False class Combine(TokenConverter): @@ -4682,8 +5117,8 @@ class Combine(TokenConverter): # no match when there are internal spaces print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...) """ - def __init__( self, expr, joinString="", adjacent=True ): - super(Combine,self).__init__( expr ) + def __init__(self, expr, joinString="", adjacent=True): + super(Combine, self).__init__(expr) # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself if adjacent: self.leaveWhitespace() @@ -4692,20 +5127,20 @@ class Combine(TokenConverter): self.joinString = joinString self.callPreparse = True - def ignore( self, other ): + def ignore(self, other): if self.adjacent: ParserElement.ignore(self, other) else: - super( Combine, self).ignore( other ) + super(Combine, self).ignore(other) return self - def postParse( self, instring, loc, tokenlist ): + def postParse(self, instring, loc, tokenlist): retToks = tokenlist.copy() del retToks[:] - retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults) + retToks += ParseResults(["".join(tokenlist._asStringList(self.joinString))], modal=self.modalResults) if self.resultsName and retToks.haskeys(): - return [ retToks ] + return [retToks] else: return retToks @@ -4719,17 +5154,17 @@ class Group(TokenConverter): num = Word(nums) term = ident | num func = ident + Optional(delimitedList(term)) - print(func.parseString("fn a,b,100")) # -> ['fn', 'a', 'b', '100'] + print(func.parseString("fn a, b, 100")) # -> ['fn', 'a', 'b', '100'] func = ident + Group(Optional(delimitedList(term))) - print(func.parseString("fn a,b,100")) # -> ['fn', ['a', 'b', '100']] + print(func.parseString("fn a, b, 100")) # -> ['fn', ['a', 
'b', '100']] """ - def __init__( self, expr ): - super(Group,self).__init__( expr ) - self.saveAsList = expr.saveAsList + def __init__(self, expr): + super(Group, self).__init__(expr) + self.saveAsList = True - def postParse( self, instring, loc, tokenlist ): - return [ tokenlist ] + def postParse(self, instring, loc, tokenlist): + return [tokenlist] class Dict(TokenConverter): """Converter to return a repetitive expression as a list, but also @@ -4770,31 +5205,31 @@ class Dict(TokenConverter): See more examples at :class:`ParseResults` of accessing fields by results name. """ - def __init__( self, expr ): - super(Dict,self).__init__( expr ) + def __init__(self, expr): + super(Dict, self).__init__(expr) self.saveAsList = True - def postParse( self, instring, loc, tokenlist ): - for i,tok in enumerate(tokenlist): + def postParse(self, instring, loc, tokenlist): + for i, tok in enumerate(tokenlist): if len(tok) == 0: continue ikey = tok[0] - if isinstance(ikey,int): + if isinstance(ikey, int): ikey = _ustr(tok[0]).strip() - if len(tok)==1: - tokenlist[ikey] = _ParseResultsWithOffset("",i) - elif len(tok)==2 and not isinstance(tok[1],ParseResults): - tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i) + if len(tok) == 1: + tokenlist[ikey] = _ParseResultsWithOffset("", i) + elif len(tok) == 2 and not isinstance(tok[1], ParseResults): + tokenlist[ikey] = _ParseResultsWithOffset(tok[1], i) else: - dictvalue = tok.copy() #ParseResults(i) + dictvalue = tok.copy() # ParseResults(i) del dictvalue[0] - if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()): - tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i) + if len(dictvalue) != 1 or (isinstance(dictvalue, ParseResults) and dictvalue.haskeys()): + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue, i) else: - tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i) + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0], i) if self.resultsName: - return [ tokenlist ] + return 
[tokenlist] else: return tokenlist @@ -4821,10 +5256,10 @@ class Suppress(TokenConverter): (See also :class:`delimitedList`.) """ - def postParse( self, instring, loc, tokenlist ): + def postParse(self, instring, loc, tokenlist): return [] - def suppress( self ): + def suppress(self): return self @@ -4834,12 +5269,12 @@ class OnlyOnce(object): def __init__(self, methodCall): self.callable = _trim_arity(methodCall) self.called = False - def __call__(self,s,l,t): + def __call__(self, s, l, t): if not self.called: - results = self.callable(s,l,t) + results = self.callable(s, l, t) self.called = True return results - raise ParseException(s,l,"") + raise ParseException(s, l, "") def reset(self): self.called = False @@ -4871,16 +5306,16 @@ def traceParseAction(f): f = _trim_arity(f) def z(*paArgs): thisFunc = f.__name__ - s,l,t = paArgs[-3:] - if len(paArgs)>3: + s, l, t = paArgs[-3:] + if len(paArgs) > 3: thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc - sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) ) + sys.stderr.write(">>entering %s(line: '%s', %d, %r)\n" % (thisFunc, line(l, s), l, t)) try: ret = f(*paArgs) except Exception as exc: - sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) ) + sys.stderr.write("<<leaving %s (exception: %s)\n" % (thisFunc, exc)) raise - sys.stderr.write( "<<leaving %s (ret: %r)\n" % (thisFunc,ret) ) + sys.stderr.write("<<leaving %s (ret: %r)\n" % (thisFunc, ret)) return ret try: z.__name__ = f.__name__ @@ -4891,7 +5326,7 @@ def traceParseAction(f): # # global helpers # -def delimitedList( expr, delim=",", combine=False ): +def delimitedList(expr, delim=",", combine=False): """Helper to define a delimited list of expressions - the delimiter defaults to ','. 
By default, the list elements and delimiters can have intervening whitespace, and comments, but this can be @@ -4906,13 +5341,13 @@ def delimitedList( expr, delim=",", combine=False ): delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc'] delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] """ - dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..." + dlName = _ustr(expr) + " [" + _ustr(delim) + " " + _ustr(expr) + "]..." if combine: - return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName) + return Combine(expr + ZeroOrMore(delim + expr)).setName(dlName) else: - return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName) + return (expr + ZeroOrMore(Suppress(delim) + expr)).setName(dlName) -def countedArray( expr, intExpr=None ): +def countedArray(expr, intExpr=None): """Helper to define a counted list of expressions. This helper defines a pattern of the form:: @@ -4936,22 +5371,22 @@ def countedArray( expr, intExpr=None ): countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd'] """ arrayExpr = Forward() - def countFieldParseAction(s,l,t): + def countFieldParseAction(s, l, t): n = t[0] - arrayExpr << (n and Group(And([expr]*n)) or Group(empty)) + arrayExpr << (n and Group(And([expr] * n)) or Group(empty)) return [] if intExpr is None: - intExpr = Word(nums).setParseAction(lambda t:int(t[0])) + intExpr = Word(nums).setParseAction(lambda t: int(t[0])) else: intExpr = intExpr.copy() intExpr.setName("arrayLen") intExpr.addParseAction(countFieldParseAction, callDuringTry=True) - return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...') + return (intExpr + arrayExpr).setName('(len) ' + _ustr(expr) + '...') def _flatten(L): ret = [] for i in L: - if isinstance(i,list): + if isinstance(i, list): ret.extend(_flatten(i)) else: ret.append(i) @@ -4973,7 +5408,7 @@ def matchPreviousLiteral(expr): enabled. 
""" rep = Forward() - def copyTokenToRepeater(s,l,t): + def copyTokenToRepeater(s, l, t): if t: if len(t) == 1: rep << t[0] @@ -5005,26 +5440,26 @@ def matchPreviousExpr(expr): rep = Forward() e2 = expr.copy() rep <<= e2 - def copyTokenToRepeater(s,l,t): + def copyTokenToRepeater(s, l, t): matchTokens = _flatten(t.asList()) - def mustMatchTheseTokens(s,l,t): + def mustMatchTheseTokens(s, l, t): theseTokens = _flatten(t.asList()) - if theseTokens != matchTokens: - raise ParseException("",0,"") - rep.setParseAction( mustMatchTheseTokens, callDuringTry=True ) + if theseTokens != matchTokens: + raise ParseException('', 0, '') + rep.setParseAction(mustMatchTheseTokens, callDuringTry=True) expr.addParseAction(copyTokenToRepeater, callDuringTry=True) rep.setName('(prev) ' + _ustr(expr)) return rep def _escapeRegexRangeChars(s): - #~ escape these chars: ^-] - for c in r"\^-]": - s = s.replace(c,_bslash+c) - s = s.replace("\n",r"\n") - s = s.replace("\t",r"\t") + # ~ escape these chars: ^-[] + for c in r"\^-[]": + s = s.replace(c, _bslash + c) + s = s.replace("\n", r"\n") + s = s.replace("\t", r"\t") return _ustr(s) -def oneOf( strs, caseless=False, useRegex=True ): +def oneOf(strs, caseless=False, useRegex=True, asKeyword=False): """Helper to quickly define a set of alternative Literals, and makes sure to do longest-first testing when there is a conflict, regardless of the input order, but returns @@ -5038,8 +5473,10 @@ def oneOf( strs, caseless=False, useRegex=True ): caseless - useRegex - (default= ``True``) - as an optimization, will generate a Regex object; otherwise, will generate - a :class:`MatchFirst` object (if ``caseless=True``, or if + a :class:`MatchFirst` object (if ``caseless=True`` or ``asKeyword=True``, or if creating a :class:`Regex` raises an exception) + - asKeyword - (default=``False``) - enforce Keyword-style matching on the + generated expressions Example:: @@ -5054,57 +5491,62 @@ def oneOf( strs, caseless=False, useRegex=True ): [['B', '=', '12'], 
['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] """ + if isinstance(caseless, basestring): + warnings.warn("More than one string argument passed to oneOf, pass " + "choices as a list or space-delimited string", stacklevel=2) + if caseless: - isequal = ( lambda a,b: a.upper() == b.upper() ) - masks = ( lambda a,b: b.upper().startswith(a.upper()) ) - parseElementClass = CaselessLiteral + isequal = (lambda a, b: a.upper() == b.upper()) + masks = (lambda a, b: b.upper().startswith(a.upper())) + parseElementClass = CaselessKeyword if asKeyword else CaselessLiteral else: - isequal = ( lambda a,b: a == b ) - masks = ( lambda a,b: b.startswith(a) ) - parseElementClass = Literal + isequal = (lambda a, b: a == b) + masks = (lambda a, b: b.startswith(a)) + parseElementClass = Keyword if asKeyword else Literal symbols = [] - if isinstance(strs,basestring): + if isinstance(strs, basestring): symbols = strs.split() elif isinstance(strs, Iterable): symbols = list(strs) else: warnings.warn("Invalid argument to oneOf, expected string or iterable", - SyntaxWarning, stacklevel=2) + SyntaxWarning, stacklevel=2) if not symbols: return NoMatch() - i = 0 - while i < len(symbols)-1: - cur = symbols[i] - for j,other in enumerate(symbols[i+1:]): - if ( isequal(other, cur) ): - del symbols[i+j+1] - break - elif ( masks(cur, other) ): - del symbols[i+j+1] - symbols.insert(i,other) - cur = other - break - else: - i += 1 + if not asKeyword: + # if not producing keywords, need to reorder to take care to avoid masking + # longer choices with shorter ones + i = 0 + while i < len(symbols) - 1: + cur = symbols[i] + for j, other in enumerate(symbols[i + 1:]): + if isequal(other, cur): + del symbols[i + j + 1] + break + elif masks(cur, other): + del symbols[i + j + 1] + symbols.insert(i, other) + break + else: + i += 1 - if not caseless and useRegex: - #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] )) + if not (caseless or asKeyword) and useRegex: + # ~ print 
(strs, "->", "|".join([_escapeRegexChars(sym) for sym in symbols])) try: - if len(symbols)==len("".join(symbols)): - return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols)) + if len(symbols) == len("".join(symbols)): + return Regex("[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols)).setName(' | '.join(symbols)) else: - return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols)) + return Regex("|".join(re.escape(sym) for sym in symbols)).setName(' | '.join(symbols)) except Exception: warnings.warn("Exception creating Regex for oneOf, building MatchFirst", SyntaxWarning, stacklevel=2) - # last resort, just use MatchFirst return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols)) -def dictOf( key, value ): +def dictOf(key, value): """Helper to easily and clearly define a dictionary by specifying the respective patterns for the key and value. Takes care of defining the :class:`Dict`, :class:`ZeroOrMore`, and @@ -5162,8 +5604,8 @@ def originalTextFor(expr, asString=True): Example:: src = "this is test <b> bold <i>text</i> </b> normal text " - for tag in ("b","i"): - opener,closer = makeHTMLTags(tag) + for tag in ("b", "i"): + opener, closer = makeHTMLTags(tag) patt = originalTextFor(opener + SkipTo(closer) + closer) print(patt.searchString(src)[0]) @@ -5172,14 +5614,14 @@ def originalTextFor(expr, asString=True): ['<b> bold <i>text</i> </b>'] ['<i>text</i>'] """ - locMarker = Empty().setParseAction(lambda s,loc,t: loc) + locMarker = Empty().setParseAction(lambda s, loc, t: loc) endlocMarker = locMarker.copy() endlocMarker.callPreparse = False matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end") if asString: - extractText = lambda s,l,t: s[t._original_start:t._original_end] + extractText = lambda s, l, t: s[t._original_start: t._original_end] else: - def extractText(s,l,t): + def extractText(s, l, t): t[:] = 
[s[t.pop('_original_start'):t.pop('_original_end')]] matchExpr.setParseAction(extractText) matchExpr.ignoreExprs = expr.ignoreExprs @@ -5189,7 +5631,7 @@ def ungroup(expr): """Helper to undo pyparsing's default grouping of And expressions, even if all but one are non-empty. """ - return TokenConverter(expr).setParseAction(lambda t:t[0]) + return TokenConverter(expr).addParseAction(lambda t: t[0]) def locatedExpr(expr): """Helper to decorate a returned token with its starting and ending @@ -5216,7 +5658,7 @@ def locatedExpr(expr): [[8, 'lksdjjf', 15]] [[18, 'lkkjj', 23]] """ - locator = Empty().setParseAction(lambda s,l,t: l) + locator = Empty().setParseAction(lambda s, l, t: l) return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end")) @@ -5227,12 +5669,12 @@ lineEnd = LineEnd().setName("lineEnd") stringStart = StringStart().setName("stringStart") stringEnd = StringEnd().setName("stringEnd") -_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1]) -_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16))) -_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8))) +_escapedPunc = Word(_bslash, r"\[]-*.$+^?()~ ", exact=2).setParseAction(lambda s, l, t: t[0][1]) +_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s, l, t: unichr(int(t[0].lstrip(r'\0x'), 16))) +_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s, l, t: unichr(int(t[0][1:], 8))) _singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1) _charRange = Group(_singleChar + Suppress("-") + _singleChar) -_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]" +_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group(OneOrMore(_charRange | 
_singleChar)).setResultsName("body") + "]" def srange(s): r"""Helper to easily define string ranges for use in Word @@ -5260,7 +5702,7 @@ def srange(s): - any combination of the above (``'aeiouy'``, ``'a-zA-Z0-9_$'``, etc.) """ - _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1)) + _expanded = lambda p: p if not isinstance(p, ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]), ord(p[1]) + 1)) try: return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body) except Exception: @@ -5270,9 +5712,9 @@ def matchOnlyAtCol(n): """Helper method for defining parse actions that require matching at a specific column in the input text. """ - def verifyCol(strg,locn,toks): - if col(locn,strg) != n: - raise ParseException(strg,locn,"matched token not at column %d" % n) + def verifyCol(strg, locn, toks): + if col(locn, strg) != n: + raise ParseException(strg, locn, "matched token not at column %d" % n) return verifyCol def replaceWith(replStr): @@ -5288,9 +5730,9 @@ def replaceWith(replStr): OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234] """ - return lambda s,l,t: [replStr] + return lambda s, l, t: [replStr] -def removeQuotes(s,l,t): +def removeQuotes(s, l, t): """Helper parse action for removing quotation marks from parsed quoted strings. @@ -5341,7 +5783,7 @@ def tokenMap(func, *args): now is the winter of our discontent made glorious summer by this sun of york ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York'] """ - def pa(s,l,t): + def pa(s, l, t): return [func(tokn, *args) for tokn in t] try: @@ -5361,33 +5803,41 @@ downcaseTokens = tokenMap(lambda t: _ustr(t).lower()) """(Deprecated) Helper parse action to convert tokens to lower case. 
Deprecated in favor of :class:`pyparsing_common.downcaseTokens`""" -def _makeTags(tagStr, xml): +def _makeTags(tagStr, xml, + suppress_LT=Suppress("<"), + suppress_GT=Suppress(">")): """Internal helper to construct opening and closing tag expressions, given a tag name""" - if isinstance(tagStr,basestring): + if isinstance(tagStr, basestring): resname = tagStr tagStr = Keyword(tagStr, caseless=not xml) else: resname = tagStr.name - tagAttrName = Word(alphas,alphanums+"_-:") - if (xml): - tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes ) - openTag = Suppress("<") + tagStr("tag") + \ - Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \ - Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") + tagAttrName = Word(alphas, alphanums + "_-:") + if xml: + tagAttrValue = dblQuotedString.copy().setParseAction(removeQuotes) + openTag = (suppress_LT + + tagStr("tag") + + Dict(ZeroOrMore(Group(tagAttrName + Suppress("=") + tagAttrValue))) + + Optional("/", default=[False])("empty").setParseAction(lambda s, l, t: t[0] == '/') + + suppress_GT) else: - printablesLessRAbrack = "".join(c for c in printables if c not in ">") - tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack) - openTag = Suppress("<") + tagStr("tag") + \ - Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \ - Optional( Suppress("=") + tagAttrValue ) ))) + \ - Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") - closeTag = Combine(_L("</") + tagStr + ">") - - openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname) - closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname) + tagAttrValue = quotedString.copy().setParseAction(removeQuotes) | Word(printables, excludeChars=">") + openTag 
= (suppress_LT + + tagStr("tag") + + Dict(ZeroOrMore(Group(tagAttrName.setParseAction(downcaseTokens) + + Optional(Suppress("=") + tagAttrValue)))) + + Optional("/", default=[False])("empty").setParseAction(lambda s, l, t: t[0] == '/') + + suppress_GT) + closeTag = Combine(_L("</") + tagStr + ">", adjacent=False) + + openTag.setName("<%s>" % resname) + # add start<tagname> results name in parse action now that ungrouped names are not reported at two levels + openTag.addParseAction(lambda t: t.__setitem__("start" + "".join(resname.replace(":", " ").title().split()), t.copy())) + closeTag = closeTag("end" + "".join(resname.replace(":", " ").title().split())).setName("</%s>" % resname) openTag.tag = resname closeTag.tag = resname + openTag.tag_body = SkipTo(closeTag()) return openTag, closeTag def makeHTMLTags(tagStr): @@ -5400,7 +5850,7 @@ def makeHTMLTags(tagStr): text = '<td>More info at the <a href="https://github.com/pyparsing/pyparsing/wiki">pyparsing</a> wiki page</td>' # makeHTMLTags returns pyparsing expressions for the opening and # closing tags as a 2-tuple - a,a_end = makeHTMLTags("A") + a, a_end = makeHTMLTags("A") link_expr = a + SkipTo(a_end)("link_text") + a_end for link in link_expr.searchString(text): @@ -5412,7 +5862,7 @@ def makeHTMLTags(tagStr): pyparsing -> https://github.com/pyparsing/pyparsing/wiki """ - return _makeTags( tagStr, False ) + return _makeTags(tagStr, False) def makeXMLTags(tagStr): """Helper to construct opening and closing tag expressions for XML, @@ -5420,9 +5870,9 @@ def makeXMLTags(tagStr): Example: similar to :class:`makeHTMLTags` """ - return _makeTags( tagStr, True ) + return _makeTags(tagStr, True) -def withAttribute(*args,**attrDict): +def withAttribute(*args, **attrDict): """Helper to create a validating parse action to be used with start tags created with :class:`makeXMLTags` or :class:`makeHTMLTags`. 
Use ``withAttribute`` to qualify @@ -5435,7 +5885,7 @@ def withAttribute(*args,**attrDict): - keyword arguments, as in ``(align="right")``, or - as an explicit dict with ``**`` operator, when an attribute name is also a Python reserved word, as in ``**{"class":"Customer", "align":"right"}`` - - a list of name-value tuples, as in ``(("ns1:class", "Customer"), ("ns2:align","right"))`` + - a list of name-value tuples, as in ``(("ns1:class", "Customer"), ("ns2:align", "right"))`` For attribute names with a namespace prefix, you must use the second form. Attribute names are matched insensitive to upper/lower case. @@ -5482,13 +5932,13 @@ def withAttribute(*args,**attrDict): attrs = args[:] else: attrs = attrDict.items() - attrs = [(k,v) for k,v in attrs] - def pa(s,l,tokens): - for attrName,attrValue in attrs: + attrs = [(k, v) for k, v in attrs] + def pa(s, l, tokens): + for attrName, attrValue in attrs: if attrName not in tokens: - raise ParseException(s,l,"no matching attribute " + attrName) + raise ParseException(s, l, "no matching attribute " + attrName) if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue: - raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" % + raise ParseException(s, l, "attribute '%s' has value '%s', must be '%s'" % (attrName, tokens[attrName], attrValue)) return pa withAttribute.ANY_VALUE = object() @@ -5529,13 +5979,13 @@ def withClass(classname, namespace=''): 1,3 2,3 1,1 """ classattr = "%s:class" % namespace if namespace else "class" - return withAttribute(**{classattr : classname}) + return withAttribute(**{classattr: classname}) opAssoc = SimpleNamespace() opAssoc.LEFT = object() opAssoc.RIGHT = object() -def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): +def infixNotation(baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')')): """Helper method for constructing grammars of expressions made up of operators working in a precedence hierarchy. 
Operators may be unary or binary, left- or right-associative. Parse actions can also be @@ -5613,9 +6063,9 @@ def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): return loc, [] ret = Forward() - lastExpr = baseExpr | ( lpar + ret + rpar ) - for i,operDef in enumerate(opList): - opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4] + lastExpr = baseExpr | (lpar + ret + rpar) + for i, operDef in enumerate(opList): + opExpr, arity, rightLeftAssoc, pa = (operDef + (None, ))[:4] termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr if arity == 3: if opExpr is None or len(opExpr) != 2: @@ -5625,15 +6075,15 @@ def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): thisExpr = Forward().setName(termName) if rightLeftAssoc == opAssoc.LEFT: if arity == 1: - matchExpr = _FB(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) + matchExpr = _FB(lastExpr + opExpr) + Group(lastExpr + OneOrMore(opExpr)) elif arity == 2: if opExpr is not None: - matchExpr = _FB(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) + matchExpr = _FB(lastExpr + opExpr + lastExpr) + Group(lastExpr + OneOrMore(opExpr + lastExpr)) else: - matchExpr = _FB(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) + matchExpr = _FB(lastExpr + lastExpr) + Group(lastExpr + OneOrMore(lastExpr)) elif arity == 3: - matchExpr = _FB(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ - Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr ) + matchExpr = (_FB(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + + Group(lastExpr + OneOrMore(opExpr1 + lastExpr + opExpr2 + lastExpr))) else: raise ValueError("operator must be unary (1), binary (2), or ternary (3)") elif rightLeftAssoc == opAssoc.RIGHT: @@ -5641,15 +6091,15 @@ def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): # try to avoid LR with this extra test if not isinstance(opExpr, Optional): opExpr = 
Optional(opExpr) - matchExpr = _FB(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) + matchExpr = _FB(opExpr.expr + thisExpr) + Group(opExpr + thisExpr) elif arity == 2: if opExpr is not None: - matchExpr = _FB(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) + matchExpr = _FB(lastExpr + opExpr + thisExpr) + Group(lastExpr + OneOrMore(opExpr + thisExpr)) else: - matchExpr = _FB(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) + matchExpr = _FB(lastExpr + thisExpr) + Group(lastExpr + OneOrMore(thisExpr)) elif arity == 3: - matchExpr = _FB(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ - Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr ) + matchExpr = (_FB(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + + Group(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr)) else: raise ValueError("operator must be unary (1), binary (2), or ternary (3)") else: @@ -5659,7 +6109,7 @@ def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): matchExpr.setParseAction(*pa) else: matchExpr.setParseAction(pa) - thisExpr <<= ( matchExpr.setName(termName) | lastExpr ) + thisExpr <<= (matchExpr.setName(termName) | lastExpr) lastExpr = thisExpr ret <<= lastExpr return ret @@ -5668,10 +6118,10 @@ operatorPrecedence = infixNotation """(Deprecated) Former name of :class:`infixNotation`, will be dropped in a future release.""" -dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes") -sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes") -quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'| - Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes") +dblQuotedString = 
Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*') + '"').setName("string enclosed in double quotes") +sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'").setName("string enclosed in single quotes") +quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*') + '"' + | Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'").setName("quotedString using single or double quotes") unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal") def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): @@ -5707,7 +6157,7 @@ def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.cop ident = Word(alphas+'_', alphanums+'_') number = pyparsing_common.number arg = Group(decl_data_type + ident) - LPAR,RPAR = map(Suppress, "()") + LPAR, RPAR = map(Suppress, "()") code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) @@ -5742,33 +6192,40 @@ def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.cop if opener == closer: raise ValueError("opening and closing strings cannot be the same") if content is None: - if isinstance(opener,basestring) and isinstance(closer,basestring): - if len(opener) == 1 and len(closer)==1: + if isinstance(opener, basestring) and isinstance(closer, basestring): + if len(opener) == 1 and len(closer) == 1: if ignoreExpr is not None: - content = (Combine(OneOrMore(~ignoreExpr + - CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) + content = (Combine(OneOrMore(~ignoreExpr + + CharsNotIn(opener + + closer + + ParserElement.DEFAULT_WHITE_CHARS, exact=1) + ) + ).setParseAction(lambda t: t[0].strip())) else: - content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS - ).setParseAction(lambda t:t[0].strip())) + content = (empty.copy() + CharsNotIn(opener + + 
closer + + ParserElement.DEFAULT_WHITE_CHARS + ).setParseAction(lambda t: t[0].strip())) else: if ignoreExpr is not None: - content = (Combine(OneOrMore(~ignoreExpr + - ~Literal(opener) + ~Literal(closer) + - CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) + content = (Combine(OneOrMore(~ignoreExpr + + ~Literal(opener) + + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS, exact=1)) + ).setParseAction(lambda t: t[0].strip())) else: - content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) + - CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) + content = (Combine(OneOrMore(~Literal(opener) + + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS, exact=1)) + ).setParseAction(lambda t: t[0].strip())) else: raise ValueError("opening and closing arguments must be strings if no content expression is given") ret = Forward() if ignoreExpr is not None: - ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) ) + ret <<= Group(Suppress(opener) + ZeroOrMore(ignoreExpr | ret | content) + Suppress(closer)) else: - ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) ) - ret.setName('nested %s%s expression' % (opener,closer)) + ret <<= Group(Suppress(opener) + ZeroOrMore(ret | content) + Suppress(closer)) + ret.setName('nested %s%s expression' % (opener, closer)) return ret def indentedBlock(blockStatementExpr, indentStack, indent=True): @@ -5783,7 +6240,7 @@ def indentedBlock(blockStatementExpr, indentStack, indent=True): (multiple statementWithIndentedBlock expressions within a single grammar should share a common indentStack) - indent - boolean indicating whether block must be indented beyond - the the current level; set to False for block of left-most + the current level; set to False for block of left-most statements (default= ``True``) A valid block must contain at least 
one ``blockStatement``. @@ -5816,15 +6273,15 @@ def indentedBlock(blockStatementExpr, indentStack, indent=True): stmt = Forward() identifier = Word(alphas, alphanums) - funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":") + funcDecl = ("def" + identifier + Group("(" + Optional(delimitedList(identifier)) + ")") + ":") func_body = indentedBlock(stmt, indentStack) - funcDef = Group( funcDecl + func_body ) + funcDef = Group(funcDecl + func_body) rvalue = Forward() funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") rvalue << (funcCall | identifier | Word(nums)) assignment = Group(identifier + "=" + rvalue) - stmt << ( funcDef | assignment | identifier ) + stmt << (funcDef | assignment | identifier) module_body = OneOrMore(stmt) @@ -5852,47 +6309,56 @@ def indentedBlock(blockStatementExpr, indentStack, indent=True): ':', [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] """ - def checkPeerIndent(s,l,t): + backup_stack = indentStack[:] + + def reset_stack(): + indentStack[:] = backup_stack + + def checkPeerIndent(s, l, t): if l >= len(s): return - curCol = col(l,s) + curCol = col(l, s) if curCol != indentStack[-1]: if curCol > indentStack[-1]: - raise ParseFatalException(s,l,"illegal nesting") - raise ParseException(s,l,"not a peer entry") + raise ParseException(s, l, "illegal nesting") + raise ParseException(s, l, "not a peer entry") - def checkSubIndent(s,l,t): - curCol = col(l,s) + def checkSubIndent(s, l, t): + curCol = col(l, s) if curCol > indentStack[-1]: - indentStack.append( curCol ) + indentStack.append(curCol) else: - raise ParseException(s,l,"not a subentry") + raise ParseException(s, l, "not a subentry") - def checkUnindent(s,l,t): + def checkUnindent(s, l, t): if l >= len(s): return - curCol = col(l,s) - if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]): - raise ParseException(s,l,"not an unindent") - indentStack.pop() + curCol = col(l, s) + if 
not(indentStack and curCol in indentStack): + raise ParseException(s, l, "not an unindent") + if curCol < indentStack[-1]: + indentStack.pop() - NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) + NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress(), stopOn=StringEnd()) INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') PEER = Empty().setParseAction(checkPeerIndent).setName('') UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') if indent: - smExpr = Group( Optional(NL) + - #~ FollowedBy(blockStatementExpr) + - INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT) + smExpr = Group(Optional(NL) + + INDENT + + OneOrMore(PEER + Group(blockStatementExpr) + Optional(NL), stopOn=StringEnd()) + + UNDENT) else: - smExpr = Group( Optional(NL) + - (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) ) + smExpr = Group(Optional(NL) + + OneOrMore(PEER + Group(blockStatementExpr) + Optional(NL), stopOn=StringEnd()) + + UNDENT) + smExpr.setFailAction(lambda a, b, c, d: reset_stack()) blockStatementExpr.ignore(_bslash + LineEnd()) return smExpr.setName('indented block') alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") -anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag')) -_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\'')) +anyOpenTag, anyCloseTag = makeHTMLTags(Word(alphas, alphanums + "_:").setName('any tag')) +_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(), '><& "\'')) commonHTMLEntity = Regex('&(?P<entity>' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity") def replaceHTMLEntity(t): """Helper parser action to replace common HTML entities with their special characters""" @@ -5909,7 +6375,7 @@ restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line") dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment") "Comment 
of the form ``// ... (to end of line)``" -cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment") +cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/' | dblSlashComment).setName("C++ style comment") "Comment of either form :class:`cStyleComment` or :class:`dblSlashComment`" javaStyleComment = cppStyleComment @@ -5918,10 +6384,10 @@ javaStyleComment = cppStyleComment pythonStyleComment = Regex(r"#.*").setName("Python style comment") "Comment of the form ``# ... (to end of line)``" -_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + - Optional( Word(" \t") + - ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem") -commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList") +_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + + Optional(Word(" \t") + + ~Literal(",") + ~LineEnd()))).streamline().setName("commaItem") +commaSeparatedList = delimitedList(Optional(quotedString.copy() | _commasepitem, default="")).setName("commaSeparatedList") """(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas. 
@@ -6087,7 +6553,7 @@ class pyparsing_common: integer = Word(nums).setName("integer").setParseAction(convertToInteger) """expression that parses an unsigned integer, returns an int""" - hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16)) + hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int, 16)) """expression that parses a hexadecimal integer, returns an int""" signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger) @@ -6101,10 +6567,10 @@ class pyparsing_common: """mixed integer of the form 'integer - fraction', with optional leading integer, returns float""" mixed_integer.addParseAction(sum) - real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat) + real = Regex(r'[+-]?(?:\d+\.\d*|\.\d+)').setName("real number").setParseAction(convertToFloat) """expression that parses a floating point number and returns a float""" - sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) + sci_real = Regex(r'[+-]?(?:\d+(?:[eE][+-]?\d+)|(?:\d+\.\d*|\.\d+)(?:[eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) """expression that parses a floating point number with optional scientific notation and returns a float""" @@ -6115,15 +6581,18 @@ class pyparsing_common: fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) """any int or real number, returned as float""" - identifier = Word(alphas+'_', alphanums+'_').setName("identifier") + identifier = Word(alphas + '_', alphanums + '_').setName("identifier") """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") "IPv4 address (``0.0.0.0 - 255.255.255.255``)" _ipv6_part = 
Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") - _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address") - _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address") + _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part) * 7).setName("full IPv6 address") + _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part) * (0, 6)) + + "::" + + Optional(_ipv6_part + (':' + _ipv6_part) * (0, 6)) + ).setName("short IPv6 address") _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8) _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") @@ -6150,7 +6619,7 @@ class pyparsing_common: [datetime.date(1999, 12, 31)] """ - def cvt_fn(s,l,t): + def cvt_fn(s, l, t): try: return datetime.strptime(t[0], fmt).date() except ValueError as ve: @@ -6175,7 +6644,7 @@ class pyparsing_common: [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] """ - def cvt_fn(s,l,t): + def cvt_fn(s, l, t): try: return datetime.strptime(t[0], fmt) except ValueError as ve: @@ -6200,7 +6669,7 @@ class pyparsing_common: # strip HTML links from normal text text = '<td>More info at the <a href="https://github.com/pyparsing/pyparsing/wiki">pyparsing</a> wiki page</td>' - td,td_end = makeHTMLTags("TD") + td, td_end = makeHTMLTags("TD") table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end print(table_text.parseString(text).body) @@ -6210,9 +6679,13 @@ class pyparsing_common: """ return pyparsing_common._html_stripper.transformString(tokens[0]) - _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',') - + Optional( White(" \t") ) ) ).streamline().setName("commaItem") - 
comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list") + _commasepitem = Combine(OneOrMore(~Literal(",") + + ~LineEnd() + + Word(printables, excludeChars=',') + + Optional(White(" \t")))).streamline().setName("commaItem") + comma_separated_list = delimitedList(Optional(quotedString.copy() + | _commasepitem, default='') + ).setName("comma separated list") """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper())) @@ -6231,7 +6704,8 @@ class _lazyclassproperty(object): def __get__(self, obj, cls): if cls is None: cls = type(obj) - if not hasattr(cls, '_intern') or any(cls._intern is getattr(superclass, '_intern', []) for superclass in cls.__mro__[1:]): + if not hasattr(cls, '_intern') or any(cls._intern is getattr(superclass, '_intern', []) + for superclass in cls.__mro__[1:]): cls._intern = {} attrname = self.fn.__name__ if attrname not in cls._intern: @@ -6262,7 +6736,7 @@ class unicode_set(object): if cc is unicode_set: break for rr in cc._ranges: - ret.extend(range(rr[0], rr[-1]+1)) + ret.extend(range(rr[0], rr[-1] + 1)) return [unichr(c) for c in sorted(set(ret))] @_lazyclassproperty @@ -6318,27 +6792,27 @@ class pyparsing_unicode(unicode_set): class Chinese(unicode_set): "Unicode set for Chinese Unicode Character Range" - _ranges = [(0x4e00, 0x9fff), (0x3000, 0x303f), ] + _ranges = [(0x4e00, 0x9fff), (0x3000, 0x303f),] class Japanese(unicode_set): "Unicode set for Japanese Unicode Character Range, combining Kanji, Hiragana, and Katakana ranges" - _ranges = [ ] + _ranges = [] class Kanji(unicode_set): "Unicode set for Kanji Unicode Character Range" - _ranges = [(0x4E00, 0x9Fbf), (0x3000, 0x303f), ] + _ranges = [(0x4E00, 0x9Fbf), (0x3000, 0x303f),] class Hiragana(unicode_set): "Unicode set for Hiragana Unicode Character Range" - _ranges = [(0x3040, 0x309f), ] + _ranges = [(0x3040, 
0x309f),] class Katakana(unicode_set): "Unicode set for Katakana Unicode Character Range" - _ranges = [(0x30a0, 0x30ff), ] + _ranges = [(0x30a0, 0x30ff),] class Korean(unicode_set): "Unicode set for Korean Unicode Character Range" - _ranges = [(0xac00, 0xd7af), (0x1100, 0x11ff), (0x3130, 0x318f), (0xa960, 0xa97f), (0xd7b0, 0xd7ff), (0x3000, 0x303f), ] + _ranges = [(0xac00, 0xd7af), (0x1100, 0x11ff), (0x3130, 0x318f), (0xa960, 0xa97f), (0xd7b0, 0xd7ff), (0x3000, 0x303f),] class CJK(Chinese, Japanese, Korean): "Unicode set for combined Chinese, Japanese, and Korean (CJK) Unicode Character Range" @@ -6346,15 +6820,15 @@ class pyparsing_unicode(unicode_set): class Thai(unicode_set): "Unicode set for Thai Unicode Character Range" - _ranges = [(0x0e01, 0x0e3a), (0x0e3f, 0x0e5b), ] + _ranges = [(0x0e01, 0x0e3a), (0x0e3f, 0x0e5b),] class Arabic(unicode_set): "Unicode set for Arabic Unicode Character Range" - _ranges = [(0x0600, 0x061b), (0x061e, 0x06ff), (0x0700, 0x077f), ] + _ranges = [(0x0600, 0x061b), (0x061e, 0x06ff), (0x0700, 0x077f),] class Hebrew(unicode_set): "Unicode set for Hebrew Unicode Character Range" - _ranges = [(0x0590, 0x05ff), ] + _ranges = [(0x0590, 0x05ff),] class Devanagari(unicode_set): "Unicode set for Devanagari Unicode Character Range" @@ -6366,18 +6840,199 @@ pyparsing_unicode.Japanese._ranges = (pyparsing_unicode.Japanese.Kanji._ranges # define ranges in language character sets if PY_3: - setattr(pyparsing_unicode, "العربية", pyparsing_unicode.Arabic) - setattr(pyparsing_unicode, "中文", pyparsing_unicode.Chinese) - setattr(pyparsing_unicode, "кириллица", pyparsing_unicode.Cyrillic) - setattr(pyparsing_unicode, "Ελληνικά", pyparsing_unicode.Greek) - setattr(pyparsing_unicode, "עִברִית", pyparsing_unicode.Hebrew) - setattr(pyparsing_unicode, "日本語", pyparsing_unicode.Japanese) - setattr(pyparsing_unicode.Japanese, "漢字", pyparsing_unicode.Japanese.Kanji) - setattr(pyparsing_unicode.Japanese, "カタカナ", pyparsing_unicode.Japanese.Katakana) - 
setattr(pyparsing_unicode.Japanese, "ひらがな", pyparsing_unicode.Japanese.Hiragana) - setattr(pyparsing_unicode, "한국어", pyparsing_unicode.Korean) - setattr(pyparsing_unicode, "ไทย", pyparsing_unicode.Thai) - setattr(pyparsing_unicode, "देवनागरी", pyparsing_unicode.Devanagari) + setattr(pyparsing_unicode, u"العربية", pyparsing_unicode.Arabic) + setattr(pyparsing_unicode, u"中文", pyparsing_unicode.Chinese) + setattr(pyparsing_unicode, u"кириллица", pyparsing_unicode.Cyrillic) + setattr(pyparsing_unicode, u"Ελληνικά", pyparsing_unicode.Greek) + setattr(pyparsing_unicode, u"עִברִית", pyparsing_unicode.Hebrew) + setattr(pyparsing_unicode, u"日本語", pyparsing_unicode.Japanese) + setattr(pyparsing_unicode.Japanese, u"漢字", pyparsing_unicode.Japanese.Kanji) + setattr(pyparsing_unicode.Japanese, u"カタカナ", pyparsing_unicode.Japanese.Katakana) + setattr(pyparsing_unicode.Japanese, u"ひらがな", pyparsing_unicode.Japanese.Hiragana) + setattr(pyparsing_unicode, u"한국어", pyparsing_unicode.Korean) + setattr(pyparsing_unicode, u"ไทย", pyparsing_unicode.Thai) + setattr(pyparsing_unicode, u"देवनागरी", pyparsing_unicode.Devanagari) + + +class pyparsing_test: + """ + namespace class for classes useful in writing unit tests + """ + + class reset_pyparsing_context: + """ + Context manager to be used when writing unit tests that modify pyparsing config values: + - packrat parsing + - default whitespace characters. + - default keyword characters + - literal string auto-conversion class + - __diag__ settings + + Example: + with reset_pyparsing_context(): + # test that literals used to construct a grammar are automatically suppressed + ParserElement.inlineLiteralsUsing(Suppress) + + term = Word(alphas) | Word(nums) + group = Group('(' + term[...] 
+ ')') + + # assert that the '()' characters are not included in the parsed tokens + self.assertParseAndCheckLisst(group, "(abc 123 def)", ['abc', '123', 'def']) + + # after exiting context manager, literals are converted to Literal expressions again + """ + + def __init__(self): + self._save_context = {} + + def save(self): + self._save_context["default_whitespace"] = ParserElement.DEFAULT_WHITE_CHARS + self._save_context["default_keyword_chars"] = Keyword.DEFAULT_KEYWORD_CHARS + self._save_context[ + "literal_string_class" + ] = ParserElement._literalStringClass + self._save_context["packrat_enabled"] = ParserElement._packratEnabled + self._save_context["packrat_parse"] = ParserElement._parse + self._save_context["__diag__"] = { + name: getattr(__diag__, name) for name in __diag__._all_names + } + self._save_context["__compat__"] = { + "collect_all_And_tokens": __compat__.collect_all_And_tokens + } + return self + + def restore(self): + # reset pyparsing global state + if ( + ParserElement.DEFAULT_WHITE_CHARS + != self._save_context["default_whitespace"] + ): + ParserElement.setDefaultWhitespaceChars( + self._save_context["default_whitespace"] + ) + Keyword.DEFAULT_KEYWORD_CHARS = self._save_context["default_keyword_chars"] + ParserElement.inlineLiteralsUsing( + self._save_context["literal_string_class"] + ) + for name, value in self._save_context["__diag__"].items(): + setattr(__diag__, name, value) + ParserElement._packratEnabled = self._save_context["packrat_enabled"] + ParserElement._parse = self._save_context["packrat_parse"] + __compat__.collect_all_And_tokens = self._save_context["__compat__"] + + def __enter__(self): + return self.save() + + def __exit__(self, *args): + return self.restore() + + class TestParseResultsAsserts: + """ + A mixin class to add parse results assertion methods to normal unittest.TestCase classes. 
+ """ + def assertParseResultsEquals( + self, result, expected_list=None, expected_dict=None, msg=None + ): + """ + Unit test assertion to compare a ParseResults object with an optional expected_list, + and compare any defined results names with an optional expected_dict. + """ + if expected_list is not None: + self.assertEqual(expected_list, result.asList(), msg=msg) + if expected_dict is not None: + self.assertEqual(expected_dict, result.asDict(), msg=msg) + + def assertParseAndCheckList( + self, expr, test_string, expected_list, msg=None, verbose=True + ): + """ + Convenience wrapper assert to test a parser element and input string, and assert that + the resulting ParseResults.asList() is equal to the expected_list. + """ + result = expr.parseString(test_string, parseAll=True) + if verbose: + print(result.dump()) + self.assertParseResultsEquals(result, expected_list=expected_list, msg=msg) + + def assertParseAndCheckDict( + self, expr, test_string, expected_dict, msg=None, verbose=True + ): + """ + Convenience wrapper assert to test a parser element and input string, and assert that + the resulting ParseResults.asDict() is equal to the expected_dict. + """ + result = expr.parseString(test_string, parseAll=True) + if verbose: + print(result.dump()) + self.assertParseResultsEquals(result, expected_dict=expected_dict, msg=msg) + + def assertRunTestResults( + self, run_tests_report, expected_parse_results=None, msg=None + ): + """ + Unit test assertion to evaluate output of ParserElement.runTests(). If a list of + list-dict tuples is given as the expected_parse_results argument, then these are zipped + with the report tuples returned by runTests and evaluated using assertParseResultsEquals. + Finally, asserts that the overall runTests() success value is True. 
+ + :param run_tests_report: tuple(bool, [tuple(str, ParseResults or Exception)]) returned from runTests + :param expected_parse_results (optional): [tuple(str, list, dict, Exception)] + """ + run_test_success, run_test_results = run_tests_report + + if expected_parse_results is not None: + merged = [ + (rpt[0], rpt[1], expected) + for rpt, expected in zip(run_test_results, expected_parse_results) + ] + for test_string, result, expected in merged: + # expected should be a tuple containing a list and/or a dict or an exception, + # and optional failure message string + # an empty tuple will skip any result validation + fail_msg = next( + (exp for exp in expected if isinstance(exp, str)), None + ) + expected_exception = next( + ( + exp + for exp in expected + if isinstance(exp, type) and issubclass(exp, Exception) + ), + None, + ) + if expected_exception is not None: + with self.assertRaises( + expected_exception=expected_exception, msg=fail_msg or msg + ): + if isinstance(result, Exception): + raise result + else: + expected_list = next( + (exp for exp in expected if isinstance(exp, list)), None + ) + expected_dict = next( + (exp for exp in expected if isinstance(exp, dict)), None + ) + if (expected_list, expected_dict) != (None, None): + self.assertParseResultsEquals( + result, + expected_list=expected_list, + expected_dict=expected_dict, + msg=fail_msg or msg, + ) + else: + # warning here maybe? 
+ print("no validation for {!r}".format(test_string)) + + # do this last, in case some specific test results can be reported instead + self.assertTrue( + run_test_success, msg=msg if msg is not None else "failed runTests" + ) + + @contextmanager + def assertRaisesParseException(self, exc_type=ParseException, msg=None): + with self.assertRaises(exc_type, msg=msg): + yield if __name__ == "__main__": diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/__init__.py deleted file mode 100644 index 8ed060ff..00000000 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .core import TomlError -from .parser import load, loads -from .test import translate_to_test -from .writer import dump, dumps \ No newline at end of file diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/core.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/core.py deleted file mode 100644 index c182734e..00000000 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/core.py +++ /dev/null @@ -1,13 +0,0 @@ -class TomlError(RuntimeError): - def __init__(self, message, line, col, filename): - RuntimeError.__init__(self, message, line, col, filename) - self.message = message - self.line = line - self.col = col - self.filename = filename - - def __str__(self): - return '{}({}, {}): {}'.format(self.filename, self.line, self.col, self.message) - - def __repr__(self): - return 'TomlError({!r}, {!r}, {!r}, {!r})'.format(self.message, self.line, self.col, self.filename) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/parser.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/parser.py deleted file mode 100644 index 3493aa64..00000000 --- 
a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/parser.py +++ /dev/null @@ -1,341 +0,0 @@ -import string, re, sys, datetime -from .core import TomlError -from .utils import rfc3339_re, parse_rfc3339_re - -if sys.version_info[0] == 2: - _chr = unichr -else: - _chr = chr - -def load(fin, translate=lambda t, x, v: v, object_pairs_hook=dict): - return loads(fin.read(), translate=translate, object_pairs_hook=object_pairs_hook, filename=getattr(fin, 'name', repr(fin))) - -def loads(s, filename='<string>', translate=lambda t, x, v: v, object_pairs_hook=dict): - if isinstance(s, bytes): - s = s.decode('utf-8') - - s = s.replace('\r\n', '\n') - - root = object_pairs_hook() - tables = object_pairs_hook() - scope = root - - src = _Source(s, filename=filename) - ast = _p_toml(src, object_pairs_hook=object_pairs_hook) - - def error(msg): - raise TomlError(msg, pos[0], pos[1], filename) - - def process_value(v, object_pairs_hook): - kind, text, value, pos = v - if kind == 'str' and value.startswith('\n'): - value = value[1:] - if kind == 'array': - if value and any(k != value[0][0] for k, t, v, p in value[1:]): - error('array-type-mismatch') - value = [process_value(item, object_pairs_hook=object_pairs_hook) for item in value] - elif kind == 'table': - value = object_pairs_hook([(k, process_value(value[k], object_pairs_hook=object_pairs_hook)) for k in value]) - return translate(kind, text, value) - - for kind, value, pos in ast: - if kind == 'kv': - k, v = value - if k in scope: - error('duplicate_keys. 
Key "{0}" was used more than once.'.format(k)) - scope[k] = process_value(v, object_pairs_hook=object_pairs_hook) - else: - is_table_array = (kind == 'table_array') - cur = tables - for name in value[:-1]: - if isinstance(cur.get(name), list): - d, cur = cur[name][-1] - else: - d, cur = cur.setdefault(name, (None, object_pairs_hook())) - - scope = object_pairs_hook() - name = value[-1] - if name not in cur: - if is_table_array: - cur[name] = [(scope, object_pairs_hook())] - else: - cur[name] = (scope, object_pairs_hook()) - elif isinstance(cur[name], list): - if not is_table_array: - error('table_type_mismatch') - cur[name].append((scope, object_pairs_hook())) - else: - if is_table_array: - error('table_type_mismatch') - old_scope, next_table = cur[name] - if old_scope is not None: - error('duplicate_tables') - cur[name] = (scope, next_table) - - def merge_tables(scope, tables): - if scope is None: - scope = object_pairs_hook() - for k in tables: - if k in scope: - error('key_table_conflict') - v = tables[k] - if isinstance(v, list): - scope[k] = [merge_tables(sc, tbl) for sc, tbl in v] - else: - scope[k] = merge_tables(v[0], v[1]) - return scope - - return merge_tables(root, tables) - -class _Source: - def __init__(self, s, filename=None): - self.s = s - self._pos = (1, 1) - self._last = None - self._filename = filename - self.backtrack_stack = [] - - def last(self): - return self._last - - def pos(self): - return self._pos - - def fail(self): - return self._expect(None) - - def consume_dot(self): - if self.s: - self._last = self.s[0] - self.s = self[1:] - self._advance(self._last) - return self._last - return None - - def expect_dot(self): - return self._expect(self.consume_dot()) - - def consume_eof(self): - if not self.s: - self._last = '' - return True - return False - - def expect_eof(self): - return self._expect(self.consume_eof()) - - def consume(self, s): - if self.s.startswith(s): - self.s = self.s[len(s):] - self._last = s - self._advance(s) - return 
True - return False - - def expect(self, s): - return self._expect(self.consume(s)) - - def consume_re(self, re): - m = re.match(self.s) - if m: - self.s = self.s[len(m.group(0)):] - self._last = m - self._advance(m.group(0)) - return m - return None - - def expect_re(self, re): - return self._expect(self.consume_re(re)) - - def __enter__(self): - self.backtrack_stack.append((self.s, self._pos)) - - def __exit__(self, type, value, traceback): - if type is None: - self.backtrack_stack.pop() - else: - self.s, self._pos = self.backtrack_stack.pop() - return type == TomlError - - def commit(self): - self.backtrack_stack[-1] = (self.s, self._pos) - - def _expect(self, r): - if not r: - raise TomlError('msg', self._pos[0], self._pos[1], self._filename) - return r - - def _advance(self, s): - suffix_pos = s.rfind('\n') - if suffix_pos == -1: - self._pos = (self._pos[0], self._pos[1] + len(s)) - else: - self._pos = (self._pos[0] + s.count('\n'), len(s) - suffix_pos) - -_ews_re = re.compile(r'(?:[ \t]|#[^\n]*\n|#[^\n]*\Z|\n)*') -def _p_ews(s): - s.expect_re(_ews_re) - -_ws_re = re.compile(r'[ \t]*') -def _p_ws(s): - s.expect_re(_ws_re) - -_escapes = { 'b': '\b', 'n': '\n', 'r': '\r', 't': '\t', '"': '"', - '\\': '\\', 'f': '\f' } - -_basicstr_re = re.compile(r'[^"\\\000-\037]*') -_short_uni_re = re.compile(r'u([0-9a-fA-F]{4})') -_long_uni_re = re.compile(r'U([0-9a-fA-F]{8})') -_escapes_re = re.compile(r'[btnfr\"\\]') -_newline_esc_re = re.compile('\n[ \t\n]*') -def _p_basicstr_content(s, content=_basicstr_re): - res = [] - while True: - res.append(s.expect_re(content).group(0)) - if not s.consume('\\'): - break - if s.consume_re(_newline_esc_re): - pass - elif s.consume_re(_short_uni_re) or s.consume_re(_long_uni_re): - v = int(s.last().group(1), 16) - if 0xd800 <= v < 0xe000: - s.fail() - res.append(_chr(v)) - else: - s.expect_re(_escapes_re) - res.append(_escapes[s.last().group(0)]) - return ''.join(res) - -_key_re = re.compile(r'[0-9a-zA-Z-_]+') -def _p_key(s): - with s: 
- s.expect('"') - r = _p_basicstr_content(s, _basicstr_re) - s.expect('"') - return r - if s.consume('\''): - if s.consume('\'\''): - r = s.expect_re(_litstr_ml_re).group(0) - s.expect('\'\'\'') - else: - r = s.expect_re(_litstr_re).group(0) - s.expect('\'') - return r - return s.expect_re(_key_re).group(0) - -_float_re = re.compile(r'[+-]?(?:0|[1-9](?:_?\d)*)(?:\.\d(?:_?\d)*)?(?:[eE][+-]?(?:\d(?:_?\d)*))?') - -_basicstr_ml_re = re.compile(r'(?:""?(?!")|[^"\\\000-\011\013-\037])*') -_litstr_re = re.compile(r"[^'\000\010\012-\037]*") -_litstr_ml_re = re.compile(r"(?:(?:|'|'')(?:[^'\000-\010\013-\037]))*") -def _p_value(s, object_pairs_hook): - pos = s.pos() - - if s.consume('true'): - return 'bool', s.last(), True, pos - if s.consume('false'): - return 'bool', s.last(), False, pos - - if s.consume('"'): - if s.consume('""'): - r = _p_basicstr_content(s, _basicstr_ml_re) - s.expect('"""') - else: - r = _p_basicstr_content(s, _basicstr_re) - s.expect('"') - return 'str', r, r, pos - - if s.consume('\''): - if s.consume('\'\''): - r = s.expect_re(_litstr_ml_re).group(0) - s.expect('\'\'\'') - else: - r = s.expect_re(_litstr_re).group(0) - s.expect('\'') - return 'str', r, r, pos - - if s.consume_re(rfc3339_re): - m = s.last() - return 'datetime', m.group(0), parse_rfc3339_re(m), pos - - if s.consume_re(_float_re): - m = s.last().group(0) - r = m.replace('_','') - if '.' 
in m or 'e' in m or 'E' in m: - return 'float', m, float(r), pos - else: - return 'int', m, int(r, 10), pos - - if s.consume('['): - items = [] - with s: - while True: - _p_ews(s) - items.append(_p_value(s, object_pairs_hook=object_pairs_hook)) - s.commit() - _p_ews(s) - s.expect(',') - s.commit() - _p_ews(s) - s.expect(']') - return 'array', None, items, pos - - if s.consume('{'): - _p_ws(s) - items = object_pairs_hook() - if not s.consume('}'): - k = _p_key(s) - _p_ws(s) - s.expect('=') - _p_ws(s) - items[k] = _p_value(s, object_pairs_hook=object_pairs_hook) - _p_ws(s) - while s.consume(','): - _p_ws(s) - k = _p_key(s) - _p_ws(s) - s.expect('=') - _p_ws(s) - items[k] = _p_value(s, object_pairs_hook=object_pairs_hook) - _p_ws(s) - s.expect('}') - return 'table', None, items, pos - - s.fail() - -def _p_stmt(s, object_pairs_hook): - pos = s.pos() - if s.consume( '['): - is_array = s.consume('[') - _p_ws(s) - keys = [_p_key(s)] - _p_ws(s) - while s.consume('.'): - _p_ws(s) - keys.append(_p_key(s)) - _p_ws(s) - s.expect(']') - if is_array: - s.expect(']') - return 'table_array' if is_array else 'table', keys, pos - - key = _p_key(s) - _p_ws(s) - s.expect('=') - _p_ws(s) - value = _p_value(s, object_pairs_hook=object_pairs_hook) - return 'kv', (key, value), pos - -_stmtsep_re = re.compile(r'(?:[ \t]*(?:#[^\n]*)?\n)+[ \t]*') -def _p_toml(s, object_pairs_hook): - stmts = [] - _p_ews(s) - with s: - stmts.append(_p_stmt(s, object_pairs_hook=object_pairs_hook)) - while True: - s.commit() - s.expect_re(_stmtsep_re) - stmts.append(_p_stmt(s, object_pairs_hook=object_pairs_hook)) - _p_ews(s) - s.expect_eof() - return stmts diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/test.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/test.py deleted file mode 100644 index ec8abfc6..00000000 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/test.py +++ /dev/null @@ -1,30 +0,0 @@ 
-import datetime -from .utils import format_rfc3339 - -try: - _string_types = (str, unicode) - _int_types = (int, long) -except NameError: - _string_types = str - _int_types = int - -def translate_to_test(v): - if isinstance(v, dict): - return { k: translate_to_test(v) for k, v in v.items() } - if isinstance(v, list): - a = [translate_to_test(x) for x in v] - if v and isinstance(v[0], dict): - return a - else: - return {'type': 'array', 'value': a} - if isinstance(v, datetime.datetime): - return {'type': 'datetime', 'value': format_rfc3339(v)} - if isinstance(v, bool): - return {'type': 'bool', 'value': 'true' if v else 'false'} - if isinstance(v, _int_types): - return {'type': 'integer', 'value': str(v)} - if isinstance(v, float): - return {'type': 'float', 'value': '{:.17}'.format(v)} - if isinstance(v, _string_types): - return {'type': 'string', 'value': v} - raise RuntimeError('unexpected value: {!r}'.format(v)) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/utils.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/utils.py deleted file mode 100644 index 636a680b..00000000 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/utils.py +++ /dev/null @@ -1,67 +0,0 @@ -import datetime -import re - -rfc3339_re = re.compile(r'(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(\.\d+)?(?:Z|([+-]\d{2}):(\d{2}))') - -def parse_rfc3339(v): - m = rfc3339_re.match(v) - if not m or m.group(0) != v: - return None - return parse_rfc3339_re(m) - -def parse_rfc3339_re(m): - r = map(int, m.groups()[:6]) - if m.group(7): - micro = float(m.group(7)) - else: - micro = 0 - - if m.group(8): - g = int(m.group(8), 10) * 60 + int(m.group(9), 10) - tz = _TimeZone(datetime.timedelta(0, g * 60)) - else: - tz = _TimeZone(datetime.timedelta(0, 0)) - - y, m, d, H, M, S = r - return datetime.datetime(y, m, d, H, M, S, int(micro * 1000000), tz) - - -def format_rfc3339(v): - offs = v.utcoffset() - 
offs = int(offs.total_seconds()) // 60 if offs is not None else 0 - - if offs == 0: - suffix = 'Z' - else: - if offs > 0: - suffix = '+' - else: - suffix = '-' - offs = -offs - suffix = '{0}{1:02}:{2:02}'.format(suffix, offs // 60, offs % 60) - - if v.microsecond: - return v.strftime('%Y-%m-%dT%H:%M:%S.%f') + suffix - else: - return v.strftime('%Y-%m-%dT%H:%M:%S') + suffix - -class _TimeZone(datetime.tzinfo): - def __init__(self, offset): - self._offset = offset - - def utcoffset(self, dt): - return self._offset - - def dst(self, dt): - return None - - def tzname(self, dt): - m = self._offset.total_seconds() // 60 - if m < 0: - res = '-' - m = -m - else: - res = '+' - h = m // 60 - m = m - h * 60 - return '{}{:.02}{:.02}'.format(res, h, m) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/writer.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/writer.py deleted file mode 100644 index 73b5089c..00000000 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/pytoml/writer.py +++ /dev/null @@ -1,106 +0,0 @@ -from __future__ import unicode_literals -import io, datetime, math, string, sys - -from .utils import format_rfc3339 - -if sys.version_info[0] == 3: - long = int - unicode = str - - -def dumps(obj, sort_keys=False): - fout = io.StringIO() - dump(obj, fout, sort_keys=sort_keys) - return fout.getvalue() - - -_escapes = {'\n': 'n', '\r': 'r', '\\': '\\', '\t': 't', '\b': 'b', '\f': 'f', '"': '"'} - - -def _escape_string(s): - res = [] - start = 0 - - def flush(): - if start != i: - res.append(s[start:i]) - return i + 1 - - i = 0 - while i < len(s): - c = s[i] - if c in '"\\\n\r\t\b\f': - start = flush() - res.append('\\' + _escapes[c]) - elif ord(c) < 0x20: - start = flush() - res.append('\\u%04x' % ord(c)) - i += 1 - - flush() - return '"' + ''.join(res) + '"' - - -_key_chars = string.digits + string.ascii_letters + '-_' -def _escape_id(s): - if any(c not in _key_chars for 
c in s): - return _escape_string(s) - return s - - -def _format_value(v): - if isinstance(v, bool): - return 'true' if v else 'false' - if isinstance(v, int) or isinstance(v, long): - return unicode(v) - if isinstance(v, float): - if math.isnan(v) or math.isinf(v): - raise ValueError("{0} is not a valid TOML value".format(v)) - else: - return repr(v) - elif isinstance(v, unicode) or isinstance(v, bytes): - return _escape_string(v) - elif isinstance(v, datetime.datetime): - return format_rfc3339(v) - elif isinstance(v, list): - return '[{0}]'.format(', '.join(_format_value(obj) for obj in v)) - elif isinstance(v, dict): - return '{{{0}}}'.format(', '.join('{} = {}'.format(_escape_id(k), _format_value(obj)) for k, obj in v.items())) - else: - raise RuntimeError(v) - - -def dump(obj, fout, sort_keys=False): - tables = [((), obj, False)] - - while tables: - name, table, is_array = tables.pop() - if name: - section_name = '.'.join(_escape_id(c) for c in name) - if is_array: - fout.write('[[{0}]]\n'.format(section_name)) - else: - fout.write('[{0}]\n'.format(section_name)) - - table_keys = sorted(table.keys()) if sort_keys else table.keys() - new_tables = [] - has_kv = False - for k in table_keys: - v = table[k] - if isinstance(v, dict): - new_tables.append((name + (k,), v, False)) - elif isinstance(v, list) and v and all(isinstance(o, dict) for o in v): - new_tables.extend((name + (k,), d, True) for d in v) - elif v is None: - # based on mojombo's comment: https://github.com/toml-lang/toml/issues/146#issuecomment-25019344 - fout.write( - '#{} = null # To use: uncomment and replace null with value\n'.format(_escape_id(k))) - has_kv = True - else: - fout.write('{0} = {1}\n'.format(_escape_id(k), _format_value(v))) - has_kv = True - - tables.extend(reversed(new_tables)) - - if (name or has_kv) and tables: - fout.write('\n') diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/__init__.py 
b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/__init__.py index 80c4ce1d..517458b5 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/__init__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/__init__.py @@ -9,14 +9,14 @@ Requests HTTP Library ~~~~~~~~~~~~~~~~~~~~~ -Requests is an HTTP library, written in Python, for human beings. Basic GET -usage: +Requests is an HTTP library, written in Python, for human beings. +Basic GET usage: >>> import requests >>> r = requests.get('https://www.python.org') >>> r.status_code 200 - >>> 'Python is a programming language' in r.content + >>> b'Python is a programming language' in r.content True ... or POST: @@ -27,14 +27,14 @@ usage: { ... "form": { - "key2": "value2", - "key1": "value1" + "key1": "value1", + "key2": "value2" }, ... } The other HTTP methods are supported - see `requests.api`. Full documentation -is at <http://python-requests.org>. +is at <https://requests.readthedocs.io>. :copyright: (c) 2017 by Kenneth Reitz. :license: Apache 2.0, see LICENSE for more details. @@ -57,10 +57,10 @@ def check_compatibility(urllib3_version, chardet_version): # Check urllib3 for compatibility. major, minor, patch = urllib3_version # noqa: F811 major, minor, patch = int(major), int(minor), int(patch) - # urllib3 >= 1.21.1, <= 1.24 + # urllib3 >= 1.21.1, <= 1.25 assert major == 1 assert minor >= 21 - assert minor <= 24 + assert minor <= 25 # Check chardet for compatibility. 
major, minor, patch = chardet_version.split('.')[:3] @@ -90,18 +90,29 @@ except (AssertionError, ValueError): "version!".format(urllib3.__version__, chardet.__version__), RequestsDependencyWarning) -# Attempt to enable urllib3's SNI support, if possible -from pip._internal.utils.compat import WINDOWS -if not WINDOWS: +# Attempt to enable urllib3's fallback for SNI support +# if the standard library doesn't support SNI or the +# 'ssl' library isn't available. +try: + # Note: This logic prevents upgrading cryptography on Windows, if imported + # as part of pip. + from pip._internal.utils.compat import WINDOWS + if not WINDOWS: + raise ImportError("pip internals: don't import cryptography on Windows") try: + import ssl + except ImportError: + ssl = None + + if not getattr(ssl, "HAS_SNI", False): from pip._vendor.urllib3.contrib import pyopenssl pyopenssl.inject_into_urllib3() # Check cryptography version from cryptography import __version__ as cryptography_version _check_cryptography(cryptography_version) - except ImportError: - pass +except ImportError: + pass # urllib3's DependencyWarnings should be silenced. from pip._vendor.urllib3.exceptions import DependencyWarning diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/__version__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/__version__.py index f5b5d036..531e26ce 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/__version__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/__version__.py @@ -4,11 +4,11 @@ __title__ = 'requests' __description__ = 'Python HTTP for Humans.' 
-__url__ = 'http://python-requests.org' -__version__ = '2.21.0' -__build__ = 0x022100 +__url__ = 'https://requests.readthedocs.io' +__version__ = '2.24.0' +__build__ = 0x022400 __author__ = 'Kenneth Reitz' __author_email__ = 'me@kennethreitz.org' __license__ = 'Apache 2.0' -__copyright__ = 'Copyright 2018 Kenneth Reitz' +__copyright__ = 'Copyright 2020 Kenneth Reitz' __cake__ = u'\u2728 \U0001f370 \u2728' diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/api.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/api.py index abada96d..e978e203 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/api.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/api.py @@ -16,10 +16,10 @@ from . import sessions def request(method, url, **kwargs): """Constructs and sends a :class:`Request <Request>`. - :param method: method for the new :class:`Request` object. + :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``. :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary, list of tuples or bytes to send - in the body of the :class:`Request`. + in the query string for the :class:`Request`. :param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. @@ -50,6 +50,7 @@ def request(method, url, **kwargs): >>> import requests >>> req = requests.request('GET', 'https://httpbin.org/get') + >>> req <Response [200]> """ @@ -65,7 +66,7 @@ def get(url, params=None, **kwargs): :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary, list of tuples or bytes to send - in the body of the :class:`Request`. 
+ in the query string for the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response <Response>` object :rtype: requests.Response @@ -92,7 +93,9 @@ def head(url, **kwargs): r"""Sends a HEAD request. :param url: URL for the new :class:`Request` object. - :param \*\*kwargs: Optional arguments that ``request`` takes. + :param \*\*kwargs: Optional arguments that ``request`` takes. If + `allow_redirects` is not provided, it will be set to `False` (as + opposed to the default :meth:`request` behavior). :return: :class:`Response <Response>` object :rtype: requests.Response """ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/auth.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/auth.py index bdde51c7..eeface39 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/auth.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/auth.py @@ -50,7 +50,7 @@ def _basic_auth_str(username, password): "Non-string passwords will no longer be supported in Requests " "3.0.0. 
Please convert the object you've passed in ({!r}) to " "a string or bytes object in the near future to avoid " - "problems.".format(password), + "problems.".format(type(password)), category=DeprecationWarning, ) password = str(password) @@ -239,7 +239,7 @@ class HTTPDigestAuth(AuthBase): """ # If response is not 4xx, do not auth - # See https://github.com/requests/requests/issues/3772 + # See https://github.com/psf/requests/issues/3772 if not 400 <= r.status_code < 500: self._thread_local.num_401_calls = 1 return r diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/compat.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/compat.py index 6a86893d..9e293716 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/compat.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/compat.py @@ -47,6 +47,7 @@ if is_py2: import cookielib from Cookie import Morsel from StringIO import StringIO + # Keep OrderedDict for backwards compatibility. from collections import Callable, Mapping, MutableMapping, OrderedDict @@ -63,6 +64,7 @@ elif is_py3: from http import cookiejar as cookielib from http.cookies import Morsel from io import StringIO + # Keep OrderedDict for backwards compatibility. 
from collections import OrderedDict from collections.abc import Callable, Mapping, MutableMapping diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/exceptions.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/exceptions.py index a91e1fd1..9ef9e6e9 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/exceptions.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/exceptions.py @@ -94,11 +94,11 @@ class ChunkedEncodingError(RequestException): class ContentDecodingError(RequestException, BaseHTTPError): - """Failed to decode response content""" + """Failed to decode response content.""" class StreamConsumedError(RequestException, TypeError): - """The content for this response was already consumed""" + """The content for this response was already consumed.""" class RetryError(RequestException): @@ -106,21 +106,18 @@ class RetryError(RequestException): class UnrewindableBodyError(RequestException): - """Requests encountered an error when trying to rewind a body""" + """Requests encountered an error when trying to rewind a body.""" # Warnings class RequestsWarning(Warning): """Base warning for Requests.""" - pass class FileModeWarning(RequestsWarning, DeprecationWarning): """A file was opened in text mode, but Requests determined its binary length.""" - pass class RequestsDependencyWarning(RequestsWarning): """An imported dependency doesn't match the expected version range.""" - pass diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/models.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/models.py index 08399574..015e715d 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/models.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/models.py @@ -12,7 +12,7 @@ import sys # 
Import encoding now, to avoid implicit import later. # Implicit import within threads may cause LookupError when standard library is in a ZIP, -# such as in Embedded Python. See https://github.com/requests/requests/issues/3578. +# such as in Embedded Python. See https://github.com/psf/requests/issues/3578. import encodings.idna from pip._vendor.urllib3.fields import RequestField @@ -280,6 +280,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): >>> import requests >>> req = requests.Request('GET', 'https://httpbin.org/get') >>> r = req.prepare() + >>> r <PreparedRequest [GET]> >>> s = requests.Session() @@ -358,7 +359,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): #: We're unable to blindly call unicode/str functions #: as this will include the bytestring indicator (b'') #: on python 3.x. - #: https://github.com/requests/requests/pull/2238 + #: https://github.com/psf/requests/pull/2238 if isinstance(url, bytes): url = url.decode('utf8') else: @@ -472,12 +473,12 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): not isinstance(data, (basestring, list, tuple, Mapping)) ]) - try: - length = super_len(data) - except (TypeError, AttributeError, UnsupportedOperation): - length = None - if is_stream: + try: + length = super_len(data) + except (TypeError, AttributeError, UnsupportedOperation): + length = None + body = data if getattr(body, 'tell', None) is not None: @@ -608,7 +609,7 @@ class Response(object): #: File-like object representation of response (for advanced usage). #: Use of ``raw`` requires that ``stream=True`` be set on the request. - # This requirement does not apply for use internally to Requests. + #: This requirement does not apply for use internally to Requests. self.raw = None #: Final URL location of Response. 
@@ -915,7 +916,7 @@ class Response(object): return l def raise_for_status(self): - """Raises stored :class:`HTTPError`, if one occurred.""" + """Raises :class:`HTTPError`, if one occurred.""" http_error_msg = '' if isinstance(self.reason, bytes): diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/sessions.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/sessions.py index d73d700f..e8e2d609 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/sessions.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/sessions.py @@ -11,9 +11,10 @@ import os import sys import time from datetime import timedelta +from collections import OrderedDict from .auth import _basic_auth_str -from .compat import cookielib, is_py3, OrderedDict, urljoin, urlparse, Mapping +from .compat import cookielib, is_py3, urljoin, urlparse, Mapping from .cookies import ( cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies) from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT @@ -162,7 +163,7 @@ class SessionRedirectMixin(object): resp.raw.read(decode_content=False) if len(resp.history) >= self.max_redirects: - raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects, response=resp) + raise TooManyRedirects('Exceeded {} redirects.'.format(self.max_redirects), response=resp) # Release the connection back into the pool. 
resp.close() @@ -170,7 +171,7 @@ class SessionRedirectMixin(object): # Handle redirection without scheme (see: RFC 1808 Section 4) if url.startswith('//'): parsed_rurl = urlparse(resp.url) - url = '%s:%s' % (to_native_string(parsed_rurl.scheme), url) + url = ':'.join([to_native_string(parsed_rurl.scheme), url]) # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2) parsed = urlparse(url) @@ -192,19 +193,16 @@ class SessionRedirectMixin(object): self.rebuild_method(prepared_request, resp) - # https://github.com/requests/requests/issues/1084 + # https://github.com/psf/requests/issues/1084 if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect): - # https://github.com/requests/requests/issues/3490 + # https://github.com/psf/requests/issues/3490 purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding') for header in purged_headers: prepared_request.headers.pop(header, None) prepared_request.body = None headers = prepared_request.headers - try: - del headers['Cookie'] - except KeyError: - pass + headers.pop('Cookie', None) # Extract any cookies sent on the response to the cookiejar # in the new request. Because we've mutated our copied prepared @@ -271,7 +269,6 @@ class SessionRedirectMixin(object): if new_auth is not None: prepared_request.prepare_auth(new_auth) - return def rebuild_proxies(self, prepared_request, proxies): """This method re-evaluates the proxy configuration by considering the @@ -352,13 +349,13 @@ class Session(SessionRedirectMixin): Or as a context manager:: >>> with requests.Session() as s: - >>> s.get('https://httpbin.org/get') + ... 
s.get('https://httpbin.org/get') <Response [200]> """ __attrs__ = [ 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify', - 'cert', 'prefetch', 'adapters', 'stream', 'trust_env', + 'cert', 'adapters', 'stream', 'trust_env', 'max_redirects', ] @@ -661,11 +658,13 @@ class Session(SessionRedirectMixin): extract_cookies_to_jar(self.cookies, request, r.raw) - # Redirect resolving generator. - gen = self.resolve_redirects(r, request, **kwargs) - # Resolve redirects if allowed. - history = [resp for resp in gen] if allow_redirects else [] + if allow_redirects: + # Redirect resolving generator. + gen = self.resolve_redirects(r, request, **kwargs) + history = [resp for resp in gen] + else: + history = [] # Shuffle things around if there's history. if history: @@ -728,7 +727,7 @@ class Session(SessionRedirectMixin): return adapter # Nothing matches :-/ - raise InvalidSchema("No connection adapters were found for '%s'" % url) + raise InvalidSchema("No connection adapters were found for {!r}".format(url)) def close(self): """Closes all adapters and as such the session""" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/status_codes.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/status_codes.py index 813e8c4e..d80a7cd4 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/status_codes.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/status_codes.py @@ -5,12 +5,15 @@ The ``codes`` object defines a mapping from common names for HTTP statuses to their numerical codes, accessible either as attributes or as dictionary items. 
->>> requests.codes['temporary_redirect'] -307 ->>> requests.codes.teapot -418 ->>> requests.codes['\o/'] -200 +Example:: + + >>> import requests + >>> requests.codes['temporary_redirect'] + 307 + >>> requests.codes.teapot + 418 + >>> requests.codes['\o/'] + 200 Some codes have multiple names, and both upper- and lower-case versions of the names are allowed. For example, ``codes.ok``, ``codes.OK``, and diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/structures.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/structures.py index da930e28..8ee0ba7a 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/structures.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/structures.py @@ -7,7 +7,9 @@ requests.structures Data structures that power Requests. """ -from .compat import OrderedDict, Mapping, MutableMapping +from collections import OrderedDict + +from .compat import Mapping, MutableMapping class CaseInsensitiveDict(MutableMapping): diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/utils.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/utils.py index 8170a8d2..c1700d7f 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/utils.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/requests/utils.py @@ -19,6 +19,7 @@ import sys import tempfile import warnings import zipfile +from collections import OrderedDict from .__version__ import __version__ from . import certs @@ -26,7 +27,7 @@ from . 
import certs from ._internal_utils import to_native_string from .compat import parse_http_list as _parse_list_header from .compat import ( - quote, urlparse, bytes, str, OrderedDict, unquote, getproxies, + quote, urlparse, bytes, str, unquote, getproxies, proxy_bypass, urlunparse, basestring, integer_types, is_py3, proxy_bypass_environment, getproxies_environment, Mapping) from .cookies import cookiejar_from_dict @@ -179,7 +180,7 @@ def get_netrc_auth(url, raise_errors=False): except KeyError: # os.path.expanduser can fail when $HOME is undefined and # getpwuid fails. See https://bugs.python.org/issue20164 & - # https://github.com/requests/requests/issues/1846 + # https://github.com/psf/requests/issues/1846 return if os.path.exists(loc): @@ -266,6 +267,8 @@ def from_key_val_list(value): >>> from_key_val_list([('key', 'val')]) OrderedDict([('key', 'val')]) >>> from_key_val_list('string') + Traceback (most recent call last): + ... ValueError: cannot encode objects that are not 2-tuples >>> from_key_val_list({'key': 'val'}) OrderedDict([('key', 'val')]) @@ -292,7 +295,9 @@ def to_key_val_list(value): >>> to_key_val_list({'key': 'val'}) [('key', 'val')] >>> to_key_val_list('string') - ValueError: cannot encode objects that are not 2-tuples. + Traceback (most recent call last): + ... 
+ ValueError: cannot encode objects that are not 2-tuples :rtype: list """ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/__init__.py new file mode 100644 index 00000000..3b444545 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/__init__.py @@ -0,0 +1,26 @@ +__all__ = [ + "__version__", + "AbstractProvider", + "AbstractResolver", + "BaseReporter", + "InconsistentCandidate", + "Resolver", + "RequirementsConflicted", + "ResolutionError", + "ResolutionImpossible", + "ResolutionTooDeep", +] + +__version__ = "0.4.0" + + +from .providers import AbstractProvider, AbstractResolver +from .reporters import BaseReporter +from .resolvers import ( + InconsistentCandidate, + RequirementsConflicted, + Resolver, + ResolutionError, + ResolutionImpossible, + ResolutionTooDeep, +) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/compat/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/compat/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py new file mode 100644 index 00000000..366cc5e2 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py @@ -0,0 +1,6 @@ +__all__ = ["Sequence"] + +try: + from collections.abc import Sequence +except ImportError: + from collections import Sequence diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/providers.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/providers.py new file mode 100644 
index 00000000..68b7290d --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/providers.py @@ -0,0 +1,109 @@ +class AbstractProvider(object): + """Delegate class to provide requirement interface for the resolver. + """ + + def identify(self, dependency): + """Given a dependency, return an identifier for it. + + This is used in many places to identify the dependency, e.g. whether + two requirements should have their specifier parts merged, whether + two specifications would conflict with each other (because they the + same name but different versions). + """ + raise NotImplementedError + + def get_preference(self, resolution, candidates, information): + """Produce a sort key for given specification based on preference. + + The preference is defined as "I think this requirement should be + resolved first". The lower the return value is, the more preferred + this group of arguments is. + + :param resolution: Currently pinned candidate, or `None`. + :param candidates: A list of possible candidates. + :param information: A list of requirement information. + + Each information instance is a named tuple with two entries: + + * `requirement` specifies a requirement contributing to the current + candidate list + * `parent` specifies the candidate that provides (dependend on) the + requirement, or `None` to indicate a root requirement. + + The preference could depend on a various of issues, including (not + necessarily in this order): + + * Is this package pinned in the current resolution result? + * How relaxed is the requirement? Stricter ones should probably be + worked on first? (I don't know, actually.) + * How many possibilities are there to satisfy this requirement? Those + with few left should likely be worked on first, I guess? + * Are there any known conflicts for this requirement? We should + probably work on those with the most known conflicts. 
+ + A sortable value should be returned (this will be used as the `key` + parameter of the built-in sorting function). The smaller the value is, + the more preferred this specification is (i.e. the sorting function + is called with `reverse=False`). + """ + raise NotImplementedError + + def find_matches(self, requirements): + """Find all possible candidates that satisfy the given requirements. + + This should try to get candidates based on the requirements' types. + For VCS, local, and archive requirements, the one-and-only match is + returned, and for a "named" requirement, the index(es) should be + consulted to find concrete candidates for this requirement. + + :param requirements: A collection of requirements which all of the the + returned candidates must match. All requirements are guaranteed to + have the same identifier. The collection is never empty. + :returns: An iterable that orders candidates by preference, e.g. the + most preferred candidate should come first. + """ + raise NotImplementedError + + def is_satisfied_by(self, requirement, candidate): + """Whether the given requirement can be satisfied by a candidate. + + The candidate is guarenteed to have been generated from the + requirement. + + A boolean should be returned to indicate whether `candidate` is a + viable solution to the requirement. + """ + raise NotImplementedError + + def get_dependencies(self, candidate): + """Get dependencies of a candidate. + + This should return a collection of requirements that `candidate` + specifies as its dependencies. + """ + raise NotImplementedError + + +class AbstractResolver(object): + """The thing that performs the actual resolution work. + """ + + base_exception = Exception + + def __init__(self, provider, reporter): + self.provider = provider + self.reporter = reporter + + def resolve(self, requirements, **kwargs): + """Take a collection of constraints, spit out the resolution result. 
+ + This returns a representation of the final resolution state, with one + guarenteed attribute ``mapping`` that contains resolved candidates as + values. The keys are their respective identifiers. + + :param requirements: A collection of constraints. + :param kwargs: Additional keyword arguments that subclasses may accept. + + :raises: ``self.base_exception`` or its subclass. + """ + raise NotImplementedError diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/reporters.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/reporters.py new file mode 100644 index 00000000..a0a2a458 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/reporters.py @@ -0,0 +1,42 @@ +class BaseReporter(object): + """Delegate class to provider progress reporting for the resolver. + """ + + def starting(self): + """Called before the resolution actually starts. + """ + + def starting_round(self, index): + """Called before each round of resolution starts. + + The index is zero-based. + """ + + def ending_round(self, index, state): + """Called before each round of resolution ends. + + This is NOT called if the resolution ends at this round. Use `ending` + if you want to report finalization. The index is zero-based. + """ + + def ending(self, state): + """Called before the resolution ends successfully. + """ + + def adding_requirement(self, requirement, parent): + """Called when adding a new requirement into the resolve criteria. + + :param requirement: The additional requirement to be applied to filter + the available candidaites. + :param parent: The candidate that requires ``requirement`` as a + dependency, or None if ``requirement`` is one of the root + requirements passed in from ``Resolver.resolve()``. + """ + + def backtracking(self, candidate): + """Called when rejecting a candidate during backtracking. 
+ """ + + def pinning(self, candidate): + """Called when adding a candidate to the potential solution. + """ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/resolvers.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/resolvers.py new file mode 100644 index 00000000..4497f976 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/resolvers.py @@ -0,0 +1,428 @@ +import collections + +from .compat import collections_abc +from .providers import AbstractResolver +from .structs import DirectedGraph + + +RequirementInformation = collections.namedtuple( + "RequirementInformation", ["requirement", "parent"] +) + + +class ResolverException(Exception): + """A base class for all exceptions raised by this module. + + Exceptions derived by this class should all be handled in this module. Any + bubbling pass the resolver should be treated as a bug. + """ + + +class RequirementsConflicted(ResolverException): + def __init__(self, criterion): + super(RequirementsConflicted, self).__init__(criterion) + self.criterion = criterion + + def __str__(self): + return "Requirements conflict: {}".format( + ", ".join(repr(r) for r in self.criterion.iter_requirement()), + ) + + +class InconsistentCandidate(ResolverException): + def __init__(self, candidate, criterion): + super(InconsistentCandidate, self).__init__(candidate, criterion) + self.candidate = candidate + self.criterion = criterion + + def __str__(self): + return "Provided candidate {!r} does not satisfy {}".format( + self.candidate, + ", ".join(repr(r) for r in self.criterion.iter_requirement()), + ) + + +class Criterion(object): + """Representation of possible resolution results of a package. + + This holds three attributes: + + * `information` is a collection of `RequirementInformation` pairs. + Each pair is a requirement contributing to this criterion, and the + candidate that provides the requirement. 
+ * `incompatibilities` is a collection of all known not-to-work candidates + to exclude from consideration. + * `candidates` is a collection containing all possible candidates deducted + from the union of contributing requirements and known incompatibilities. + It should never be empty, except when the criterion is an attribute of a + raised `RequirementsConflicted` (in which case it is always empty). + + .. note:: + This class is intended to be externally immutable. **Do not** mutate + any of its attribute containers. + """ + + def __init__(self, candidates, information, incompatibilities): + self.candidates = candidates + self.information = information + self.incompatibilities = incompatibilities + + def __repr__(self): + requirements = ", ".join( + "({!r}, via={!r})".format(req, parent) + for req, parent in self.information + ) + return "Criterion({})".format(requirements) + + @classmethod + def from_requirement(cls, provider, requirement, parent): + """Build an instance from a requirement. + """ + candidates = provider.find_matches([requirement]) + if not isinstance(candidates, collections_abc.Sequence): + candidates = list(candidates) + criterion = cls( + candidates=candidates, + information=[RequirementInformation(requirement, parent)], + incompatibilities=[], + ) + if not candidates: + raise RequirementsConflicted(criterion) + return criterion + + def iter_requirement(self): + return (i.requirement for i in self.information) + + def iter_parent(self): + return (i.parent for i in self.information) + + def merged_with(self, provider, requirement, parent): + """Build a new instance from this and a new requirement. 
+ """ + infos = list(self.information) + infos.append(RequirementInformation(requirement, parent)) + candidates = provider.find_matches([r for r, _ in infos]) + if not isinstance(candidates, collections_abc.Sequence): + candidates = list(candidates) + criterion = type(self)(candidates, infos, list(self.incompatibilities)) + if not candidates: + raise RequirementsConflicted(criterion) + return criterion + + def excluded_of(self, candidate): + """Build a new instance from this, but excluding specified candidate. + + Returns the new instance, or None if we still have no valid candidates. + """ + incompats = list(self.incompatibilities) + incompats.append(candidate) + candidates = [c for c in self.candidates if c != candidate] + if not candidates: + return None + criterion = type(self)(candidates, list(self.information), incompats) + return criterion + + +class ResolutionError(ResolverException): + pass + + +class ResolutionImpossible(ResolutionError): + def __init__(self, causes): + super(ResolutionImpossible, self).__init__(causes) + # causes is a list of RequirementInformation objects + self.causes = causes + + +class ResolutionTooDeep(ResolutionError): + def __init__(self, round_count): + super(ResolutionTooDeep, self).__init__(round_count) + self.round_count = round_count + + +# Resolution state in a round. +State = collections.namedtuple("State", "mapping criteria") + + +class Resolution(object): + """Stateful resolution object. + + This is designed as a one-off object that holds information to kick start + the resolution process, and holds the results afterwards. + """ + + def __init__(self, provider, reporter): + self._p = provider + self._r = reporter + self._states = [] + + @property + def state(self): + try: + return self._states[-1] + except IndexError: + raise AttributeError("state") + + def _push_new_state(self): + """Push a new state into history. + + This new state will be used to hold resolution results of the next + coming round. 
+ """ + try: + base = self._states[-1] + except IndexError: + state = State(mapping=collections.OrderedDict(), criteria={}) + else: + state = State( + mapping=base.mapping.copy(), criteria=base.criteria.copy(), + ) + self._states.append(state) + + def _merge_into_criterion(self, requirement, parent): + self._r.adding_requirement(requirement, parent) + name = self._p.identify(requirement) + try: + crit = self.state.criteria[name] + except KeyError: + crit = Criterion.from_requirement(self._p, requirement, parent) + else: + crit = crit.merged_with(self._p, requirement, parent) + return name, crit + + def _get_criterion_item_preference(self, item): + name, criterion = item + try: + pinned = self.state.mapping[name] + except KeyError: + pinned = None + return self._p.get_preference( + pinned, criterion.candidates, criterion.information, + ) + + def _is_current_pin_satisfying(self, name, criterion): + try: + current_pin = self.state.mapping[name] + except KeyError: + return False + return all( + self._p.is_satisfied_by(r, current_pin) + for r in criterion.iter_requirement() + ) + + def _get_criteria_to_update(self, candidate): + criteria = {} + for r in self._p.get_dependencies(candidate): + name, crit = self._merge_into_criterion(r, parent=candidate) + criteria[name] = crit + return criteria + + def _attempt_to_pin_criterion(self, name, criterion): + causes = [] + for candidate in criterion.candidates: + try: + criteria = self._get_criteria_to_update(candidate) + except RequirementsConflicted as e: + causes.append(e.criterion) + continue + + # Check the newly-pinned candidate actually works. This should + # always pass under normal circumstances, but in the case of a + # faulty provider, we will raise an error to notify the implementer + # to fix find_matches() and/or is_satisfied_by(). 
+ satisfied = all( + self._p.is_satisfied_by(r, candidate) + for r in criterion.iter_requirement() + ) + if not satisfied: + raise InconsistentCandidate(candidate, criterion) + + # Put newly-pinned candidate at the end. This is essential because + # backtracking looks at this mapping to get the last pin. + self._r.pinning(candidate) + self.state.mapping.pop(name, None) + self.state.mapping[name] = candidate + self.state.criteria.update(criteria) + + return [] + + # All candidates tried, nothing works. This criterion is a dead + # end, signal for backtracking. + return causes + + def _backtrack(self): + # Drop the current state, it's known not to work. + del self._states[-1] + + # We need at least 2 states here: + # (a) One to backtrack to. + # (b) One to restore state (a) to its state prior to candidate-pinning, + # so we can pin another one instead. + + while len(self._states) >= 2: + # Retract the last candidate pin. + prev_state = self._states.pop() + try: + name, candidate = prev_state.mapping.popitem() + except KeyError: + continue + self._r.backtracking(candidate) + + # Create a new state to work on, with the newly known not-working + # candidate excluded. + self._push_new_state() + + # Mark the retracted candidate as incompatible. + criterion = self.state.criteria[name].excluded_of(candidate) + if criterion is None: + # This state still does not work. Try the still previous state. 
+ del self._states[-1] + continue + self.state.criteria[name] = criterion + + return True + + return False + + def resolve(self, requirements, max_rounds): + if self._states: + raise RuntimeError("already resolved") + + self._push_new_state() + for r in requirements: + try: + name, crit = self._merge_into_criterion(r, parent=None) + except RequirementsConflicted as e: + raise ResolutionImpossible(e.criterion.information) + self.state.criteria[name] = crit + + self._r.starting() + + for round_index in range(max_rounds): + self._r.starting_round(round_index) + + self._push_new_state() + curr = self.state + + unsatisfied_criterion_items = [ + item + for item in self.state.criteria.items() + if not self._is_current_pin_satisfying(*item) + ] + + # All criteria are accounted for. Nothing more to pin, we are done! + if not unsatisfied_criterion_items: + del self._states[-1] + self._r.ending(curr) + return self.state + + # Choose the most preferred unpinned criterion to try. + name, criterion = min( + unsatisfied_criterion_items, + key=self._get_criterion_item_preference, + ) + failure_causes = self._attempt_to_pin_criterion(name, criterion) + + # Backtrack if pinning fails. 
+ if failure_causes: + result = self._backtrack() + if not result: + causes = [ + i for crit in failure_causes for i in crit.information + ] + raise ResolutionImpossible(causes) + + self._r.ending_round(round_index, curr) + + raise ResolutionTooDeep(max_rounds) + + +def _has_route_to_root(criteria, key, all_keys, connected): + if key in connected: + return True + if key not in criteria: + return False + for p in criteria[key].iter_parent(): + try: + pkey = all_keys[id(p)] + except KeyError: + continue + if pkey in connected: + connected.add(key) + return True + if _has_route_to_root(criteria, pkey, all_keys, connected): + connected.add(key) + return True + return False + + +Result = collections.namedtuple("Result", "mapping graph criteria") + + +def _build_result(state): + mapping = state.mapping + all_keys = {id(v): k for k, v in mapping.items()} + all_keys[id(None)] = None + + graph = DirectedGraph() + graph.add(None) # Sentinel as root dependencies' parent. + + connected = {None} + for key, criterion in state.criteria.items(): + if not _has_route_to_root(state.criteria, key, all_keys, connected): + continue + if key not in graph: + graph.add(key) + for p in criterion.iter_parent(): + try: + pkey = all_keys[id(p)] + except KeyError: + continue + if pkey not in graph: + graph.add(pkey) + graph.connect(pkey, key) + + return Result( + mapping={k: v for k, v in mapping.items() if k in connected}, + graph=graph, + criteria=state.criteria, + ) + + +class Resolver(AbstractResolver): + """The thing that performs the actual resolution work. + """ + + base_exception = ResolverException + + def resolve(self, requirements, max_rounds=100): + """Take a collection of constraints, spit out the resolution result. + + The return value is a representation to the final resolution result. It + is a tuple subclass with three public members: + + * `mapping`: A dict of resolved candidates. 
Each key is an identifier + of a requirement (as returned by the provider's `identify` method), + and the value is the resolved candidate. + * `graph`: A `DirectedGraph` instance representing the dependency tree. + The vertices are keys of `mapping`, and each edge represents *why* + a particular package is included. A special vertex `None` is + included to represent parents of user-supplied requirements. + * `criteria`: A dict of "criteria" that hold detailed information on + how edges in the graph are derived. Each key is an identifier of a + requirement, and the value is a `Criterion` instance. + + The following exceptions may be raised if a resolution cannot be found: + + * `ResolutionImpossible`: A resolution cannot be found for the given + combination of requirements. The `causes` attribute of the + exception is a list of (requirement, parent), giving the + requirements that could not be satisfied. + * `ResolutionTooDeep`: The dependency tree is too deeply nested and + the resolver gave up. This is usually caused by a circular + dependency, but you can try to resolve this by increasing the + `max_rounds` argument. + """ + resolution = Resolution(self.provider, self.reporter) + state = resolution.resolve(requirements, max_rounds=max_rounds) + return _build_result(state) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/structs.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/structs.py new file mode 100644 index 00000000..1eee08b3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/resolvelib/structs.py @@ -0,0 +1,68 @@ +class DirectedGraph(object): + """A graph structure with directed edges. 
+ """ + + def __init__(self): + self._vertices = set() + self._forwards = {} # <key> -> Set[<key>] + self._backwards = {} # <key> -> Set[<key>] + + def __iter__(self): + return iter(self._vertices) + + def __len__(self): + return len(self._vertices) + + def __contains__(self, key): + return key in self._vertices + + def copy(self): + """Return a shallow copy of this graph. + """ + other = DirectedGraph() + other._vertices = set(self._vertices) + other._forwards = {k: set(v) for k, v in self._forwards.items()} + other._backwards = {k: set(v) for k, v in self._backwards.items()} + return other + + def add(self, key): + """Add a new vertex to the graph. + """ + if key in self._vertices: + raise ValueError("vertex exists") + self._vertices.add(key) + self._forwards[key] = set() + self._backwards[key] = set() + + def remove(self, key): + """Remove a vertex from the graph, disconnecting all edges from/to it. + """ + self._vertices.remove(key) + for f in self._forwards.pop(key): + self._backwards[f].remove(key) + for t in self._backwards.pop(key): + self._forwards[t].remove(key) + + def connected(self, f, t): + return f in self._backwards[t] and t in self._forwards[f] + + def connect(self, f, t): + """Connect two existing vertices. + + Nothing happens if the vertices are already connected. 
+ """ + if t not in self._vertices: + raise KeyError(t) + self._forwards[f].add(t) + self._backwards[t].add(f) + + def iter_edges(self): + for f, children in self._forwards.items(): + for t in children: + yield f, t + + def iter_children(self, key): + return iter(self._forwards[key]) + + def iter_parents(self, key): + return iter(self._backwards[key]) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/six.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/six.py index 89b2188f..83f69783 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/six.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/six.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010-2018 Benjamin Peterson +# Copyright (c) 2010-2020 Benjamin Peterson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -29,7 +29,7 @@ import sys import types __author__ = "Benjamin Peterson <benjamin@python.org>" -__version__ = "1.12.0" +__version__ = "1.15.0" # Useful for very coarse version differentiation. 
@@ -255,9 +255,11 @@ _moved_attributes = [ MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), MovedModule("builtins", "__builtin__"), MovedModule("configparser", "ConfigParser"), + MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"), MovedModule("copyreg", "copy_reg"), MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), - MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"), MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), MovedModule("http_cookies", "Cookie", "http.cookies"), MovedModule("html_entities", "htmlentitydefs", "html.entities"), @@ -637,13 +639,16 @@ if PY3: import io StringIO = io.StringIO BytesIO = io.BytesIO + del io _assertCountEqual = "assertCountEqual" if sys.version_info[1] <= 1: _assertRaisesRegex = "assertRaisesRegexp" _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" else: _assertRaisesRegex = "assertRaisesRegex" _assertRegex = "assertRegex" + _assertNotRegex = "assertNotRegex" else: def b(s): return s @@ -665,6 +670,7 @@ else: _assertCountEqual = "assertItemsEqual" _assertRaisesRegex = "assertRaisesRegexp" _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" _add_doc(b, """Byte literal""") _add_doc(u, """Text literal""") @@ -681,6 +687,10 @@ def assertRegex(self, *args, **kwargs): return getattr(self, _assertRegex)(*args, **kwargs) +def assertNotRegex(self, *args, **kwargs): + return getattr(self, _assertNotRegex)(*args, **kwargs) + + if PY3: exec_ = getattr(moves.builtins, "exec") @@ -716,16 +726,7 @@ else: """) -if sys.version_info[:2] == (3, 2): - exec_("""def raise_from(value, from_value): - try: - if from_value is None: - raise value - raise value from from_value - finally: - value = None -""") -elif 
sys.version_info[:2] > (3, 2): +if sys.version_info[:2] > (3,): exec_("""def raise_from(value, from_value): try: raise value from from_value @@ -805,13 +806,33 @@ if sys.version_info[:2] < (3, 3): _add_doc(reraise, """Reraise an exception.""") if sys.version_info[0:2] < (3, 4): + # This does exactly the same what the :func:`py3:functools.update_wrapper` + # function does on Python versions after 3.2. It sets the ``__wrapped__`` + # attribute on ``wrapper`` object and it doesn't raise an error if any of + # the attributes mentioned in ``assigned`` and ``updated`` are missing on + # ``wrapped`` object. + def _update_wrapper(wrapper, wrapped, + assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + for attr in assigned: + try: + value = getattr(wrapped, attr) + except AttributeError: + continue + else: + setattr(wrapper, attr, value) + for attr in updated: + getattr(wrapper, attr).update(getattr(wrapped, attr, {})) + wrapper.__wrapped__ = wrapped + return wrapper + _update_wrapper.__doc__ = functools.update_wrapper.__doc__ + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, updated=functools.WRAPPER_UPDATES): - def wrapper(f): - f = functools.wraps(wrapped, assigned, updated)(f) - f.__wrapped__ = wrapped - return f - return wrapper + return functools.partial(_update_wrapper, wrapped=wrapped, + assigned=assigned, updated=updated) + wraps.__doc__ = functools.wraps.__doc__ + else: wraps = functools.wraps @@ -824,7 +845,15 @@ def with_metaclass(meta, *bases): class metaclass(type): def __new__(cls, name, this_bases, d): - return meta(name, bases, d) + if sys.version_info[:2] >= (3, 7): + # This version introduced PEP 560 that requires a bit + # of extra care (we mimic what is done by __build_class__). 
+ resolved_bases = types.resolve_bases(bases) + if resolved_bases is not bases: + d['__orig_bases__'] = bases + else: + resolved_bases = bases + return meta(name, resolved_bases, d) @classmethod def __prepare__(cls, name, this_bases): @@ -861,12 +890,11 @@ def ensure_binary(s, encoding='utf-8', errors='strict'): - `str` -> encoded to `bytes` - `bytes` -> `bytes` """ + if isinstance(s, binary_type): + return s if isinstance(s, text_type): return s.encode(encoding, errors) - elif isinstance(s, binary_type): - return s - else: - raise TypeError("not expecting type '%s'" % type(s)) + raise TypeError("not expecting type '%s'" % type(s)) def ensure_str(s, encoding='utf-8', errors='strict'): @@ -880,12 +908,15 @@ def ensure_str(s, encoding='utf-8', errors='strict'): - `str` -> `str` - `bytes` -> decoded to `str` """ - if not isinstance(s, (text_type, binary_type)): - raise TypeError("not expecting type '%s'" % type(s)) + # Optimization: Fast return for the common case. + if type(s) is str: + return s if PY2 and isinstance(s, text_type): - s = s.encode(encoding, errors) + return s.encode(encoding, errors) elif PY3 and isinstance(s, binary_type): - s = s.decode(encoding, errors) + return s.decode(encoding, errors) + elif not isinstance(s, (text_type, binary_type)): + raise TypeError("not expecting type '%s'" % type(s)) return s @@ -908,10 +939,9 @@ def ensure_text(s, encoding='utf-8', errors='strict'): raise TypeError("not expecting type '%s'" % type(s)) - def python_2_unicode_compatible(klass): """ - A decorator that defines __unicode__ and __str__ methods under Python 2. + A class decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. 
To support Python 2 and 3 with a single code base, define a __str__ method diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/__init__.py new file mode 100644 index 00000000..7a08fe72 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/__init__.py @@ -0,0 +1,25 @@ +"""Python module which parses and emits TOML. + +Released under the MIT license. +""" + +from pip._vendor.toml import encoder +from pip._vendor.toml import decoder + +__version__ = "0.10.1" +_spec_ = "0.5.0" + +load = decoder.load +loads = decoder.loads +TomlDecoder = decoder.TomlDecoder +TomlDecodeError = decoder.TomlDecodeError +TomlPreserveCommentDecoder = decoder.TomlPreserveCommentDecoder + +dump = encoder.dump +dumps = encoder.dumps +TomlEncoder = encoder.TomlEncoder +TomlArraySeparatorEncoder = encoder.TomlArraySeparatorEncoder +TomlPreserveInlineDictEncoder = encoder.TomlPreserveInlineDictEncoder +TomlNumpyEncoder = encoder.TomlNumpyEncoder +TomlPreserveCommentEncoder = encoder.TomlPreserveCommentEncoder +TomlPathlibEncoder = encoder.TomlPathlibEncoder diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/common.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/common.py new file mode 100644 index 00000000..a5d673da --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/common.py @@ -0,0 +1,6 @@ +# content after the \ +escapes = ['0', 'b', 'f', 'n', 'r', 't', '"'] +# What it should be replaced by +escapedchars = ['\0', '\b', '\f', '\n', '\r', '\t', '\"'] +# Used for substitution +escape_to_escapedchars = dict(zip(_escapes, _escapedchars)) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/decoder.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/decoder.py new 
file mode 100644 index 00000000..e4887770 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/decoder.py @@ -0,0 +1,1052 @@ +import datetime +import io +from os import linesep +import re +import sys + +from pip._vendor.toml.tz import TomlTz + +if sys.version_info < (3,): + _range = xrange # noqa: F821 +else: + unicode = str + _range = range + basestring = str + unichr = chr + + +def _detect_pathlib_path(p): + if (3, 4) <= sys.version_info: + import pathlib + if isinstance(p, pathlib.PurePath): + return True + return False + + +def _ispath(p): + if isinstance(p, (bytes, basestring)): + return True + return _detect_pathlib_path(p) + + +def _getpath(p): + if (3, 6) <= sys.version_info: + import os + return os.fspath(p) + if _detect_pathlib_path(p): + return str(p) + return p + + +try: + FNFError = FileNotFoundError +except NameError: + FNFError = IOError + + +TIME_RE = re.compile(r"([0-9]{2}):([0-9]{2}):([0-9]{2})(\.([0-9]{3,6}))?") + + +class TomlDecodeError(ValueError): + """Base toml Exception / Error.""" + + def __init__(self, msg, doc, pos): + lineno = doc.count('\n', 0, pos) + 1 + colno = pos - doc.rfind('\n', 0, pos) + emsg = '{} (line {} column {} char {})'.format(msg, lineno, colno, pos) + ValueError.__init__(self, emsg) + self.msg = msg + self.doc = doc + self.pos = pos + self.lineno = lineno + self.colno = colno + + +# Matches a TOML number, which allows underscores for readability +_number_with_underscores = re.compile('([0-9])(_([0-9]))*') + + +class CommentValue(object): + def __init__(self, val, comment, beginline, _dict): + self.val = val + separator = "\n" if beginline else " " + self.comment = separator + comment + self._dict = _dict + + def __getitem__(self, key): + return self.val[key] + + def __setitem__(self, key, value): + self.val[key] = value + + def dump(self, dump_value_func): + retstr = dump_value_func(self.val) + if isinstance(self.val, self._dict): + return self.comment + "\n" + unicode(retstr) + 
else: + return unicode(retstr) + self.comment + + +def _strictly_valid_num(n): + n = n.strip() + if not n: + return False + if n[0] == '_': + return False + if n[-1] == '_': + return False + if "_." in n or "._" in n: + return False + if len(n) == 1: + return True + if n[0] == '0' and n[1] not in ['.', 'o', 'b', 'x']: + return False + if n[0] == '+' or n[0] == '-': + n = n[1:] + if len(n) > 1 and n[0] == '0' and n[1] != '.': + return False + if '__' in n: + return False + return True + + +def load(f, _dict=dict, decoder=None): + """Parses named file or files as toml and returns a dictionary + + Args: + f: Path to the file to open, array of files to read into single dict + or a file descriptor + _dict: (optional) Specifies the class of the returned toml dictionary + decoder: The decoder to use + + Returns: + Parsed toml file represented as a dictionary + + Raises: + TypeError -- When f is invalid type + TomlDecodeError: Error while decoding toml + IOError / FileNotFoundError -- When an array with no valid (existing) + (Python 2 / Python 3) file paths is passed + """ + + if _ispath(f): + with io.open(_getpath(f), encoding='utf-8') as ffile: + return loads(ffile.read(), _dict, decoder) + elif isinstance(f, list): + from os import path as op + from warnings import warn + if not [path for path in f if op.exists(path)]: + error_msg = "Load expects a list to contain filenames only." 
+ error_msg += linesep + error_msg += ("The list needs to contain the path of at least one " + "existing file.") + raise FNFError(error_msg) + if decoder is None: + decoder = TomlDecoder(_dict) + d = decoder.get_empty_table() + for l in f: # noqa: E741 + if op.exists(l): + d.update(load(l, _dict, decoder)) + else: + warn("Non-existent filename in list with at least one valid " + "filename") + return d + else: + try: + return loads(f.read(), _dict, decoder) + except AttributeError: + raise TypeError("You can only load a file descriptor, filename or " + "list") + + +_groupname_re = re.compile(r'^[A-Za-z0-9_-]+$') + + +def loads(s, _dict=dict, decoder=None): + """Parses string as toml + + Args: + s: String to be parsed + _dict: (optional) Specifies the class of the returned toml dictionary + + Returns: + Parsed toml file represented as a dictionary + + Raises: + TypeError: When a non-string is passed + TomlDecodeError: Error while decoding toml + """ + + implicitgroups = [] + if decoder is None: + decoder = TomlDecoder(_dict) + retval = decoder.get_empty_table() + currentlevel = retval + if not isinstance(s, basestring): + raise TypeError("Expecting something like a string") + + if not isinstance(s, unicode): + s = s.decode('utf8') + + original = s + sl = list(s) + openarr = 0 + openstring = False + openstrchar = "" + multilinestr = False + arrayoftables = False + beginline = True + keygroup = False + dottedkey = False + keyname = 0 + key = '' + prev_key = '' + line_no = 1 + + for i, item in enumerate(sl): + if item == '\r' and sl[i + 1] == '\n': + sl[i] = ' ' + continue + if keyname: + key += item + if item == '\n': + raise TomlDecodeError("Key name found without value." 
+ " Reached end of line.", original, i) + if openstring: + if item == openstrchar: + oddbackslash = False + k = 1 + while i >= k and sl[i - k] == '\\': + oddbackslash = not oddbackslash + k += 1 + if not oddbackslash: + keyname = 2 + openstring = False + openstrchar = "" + continue + elif keyname == 1: + if item.isspace(): + keyname = 2 + continue + elif item == '.': + dottedkey = True + continue + elif item.isalnum() or item == '_' or item == '-': + continue + elif (dottedkey and sl[i - 1] == '.' and + (item == '"' or item == "'")): + openstring = True + openstrchar = item + continue + elif keyname == 2: + if item.isspace(): + if dottedkey: + nextitem = sl[i + 1] + if not nextitem.isspace() and nextitem != '.': + keyname = 1 + continue + if item == '.': + dottedkey = True + nextitem = sl[i + 1] + if not nextitem.isspace() and nextitem != '.': + keyname = 1 + continue + if item == '=': + keyname = 0 + prev_key = key[:-1].rstrip() + key = '' + dottedkey = False + else: + raise TomlDecodeError("Found invalid character in key name: '" + + item + "'. 
Try quoting the key name.", + original, i) + if item == "'" and openstrchar != '"': + k = 1 + try: + while sl[i - k] == "'": + k += 1 + if k == 3: + break + except IndexError: + pass + if k == 3: + multilinestr = not multilinestr + openstring = multilinestr + else: + openstring = not openstring + if openstring: + openstrchar = "'" + else: + openstrchar = "" + if item == '"' and openstrchar != "'": + oddbackslash = False + k = 1 + tripquote = False + try: + while sl[i - k] == '"': + k += 1 + if k == 3: + tripquote = True + break + if k == 1 or (k == 3 and tripquote): + while sl[i - k] == '\\': + oddbackslash = not oddbackslash + k += 1 + except IndexError: + pass + if not oddbackslash: + if tripquote: + multilinestr = not multilinestr + openstring = multilinestr + else: + openstring = not openstring + if openstring: + openstrchar = '"' + else: + openstrchar = "" + if item == '#' and (not openstring and not keygroup and + not arrayoftables): + j = i + comment = "" + try: + while sl[j] != '\n': + comment += s[j] + sl[j] = ' ' + j += 1 + except IndexError: + break + if not openarr: + decoder.preserve_comment(line_no, prev_key, comment, beginline) + if item == '[' and (not openstring and not keygroup and + not arrayoftables): + if beginline: + if len(sl) > i + 1 and sl[i + 1] == '[': + arrayoftables = True + else: + keygroup = True + else: + openarr += 1 + if item == ']' and not openstring: + if keygroup: + keygroup = False + elif arrayoftables: + if sl[i - 1] == ']': + arrayoftables = False + else: + openarr -= 1 + if item == '\n': + if openstring or multilinestr: + if not multilinestr: + raise TomlDecodeError("Unbalanced quotes", original, i) + if ((sl[i - 1] == "'" or sl[i - 1] == '"') and ( + sl[i - 2] == sl[i - 1])): + sl[i] = sl[i - 1] + if sl[i - 3] == sl[i - 1]: + sl[i - 3] = ' ' + elif openarr: + sl[i] = ' ' + else: + beginline = True + line_no += 1 + elif beginline and sl[i] != ' ' and sl[i] != '\t': + beginline = False + if not keygroup and not arrayoftables: 
+ if sl[i] == '=': + raise TomlDecodeError("Found empty keyname. ", original, i) + keyname = 1 + key += item + if keyname: + raise TomlDecodeError("Key name found without value." + " Reached end of file.", original, len(s)) + if openstring: # reached EOF and have an unterminated string + raise TomlDecodeError("Unterminated string found." + " Reached end of file.", original, len(s)) + s = ''.join(sl) + s = s.split('\n') + multikey = None + multilinestr = "" + multibackslash = False + pos = 0 + for idx, line in enumerate(s): + if idx > 0: + pos += len(s[idx - 1]) + 1 + + decoder.embed_comments(idx, currentlevel) + + if not multilinestr or multibackslash or '\n' not in multilinestr: + line = line.strip() + if line == "" and (not multikey or multibackslash): + continue + if multikey: + if multibackslash: + multilinestr += line + else: + multilinestr += line + multibackslash = False + closed = False + if multilinestr[0] == '[': + closed = line[-1] == ']' + elif len(line) > 2: + closed = (line[-1] == multilinestr[0] and + line[-2] == multilinestr[0] and + line[-3] == multilinestr[0]) + if closed: + try: + value, vtype = decoder.load_value(multilinestr) + except ValueError as err: + raise TomlDecodeError(str(err), original, pos) + currentlevel[multikey] = value + multikey = None + multilinestr = "" + else: + k = len(multilinestr) - 1 + while k > -1 and multilinestr[k] == '\\': + multibackslash = not multibackslash + k -= 1 + if multibackslash: + multilinestr = multilinestr[:-1] + else: + multilinestr += "\n" + continue + if line[0] == '[': + arrayoftables = False + if len(line) == 1: + raise TomlDecodeError("Opening key group bracket on line by " + "itself.", original, pos) + if line[1] == '[': + arrayoftables = True + line = line[2:] + splitstr = ']]' + else: + line = line[1:] + splitstr = ']' + i = 1 + quotesplits = decoder._get_split_on_quotes(line) + quoted = False + for quotesplit in quotesplits: + if not quoted and splitstr in quotesplit: + break + i += 
quotesplit.count(splitstr) + quoted = not quoted + line = line.split(splitstr, i) + if len(line) < i + 1 or line[-1].strip() != "": + raise TomlDecodeError("Key group not on a line by itself.", + original, pos) + groups = splitstr.join(line[:-1]).split('.') + i = 0 + while i < len(groups): + groups[i] = groups[i].strip() + if len(groups[i]) > 0 and (groups[i][0] == '"' or + groups[i][0] == "'"): + groupstr = groups[i] + j = i + 1 + while not groupstr[0] == groupstr[-1]: + j += 1 + if j > len(groups) + 2: + raise TomlDecodeError("Invalid group name '" + + groupstr + "' Something " + + "went wrong.", original, pos) + groupstr = '.'.join(groups[i:j]).strip() + groups[i] = groupstr[1:-1] + groups[i + 1:j] = [] + else: + if not _groupname_re.match(groups[i]): + raise TomlDecodeError("Invalid group name '" + + groups[i] + "'. Try quoting it.", + original, pos) + i += 1 + currentlevel = retval + for i in _range(len(groups)): + group = groups[i] + if group == "": + raise TomlDecodeError("Can't have a keygroup with an empty " + "name", original, pos) + try: + currentlevel[group] + if i == len(groups) - 1: + if group in implicitgroups: + implicitgroups.remove(group) + if arrayoftables: + raise TomlDecodeError("An implicitly defined " + "table can't be an array", + original, pos) + elif arrayoftables: + currentlevel[group].append(decoder.get_empty_table() + ) + else: + raise TomlDecodeError("What? " + group + + " already exists?" 
+ + str(currentlevel), + original, pos) + except TypeError: + currentlevel = currentlevel[-1] + if group not in currentlevel: + currentlevel[group] = decoder.get_empty_table() + if i == len(groups) - 1 and arrayoftables: + currentlevel[group] = [decoder.get_empty_table()] + except KeyError: + if i != len(groups) - 1: + implicitgroups.append(group) + currentlevel[group] = decoder.get_empty_table() + if i == len(groups) - 1 and arrayoftables: + currentlevel[group] = [decoder.get_empty_table()] + currentlevel = currentlevel[group] + if arrayoftables: + try: + currentlevel = currentlevel[-1] + except KeyError: + pass + elif line[0] == "{": + if line[-1] != "}": + raise TomlDecodeError("Line breaks are not allowed in inline" + "objects", original, pos) + try: + decoder.load_inline_object(line, currentlevel, multikey, + multibackslash) + except ValueError as err: + raise TomlDecodeError(str(err), original, pos) + elif "=" in line: + try: + ret = decoder.load_line(line, currentlevel, multikey, + multibackslash) + except ValueError as err: + raise TomlDecodeError(str(err), original, pos) + if ret is not None: + multikey, multilinestr, multibackslash = ret + return retval + + +def _load_date(val): + microsecond = 0 + tz = None + try: + if len(val) > 19: + if val[19] == '.': + if val[-1].upper() == 'Z': + subsecondval = val[20:-1] + tzval = "Z" + else: + subsecondvalandtz = val[20:] + if '+' in subsecondvalandtz: + splitpoint = subsecondvalandtz.index('+') + subsecondval = subsecondvalandtz[:splitpoint] + tzval = subsecondvalandtz[splitpoint:] + elif '-' in subsecondvalandtz: + splitpoint = subsecondvalandtz.index('-') + subsecondval = subsecondvalandtz[:splitpoint] + tzval = subsecondvalandtz[splitpoint:] + else: + tzval = None + subsecondval = subsecondvalandtz + if tzval is not None: + tz = TomlTz(tzval) + microsecond = int(int(subsecondval) * + (10 ** (6 - len(subsecondval)))) + else: + tz = TomlTz(val[19:]) + except ValueError: + tz = None + if "-" not in val[1:]: + 
return None + try: + if len(val) == 10: + d = datetime.date( + int(val[:4]), int(val[5:7]), + int(val[8:10])) + else: + d = datetime.datetime( + int(val[:4]), int(val[5:7]), + int(val[8:10]), int(val[11:13]), + int(val[14:16]), int(val[17:19]), microsecond, tz) + except ValueError: + return None + return d + + +def _load_unicode_escapes(v, hexbytes, prefix): + skip = False + i = len(v) - 1 + while i > -1 and v[i] == '\\': + skip = not skip + i -= 1 + for hx in hexbytes: + if skip: + skip = False + i = len(hx) - 1 + while i > -1 and hx[i] == '\\': + skip = not skip + i -= 1 + v += prefix + v += hx + continue + hxb = "" + i = 0 + hxblen = 4 + if prefix == "\\U": + hxblen = 8 + hxb = ''.join(hx[i:i + hxblen]).lower() + if hxb.strip('0123456789abcdef'): + raise ValueError("Invalid escape sequence: " + hxb) + if hxb[0] == "d" and hxb[1].strip('01234567'): + raise ValueError("Invalid escape sequence: " + hxb + + ". Only scalar unicode points are allowed.") + v += unichr(int(hxb, 16)) + v += unicode(hx[len(hxb):]) + return v + + +# Unescape TOML string values. 
+ +# content after the \ +_escapes = ['0', 'b', 'f', 'n', 'r', 't', '"'] +# What it should be replaced by +_escapedchars = ['\0', '\b', '\f', '\n', '\r', '\t', '\"'] +# Used for substitution +_escape_to_escapedchars = dict(zip(_escapes, _escapedchars)) + + +def _unescape(v): + """Unescape characters in a TOML string.""" + i = 0 + backslash = False + while i < len(v): + if backslash: + backslash = False + if v[i] in _escapes: + v = v[:i - 1] + _escape_to_escapedchars[v[i]] + v[i + 1:] + elif v[i] == '\\': + v = v[:i - 1] + v[i:] + elif v[i] == 'u' or v[i] == 'U': + i += 1 + else: + raise ValueError("Reserved escape sequence used") + continue + elif v[i] == '\\': + backslash = True + i += 1 + return v + + +class InlineTableDict(object): + """Sentinel subclass of dict for inline tables.""" + + +class TomlDecoder(object): + + def __init__(self, _dict=dict): + self._dict = _dict + + def get_empty_table(self): + return self._dict() + + def get_empty_inline_table(self): + class DynamicInlineTableDict(self._dict, InlineTableDict): + """Concrete sentinel subclass for inline tables. 
+ It is a subclass of _dict which is passed in dynamically at load + time + + It is also a subclass of InlineTableDict + """ + + return DynamicInlineTableDict() + + def load_inline_object(self, line, currentlevel, multikey=False, + multibackslash=False): + candidate_groups = line[1:-1].split(",") + groups = [] + if len(candidate_groups) == 1 and not candidate_groups[0].strip(): + candidate_groups.pop() + while len(candidate_groups) > 0: + candidate_group = candidate_groups.pop(0) + try: + _, value = candidate_group.split('=', 1) + except ValueError: + raise ValueError("Invalid inline table encountered") + value = value.strip() + if ((value[0] == value[-1] and value[0] in ('"', "'")) or ( + value[0] in '-0123456789' or + value in ('true', 'false') or + (value[0] == "[" and value[-1] == "]") or + (value[0] == '{' and value[-1] == '}'))): + groups.append(candidate_group) + elif len(candidate_groups) > 0: + candidate_groups[0] = (candidate_group + "," + + candidate_groups[0]) + else: + raise ValueError("Invalid inline table value encountered") + for group in groups: + status = self.load_line(group, currentlevel, multikey, + multibackslash) + if status is not None: + break + + def _get_split_on_quotes(self, line): + doublequotesplits = line.split('"') + quoted = False + quotesplits = [] + if len(doublequotesplits) > 1 and "'" in doublequotesplits[0]: + singlequotesplits = doublequotesplits[0].split("'") + doublequotesplits = doublequotesplits[1:] + while len(singlequotesplits) % 2 == 0 and len(doublequotesplits): + singlequotesplits[-1] += '"' + doublequotesplits[0] + doublequotesplits = doublequotesplits[1:] + if "'" in singlequotesplits[-1]: + singlequotesplits = (singlequotesplits[:-1] + + singlequotesplits[-1].split("'")) + quotesplits += singlequotesplits + for doublequotesplit in doublequotesplits: + if quoted: + quotesplits.append(doublequotesplit) + else: + quotesplits += doublequotesplit.split("'") + quoted = not quoted + return quotesplits + + def 
load_line(self, line, currentlevel, multikey, multibackslash): + i = 1 + quotesplits = self._get_split_on_quotes(line) + quoted = False + for quotesplit in quotesplits: + if not quoted and '=' in quotesplit: + break + i += quotesplit.count('=') + quoted = not quoted + pair = line.split('=', i) + strictly_valid = _strictly_valid_num(pair[-1]) + if _number_with_underscores.match(pair[-1]): + pair[-1] = pair[-1].replace('_', '') + while len(pair[-1]) and (pair[-1][0] != ' ' and pair[-1][0] != '\t' and + pair[-1][0] != "'" and pair[-1][0] != '"' and + pair[-1][0] != '[' and pair[-1][0] != '{' and + pair[-1].strip() != 'true' and + pair[-1].strip() != 'false'): + try: + float(pair[-1]) + break + except ValueError: + pass + if _load_date(pair[-1]) is not None: + break + if TIME_RE.match(pair[-1]): + break + i += 1 + prev_val = pair[-1] + pair = line.split('=', i) + if prev_val == pair[-1]: + raise ValueError("Invalid date or number") + if strictly_valid: + strictly_valid = _strictly_valid_num(pair[-1]) + pair = ['='.join(pair[:-1]).strip(), pair[-1].strip()] + if '.' 
in pair[0]: + if '"' in pair[0] or "'" in pair[0]: + quotesplits = self._get_split_on_quotes(pair[0]) + quoted = False + levels = [] + for quotesplit in quotesplits: + if quoted: + levels.append(quotesplit) + else: + levels += [level.strip() for level in + quotesplit.split('.')] + quoted = not quoted + else: + levels = pair[0].split('.') + while levels[-1] == "": + levels = levels[:-1] + for level in levels[:-1]: + if level == "": + continue + if level not in currentlevel: + currentlevel[level] = self.get_empty_table() + currentlevel = currentlevel[level] + pair[0] = levels[-1].strip() + elif (pair[0][0] == '"' or pair[0][0] == "'") and \ + (pair[0][-1] == pair[0][0]): + pair[0] = _unescape(pair[0][1:-1]) + k, koffset = self._load_line_multiline_str(pair[1]) + if k > -1: + while k > -1 and pair[1][k + koffset] == '\\': + multibackslash = not multibackslash + k -= 1 + if multibackslash: + multilinestr = pair[1][:-1] + else: + multilinestr = pair[1] + "\n" + multikey = pair[0] + else: + value, vtype = self.load_value(pair[1], strictly_valid) + try: + currentlevel[pair[0]] + raise ValueError("Duplicate keys!") + except TypeError: + raise ValueError("Duplicate keys!") + except KeyError: + if multikey: + return multikey, multilinestr, multibackslash + else: + currentlevel[pair[0]] = value + + def _load_line_multiline_str(self, p): + poffset = 0 + if len(p) < 3: + return -1, poffset + if p[0] == '[' and (p.strip()[-1] != ']' and + self._load_array_isstrarray(p)): + newp = p[1:].strip().split(',') + while len(newp) > 1 and newp[-1][0] != '"' and newp[-1][0] != "'": + newp = newp[:-2] + [newp[-2] + ',' + newp[-1]] + newp = newp[-1] + poffset = len(p) - len(newp) + p = newp + if p[0] != '"' and p[0] != "'": + return -1, poffset + if p[1] != p[0] or p[2] != p[0]: + return -1, poffset + if len(p) > 5 and p[-1] == p[0] and p[-2] == p[0] and p[-3] == p[0]: + return -1, poffset + return len(p) - 1, poffset + + def load_value(self, v, strictly_valid=True): + if not v: + raise 
ValueError("Empty value is invalid") + if v == 'true': + return (True, "bool") + elif v == 'false': + return (False, "bool") + elif v[0] == '"' or v[0] == "'": + quotechar = v[0] + testv = v[1:].split(quotechar) + triplequote = False + triplequotecount = 0 + if len(testv) > 1 and testv[0] == '' and testv[1] == '': + testv = testv[2:] + triplequote = True + closed = False + for tv in testv: + if tv == '': + if triplequote: + triplequotecount += 1 + else: + closed = True + else: + oddbackslash = False + try: + i = -1 + j = tv[i] + while j == '\\': + oddbackslash = not oddbackslash + i -= 1 + j = tv[i] + except IndexError: + pass + if not oddbackslash: + if closed: + raise ValueError("Found tokens after a closed " + + "string. Invalid TOML.") + else: + if not triplequote or triplequotecount > 1: + closed = True + else: + triplequotecount = 0 + if quotechar == '"': + escapeseqs = v.split('\\')[1:] + backslash = False + for i in escapeseqs: + if i == '': + backslash = not backslash + else: + if i[0] not in _escapes and (i[0] != 'u' and + i[0] != 'U' and + not backslash): + raise ValueError("Reserved escape sequence used") + if backslash: + backslash = False + for prefix in ["\\u", "\\U"]: + if prefix in v: + hexbytes = v.split(prefix) + v = _load_unicode_escapes(hexbytes[0], hexbytes[1:], + prefix) + v = _unescape(v) + if len(v) > 1 and v[1] == quotechar and (len(v) < 3 or + v[1] == v[2]): + v = v[2:-2] + return (v[1:-1], "str") + elif v[0] == '[': + return (self.load_array(v), "array") + elif v[0] == '{': + inline_object = self.get_empty_inline_table() + self.load_inline_object(v, inline_object) + return (inline_object, "inline_object") + elif TIME_RE.match(v): + h, m, s, _, ms = TIME_RE.match(v).groups() + time = datetime.time(int(h), int(m), int(s), int(ms) if ms else 0) + return (time, "time") + else: + parsed_date = _load_date(v) + if parsed_date is not None: + return (parsed_date, "date") + if not strictly_valid: + raise ValueError("Weirdness with leading zeroes 
or " + "underscores in your number.") + itype = "int" + neg = False + if v[0] == '-': + neg = True + v = v[1:] + elif v[0] == '+': + v = v[1:] + v = v.replace('_', '') + lowerv = v.lower() + if '.' in v or ('x' not in v and ('e' in v or 'E' in v)): + if '.' in v and v.split('.', 1)[1] == '': + raise ValueError("This float is missing digits after " + "the point") + if v[0] not in '0123456789': + raise ValueError("This float doesn't have a leading " + "digit") + v = float(v) + itype = "float" + elif len(lowerv) == 3 and (lowerv == 'inf' or lowerv == 'nan'): + v = float(v) + itype = "float" + if itype == "int": + v = int(v, 0) + if neg: + return (0 - v, itype) + return (v, itype) + + def bounded_string(self, s): + if len(s) == 0: + return True + if s[-1] != s[0]: + return False + i = -2 + backslash = False + while len(s) + i > 0: + if s[i] == "\\": + backslash = not backslash + i -= 1 + else: + break + return not backslash + + def _load_array_isstrarray(self, a): + a = a[1:-1].strip() + if a != '' and (a[0] == '"' or a[0] == "'"): + return True + return False + + def load_array(self, a): + atype = None + retval = [] + a = a.strip() + if '[' not in a[1:-1] or "" != a[1:-1].split('[')[0].strip(): + strarray = self._load_array_isstrarray(a) + if not a[1:-1].strip().startswith('{'): + a = a[1:-1].split(',') + else: + # a is an inline object, we must find the matching parenthesis + # to define groups + new_a = [] + start_group_index = 1 + end_group_index = 2 + open_bracket_count = 1 if a[start_group_index] == '{' else 0 + in_str = False + while end_group_index < len(a[1:]): + if a[end_group_index] == '"' or a[end_group_index] == "'": + if in_str: + backslash_index = end_group_index - 1 + while (backslash_index > -1 and + a[backslash_index] == '\\'): + in_str = not in_str + backslash_index -= 1 + in_str = not in_str + if not in_str and a[end_group_index] == '{': + open_bracket_count += 1 + if in_str or a[end_group_index] != '}': + end_group_index += 1 + continue + elif 
a[end_group_index] == '}' and open_bracket_count > 1: + open_bracket_count -= 1 + end_group_index += 1 + continue + + # Increase end_group_index by 1 to get the closing bracket + end_group_index += 1 + + new_a.append(a[start_group_index:end_group_index]) + + # The next start index is at least after the closing + # bracket, a closing bracket can be followed by a comma + # since we are in an array. + start_group_index = end_group_index + 1 + while (start_group_index < len(a[1:]) and + a[start_group_index] != '{'): + start_group_index += 1 + end_group_index = start_group_index + 1 + a = new_a + b = 0 + if strarray: + while b < len(a) - 1: + ab = a[b].strip() + while (not self.bounded_string(ab) or + (len(ab) > 2 and + ab[0] == ab[1] == ab[2] and + ab[-2] != ab[0] and + ab[-3] != ab[0])): + a[b] = a[b] + ',' + a[b + 1] + ab = a[b].strip() + if b < len(a) - 2: + a = a[:b + 1] + a[b + 2:] + else: + a = a[:b + 1] + b += 1 + else: + al = list(a[1:-1]) + a = [] + openarr = 0 + j = 0 + for i in _range(len(al)): + if al[i] == '[': + openarr += 1 + elif al[i] == ']': + openarr -= 1 + elif al[i] == ',' and not openarr: + a.append(''.join(al[j:i])) + j = i + 1 + a.append(''.join(al[j:])) + for i in _range(len(a)): + a[i] = a[i].strip() + if a[i] != '': + nval, ntype = self.load_value(a[i]) + if atype: + if ntype != atype: + raise ValueError("Not a homogeneous array") + else: + atype = ntype + retval.append(nval) + return retval + + def preserve_comment(self, line_no, key, comment, beginline): + pass + + def embed_comments(self, idx, currentlevel): + pass + + +class TomlPreserveCommentDecoder(TomlDecoder): + + def __init__(self, _dict=dict): + self.saved_comments = {} + super(TomlPreserveCommentDecoder, self).__init__(_dict) + + def preserve_comment(self, line_no, key, comment, beginline): + self.saved_comments[line_no] = (key, comment, beginline) + + def embed_comments(self, idx, currentlevel): + if idx not in self.saved_comments: + return + + key, comment, beginline = 
self.saved_comments[idx] + currentlevel[key] = CommentValue(currentlevel[key], comment, beginline, + self._dict) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/encoder.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/encoder.py new file mode 100644 index 00000000..a8b03c7b --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/encoder.py @@ -0,0 +1,304 @@ +import datetime +import re +import sys +from decimal import Decimal + +from pip._vendor.toml.decoder import InlineTableDict + +if sys.version_info >= (3,): + unicode = str + + +def dump(o, f, encoder=None): + """Writes out dict as toml to a file + + Args: + o: Object to dump into toml + f: File descriptor where the toml should be stored + encoder: The ``TomlEncoder`` to use for constructing the output string + + Returns: + String containing the toml corresponding to dictionary + + Raises: + TypeError: When anything other than file descriptor is passed + """ + + if not f.write: + raise TypeError("You can only dump an object to a file descriptor") + d = dumps(o, encoder=encoder) + f.write(d) + return d + + +def dumps(o, encoder=None): + """Stringifies input dict as toml + + Args: + o: Object to dump into toml + encoder: The ``TomlEncoder`` to use for constructing the output string + + Returns: + String containing the toml corresponding to dict + + Examples: + ```python + >>> import toml + >>> output = { + ... 'a': "I'm a string", + ... 'b': ["I'm", "a", "list"], + ... 'c': 2400 + ... 
} + >>> toml.dumps(output) + 'a = "I\'m a string"\nb = [ "I\'m", "a", "list",]\nc = 2400\n' + ``` + """ + + retval = "" + if encoder is None: + encoder = TomlEncoder(o.__class__) + addtoretval, sections = encoder.dump_sections(o, "") + retval += addtoretval + outer_objs = [id(o)] + while sections: + section_ids = [id(section) for section in sections] + for outer_obj in outer_objs: + if outer_obj in section_ids: + raise ValueError("Circular reference detected") + outer_objs += section_ids + newsections = encoder.get_empty_table() + for section in sections: + addtoretval, addtosections = encoder.dump_sections( + sections[section], section) + + if addtoretval or (not addtoretval and not addtosections): + if retval and retval[-2:] != "\n\n": + retval += "\n" + retval += "[" + section + "]\n" + if addtoretval: + retval += addtoretval + for s in addtosections: + newsections[section + "." + s] = addtosections[s] + sections = newsections + return retval + + +def _dump_str(v): + if sys.version_info < (3,) and hasattr(v, 'decode') and isinstance(v, str): + v = v.decode('utf-8') + v = "%r" % v + if v[0] == 'u': + v = v[1:] + singlequote = v.startswith("'") + if singlequote or v.startswith('"'): + v = v[1:-1] + if singlequote: + v = v.replace("\\'", "'") + v = v.replace('"', '\\"') + v = v.split("\\x") + while len(v) > 1: + i = -1 + if not v[0]: + v = v[1:] + v[0] = v[0].replace("\\\\", "\\") + # No, I don't know why != works and == breaks + joinx = v[0][i] != "\\" + while v[0][:i] and v[0][i] == "\\": + joinx = not joinx + i -= 1 + if joinx: + joiner = "x" + else: + joiner = "u00" + v = [v[0] + joiner + v[1]] + v[2:] + return unicode('"' + v[0] + '"') + + +def _dump_float(v): + return "{}".format(v).replace("e+0", "e+").replace("e-0", "e-") + + +def _dump_time(v): + utcoffset = v.utcoffset() + if utcoffset is None: + return v.isoformat() + # The TOML norm specifies that it's local time thus we drop the offset + return v.isoformat()[:-6] + + +class TomlEncoder(object): + + def 
__init__(self, _dict=dict, preserve=False): + self._dict = _dict + self.preserve = preserve + self.dump_funcs = { + str: _dump_str, + unicode: _dump_str, + list: self.dump_list, + bool: lambda v: unicode(v).lower(), + int: lambda v: v, + float: _dump_float, + Decimal: _dump_float, + datetime.datetime: lambda v: v.isoformat().replace('+00:00', 'Z'), + datetime.time: _dump_time, + datetime.date: lambda v: v.isoformat() + } + + def get_empty_table(self): + return self._dict() + + def dump_list(self, v): + retval = "[" + for u in v: + retval += " " + unicode(self.dump_value(u)) + "," + retval += "]" + return retval + + def dump_inline_table(self, section): + """Preserve inline table in its compact syntax instead of expanding + into subsection. + + https://github.com/toml-lang/toml#user-content-inline-table + """ + retval = "" + if isinstance(section, dict): + val_list = [] + for k, v in section.items(): + val = self.dump_inline_table(v) + val_list.append(k + " = " + val) + retval += "{ " + ", ".join(val_list) + " }\n" + return retval + else: + return unicode(self.dump_value(section)) + + def dump_value(self, v): + # Lookup function corresponding to v's type + dump_fn = self.dump_funcs.get(type(v)) + if dump_fn is None and hasattr(v, '__iter__'): + dump_fn = self.dump_funcs[list] + # Evaluate function (if it exists) else return v + return dump_fn(v) if dump_fn is not None else self.dump_funcs[str](v) + + def dump_sections(self, o, sup): + retstr = "" + if sup != "" and sup[-1] != ".": + sup += '.' 
+ retdict = self._dict() + arraystr = "" + for section in o: + section = unicode(section) + qsection = section + if not re.match(r'^[A-Za-z0-9_-]+$', section): + qsection = _dump_str(section) + if not isinstance(o[section], dict): + arrayoftables = False + if isinstance(o[section], list): + for a in o[section]: + if isinstance(a, dict): + arrayoftables = True + if arrayoftables: + for a in o[section]: + arraytabstr = "\n" + arraystr += "[[" + sup + qsection + "]]\n" + s, d = self.dump_sections(a, sup + qsection) + if s: + if s[0] == "[": + arraytabstr += s + else: + arraystr += s + while d: + newd = self._dict() + for dsec in d: + s1, d1 = self.dump_sections(d[dsec], sup + + qsection + "." + + dsec) + if s1: + arraytabstr += ("[" + sup + qsection + + "." + dsec + "]\n") + arraytabstr += s1 + for s1 in d1: + newd[dsec + "." + s1] = d1[s1] + d = newd + arraystr += arraytabstr + else: + if o[section] is not None: + retstr += (qsection + " = " + + unicode(self.dump_value(o[section])) + '\n') + elif self.preserve and isinstance(o[section], InlineTableDict): + retstr += (qsection + " = " + + self.dump_inline_table(o[section])) + else: + retdict[qsection] = o[section] + retstr += arraystr + return (retstr, retdict) + + +class TomlPreserveInlineDictEncoder(TomlEncoder): + + def __init__(self, _dict=dict): + super(TomlPreserveInlineDictEncoder, self).__init__(_dict, True) + + +class TomlArraySeparatorEncoder(TomlEncoder): + + def __init__(self, _dict=dict, preserve=False, separator=","): + super(TomlArraySeparatorEncoder, self).__init__(_dict, preserve) + if separator.strip() == "": + separator = "," + separator + elif separator.strip(' \t\n\r,'): + raise ValueError("Invalid separator for arrays") + self.separator = separator + + def dump_list(self, v): + t = [] + retval = "[" + for u in v: + t.append(self.dump_value(u)) + while t != []: + s = [] + for u in t: + if isinstance(u, list): + for r in u: + s.append(r) + else: + retval += " " + unicode(u) + self.separator + t = s 
+ retval += "]" + return retval + + +class TomlNumpyEncoder(TomlEncoder): + + def __init__(self, _dict=dict, preserve=False): + import numpy as np + super(TomlNumpyEncoder, self).__init__(_dict, preserve) + self.dump_funcs[np.float16] = _dump_float + self.dump_funcs[np.float32] = _dump_float + self.dump_funcs[np.float64] = _dump_float + self.dump_funcs[np.int16] = self._dump_int + self.dump_funcs[np.int32] = self._dump_int + self.dump_funcs[np.int64] = self._dump_int + + def _dump_int(self, v): + return "{}".format(int(v)) + + +class TomlPreserveCommentEncoder(TomlEncoder): + + def __init__(self, _dict=dict, preserve=False): + from pip._vendor.toml.decoder import CommentValue + super(TomlPreserveCommentEncoder, self).__init__(_dict, preserve) + self.dump_funcs[CommentValue] = lambda v: v.dump(self.dump_value) + + +class TomlPathlibEncoder(TomlEncoder): + + def _dump_pathlib_path(self, v): + return _dump_str(str(v)) + + def dump_value(self, v): + if (3, 4) <= sys.version_info: + import pathlib + if isinstance(v, pathlib.PurePath): + v = str(v) + return super(TomlPathlibEncoder, self).dump_value(v) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/ordered.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/ordered.py new file mode 100644 index 00000000..6052016e --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/ordered.py @@ -0,0 +1,15 @@ +from collections import OrderedDict +from pip._vendor.toml import TomlEncoder +from pip._vendor.toml import TomlDecoder + + +class TomlOrderedDecoder(TomlDecoder): + + def __init__(self): + super(self.__class__, self).__init__(_dict=OrderedDict) + + +class TomlOrderedEncoder(TomlEncoder): + + def __init__(self): + super(self.__class__, self).__init__(_dict=OrderedDict) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/tz.py 
b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/tz.py new file mode 100644 index 00000000..93c3c8ad --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/toml/tz.py @@ -0,0 +1,21 @@ +from datetime import tzinfo, timedelta + + +class TomlTz(tzinfo): + def __init__(self, toml_offset): + if toml_offset == "Z": + self._raw_offset = "+00:00" + else: + self._raw_offset = toml_offset + self._sign = -1 if self._raw_offset[0] == '-' else 1 + self._hours = int(self._raw_offset[1:3]) + self._minutes = int(self._raw_offset[4:6]) + + def tzname(self, dt): + return "UTC" + self._raw_offset + + def utcoffset(self, dt): + return self._sign * timedelta(hours=self._hours, minutes=self._minutes) + + def dst(self, dt): + return timedelta(0) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/__init__.py index 148a9c31..667e9bce 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/__init__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/__init__.py @@ -1,15 +1,10 @@ """ urllib3 - Thread-safe connection pooling and re-using. """ - from __future__ import absolute_import import warnings -from .connectionpool import ( - HTTPConnectionPool, - HTTPSConnectionPool, - connection_from_url -) +from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url from . 
import exceptions from .filepost import encode_multipart_formdata @@ -25,25 +20,25 @@ from .util.retry import Retry import logging from logging import NullHandler -__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)' -__license__ = 'MIT' -__version__ = '1.24.1' +__author__ = "Andrey Petrov (andrey.petrov@shazow.net)" +__license__ = "MIT" +__version__ = "1.25.9" __all__ = ( - 'HTTPConnectionPool', - 'HTTPSConnectionPool', - 'PoolManager', - 'ProxyManager', - 'HTTPResponse', - 'Retry', - 'Timeout', - 'add_stderr_logger', - 'connection_from_url', - 'disable_warnings', - 'encode_multipart_formdata', - 'get_host', - 'make_headers', - 'proxy_from_url', + "HTTPConnectionPool", + "HTTPSConnectionPool", + "PoolManager", + "ProxyManager", + "HTTPResponse", + "Retry", + "Timeout", + "add_stderr_logger", + "connection_from_url", + "disable_warnings", + "encode_multipart_formdata", + "get_host", + "make_headers", + "proxy_from_url", ) logging.getLogger(__name__).addHandler(NullHandler()) @@ -60,10 +55,10 @@ def add_stderr_logger(level=logging.DEBUG): # even if urllib3 is vendored within another package. logger = logging.getLogger(__name__) handler = logging.StreamHandler() - handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s')) + handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s")) logger.addHandler(handler) logger.setLevel(level) - logger.debug('Added a stderr logging handler to logger: %s', __name__) + logger.debug("Added a stderr logging handler to logger: %s", __name__) return handler @@ -75,18 +70,17 @@ del NullHandler # shouldn't be: otherwise, it's very hard for users to use most Python # mechanisms to silence them. # SecurityWarning's always go off by default. 
-warnings.simplefilter('always', exceptions.SecurityWarning, append=True) +warnings.simplefilter("always", exceptions.SecurityWarning, append=True) # SubjectAltNameWarning's should go off once per host -warnings.simplefilter('default', exceptions.SubjectAltNameWarning, append=True) +warnings.simplefilter("default", exceptions.SubjectAltNameWarning, append=True) # InsecurePlatformWarning's don't vary between requests, so we keep it default. -warnings.simplefilter('default', exceptions.InsecurePlatformWarning, - append=True) +warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True) # SNIMissingWarnings should go off only once. -warnings.simplefilter('default', exceptions.SNIMissingWarning, append=True) +warnings.simplefilter("default", exceptions.SNIMissingWarning, append=True) def disable_warnings(category=exceptions.HTTPWarning): """ Helper for quickly disabling all urllib3 warnings. """ - warnings.simplefilter('ignore', category) + warnings.simplefilter("ignore", category) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/_collections.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/_collections.py index 34f23811..019d1511 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/_collections.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/_collections.py @@ -1,4 +1,5 @@ from __future__ import absolute_import + try: from collections.abc import Mapping, MutableMapping except ImportError: @@ -6,6 +7,7 @@ except ImportError: try: from threading import RLock except ImportError: # Platform-specific: No threads available + class RLock: def __enter__(self): pass @@ -19,7 +21,7 @@ from .exceptions import InvalidHeader from .packages.six import iterkeys, itervalues, PY3 -__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict'] +__all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"] _Null = object() @@ -82,7 
+84,9 @@ class RecentlyUsedContainer(MutableMapping): return len(self._container) def __iter__(self): - raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.') + raise NotImplementedError( + "Iteration over this class is unlikely to be threadsafe." + ) def clear(self): with self.lock: @@ -150,7 +154,7 @@ class HTTPHeaderDict(MutableMapping): def __getitem__(self, key): val = self._container[key.lower()] - return ', '.join(val[1:]) + return ", ".join(val[1:]) def __delitem__(self, key): del self._container[key.lower()] @@ -159,12 +163,13 @@ class HTTPHeaderDict(MutableMapping): return key.lower() in self._container def __eq__(self, other): - if not isinstance(other, Mapping) and not hasattr(other, 'keys'): + if not isinstance(other, Mapping) and not hasattr(other, "keys"): return False if not isinstance(other, type(self)): other = type(self)(other) - return (dict((k.lower(), v) for k, v in self.itermerged()) == - dict((k.lower(), v) for k, v in other.itermerged())) + return dict((k.lower(), v) for k, v in self.itermerged()) == dict( + (k.lower(), v) for k, v in other.itermerged() + ) def __ne__(self, other): return not self.__eq__(other) @@ -184,9 +189,9 @@ class HTTPHeaderDict(MutableMapping): yield vals[0] def pop(self, key, default=__marker): - '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. + """D.pop(k[,d]) -> v, remove specified key and return the corresponding value. If key is not found, d is returned if given, otherwise KeyError is raised. - ''' + """ # Using the MutableMapping function directly fails due to the private marker. # Using ordinary dict.pop would expose the internal structures. # So let's reinvent the wheel. 
@@ -228,8 +233,10 @@ class HTTPHeaderDict(MutableMapping): with self.add instead of self.__setitem__ """ if len(args) > 1: - raise TypeError("extend() takes at most 1 positional " - "arguments ({0} given)".format(len(args))) + raise TypeError( + "extend() takes at most 1 positional " + "arguments ({0} given)".format(len(args)) + ) other = args[0] if len(args) >= 1 else () if isinstance(other, HTTPHeaderDict): @@ -295,7 +302,7 @@ class HTTPHeaderDict(MutableMapping): """Iterate over all headers, merging duplicate ones together.""" for key in self: val = self._container[key.lower()] - yield val[0], ', '.join(val[1:]) + yield val[0], ", ".join(val[1:]) def items(self): return list(self.iteritems()) @@ -306,7 +313,7 @@ class HTTPHeaderDict(MutableMapping): # python2.7 does not expose a proper API for exporting multiheaders # efficiently. This function re-reads raw lines from the message # object and extracts the multiheaders properly. - obs_fold_continued_leaders = (' ', '\t') + obs_fold_continued_leaders = (" ", "\t") headers = [] for line in message.headers: @@ -316,14 +323,14 @@ class HTTPHeaderDict(MutableMapping): # in RFC-7230 S3.2.4. This indicates a multiline header, but # there exists no previous header to which we can attach it. 
raise InvalidHeader( - 'Header continuation with no previous header: %s' % line + "Header continuation with no previous header: %s" % line ) else: key, value = headers[-1] - headers[-1] = (key, value + ' ' + line.strip()) + headers[-1] = (key, value + " " + line.strip()) continue - key, value = line.split(':', 1) + key, value = line.split(":", 1) headers.append((key, value.strip())) return cls(headers) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/connection.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/connection.py index 02b36654..6da1cf4b 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/connection.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/connection.py @@ -1,4 +1,5 @@ from __future__ import absolute_import +import re import datetime import logging import os @@ -11,6 +12,7 @@ from .packages.six.moves.http_client import HTTPException # noqa: F401 try: # Compiled with SSL? import ssl + BaseSSLError = ssl.SSLError except (ImportError, AttributeError): # Platform-specific: No SSL. ssl = None @@ -19,10 +21,11 @@ except (ImportError, AttributeError): # Platform-specific: No SSL. pass -try: # Python 3: - # Not a no-op, we're adding this to the namespace so it can be imported. +try: + # Python 3: not a no-op, we're adding this to the namespace so it can be imported. 
ConnectionError = ConnectionError -except NameError: # Python 2: +except NameError: + # Python 2 class ConnectionError(Exception): pass @@ -40,7 +43,7 @@ from .util.ssl_ import ( resolve_ssl_version, assert_fingerprint, create_urllib3_context, - ssl_wrap_socket + ssl_wrap_socket, ) @@ -50,20 +53,18 @@ from ._collections import HTTPHeaderDict log = logging.getLogger(__name__) -port_by_scheme = { - 'http': 80, - 'https': 443, -} +port_by_scheme = {"http": 80, "https": 443} + +# When it comes time to update this value as a part of regular maintenance +# (ie test_recent_date is failing) update it to ~6 months before the current date. +RECENT_DATE = datetime.date(2019, 1, 1) -# When updating RECENT_DATE, move it to within two years of the current date, -# and not less than 6 months ago. -# Example: if Today is 2018-01-01, then RECENT_DATE should be any date on or -# after 2016-01-01 (today - 2 years) AND before 2017-07-01 (today - 6 months) -RECENT_DATE = datetime.date(2017, 6, 30) +_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") class DummyConnection(object): """Used to detect a failed ConnectionCls import.""" + pass @@ -91,7 +92,7 @@ class HTTPConnection(_HTTPConnection, object): Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). """ - default_port = port_by_scheme['http'] + default_port = port_by_scheme["http"] #: Disable Nagle's algorithm by default. #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]`` @@ -101,15 +102,15 @@ class HTTPConnection(_HTTPConnection, object): is_verified = False def __init__(self, *args, **kw): - if six.PY3: # Python 3 - kw.pop('strict', None) + if not six.PY2: + kw.pop("strict", None) # Pre-set source_address. - self.source_address = kw.get('source_address') + self.source_address = kw.get("source_address") #: The socket options provided by the user. If no options are #: provided, we use the default options. 
- self.socket_options = kw.pop('socket_options', self.default_socket_options) + self.socket_options = kw.pop("socket_options", self.default_socket_options) _HTTPConnection.__init__(self, *args, **kw) @@ -130,7 +131,7 @@ class HTTPConnection(_HTTPConnection, object): those cases where it's appropriate (i.e., when doing DNS lookup to establish the actual TCP connection across which we're going to send HTTP requests). """ - return self._dns_host.rstrip('.') + return self._dns_host.rstrip(".") @host.setter def host(self, value): @@ -149,29 +150,34 @@ class HTTPConnection(_HTTPConnection, object): """ extra_kw = {} if self.source_address: - extra_kw['source_address'] = self.source_address + extra_kw["source_address"] = self.source_address if self.socket_options: - extra_kw['socket_options'] = self.socket_options + extra_kw["socket_options"] = self.socket_options try: conn = connection.create_connection( - (self._dns_host, self.port), self.timeout, **extra_kw) + (self._dns_host, self.port), self.timeout, **extra_kw + ) - except SocketTimeout as e: + except SocketTimeout: raise ConnectTimeoutError( - self, "Connection to %s timed out. (connect timeout=%s)" % - (self.host, self.timeout)) + self, + "Connection to %s timed out. (connect timeout=%s)" + % (self.host, self.timeout), + ) except SocketError as e: raise NewConnectionError( - self, "Failed to establish a new connection: %s" % e) + self, "Failed to establish a new connection: %s" % e + ) return conn def _prepare_conn(self, conn): self.sock = conn - if self._tunnel_host: + # Google App Engine's httplib does not define _tunnel_host + if getattr(self, "_tunnel_host", None): # TODO: Fix tunnel so it doesn't depend on self.sock state. 
self._tunnel() # Mark this connection as not reusable @@ -181,24 +187,32 @@ class HTTPConnection(_HTTPConnection, object): conn = self._new_conn() self._prepare_conn(conn) + def putrequest(self, method, url, *args, **kwargs): + """Send a request to the server""" + match = _CONTAINS_CONTROL_CHAR_RE.search(method) + if match: + raise ValueError( + "Method cannot contain non-token characters %r (found at least %r)" + % (method, match.group()) + ) + + return _HTTPConnection.putrequest(self, method, url, *args, **kwargs) + def request_chunked(self, method, url, body=None, headers=None): """ Alternative to the common request method, which sends the body with chunked encoding and not as one block """ headers = HTTPHeaderDict(headers if headers is not None else {}) - skip_accept_encoding = 'accept-encoding' in headers - skip_host = 'host' in headers + skip_accept_encoding = "accept-encoding" in headers + skip_host = "host" in headers self.putrequest( - method, - url, - skip_accept_encoding=skip_accept_encoding, - skip_host=skip_host + method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host ) for header, value in headers.items(): self.putheader(header, value) - if 'transfer-encoding' not in headers: - self.putheader('Transfer-Encoding', 'chunked') + if "transfer-encoding" not in headers: + self.putheader("Transfer-Encoding", "chunked") self.endheaders() if body is not None: @@ -209,99 +223,93 @@ class HTTPConnection(_HTTPConnection, object): if not chunk: continue if not isinstance(chunk, bytes): - chunk = chunk.encode('utf8') + chunk = chunk.encode("utf8") len_str = hex(len(chunk))[2:] - self.send(len_str.encode('utf-8')) - self.send(b'\r\n') + self.send(len_str.encode("utf-8")) + self.send(b"\r\n") self.send(chunk) - self.send(b'\r\n') + self.send(b"\r\n") # After the if clause, to always have a closed body - self.send(b'0\r\n\r\n') + self.send(b"0\r\n\r\n") class HTTPSConnection(HTTPConnection): - default_port = port_by_scheme['https'] + default_port 
= port_by_scheme["https"] + cert_reqs = None + ca_certs = None + ca_cert_dir = None + ca_cert_data = None ssl_version = None + assert_fingerprint = None - def __init__(self, host, port=None, key_file=None, cert_file=None, - strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, - ssl_context=None, server_hostname=None, **kw): - - HTTPConnection.__init__(self, host, port, strict=strict, - timeout=timeout, **kw) + def __init__( + self, + host, + port=None, + key_file=None, + cert_file=None, + key_password=None, + strict=None, + timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + ssl_context=None, + server_hostname=None, + **kw + ): + + HTTPConnection.__init__(self, host, port, strict=strict, timeout=timeout, **kw) self.key_file = key_file self.cert_file = cert_file + self.key_password = key_password self.ssl_context = ssl_context self.server_hostname = server_hostname # Required property for Google AppEngine 1.9.0 which otherwise causes # HTTPS requests to go out as HTTP. (See Issue #356) - self._protocol = 'https' - - def connect(self): - conn = self._new_conn() - self._prepare_conn(conn) - - if self.ssl_context is None: - self.ssl_context = create_urllib3_context( - ssl_version=resolve_ssl_version(None), - cert_reqs=resolve_cert_reqs(None), - ) - - self.sock = ssl_wrap_socket( - sock=conn, - keyfile=self.key_file, - certfile=self.cert_file, - ssl_context=self.ssl_context, - server_hostname=self.server_hostname - ) - - -class VerifiedHTTPSConnection(HTTPSConnection): - """ - Based on httplib.HTTPSConnection but wraps the socket with - SSL certification. 
- """ - cert_reqs = None - ca_certs = None - ca_cert_dir = None - ssl_version = None - assert_fingerprint = None - - def set_cert(self, key_file=None, cert_file=None, - cert_reqs=None, ca_certs=None, - assert_hostname=None, assert_fingerprint=None, - ca_cert_dir=None): + self._protocol = "https" + + def set_cert( + self, + key_file=None, + cert_file=None, + cert_reqs=None, + key_password=None, + ca_certs=None, + assert_hostname=None, + assert_fingerprint=None, + ca_cert_dir=None, + ca_cert_data=None, + ): """ This method should only be called once, before the connection is used. """ - # If cert_reqs is not provided, we can try to guess. If the user gave - # us a cert database, we assume they want to use it: otherwise, if - # they gave us an SSL Context object we should use whatever is set for - # it. + # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also + # have an SSLContext object in which case we'll use its verify_mode. if cert_reqs is None: - if ca_certs or ca_cert_dir: - cert_reqs = 'CERT_REQUIRED' - elif self.ssl_context is not None: + if self.ssl_context is not None: cert_reqs = self.ssl_context.verify_mode + else: + cert_reqs = resolve_cert_reqs(None) self.key_file = key_file self.cert_file = cert_file self.cert_reqs = cert_reqs + self.key_password = key_password self.assert_hostname = assert_hostname self.assert_fingerprint = assert_fingerprint self.ca_certs = ca_certs and os.path.expanduser(ca_certs) self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir) + self.ca_cert_data = ca_cert_data def connect(self): # Add certificate verification conn = self._new_conn() hostname = self.host - if self._tunnel_host: + # Google App Engine's httplib does not define _tunnel_host + if getattr(self, "_tunnel_host", None): self.sock = conn # Calls self._set_hostport(), so self.host is # self._tunnel_host below. 
@@ -318,15 +326,19 @@ class VerifiedHTTPSConnection(HTTPSConnection): is_time_off = datetime.date.today() < RECENT_DATE if is_time_off: - warnings.warn(( - 'System time is way off (before {0}). This will probably ' - 'lead to SSL verification errors').format(RECENT_DATE), - SystemTimeWarning + warnings.warn( + ( + "System time is way off (before {0}). This will probably " + "lead to SSL verification errors" + ).format(RECENT_DATE), + SystemTimeWarning, ) # Wrap socket using verification with the root certs in # trusted_root_certs + default_ssl_context = False if self.ssl_context is None: + default_ssl_context = True self.ssl_context = create_urllib3_context( ssl_version=resolve_ssl_version(self.ssl_version), cert_reqs=resolve_cert_reqs(self.cert_reqs), @@ -334,38 +346,58 @@ class VerifiedHTTPSConnection(HTTPSConnection): context = self.ssl_context context.verify_mode = resolve_cert_reqs(self.cert_reqs) + + # Try to load OS default certs if none are given. + # Works well on Windows (requires Python3.4+) + if ( + not self.ca_certs + and not self.ca_cert_dir + and not self.ca_cert_data + and default_ssl_context + and hasattr(context, "load_default_certs") + ): + context.load_default_certs() + self.sock = ssl_wrap_socket( sock=conn, keyfile=self.key_file, certfile=self.cert_file, + key_password=self.key_password, ca_certs=self.ca_certs, ca_cert_dir=self.ca_cert_dir, + ca_cert_data=self.ca_cert_data, server_hostname=server_hostname, - ssl_context=context) + ssl_context=context, + ) if self.assert_fingerprint: - assert_fingerprint(self.sock.getpeercert(binary_form=True), - self.assert_fingerprint) - elif context.verify_mode != ssl.CERT_NONE \ - and not getattr(context, 'check_hostname', False) \ - and self.assert_hostname is not False: + assert_fingerprint( + self.sock.getpeercert(binary_form=True), self.assert_fingerprint + ) + elif ( + context.verify_mode != ssl.CERT_NONE + and not getattr(context, "check_hostname", False) + and self.assert_hostname is not False + ): # 
While urllib3 attempts to always turn off hostname matching from # the TLS library, this cannot always be done. So we check whether # the TLS Library still thinks it's matching hostnames. cert = self.sock.getpeercert() - if not cert.get('subjectAltName', ()): - warnings.warn(( - 'Certificate for {0} has no `subjectAltName`, falling back to check for a ' - '`commonName` for now. This feature is being removed by major browsers and ' - 'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 ' - 'for details.)'.format(hostname)), - SubjectAltNameWarning + if not cert.get("subjectAltName", ()): + warnings.warn( + ( + "Certificate for {0} has no `subjectAltName`, falling back to check for a " + "`commonName` for now. This feature is being removed by major browsers and " + "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 " + "for details.)".format(hostname) + ), + SubjectAltNameWarning, ) _match_hostname(cert, self.assert_hostname or server_hostname) self.is_verified = ( - context.verify_mode == ssl.CERT_REQUIRED or - self.assert_fingerprint is not None + context.verify_mode == ssl.CERT_REQUIRED + or self.assert_fingerprint is not None ) @@ -373,9 +405,10 @@ def _match_hostname(cert, asserted_hostname): try: match_hostname(cert, asserted_hostname) except CertificateError as e: - log.error( - 'Certificate did not match expected hostname: %s. ' - 'Certificate: %s', asserted_hostname, cert + log.warning( + "Certificate did not match expected hostname: %s. Certificate: %s", + asserted_hostname, + cert, ) # Add cert to exception and reraise so client code can inspect # the cert when catching the exception, if they want to @@ -383,9 +416,8 @@ def _match_hostname(cert, asserted_hostname): raise -if ssl: - # Make a copy for testing. 
- UnverifiedHTTPSConnection = HTTPSConnection - HTTPSConnection = VerifiedHTTPSConnection -else: - HTTPSConnection = DummyConnection +if not ssl: + HTTPSConnection = DummyConnection # noqa: F811 + + +VerifiedHTTPSConnection = HTTPSConnection diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/connectionpool.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/connectionpool.py index f7a8f193..5f044dbd 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/connectionpool.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/connectionpool.py @@ -29,8 +29,11 @@ from .packages.six.moves import queue from .connection import ( port_by_scheme, DummyConnection, - HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection, - HTTPException, BaseSSLError, + HTTPConnection, + HTTPSConnection, + VerifiedHTTPSConnection, + HTTPException, + BaseSSLError, ) from .request import RequestMethods from .response import HTTPResponse @@ -40,7 +43,13 @@ from .util.request import set_file_position from .util.response import assert_header_parsing from .util.retry import Retry from .util.timeout import Timeout -from .util.url import get_host, Url, NORMALIZABLE_SCHEMES +from .util.url import ( + get_host, + parse_url, + Url, + _normalize_host as normalize_host, + _encode_target, +) from .util.queue import LifoQueue @@ -56,6 +65,11 @@ class ConnectionPool(object): """ Base class for all connection pools, such as :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`. + + .. note:: + ConnectionPool.urlopen() does not normalize or percent-encode target URIs + which is useful if your target server doesn't support percent-encoded + target URIs. 
""" scheme = None @@ -65,13 +79,12 @@ class ConnectionPool(object): if not host: raise LocationValueError("No host specified.") - self.host = _ipv6_host(host, self.scheme) + self.host = _normalize_host(host, scheme=self.scheme) self._proxy_host = host.lower() self.port = port def __str__(self): - return '%s(host=%r, port=%r)' % (type(self).__name__, - self.host, self.port) + return "%s(host=%r, port=%r)" % (type(self).__name__, self.host, self.port) def __enter__(self): return self @@ -152,15 +165,24 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): :class:`urllib3.connection.HTTPSConnection` instances. """ - scheme = 'http' + scheme = "http" ConnectionCls = HTTPConnection ResponseCls = HTTPResponse - def __init__(self, host, port=None, strict=False, - timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False, - headers=None, retries=None, - _proxy=None, _proxy_headers=None, - **conn_kw): + def __init__( + self, + host, + port=None, + strict=False, + timeout=Timeout.DEFAULT_TIMEOUT, + maxsize=1, + block=False, + headers=None, + retries=None, + _proxy=None, + _proxy_headers=None, + **conn_kw + ): ConnectionPool.__init__(self, host, port) RequestMethods.__init__(self, headers) @@ -194,19 +216,27 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # Enable Nagle's algorithm for proxies, to avoid packet fragmentation. # We cannot know if the user has added default socket options, so we cannot replace the # list. - self.conn_kw.setdefault('socket_options', []) + self.conn_kw.setdefault("socket_options", []) def _new_conn(self): """ Return a fresh :class:`HTTPConnection`. 
""" self.num_connections += 1 - log.debug("Starting new HTTP connection (%d): %s:%s", - self.num_connections, self.host, self.port or "80") - - conn = self.ConnectionCls(host=self.host, port=self.port, - timeout=self.timeout.connect_timeout, - strict=self.strict, **self.conn_kw) + log.debug( + "Starting new HTTP connection (%d): %s:%s", + self.num_connections, + self.host, + self.port or "80", + ) + + conn = self.ConnectionCls( + host=self.host, + port=self.port, + timeout=self.timeout.connect_timeout, + strict=self.strict, + **self.conn_kw + ) return conn def _get_conn(self, timeout=None): @@ -230,16 +260,17 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): except queue.Empty: if self.block: - raise EmptyPoolError(self, - "Pool reached maximum size and no more " - "connections are allowed.") + raise EmptyPoolError( + self, + "Pool reached maximum size and no more connections are allowed.", + ) pass # Oh well, we'll create a new connection then # If this is a persistent connection, check if it got disconnected if conn and is_connection_dropped(conn): log.debug("Resetting dropped connection: %s", self.host) conn.close() - if getattr(conn, 'auto_open', 1) == 0: + if getattr(conn, "auto_open", 1) == 0: # This is a proxied connection that has been mutated by # httplib._tunnel() and cannot be reused (since it would # attempt to bypass the proxy) @@ -269,9 +300,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): pass except queue.Full: # This should never happen if self.block == True - log.warning( - "Connection pool is full, discarding connection: %s", - self.host) + log.warning("Connection pool is full, discarding connection: %s", self.host) # Connection never got put back into the pool, close it. if conn: @@ -303,21 +332,30 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): """Is the error actually a timeout? Will raise a ReadTimeout or pass""" if isinstance(err, SocketTimeout): - raise ReadTimeoutError(self, url, "Read timed out. 
(read timeout=%s)" % timeout_value) + raise ReadTimeoutError( + self, url, "Read timed out. (read timeout=%s)" % timeout_value + ) # See the above comment about EAGAIN in Python 3. In Python 2 we have # to specifically catch it and throw the timeout error - if hasattr(err, 'errno') and err.errno in _blocking_errnos: - raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) + if hasattr(err, "errno") and err.errno in _blocking_errnos: + raise ReadTimeoutError( + self, url, "Read timed out. (read timeout=%s)" % timeout_value + ) # Catch possible read timeouts thrown as SSL errors. If not the # case, rethrow the original. We need to do this because of: # http://bugs.python.org/issue10272 - if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python < 2.7.4 - raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) - - def _make_request(self, conn, method, url, timeout=_Default, chunked=False, - **httplib_request_kw): + if "timed out" in str(err) or "did not complete (read)" in str( + err + ): # Python < 2.7.4 + raise ReadTimeoutError( + self, url, "Read timed out. (read timeout=%s)" % timeout_value + ) + + def _make_request( + self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw + ): """ Perform a request on a given urllib connection object taken from our pool. @@ -357,7 +395,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): read_timeout = timeout_obj.read_timeout # App Engine doesn't have a sock attr - if getattr(conn, 'sock', None): + if getattr(conn, "sock", None): # In Python 3 socket.py will catch EAGAIN and return None when you # try and read into the file pointer created by http.client, which # instead raises a BadStatusLine exception. Instead of catching @@ -365,7 +403,8 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # timeouts, check for a zero timeout before making the request. 
if read_timeout == 0: raise ReadTimeoutError( - self, url, "Read timed out. (read timeout=%s)" % read_timeout) + self, url, "Read timed out. (read timeout=%s)" % read_timeout + ) if read_timeout is Timeout.DEFAULT_TIMEOUT: conn.sock.settimeout(socket.getdefaulttimeout()) else: # None or a value @@ -373,31 +412,45 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # Receive the response from the server try: - try: # Python 2.7, use buffering of HTTP responses + try: + # Python 2.7, use buffering of HTTP responses httplib_response = conn.getresponse(buffering=True) - except TypeError: # Python 3 + except TypeError: + # Python 3 try: httplib_response = conn.getresponse() - except Exception as e: - # Remove the TypeError from the exception chain in Python 3; - # otherwise it looks like a programming error was the cause. + except BaseException as e: + # Remove the TypeError from the exception chain in + # Python 3 (including for exceptions like SystemExit). + # Otherwise it looks like a bug in the code. six.raise_from(e, None) except (SocketTimeout, BaseSSLError, SocketError) as e: self._raise_timeout(err=e, url=url, timeout_value=read_timeout) raise # AppEngine doesn't have a version attr. 
- http_version = getattr(conn, '_http_vsn_str', 'HTTP/?') - log.debug("%s://%s:%s \"%s %s %s\" %s %s", self.scheme, self.host, self.port, - method, url, http_version, httplib_response.status, - httplib_response.length) + http_version = getattr(conn, "_http_vsn_str", "HTTP/?") + log.debug( + '%s://%s:%s "%s %s %s" %s %s', + self.scheme, + self.host, + self.port, + method, + url, + http_version, + httplib_response.status, + httplib_response.length, + ) try: assert_header_parsing(httplib_response.msg) except (HeaderParsingError, TypeError) as hpe: # Platform-specific: Python 3 log.warning( - 'Failed to parse headers (url=%s): %s', - self._absolute_url(url), hpe, exc_info=True) + "Failed to parse headers (url=%s): %s", + self._absolute_url(url), + hpe, + exc_info=True, + ) return httplib_response @@ -427,13 +480,13 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): Check if the given ``url`` is a member of the same host as this connection pool. """ - if url.startswith('/'): + if url.startswith("/"): return True # TODO: Add optional support for socket.gethostbyname checking. 
scheme, host, port = get_host(url) - - host = _ipv6_host(host, self.scheme) + if host is not None: + host = _normalize_host(host, scheme=scheme) # Use explicit default port for comparison when none is given if self.port and not port: @@ -443,10 +496,22 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): return (scheme, host, port) == (self.scheme, self.host, self.port) - def urlopen(self, method, url, body=None, headers=None, retries=None, - redirect=True, assert_same_host=True, timeout=_Default, - pool_timeout=None, release_conn=None, chunked=False, - body_pos=None, **response_kw): + def urlopen( + self, + method, + url, + body=None, + headers=None, + retries=None, + redirect=True, + assert_same_host=True, + timeout=_Default, + pool_timeout=None, + release_conn=None, + chunked=False, + body_pos=None, + **response_kw + ): """ Get a connection from the pool and perform an HTTP request. This is the lowest level call for making a request, so you'll need to specify all @@ -544,12 +609,18 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): retries = Retry.from_int(retries, redirect=redirect, default=self.retries) if release_conn is None: - release_conn = response_kw.get('preload_content', True) + release_conn = response_kw.get("preload_content", True) # Check host if assert_same_host and not self.is_same_host(url): raise HostChangedError(self, url, retries) + # Ensure that the URL we're connecting to is properly encoded + if url.startswith("/"): + url = six.ensure_str(_encode_target(url)) + else: + url = six.ensure_str(parse_url(url).url) + conn = None # Track whether `conn` needs to be released before @@ -560,13 +631,13 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # # See issue #651 [1] for details. # - # [1] <https://github.com/shazow/urllib3/issues/651> + # [1] <https://github.com/urllib3/urllib3/issues/651> release_this_conn = release_conn # Merge the proxy headers. Only do this in HTTP. 
We have to copy the # headers dict so we can safely change it without those changes being # reflected in anyone else's copy. - if self.scheme == 'http': + if self.scheme == "http": headers = headers.copy() headers.update(self.proxy_headers) @@ -589,15 +660,22 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): conn.timeout = timeout_obj.connect_timeout - is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None) + is_new_proxy_conn = self.proxy is not None and not getattr( + conn, "sock", None + ) if is_new_proxy_conn: self._prepare_proxy(conn) # Make the request on the httplib connection object. - httplib_response = self._make_request(conn, method, url, - timeout=timeout_obj, - body=body, headers=headers, - chunked=chunked) + httplib_response = self._make_request( + conn, + method, + url, + timeout=timeout_obj, + body=body, + headers=headers, + chunked=chunked, + ) # If we're going to release the connection in ``finally:``, then # the response doesn't need to know about the connection. Otherwise @@ -606,14 +684,16 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): response_conn = conn if not release_conn else None # Pass method to Response for length checking - response_kw['request_method'] = method + response_kw["request_method"] = method # Import httplib's response into our own wrapper object - response = self.ResponseCls.from_httplib(httplib_response, - pool=self, - connection=response_conn, - retries=retries, - **response_kw) + response = self.ResponseCls.from_httplib( + httplib_response, + pool=self, + connection=response_conn, + retries=retries, + **response_kw + ) # Everything went great! clean_exit = True @@ -622,20 +702,28 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # Timed out by queue. 
raise EmptyPoolError(self, "No pool connections are available.") - except (TimeoutError, HTTPException, SocketError, ProtocolError, - BaseSSLError, SSLError, CertificateError) as e: + except ( + TimeoutError, + HTTPException, + SocketError, + ProtocolError, + BaseSSLError, + SSLError, + CertificateError, + ) as e: # Discard the connection for these exceptions. It will be # replaced during the next _get_conn() call. clean_exit = False if isinstance(e, (BaseSSLError, CertificateError)): e = SSLError(e) elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy: - e = ProxyError('Cannot connect to proxy.', e) + e = ProxyError("Cannot connect to proxy.", e) elif isinstance(e, (SocketError, HTTPException)): - e = ProtocolError('Connection aborted.', e) + e = ProtocolError("Connection aborted.", e) - retries = retries.increment(method, url, error=e, _pool=self, - _stacktrace=sys.exc_info()[2]) + retries = retries.increment( + method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2] + ) retries.sleep() # Keep track of the error for the retry warning. 
@@ -658,77 +746,87 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): if not conn: # Try again - log.warning("Retrying (%r) after connection " - "broken by '%r': %s", retries, err, url) - return self.urlopen(method, url, body, headers, retries, - redirect, assert_same_host, - timeout=timeout, pool_timeout=pool_timeout, - release_conn=release_conn, body_pos=body_pos, - **response_kw) - - def drain_and_release_conn(response): - try: - # discard any remaining response body, the connection will be - # released back to the pool once the entire response is read - response.read() - except (TimeoutError, HTTPException, SocketError, ProtocolError, - BaseSSLError, SSLError) as e: - pass + log.warning( + "Retrying (%r) after connection broken by '%r': %s", retries, err, url + ) + return self.urlopen( + method, + url, + body, + headers, + retries, + redirect, + assert_same_host, + timeout=timeout, + pool_timeout=pool_timeout, + release_conn=release_conn, + chunked=chunked, + body_pos=body_pos, + **response_kw + ) # Handle redirect? redirect_location = redirect and response.get_redirect_location() if redirect_location: if response.status == 303: - method = 'GET' + method = "GET" try: retries = retries.increment(method, url, response=response, _pool=self) except MaxRetryError: if retries.raise_on_redirect: - # Drain and release the connection for this response, since - # we're not returning it to be released manually. 
- drain_and_release_conn(response) + response.drain_conn() raise return response - # drain and return the connection to the pool before recursing - drain_and_release_conn(response) - + response.drain_conn() retries.sleep_for_retry(response) log.debug("Redirecting %s -> %s", url, redirect_location) return self.urlopen( - method, redirect_location, body, headers, - retries=retries, redirect=redirect, + method, + redirect_location, + body, + headers, + retries=retries, + redirect=redirect, assert_same_host=assert_same_host, - timeout=timeout, pool_timeout=pool_timeout, - release_conn=release_conn, body_pos=body_pos, - **response_kw) + timeout=timeout, + pool_timeout=pool_timeout, + release_conn=release_conn, + chunked=chunked, + body_pos=body_pos, + **response_kw + ) # Check if we should retry the HTTP response. - has_retry_after = bool(response.getheader('Retry-After')) + has_retry_after = bool(response.getheader("Retry-After")) if retries.is_retry(method, response.status, has_retry_after): try: retries = retries.increment(method, url, response=response, _pool=self) except MaxRetryError: if retries.raise_on_status: - # Drain and release the connection for this response, since - # we're not returning it to be released manually. 
- drain_and_release_conn(response) + response.drain_conn() raise return response - # drain and return the connection to the pool before recursing - drain_and_release_conn(response) - + response.drain_conn() retries.sleep(response) log.debug("Retry: %s", url) return self.urlopen( - method, url, body, headers, - retries=retries, redirect=redirect, + method, + url, + body, + headers, + retries=retries, + redirect=redirect, assert_same_host=assert_same_host, - timeout=timeout, pool_timeout=pool_timeout, + timeout=timeout, + pool_timeout=pool_timeout, release_conn=release_conn, - body_pos=body_pos, **response_kw) + chunked=chunked, + body_pos=body_pos, + **response_kw + ) return response @@ -746,33 +844,57 @@ class HTTPSConnectionPool(HTTPConnectionPool): If ``assert_hostname`` is False, no verification is done. The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``, - ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is - available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade + ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl` + is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade the connection socket into an SSL socket. 
""" - scheme = 'https' + scheme = "https" ConnectionCls = HTTPSConnection - def __init__(self, host, port=None, - strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, - block=False, headers=None, retries=None, - _proxy=None, _proxy_headers=None, - key_file=None, cert_file=None, cert_reqs=None, - ca_certs=None, ssl_version=None, - assert_hostname=None, assert_fingerprint=None, - ca_cert_dir=None, **conn_kw): - - HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize, - block, headers, retries, _proxy, _proxy_headers, - **conn_kw) - - if ca_certs and cert_reqs is None: - cert_reqs = 'CERT_REQUIRED' + def __init__( + self, + host, + port=None, + strict=False, + timeout=Timeout.DEFAULT_TIMEOUT, + maxsize=1, + block=False, + headers=None, + retries=None, + _proxy=None, + _proxy_headers=None, + key_file=None, + cert_file=None, + cert_reqs=None, + key_password=None, + ca_certs=None, + ssl_version=None, + assert_hostname=None, + assert_fingerprint=None, + ca_cert_dir=None, + **conn_kw + ): + + HTTPConnectionPool.__init__( + self, + host, + port, + strict, + timeout, + maxsize, + block, + headers, + retries, + _proxy, + _proxy_headers, + **conn_kw + ) self.key_file = key_file self.cert_file = cert_file self.cert_reqs = cert_reqs + self.key_password = key_password self.ca_certs = ca_certs self.ca_cert_dir = ca_cert_dir self.ssl_version = ssl_version @@ -786,13 +908,16 @@ class HTTPSConnectionPool(HTTPConnectionPool): """ if isinstance(conn, VerifiedHTTPSConnection): - conn.set_cert(key_file=self.key_file, - cert_file=self.cert_file, - cert_reqs=self.cert_reqs, - ca_certs=self.ca_certs, - ca_cert_dir=self.ca_cert_dir, - assert_hostname=self.assert_hostname, - assert_fingerprint=self.assert_fingerprint) + conn.set_cert( + key_file=self.key_file, + key_password=self.key_password, + cert_file=self.cert_file, + cert_reqs=self.cert_reqs, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + assert_hostname=self.assert_hostname, + 
assert_fingerprint=self.assert_fingerprint, + ) conn.ssl_version = self.ssl_version return conn @@ -809,12 +934,17 @@ class HTTPSConnectionPool(HTTPConnectionPool): Return a fresh :class:`httplib.HTTPSConnection`. """ self.num_connections += 1 - log.debug("Starting new HTTPS connection (%d): %s:%s", - self.num_connections, self.host, self.port or "443") + log.debug( + "Starting new HTTPS connection (%d): %s:%s", + self.num_connections, + self.host, + self.port or "443", + ) if not self.ConnectionCls or self.ConnectionCls is DummyConnection: - raise SSLError("Can't connect to HTTPS URL because the SSL " - "module is not available.") + raise SSLError( + "Can't connect to HTTPS URL because the SSL module is not available." + ) actual_host = self.host actual_port = self.port @@ -822,9 +952,16 @@ class HTTPSConnectionPool(HTTPConnectionPool): actual_host = self.proxy.host actual_port = self.proxy.port - conn = self.ConnectionCls(host=actual_host, port=actual_port, - timeout=self.timeout.connect_timeout, - strict=self.strict, **self.conn_kw) + conn = self.ConnectionCls( + host=actual_host, + port=actual_port, + timeout=self.timeout.connect_timeout, + strict=self.strict, + cert_file=self.cert_file, + key_file=self.key_file, + key_password=self.key_password, + **self.conn_kw + ) return self._prepare_conn(conn) @@ -835,16 +972,19 @@ class HTTPSConnectionPool(HTTPConnectionPool): super(HTTPSConnectionPool, self)._validate_conn(conn) # Force connect early to allow us to validate the connection. - if not getattr(conn, 'sock', None): # AppEngine might not have `.sock` + if not getattr(conn, "sock", None): # AppEngine might not have `.sock` conn.connect() if not conn.is_verified: - warnings.warn(( - 'Unverified HTTPS request is being made. ' - 'Adding certificate verification is strongly advised. 
See: ' - 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' - '#ssl-warnings'), - InsecureRequestWarning) + warnings.warn( + ( + "Unverified HTTPS request is being made to host '%s'. " + "Adding certificate verification is strongly advised. See: " + "https://urllib3.readthedocs.io/en/latest/advanced-usage.html" + "#ssl-warnings" % conn.host + ), + InsecureRequestWarning, + ) def connection_from_url(url, **kw): @@ -869,28 +1009,25 @@ def connection_from_url(url, **kw): """ scheme, host, port = get_host(url) port = port or port_by_scheme.get(scheme, 80) - if scheme == 'https': + if scheme == "https": return HTTPSConnectionPool(host, port=port, **kw) else: return HTTPConnectionPool(host, port=port, **kw) -def _ipv6_host(host, scheme): +def _normalize_host(host, scheme): """ - Process IPv6 address literals + Normalize hosts for comparisons and use with sockets. """ + host = normalize_host(host, scheme) + # httplib doesn't like it when we include brackets in IPv6 addresses # Specifically, if we include brackets but also pass the port then # httplib crazily doubles up the square brackets on the Host header. # Instead, we need to make sure we never pass ``None`` as the port. # However, for backward compatibility reasons we can't actually # *assert* that. 
See http://bugs.python.org/issue28539 - # - # Also if an IPv6 address literal has a zone identifier, the - # percent sign might be URIencoded, convert it back into ASCII - if host.startswith('[') and host.endswith(']'): - host = host.replace('%25', '%').strip('[]') - if scheme in NORMALIZABLE_SCHEMES: - host = host.lower() + if host.startswith("[") and host.endswith("]"): + host = host[1:-1] return host diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py index f3e00942..8765b907 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py @@ -6,25 +6,31 @@ import os def is_appengine(): - return (is_local_appengine() or - is_prod_appengine() or - is_prod_appengine_mvms()) + return is_local_appengine() or is_prod_appengine() def is_appengine_sandbox(): - return is_appengine() and not is_prod_appengine_mvms() + """Reports if the app is running in the first generation sandbox. + + The second generation runtimes are technically still in a sandbox, but it + is much less restrictive, so generally you shouldn't need to check for it. 
+ see https://cloud.google.com/appengine/docs/standard/runtimes + """ + return is_appengine() and os.environ["APPENGINE_RUNTIME"] == "python27" def is_local_appengine(): - return ('APPENGINE_RUNTIME' in os.environ and - 'Development/' in os.environ['SERVER_SOFTWARE']) + return "APPENGINE_RUNTIME" in os.environ and os.environ.get( + "SERVER_SOFTWARE", "" + ).startswith("Development/") def is_prod_appengine(): - return ('APPENGINE_RUNTIME' in os.environ and - 'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and - not is_prod_appengine_mvms()) + return "APPENGINE_RUNTIME" in os.environ and os.environ.get( + "SERVER_SOFTWARE", "" + ).startswith("Google App Engine/") def is_prod_appengine_mvms(): - return os.environ.get('GAE_VM', False) == 'true' + """Deprecated.""" + return False diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py index bcf41c02..d9b67333 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py @@ -34,29 +34,35 @@ from __future__ import absolute_import import platform from ctypes.util import find_library from ctypes import ( - c_void_p, c_int32, c_char_p, c_size_t, c_byte, c_uint32, c_ulong, c_long, - c_bool + c_void_p, + c_int32, + c_char_p, + c_size_t, + c_byte, + c_uint32, + c_ulong, + c_long, + c_bool, ) from ctypes import CDLL, POINTER, CFUNCTYPE -security_path = find_library('Security') +security_path = find_library("Security") if not security_path: - raise ImportError('The library Security could not be found') + raise ImportError("The library Security could not be found") -core_foundation_path = find_library('CoreFoundation') +core_foundation_path = 
find_library("CoreFoundation") if not core_foundation_path: - raise ImportError('The library CoreFoundation could not be found') + raise ImportError("The library CoreFoundation could not be found") version = platform.mac_ver()[0] -version_info = tuple(map(int, version.split('.'))) +version_info = tuple(map(int, version.split("."))) if version_info < (10, 8): raise OSError( - 'Only OS X 10.8 and newer are supported, not %s.%s' % ( - version_info[0], version_info[1] - ) + "Only OS X 10.8 and newer are supported, not %s.%s" + % (version_info[0], version_info[1]) ) Security = CDLL(security_path, use_errno=True) @@ -129,27 +135,19 @@ try: Security.SecKeyGetTypeID.argtypes = [] Security.SecKeyGetTypeID.restype = CFTypeID - Security.SecCertificateCreateWithData.argtypes = [ - CFAllocatorRef, - CFDataRef - ] + Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef] Security.SecCertificateCreateWithData.restype = SecCertificateRef - Security.SecCertificateCopyData.argtypes = [ - SecCertificateRef - ] + Security.SecCertificateCopyData.argtypes = [SecCertificateRef] Security.SecCertificateCopyData.restype = CFDataRef - Security.SecCopyErrorMessageString.argtypes = [ - OSStatus, - c_void_p - ] + Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p] Security.SecCopyErrorMessageString.restype = CFStringRef Security.SecIdentityCreateWithCertificate.argtypes = [ CFTypeRef, SecCertificateRef, - POINTER(SecIdentityRef) + POINTER(SecIdentityRef), ] Security.SecIdentityCreateWithCertificate.restype = OSStatus @@ -159,201 +157,126 @@ try: c_void_p, Boolean, c_void_p, - POINTER(SecKeychainRef) + POINTER(SecKeychainRef), ] Security.SecKeychainCreate.restype = OSStatus - Security.SecKeychainDelete.argtypes = [ - SecKeychainRef - ] + Security.SecKeychainDelete.argtypes = [SecKeychainRef] Security.SecKeychainDelete.restype = OSStatus Security.SecPKCS12Import.argtypes = [ CFDataRef, CFDictionaryRef, - POINTER(CFArrayRef) + POINTER(CFArrayRef), ] 
Security.SecPKCS12Import.restype = OSStatus SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t)) - SSLWriteFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)) + SSLWriteFunc = CFUNCTYPE( + OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t) + ) - Security.SSLSetIOFuncs.argtypes = [ - SSLContextRef, - SSLReadFunc, - SSLWriteFunc - ] + Security.SSLSetIOFuncs.argtypes = [SSLContextRef, SSLReadFunc, SSLWriteFunc] Security.SSLSetIOFuncs.restype = OSStatus - Security.SSLSetPeerID.argtypes = [ - SSLContextRef, - c_char_p, - c_size_t - ] + Security.SSLSetPeerID.argtypes = [SSLContextRef, c_char_p, c_size_t] Security.SSLSetPeerID.restype = OSStatus - Security.SSLSetCertificate.argtypes = [ - SSLContextRef, - CFArrayRef - ] + Security.SSLSetCertificate.argtypes = [SSLContextRef, CFArrayRef] Security.SSLSetCertificate.restype = OSStatus - Security.SSLSetCertificateAuthorities.argtypes = [ - SSLContextRef, - CFTypeRef, - Boolean - ] + Security.SSLSetCertificateAuthorities.argtypes = [SSLContextRef, CFTypeRef, Boolean] Security.SSLSetCertificateAuthorities.restype = OSStatus - Security.SSLSetConnection.argtypes = [ - SSLContextRef, - SSLConnectionRef - ] + Security.SSLSetConnection.argtypes = [SSLContextRef, SSLConnectionRef] Security.SSLSetConnection.restype = OSStatus - Security.SSLSetPeerDomainName.argtypes = [ - SSLContextRef, - c_char_p, - c_size_t - ] + Security.SSLSetPeerDomainName.argtypes = [SSLContextRef, c_char_p, c_size_t] Security.SSLSetPeerDomainName.restype = OSStatus - Security.SSLHandshake.argtypes = [ - SSLContextRef - ] + Security.SSLHandshake.argtypes = [SSLContextRef] Security.SSLHandshake.restype = OSStatus - Security.SSLRead.argtypes = [ - SSLContextRef, - c_char_p, - c_size_t, - POINTER(c_size_t) - ] + Security.SSLRead.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)] Security.SSLRead.restype = OSStatus - Security.SSLWrite.argtypes = [ - SSLContextRef, - c_char_p, - 
c_size_t, - POINTER(c_size_t) - ] + Security.SSLWrite.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)] Security.SSLWrite.restype = OSStatus - Security.SSLClose.argtypes = [ - SSLContextRef - ] + Security.SSLClose.argtypes = [SSLContextRef] Security.SSLClose.restype = OSStatus - Security.SSLGetNumberSupportedCiphers.argtypes = [ - SSLContextRef, - POINTER(c_size_t) - ] + Security.SSLGetNumberSupportedCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)] Security.SSLGetNumberSupportedCiphers.restype = OSStatus Security.SSLGetSupportedCiphers.argtypes = [ SSLContextRef, POINTER(SSLCipherSuite), - POINTER(c_size_t) + POINTER(c_size_t), ] Security.SSLGetSupportedCiphers.restype = OSStatus Security.SSLSetEnabledCiphers.argtypes = [ SSLContextRef, POINTER(SSLCipherSuite), - c_size_t + c_size_t, ] Security.SSLSetEnabledCiphers.restype = OSStatus - Security.SSLGetNumberEnabledCiphers.argtype = [ - SSLContextRef, - POINTER(c_size_t) - ] + Security.SSLGetNumberEnabledCiphers.argtype = [SSLContextRef, POINTER(c_size_t)] Security.SSLGetNumberEnabledCiphers.restype = OSStatus Security.SSLGetEnabledCiphers.argtypes = [ SSLContextRef, POINTER(SSLCipherSuite), - POINTER(c_size_t) + POINTER(c_size_t), ] Security.SSLGetEnabledCiphers.restype = OSStatus - Security.SSLGetNegotiatedCipher.argtypes = [ - SSLContextRef, - POINTER(SSLCipherSuite) - ] + Security.SSLGetNegotiatedCipher.argtypes = [SSLContextRef, POINTER(SSLCipherSuite)] Security.SSLGetNegotiatedCipher.restype = OSStatus Security.SSLGetNegotiatedProtocolVersion.argtypes = [ SSLContextRef, - POINTER(SSLProtocol) + POINTER(SSLProtocol), ] Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus - Security.SSLCopyPeerTrust.argtypes = [ - SSLContextRef, - POINTER(SecTrustRef) - ] + Security.SSLCopyPeerTrust.argtypes = [SSLContextRef, POINTER(SecTrustRef)] Security.SSLCopyPeerTrust.restype = OSStatus - Security.SecTrustSetAnchorCertificates.argtypes = [ - SecTrustRef, - CFArrayRef - ] + 
Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef] Security.SecTrustSetAnchorCertificates.restype = OSStatus - Security.SecTrustSetAnchorCertificatesOnly.argstypes = [ - SecTrustRef, - Boolean - ] + Security.SecTrustSetAnchorCertificatesOnly.argstypes = [SecTrustRef, Boolean] Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus - Security.SecTrustEvaluate.argtypes = [ - SecTrustRef, - POINTER(SecTrustResultType) - ] + Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)] Security.SecTrustEvaluate.restype = OSStatus - Security.SecTrustGetCertificateCount.argtypes = [ - SecTrustRef - ] + Security.SecTrustGetCertificateCount.argtypes = [SecTrustRef] Security.SecTrustGetCertificateCount.restype = CFIndex - Security.SecTrustGetCertificateAtIndex.argtypes = [ - SecTrustRef, - CFIndex - ] + Security.SecTrustGetCertificateAtIndex.argtypes = [SecTrustRef, CFIndex] Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef Security.SSLCreateContext.argtypes = [ CFAllocatorRef, SSLProtocolSide, - SSLConnectionType + SSLConnectionType, ] Security.SSLCreateContext.restype = SSLContextRef - Security.SSLSetSessionOption.argtypes = [ - SSLContextRef, - SSLSessionOption, - Boolean - ] + Security.SSLSetSessionOption.argtypes = [SSLContextRef, SSLSessionOption, Boolean] Security.SSLSetSessionOption.restype = OSStatus - Security.SSLSetProtocolVersionMin.argtypes = [ - SSLContextRef, - SSLProtocol - ] + Security.SSLSetProtocolVersionMin.argtypes = [SSLContextRef, SSLProtocol] Security.SSLSetProtocolVersionMin.restype = OSStatus - Security.SSLSetProtocolVersionMax.argtypes = [ - SSLContextRef, - SSLProtocol - ] + Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol] Security.SSLSetProtocolVersionMax.restype = OSStatus - Security.SecCopyErrorMessageString.argtypes = [ - OSStatus, - c_void_p - ] + Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p] 
Security.SecCopyErrorMessageString.restype = CFStringRef Security.SSLReadFunc = SSLReadFunc @@ -369,64 +292,47 @@ try: Security.OSStatus = OSStatus Security.kSecImportExportPassphrase = CFStringRef.in_dll( - Security, 'kSecImportExportPassphrase' + Security, "kSecImportExportPassphrase" ) Security.kSecImportItemIdentity = CFStringRef.in_dll( - Security, 'kSecImportItemIdentity' + Security, "kSecImportItemIdentity" ) # CoreFoundation time! - CoreFoundation.CFRetain.argtypes = [ - CFTypeRef - ] + CoreFoundation.CFRetain.argtypes = [CFTypeRef] CoreFoundation.CFRetain.restype = CFTypeRef - CoreFoundation.CFRelease.argtypes = [ - CFTypeRef - ] + CoreFoundation.CFRelease.argtypes = [CFTypeRef] CoreFoundation.CFRelease.restype = None - CoreFoundation.CFGetTypeID.argtypes = [ - CFTypeRef - ] + CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef] CoreFoundation.CFGetTypeID.restype = CFTypeID CoreFoundation.CFStringCreateWithCString.argtypes = [ CFAllocatorRef, c_char_p, - CFStringEncoding + CFStringEncoding, ] CoreFoundation.CFStringCreateWithCString.restype = CFStringRef - CoreFoundation.CFStringGetCStringPtr.argtypes = [ - CFStringRef, - CFStringEncoding - ] + CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding] CoreFoundation.CFStringGetCStringPtr.restype = c_char_p CoreFoundation.CFStringGetCString.argtypes = [ CFStringRef, c_char_p, CFIndex, - CFStringEncoding + CFStringEncoding, ] CoreFoundation.CFStringGetCString.restype = c_bool - CoreFoundation.CFDataCreate.argtypes = [ - CFAllocatorRef, - c_char_p, - CFIndex - ] + CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex] CoreFoundation.CFDataCreate.restype = CFDataRef - CoreFoundation.CFDataGetLength.argtypes = [ - CFDataRef - ] + CoreFoundation.CFDataGetLength.argtypes = [CFDataRef] CoreFoundation.CFDataGetLength.restype = CFIndex - CoreFoundation.CFDataGetBytePtr.argtypes = [ - CFDataRef - ] + CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef] 
CoreFoundation.CFDataGetBytePtr.restype = c_void_p CoreFoundation.CFDictionaryCreate.argtypes = [ @@ -435,14 +341,11 @@ try: POINTER(CFTypeRef), CFIndex, CFDictionaryKeyCallBacks, - CFDictionaryValueCallBacks + CFDictionaryValueCallBacks, ] CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef - CoreFoundation.CFDictionaryGetValue.argtypes = [ - CFDictionaryRef, - CFTypeRef - ] + CoreFoundation.CFDictionaryGetValue.argtypes = [CFDictionaryRef, CFTypeRef] CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef CoreFoundation.CFArrayCreate.argtypes = [ @@ -456,36 +359,30 @@ try: CoreFoundation.CFArrayCreateMutable.argtypes = [ CFAllocatorRef, CFIndex, - CFArrayCallBacks + CFArrayCallBacks, ] CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef - CoreFoundation.CFArrayAppendValue.argtypes = [ - CFMutableArrayRef, - c_void_p - ] + CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p] CoreFoundation.CFArrayAppendValue.restype = None - CoreFoundation.CFArrayGetCount.argtypes = [ - CFArrayRef - ] + CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef] CoreFoundation.CFArrayGetCount.restype = CFIndex - CoreFoundation.CFArrayGetValueAtIndex.argtypes = [ - CFArrayRef, - CFIndex - ] + CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex] CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll( - CoreFoundation, 'kCFAllocatorDefault' + CoreFoundation, "kCFAllocatorDefault" + ) + CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll( + CoreFoundation, "kCFTypeArrayCallBacks" ) - CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(CoreFoundation, 'kCFTypeArrayCallBacks') CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll( - CoreFoundation, 'kCFTypeDictionaryKeyCallBacks' + CoreFoundation, "kCFTypeDictionaryKeyCallBacks" ) CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll( - CoreFoundation, 'kCFTypeDictionaryValueCallBacks' + 
CoreFoundation, "kCFTypeDictionaryValueCallBacks" ) CoreFoundation.CFTypeRef = CFTypeRef @@ -494,7 +391,7 @@ try: CoreFoundation.CFDictionaryRef = CFDictionaryRef except (AttributeError): - raise ImportError('Error initializing ctypes') + raise ImportError("Error initializing ctypes") class CFConst(object): @@ -502,6 +399,7 @@ class CFConst(object): A class object that acts as essentially a namespace for CoreFoundation constants. """ + kCFStringEncodingUTF8 = CFStringEncoding(0x08000100) @@ -509,6 +407,7 @@ class SecurityConst(object): """ A class object that acts as essentially a namespace for Security constants. """ + kSSLSessionOptionBreakOnServerAuth = 0 kSSLProtocol2 = 1 @@ -516,6 +415,9 @@ class SecurityConst(object): kTLSProtocol1 = 4 kTLSProtocol11 = 7 kTLSProtocol12 = 8 + # SecureTransport does not support TLS 1.3 even if there's a constant for it + kTLSProtocol13 = 10 + kTLSProtocolMaxSupported = 999 kSSLClientSide = 1 kSSLStreamType = 0 @@ -558,30 +460,27 @@ class SecurityConst(object): errSecInvalidTrustSettings = -25262 # Cipher suites. We only pick the ones our default cipher string allows. 
+ # Source: https://developer.apple.com/documentation/security/1550981-ssl_cipher_suite_values TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030 TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F - TLS_DHE_DSS_WITH_AES_256_GCM_SHA384 = 0x00A3 + TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA9 + TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA8 TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F - TLS_DHE_DSS_WITH_AES_128_GCM_SHA256 = 0x00A2 TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024 TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028 TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014 TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B - TLS_DHE_DSS_WITH_AES_256_CBC_SHA256 = 0x006A TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039 - TLS_DHE_DSS_WITH_AES_256_CBC_SHA = 0x0038 TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023 TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027 TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009 TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013 TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067 - TLS_DHE_DSS_WITH_AES_128_CBC_SHA256 = 0x0040 TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033 - TLS_DHE_DSS_WITH_AES_128_CBC_SHA = 0x0032 TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D @@ -590,4 +489,5 @@ class SecurityConst(object): TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F TLS_AES_128_GCM_SHA256 = 0x1301 TLS_AES_256_GCM_SHA384 = 0x1302 - TLS_CHACHA20_POLY1305_SHA256 = 0x1303 + TLS_AES_128_CCM_8_SHA256 = 0x1305 + TLS_AES_128_CCM_SHA256 = 0x1304 diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py index b13cd9e7..e60168ca 100644 --- 
a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py @@ -66,22 +66,18 @@ def _cf_string_to_unicode(value): value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p)) string = CoreFoundation.CFStringGetCStringPtr( - value_as_void_p, - CFConst.kCFStringEncodingUTF8 + value_as_void_p, CFConst.kCFStringEncodingUTF8 ) if string is None: buffer = ctypes.create_string_buffer(1024) result = CoreFoundation.CFStringGetCString( - value_as_void_p, - buffer, - 1024, - CFConst.kCFStringEncodingUTF8 + value_as_void_p, buffer, 1024, CFConst.kCFStringEncodingUTF8 ) if not result: - raise OSError('Error copying C string from CFStringRef') + raise OSError("Error copying C string from CFStringRef") string = buffer.value if string is not None: - string = string.decode('utf-8') + string = string.decode("utf-8") return string @@ -97,8 +93,8 @@ def _assert_no_error(error, exception_class=None): output = _cf_string_to_unicode(cf_error_string) CoreFoundation.CFRelease(cf_error_string) - if output is None or output == u'': - output = u'OSStatus %s' % error + if output is None or output == u"": + output = u"OSStatus %s" % error if exception_class is None: exception_class = ssl.SSLError @@ -115,8 +111,7 @@ def _cert_array_from_pem(pem_bundle): pem_bundle = pem_bundle.replace(b"\r\n", b"\n") der_certs = [ - base64.b64decode(match.group(1)) - for match in _PEM_CERTS_RE.finditer(pem_bundle) + base64.b64decode(match.group(1)) for match in _PEM_CERTS_RE.finditer(pem_bundle) ] if not der_certs: raise ssl.SSLError("No root certificates specified") @@ -124,7 +119,7 @@ def _cert_array_from_pem(pem_bundle): cert_array = CoreFoundation.CFArrayCreateMutable( CoreFoundation.kCFAllocatorDefault, 0, - ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks) + ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), ) if not 
cert_array: raise ssl.SSLError("Unable to allocate memory!") @@ -186,21 +181,16 @@ def _temporary_keychain(): # some random bytes to password-protect the keychain we're creating, so we # ask for 40 random bytes. random_bytes = os.urandom(40) - filename = base64.b16encode(random_bytes[:8]).decode('utf-8') + filename = base64.b16encode(random_bytes[:8]).decode("utf-8") password = base64.b16encode(random_bytes[8:]) # Must be valid UTF-8 tempdirectory = tempfile.mkdtemp() - keychain_path = os.path.join(tempdirectory, filename).encode('utf-8') + keychain_path = os.path.join(tempdirectory, filename).encode("utf-8") # We now want to create the keychain itself. keychain = Security.SecKeychainRef() status = Security.SecKeychainCreate( - keychain_path, - len(password), - password, - False, - None, - ctypes.byref(keychain) + keychain_path, len(password), password, False, None, ctypes.byref(keychain) ) _assert_no_error(status) @@ -219,14 +209,12 @@ def _load_items_from_file(keychain, path): identities = [] result_array = None - with open(path, 'rb') as f: + with open(path, "rb") as f: raw_filedata = f.read() try: filedata = CoreFoundation.CFDataCreate( - CoreFoundation.kCFAllocatorDefault, - raw_filedata, - len(raw_filedata) + CoreFoundation.kCFAllocatorDefault, raw_filedata, len(raw_filedata) ) result_array = CoreFoundation.CFArrayRef() result = Security.SecItemImport( @@ -237,7 +225,7 @@ def _load_items_from_file(keychain, path): 0, # import flags None, # key params, can include passphrase in the future keychain, # The keychain to insert into - ctypes.byref(result_array) # Results + ctypes.byref(result_array), # Results ) _assert_no_error(result) @@ -247,9 +235,7 @@ def _load_items_from_file(keychain, path): # keychain already has them! 
result_count = CoreFoundation.CFArrayGetCount(result_array) for index in range(result_count): - item = CoreFoundation.CFArrayGetValueAtIndex( - result_array, index - ) + item = CoreFoundation.CFArrayGetValueAtIndex(result_array, index) item = ctypes.cast(item, CoreFoundation.CFTypeRef) if _is_cert(item): @@ -307,9 +293,7 @@ def _load_client_cert_chain(keychain, *paths): try: for file_path in paths: - new_identities, new_certs = _load_items_from_file( - keychain, file_path - ) + new_identities, new_certs = _load_items_from_file(keychain, file_path) identities.extend(new_identities) certificates.extend(new_certs) @@ -318,9 +302,7 @@ def _load_client_cert_chain(keychain, *paths): if not identities: new_identity = Security.SecIdentityRef() status = Security.SecIdentityCreateWithCertificate( - keychain, - certificates[0], - ctypes.byref(new_identity) + keychain, certificates[0], ctypes.byref(new_identity) ) _assert_no_error(status) identities.append(new_identity) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/appengine.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/appengine.py index 9b42952d..d09d2be6 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/appengine.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/appengine.py @@ -50,7 +50,7 @@ from ..exceptions import ( MaxRetryError, ProtocolError, TimeoutError, - SSLError + SSLError, ) from ..request import RequestMethods @@ -96,23 +96,24 @@ class AppEngineManager(RequestMethods): Beyond those cases, it will raise normal urllib3 errors. 
""" - def __init__(self, headers=None, retries=None, validate_certificate=True, - urlfetch_retries=True): + def __init__( + self, + headers=None, + retries=None, + validate_certificate=True, + urlfetch_retries=True, + ): if not urlfetch: raise AppEnginePlatformError( - "URLFetch is not available in this environment.") - - if is_prod_appengine_mvms(): - raise AppEnginePlatformError( - "Use normal urllib3.PoolManager instead of AppEngineManager" - "on Managed VMs, as using URLFetch is not necessary in " - "this environment.") + "URLFetch is not available in this environment." + ) warnings.warn( "urllib3 is using URLFetch on Google App Engine sandbox instead " "of sockets. To use sockets directly instead of URLFetch see " "https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.", - AppEnginePlatformWarning) + AppEnginePlatformWarning, + ) RequestMethods.__init__(self, headers) self.validate_certificate = validate_certificate @@ -127,17 +128,22 @@ class AppEngineManager(RequestMethods): # Return False to re-raise any potential exceptions return False - def urlopen(self, method, url, body=None, headers=None, - retries=None, redirect=True, timeout=Timeout.DEFAULT_TIMEOUT, - **response_kw): + def urlopen( + self, + method, + url, + body=None, + headers=None, + retries=None, + redirect=True, + timeout=Timeout.DEFAULT_TIMEOUT, + **response_kw + ): retries = self._get_retries(retries, redirect) try: - follow_redirects = ( - redirect and - retries.redirect != 0 and - retries.total) + follow_redirects = redirect and retries.redirect != 0 and retries.total response = urlfetch.fetch( url, payload=body, @@ -152,44 +158,52 @@ class AppEngineManager(RequestMethods): raise TimeoutError(self, e) except urlfetch.InvalidURLError as e: - if 'too large' in str(e): + if "too large" in str(e): raise AppEnginePlatformError( "URLFetch request too large, URLFetch only " - "supports requests up to 10mb in size.", e) + "supports requests up to 10mb in size.", + e, + ) raise 
ProtocolError(e) except urlfetch.DownloadError as e: - if 'Too many redirects' in str(e): + if "Too many redirects" in str(e): raise MaxRetryError(self, url, reason=e) raise ProtocolError(e) except urlfetch.ResponseTooLargeError as e: raise AppEnginePlatformError( "URLFetch response too large, URLFetch only supports" - "responses up to 32mb in size.", e) + "responses up to 32mb in size.", + e, + ) except urlfetch.SSLCertificateError as e: raise SSLError(e) except urlfetch.InvalidMethodError as e: raise AppEnginePlatformError( - "URLFetch does not support method: %s" % method, e) + "URLFetch does not support method: %s" % method, e + ) http_response = self._urlfetch_response_to_http_response( - response, retries=retries, **response_kw) + response, retries=retries, **response_kw + ) # Handle redirect? redirect_location = redirect and http_response.get_redirect_location() if redirect_location: # Check for redirect response - if (self.urlfetch_retries and retries.raise_on_redirect): + if self.urlfetch_retries and retries.raise_on_redirect: raise MaxRetryError(self, url, "too many redirects") else: if http_response.status == 303: - method = 'GET' + method = "GET" try: - retries = retries.increment(method, url, response=http_response, _pool=self) + retries = retries.increment( + method, url, response=http_response, _pool=self + ) except MaxRetryError: if retries.raise_on_redirect: raise MaxRetryError(self, url, "too many redirects") @@ -199,22 +213,32 @@ class AppEngineManager(RequestMethods): log.debug("Redirecting %s -> %s", url, redirect_location) redirect_url = urljoin(url, redirect_location) return self.urlopen( - method, redirect_url, body, headers, - retries=retries, redirect=redirect, - timeout=timeout, **response_kw) + method, + redirect_url, + body, + headers, + retries=retries, + redirect=redirect, + timeout=timeout, + **response_kw + ) # Check if we should retry the HTTP response. 
- has_retry_after = bool(http_response.getheader('Retry-After')) + has_retry_after = bool(http_response.getheader("Retry-After")) if retries.is_retry(method, http_response.status, has_retry_after): - retries = retries.increment( - method, url, response=http_response, _pool=self) + retries = retries.increment(method, url, response=http_response, _pool=self) log.debug("Retry: %s", url) retries.sleep(http_response) return self.urlopen( - method, url, - body=body, headers=headers, - retries=retries, redirect=redirect, - timeout=timeout, **response_kw) + method, + url, + body=body, + headers=headers, + retries=retries, + redirect=redirect, + timeout=timeout, + **response_kw + ) return http_response @@ -223,18 +247,18 @@ class AppEngineManager(RequestMethods): if is_prod_appengine(): # Production GAE handles deflate encoding automatically, but does # not remove the encoding header. - content_encoding = urlfetch_resp.headers.get('content-encoding') + content_encoding = urlfetch_resp.headers.get("content-encoding") - if content_encoding == 'deflate': - del urlfetch_resp.headers['content-encoding'] + if content_encoding == "deflate": + del urlfetch_resp.headers["content-encoding"] - transfer_encoding = urlfetch_resp.headers.get('transfer-encoding') + transfer_encoding = urlfetch_resp.headers.get("transfer-encoding") # We have a full response's content, # so let's make sure we don't report ourselves as chunked data. 
- if transfer_encoding == 'chunked': + if transfer_encoding == "chunked": encodings = transfer_encoding.split(",") - encodings.remove('chunked') - urlfetch_resp.headers['transfer-encoding'] = ','.join(encodings) + encodings.remove("chunked") + urlfetch_resp.headers["transfer-encoding"] = ",".join(encodings) original_response = HTTPResponse( # In order for decoding to work, we must present the content as @@ -262,20 +286,21 @@ class AppEngineManager(RequestMethods): warnings.warn( "URLFetch does not support granular timeout settings, " "reverting to total or default URLFetch timeout.", - AppEnginePlatformWarning) + AppEnginePlatformWarning, + ) return timeout.total return timeout def _get_retries(self, retries, redirect): if not isinstance(retries, Retry): - retries = Retry.from_int( - retries, redirect=redirect, default=self.retries) + retries = Retry.from_int(retries, redirect=redirect, default=self.retries) if retries.connect or retries.read or retries.redirect: warnings.warn( "URLFetch only supports total retries and does not " "recognize connect, read, or redirect retry parameters.", - AppEnginePlatformWarning) + AppEnginePlatformWarning, + ) return retries diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py index 8ea127c5..1fd242a6 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py @@ -20,7 +20,7 @@ class NTLMConnectionPool(HTTPSConnectionPool): Implements an NTLM authentication version of an urllib3 connection pool """ - scheme = 'https' + scheme = "https" def __init__(self, user, pw, authurl, *args, **kwargs): """ @@ -31,7 +31,7 @@ class NTLMConnectionPool(HTTPSConnectionPool): super(NTLMConnectionPool, self).__init__(*args, **kwargs) 
self.authurl = authurl self.rawuser = user - user_parts = user.split('\\', 1) + user_parts = user.split("\\", 1) self.domain = user_parts[0].upper() self.user = user_parts[1] self.pw = pw @@ -40,72 +40,82 @@ class NTLMConnectionPool(HTTPSConnectionPool): # Performs the NTLM handshake that secures the connection. The socket # must be kept open while requests are performed. self.num_connections += 1 - log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s', - self.num_connections, self.host, self.authurl) + log.debug( + "Starting NTLM HTTPS connection no. %d: https://%s%s", + self.num_connections, + self.host, + self.authurl, + ) - headers = {'Connection': 'Keep-Alive'} - req_header = 'Authorization' - resp_header = 'www-authenticate' + headers = {"Connection": "Keep-Alive"} + req_header = "Authorization" + resp_header = "www-authenticate" conn = HTTPSConnection(host=self.host, port=self.port) # Send negotiation message - headers[req_header] = ( - 'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser)) - log.debug('Request headers: %s', headers) - conn.request('GET', self.authurl, None, headers) + headers[req_header] = "NTLM %s" % ntlm.create_NTLM_NEGOTIATE_MESSAGE( + self.rawuser + ) + log.debug("Request headers: %s", headers) + conn.request("GET", self.authurl, None, headers) res = conn.getresponse() reshdr = dict(res.getheaders()) - log.debug('Response status: %s %s', res.status, res.reason) - log.debug('Response headers: %s', reshdr) - log.debug('Response data: %s [...]', res.read(100)) + log.debug("Response status: %s %s", res.status, res.reason) + log.debug("Response headers: %s", reshdr) + log.debug("Response data: %s [...]", res.read(100)) # Remove the reference to the socket, so that it can not be closed by # the response object (we want to keep the socket open) res.fp = None # Server should respond with a challenge message - auth_header_values = reshdr[resp_header].split(', ') + auth_header_values = reshdr[resp_header].split(", ") 
auth_header_value = None for s in auth_header_values: - if s[:5] == 'NTLM ': + if s[:5] == "NTLM ": auth_header_value = s[5:] if auth_header_value is None: - raise Exception('Unexpected %s response header: %s' % - (resp_header, reshdr[resp_header])) + raise Exception( + "Unexpected %s response header: %s" % (resp_header, reshdr[resp_header]) + ) # Send authentication message - ServerChallenge, NegotiateFlags = \ - ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value) - auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge, - self.user, - self.domain, - self.pw, - NegotiateFlags) - headers[req_header] = 'NTLM %s' % auth_msg - log.debug('Request headers: %s', headers) - conn.request('GET', self.authurl, None, headers) + ServerChallenge, NegotiateFlags = ntlm.parse_NTLM_CHALLENGE_MESSAGE( + auth_header_value + ) + auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE( + ServerChallenge, self.user, self.domain, self.pw, NegotiateFlags + ) + headers[req_header] = "NTLM %s" % auth_msg + log.debug("Request headers: %s", headers) + conn.request("GET", self.authurl, None, headers) res = conn.getresponse() - log.debug('Response status: %s %s', res.status, res.reason) - log.debug('Response headers: %s', dict(res.getheaders())) - log.debug('Response data: %s [...]', res.read()[:100]) + log.debug("Response status: %s %s", res.status, res.reason) + log.debug("Response headers: %s", dict(res.getheaders())) + log.debug("Response data: %s [...]", res.read()[:100]) if res.status != 200: if res.status == 401: - raise Exception('Server rejected request: wrong ' - 'username or password') - raise Exception('Wrong server response: %s %s' % - (res.status, res.reason)) + raise Exception("Server rejected request: wrong username or password") + raise Exception("Wrong server response: %s %s" % (res.status, res.reason)) res.fp = None - log.debug('Connection established') + log.debug("Connection established") return conn - def urlopen(self, method, url, body=None, headers=None, retries=3, 
- redirect=True, assert_same_host=True): + def urlopen( + self, + method, + url, + body=None, + headers=None, + retries=3, + redirect=True, + assert_same_host=True, + ): if headers is None: headers = {} - headers['Connection'] = 'Keep-Alive' - return super(NTLMConnectionPool, self).urlopen(method, url, body, - headers, retries, - redirect, - assert_same_host) + headers["Connection"] = "Keep-Alive" + return super(NTLMConnectionPool, self).urlopen( + method, url, body, headers, retries, redirect, assert_same_host + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py index 363667cb..d8fe0629 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py @@ -47,6 +47,7 @@ import OpenSSL.SSL from cryptography import x509 from cryptography.hazmat.backends.openssl import backend as openssl_backend from cryptography.hazmat.backends.openssl.x509 import _Certificate + try: from cryptography.x509 import UnsupportedExtension except ImportError: @@ -54,6 +55,7 @@ except ImportError: class UnsupportedExtension(Exception): pass + from socket import timeout, error as SocketError from io import BytesIO @@ -70,37 +72,35 @@ import sys from .. import util -__all__ = ['inject_into_urllib3', 'extract_from_urllib3'] + +__all__ = ["inject_into_urllib3", "extract_from_urllib3"] # SNI always works. HAS_SNI = True # Map from urllib3 to PyOpenSSL compatible parameter-values. 
_openssl_versions = { - ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD, + util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD, ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, } -if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'): +if hasattr(ssl, "PROTOCOL_SSLv3") and hasattr(OpenSSL.SSL, "SSLv3_METHOD"): + _openssl_versions[ssl.PROTOCOL_SSLv3] = OpenSSL.SSL.SSLv3_METHOD + +if hasattr(ssl, "PROTOCOL_TLSv1_1") and hasattr(OpenSSL.SSL, "TLSv1_1_METHOD"): _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD -if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'): +if hasattr(ssl, "PROTOCOL_TLSv1_2") and hasattr(OpenSSL.SSL, "TLSv1_2_METHOD"): _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD -try: - _openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD}) -except AttributeError: - pass _stdlib_to_openssl_verify = { ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE, ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER, - ssl.CERT_REQUIRED: - OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, + ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER + + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, } -_openssl_to_stdlib_verify = dict( - (v, k) for k, v in _stdlib_to_openssl_verify.items() -) +_openssl_to_stdlib_verify = dict((v, k) for k, v in _stdlib_to_openssl_verify.items()) # OpenSSL will only write 16K at a time SSL_WRITE_BLOCKSIZE = 16384 @@ -113,10 +113,11 @@ log = logging.getLogger(__name__) def inject_into_urllib3(): - 'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.' + "Monkey-patch urllib3 with PyOpenSSL-backed SSL-support." _validate_dependencies_met() + util.SSLContext = PyOpenSSLContext util.ssl_.SSLContext = PyOpenSSLContext util.HAS_SNI = HAS_SNI util.ssl_.HAS_SNI = HAS_SNI @@ -125,8 +126,9 @@ def inject_into_urllib3(): def extract_from_urllib3(): - 'Undo monkey-patching by :func:`inject_into_urllib3`.' + "Undo monkey-patching by :func:`inject_into_urllib3`." 
+ util.SSLContext = orig_util_SSLContext util.ssl_.SSLContext = orig_util_SSLContext util.HAS_SNI = orig_util_HAS_SNI util.ssl_.HAS_SNI = orig_util_HAS_SNI @@ -141,17 +143,23 @@ def _validate_dependencies_met(): """ # Method added in `cryptography==1.1`; not available in older versions from cryptography.x509.extensions import Extensions + if getattr(Extensions, "get_extension_for_class", None) is None: - raise ImportError("'cryptography' module missing required functionality. " - "Try upgrading to v1.3.4 or newer.") + raise ImportError( + "'cryptography' module missing required functionality. " + "Try upgrading to v1.3.4 or newer." + ) # pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509 # attribute is only present on those versions. from OpenSSL.crypto import X509 + x509 = X509() if getattr(x509, "_x509", None) is None: - raise ImportError("'pyOpenSSL' module missing required functionality. " - "Try upgrading to v0.14 or newer.") + raise ImportError( + "'pyOpenSSL' module missing required functionality. " + "Try upgrading to v0.14 or newer." + ) def _dnsname_to_stdlib(name): @@ -167,6 +175,7 @@ def _dnsname_to_stdlib(name): If the name cannot be idna-encoded then we return None signalling that the name given should be skipped. """ + def idna_encode(name): """ Borrowed wholesale from the Python Cryptography Project. It turns out @@ -176,19 +185,23 @@ def _dnsname_to_stdlib(name): from pip._vendor import idna try: - for prefix in [u'*.', u'.']: + for prefix in [u"*.", u"."]: if name.startswith(prefix): - name = name[len(prefix):] - return prefix.encode('ascii') + idna.encode(name) + name = name[len(prefix) :] + return prefix.encode("ascii") + idna.encode(name) return idna.encode(name) except idna.core.IDNAError: return None + # Don't send IPv6 addresses through the IDNA encoder. 
+ if ":" in name: + return name + name = idna_encode(name) if name is None: return None elif sys.version_info >= (3, 0): - name = name.decode('utf-8') + name = name.decode("utf-8") return name @@ -207,14 +220,16 @@ def get_subj_alt_name(peer_cert): # We want to find the SAN extension. Ask Cryptography to locate it (it's # faster than looping in Python) try: - ext = cert.extensions.get_extension_for_class( - x509.SubjectAlternativeName - ).value + ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value except x509.ExtensionNotFound: # No such extension, return the empty list. return [] - except (x509.DuplicateExtension, UnsupportedExtension, - x509.UnsupportedGeneralNameType, UnicodeError) as e: + except ( + x509.DuplicateExtension, + UnsupportedExtension, + x509.UnsupportedGeneralNameType, + UnicodeError, + ) as e: # A problem has been found with the quality of the certificate. Assume # no SAN field is present. log.warning( @@ -233,23 +248,23 @@ def get_subj_alt_name(peer_cert): # does with certificates, and so we need to attempt to do the same. # We also want to skip over names which cannot be idna encoded. names = [ - ('DNS', name) for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName)) + ("DNS", name) + for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName)) if name is not None ] names.extend( - ('IP Address', str(name)) - for name in ext.get_values_for_type(x509.IPAddress) + ("IP Address", str(name)) for name in ext.get_values_for_type(x509.IPAddress) ) return names class WrappedSocket(object): - '''API-compatibility wrapper for Python OpenSSL's Connection-class. + """API-compatibility wrapper for Python OpenSSL's Connection-class. Note: _makefile_refs, _drop() and _reuse() are needed for the garbage collector of pypy. 
- ''' + """ def __init__(self, connection, socket, suppress_ragged_eofs=True): self.connection = connection @@ -272,20 +287,24 @@ class WrappedSocket(object): try: data = self.connection.recv(*args, **kwargs) except OpenSSL.SSL.SysCallError as e: - if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): - return b'' + if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"): + return b"" else: raise SocketError(str(e)) - except OpenSSL.SSL.ZeroReturnError as e: + except OpenSSL.SSL.ZeroReturnError: if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: - return b'' + return b"" else: raise except OpenSSL.SSL.WantReadError: if not util.wait_for_read(self.socket, self.socket.gettimeout()): - raise timeout('The read operation timed out') + raise timeout("The read operation timed out") else: return self.recv(*args, **kwargs) + + # TLS 1.3 post-handshake authentication + except OpenSSL.SSL.Error as e: + raise ssl.SSLError("read error: %r" % e) else: return data @@ -293,21 +312,25 @@ class WrappedSocket(object): try: return self.connection.recv_into(*args, **kwargs) except OpenSSL.SSL.SysCallError as e: - if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): + if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"): return 0 else: raise SocketError(str(e)) - except OpenSSL.SSL.ZeroReturnError as e: + except OpenSSL.SSL.ZeroReturnError: if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: return 0 else: raise except OpenSSL.SSL.WantReadError: if not util.wait_for_read(self.socket, self.socket.gettimeout()): - raise timeout('The read operation timed out') + raise timeout("The read operation timed out") else: return self.recv_into(*args, **kwargs) + # TLS 1.3 post-handshake authentication + except OpenSSL.SSL.Error as e: + raise ssl.SSLError("read error: %r" % e) + def settimeout(self, timeout): return self.socket.settimeout(timeout) @@ -325,7 +348,9 @@ class WrappedSocket(object): def sendall(self, 
data): total_sent = 0 while total_sent < len(data): - sent = self._send_until_done(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE]) + sent = self._send_until_done( + data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE] + ) total_sent += sent def shutdown(self): @@ -349,17 +374,16 @@ class WrappedSocket(object): return x509 if binary_form: - return OpenSSL.crypto.dump_certificate( - OpenSSL.crypto.FILETYPE_ASN1, - x509) + return OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, x509) return { - 'subject': ( - (('commonName', x509.get_subject().CN),), - ), - 'subjectAltName': get_subj_alt_name(x509) + "subject": ((("commonName", x509.get_subject().CN),),), + "subjectAltName": get_subj_alt_name(x509), } + def version(self): + return self.connection.get_protocol_version_name() + def _reuse(self): self._makefile_refs += 1 @@ -371,9 +395,12 @@ class WrappedSocket(object): if _fileobject: # Platform-specific: Python 2 + def makefile(self, mode, bufsize=-1): self._makefile_refs += 1 return _fileobject(self, mode, bufsize, close=True) + + else: # Platform-specific: Python 3 makefile = backport_makefile @@ -386,6 +413,7 @@ class PyOpenSSLContext(object): for translating the interface of the standard library ``SSLContext`` object to calls into PyOpenSSL. 
""" + def __init__(self, protocol): self.protocol = _openssl_versions[protocol] self._ctx = OpenSSL.SSL.Context(self.protocol) @@ -407,41 +435,48 @@ class PyOpenSSLContext(object): @verify_mode.setter def verify_mode(self, value): - self._ctx.set_verify( - _stdlib_to_openssl_verify[value], - _verify_callback - ) + self._ctx.set_verify(_stdlib_to_openssl_verify[value], _verify_callback) def set_default_verify_paths(self): self._ctx.set_default_verify_paths() def set_ciphers(self, ciphers): if isinstance(ciphers, six.text_type): - ciphers = ciphers.encode('utf-8') + ciphers = ciphers.encode("utf-8") self._ctx.set_cipher_list(ciphers) def load_verify_locations(self, cafile=None, capath=None, cadata=None): if cafile is not None: - cafile = cafile.encode('utf-8') + cafile = cafile.encode("utf-8") if capath is not None: - capath = capath.encode('utf-8') - self._ctx.load_verify_locations(cafile, capath) - if cadata is not None: - self._ctx.load_verify_locations(BytesIO(cadata)) + capath = capath.encode("utf-8") + try: + self._ctx.load_verify_locations(cafile, capath) + if cadata is not None: + self._ctx.load_verify_locations(BytesIO(cadata)) + except OpenSSL.SSL.Error as e: + raise ssl.SSLError("unable to load trusted certificates: %r" % e) def load_cert_chain(self, certfile, keyfile=None, password=None): self._ctx.use_certificate_chain_file(certfile) if password is not None: - self._ctx.set_passwd_cb(lambda max_length, prompt_twice, userdata: password) + if not isinstance(password, six.binary_type): + password = password.encode("utf-8") + self._ctx.set_passwd_cb(lambda *_: password) self._ctx.use_privatekey_file(keyfile or certfile) - def wrap_socket(self, sock, server_side=False, - do_handshake_on_connect=True, suppress_ragged_eofs=True, - server_hostname=None): + def wrap_socket( + self, + sock, + server_side=False, + do_handshake_on_connect=True, + suppress_ragged_eofs=True, + server_hostname=None, + ): cnx = OpenSSL.SSL.Connection(self._ctx, sock) if 
isinstance(server_hostname, six.text_type): # Platform-specific: Python 3 - server_hostname = server_hostname.encode('utf-8') + server_hostname = server_hostname.encode("utf-8") if server_hostname is not None: cnx.set_tlsext_host_name(server_hostname) @@ -453,10 +488,10 @@ class PyOpenSSLContext(object): cnx.do_handshake() except OpenSSL.SSL.WantReadError: if not util.wait_for_read(sock, sock.gettimeout()): - raise timeout('select timed out') + raise timeout("select timed out") continue except OpenSSL.SSL.Error as e: - raise ssl.SSLError('bad handshake: %r' % e) + raise ssl.SSLError("bad handshake: %r" % e) break return WrappedSocket(cnx, sock) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/securetransport.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/securetransport.py index 77cb59ed..a6b7e94a 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/securetransport.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/securetransport.py @@ -23,6 +23,31 @@ To use this module, simply import and inject it:: urllib3.contrib.securetransport.inject_into_urllib3() Happy TLSing! + +This code is a bastardised version of the code found in Will Bond's oscrypto +library. An enormous debt is owed to him for blazing this trail for us. 
For +that reason, this code should be considered to be covered both by urllib3's +license and by oscrypto's: + + Copyright (c) 2015-2016 Will Bond <will@wbond.net> + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. """ from __future__ import absolute_import @@ -37,12 +62,12 @@ import threading import weakref from .. 
import util -from ._securetransport.bindings import ( - Security, SecurityConst, CoreFoundation -) +from ._securetransport.bindings import Security, SecurityConst, CoreFoundation from ._securetransport.low_level import ( - _assert_no_error, _cert_array_from_pem, _temporary_keychain, - _load_client_cert_chain + _assert_no_error, + _cert_array_from_pem, + _temporary_keychain, + _load_client_cert_chain, ) try: # Platform-specific: Python 2 @@ -51,7 +76,7 @@ except ImportError: # Platform-specific: Python 3 _fileobject = None from ..packages.backports.makefile import backport_makefile -__all__ = ['inject_into_urllib3', 'extract_from_urllib3'] +__all__ = ["inject_into_urllib3", "extract_from_urllib3"] # SNI always works HAS_SNI = True @@ -86,35 +111,32 @@ SSL_WRITE_BLOCKSIZE = 16384 # individual cipher suites. We need to do this because this is how # SecureTransport wants them. CIPHER_SUITES = [ - SecurityConst.TLS_AES_256_GCM_SHA384, - SecurityConst.TLS_CHACHA20_POLY1305_SHA256, - SecurityConst.TLS_AES_128_GCM_SHA256, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384, SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, - SecurityConst.TLS_DHE_DSS_WITH_AES_256_GCM_SHA384, + SecurityConst.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256, + SecurityConst.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256, SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384, - SecurityConst.TLS_DHE_DSS_WITH_AES_128_GCM_SHA256, SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA, - SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256, - SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA256, - 
SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA, - SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256, SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256, + SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA, SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256, - SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA256, SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA, - SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_AES_256_GCM_SHA384, + SecurityConst.TLS_AES_128_GCM_SHA256, SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384, SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_AES_128_CCM_8_SHA256, + SecurityConst.TLS_AES_128_CCM_SHA256, SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256, SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256, SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA, @@ -123,38 +145,43 @@ CIPHER_SUITES = [ # Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of # TLSv1 and a high of TLSv1.2. For everything else, we pin to that version. 
+# TLSv1 to 1.2 are supported on macOS 10.8+ _protocol_to_min_max = { - ssl.PROTOCOL_SSLv23: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12), + util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12) } if hasattr(ssl, "PROTOCOL_SSLv2"): _protocol_to_min_max[ssl.PROTOCOL_SSLv2] = ( - SecurityConst.kSSLProtocol2, SecurityConst.kSSLProtocol2 + SecurityConst.kSSLProtocol2, + SecurityConst.kSSLProtocol2, ) if hasattr(ssl, "PROTOCOL_SSLv3"): _protocol_to_min_max[ssl.PROTOCOL_SSLv3] = ( - SecurityConst.kSSLProtocol3, SecurityConst.kSSLProtocol3 + SecurityConst.kSSLProtocol3, + SecurityConst.kSSLProtocol3, ) if hasattr(ssl, "PROTOCOL_TLSv1"): _protocol_to_min_max[ssl.PROTOCOL_TLSv1] = ( - SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol1 + SecurityConst.kTLSProtocol1, + SecurityConst.kTLSProtocol1, ) if hasattr(ssl, "PROTOCOL_TLSv1_1"): _protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = ( - SecurityConst.kTLSProtocol11, SecurityConst.kTLSProtocol11 + SecurityConst.kTLSProtocol11, + SecurityConst.kTLSProtocol11, ) if hasattr(ssl, "PROTOCOL_TLSv1_2"): _protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = ( - SecurityConst.kTLSProtocol12, SecurityConst.kTLSProtocol12 + SecurityConst.kTLSProtocol12, + SecurityConst.kTLSProtocol12, ) -if hasattr(ssl, "PROTOCOL_TLS"): - _protocol_to_min_max[ssl.PROTOCOL_TLS] = _protocol_to_min_max[ssl.PROTOCOL_SSLv23] def inject_into_urllib3(): """ Monkey-patch urllib3 with SecureTransport-backed SSL-support. """ + util.SSLContext = SecureTransportContext util.ssl_.SSLContext = SecureTransportContext util.HAS_SNI = HAS_SNI util.ssl_.HAS_SNI = HAS_SNI @@ -166,6 +193,7 @@ def extract_from_urllib3(): """ Undo monkey-patching by :func:`inject_into_urllib3`. 
""" + util.SSLContext = orig_util_SSLContext util.ssl_.SSLContext = orig_util_SSLContext util.HAS_SNI = orig_util_HAS_SNI util.ssl_.HAS_SNI = orig_util_HAS_SNI @@ -195,7 +223,7 @@ def _read_callback(connection_id, data_buffer, data_length_pointer): while read_count < requested_length: if timeout is None or timeout >= 0: if not util.wait_for_read(base_socket, timeout): - raise socket.error(errno.EAGAIN, 'timed out') + raise socket.error(errno.EAGAIN, "timed out") remaining = requested_length - read_count buffer = (ctypes.c_char * remaining).from_address( @@ -251,7 +279,7 @@ def _write_callback(connection_id, data_buffer, data_length_pointer): while sent < bytes_to_write: if timeout is None or timeout >= 0: if not util.wait_for_write(base_socket, timeout): - raise socket.error(errno.EAGAIN, 'timed out') + raise socket.error(errno.EAGAIN, "timed out") chunk_sent = base_socket.send(data) sent += chunk_sent @@ -293,6 +321,7 @@ class WrappedSocket(object): Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage collector of PyPy. """ + def __init__(self, socket): self.socket = socket self.context = None @@ -357,7 +386,7 @@ class WrappedSocket(object): # We want data in memory, so load it up. if os.path.isfile(trust_bundle): - with open(trust_bundle, 'rb') as f: + with open(trust_bundle, "rb") as f: trust_bundle = f.read() cert_array = None @@ -371,9 +400,7 @@ class WrappedSocket(object): # created for this connection, shove our CAs into it, tell ST to # ignore everything else it knows, and then ask if it can build a # chain. This is a buuuunch of code. 
- result = Security.SSLCopyPeerTrust( - self.context, ctypes.byref(trust) - ) + result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust)) _assert_no_error(result) if not trust: raise ssl.SSLError("Failed to copy trust reference") @@ -385,9 +412,7 @@ class WrappedSocket(object): _assert_no_error(result) trust_result = Security.SecTrustResultType() - result = Security.SecTrustEvaluate( - trust, ctypes.byref(trust_result) - ) + result = Security.SecTrustEvaluate(trust, ctypes.byref(trust_result)) _assert_no_error(result) finally: if trust: @@ -399,23 +424,24 @@ class WrappedSocket(object): # Ok, now we can look at what the result was. successes = ( SecurityConst.kSecTrustResultUnspecified, - SecurityConst.kSecTrustResultProceed + SecurityConst.kSecTrustResultProceed, ) if trust_result.value not in successes: raise ssl.SSLError( - "certificate verify failed, error code: %d" % - trust_result.value + "certificate verify failed, error code: %d" % trust_result.value ) - def handshake(self, - server_hostname, - verify, - trust_bundle, - min_version, - max_version, - client_cert, - client_key, - client_key_passphrase): + def handshake( + self, + server_hostname, + verify, + trust_bundle, + min_version, + max_version, + client_cert, + client_key, + client_key_passphrase, + ): """ Actually performs the TLS handshake. This is run automatically by wrapped socket, and shouldn't be needed in user code. @@ -445,7 +471,7 @@ class WrappedSocket(object): # If we have a server hostname, we should set that too. if server_hostname: if not isinstance(server_hostname, bytes): - server_hostname = server_hostname.encode('utf-8') + server_hostname = server_hostname.encode("utf-8") result = Security.SSLSetPeerDomainName( self.context, server_hostname, len(server_hostname) @@ -458,6 +484,7 @@ class WrappedSocket(object): # Set the minimum and maximum TLS versions. 
result = Security.SSLSetProtocolVersionMin(self.context, min_version) _assert_no_error(result) + result = Security.SSLSetProtocolVersionMax(self.context, max_version) _assert_no_error(result) @@ -467,9 +494,7 @@ class WrappedSocket(object): # authing in that case. if not verify or trust_bundle is not None: result = Security.SSLSetSessionOption( - self.context, - SecurityConst.kSSLSessionOptionBreakOnServerAuth, - True + self.context, SecurityConst.kSSLSessionOptionBreakOnServerAuth, True ) _assert_no_error(result) @@ -479,9 +504,7 @@ class WrappedSocket(object): self._client_cert_chain = _load_client_cert_chain( self._keychain, client_cert, client_key ) - result = Security.SSLSetCertificate( - self.context, self._client_cert_chain - ) + result = Security.SSLSetCertificate(self.context, self._client_cert_chain) _assert_no_error(result) while True: @@ -532,7 +555,7 @@ class WrappedSocket(object): # There are some result codes that we want to treat as "not always # errors". Specifically, those are errSSLWouldBlock, # errSSLClosedGraceful, and errSSLClosedNoNotify. - if (result == SecurityConst.errSSLWouldBlock): + if result == SecurityConst.errSSLWouldBlock: # If we didn't process any bytes, then this was just a time out. # However, we can get errSSLWouldBlock in situations when we *did* # read some data, and in those cases we should just read "short" @@ -540,7 +563,10 @@ class WrappedSocket(object): if processed_bytes.value == 0: # Timed out, no data read. raise socket.timeout("recv timed out") - elif result in (SecurityConst.errSSLClosedGraceful, SecurityConst.errSSLClosedNoNotify): + elif result in ( + SecurityConst.errSSLClosedGraceful, + SecurityConst.errSSLClosedNoNotify, + ): # The remote peer has closed this connection. We should do so as # well. Note that we don't actually return here because in # principle this could actually be fired along with return data. 
@@ -579,7 +605,7 @@ class WrappedSocket(object): def sendall(self, data): total_sent = 0 while total_sent < len(data): - sent = self.send(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE]) + sent = self.send(data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE]) total_sent += sent def shutdown(self): @@ -626,18 +652,14 @@ class WrappedSocket(object): # instead to just flag to urllib3 that it shouldn't do its own hostname # validation when using SecureTransport. if not binary_form: - raise ValueError( - "SecureTransport only supports dumping binary certs" - ) + raise ValueError("SecureTransport only supports dumping binary certs") trust = Security.SecTrustRef() certdata = None der_bytes = None try: # Grab the trust store. - result = Security.SSLCopyPeerTrust( - self.context, ctypes.byref(trust) - ) + result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust)) _assert_no_error(result) if not trust: # Probably we haven't done the handshake yet. No biggie. @@ -667,6 +689,27 @@ class WrappedSocket(object): return der_bytes + def version(self): + protocol = Security.SSLProtocol() + result = Security.SSLGetNegotiatedProtocolVersion( + self.context, ctypes.byref(protocol) + ) + _assert_no_error(result) + if protocol.value == SecurityConst.kTLSProtocol13: + raise ssl.SSLError("SecureTransport does not support TLS 1.3") + elif protocol.value == SecurityConst.kTLSProtocol12: + return "TLSv1.2" + elif protocol.value == SecurityConst.kTLSProtocol11: + return "TLSv1.1" + elif protocol.value == SecurityConst.kTLSProtocol1: + return "TLSv1" + elif protocol.value == SecurityConst.kSSLProtocol3: + return "SSLv3" + elif protocol.value == SecurityConst.kSSLProtocol2: + return "SSLv2" + else: + raise ssl.SSLError("Unknown TLS version: %r" % protocol) + def _reuse(self): self._makefile_refs += 1 @@ -678,16 +721,21 @@ class WrappedSocket(object): if _fileobject: # Platform-specific: Python 2 + def makefile(self, mode, bufsize=-1): self._makefile_refs += 1 return _fileobject(self, 
mode, bufsize, close=True) + + else: # Platform-specific: Python 3 + def makefile(self, mode="r", buffering=None, *args, **kwargs): # We disable buffering with SecureTransport because it conflicts with # the buffering that ST does internally (see issue #1153 for more). buffering = 0 return backport_makefile(self, mode, buffering, *args, **kwargs) + WrappedSocket.makefile = makefile @@ -697,6 +745,7 @@ class SecureTransportContext(object): interface of the standard library ``SSLContext`` object to calls into SecureTransport. """ + def __init__(self, protocol): self._min_version, self._max_version = _protocol_to_min_max[protocol] self._options = 0 @@ -763,16 +812,17 @@ class SecureTransportContext(object): def set_ciphers(self, ciphers): # For now, we just require the default cipher string. if ciphers != util.ssl_.DEFAULT_CIPHERS: - raise ValueError( - "SecureTransport doesn't support custom cipher strings" - ) + raise ValueError("SecureTransport doesn't support custom cipher strings") def load_verify_locations(self, cafile=None, capath=None, cadata=None): # OK, we only really support cadata and cafile. if capath is not None: - raise ValueError( - "SecureTransport does not support cert directories" - ) + raise ValueError("SecureTransport does not support cert directories") + + # Raise if cafile does not exist. + if cafile is not None: + with open(cafile): + pass self._trust_bundle = cafile or cadata @@ -781,9 +831,14 @@ class SecureTransportContext(object): self._client_key = keyfile self._client_cert_passphrase = password - def wrap_socket(self, sock, server_side=False, - do_handshake_on_connect=True, suppress_ragged_eofs=True, - server_hostname=None): + def wrap_socket( + self, + sock, + server_side=False, + do_handshake_on_connect=True, + suppress_ragged_eofs=True, + server_hostname=None, + ): # So, what do we do here? Firstly, we assert some properties. This is a # stripped down shim, so there is some functionality we don't support. 
# See PEP 543 for the real deal. @@ -797,8 +852,13 @@ class SecureTransportContext(object): # Now we can handshake wrapped_socket.handshake( - server_hostname, self._verify, self._trust_bundle, - self._min_version, self._max_version, self._client_cert, - self._client_key, self._client_key_passphrase + server_hostname, + self._verify, + self._trust_bundle, + self._min_version, + self._max_version, + self._client_cert, + self._client_key, + self._client_key_passphrase, ) return wrapped_socket diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/socks.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/socks.py index 811e312e..9e97f7aa 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/socks.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/contrib/socks.py @@ -1,25 +1,38 @@ # -*- coding: utf-8 -*- """ This module contains provisional support for SOCKS proxies from within -urllib3. This module supports SOCKS4 (specifically the SOCKS4A variant) and +urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and SOCKS5. To enable its functionality, either install PySocks or install this module with the ``socks`` extra. The SOCKS implementation supports the full range of urllib3 features. It also supports the following SOCKS features: -- SOCKS4 -- SOCKS4a -- SOCKS5 +- SOCKS4A (``proxy_url='socks4a://...``) +- SOCKS4 (``proxy_url='socks4://...``) +- SOCKS5 with remote DNS (``proxy_url='socks5h://...``) +- SOCKS5 with local DNS (``proxy_url='socks5://...``) - Usernames and passwords for the SOCKS proxy -Known Limitations: + .. note:: + It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in + your ``proxy_url`` to ensure that DNS resolution is done from the remote + server instead of client-side when connecting to a domain name. 
+ +SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5 +supports IPv4, IPv6, and domain names. + +When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url`` +will be sent as the ``userid`` section of the SOCKS request:: + + proxy_url="socks4a://<userid>@proxy-host" + +When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion +of the ``proxy_url`` will be sent as the username/password to authenticate +with the proxy:: + + proxy_url="socks5h://<username>:<password>@proxy-host" -- Currently PySocks does not support contacting remote websites via literal - IPv6 addresses. Any such connection attempt will fail. You must use a domain - name. -- Currently PySocks does not support IPv6 connections to the SOCKS proxy. Any - such connection attempt will fail. """ from __future__ import absolute_import @@ -29,23 +42,20 @@ except ImportError: import warnings from ..exceptions import DependencyWarning - warnings.warn(( - 'SOCKS support in urllib3 requires the installation of optional ' - 'dependencies: specifically, PySocks. For more information, see ' - 'https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies' + warnings.warn( + ( + "SOCKS support in urllib3 requires the installation of optional " + "dependencies: specifically, PySocks. 
For more information, see " + "https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies" ), - DependencyWarning + DependencyWarning, ) raise from socket import error as SocketError, timeout as SocketTimeout -from ..connection import ( - HTTPConnection, HTTPSConnection -) -from ..connectionpool import ( - HTTPConnectionPool, HTTPSConnectionPool -) +from ..connection import HTTPConnection, HTTPSConnection +from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool from ..exceptions import ConnectTimeoutError, NewConnectionError from ..poolmanager import PoolManager from ..util.url import parse_url @@ -60,8 +70,9 @@ class SOCKSConnection(HTTPConnection): """ A plain-text HTTP connection that connects via a SOCKS proxy. """ + def __init__(self, *args, **kwargs): - self._socks_options = kwargs.pop('_socks_options') + self._socks_options = kwargs.pop("_socks_options") super(SOCKSConnection, self).__init__(*args, **kwargs) def _new_conn(self): @@ -70,28 +81,30 @@ class SOCKSConnection(HTTPConnection): """ extra_kw = {} if self.source_address: - extra_kw['source_address'] = self.source_address + extra_kw["source_address"] = self.source_address if self.socket_options: - extra_kw['socket_options'] = self.socket_options + extra_kw["socket_options"] = self.socket_options try: conn = socks.create_connection( (self.host, self.port), - proxy_type=self._socks_options['socks_version'], - proxy_addr=self._socks_options['proxy_host'], - proxy_port=self._socks_options['proxy_port'], - proxy_username=self._socks_options['username'], - proxy_password=self._socks_options['password'], - proxy_rdns=self._socks_options['rdns'], + proxy_type=self._socks_options["socks_version"], + proxy_addr=self._socks_options["proxy_host"], + proxy_port=self._socks_options["proxy_port"], + proxy_username=self._socks_options["username"], + proxy_password=self._socks_options["password"], + proxy_rdns=self._socks_options["rdns"], timeout=self.timeout, **extra_kw ) - except SocketTimeout 
as e: + except SocketTimeout: raise ConnectTimeoutError( - self, "Connection to %s timed out. (connect timeout=%s)" % - (self.host, self.timeout)) + self, + "Connection to %s timed out. (connect timeout=%s)" + % (self.host, self.timeout), + ) except socks.ProxyError as e: # This is fragile as hell, but it seems to be the only way to raise @@ -101,23 +114,22 @@ class SOCKSConnection(HTTPConnection): if isinstance(error, SocketTimeout): raise ConnectTimeoutError( self, - "Connection to %s timed out. (connect timeout=%s)" % - (self.host, self.timeout) + "Connection to %s timed out. (connect timeout=%s)" + % (self.host, self.timeout), ) else: raise NewConnectionError( - self, - "Failed to establish a new connection: %s" % error + self, "Failed to establish a new connection: %s" % error ) else: raise NewConnectionError( - self, - "Failed to establish a new connection: %s" % e + self, "Failed to establish a new connection: %s" % e ) except SocketError as e: # Defensive: PySocks should catch all these. raise NewConnectionError( - self, "Failed to establish a new connection: %s" % e) + self, "Failed to establish a new connection: %s" % e + ) return conn @@ -143,47 +155,53 @@ class SOCKSProxyManager(PoolManager): A version of the urllib3 ProxyManager that routes connections via the defined SOCKS proxy. 
""" + pool_classes_by_scheme = { - 'http': SOCKSHTTPConnectionPool, - 'https': SOCKSHTTPSConnectionPool, + "http": SOCKSHTTPConnectionPool, + "https": SOCKSHTTPSConnectionPool, } - def __init__(self, proxy_url, username=None, password=None, - num_pools=10, headers=None, **connection_pool_kw): + def __init__( + self, + proxy_url, + username=None, + password=None, + num_pools=10, + headers=None, + **connection_pool_kw + ): parsed = parse_url(proxy_url) if username is None and password is None and parsed.auth is not None: - split = parsed.auth.split(':') + split = parsed.auth.split(":") if len(split) == 2: username, password = split - if parsed.scheme == 'socks5': + if parsed.scheme == "socks5": socks_version = socks.PROXY_TYPE_SOCKS5 rdns = False - elif parsed.scheme == 'socks5h': + elif parsed.scheme == "socks5h": socks_version = socks.PROXY_TYPE_SOCKS5 rdns = True - elif parsed.scheme == 'socks4': + elif parsed.scheme == "socks4": socks_version = socks.PROXY_TYPE_SOCKS4 rdns = False - elif parsed.scheme == 'socks4a': + elif parsed.scheme == "socks4a": socks_version = socks.PROXY_TYPE_SOCKS4 rdns = True else: - raise ValueError( - "Unable to determine SOCKS version from %s" % proxy_url - ) + raise ValueError("Unable to determine SOCKS version from %s" % proxy_url) self.proxy_url = proxy_url socks_options = { - 'socks_version': socks_version, - 'proxy_host': parsed.host, - 'proxy_port': parsed.port, - 'username': username, - 'password': password, - 'rdns': rdns + "socks_version": socks_version, + "proxy_host": parsed.host, + "proxy_port": parsed.port, + "username": username, + "password": password, + "rdns": rdns, } - connection_pool_kw['_socks_options'] = socks_options + connection_pool_kw["_socks_options"] = socks_options super(SOCKSProxyManager, self).__init__( num_pools, headers, **connection_pool_kw diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/exceptions.py 
b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/exceptions.py index 7bbaa987..5cc4d8a4 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/exceptions.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/exceptions.py @@ -1,7 +1,6 @@ from __future__ import absolute_import -from .packages.six.moves.http_client import ( - IncompleteRead as httplib_IncompleteRead -) +from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead + # Base Exceptions @@ -17,6 +16,7 @@ class HTTPWarning(Warning): class PoolError(HTTPError): "Base exception for errors caused within a pool." + def __init__(self, pool, message): self.pool = pool HTTPError.__init__(self, "%s: %s" % (pool, message)) @@ -28,6 +28,7 @@ class PoolError(HTTPError): class RequestError(PoolError): "Base exception for PoolErrors that have associated URLs." + def __init__(self, pool, url, message): self.url = url PoolError.__init__(self, pool, message) @@ -44,7 +45,10 @@ class SSLError(HTTPError): class ProxyError(HTTPError): "Raised when the connection to a proxy fails." - pass + + def __init__(self, message, error, *args): + super(ProxyError, self).__init__(message, error, *args) + self.original_error = error class DecodeError(HTTPError): @@ -63,6 +67,7 @@ ConnectionError = ProtocolError # Leaf Exceptions + class MaxRetryError(RequestError): """Raised when the maximum number of retries is exceeded. 
@@ -76,8 +81,7 @@ class MaxRetryError(RequestError): def __init__(self, pool, url, reason=None): self.reason = reason - message = "Max retries exceeded with url: %s (Caused by %r)" % ( - url, reason) + message = "Max retries exceeded with url: %s (Caused by %r)" % (url, reason) RequestError.__init__(self, pool, url, message) @@ -93,6 +97,7 @@ class HostChangedError(RequestError): class TimeoutStateError(HTTPError): """ Raised when passing an invalid state to a timeout """ + pass @@ -102,6 +107,7 @@ class TimeoutError(HTTPError): Catching this error will catch both :exc:`ReadTimeoutErrors <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`. """ + pass @@ -149,8 +155,8 @@ class LocationParseError(LocationValueError): class ResponseError(HTTPError): "Used as a container for an error reason supplied in a MaxRetryError." - GENERIC_ERROR = 'too many error responses' - SPECIFIC_ERROR = 'too many {status_code} error responses' + GENERIC_ERROR = "too many error responses" + SPECIFIC_ERROR = "too many {status_code} error responses" class SecurityWarning(HTTPWarning): @@ -188,6 +194,21 @@ class DependencyWarning(HTTPWarning): Warned when an attempt is made to import a module with missing optional dependencies. """ + + pass + + +class InvalidProxyConfigurationWarning(HTTPWarning): + """ + Warned when using an HTTPS proxy and an HTTPS URL. Currently + urllib3 doesn't support HTTPS proxies and the proxy will be + contacted via HTTP instead. This warning can be fixed by + changing your HTTPS proxy URL into an HTTP proxy URL. + + If you encounter this warning read this: + https://github.com/urllib3/urllib3/issues/1850 + """ + pass @@ -201,6 +222,7 @@ class BodyNotHttplibCompatible(HTTPError): Body should be httplib.HTTPResponse like (have an fp attribute which returns raw chunks) for read_chunked(). """ + pass @@ -212,12 +234,15 @@ class IncompleteRead(HTTPError, httplib_IncompleteRead): for `partial` to avoid creating large objects on streamed reads. 
""" + def __init__(self, partial, expected): super(IncompleteRead, self).__init__(partial, expected) def __repr__(self): - return ('IncompleteRead(%i bytes read, ' - '%i more expected)' % (self.partial, self.expected)) + return "IncompleteRead(%i bytes read, %i more expected)" % ( + self.partial, + self.expected, + ) class InvalidHeader(HTTPError): @@ -236,8 +261,9 @@ class ProxySchemeUnknown(AssertionError, ValueError): class HeaderParsingError(HTTPError): "Raised by assert_header_parsing, but we convert it to a log.warning statement." + def __init__(self, defects, unparsed_data): - message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data) + message = "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data) super(HeaderParsingError, self).__init__(message) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/fields.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/fields.py index 37fe64a3..8715b220 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/fields.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/fields.py @@ -1,11 +1,12 @@ from __future__ import absolute_import import email.utils import mimetypes +import re from .packages import six -def guess_content_type(filename, default='application/octet-stream'): +def guess_content_type(filename, default="application/octet-stream"): """ Guess the "Content-Type" of a file. @@ -19,57 +20,143 @@ def guess_content_type(filename, default='application/octet-stream'): return default -def format_header_param(name, value): +def format_header_param_rfc2231(name, value): """ - Helper function to format and quote a single header parameter. + Helper function to format and quote a single header parameter using the + strategy defined in RFC 2231. Particularly useful for header parameters which might contain - non-ASCII values, like file names. 
This follows RFC 2231, as - suggested by RFC 2388 Section 4.4. + non-ASCII values, like file names. This follows RFC 2388 Section 4.4. :param name: The name of the parameter, a string expected to be ASCII only. :param value: - The value of the parameter, provided as a unicode string. + The value of the parameter, provided as ``bytes`` or `str``. + :ret: + An RFC-2231-formatted unicode string. """ + if isinstance(value, six.binary_type): + value = value.decode("utf-8") + if not any(ch in value for ch in '"\\\r\n'): - result = '%s="%s"' % (name, value) + result = u'%s="%s"' % (name, value) try: - result.encode('ascii') + result.encode("ascii") except (UnicodeEncodeError, UnicodeDecodeError): pass else: return result - if not six.PY3 and isinstance(value, six.text_type): # Python 2: - value = value.encode('utf-8') - value = email.utils.encode_rfc2231(value, 'utf-8') - value = '%s*=%s' % (name, value) + + if six.PY2: # Python 2: + value = value.encode("utf-8") + + # encode_rfc2231 accepts an encoded string and returns an ascii-encoded + # string in Python 2 but accepts and returns unicode strings in Python 3 + value = email.utils.encode_rfc2231(value, "utf-8") + value = "%s*=%s" % (name, value) + + if six.PY2: # Python 2: + value = value.decode("utf-8") + return value +_HTML5_REPLACEMENTS = { + u"\u0022": u"%22", + # Replace "\" with "\\". + u"\u005C": u"\u005C\u005C", + u"\u005C": u"\u005C\u005C", +} + +# All control characters from 0x00 to 0x1F *except* 0x1B. 
+_HTML5_REPLACEMENTS.update( + { + six.unichr(cc): u"%{:02X}".format(cc) + for cc in range(0x00, 0x1F + 1) + if cc not in (0x1B,) + } +) + + +def _replace_multiple(value, needles_and_replacements): + def replacer(match): + return needles_and_replacements[match.group(0)] + + pattern = re.compile( + r"|".join([re.escape(needle) for needle in needles_and_replacements.keys()]) + ) + + result = pattern.sub(replacer, value) + + return result + + +def format_header_param_html5(name, value): + """ + Helper function to format and quote a single header parameter using the + HTML5 strategy. + + Particularly useful for header parameters which might contain + non-ASCII values, like file names. This follows the `HTML5 Working Draft + Section 4.10.22.7`_ and matches the behavior of curl and modern browsers. + + .. _HTML5 Working Draft Section 4.10.22.7: + https://w3c.github.io/html/sec-forms.html#multipart-form-data + + :param name: + The name of the parameter, a string expected to be ASCII only. + :param value: + The value of the parameter, provided as ``bytes`` or `str``. + :ret: + A unicode string, stripped of troublesome characters. + """ + if isinstance(value, six.binary_type): + value = value.decode("utf-8") + + value = _replace_multiple(value, _HTML5_REPLACEMENTS) + + return u'%s="%s"' % (name, value) + + +# For backwards-compatibility. +format_header_param = format_header_param_html5 + + class RequestField(object): """ A data container for request body parameters. :param name: - The name of this request field. + The name of this request field. Must be unicode. :param data: The data/value body. :param filename: - An optional filename of the request field. + An optional filename of the request field. Must be unicode. :param headers: An optional dict-like object of headers to initially use for the field. + :param header_formatter: + An optional callable that is used to encode and format the headers. By + default, this is :func:`format_header_param_html5`. 
""" - def __init__(self, name, data, filename=None, headers=None): + + def __init__( + self, + name, + data, + filename=None, + headers=None, + header_formatter=format_header_param_html5, + ): self._name = name self._filename = filename self.data = data self.headers = {} if headers: self.headers = dict(headers) + self.header_formatter = header_formatter @classmethod - def from_tuples(cls, fieldname, value): + def from_tuples(cls, fieldname, value, header_formatter=format_header_param_html5): """ A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. @@ -97,21 +184,25 @@ class RequestField(object): content_type = None data = value - request_param = cls(fieldname, data, filename=filename) + request_param = cls( + fieldname, data, filename=filename, header_formatter=header_formatter + ) request_param.make_multipart(content_type=content_type) return request_param def _render_part(self, name, value): """ - Overridable helper function to format a single header parameter. + Overridable helper function to format a single header parameter. By + default, this calls ``self.header_formatter``. :param name: The name of the parameter, a string expected to be ASCII only. :param value: The value of the parameter, provided as a unicode string. 
""" - return format_header_param(name, value) + + return self.header_formatter(name, value) def _render_parts(self, header_parts): """ @@ -133,7 +224,7 @@ class RequestField(object): if value is not None: parts.append(self._render_part(name, value)) - return '; '.join(parts) + return u"; ".join(parts) def render_headers(self): """ @@ -141,21 +232,22 @@ class RequestField(object): """ lines = [] - sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location'] + sort_keys = ["Content-Disposition", "Content-Type", "Content-Location"] for sort_key in sort_keys: if self.headers.get(sort_key, False): - lines.append('%s: %s' % (sort_key, self.headers[sort_key])) + lines.append(u"%s: %s" % (sort_key, self.headers[sort_key])) for header_name, header_value in self.headers.items(): if header_name not in sort_keys: if header_value: - lines.append('%s: %s' % (header_name, header_value)) + lines.append(u"%s: %s" % (header_name, header_value)) - lines.append('\r\n') - return '\r\n'.join(lines) + lines.append(u"\r\n") + return u"\r\n".join(lines) - def make_multipart(self, content_disposition=None, content_type=None, - content_location=None): + def make_multipart( + self, content_disposition=None, content_type=None, content_location=None + ): """ Makes this request field into a multipart request field. @@ -168,11 +260,14 @@ class RequestField(object): The 'Content-Location' of the request body. 
""" - self.headers['Content-Disposition'] = content_disposition or 'form-data' - self.headers['Content-Disposition'] += '; '.join([ - '', self._render_parts( - (('name', self._name), ('filename', self._filename)) - ) - ]) - self.headers['Content-Type'] = content_type - self.headers['Content-Location'] = content_location + self.headers["Content-Disposition"] = content_disposition or u"form-data" + self.headers["Content-Disposition"] += u"; ".join( + [ + u"", + self._render_parts( + ((u"name", self._name), (u"filename", self._filename)) + ), + ] + ) + self.headers["Content-Type"] = content_type + self.headers["Content-Location"] = content_location diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/filepost.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/filepost.py index 78f1e19b..b7b00992 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/filepost.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/filepost.py @@ -9,7 +9,7 @@ from .packages import six from .packages.six import b from .fields import RequestField -writer = codecs.lookup('utf-8')[3] +writer = codecs.lookup("utf-8")[3] def choose_boundary(): @@ -17,8 +17,8 @@ def choose_boundary(): Our embarrassingly-simple replacement for mimetools.choose_boundary. 
""" boundary = binascii.hexlify(os.urandom(16)) - if six.PY3: - boundary = boundary.decode('ascii') + if not six.PY2: + boundary = boundary.decode("ascii") return boundary @@ -76,7 +76,7 @@ def encode_multipart_formdata(fields, boundary=None): boundary = choose_boundary() for field in iter_field_objects(fields): - body.write(b('--%s\r\n' % (boundary))) + body.write(b("--%s\r\n" % (boundary))) writer(body).write(field.render_headers()) data = field.data @@ -89,10 +89,10 @@ def encode_multipart_formdata(fields, boundary=None): else: body.write(data) - body.write(b'\r\n') + body.write(b"\r\n") - body.write(b('--%s--\r\n' % (boundary))) + body.write(b("--%s--\r\n" % (boundary))) - content_type = str('multipart/form-data; boundary=%s' % boundary) + content_type = str("multipart/form-data; boundary=%s" % boundary) return body.getvalue(), content_type diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/packages/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/packages/__init__.py index 170e974c..fce4caa6 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/packages/__init__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/packages/__init__.py @@ -2,4 +2,4 @@ from __future__ import absolute_import from . 
import ssl_match_hostname -__all__ = ('ssl_match_hostname', ) +__all__ = ("ssl_match_hostname",) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py index 740db377..a3156a69 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py @@ -11,15 +11,14 @@ import io from socket import SocketIO -def backport_makefile(self, mode="r", buffering=None, encoding=None, - errors=None, newline=None): +def backport_makefile( + self, mode="r", buffering=None, encoding=None, errors=None, newline=None +): """ Backport of ``socket.makefile`` from Python 3.5. """ if not set(mode) <= {"r", "w", "b"}: - raise ValueError( - "invalid mode %r (only r, w, b allowed)" % (mode,) - ) + raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,)) writing = "w" in mode reading = "r" in mode or not writing assert reading or writing diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/packages/six.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/packages/six.py index 190c0239..31442409 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/packages/six.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/packages/six.py @@ -1,6 +1,4 @@ -"""Utilities for writing code that runs on Python 2 and 3""" - -# Copyright (c) 2010-2015 Benjamin Peterson +# Copyright (c) 2010-2019 Benjamin Peterson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -20,6 +18,8 @@ # OUT OF OR IN CONNECTION WITH THE 
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +"""Utilities for writing code that runs on Python 2 and 3""" + from __future__ import absolute_import import functools @@ -29,7 +29,7 @@ import sys import types __author__ = "Benjamin Peterson <benjamin@python.org>" -__version__ = "1.10.0" +__version__ = "1.12.0" # Useful for very coarse version differentiation. @@ -38,15 +38,15 @@ PY3 = sys.version_info[0] == 3 PY34 = sys.version_info[0:2] >= (3, 4) if PY3: - string_types = str, - integer_types = int, - class_types = type, + string_types = (str,) + integer_types = (int,) + class_types = (type,) text_type = str binary_type = bytes MAXSIZE = sys.maxsize else: - string_types = basestring, + string_types = (basestring,) integer_types = (int, long) class_types = (type, types.ClassType) text_type = unicode @@ -58,9 +58,9 @@ else: else: # It's possible to have sizeof(long) != sizeof(Py_ssize_t). class X(object): - def __len__(self): return 1 << 31 + try: len(X()) except OverflowError: @@ -84,7 +84,6 @@ def _import_module(name): class _LazyDescr(object): - def __init__(self, name): self.name = name @@ -101,7 +100,6 @@ class _LazyDescr(object): class MovedModule(_LazyDescr): - def __init__(self, name, old, new=None): super(MovedModule, self).__init__(name) if PY3: @@ -122,7 +120,6 @@ class MovedModule(_LazyDescr): class _LazyModule(types.ModuleType): - def __init__(self, name): super(_LazyModule, self).__init__(name) self.__doc__ = self.__class__.__doc__ @@ -137,7 +134,6 @@ class _LazyModule(types.ModuleType): class MovedAttribute(_LazyDescr): - def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): super(MovedAttribute, self).__init__(name) if PY3: @@ -221,28 +217,36 @@ class _SixMetaPathImporter(object): Required, if is_package is implemented""" self.__get_module(fullname) # eventually raises ImportError return None + get_source = get_code # same as get_code + _importer = _SixMetaPathImporter(__name__) class _MovedItems(_LazyModule): """Lazy 
loading of moved objects""" + __path__ = [] # mark as package _moved_attributes = [ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), - MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute( + "filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse" + ), MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), MovedAttribute("intern", "__builtin__", "sys"), MovedAttribute("map", "itertools", "builtins", "imap", "map"), MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("getoutput", "commands", "subprocess"), MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute( + "reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload" + ), MovedAttribute("reduce", "__builtin__", "functools"), MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), MovedAttribute("StringIO", "StringIO", "io"), @@ -251,7 +255,9 @@ _moved_attributes = [ MovedAttribute("UserString", "UserString", "collections"), MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), - MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedAttribute( + "zip_longest", "itertools", "itertools", "izip_longest", "zip_longest" + ), MovedModule("builtins", "__builtin__"), MovedModule("configparser", "ConfigParser"), MovedModule("copyreg", "copy_reg"), @@ -262,10 +268,13 @@ _moved_attributes = [ MovedModule("html_entities", "htmlentitydefs", "html.entities"), MovedModule("html_parser", "HTMLParser", "html.parser"), MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_base", "email.MIMEBase", 
"email.mime.base"), + MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), - MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule( + "email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart" + ), MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), - MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), @@ -283,15 +292,12 @@ _moved_attributes = [ MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), - MovedModule("tkinter_colorchooser", "tkColorChooser", - "tkinter.colorchooser"), - MovedModule("tkinter_commondialog", "tkCommonDialog", - "tkinter.commondialog"), + MovedModule("tkinter_colorchooser", "tkColorChooser", "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog"), MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), MovedModule("tkinter_font", "tkFont", "tkinter.font"), MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), - MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", - "tkinter.simpledialog"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"), MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), @@ -301,9 +307,7 @@ _moved_attributes = [ ] # Add windows specific modules. 
if sys.platform == "win32": - _moved_attributes += [ - MovedModule("winreg", "_winreg"), - ] + _moved_attributes += [MovedModule("winreg", "_winreg")] for attr in _moved_attributes: setattr(_MovedItems, attr.name, attr) @@ -337,10 +341,14 @@ _urllib_parse_moved_attributes = [ MovedAttribute("quote_plus", "urllib", "urllib.parse"), MovedAttribute("unquote", "urllib", "urllib.parse"), MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute( + "unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes" + ), MovedAttribute("urlencode", "urllib", "urllib.parse"), MovedAttribute("splitquery", "urllib", "urllib.parse"), MovedAttribute("splittag", "urllib", "urllib.parse"), MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("splitvalue", "urllib", "urllib.parse"), MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), MovedAttribute("uses_params", "urlparse", "urllib.parse"), @@ -353,8 +361,11 @@ del attr Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes -_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), - "moves.urllib_parse", "moves.urllib.parse") +_importer._add_module( + Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", + "moves.urllib.parse", +) class Module_six_moves_urllib_error(_LazyModule): @@ -373,8 +384,11 @@ del attr Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes -_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), - "moves.urllib_error", "moves.urllib.error") +_importer._add_module( + Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", + "moves.urllib.error", +) class Module_six_moves_urllib_request(_LazyModule): @@ -416,6 +430,8 @@ _urllib_request_moved_attributes = [ MovedAttribute("URLopener", "urllib", "urllib.request"), 
MovedAttribute("FancyURLopener", "urllib", "urllib.request"), MovedAttribute("proxy_bypass", "urllib", "urllib.request"), + MovedAttribute("parse_http_list", "urllib2", "urllib.request"), + MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), ] for attr in _urllib_request_moved_attributes: setattr(Module_six_moves_urllib_request, attr.name, attr) @@ -423,8 +439,11 @@ del attr Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes -_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), - "moves.urllib_request", "moves.urllib.request") +_importer._add_module( + Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", + "moves.urllib.request", +) class Module_six_moves_urllib_response(_LazyModule): @@ -444,8 +463,11 @@ del attr Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes -_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), - "moves.urllib_response", "moves.urllib.response") +_importer._add_module( + Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", + "moves.urllib.response", +) class Module_six_moves_urllib_robotparser(_LazyModule): @@ -454,21 +476,27 @@ class Module_six_moves_urllib_robotparser(_LazyModule): _urllib_robotparser_moved_attributes = [ - MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser") ] for attr in _urllib_robotparser_moved_attributes: setattr(Module_six_moves_urllib_robotparser, attr.name, attr) del attr -Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes +Module_six_moves_urllib_robotparser._moved_attributes = ( + _urllib_robotparser_moved_attributes +) -_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), - 
"moves.urllib_robotparser", "moves.urllib.robotparser") +_importer._add_module( + Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", + "moves.urllib.robotparser", +) class Module_six_moves_urllib(types.ModuleType): """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package parse = _importer._get_module("moves.urllib_parse") error = _importer._get_module("moves.urllib_error") @@ -477,10 +505,12 @@ class Module_six_moves_urllib(types.ModuleType): robotparser = _importer._get_module("moves.urllib_robotparser") def __dir__(self): - return ['parse', 'error', 'request', 'response', 'robotparser'] + return ["parse", "error", "request", "response", "robotparser"] + -_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), - "moves.urllib") +_importer._add_module( + Module_six_moves_urllib(__name__ + ".moves.urllib"), "moves.urllib" +) def add_move(move): @@ -520,19 +550,24 @@ else: try: advance_iterator = next except NameError: + def advance_iterator(it): return it.next() + + next = advance_iterator try: callable = callable except NameError: + def callable(obj): return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) if PY3: + def get_unbound_function(unbound): return unbound @@ -543,6 +578,7 @@ if PY3: Iterator = object else: + def get_unbound_function(unbound): return unbound.im_func @@ -553,13 +589,13 @@ else: return types.MethodType(func, None, cls) class Iterator(object): - def next(self): return type(self).__next__(self) callable = callable -_add_doc(get_unbound_function, - """Get the function out of a possibly unbound function""") +_add_doc( + get_unbound_function, """Get the function out of a possibly unbound function""" +) get_method_function = operator.attrgetter(_meth_func) @@ -571,6 +607,7 @@ get_function_globals = operator.attrgetter(_func_globals) if PY3: + def iterkeys(d, **kw): return iter(d.keys(**kw)) @@ -589,6 
+626,7 @@ if PY3: viewitems = operator.methodcaller("items") else: + def iterkeys(d, **kw): return d.iterkeys(**kw) @@ -609,28 +647,33 @@ else: _add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") _add_doc(itervalues, "Return an iterator over the values of a dictionary.") -_add_doc(iteritems, - "Return an iterator over the (key, value) pairs of a dictionary.") -_add_doc(iterlists, - "Return an iterator over the (key, [values]) pairs of a dictionary.") +_add_doc(iteritems, "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc( + iterlists, "Return an iterator over the (key, [values]) pairs of a dictionary." +) if PY3: + def b(s): return s.encode("latin-1") def u(s): return s + unichr = chr import struct + int2byte = struct.Struct(">B").pack del struct byte2int = operator.itemgetter(0) indexbytes = operator.getitem iterbytes = iter import io + StringIO = io.StringIO BytesIO = io.BytesIO + del io _assertCountEqual = "assertCountEqual" if sys.version_info[1] <= 1: _assertRaisesRegex = "assertRaisesRegexp" @@ -639,12 +682,15 @@ if PY3: _assertRaisesRegex = "assertRaisesRegex" _assertRegex = "assertRegex" else: + def b(s): return s + # Workaround for standalone backslash def u(s): - return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + return unicode(s.replace(r"\\", r"\\\\"), "unicode_escape") + unichr = unichr int2byte = chr @@ -653,8 +699,10 @@ else: def indexbytes(buf, i): return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) import StringIO + StringIO = BytesIO = StringIO.StringIO _assertCountEqual = "assertItemsEqual" _assertRaisesRegex = "assertRaisesRegexp" @@ -679,13 +727,19 @@ if PY3: exec_ = getattr(moves.builtins, "exec") def reraise(tp, value, tb=None): - if value is None: - value = tp() - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value + try: + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value 
+ finally: + value = None + tb = None + else: + def exec_(_code_, _globs_=None, _locs_=None): """Execute code in a namespace.""" if _globs_ is None: @@ -698,28 +752,45 @@ else: _locs_ = _globs_ exec("""exec _code_ in _globs_, _locs_""") - exec_("""def reraise(tp, value, tb=None): - raise tp, value, tb -""") + exec_( + """def reraise(tp, value, tb=None): + try: + raise tp, value, tb + finally: + tb = None +""" + ) if sys.version_info[:2] == (3, 2): - exec_("""def raise_from(value, from_value): - if from_value is None: - raise value - raise value from from_value -""") + exec_( + """def raise_from(value, from_value): + try: + if from_value is None: + raise value + raise value from from_value + finally: + value = None +""" + ) elif sys.version_info[:2] > (3, 2): - exec_("""def raise_from(value, from_value): - raise value from from_value -""") + exec_( + """def raise_from(value, from_value): + try: + raise value from from_value + finally: + value = None +""" + ) else: + def raise_from(value, from_value): raise value print_ = getattr(moves.builtins, "print", None) if print_ is None: + def print_(*args, **kwargs): """The new-style print function for Python 2.4 and 2.5.""" fp = kwargs.pop("file", sys.stdout) @@ -730,14 +801,17 @@ if print_ is None: if not isinstance(data, basestring): data = str(data) # If the file has an encoding, encode unicode with it. 
- if (isinstance(fp, file) and - isinstance(data, unicode) and - fp.encoding is not None): + if ( + isinstance(fp, file) + and isinstance(data, unicode) + and fp.encoding is not None + ): errors = getattr(fp, "errors", None) if errors is None: errors = "strict" data = data.encode(fp.encoding, errors) fp.write(data) + want_unicode = False sep = kwargs.pop("sep", None) if sep is not None: @@ -773,6 +847,8 @@ if print_ is None: write(sep) write(arg) write(end) + + if sys.version_info[:2] < (3, 3): _print = print_ @@ -783,16 +859,24 @@ if sys.version_info[:2] < (3, 3): if flush and fp is not None: fp.flush() + _add_doc(reraise, """Reraise an exception.""") if sys.version_info[0:2] < (3, 4): - def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): + + def wraps( + wrapped, + assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES, + ): def wrapper(f): f = functools.wraps(wrapped, assigned, updated)(f) f.__wrapped__ = wrapped return f + return wrapper + + else: wraps = functools.wraps @@ -802,29 +886,95 @@ def with_metaclass(meta, *bases): # This requires a bit of explanation: the basic idea is to make a dummy # metaclass for one level of class instantiation that replaces itself with # the actual metaclass. 
- class metaclass(meta): - + class metaclass(type): def __new__(cls, name, this_bases, d): return meta(name, bases, d) - return type.__new__(metaclass, 'temporary_class', (), {}) + + @classmethod + def __prepare__(cls, name, this_bases): + return meta.__prepare__(name, bases) + + return type.__new__(metaclass, "temporary_class", (), {}) def add_metaclass(metaclass): """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): orig_vars = cls.__dict__.copy() - slots = orig_vars.get('__slots__') + slots = orig_vars.get("__slots__") if slots is not None: if isinstance(slots, str): slots = [slots] for slots_var in slots: orig_vars.pop(slots_var) - orig_vars.pop('__dict__', None) - orig_vars.pop('__weakref__', None) + orig_vars.pop("__dict__", None) + orig_vars.pop("__weakref__", None) + if hasattr(cls, "__qualname__"): + orig_vars["__qualname__"] = cls.__qualname__ return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper +def ensure_binary(s, encoding="utf-8", errors="strict"): + """Coerce **s** to six.binary_type. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> encoded to `bytes` + - `bytes` -> `bytes` + """ + if isinstance(s, text_type): + return s.encode(encoding, errors) + elif isinstance(s, binary_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + +def ensure_str(s, encoding="utf-8", errors="strict"): + """Coerce *s* to `str`. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if not isinstance(s, (text_type, binary_type)): + raise TypeError("not expecting type '%s'" % type(s)) + if PY2 and isinstance(s, text_type): + s = s.encode(encoding, errors) + elif PY3 and isinstance(s, binary_type): + s = s.decode(encoding, errors) + return s + + +def ensure_text(s, encoding="utf-8", errors="strict"): + """Coerce *s* to six.text_type. 
+ + For Python 2: + - `unicode` -> `unicode` + - `str` -> `unicode` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if isinstance(s, binary_type): + return s.decode(encoding, errors) + elif isinstance(s, text_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + def python_2_unicode_compatible(klass): """ A decorator that defines __unicode__ and __str__ methods under Python 2. @@ -834,12 +984,13 @@ def python_2_unicode_compatible(klass): returning text and apply this decorator to the class. """ if PY2: - if '__str__' not in klass.__dict__: - raise ValueError("@python_2_unicode_compatible cannot be applied " - "to %s because it doesn't define __str__()." % - klass.__name__) + if "__str__" not in klass.__dict__: + raise ValueError( + "@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % klass.__name__ + ) klass.__unicode__ = klass.__str__ - klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + klass.__str__ = lambda self: self.__unicode__().encode("utf-8") return klass @@ -859,8 +1010,10 @@ if sys.meta_path: # be floating around. Therefore, we can't use isinstance() to check for # the six meta path importer, since the other six instance will have # inserted an importer with different class. 
- if (type(importer).__name__ == "_SixMetaPathImporter" and - importer.name == __name__): + if ( + type(importer).__name__ == "_SixMetaPathImporter" + and importer.name == __name__ + ): del sys.meta_path[i] break del i, importer diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py index d6594eb2..75b6bb1c 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py @@ -16,4 +16,4 @@ except ImportError: from ._implementation import CertificateError, match_hostname # Not needed, but documenting what we provide. -__all__ = ('CertificateError', 'match_hostname') +__all__ = ("CertificateError", "match_hostname") diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py index 970cf653..5831c2e0 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py @@ -15,7 +15,7 @@ try: except ImportError: ipaddress = None -__version__ = '3.5.0.1' +__version__ = "3.5.0.1" class CertificateError(ValueError): @@ -33,18 +33,19 @@ def _dnsname_match(dn, hostname, max_wildcards=1): # Ported from python3-syntax: # leftmost, *remainder = dn.split(r'.') - parts = dn.split(r'.') + parts = dn.split(r".") leftmost = parts[0] remainder = parts[1:] - wildcards = leftmost.count('*') + wildcards = 
leftmost.count("*") if wildcards > max_wildcards: # Issue #17980: avoid denials of service by refusing more # than one wildcard per fragment. A survey of established # policy among SSL implementations showed it to be a # reasonable choice. raise CertificateError( - "too many wildcards in certificate DNS name: " + repr(dn)) + "too many wildcards in certificate DNS name: " + repr(dn) + ) # speed up common case w/o wildcards if not wildcards: @@ -53,11 +54,11 @@ def _dnsname_match(dn, hostname, max_wildcards=1): # RFC 6125, section 6.4.3, subitem 1. # The client SHOULD NOT attempt to match a presented identifier in which # the wildcard character comprises a label other than the left-most label. - if leftmost == '*': + if leftmost == "*": # When '*' is a fragment by itself, it matches a non-empty dotless # fragment. - pats.append('[^.]+') - elif leftmost.startswith('xn--') or hostname.startswith('xn--'): + pats.append("[^.]+") + elif leftmost.startswith("xn--") or hostname.startswith("xn--"): # RFC 6125, section 6.4.3, subitem 3. # The client SHOULD NOT attempt to match a presented identifier # where the wildcard character is embedded within an A-label or @@ -65,21 +66,22 @@ def _dnsname_match(dn, hostname, max_wildcards=1): pats.append(re.escape(leftmost)) else: # Otherwise, '*' matches any dotless string, e.g. www* - pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) + pats.append(re.escape(leftmost).replace(r"\*", "[^.]*")) # add the remaining fragments, ignore any wildcards for frag in remainder: pats.append(re.escape(frag)) - pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) + pat = re.compile(r"\A" + r"\.".join(pats) + r"\Z", re.IGNORECASE) return pat.match(hostname) def _to_unicode(obj): if isinstance(obj, str) and sys.version_info < (3,): - obj = unicode(obj, encoding='ascii', errors='strict') + obj = unicode(obj, encoding="ascii", errors="strict") return obj + def _ipaddress_match(ipname, host_ip): """Exact matching of IP addresses. 
@@ -101,9 +103,11 @@ def match_hostname(cert, hostname): returns nothing. """ if not cert: - raise ValueError("empty or no certificate, match_hostname needs a " - "SSL socket or SSL context with either " - "CERT_OPTIONAL or CERT_REQUIRED") + raise ValueError( + "empty or no certificate, match_hostname needs a " + "SSL socket or SSL context with either " + "CERT_OPTIONAL or CERT_REQUIRED" + ) try: # Divergence from upstream: ipaddress can't handle byte str host_ip = ipaddress.ip_address(_to_unicode(hostname)) @@ -122,35 +126,35 @@ def match_hostname(cert, hostname): else: raise dnsnames = [] - san = cert.get('subjectAltName', ()) + san = cert.get("subjectAltName", ()) for key, value in san: - if key == 'DNS': + if key == "DNS": if host_ip is None and _dnsname_match(value, hostname): return dnsnames.append(value) - elif key == 'IP Address': + elif key == "IP Address": if host_ip is not None and _ipaddress_match(value, host_ip): return dnsnames.append(value) if not dnsnames: # The subject is only checked when there is no dNSName entry # in subjectAltName - for sub in cert.get('subject', ()): + for sub in cert.get("subject", ()): for key, value in sub: # XXX according to RFC 2818, the most specific Common Name # must be used. 
- if key == 'commonName': + if key == "commonName": if _dnsname_match(value, hostname): return dnsnames.append(value) if len(dnsnames) > 1: - raise CertificateError("hostname %r " - "doesn't match either of %s" - % (hostname, ', '.join(map(repr, dnsnames)))) + raise CertificateError( + "hostname %r " + "doesn't match either of %s" % (hostname, ", ".join(map(repr, dnsnames))) + ) elif len(dnsnames) == 1: - raise CertificateError("hostname %r " - "doesn't match %r" - % (hostname, dnsnames[0])) + raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0])) else: - raise CertificateError("no appropriate commonName or " - "subjectAltName fields were found") + raise CertificateError( + "no appropriate commonName or subjectAltName fields were found" + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/poolmanager.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/poolmanager.py index fe5491cf..e2bd3bd8 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/poolmanager.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/poolmanager.py @@ -2,57 +2,73 @@ from __future__ import absolute_import import collections import functools import logging +import warnings from ._collections import RecentlyUsedContainer from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool from .connectionpool import port_by_scheme -from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown +from .exceptions import ( + LocationValueError, + MaxRetryError, + ProxySchemeUnknown, + InvalidProxyConfigurationWarning, +) +from .packages import six from .packages.six.moves.urllib.parse import urljoin from .request import RequestMethods from .util.url import parse_url from .util.retry import Retry -__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url'] +__all__ = ["PoolManager", "ProxyManager", "proxy_from_url"] 
log = logging.getLogger(__name__) -SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs', - 'ssl_version', 'ca_cert_dir', 'ssl_context') +SSL_KEYWORDS = ( + "key_file", + "cert_file", + "cert_reqs", + "ca_certs", + "ssl_version", + "ca_cert_dir", + "ssl_context", + "key_password", +) # All known keyword arguments that could be provided to the pool manager, its # pools, or the underlying connections. This is used to construct a pool key. _key_fields = ( - 'key_scheme', # str - 'key_host', # str - 'key_port', # int - 'key_timeout', # int or float or Timeout - 'key_retries', # int or Retry - 'key_strict', # bool - 'key_block', # bool - 'key_source_address', # str - 'key_key_file', # str - 'key_cert_file', # str - 'key_cert_reqs', # str - 'key_ca_certs', # str - 'key_ssl_version', # str - 'key_ca_cert_dir', # str - 'key_ssl_context', # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext - 'key_maxsize', # int - 'key_headers', # dict - 'key__proxy', # parsed proxy url - 'key__proxy_headers', # dict - 'key_socket_options', # list of (level (int), optname (int), value (int or str)) tuples - 'key__socks_options', # dict - 'key_assert_hostname', # bool or string - 'key_assert_fingerprint', # str - 'key_server_hostname', #str + "key_scheme", # str + "key_host", # str + "key_port", # int + "key_timeout", # int or float or Timeout + "key_retries", # int or Retry + "key_strict", # bool + "key_block", # bool + "key_source_address", # str + "key_key_file", # str + "key_key_password", # str + "key_cert_file", # str + "key_cert_reqs", # str + "key_ca_certs", # str + "key_ssl_version", # str + "key_ca_cert_dir", # str + "key_ssl_context", # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext + "key_maxsize", # int + "key_headers", # dict + "key__proxy", # parsed proxy url + "key__proxy_headers", # dict + "key_socket_options", # list of (level (int), optname (int), value (int or str)) tuples + "key__socks_options", # dict + "key_assert_hostname", # bool or 
string + "key_assert_fingerprint", # str + "key_server_hostname", # str ) #: The namedtuple class used to construct keys for the connection pool. #: All custom key schemes should include the fields in this key at a minimum. -PoolKey = collections.namedtuple('PoolKey', _key_fields) +PoolKey = collections.namedtuple("PoolKey", _key_fields) def _default_key_normalizer(key_class, request_context): @@ -77,24 +93,24 @@ def _default_key_normalizer(key_class, request_context): """ # Since we mutate the dictionary, make a copy first context = request_context.copy() - context['scheme'] = context['scheme'].lower() - context['host'] = context['host'].lower() + context["scheme"] = context["scheme"].lower() + context["host"] = context["host"].lower() # These are both dictionaries and need to be transformed into frozensets - for key in ('headers', '_proxy_headers', '_socks_options'): + for key in ("headers", "_proxy_headers", "_socks_options"): if key in context and context[key] is not None: context[key] = frozenset(context[key].items()) # The socket_options key may be a list and needs to be transformed into a # tuple. - socket_opts = context.get('socket_options') + socket_opts = context.get("socket_options") if socket_opts is not None: - context['socket_options'] = tuple(socket_opts) + context["socket_options"] = tuple(socket_opts) # Map the kwargs to the names in the namedtuple - this is necessary since # namedtuples can't have fields starting with '_'. for key in list(context.keys()): - context['key_' + key] = context.pop(key) + context["key_" + key] = context.pop(key) # Default to ``None`` for keys missing from the context for field in key_class._fields: @@ -109,14 +125,11 @@ def _default_key_normalizer(key_class, request_context): #: Each PoolManager makes a copy of this dictionary so they can be configured #: globally here, or individually on the instance. 
key_fn_by_scheme = { - 'http': functools.partial(_default_key_normalizer, PoolKey), - 'https': functools.partial(_default_key_normalizer, PoolKey), + "http": functools.partial(_default_key_normalizer, PoolKey), + "https": functools.partial(_default_key_normalizer, PoolKey), } -pool_classes_by_scheme = { - 'http': HTTPConnectionPool, - 'https': HTTPSConnectionPool, -} +pool_classes_by_scheme = {"http": HTTPConnectionPool, "https": HTTPSConnectionPool} class PoolManager(RequestMethods): @@ -152,8 +165,7 @@ class PoolManager(RequestMethods): def __init__(self, num_pools=10, headers=None, **connection_pool_kw): RequestMethods.__init__(self, headers) self.connection_pool_kw = connection_pool_kw - self.pools = RecentlyUsedContainer(num_pools, - dispose_func=lambda p: p.close()) + self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close()) # Locally set the pool classes and keys so other PoolManagers can # override them. @@ -186,10 +198,10 @@ class PoolManager(RequestMethods): # this function has historically only used the scheme, host, and port # in the positional args. When an API change is acceptable these can # be removed. - for key in ('scheme', 'host', 'port'): + for key in ("scheme", "host", "port"): request_context.pop(key, None) - if scheme == 'http': + if scheme == "http": for kw in SSL_KEYWORDS: request_context.pop(kw, None) @@ -204,7 +216,7 @@ class PoolManager(RequestMethods): """ self.pools.clear() - def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None): + def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None): """ Get a :class:`ConnectionPool` based on the host, port, and scheme. 
@@ -219,11 +231,11 @@ class PoolManager(RequestMethods): raise LocationValueError("No host specified.") request_context = self._merge_pool_kwargs(pool_kwargs) - request_context['scheme'] = scheme or 'http' + request_context["scheme"] = scheme or "http" if not port: - port = port_by_scheme.get(request_context['scheme'].lower(), 80) - request_context['port'] = port - request_context['host'] = host + port = port_by_scheme.get(request_context["scheme"].lower(), 80) + request_context["port"] = port + request_context["host"] = host return self.connection_from_context(request_context) @@ -234,7 +246,7 @@ class PoolManager(RequestMethods): ``request_context`` must at least contain the ``scheme`` key and its value must be a key in ``key_fn_by_scheme`` instance variable. """ - scheme = request_context['scheme'].lower() + scheme = request_context["scheme"].lower() pool_key_constructor = self.key_fn_by_scheme[scheme] pool_key = pool_key_constructor(request_context) @@ -256,9 +268,9 @@ class PoolManager(RequestMethods): return pool # Make a fresh ConnectionPool of the desired type - scheme = request_context['scheme'] - host = request_context['host'] - port = request_context['port'] + scheme = request_context["scheme"] + host = request_context["host"] + port = request_context["port"] pool = self._new_pool(scheme, host, port, request_context=request_context) self.pools[pool_key] = pool @@ -276,8 +288,9 @@ class PoolManager(RequestMethods): not used. 
""" u = parse_url(url) - return self.connection_from_host(u.host, port=u.port, scheme=u.scheme, - pool_kwargs=pool_kwargs) + return self.connection_from_host( + u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs + ) def _merge_pool_kwargs(self, override): """ @@ -311,11 +324,11 @@ class PoolManager(RequestMethods): u = parse_url(url) conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme) - kw['assert_same_host'] = False - kw['redirect'] = False + kw["assert_same_host"] = False + kw["redirect"] = False - if 'headers' not in kw: - kw['headers'] = self.headers.copy() + if "headers" not in kw: + kw["headers"] = self.headers.copy() if self.proxy is not None and u.scheme == "http": response = conn.urlopen(method, url, **kw) @@ -331,31 +344,37 @@ class PoolManager(RequestMethods): # RFC 7231, Section 6.4.4 if response.status == 303: - method = 'GET' + method = "GET" - retries = kw.get('retries') + retries = kw.get("retries") if not isinstance(retries, Retry): retries = Retry.from_int(retries, redirect=redirect) # Strip headers marked as unsafe to forward to the redirected location. # Check remove_headers_on_redirect to avoid a potential network call within # conn.is_same_host() which may use socket.gethostbyname() in the future. 
- if (retries.remove_headers_on_redirect - and not conn.is_same_host(redirect_location)): - for header in retries.remove_headers_on_redirect: - kw['headers'].pop(header, None) + if retries.remove_headers_on_redirect and not conn.is_same_host( + redirect_location + ): + headers = list(six.iterkeys(kw["headers"])) + for header in headers: + if header.lower() in retries.remove_headers_on_redirect: + kw["headers"].pop(header, None) try: retries = retries.increment(method, url, response=response, _pool=conn) except MaxRetryError: if retries.raise_on_redirect: + response.drain_conn() raise return response - kw['retries'] = retries - kw['redirect'] = redirect + kw["retries"] = retries + kw["redirect"] = redirect log.info("Redirecting %s -> %s", url, redirect_location) + + response.drain_conn() return self.urlopen(method, redirect_location, **kw) @@ -386,12 +405,21 @@ class ProxyManager(PoolManager): """ - def __init__(self, proxy_url, num_pools=10, headers=None, - proxy_headers=None, **connection_pool_kw): + def __init__( + self, + proxy_url, + num_pools=10, + headers=None, + proxy_headers=None, + **connection_pool_kw + ): if isinstance(proxy_url, HTTPConnectionPool): - proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host, - proxy_url.port) + proxy_url = "%s://%s:%i" % ( + proxy_url.scheme, + proxy_url.host, + proxy_url.port, + ) proxy = parse_url(proxy_url) if not proxy.port: port = port_by_scheme.get(proxy.scheme, 80) @@ -403,45 +431,59 @@ class ProxyManager(PoolManager): self.proxy = proxy self.proxy_headers = proxy_headers or {} - connection_pool_kw['_proxy'] = self.proxy - connection_pool_kw['_proxy_headers'] = self.proxy_headers + connection_pool_kw["_proxy"] = self.proxy + connection_pool_kw["_proxy_headers"] = self.proxy_headers - super(ProxyManager, self).__init__( - num_pools, headers, **connection_pool_kw) + super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw) - def connection_from_host(self, host, port=None, scheme='http', 
pool_kwargs=None): + def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None): if scheme == "https": return super(ProxyManager, self).connection_from_host( - host, port, scheme, pool_kwargs=pool_kwargs) + host, port, scheme, pool_kwargs=pool_kwargs + ) return super(ProxyManager, self).connection_from_host( - self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs) + self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs + ) def _set_proxy_headers(self, url, headers=None): """ Sets headers needed by proxies: specifically, the Accept and Host headers. Only sets headers not provided by the user. """ - headers_ = {'Accept': '*/*'} + headers_ = {"Accept": "*/*"} netloc = parse_url(url).netloc if netloc: - headers_['Host'] = netloc + headers_["Host"] = netloc if headers: headers_.update(headers) return headers_ + def _validate_proxy_scheme_url_selection(self, url_scheme): + if url_scheme == "https" and self.proxy.scheme == "https": + warnings.warn( + "Your proxy configuration specified an HTTPS scheme for the proxy. " + "Are you sure you want to use HTTPS to contact the proxy? " + "This most likely indicates an error in your configuration. " + "Read this issue for more info: " + "https://github.com/urllib3/urllib3/issues/1850", + InvalidProxyConfigurationWarning, + stacklevel=3, + ) + def urlopen(self, method, url, redirect=True, **kw): "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute." u = parse_url(url) + self._validate_proxy_scheme_url_selection(u.scheme) if u.scheme == "http": # For proxied HTTPS requests, httplib sets the necessary headers # on the CONNECT to the proxy. For HTTP, we'll definitely # need to set 'Host' at the very least. 
- headers = kw.get('headers', self.headers) - kw['headers'] = self._set_proxy_headers(url, headers) + headers = kw.get("headers", self.headers) + kw["headers"] = self._set_proxy_headers(url, headers) return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/request.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/request.py index 8f2f44bb..55f160bb 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/request.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/request.py @@ -4,7 +4,7 @@ from .filepost import encode_multipart_formdata from .packages.six.moves.urllib.parse import urlencode -__all__ = ['RequestMethods'] +__all__ = ["RequestMethods"] class RequestMethods(object): @@ -36,16 +36,25 @@ class RequestMethods(object): explicitly. """ - _encode_url_methods = {'DELETE', 'GET', 'HEAD', 'OPTIONS'} + _encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"} def __init__(self, headers=None): self.headers = headers or {} - def urlopen(self, method, url, body=None, headers=None, - encode_multipart=True, multipart_boundary=None, - **kw): # Abstract - raise NotImplementedError("Classes extending RequestMethods must implement " - "their own ``urlopen`` method.") + def urlopen( + self, + method, + url, + body=None, + headers=None, + encode_multipart=True, + multipart_boundary=None, + **kw + ): # Abstract + raise NotImplementedError( + "Classes extending RequestMethods must implement " + "their own ``urlopen`` method." 
+ ) def request(self, method, url, fields=None, headers=None, **urlopen_kw): """ @@ -60,19 +69,18 @@ class RequestMethods(object): """ method = method.upper() - urlopen_kw['request_url'] = url + urlopen_kw["request_url"] = url if method in self._encode_url_methods: - return self.request_encode_url(method, url, fields=fields, - headers=headers, - **urlopen_kw) + return self.request_encode_url( + method, url, fields=fields, headers=headers, **urlopen_kw + ) else: - return self.request_encode_body(method, url, fields=fields, - headers=headers, - **urlopen_kw) + return self.request_encode_body( + method, url, fields=fields, headers=headers, **urlopen_kw + ) - def request_encode_url(self, method, url, fields=None, headers=None, - **urlopen_kw): + def request_encode_url(self, method, url, fields=None, headers=None, **urlopen_kw): """ Make a request using :meth:`urlopen` with the ``fields`` encoded in the url. This is useful for request methods like GET, HEAD, DELETE, etc. @@ -80,17 +88,24 @@ class RequestMethods(object): if headers is None: headers = self.headers - extra_kw = {'headers': headers} + extra_kw = {"headers": headers} extra_kw.update(urlopen_kw) if fields: - url += '?' + urlencode(fields) + url += "?" + urlencode(fields) return self.urlopen(method, url, **extra_kw) - def request_encode_body(self, method, url, fields=None, headers=None, - encode_multipart=True, multipart_boundary=None, - **urlopen_kw): + def request_encode_body( + self, + method, + url, + fields=None, + headers=None, + encode_multipart=True, + multipart_boundary=None, + **urlopen_kw + ): """ Make a request using :meth:`urlopen` with the ``fields`` encoded in the body. This is useful for request methods like POST, PUT, PATCH, etc. 
@@ -129,22 +144,28 @@ class RequestMethods(object): if headers is None: headers = self.headers - extra_kw = {'headers': {}} + extra_kw = {"headers": {}} if fields: - if 'body' in urlopen_kw: + if "body" in urlopen_kw: raise TypeError( - "request got values for both 'fields' and 'body', can only specify one.") + "request got values for both 'fields' and 'body', can only specify one." + ) if encode_multipart: - body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary) + body, content_type = encode_multipart_formdata( + fields, boundary=multipart_boundary + ) else: - body, content_type = urlencode(fields), 'application/x-www-form-urlencoded' + body, content_type = ( + urlencode(fields), + "application/x-www-form-urlencoded", + ) - extra_kw['body'] = body - extra_kw['headers'] = {'Content-Type': content_type} + extra_kw["body"] = body + extra_kw["headers"] = {"Content-Type": content_type} - extra_kw['headers'].update(headers) + extra_kw["headers"].update(headers) extra_kw.update(urlopen_kw) return self.urlopen(method, url, **extra_kw) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/response.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/response.py index c112690b..7dc9b93c 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/response.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/response.py @@ -6,10 +6,21 @@ import logging from socket import timeout as SocketTimeout from socket import error as SocketError +try: + import brotli +except ImportError: + brotli = None + from ._collections import HTTPHeaderDict from .exceptions import ( - BodyNotHttplibCompatible, ProtocolError, DecodeError, ReadTimeoutError, - ResponseNotChunked, IncompleteRead, InvalidHeader + BodyNotHttplibCompatible, + ProtocolError, + DecodeError, + ReadTimeoutError, + ResponseNotChunked, + IncompleteRead, + InvalidHeader, 
+ HTTPError, ) from .packages.six import string_types as basestring, PY3 from .packages.six.moves import http_client as httplib @@ -20,10 +31,9 @@ log = logging.getLogger(__name__) class DeflateDecoder(object): - def __init__(self): self._first_try = True - self._data = b'' + self._data = b"" self._obj = zlib.decompressobj() def __getattr__(self, name): @@ -60,7 +70,6 @@ class GzipDecoderState(object): class GzipDecoder(object): - def __init__(self): self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) self._state = GzipDecoderState.FIRST_MEMBER @@ -90,6 +99,26 @@ class GzipDecoder(object): self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) +if brotli is not None: + + class BrotliDecoder(object): + # Supports both 'brotlipy' and 'Brotli' packages + # since they share an import name. The top branches + # are for 'brotlipy' and bottom branches for 'Brotli' + def __init__(self): + self._obj = brotli.Decompressor() + + def decompress(self, data): + if hasattr(self._obj, "decompress"): + return self._obj.decompress(data) + return self._obj.process(data) + + def flush(self): + if hasattr(self._obj, "flush"): + return self._obj.flush() + return b"" + + class MultiDecoder(object): """ From RFC7231: @@ -100,7 +129,7 @@ class MultiDecoder(object): """ def __init__(self, modes): - self._decoders = [_get_decoder(m.strip()) for m in modes.split(',')] + self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")] def flush(self): return self._decoders[0].flush() @@ -112,12 +141,15 @@ class MultiDecoder(object): def _get_decoder(mode): - if ',' in mode: + if "," in mode: return MultiDecoder(mode) - if mode == 'gzip': + if mode == "gzip": return GzipDecoder() + if brotli is not None and mode == "br": + return BrotliDecoder() + return DeflateDecoder() @@ -154,14 +186,31 @@ class HTTPResponse(io.IOBase): value of Content-Length header, if present. Otherwise, raise error. 
""" - CONTENT_DECODERS = ['gzip', 'deflate'] + CONTENT_DECODERS = ["gzip", "deflate"] + if brotli is not None: + CONTENT_DECODERS += ["br"] REDIRECT_STATUSES = [301, 302, 303, 307, 308] - def __init__(self, body='', headers=None, status=0, version=0, reason=None, - strict=0, preload_content=True, decode_content=True, - original_response=None, pool=None, connection=None, msg=None, - retries=None, enforce_content_length=False, - request_method=None, request_url=None): + def __init__( + self, + body="", + headers=None, + status=0, + version=0, + reason=None, + strict=0, + preload_content=True, + decode_content=True, + original_response=None, + pool=None, + connection=None, + msg=None, + retries=None, + enforce_content_length=False, + request_method=None, + request_url=None, + auto_close=True, + ): if isinstance(headers, HTTPHeaderDict): self.headers = headers @@ -174,6 +223,7 @@ class HTTPResponse(io.IOBase): self.decode_content = decode_content self.retries = retries self.enforce_content_length = enforce_content_length + self.auto_close = auto_close self._decoder = None self._body = None @@ -189,13 +239,13 @@ class HTTPResponse(io.IOBase): self._pool = pool self._connection = connection - if hasattr(body, 'read'): + if hasattr(body, "read"): self._fp = body # Are we using the chunked-style of transfer encoding? self.chunked = False self.chunk_left = None - tr_enc = self.headers.get('transfer-encoding', '').lower() + tr_enc = self.headers.get("transfer-encoding", "").lower() # Don't incur the penalty of creating a list and then discarding it encodings = (enc.strip() for enc in tr_enc.split(",")) if "chunked" in encodings: @@ -217,7 +267,7 @@ class HTTPResponse(io.IOBase): location. ``False`` if not a redirect status code. 
""" if self.status in self.REDIRECT_STATUSES: - return self.headers.get('location') + return self.headers.get("location") return False @@ -228,6 +278,17 @@ class HTTPResponse(io.IOBase): self._pool._put_conn(self._connection) self._connection = None + def drain_conn(self): + """ + Read and discard any remaining HTTP response data in the response connection. + + Unread data in the HTTPResponse connection blocks the connection from being released back to the pool. + """ + try: + self.read() + except (HTTPError, SocketError, BaseSSLError, HTTPException): + pass + @property def data(self): # For backwords-compat with earlier urllib3 0.4 and earlier. @@ -256,18 +317,20 @@ class HTTPResponse(io.IOBase): """ Set initial length value for Response content if available. """ - length = self.headers.get('content-length') + length = self.headers.get("content-length") if length is not None: if self.chunked: # This Response will fail with an IncompleteRead if it can't be # received as chunked. This method falls back to attempt reading # the response before raising an exception. - log.warning("Received response with both Content-Length and " - "Transfer-Encoding set. This is expressly forbidden " - "by RFC 7230 sec 3.3.2. Ignoring Content-Length and " - "attempting to process response as Transfer-Encoding: " - "chunked.") + log.warning( + "Received response with both Content-Length and " + "Transfer-Encoding set. This is expressly forbidden " + "by RFC 7230 sec 3.3.2. Ignoring Content-Length and " + "attempting to process response as Transfer-Encoding: " + "chunked." + ) return None try: @@ -276,10 +339,12 @@ class HTTPResponse(io.IOBase): # (e.g. Content-Length: 42, 42). This line ensures the values # are all valid ints and that as long as the `set` length is 1, # all values are the same. Otherwise, the header is invalid. 
- lengths = set([int(val) for val in length.split(',')]) + lengths = set([int(val) for val in length.split(",")]) if len(lengths) > 1: - raise InvalidHeader("Content-Length contained multiple " - "unmatching values (%s)" % length) + raise InvalidHeader( + "Content-Length contained multiple " + "unmatching values (%s)" % length + ) length = lengths.pop() except ValueError: length = None @@ -295,7 +360,7 @@ class HTTPResponse(io.IOBase): status = 0 # Check for responses that shouldn't include a body - if status in (204, 304) or 100 <= status < 200 or request_method == 'HEAD': + if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD": length = 0 return length @@ -306,29 +371,41 @@ class HTTPResponse(io.IOBase): """ # Note: content-encoding value should be case-insensitive, per RFC 7230 # Section 3.2 - content_encoding = self.headers.get('content-encoding', '').lower() + content_encoding = self.headers.get("content-encoding", "").lower() if self._decoder is None: if content_encoding in self.CONTENT_DECODERS: self._decoder = _get_decoder(content_encoding) - elif ',' in content_encoding: - encodings = [e.strip() for e in content_encoding.split(',') if e.strip() in self.CONTENT_DECODERS] + elif "," in content_encoding: + encodings = [ + e.strip() + for e in content_encoding.split(",") + if e.strip() in self.CONTENT_DECODERS + ] if len(encodings): self._decoder = _get_decoder(content_encoding) + DECODER_ERROR_CLASSES = (IOError, zlib.error) + if brotli is not None: + DECODER_ERROR_CLASSES += (brotli.error,) + def _decode(self, data, decode_content, flush_decoder): """ Decode the data passed in and potentially flush the decoder. 
""" + if not decode_content: + return data + try: - if decode_content and self._decoder: + if self._decoder: data = self._decoder.decompress(data) - except (IOError, zlib.error) as e: - content_encoding = self.headers.get('content-encoding', '').lower() + except self.DECODER_ERROR_CLASSES as e: + content_encoding = self.headers.get("content-encoding", "").lower() raise DecodeError( "Received response with content-encoding: %s, but " - "failed to decode it." % content_encoding, e) - - if flush_decoder and decode_content: + "failed to decode it." % content_encoding, + e, + ) + if flush_decoder: data += self._flush_decoder() return data @@ -339,10 +416,10 @@ class HTTPResponse(io.IOBase): being used. """ if self._decoder: - buf = self._decoder.decompress(b'') + buf = self._decoder.decompress(b"") return buf + self._decoder.flush() - return b'' + return b"" @contextmanager def _error_catcher(self): @@ -362,20 +439,20 @@ class HTTPResponse(io.IOBase): except SocketTimeout: # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but # there is yet no clean way to get at it from this context. - raise ReadTimeoutError(self._pool, None, 'Read timed out.') + raise ReadTimeoutError(self._pool, None, "Read timed out.") except BaseSSLError as e: # FIXME: Is there a better way to differentiate between SSLErrors? - if 'read operation timed out' not in str(e): # Defensive: + if "read operation timed out" not in str(e): # Defensive: # This shouldn't happen but just in case we're missing an edge # case, let's avoid swallowing SSL errors. raise - raise ReadTimeoutError(self._pool, None, 'Read timed out.') + raise ReadTimeoutError(self._pool, None, "Read timed out.") except (HTTPException, SocketError) as e: # This includes IncompleteRead. - raise ProtocolError('Connection broken: %r' % e, e) + raise ProtocolError("Connection broken: %r" % e, e) # If no exception is thrown, we should avoid cleaning up # unnecessarily. 
@@ -430,17 +507,19 @@ class HTTPResponse(io.IOBase): return flush_decoder = False - data = None + fp_closed = getattr(self._fp, "closed", False) with self._error_catcher(): if amt is None: # cStringIO doesn't like amt=None - data = self._fp.read() + data = self._fp.read() if not fp_closed else b"" flush_decoder = True else: cache_content = False - data = self._fp.read(amt) - if amt != 0 and not data: # Platform-specific: Buggy versions of Python. + data = self._fp.read(amt) if not fp_closed else b"" + if ( + amt != 0 and not data + ): # Platform-specific: Buggy versions of Python. # Close the connection when no data is returned # # This is redundant to what httplib/http.client _should_ @@ -450,7 +529,10 @@ class HTTPResponse(io.IOBase): # no harm in redundantly calling close. self._fp.close() flush_decoder = True - if self.enforce_content_length and self.length_remaining not in (0, None): + if self.enforce_content_length and self.length_remaining not in ( + 0, + None, + ): # This is an edge case that httplib failed to cover due # to concerns of backward compatibility. We're # addressing it here to make sure IncompleteRead is @@ -470,7 +552,7 @@ class HTTPResponse(io.IOBase): return data - def stream(self, amt=2**16, decode_content=None): + def stream(self, amt=2 ** 16, decode_content=None): """ A generator wrapper for the read() method. 
A call will block until ``amt`` bytes have been read from the connection or until the @@ -508,21 +590,24 @@ class HTTPResponse(io.IOBase): headers = r.msg if not isinstance(headers, HTTPHeaderDict): - if PY3: # Python 3 + if PY3: headers = HTTPHeaderDict(headers.items()) - else: # Python 2 + else: + # Python 2.7 headers = HTTPHeaderDict.from_httplib(headers) # HTTPResponse objects in Python 3 don't have a .strict attribute - strict = getattr(r, 'strict', 0) - resp = ResponseCls(body=r, - headers=headers, - status=r.status, - version=r.version, - reason=r.reason, - strict=strict, - original_response=r, - **response_kw) + strict = getattr(r, "strict", 0) + resp = ResponseCls( + body=r, + headers=headers, + status=r.status, + version=r.version, + reason=r.reason, + strict=strict, + original_response=r, + **response_kw + ) return resp # Backwards-compatibility methods for httplib.HTTPResponse @@ -544,13 +629,18 @@ class HTTPResponse(io.IOBase): if self._connection: self._connection.close() + if not self.auto_close: + io.IOBase.close(self) + @property def closed(self): - if self._fp is None: + if not self.auto_close: + return io.IOBase.closed.__get__(self) + elif self._fp is None: return True - elif hasattr(self._fp, 'isclosed'): + elif hasattr(self._fp, "isclosed"): return self._fp.isclosed() - elif hasattr(self._fp, 'closed'): + elif hasattr(self._fp, "closed"): return self._fp.closed else: return True @@ -561,11 +651,17 @@ class HTTPResponse(io.IOBase): elif hasattr(self._fp, "fileno"): return self._fp.fileno() else: - raise IOError("The file-like object this HTTPResponse is wrapped " - "around has no file descriptor") + raise IOError( + "The file-like object this HTTPResponse is wrapped " + "around has no file descriptor" + ) def flush(self): - if self._fp is not None and hasattr(self._fp, 'flush'): + if ( + self._fp is not None + and hasattr(self._fp, "flush") + and not getattr(self._fp, "closed", False) + ): return self._fp.flush() def readable(self): @@ -578,7 
+674,7 @@ class HTTPResponse(io.IOBase): if len(temp) == 0: return 0 else: - b[:len(temp)] = temp + b[: len(temp)] = temp return len(temp) def supports_chunked_reads(self): @@ -588,7 +684,7 @@ class HTTPResponse(io.IOBase): attribute. If it is present we assume it returns raw chunks as processed by read_chunked(). """ - return hasattr(self._fp, 'fp') + return hasattr(self._fp, "fp") def _update_chunk_length(self): # First, we'll figure out length of a chunk and then @@ -596,7 +692,7 @@ class HTTPResponse(io.IOBase): if self.chunk_left is not None: return line = self._fp.fp.readline() - line = line.split(b';', 1)[0] + line = line.split(b";", 1)[0] try: self.chunk_left = int(line, 16) except ValueError: @@ -645,11 +741,13 @@ class HTTPResponse(io.IOBase): if not self.chunked: raise ResponseNotChunked( "Response is not chunked. " - "Header 'transfer-encoding: chunked' is missing.") + "Header 'transfer-encoding: chunked' is missing." + ) if not self.supports_chunked_reads(): raise BodyNotHttplibCompatible( "Body should be httplib.HTTPResponse like. " - "It should have have an fp attribute which returns raw chunks.") + "It should have have an fp attribute which returns raw chunks." + ) with self._error_catcher(): # Don't bother reading the body of a HEAD request. @@ -667,8 +765,9 @@ class HTTPResponse(io.IOBase): if self.chunk_left == 0: break chunk = self._handle_chunk(amt) - decoded = self._decode(chunk, decode_content=decode_content, - flush_decoder=False) + decoded = self._decode( + chunk, decode_content=decode_content, flush_decoder=False + ) if decoded: yield decoded @@ -686,7 +785,7 @@ class HTTPResponse(io.IOBase): if not line: # Some sites may not end with '\r\n'. break - if line == b'\r\n': + if line == b"\r\n": break # We read everything; close the "file". 
@@ -703,3 +802,20 @@ class HTTPResponse(io.IOBase): return self.retries.history[-1].redirect_location else: return self._request_url + + def __iter__(self): + buffer = [] + for chunk in self.stream(decode_content=True): + if b"\n" in chunk: + chunk = chunk.split(b"\n") + yield b"".join(buffer) + chunk[0] + b"\n" + for x in chunk[1:-1]: + yield x + b"\n" + if chunk[-1]: + buffer = [chunk[-1]] + else: + buffer = [] + else: + buffer.append(chunk) + if buffer: + yield b"".join(buffer) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/__init__.py index 2f2770b6..a96c73a9 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/__init__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/__init__.py @@ -1,4 +1,5 @@ from __future__ import absolute_import + # For backwards compatibility, provide imports that used to be here. 
from .connection import is_connection_dropped from .request import make_headers @@ -12,43 +13,34 @@ from .ssl_ import ( resolve_cert_reqs, resolve_ssl_version, ssl_wrap_socket, + PROTOCOL_TLS, ) -from .timeout import ( - current_time, - Timeout, -) +from .timeout import current_time, Timeout from .retry import Retry -from .url import ( - get_host, - parse_url, - split_first, - Url, -) -from .wait import ( - wait_for_read, - wait_for_write -) +from .url import get_host, parse_url, split_first, Url +from .wait import wait_for_read, wait_for_write __all__ = ( - 'HAS_SNI', - 'IS_PYOPENSSL', - 'IS_SECURETRANSPORT', - 'SSLContext', - 'Retry', - 'Timeout', - 'Url', - 'assert_fingerprint', - 'current_time', - 'is_connection_dropped', - 'is_fp_closed', - 'get_host', - 'parse_url', - 'make_headers', - 'resolve_cert_reqs', - 'resolve_ssl_version', - 'split_first', - 'ssl_wrap_socket', - 'wait_for_read', - 'wait_for_write' + "HAS_SNI", + "IS_PYOPENSSL", + "IS_SECURETRANSPORT", + "SSLContext", + "PROTOCOL_TLS", + "Retry", + "Timeout", + "Url", + "assert_fingerprint", + "current_time", + "is_connection_dropped", + "is_fp_closed", + "get_host", + "parse_url", + "make_headers", + "resolve_cert_reqs", + "resolve_ssl_version", + "split_first", + "ssl_wrap_socket", + "wait_for_read", + "wait_for_write", ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/connection.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/connection.py index 5ad70b2f..86f0a3b0 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/connection.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/connection.py @@ -14,7 +14,7 @@ def is_connection_dropped(conn): # Platform-specific Note: For platforms like AppEngine, this will always return ``False`` to let the platform handle connection recycling transparently for us. 
""" - sock = getattr(conn, 'sock', False) + sock = getattr(conn, "sock", False) if sock is False: # Platform-specific: AppEngine return False if sock is None: # Connection already closed (such as by httplib). @@ -30,8 +30,12 @@ def is_connection_dropped(conn): # Platform-specific # library test suite. Added to its signature is only `socket_options`. # One additional modification is that we avoid binding to IPv6 servers # discovered in DNS if the system doesn't have IPv6 functionality. -def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, - source_address=None, socket_options=None): +def create_connection( + address, + timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + source_address=None, + socket_options=None, +): """Connect to *address* and return the socket object. Convenience function. Connect to *address* (a 2-tuple ``(host, @@ -45,8 +49,8 @@ def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, """ host, port = address - if host.startswith('['): - host = host.strip('[]') + if host.startswith("["): + host = host.strip("[]") err = None # Using the value from allowed_gai_family() in the context of getaddrinfo lets @@ -117,7 +121,7 @@ def _has_ipv6(host): # has_ipv6 returns true if cPython was compiled with IPv6 support. # It does not tell us if the system has IPv6 support enabled. To # determine that we must bind to an IPv6 address. 
- # https://github.com/shazow/urllib3/pull/611 + # https://github.com/urllib3/urllib3/pull/611 # https://bugs.python.org/issue658327 try: sock = socket.socket(socket.AF_INET6) @@ -131,4 +135,4 @@ def _has_ipv6(host): return has_ipv6 -HAS_IPV6 = _has_ipv6('::1') +HAS_IPV6 = _has_ipv6("::1") diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/request.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/request.py index 3ddfcd55..3b7bb54d 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/request.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/request.py @@ -4,12 +4,25 @@ from base64 import b64encode from ..packages.six import b, integer_types from ..exceptions import UnrewindableBodyError -ACCEPT_ENCODING = 'gzip,deflate' +ACCEPT_ENCODING = "gzip,deflate" +try: + import brotli as _unused_module_brotli # noqa: F401 +except ImportError: + pass +else: + ACCEPT_ENCODING += ",br" + _FAILEDTELL = object() -def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, - basic_auth=None, proxy_basic_auth=None, disable_cache=None): +def make_headers( + keep_alive=None, + accept_encoding=None, + user_agent=None, + basic_auth=None, + proxy_basic_auth=None, + disable_cache=None, +): """ Shortcuts for generating request headers. 
@@ -49,27 +62,27 @@ def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, if isinstance(accept_encoding, str): pass elif isinstance(accept_encoding, list): - accept_encoding = ','.join(accept_encoding) + accept_encoding = ",".join(accept_encoding) else: accept_encoding = ACCEPT_ENCODING - headers['accept-encoding'] = accept_encoding + headers["accept-encoding"] = accept_encoding if user_agent: - headers['user-agent'] = user_agent + headers["user-agent"] = user_agent if keep_alive: - headers['connection'] = 'keep-alive' + headers["connection"] = "keep-alive" if basic_auth: - headers['authorization'] = 'Basic ' + \ - b64encode(b(basic_auth)).decode('utf-8') + headers["authorization"] = "Basic " + b64encode(b(basic_auth)).decode("utf-8") if proxy_basic_auth: - headers['proxy-authorization'] = 'Basic ' + \ - b64encode(b(proxy_basic_auth)).decode('utf-8') + headers["proxy-authorization"] = "Basic " + b64encode( + b(proxy_basic_auth) + ).decode("utf-8") if disable_cache: - headers['cache-control'] = 'no-cache' + headers["cache-control"] = "no-cache" return headers @@ -81,7 +94,7 @@ def set_file_position(body, pos): """ if pos is not None: rewind_body(body, pos) - elif getattr(body, 'tell', None) is not None: + elif getattr(body, "tell", None) is not None: try: pos = body.tell() except (IOError, OSError): @@ -103,16 +116,20 @@ def rewind_body(body, body_pos): :param int pos: Position to seek to in file. """ - body_seek = getattr(body, 'seek', None) + body_seek = getattr(body, "seek", None) if body_seek is not None and isinstance(body_pos, integer_types): try: body_seek(body_pos) except (IOError, OSError): - raise UnrewindableBodyError("An error occurred when rewinding request " - "body for redirect/retry.") + raise UnrewindableBodyError( + "An error occurred when rewinding request body for redirect/retry." 
+ ) elif body_pos is _FAILEDTELL: - raise UnrewindableBodyError("Unable to record file position for rewinding " - "request body during a redirect/retry.") + raise UnrewindableBodyError( + "Unable to record file position for rewinding " + "request body during a redirect/retry." + ) else: - raise ValueError("body_pos must be of type integer, " - "instead it was %s." % type(body_pos)) + raise ValueError( + "body_pos must be of type integer, instead it was %s." % type(body_pos) + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/response.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/response.py index 3d548648..715868dd 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/response.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/response.py @@ -52,11 +52,10 @@ def assert_header_parsing(headers): # This will fail silently if we pass in the wrong kind of parameter. # To make debugging easier add an explicit check. 
if not isinstance(headers, httplib.HTTPMessage): - raise TypeError('expected httplib.Message, got {0}.'.format( - type(headers))) + raise TypeError("expected httplib.Message, got {0}.".format(type(headers))) - defects = getattr(headers, 'defects', None) - get_payload = getattr(headers, 'get_payload', None) + defects = getattr(headers, "defects", None) + get_payload = getattr(headers, "get_payload", None) unparsed_data = None if get_payload: @@ -84,4 +83,4 @@ def is_response_to_head(response): method = response._method if isinstance(method, int): # Platform-specific: Appengine return method == 3 - return method.upper() == 'HEAD' + return method.upper() == "HEAD" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/retry.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/retry.py index e7d0abd6..ee30c91b 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/retry.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/retry.py @@ -13,6 +13,7 @@ from ..exceptions import ( ReadTimeoutError, ResponseError, InvalidHeader, + ProxyError, ) from ..packages import six @@ -21,8 +22,9 @@ log = logging.getLogger(__name__) # Data structure for representing the metadata of requests that result in a retry. -RequestHistory = namedtuple('RequestHistory', ["method", "url", "error", - "status", "redirect_location"]) +RequestHistory = namedtuple( + "RequestHistory", ["method", "url", "error", "status", "redirect_location"] +) class Retry(object): @@ -146,21 +148,33 @@ class Retry(object): request. 
""" - DEFAULT_METHOD_WHITELIST = frozenset([ - 'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE']) + DEFAULT_METHOD_WHITELIST = frozenset( + ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"] + ) RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) - DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(['Authorization']) + DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(["Authorization"]) #: Maximum backoff time. BACKOFF_MAX = 120 - def __init__(self, total=10, connect=None, read=None, redirect=None, status=None, - method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None, - backoff_factor=0, raise_on_redirect=True, raise_on_status=True, - history=None, respect_retry_after_header=True, - remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST): + def __init__( + self, + total=10, + connect=None, + read=None, + redirect=None, + status=None, + method_whitelist=DEFAULT_METHOD_WHITELIST, + status_forcelist=None, + backoff_factor=0, + raise_on_redirect=True, + raise_on_status=True, + history=None, + respect_retry_after_header=True, + remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST, + ): self.total = total self.connect = connect @@ -179,19 +193,25 @@ class Retry(object): self.raise_on_status = raise_on_status self.history = history or tuple() self.respect_retry_after_header = respect_retry_after_header - self.remove_headers_on_redirect = remove_headers_on_redirect + self.remove_headers_on_redirect = frozenset( + [h.lower() for h in remove_headers_on_redirect] + ) def new(self, **kw): params = dict( total=self.total, - connect=self.connect, read=self.read, redirect=self.redirect, status=self.status, + connect=self.connect, + read=self.read, + redirect=self.redirect, + status=self.status, method_whitelist=self.method_whitelist, status_forcelist=self.status_forcelist, backoff_factor=self.backoff_factor, raise_on_redirect=self.raise_on_redirect, raise_on_status=self.raise_on_status, history=self.history, - 
remove_headers_on_redirect=self.remove_headers_on_redirect + remove_headers_on_redirect=self.remove_headers_on_redirect, + respect_retry_after_header=self.respect_retry_after_header, ) params.update(kw) return type(self)(**params) @@ -216,8 +236,11 @@ class Retry(object): :rtype: float """ # We want to consider only the last consecutive errors sequence (Ignore redirects). - consecutive_errors_len = len(list(takewhile(lambda x: x.redirect_location is None, - reversed(self.history)))) + consecutive_errors_len = len( + list( + takewhile(lambda x: x.redirect_location is None, reversed(self.history)) + ) + ) if consecutive_errors_len <= 1: return 0 @@ -273,7 +296,7 @@ class Retry(object): this method will return immediately. """ - if response: + if self.respect_retry_after_header and response: slept = self.sleep_for_retry(response) if slept: return @@ -284,6 +307,8 @@ class Retry(object): """ Errors when we're fairly sure that the server did not receive the request, so it should be safe to retry. """ + if isinstance(err, ProxyError): + err = err.original_error return isinstance(err, ConnectTimeoutError) def _is_read_error(self, err): @@ -314,8 +339,12 @@ class Retry(object): if self.status_forcelist and status_code in self.status_forcelist: return True - return (self.total and self.respect_retry_after_header and - has_retry_after and (status_code in self.RETRY_AFTER_STATUS_CODES)) + return ( + self.total + and self.respect_retry_after_header + and has_retry_after + and (status_code in self.RETRY_AFTER_STATUS_CODES) + ) def is_exhausted(self): """ Are we out of retries? """ @@ -326,8 +355,15 @@ class Retry(object): return min(retry_counts) < 0 - def increment(self, method=None, url=None, response=None, error=None, - _pool=None, _stacktrace=None): + def increment( + self, + method=None, + url=None, + response=None, + error=None, + _pool=None, + _stacktrace=None, + ): """ Return a new Retry object with incremented retry counters. 
:param response: A response object, or None, if the server did not @@ -350,7 +386,7 @@ class Retry(object): read = self.read redirect = self.redirect status_count = self.status - cause = 'unknown' + cause = "unknown" status = None redirect_location = None @@ -372,7 +408,7 @@ class Retry(object): # Redirect retry? if redirect is not None: redirect -= 1 - cause = 'too many redirects' + cause = "too many redirects" redirect_location = response.get_redirect_location() status = response.status @@ -383,16 +419,21 @@ class Retry(object): if response and response.status: if status_count is not None: status_count -= 1 - cause = ResponseError.SPECIFIC_ERROR.format( - status_code=response.status) + cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status) status = response.status - history = self.history + (RequestHistory(method, url, error, status, redirect_location),) + history = self.history + ( + RequestHistory(method, url, error, status, redirect_location), + ) new_retry = self.new( total=total, - connect=connect, read=read, redirect=redirect, status=status_count, - history=history) + connect=connect, + read=read, + redirect=redirect, + status=status_count, + history=history, + ) if new_retry.is_exhausted(): raise MaxRetryError(_pool, url, error or ResponseError(cause)) @@ -402,9 +443,10 @@ class Retry(object): return new_retry def __repr__(self): - return ('{cls.__name__}(total={self.total}, connect={self.connect}, ' - 'read={self.read}, redirect={self.redirect}, status={self.status})').format( - cls=type(self), self=self) + return ( + "{cls.__name__}(total={self.total}, connect={self.connect}, " + "read={self.read}, redirect={self.redirect}, status={self.status})" + ).format(cls=type(self), self=self) # For backwards compatibility (equivalent to pre-v1.9): diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/ssl_.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/ssl_.py 
index dfc553ff..d3b463d4 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/ssl_.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/ssl_.py @@ -2,11 +2,12 @@ from __future__ import absolute_import import errno import warnings import hmac -import socket +import sys from binascii import hexlify, unhexlify from hashlib import md5, sha1, sha256 +from .url import IPV4_RE, BRACELESS_IPV6_ADDRZ_RE from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning from ..packages import six @@ -17,11 +18,7 @@ IS_PYOPENSSL = False IS_SECURETRANSPORT = False # Maps the length of a digest to a possible hash function producing this digest -HASHFUNC_MAP = { - 32: md5, - 40: sha1, - 64: sha256, -} +HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256} def _const_compare_digest_backport(a, b): @@ -37,17 +34,27 @@ def _const_compare_digest_backport(a, b): return result == 0 -_const_compare_digest = getattr(hmac, 'compare_digest', - _const_compare_digest_backport) - +_const_compare_digest = getattr(hmac, "compare_digest", _const_compare_digest_backport) try: # Test for SSL features import ssl - from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23 + from ssl import wrap_socket, CERT_REQUIRED from ssl import HAS_SNI # Has SNI? except ImportError: pass +try: # Platform-specific: Python 3.6 + from ssl import PROTOCOL_TLS + + PROTOCOL_SSLv23 = PROTOCOL_TLS +except ImportError: + try: + from ssl import PROTOCOL_SSLv23 as PROTOCOL_TLS + + PROTOCOL_SSLv23 = PROTOCOL_TLS + except ImportError: + PROTOCOL_SSLv23 = PROTOCOL_TLS = 2 + try: from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION @@ -56,25 +63,6 @@ except ImportError: OP_NO_COMPRESSION = 0x20000 -# Python 2.7 doesn't have inet_pton on non-Linux so we fallback on inet_aton in -# those cases. This means that we can only detect IPv4 addresses in this case. 
-if hasattr(socket, 'inet_pton'): - inet_pton = socket.inet_pton -else: - # Maybe we can use ipaddress if the user has urllib3[secure]? - try: - from pip._vendor import ipaddress - - def inet_pton(_, host): - if isinstance(host, bytes): - host = host.decode('ascii') - return ipaddress.ip_address(host) - - except ImportError: # Platform-specific: Non-Linux - def inet_pton(_, host): - return socket.inet_aton(host) - - # A secure default. # Sources for more information on TLS ciphers: # @@ -83,36 +71,37 @@ else: # - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ # # The general intent is: -# - Prefer TLS 1.3 cipher suites # - prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE), # - prefer ECDHE over DHE for better performance, # - prefer any AES-GCM and ChaCha20 over any AES-CBC for better performance and # security, # - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common, -# - disable NULL authentication, MD5 MACs and DSS for security reasons. -DEFAULT_CIPHERS = ':'.join([ - 'TLS13-AES-256-GCM-SHA384', - 'TLS13-CHACHA20-POLY1305-SHA256', - 'TLS13-AES-128-GCM-SHA256', - 'ECDH+AESGCM', - 'ECDH+CHACHA20', - 'DH+AESGCM', - 'DH+CHACHA20', - 'ECDH+AES256', - 'DH+AES256', - 'ECDH+AES128', - 'DH+AES', - 'RSA+AESGCM', - 'RSA+AES', - '!aNULL', - '!eNULL', - '!MD5', -]) +# - disable NULL authentication, MD5 MACs, DSS, and other +# insecure ciphers for security reasons. +# - NOTE: TLS 1.3 cipher suites are managed through a different interface +# not exposed by CPython (yet!) and are enabled by default if they're available. +DEFAULT_CIPHERS = ":".join( + [ + "ECDHE+AESGCM", + "ECDHE+CHACHA20", + "DHE+AESGCM", + "DHE+CHACHA20", + "ECDH+AESGCM", + "DH+AESGCM", + "ECDH+AES", + "DH+AES", + "RSA+AESGCM", + "RSA+AES", + "!aNULL", + "!eNULL", + "!MD5", + "!DSS", + ] +) try: from ssl import SSLContext # Modern SSL? 
except ImportError: - import sys class SSLContext(object): # Platform-specific: Python 2 def __init__(self, protocol_version): @@ -130,32 +119,35 @@ except ImportError: self.certfile = certfile self.keyfile = keyfile - def load_verify_locations(self, cafile=None, capath=None): + def load_verify_locations(self, cafile=None, capath=None, cadata=None): self.ca_certs = cafile if capath is not None: raise SSLError("CA directories not supported in older Pythons") + if cadata is not None: + raise SSLError("CA data not supported in older Pythons") + def set_ciphers(self, cipher_suite): self.ciphers = cipher_suite def wrap_socket(self, socket, server_hostname=None, server_side=False): warnings.warn( - 'A true SSLContext object is not available. This prevents ' - 'urllib3 from configuring SSL appropriately and may cause ' - 'certain SSL connections to fail. You can upgrade to a newer ' - 'version of Python to solve this. For more information, see ' - 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' - '#ssl-warnings', - InsecurePlatformWarning + "A true SSLContext object is not available. This prevents " + "urllib3 from configuring SSL appropriately and may cause " + "certain SSL connections to fail. You can upgrade to a newer " + "version of Python to solve this. For more information, see " + "https://urllib3.readthedocs.io/en/latest/advanced-usage.html" + "#ssl-warnings", + InsecurePlatformWarning, ) kwargs = { - 'keyfile': self.keyfile, - 'certfile': self.certfile, - 'ca_certs': self.ca_certs, - 'cert_reqs': self.verify_mode, - 'ssl_version': self.protocol, - 'server_side': server_side, + "keyfile": self.keyfile, + "certfile": self.certfile, + "ca_certs": self.ca_certs, + "cert_reqs": self.verify_mode, + "ssl_version": self.protocol, + "server_side": server_side, } return wrap_socket(socket, ciphers=self.ciphers, **kwargs) @@ -170,12 +162,11 @@ def assert_fingerprint(cert, fingerprint): Fingerprint as string of hexdigits, can be interspersed by colons. 
""" - fingerprint = fingerprint.replace(':', '').lower() + fingerprint = fingerprint.replace(":", "").lower() digest_length = len(fingerprint) hashfunc = HASHFUNC_MAP.get(digest_length) if not hashfunc: - raise SSLError( - 'Fingerprint of invalid length: {0}'.format(fingerprint)) + raise SSLError("Fingerprint of invalid length: {0}".format(fingerprint)) # We need encode() here for py32; works on py2 and p33. fingerprint_bytes = unhexlify(fingerprint.encode()) @@ -183,15 +174,18 @@ def assert_fingerprint(cert, fingerprint): cert_digest = hashfunc(cert).digest() if not _const_compare_digest(cert_digest, fingerprint_bytes): - raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".' - .format(fingerprint, hexlify(cert_digest))) + raise SSLError( + 'Fingerprints did not match. Expected "{0}", got "{1}".'.format( + fingerprint, hexlify(cert_digest) + ) + ) def resolve_cert_reqs(candidate): """ Resolves the argument to a numeric constant, which can be passed to the wrap_socket function/method from the ssl module. - Defaults to :data:`ssl.CERT_NONE`. + Defaults to :data:`ssl.CERT_REQUIRED`. If given a string it is assumed to be the name of the constant in the :mod:`ssl` module or its abbreviation. (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. @@ -199,12 +193,12 @@ def resolve_cert_reqs(candidate): constant which can directly be passed to wrap_socket. 
""" if candidate is None: - return CERT_NONE + return CERT_REQUIRED if isinstance(candidate, str): res = getattr(ssl, candidate, None) if res is None: - res = getattr(ssl, 'CERT_' + candidate) + res = getattr(ssl, "CERT_" + candidate) return res return candidate @@ -215,19 +209,20 @@ def resolve_ssl_version(candidate): like resolve_cert_reqs """ if candidate is None: - return PROTOCOL_SSLv23 + return PROTOCOL_TLS if isinstance(candidate, str): res = getattr(ssl, candidate, None) if res is None: - res = getattr(ssl, 'PROTOCOL_' + candidate) + res = getattr(ssl, "PROTOCOL_" + candidate) return res return candidate -def create_urllib3_context(ssl_version=None, cert_reqs=None, - options=None, ciphers=None): +def create_urllib3_context( + ssl_version=None, cert_reqs=None, options=None, ciphers=None +): """All arguments have the same meaning as ``ssl_wrap_socket``. By default, this function does a lot of the same work that @@ -261,7 +256,7 @@ def create_urllib3_context(ssl_version=None, cert_reqs=None, Constructed SSLContext object with specified options :rtype: SSLContext """ - context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23) + context = SSLContext(ssl_version or PROTOCOL_TLS) context.set_ciphers(ciphers or DEFAULT_CIPHERS) @@ -280,18 +275,41 @@ def create_urllib3_context(ssl_version=None, cert_reqs=None, context.options |= options + # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is + # necessary for conditional client cert authentication with TLS 1.3. + # The attribute is None for OpenSSL <= 1.1.0 or does not exist in older + # versions of Python. 
We only enable on Python 3.7.4+ or if certificate + # verification is enabled to work around Python issue #37428 + # See: https://bugs.python.org/issue37428 + if (cert_reqs == ssl.CERT_REQUIRED or sys.version_info >= (3, 7, 4)) and getattr( + context, "post_handshake_auth", None + ) is not None: + context.post_handshake_auth = True + context.verify_mode = cert_reqs - if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2 + if ( + getattr(context, "check_hostname", None) is not None + ): # Platform-specific: Python 3.2 # We do our own verification, including fingerprints and alternative # hostnames. So disable it here context.check_hostname = False return context -def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, - ca_certs=None, server_hostname=None, - ssl_version=None, ciphers=None, ssl_context=None, - ca_cert_dir=None): +def ssl_wrap_socket( + sock, + keyfile=None, + certfile=None, + cert_reqs=None, + ca_certs=None, + server_hostname=None, + ssl_version=None, + ciphers=None, + ssl_context=None, + ca_cert_dir=None, + key_password=None, + ca_cert_data=None, +): """ All arguments except for server_hostname, ssl_context, and ca_cert_dir have the same meaning as they do when using :func:`ssl.wrap_socket`. @@ -307,18 +325,22 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, A directory containing CA certificates in multiple separate files, as supported by OpenSSL's -CApath flag or the capath argument to SSLContext.load_verify_locations(). + :param key_password: + Optional password if the keyfile is encrypted. + :param ca_cert_data: + Optional string containing CA certificates in PEM format suitable for + passing as the cadata parameter to SSLContext.load_verify_locations() """ context = ssl_context if context is None: # Note: This branch of code and all the variables in it are no longer # used by urllib3 itself. We should consider deprecating and removing # this code. 
- context = create_urllib3_context(ssl_version, cert_reqs, - ciphers=ciphers) + context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers) - if ca_certs or ca_cert_dir: + if ca_certs or ca_cert_dir or ca_cert_data: try: - context.load_verify_locations(ca_certs, ca_cert_dir) + context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data) except IOError as e: # Platform-specific: Python 2.7 raise SSLError(e) # Py33 raises FileNotFoundError which subclasses OSError @@ -327,55 +349,66 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, if e.errno == errno.ENOENT: raise SSLError(e) raise - elif getattr(context, 'load_default_certs', None) is not None: + + elif ssl_context is None and hasattr(context, "load_default_certs"): # try to load OS default certs; works well on Windows (require Python3.4+) context.load_default_certs() + # Attempt to detect if we get the goofy behavior of the + # keyfile being encrypted and OpenSSL asking for the + # passphrase via the terminal and instead error out. + if keyfile and key_password is None and _is_key_file_encrypted(keyfile): + raise SSLError("Client private key is encrypted, password is required") + if certfile: - context.load_cert_chain(certfile, keyfile) + if key_password is None: + context.load_cert_chain(certfile, keyfile) + else: + context.load_cert_chain(certfile, keyfile, key_password) # If we detect server_hostname is an IP address then the SNI # extension should not be used according to RFC3546 Section 3.1 # We shouldn't warn the user if SNI isn't available but we would # not be using SNI anyways due to IP address for server_hostname. 
- if ((server_hostname is not None and not is_ipaddress(server_hostname)) - or IS_SECURETRANSPORT): + if ( + server_hostname is not None and not is_ipaddress(server_hostname) + ) or IS_SECURETRANSPORT: if HAS_SNI and server_hostname is not None: return context.wrap_socket(sock, server_hostname=server_hostname) warnings.warn( - 'An HTTPS request has been made, but the SNI (Server Name ' - 'Indication) extension to TLS is not available on this platform. ' - 'This may cause the server to present an incorrect TLS ' - 'certificate, which can cause validation failures. You can upgrade to ' - 'a newer version of Python to solve this. For more information, see ' - 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' - '#ssl-warnings', - SNIMissingWarning + "An HTTPS request has been made, but the SNI (Server Name " + "Indication) extension to TLS is not available on this platform. " + "This may cause the server to present an incorrect TLS " + "certificate, which can cause validation failures. You can upgrade to " + "a newer version of Python to solve this. For more information, see " + "https://urllib3.readthedocs.io/en/latest/advanced-usage.html" + "#ssl-warnings", + SNIMissingWarning, ) return context.wrap_socket(sock) def is_ipaddress(hostname): - """Detects whether the hostname given is an IP address. + """Detects whether the hostname given is an IPv4 or IPv6 address. + Also detects IPv6 addresses with Zone IDs. :param str hostname: Hostname to examine. :return: True if the hostname is an IP address, False otherwise. """ - if six.PY3 and isinstance(hostname, bytes): + if not six.PY2 and isinstance(hostname, bytes): # IDN A-label bytes are ASCII compatible. 
- hostname = hostname.decode('ascii') + hostname = hostname.decode("ascii") + return bool(IPV4_RE.match(hostname) or BRACELESS_IPV6_ADDRZ_RE.match(hostname)) - families = [socket.AF_INET] - if hasattr(socket, 'AF_INET6'): - families.append(socket.AF_INET6) - for af in families: - try: - inet_pton(af, hostname) - except (socket.error, ValueError, OSError): - pass - else: - return True +def _is_key_file_encrypted(key_file): + """Detects if a key file is encrypted or not.""" + with open(key_file, "r") as f: + for line in f: + # Look for Proc-Type: 4,ENCRYPTED + if "ENCRYPTED" in line: + return True + return False diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/timeout.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/timeout.py index cec817e6..b61fea75 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/timeout.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/timeout.py @@ -1,4 +1,5 @@ from __future__ import absolute_import + # The default socket timeout, used by httplib to indicate that no timeout was # specified by the user from socket import _GLOBAL_DEFAULT_TIMEOUT @@ -45,19 +46,20 @@ class Timeout(object): :type total: integer, float, or None :param connect: - The maximum amount of time to wait for a connection attempt to a server - to succeed. Omitting the parameter will default the connect timeout to - the system default, probably `the global default timeout in socket.py + The maximum amount of time (in seconds) to wait for a connection + attempt to a server to succeed. Omitting the parameter will default the + connect timeout to the system default, probably `the global default + timeout in socket.py <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_. None will set an infinite timeout for connection attempts. 
:type connect: integer, float, or None :param read: - The maximum amount of time to wait between consecutive - read operations for a response from the server. Omitting - the parameter will default the read timeout to the system - default, probably `the global default timeout in socket.py + The maximum amount of time (in seconds) to wait between consecutive + read operations for a response from the server. Omitting the parameter + will default the read timeout to the system default, probably `the + global default timeout in socket.py <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_. None will set an infinite timeout. @@ -91,14 +93,21 @@ class Timeout(object): DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT def __init__(self, total=None, connect=_Default, read=_Default): - self._connect = self._validate_timeout(connect, 'connect') - self._read = self._validate_timeout(read, 'read') - self.total = self._validate_timeout(total, 'total') + self._connect = self._validate_timeout(connect, "connect") + self._read = self._validate_timeout(read, "read") + self.total = self._validate_timeout(total, "total") self._start_connect = None - def __str__(self): - return '%s(connect=%r, read=%r, total=%r)' % ( - type(self).__name__, self._connect, self._read, self.total) + def __repr__(self): + return "%s(connect=%r, read=%r, total=%r)" % ( + type(self).__name__, + self._connect, + self._read, + self.total, + ) + + # __str__ provided for backwards compatibility + __str__ = __repr__ @classmethod def _validate_timeout(cls, value, name): @@ -118,22 +127,31 @@ class Timeout(object): return value if isinstance(value, bool): - raise ValueError("Timeout cannot be a boolean value. It must " - "be an int, float or None.") + raise ValueError( + "Timeout cannot be a boolean value. It must " + "be an int, float or None." + ) try: float(value) except (TypeError, ValueError): - raise ValueError("Timeout value %s was %s, but it must be an " - "int, float or None." 
% (name, value)) + raise ValueError( + "Timeout value %s was %s, but it must be an " + "int, float or None." % (name, value) + ) try: if value <= 0: - raise ValueError("Attempted to set %s timeout to %s, but the " - "timeout cannot be set to a value less " - "than or equal to 0." % (name, value)) - except TypeError: # Python 3 - raise ValueError("Timeout value %s was %s, but it must be an " - "int, float or None." % (name, value)) + raise ValueError( + "Attempted to set %s timeout to %s, but the " + "timeout cannot be set to a value less " + "than or equal to 0." % (name, value) + ) + except TypeError: + # Python 3 + raise ValueError( + "Timeout value %s was %s, but it must be an " + "int, float or None." % (name, value) + ) return value @@ -165,8 +183,7 @@ class Timeout(object): # We can't use copy.deepcopy because that will also create a new object # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to # detect the user default. - return Timeout(connect=self._connect, read=self._read, - total=self.total) + return Timeout(connect=self._connect, read=self._read, total=self.total) def start_connect(self): """ Start the timeout clock, used during a connect() attempt @@ -182,14 +199,15 @@ class Timeout(object): def get_connect_duration(self): """ Gets the time elapsed since the call to :meth:`start_connect`. - :return: Elapsed time. + :return: Elapsed time in seconds. :rtype: float :raises urllib3.exceptions.TimeoutStateError: if you attempt to get duration for a timer that hasn't been started. """ if self._start_connect is None: - raise TimeoutStateError("Can't get connect duration for timer " - "that has not started.") + raise TimeoutStateError( + "Can't get connect duration for timer that has not started." + ) return current_time() - self._start_connect @property @@ -227,15 +245,16 @@ class Timeout(object): :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` has not yet been called on this object. 
""" - if (self.total is not None and - self.total is not self.DEFAULT_TIMEOUT and - self._read is not None and - self._read is not self.DEFAULT_TIMEOUT): + if ( + self.total is not None + and self.total is not self.DEFAULT_TIMEOUT + and self._read is not None + and self._read is not self.DEFAULT_TIMEOUT + ): # In case the connect timeout has not yet been established. if self._start_connect is None: return self._read - return max(0, min(self.total - self.get_connect_duration(), - self._read)) + return max(0, min(self.total - self.get_connect_duration(), self._read)) elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT: return max(0, self.total - self.get_connect_duration()) else: diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/url.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/url.py index 6b6f9968..0eb0b6a8 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/url.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/url.py @@ -1,34 +1,110 @@ from __future__ import absolute_import +import re from collections import namedtuple from ..exceptions import LocationParseError +from ..packages import six -url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'] +url_attrs = ["scheme", "auth", "host", "port", "path", "query", "fragment"] # We only want to normalize urls with an HTTP(S) scheme. # urllib3 infers URLs without a scheme (None) to be http. -NORMALIZABLE_SCHEMES = ('http', 'https', None) - - -class Url(namedtuple('Url', url_attrs)): +NORMALIZABLE_SCHEMES = ("http", "https", None) + +# Almost all of these patterns were derived from the +# 'rfc3986' module: https://github.com/python-hyper/rfc3986 +PERCENT_RE = re.compile(r"%[a-fA-F0-9]{2}") +SCHEME_RE = re.compile(r"^(?:[a-zA-Z][a-zA-Z0-9+-]*:|/)") +URI_RE = re.compile( + r"^(?:([a-zA-Z][a-zA-Z0-9+.-]*):)?" 
+ r"(?://([^\\/?#]*))?" + r"([^?#]*)" + r"(?:\?([^#]*))?" + r"(?:#(.*))?$", + re.UNICODE | re.DOTALL, +) + +IPV4_PAT = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}" +HEX_PAT = "[0-9A-Fa-f]{1,4}" +LS32_PAT = "(?:{hex}:{hex}|{ipv4})".format(hex=HEX_PAT, ipv4=IPV4_PAT) +_subs = {"hex": HEX_PAT, "ls32": LS32_PAT} +_variations = [ + # 6( h16 ":" ) ls32 + "(?:%(hex)s:){6}%(ls32)s", + # "::" 5( h16 ":" ) ls32 + "::(?:%(hex)s:){5}%(ls32)s", + # [ h16 ] "::" 4( h16 ":" ) ls32 + "(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)s", + # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32 + "(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)s", + # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32 + "(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)s", + # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32 + "(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s", + # [ *4( h16 ":" ) h16 ] "::" ls32 + "(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s", + # [ *5( h16 ":" ) h16 ] "::" h16 + "(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s", + # [ *6( h16 ":" ) h16 ] "::" + "(?:(?:%(hex)s:){0,6}%(hex)s)?::", +] + +UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~" +IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")" +ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+" +IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]" +REG_NAME_PAT = r"(?:[^\[\]%:/?#]|%[a-fA-F0-9]{2})*" +TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$") + +IPV4_RE = re.compile("^" + IPV4_PAT + "$") +IPV6_RE = re.compile("^" + IPV6_PAT + "$") +IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT + "$") +BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT[2:-2] + "$") +ZONE_ID_RE = re.compile("(" + ZONE_ID_PAT + r")\]$") + +SUBAUTHORITY_PAT = (u"^(?:(.*)@)?(%s|%s|%s)(?::([0-9]{0,5}))?$") % ( + REG_NAME_PAT, + IPV4_PAT, + IPV6_ADDRZ_PAT, +) +SUBAUTHORITY_RE = re.compile(SUBAUTHORITY_PAT, re.UNICODE | re.DOTALL) + +UNRESERVED_CHARS = set( + 
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-~" +) +SUB_DELIM_CHARS = set("!$&'()*+,;=") +USERINFO_CHARS = UNRESERVED_CHARS | SUB_DELIM_CHARS | {":"} +PATH_CHARS = USERINFO_CHARS | {"@", "/"} +QUERY_CHARS = FRAGMENT_CHARS = PATH_CHARS | {"?"} + + +class Url(namedtuple("Url", url_attrs)): """ - Datastructure for representing an HTTP URL. Used as a return value for + Data structure for representing an HTTP URL. Used as a return value for :func:`parse_url`. Both the scheme and host are normalized as they are both case-insensitive according to RFC 3986. """ + __slots__ = () - def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, - query=None, fragment=None): - if path and not path.startswith('/'): - path = '/' + path - if scheme: + def __new__( + cls, + scheme=None, + auth=None, + host=None, + port=None, + path=None, + query=None, + fragment=None, + ): + if path and not path.startswith("/"): + path = "/" + path + if scheme is not None: scheme = scheme.lower() - if host and scheme in NORMALIZABLE_SCHEMES: - host = host.lower() - return super(Url, cls).__new__(cls, scheme, auth, host, port, path, - query, fragment) + return super(Url, cls).__new__( + cls, scheme, auth, host, port, path, query, fragment + ) @property def hostname(self): @@ -38,10 +114,10 @@ class Url(namedtuple('Url', url_attrs)): @property def request_uri(self): """Absolute path including the query string.""" - uri = self.path or '/' + uri = self.path or "/" if self.query is not None: - uri += '?' + self.query + uri += "?" 
+ self.query return uri @@ -49,7 +125,7 @@ class Url(namedtuple('Url', url_attrs)): def netloc(self): """Network location including host and port""" if self.port: - return '%s:%d' % (self.host, self.port) + return "%s:%d" % (self.host, self.port) return self.host @property @@ -72,23 +148,23 @@ class Url(namedtuple('Url', url_attrs)): 'http://username:password@host.com:80/path?query#fragment' """ scheme, auth, host, port, path, query, fragment = self - url = '' + url = u"" # We use "is not None" we want things to happen with empty strings (or 0 port) if scheme is not None: - url += scheme + '://' + url += scheme + u"://" if auth is not None: - url += auth + '@' + url += auth + u"@" if host is not None: url += host if port is not None: - url += ':' + str(port) + url += u":" + str(port) if path is not None: url += path if query is not None: - url += '?' + query + url += u"?" + query if fragment is not None: - url += '#' + fragment + url += u"#" + fragment return url @@ -98,6 +174,8 @@ class Url(namedtuple('Url', url_attrs)): def split_first(s, delims): """ + .. deprecated:: 1.25 + Given a string and an iterable of delimiters, split on the first found delimiter. Return two split parts and the matched delimiter. @@ -124,15 +202,141 @@ def split_first(s, delims): min_delim = d if min_idx is None or min_idx < 0: - return s, '', None + return s, "", None + + return s[:min_idx], s[min_idx + 1 :], min_delim + + +def _encode_invalid_chars(component, allowed_chars, encoding="utf-8"): + """Percent-encodes a URI component without reapplying + onto an already percent-encoded component. + """ + if component is None: + return component + + component = six.ensure_text(component) + + # Normalize existing percent-encoded bytes. + # Try to see if the component we're encoding is already percent-encoded + # so we can skip all '%' characters but still encode all others. 
+ component, percent_encodings = PERCENT_RE.subn( + lambda match: match.group(0).upper(), component + ) + + uri_bytes = component.encode("utf-8", "surrogatepass") + is_percent_encoded = percent_encodings == uri_bytes.count(b"%") + encoded_component = bytearray() + + for i in range(0, len(uri_bytes)): + # Will return a single character bytestring on both Python 2 & 3 + byte = uri_bytes[i : i + 1] + byte_ord = ord(byte) + if (is_percent_encoded and byte == b"%") or ( + byte_ord < 128 and byte.decode() in allowed_chars + ): + encoded_component += byte + continue + encoded_component.extend(b"%" + (hex(byte_ord)[2:].encode().zfill(2).upper())) + + return encoded_component.decode(encoding) - return s[:min_idx], s[min_idx + 1:], min_delim + +def _remove_path_dot_segments(path): + # See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code + segments = path.split("/") # Turn the path into a list of segments + output = [] # Initialize the variable to use to store output + + for segment in segments: + # '.' is the current directory, so ignore it, it is superfluous + if segment == ".": + continue + # Anything other than '..', should be appended to the output + elif segment != "..": + output.append(segment) + # In this case segment == '..', if we can, we should pop the last + # element + elif output: + output.pop() + + # If the path starts with '/' and the output is empty or the first string + # is non-empty + if path.startswith("/") and (not output or output[0]): + output.insert(0, "") + + # If the path starts with '/.' or '/..' 
ensure we add one more empty + # string to add a trailing '/' + if path.endswith(("/.", "/..")): + output.append("") + + return "/".join(output) + + +def _normalize_host(host, scheme): + if host: + if isinstance(host, six.binary_type): + host = six.ensure_str(host) + + if scheme in NORMALIZABLE_SCHEMES: + is_ipv6 = IPV6_ADDRZ_RE.match(host) + if is_ipv6: + match = ZONE_ID_RE.search(host) + if match: + start, end = match.span(1) + zone_id = host[start:end] + + if zone_id.startswith("%25") and zone_id != "%25": + zone_id = zone_id[3:] + else: + zone_id = zone_id[1:] + zone_id = "%" + _encode_invalid_chars(zone_id, UNRESERVED_CHARS) + return host[:start].lower() + zone_id + host[end:] + else: + return host.lower() + elif not IPV4_RE.match(host): + return six.ensure_str( + b".".join([_idna_encode(label) for label in host.split(".")]) + ) + return host + + +def _idna_encode(name): + if name and any([ord(x) > 128 for x in name]): + try: + from pip._vendor import idna + except ImportError: + six.raise_from( + LocationParseError("Unable to parse URL without the 'idna' module"), + None, + ) + try: + return idna.encode(name.lower(), strict=True, std3_rules=True) + except idna.IDNAError: + six.raise_from( + LocationParseError(u"Name '%s' is not a valid IDNA label" % name), None + ) + return name.lower().encode("ascii") + + +def _encode_target(target): + """Percent-encodes a request target so that there are no invalid characters""" + path, query = TARGET_RE.match(target).groups() + target = _encode_invalid_chars(path, PATH_CHARS) + query = _encode_invalid_chars(query, QUERY_CHARS) + if query is not None: + target += "?" + query + return target def parse_url(url): """ Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is performed to parse incomplete urls. Fields not provided will be None. + This parser is RFC 3986 compliant. + + The parser logic and helper functions are based heavily on + work done in the ``rfc3986`` module. 
+ + :param str url: URL to parse into a :class:`.Url` namedtuple. Partly backwards-compatible with :mod:`urlparse`. @@ -145,81 +349,77 @@ def parse_url(url): >>> parse_url('/foo?bar') Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) """ - - # While this code has overlap with stdlib's urlparse, it is much - # simplified for our needs and less annoying. - # Additionally, this implementations does silly things to be optimal - # on CPython. - if not url: # Empty return Url() - scheme = None - auth = None - host = None - port = None - path = None - fragment = None - query = None - - # Scheme - if '://' in url: - scheme, url = url.split('://', 1) - - # Find the earliest Authority Terminator - # (http://tools.ietf.org/html/rfc3986#section-3.2) - url, path_, delim = split_first(url, ['/', '?', '#']) - - if delim: - # Reassemble the path - path = delim + path_ - - # Auth - if '@' in url: - # Last '@' denotes end of auth part - auth, url = url.rsplit('@', 1) - - # IPv6 - if url and url[0] == '[': - host, url = url.split(']', 1) - host += ']' - - # Port - if ':' in url: - _host, port = url.split(':', 1) - - if not host: - host = _host - - if port: - # If given, ports must be integers. No whitespace, no plus or - # minus prefixes, no non-integer digits such as ^2 (superscript). - if not port.isdigit(): - raise LocationParseError(url) - try: - port = int(port) - except ValueError: - raise LocationParseError(url) + source_url = url + if not SCHEME_RE.search(url): + url = "//" + url + + try: + scheme, authority, path, query, fragment = URI_RE.match(url).groups() + normalize_uri = scheme is None or scheme.lower() in NORMALIZABLE_SCHEMES + + if scheme: + scheme = scheme.lower() + + if authority: + auth, host, port = SUBAUTHORITY_RE.match(authority).groups() + if auth and normalize_uri: + auth = _encode_invalid_chars(auth, USERINFO_CHARS) + if port == "": + port = None else: - # Blank ports are cool, too. 
(rfc3986#section-3.2.3) - port = None + auth, host, port = None, None, None + + if port is not None: + port = int(port) + if not (0 <= port <= 65535): + raise LocationParseError(url) + + host = _normalize_host(host, scheme) - elif not host and url: - host = url + if normalize_uri and path: + path = _remove_path_dot_segments(path) + path = _encode_invalid_chars(path, PATH_CHARS) + if normalize_uri and query: + query = _encode_invalid_chars(query, QUERY_CHARS) + if normalize_uri and fragment: + fragment = _encode_invalid_chars(fragment, FRAGMENT_CHARS) + except (ValueError, AttributeError): + return six.raise_from(LocationParseError(source_url), None) + + # For the sake of backwards compatibility we put empty + # string values for path if there are any defined values + # beyond the path in the URL. + # TODO: Remove this when we break backwards compatibility. if not path: - return Url(scheme, auth, host, port, path, query, fragment) + if query is not None or fragment is not None: + path = "" + else: + path = None - # Fragment - if '#' in path: - path, fragment = path.split('#', 1) + # Ensure that each part of the URL is a `str` for + # backwards compatibility. + if isinstance(url, six.text_type): + ensure_func = six.ensure_text + else: + ensure_func = six.ensure_str - # Query - if '?' in path: - path, query = path.split('?', 1) + def ensure_type(x): + return x if x is None else ensure_func(x) - return Url(scheme, auth, host, port, path, query, fragment) + return Url( + scheme=ensure_type(scheme), + auth=ensure_type(auth), + host=ensure_type(host), + port=port, + path=ensure_type(path), + query=ensure_type(query), + fragment=ensure_type(fragment), + ) def get_host(url): @@ -227,4 +427,4 @@ def get_host(url): Deprecated. Use :func:`parse_url` instead. 
""" p = parse_url(url) - return p.scheme or 'http', p.hostname, p.port + return p.scheme or "http", p.hostname, p.port diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/wait.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/wait.py index 4db71baf..d71d2fd7 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/wait.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/urllib3/util/wait.py @@ -2,6 +2,7 @@ import errno from functools import partial import select import sys + try: from time import monotonic except ImportError: @@ -40,6 +41,8 @@ if sys.version_info >= (3, 5): # Modern Python, that retries syscalls by default def _retry_on_intr(fn, timeout): return fn(timeout) + + else: # Old and broken Pythons. def _retry_on_intr(fn, timeout): diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/vendor.txt b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/vendor.txt new file mode 100644 index 00000000..06fa1358 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pip/_vendor/vendor.txt @@ -0,0 +1,24 @@ +appdirs==1.4.4 +CacheControl==0.12.6 +colorama==0.4.3 +contextlib2==0.6.0.post1 +distlib==0.3.1 +distro==1.5.0 +html5lib==1.1 +ipaddress==1.0.23 # Only needed on 2.6 and 2.7 +msgpack==1.0.0 +packaging==20.4 +pep517==0.8.2 +progress==1.5 +pyparsing==2.4.7 +requests==2.24.0 + certifi==2020.06.20 + chardet==3.0.4 + idna==2.10 + urllib3==1.25.9 +resolvelib==0.4.0 +retrying==1.3.3 +setuptools==44.0.0 +six==1.15.0 +toml==0.10.1 +webencodings==0.5.1 diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/INSTALLER b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/INSTALLER similarity index 100% rename from 
Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/INSTALLER rename to Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/INSTALLER diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/METADATA b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/METADATA similarity index 99% rename from Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/METADATA rename to Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/METADATA index 21fbaa35..0afb33c5 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/METADATA +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: pyOpenRPA -Version: 1.1.14 +Version: 1.1.15 Summary: First open source RPA platform for business Home-page: https://gitlab.com/UnicodeLabs/OpenRPA Author: Ivan Maslov diff --git a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/RECORD b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/RECORD similarity index 97% rename from Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/RECORD rename to Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/RECORD index 4fbba60d..275189b8 100644 --- a/Resources/WPy32-3720/python-3.7.2/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/RECORD +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/RECORD @@ -1,8 +1,9 @@ -pyOpenRPA-1.1.14.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyOpenRPA-1.1.14.dist-info/METADATA,sha256=aOG2bCjUIdJa6gP7Meoa873LoWkxQhlllqL4y4EU5L0,3352 -pyOpenRPA-1.1.14.dist-info/RECORD,, 
-pyOpenRPA-1.1.14.dist-info/WHEEL,sha256=qB97nP5e4MrOsXW5bIU5cUn_KSVr10EV0l-GCHG9qNs,97 -pyOpenRPA-1.1.14.dist-info/top_level.txt,sha256=RPzwQXgYBRo_m5L3ZLs6Voh8aEkMeT29Xsul1w1qE0g,10 +pyOpenRPA-1.1.15.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pyOpenRPA-1.1.15.dist-info/METADATA,sha256=QZCz4uAPAscqSQmt9SgpXNqSEDBYuGlZg8r6k4KxKc0,3352 +pyOpenRPA-1.1.15.dist-info/RECORD,, +pyOpenRPA-1.1.15.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pyOpenRPA-1.1.15.dist-info/WHEEL,sha256=qB97nP5e4MrOsXW5bIU5cUn_KSVr10EV0l-GCHG9qNs,97 +pyOpenRPA-1.1.15.dist-info/top_level.txt,sha256=RPzwQXgYBRo_m5L3ZLs6Voh8aEkMeT29Xsul1w1qE0g,10 pyOpenRPA/.idea/inspectionProfiles/profiles_settings.xml,sha256=YXLFmX7rPNGcnKK1uX1uKYPN0fpgskYNe7t0BV7cqkY,174 pyOpenRPA/.idea/misc.xml,sha256=ySjeaQ1DfqxaRTlFGT_3zW5r9mWuwxoAK_AX4QiuAZM,203 pyOpenRPA/.idea/modules.xml,sha256=Q__U1JIA2cjxbLRXAv-SfYY00fZA0TNlpkkbY4s3ncg,277 @@ -18,7 +19,7 @@ pyOpenRPA/Orchestrator/RobotRDPActive/Clipboard.py,sha256=YB5HJL-Qf4IlVrFHyRv_ZM pyOpenRPA/Orchestrator/RobotRDPActive/Connector.py,sha256=MkxTVaOVITl1V3EvH3oNx2gbCx3EeRS9Gb_83rmjdjg,25553 pyOpenRPA/Orchestrator/RobotRDPActive/ConnectorExceptions.py,sha256=wwH9JOoMFFxDKQ7IyNyh1OkFkZ23o1cD8Jm3n31ycII,657 pyOpenRPA/Orchestrator/RobotRDPActive/Processor.py,sha256=HcysWMmxMxSjaUybqovoCZToGrvzC0WFSVZbw6nfa68,9254 -pyOpenRPA/Orchestrator/RobotRDPActive/RobotRDPActive.py,sha256=h41JsJu3ca68KV9F6oIhJhVPaH72m7TDvZIdEK4k-Xk,10301 +pyOpenRPA/Orchestrator/RobotRDPActive/RobotRDPActive.py,sha256=jCtHXExgRW0licn8K-xSO3tFd6P-4IFzp46TdS57vQ4,10726 pyOpenRPA/Orchestrator/RobotRDPActive/Scheduler.py,sha256=21N0ilFzWI1mj3X5S9tPMgwvG7BviuBxfTuqBY85Hy4,9144 pyOpenRPA/Orchestrator/RobotRDPActive/Template.rdp,sha256=JEMVYkEmNcfg_p8isdIyvj9E-2ZB5mj-R3MkcNMKxkA,2426 pyOpenRPA/Orchestrator/RobotRDPActive/Timer.py,sha256=y8--fUvg10qEFomecl_cmdWpdGjarZBlFpMbs_GvzoQ,1077 @@ -46,8 +47,8 @@ 
pyOpenRPA/Orchestrator/RobotScreenActive/__pycache__/__main__.cpython-37.pyc,, pyOpenRPA/Orchestrator/Server.py,sha256=kWMAUDeHlG4RX3lZR-pHBIpeZ6lxF_PJ_KDVENvbFK8,25403 pyOpenRPA/Orchestrator/ServerSettings.py,sha256=dK8pQzg2mcLPjbizOPCP7yHMMVLiTh8RbYVnJXn-Mvg,15640 pyOpenRPA/Orchestrator/Timer.py,sha256=HvYtEeH2Q5WVVjgds9XaBpWRmvZgwgBXurJDdVVq_T0,2097 -pyOpenRPA/Orchestrator/Web/Index.js,sha256=9_-DeipJ8Dx6RjGnySbk6q7FQbMnzmh0UH-O9LrOeGM,27786 -pyOpenRPA/Orchestrator/Web/Index.xhtml,sha256=dHxRf_eYWbsr504OpC-Ex6uz6AVw15mxXHYeaUiZSGQ,14267 +pyOpenRPA/Orchestrator/Web/Index.js,sha256=zdbj1ioNgxhprLpGIQ2YYraKUMUpkBiVoR-3GXGR5X0,28258 +pyOpenRPA/Orchestrator/Web/Index.xhtml,sha256=uRjdlkK8-yyrh4y3BkBYlFkRHzzPlHWz5dRh1FmG5QE,14260 pyOpenRPA/Orchestrator/Web/favicon.ico,sha256=6S8XwSQ_3FXPpaX6zYkf8uUewVXO9bHnrrDHEoWrEgw,112922 pyOpenRPA/Orchestrator/__init__.py,sha256=qVH8fEPgXk54rmy-ol0PnT8GF5OlGE0a8mExwJ4tFqY,124 pyOpenRPA/Orchestrator/__main__.py,sha256=cOd8WU77VGgzTZUB0WmWpPmdYyMZY1zVyuU9yx26MKs,144 @@ -313,6 +314,6 @@ pyOpenRPA/Tools/Terminator.py,sha256=VcjX3gFXiCGu3MMCidhrTNsmC9wsAqfjRJdTSU9fLnU pyOpenRPA/Tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 pyOpenRPA/Tools/__pycache__/Terminator.cpython-37.pyc,, pyOpenRPA/Tools/__pycache__/__init__.cpython-37.pyc,, -pyOpenRPA/__init__.py,sha256=zJQYGBDH-YV1iJmK03mujDcj8s95Ct5RaUAYji-Xdf0,175 +pyOpenRPA/__init__.py,sha256=EYr94WCIlVXY6RR7CMN4jtIYitfDcpxLYJGK5kylOSM,175 pyOpenRPA/__pycache__/__init__.cpython-37.pyc,, pyOpenRPA/test.txt,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/REQUESTED b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/WHEEL 
b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/WHEEL similarity index 100% rename from Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/WHEEL rename to Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/WHEEL diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/top_level.txt b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/top_level.txt similarity index 100% rename from Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.14.dist-info/top_level.txt rename to Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA-1.1.15.dist-info/top_level.txt diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA/Orchestrator/RobotRDPActive/RobotRDPActive.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA/Orchestrator/RobotRDPActive/RobotRDPActive.py index 34add348..f0c66ed9 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA/Orchestrator/RobotRDPActive/RobotRDPActive.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA/Orchestrator/RobotRDPActive/RobotRDPActive.py @@ -15,12 +15,18 @@ def RobotRDPActive(inGSettings): # Global error handler try: ######## Init the RDP List + lNewRDPList = {} for lRDPSessionKeyStrItem in mGSettingsRDPActiveDict["RDPList"]: lConfigurationItem = mGSettingsRDPActiveDict["RDPList"][lRDPSessionKeyStrItem] + lAddToNewRDPDict = True + if "SessionHex" not in lConfigurationItem: lAddToNewRDPDict = False # 2020.08.03 fix: Init the Session hex field. 
If no SessionHex - trash in structure - remove if lConfigurationItem["SessionHex"] is None or lConfigurationItem["SessionHex"] == "": # Minor fix - need for restore existed RDP sessions lConfigurationItem["SessionIsWindowExistBool"] = False # Flag that session is not started lConfigurationItem["SessionIsWindowResponsibleBool"] = False # Flag that session is not started lConfigurationItem["SessionHex"] = " 77777sdfsdf77777dsfdfsf77777777" # Flag that session is not started + if lAddToNewRDPDict: + lNewRDPList[lRDPSessionKeyStrItem] = lConfigurationItem + mGSettingsRDPActiveDict["RDPList"] = lNewRDPList # Update the structure ########## # Run monitor - main loop # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA/Orchestrator/Web/Index.js b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA/Orchestrator/Web/Index.js index 56c80ba0..17d52f94 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA/Orchestrator/Web/Index.js +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA/Orchestrator/Web/Index.js @@ -664,6 +664,14 @@ $(document).ready(function() { $(".openrpa-rdpactive-title").show() //Show section $(".openrpa-robotrdpactive-control-panel-general").show() //Show section } + //Turn on the restart PC button + if (lUACAsk(["Orchestrator","Controls","RestartPC"])) { + $(".openrpa-control-restartpc").show() //Show button + } + //Turn on the git update + restart orchestrator + if (lUACAsk(["Orchestrator","Controls","GITRestartOrchestrator"])) { + $(".openrpa-control-gitrestartorchestrator").show() //Show button + } }, dataType: "text" }); diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA/Orchestrator/Web/Index.xhtml b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA/Orchestrator/Web/Index.xhtml index bf4274db..672fda6c 100644 --- 
a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA/Orchestrator/Web/Index.xhtml +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA/Orchestrator/Web/Index.xhtml @@ -135,13 +135,13 @@ <i class="right arrow icon"></i> </div> </div> - <div class="ui animated button openrpa-control-restartorchestrator" onclick="mGlobal.Controller.OrchestratorGITPullRestart();" style="display: none; margin-top: 5px;"> + <div class="ui animated button openrpa-control-gitrestartorchestrator" onclick="mGlobal.Controller.OrchestratorGITPullRestart();" style="display: none; margin-top: 5px;"> <div class="visible content">Git pull + restart Orchestrator</div> <div class="hidden content"> <i class="right arrow icon"></i> </div> </div> - <div class="ui animated button openrpa-control-restartorchestrator red" onclick="mGlobal.Controller.PCRestart();" style="display: none; margin-top: 5px;"> + <div class="ui animated button openrpa-control-restartpc red" onclick="mGlobal.Controller.PCRestart();" style="display: none; margin-top: 5px;"> <div class="visible content">Restart PC</div> <div class="hidden content"> <i class="right arrow icon"></i> diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA/__init__.py index c16aa7c5..a9791f74 100644 --- a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA/__init__.py +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pyOpenRPA/__init__.py @@ -3,7 +3,7 @@ r""" The OpenRPA package (from UnicodeLabs) """ -__version__ = 'v1.1.14' +__version__ = 'v1.1.15' __all__ = [] __author__ = 'Ivan Maslov <ivan.maslov@unicodelabs.ru>' #from .Core import Robot \ No newline at end of file diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Scripts/pip.exe b/Resources/WPy64-3720/python-3.7.2.amd64/Scripts/pip.exe index 1bcff1a2972519891cc3398d6b4bcced58b6ea40..996a84675b3e96882215d1742360674ae425bf3f 
100644 GIT binary patch delta 32096 zcmeFad0Z4n*Dv1FFzmx1GRW?*DGDlz3kWEqgEk6^`-X}EagCx3ibgXe1BsK)JRz0| zY7#a3o|u?N(JX=+;7UwlOpHqQ#xaS}L~)(p_jETT&->>6-TTM=eC|JYlIEN`b!t0R zb?Vfq>h{<R9-n{WQInyOp3EF+eQ{{vs-I{1$9Fsy!_)CUA$;e^%kf8K_)`3nGOUUJ z74X;C=i<Ku{CSo>ey>d5q4IB1A(P?6MMVoKPfq>iXLDTfBOcuPo_7m6XjizHuADFL z!*Q2EdPgBQ4Q64Qp56gG$GLM{c{LlO8Isgj!(~wP4h{EGK46W8i||$WmA^c<RKsz} zZlJT@`9AI%u8w~)IxMiSWqySPEWcjDp8!0UnGYZ3FAbq&NRbL~g$?GrduJgOgir#T z!-x6HxuX=$52OtC2pCW{HxVI$y#t0ZZa_JulHAr^XZYTlCzHK4x_9g91r|~JYKn#v zQVt3Bs<>n?*4sTQ$;fe1Y9PnuI?Qp&f<q`29EMgu4JWyD9Jd#lb1p;8Zyb00;PK-d z-6Q#w6xQ86q31(|NO!2qf2Mnj(v1x5Oc(JRTjt)a_pT8rRJ<jKzo(qb6`OM%Bkt%w zX~_|rrRuK8Kx`7K>n)L@Rvf|Jb?>k7V&A$Otv!(!s!}?UKwfHg$`85r`KeMNT5b}L zSJxxX4=nmk4N}WJmrD@AgC_7Pr=<Y2iNezI2nES4x?*yCatCbHE|Xrklbup8ekrwe zr@|@Ox;V`z=o@7GWsZ2lEFO`x))0;ZE{HACPjdEWsgm7Ml~yVyh$5!Ej_iwEIf-`# zx7%%xLR253H{Hc!(ldIcV3dlEyj_jzUHPlv6>7m|@nDYlr68V^YP%8tNy@YWrJ;dV zv~Qd*+K!frXZ24tNSE)Tl_*{iKQq<QkdV&2CYMhM6Yrr4QXYgOH$=?7A`2v?-YxsG zkl2pa-i=aPn=2kcp=g>Ei$YNn3eqVBQ7Mn06q9~W_B^g6T^k5`xi(My(OE*-IcjAQ zo<>VPhX|!N|67)y26trnq0isz-XFP+a}*@H^3u<*4Abw?*7;~yjYqeHRH-7G#!x|A zR*rZmN1T<MBbEVuK*$llH_bK8G0ip=%szOWo%D$C-iRqFIQop9$gX)rrw+vItghEr zRY4T{X`DLO=Q^}eRIX!k+(N;T8#i6LjVcNv1w{nKv(kk?mh0KG-xp3wTppH!(P)k< zgbY6O#@M}&`T_JKPOY4!oJy+kVJ~?0=N(S=g=cU7W+WBQg2~Tic-zTzUh)1OGc;Uv zv&Adrw97ES$+Emcd%YD`c?13R(B(>Rs)4o^x<a0K)ur-GyqR)3rP*ajaI$q?vHZ15 z?0K&U>vLYH!w<Jyu7~ojaLFW>>{ip>8nkU6azQr7T!uwZ8K~|?u!>F69x$hzrnZ)v z9U~S>FV4Z_yY6zi8qIN~3z5cU*eoZQQZImg6AfjDp<GG47K!)%31*j}jF_f4rZf@P za)ql@;hI8Rf5=?pl&o5aX^g_OL}5w>li<)j58^)Ri^lA?NRPP;iI>=u-Wk^FHd)@; zF2gxsf~`0%8Lh}GHSHyJY8LBLPMhtyaR~zc826G&bQ#uwGZ#Fl@O4iOrfOqKvlMaD z<r2-*pOfRJLm+mo&om8~&6F+rxX@*IQbniRj-qAZ6zWO6Ps1V~H6nt;a0gkU879gv zm*@v2j$@kDU5%#6rm2GMvQZE(f$vxF3F1wcp%3~UTFCSXEA@%Ao{{!KcVQHSg6DnW zag6nH8Lp6oMA8-I$A#*17T+AV3xar~?G$yN!>|R6raDsIf_O?Q^6r=|&x2lFj~0?T z+t5z3Tpgm{<6nYN)x{*9k#bR(tc;Pdo2nlEjYLQy%^-f;5nko@BmPLGa*`lkC^4t? 
zu!QRO_!3>K(0v5kK^_sY<v*(tm$3vu!FiRK?XeU|TFH1@Rz;}y!_+{zf>30nCagbV zbeGtM$CURfv8Bf-YQzC9G=S=KgrYmLvNiHlKjgCu9Hu%bN4fp;F%wXxvBYMLim`<L zmC*;3(Nl6p>yZ(vNP>u`24pMsw1!gdPCZEaJxx-wLiMAX>-V^oKuSLf_Sq-}ibW8Y z#tDLWL=Y`;`GW0`5n82S?m<DsGC18!u$Kl1_KJ`x==Le5sitXy<F!-_3QXM{o>Wim z_}(2;x5k6UrS&I{v+j>UBTCXzEn3I;drnXNo>PLY-d(@vQ<@uFF{V?ln;j)yG!VRV z9eoyJd2z<(Iu`QIt|swws`Gj*o@l*)j_oi{8keJ5e-9yfjUge?dPExHK9p9HmU%4) z!?neNyaq`?M>&0geG5#mU4mpCrC9h&O!k3Ok7zffX}EkT7J|dV;4tj-ko!(uMUtT$ z1c!}s7sPukLD$_1E-X+E!xLbnzP$#OkhNim0L7S}cso}-hzjp*yGzsb6x2$BoYroR zGr9~P{RmxnQeKQ7qGF2LDTDWsL5bb6E?>aPnqQIzv0E^j(OI8M%P_UkYHtC9q9kod zxm04c4Ud&8T1w?({9BNmdRCUwaD^W|+lZd^lzSHAE?Q2pOieN3?<sWukb<T@#=$AU zZi&m1E@E8E=}V~2RvOo>RPQ^`a=GHVxGeExo-C2ols_WAN5qHnsCDShq51N{{kc@5 zrM8r>!?KepASZ{T5Mtd&@{uxNKd1zk;S~DNF{x{bDQ$9GRC(_lsB5ER=C9H$7;0h> zb<!{-!dS_}m=t9hu0W9`_8~&iLBa79WsA%zF|6_&?q-+aQDm6wc$o+(^*Od?SB`iW z_LL)^FS+_|j_49@e46Vpf!ZvG@31HR`&jp(G!*rrRxS$VWa*zef@yx?8k%ii8kd?2 z=_Kbmbf1C)-3~(oBFnX}i%ZD07snZ;G#?r?r=%Pxoa4}t3DN_1fc?|2q(tAn3Nr)= z(siHqfEm#xNN8vXD7n$#a+UMf4hjv;0c|HjS)P87Zfz_U3=XCPCwa!QC-m`!HL#FK zjbfDL(-hK4gJm8Fm2>Wg;_6!-BgJ+mL#q^>3b{&efm~CQQt#{z%QBk;Dw|ldWm~Wv zONKPlg9Qcj_uqpl{2`TU50OU3ut@=Ne02<46Od#j&5#eRAb924vqEz1f^pK+Y08pt zGo@arZq`3lk6Ia|zQ{tY`G?UQXKQjf#}dP7nZa^VaA539t>~s25}X^VCZ5d|Pr`bl z1q@p0lyr-TFxS3B&304Wh0&m9V{AcXL0iq8GYh6SR+DDSkV#aDT(LnAbp=B;9N8Oh zeJc;bZqkdW0A%Je%mf8WXlyk5G9aq^slJLRQQz=1b6l3iAQciPCZVr1HJSx=>B-N? 
zV3}Q_hTVmYVhI*2n;?^F?S81eaxW=kD46YT(1Y*v!oW!z>17EQ#E}8w2ANH_nb^Fg z?EY*^m&LmI#!i7;?al%MR|Nis9TMg~%|N4m&sphW24jJ9qei77+3GyhVl7%$9wyC4 zm#d<@8pQ&-GVA?AXrMNi8>OE|VD*C~$4Tq6fVl`eh<qUi0Ze2WKoZYgAj%aQFG*?I zpEJ=If9mc(ks9UboNr1ikx8Sxd6E`%qF5T7fzl~(NfSGXDfKKS$e>>lso@T0$zm6^ zKg?#Mf}*Vrvtgy(!NyUrYoE%KCsTQxBj1axk;s=Q@}d%1p+<Vvg`vjIRVmFFsBXwn zum@=b+ks@k*5D<$jzJrW+Oc4E{zL=AVYnTk;j$fqTaMV2>oEL|^&m&A7aH2Nwr{)V zu%Cj`Gt+Z8t}?T5u!iF-TIs|nSr(^rk+tnmLfbqjxZ_H#1@SA`Wz&(Nkp9)mVS+>Z zO?NiO(0kzJG3b|lOG5NC#%4jwj74>T39k4rzQJ~{Bl*|1n?dWl+8P3+ePh^1hR~1~ zDHh|=VQ3DgT#v3Cgk1BwvtJBJy=S3bXjL+DDF7d()#>-G4ACQlwqX?M7(<c6zhNVT zM-I9ag&a~23-)Z7R@d`H*eh45#_086>2Ua8^K%05w&iH8l#<6j4DJ{9LM}95Q7ig- zxU`ZOdqc#S$R**-y;~;VGn|d=md)?TWsh}B<6jPAhr1<4K08((2!R+cf_R38#w*z4 zif5gE((Ev%3yIS#U;{!#Ui4!7LvnP5*vP6Q@|O^12p!4S=dzih5A#1vWfwwMq~uJ6 zK`0){g;}vMM?8Zm7e0jwP_`ttc9&%ddM#0ETLt5pW_ndFdpK;g=Xg{@+S-k^gp~(= zhF*}Ou<K9@dNY_!3*W9=3ALnBTn6SA(Qib_WJnWP_O`X(v&0KUu)&WC4L7w|dmb36 z;cVY^7nYo!p5qqOgzgTF!&vG9BmjsYVlB{49my;aeKU_3Iy0UG)OONUeZ9OlRc<GW z6wl^~$5q_~<NB0z45fDHrlM}_VuUF+peNSEJ<a+(?t-(2Y?9_;Im@E?oop5xT!ztK zu#u6;x<9c+Rh#f|5UY$l5*G6p<z1MeP?l{osYi!SWQ9?o{DL{`fv6Zh(Zse#<?-vf zvR|Vn@Xya?rtSmy`)9NA?!Eb5S?u}lQ*|p~?WtMc>%!cPDXeiq|InQ@>8f4jA<}hN zR%*NrDEoN&s_2JCD)NgK#OxMV-q{NG4I$0|!7-%;n~GvYvdJ@^*6oAskn0$wf#~|p zW;bT^imd|0mKvk4ew>ufDunDIZIy&C1`fXO3)sk+X_*NMU->{O3Ygh$boQ>UU(*X# zYCrT?d8FhCawCWJf^+&xw_r@DHGC8(+dI?9exDTNTT7cN+o4n_e3opCOWM%MM@!3M zTkCHrzt~M7CR@7(IyZ{!n7eLRJB~@)(paT2#eYT~nQ4vkXnbl0YcS?!p7QT3s{klv z)CW5gAKPt>MQgjwm!Fraes{TQVARm0I3@jxN?`2;=6;opiSB7Vnu%@_#K<CaJ)AiD zDjQmX-A#FlkQO-%L`%7Jdl5)>ZGJS=-=2ju4c2%|KP@1nd>YdBq_ik@tWbU3GKzK= zZzB>$h#>mX(v~7UN8$9wRiU7i*fQ2+mg|*?>L&eG!mdS6h<Pa;t7FPJSSHoil@++F zRD~Jbv4{ugSV>I3#TQGc6ALwD??7=Lm)>zgs&)@r7=ol;NT4nOQ9#8IXIP$iI7j@P z40k9Ms&FvIk;N_7-g7T@0Gz~Q2^0@H4T8NO&X}Fjj2S|^cj+{9#YD4^7mWeW!u;r{ zMFMbkXmXOCTvZ>+9ex1TNF(bW+kZgeU~2R+v0XY%0z=0Qg9bp!E=6Wq+bNf`t00a| zAffn3376Qzu?2?3zoLVPdk|C!$?TZ)+pp|eY;5pW>~SsbDd(K2LXox^0Sw7Uj<Te< 
z{?=}=me80OTc#0emd85Lryo|&2x`*7q>gpeyBC5>dv>t?iRFn{d1A$o<)qD>tQQgx zY+jB|kctqEF80|A)yqlIU~w6a9Yu@m+JP`kq<5EbRtjuJcG8>otIS@+{AVI%x>zN7 z6X`pUWK|*+qBdBoJt2`3Am)nNVX;`??b_i0f@m-Th`M0>l`?-KQP4gFBaspmQ^u?D z$JtG6x2?0rWx;*{eFjL!rRDfrLyEruW|!?y^4x=p3A+UCK{%mp6^1>QH8>!=u=m@5 zdO?&=qW*9hP|HN^I$Etz;B<MFo(#7UxfP@@>Z#pisoJpJ;VdQryQEZ(eH|ZW%|LpJ zgV6>jLoPfQ(5py^(PKk0XN(|L#mQyawO*9HLk-&Mu`|^^Maf0&uW~NhxwMLdzlQ2& z*A{}>s2vQVw>wwgs7(fjiAo7{<UB>Ovlk-Ztp-Y6hQZCyU)mjDRK)F%#?vM<7<9CD z?lI~|4Fa~zwP51PeH6Z*mQh4V6FH(#7N&~JaJq@L^w>Li6$~)W5{1-cuSuK1i4OGN zr_|(x%P<vWajd(kjwm_e7ddXHq=(u5p1nM-G@`?7yMBeMmyhbT>rTQmnt(N+nd;o+ zW_%`DE$r3a{ikNhM6FCbCh<SQk6T|(J`5lIzTX0!<6w*Qky&Vf(d4-JLnliBSh_e9 z!4f00c&aQP{lNMr4iB)EQGNB*U%^A^(*7CD)+bKx{$_ttG%oCXjOm#ZM`FQqT&842 zHz3ZK6m}&sVaU2<IbqdKIX^?&0P35dtI(QjF)(w*$S*<35#6s{&kC_w{NUO})Wx6J z_@v<ZVsENu6MV<WZyTkw`?b@ApF+0Nz8!j!_SAz^KzS65+y~^${ccJ=-^y7pp{#9j zw>f7VGMKHzwo_V{VgCV|Hj%;rb~I@KU)hh{OA6)N!dPgZShozVh7Io%5}LCRJzN}D ziayVcv!t9h)ons6r1uB0(mvs#*>Vmq!?A~W?3<xNG2o@^b?l`+sr>FbcA?Lpp)WVk z$PwFNo&ckK1ec)_D5@NQRoKsV+l6U-Oe$VLZFSHdNJu#@O*zl<`VQ(gwH`yMn=+JW z_Q53dMPm5hg4w%$J-dtvg%3phDu;C4RZPGJ*4THU+dMZ78_;j`g6*)UNt=7a@GQg5 z+361FO916_P~MhW{}w;3BR)uuS8e&Pq*;ICN$KQ)1vvr{u)4$DHLSH?55_0=;8*Tu zy_3gUq5d%`*Tn^xCM%o4ucV+%S$lshk9vJoEs1e+D=p&n$wK<sa(8I{uF@k4rI0AA z<$2)JzR^X)-542yxyh9m<vQXAbi*(xYR|D{+~LX-Wf!RA(E~Ha?tX{b8X~Y~=il!> zHUU!7!OCrFh;cj3Y1l6fiB^kQY{hIZkkz3X7|s1sJkzDjZ(J^E9R_zc+{E_mI$E}q zq$MDz_D4Im@YyNn%&D_qv-k>P_72+)OmpronP*WxG?ua}X`6N|TfZm4BwjV!F(k6Z zTPE>zu6@eRlI-eVtoo|&ys4LG*X^B-v~cQT=`}lMH09Xp`CNN;CD!W=`@sS~*WnUV z;=z*ihSdhBjJQ(@P5()*SF7z?y%WC8qe9UEp$J<gGG(<qIP{+c@eQ}!QSegnDE8J> z;h5*HW_?vGvIMkNom2@AsIWwZVJd9*kQu9csdd;}jB2Jg=%4UHMfFv2&^NYrlfLQ* zdc)Q}QeOpk4v=hp)dw;%T3_|1j7-v3Z9r06`*eNPGNiS&&$M*2+S=#otDcq_iu6?& zx?EcexN<~~9NQ825XkQc5vB_w{EaYz2JG)cV$$#X!M7|#zn{Z;F&&AxEr|y{Z8wYU zZ8VO|`u(XS?CEWGgkVA~(vgZeCe14)NA&4jvE6yU>BgaCFDr(STek1_Sbog#`9#%0 zm09&>j3u@3?5gkeRS%&A6ppSF^ybJ&IkRikt_ORVQ-dwh`rZC8hHIqxSP_spYU?Jr 
zj+k7H_-jsgt@|3goGiK8W;n4*_0_AOj!oAOW$CN3t(e=?7%#oe4`>OdNTGau0z_y_ zq*$t^T1$)pSu7ommc>(o!CH)JT83<po%`7}<_(kM7UdRI(geXNKn<xP9O~q?NF!lR z$W>h9k332>ur#0nO6=J!f@5}zNiMl<5@gVry@8}!lI`Z)3oXQ%Y{#=?7V-CLm)<@R z{b$;bkpXiZl52ZP?$7!9$Gah|oFKj7!DD+DfdNCM$_;th+L!688bH_!G0D_zfLXTo zQp+4$dzrp!9gK**<!am+VysXC)dvydFum){Mjv!EojPJW&e+(c!`-bLz_336+z{nu zeI2b+5Q@blTVbY;X-Z{R{jg@0$&mw(dHn>(gwrK3RtiaU_c@D9dm&(%W1PzD`=c`_ zM;z8U3Z$=>sDjg1{fw<cwqvG|M6QLO(aFc!hbYp-;C&49frMq+3kAR_*^#@7eQ;!b zfUn8+!wpmVU6X$NT?`5f%W8x-gMIoziuDvkM50VQD7X${9A{VmsIR&M1yfyLmyDb& zUa4;SDkli=dSGmAaBI7OKzWxEb9!63N$tAlp`1+VH%<Frq{8*%Z&H?~bk~DDA8MS9 z5`>~tQc-t6d-hIDD}7Zd62S8%_PJq(_txxmGmDr89#}X(_NIkWodqWSCwGOS>qu7b zGQ`%g{!M9pD#73~e76U=Vf15mxmx6&mn#&8bu1XUOp=*C-ot8}qOC_EM4Ag2fizP; zhafQxG!}gw3`~rDoF=yYpb&XW*E(Yd(NXa&O$Vnpswl{0GqNej#o<IncH#|l;tgcu zR~{(uro|@gN$9vEV+3j@me;9!?A*A3Ld3wM37uk;$`H%OHwRl)?t3Qk2gA!Zy&QVk z_24`~yloO;v}D&PQz$pCSe;VvH@Z$rCl<;hBDYAYu75C|EYk}89%6#;KTBGT>~h@h z(p<Z4-6z-7*-HlS8=wMNaIw<fKhH{){Swkss&yRckcgyIeZ3+E?Q*w7$eepg9BO4J zf=d-7xlr;=A5tXhs&r~L#yf4vKKl%2K6(`y(t4wCwIw2!786^0jwRp5Dvov=6e^SU zUG{C$1B0*KOJ0o1allfK1D1W4{o1Z;<Z&I)_6raw%(hl`=xDU{6NE}kw$zmJV1NpK zfSkr1Y5vf+J&(vbS}u?ymWquOB$c29Z4@M3pad-xr0MSvzE>y<KyjFiN`TxQz`XQT z`M^lIFNMslaTT(>(Vi0>nn5}>yi+=D=b#x-vAey?dzqB^EVoQnoHRE`nUUp>y(0zN z{-6r@?I-y-)(I^#?Hp((@fMY7ZaXPgm@-T<i5~32u`YcIFp=!<5|iz?`;D_0#szIV z5X6q9z~NUZ=VeA-M7Obs<H^>U;Pi4%CVpoQ{%V{fNy?ti3<P-RVARHJpR$2UETumM z^hd}RzoYsPKHV8e9g9J?l#C3;YLHt-rUn@n`s)5Tt}>@ikxi4l9k%D2P^G(cl;`ki zfs!JNa`Y*7{&?gVGW=ks?xVBglxD~=8KVGmyJ8pE?AnK*W@!!X<uc3xCBvgE3^RKP zsPe@Z(_XAnvOhg_7yBf60Ke`kyPDiH^tO+NgKHO#2$x~}`^X8FSRY&FGhNt#{&Qo+ zcpB*E1jqUSlVhw0gmlnspY4}xpY2|9?M=avIp#Y1p#Qwcx#$srZrxOuq8PYP#2j~8 z&v5Ge&3kOXfPBqXW*d;fH!o%l1N!hUEClGv+)|AE{R>%q$_xA#$5?a9AinR-jRX6q z-M#@$I)dzo@GCA~Xefua?iyvAiwivIP1w9oI6(?gdm4;_{n0wK4533e*y4esqACz) zi=XdF<K_0f^7uyWRNx3fR{8I;lLMook~#@d7!l6TZMW}~<H8Gev5ndQPv$cyjqm?E z%O2F5KXIL{7-a6Z{2gU4^b?hYb{WM}F2e|wcvvQmi=b_W2IWX;?1w>nd8?Lf9~|jl 
zXr$$R^8NS|x%5wVbTAHZo7fXWHt^aK7MdDo-TxbI(73$Iy9naHs3ke+$Ek(QXrX-@ z5w5u0u0KNKg4b8y0=TaI5aQP8tA9b@6CM7*+4N@!?fyr)+qExZ9T!J>*|pnb#LuqX zLdci!k-ER(a$#H=ZMU@R4MNdT{T}mJuK=8?RtXg|*r8OTW;FXTHM$p!Lsz2{=la?R z$o-1Tiz=rJ0IpQ-!6vF)r0Xi{mlhnf=U1YCW1;f_X(FN|!zDH&ZIEFK8H(jWd$D@C zByY^|M(yxW_F~!$e&98BFYQ_W>8tG7p(UDI%q_jQu=)t~shB_#EL%)HDuw{3U1P8< z4d-jpl)rKO2o(j60OHR48@>M#R-PVX)hQ&ha`#t4LPFvx>E>a%0Mbkpyrcw^DTfOR zPBtnA+rr-gFwVkkd+A`3ie5AyBvB*=={;h0JTUzevaG<~huG6bU$F!VqF(lD**sBS ztz<vUA~Qnx<%ijjj3|EmVKyzp#1~&<TQidR`>wF#8U6XWe=t|Z0OPJ#lp_aBaZ%R; zy=%MavgllfYoOb7L4U9bnKAt9^{hOzA3yyPdnI!OZ@I=UWeyGSdKulX8;Y)>t?I3p z*}!4ZL-g`&X>l3OAQrs>wGfPJV^`1xF2mPEv*9j>-OcGK#T~@`nsYRq_r1&>A2!)~ z=m1G+tUwLXO1qWNbS3nf5?ZK)wkn}gCG@xws!&1?${|q}y`0hg3SoyLvM)bD9psrj z1@!&$7(OLM!tKrG4iD>Mlc?J2Z1<2N4zS0DC;7etIa|UY@XRZ$VR$TG-@twvp6E65 zC35-X{mw$OjNKEdFGRHbDr_q>I|`dZj7mQmf`;oavgujbyx73DXN~3qPcSJfO|zWE zn9?HqzfS7G4T(sH(v=rID6$*qrx#d-DW2Cq&$gR}^c;MV#B)rVDv!`((kK;vEqeiu zVGC}Y)%NxRcGpyCT}Go*{K^u7jbIUQY1?SIQeJe;X(ADHlTZk5|GYpz*9agzh?+tN z&qVlIHo`}#07v9AGE3&O08SkGxluKFR|{N3UPAiCmA?qKOiwC~Zt&^u>5+H;Af+@t zd2^dhI68M3e$ummk*0nlZj%0}B4yR6?Mnf4i+MwQEYRw6`s&ZAmqcALvT+%f?O>H7 zgT02yf&3k8+sI)_0q92<q>`ELk=Q+5I7ioA_9yP^+vOO9KbONlZD(HQ{<Cb)Q!8}E z0MZ_^W*vq~g|vc5FM(8(UHyYaFB|z%fFIVK>L1E`G-@N;mFg)GH+*T8KP4T6RSoDe zgl%J+%vOHJJIs)s&4+!<3bK1dJ`58UCkXngat#cHac<HI5Fk{YE%Rh6+nya_{qZ{T z`6vi0EY#v=ECPaff^Kh|Z^3kECVyEFlHy*=uf!<dRVlF#YC~P%;By)Lp92+lKYm80 zy^%u?<e=O+d=?p?YYcyR(G`<1^&2=}=~Cin<RCY=>IGYS#;VaoIRFasIQI@4J1TPe zaVWNb%AoZeG5}i*Rvp=ye?khCS<6kh%Ais_6$&PaAp(4+x&R|>sZU5hm9Y9z-3Hq! 
zlmA*Z%ioDgZRZh5y%QJuEYpq7;!owX@uNcn-O+MEd^|3T2ZV7FSI5?lP70rcf+2BB zx4jT4CyiI?5oybO>jG;S9TWO37B2OenT|h2T?+`9rO&WCqet@D&oYzHGoaWDmDO_S zV)2AsH_VGw2>rt2iD)~3%WY$BD3MScD-VmhKRnq1A)WuylevW8pzT=F1;++Ti~;HP zh27xrWW93CfukwH*65Y~*}4r*4>59xC$r`x2Atk1i#b~;`VHeU$dm2M>C6AIg<Z;t z<@b9q{g~9y^P8!u5<1Fd$)ouu$oe2^D=Qq+kI&n}o*a|nbp=;RT!uHcu;XKf_c;uE zB;^;FKQIuXB~D6(dr4cIC8H_<CoXX!+{MLP(7w#AH(2V}@~~K>SQSG9nMyM|wo8zo zIX%+%25THUqfgv?s)C%pA065BPSZYz0f`hRU506Lifw9&>gy~ox3JHW|DGbCGsV$b zDx)Js`=9LV+`K|<u!dVdUnshU<^C(WI%CVY!ddbK(eKK26Fj>C(pa3jkPtjvjWBKF zY`25f^#}7?#0)OQEgOZr(XX`%@pq}m2eK#kQ7wBkZ$`jJT`6`_03F>mNr6k*mAvKN znIIyxb}3spE<4bQ8K-wtV)*11eUI$NyR-e{f_*;QBcBXu`?<4=<3{M5uw}-GLEk|I z-`T_Zj-Sa-{*FC1-l!iyYo5ceNwDi(hWJfv@AzqBkAI5-usci?s(-PB+aB=JSPYmA zE<@2Ks?G_i>^=04Ef#G)7$CS#mVXXSyB&$J0JhUY=4(sWlzX@gDVx}={GI_bDax%G zjc$TJ_m7QiV}1-j*U5J0$Mc$R+4=lve%a}b6M7BVwNchL-Dss)z7y8ZfYaoUgjU0~ zIj1hiUT~LldPtKIg*~}04*9waJvOq!2}yi@EBn`kzI^?Y?2`#YyTxfyB(~`WY4N9> zF3lS)Oh55HjW1g{@o8N#G>&?tTfK-GCiOD*1w}s6eU#>+5g~hs^FgT_5RCPVMQqNb z)S+ii$xP)b(s5vPp4$uOT?t1#F^}mucQ>e=$GNY|fndsR@F~_ZX*=KNO}1ik?8YfM z{Qf7{*eU63O^qLWWJ*QgJ(|8_LxiF$*Bc%#?<Re6f(1+s<6k(zlBSxHdcI2nmtDse ze<@T6$!=p`OqXaln=w^nL-OsFY|GUC{L@A3_|&*Q8BiCv8$wq^geA?>1vkF~PER!E z^RHnKo4kycZrF~W#0_Cy%3i@DrzP;=MY~2%>!%6txYmU3gb;;eokINLD7!mz2*0kB z4W3oZ-@Cz{ofX4Bc9zx8+Gf24>l?=-snVxF8aZz?yzMZ|y%ZO&Ms0WrSi1Herj_2d z8946bz*mWTW#4abxpEp@TH8<E$BK{NG%mvgS#o{)W9I%|uVFP>3f_lqV|}-->L%49 zXB1E>7eJD2n~P|(_%jw@cD`U*Sk)DpKvzh2giAMIR;!1)e$8y$>~7JgmeQ)B^Fxuj z;@Pocm0Vn|y&ySD39v_Ir)ZY3eY1^&P>;uQlhaH?n=BKstIFHp)4^|Y`&w`v$aWOA zn>U!jQF&KW-d9bppA+1dS|G>Y1Shc!?PwVME#Kfo9dn^S9T^y1hVFJYX-=F@^C*Us z+jq#M-MEZZ&Dn@bbekKMk-kChTpyQV?;|o+lD7UE%RDS;y&iG76m|`F-JI+tSi&mT zPxrBOH*bhVag@!9xxEH#g(XCEr1-2Fhr)WNC(tIK;DbLdH~oG||F9qA>xB5_(vQzA z44HuA`yceXeMjg&snV1p1c7#yt{-OR!dO&oVPQl!xs?YunER*MZn>3rdCW~M^_fB= zzh)8pxUk27yH(UVtw?Kd8N8}x#q|(20=8qWwxb%1hlAqmR@kQZSGincl)?@3o=ZZ` zBq;da&XS}|byzmxush7FGTVlnkf*=P5K+Z0%nSCZvUSLU-J55OKzdZm`?2h=lq$)= 
zG_`d@?@JnX5_ac4A7lzqcpz;_+LW`FV5eUP?drpzsrQs2phEq_hp`KA#;94mbl-r4 zt<R$wx9^$s=5KHbV1~=k#YT&{7Q=2db_dGvmkJKDd-sLL%mbw@3FaEkeZr&^gfNKx z{$ks5fTb0gdR%~6pK=a|P5%a>P9WI)T*^6`K(HaW^vv&mV>^lr{P(}HJw?4kR)Z1z z`7%Gv2j?`{dFL0gn?=zBqdIeuQe-}e9kF9EB2eWpiQ<u%?p~_Zt!iN7iW77PWQwe~ z#y2oqaq5Wmpmf}1@CJ_b8Fc&;D5QV3QYYUEJtKK#p!{RG;sNZM=+Z3R={PK;-!Ai$ zG8@?4;_mL%rytD44VEBwrQWM+)?;nTO^&R`PPmRqX)~~o@ok7_>)wsx-#N&V7L1II z+>TYCpuXdnY6rX<IHnp4f_hAKc0b#=px3x5R$K?Kei`kjwPOhS3btIpN!W7fjg|zu z8x@;dRF8?D1V&t+Guc|96>n<umefzrbE|45o#XUD&B5YrW!?*W(dfQDBDK<zByHN? zSzPpfma}kmWNRR<R{ghQDNL13K3rFJe&Hpby{D9_yf=V-x+t4pd5VQB9-`UH@)tMq zi%znwyXFPT9(bv#uj~{hfAw2T_rb7TN0&rt_=VrGCI5~HCtvlE<23Y-c)?A5M{!fv zxeWauVA1zyB|M5am!WMf%5qGpMAKa@rph}D$~!#NbLIHvwajOCg!?&^#Cq>u9bH2& z28h`^I-SS3fN+F-$MEDxJtu<8v@A?Bg`M9$fL}YGb=i~X{`v$^_b==1{s1A%_C#x5 zU^RQX@!vho5|(%8n^&`(<w?A_iFwt<ur14X^G_zS38lgA{~izadv!hC9|N-MX<X&j z4cy%+&4z!o*H^^rde27vVJVddNH_k?&aDXR6TTWFw4F$Jcj8}goI3fGdW<%2*gdX) zV<H|$TLq|5bn^EsX63lNN>FV^PMQv=PMjw&8mgNs%F;8%?>Me(i6CCg6@SZVxQoki zgK9vseUHm==cbPp-G9YFV~CgSTa6_f^J6Fbcx80zRaj!`tngGaS7uKXYKzeyF8vM$ zDzf=6#;HKrJ0C`{SL%b^o8r*q`ug4){=VJpa>MA@6Hj6=`|au&%yr6OzWsiuJT|<~ zrXH~HkNt}sKM>2$|JSbH59Dh6_8d@#(!24HWzL}y{GtQQaVX0Db>z?9J+#=JE)%iH z!-x3r2KN2o4ek@7QSkC3l^Xtw&)M&daS{FYD?>>34FAV*$5Z#q#~oLpWLyS#+h`p9 zPsbgHWu|`j({YD;2FhR`HC=EY8U<uq^9y{2eb<PiTX=p;75uJIx}4WKn|0<@=5?YU z8(tQr`|Z_E>bh6i;<8x&fk)UAWlMawVM`B<-c!nXm1!*np&z2k<rQRogh>})=}ffr z6}Hy$0&m>UqRQif-=B{P7qw$xRNz{ge6LHoeu7Oej}CkQQJ@c{8wgoYM|&nRv3#)i zK^s)pVdt6S?AI}29?3X8wbZdo<-<baWY=grJi=jUCT&|jLS$4O%UTs5^7C;_7`rD? 
zD`Zvo1)V^2+053j3JW{DN9mp)ABIU`kKc_TZiqE%Z+^z!Ta^-A@FMnvn8t-4!wO^y z&*YEUtyM<;;rT3Lb-%cWU|cEplPz6x#l!Nw^d{*U$Q5ttpj|FQ&-rZm>OT6jV?d%E z#SGZ_ZlAI@R~xObgC^g_A-}@2#E^c_5`{rrLpa^X8B!!m76ya?7;d%l<hYU!<)EJ8 zMdVL<&_r{=VPGwk$nR(#rp1UO=1?#AH7tJ6s58+Dx*>>ZNH?XD$zg+6W$bKBMaVcG zio`_t?gi>6+3I^pnS1loBMr#1Y&H&@1o4{Ca1V1YJ`5z=&zO6z>9`2<2j*U?Jokp6 zprK>f&Wf<U4|isT;{uR(qcvh9CY>D-RMxys2I<%5S-`tN%x_H?Kc|u<teL<&d$I=* zx-pN{u8HM`ezfbOHAgkpOsHh?FP_0{M&Is-+dc(L)VcC-y3_ChA~Q1K9KuzwW73!) z2*Od|rg_Y*s=oYZ$7szyFE9v2O_H^dlG8|;Uk3`t1Vj?K0=ZSD%Ns!W&5*h>J=0OD zvGf&+epT)~g=6DnrnJgHD*J07t&L&Ebs-TE$b!l?uLqMY-VZ@^cTf15rk=!53Smpv z1qXlfoHCr=$1s!rTqkqByoYUGXXWROXQAuQ@m1s4_4VSA5m5cq+H+7ZIB_VzsP1@q z2qWx>b{r-c=KLMfy<+4ba0~X#SN_fF9-PL1e2{gqCi^=f6O-#$O1;@0dF}%?-a3#E zoz1GPQ@Wfk06QHHS|-qb@cRSIY3-d*3o7O?Zi}3hiq_CN`-ePy7Z5it(<1_dc}4-t ztW5F_Uk{=qGiW_~qq0Y!m$Kh*L|$7blP|9WxgY<)bL?DY<jCUZumY^2sf>$Ejk72> zj6s*-1E6N{Y);BKmm%P9_$FcjcWOuE1G8BALy1wpJ_|i~<Fj0Q<Yi#WOJMRqK5mQG zHL|LQp5|9hXAuuS&5y`sA3SUhv1}$o&F1Tp#sKTEf6mmh0K8q*dndbNn;g4vI2@^N zv{pq+0ks|cKRr!zAp|m(&OOb_ss`~%)7cwUlY+mThLY{st@465R4AhNwVpZ1W>$wM zU6_JF-Xu*!QYjFB#MHKD+^L?J#eql$*^X5bOqLTw)yfGl@%_Oqh<U9p!?Pe^NWU(9 z_!MK+@qUHr$UrXS(gAj;dY<0%BC43(MD-1ohCamxKGL(-xA0UT^KK=!^`D282M9$| znnYhId=%NUF3Ta&mwch8r8h3@s(Qpn({<kv<cYpN-VyQo5cd3|q5Q+m?4w7A@e>)l z^Qe&zIM1RsMDSO?VHq2GcKdO#oJL=D4YpZFx|YFg?S@1?y%&3R!!G{l1Xf^AGUQ?< zkcBe{hjg$uAd%mnWY5{7d7o5PZy&<v)UccOIRAcFO*?wz!y1<O*!`hF8056wrLC^$ zexS10%j=$~m%dG9?>%OvcQ+GkH}B!4p}?9BMdcg;3!!hiPJnhU>mgq+GE4?8hz)<= zHzPCsCuT1%TWc0x3Hj`aj^t9|6KvT>-T8MnvL`-@O4~z=h@(#phAY(ES05?z8-!sm zJxj-HFhdh4@PxeBoDz!83JtflQsoJj_HpE3USEaFHz@j)G*#wyS>&WSe@~k6pGo^r z(u71el^_14Q_^*;R#X)30G^WCXz7BESBv`kID7o#zWn<`*+(B|@j^Yj_wf|#UK5o{ z=LYahx-X^#1#zYHdSnIkd=_4coP$fTbjz({mRh&tfXIjRlj!R*gpLD(SAoc>?mAWO z{M!nFi?gE6UIR^AFW8zjLYgjaC-(oF>ahdvc+HY+{bUAzcnSOSlL~%FE?d4olh1#g zy|X`ue|s#uxj&xYx|kX31wNlZDx0`<H%k`bb8xW9Vg<i5hn*C2`L!Rih{r?tor~Gv z$5Z&OIc&+}@w{aW+xYk>K4}d5`tc0@!F?>m5y9_V#0EPC@>_G*Qpd1?k6{^vx4<#> 
z%0p@A&&wKfd<pg9_`e9lK%lebJTD>Tgp^RsE;xqqmJ$~Aug!dq$5`FJQu)V6v8(?Y z9We`6tc1i?tQ-&~md^JY-Ht=E;;P@13)qw=dh^<S?4c*Zhqh+ZY_=HinSnyQ<V>&b zlWH_!#$jdodu2uyqiifmd2kz_kdp61*-4DA3FOxmv*4PAyB?{T>}DNWNPRXoK<cj` zAyR^Z7^UtCk{|^uNV24p5noBG5VE8@bLG^6bWK6>rArDjUHVo*3Z=6OvQRpuAgR(3 z1u2z2o@<rUS4bboB(ai|-fIdD=LIJ{tKb*~oK&OW@WFCYm4fdeyh6cWBYdfXW1w(S zp@L(j<)q09UQf87;7x>EGn7CJ1(Fr~Ea7nq-b#3gf=h(^Dfkt_IR$Sg{Es=*5|Pez zBuT;HM(3n&6r2u<q!tAaA^ebnTX9;zNp(sff$+B!Jelwv3Z6>%CI!zTe1n1ugs)TZ ze8Ni=yp-@_1+O4{x`I~{o~z&+2+x{hm1|u?fq_cIX2KH`d?Dcx3jP}50Sdm8aCZf- zBfNdKoKHRBmleE;@KyzHA^dX%-$8hjf}hn`5%@?6v{J-Q1(yhaNx`oWzFEQB34dI{ z>Gqmbso-?yQnDyGeNRPNsNf-l7bv)q@O%YNAlwYxiaw)T9a5?ikxF=yg42PEWK?j0 z@E`@xC)`WHrxSjsKyG~@;a3!VA>ltLcq!p$6}*D*;|g90e413R1U68_2MS(8_-hKj zneb;7d<Wq*3jP}5RSLe7@CpU5BYbHAd!sgTLN=xiPLxN<F=KIRsY_QV0~JaDDE$>m zj6#_X$^wNFpitl`*3DKZ_hzw>%|`2GY>tuZ7qh6*V~vRaPN5uBC<&mPRVeQ%6kH+E zom42>6-p{7hZKrKp=5!wPoX@ZPy|qRDwIVEWg#e^SQXMVg;WaC2MQ%yp;UnKhC)e^ zDPkq&Cy37}#3+SG+6BZKh3Ksi(M}@T6yhH<(Ly?1#;a56tBW|GV1u{l{cG3#WkAHP zW0STd@^jX)$}QjSihkx<jn(%llFfz|$_`1~U7tERX;?pm=svmpk_bHMpStifBn}1H zu3bu2r5sByjOgZ5(5@>{zybv<1d!j`6?IdH;)=hXMz<JfeZ{NAmbj2<@)rkK#&g51 z(XgUbN7p+QaC>CItT_J+Qi-}V0Bw(>av*)JkUj%&;;r-e14{Tq3NMwa7&S!ph<nja zXC>yJAlB@lhKWaWi?l-~b6naCcS~0sNd?Gnl(xDHZa5`mx1Q?}VpcNC1n9WDTZkj^ zRF=9e!hh2iY)5dy=7kqmgjd*o+a}W4y!a`7u0zyLn8X^k#dQP4`I&S->@{3kj{~<S zxZin+-PyJvwbxdN145Fa(n0c0%P}TIirh{WZ^F*jWw4XYY1giOMFvY=Vb5$IY;<i> z4uVoZbr~|jf;aXdn{YYYX9)X!dk;P>iG}Wn;eXu5GIxXt&D)SCmfj4Z!R016BJdso ztm{|FI>lQ_FJpf|XN0wK81eM&j>nY9GCA@BZJ(v#o7napY1XVsQ>H4tMaE@5cIEpj zkP+SJGRFxfQOXss=8A37bQp}d4we?lHX|A@@>hRB!guJ(VBfk7I%=ETLHa62o#QZ! zL4;hh2_1IxTDjLy*Tz9IN5}+vlS({^Y7axM_+$)m%8gMNTJtGP@jc{tua5XfmG~W< z@&0nWL}W_800_=TkWfq2g|K2!qZtaj%TV9JH{48^U3<0`{TqPBpCSaeEVM^};9ZkO z0KB<pxi2^Uq6IDh<Q*g@*1b(J;uV*{eGr@YLXKdPIZs&zx(x3ORLbmyTN?5aF0N*X zx&)P>tIOa}8R)o6;SqH=pC$>E4P@WH5W~C1GtU?M`#)Ru*Yr*(W20Ua_)%r7=Ebr8 zH&^}@Q@xV4y;#AQtz<<nMaL|xMhl`vZ6TmtH<LnLrF5{Xldx$8d*!7BYw#q@#l?7? 
zzuZ$AkPo~-u-$51JrWgVkKv{#x(aeThilw;sUPp4=hDrUMJaNdA<y#VY1-H)p@1=B zj!FHocY}-*1>5b`RUZ?jJ1Ei%E2vaI<kaDE!DZM~Qe|`d9F(dNE6<CJ{_+l~55l;Z z^o6vvBO+Fb2wK6`zC4JJU%}pcIhb$g!J1$0%|H1P`}5^Nev9ElA!}9CzE;KtztX4I zlcg?KbgHx%KyC1i<=9I=XEb<FkK&^r-{Y|6jpx|oujGZ?=!&9`NuOV&`s=HXV$nim z_H*p_SGp$Nc}yPixb#;5C0Z{JcWpQ}MrkvG#)y8cIijClzv&=uPxfd1UJd1w`m-^w zCiDBAW$RuY;QrwE@T0!}Y7cjJLcV!5(*67IfZTmGlfTuEWxN(1sc1nzya%s-!KaXN z#AhAwWt4u*^4bvI{tSEjwP@a>A8UDS4*zs4OMX3?FN<Z9UQginZe{CVj}N_?gch7_ zyNsG+$CKOURGRR>R`%iRkMP&Fu=qD)e3thiTU}q(&&4LZ8OH2yY~{PiF+42soHtMM zkx(#iB}QJ6$Fv{5!GNpYR4oCXG%s`F5tpGnk<ELnD{t)0R=qW-uore%s$+Q=P`o!> zcas*8PkT{qoXyk_AsT0cawd3_T)sPVt+!H*U3ah{z*TX)+W8WsAAua&Dv+Lv&4)@! ztYyA$hxLAU5fu^vxl4zD+KgOVCb$ymVlB244r3U43){Ut*=+pVrg6mzkCY)NlU;aN zKUMena?Gy(;y$;4^gp@JHwCZ@ZyQt3EbMHi6DVq+_|N$+XeRm3zfh>s+kY_*1q%Op zFw1=>%(|?T-iC35y687xLgEDMxu;N{^>F9o3rea>@dc37`lv2N#hp)wVF&cM<4M%( z<`7(ENCYVzBv~KrgUm4@;T)~+DN7&P9s4$e)Ma5Jb&YgsK}Rc8?|*<?`>xWboe|QU z1+oKjM|~I@QPPcnVhfv9a*E%*h&}u6$k_O7Oy*+jAaPM$uxIa3oc@zQP^Vbye8%tW zHEwqN-%qfHzl0uj7(y&`f_3CSPOt`o@qclG)g_*-+4=VqtY_zU7T0S&+q-kMaVK8m z{Obg(0arlkbBgK(x2940-BsQ#5iI|`cAuT)B&K81YiHTS_p|v$<!sCQL-?I~Hes=W z{rvuWe4zy=Sqt3vf9`Va(tc2^;pc9^xs|co$1CZ;q47V@tP-N-Gph#?=Q6Y!k@Y{G zSxuGW>y30~#h?D1CG1|){ZU%A#q1sbcw!aR&g#LMyNzAg-Jf6KO9xi`jvH*)o+N(d z4YqVoEWhPB_T-*m{;Lf3`W_>{Kaw5Wlg_UkiqopE!&ZF?O?6$?z_@Z=s$98$?6|b` zPj;uS7r*?^T|M`DX>_45rPUsLWfsfW7pDurog>*%)kQi#i`~Dkd*U6;H>~`r<Xpo_ zb$>VwF8R>vJJ_a9+<-Zz99mu5!1nCRvlfGjc_C#0s*dxG%B)+Jp1B;>ZP~K_IJB~W z^iQ*n96g($yK3ktOBv;X_*x-uw;_Vw62Ma7>?VByTUb>*mw?(5jHthSBFs@!FexLX z(-eU!z3~_CkeTew52N+#=yXW@N_hkEw};rd4~Mbm*BA_6!-KXUT{jE2X3FLYMK|F9 zTkt3we<F(isemm%(WB>ThymK^PuL0(o;d#wmfoJxS&;)kNi{UNSmC=7EV%)%9h~s> z-}MM`TO<_WBPD(IKf-RDi06kCu&9#>#v`P-MDtx#-Or7V(}Shk(>t>Zn88X;#wA<= zg<5f-Gsc;RYW7dpwL^uJ^%9Ccg{pLaguQ<<#;`+WuIOZzw9{GJ$^QJ-Y8G)S($w=2 zn3=Y9DSlizt_bF)1gLM~mW6QKjkDpWV{f_4DSq-hQPOQ(WPpXMA1nI(1m4g9m*H|Z zw)RwlF%TsS;sN?1hkPC9H2Eko@p1!jhbsQ9ZtTFR#3+%H*W<PIBW=53MPTmDt485C 
zr*LTBtYW?`c_CgzuswjvM8iJqsLRxDth6O;=ouRnP4zG3#T12C?}KsAXC#!KpAtTZ zuoJJjpOl$~A3>C0Pr)6Ct2TD1Wf=dyjk$knG$ucYA%lSoA6C7kOemsD@at@t(suV2 zn|6kcnLk~?|NCL~_NNQFXHEIbm^wZgYtunzu^>*#!Yg^L(!~^(a(Y1E_6^X1mk`;= zVHLx7a3-_Wr<48O2E{Qq23j_KGTU`}MShJzo-kc)>k~MAbqW?ma-uyC!{#}c;WL;U zw)VC9>TsEU9aj$^Xk0yjfkh_{FFuMweuZTwe_H{WuARgzpA}}gLtbP_7MbbmfOrKF zuQcH#0@mdzNg{Edvl!9L2(&lBEaLXU5%CN9Zk=*Fhc~dG&*$pCok*33^f+nRL{|BE z1fP-2wtXHIuz3j`LExpgO^rhOpp8q|@y`<sg=?uuY%P>cmqYND4wo_SF9r+?1v`DE zLcOY>y}1}Ic3E;r&}Qk~w@L0=_xUJh$y+oM)~u1XzDbCsR60L_t@&b5W{)*8(-TNZ zsjzuU5X3<>a`1Y&QMvG|QJB`MOxqNuC5xH!ivfK48?5`8WL~?NO*qqw5A4U*oiT@R zT0{p#Uev;;C^52rz_}7RKd^|MJ2QbFxQ6xpGBV2>Z00n0hArjv35|`fyIh|@^FWvT zdEmXYORJTZBlQ%DUy1lUczWT1b_97yz9X6VWla8dEWa4#{!l*BGvhmk1$D^bJ~Vz^ z_&y>M*CCK+?<*c`d*LpvpB*3AppTLnrK#hv^(3|@`>?xTCh}4r*8A+15XV)z*`-`R z%{Mzn<HN5Q9ILOgb7w<S1b;MgKfMa%n1=lWJ_CmLcP4>I7gkZD{?ahys@@n&0V;AV z|G`j}^;P_=*z?dSI1cd>HVcWkTYB!naOXMcW*(@i+Hzm%6tG5)?&J$Nv&5}v6w468 z2dF@l4|bdAb5yS)<eai*HqKN8@e43*{GHW*6_ULa^c!b|aD1fgBHZW#W^#GFRMMHN z^Z-!V<_`oXx!YkdRQY2vASalJ$Z62;Eb{9pe$85z`Sl6wnB2}W`{h`dtByvm(?`_K zT#n=#a>&bm4CU?hrEwv&m8JLB&EmK85h3wwDHTS%I7z;gc;I0i7qufBWdX5`4Kdca zTyR08LQP#ikY&+HU(2yER7z`#>pHvtn}JDl`=HL97Z)cep+b7!oh~lU_)yk%=!s*} zk5|}<ZwBxuzGa&8aec}vFxmg%;-W9^2>q9fi|c-46VE5{<Gy8;=i{t<X)v~VEXCMR zzT@F)Q3kWmUU~L%SnaQC^Je3<AWM+)?TLNRI$0)%z)P;tcuN;cAt!BFLdUE)Frpg> zq4*d}rznr*P>Y(#sg!U~`Q%#Deb}U(CW6q*6@SfJekq0C!SzB8sn`@s1vDsd^yDY) zMed-SDPU*M_2eDCO!G}xoFy)RPF9kYPh44YrQ{d!y;Nmd2^y#o$<>pcI&U!U%#a19 z6>t_FLSIWY;ytsDX(5dgnEQofYfU`eC~Dq^lCsRuYDSmgDIb(-*Zw+->TyU=y;q{{ zyiCALGk#P(+*CGv2n`@N%@v=3hPdK;%YO$djq3cQUlzVdaoI>;LKX9}q^KnF!(eNQ zFY;!wH!tXT_aydSYgbRi;xr+V{dU1%%uAM9692|WIiSUt_Ii@QCzMly9)gJGekyuN zt6%vqtW=nM(Lo9+XS$1_k#{Zs#6cTo={pN?@QoHW=VBE9s)apxF^0EU*vA(mecM-2 z2Lv6Jd=uG^7o+)93)QZ_E^aDK)gGu!XG=QTNKH)p?XS{gDKMcmKje=xWtif?5gUwp z<-pNLYG6BCBa@fF0rB7RlI+F*KqWC**xlAJKDvzEYYpW+%24&tF1QrTNk75=(2>uc zdA&7(8tdl%<YM5sS5#Q1!XqkdRpC_?dJItbBUG5G!pSNuQQ<ljZdBnrDm<jZvkJ6w 
zl1gY-VF11#PQMrx9;dIr;I~hOZ>X?Fh3izl4JzEE!nag-NQGxqD5>y{3~}HPGEgaC zunNbhaE=NqRQR|Gx2o`f3Qw!>I~BI8&|{F2Z;%R&D(tPo!30r0C#Zy}DlAsvG8I0k z!c8jNp~Cl7ctnM#RrrGnud2{}uu_o-6~?J>ARv{`WvPUTDqO0<M^yN%3g1`ZaTR`} z!mBFuQ@bQgg}qgnszUmbBK>BnaM=(|vP^hTC7e~EOvInbYF`agVPT2}AGwc@tcl^W zD66wm6<E`(!1ir`Peymu)!$GErLllW(z8d$=l4k!=Hb~9qv3W0D)nidq>;0$oV;u3 zkIQt{!T=>qCXD=3!_ZvvZ--uqhbokR`Dz%(fc(4Suf)SWA^&RBaG)B_Qo}Gq<zK5# z;m0O`epaqjC4?#j&Zvg5VUd43{gedQ4#~ePHB3$#`d#r=;&Jgy{=KG#yQ|?=A0^(X zhD+6Ov>cv-D@R@m1J+LYSEz>9sNpO%yjBex)$koP4DX06QI{HS_tfmVeXWOY1~-qh z;91HUx%u2m{I?vBkz2$q!{4QdDF;@>EtF%2f)>w}ajKkmMY_gnu>L8(DPWw(-G@Z; z!Dc~5MkHK`5EVhRQaSq~;AQyFh^))xf))VpiHw&b+>=uZA)Mo$dR$Xbo2t=tsnuvS zKSyT#w;Ijmijl=iZWvdv=SPi3&vy~se|i66@dopAx30O*z3<_NHMLrIP1)pxRQcaX z;5J5b+`sV*Y*%W%5BSNyg}a2gb72F5OAUoV$&^L|Hu?_f_tbT28jTOn`3&$#uHE6T zG4h92)E;oxj0x=EuIt5d<nIe>uZ_}bf<rp!6}>rbaN=Jj*Vg-My44nGHC+rHEc5$v z+;TkQsG!<rZ%ue|mmm)=sMw1O8tBOd4G1XhU^NWjxHvpAE4VctJm)cy&#%q()Pz`x zizZgD6ppLMvrw)1N#NiAEv)fz=X?fwP-{xN=-oLzO3@GS?JT9=Ab550bd=&P^LukX z6FoY4+y`^qi+F4(gP_KbuO089$?FtIOeV*T7{+nWx7WVop^1t=w1VSWwP3_2nA~As z(NE*abDo-KWO0+oHMPBbG|{!+=`_I}#NXmxd&xr+)`Rd;r0pVC#8a+#he%dV;J9Wy zAGO!^_SA$?$BgpSM9IRSJkMetRlyYaNEJpG64?O0vztfG<hU7lzU+wqn{YY=>I17P z2+viOrx);iJb^c<6{uHNcdjdh*mZzj77#jN5VsR^<UF2^M85&=G5fDXnm~6haG;J0 z+~}Jx_aiD*8{wsi>lESkLXLZT9>?{$Sv%276GKf~<)yi=Gk)1(j{6P|M109h6BW}@ z-QG(%ZW*2lYW04{9`#X#AG%rF8>6fCTOUo(Tty7|T?}5F!GfpQlQUpU7zT7{=@9LL zWgPc1o@dp3G~qs6xFvuKFV=J61O2)1jX^De^#S=By$7c+_Qp8!;`9@JOGzZPuX$_Y zYk&0C^y{o{<%1meJ|63>+IKKTY72FmgwDvfMUK0O2a*5s(e$o`vJOe;(#3=8GSG+X zGC)`FTk2ETQL)%<95))zKDA=CfB0zfgF9j$eHAfyS`oYN)!Lc9ny@Hk<oIwtEB_k8 zC*FmM#>3yL-KtYY&TgHihelKToUbNUp5eZl!98j4(fX*gQjb>pbNcs{R?36cpXdAs z_|zu)X+mpr{WM;k75U`|$Ni3nxPCuUyVy??*4f_bCXVac%yDCH)jsW~iR#-?`Rky4 zbd=*3-6AP+Txb+zy3x1QyT!9!Tk2lumd__+9DL`eiLp}6jNur5)Q1Cosqb3#E&nO1 z<}W$!HXf^*-~S}iXU;<YcsfKHn(WJUA6U$F*UaPsuK2h5wfNL~m3kI><Z~RA+r)E; z|NjO5h!1$+Q3|fTr_=QAZ2T({Ubx1y`Bv>ve@#>>HJ%jL;WqpOIe2!dm0b#4=j6E0 
z)Ud{Y0Xi^%GraF#sw?!%uYJv5lNlc9#RX3G!CV3y;9u`o>RYJ6hkU%DWS-V*jIx-L zaqTiRiRZ!)BX=l_iqJ$)F3d7e6E-kK6Q)Vl_^M4S^+uQZd2oIhd43a>Mr*qJaosIl zxbDRP5Q{(8ePzg%;8u0`hxl+I#ds!r!;1CdLIwo2Vp^1j4$*NvEO+v`9>ur09s_T2 zJv4uCe<f4oScCnEtP)3H!1kq)qWqF&TT8=Ls0Ke#OF`Sr!?FtffBS|1w_o_bxnJlf z57Phr{X*@^K+VyBV$G=KEAO9Y$(vtRHgCcFX|+SUYQF9#a{2Qt3n$LMfBCBUqZTcl zKdE$n(KIewGs&_N|L08OU>g$0$mRFnw`ke?J&{2g<{nsCs+hLQY=Nm<x+gG9v)o&g zz31Urjlae!cq$BL6=tb0Lxrg-9H_!%6(*?AsKO8xda1D8L&^8D3R_k9jS9~y(8_(T z5?WMvT!l?4Jfy;U6@H|`Iu(AP!ksF7O@*6P__zwIRM?UKIysCTrA3Wcs=`7QPFJCz z!c-L|t1wB0MiqLguwARP93O_2m$VMi<JJExQe2V>TM5#R_N+>1QDKt`>s7c@g*#MO zqrwUm7OF5`g{dk`R$+n)jVcULVE~|&YUHI7I2E?LD>c2MLP>?KDm<&gCKc{fAuN8m zg*7Uq0x17VHC(E~LKO<`nh`QXs!A}b&`X7P+>{LdP+_YI>r}XdV6D-jiP-2iURyN= zp(mrm|96mD)WN;BAVCwMy)scw)^$K(kW$)}5CxWoDX@m}MKkjA6bR3=yi+U=-g6*9 z^NyRfJy3~|RM?TxeZdMn;kvTF|33w>3F7FHYYx+aYo#WDP*L=M23pnM4{E;a)O`OL zLd*SggfjlArc}AkU+r^!G)t@;R&f}~+)sF}^uY&c0LORXxPo5z*Lnb-?27l2z_ST( zeGtddKQ34a=)}_s{>y*?1{g=66CBnJVc=PSxuG049XP?J2v`Kb37*D7{8u6%G(9%9 z0kKF7_zRvO;8y@gMj?Zq8m<`dPdxdcw*wC94qX8}6>tq6;#mh6X;g3{;NWO%Vet=u zqyo0!p~4705~DN>t{@Ix9;C&@Bcv7&F>MCCEk|g$JAi%SluYXYU+e(^qu`eSf5Y<< zRR}PwC&%ps9sxM17p`9cpA2{#PYdunfT7q|ACCtQ;Fv^oK^&w4_`Vf?s5Kt|mL(~b zumJW(cO`%+39t%}892dh=;cER@InFpl8j9==vM&!`zr+m03N`z4m^hdhhh6&37#y# z+jyv3?*QJz274#yR&F52Ex|)#TnhLTo-2ru0LNiheFu0xU?X<TMzo*_@M}CIt#1H( zz!)}zPDXM3P~<~6pf(eg1MUSlcNiK0d?Dbj;T-oC@DBig9FFoSBME_NSr`ZCv+01V z@Q}Fb0bB794?)hP-~>nEp-LA5R^d4d{u;p7@U#No3HTnKA1EKd20Rk*Lx4|>faMN+ z^9U=)(Z8^A2Z0vAL-6vs_W%=M*f^!I2*BNVNGf%J=JASn2@afqZUR5SU-1aQuK-p~ zL}6$Q!9OQ++)~in0nbe3xDCMJgG7swzZES@p3ZTv&rtg2EkL)KO3T~<*Wjr`f(?Lg z;~`1x1e}lk{8`W!0$#>Ll_MBXprj=@ayI@!PDscMco+{!s|oPAIq=>B-vMa-3V$f# z8^Eh7?l%`Y01wgg0d06D1Fr)77!MUz54gNg!Ak*M^Ej>$JgN6_+#EcV_Hn?rVw`9I zzYN%KzS4kHK>Fehc&yw~Aj1}N+y`LF0{jF|3-Eftk&6`E4A`vV^rfZAa3e_IA$SiD zhtbY0fwb_@2ww--goi3Y@B_FL62MarXucl<6F9+R_gj^|`~iWZ%TPN+90v?ouC$Ec zbv!iX+5xjxph75s;2b<ONDBcE;-SK5v-T|>>ZTt6O)EL>8>FoSJc;KD@D{+?7C7pF 
zms)|$EJuOJv;gp(RZ0tX0)F=Yb#=ZmRh4lZKZi)rV7qUuOLNl~{UI~P7Bej7c!|r| zm9fQ~TeW7B1$k+_z$h=Q+?ZVFwY4sp>Ge6pc$1P)khZcyttP|<<)yj0W2pEC0Ygz_ z%g^^dhp~CLAD{2@JHPYX=RD8vJooH(Uho0CA^u@Fxr?nAJp~_S(E5TPJdEjz6L1=< zM$f>+9&8PMxVFW{6vM}&BquRJ_zD)$s6&V&m`-kk>-XB}ErkbK?HzFt_I<{^ON>}~ zzz(7guKSz{9{v(od(h5M9lVQaBbtZ3U)UKH8xGMw@$Vnv{_mopL3G1gn68YHyc0NV zwU|oSj>Le$xV6N8J-bV$;9qU*s`!)e+?RI$7c<-K7mFpn7ALWdtJ(h>DZc!KRdRA4 ztUkssI9goaN%o>k;k%gDI|2X4G*IVj^l=i57^jP=#&mat;OTB=0{<Xf{*B!ag77t8 zhhZ@CEhi8ZgrmKVGlL$3?|tVuE1qSF;NK@$d-Mgk<s{jU4#Tz|$Yiwm@y`q#{Rwn_ zvA-$evOeq2fi;+Bx)Bzh<|2uYlp?aukQ6jl!9THPbQ0dfqUd>;ch+9<M!4%Y@&G?? zP@Ex5oesl)eka@Ur{FA>UP|cnvk{Su;%-dw&G6L$)?bav0fJ>rsrdFEHe;*dO<&Kz ziu0^D9f{8;=?Gl}9~t3l&^fRiQ=b*^3Z{MHIy^aM?}+c==8JZjDlT%Xyl{!k=Hz17 zg6ZTaTzA<fT?xE|O%XE=w~pI^itZJzj;EkCu;?1oiWcu&=K%vP=1kbU5szcqKf2+L zX=Xh_P#AFy)8mwLlg|Ic(BfM&Bo}%+jAQ8;f=}P3@P@w#)?xCCU-`NdE}iAeh{=IX zm^Px1;Ez}mKkvJo$S8kwvfdj!j>QNP_3ojAOcpo!eo<9=#fYk=D@Hux`$ZM66(g!p zEiJ0HEG_2VP3uUyD*c>oP&&3{U<IihywmqLL6vpo7ghF?7KePT`f0|T^@}fK8l0%| zq~b+Y_@u?wxo2#nm4?cAIv@`CTBW=jR`Wt|q9^-Bx7Ljq5+Q>pi%b@u&r}<aJj~!i zJJ{|z9odWRQ%9MuhZo!V-5h^%u{}6KJFwWUze+rPFZ7qXL7VScXlIm7_RPKHW`$Pr zKp*75ph=pP88?oX?S;Hb&k3+@_~TDR-TWolCTN_}_@<~kv?0V}CkMi&-ZYwK6Em$Q zVLD8w={2X!uSOk>n<;ZA-BrNje+YT`UZGd)m8Cm=-P`Vky?U?F>#5!A?#tL;nDuGQ x&CC68+e4chdJ?()^xYqSD(03iEqJrKs-WhLs(0e`F*o)=#Pa=a;Sw^G&wU6cQG);g delta 28499 zcmeIbd016d`#-$*hQkbp>A+z=;6V{k5gbs&K(RK8it~hug7buWFmnPuP~w&%mxfuX zIi#hjrR9(rqzNjisW}fgJhc@~N>d!_yq|mTjrKg>=X<^HdtL8!z5o2UT7K?zuY1kY zz3#R4>C6F#W9uBsI`Q@ICU-6Pe016K+jf6%Z*+5dj@>T9|HtY9+p87qzx^Wxv$xj* zYv)eceiwM#&bIwag`cU(=W3XvVBZNj<0(z<KmIbiaa``34&07OXNDpuNC~dzA~;vU zh2yTmyG@NTuQl`Ln>lj=$Juk7<5JdzZyy`NbDb!-jOR9F0t<Pr7SUe()$`nBp5r_p za9l9EF2w5UIc}}+Zg?Q8=bhS(xB@{>ILZcz{`1eoaoH_r<c_k8;<%OV5Q3OoBHRtu zQV44uhg5yx3k3e-FNNbOTIQ*-l!Bl@oErpMvIU5RVsd@q>-axnrL>$eX7aQgj!Usa zIEt;5XX(!4LNve3-rotO6D8fICXCrf6>RLyaq=xa$7PhJnd8J#F<UG(R6GYW$DiZ& zQsOp4*)@)<IapI8mSmZo#1f0ybC77gYm#)$5_qKbRFr;CxRfCs$tdmeTz}wXx^zU| 
z=!GPuYO%P&5+dnr@g#RVHHlBxr(H<5Ri?*4IIY8T)=?M8d$IvKQ$`XpMc%i%K(w@H zT(^W<86{)lWM5Z~OO<MhD-dRah<<sceDZIbO_U&mG7AZ(<PGR*^2_f8ssyXl`8Ayn zZiXT?&&;Pv=Ti6BqdMu*k8(3V1x4wkEHj(VSWveF@p5eGF@K46&u%Y5rN2RM+Dpge zZ{5`tQ%et^EuR%5-}+R&_`F*}g>+7K)UrsE4yH@zP(yjIf#i=<(sNKslpN*GeFi3K zAf4B*sFZ*E3#BjZSu0AXQ}<BE%LS9w{3&A0-zb7S4I?jRemzwxX8zrU_o?JBvwauU zq?$F>=p0ol1v#tPh5lQ1g~+Z~X;zG_XVR4!f%@ILAOw$1?O<nd=^1kk%$a}mue0kZ z-BeoeelQJ%EOSb_bSPaKmY6Qh1UXksmu{wxOdXLrJT+^0<471VkcL5A0@~#F(m^f7 zl6mHA`8{XUwX}t6SGL<RJh9<2#}!xTi;7Uq?`Sa9&Ce*+EqY2a{mtXW(hT!p`I^#e z<dxu+&da^Ln5R>-fcKTi`GE;ER?OLG$o7aRpE9saCtrbcW@DXN3w<uL_no3WiXkhV zFRsXUvl%8_W<NPac0ZcVbHzt2P6?-MhDVn;ZV%-zS`S4Xq;c#?#WfBPODCHP#f5ii za$#1C2*c=0I3$*I%M+z138xZ{*bH|ru~(g&2wz=dlbnMc9iN~G9iOqcokI!|kSa>` zya9Pv%kIx?wuDnurM$G#F5~6-y)c0u*=)9|G;`i~<ZUwyRU!<)NT`-q(I750TvH>T zd}_1p{S(4ALt9m-nuPLIp^K`}77}`>2zf%NwOBgfBuQneQa`IwZ$e5eb>9T$KB{t6 zx2+t|X1I5e4Rh&K(EFlN>M}@YNL7d)iU?^^7PWtW^I%armR1ttBJzllMxyAHY@4A( zBL`cLqKrX}KI+Cyo1uxCX0BR-AD`H4hz0(^3s^Z`EF6=m)!B|hH=`)sLEQ&J9An33 zh(zyU!l!=A@?1j-ewN30)3C`8fJ`9Cl=g748Dwe~={zP<evMds$>N%BcSV%`zI}qa zwbZ~vFm(?tkTehVPK~p{3;g1W4Cyef9KI--Sh~Qd->n~uvEh{}{VX?NsZtg?By5Kn zRDi@H5eZS`e>FhfS|w=mZX|yM@(`6p>Dt(|4$Uk9`rWR?|3c-vh}MGw0-EIiss*f6 z0xV8r(@L69peUV?voKiH+4{$0lvpXCC^gXfP%s^tj<t4<$ZtN@niQc*{RJshfXdXD zf*X^v9u}y4F9ppgIZ`3P<`6^?sG^ZnKrgi%0Tj7T4~~9!wfr2L78YqYDwuw^-Pq!a zi2Q$vlEs`UmJCNzVkr=%$!1ZMsxWs<v;d4e$UNT*4929v27^hHAEZs1B6A)%Hp7N< z>@D}O0vkN&)dT1Xs$sr!Mrn(fKd}ZxWR#8<>b#}1RDhmp0X)*JKM1M%-Q748A^%oJ zY1`N&p8J*-zJw#`(%CfW2b<xIb6C1MBzq%7I+^bZA!m@H^^PExE;VOkKOftz#60k@ zZhJJ(MP<roA2+sgoZ8CT5_|*GQY^Kq`@wV8%Ok9Sni;EmsllFF8r`q~GHHvOiCB6v zW1eM52c@%nZ~w({Y0`<3$>u1rB+YEH8LpmH#+kA`J$;0F<|utQ4w2E!x$`o`(s|}g z42N8EriF%`bXM+y?G{arfr5_12+EhoS_@ujqIUEzNQ6deQ>0Mmp*E4X8UcMoBhir> z=$En9SHjg8x3m}!v3xYfsP$;<)t517s-iDXWR&EZQ)GwdjZJfzik(5*lT7vAhGeM! 
zQ<N5Ygci9=86adiLJ}&{tw(Iw;nStRN;8GAe{9DT6d(VTQJP9z#s3%U>Di(n0=<FA zRg^j<-jJ$aaR_P3kw_*3O_-PgO(HI%)SU`|_QigP(vf<B#+__7$!lpV!CXnfq{Sjq zgUu#N_Q7^Wo)>>($Lhmqdx30{`@2S-%l8s1D?L5OKCZOc@`VQn#mXa|w^RL?yH{KH z-@>pSa()n&=Z3M~UXj`M!Ia5)>ds6Wr0()(ii{C5b>E?TTINT{<u9V4M(PrifdU4b zpe~jrRMdq*)#)Z35>*}GC&Wd4v5?A#b?l5+YmXSj|AulcG0M9`nUA+wcoNDwc*n+L zZnp17%2b9_DN62ZV|kADEBUmt#`w!yLY3m#44;65wYpy@i}48!JB<XjwJ@kdnmNT{ zkjIlGhLx*4B9x8yX(kMB!QS-=4et}qa~40Tuo~GH>wKW*<~zwLF%T}X!{il$*-0N? z=exn+JIm=2?4Hj=_bDMSn(1jU%k`b^{h$TLz-~Q_A(MXhdHH4w_N(v6&?T5>`LFjx zwsXYdBl&^y6f~#Sz)yo%u7MSN(Ht`rVJ5jQ0Shfk$;k^_gR&7-g>+Hxr4YhqL{-0V zk~sC$7qK05G0=`J9@Lai5F7cp*KLvK1W_i`5Aw7Zq$nM(^OjFTN*?-xnNY!I`5E*- z=y~p7iqg)KE-{m>@(V98Wb&NMwWoH_-F}72Sdnio&2&OuB_R#H5qPtm5?G@JI_?QX zj&-vVj$jZBLW*LEFE3iZj}xtxPNMCY#Tm`H2Eui}Ks$s?Z^(2j746cc>WorDw`3m5 zXN6eVptD{NOJ^PZll))lh;v5rhA1p@7M=VP6-IJz`U+d>Z}iyefgF-9%;+qZ>f*fF zm;TX0eG<Fs9}wV$9j`{MjVN8TKJlIJW3BX*4<s>9V}SA6adpcrHH0WB9G%e?DSYh3 zQjM|EBatDB8z<T#S3-{By6gANFzS)O?Jg8p+MNRD9%tpou0m%ocG(zY{1F4Xarh1a zeR~=TEU)j(^Z~5`y*p#}=G3C~UXkaJU^H3@<9w=-4Gu^aS{Ye+KsUj@GrJnlLHOLC z1q8-~xFsun>5VQIrL)w#Z+9a7t<Fsz<IhF~n)zq!%|OYiDus%BD3Ses(3iCcYUMs2 zlSmt0e;U|?pss?sGusxlNZ8e##RpIK4d{+7M>?E=^=mvf4NRCik?jr+5&EaFv%#-9 z?nE~7I$vfE$@gwgbW<qcT7o-$*p`qr?sI%za2$QupwL!b+KFhX2Ad_?+VHm}Qp|zQ zbyTc;qJt)TqAkx^Z-j}HP7O}C^R0&3HN_7Dzzak!2vNeqsQaufs}60Md>l%Rmenwj z*5kI~NBPl;GJp)E^F5^+8aJvAc0x`-RWJ>tb7&oXI!g#k6><aEtgtY_TE)u4;@tnj zcCMwj#EYE?JKQ$<C1*EwRcp2F_E736msjDL>qOl&-`<+zge3+GF_ctZKGBbzFhvLj z3G9KXr%>d{x`g);ocpmA;qk)Hec6%lXkmUl`!jr?`y1GVwCo)8Y;Z&Z^N$RP4SvBz zZgmr_wf7V5m82R|tqnYulh3eNOMls+7RTh4Em(GBg2z`~X$!4V`lh50`#3TqIR~4S zmU2CgTa-`>=yxvGXS_vceJ12zkv*aHmf@5|gXM($8&q2?*`UfEvgoGG3O>X65{(^_ zfK?0%rM}3DF$f(oKSAse;sK^5U!F7otP))ux<zi*pT^%C&ho&1MC1=b+{dKFAln{d z@gvKtq(g=vP+7-av3^++WOwo>Rm}<zCD*amPH!aV>(v2lCy(sQPB-ln(WVtPgoI1j z%ZeZAS5zQnAK8d1HBO!lZtT@&ttR%y!9Alio8N~m5^F+@-0?T;xg`#?1o_I}Vqw!( zw=Y3T#=5|s(hup<8B+BtuyR7m54wuQE~BK`UPmM$FT*M-9jr6Z!N$}r;RwbNon7Ry 
z>{_#MHaRLTJ`1xFy&yHn+o&DT9}cOAQ!QUon?h$jVXN~Ir5;gKR~OlIj~$Q7GK~Hm z?M~8daq3XFv|oQ`uQYGs_sa>J&0?Q$sV-5>(HTyXKIMMu1e@Kwb%75`jWR}9lPCM| zoPd?Bc@wPn*cmwaJ5%GzufrZAkH<=_il%C!e@8$7gO`(+zYx8MsPe{PXj^$CQbNzU z>_tRQ4uOcxVEYa=D$y0TLeEM=xdQUs@`mTFA(}90pz`n7GiA#_jRoC;ldpoMsQz*m zN`u|S5iN2A%nV642dYFyiEb`Hlnhw_$-OWBm4CWTO+o4(1{g)iri5qWpE`f3!8$L> zoPsSD`~hIq$dmAI8Le|!P@=7e;zk}+6`9wDu@lk3O|PJ!Hp9s8s8e*+Xav$k1}hs# zf1G*gL@e*!!@Ocz6)c9RqIPD`;VMId9IY1F=;(2KNUav7BJ*O@3-#|$DV1toYXz(} zy6Y5O(sfkw&~5P|Uy->?mEVfMD%~P5o$a}bD&0I#m{SyC2(M;49Opyb0L_zUGfX^y zbzXNDf@%}q8A__U&Cnfu3<jx!`lc;B*5tzw;__Wox$hxoGvt2Dar-DQjB1;~;Q$Mc z-P`Vas6U(~6tVY$k}Z%#r+6sGSW)`XW>^Qd)Wbe?50x!l`Z?Y1guH;cwrK8PMtX9= z&W<hIcwu7bPO;?&yywRKbR@J>s&QH#mdzftYCUkW!qh3uWAf8qZ8i)*I<&dy_ucod z>xGSbh$2FRhKByRmm;1J@v4h~NH0Z%CLf9H`jyRzd&P6)NGh+s_yU$an=aLj9f|87 zwkeX<1RJakrljQ6i9Dw-y-U$@9-}#@MzKz<qq1h3m54<<l=KX3p%93Y?;lv;W?-;o zNFhzYNtf&&JW4TIaaNN~BIq1~>U7Wo>*m?2Zoh#n_YOvU`K?eYR5k2b$ly*hR?*ro zWeLUysu>FPqN*7s?)F3$ZYfFchCkhEe#X_AA^)iw+CWNz&G2A1O@)x<&6r<&e8@b^ zJ(2Wu<xFUKIh;$}+j_Dw@lDz+ehLXPpA^@^%9tuONPkEdtWRuM0wS@_SaZ#J7#bO7 z3pSuV%c-y)(QJQwP{12XMjM^bt;#V4iz23gd~pY>k53d<?O@RfZ9880ng*EEU^7&K zqI5)?VHZf$%(K{*?`C~w!wfzqrzi_|DV0G?sFAPNviB3(`mg&6BhFtPabyO9n)3=G zv*~4i#yC;Bjkb9Y8dIutA)QvbKQJr5W<G7k+kM`^u?1~j8}|YBDO#ePp-+7WGf<tq zXsz(1qz)=alezy0U*8z+tAuO1=LLDtKV%AC$Ut4{i~wkf<DPIVwrx{3ux*4ebQ_z~ zwnsWnJ!p%4WL-`;0$WlX?HV@2j$J5CiLTNO-HA!k+-7(WoK6li15uGJ;3^i~)V*i~ z<w%jYi+Oa37YhGoUAi<2NP+;AUF=^r!&^JmdbO}7U-D*)x{Pd6hcVkN;gVQ7-!rwe zhXan~2h&Q1yJeINw;%gpi&&bx>M^tLIx1xQpVY$C-+9#5((h8by=D>e-N_bp&E%i5 z(_K3Wt%ozOv=)NPFn|y1mu3=v9?J65)(K-Sv4C!Eg#}MpX1CTIdZ0lpU1$>ys<*jf zlPlFZQ4Uz{;>4tCjG~^G>2RuxfS_3N76vr}b-qv7zHYCES`cQ9RK}3?+28q*Rl4^u zo@k-485Zwgp|6I9PJY1{h^bb0#`^4ezORUd)KsO*bYeqa?I0}tgROWqS_o-im9M6? 
z+O%Drn7>d?boP->*bH+t<_`+9S1=7`EQ_4{K5O&ZUg0|jcK@{yj|vlYX#XkrCuIL4 z7APJR)|_M4#Kl7XU^XG$T%i9Q7G;|=j;qq!RFm|i8mi%WQUz8Mquyrs=*?;ng1#6N zi+iMVeZ+CRzW5$I2cTAPP*VOh()QBTzC?Eu<_xK;Q;F`XLb#RaE)a1J8fgD-Y_<}V z-uhHG-yr53)$dO0;p90swWLTKpU!-{oA~yuZTIlzSe<NDbyv}*lOIz4PE<JeY|v`q z=A5I#h4g;N3cCCGI@S~avW1RJaxVnQ0gu?H-P;;A2BJ0cefQEbC_6)0tJ1v_$R2bb zA{0Gjy?T5o{PKX^?J*Wge!q-papup|r&1KP;LR-RQ3=~D2K5n(pS1&y^KzemNDNRV zJW0C4Klm+vW<O*^6gaCa5y9UTqJ_km6Y`&Dlnij*Q@p>ZUK}hr`Jze>rl3;Yo<9H> zXMxte+2Byt7Y2bPNhBbDPQs;g2On^tgiCevNxZ{r^%ieY(kp6{)e+^@$;D^bl%7Um z(i!$v&rqSq8Mdiss<7)JyVEmH`0{ra+^e;){ZH1rSG=iilX_Ug9GBc@VXV_)>25Q` zYzFC``zQOjSA=k(lKs%Dm9V*vJ?hm(IP{Qp?A_5b^AmKx`x0~wEr{`(SW)lr_9NlN z=3p~;AYJqd)&)P@kJO_JYzA-QSz$>nv8!{GO{ZYYxkSVHk5Aa;-u(+Iexg?DAyP$j z@|UV_u<F~O`o^oi_f%h=>RYV(W~;tAiceCSUP)+z%2=y5nPUaYpv>ex;D4u#;S+KQ zjzesFW}w&U+f;0AwmZnipV-CBSl35rXG<U&y!3bG)u)ND|0HYIC&p>+M$$h%`h!jA zV+xx~eL*#k1{c*1g(V)N(v60oq2)%lxlcFYt&{A2pVtJB%Pg^P2mTV9(YHg$sLyD5 zNZNqdh(!kHSHMwplF$vTx^JW~avi(hw|%pYb<{k^<Tr;?@G*IohAvP`;m0a(8g~22 zP}Z|wVZi}XcccpzBNUmPpydUxD1~y8jzNVXM%xjHRcdHS`Ifrd)?;)nKuNK=42A!6 zGW<^|gVK=G2*ZMV22ck(h#eJEu$_dy;v^<rn{iLHCJ&<QEMbV)i_-S;RddtRDd0#d ztaLU*J1>^qKeZKIyzu%WTCA#cQ^|vFNn0G51G4y%zW5CFlH|S%vkMldkJ-uoeok*I zp2Z)t-}-ls6+b3{^Ku&5(Mud{u-LhGQUqG6A@G|Of9S_-@PO9CzF9}bcmEI|??z{! 
z(tA|ar^MO-R#~^=TNeFZQnzGJ939}zZ(5}rjy0Z&rv^M$t^rTT2Nkm=M(xBk?ACw+ zp=3K7Gq9U5@dn#EuxZF0G%HS?`l5WqEbV0{UjPG*>TXRAy}|AeG!~FyKXt#a9j(o} zYHR|c^kb@YMy|y6n{b4bH(x}Bp}HjrDmR8>OWHO^`#{^Y83wKf7Z%-Lk!Unhplw7_ zuHOog0J_HT^7KWPOq1M6hP{&R=XSP4W(He@Xl>{;>owx60|#li?qDAb3K`r6{r%q( zxCQmI24WT1b(BNkkMcsSfha&KjHViAn##c>F=Rk4b&scsw7ehXUPGDZVE=Zdl*oS; zZT3HzYH7z1OS@tk^${CBI7QI+V`~Nncy~h0MQN!yMF2vn+EvAl4vq~vjEvFZ7XQ6y zQgpFeju30|^J~m&NJK#RbFR_yCQ27DIy}L^Ed7XO4CyM&|A@Uiq?zX~XB1Y)p^K#- zOWfafX4OMl1?3ad`u$*7q#mn*SU6_<Ai1YGGw-2ELNjO9d#Io9eQfDs>0*kE0crPh ziQ$S9dt+#tcR2-EtDKTf&s$vQAcef;#7+#2@pSo6Y36QX&V7u_Tqou-tfdh90qZ!d ziJ*64BZnmhM6RN$%IGMYr6<idTz=q;GVlYoV^}Mpcon-oEWs(ok><#aRV+B`l@>aT z)r5P{e^RYASS60jS=F>!D0fv+IH{ZI?D8R!m69(uu*F&VfjJmnvm$8uHGrK~x<*F( zr6q;*Xkh-shqPEan2MmpZ$(G_=nlFokChn5ZH9N=rx>fX7(YB_9}UlLar%G8$b1n) z__r8|k6HAHp4kKZd2ap~G3Noc`{t;PXzf(bSu$}psDG4-V}_^P`z$mnG=hVz3da|- z^_lOy))0Q~0AWa}XM}+(si^M1s?6Wzh=a<p;+z9JH)4qAVm}J)=Se1?YI)8`mNIgh zvoo0R-5AOCjO^xp!a!%DLJXgb99%-{*65f%+t201Va4>IE6}l4*<IX|bUfEx^1Xqj zx8X2bkUdn`aFbokHtA>3o>%HtEtcqQhT$ui^Qb{Rf^Q-NoUdZU;(L}Ln7(+60kgqo z_+SMU=SO*31^UO@1a&^>DcX+bpTSCd{|1HiXrPVE)tbcTJJ<}9R<JFjnt6UkL3T$_ z>1v!BVwbZUqauVgcbUWJNTJUS)?#$HFykuA9NoNq9aE;JKW!0{zJvPH?kdzD#a5m0 zsLnmTB<nA(&Oy#Z5bP@M79?ylj9_fX=vZOxb#`@hOQB&I^T_GwU!+5xIMo>BsaGfo zv>7KqyUa%BjOHh??{nUB9}k5$qx4lC<9JjS8<X4I6i+H}VuZf<EfO}vS7NMNAP0ef zkTHvG%T4Tf@sc8xpCF$El~!V^YYvM_)L}<V<iGPn<bB{);nfT_99Q{s#iQii@e(tQ zStESAk5!IovMkEvx&NZg)@a!hL9@81SbZTemB`=YVx)@Q9MfHp%2=zhNo>~G+1?Ye zOiDeBVov>|%0>D9^5_f8#svzmUSPrFQezK(OTAJ7UH`nrL;gzjWy6>6iaAv>Mrm3_ z+iX6Dm5ggGtRBerk2AN3eTGHodKj73#Ysm7i*|0`*EyoXhb}>BPIWR_{1Z+|$6@aj z<jNf8K0ZolGH_@6@vV3knA0-gJhUTfXruf+#=F+gH1yfe5wq++eGZ~`lK}grL`-h- zp8K0nF|}qtF^=iY&6{L`qcr<cYW6Phl^Aaq65Wo}(NcHoI5y1OA^9N`NXX^4k?(+_ zmXAiuH^)U`E4ArQgZ-lE4=Wy}>HC3XKbcL;xwSi+IKjoe@MW0$8Mbgjdtqrd+czOs zSoDaQCPoMz=UCdr)vocUq2$?05@qMp?B2ux^CD=Ndh1H)uef2!kQ#9LHm=-eORu!) 
ztiOcykd{6%jbPD}f}Br4J4Ce2r&zB^6Z`JDM^jq4k_nPqAxJY%rYR(C+_!5nTBV+1 z!YRe7d=1+G(k^?C;+^jy6(L=+re(#Ys2=7rIe{O+I!`v?-;tC3G7y|<cTu!`->o#e zA#HJ5T!+-czxe!?jd7P`#C1&VSZxWx;V@vaOCwv2@B~pQPDiZ<m6Lr1=`D6{ve~_O z1sdA!2IL!1HK!@dklzPWLd~1rR0fj^EL6n`HMYb2%a&2tXFp$VvuWa2ruakr&neC0 zKO2gRn)!oW5IzXu7JZ!~$PAEai<^kHgBeLvWc?yHsBm>?MkTMzsoBO<Se|d`zjp1S zKTyQy!G~SjM{aq7RZne#l<!Ur_D97Le{ovt4%Vl3`Cd!xPzNe%*ff(+GK8g0Ya0KC z73;lJi?{}xflS+2JNF*PP&#J2eH7YC(m`o>E!5-Rm)UIH)y&_Y_7T!=pBFaXrAVos zht05Z5mEZ$_t0OctiGre1Zv@gXlj{x?onEtlW0lW=YpWI9R{4Yq|jr5CEd`6R4SfB zL7kU$yYLTh2d%yb@?s;d;<Tj1MY2oY#T7Uu;aBI@$eZ;pbDJK7l0{9A&pG-zs_^XZ zRDIfI*sh1z45bT6m%wuZxuAx|m|~(`u8<gA-0f5-i`IrI`S#May06d%?<44TESOkm z0_92YVaTSvjQZ0t_U-i4WIu2cF2Tfd?^t81w+|q_giFOo^ou_Nm6Sa8H<}plg3geB z&5$0aSN@Ibo#b)9v7{N%#_foR_=}bJG$HE-!T$DSBAY)Wyv@KDsmhZSIdls`$4x>% z9Ynb(9FbeS6x)B4othEl{uB>swAJRVqs(z;qQ_U@G%l#AM_JFA{vlgqklV2g>3baG z%Ko5*>XGFKG3nV%H#z?(TR1bUKzo!o&+M7npDvQ2X32>dh-jcI1C<^xDo2s!yW3tY zuKD(8F(3J&%25{B*Y{u`aW-AwQyllWs}a^AE<Y!uH1bp}){2~lbZe*QTz<@6GD68C z{)Hu@#Qr%{qkJRl+HPvddZ>*HbBs1zV_0*{*i~s~&GE!PI2AFM9}@)YefG+aCxlfM z%=5UJb+Uvt`RY^bj?b%Xwr)78m2_K6*EO+n9T-}p%&uY!EX{kRF2yUB;*BUB?Wt?9 z3PZ_#eiTY>Qk5l&uH>3z<Wyj_F+vbmt*O>p>;zAAJttKR?rB$aghp+h3tA8&e@g66 zOLLlYkGdolT4LokFSDzJR$=jDJ@Q8h6$0CqA0X5U>`49{myI=Q)}?Xm+gaU&5jD*H zr$83^dI+D)I=x;c3^>L-PK|QvoNhhrBtPuCbL#A|ykLKoT{|5d<bXYX2Fgldsrm!Y z#hgc|>mHjyH=DW7NpVU*n9cC*>ukgsqm!%R`RH}F;7qXn?pU0OSD$&^o=XMcIXA$* z6`u337IOpbe@FqR$J|!J-??nw+!*^W@XCd4nH$Yp*|nd|N!|#i_Z!r%QnwwX!0MsE z%5y$e+B&0Tzz%7iIgRcQ#?$pd>NoTd7OnGD5o<dyR@l9U%{v>xisyYD@NI8QlSfD( zhAtZN$fB5%x}-+_mQ6bsZNKbKn{DT!`3}7M8En;BuZ{YatymD*e;alHQlRra<#HvE zCNtga^~L)c98BpV4?6YznJ8-m$vN2!GiG3{3kem-&l%-igLri{bhd`g8C`yjc@)58 z0<P7>$qoBoOy+lxbn%hdGn0~Ezs7MhC*fgMhIBu@@-Ix`w)3vBw+jOKHtf@a@OH~y zM3OgwL^|qZU5C<QvIfcTs$6-(OTkCqEGsm!50|!f_feS2hP?J0c5!JFq5LCeTRO@0 z6UFYI-%OtJ4a+ah3J5xa*+T{qJczLS=0($#?PGUJ*9nEy?CrPBeu2}_LOBhn>}=d* zDp!HB-&t1ucDVP=sWfjb9m&L<f{Fj#2=?slc23t;VOIZ8cb0|t206fD9dfIZ4SlDx 
zF^!ZAn5;{5bE=RgV(v%a+m&p`JCR053M_FX?ri0e&CfIz3|HB`cLD?3R;ojrN6Hx` zkxk*nonn=4!W9-?mf%;o8>c~x;EKbT_1oW9WOf~96U$7(#i?v*Su69$P%zZT8Wyh% z=?CQjOtl;nK*_?4h~rJcRCcYbg}xR`k#e9K0u6uWMHcj~so)Fnlq(^!C~P2sq=S}F zteq=~rs1(>GBrd#cIo+`aMK}>$x5ynK<o51q)!t))p|1w<HPPEMRGgZbJC^3T@I%O zIKiS}al?|a0JWfzhml2B7kI95$TSjfK-s=`jlEh@Acm6zP~CBEFI0!qn52;63m8tr zMd_I+JrI>0A#w>=)?abqZySvJn1}iU8HrBT>%65sGU_srnU)2%{N(G#q;RJS_5svJ zs>0y<908P9@e2Xj`)f9SSxaF~Ia|4`kC4=a)h!DYdQW1Gtcfu6(9T$PlrQ-FJPHJ> z;v<ZF^lPjAn3LECF69TIR=0N{uv0QjLbzc(CVv)+Mkvj)<CoYKRpkF#`WnBF)H5*$ z(+L;0>L~evdL82d%@K0_8z?qO%CYa5JQ=YIlafpGcuPw$=Qs7*EeOY@T#Bm>qM0uT zkxK|GTVV`diX<rSY-mHGHF6%j=;~&$FdaUJVPs^NSNQqe_(B~@-pE__QYCej%;~)X zVZtz0^4=w3=}<PdTx$R2&uVR-?L%s%$u$_!su?B$$FFmTbqA^uZZq^BN129fgtS<a z?C>3nS~*C#a*Qop8RyXhyL+nbSV9Hfu_hg0->qyTOdresTsgoidMw0qWYoQnh-%p# zW5eE$j=Dca8D*|?a4#sMP2{06biV_iUKJq#1Nn<F?DO|yoi7!G342^I3tZLI+evu{ zkHh0mmA#eNt%SNySdUd9UC(@i1#K41V%(*LDXC%n*$i$V)1>q1372e!@wqSQyP!!r zu`%#;F59vyCUgYSNq>Agqa-8?lzRWB4B{GV<ouKD;i~t98`Ig+58e~H4`${M(~K*( zk}hO*waHUKHL4U(x3Izw?+8owvdJIyZ<5yo=2JUbdc);5TN>qEKvi)@w6SdajNSaG ztxz<I1+MPr*J>1UF6maQEb1M_9D01|`Xf8KIw<y4EPq&2=6*)UUHl^@-u}Rz%83~~ zF<WWd9%_U`H54J#Hi9Bx(#Jy@&(3RY276jUFXewKU;C8Vt%-CS{~8idG8tOK+N>F+ zm+DZ&Zq-!Y0C~ixY{Qyn&40w&hULM3tabjGz<f_JXF#>&DmUp%YNX9_NOC0$=qb5n z-Oh(=TzH?a<B=x%zNRr?V?6WvI6#<shQ)r|S(vqnP5#&<EWgd({y12eb(?MdxS9WT zythCcebED)5*y>yv}Sicju8%;ng6<-Li{jRvo6-Kp>fA+Yctg1xInAyC(M0)xZs_} z;?}nprfgsn)|);0ZG71y*EX;Z)=vp2!XT&fE1g{>`|k^Lot*xb^z!XA7QUf?89$2< zW_-vxeKuPt8_d4`EF*Zv5j0y2RQ`E*&<X3%)+)yuC@E0s(%xdBTM~o^gIK>UkwV%K zHfPJL!t=rG`z@V>S=Gz71`BC#ve2z<go%S$|E-<d429JQ2fNZ9^$R;(xuPsHHy6>w z>-~~(3JmEmy2(jQ_)*SW$PR3aWq)k-7M!fi;q#S($NOyU=ZV7P0qo-EuLbvm*+wZF zt+ZHyzo@d)$`)C~lD9<*e^s#w+k!e)^rwMgG2lCf>@+-!!^aRb%e=6(V=Mb-fi4yz zf2?jjVR!sd9$bL@I}4r*+27m7vn!rXJI8MCZ&z@59*v|Pp7OUUVU%~Ngh}3_5>fIx zm57tyR|r@6U6qj{FIEXrE>MX~d6r5HmZzvhww$XH<K<y0ktp|7i99)dUV)PPY&k_? 
zNrg0j+NktmqN7y0jObvMhS?1EZk1k3w7p7iCc0rRB`CqV48>ig_YhsH(iKFXQE6Ha z<$`L}bCNv!Rr);9J5;)s=nX0@6TMQU>xo{f(hWoxsx(>NWs6F~j*5pmDh<;<CugZN zUDC;!DqR3`Gbg91o+zRdRXUF7Sd~sB+N9DcMEk0=NVJnmXA=E<j#B$PqU%+9Hqp0K zx{&DeD!rKKnmJ1QOJ(G#Py<#Hy-TIX6TMlbHxvD#O79@LOr`e_U8K?#M9)^~YN98r z^hu(#ReCMa{Z;xrUjUD&dTJ@4lS<1($EkEZ(PouyAlj(Xq<qV6D(yrxr_!E8Kg6>R zi~}RlvPzqXzO2$wM4tp*03mu8ARkf#5{cfU(qyrhx2m*A^jek9Bzn0@4<>rCN@o*2 zPo>8bou|@yMCYpXY@!FNbRp<La)#<zOaUn>T}E^pm0n47luEB9I#{JQ6YZ(eJBYSd z={-a@%w|iz4(aprYFuyW<WsYiNrr1L_hTxjLgjdZQ>k*cshq*!JXSd$svI10-S4TK zC9_$>*QSDbs&AAUZbJABmD5}0M1eD2<s_>d*d^UZsGJy;lL$_Kl@p+HQo!l1avW8T z2u_O1`SW$92IIl$RiLtNs;oS)(p1h*DrYu09aYYE3P)K~z-*y1KU0~sx`1g?nJZK# zEi+*HsLVoQ(m@>GZE3HDVc}+ed-NVLFwiw>2fJ6Wggr6BA1he)p6hJ-r<<G_pV;q= z+xt0Ra0$mpQCdtZ3|YwSZ9moFrFCcc=mCuKrVW-(-0I@OH9-WsL|61#V<>$B<UXIg zCGHDUC{UpgfP&F6V+wI>k$+?BS3eoT*FqMvaZqai)B&k1>zh{!{)A0QJ7+EeS(gF} z#h;%dmgN2$;Pz4!4y->^Rvn;j9{d+o|7r41miw=ziYV4hC(@v2U^0qQ*;=ZYbTlJJ z*M2k%WJByNKDglURGw_t+KYDIi%BeSe^X<Yn%DrqcZkW+S8q1MZ?Ce!`-44lKY^Ny z9oY$Q@{D)^v%jzVC~RmymEdy$N!Mo-yT0G-4^G`_IeE2WMKMF#5z-63V8P#xOHAF2 z=0GDUsP>kk!IyT2{R+Anlq#H-Y=(Ecp|mBsRZ#x{(k}MRx9v<P&_#5s&<|Xjp&LXp zq+0ls6o1=p%<Di?VL%e=a3DhXY6}~FAW(dt(!@U8NvyQliKW4K_ynzOdzzB$9y|<6 zmvG~>M)4z@zQbCk1~Mh^DjmsW{m1OP106yXQ&giyJnlU-kc-l&t~NvSG#0Wcs34^u zJ{nZ|b0BP0xNFj_dlOmF6QVS;m@3H`(jOVpZMpsfvf1JB?o37DZO|9fu>j*M33{T< zaD|GXbfLb8QTLS^yb++3J)FWwW1B+(RGadyibU-QdKxMnM|*`LRhyxK_$2VUDzJG3 z`6=8%2|wQ$K1dB8|04XijTC=RV|aTIuyZ5gzYsvp8G#aotK#^4woy)>MpTJz52Wfm zQTgpe;8tJv2?#v2+eFb})*79Wbj^Z;C(3rK3l++}ltQF>o8e3s*0CbpJzSAIVQFJC zEa<|@DtwzcX&fKif>9ilg04#NRDhmJyI<HqMef>#9jk~CnoVZ+D_VOb!eZ7q{SQg3 zWu++Gkl5_X9v&;Cmm%R2`>ArapqE&W@53W9alus=F6l&IiF+6F`N-#%yqK4{OW51r zM~(e#DyHW|vG`QJqkMQ0=q%CtwD$F`D9I#fKD67Sh?hit+Ii)>j|_Sm(IHi-B#NFt zt%L5_YApL0BFE$->qyUs5z6|kcGkDV*$s}|c?k<X7!(tZ%#}G2{EDJ=A1T2}!LIIS z*{LzWP7T;m!bTr#8(4{O?MAMmE;(Jl9oMY*OjO?3mVI$BS}6H~T{+m+ZS;1e3LRV0 zl`UkUhgvjmfoS20vV#)a55<?+V#Tm<yV8L=51)wLgb{w(7G^orvtW`B%5qF@V<0V8 
zUvw1P83J2u;c$s*Uz_?U&C#h)U$hf!d3m`q*73S53+rozGR}2&knAwTx4Yz4n=ZNO z^~(?91c_=;V)GFTnzeIE(Ib5k?aDU8^UfR>KF<uB#1%}WtZppjhX`TaXDs)Jc>7O` zFc+@+p=o3+2po*`MPJg)upO)mR^QRmhgW22l3hviUq2*=9ZlA59P~xKlv#xL#v)#C zny3;#e9BS}2MF=WEbDN4Vfv?R#o=(_Y$vwo@Cf0}cxJ2$7joiRQdN}j@n$xrDl*_y zN3_NH+jmh?*k3Yk*MUGfIh)z~syBs8o0$8N2;r3^*1LMY@B~LD_=b06t&bcR?sZ`N z(HP<UM%MCZfPc3R%8|j<4o^?hlKw8(Q-{@vOJpOCwhjCNhZOC`;37!ezwLQKJJA>I z+2==t`y5PEn{MZGoF(R&MZ8<zfbQ7?(c7~jNN)h~sh+!!GR^EOl@3a=xTY4)=K}Rb z@eq-FC|Lwk&u1@UfyaV`Yol2EW2x~R0<0#eqzfC%aQmf6auDX-f0!h*HjqhD@C|2& zkD1!dco9o}3nUU!ZIX+^(OhJcoUU@^^p}EX3&|$grY2BuS;#VL{0+tHk;{A-K1E!6 zYnH0@Y+g-ZK^GI=2a>7kQJN9g;4v_teG`WEr?q8CdukOjudQuMT|{d+29`no97itP zS-c5Q8&xgRYNUlC`N&gV1jtXJdSe$QqXdQsJ`l|sIpSbV@u>wF%5K0uo!M0Q>1(#I zuo-KZ87f52#sxsvCf!D%L6;)xzZw7Mf}w5iP6e>W2ePu``d&YE{pSTht6k`=Qp3_k zbOCVq-!1?)L-7CR0-(4nyLbGb7XXJAyvVZ80_6gr>xogOGTeVyuj}jZIq*?2rxMQ# zD$;W*M7!!&^<Nh`?{H&>PBgfgi>Mj##tbif*mtuG?8?cO!WAoS2ign!P3+a54hv@% z;c8%<{Y-3wJKs2!%L~>OxE3%8zb|C`86Tmjm2wI2<wBamomwfE08124a4Wh55Zc{h zAD(%GFJSI-gYCCrMI!q@pT@+w@xuIIGX4v3+u4^t#|pxBR{L|4z;_>GeE%2Q|C7Ni z<g7_Z=)_(*n<O+H1PlMgz-Y(+ZrzXmoaLQsF4(s2{NS7u@1C0fqKS@M*pBmN_h4Vt zoGdY3vM!%pKOYt|I0m~xnmLh}*nloHS7s_%@m&zR4yHTJia)L#R(wB}dZB&%Qj0om za)FxZbr~iXEcvsNl5awLm^t;|Ecx<#m^OZ_;)1=f)`uOw5T5wg%*L3qx0;x!4pcWP zAVzAi{1hTIH&4A39XXSQT@2UP&n1})>XnwyJFNG`&dsfNqpVN<#y6n7UWnIqL4jH( zxu0Bt0yPzL=;Oc3E7;KuVea}DLh^?**rN?$Li<U~e`9EeZ#K~wZDE{+eW5gR6k?)X zENx*%l-DAN?yRE7!$=NB?40vr<ujdJw24)23}Mnn*O;M-m=>q!KjWnQdz{u3Co0BK zlgIO%6Y^6ip_B*S^PiCK!Sjziu71LNKWQo8Hx$x7NfBzx*_)pXX#Z^pH3dDp#KW$d zOp1_Y&ZE!1XJbmIs8bp{clrbjm<rK)gcm!w_b*}HH%$;)_hnygnl0QnvsRmvg+tTW z)XnL_7&F_uIZ|-w!)iB+!v0J^JLdLdpo5>h=NT@I5(}ew;o=yUyYRIpzxTpYkT|7r zDcD1n?O3@P7}`>>YceZb>c?&^93k9DW@(F>2;U~}oUkZ^cWZ?UBbtH!XE8G>tzFF5 zrxW8ru}%MWfDL!^iGKPbPZ0XAJ@8uzW%4Lw&YBfXw4aFuovkQ3B(OX-ta!1#a|25C zYjGhj{I!Tpc*`t|o5tRG%P9OA%z~0TI7Q>G2A>)Pv#jJ0r@wGJgGX4wY+-V^J-v)& zZ5Cg(|KlMDzmj!A_NzPhmAo$qeY&tssUaS6{^%Dq5k{jXt=O82E*=N5c+4Cr<~+eT 
zIZ?!RUknxSRPWryreRgsgs~m{2}Lp35%;uy@)0bS+TM0`6bru8)+2~wPY`o1Aa=7N zHu6%Wux1E*^HP+_PmTQ-=BJw-U5xw5{x74)j$+3znWG%Qp-FKyyD{Oqo*3n=liahd zNVvI}^Aq;M$yOG4Il{0XLL|KWg|Hl+&H7w!oh?9^?p|==(nD(Vi*g#W&P#CP^4pa5 zc9vhkwpgb6@h&kV!E@hQJ+<6H=tPgt2c2s2)RvauwwI0wvd=<jcz9OPLvs5CvH_l0 z4-M?b<tSm!BIb4_Sa|NsqOZh+t}j$9zK+u2+h0R5LEpcDnE1Q~`h?FSHuFl)0uwQ; zb5V+LoL3q%{RArlwmR3e((!y6eZOC4XX~nE{u{QPZpHWVb4eb*Z2{3oU9nfXseT{$ z>+tE;aYg8r!w3>f67a%dD3<CTF~RlXp7m18eeyb~rTDmxy>c~12wcb}Up1K&ix8GY z7(y#7GsPTwihrvRXQ~qWlU7}2A=`R&obclUX1F#kENl2nja$ybDt@poSCj^%&@aWv z4?43CuEl%%7h|e8K(LC7rdE5F#jah8b2|d@(vcC^um@$apxWs|tv7qSHb_YDVq0oM zeS(S*%LOvZvn)uxH;i4b%@!6fWSy@E3X>PIq1R19UvD<=`bhV`hQ1iBONKJ}da!US zi8<T|^{gLHD&b$L`sGz(QrkP@S(_VChS{@dt-zU4Jx!O2&hnLMZ2FD(&eI`IA3<st z_qsmtql43_CDP<gA0u9gZt`eUB4;gi=^Jmz%ho7bexCef2)llxZE|Pq91!|-9HmrY z!KoOFW?NbDKFT+#?}7tVp;MaBYE|gqIM(lGypXV(y>&BA7(R~ez1du7+L7J5nHF?+ zEZrzMQTu&Pk&*P=x*16M^jOyQR-cg>DpOx1{*BNa{9J;$8b4Q1?bNdbKXIcQh3IJ= zurgZm>C4R}AK7dN1SAF>!2!SRAYdP&7D1{5D10Kqd(tlhoDfTW9c0J(?7^*w=HGcJ zqtOGqmArdUV}H_eeIb#?q76T8g2*+Atn<HuU1to0QGxs$6WN%5#R$(6*|LAVpS$on zy{=N9sb;2?zJ^~y!UfhOaFvVJWKp^z#^7voN&j{w3TQKIc16v<qbD4tgK#v&FBahA zpY>qUV^>U$*78u~q`mFx4N~G*MR@?*{7YoS`Wo6(Y?u=()kGHr)m@Se5V9uf@?GV+ z0qp)S#)7{vo0WUuYWkG_F?@9c-WZN+7*YBeg?qDBEu6lnKUQThKF6x5sG=Xb(V`#* z*~G`!MB53`ir;RClot)4^3YcTaydxlls5y3;+G2`pfB=3Do7kpvyoQwT3eV>b$!ue zbX`KlZ30CL%^)2v<ofGu<F7w<8H>ENt>r;Kn{5xB<?37{-S?A`Fi-niqdiN$S&0PD zl`eag1K%xpAL;sJvCKP$g57=a!JQoyt>*-~+l1~wH~s<gBP6P&Zv`omc1%gCzwoes z6!y?l#DJ;mK87J|Uwdf)-HxYZYR1A;<b=c$vESAXL*I}>;wGm<1f1h4$CmHN&ff|1 zosYIa@h;$d9^3@sLwQ)zSh*b93MW&kpYqD``$Z^HoKhrb<YMh%H0`n3kX<6y&qv>< zSlngre(aW<8v9OrDs1EH%Y~{hyYcnqfNRRiOC|;R&#SD@?Ra6rNmhK@+~TnX$^Mtu zm+uVtAFnT$USVf%#|n2&F#9`Z=2mBnjw;8m6shw^<gt#(J_#s=I)Pe>r2~28y1823 zI*awH3l>IavdMLElg~%c_i0D=p+YHXSYS;y!@r=+VtNi7LQQr^)C`i6dpCuFuWH<= zCGnKm5QO8p(jm6U*&uC^bCiFJ@Gn_BQwFH7sP5p#DO2v)o_XFihM3SAWr!4qAHPxr z;_wSbdyyr+OP<bBfAfJ62fsg7*G?5PvBJBCkat&UzQ#x<ilmI@zSG%{cLS2!Pyb&f 
zg8vaj#d=w;y_hPvBgtb;?nQ*0&--sFpldn#+dTH#y%5($=pKAbdQ?tr$EMv2H_iBO zF)*~Y^`kXnr9Inm&)}XZ54-;&VRrsrrm+aBm3Bvo(`1EuN9iKpW+}gg#BG`OLilAl z<s;A-sL@RStiRMVMgEYL{uU^Vn#SJ#Eg<&Qf5iQdiehQPFEsok9KXA_uAtCF)#JFg zHC(IVHVvyayr|)C8rp}e@k|<a(y*_F<25YOaFvGJG(4bU0e4(u{GwrlhVz=L8BEkL zHbV6$YM7>BmWCD$-_&rWhFdkP*6<e%pDI|uaUM<7grhX<q+xdrXK1)c!(|$7*6?c$ z4{3N(!;2dJqG5xE4rVn!Uqa;1g=&nJ8m4KOso`i1XKA=d!{r*T({PuDH5#7L@Ro-4 z8VZqW0fK>)Ki5uUWN0{A!+9Dm)o`7L`!zhO;YAH)4I4Cc(Yh{F!<HJRXgIVf4+R^4 zi#3Mg!9TtcJO*cENGB7QLS<dtO~t$|n!gzMZn%&8;5#ZKI2?#5VNJE4tK?`E><L$o zkGSgX)C$*j<(1?L({>JhFx9;vg;V{czR+K-O-+rKcKR#S{MZ+izc|g0Jzx1d`L`<X zrTOzTzqjT$X?|EQmA{I=RC%@1X!}f!5ugchnm<tUmpxY_1ZjT!WTR36GMmxg$!DrR zRP}O;HGi1quXw74n>2r><_}l=*io_@RDs!=K#JynL-U(7{~XQFY5wP$zu~bO-=_J` zKjwA^34Bx6-3#6M*Ld@QsiQ_u9%HghGv&+}Gs-f?ls98q&X}1qP5Cn?OdV%x7CCul zvvRYZ&nmCb^8w}W>iJ)Huk_^I1h4JpRabs%!FsSyyME<Tx#Bfl`B&cj%&5zFbwXWx z3Qh7ST-hBp=ck}O&@7Gq@_9bINw8#>ulC`)ccfS}1ag~k+<dstv{+ujIxe`2vBEFU zknJ1CagB23DEyo#T-lxS2w%QsW+UG#7C(dvcdaqLuLI|s>%{rCapZjCJ@Xpn+O~jN z3)d*e>m4|Kt|O;!1B`dgD?jPO8w*I9>XP0PLl5qdmg;Yy?OMI`d+F^F6R{DqF^8K7 zzX#WtgR_f0=hDWRbLs2wLS_$YGzc$lo*-R9G%M%EdvUzUjWgwXmG?97sosqVo`v=D zQ6k4}tt<b;z=sxCvQa1<N{2t*QQ#c;50oaPW+WHT%9#tbe8q+4?&3n*?BqiE9h_$- z?ijK;E)Vg%lma<&F7Yo56p_MlZ@~H2amrsnm=ovO#*_1$;dauc!YR)&+aZ(UqZmB* zvW+Oa#c3SZrrXP?UJe515HG|v7XN|Ban9)+7q2C)X7BJKdur!>2tNeZQ<J#>n(NMS zQ#3!X6j_bq<zvtJw800+KJj{`K`DXk9#Hn-8WUIp`Yhaw1ibAz?>6q7_cGVa^8Wt3 z`9<sY?#*$-(K?Ik%9r@_5i~Hj`SYVEHx@q>det(xt#!x-e_jDDT!6)m3&?fl0@{G? 
zYpC}*=~Y4URFaEBv26^)cc+^C$MCm@|FP!h4NjcF0*66oz@Rh4dsUQ|8TsHBCGsE6 zaRcDI@0K4h@?rk=JZC=|?XHYeXN)%|PTv<Jwcf}_`Wsz1W3C%#?C;7MQ5s`k-;>@I zo_XcI0eqaHu@29t;n#HYI4<rk3Rr&Iz=xNA63qKiT@M8FfiH5dT)=S;;NtF<e-gk) zck=Rb;JlEkSG;?LYo1GXV|qJbmN^G^pVGr0?~92>ldV4{8|p+8uG}+_&m7Vi!~bpc zCtN0C5b|ah&fF$|Gyf4-Z>;q@>09BQ=auc5$<yHJD`cvJ#D#O2@p6m~hpDs>ZiZ5e z-vjw3Q~|#rz8zt&Al^^eMR^y2##S8fA#fh?E^$hd7>`Op@lUxWh&TB)7VO8(9QP9( zjpUy;mwy)YkLDQj8JYv`o4e&_gZR*vjd9Ly;kXaCa@+;2`T?Qnk!7y6&L<r!bb0pK zc9}vPN)!>yM>x@>F$E#l!T{bkr7?x;dpXW$pVlGB`G0L_$UKH?Z0PdI0esYp?ix^q zgB%>XYg-^6NUd-*kZ&3h5a-H;wfTe#<3Hp)>pf~;PGNNsIB8%0TQDE}BAw2cIBq1I z)4lTO5I!`KN=ZX`_+^eOf@^XQ9{kyZ-gt%Mx@vyjfZ>Zp!tkX>o_n@iX8Fkg-dJ9U z?*)SJO{+7FS#KxKyT1#j9x&dc!Y$7=yZn3zZ;GIHiL=LI73s+NTiS5`Z4x+tK8|y3 zT&Vc+=uqC2tLMUI80-CNm0_J35bw^JEU#mYoW+^?=X0hu7S6=yaRIgdCk++a5{I)Y zPjjXVmPz~yVB?<yX_fU@0!=S@Ch*Gsz;9dsA7%UhDBJ&kD%<|bDE*(xcKMQU{%HBw z2>!ftH@=@`2L4OmJ*5fH?04rS@|nDMVYaF$H;(ec?A_i`{4{62?e0abc@MrIO49?) z8k#f=*3hV-uZEr)I%&vh*ziczCuI%KYgnUUg@*ez+@oRvw@YK}&~U4Un>E~^;aUwp z)NrMS%QY<1aIuE7HJq$ru7-{2XDNQ1qWWtA85*W&n5bcthDHrNHFVRE(@_3XZGrO| z;zzxdZKJX2Pip*X4J!!g6t_oX?9gzthHEt})38v(JPikHn4)2vhDHrNHFVOD)3D*8 zTF`o60WEg2#;DcsyoM(=tk$qX!#x^q*04;&LJjjYqzov1{BVwfnHr{O81<0rq68Q< z2B)F?K#g!&!!sIIXt+|tLc(&>B0iYa3%zwk9WmJ74G;Q%LX>;@@b=|d?f78b+Ac~o zpZFAeb+VjvQZds-#WG44^~g+C5!P1a*p%k5`}=nMHoJoJf*Me*VPisF98^Az1spjg zCtw-n2%OyS{}U|W3beoFTEcl+!vAlP3PlAa{O8}+U)2uz?}*dPe-HRy_~joZ@skRS z*nBOX9CrzB7k-TIGLU{7v_JSKfqQZIP6NFk*bH}1PN1WJ8E`8>6Ly9@Itp|Oke>DI zz^_^ox<jSP1K$%E2uJd9z}|4#_=&&FAe`Y_1mj!@PaMz+XU5MpZ2(?}^96ki7#X7S zqku;=x(0Xy2X|utWPtO+)C>sYO=@L20fWQUxP)Eds1RwuO$r|~Z!?GsiihWNBTzas zh6t4o_yJrs=ncTbaA!bQ18q%l*agkuFuV!wA?VG(AK=RHOBU6@M>qhR(V`8&5zW=g zWCO#Z)k2tnXA5G`ln^=(jEm*C@t_HBz||U2GvElEgqMP!4GhA#pl%8VhN8z0fi?kG z!JWiUS9}O0E!rMG9Agihi*suf=y|}m;i&dyz>ClxGr(64(HGjFzu~zI+}j>~26{ix zrvowu9Sj`T5gCIX4}2ev68aG6nuOY;pl-luII6t`xEzjT2zO~T;Zr#3V$V)Ekaj}< zpGTr8@C=5l1)U8X1$PT{9&kFG4B7%b3P*|70LLb)H69Oq2uH0&c<U98y9;^3)hYO& 
zl%Q4C0^?HAefU*X!i`<w2Y++d0*-qV$KnilU?k&qz)=RZuX5ZFQEloh;7&Ly^)6s@ z9DC<MJ`p$^j^wj}hVD3UfNvy(Q@R6K0Y`BOAN0Vufy_f-eg^pX7?hh0qGwP1FFr7B zGJz#<6k#cFoks5m+BBN$g-aW_V92)tPJyG$@_=EzRoVpHo{7GPj12VXqo&yj_<ldM zHuxU`3$EcGs(}m)>aPj~1AiNcT0*EEICT(YK<5D;X!Jwi8-rE;JfJ;J-!$(Dx4~6| z-T`brRBgpr;A}Xm7-8-(==pxwdGkO#gp0$#A)K3~_T_TmoZ;vi@aF+5;HZ{_pNv3a z;L{t~`jHrP_)%R#k8E`SdID#{QDzq4yKq#=<v^Rp=SHFXMxp;#BB3&PuEK2wT??F& zgW7;D1WwFFZ9q>3zBxwCcrozjvACiE|2!~yyjDwK?`ar>;Aa9S<#F6|iVIu@rxXGh zG+mVsE&y>4E*7J=9@uOKKA;301)K^;HOK?%W~$BW1l%<rOEP2#yDv}^$^gcoGULII z1@<q*6a-Bu!%@ce3$c2_Z3e#v$Xn6>)O7YBO5kV;Ed`22YEu)s7DF`x-woImjtZ0p zEQX^*7X#O7G-2LbYC42#7prRn;j|^{LYD^|D`6kOvOQkH{LfmduHl5s;i!)ZYc!g$ zpcLZ+5j@}KxIS>5K=%he$77}}(1gd|!+-^S5-60b;~)<B^nDbBWPr<7p({Zz2mbK^ z$DI$r`u`Bbj1M{Ge|b~}JhTQ=5w#@zay_OQ=v_e526YW5q~CQTek_n4LsP4)1=8=} z#evQSI&Fd_4zvmQD_ja_ZZpSy1t)^u1H4p#e=^{?415AdO&a?t*8DBdOTdT4p4$mW z8SDZc*owskbOrDn9Lb*tdVH?7oF~w@9jyl$!jIsT!U3o4Q0YA24mgTiK!o)Rbr2N+ zUA|OlH{j&G7(7Ubu*W_urC2^QfK9$Z%|M%h`Ec2wX9Hh5tPV^Ocm<BSvKDx-3Wt8k z90K}RtA#KEU#~|0Qz>SX=P3FKLS;Z*4ae;V?Fk$JN9*=r;2Moy3p@u$H9HS%{-aum zSm0qeT8OHF5f`xBBCZ+u6C4fjGr$EG(cKvMg%>eZ9>PO~AbkH4`W^xFTRk;b&}X0t z>Hj%Z2%2yM9PM(1-D^REp8<5dh3*CI1hm4DrnC|m_X`#;@CpBhy9}E9mE&5$tt^00 z9Ed#ffF}G@qxS&4WGp6#;0s)T8#^254ZwgqkVhecfrH>U#KmO}H;4G3$&OD}`ZUN8 zl5O0AkxxiAZ=wkc$hb{Hgk)DHnve{^L=%#|mT1E98cjCa#TrdWmRFJ?B&#aXg!eU? 
zEUVskRX!nEIUzGyA=0$~GCY#ukRlU)14kt#Bs(7Q3CYq%G@;Wyl_n$`81V@^Yc$!x zMyT}eQHgwWez%jzPv;AkVb2rLxb|o~{M9eVqKl4c^c&y9_Tu!?U$GkNmF}fKY6HR} zU;0lTf#36`fA&SlBkjiYYOlhNd>Z|Ncy0I7blx$uNj+vP0`{z{SXaI7<T`m>{kpjI ziCA#3yMw=Z?Vh#erd52Dz{QpKTE(CCNWpkRK;F99>k8K`E^qw-?-Nyvcn#~g^-k+O z*BjTH)<><6Tc5dp@cQia<Jae{pS`|t{o?gy>sPMdJ^cgz9pT+L$M;tA0q%QN<n}86 h`Q}%Lkldm2rmOkB<qKEyW&gnsUBkyYpat>we*pmr!(0FW diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Scripts/pip3.7.exe b/Resources/WPy64-3720/python-3.7.2.amd64/Scripts/pip3.7.exe index 1bcff1a2972519891cc3398d6b4bcced58b6ea40..996a84675b3e96882215d1742360674ae425bf3f 100644 GIT binary patch delta 32096 zcmeFad0Z4n*Dv1FFzmx1GRW?*DGDlz3kWEqgEk6^`-X}EagCx3ibgXe1BsK)JRz0| zY7#a3o|u?N(JX=+;7UwlOpHqQ#xaS}L~)(p_jETT&->>6-TTM=eC|JYlIEN`b!t0R zb?Vfq>h{<R9-n{WQInyOp3EF+eQ{{vs-I{1$9Fsy!_)CUA$;e^%kf8K_)`3nGOUUJ z74X;C=i<Ku{CSo>ey>d5q4IB1A(P?6MMVoKPfq>iXLDTfBOcuPo_7m6XjizHuADFL z!*Q2EdPgBQ4Q64Qp56gG$GLM{c{LlO8Isgj!(~wP4h{EGK46W8i||$WmA^c<RKsz} zZlJT@`9AI%u8w~)IxMiSWqySPEWcjDp8!0UnGYZ3FAbq&NRbL~g$?GrduJgOgir#T z!-x6HxuX=$52OtC2pCW{HxVI$y#t0ZZa_JulHAr^XZYTlCzHK4x_9g91r|~JYKn#v zQVt3Bs<>n?*4sTQ$;fe1Y9PnuI?Qp&f<q`29EMgu4JWyD9Jd#lb1p;8Zyb00;PK-d z-6Q#w6xQ86q31(|NO!2qf2Mnj(v1x5Oc(JRTjt)a_pT8rRJ<jKzo(qb6`OM%Bkt%w zX~_|rrRuK8Kx`7K>n)L@Rvf|Jb?>k7V&A$Otv!(!s!}?UKwfHg$`85r`KeMNT5b}L zSJxxX4=nmk4N}WJmrD@AgC_7Pr=<Y2iNezI2nES4x?*yCatCbHE|Xrklbup8ekrwe zr@|@Ox;V`z=o@7GWsZ2lEFO`x))0;ZE{HACPjdEWsgm7Ml~yVyh$5!Ej_iwEIf-`# zx7%%xLR253H{Hc!(ldIcV3dlEyj_jzUHPlv6>7m|@nDYlr68V^YP%8tNy@YWrJ;dV zv~Qd*+K!frXZ24tNSE)Tl_*{iKQq<QkdV&2CYMhM6Yrr4QXYgOH$=?7A`2v?-YxsG zkl2pa-i=aPn=2kcp=g>Ei$YNn3eqVBQ7Mn06q9~W_B^g6T^k5`xi(My(OE*-IcjAQ zo<>VPhX|!N|67)y26trnq0isz-XFP+a}*@H^3u<*4Abw?*7;~yjYqeHRH-7G#!x|A zR*rZmN1T<MBbEVuK*$llH_bK8G0ip=%szOWo%D$C-iRqFIQop9$gX)rrw+vItghEr zRY4T{X`DLO=Q^}eRIX!k+(N;T8#i6LjVcNv1w{nKv(kk?mh0KG-xp3wTppH!(P)k< zgbY6O#@M}&`T_JKPOY4!oJy+kVJ~?0=N(S=g=cU7W+WBQg2~Tic-zTzUh)1OGc;Uv zv&Adrw97ES$+Emcd%YD`c?13R(B(>Rs)4o^x<a0K)ur-GyqR)3rP*ajaI$q?vHZ15 
z?0K&U>vLYH!w<Jyu7~ojaLFW>>{ip>8nkU6azQr7T!uwZ8K~|?u!>F69x$hzrnZ)v z9U~S>FV4Z_yY6zi8qIN~3z5cU*eoZQQZImg6AfjDp<GG47K!)%31*j}jF_f4rZf@P za)ql@;hI8Rf5=?pl&o5aX^g_OL}5w>li<)j58^)Ri^lA?NRPP;iI>=u-Wk^FHd)@; zF2gxsf~`0%8Lh}GHSHyJY8LBLPMhtyaR~zc826G&bQ#uwGZ#Fl@O4iOrfOqKvlMaD z<r2-*pOfRJLm+mo&om8~&6F+rxX@*IQbniRj-qAZ6zWO6Ps1V~H6nt;a0gkU879gv zm*@v2j$@kDU5%#6rm2GMvQZE(f$vxF3F1wcp%3~UTFCSXEA@%Ao{{!KcVQHSg6DnW zag6nH8Lp6oMA8-I$A#*17T+AV3xar~?G$yN!>|R6raDsIf_O?Q^6r=|&x2lFj~0?T z+t5z3Tpgm{<6nYN)x{*9k#bR(tc;Pdo2nlEjYLQy%^-f;5nko@BmPLGa*`lkC^4t? zu!QRO_!3>K(0v5kK^_sY<v*(tm$3vu!FiRK?XeU|TFH1@Rz;}y!_+{zf>30nCagbV zbeGtM$CURfv8Bf-YQzC9G=S=KgrYmLvNiHlKjgCu9Hu%bN4fp;F%wXxvBYMLim`<L zmC*;3(Nl6p>yZ(vNP>u`24pMsw1!gdPCZEaJxx-wLiMAX>-V^oKuSLf_Sq-}ibW8Y z#tDLWL=Y`;`GW0`5n82S?m<DsGC18!u$Kl1_KJ`x==Le5sitXy<F!-_3QXM{o>Wim z_}(2;x5k6UrS&I{v+j>UBTCXzEn3I;drnXNo>PLY-d(@vQ<@uFF{V?ln;j)yG!VRV z9eoyJd2z<(Iu`QIt|swws`Gj*o@l*)j_oi{8keJ5e-9yfjUge?dPExHK9p9HmU%4) z!?neNyaq`?M>&0geG5#mU4mpCrC9h&O!k3Ok7zffX}EkT7J|dV;4tj-ko!(uMUtT$ z1c!}s7sPukLD$_1E-X+E!xLbnzP$#OkhNim0L7S}cso}-hzjp*yGzsb6x2$BoYroR zGr9~P{RmxnQeKQ7qGF2LDTDWsL5bb6E?>aPnqQIzv0E^j(OI8M%P_UkYHtC9q9kod zxm04c4Ud&8T1w?({9BNmdRCUwaD^W|+lZd^lzSHAE?Q2pOieN3?<sWukb<T@#=$AU zZi&m1E@E8E=}V~2RvOo>RPQ^`a=GHVxGeExo-C2ols_WAN5qHnsCDShq51N{{kc@5 zrM8r>!?KepASZ{T5Mtd&@{uxNKd1zk;S~DNF{x{bDQ$9GRC(_lsB5ER=C9H$7;0h> zb<!{-!dS_}m=t9hu0W9`_8~&iLBa79WsA%zF|6_&?q-+aQDm6wc$o+(^*Od?SB`iW z_LL)^FS+_|j_49@e46Vpf!ZvG@31HR`&jp(G!*rrRxS$VWa*zef@yx?8k%ii8kd?2 z=_Kbmbf1C)-3~(oBFnX}i%ZD07snZ;G#?r?r=%Pxoa4}t3DN_1fc?|2q(tAn3Nr)= z(siHqfEm#xNN8vXD7n$#a+UMf4hjv;0c|HjS)P87Zfz_U3=XCPCwa!QC-m`!HL#FK zjbfDL(-hK4gJm8Fm2>Wg;_6!-BgJ+mL#q^>3b{&efm~CQQt#{z%QBk;Dw|ldWm~Wv zONKPlg9Qcj_uqpl{2`TU50OU3ut@=Ne02<46Od#j&5#eRAb924vqEz1f^pK+Y08pt zGo@arZq`3lk6Ia|zQ{tY`G?UQXKQjf#}dP7nZa^VaA539t>~s25}X^VCZ5d|Pr`bl z1q@p0lyr-TFxS3B&304Wh0&m9V{AcXL0iq8GYh6SR+DDSkV#aDT(LnAbp=B;9N8Oh zeJc;bZqkdW0A%Je%mf8WXlyk5G9aq^slJLRQQz=1b6l3iAQciPCZVr1HJSx=>B-N? 
zV3}Q_hTVmYVhI*2n;?^F?S81eaxW=kD46YT(1Y*v!oW!z>17EQ#E}8w2ANH_nb^Fg z?EY*^m&LmI#!i7;?al%MR|Nis9TMg~%|N4m&sphW24jJ9qei77+3GyhVl7%$9wyC4 zm#d<@8pQ&-GVA?AXrMNi8>OE|VD*C~$4Tq6fVl`eh<qUi0Ze2WKoZYgAj%aQFG*?I zpEJ=If9mc(ks9UboNr1ikx8Sxd6E`%qF5T7fzl~(NfSGXDfKKS$e>>lso@T0$zm6^ zKg?#Mf}*Vrvtgy(!NyUrYoE%KCsTQxBj1axk;s=Q@}d%1p+<Vvg`vjIRVmFFsBXwn zum@=b+ks@k*5D<$jzJrW+Oc4E{zL=AVYnTk;j$fqTaMV2>oEL|^&m&A7aH2Nwr{)V zu%Cj`Gt+Z8t}?T5u!iF-TIs|nSr(^rk+tnmLfbqjxZ_H#1@SA`Wz&(Nkp9)mVS+>Z zO?NiO(0kzJG3b|lOG5NC#%4jwj74>T39k4rzQJ~{Bl*|1n?dWl+8P3+ePh^1hR~1~ zDHh|=VQ3DgT#v3Cgk1BwvtJBJy=S3bXjL+DDF7d()#>-G4ACQlwqX?M7(<c6zhNVT zM-I9ag&a~23-)Z7R@d`H*eh45#_086>2Ua8^K%05w&iH8l#<6j4DJ{9LM}95Q7ig- zxU`ZOdqc#S$R**-y;~;VGn|d=md)?TWsh}B<6jPAhr1<4K08((2!R+cf_R38#w*z4 zif5gE((Ev%3yIS#U;{!#Ui4!7LvnP5*vP6Q@|O^12p!4S=dzih5A#1vWfwwMq~uJ6 zK`0){g;}vMM?8Zm7e0jwP_`ttc9&%ddM#0ETLt5pW_ndFdpK;g=Xg{@+S-k^gp~(= zhF*}Ou<K9@dNY_!3*W9=3ALnBTn6SA(Qib_WJnWP_O`X(v&0KUu)&WC4L7w|dmb36 z;cVY^7nYo!p5qqOgzgTF!&vG9BmjsYVlB{49my;aeKU_3Iy0UG)OONUeZ9OlRc<GW z6wl^~$5q_~<NB0z45fDHrlM}_VuUF+peNSEJ<a+(?t-(2Y?9_;Im@E?oop5xT!ztK zu#u6;x<9c+Rh#f|5UY$l5*G6p<z1MeP?l{osYi!SWQ9?o{DL{`fv6Zh(Zse#<?-vf zvR|Vn@Xya?rtSmy`)9NA?!Eb5S?u}lQ*|p~?WtMc>%!cPDXeiq|InQ@>8f4jA<}hN zR%*NrDEoN&s_2JCD)NgK#OxMV-q{NG4I$0|!7-%;n~GvYvdJ@^*6oAskn0$wf#~|p zW;bT^imd|0mKvk4ew>ufDunDIZIy&C1`fXO3)sk+X_*NMU->{O3Ygh$boQ>UU(*X# zYCrT?d8FhCawCWJf^+&xw_r@DHGC8(+dI?9exDTNTT7cN+o4n_e3opCOWM%MM@!3M zTkCHrzt~M7CR@7(IyZ{!n7eLRJB~@)(paT2#eYT~nQ4vkXnbl0YcS?!p7QT3s{klv z)CW5gAKPt>MQgjwm!Fraes{TQVARm0I3@jxN?`2;=6;opiSB7Vnu%@_#K<CaJ)AiD zDjQmX-A#FlkQO-%L`%7Jdl5)>ZGJS=-=2ju4c2%|KP@1nd>YdBq_ik@tWbU3GKzK= zZzB>$h#>mX(v~7UN8$9wRiU7i*fQ2+mg|*?>L&eG!mdS6h<Pa;t7FPJSSHoil@++F zRD~Jbv4{ugSV>I3#TQGc6ALwD??7=Lm)>zgs&)@r7=ol;NT4nOQ9#8IXIP$iI7j@P z40k9Ms&FvIk;N_7-g7T@0Gz~Q2^0@H4T8NO&X}Fjj2S|^cj+{9#YD4^7mWeW!u;r{ zMFMbkXmXOCTvZ>+9ex1TNF(bW+kZgeU~2R+v0XY%0z=0Qg9bp!E=6Wq+bNf`t00a| zAffn3376Qzu?2?3zoLVPdk|C!$?TZ)+pp|eY;5pW>~SsbDd(K2LXox^0Sw7Uj<Te< 
z{?=}=me80OTc#0emd85Lryo|&2x`*7q>gpeyBC5>dv>t?iRFn{d1A$o<)qD>tQQgx zY+jB|kctqEF80|A)yqlIU~w6a9Yu@m+JP`kq<5EbRtjuJcG8>otIS@+{AVI%x>zN7 z6X`pUWK|*+qBdBoJt2`3Am)nNVX;`??b_i0f@m-Th`M0>l`?-KQP4gFBaspmQ^u?D z$JtG6x2?0rWx;*{eFjL!rRDfrLyEruW|!?y^4x=p3A+UCK{%mp6^1>QH8>!=u=m@5 zdO?&=qW*9hP|HN^I$Etz;B<MFo(#7UxfP@@>Z#pisoJpJ;VdQryQEZ(eH|ZW%|LpJ zgV6>jLoPfQ(5py^(PKk0XN(|L#mQyawO*9HLk-&Mu`|^^Maf0&uW~NhxwMLdzlQ2& z*A{}>s2vQVw>wwgs7(fjiAo7{<UB>Ovlk-Ztp-Y6hQZCyU)mjDRK)F%#?vM<7<9CD z?lI~|4Fa~zwP51PeH6Z*mQh4V6FH(#7N&~JaJq@L^w>Li6$~)W5{1-cuSuK1i4OGN zr_|(x%P<vWajd(kjwm_e7ddXHq=(u5p1nM-G@`?7yMBeMmyhbT>rTQmnt(N+nd;o+ zW_%`DE$r3a{ikNhM6FCbCh<SQk6T|(J`5lIzTX0!<6w*Qky&Vf(d4-JLnliBSh_e9 z!4f00c&aQP{lNMr4iB)EQGNB*U%^A^(*7CD)+bKx{$_ttG%oCXjOm#ZM`FQqT&842 zHz3ZK6m}&sVaU2<IbqdKIX^?&0P35dtI(QjF)(w*$S*<35#6s{&kC_w{NUO})Wx6J z_@v<ZVsENu6MV<WZyTkw`?b@ApF+0Nz8!j!_SAz^KzS65+y~^${ccJ=-^y7pp{#9j zw>f7VGMKHzwo_V{VgCV|Hj%;rb~I@KU)hh{OA6)N!dPgZShozVh7Io%5}LCRJzN}D ziayVcv!t9h)ons6r1uB0(mvs#*>Vmq!?A~W?3<xNG2o@^b?l`+sr>FbcA?Lpp)WVk z$PwFNo&ckK1ec)_D5@NQRoKsV+l6U-Oe$VLZFSHdNJu#@O*zl<`VQ(gwH`yMn=+JW z_Q53dMPm5hg4w%$J-dtvg%3phDu;C4RZPGJ*4THU+dMZ78_;j`g6*)UNt=7a@GQg5 z+361FO916_P~MhW{}w;3BR)uuS8e&Pq*;ICN$KQ)1vvr{u)4$DHLSH?55_0=;8*Tu zy_3gUq5d%`*Tn^xCM%o4ucV+%S$lshk9vJoEs1e+D=p&n$wK<sa(8I{uF@k4rI0AA z<$2)JzR^X)-542yxyh9m<vQXAbi*(xYR|D{+~LX-Wf!RA(E~Ha?tX{b8X~Y~=il!> zHUU!7!OCrFh;cj3Y1l6fiB^kQY{hIZkkz3X7|s1sJkzDjZ(J^E9R_zc+{E_mI$E}q zq$MDz_D4Im@YyNn%&D_qv-k>P_72+)OmpronP*WxG?ua}X`6N|TfZm4BwjV!F(k6Z zTPE>zu6@eRlI-eVtoo|&ys4LG*X^B-v~cQT=`}lMH09Xp`CNN;CD!W=`@sS~*WnUV z;=z*ihSdhBjJQ(@P5()*SF7z?y%WC8qe9UEp$J<gGG(<qIP{+c@eQ}!QSegnDE8J> z;h5*HW_?vGvIMkNom2@AsIWwZVJd9*kQu9csdd;}jB2Jg=%4UHMfFv2&^NYrlfLQ* zdc)Q}QeOpk4v=hp)dw;%T3_|1j7-v3Z9r06`*eNPGNiS&&$M*2+S=#otDcq_iu6?& zx?EcexN<~~9NQ825XkQc5vB_w{EaYz2JG)cV$$#X!M7|#zn{Z;F&&AxEr|y{Z8wYU zZ8VO|`u(XS?CEWGgkVA~(vgZeCe14)NA&4jvE6yU>BgaCFDr(STek1_Sbog#`9#%0 zm09&>j3u@3?5gkeRS%&A6ppSF^ybJ&IkRikt_ORVQ-dwh`rZC8hHIqxSP_spYU?Jr 
zj+k7H_-jsgt@|3goGiK8W;n4*_0_AOj!oAOW$CN3t(e=?7%#oe4`>OdNTGau0z_y_ zq*$t^T1$)pSu7ommc>(o!CH)JT83<po%`7}<_(kM7UdRI(geXNKn<xP9O~q?NF!lR z$W>h9k332>ur#0nO6=J!f@5}zNiMl<5@gVry@8}!lI`Z)3oXQ%Y{#=?7V-CLm)<@R z{b$;bkpXiZl52ZP?$7!9$Gah|oFKj7!DD+DfdNCM$_;th+L!688bH_!G0D_zfLXTo zQp+4$dzrp!9gK**<!am+VysXC)dvydFum){Mjv!EojPJW&e+(c!`-bLz_336+z{nu zeI2b+5Q@blTVbY;X-Z{R{jg@0$&mw(dHn>(gwrK3RtiaU_c@D9dm&(%W1PzD`=c`_ zM;z8U3Z$=>sDjg1{fw<cwqvG|M6QLO(aFc!hbYp-;C&49frMq+3kAR_*^#@7eQ;!b zfUn8+!wpmVU6X$NT?`5f%W8x-gMIoziuDvkM50VQD7X${9A{VmsIR&M1yfyLmyDb& zUa4;SDkli=dSGmAaBI7OKzWxEb9!63N$tAlp`1+VH%<Frq{8*%Z&H?~bk~DDA8MS9 z5`>~tQc-t6d-hIDD}7Zd62S8%_PJq(_txxmGmDr89#}X(_NIkWodqWSCwGOS>qu7b zGQ`%g{!M9pD#73~e76U=Vf15mxmx6&mn#&8bu1XUOp=*C-ot8}qOC_EM4Ag2fizP; zhafQxG!}gw3`~rDoF=yYpb&XW*E(Yd(NXa&O$Vnpswl{0GqNej#o<IncH#|l;tgcu zR~{(uro|@gN$9vEV+3j@me;9!?A*A3Ld3wM37uk;$`H%OHwRl)?t3Qk2gA!Zy&QVk z_24`~yloO;v}D&PQz$pCSe;VvH@Z$rCl<;hBDYAYu75C|EYk}89%6#;KTBGT>~h@h z(p<Z4-6z-7*-HlS8=wMNaIw<fKhH{){Swkss&yRckcgyIeZ3+E?Q*w7$eepg9BO4J zf=d-7xlr;=A5tXhs&r~L#yf4vKKl%2K6(`y(t4wCwIw2!786^0jwRp5Dvov=6e^SU zUG{C$1B0*KOJ0o1allfK1D1W4{o1Z;<Z&I)_6raw%(hl`=xDU{6NE}kw$zmJV1NpK zfSkr1Y5vf+J&(vbS}u?ymWquOB$c29Z4@M3pad-xr0MSvzE>y<KyjFiN`TxQz`XQT z`M^lIFNMslaTT(>(Vi0>nn5}>yi+=D=b#x-vAey?dzqB^EVoQnoHRE`nUUp>y(0zN z{-6r@?I-y-)(I^#?Hp((@fMY7ZaXPgm@-T<i5~32u`YcIFp=!<5|iz?`;D_0#szIV z5X6q9z~NUZ=VeA-M7Obs<H^>U;Pi4%CVpoQ{%V{fNy?ti3<P-RVARHJpR$2UETumM z^hd}RzoYsPKHV8e9g9J?l#C3;YLHt-rUn@n`s)5Tt}>@ikxi4l9k%D2P^G(cl;`ki zfs!JNa`Y*7{&?gVGW=ks?xVBglxD~=8KVGmyJ8pE?AnK*W@!!X<uc3xCBvgE3^RKP zsPe@Z(_XAnvOhg_7yBf60Ke`kyPDiH^tO+NgKHO#2$x~}`^X8FSRY&FGhNt#{&Qo+ zcpB*E1jqUSlVhw0gmlnspY4}xpY2|9?M=avIp#Y1p#Qwcx#$srZrxOuq8PYP#2j~8 z&v5Ge&3kOXfPBqXW*d;fH!o%l1N!hUEClGv+)|AE{R>%q$_xA#$5?a9AinR-jRX6q z-M#@$I)dzo@GCA~Xefua?iyvAiwivIP1w9oI6(?gdm4;_{n0wK4533e*y4esqACz) zi=XdF<K_0f^7uyWRNx3fR{8I;lLMook~#@d7!l6TZMW}~<H8Gev5ndQPv$cyjqm?E z%O2F5KXIL{7-a6Z{2gU4^b?hYb{WM}F2e|wcvvQmi=b_W2IWX;?1w>nd8?Lf9~|jl 
zXr$$R^8NS|x%5wVbTAHZo7fXWHt^aK7MdDo-TxbI(73$Iy9naHs3ke+$Ek(QXrX-@ z5w5u0u0KNKg4b8y0=TaI5aQP8tA9b@6CM7*+4N@!?fyr)+qExZ9T!J>*|pnb#LuqX zLdci!k-ER(a$#H=ZMU@R4MNdT{T}mJuK=8?RtXg|*r8OTW;FXTHM$p!Lsz2{=la?R z$o-1Tiz=rJ0IpQ-!6vF)r0Xi{mlhnf=U1YCW1;f_X(FN|!zDH&ZIEFK8H(jWd$D@C zByY^|M(yxW_F~!$e&98BFYQ_W>8tG7p(UDI%q_jQu=)t~shB_#EL%)HDuw{3U1P8< z4d-jpl)rKO2o(j60OHR48@>M#R-PVX)hQ&ha`#t4LPFvx>E>a%0Mbkpyrcw^DTfOR zPBtnA+rr-gFwVkkd+A`3ie5AyBvB*=={;h0JTUzevaG<~huG6bU$F!VqF(lD**sBS ztz<vUA~Qnx<%ijjj3|EmVKyzp#1~&<TQidR`>wF#8U6XWe=t|Z0OPJ#lp_aBaZ%R; zy=%MavgllfYoOb7L4U9bnKAt9^{hOzA3yyPdnI!OZ@I=UWeyGSdKulX8;Y)>t?I3p z*}!4ZL-g`&X>l3OAQrs>wGfPJV^`1xF2mPEv*9j>-OcGK#T~@`nsYRq_r1&>A2!)~ z=m1G+tUwLXO1qWNbS3nf5?ZK)wkn}gCG@xws!&1?${|q}y`0hg3SoyLvM)bD9psrj z1@!&$7(OLM!tKrG4iD>Mlc?J2Z1<2N4zS0DC;7etIa|UY@XRZ$VR$TG-@twvp6E65 zC35-X{mw$OjNKEdFGRHbDr_q>I|`dZj7mQmf`;oavgujbyx73DXN~3qPcSJfO|zWE zn9?HqzfS7G4T(sH(v=rID6$*qrx#d-DW2Cq&$gR}^c;MV#B)rVDv!`((kK;vEqeiu zVGC}Y)%NxRcGpyCT}Go*{K^u7jbIUQY1?SIQeJe;X(ADHlTZk5|GYpz*9agzh?+tN z&qVlIHo`}#07v9AGE3&O08SkGxluKFR|{N3UPAiCmA?qKOiwC~Zt&^u>5+H;Af+@t zd2^dhI68M3e$ummk*0nlZj%0}B4yR6?Mnf4i+MwQEYRw6`s&ZAmqcALvT+%f?O>H7 zgT02yf&3k8+sI)_0q92<q>`ELk=Q+5I7ioA_9yP^+vOO9KbONlZD(HQ{<Cb)Q!8}E z0MZ_^W*vq~g|vc5FM(8(UHyYaFB|z%fFIVK>L1E`G-@N;mFg)GH+*T8KP4T6RSoDe zgl%J+%vOHJJIs)s&4+!<3bK1dJ`58UCkXngat#cHac<HI5Fk{YE%Rh6+nya_{qZ{T z`6vi0EY#v=ECPaff^Kh|Z^3kECVyEFlHy*=uf!<dRVlF#YC~P%;By)Lp92+lKYm80 zy^%u?<e=O+d=?p?YYcyR(G`<1^&2=}=~Cin<RCY=>IGYS#;VaoIRFasIQI@4J1TPe zaVWNb%AoZeG5}i*Rvp=ye?khCS<6kh%Ais_6$&PaAp(4+x&R|>sZU5hm9Y9z-3Hq! 
zlmA*Z%ioDgZRZh5y%QJuEYpq7;!owX@uNcn-O+MEd^|3T2ZV7FSI5?lP70rcf+2BB zx4jT4CyiI?5oybO>jG;S9TWO37B2OenT|h2T?+`9rO&WCqet@D&oYzHGoaWDmDO_S zV)2AsH_VGw2>rt2iD)~3%WY$BD3MScD-VmhKRnq1A)WuylevW8pzT=F1;++Ti~;HP zh27xrWW93CfukwH*65Y~*}4r*4>59xC$r`x2Atk1i#b~;`VHeU$dm2M>C6AIg<Z;t z<@b9q{g~9y^P8!u5<1Fd$)ouu$oe2^D=Qq+kI&n}o*a|nbp=;RT!uHcu;XKf_c;uE zB;^;FKQIuXB~D6(dr4cIC8H_<CoXX!+{MLP(7w#AH(2V}@~~K>SQSG9nMyM|wo8zo zIX%+%25THUqfgv?s)C%pA065BPSZYz0f`hRU506Lifw9&>gy~ox3JHW|DGbCGsV$b zDx)Js`=9LV+`K|<u!dVdUnshU<^C(WI%CVY!ddbK(eKK26Fj>C(pa3jkPtjvjWBKF zY`25f^#}7?#0)OQEgOZr(XX`%@pq}m2eK#kQ7wBkZ$`jJT`6`_03F>mNr6k*mAvKN znIIyxb}3spE<4bQ8K-wtV)*11eUI$NyR-e{f_*;QBcBXu`?<4=<3{M5uw}-GLEk|I z-`T_Zj-Sa-{*FC1-l!iyYo5ceNwDi(hWJfv@AzqBkAI5-usci?s(-PB+aB=JSPYmA zE<@2Ks?G_i>^=04Ef#G)7$CS#mVXXSyB&$J0JhUY=4(sWlzX@gDVx}={GI_bDax%G zjc$TJ_m7QiV}1-j*U5J0$Mc$R+4=lve%a}b6M7BVwNchL-Dss)z7y8ZfYaoUgjU0~ zIj1hiUT~LldPtKIg*~}04*9waJvOq!2}yi@EBn`kzI^?Y?2`#YyTxfyB(~`WY4N9> zF3lS)Oh55HjW1g{@o8N#G>&?tTfK-GCiOD*1w}s6eU#>+5g~hs^FgT_5RCPVMQqNb z)S+ii$xP)b(s5vPp4$uOT?t1#F^}mucQ>e=$GNY|fndsR@F~_ZX*=KNO}1ik?8YfM z{Qf7{*eU63O^qLWWJ*QgJ(|8_LxiF$*Bc%#?<Re6f(1+s<6k(zlBSxHdcI2nmtDse ze<@T6$!=p`OqXaln=w^nL-OsFY|GUC{L@A3_|&*Q8BiCv8$wq^geA?>1vkF~PER!E z^RHnKo4kycZrF~W#0_Cy%3i@DrzP;=MY~2%>!%6txYmU3gb;;eokINLD7!mz2*0kB z4W3oZ-@Cz{ofX4Bc9zx8+Gf24>l?=-snVxF8aZz?yzMZ|y%ZO&Ms0WrSi1Herj_2d z8946bz*mWTW#4abxpEp@TH8<E$BK{NG%mvgS#o{)W9I%|uVFP>3f_lqV|}-->L%49 zXB1E>7eJD2n~P|(_%jw@cD`U*Sk)DpKvzh2giAMIR;!1)e$8y$>~7JgmeQ)B^Fxuj z;@Pocm0Vn|y&ySD39v_Ir)ZY3eY1^&P>;uQlhaH?n=BKstIFHp)4^|Y`&w`v$aWOA zn>U!jQF&KW-d9bppA+1dS|G>Y1Shc!?PwVME#Kfo9dn^S9T^y1hVFJYX-=F@^C*Us z+jq#M-MEZZ&Dn@bbekKMk-kChTpyQV?;|o+lD7UE%RDS;y&iG76m|`F-JI+tSi&mT zPxrBOH*bhVag@!9xxEH#g(XCEr1-2Fhr)WNC(tIK;DbLdH~oG||F9qA>xB5_(vQzA z44HuA`yceXeMjg&snV1p1c7#yt{-OR!dO&oVPQl!xs?YunER*MZn>3rdCW~M^_fB= zzh)8pxUk27yH(UVtw?Kd8N8}x#q|(20=8qWwxb%1hlAqmR@kQZSGincl)?@3o=ZZ` zBq;da&XS}|byzmxush7FGTVlnkf*=P5K+Z0%nSCZvUSLU-J55OKzdZm`?2h=lq$)= 
zG_`d@?@JnX5_ac4A7lzqcpz;_+LW`FV5eUP?drpzsrQs2phEq_hp`KA#;94mbl-r4 zt<R$wx9^$s=5KHbV1~=k#YT&{7Q=2db_dGvmkJKDd-sLL%mbw@3FaEkeZr&^gfNKx z{$ks5fTb0gdR%~6pK=a|P5%a>P9WI)T*^6`K(HaW^vv&mV>^lr{P(}HJw?4kR)Z1z z`7%Gv2j?`{dFL0gn?=zBqdIeuQe-}e9kF9EB2eWpiQ<u%?p~_Zt!iN7iW77PWQwe~ z#y2oqaq5Wmpmf}1@CJ_b8Fc&;D5QV3QYYUEJtKK#p!{RG;sNZM=+Z3R={PK;-!Ai$ zG8@?4;_mL%rytD44VEBwrQWM+)?;nTO^&R`PPmRqX)~~o@ok7_>)wsx-#N&V7L1II z+>TYCpuXdnY6rX<IHnp4f_hAKc0b#=px3x5R$K?Kei`kjwPOhS3btIpN!W7fjg|zu z8x@;dRF8?D1V&t+Guc|96>n<umefzrbE|45o#XUD&B5YrW!?*W(dfQDBDK<zByHN? zSzPpfma}kmWNRR<R{ghQDNL13K3rFJe&Hpby{D9_yf=V-x+t4pd5VQB9-`UH@)tMq zi%znwyXFPT9(bv#uj~{hfAw2T_rb7TN0&rt_=VrGCI5~HCtvlE<23Y-c)?A5M{!fv zxeWauVA1zyB|M5am!WMf%5qGpMAKa@rph}D$~!#NbLIHvwajOCg!?&^#Cq>u9bH2& z28h`^I-SS3fN+F-$MEDxJtu<8v@A?Bg`M9$fL}YGb=i~X{`v$^_b==1{s1A%_C#x5 zU^RQX@!vho5|(%8n^&`(<w?A_iFwt<ur14X^G_zS38lgA{~izadv!hC9|N-MX<X&j z4cy%+&4z!o*H^^rde27vVJVddNH_k?&aDXR6TTWFw4F$Jcj8}goI3fGdW<%2*gdX) zV<H|$TLq|5bn^EsX63lNN>FV^PMQv=PMjw&8mgNs%F;8%?>Me(i6CCg6@SZVxQoki zgK9vseUHm==cbPp-G9YFV~CgSTa6_f^J6Fbcx80zRaj!`tngGaS7uKXYKzeyF8vM$ zDzf=6#;HKrJ0C`{SL%b^o8r*q`ug4){=VJpa>MA@6Hj6=`|au&%yr6OzWsiuJT|<~ zrXH~HkNt}sKM>2$|JSbH59Dh6_8d@#(!24HWzL}y{GtQQaVX0Db>z?9J+#=JE)%iH z!-x3r2KN2o4ek@7QSkC3l^Xtw&)M&daS{FYD?>>34FAV*$5Z#q#~oLpWLyS#+h`p9 zPsbgHWu|`j({YD;2FhR`HC=EY8U<uq^9y{2eb<PiTX=p;75uJIx}4WKn|0<@=5?YU z8(tQr`|Z_E>bh6i;<8x&fk)UAWlMawVM`B<-c!nXm1!*np&z2k<rQRogh>})=}ffr z6}Hy$0&m>UqRQif-=B{P7qw$xRNz{ge6LHoeu7Oej}CkQQJ@c{8wgoYM|&nRv3#)i zK^s)pVdt6S?AI}29?3X8wbZdo<-<baWY=grJi=jUCT&|jLS$4O%UTs5^7C;_7`rD? 
zD`Zvo1)V^2+053j3JW{DN9mp)ABIU`kKc_TZiqE%Z+^z!Ta^-A@FMnvn8t-4!wO^y z&*YEUtyM<;;rT3Lb-%cWU|cEplPz6x#l!Nw^d{*U$Q5ttpj|FQ&-rZm>OT6jV?d%E z#SGZ_ZlAI@R~xObgC^g_A-}@2#E^c_5`{rrLpa^X8B!!m76ya?7;d%l<hYU!<)EJ8 zMdVL<&_r{=VPGwk$nR(#rp1UO=1?#AH7tJ6s58+Dx*>>ZNH?XD$zg+6W$bKBMaVcG zio`_t?gi>6+3I^pnS1loBMr#1Y&H&@1o4{Ca1V1YJ`5z=&zO6z>9`2<2j*U?Jokp6 zprK>f&Wf<U4|isT;{uR(qcvh9CY>D-RMxys2I<%5S-`tN%x_H?Kc|u<teL<&d$I=* zx-pN{u8HM`ezfbOHAgkpOsHh?FP_0{M&Is-+dc(L)VcC-y3_ChA~Q1K9KuzwW73!) z2*Od|rg_Y*s=oYZ$7szyFE9v2O_H^dlG8|;Uk3`t1Vj?K0=ZSD%Ns!W&5*h>J=0OD zvGf&+epT)~g=6DnrnJgHD*J07t&L&Ebs-TE$b!l?uLqMY-VZ@^cTf15rk=!53Smpv z1qXlfoHCr=$1s!rTqkqByoYUGXXWROXQAuQ@m1s4_4VSA5m5cq+H+7ZIB_VzsP1@q z2qWx>b{r-c=KLMfy<+4ba0~X#SN_fF9-PL1e2{gqCi^=f6O-#$O1;@0dF}%?-a3#E zoz1GPQ@Wfk06QHHS|-qb@cRSIY3-d*3o7O?Zi}3hiq_CN`-ePy7Z5it(<1_dc}4-t ztW5F_Uk{=qGiW_~qq0Y!m$Kh*L|$7blP|9WxgY<)bL?DY<jCUZumY^2sf>$Ejk72> zj6s*-1E6N{Y);BKmm%P9_$FcjcWOuE1G8BALy1wpJ_|i~<Fj0Q<Yi#WOJMRqK5mQG zHL|LQp5|9hXAuuS&5y`sA3SUhv1}$o&F1Tp#sKTEf6mmh0K8q*dndbNn;g4vI2@^N zv{pq+0ks|cKRr!zAp|m(&OOb_ss`~%)7cwUlY+mThLY{st@465R4AhNwVpZ1W>$wM zU6_JF-Xu*!QYjFB#MHKD+^L?J#eql$*^X5bOqLTw)yfGl@%_Oqh<U9p!?Pe^NWU(9 z_!MK+@qUHr$UrXS(gAj;dY<0%BC43(MD-1ohCamxKGL(-xA0UT^KK=!^`D282M9$| znnYhId=%NUF3Ta&mwch8r8h3@s(Qpn({<kv<cYpN-VyQo5cd3|q5Q+m?4w7A@e>)l z^Qe&zIM1RsMDSO?VHq2GcKdO#oJL=D4YpZFx|YFg?S@1?y%&3R!!G{l1Xf^AGUQ?< zkcBe{hjg$uAd%mnWY5{7d7o5PZy&<v)UccOIRAcFO*?wz!y1<O*!`hF8056wrLC^$ zexS10%j=$~m%dG9?>%OvcQ+GkH}B!4p}?9BMdcg;3!!hiPJnhU>mgq+GE4?8hz)<= zHzPCsCuT1%TWc0x3Hj`aj^t9|6KvT>-T8MnvL`-@O4~z=h@(#phAY(ES05?z8-!sm zJxj-HFhdh4@PxeBoDz!83JtflQsoJj_HpE3USEaFHz@j)G*#wyS>&WSe@~k6pGo^r z(u71el^_14Q_^*;R#X)30G^WCXz7BESBv`kID7o#zWn<`*+(B|@j^Yj_wf|#UK5o{ z=LYahx-X^#1#zYHdSnIkd=_4coP$fTbjz({mRh&tfXIjRlj!R*gpLD(SAoc>?mAWO z{M!nFi?gE6UIR^AFW8zjLYgjaC-(oF>ahdvc+HY+{bUAzcnSOSlL~%FE?d4olh1#g zy|X`ue|s#uxj&xYx|kX31wNlZDx0`<H%k`bb8xW9Vg<i5hn*C2`L!Rih{r?tor~Gv z$5Z&OIc&+}@w{aW+xYk>K4}d5`tc0@!F?>m5y9_V#0EPC@>_G*Qpd1?k6{^vx4<#> 
z%0p@A&&wKfd<pg9_`e9lK%lebJTD>Tgp^RsE;xqqmJ$~Aug!dq$5`FJQu)V6v8(?Y z9We`6tc1i?tQ-&~md^JY-Ht=E;;P@13)qw=dh^<S?4c*Zhqh+ZY_=HinSnyQ<V>&b zlWH_!#$jdodu2uyqiifmd2kz_kdp61*-4DA3FOxmv*4PAyB?{T>}DNWNPRXoK<cj` zAyR^Z7^UtCk{|^uNV24p5noBG5VE8@bLG^6bWK6>rArDjUHVo*3Z=6OvQRpuAgR(3 z1u2z2o@<rUS4bboB(ai|-fIdD=LIJ{tKb*~oK&OW@WFCYm4fdeyh6cWBYdfXW1w(S zp@L(j<)q09UQf87;7x>EGn7CJ1(Fr~Ea7nq-b#3gf=h(^Dfkt_IR$Sg{Es=*5|Pez zBuT;HM(3n&6r2u<q!tAaA^ebnTX9;zNp(sff$+B!Jelwv3Z6>%CI!zTe1n1ugs)TZ ze8Ni=yp-@_1+O4{x`I~{o~z&+2+x{hm1|u?fq_cIX2KH`d?Dcx3jP}50Sdm8aCZf- zBfNdKoKHRBmleE;@KyzHA^dX%-$8hjf}hn`5%@?6v{J-Q1(yhaNx`oWzFEQB34dI{ z>Gqmbso-?yQnDyGeNRPNsNf-l7bv)q@O%YNAlwYxiaw)T9a5?ikxF=yg42PEWK?j0 z@E`@xC)`WHrxSjsKyG~@;a3!VA>ltLcq!p$6}*D*;|g90e413R1U68_2MS(8_-hKj zneb;7d<Wq*3jP}5RSLe7@CpU5BYbHAd!sgTLN=xiPLxN<F=KIRsY_QV0~JaDDE$>m zj6#_X$^wNFpitl`*3DKZ_hzw>%|`2GY>tuZ7qh6*V~vRaPN5uBC<&mPRVeQ%6kH+E zom42>6-p{7hZKrKp=5!wPoX@ZPy|qRDwIVEWg#e^SQXMVg;WaC2MQ%yp;UnKhC)e^ zDPkq&Cy37}#3+SG+6BZKh3Ksi(M}@T6yhH<(Ly?1#;a56tBW|GV1u{l{cG3#WkAHP zW0STd@^jX)$}QjSihkx<jn(%llFfz|$_`1~U7tERX;?pm=svmpk_bHMpStifBn}1H zu3bu2r5sByjOgZ5(5@>{zybv<1d!j`6?IdH;)=hXMz<JfeZ{NAmbj2<@)rkK#&g51 z(XgUbN7p+QaC>CItT_J+Qi-}V0Bw(>av*)JkUj%&;;r-e14{Tq3NMwa7&S!ph<nja zXC>yJAlB@lhKWaWi?l-~b6naCcS~0sNd?Gnl(xDHZa5`mx1Q?}VpcNC1n9WDTZkj^ zRF=9e!hh2iY)5dy=7kqmgjd*o+a}W4y!a`7u0zyLn8X^k#dQP4`I&S->@{3kj{~<S zxZin+-PyJvwbxdN145Fa(n0c0%P}TIirh{WZ^F*jWw4XYY1giOMFvY=Vb5$IY;<i> z4uVoZbr~|jf;aXdn{YYYX9)X!dk;P>iG}Wn;eXu5GIxXt&D)SCmfj4Z!R016BJdso ztm{|FI>lQ_FJpf|XN0wK81eM&j>nY9GCA@BZJ(v#o7napY1XVsQ>H4tMaE@5cIEpj zkP+SJGRFxfQOXss=8A37bQp}d4we?lHX|A@@>hRB!guJ(VBfk7I%=ETLHa62o#QZ! zL4;hh2_1IxTDjLy*Tz9IN5}+vlS({^Y7axM_+$)m%8gMNTJtGP@jc{tua5XfmG~W< z@&0nWL}W_800_=TkWfq2g|K2!qZtaj%TV9JH{48^U3<0`{TqPBpCSaeEVM^};9ZkO z0KB<pxi2^Uq6IDh<Q*g@*1b(J;uV*{eGr@YLXKdPIZs&zx(x3ORLbmyTN?5aF0N*X zx&)P>tIOa}8R)o6;SqH=pC$>E4P@WH5W~C1GtU?M`#)Ru*Yr*(W20Ua_)%r7=Ebr8 zH&^}@Q@xV4y;#AQtz<<nMaL|xMhl`vZ6TmtH<LnLrF5{Xldx$8d*!7BYw#q@#l?7? 
zzuZ$AkPo~-u-$51JrWgVkKv{#x(aeThilw;sUPp4=hDrUMJaNdA<y#VY1-H)p@1=B zj!FHocY}-*1>5b`RUZ?jJ1Ei%E2vaI<kaDE!DZM~Qe|`d9F(dNE6<CJ{_+l~55l;Z z^o6vvBO+Fb2wK6`zC4JJU%}pcIhb$g!J1$0%|H1P`}5^Nev9ElA!}9CzE;KtztX4I zlcg?KbgHx%KyC1i<=9I=XEb<FkK&^r-{Y|6jpx|oujGZ?=!&9`NuOV&`s=HXV$nim z_H*p_SGp$Nc}yPixb#;5C0Z{JcWpQ}MrkvG#)y8cIijClzv&=uPxfd1UJd1w`m-^w zCiDBAW$RuY;QrwE@T0!}Y7cjJLcV!5(*67IfZTmGlfTuEWxN(1sc1nzya%s-!KaXN z#AhAwWt4u*^4bvI{tSEjwP@a>A8UDS4*zs4OMX3?FN<Z9UQginZe{CVj}N_?gch7_ zyNsG+$CKOURGRR>R`%iRkMP&Fu=qD)e3thiTU}q(&&4LZ8OH2yY~{PiF+42soHtMM zkx(#iB}QJ6$Fv{5!GNpYR4oCXG%s`F5tpGnk<ELnD{t)0R=qW-uore%s$+Q=P`o!> zcas*8PkT{qoXyk_AsT0cawd3_T)sPVt+!H*U3ah{z*TX)+W8WsAAua&Dv+Lv&4)@! ztYyA$hxLAU5fu^vxl4zD+KgOVCb$ymVlB244r3U43){Ut*=+pVrg6mzkCY)NlU;aN zKUMena?Gy(;y$;4^gp@JHwCZ@ZyQt3EbMHi6DVq+_|N$+XeRm3zfh>s+kY_*1q%Op zFw1=>%(|?T-iC35y687xLgEDMxu;N{^>F9o3rea>@dc37`lv2N#hp)wVF&cM<4M%( z<`7(ENCYVzBv~KrgUm4@;T)~+DN7&P9s4$e)Ma5Jb&YgsK}Rc8?|*<?`>xWboe|QU z1+oKjM|~I@QPPcnVhfv9a*E%*h&}u6$k_O7Oy*+jAaPM$uxIa3oc@zQP^Vbye8%tW zHEwqN-%qfHzl0uj7(y&`f_3CSPOt`o@qclG)g_*-+4=VqtY_zU7T0S&+q-kMaVK8m z{Obg(0arlkbBgK(x2940-BsQ#5iI|`cAuT)B&K81YiHTS_p|v$<!sCQL-?I~Hes=W z{rvuWe4zy=Sqt3vf9`Va(tc2^;pc9^xs|co$1CZ;q47V@tP-N-Gph#?=Q6Y!k@Y{G zSxuGW>y30~#h?D1CG1|){ZU%A#q1sbcw!aR&g#LMyNzAg-Jf6KO9xi`jvH*)o+N(d z4YqVoEWhPB_T-*m{;Lf3`W_>{Kaw5Wlg_UkiqopE!&ZF?O?6$?z_@Z=s$98$?6|b` zPj;uS7r*?^T|M`DX>_45rPUsLWfsfW7pDurog>*%)kQi#i`~Dkd*U6;H>~`r<Xpo_ zb$>VwF8R>vJJ_a9+<-Zz99mu5!1nCRvlfGjc_C#0s*dxG%B)+Jp1B;>ZP~K_IJB~W z^iQ*n96g($yK3ktOBv;X_*x-uw;_Vw62Ma7>?VByTUb>*mw?(5jHthSBFs@!FexLX z(-eU!z3~_CkeTew52N+#=yXW@N_hkEw};rd4~Mbm*BA_6!-KXUT{jE2X3FLYMK|F9 zTkt3we<F(isemm%(WB>ThymK^PuL0(o;d#wmfoJxS&;)kNi{UNSmC=7EV%)%9h~s> z-}MM`TO<_WBPD(IKf-RDi06kCu&9#>#v`P-MDtx#-Or7V(}Shk(>t>Zn88X;#wA<= zg<5f-Gsc;RYW7dpwL^uJ^%9Ccg{pLaguQ<<#;`+WuIOZzw9{GJ$^QJ-Y8G)S($w=2 zn3=Y9DSlizt_bF)1gLM~mW6QKjkDpWV{f_4DSq-hQPOQ(WPpXMA1nI(1m4g9m*H|Z zw)RwlF%TsS;sN?1hkPC9H2Eko@p1!jhbsQ9ZtTFR#3+%H*W<PIBW=53MPTmDt485C 
zr*LTBtYW?`c_CgzuswjvM8iJqsLRxDth6O;=ouRnP4zG3#T12C?}KsAXC#!KpAtTZ zuoJJjpOl$~A3>C0Pr)6Ct2TD1Wf=dyjk$knG$ucYA%lSoA6C7kOemsD@at@t(suV2 zn|6kcnLk~?|NCL~_NNQFXHEIbm^wZgYtunzu^>*#!Yg^L(!~^(a(Y1E_6^X1mk`;= zVHLx7a3-_Wr<48O2E{Qq23j_KGTU`}MShJzo-kc)>k~MAbqW?ma-uyC!{#}c;WL;U zw)VC9>TsEU9aj$^Xk0yjfkh_{FFuMweuZTwe_H{WuARgzpA}}gLtbP_7MbbmfOrKF zuQcH#0@mdzNg{Edvl!9L2(&lBEaLXU5%CN9Zk=*Fhc~dG&*$pCok*33^f+nRL{|BE z1fP-2wtXHIuz3j`LExpgO^rhOpp8q|@y`<sg=?uuY%P>cmqYND4wo_SF9r+?1v`DE zLcOY>y}1}Ic3E;r&}Qk~w@L0=_xUJh$y+oM)~u1XzDbCsR60L_t@&b5W{)*8(-TNZ zsjzuU5X3<>a`1Y&QMvG|QJB`MOxqNuC5xH!ivfK48?5`8WL~?NO*qqw5A4U*oiT@R zT0{p#Uev;;C^52rz_}7RKd^|MJ2QbFxQ6xpGBV2>Z00n0hArjv35|`fyIh|@^FWvT zdEmXYORJTZBlQ%DUy1lUczWT1b_97yz9X6VWla8dEWa4#{!l*BGvhmk1$D^bJ~Vz^ z_&y>M*CCK+?<*c`d*LpvpB*3AppTLnrK#hv^(3|@`>?xTCh}4r*8A+15XV)z*`-`R z%{Mzn<HN5Q9ILOgb7w<S1b;MgKfMa%n1=lWJ_CmLcP4>I7gkZD{?ahys@@n&0V;AV z|G`j}^;P_=*z?dSI1cd>HVcWkTYB!naOXMcW*(@i+Hzm%6tG5)?&J$Nv&5}v6w468 z2dF@l4|bdAb5yS)<eai*HqKN8@e43*{GHW*6_ULa^c!b|aD1fgBHZW#W^#GFRMMHN z^Z-!V<_`oXx!YkdRQY2vASalJ$Z62;Eb{9pe$85z`Sl6wnB2}W`{h`dtByvm(?`_K zT#n=#a>&bm4CU?hrEwv&m8JLB&EmK85h3wwDHTS%I7z;gc;I0i7qufBWdX5`4Kdca zTyR08LQP#ikY&+HU(2yER7z`#>pHvtn}JDl`=HL97Z)cep+b7!oh~lU_)yk%=!s*} zk5|}<ZwBxuzGa&8aec}vFxmg%;-W9^2>q9fi|c-46VE5{<Gy8;=i{t<X)v~VEXCMR zzT@F)Q3kWmUU~L%SnaQC^Je3<AWM+)?TLNRI$0)%z)P;tcuN;cAt!BFLdUE)Frpg> zq4*d}rznr*P>Y(#sg!U~`Q%#Deb}U(CW6q*6@SfJekq0C!SzB8sn`@s1vDsd^yDY) zMed-SDPU*M_2eDCO!G}xoFy)RPF9kYPh44YrQ{d!y;Nmd2^y#o$<>pcI&U!U%#a19 z6>t_FLSIWY;ytsDX(5dgnEQofYfU`eC~Dq^lCsRuYDSmgDIb(-*Zw+->TyU=y;q{{ zyiCALGk#P(+*CGv2n`@N%@v=3hPdK;%YO$djq3cQUlzVdaoI>;LKX9}q^KnF!(eNQ zFY;!wH!tXT_aydSYgbRi;xr+V{dU1%%uAM9692|WIiSUt_Ii@QCzMly9)gJGekyuN zt6%vqtW=nM(Lo9+XS$1_k#{Zs#6cTo={pN?@QoHW=VBE9s)apxF^0EU*vA(mecM-2 z2Lv6Jd=uG^7o+)93)QZ_E^aDK)gGu!XG=QTNKH)p?XS{gDKMcmKje=xWtif?5gUwp z<-pNLYG6BCBa@fF0rB7RlI+F*KqWC**xlAJKDvzEYYpW+%24&tF1QrTNk75=(2>uc zdA&7(8tdl%<YM5sS5#Q1!XqkdRpC_?dJItbBUG5G!pSNuQQ<ljZdBnrDm<jZvkJ6w 
zl1gY-VF11#PQMrx9;dIr;I~hOZ>X?Fh3izl4JzEE!nag-NQGxqD5>y{3~}HPGEgaC zunNbhaE=NqRQR|Gx2o`f3Qw!>I~BI8&|{F2Z;%R&D(tPo!30r0C#Zy}DlAsvG8I0k z!c8jNp~Cl7ctnM#RrrGnud2{}uu_o-6~?J>ARv{`WvPUTDqO0<M^yN%3g1`ZaTR`} z!mBFuQ@bQgg}qgnszUmbBK>BnaM=(|vP^hTC7e~EOvInbYF`agVPT2}AGwc@tcl^W zD66wm6<E`(!1ir`Peymu)!$GErLllW(z8d$=l4k!=Hb~9qv3W0D)nidq>;0$oV;u3 zkIQt{!T=>qCXD=3!_ZvvZ--uqhbokR`Dz%(fc(4Suf)SWA^&RBaG)B_Qo}Gq<zK5# z;m0O`epaqjC4?#j&Zvg5VUd43{gedQ4#~ePHB3$#`d#r=;&Jgy{=KG#yQ|?=A0^(X zhD+6Ov>cv-D@R@m1J+LYSEz>9sNpO%yjBex)$koP4DX06QI{HS_tfmVeXWOY1~-qh z;91HUx%u2m{I?vBkz2$q!{4QdDF;@>EtF%2f)>w}ajKkmMY_gnu>L8(DPWw(-G@Z; z!Dc~5MkHK`5EVhRQaSq~;AQyFh^))xf))VpiHw&b+>=uZA)Mo$dR$Xbo2t=tsnuvS zKSyT#w;Ijmijl=iZWvdv=SPi3&vy~se|i66@dopAx30O*z3<_NHMLrIP1)pxRQcaX z;5J5b+`sV*Y*%W%5BSNyg}a2gb72F5OAUoV$&^L|Hu?_f_tbT28jTOn`3&$#uHE6T zG4h92)E;oxj0x=EuIt5d<nIe>uZ_}bf<rp!6}>rbaN=Jj*Vg-My44nGHC+rHEc5$v z+;TkQsG!<rZ%ue|mmm)=sMw1O8tBOd4G1XhU^NWjxHvpAE4VctJm)cy&#%q()Pz`x zizZgD6ppLMvrw)1N#NiAEv)fz=X?fwP-{xN=-oLzO3@GS?JT9=Ab550bd=&P^LukX z6FoY4+y`^qi+F4(gP_KbuO089$?FtIOeV*T7{+nWx7WVop^1t=w1VSWwP3_2nA~As z(NE*abDo-KWO0+oHMPBbG|{!+=`_I}#NXmxd&xr+)`Rd;r0pVC#8a+#he%dV;J9Wy zAGO!^_SA$?$BgpSM9IRSJkMetRlyYaNEJpG64?O0vztfG<hU7lzU+wqn{YY=>I17P z2+viOrx);iJb^c<6{uHNcdjdh*mZzj77#jN5VsR^<UF2^M85&=G5fDXnm~6haG;J0 z+~}Jx_aiD*8{wsi>lESkLXLZT9>?{$Sv%276GKf~<)yi=Gk)1(j{6P|M109h6BW}@ z-QG(%ZW*2lYW04{9`#X#AG%rF8>6fCTOUo(Tty7|T?}5F!GfpQlQUpU7zT7{=@9LL zWgPc1o@dp3G~qs6xFvuKFV=J61O2)1jX^De^#S=By$7c+_Qp8!;`9@JOGzZPuX$_Y zYk&0C^y{o{<%1meJ|63>+IKKTY72FmgwDvfMUK0O2a*5s(e$o`vJOe;(#3=8GSG+X zGC)`FTk2ETQL)%<95))zKDA=CfB0zfgF9j$eHAfyS`oYN)!Lc9ny@Hk<oIwtEB_k8 zC*FmM#>3yL-KtYY&TgHihelKToUbNUp5eZl!98j4(fX*gQjb>pbNcs{R?36cpXdAs z_|zu)X+mpr{WM;k75U`|$Ni3nxPCuUyVy??*4f_bCXVac%yDCH)jsW~iR#-?`Rky4 zbd=*3-6AP+Txb+zy3x1QyT!9!Tk2lumd__+9DL`eiLp}6jNur5)Q1Cosqb3#E&nO1 z<}W$!HXf^*-~S}iXU;<YcsfKHn(WJUA6U$F*UaPsuK2h5wfNL~m3kI><Z~RA+r)E; z|NjO5h!1$+Q3|fTr_=QAZ2T({Ubx1y`Bv>ve@#>>HJ%jL;WqpOIe2!dm0b#4=j6E0 
z)Ud{Y0Xi^%GraF#sw?!%uYJv5lNlc9#RX3G!CV3y;9u`o>RYJ6hkU%DWS-V*jIx-L zaqTiRiRZ!)BX=l_iqJ$)F3d7e6E-kK6Q)Vl_^M4S^+uQZd2oIhd43a>Mr*qJaosIl zxbDRP5Q{(8ePzg%;8u0`hxl+I#ds!r!;1CdLIwo2Vp^1j4$*NvEO+v`9>ur09s_T2 zJv4uCe<f4oScCnEtP)3H!1kq)qWqF&TT8=Ls0Ke#OF`Sr!?FtffBS|1w_o_bxnJlf z57Phr{X*@^K+VyBV$G=KEAO9Y$(vtRHgCcFX|+SUYQF9#a{2Qt3n$LMfBCBUqZTcl zKdE$n(KIewGs&_N|L08OU>g$0$mRFnw`ke?J&{2g<{nsCs+hLQY=Nm<x+gG9v)o&g zz31Urjlae!cq$BL6=tb0Lxrg-9H_!%6(*?AsKO8xda1D8L&^8D3R_k9jS9~y(8_(T z5?WMvT!l?4Jfy;U6@H|`Iu(AP!ksF7O@*6P__zwIRM?UKIysCTrA3Wcs=`7QPFJCz z!c-L|t1wB0MiqLguwARP93O_2m$VMi<JJExQe2V>TM5#R_N+>1QDKt`>s7c@g*#MO zqrwUm7OF5`g{dk`R$+n)jVcULVE~|&YUHI7I2E?LD>c2MLP>?KDm<&gCKc{fAuN8m zg*7Uq0x17VHC(E~LKO<`nh`QXs!A}b&`X7P+>{LdP+_YI>r}XdV6D-jiP-2iURyN= zp(mrm|96mD)WN;BAVCwMy)scw)^$K(kW$)}5CxWoDX@m}MKkjA6bR3=yi+U=-g6*9 z^NyRfJy3~|RM?TxeZdMn;kvTF|33w>3F7FHYYx+aYo#WDP*L=M23pnM4{E;a)O`OL zLd*SggfjlArc}AkU+r^!G)t@;R&f}~+)sF}^uY&c0LORXxPo5z*Lnb-?27l2z_ST( zeGtddKQ34a=)}_s{>y*?1{g=66CBnJVc=PSxuG049XP?J2v`Kb37*D7{8u6%G(9%9 z0kKF7_zRvO;8y@gMj?Zq8m<`dPdxdcw*wC94qX8}6>tq6;#mh6X;g3{;NWO%Vet=u zqyo0!p~4705~DN>t{@Ix9;C&@Bcv7&F>MCCEk|g$JAi%SluYXYU+e(^qu`eSf5Y<< zRR}PwC&%ps9sxM17p`9cpA2{#PYdunfT7q|ACCtQ;Fv^oK^&w4_`Vf?s5Kt|mL(~b zumJW(cO`%+39t%}892dh=;cER@InFpl8j9==vM&!`zr+m03N`z4m^hdhhh6&37#y# z+jyv3?*QJz274#yR&F52Ex|)#TnhLTo-2ru0LNiheFu0xU?X<TMzo*_@M}CIt#1H( zz!)}zPDXM3P~<~6pf(eg1MUSlcNiK0d?Dbj;T-oC@DBig9FFoSBME_NSr`ZCv+01V z@Q}Fb0bB794?)hP-~>nEp-LA5R^d4d{u;p7@U#No3HTnKA1EKd20Rk*Lx4|>faMN+ z^9U=)(Z8^A2Z0vAL-6vs_W%=M*f^!I2*BNVNGf%J=JASn2@afqZUR5SU-1aQuK-p~ zL}6$Q!9OQ++)~in0nbe3xDCMJgG7swzZES@p3ZTv&rtg2EkL)KO3T~<*Wjr`f(?Lg z;~`1x1e}lk{8`W!0$#>Ll_MBXprj=@ayI@!PDscMco+{!s|oPAIq=>B-vMa-3V$f# z8^Eh7?l%`Y01wgg0d06D1Fr)77!MUz54gNg!Ak*M^Ej>$JgN6_+#EcV_Hn?rVw`9I zzYN%KzS4kHK>Fehc&yw~Aj1}N+y`LF0{jF|3-Eftk&6`E4A`vV^rfZAa3e_IA$SiD zhtbY0fwb_@2ww--goi3Y@B_FL62MarXucl<6F9+R_gj^|`~iWZ%TPN+90v?ouC$Ec zbv!iX+5xjxph75s;2b<ONDBcE;-SK5v-T|>>ZTt6O)EL>8>FoSJc;KD@D{+?7C7pF 
zms)|$EJuOJv;gp(RZ0tX0)F=Yb#=ZmRh4lZKZi)rV7qUuOLNl~{UI~P7Bej7c!|r| zm9fQ~TeW7B1$k+_z$h=Q+?ZVFwY4sp>Ge6pc$1P)khZcyttP|<<)yj0W2pEC0Ygz_ z%g^^dhp~CLAD{2@JHPYX=RD8vJooH(Uho0CA^u@Fxr?nAJp~_S(E5TPJdEjz6L1=< zM$f>+9&8PMxVFW{6vM}&BquRJ_zD)$s6&V&m`-kk>-XB}ErkbK?HzFt_I<{^ON>}~ zzz(7guKSz{9{v(od(h5M9lVQaBbtZ3U)UKH8xGMw@$Vnv{_mopL3G1gn68YHyc0NV zwU|oSj>Le$xV6N8J-bV$;9qU*s`!)e+?RI$7c<-K7mFpn7ALWdtJ(h>DZc!KRdRA4 ztUkssI9goaN%o>k;k%gDI|2X4G*IVj^l=i57^jP=#&mat;OTB=0{<Xf{*B!ag77t8 zhhZ@CEhi8ZgrmKVGlL$3?|tVuE1qSF;NK@$d-Mgk<s{jU4#Tz|$Yiwm@y`q#{Rwn_ zvA-$evOeq2fi;+Bx)Bzh<|2uYlp?aukQ6jl!9THPbQ0dfqUd>;ch+9<M!4%Y@&G?? zP@Ex5oesl)eka@Ur{FA>UP|cnvk{Su;%-dw&G6L$)?bav0fJ>rsrdFEHe;*dO<&Kz ziu0^D9f{8;=?Gl}9~t3l&^fRiQ=b*^3Z{MHIy^aM?}+c==8JZjDlT%Xyl{!k=Hz17 zg6ZTaTzA<fT?xE|O%XE=w~pI^itZJzj;EkCu;?1oiWcu&=K%vP=1kbU5szcqKf2+L zX=Xh_P#AFy)8mwLlg|Ic(BfM&Bo}%+jAQ8;f=}P3@P@w#)?xCCU-`NdE}iAeh{=IX zm^Px1;Ez}mKkvJo$S8kwvfdj!j>QNP_3ojAOcpo!eo<9=#fYk=D@Hux`$ZM66(g!p zEiJ0HEG_2VP3uUyD*c>oP&&3{U<IihywmqLL6vpo7ghF?7KePT`f0|T^@}fK8l0%| zq~b+Y_@u?wxo2#nm4?cAIv@`CTBW=jR`Wt|q9^-Bx7Ljq5+Q>pi%b@u&r}<aJj~!i zJJ{|z9odWRQ%9MuhZo!V-5h^%u{}6KJFwWUze+rPFZ7qXL7VScXlIm7_RPKHW`$Pr zKp*75ph=pP88?oX?S;Hb&k3+@_~TDR-TWolCTN_}_@<~kv?0V}CkMi&-ZYwK6Em$Q zVLD8w={2X!uSOk>n<;ZA-BrNje+YT`UZGd)m8Cm=-P`Vky?U?F>#5!A?#tL;nDuGQ x&CC68+e4chdJ?()^xYqSD(03iEqJrKs-WhLs(0e`F*o)=#Pa=a;Sw^G&wU6cQG);g delta 28499 zcmeIbd016d`#-$*hQkbp>A+z=;6V{k5gbs&K(RK8it~hug7buWFmnPuP~w&%mxfuX zIi#hjrR9(rqzNjisW}fgJhc@~N>d!_yq|mTjrKg>=X<^HdtL8!z5o2UT7K?zuY1kY zz3#R4>C6F#W9uBsI`Q@ICU-6Pe016K+jf6%Z*+5dj@>T9|HtY9+p87qzx^Wxv$xj* zYv)eceiwM#&bIwag`cU(=W3XvVBZNj<0(z<KmIbiaa``34&07OXNDpuNC~dzA~;vU zh2yTmyG@NTuQl`Ln>lj=$Juk7<5JdzZyy`NbDb!-jOR9F0t<Pr7SUe()$`nBp5r_p za9l9EF2w5UIc}}+Zg?Q8=bhS(xB@{>ILZcz{`1eoaoH_r<c_k8;<%OV5Q3OoBHRtu zQV44uhg5yx3k3e-FNNbOTIQ*-l!Bl@oErpMvIU5RVsd@q>-axnrL>$eX7aQgj!Usa zIEt;5XX(!4LNve3-rotO6D8fICXCrf6>RLyaq=xa$7PhJnd8J#F<UG(R6GYW$DiZ& zQsOp4*)@)<IapI8mSmZo#1f0ybC77gYm#)$5_qKbRFr;CxRfCs$tdmeTz}wXx^zU| 
z=!GPuYO%P&5+dnr@g#RVHHlBxr(H<5Ri?*4IIY8T)=?M8d$IvKQ$`XpMc%i%K(w@H zT(^W<86{)lWM5Z~OO<MhD-dRah<<sceDZIbO_U&mG7AZ(<PGR*^2_f8ssyXl`8Ayn zZiXT?&&;Pv=Ti6BqdMu*k8(3V1x4wkEHj(VSWveF@p5eGF@K46&u%Y5rN2RM+Dpge zZ{5`tQ%et^EuR%5-}+R&_`F*}g>+7K)UrsE4yH@zP(yjIf#i=<(sNKslpN*GeFi3K zAf4B*sFZ*E3#BjZSu0AXQ}<BE%LS9w{3&A0-zb7S4I?jRemzwxX8zrU_o?JBvwauU zq?$F>=p0ol1v#tPh5lQ1g~+Z~X;zG_XVR4!f%@ILAOw$1?O<nd=^1kk%$a}mue0kZ z-BeoeelQJ%EOSb_bSPaKmY6Qh1UXksmu{wxOdXLrJT+^0<471VkcL5A0@~#F(m^f7 zl6mHA`8{XUwX}t6SGL<RJh9<2#}!xTi;7Uq?`Sa9&Ce*+EqY2a{mtXW(hT!p`I^#e z<dxu+&da^Ln5R>-fcKTi`GE;ER?OLG$o7aRpE9saCtrbcW@DXN3w<uL_no3WiXkhV zFRsXUvl%8_W<NPac0ZcVbHzt2P6?-MhDVn;ZV%-zS`S4Xq;c#?#WfBPODCHP#f5ii za$#1C2*c=0I3$*I%M+z138xZ{*bH|ru~(g&2wz=dlbnMc9iN~G9iOqcokI!|kSa>` zya9Pv%kIx?wuDnurM$G#F5~6-y)c0u*=)9|G;`i~<ZUwyRU!<)NT`-q(I750TvH>T zd}_1p{S(4ALt9m-nuPLIp^K`}77}`>2zf%NwOBgfBuQneQa`IwZ$e5eb>9T$KB{t6 zx2+t|X1I5e4Rh&K(EFlN>M}@YNL7d)iU?^^7PWtW^I%armR1ttBJzllMxyAHY@4A( zBL`cLqKrX}KI+Cyo1uxCX0BR-AD`H4hz0(^3s^Z`EF6=m)!B|hH=`)sLEQ&J9An33 zh(zyU!l!=A@?1j-ewN30)3C`8fJ`9Cl=g748Dwe~={zP<evMds$>N%BcSV%`zI}qa zwbZ~vFm(?tkTehVPK~p{3;g1W4Cyef9KI--Sh~Qd->n~uvEh{}{VX?NsZtg?By5Kn zRDi@H5eZS`e>FhfS|w=mZX|yM@(`6p>Dt(|4$Uk9`rWR?|3c-vh}MGw0-EIiss*f6 z0xV8r(@L69peUV?voKiH+4{$0lvpXCC^gXfP%s^tj<t4<$ZtN@niQc*{RJshfXdXD zf*X^v9u}y4F9ppgIZ`3P<`6^?sG^ZnKrgi%0Tj7T4~~9!wfr2L78YqYDwuw^-Pq!a zi2Q$vlEs`UmJCNzVkr=%$!1ZMsxWs<v;d4e$UNT*4929v27^hHAEZs1B6A)%Hp7N< z>@D}O0vkN&)dT1Xs$sr!Mrn(fKd}ZxWR#8<>b#}1RDhmp0X)*JKM1M%-Q748A^%oJ zY1`N&p8J*-zJw#`(%CfW2b<xIb6C1MBzq%7I+^bZA!m@H^^PExE;VOkKOftz#60k@ zZhJJ(MP<roA2+sgoZ8CT5_|*GQY^Kq`@wV8%Ok9Sni;EmsllFF8r`q~GHHvOiCB6v zW1eM52c@%nZ~w({Y0`<3$>u1rB+YEH8LpmH#+kA`J$;0F<|utQ4w2E!x$`o`(s|}g z42N8EriF%`bXM+y?G{arfr5_12+EhoS_@ujqIUEzNQ6deQ>0Mmp*E4X8UcMoBhir> z=$En9SHjg8x3m}!v3xYfsP$;<)t517s-iDXWR&EZQ)GwdjZJfzik(5*lT7vAhGeM! 
zQ<N5Ygci9=86adiLJ}&{tw(Iw;nStRN;8GAe{9DT6d(VTQJP9z#s3%U>Di(n0=<FA zRg^j<-jJ$aaR_P3kw_*3O_-PgO(HI%)SU`|_QigP(vf<B#+__7$!lpV!CXnfq{Sjq zgUu#N_Q7^Wo)>>($Lhmqdx30{`@2S-%l8s1D?L5OKCZOc@`VQn#mXa|w^RL?yH{KH z-@>pSa()n&=Z3M~UXj`M!Ia5)>ds6Wr0()(ii{C5b>E?TTINT{<u9V4M(PrifdU4b zpe~jrRMdq*)#)Z35>*}GC&Wd4v5?A#b?l5+YmXSj|AulcG0M9`nUA+wcoNDwc*n+L zZnp17%2b9_DN62ZV|kADEBUmt#`w!yLY3m#44;65wYpy@i}48!JB<XjwJ@kdnmNT{ zkjIlGhLx*4B9x8yX(kMB!QS-=4et}qa~40Tuo~GH>wKW*<~zwLF%T}X!{il$*-0N? z=exn+JIm=2?4Hj=_bDMSn(1jU%k`b^{h$TLz-~Q_A(MXhdHH4w_N(v6&?T5>`LFjx zwsXYdBl&^y6f~#Sz)yo%u7MSN(Ht`rVJ5jQ0Shfk$;k^_gR&7-g>+Hxr4YhqL{-0V zk~sC$7qK05G0=`J9@Lai5F7cp*KLvK1W_i`5Aw7Zq$nM(^OjFTN*?-xnNY!I`5E*- z=y~p7iqg)KE-{m>@(V98Wb&NMwWoH_-F}72Sdnio&2&OuB_R#H5qPtm5?G@JI_?QX zj&-vVj$jZBLW*LEFE3iZj}xtxPNMCY#Tm`H2Eui}Ks$s?Z^(2j746cc>WorDw`3m5 zXN6eVptD{NOJ^PZll))lh;v5rhA1p@7M=VP6-IJz`U+d>Z}iyefgF-9%;+qZ>f*fF zm;TX0eG<Fs9}wV$9j`{MjVN8TKJlIJW3BX*4<s>9V}SA6adpcrHH0WB9G%e?DSYh3 zQjM|EBatDB8z<T#S3-{By6gANFzS)O?Jg8p+MNRD9%tpou0m%ocG(zY{1F4Xarh1a zeR~=TEU)j(^Z~5`y*p#}=G3C~UXkaJU^H3@<9w=-4Gu^aS{Ye+KsUj@GrJnlLHOLC z1q8-~xFsun>5VQIrL)w#Z+9a7t<Fsz<IhF~n)zq!%|OYiDus%BD3Ses(3iCcYUMs2 zlSmt0e;U|?pss?sGusxlNZ8e##RpIK4d{+7M>?E=^=mvf4NRCik?jr+5&EaFv%#-9 z?nE~7I$vfE$@gwgbW<qcT7o-$*p`qr?sI%za2$QupwL!b+KFhX2Ad_?+VHm}Qp|zQ zbyTc;qJt)TqAkx^Z-j}HP7O}C^R0&3HN_7Dzzak!2vNeqsQaufs}60Md>l%Rmenwj z*5kI~NBPl;GJp)E^F5^+8aJvAc0x`-RWJ>tb7&oXI!g#k6><aEtgtY_TE)u4;@tnj zcCMwj#EYE?JKQ$<C1*EwRcp2F_E736msjDL>qOl&-`<+zge3+GF_ctZKGBbzFhvLj z3G9KXr%>d{x`g);ocpmA;qk)Hec6%lXkmUl`!jr?`y1GVwCo)8Y;Z&Z^N$RP4SvBz zZgmr_wf7V5m82R|tqnYulh3eNOMls+7RTh4Em(GBg2z`~X$!4V`lh50`#3TqIR~4S zmU2CgTa-`>=yxvGXS_vceJ12zkv*aHmf@5|gXM($8&q2?*`UfEvgoGG3O>X65{(^_ zfK?0%rM}3DF$f(oKSAse;sK^5U!F7otP))ux<zi*pT^%C&ho&1MC1=b+{dKFAln{d z@gvKtq(g=vP+7-av3^++WOwo>Rm}<zCD*amPH!aV>(v2lCy(sQPB-ln(WVtPgoI1j z%ZeZAS5zQnAK8d1HBO!lZtT@&ttR%y!9Alio8N~m5^F+@-0?T;xg`#?1o_I}Vqw!( zw=Y3T#=5|s(hup<8B+BtuyR7m54wuQE~BK`UPmM$FT*M-9jr6Z!N$}r;RwbNon7Ry 
z>{_#MHaRLTJ`1xFy&yHn+o&DT9}cOAQ!QUon?h$jVXN~Ir5;gKR~OlIj~$Q7GK~Hm z?M~8daq3XFv|oQ`uQYGs_sa>J&0?Q$sV-5>(HTyXKIMMu1e@Kwb%75`jWR}9lPCM| zoPd?Bc@wPn*cmwaJ5%GzufrZAkH<=_il%C!e@8$7gO`(+zYx8MsPe{PXj^$CQbNzU z>_tRQ4uOcxVEYa=D$y0TLeEM=xdQUs@`mTFA(}90pz`n7GiA#_jRoC;ldpoMsQz*m zN`u|S5iN2A%nV642dYFyiEb`Hlnhw_$-OWBm4CWTO+o4(1{g)iri5qWpE`f3!8$L> zoPsSD`~hIq$dmAI8Le|!P@=7e;zk}+6`9wDu@lk3O|PJ!Hp9s8s8e*+Xav$k1}hs# zf1G*gL@e*!!@Ocz6)c9RqIPD`;VMId9IY1F=;(2KNUav7BJ*O@3-#|$DV1toYXz(} zy6Y5O(sfkw&~5P|Uy->?mEVfMD%~P5o$a}bD&0I#m{SyC2(M;49Opyb0L_zUGfX^y zbzXNDf@%}q8A__U&Cnfu3<jx!`lc;B*5tzw;__Wox$hxoGvt2Dar-DQjB1;~;Q$Mc z-P`Vas6U(~6tVY$k}Z%#r+6sGSW)`XW>^Qd)Wbe?50x!l`Z?Y1guH;cwrK8PMtX9= z&W<hIcwu7bPO;?&yywRKbR@J>s&QH#mdzftYCUkW!qh3uWAf8qZ8i)*I<&dy_ucod z>xGSbh$2FRhKByRmm;1J@v4h~NH0Z%CLf9H`jyRzd&P6)NGh+s_yU$an=aLj9f|87 zwkeX<1RJakrljQ6i9Dw-y-U$@9-}#@MzKz<qq1h3m54<<l=KX3p%93Y?;lv;W?-;o zNFhzYNtf&&JW4TIaaNN~BIq1~>U7Wo>*m?2Zoh#n_YOvU`K?eYR5k2b$ly*hR?*ro zWeLUysu>FPqN*7s?)F3$ZYfFchCkhEe#X_AA^)iw+CWNz&G2A1O@)x<&6r<&e8@b^ zJ(2Wu<xFUKIh;$}+j_Dw@lDz+ehLXPpA^@^%9tuONPkEdtWRuM0wS@_SaZ#J7#bO7 z3pSuV%c-y)(QJQwP{12XMjM^bt;#V4iz23gd~pY>k53d<?O@RfZ9880ng*EEU^7&K zqI5)?VHZf$%(K{*?`C~w!wfzqrzi_|DV0G?sFAPNviB3(`mg&6BhFtPabyO9n)3=G zv*~4i#yC;Bjkb9Y8dIutA)QvbKQJr5W<G7k+kM`^u?1~j8}|YBDO#ePp-+7WGf<tq zXsz(1qz)=alezy0U*8z+tAuO1=LLDtKV%AC$Ut4{i~wkf<DPIVwrx{3ux*4ebQ_z~ zwnsWnJ!p%4WL-`;0$WlX?HV@2j$J5CiLTNO-HA!k+-7(WoK6li15uGJ;3^i~)V*i~ z<w%jYi+Oa37YhGoUAi<2NP+;AUF=^r!&^JmdbO}7U-D*)x{Pd6hcVkN;gVQ7-!rwe zhXan~2h&Q1yJeINw;%gpi&&bx>M^tLIx1xQpVY$C-+9#5((h8by=D>e-N_bp&E%i5 z(_K3Wt%ozOv=)NPFn|y1mu3=v9?J65)(K-Sv4C!Eg#}MpX1CTIdZ0lpU1$>ys<*jf zlPlFZQ4Uz{;>4tCjG~^G>2RuxfS_3N76vr}b-qv7zHYCES`cQ9RK}3?+28q*Rl4^u zo@k-485Zwgp|6I9PJY1{h^bb0#`^4ezORUd)KsO*bYeqa?I0}tgROWqS_o-im9M6? 
z+O%Drn7>d?boP->*bH+t<_`+9S1=7`EQ_4{K5O&ZUg0|jcK@{yj|vlYX#XkrCuIL4 z7APJR)|_M4#Kl7XU^XG$T%i9Q7G;|=j;qq!RFm|i8mi%WQUz8Mquyrs=*?;ng1#6N zi+iMVeZ+CRzW5$I2cTAPP*VOh()QBTzC?Eu<_xK;Q;F`XLb#RaE)a1J8fgD-Y_<}V z-uhHG-yr53)$dO0;p90swWLTKpU!-{oA~yuZTIlzSe<NDbyv}*lOIz4PE<JeY|v`q z=A5I#h4g;N3cCCGI@S~avW1RJaxVnQ0gu?H-P;;A2BJ0cefQEbC_6)0tJ1v_$R2bb zA{0Gjy?T5o{PKX^?J*Wge!q-papup|r&1KP;LR-RQ3=~D2K5n(pS1&y^KzemNDNRV zJW0C4Klm+vW<O*^6gaCa5y9UTqJ_km6Y`&Dlnij*Q@p>ZUK}hr`Jze>rl3;Yo<9H> zXMxte+2Byt7Y2bPNhBbDPQs;g2On^tgiCevNxZ{r^%ieY(kp6{)e+^@$;D^bl%7Um z(i!$v&rqSq8Mdiss<7)JyVEmH`0{ra+^e;){ZH1rSG=iilX_Ug9GBc@VXV_)>25Q` zYzFC``zQOjSA=k(lKs%Dm9V*vJ?hm(IP{Qp?A_5b^AmKx`x0~wEr{`(SW)lr_9NlN z=3p~;AYJqd)&)P@kJO_JYzA-QSz$>nv8!{GO{ZYYxkSVHk5Aa;-u(+Iexg?DAyP$j z@|UV_u<F~O`o^oi_f%h=>RYV(W~;tAiceCSUP)+z%2=y5nPUaYpv>ex;D4u#;S+KQ zjzesFW}w&U+f;0AwmZnipV-CBSl35rXG<U&y!3bG)u)ND|0HYIC&p>+M$$h%`h!jA zV+xx~eL*#k1{c*1g(V)N(v60oq2)%lxlcFYt&{A2pVtJB%Pg^P2mTV9(YHg$sLyD5 zNZNqdh(!kHSHMwplF$vTx^JW~avi(hw|%pYb<{k^<Tr;?@G*IohAvP`;m0a(8g~22 zP}Z|wVZi}XcccpzBNUmPpydUxD1~y8jzNVXM%xjHRcdHS`Ifrd)?;)nKuNK=42A!6 zGW<^|gVK=G2*ZMV22ck(h#eJEu$_dy;v^<rn{iLHCJ&<QEMbV)i_-S;RddtRDd0#d ztaLU*J1>^qKeZKIyzu%WTCA#cQ^|vFNn0G51G4y%zW5CFlH|S%vkMldkJ-uoeok*I zp2Z)t-}-ls6+b3{^Ku&5(Mud{u-LhGQUqG6A@G|Of9S_-@PO9CzF9}bcmEI|??z{! 
z(tA|ar^MO-R#~^=TNeFZQnzGJ939}zZ(5}rjy0Z&rv^M$t^rTT2Nkm=M(xBk?ACw+ zp=3K7Gq9U5@dn#EuxZF0G%HS?`l5WqEbV0{UjPG*>TXRAy}|AeG!~FyKXt#a9j(o} zYHR|c^kb@YMy|y6n{b4bH(x}Bp}HjrDmR8>OWHO^`#{^Y83wKf7Z%-Lk!Unhplw7_ zuHOog0J_HT^7KWPOq1M6hP{&R=XSP4W(He@Xl>{;>owx60|#li?qDAb3K`r6{r%q( zxCQmI24WT1b(BNkkMcsSfha&KjHViAn##c>F=Rk4b&scsw7ehXUPGDZVE=Zdl*oS; zZT3HzYH7z1OS@tk^${CBI7QI+V`~Nncy~h0MQN!yMF2vn+EvAl4vq~vjEvFZ7XQ6y zQgpFeju30|^J~m&NJK#RbFR_yCQ27DIy}L^Ed7XO4CyM&|A@Uiq?zX~XB1Y)p^K#- zOWfafX4OMl1?3ad`u$*7q#mn*SU6_<Ai1YGGw-2ELNjO9d#Io9eQfDs>0*kE0crPh ziQ$S9dt+#tcR2-EtDKTf&s$vQAcef;#7+#2@pSo6Y36QX&V7u_Tqou-tfdh90qZ!d ziJ*64BZnmhM6RN$%IGMYr6<idTz=q;GVlYoV^}Mpcon-oEWs(ok><#aRV+B`l@>aT z)r5P{e^RYASS60jS=F>!D0fv+IH{ZI?D8R!m69(uu*F&VfjJmnvm$8uHGrK~x<*F( zr6q;*Xkh-shqPEan2MmpZ$(G_=nlFokChn5ZH9N=rx>fX7(YB_9}UlLar%G8$b1n) z__r8|k6HAHp4kKZd2ap~G3Noc`{t;PXzf(bSu$}psDG4-V}_^P`z$mnG=hVz3da|- z^_lOy))0Q~0AWa}XM}+(si^M1s?6Wzh=a<p;+z9JH)4qAVm}J)=Se1?YI)8`mNIgh zvoo0R-5AOCjO^xp!a!%DLJXgb99%-{*65f%+t201Va4>IE6}l4*<IX|bUfEx^1Xqj zx8X2bkUdn`aFbokHtA>3o>%HtEtcqQhT$ui^Qb{Rf^Q-NoUdZU;(L}Ln7(+60kgqo z_+SMU=SO*31^UO@1a&^>DcX+bpTSCd{|1HiXrPVE)tbcTJJ<}9R<JFjnt6UkL3T$_ z>1v!BVwbZUqauVgcbUWJNTJUS)?#$HFykuA9NoNq9aE;JKW!0{zJvPH?kdzD#a5m0 zsLnmTB<nA(&Oy#Z5bP@M79?ylj9_fX=vZOxb#`@hOQB&I^T_GwU!+5xIMo>BsaGfo zv>7KqyUa%BjOHh??{nUB9}k5$qx4lC<9JjS8<X4I6i+H}VuZf<EfO}vS7NMNAP0ef zkTHvG%T4Tf@sc8xpCF$El~!V^YYvM_)L}<V<iGPn<bB{);nfT_99Q{s#iQii@e(tQ zStESAk5!IovMkEvx&NZg)@a!hL9@81SbZTemB`=YVx)@Q9MfHp%2=zhNo>~G+1?Ye zOiDeBVov>|%0>D9^5_f8#svzmUSPrFQezK(OTAJ7UH`nrL;gzjWy6>6iaAv>Mrm3_ z+iX6Dm5ggGtRBerk2AN3eTGHodKj73#Ysm7i*|0`*EyoXhb}>BPIWR_{1Z+|$6@aj z<jNf8K0ZolGH_@6@vV3knA0-gJhUTfXruf+#=F+gH1yfe5wq++eGZ~`lK}grL`-h- zp8K0nF|}qtF^=iY&6{L`qcr<cYW6Phl^Aaq65Wo}(NcHoI5y1OA^9N`NXX^4k?(+_ zmXAiuH^)U`E4ArQgZ-lE4=Wy}>HC3XKbcL;xwSi+IKjoe@MW0$8Mbgjdtqrd+czOs zSoDaQCPoMz=UCdr)vocUq2$?05@qMp?B2ux^CD=Ndh1H)uef2!kQ#9LHm=-eORu!) 
ztiOcykd{6%jbPD}f}Br4J4Ce2r&zB^6Z`JDM^jq4k_nPqAxJY%rYR(C+_!5nTBV+1 z!YRe7d=1+G(k^?C;+^jy6(L=+re(#Ys2=7rIe{O+I!`v?-;tC3G7y|<cTu!`->o#e zA#HJ5T!+-czxe!?jd7P`#C1&VSZxWx;V@vaOCwv2@B~pQPDiZ<m6Lr1=`D6{ve~_O z1sdA!2IL!1HK!@dklzPWLd~1rR0fj^EL6n`HMYb2%a&2tXFp$VvuWa2ruakr&neC0 zKO2gRn)!oW5IzXu7JZ!~$PAEai<^kHgBeLvWc?yHsBm>?MkTMzsoBO<Se|d`zjp1S zKTyQy!G~SjM{aq7RZne#l<!Ur_D97Le{ovt4%Vl3`Cd!xPzNe%*ff(+GK8g0Ya0KC z73;lJi?{}xflS+2JNF*PP&#J2eH7YC(m`o>E!5-Rm)UIH)y&_Y_7T!=pBFaXrAVos zht05Z5mEZ$_t0OctiGre1Zv@gXlj{x?onEtlW0lW=YpWI9R{4Yq|jr5CEd`6R4SfB zL7kU$yYLTh2d%yb@?s;d;<Tj1MY2oY#T7Uu;aBI@$eZ;pbDJK7l0{9A&pG-zs_^XZ zRDIfI*sh1z45bT6m%wuZxuAx|m|~(`u8<gA-0f5-i`IrI`S#May06d%?<44TESOkm z0_92YVaTSvjQZ0t_U-i4WIu2cF2Tfd?^t81w+|q_giFOo^ou_Nm6Sa8H<}plg3geB z&5$0aSN@Ibo#b)9v7{N%#_foR_=}bJG$HE-!T$DSBAY)Wyv@KDsmhZSIdls`$4x>% z9Ynb(9FbeS6x)B4othEl{uB>swAJRVqs(z;qQ_U@G%l#AM_JFA{vlgqklV2g>3baG z%Ko5*>XGFKG3nV%H#z?(TR1bUKzo!o&+M7npDvQ2X32>dh-jcI1C<^xDo2s!yW3tY zuKD(8F(3J&%25{B*Y{u`aW-AwQyllWs}a^AE<Y!uH1bp}){2~lbZe*QTz<@6GD68C z{)Hu@#Qr%{qkJRl+HPvddZ>*HbBs1zV_0*{*i~s~&GE!PI2AFM9}@)YefG+aCxlfM z%=5UJb+Uvt`RY^bj?b%Xwr)78m2_K6*EO+n9T-}p%&uY!EX{kRF2yUB;*BUB?Wt?9 z3PZ_#eiTY>Qk5l&uH>3z<Wyj_F+vbmt*O>p>;zAAJttKR?rB$aghp+h3tA8&e@g66 zOLLlYkGdolT4LokFSDzJR$=jDJ@Q8h6$0CqA0X5U>`49{myI=Q)}?Xm+gaU&5jD*H zr$83^dI+D)I=x;c3^>L-PK|QvoNhhrBtPuCbL#A|ykLKoT{|5d<bXYX2Fgldsrm!Y z#hgc|>mHjyH=DW7NpVU*n9cC*>ukgsqm!%R`RH}F;7qXn?pU0OSD$&^o=XMcIXA$* z6`u337IOpbe@FqR$J|!J-??nw+!*^W@XCd4nH$Yp*|nd|N!|#i_Z!r%QnwwX!0MsE z%5y$e+B&0Tzz%7iIgRcQ#?$pd>NoTd7OnGD5o<dyR@l9U%{v>xisyYD@NI8QlSfD( zhAtZN$fB5%x}-+_mQ6bsZNKbKn{DT!`3}7M8En;BuZ{YatymD*e;alHQlRra<#HvE zCNtga^~L)c98BpV4?6YznJ8-m$vN2!GiG3{3kem-&l%-igLri{bhd`g8C`yjc@)58 z0<P7>$qoBoOy+lxbn%hdGn0~Ezs7MhC*fgMhIBu@@-Ix`w)3vBw+jOKHtf@a@OH~y zM3OgwL^|qZU5C<QvIfcTs$6-(OTkCqEGsm!50|!f_feS2hP?J0c5!JFq5LCeTRO@0 z6UFYI-%OtJ4a+ah3J5xa*+T{qJczLS=0($#?PGUJ*9nEy?CrPBeu2}_LOBhn>}=d* zDp!HB-&t1ucDVP=sWfjb9m&L<f{Fj#2=?slc23t;VOIZ8cb0|t206fD9dfIZ4SlDx 
zF^!ZAn5;{5bE=RgV(v%a+m&p`JCR053M_FX?ri0e&CfIz3|HB`cLD?3R;ojrN6Hx` zkxk*nonn=4!W9-?mf%;o8>c~x;EKbT_1oW9WOf~96U$7(#i?v*Su69$P%zZT8Wyh% z=?CQjOtl;nK*_?4h~rJcRCcYbg}xR`k#e9K0u6uWMHcj~so)Fnlq(^!C~P2sq=S}F zteq=~rs1(>GBrd#cIo+`aMK}>$x5ynK<o51q)!t))p|1w<HPPEMRGgZbJC^3T@I%O zIKiS}al?|a0JWfzhml2B7kI95$TSjfK-s=`jlEh@Acm6zP~CBEFI0!qn52;63m8tr zMd_I+JrI>0A#w>=)?abqZySvJn1}iU8HrBT>%65sGU_srnU)2%{N(G#q;RJS_5svJ zs>0y<908P9@e2Xj`)f9SSxaF~Ia|4`kC4=a)h!DYdQW1Gtcfu6(9T$PlrQ-FJPHJ> z;v<ZF^lPjAn3LECF69TIR=0N{uv0QjLbzc(CVv)+Mkvj)<CoYKRpkF#`WnBF)H5*$ z(+L;0>L~evdL82d%@K0_8z?qO%CYa5JQ=YIlafpGcuPw$=Qs7*EeOY@T#Bm>qM0uT zkxK|GTVV`diX<rSY-mHGHF6%j=;~&$FdaUJVPs^NSNQqe_(B~@-pE__QYCej%;~)X zVZtz0^4=w3=}<PdTx$R2&uVR-?L%s%$u$_!su?B$$FFmTbqA^uZZq^BN129fgtS<a z?C>3nS~*C#a*Qop8RyXhyL+nbSV9Hfu_hg0->qyTOdresTsgoidMw0qWYoQnh-%p# zW5eE$j=Dca8D*|?a4#sMP2{06biV_iUKJq#1Nn<F?DO|yoi7!G342^I3tZLI+evu{ zkHh0mmA#eNt%SNySdUd9UC(@i1#K41V%(*LDXC%n*$i$V)1>q1372e!@wqSQyP!!r zu`%#;F59vyCUgYSNq>Agqa-8?lzRWB4B{GV<ouKD;i~t98`Ig+58e~H4`${M(~K*( zk}hO*waHUKHL4U(x3Izw?+8owvdJIyZ<5yo=2JUbdc);5TN>qEKvi)@w6SdajNSaG ztxz<I1+MPr*J>1UF6maQEb1M_9D01|`Xf8KIw<y4EPq&2=6*)UUHl^@-u}Rz%83~~ zF<WWd9%_U`H54J#Hi9Bx(#Jy@&(3RY276jUFXewKU;C8Vt%-CS{~8idG8tOK+N>F+ zm+DZ&Zq-!Y0C~ixY{Qyn&40w&hULM3tabjGz<f_JXF#>&DmUp%YNX9_NOC0$=qb5n z-Oh(=TzH?a<B=x%zNRr?V?6WvI6#<shQ)r|S(vqnP5#&<EWgd({y12eb(?MdxS9WT zythCcebED)5*y>yv}Sicju8%;ng6<-Li{jRvo6-Kp>fA+Yctg1xInAyC(M0)xZs_} z;?}nprfgsn)|);0ZG71y*EX;Z)=vp2!XT&fE1g{>`|k^Lot*xb^z!XA7QUf?89$2< zW_-vxeKuPt8_d4`EF*Zv5j0y2RQ`E*&<X3%)+)yuC@E0s(%xdBTM~o^gIK>UkwV%K zHfPJL!t=rG`z@V>S=Gz71`BC#ve2z<go%S$|E-<d429JQ2fNZ9^$R;(xuPsHHy6>w z>-~~(3JmEmy2(jQ_)*SW$PR3aWq)k-7M!fi;q#S($NOyU=ZV7P0qo-EuLbvm*+wZF zt+ZHyzo@d)$`)C~lD9<*e^s#w+k!e)^rwMgG2lCf>@+-!!^aRb%e=6(V=Mb-fi4yz zf2?jjVR!sd9$bL@I}4r*+27m7vn!rXJI8MCZ&z@59*v|Pp7OUUVU%~Ngh}3_5>fIx zm57tyR|r@6U6qj{FIEXrE>MX~d6r5HmZzvhww$XH<K<y0ktp|7i99)dUV)PPY&k_? 
zNrg0j+NktmqN7y0jObvMhS?1EZk1k3w7p7iCc0rRB`CqV48>ig_YhsH(iKFXQE6Ha z<$`L}bCNv!Rr);9J5;)s=nX0@6TMQU>xo{f(hWoxsx(>NWs6F~j*5pmDh<;<CugZN zUDC;!DqR3`Gbg91o+zRdRXUF7Sd~sB+N9DcMEk0=NVJnmXA=E<j#B$PqU%+9Hqp0K zx{&DeD!rKKnmJ1QOJ(G#Py<#Hy-TIX6TMlbHxvD#O79@LOr`e_U8K?#M9)^~YN98r z^hu(#ReCMa{Z;xrUjUD&dTJ@4lS<1($EkEZ(PouyAlj(Xq<qV6D(yrxr_!E8Kg6>R zi~}RlvPzqXzO2$wM4tp*03mu8ARkf#5{cfU(qyrhx2m*A^jek9Bzn0@4<>rCN@o*2 zPo>8bou|@yMCYpXY@!FNbRp<La)#<zOaUn>T}E^pm0n47luEB9I#{JQ6YZ(eJBYSd z={-a@%w|iz4(aprYFuyW<WsYiNrr1L_hTxjLgjdZQ>k*cshq*!JXSd$svI10-S4TK zC9_$>*QSDbs&AAUZbJABmD5}0M1eD2<s_>d*d^UZsGJy;lL$_Kl@p+HQo!l1avW8T z2u_O1`SW$92IIl$RiLtNs;oS)(p1h*DrYu09aYYE3P)K~z-*y1KU0~sx`1g?nJZK# zEi+*HsLVoQ(m@>GZE3HDVc}+ed-NVLFwiw>2fJ6Wggr6BA1he)p6hJ-r<<G_pV;q= z+xt0Ra0$mpQCdtZ3|YwSZ9moFrFCcc=mCuKrVW-(-0I@OH9-WsL|61#V<>$B<UXIg zCGHDUC{UpgfP&F6V+wI>k$+?BS3eoT*FqMvaZqai)B&k1>zh{!{)A0QJ7+EeS(gF} z#h;%dmgN2$;Pz4!4y->^Rvn;j9{d+o|7r41miw=ziYV4hC(@v2U^0qQ*;=ZYbTlJJ z*M2k%WJByNKDglURGw_t+KYDIi%BeSe^X<Yn%DrqcZkW+S8q1MZ?Ce!`-44lKY^Ny z9oY$Q@{D)^v%jzVC~RmymEdy$N!Mo-yT0G-4^G`_IeE2WMKMF#5z-63V8P#xOHAF2 z=0GDUsP>kk!IyT2{R+Anlq#H-Y=(Ecp|mBsRZ#x{(k}MRx9v<P&_#5s&<|Xjp&LXp zq+0ls6o1=p%<Di?VL%e=a3DhXY6}~FAW(dt(!@U8NvyQliKW4K_ynzOdzzB$9y|<6 zmvG~>M)4z@zQbCk1~Mh^DjmsW{m1OP106yXQ&giyJnlU-kc-l&t~NvSG#0Wcs34^u zJ{nZ|b0BP0xNFj_dlOmF6QVS;m@3H`(jOVpZMpsfvf1JB?o37DZO|9fu>j*M33{T< zaD|GXbfLb8QTLS^yb++3J)FWwW1B+(RGadyibU-QdKxMnM|*`LRhyxK_$2VUDzJG3 z`6=8%2|wQ$K1dB8|04XijTC=RV|aTIuyZ5gzYsvp8G#aotK#^4woy)>MpTJz52Wfm zQTgpe;8tJv2?#v2+eFb})*79Wbj^Z;C(3rK3l++}ltQF>o8e3s*0CbpJzSAIVQFJC zEa<|@DtwzcX&fKif>9ilg04#NRDhmJyI<HqMef>#9jk~CnoVZ+D_VOb!eZ7q{SQg3 zWu++Gkl5_X9v&;Cmm%R2`>ArapqE&W@53W9alus=F6l&IiF+6F`N-#%yqK4{OW51r zM~(e#DyHW|vG`QJqkMQ0=q%CtwD$F`D9I#fKD67Sh?hit+Ii)>j|_Sm(IHi-B#NFt zt%L5_YApL0BFE$->qyUs5z6|kcGkDV*$s}|c?k<X7!(tZ%#}G2{EDJ=A1T2}!LIIS z*{LzWP7T;m!bTr#8(4{O?MAMmE;(Jl9oMY*OjO?3mVI$BS}6H~T{+m+ZS;1e3LRV0 zl`UkUhgvjmfoS20vV#)a55<?+V#Tm<yV8L=51)wLgb{w(7G^orvtW`B%5qF@V<0V8 
zUvw1P83J2u;c$s*Uz_?U&C#h)U$hf!d3m`q*73S53+rozGR}2&knAwTx4Yz4n=ZNO z^~(?91c_=;V)GFTnzeIE(Ib5k?aDU8^UfR>KF<uB#1%}WtZppjhX`TaXDs)Jc>7O` zFc+@+p=o3+2po*`MPJg)upO)mR^QRmhgW22l3hviUq2*=9ZlA59P~xKlv#xL#v)#C zny3;#e9BS}2MF=WEbDN4Vfv?R#o=(_Y$vwo@Cf0}cxJ2$7joiRQdN}j@n$xrDl*_y zN3_NH+jmh?*k3Yk*MUGfIh)z~syBs8o0$8N2;r3^*1LMY@B~LD_=b06t&bcR?sZ`N z(HP<UM%MCZfPc3R%8|j<4o^?hlKw8(Q-{@vOJpOCwhjCNhZOC`;37!ezwLQKJJA>I z+2==t`y5PEn{MZGoF(R&MZ8<zfbQ7?(c7~jNN)h~sh+!!GR^EOl@3a=xTY4)=K}Rb z@eq-FC|Lwk&u1@UfyaV`Yol2EW2x~R0<0#eqzfC%aQmf6auDX-f0!h*HjqhD@C|2& zkD1!dco9o}3nUU!ZIX+^(OhJcoUU@^^p}EX3&|$grY2BuS;#VL{0+tHk;{A-K1E!6 zYnH0@Y+g-ZK^GI=2a>7kQJN9g;4v_teG`WEr?q8CdukOjudQuMT|{d+29`no97itP zS-c5Q8&xgRYNUlC`N&gV1jtXJdSe$QqXdQsJ`l|sIpSbV@u>wF%5K0uo!M0Q>1(#I zuo-KZ87f52#sxsvCf!D%L6;)xzZw7Mf}w5iP6e>W2ePu``d&YE{pSTht6k`=Qp3_k zbOCVq-!1?)L-7CR0-(4nyLbGb7XXJAyvVZ80_6gr>xogOGTeVyuj}jZIq*?2rxMQ# zD$;W*M7!!&^<Nh`?{H&>PBgfgi>Mj##tbif*mtuG?8?cO!WAoS2ign!P3+a54hv@% z;c8%<{Y-3wJKs2!%L~>OxE3%8zb|C`86Tmjm2wI2<wBamomwfE08124a4Wh55Zc{h zAD(%GFJSI-gYCCrMI!q@pT@+w@xuIIGX4v3+u4^t#|pxBR{L|4z;_>GeE%2Q|C7Ni z<g7_Z=)_(*n<O+H1PlMgz-Y(+ZrzXmoaLQsF4(s2{NS7u@1C0fqKS@M*pBmN_h4Vt zoGdY3vM!%pKOYt|I0m~xnmLh}*nloHS7s_%@m&zR4yHTJia)L#R(wB}dZB&%Qj0om za)FxZbr~iXEcvsNl5awLm^t;|Ecx<#m^OZ_;)1=f)`uOw5T5wg%*L3qx0;x!4pcWP zAVzAi{1hTIH&4A39XXSQT@2UP&n1})>XnwyJFNG`&dsfNqpVN<#y6n7UWnIqL4jH( zxu0Bt0yPzL=;Oc3E7;KuVea}DLh^?**rN?$Li<U~e`9EeZ#K~wZDE{+eW5gR6k?)X zENx*%l-DAN?yRE7!$=NB?40vr<ujdJw24)23}Mnn*O;M-m=>q!KjWnQdz{u3Co0BK zlgIO%6Y^6ip_B*S^PiCK!Sjziu71LNKWQo8Hx$x7NfBzx*_)pXX#Z^pH3dDp#KW$d zOp1_Y&ZE!1XJbmIs8bp{clrbjm<rK)gcm!w_b*}HH%$;)_hnygnl0QnvsRmvg+tTW z)XnL_7&F_uIZ|-w!)iB+!v0J^JLdLdpo5>h=NT@I5(}ew;o=yUyYRIpzxTpYkT|7r zDcD1n?O3@P7}`>>YceZb>c?&^93k9DW@(F>2;U~}oUkZ^cWZ?UBbtH!XE8G>tzFF5 zrxW8ru}%MWfDL!^iGKPbPZ0XAJ@8uzW%4Lw&YBfXw4aFuovkQ3B(OX-ta!1#a|25C zYjGhj{I!Tpc*`t|o5tRG%P9OA%z~0TI7Q>G2A>)Pv#jJ0r@wGJgGX4wY+-V^J-v)& zZ5Cg(|KlMDzmj!A_NzPhmAo$qeY&tssUaS6{^%Dq5k{jXt=O82E*=N5c+4Cr<~+eT 
zIZ?!RUknxSRPWryreRgsgs~m{2}Lp35%;uy@)0bS+TM0`6bru8)+2~wPY`o1Aa=7N zHu6%Wux1E*^HP+_PmTQ-=BJw-U5xw5{x74)j$+3znWG%Qp-FKyyD{Oqo*3n=liahd zNVvI}^Aq;M$yOG4Il{0XLL|KWg|Hl+&H7w!oh?9^?p|==(nD(Vi*g#W&P#CP^4pa5 zc9vhkwpgb6@h&kV!E@hQJ+<6H=tPgt2c2s2)RvauwwI0wvd=<jcz9OPLvs5CvH_l0 z4-M?b<tSm!BIb4_Sa|NsqOZh+t}j$9zK+u2+h0R5LEpcDnE1Q~`h?FSHuFl)0uwQ; zb5V+LoL3q%{RArlwmR3e((!y6eZOC4XX~nE{u{QPZpHWVb4eb*Z2{3oU9nfXseT{$ z>+tE;aYg8r!w3>f67a%dD3<CTF~RlXp7m18eeyb~rTDmxy>c~12wcb}Up1K&ix8GY z7(y#7GsPTwihrvRXQ~qWlU7}2A=`R&obclUX1F#kENl2nja$ybDt@poSCj^%&@aWv z4?43CuEl%%7h|e8K(LC7rdE5F#jah8b2|d@(vcC^um@$apxWs|tv7qSHb_YDVq0oM zeS(S*%LOvZvn)uxH;i4b%@!6fWSy@E3X>PIq1R19UvD<=`bhV`hQ1iBONKJ}da!US zi8<T|^{gLHD&b$L`sGz(QrkP@S(_VChS{@dt-zU4Jx!O2&hnLMZ2FD(&eI`IA3<st z_qsmtql43_CDP<gA0u9gZt`eUB4;gi=^Jmz%ho7bexCef2)llxZE|Pq91!|-9HmrY z!KoOFW?NbDKFT+#?}7tVp;MaBYE|gqIM(lGypXV(y>&BA7(R~ez1du7+L7J5nHF?+ zEZrzMQTu&Pk&*P=x*16M^jOyQR-cg>DpOx1{*BNa{9J;$8b4Q1?bNdbKXIcQh3IJ= zurgZm>C4R}AK7dN1SAF>!2!SRAYdP&7D1{5D10Kqd(tlhoDfTW9c0J(?7^*w=HGcJ zqtOGqmArdUV}H_eeIb#?q76T8g2*+Atn<HuU1to0QGxs$6WN%5#R$(6*|LAVpS$on zy{=N9sb;2?zJ^~y!UfhOaFvVJWKp^z#^7voN&j{w3TQKIc16v<qbD4tgK#v&FBahA zpY>qUV^>U$*78u~q`mFx4N~G*MR@?*{7YoS`Wo6(Y?u=()kGHr)m@Se5V9uf@?GV+ z0qp)S#)7{vo0WUuYWkG_F?@9c-WZN+7*YBeg?qDBEu6lnKUQThKF6x5sG=Xb(V`#* z*~G`!MB53`ir;RClot)4^3YcTaydxlls5y3;+G2`pfB=3Do7kpvyoQwT3eV>b$!ue zbX`KlZ30CL%^)2v<ofGu<F7w<8H>ENt>r;Kn{5xB<?37{-S?A`Fi-niqdiN$S&0PD zl`eag1K%xpAL;sJvCKP$g57=a!JQoyt>*-~+l1~wH~s<gBP6P&Zv`omc1%gCzwoes z6!y?l#DJ;mK87J|Uwdf)-HxYZYR1A;<b=c$vESAXL*I}>;wGm<1f1h4$CmHN&ff|1 zosYIa@h;$d9^3@sLwQ)zSh*b93MW&kpYqD``$Z^HoKhrb<YMh%H0`n3kX<6y&qv>< zSlngre(aW<8v9OrDs1EH%Y~{hyYcnqfNRRiOC|;R&#SD@?Ra6rNmhK@+~TnX$^Mtu zm+uVtAFnT$USVf%#|n2&F#9`Z=2mBnjw;8m6shw^<gt#(J_#s=I)Pe>r2~28y1823 zI*awH3l>IavdMLElg~%c_i0D=p+YHXSYS;y!@r=+VtNi7LQQr^)C`i6dpCuFuWH<= zCGnKm5QO8p(jm6U*&uC^bCiFJ@Gn_BQwFH7sP5p#DO2v)o_XFihM3SAWr!4qAHPxr z;_wSbdyyr+OP<bBfAfJ62fsg7*G?5PvBJBCkat&UzQ#x<ilmI@zSG%{cLS2!Pyb&f 
zg8vaj#d=w;y_hPvBgtb;?nQ*0&--sFpldn#+dTH#y%5($=pKAbdQ?tr$EMv2H_iBO zF)*~Y^`kXnr9Inm&)}XZ54-;&VRrsrrm+aBm3Bvo(`1EuN9iKpW+}gg#BG`OLilAl z<s;A-sL@RStiRMVMgEYL{uU^Vn#SJ#Eg<&Qf5iQdiehQPFEsok9KXA_uAtCF)#JFg zHC(IVHVvyayr|)C8rp}e@k|<a(y*_F<25YOaFvGJG(4bU0e4(u{GwrlhVz=L8BEkL zHbV6$YM7>BmWCD$-_&rWhFdkP*6<e%pDI|uaUM<7grhX<q+xdrXK1)c!(|$7*6?c$ z4{3N(!;2dJqG5xE4rVn!Uqa;1g=&nJ8m4KOso`i1XKA=d!{r*T({PuDH5#7L@Ro-4 z8VZqW0fK>)Ki5uUWN0{A!+9Dm)o`7L`!zhO;YAH)4I4Cc(Yh{F!<HJRXgIVf4+R^4 zi#3Mg!9TtcJO*cENGB7QLS<dtO~t$|n!gzMZn%&8;5#ZKI2?#5VNJE4tK?`E><L$o zkGSgX)C$*j<(1?L({>JhFx9;vg;V{czR+K-O-+rKcKR#S{MZ+izc|g0Jzx1d`L`<X zrTOzTzqjT$X?|EQmA{I=RC%@1X!}f!5ugchnm<tUmpxY_1ZjT!WTR36GMmxg$!DrR zRP}O;HGi1quXw74n>2r><_}l=*io_@RDs!=K#JynL-U(7{~XQFY5wP$zu~bO-=_J` zKjwA^34Bx6-3#6M*Ld@QsiQ_u9%HghGv&+}Gs-f?ls98q&X}1qP5Cn?OdV%x7CCul zvvRYZ&nmCb^8w}W>iJ)Huk_^I1h4JpRabs%!FsSyyME<Tx#Bfl`B&cj%&5zFbwXWx z3Qh7ST-hBp=ck}O&@7Gq@_9bINw8#>ulC`)ccfS}1ag~k+<dstv{+ujIxe`2vBEFU zknJ1CagB23DEyo#T-lxS2w%QsW+UG#7C(dvcdaqLuLI|s>%{rCapZjCJ@Xpn+O~jN z3)d*e>m4|Kt|O;!1B`dgD?jPO8w*I9>XP0PLl5qdmg;Yy?OMI`d+F^F6R{DqF^8K7 zzX#WtgR_f0=hDWRbLs2wLS_$YGzc$lo*-R9G%M%EdvUzUjWgwXmG?97sosqVo`v=D zQ6k4}tt<b;z=sxCvQa1<N{2t*QQ#c;50oaPW+WHT%9#tbe8q+4?&3n*?BqiE9h_$- z?ijK;E)Vg%lma<&F7Yo56p_MlZ@~H2amrsnm=ovO#*_1$;dauc!YR)&+aZ(UqZmB* zvW+Oa#c3SZrrXP?UJe515HG|v7XN|Ban9)+7q2C)X7BJKdur!>2tNeZQ<J#>n(NMS zQ#3!X6j_bq<zvtJw800+KJj{`K`DXk9#Hn-8WUIp`Yhaw1ibAz?>6q7_cGVa^8Wt3 z`9<sY?#*$-(K?Ik%9r@_5i~Hj`SYVEHx@q>det(xt#!x-e_jDDT!6)m3&?fl0@{G? 
zYpC}*=~Y4URFaEBv26^)cc+^C$MCm@|FP!h4NjcF0*66oz@Rh4dsUQ|8TsHBCGsE6 zaRcDI@0K4h@?rk=JZC=|?XHYeXN)%|PTv<Jwcf}_`Wsz1W3C%#?C;7MQ5s`k-;>@I zo_XcI0eqaHu@29t;n#HYI4<rk3Rr&Iz=xNA63qKiT@M8FfiH5dT)=S;;NtF<e-gk) zck=Rb;JlEkSG;?LYo1GXV|qJbmN^G^pVGr0?~92>ldV4{8|p+8uG}+_&m7Vi!~bpc zCtN0C5b|ah&fF$|Gyf4-Z>;q@>09BQ=auc5$<yHJD`cvJ#D#O2@p6m~hpDs>ZiZ5e z-vjw3Q~|#rz8zt&Al^^eMR^y2##S8fA#fh?E^$hd7>`Op@lUxWh&TB)7VO8(9QP9( zjpUy;mwy)YkLDQj8JYv`o4e&_gZR*vjd9Ly;kXaCa@+;2`T?Qnk!7y6&L<r!bb0pK zc9}vPN)!>yM>x@>F$E#l!T{bkr7?x;dpXW$pVlGB`G0L_$UKH?Z0PdI0esYp?ix^q zgB%>XYg-^6NUd-*kZ&3h5a-H;wfTe#<3Hp)>pf~;PGNNsIB8%0TQDE}BAw2cIBq1I z)4lTO5I!`KN=ZX`_+^eOf@^XQ9{kyZ-gt%Mx@vyjfZ>Zp!tkX>o_n@iX8Fkg-dJ9U z?*)SJO{+7FS#KxKyT1#j9x&dc!Y$7=yZn3zZ;GIHiL=LI73s+NTiS5`Z4x+tK8|y3 zT&Vc+=uqC2tLMUI80-CNm0_J35bw^JEU#mYoW+^?=X0hu7S6=yaRIgdCk++a5{I)Y zPjjXVmPz~yVB?<yX_fU@0!=S@Ch*Gsz;9dsA7%UhDBJ&kD%<|bDE*(xcKMQU{%HBw z2>!ftH@=@`2L4OmJ*5fH?04rS@|nDMVYaF$H;(ec?A_i`{4{62?e0abc@MrIO49?) z8k#f=*3hV-uZEr)I%&vh*ziczCuI%KYgnUUg@*ez+@oRvw@YK}&~U4Un>E~^;aUwp z)NrMS%QY<1aIuE7HJq$ru7-{2XDNQ1qWWtA85*W&n5bcthDHrNHFVRE(@_3XZGrO| z;zzxdZKJX2Pip*X4J!!g6t_oX?9gzthHEt})38v(JPikHn4)2vhDHrNHFVOD)3D*8 zTF`o60WEg2#;DcsyoM(=tk$qX!#x^q*04;&LJjjYqzov1{BVwfnHr{O81<0rq68Q< z2B)F?K#g!&!!sIIXt+|tLc(&>B0iYa3%zwk9WmJ74G;Q%LX>;@@b=|d?f78b+Ac~o zpZFAeb+VjvQZds-#WG44^~g+C5!P1a*p%k5`}=nMHoJoJf*Me*VPisF98^Az1spjg zCtw-n2%OyS{}U|W3beoFTEcl+!vAlP3PlAa{O8}+U)2uz?}*dPe-HRy_~joZ@skRS z*nBOX9CrzB7k-TIGLU{7v_JSKfqQZIP6NFk*bH}1PN1WJ8E`8>6Ly9@Itp|Oke>DI zz^_^ox<jSP1K$%E2uJd9z}|4#_=&&FAe`Y_1mj!@PaMz+XU5MpZ2(?}^96ki7#X7S zqku;=x(0Xy2X|utWPtO+)C>sYO=@L20fWQUxP)Eds1RwuO$r|~Z!?GsiihWNBTzas zh6t4o_yJrs=ncTbaA!bQ18q%l*agkuFuV!wA?VG(AK=RHOBU6@M>qhR(V`8&5zW=g zWCO#Z)k2tnXA5G`ln^=(jEm*C@t_HBz||U2GvElEgqMP!4GhA#pl%8VhN8z0fi?kG z!JWiUS9}O0E!rMG9Agihi*suf=y|}m;i&dyz>ClxGr(64(HGjFzu~zI+}j>~26{ix zrvowu9Sj`T5gCIX4}2ev68aG6nuOY;pl-luII6t`xEzjT2zO~T;Zr#3V$V)Ekaj}< zpGTr8@C=5l1)U8X1$PT{9&kFG4B7%b3P*|70LLb)H69Oq2uH0&c<U98y9;^3)hYO& 
zl%Q4C0^?HAefU*X!i`<w2Y++d0*-qV$KnilU?k&qz)=RZuX5ZFQEloh;7&Ly^)6s@ z9DC<MJ`p$^j^wj}hVD3UfNvy(Q@R6K0Y`BOAN0Vufy_f-eg^pX7?hh0qGwP1FFr7B zGJz#<6k#cFoks5m+BBN$g-aW_V92)tPJyG$@_=EzRoVpHo{7GPj12VXqo&yj_<ldM zHuxU`3$EcGs(}m)>aPj~1AiNcT0*EEICT(YK<5D;X!Jwi8-rE;JfJ;J-!$(Dx4~6| z-T`brRBgpr;A}Xm7-8-(==pxwdGkO#gp0$#A)K3~_T_TmoZ;vi@aF+5;HZ{_pNv3a z;L{t~`jHrP_)%R#k8E`SdID#{QDzq4yKq#=<v^Rp=SHFXMxp;#BB3&PuEK2wT??F& zgW7;D1WwFFZ9q>3zBxwCcrozjvACiE|2!~yyjDwK?`ar>;Aa9S<#F6|iVIu@rxXGh zG+mVsE&y>4E*7J=9@uOKKA;301)K^;HOK?%W~$BW1l%<rOEP2#yDv}^$^gcoGULII z1@<q*6a-Bu!%@ce3$c2_Z3e#v$Xn6>)O7YBO5kV;Ed`22YEu)s7DF`x-woImjtZ0p zEQX^*7X#O7G-2LbYC42#7prRn;j|^{LYD^|D`6kOvOQkH{LfmduHl5s;i!)ZYc!g$ zpcLZ+5j@}KxIS>5K=%he$77}}(1gd|!+-^S5-60b;~)<B^nDbBWPr<7p({Zz2mbK^ z$DI$r`u`Bbj1M{Ge|b~}JhTQ=5w#@zay_OQ=v_e526YW5q~CQTek_n4LsP4)1=8=} z#evQSI&Fd_4zvmQD_ja_ZZpSy1t)^u1H4p#e=^{?415AdO&a?t*8DBdOTdT4p4$mW z8SDZc*owskbOrDn9Lb*tdVH?7oF~w@9jyl$!jIsT!U3o4Q0YA24mgTiK!o)Rbr2N+ zUA|OlH{j&G7(7Ubu*W_urC2^QfK9$Z%|M%h`Ec2wX9Hh5tPV^Ocm<BSvKDx-3Wt8k z90K}RtA#KEU#~|0Qz>SX=P3FKLS;Z*4ae;V?Fk$JN9*=r;2Moy3p@u$H9HS%{-aum zSm0qeT8OHF5f`xBBCZ+u6C4fjGr$EG(cKvMg%>eZ9>PO~AbkH4`W^xFTRk;b&}X0t z>Hj%Z2%2yM9PM(1-D^REp8<5dh3*CI1hm4DrnC|m_X`#;@CpBhy9}E9mE&5$tt^00 z9Ed#ffF}G@qxS&4WGp6#;0s)T8#^254ZwgqkVhecfrH>U#KmO}H;4G3$&OD}`ZUN8 zl5O0AkxxiAZ=wkc$hb{Hgk)DHnve{^L=%#|mT1E98cjCa#TrdWmRFJ?B&#aXg!eU? 
zEUVskRX!nEIUzGyA=0$~GCY#ukRlU)14kt#Bs(7Q3CYq%G@;Wyl_n$`81V@^Yc$!x zMyT}eQHgwWez%jzPv;AkVb2rLxb|o~{M9eVqKl4c^c&y9_Tu!?U$GkNmF}fKY6HR} zU;0lTf#36`fA&SlBkjiYYOlhNd>Z|Ncy0I7blx$uNj+vP0`{z{SXaI7<T`m>{kpjI ziCA#3yMw=Z?Vh#erd52Dz{QpKTE(CCNWpkRK;F99>k8K`E^qw-?-Nyvcn#~g^-k+O z*BjTH)<><6Tc5dp@cQia<Jae{pS`|t{o?gy>sPMdJ^cgz9pT+L$M;tA0q%QN<n}86 h`Q}%Lkldm2rmOkB<qKEyW&gnsUBkyYpat>we*pmr!(0FW diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Scripts/pip3.exe b/Resources/WPy64-3720/python-3.7.2.amd64/Scripts/pip3.exe index 1bcff1a2972519891cc3398d6b4bcced58b6ea40..996a84675b3e96882215d1742360674ae425bf3f 100644 GIT binary patch delta 32096 zcmeFad0Z4n*Dv1FFzmx1GRW?*DGDlz3kWEqgEk6^`-X}EagCx3ibgXe1BsK)JRz0| zY7#a3o|u?N(JX=+;7UwlOpHqQ#xaS}L~)(p_jETT&->>6-TTM=eC|JYlIEN`b!t0R zb?Vfq>h{<R9-n{WQInyOp3EF+eQ{{vs-I{1$9Fsy!_)CUA$;e^%kf8K_)`3nGOUUJ z74X;C=i<Ku{CSo>ey>d5q4IB1A(P?6MMVoKPfq>iXLDTfBOcuPo_7m6XjizHuADFL z!*Q2EdPgBQ4Q64Qp56gG$GLM{c{LlO8Isgj!(~wP4h{EGK46W8i||$WmA^c<RKsz} zZlJT@`9AI%u8w~)IxMiSWqySPEWcjDp8!0UnGYZ3FAbq&NRbL~g$?GrduJgOgir#T z!-x6HxuX=$52OtC2pCW{HxVI$y#t0ZZa_JulHAr^XZYTlCzHK4x_9g91r|~JYKn#v zQVt3Bs<>n?*4sTQ$;fe1Y9PnuI?Qp&f<q`29EMgu4JWyD9Jd#lb1p;8Zyb00;PK-d z-6Q#w6xQ86q31(|NO!2qf2Mnj(v1x5Oc(JRTjt)a_pT8rRJ<jKzo(qb6`OM%Bkt%w zX~_|rrRuK8Kx`7K>n)L@Rvf|Jb?>k7V&A$Otv!(!s!}?UKwfHg$`85r`KeMNT5b}L zSJxxX4=nmk4N}WJmrD@AgC_7Pr=<Y2iNezI2nES4x?*yCatCbHE|Xrklbup8ekrwe zr@|@Ox;V`z=o@7GWsZ2lEFO`x))0;ZE{HACPjdEWsgm7Ml~yVyh$5!Ej_iwEIf-`# zx7%%xLR253H{Hc!(ldIcV3dlEyj_jzUHPlv6>7m|@nDYlr68V^YP%8tNy@YWrJ;dV zv~Qd*+K!frXZ24tNSE)Tl_*{iKQq<QkdV&2CYMhM6Yrr4QXYgOH$=?7A`2v?-YxsG zkl2pa-i=aPn=2kcp=g>Ei$YNn3eqVBQ7Mn06q9~W_B^g6T^k5`xi(My(OE*-IcjAQ zo<>VPhX|!N|67)y26trnq0isz-XFP+a}*@H^3u<*4Abw?*7;~yjYqeHRH-7G#!x|A zR*rZmN1T<MBbEVuK*$llH_bK8G0ip=%szOWo%D$C-iRqFIQop9$gX)rrw+vItghEr zRY4T{X`DLO=Q^}eRIX!k+(N;T8#i6LjVcNv1w{nKv(kk?mh0KG-xp3wTppH!(P)k< zgbY6O#@M}&`T_JKPOY4!oJy+kVJ~?0=N(S=g=cU7W+WBQg2~Tic-zTzUh)1OGc;Uv zv&Adrw97ES$+Emcd%YD`c?13R(B(>Rs)4o^x<a0K)ur-GyqR)3rP*ajaI$q?vHZ15 
z?0K&U>vLYH!w<Jyu7~ojaLFW>>{ip>8nkU6azQr7T!uwZ8K~|?u!>F69x$hzrnZ)v z9U~S>FV4Z_yY6zi8qIN~3z5cU*eoZQQZImg6AfjDp<GG47K!)%31*j}jF_f4rZf@P za)ql@;hI8Rf5=?pl&o5aX^g_OL}5w>li<)j58^)Ri^lA?NRPP;iI>=u-Wk^FHd)@; zF2gxsf~`0%8Lh}GHSHyJY8LBLPMhtyaR~zc826G&bQ#uwGZ#Fl@O4iOrfOqKvlMaD z<r2-*pOfRJLm+mo&om8~&6F+rxX@*IQbniRj-qAZ6zWO6Ps1V~H6nt;a0gkU879gv zm*@v2j$@kDU5%#6rm2GMvQZE(f$vxF3F1wcp%3~UTFCSXEA@%Ao{{!KcVQHSg6DnW zag6nH8Lp6oMA8-I$A#*17T+AV3xar~?G$yN!>|R6raDsIf_O?Q^6r=|&x2lFj~0?T z+t5z3Tpgm{<6nYN)x{*9k#bR(tc;Pdo2nlEjYLQy%^-f;5nko@BmPLGa*`lkC^4t? zu!QRO_!3>K(0v5kK^_sY<v*(tm$3vu!FiRK?XeU|TFH1@Rz;}y!_+{zf>30nCagbV zbeGtM$CURfv8Bf-YQzC9G=S=KgrYmLvNiHlKjgCu9Hu%bN4fp;F%wXxvBYMLim`<L zmC*;3(Nl6p>yZ(vNP>u`24pMsw1!gdPCZEaJxx-wLiMAX>-V^oKuSLf_Sq-}ibW8Y z#tDLWL=Y`;`GW0`5n82S?m<DsGC18!u$Kl1_KJ`x==Le5sitXy<F!-_3QXM{o>Wim z_}(2;x5k6UrS&I{v+j>UBTCXzEn3I;drnXNo>PLY-d(@vQ<@uFF{V?ln;j)yG!VRV z9eoyJd2z<(Iu`QIt|swws`Gj*o@l*)j_oi{8keJ5e-9yfjUge?dPExHK9p9HmU%4) z!?neNyaq`?M>&0geG5#mU4mpCrC9h&O!k3Ok7zffX}EkT7J|dV;4tj-ko!(uMUtT$ z1c!}s7sPukLD$_1E-X+E!xLbnzP$#OkhNim0L7S}cso}-hzjp*yGzsb6x2$BoYroR zGr9~P{RmxnQeKQ7qGF2LDTDWsL5bb6E?>aPnqQIzv0E^j(OI8M%P_UkYHtC9q9kod zxm04c4Ud&8T1w?({9BNmdRCUwaD^W|+lZd^lzSHAE?Q2pOieN3?<sWukb<T@#=$AU zZi&m1E@E8E=}V~2RvOo>RPQ^`a=GHVxGeExo-C2ols_WAN5qHnsCDShq51N{{kc@5 zrM8r>!?KepASZ{T5Mtd&@{uxNKd1zk;S~DNF{x{bDQ$9GRC(_lsB5ER=C9H$7;0h> zb<!{-!dS_}m=t9hu0W9`_8~&iLBa79WsA%zF|6_&?q-+aQDm6wc$o+(^*Od?SB`iW z_LL)^FS+_|j_49@e46Vpf!ZvG@31HR`&jp(G!*rrRxS$VWa*zef@yx?8k%ii8kd?2 z=_Kbmbf1C)-3~(oBFnX}i%ZD07snZ;G#?r?r=%Pxoa4}t3DN_1fc?|2q(tAn3Nr)= z(siHqfEm#xNN8vXD7n$#a+UMf4hjv;0c|HjS)P87Zfz_U3=XCPCwa!QC-m`!HL#FK zjbfDL(-hK4gJm8Fm2>Wg;_6!-BgJ+mL#q^>3b{&efm~CQQt#{z%QBk;Dw|ldWm~Wv zONKPlg9Qcj_uqpl{2`TU50OU3ut@=Ne02<46Od#j&5#eRAb924vqEz1f^pK+Y08pt zGo@arZq`3lk6Ia|zQ{tY`G?UQXKQjf#}dP7nZa^VaA539t>~s25}X^VCZ5d|Pr`bl z1q@p0lyr-TFxS3B&304Wh0&m9V{AcXL0iq8GYh6SR+DDSkV#aDT(LnAbp=B;9N8Oh zeJc;bZqkdW0A%Je%mf8WXlyk5G9aq^slJLRQQz=1b6l3iAQciPCZVr1HJSx=>B-N? 
zV3}Q_hTVmYVhI*2n;?^F?S81eaxW=kD46YT(1Y*v!oW!z>17EQ#E}8w2ANH_nb^Fg z?EY*^m&LmI#!i7;?al%MR|Nis9TMg~%|N4m&sphW24jJ9qei77+3GyhVl7%$9wyC4 zm#d<@8pQ&-GVA?AXrMNi8>OE|VD*C~$4Tq6fVl`eh<qUi0Ze2WKoZYgAj%aQFG*?I zpEJ=If9mc(ks9UboNr1ikx8Sxd6E`%qF5T7fzl~(NfSGXDfKKS$e>>lso@T0$zm6^ zKg?#Mf}*Vrvtgy(!NyUrYoE%KCsTQxBj1axk;s=Q@}d%1p+<Vvg`vjIRVmFFsBXwn zum@=b+ks@k*5D<$jzJrW+Oc4E{zL=AVYnTk;j$fqTaMV2>oEL|^&m&A7aH2Nwr{)V zu%Cj`Gt+Z8t}?T5u!iF-TIs|nSr(^rk+tnmLfbqjxZ_H#1@SA`Wz&(Nkp9)mVS+>Z zO?NiO(0kzJG3b|lOG5NC#%4jwj74>T39k4rzQJ~{Bl*|1n?dWl+8P3+ePh^1hR~1~ zDHh|=VQ3DgT#v3Cgk1BwvtJBJy=S3bXjL+DDF7d()#>-G4ACQlwqX?M7(<c6zhNVT zM-I9ag&a~23-)Z7R@d`H*eh45#_086>2Ua8^K%05w&iH8l#<6j4DJ{9LM}95Q7ig- zxU`ZOdqc#S$R**-y;~;VGn|d=md)?TWsh}B<6jPAhr1<4K08((2!R+cf_R38#w*z4 zif5gE((Ev%3yIS#U;{!#Ui4!7LvnP5*vP6Q@|O^12p!4S=dzih5A#1vWfwwMq~uJ6 zK`0){g;}vMM?8Zm7e0jwP_`ttc9&%ddM#0ETLt5pW_ndFdpK;g=Xg{@+S-k^gp~(= zhF*}Ou<K9@dNY_!3*W9=3ALnBTn6SA(Qib_WJnWP_O`X(v&0KUu)&WC4L7w|dmb36 z;cVY^7nYo!p5qqOgzgTF!&vG9BmjsYVlB{49my;aeKU_3Iy0UG)OONUeZ9OlRc<GW z6wl^~$5q_~<NB0z45fDHrlM}_VuUF+peNSEJ<a+(?t-(2Y?9_;Im@E?oop5xT!ztK zu#u6;x<9c+Rh#f|5UY$l5*G6p<z1MeP?l{osYi!SWQ9?o{DL{`fv6Zh(Zse#<?-vf zvR|Vn@Xya?rtSmy`)9NA?!Eb5S?u}lQ*|p~?WtMc>%!cPDXeiq|InQ@>8f4jA<}hN zR%*NrDEoN&s_2JCD)NgK#OxMV-q{NG4I$0|!7-%;n~GvYvdJ@^*6oAskn0$wf#~|p zW;bT^imd|0mKvk4ew>ufDunDIZIy&C1`fXO3)sk+X_*NMU->{O3Ygh$boQ>UU(*X# zYCrT?d8FhCawCWJf^+&xw_r@DHGC8(+dI?9exDTNTT7cN+o4n_e3opCOWM%MM@!3M zTkCHrzt~M7CR@7(IyZ{!n7eLRJB~@)(paT2#eYT~nQ4vkXnbl0YcS?!p7QT3s{klv z)CW5gAKPt>MQgjwm!Fraes{TQVARm0I3@jxN?`2;=6;opiSB7Vnu%@_#K<CaJ)AiD zDjQmX-A#FlkQO-%L`%7Jdl5)>ZGJS=-=2ju4c2%|KP@1nd>YdBq_ik@tWbU3GKzK= zZzB>$h#>mX(v~7UN8$9wRiU7i*fQ2+mg|*?>L&eG!mdS6h<Pa;t7FPJSSHoil@++F zRD~Jbv4{ugSV>I3#TQGc6ALwD??7=Lm)>zgs&)@r7=ol;NT4nOQ9#8IXIP$iI7j@P z40k9Ms&FvIk;N_7-g7T@0Gz~Q2^0@H4T8NO&X}Fjj2S|^cj+{9#YD4^7mWeW!u;r{ zMFMbkXmXOCTvZ>+9ex1TNF(bW+kZgeU~2R+v0XY%0z=0Qg9bp!E=6Wq+bNf`t00a| zAffn3376Qzu?2?3zoLVPdk|C!$?TZ)+pp|eY;5pW>~SsbDd(K2LXox^0Sw7Uj<Te< 
z{?=}=me80OTc#0emd85Lryo|&2x`*7q>gpeyBC5>dv>t?iRFn{d1A$o<)qD>tQQgx zY+jB|kctqEF80|A)yqlIU~w6a9Yu@m+JP`kq<5EbRtjuJcG8>otIS@+{AVI%x>zN7 z6X`pUWK|*+qBdBoJt2`3Am)nNVX;`??b_i0f@m-Th`M0>l`?-KQP4gFBaspmQ^u?D z$JtG6x2?0rWx;*{eFjL!rRDfrLyEruW|!?y^4x=p3A+UCK{%mp6^1>QH8>!=u=m@5 zdO?&=qW*9hP|HN^I$Etz;B<MFo(#7UxfP@@>Z#pisoJpJ;VdQryQEZ(eH|ZW%|LpJ zgV6>jLoPfQ(5py^(PKk0XN(|L#mQyawO*9HLk-&Mu`|^^Maf0&uW~NhxwMLdzlQ2& z*A{}>s2vQVw>wwgs7(fjiAo7{<UB>Ovlk-Ztp-Y6hQZCyU)mjDRK)F%#?vM<7<9CD z?lI~|4Fa~zwP51PeH6Z*mQh4V6FH(#7N&~JaJq@L^w>Li6$~)W5{1-cuSuK1i4OGN zr_|(x%P<vWajd(kjwm_e7ddXHq=(u5p1nM-G@`?7yMBeMmyhbT>rTQmnt(N+nd;o+ zW_%`DE$r3a{ikNhM6FCbCh<SQk6T|(J`5lIzTX0!<6w*Qky&Vf(d4-JLnliBSh_e9 z!4f00c&aQP{lNMr4iB)EQGNB*U%^A^(*7CD)+bKx{$_ttG%oCXjOm#ZM`FQqT&842 zHz3ZK6m}&sVaU2<IbqdKIX^?&0P35dtI(QjF)(w*$S*<35#6s{&kC_w{NUO})Wx6J z_@v<ZVsENu6MV<WZyTkw`?b@ApF+0Nz8!j!_SAz^KzS65+y~^${ccJ=-^y7pp{#9j zw>f7VGMKHzwo_V{VgCV|Hj%;rb~I@KU)hh{OA6)N!dPgZShozVh7Io%5}LCRJzN}D ziayVcv!t9h)ons6r1uB0(mvs#*>Vmq!?A~W?3<xNG2o@^b?l`+sr>FbcA?Lpp)WVk z$PwFNo&ckK1ec)_D5@NQRoKsV+l6U-Oe$VLZFSHdNJu#@O*zl<`VQ(gwH`yMn=+JW z_Q53dMPm5hg4w%$J-dtvg%3phDu;C4RZPGJ*4THU+dMZ78_;j`g6*)UNt=7a@GQg5 z+361FO916_P~MhW{}w;3BR)uuS8e&Pq*;ICN$KQ)1vvr{u)4$DHLSH?55_0=;8*Tu zy_3gUq5d%`*Tn^xCM%o4ucV+%S$lshk9vJoEs1e+D=p&n$wK<sa(8I{uF@k4rI0AA z<$2)JzR^X)-542yxyh9m<vQXAbi*(xYR|D{+~LX-Wf!RA(E~Ha?tX{b8X~Y~=il!> zHUU!7!OCrFh;cj3Y1l6fiB^kQY{hIZkkz3X7|s1sJkzDjZ(J^E9R_zc+{E_mI$E}q zq$MDz_D4Im@YyNn%&D_qv-k>P_72+)OmpronP*WxG?ua}X`6N|TfZm4BwjV!F(k6Z zTPE>zu6@eRlI-eVtoo|&ys4LG*X^B-v~cQT=`}lMH09Xp`CNN;CD!W=`@sS~*WnUV z;=z*ihSdhBjJQ(@P5()*SF7z?y%WC8qe9UEp$J<gGG(<qIP{+c@eQ}!QSegnDE8J> z;h5*HW_?vGvIMkNom2@AsIWwZVJd9*kQu9csdd;}jB2Jg=%4UHMfFv2&^NYrlfLQ* zdc)Q}QeOpk4v=hp)dw;%T3_|1j7-v3Z9r06`*eNPGNiS&&$M*2+S=#otDcq_iu6?& zx?EcexN<~~9NQ825XkQc5vB_w{EaYz2JG)cV$$#X!M7|#zn{Z;F&&AxEr|y{Z8wYU zZ8VO|`u(XS?CEWGgkVA~(vgZeCe14)NA&4jvE6yU>BgaCFDr(STek1_Sbog#`9#%0 zm09&>j3u@3?5gkeRS%&A6ppSF^ybJ&IkRikt_ORVQ-dwh`rZC8hHIqxSP_spYU?Jr 
zj+k7H_-jsgt@|3goGiK8W;n4*_0_AOj!oAOW$CN3t(e=?7%#oe4`>OdNTGau0z_y_ zq*$t^T1$)pSu7ommc>(o!CH)JT83<po%`7}<_(kM7UdRI(geXNKn<xP9O~q?NF!lR z$W>h9k332>ur#0nO6=J!f@5}zNiMl<5@gVry@8}!lI`Z)3oXQ%Y{#=?7V-CLm)<@R z{b$;bkpXiZl52ZP?$7!9$Gah|oFKj7!DD+DfdNCM$_;th+L!688bH_!G0D_zfLXTo zQp+4$dzrp!9gK**<!am+VysXC)dvydFum){Mjv!EojPJW&e+(c!`-bLz_336+z{nu zeI2b+5Q@blTVbY;X-Z{R{jg@0$&mw(dHn>(gwrK3RtiaU_c@D9dm&(%W1PzD`=c`_ zM;z8U3Z$=>sDjg1{fw<cwqvG|M6QLO(aFc!hbYp-;C&49frMq+3kAR_*^#@7eQ;!b zfUn8+!wpmVU6X$NT?`5f%W8x-gMIoziuDvkM50VQD7X${9A{VmsIR&M1yfyLmyDb& zUa4;SDkli=dSGmAaBI7OKzWxEb9!63N$tAlp`1+VH%<Frq{8*%Z&H?~bk~DDA8MS9 z5`>~tQc-t6d-hIDD}7Zd62S8%_PJq(_txxmGmDr89#}X(_NIkWodqWSCwGOS>qu7b zGQ`%g{!M9pD#73~e76U=Vf15mxmx6&mn#&8bu1XUOp=*C-ot8}qOC_EM4Ag2fizP; zhafQxG!}gw3`~rDoF=yYpb&XW*E(Yd(NXa&O$Vnpswl{0GqNej#o<IncH#|l;tgcu zR~{(uro|@gN$9vEV+3j@me;9!?A*A3Ld3wM37uk;$`H%OHwRl)?t3Qk2gA!Zy&QVk z_24`~yloO;v}D&PQz$pCSe;VvH@Z$rCl<;hBDYAYu75C|EYk}89%6#;KTBGT>~h@h z(p<Z4-6z-7*-HlS8=wMNaIw<fKhH{){Swkss&yRckcgyIeZ3+E?Q*w7$eepg9BO4J zf=d-7xlr;=A5tXhs&r~L#yf4vKKl%2K6(`y(t4wCwIw2!786^0jwRp5Dvov=6e^SU zUG{C$1B0*KOJ0o1allfK1D1W4{o1Z;<Z&I)_6raw%(hl`=xDU{6NE}kw$zmJV1NpK zfSkr1Y5vf+J&(vbS}u?ymWquOB$c29Z4@M3pad-xr0MSvzE>y<KyjFiN`TxQz`XQT z`M^lIFNMslaTT(>(Vi0>nn5}>yi+=D=b#x-vAey?dzqB^EVoQnoHRE`nUUp>y(0zN z{-6r@?I-y-)(I^#?Hp((@fMY7ZaXPgm@-T<i5~32u`YcIFp=!<5|iz?`;D_0#szIV z5X6q9z~NUZ=VeA-M7Obs<H^>U;Pi4%CVpoQ{%V{fNy?ti3<P-RVARHJpR$2UETumM z^hd}RzoYsPKHV8e9g9J?l#C3;YLHt-rUn@n`s)5Tt}>@ikxi4l9k%D2P^G(cl;`ki zfs!JNa`Y*7{&?gVGW=ks?xVBglxD~=8KVGmyJ8pE?AnK*W@!!X<uc3xCBvgE3^RKP zsPe@Z(_XAnvOhg_7yBf60Ke`kyPDiH^tO+NgKHO#2$x~}`^X8FSRY&FGhNt#{&Qo+ zcpB*E1jqUSlVhw0gmlnspY4}xpY2|9?M=avIp#Y1p#Qwcx#$srZrxOuq8PYP#2j~8 z&v5Ge&3kOXfPBqXW*d;fH!o%l1N!hUEClGv+)|AE{R>%q$_xA#$5?a9AinR-jRX6q z-M#@$I)dzo@GCA~Xefua?iyvAiwivIP1w9oI6(?gdm4;_{n0wK4533e*y4esqACz) zi=XdF<K_0f^7uyWRNx3fR{8I;lLMook~#@d7!l6TZMW}~<H8Gev5ndQPv$cyjqm?E z%O2F5KXIL{7-a6Z{2gU4^b?hYb{WM}F2e|wcvvQmi=b_W2IWX;?1w>nd8?Lf9~|jl 
zXr$$R^8NS|x%5wVbTAHZo7fXWHt^aK7MdDo-TxbI(73$Iy9naHs3ke+$Ek(QXrX-@ z5w5u0u0KNKg4b8y0=TaI5aQP8tA9b@6CM7*+4N@!?fyr)+qExZ9T!J>*|pnb#LuqX zLdci!k-ER(a$#H=ZMU@R4MNdT{T}mJuK=8?RtXg|*r8OTW;FXTHM$p!Lsz2{=la?R z$o-1Tiz=rJ0IpQ-!6vF)r0Xi{mlhnf=U1YCW1;f_X(FN|!zDH&ZIEFK8H(jWd$D@C zByY^|M(yxW_F~!$e&98BFYQ_W>8tG7p(UDI%q_jQu=)t~shB_#EL%)HDuw{3U1P8< z4d-jpl)rKO2o(j60OHR48@>M#R-PVX)hQ&ha`#t4LPFvx>E>a%0Mbkpyrcw^DTfOR zPBtnA+rr-gFwVkkd+A`3ie5AyBvB*=={;h0JTUzevaG<~huG6bU$F!VqF(lD**sBS ztz<vUA~Qnx<%ijjj3|EmVKyzp#1~&<TQidR`>wF#8U6XWe=t|Z0OPJ#lp_aBaZ%R; zy=%MavgllfYoOb7L4U9bnKAt9^{hOzA3yyPdnI!OZ@I=UWeyGSdKulX8;Y)>t?I3p z*}!4ZL-g`&X>l3OAQrs>wGfPJV^`1xF2mPEv*9j>-OcGK#T~@`nsYRq_r1&>A2!)~ z=m1G+tUwLXO1qWNbS3nf5?ZK)wkn}gCG@xws!&1?${|q}y`0hg3SoyLvM)bD9psrj z1@!&$7(OLM!tKrG4iD>Mlc?J2Z1<2N4zS0DC;7etIa|UY@XRZ$VR$TG-@twvp6E65 zC35-X{mw$OjNKEdFGRHbDr_q>I|`dZj7mQmf`;oavgujbyx73DXN~3qPcSJfO|zWE zn9?HqzfS7G4T(sH(v=rID6$*qrx#d-DW2Cq&$gR}^c;MV#B)rVDv!`((kK;vEqeiu zVGC}Y)%NxRcGpyCT}Go*{K^u7jbIUQY1?SIQeJe;X(ADHlTZk5|GYpz*9agzh?+tN z&qVlIHo`}#07v9AGE3&O08SkGxluKFR|{N3UPAiCmA?qKOiwC~Zt&^u>5+H;Af+@t zd2^dhI68M3e$ummk*0nlZj%0}B4yR6?Mnf4i+MwQEYRw6`s&ZAmqcALvT+%f?O>H7 zgT02yf&3k8+sI)_0q92<q>`ELk=Q+5I7ioA_9yP^+vOO9KbONlZD(HQ{<Cb)Q!8}E z0MZ_^W*vq~g|vc5FM(8(UHyYaFB|z%fFIVK>L1E`G-@N;mFg)GH+*T8KP4T6RSoDe zgl%J+%vOHJJIs)s&4+!<3bK1dJ`58UCkXngat#cHac<HI5Fk{YE%Rh6+nya_{qZ{T z`6vi0EY#v=ECPaff^Kh|Z^3kECVyEFlHy*=uf!<dRVlF#YC~P%;By)Lp92+lKYm80 zy^%u?<e=O+d=?p?YYcyR(G`<1^&2=}=~Cin<RCY=>IGYS#;VaoIRFasIQI@4J1TPe zaVWNb%AoZeG5}i*Rvp=ye?khCS<6kh%Ais_6$&PaAp(4+x&R|>sZU5hm9Y9z-3Hq! 
zlmA*Z%ioDgZRZh5y%QJuEYpq7;!owX@uNcn-O+MEd^|3T2ZV7FSI5?lP70rcf+2BB zx4jT4CyiI?5oybO>jG;S9TWO37B2OenT|h2T?+`9rO&WCqet@D&oYzHGoaWDmDO_S zV)2AsH_VGw2>rt2iD)~3%WY$BD3MScD-VmhKRnq1A)WuylevW8pzT=F1;++Ti~;HP zh27xrWW93CfukwH*65Y~*}4r*4>59xC$r`x2Atk1i#b~;`VHeU$dm2M>C6AIg<Z;t z<@b9q{g~9y^P8!u5<1Fd$)ouu$oe2^D=Qq+kI&n}o*a|nbp=;RT!uHcu;XKf_c;uE zB;^;FKQIuXB~D6(dr4cIC8H_<CoXX!+{MLP(7w#AH(2V}@~~K>SQSG9nMyM|wo8zo zIX%+%25THUqfgv?s)C%pA065BPSZYz0f`hRU506Lifw9&>gy~ox3JHW|DGbCGsV$b zDx)Js`=9LV+`K|<u!dVdUnshU<^C(WI%CVY!ddbK(eKK26Fj>C(pa3jkPtjvjWBKF zY`25f^#}7?#0)OQEgOZr(XX`%@pq}m2eK#kQ7wBkZ$`jJT`6`_03F>mNr6k*mAvKN znIIyxb}3spE<4bQ8K-wtV)*11eUI$NyR-e{f_*;QBcBXu`?<4=<3{M5uw}-GLEk|I z-`T_Zj-Sa-{*FC1-l!iyYo5ceNwDi(hWJfv@AzqBkAI5-usci?s(-PB+aB=JSPYmA zE<@2Ks?G_i>^=04Ef#G)7$CS#mVXXSyB&$J0JhUY=4(sWlzX@gDVx}={GI_bDax%G zjc$TJ_m7QiV}1-j*U5J0$Mc$R+4=lve%a}b6M7BVwNchL-Dss)z7y8ZfYaoUgjU0~ zIj1hiUT~LldPtKIg*~}04*9waJvOq!2}yi@EBn`kzI^?Y?2`#YyTxfyB(~`WY4N9> zF3lS)Oh55HjW1g{@o8N#G>&?tTfK-GCiOD*1w}s6eU#>+5g~hs^FgT_5RCPVMQqNb z)S+ii$xP)b(s5vPp4$uOT?t1#F^}mucQ>e=$GNY|fndsR@F~_ZX*=KNO}1ik?8YfM z{Qf7{*eU63O^qLWWJ*QgJ(|8_LxiF$*Bc%#?<Re6f(1+s<6k(zlBSxHdcI2nmtDse ze<@T6$!=p`OqXaln=w^nL-OsFY|GUC{L@A3_|&*Q8BiCv8$wq^geA?>1vkF~PER!E z^RHnKo4kycZrF~W#0_Cy%3i@DrzP;=MY~2%>!%6txYmU3gb;;eokINLD7!mz2*0kB z4W3oZ-@Cz{ofX4Bc9zx8+Gf24>l?=-snVxF8aZz?yzMZ|y%ZO&Ms0WrSi1Herj_2d z8946bz*mWTW#4abxpEp@TH8<E$BK{NG%mvgS#o{)W9I%|uVFP>3f_lqV|}-->L%49 zXB1E>7eJD2n~P|(_%jw@cD`U*Sk)DpKvzh2giAMIR;!1)e$8y$>~7JgmeQ)B^Fxuj z;@Pocm0Vn|y&ySD39v_Ir)ZY3eY1^&P>;uQlhaH?n=BKstIFHp)4^|Y`&w`v$aWOA zn>U!jQF&KW-d9bppA+1dS|G>Y1Shc!?PwVME#Kfo9dn^S9T^y1hVFJYX-=F@^C*Us z+jq#M-MEZZ&Dn@bbekKMk-kChTpyQV?;|o+lD7UE%RDS;y&iG76m|`F-JI+tSi&mT zPxrBOH*bhVag@!9xxEH#g(XCEr1-2Fhr)WNC(tIK;DbLdH~oG||F9qA>xB5_(vQzA z44HuA`yceXeMjg&snV1p1c7#yt{-OR!dO&oVPQl!xs?YunER*MZn>3rdCW~M^_fB= zzh)8pxUk27yH(UVtw?Kd8N8}x#q|(20=8qWwxb%1hlAqmR@kQZSGincl)?@3o=ZZ` zBq;da&XS}|byzmxush7FGTVlnkf*=P5K+Z0%nSCZvUSLU-J55OKzdZm`?2h=lq$)= 
zG_`d@?@JnX5_ac4A7lzqcpz;_+LW`FV5eUP?drpzsrQs2phEq_hp`KA#;94mbl-r4 zt<R$wx9^$s=5KHbV1~=k#YT&{7Q=2db_dGvmkJKDd-sLL%mbw@3FaEkeZr&^gfNKx z{$ks5fTb0gdR%~6pK=a|P5%a>P9WI)T*^6`K(HaW^vv&mV>^lr{P(}HJw?4kR)Z1z z`7%Gv2j?`{dFL0gn?=zBqdIeuQe-}e9kF9EB2eWpiQ<u%?p~_Zt!iN7iW77PWQwe~ z#y2oqaq5Wmpmf}1@CJ_b8Fc&;D5QV3QYYUEJtKK#p!{RG;sNZM=+Z3R={PK;-!Ai$ zG8@?4;_mL%rytD44VEBwrQWM+)?;nTO^&R`PPmRqX)~~o@ok7_>)wsx-#N&V7L1II z+>TYCpuXdnY6rX<IHnp4f_hAKc0b#=px3x5R$K?Kei`kjwPOhS3btIpN!W7fjg|zu z8x@;dRF8?D1V&t+Guc|96>n<umefzrbE|45o#XUD&B5YrW!?*W(dfQDBDK<zByHN? zSzPpfma}kmWNRR<R{ghQDNL13K3rFJe&Hpby{D9_yf=V-x+t4pd5VQB9-`UH@)tMq zi%znwyXFPT9(bv#uj~{hfAw2T_rb7TN0&rt_=VrGCI5~HCtvlE<23Y-c)?A5M{!fv zxeWauVA1zyB|M5am!WMf%5qGpMAKa@rph}D$~!#NbLIHvwajOCg!?&^#Cq>u9bH2& z28h`^I-SS3fN+F-$MEDxJtu<8v@A?Bg`M9$fL}YGb=i~X{`v$^_b==1{s1A%_C#x5 zU^RQX@!vho5|(%8n^&`(<w?A_iFwt<ur14X^G_zS38lgA{~izadv!hC9|N-MX<X&j z4cy%+&4z!o*H^^rde27vVJVddNH_k?&aDXR6TTWFw4F$Jcj8}goI3fGdW<%2*gdX) zV<H|$TLq|5bn^EsX63lNN>FV^PMQv=PMjw&8mgNs%F;8%?>Me(i6CCg6@SZVxQoki zgK9vseUHm==cbPp-G9YFV~CgSTa6_f^J6Fbcx80zRaj!`tngGaS7uKXYKzeyF8vM$ zDzf=6#;HKrJ0C`{SL%b^o8r*q`ug4){=VJpa>MA@6Hj6=`|au&%yr6OzWsiuJT|<~ zrXH~HkNt}sKM>2$|JSbH59Dh6_8d@#(!24HWzL}y{GtQQaVX0Db>z?9J+#=JE)%iH z!-x3r2KN2o4ek@7QSkC3l^Xtw&)M&daS{FYD?>>34FAV*$5Z#q#~oLpWLyS#+h`p9 zPsbgHWu|`j({YD;2FhR`HC=EY8U<uq^9y{2eb<PiTX=p;75uJIx}4WKn|0<@=5?YU z8(tQr`|Z_E>bh6i;<8x&fk)UAWlMawVM`B<-c!nXm1!*np&z2k<rQRogh>})=}ffr z6}Hy$0&m>UqRQif-=B{P7qw$xRNz{ge6LHoeu7Oej}CkQQJ@c{8wgoYM|&nRv3#)i zK^s)pVdt6S?AI}29?3X8wbZdo<-<baWY=grJi=jUCT&|jLS$4O%UTs5^7C;_7`rD? 
zD`Zvo1)V^2+053j3JW{DN9mp)ABIU`kKc_TZiqE%Z+^z!Ta^-A@FMnvn8t-4!wO^y z&*YEUtyM<;;rT3Lb-%cWU|cEplPz6x#l!Nw^d{*U$Q5ttpj|FQ&-rZm>OT6jV?d%E z#SGZ_ZlAI@R~xObgC^g_A-}@2#E^c_5`{rrLpa^X8B!!m76ya?7;d%l<hYU!<)EJ8 zMdVL<&_r{=VPGwk$nR(#rp1UO=1?#AH7tJ6s58+Dx*>>ZNH?XD$zg+6W$bKBMaVcG zio`_t?gi>6+3I^pnS1loBMr#1Y&H&@1o4{Ca1V1YJ`5z=&zO6z>9`2<2j*U?Jokp6 zprK>f&Wf<U4|isT;{uR(qcvh9CY>D-RMxys2I<%5S-`tN%x_H?Kc|u<teL<&d$I=* zx-pN{u8HM`ezfbOHAgkpOsHh?FP_0{M&Is-+dc(L)VcC-y3_ChA~Q1K9KuzwW73!) z2*Od|rg_Y*s=oYZ$7szyFE9v2O_H^dlG8|;Uk3`t1Vj?K0=ZSD%Ns!W&5*h>J=0OD zvGf&+epT)~g=6DnrnJgHD*J07t&L&Ebs-TE$b!l?uLqMY-VZ@^cTf15rk=!53Smpv z1qXlfoHCr=$1s!rTqkqByoYUGXXWROXQAuQ@m1s4_4VSA5m5cq+H+7ZIB_VzsP1@q z2qWx>b{r-c=KLMfy<+4ba0~X#SN_fF9-PL1e2{gqCi^=f6O-#$O1;@0dF}%?-a3#E zoz1GPQ@Wfk06QHHS|-qb@cRSIY3-d*3o7O?Zi}3hiq_CN`-ePy7Z5it(<1_dc}4-t ztW5F_Uk{=qGiW_~qq0Y!m$Kh*L|$7blP|9WxgY<)bL?DY<jCUZumY^2sf>$Ejk72> zj6s*-1E6N{Y);BKmm%P9_$FcjcWOuE1G8BALy1wpJ_|i~<Fj0Q<Yi#WOJMRqK5mQG zHL|LQp5|9hXAuuS&5y`sA3SUhv1}$o&F1Tp#sKTEf6mmh0K8q*dndbNn;g4vI2@^N zv{pq+0ks|cKRr!zAp|m(&OOb_ss`~%)7cwUlY+mThLY{st@465R4AhNwVpZ1W>$wM zU6_JF-Xu*!QYjFB#MHKD+^L?J#eql$*^X5bOqLTw)yfGl@%_Oqh<U9p!?Pe^NWU(9 z_!MK+@qUHr$UrXS(gAj;dY<0%BC43(MD-1ohCamxKGL(-xA0UT^KK=!^`D282M9$| znnYhId=%NUF3Ta&mwch8r8h3@s(Qpn({<kv<cYpN-VyQo5cd3|q5Q+m?4w7A@e>)l z^Qe&zIM1RsMDSO?VHq2GcKdO#oJL=D4YpZFx|YFg?S@1?y%&3R!!G{l1Xf^AGUQ?< zkcBe{hjg$uAd%mnWY5{7d7o5PZy&<v)UccOIRAcFO*?wz!y1<O*!`hF8056wrLC^$ zexS10%j=$~m%dG9?>%OvcQ+GkH}B!4p}?9BMdcg;3!!hiPJnhU>mgq+GE4?8hz)<= zHzPCsCuT1%TWc0x3Hj`aj^t9|6KvT>-T8MnvL`-@O4~z=h@(#phAY(ES05?z8-!sm zJxj-HFhdh4@PxeBoDz!83JtflQsoJj_HpE3USEaFHz@j)G*#wyS>&WSe@~k6pGo^r z(u71el^_14Q_^*;R#X)30G^WCXz7BESBv`kID7o#zWn<`*+(B|@j^Yj_wf|#UK5o{ z=LYahx-X^#1#zYHdSnIkd=_4coP$fTbjz({mRh&tfXIjRlj!R*gpLD(SAoc>?mAWO z{M!nFi?gE6UIR^AFW8zjLYgjaC-(oF>ahdvc+HY+{bUAzcnSOSlL~%FE?d4olh1#g zy|X`ue|s#uxj&xYx|kX31wNlZDx0`<H%k`bb8xW9Vg<i5hn*C2`L!Rih{r?tor~Gv z$5Z&OIc&+}@w{aW+xYk>K4}d5`tc0@!F?>m5y9_V#0EPC@>_G*Qpd1?k6{^vx4<#> 
z%0p@A&&wKfd<pg9_`e9lK%lebJTD>Tgp^RsE;xqqmJ$~Aug!dq$5`FJQu)V6v8(?Y z9We`6tc1i?tQ-&~md^JY-Ht=E;;P@13)qw=dh^<S?4c*Zhqh+ZY_=HinSnyQ<V>&b zlWH_!#$jdodu2uyqiifmd2kz_kdp61*-4DA3FOxmv*4PAyB?{T>}DNWNPRXoK<cj` zAyR^Z7^UtCk{|^uNV24p5noBG5VE8@bLG^6bWK6>rArDjUHVo*3Z=6OvQRpuAgR(3 z1u2z2o@<rUS4bboB(ai|-fIdD=LIJ{tKb*~oK&OW@WFCYm4fdeyh6cWBYdfXW1w(S zp@L(j<)q09UQf87;7x>EGn7CJ1(Fr~Ea7nq-b#3gf=h(^Dfkt_IR$Sg{Es=*5|Pez zBuT;HM(3n&6r2u<q!tAaA^ebnTX9;zNp(sff$+B!Jelwv3Z6>%CI!zTe1n1ugs)TZ ze8Ni=yp-@_1+O4{x`I~{o~z&+2+x{hm1|u?fq_cIX2KH`d?Dcx3jP}50Sdm8aCZf- zBfNdKoKHRBmleE;@KyzHA^dX%-$8hjf}hn`5%@?6v{J-Q1(yhaNx`oWzFEQB34dI{ z>Gqmbso-?yQnDyGeNRPNsNf-l7bv)q@O%YNAlwYxiaw)T9a5?ikxF=yg42PEWK?j0 z@E`@xC)`WHrxSjsKyG~@;a3!VA>ltLcq!p$6}*D*;|g90e413R1U68_2MS(8_-hKj zneb;7d<Wq*3jP}5RSLe7@CpU5BYbHAd!sgTLN=xiPLxN<F=KIRsY_QV0~JaDDE$>m zj6#_X$^wNFpitl`*3DKZ_hzw>%|`2GY>tuZ7qh6*V~vRaPN5uBC<&mPRVeQ%6kH+E zom42>6-p{7hZKrKp=5!wPoX@ZPy|qRDwIVEWg#e^SQXMVg;WaC2MQ%yp;UnKhC)e^ zDPkq&Cy37}#3+SG+6BZKh3Ksi(M}@T6yhH<(Ly?1#;a56tBW|GV1u{l{cG3#WkAHP zW0STd@^jX)$}QjSihkx<jn(%llFfz|$_`1~U7tERX;?pm=svmpk_bHMpStifBn}1H zu3bu2r5sByjOgZ5(5@>{zybv<1d!j`6?IdH;)=hXMz<JfeZ{NAmbj2<@)rkK#&g51 z(XgUbN7p+QaC>CItT_J+Qi-}V0Bw(>av*)JkUj%&;;r-e14{Tq3NMwa7&S!ph<nja zXC>yJAlB@lhKWaWi?l-~b6naCcS~0sNd?Gnl(xDHZa5`mx1Q?}VpcNC1n9WDTZkj^ zRF=9e!hh2iY)5dy=7kqmgjd*o+a}W4y!a`7u0zyLn8X^k#dQP4`I&S->@{3kj{~<S zxZin+-PyJvwbxdN145Fa(n0c0%P}TIirh{WZ^F*jWw4XYY1giOMFvY=Vb5$IY;<i> z4uVoZbr~|jf;aXdn{YYYX9)X!dk;P>iG}Wn;eXu5GIxXt&D)SCmfj4Z!R016BJdso ztm{|FI>lQ_FJpf|XN0wK81eM&j>nY9GCA@BZJ(v#o7napY1XVsQ>H4tMaE@5cIEpj zkP+SJGRFxfQOXss=8A37bQp}d4we?lHX|A@@>hRB!guJ(VBfk7I%=ETLHa62o#QZ! zL4;hh2_1IxTDjLy*Tz9IN5}+vlS({^Y7axM_+$)m%8gMNTJtGP@jc{tua5XfmG~W< z@&0nWL}W_800_=TkWfq2g|K2!qZtaj%TV9JH{48^U3<0`{TqPBpCSaeEVM^};9ZkO z0KB<pxi2^Uq6IDh<Q*g@*1b(J;uV*{eGr@YLXKdPIZs&zx(x3ORLbmyTN?5aF0N*X zx&)P>tIOa}8R)o6;SqH=pC$>E4P@WH5W~C1GtU?M`#)Ru*Yr*(W20Ua_)%r7=Ebr8 zH&^}@Q@xV4y;#AQtz<<nMaL|xMhl`vZ6TmtH<LnLrF5{Xldx$8d*!7BYw#q@#l?7? 
zzuZ$AkPo~-u-$51JrWgVkKv{#x(aeThilw;sUPp4=hDrUMJaNdA<y#VY1-H)p@1=B zj!FHocY}-*1>5b`RUZ?jJ1Ei%E2vaI<kaDE!DZM~Qe|`d9F(dNE6<CJ{_+l~55l;Z z^o6vvBO+Fb2wK6`zC4JJU%}pcIhb$g!J1$0%|H1P`}5^Nev9ElA!}9CzE;KtztX4I zlcg?KbgHx%KyC1i<=9I=XEb<FkK&^r-{Y|6jpx|oujGZ?=!&9`NuOV&`s=HXV$nim z_H*p_SGp$Nc}yPixb#;5C0Z{JcWpQ}MrkvG#)y8cIijClzv&=uPxfd1UJd1w`m-^w zCiDBAW$RuY;QrwE@T0!}Y7cjJLcV!5(*67IfZTmGlfTuEWxN(1sc1nzya%s-!KaXN z#AhAwWt4u*^4bvI{tSEjwP@a>A8UDS4*zs4OMX3?FN<Z9UQginZe{CVj}N_?gch7_ zyNsG+$CKOURGRR>R`%iRkMP&Fu=qD)e3thiTU}q(&&4LZ8OH2yY~{PiF+42soHtMM zkx(#iB}QJ6$Fv{5!GNpYR4oCXG%s`F5tpGnk<ELnD{t)0R=qW-uore%s$+Q=P`o!> zcas*8PkT{qoXyk_AsT0cawd3_T)sPVt+!H*U3ah{z*TX)+W8WsAAua&Dv+Lv&4)@! ztYyA$hxLAU5fu^vxl4zD+KgOVCb$ymVlB244r3U43){Ut*=+pVrg6mzkCY)NlU;aN zKUMena?Gy(;y$;4^gp@JHwCZ@ZyQt3EbMHi6DVq+_|N$+XeRm3zfh>s+kY_*1q%Op zFw1=>%(|?T-iC35y687xLgEDMxu;N{^>F9o3rea>@dc37`lv2N#hp)wVF&cM<4M%( z<`7(ENCYVzBv~KrgUm4@;T)~+DN7&P9s4$e)Ma5Jb&YgsK}Rc8?|*<?`>xWboe|QU z1+oKjM|~I@QPPcnVhfv9a*E%*h&}u6$k_O7Oy*+jAaPM$uxIa3oc@zQP^Vbye8%tW zHEwqN-%qfHzl0uj7(y&`f_3CSPOt`o@qclG)g_*-+4=VqtY_zU7T0S&+q-kMaVK8m z{Obg(0arlkbBgK(x2940-BsQ#5iI|`cAuT)B&K81YiHTS_p|v$<!sCQL-?I~Hes=W z{rvuWe4zy=Sqt3vf9`Va(tc2^;pc9^xs|co$1CZ;q47V@tP-N-Gph#?=Q6Y!k@Y{G zSxuGW>y30~#h?D1CG1|){ZU%A#q1sbcw!aR&g#LMyNzAg-Jf6KO9xi`jvH*)o+N(d z4YqVoEWhPB_T-*m{;Lf3`W_>{Kaw5Wlg_UkiqopE!&ZF?O?6$?z_@Z=s$98$?6|b` zPj;uS7r*?^T|M`DX>_45rPUsLWfsfW7pDurog>*%)kQi#i`~Dkd*U6;H>~`r<Xpo_ zb$>VwF8R>vJJ_a9+<-Zz99mu5!1nCRvlfGjc_C#0s*dxG%B)+Jp1B;>ZP~K_IJB~W z^iQ*n96g($yK3ktOBv;X_*x-uw;_Vw62Ma7>?VByTUb>*mw?(5jHthSBFs@!FexLX z(-eU!z3~_CkeTew52N+#=yXW@N_hkEw};rd4~Mbm*BA_6!-KXUT{jE2X3FLYMK|F9 zTkt3we<F(isemm%(WB>ThymK^PuL0(o;d#wmfoJxS&;)kNi{UNSmC=7EV%)%9h~s> z-}MM`TO<_WBPD(IKf-RDi06kCu&9#>#v`P-MDtx#-Or7V(}Shk(>t>Zn88X;#wA<= zg<5f-Gsc;RYW7dpwL^uJ^%9Ccg{pLaguQ<<#;`+WuIOZzw9{GJ$^QJ-Y8G)S($w=2 zn3=Y9DSlizt_bF)1gLM~mW6QKjkDpWV{f_4DSq-hQPOQ(WPpXMA1nI(1m4g9m*H|Z zw)RwlF%TsS;sN?1hkPC9H2Eko@p1!jhbsQ9ZtTFR#3+%H*W<PIBW=53MPTmDt485C 
zr*LTBtYW?`c_CgzuswjvM8iJqsLRxDth6O;=ouRnP4zG3#T12C?}KsAXC#!KpAtTZ zuoJJjpOl$~A3>C0Pr)6Ct2TD1Wf=dyjk$knG$ucYA%lSoA6C7kOemsD@at@t(suV2 zn|6kcnLk~?|NCL~_NNQFXHEIbm^wZgYtunzu^>*#!Yg^L(!~^(a(Y1E_6^X1mk`;= zVHLx7a3-_Wr<48O2E{Qq23j_KGTU`}MShJzo-kc)>k~MAbqW?ma-uyC!{#}c;WL;U zw)VC9>TsEU9aj$^Xk0yjfkh_{FFuMweuZTwe_H{WuARgzpA}}gLtbP_7MbbmfOrKF zuQcH#0@mdzNg{Edvl!9L2(&lBEaLXU5%CN9Zk=*Fhc~dG&*$pCok*33^f+nRL{|BE z1fP-2wtXHIuz3j`LExpgO^rhOpp8q|@y`<sg=?uuY%P>cmqYND4wo_SF9r+?1v`DE zLcOY>y}1}Ic3E;r&}Qk~w@L0=_xUJh$y+oM)~u1XzDbCsR60L_t@&b5W{)*8(-TNZ zsjzuU5X3<>a`1Y&QMvG|QJB`MOxqNuC5xH!ivfK48?5`8WL~?NO*qqw5A4U*oiT@R zT0{p#Uev;;C^52rz_}7RKd^|MJ2QbFxQ6xpGBV2>Z00n0hArjv35|`fyIh|@^FWvT zdEmXYORJTZBlQ%DUy1lUczWT1b_97yz9X6VWla8dEWa4#{!l*BGvhmk1$D^bJ~Vz^ z_&y>M*CCK+?<*c`d*LpvpB*3AppTLnrK#hv^(3|@`>?xTCh}4r*8A+15XV)z*`-`R z%{Mzn<HN5Q9ILOgb7w<S1b;MgKfMa%n1=lWJ_CmLcP4>I7gkZD{?ahys@@n&0V;AV z|G`j}^;P_=*z?dSI1cd>HVcWkTYB!naOXMcW*(@i+Hzm%6tG5)?&J$Nv&5}v6w468 z2dF@l4|bdAb5yS)<eai*HqKN8@e43*{GHW*6_ULa^c!b|aD1fgBHZW#W^#GFRMMHN z^Z-!V<_`oXx!YkdRQY2vASalJ$Z62;Eb{9pe$85z`Sl6wnB2}W`{h`dtByvm(?`_K zT#n=#a>&bm4CU?hrEwv&m8JLB&EmK85h3wwDHTS%I7z;gc;I0i7qufBWdX5`4Kdca zTyR08LQP#ikY&+HU(2yER7z`#>pHvtn}JDl`=HL97Z)cep+b7!oh~lU_)yk%=!s*} zk5|}<ZwBxuzGa&8aec}vFxmg%;-W9^2>q9fi|c-46VE5{<Gy8;=i{t<X)v~VEXCMR zzT@F)Q3kWmUU~L%SnaQC^Je3<AWM+)?TLNRI$0)%z)P;tcuN;cAt!BFLdUE)Frpg> zq4*d}rznr*P>Y(#sg!U~`Q%#Deb}U(CW6q*6@SfJekq0C!SzB8sn`@s1vDsd^yDY) zMed-SDPU*M_2eDCO!G}xoFy)RPF9kYPh44YrQ{d!y;Nmd2^y#o$<>pcI&U!U%#a19 z6>t_FLSIWY;ytsDX(5dgnEQofYfU`eC~Dq^lCsRuYDSmgDIb(-*Zw+->TyU=y;q{{ zyiCALGk#P(+*CGv2n`@N%@v=3hPdK;%YO$djq3cQUlzVdaoI>;LKX9}q^KnF!(eNQ zFY;!wH!tXT_aydSYgbRi;xr+V{dU1%%uAM9692|WIiSUt_Ii@QCzMly9)gJGekyuN zt6%vqtW=nM(Lo9+XS$1_k#{Zs#6cTo={pN?@QoHW=VBE9s)apxF^0EU*vA(mecM-2 z2Lv6Jd=uG^7o+)93)QZ_E^aDK)gGu!XG=QTNKH)p?XS{gDKMcmKje=xWtif?5gUwp z<-pNLYG6BCBa@fF0rB7RlI+F*KqWC**xlAJKDvzEYYpW+%24&tF1QrTNk75=(2>uc zdA&7(8tdl%<YM5sS5#Q1!XqkdRpC_?dJItbBUG5G!pSNuQQ<ljZdBnrDm<jZvkJ6w 
zl1gY-VF11#PQMrx9;dIr;I~hOZ>X?Fh3izl4JzEE!nag-NQGxqD5>y{3~}HPGEgaC zunNbhaE=NqRQR|Gx2o`f3Qw!>I~BI8&|{F2Z;%R&D(tPo!30r0C#Zy}DlAsvG8I0k z!c8jNp~Cl7ctnM#RrrGnud2{}uu_o-6~?J>ARv{`WvPUTDqO0<M^yN%3g1`ZaTR`} z!mBFuQ@bQgg}qgnszUmbBK>BnaM=(|vP^hTC7e~EOvInbYF`agVPT2}AGwc@tcl^W zD66wm6<E`(!1ir`Peymu)!$GErLllW(z8d$=l4k!=Hb~9qv3W0D)nidq>;0$oV;u3 zkIQt{!T=>qCXD=3!_ZvvZ--uqhbokR`Dz%(fc(4Suf)SWA^&RBaG)B_Qo}Gq<zK5# z;m0O`epaqjC4?#j&Zvg5VUd43{gedQ4#~ePHB3$#`d#r=;&Jgy{=KG#yQ|?=A0^(X zhD+6Ov>cv-D@R@m1J+LYSEz>9sNpO%yjBex)$koP4DX06QI{HS_tfmVeXWOY1~-qh z;91HUx%u2m{I?vBkz2$q!{4QdDF;@>EtF%2f)>w}ajKkmMY_gnu>L8(DPWw(-G@Z; z!Dc~5MkHK`5EVhRQaSq~;AQyFh^))xf))VpiHw&b+>=uZA)Mo$dR$Xbo2t=tsnuvS zKSyT#w;Ijmijl=iZWvdv=SPi3&vy~se|i66@dopAx30O*z3<_NHMLrIP1)pxRQcaX z;5J5b+`sV*Y*%W%5BSNyg}a2gb72F5OAUoV$&^L|Hu?_f_tbT28jTOn`3&$#uHE6T zG4h92)E;oxj0x=EuIt5d<nIe>uZ_}bf<rp!6}>rbaN=Jj*Vg-My44nGHC+rHEc5$v z+;TkQsG!<rZ%ue|mmm)=sMw1O8tBOd4G1XhU^NWjxHvpAE4VctJm)cy&#%q()Pz`x zizZgD6ppLMvrw)1N#NiAEv)fz=X?fwP-{xN=-oLzO3@GS?JT9=Ab550bd=&P^LukX z6FoY4+y`^qi+F4(gP_KbuO089$?FtIOeV*T7{+nWx7WVop^1t=w1VSWwP3_2nA~As z(NE*abDo-KWO0+oHMPBbG|{!+=`_I}#NXmxd&xr+)`Rd;r0pVC#8a+#he%dV;J9Wy zAGO!^_SA$?$BgpSM9IRSJkMetRlyYaNEJpG64?O0vztfG<hU7lzU+wqn{YY=>I17P z2+viOrx);iJb^c<6{uHNcdjdh*mZzj77#jN5VsR^<UF2^M85&=G5fDXnm~6haG;J0 z+~}Jx_aiD*8{wsi>lESkLXLZT9>?{$Sv%276GKf~<)yi=Gk)1(j{6P|M109h6BW}@ z-QG(%ZW*2lYW04{9`#X#AG%rF8>6fCTOUo(Tty7|T?}5F!GfpQlQUpU7zT7{=@9LL zWgPc1o@dp3G~qs6xFvuKFV=J61O2)1jX^De^#S=By$7c+_Qp8!;`9@JOGzZPuX$_Y zYk&0C^y{o{<%1meJ|63>+IKKTY72FmgwDvfMUK0O2a*5s(e$o`vJOe;(#3=8GSG+X zGC)`FTk2ETQL)%<95))zKDA=CfB0zfgF9j$eHAfyS`oYN)!Lc9ny@Hk<oIwtEB_k8 zC*FmM#>3yL-KtYY&TgHihelKToUbNUp5eZl!98j4(fX*gQjb>pbNcs{R?36cpXdAs z_|zu)X+mpr{WM;k75U`|$Ni3nxPCuUyVy??*4f_bCXVac%yDCH)jsW~iR#-?`Rky4 zbd=*3-6AP+Txb+zy3x1QyT!9!Tk2lumd__+9DL`eiLp}6jNur5)Q1Cosqb3#E&nO1 z<}W$!HXf^*-~S}iXU;<YcsfKHn(WJUA6U$F*UaPsuK2h5wfNL~m3kI><Z~RA+r)E; z|NjO5h!1$+Q3|fTr_=QAZ2T({Ubx1y`Bv>ve@#>>HJ%jL;WqpOIe2!dm0b#4=j6E0 
z)Ud{Y0Xi^%GraF#sw?!%uYJv5lNlc9#RX3G!CV3y;9u`o>RYJ6hkU%DWS-V*jIx-L zaqTiRiRZ!)BX=l_iqJ$)F3d7e6E-kK6Q)Vl_^M4S^+uQZd2oIhd43a>Mr*qJaosIl zxbDRP5Q{(8ePzg%;8u0`hxl+I#ds!r!;1CdLIwo2Vp^1j4$*NvEO+v`9>ur09s_T2 zJv4uCe<f4oScCnEtP)3H!1kq)qWqF&TT8=Ls0Ke#OF`Sr!?FtffBS|1w_o_bxnJlf z57Phr{X*@^K+VyBV$G=KEAO9Y$(vtRHgCcFX|+SUYQF9#a{2Qt3n$LMfBCBUqZTcl zKdE$n(KIewGs&_N|L08OU>g$0$mRFnw`ke?J&{2g<{nsCs+hLQY=Nm<x+gG9v)o&g zz31Urjlae!cq$BL6=tb0Lxrg-9H_!%6(*?AsKO8xda1D8L&^8D3R_k9jS9~y(8_(T z5?WMvT!l?4Jfy;U6@H|`Iu(AP!ksF7O@*6P__zwIRM?UKIysCTrA3Wcs=`7QPFJCz z!c-L|t1wB0MiqLguwARP93O_2m$VMi<JJExQe2V>TM5#R_N+>1QDKt`>s7c@g*#MO zqrwUm7OF5`g{dk`R$+n)jVcULVE~|&YUHI7I2E?LD>c2MLP>?KDm<&gCKc{fAuN8m zg*7Uq0x17VHC(E~LKO<`nh`QXs!A}b&`X7P+>{LdP+_YI>r}XdV6D-jiP-2iURyN= zp(mrm|96mD)WN;BAVCwMy)scw)^$K(kW$)}5CxWoDX@m}MKkjA6bR3=yi+U=-g6*9 z^NyRfJy3~|RM?TxeZdMn;kvTF|33w>3F7FHYYx+aYo#WDP*L=M23pnM4{E;a)O`OL zLd*SggfjlArc}AkU+r^!G)t@;R&f}~+)sF}^uY&c0LORXxPo5z*Lnb-?27l2z_ST( zeGtddKQ34a=)}_s{>y*?1{g=66CBnJVc=PSxuG049XP?J2v`Kb37*D7{8u6%G(9%9 z0kKF7_zRvO;8y@gMj?Zq8m<`dPdxdcw*wC94qX8}6>tq6;#mh6X;g3{;NWO%Vet=u zqyo0!p~4705~DN>t{@Ix9;C&@Bcv7&F>MCCEk|g$JAi%SluYXYU+e(^qu`eSf5Y<< zRR}PwC&%ps9sxM17p`9cpA2{#PYdunfT7q|ACCtQ;Fv^oK^&w4_`Vf?s5Kt|mL(~b zumJW(cO`%+39t%}892dh=;cER@InFpl8j9==vM&!`zr+m03N`z4m^hdhhh6&37#y# z+jyv3?*QJz274#yR&F52Ex|)#TnhLTo-2ru0LNiheFu0xU?X<TMzo*_@M}CIt#1H( zz!)}zPDXM3P~<~6pf(eg1MUSlcNiK0d?Dbj;T-oC@DBig9FFoSBME_NSr`ZCv+01V z@Q}Fb0bB794?)hP-~>nEp-LA5R^d4d{u;p7@U#No3HTnKA1EKd20Rk*Lx4|>faMN+ z^9U=)(Z8^A2Z0vAL-6vs_W%=M*f^!I2*BNVNGf%J=JASn2@afqZUR5SU-1aQuK-p~ zL}6$Q!9OQ++)~in0nbe3xDCMJgG7swzZES@p3ZTv&rtg2EkL)KO3T~<*Wjr`f(?Lg z;~`1x1e}lk{8`W!0$#>Ll_MBXprj=@ayI@!PDscMco+{!s|oPAIq=>B-vMa-3V$f# z8^Eh7?l%`Y01wgg0d06D1Fr)77!MUz54gNg!Ak*M^Ej>$JgN6_+#EcV_Hn?rVw`9I zzYN%KzS4kHK>Fehc&yw~Aj1}N+y`LF0{jF|3-Eftk&6`E4A`vV^rfZAa3e_IA$SiD zhtbY0fwb_@2ww--goi3Y@B_FL62MarXucl<6F9+R_gj^|`~iWZ%TPN+90v?ouC$Ec zbv!iX+5xjxph75s;2b<ONDBcE;-SK5v-T|>>ZTt6O)EL>8>FoSJc;KD@D{+?7C7pF 
zms)|$EJuOJv;gp(RZ0tX0)F=Yb#=ZmRh4lZKZi)rV7qUuOLNl~{UI~P7Bej7c!|r| zm9fQ~TeW7B1$k+_z$h=Q+?ZVFwY4sp>Ge6pc$1P)khZcyttP|<<)yj0W2pEC0Ygz_ z%g^^dhp~CLAD{2@JHPYX=RD8vJooH(Uho0CA^u@Fxr?nAJp~_S(E5TPJdEjz6L1=< zM$f>+9&8PMxVFW{6vM}&BquRJ_zD)$s6&V&m`-kk>-XB}ErkbK?HzFt_I<{^ON>}~ zzz(7guKSz{9{v(od(h5M9lVQaBbtZ3U)UKH8xGMw@$Vnv{_mopL3G1gn68YHyc0NV zwU|oSj>Le$xV6N8J-bV$;9qU*s`!)e+?RI$7c<-K7mFpn7ALWdtJ(h>DZc!KRdRA4 ztUkssI9goaN%o>k;k%gDI|2X4G*IVj^l=i57^jP=#&mat;OTB=0{<Xf{*B!ag77t8 zhhZ@CEhi8ZgrmKVGlL$3?|tVuE1qSF;NK@$d-Mgk<s{jU4#Tz|$Yiwm@y`q#{Rwn_ zvA-$evOeq2fi;+Bx)Bzh<|2uYlp?aukQ6jl!9THPbQ0dfqUd>;ch+9<M!4%Y@&G?? zP@Ex5oesl)eka@Ur{FA>UP|cnvk{Su;%-dw&G6L$)?bav0fJ>rsrdFEHe;*dO<&Kz ziu0^D9f{8;=?Gl}9~t3l&^fRiQ=b*^3Z{MHIy^aM?}+c==8JZjDlT%Xyl{!k=Hz17 zg6ZTaTzA<fT?xE|O%XE=w~pI^itZJzj;EkCu;?1oiWcu&=K%vP=1kbU5szcqKf2+L zX=Xh_P#AFy)8mwLlg|Ic(BfM&Bo}%+jAQ8;f=}P3@P@w#)?xCCU-`NdE}iAeh{=IX zm^Px1;Ez}mKkvJo$S8kwvfdj!j>QNP_3ojAOcpo!eo<9=#fYk=D@Hux`$ZM66(g!p zEiJ0HEG_2VP3uUyD*c>oP&&3{U<IihywmqLL6vpo7ghF?7KePT`f0|T^@}fK8l0%| zq~b+Y_@u?wxo2#nm4?cAIv@`CTBW=jR`Wt|q9^-Bx7Ljq5+Q>pi%b@u&r}<aJj~!i zJJ{|z9odWRQ%9MuhZo!V-5h^%u{}6KJFwWUze+rPFZ7qXL7VScXlIm7_RPKHW`$Pr zKp*75ph=pP88?oX?S;Hb&k3+@_~TDR-TWolCTN_}_@<~kv?0V}CkMi&-ZYwK6Em$Q zVLD8w={2X!uSOk>n<;ZA-BrNje+YT`UZGd)m8Cm=-P`Vky?U?F>#5!A?#tL;nDuGQ x&CC68+e4chdJ?()^xYqSD(03iEqJrKs-WhLs(0e`F*o)=#Pa=a;Sw^G&wU6cQG);g delta 28499 zcmeIbd016d`#-$*hQkbp>A+z=;6V{k5gbs&K(RK8it~hug7buWFmnPuP~w&%mxfuX zIi#hjrR9(rqzNjisW}fgJhc@~N>d!_yq|mTjrKg>=X<^HdtL8!z5o2UT7K?zuY1kY zz3#R4>C6F#W9uBsI`Q@ICU-6Pe016K+jf6%Z*+5dj@>T9|HtY9+p87qzx^Wxv$xj* zYv)eceiwM#&bIwag`cU(=W3XvVBZNj<0(z<KmIbiaa``34&07OXNDpuNC~dzA~;vU zh2yTmyG@NTuQl`Ln>lj=$Juk7<5JdzZyy`NbDb!-jOR9F0t<Pr7SUe()$`nBp5r_p za9l9EF2w5UIc}}+Zg?Q8=bhS(xB@{>ILZcz{`1eoaoH_r<c_k8;<%OV5Q3OoBHRtu zQV44uhg5yx3k3e-FNNbOTIQ*-l!Bl@oErpMvIU5RVsd@q>-axnrL>$eX7aQgj!Usa zIEt;5XX(!4LNve3-rotO6D8fICXCrf6>RLyaq=xa$7PhJnd8J#F<UG(R6GYW$DiZ& zQsOp4*)@)<IapI8mSmZo#1f0ybC77gYm#)$5_qKbRFr;CxRfCs$tdmeTz}wXx^zU| 
z=!GPuYO%P&5+dnr@g#RVHHlBxr(H<5Ri?*4IIY8T)=?M8d$IvKQ$`XpMc%i%K(w@H zT(^W<86{)lWM5Z~OO<MhD-dRah<<sceDZIbO_U&mG7AZ(<PGR*^2_f8ssyXl`8Ayn zZiXT?&&;Pv=Ti6BqdMu*k8(3V1x4wkEHj(VSWveF@p5eGF@K46&u%Y5rN2RM+Dpge zZ{5`tQ%et^EuR%5-}+R&_`F*}g>+7K)UrsE4yH@zP(yjIf#i=<(sNKslpN*GeFi3K zAf4B*sFZ*E3#BjZSu0AXQ}<BE%LS9w{3&A0-zb7S4I?jRemzwxX8zrU_o?JBvwauU zq?$F>=p0ol1v#tPh5lQ1g~+Z~X;zG_XVR4!f%@ILAOw$1?O<nd=^1kk%$a}mue0kZ z-BeoeelQJ%EOSb_bSPaKmY6Qh1UXksmu{wxOdXLrJT+^0<471VkcL5A0@~#F(m^f7 zl6mHA`8{XUwX}t6SGL<RJh9<2#}!xTi;7Uq?`Sa9&Ce*+EqY2a{mtXW(hT!p`I^#e z<dxu+&da^Ln5R>-fcKTi`GE;ER?OLG$o7aRpE9saCtrbcW@DXN3w<uL_no3WiXkhV zFRsXUvl%8_W<NPac0ZcVbHzt2P6?-MhDVn;ZV%-zS`S4Xq;c#?#WfBPODCHP#f5ii za$#1C2*c=0I3$*I%M+z138xZ{*bH|ru~(g&2wz=dlbnMc9iN~G9iOqcokI!|kSa>` zya9Pv%kIx?wuDnurM$G#F5~6-y)c0u*=)9|G;`i~<ZUwyRU!<)NT`-q(I750TvH>T zd}_1p{S(4ALt9m-nuPLIp^K`}77}`>2zf%NwOBgfBuQneQa`IwZ$e5eb>9T$KB{t6 zx2+t|X1I5e4Rh&K(EFlN>M}@YNL7d)iU?^^7PWtW^I%armR1ttBJzllMxyAHY@4A( zBL`cLqKrX}KI+Cyo1uxCX0BR-AD`H4hz0(^3s^Z`EF6=m)!B|hH=`)sLEQ&J9An33 zh(zyU!l!=A@?1j-ewN30)3C`8fJ`9Cl=g748Dwe~={zP<evMds$>N%BcSV%`zI}qa zwbZ~vFm(?tkTehVPK~p{3;g1W4Cyef9KI--Sh~Qd->n~uvEh{}{VX?NsZtg?By5Kn zRDi@H5eZS`e>FhfS|w=mZX|yM@(`6p>Dt(|4$Uk9`rWR?|3c-vh}MGw0-EIiss*f6 z0xV8r(@L69peUV?voKiH+4{$0lvpXCC^gXfP%s^tj<t4<$ZtN@niQc*{RJshfXdXD zf*X^v9u}y4F9ppgIZ`3P<`6^?sG^ZnKrgi%0Tj7T4~~9!wfr2L78YqYDwuw^-Pq!a zi2Q$vlEs`UmJCNzVkr=%$!1ZMsxWs<v;d4e$UNT*4929v27^hHAEZs1B6A)%Hp7N< z>@D}O0vkN&)dT1Xs$sr!Mrn(fKd}ZxWR#8<>b#}1RDhmp0X)*JKM1M%-Q748A^%oJ zY1`N&p8J*-zJw#`(%CfW2b<xIb6C1MBzq%7I+^bZA!m@H^^PExE;VOkKOftz#60k@ zZhJJ(MP<roA2+sgoZ8CT5_|*GQY^Kq`@wV8%Ok9Sni;EmsllFF8r`q~GHHvOiCB6v zW1eM52c@%nZ~w({Y0`<3$>u1rB+YEH8LpmH#+kA`J$;0F<|utQ4w2E!x$`o`(s|}g z42N8EriF%`bXM+y?G{arfr5_12+EhoS_@ujqIUEzNQ6deQ>0Mmp*E4X8UcMoBhir> z=$En9SHjg8x3m}!v3xYfsP$;<)t517s-iDXWR&EZQ)GwdjZJfzik(5*lT7vAhGeM! 
zQ<N5Ygci9=86adiLJ}&{tw(Iw;nStRN;8GAe{9DT6d(VTQJP9z#s3%U>Di(n0=<FA zRg^j<-jJ$aaR_P3kw_*3O_-PgO(HI%)SU`|_QigP(vf<B#+__7$!lpV!CXnfq{Sjq zgUu#N_Q7^Wo)>>($Lhmqdx30{`@2S-%l8s1D?L5OKCZOc@`VQn#mXa|w^RL?yH{KH z-@>pSa()n&=Z3M~UXj`M!Ia5)>ds6Wr0()(ii{C5b>E?TTINT{<u9V4M(PrifdU4b zpe~jrRMdq*)#)Z35>*}GC&Wd4v5?A#b?l5+YmXSj|AulcG0M9`nUA+wcoNDwc*n+L zZnp17%2b9_DN62ZV|kADEBUmt#`w!yLY3m#44;65wYpy@i}48!JB<XjwJ@kdnmNT{ zkjIlGhLx*4B9x8yX(kMB!QS-=4et}qa~40Tuo~GH>wKW*<~zwLF%T}X!{il$*-0N? z=exn+JIm=2?4Hj=_bDMSn(1jU%k`b^{h$TLz-~Q_A(MXhdHH4w_N(v6&?T5>`LFjx zwsXYdBl&^y6f~#Sz)yo%u7MSN(Ht`rVJ5jQ0Shfk$;k^_gR&7-g>+Hxr4YhqL{-0V zk~sC$7qK05G0=`J9@Lai5F7cp*KLvK1W_i`5Aw7Zq$nM(^OjFTN*?-xnNY!I`5E*- z=y~p7iqg)KE-{m>@(V98Wb&NMwWoH_-F}72Sdnio&2&OuB_R#H5qPtm5?G@JI_?QX zj&-vVj$jZBLW*LEFE3iZj}xtxPNMCY#Tm`H2Eui}Ks$s?Z^(2j746cc>WorDw`3m5 zXN6eVptD{NOJ^PZll))lh;v5rhA1p@7M=VP6-IJz`U+d>Z}iyefgF-9%;+qZ>f*fF zm;TX0eG<Fs9}wV$9j`{MjVN8TKJlIJW3BX*4<s>9V}SA6adpcrHH0WB9G%e?DSYh3 zQjM|EBatDB8z<T#S3-{By6gANFzS)O?Jg8p+MNRD9%tpou0m%ocG(zY{1F4Xarh1a zeR~=TEU)j(^Z~5`y*p#}=G3C~UXkaJU^H3@<9w=-4Gu^aS{Ye+KsUj@GrJnlLHOLC z1q8-~xFsun>5VQIrL)w#Z+9a7t<Fsz<IhF~n)zq!%|OYiDus%BD3Ses(3iCcYUMs2 zlSmt0e;U|?pss?sGusxlNZ8e##RpIK4d{+7M>?E=^=mvf4NRCik?jr+5&EaFv%#-9 z?nE~7I$vfE$@gwgbW<qcT7o-$*p`qr?sI%za2$QupwL!b+KFhX2Ad_?+VHm}Qp|zQ zbyTc;qJt)TqAkx^Z-j}HP7O}C^R0&3HN_7Dzzak!2vNeqsQaufs}60Md>l%Rmenwj z*5kI~NBPl;GJp)E^F5^+8aJvAc0x`-RWJ>tb7&oXI!g#k6><aEtgtY_TE)u4;@tnj zcCMwj#EYE?JKQ$<C1*EwRcp2F_E736msjDL>qOl&-`<+zge3+GF_ctZKGBbzFhvLj z3G9KXr%>d{x`g);ocpmA;qk)Hec6%lXkmUl`!jr?`y1GVwCo)8Y;Z&Z^N$RP4SvBz zZgmr_wf7V5m82R|tqnYulh3eNOMls+7RTh4Em(GBg2z`~X$!4V`lh50`#3TqIR~4S zmU2CgTa-`>=yxvGXS_vceJ12zkv*aHmf@5|gXM($8&q2?*`UfEvgoGG3O>X65{(^_ zfK?0%rM}3DF$f(oKSAse;sK^5U!F7otP))ux<zi*pT^%C&ho&1MC1=b+{dKFAln{d z@gvKtq(g=vP+7-av3^++WOwo>Rm}<zCD*amPH!aV>(v2lCy(sQPB-ln(WVtPgoI1j z%ZeZAS5zQnAK8d1HBO!lZtT@&ttR%y!9Alio8N~m5^F+@-0?T;xg`#?1o_I}Vqw!( zw=Y3T#=5|s(hup<8B+BtuyR7m54wuQE~BK`UPmM$FT*M-9jr6Z!N$}r;RwbNon7Ry 
z>{_#MHaRLTJ`1xFy&yHn+o&DT9}cOAQ!QUon?h$jVXN~Ir5;gKR~OlIj~$Q7GK~Hm z?M~8daq3XFv|oQ`uQYGs_sa>J&0?Q$sV-5>(HTyXKIMMu1e@Kwb%75`jWR}9lPCM| zoPd?Bc@wPn*cmwaJ5%GzufrZAkH<=_il%C!e@8$7gO`(+zYx8MsPe{PXj^$CQbNzU z>_tRQ4uOcxVEYa=D$y0TLeEM=xdQUs@`mTFA(}90pz`n7GiA#_jRoC;ldpoMsQz*m zN`u|S5iN2A%nV642dYFyiEb`Hlnhw_$-OWBm4CWTO+o4(1{g)iri5qWpE`f3!8$L> zoPsSD`~hIq$dmAI8Le|!P@=7e;zk}+6`9wDu@lk3O|PJ!Hp9s8s8e*+Xav$k1}hs# zf1G*gL@e*!!@Ocz6)c9RqIPD`;VMId9IY1F=;(2KNUav7BJ*O@3-#|$DV1toYXz(} zy6Y5O(sfkw&~5P|Uy->?mEVfMD%~P5o$a}bD&0I#m{SyC2(M;49Opyb0L_zUGfX^y zbzXNDf@%}q8A__U&Cnfu3<jx!`lc;B*5tzw;__Wox$hxoGvt2Dar-DQjB1;~;Q$Mc z-P`Vas6U(~6tVY$k}Z%#r+6sGSW)`XW>^Qd)Wbe?50x!l`Z?Y1guH;cwrK8PMtX9= z&W<hIcwu7bPO;?&yywRKbR@J>s&QH#mdzftYCUkW!qh3uWAf8qZ8i)*I<&dy_ucod z>xGSbh$2FRhKByRmm;1J@v4h~NH0Z%CLf9H`jyRzd&P6)NGh+s_yU$an=aLj9f|87 zwkeX<1RJakrljQ6i9Dw-y-U$@9-}#@MzKz<qq1h3m54<<l=KX3p%93Y?;lv;W?-;o zNFhzYNtf&&JW4TIaaNN~BIq1~>U7Wo>*m?2Zoh#n_YOvU`K?eYR5k2b$ly*hR?*ro zWeLUysu>FPqN*7s?)F3$ZYfFchCkhEe#X_AA^)iw+CWNz&G2A1O@)x<&6r<&e8@b^ zJ(2Wu<xFUKIh;$}+j_Dw@lDz+ehLXPpA^@^%9tuONPkEdtWRuM0wS@_SaZ#J7#bO7 z3pSuV%c-y)(QJQwP{12XMjM^bt;#V4iz23gd~pY>k53d<?O@RfZ9880ng*EEU^7&K zqI5)?VHZf$%(K{*?`C~w!wfzqrzi_|DV0G?sFAPNviB3(`mg&6BhFtPabyO9n)3=G zv*~4i#yC;Bjkb9Y8dIutA)QvbKQJr5W<G7k+kM`^u?1~j8}|YBDO#ePp-+7WGf<tq zXsz(1qz)=alezy0U*8z+tAuO1=LLDtKV%AC$Ut4{i~wkf<DPIVwrx{3ux*4ebQ_z~ zwnsWnJ!p%4WL-`;0$WlX?HV@2j$J5CiLTNO-HA!k+-7(WoK6li15uGJ;3^i~)V*i~ z<w%jYi+Oa37YhGoUAi<2NP+;AUF=^r!&^JmdbO}7U-D*)x{Pd6hcVkN;gVQ7-!rwe zhXan~2h&Q1yJeINw;%gpi&&bx>M^tLIx1xQpVY$C-+9#5((h8by=D>e-N_bp&E%i5 z(_K3Wt%ozOv=)NPFn|y1mu3=v9?J65)(K-Sv4C!Eg#}MpX1CTIdZ0lpU1$>ys<*jf zlPlFZQ4Uz{;>4tCjG~^G>2RuxfS_3N76vr}b-qv7zHYCES`cQ9RK}3?+28q*Rl4^u zo@k-485Zwgp|6I9PJY1{h^bb0#`^4ezORUd)KsO*bYeqa?I0}tgROWqS_o-im9M6? 
z+O%Drn7>d?boP->*bH+t<_`+9S1=7`EQ_4{K5O&ZUg0|jcK@{yj|vlYX#XkrCuIL4 z7APJR)|_M4#Kl7XU^XG$T%i9Q7G;|=j;qq!RFm|i8mi%WQUz8Mquyrs=*?;ng1#6N zi+iMVeZ+CRzW5$I2cTAPP*VOh()QBTzC?Eu<_xK;Q;F`XLb#RaE)a1J8fgD-Y_<}V z-uhHG-yr53)$dO0;p90swWLTKpU!-{oA~yuZTIlzSe<NDbyv}*lOIz4PE<JeY|v`q z=A5I#h4g;N3cCCGI@S~avW1RJaxVnQ0gu?H-P;;A2BJ0cefQEbC_6)0tJ1v_$R2bb zA{0Gjy?T5o{PKX^?J*Wge!q-papup|r&1KP;LR-RQ3=~D2K5n(pS1&y^KzemNDNRV zJW0C4Klm+vW<O*^6gaCa5y9UTqJ_km6Y`&Dlnij*Q@p>ZUK}hr`Jze>rl3;Yo<9H> zXMxte+2Byt7Y2bPNhBbDPQs;g2On^tgiCevNxZ{r^%ieY(kp6{)e+^@$;D^bl%7Um z(i!$v&rqSq8Mdiss<7)JyVEmH`0{ra+^e;){ZH1rSG=iilX_Ug9GBc@VXV_)>25Q` zYzFC``zQOjSA=k(lKs%Dm9V*vJ?hm(IP{Qp?A_5b^AmKx`x0~wEr{`(SW)lr_9NlN z=3p~;AYJqd)&)P@kJO_JYzA-QSz$>nv8!{GO{ZYYxkSVHk5Aa;-u(+Iexg?DAyP$j z@|UV_u<F~O`o^oi_f%h=>RYV(W~;tAiceCSUP)+z%2=y5nPUaYpv>ex;D4u#;S+KQ zjzesFW}w&U+f;0AwmZnipV-CBSl35rXG<U&y!3bG)u)ND|0HYIC&p>+M$$h%`h!jA zV+xx~eL*#k1{c*1g(V)N(v60oq2)%lxlcFYt&{A2pVtJB%Pg^P2mTV9(YHg$sLyD5 zNZNqdh(!kHSHMwplF$vTx^JW~avi(hw|%pYb<{k^<Tr;?@G*IohAvP`;m0a(8g~22 zP}Z|wVZi}XcccpzBNUmPpydUxD1~y8jzNVXM%xjHRcdHS`Ifrd)?;)nKuNK=42A!6 zGW<^|gVK=G2*ZMV22ck(h#eJEu$_dy;v^<rn{iLHCJ&<QEMbV)i_-S;RddtRDd0#d ztaLU*J1>^qKeZKIyzu%WTCA#cQ^|vFNn0G51G4y%zW5CFlH|S%vkMldkJ-uoeok*I zp2Z)t-}-ls6+b3{^Ku&5(Mud{u-LhGQUqG6A@G|Of9S_-@PO9CzF9}bcmEI|??z{! 
z(tA|ar^MO-R#~^=TNeFZQnzGJ939}zZ(5}rjy0Z&rv^M$t^rTT2Nkm=M(xBk?ACw+ zp=3K7Gq9U5@dn#EuxZF0G%HS?`l5WqEbV0{UjPG*>TXRAy}|AeG!~FyKXt#a9j(o} zYHR|c^kb@YMy|y6n{b4bH(x}Bp}HjrDmR8>OWHO^`#{^Y83wKf7Z%-Lk!Unhplw7_ zuHOog0J_HT^7KWPOq1M6hP{&R=XSP4W(He@Xl>{;>owx60|#li?qDAb3K`r6{r%q( zxCQmI24WT1b(BNkkMcsSfha&KjHViAn##c>F=Rk4b&scsw7ehXUPGDZVE=Zdl*oS; zZT3HzYH7z1OS@tk^${CBI7QI+V`~Nncy~h0MQN!yMF2vn+EvAl4vq~vjEvFZ7XQ6y zQgpFeju30|^J~m&NJK#RbFR_yCQ27DIy}L^Ed7XO4CyM&|A@Uiq?zX~XB1Y)p^K#- zOWfafX4OMl1?3ad`u$*7q#mn*SU6_<Ai1YGGw-2ELNjO9d#Io9eQfDs>0*kE0crPh ziQ$S9dt+#tcR2-EtDKTf&s$vQAcef;#7+#2@pSo6Y36QX&V7u_Tqou-tfdh90qZ!d ziJ*64BZnmhM6RN$%IGMYr6<idTz=q;GVlYoV^}Mpcon-oEWs(ok><#aRV+B`l@>aT z)r5P{e^RYASS60jS=F>!D0fv+IH{ZI?D8R!m69(uu*F&VfjJmnvm$8uHGrK~x<*F( zr6q;*Xkh-shqPEan2MmpZ$(G_=nlFokChn5ZH9N=rx>fX7(YB_9}UlLar%G8$b1n) z__r8|k6HAHp4kKZd2ap~G3Noc`{t;PXzf(bSu$}psDG4-V}_^P`z$mnG=hVz3da|- z^_lOy))0Q~0AWa}XM}+(si^M1s?6Wzh=a<p;+z9JH)4qAVm}J)=Se1?YI)8`mNIgh zvoo0R-5AOCjO^xp!a!%DLJXgb99%-{*65f%+t201Va4>IE6}l4*<IX|bUfEx^1Xqj zx8X2bkUdn`aFbokHtA>3o>%HtEtcqQhT$ui^Qb{Rf^Q-NoUdZU;(L}Ln7(+60kgqo z_+SMU=SO*31^UO@1a&^>DcX+bpTSCd{|1HiXrPVE)tbcTJJ<}9R<JFjnt6UkL3T$_ z>1v!BVwbZUqauVgcbUWJNTJUS)?#$HFykuA9NoNq9aE;JKW!0{zJvPH?kdzD#a5m0 zsLnmTB<nA(&Oy#Z5bP@M79?ylj9_fX=vZOxb#`@hOQB&I^T_GwU!+5xIMo>BsaGfo zv>7KqyUa%BjOHh??{nUB9}k5$qx4lC<9JjS8<X4I6i+H}VuZf<EfO}vS7NMNAP0ef zkTHvG%T4Tf@sc8xpCF$El~!V^YYvM_)L}<V<iGPn<bB{);nfT_99Q{s#iQii@e(tQ zStESAk5!IovMkEvx&NZg)@a!hL9@81SbZTemB`=YVx)@Q9MfHp%2=zhNo>~G+1?Ye zOiDeBVov>|%0>D9^5_f8#svzmUSPrFQezK(OTAJ7UH`nrL;gzjWy6>6iaAv>Mrm3_ z+iX6Dm5ggGtRBerk2AN3eTGHodKj73#Ysm7i*|0`*EyoXhb}>BPIWR_{1Z+|$6@aj z<jNf8K0ZolGH_@6@vV3knA0-gJhUTfXruf+#=F+gH1yfe5wq++eGZ~`lK}grL`-h- zp8K0nF|}qtF^=iY&6{L`qcr<cYW6Phl^Aaq65Wo}(NcHoI5y1OA^9N`NXX^4k?(+_ zmXAiuH^)U`E4ArQgZ-lE4=Wy}>HC3XKbcL;xwSi+IKjoe@MW0$8Mbgjdtqrd+czOs zSoDaQCPoMz=UCdr)vocUq2$?05@qMp?B2ux^CD=Ndh1H)uef2!kQ#9LHm=-eORu!) 
ztiOcykd{6%jbPD}f}Br4J4Ce2r&zB^6Z`JDM^jq4k_nPqAxJY%rYR(C+_!5nTBV+1 z!YRe7d=1+G(k^?C;+^jy6(L=+re(#Ys2=7rIe{O+I!`v?-;tC3G7y|<cTu!`->o#e zA#HJ5T!+-czxe!?jd7P`#C1&VSZxWx;V@vaOCwv2@B~pQPDiZ<m6Lr1=`D6{ve~_O z1sdA!2IL!1HK!@dklzPWLd~1rR0fj^EL6n`HMYb2%a&2tXFp$VvuWa2ruakr&neC0 zKO2gRn)!oW5IzXu7JZ!~$PAEai<^kHgBeLvWc?yHsBm>?MkTMzsoBO<Se|d`zjp1S zKTyQy!G~SjM{aq7RZne#l<!Ur_D97Le{ovt4%Vl3`Cd!xPzNe%*ff(+GK8g0Ya0KC z73;lJi?{}xflS+2JNF*PP&#J2eH7YC(m`o>E!5-Rm)UIH)y&_Y_7T!=pBFaXrAVos zht05Z5mEZ$_t0OctiGre1Zv@gXlj{x?onEtlW0lW=YpWI9R{4Yq|jr5CEd`6R4SfB zL7kU$yYLTh2d%yb@?s;d;<Tj1MY2oY#T7Uu;aBI@$eZ;pbDJK7l0{9A&pG-zs_^XZ zRDIfI*sh1z45bT6m%wuZxuAx|m|~(`u8<gA-0f5-i`IrI`S#May06d%?<44TESOkm z0_92YVaTSvjQZ0t_U-i4WIu2cF2Tfd?^t81w+|q_giFOo^ou_Nm6Sa8H<}plg3geB z&5$0aSN@Ibo#b)9v7{N%#_foR_=}bJG$HE-!T$DSBAY)Wyv@KDsmhZSIdls`$4x>% z9Ynb(9FbeS6x)B4othEl{uB>swAJRVqs(z;qQ_U@G%l#AM_JFA{vlgqklV2g>3baG z%Ko5*>XGFKG3nV%H#z?(TR1bUKzo!o&+M7npDvQ2X32>dh-jcI1C<^xDo2s!yW3tY zuKD(8F(3J&%25{B*Y{u`aW-AwQyllWs}a^AE<Y!uH1bp}){2~lbZe*QTz<@6GD68C z{)Hu@#Qr%{qkJRl+HPvddZ>*HbBs1zV_0*{*i~s~&GE!PI2AFM9}@)YefG+aCxlfM z%=5UJb+Uvt`RY^bj?b%Xwr)78m2_K6*EO+n9T-}p%&uY!EX{kRF2yUB;*BUB?Wt?9 z3PZ_#eiTY>Qk5l&uH>3z<Wyj_F+vbmt*O>p>;zAAJttKR?rB$aghp+h3tA8&e@g66 zOLLlYkGdolT4LokFSDzJR$=jDJ@Q8h6$0CqA0X5U>`49{myI=Q)}?Xm+gaU&5jD*H zr$83^dI+D)I=x;c3^>L-PK|QvoNhhrBtPuCbL#A|ykLKoT{|5d<bXYX2Fgldsrm!Y z#hgc|>mHjyH=DW7NpVU*n9cC*>ukgsqm!%R`RH}F;7qXn?pU0OSD$&^o=XMcIXA$* z6`u337IOpbe@FqR$J|!J-??nw+!*^W@XCd4nH$Yp*|nd|N!|#i_Z!r%QnwwX!0MsE z%5y$e+B&0Tzz%7iIgRcQ#?$pd>NoTd7OnGD5o<dyR@l9U%{v>xisyYD@NI8QlSfD( zhAtZN$fB5%x}-+_mQ6bsZNKbKn{DT!`3}7M8En;BuZ{YatymD*e;alHQlRra<#HvE zCNtga^~L)c98BpV4?6YznJ8-m$vN2!GiG3{3kem-&l%-igLri{bhd`g8C`yjc@)58 z0<P7>$qoBoOy+lxbn%hdGn0~Ezs7MhC*fgMhIBu@@-Ix`w)3vBw+jOKHtf@a@OH~y zM3OgwL^|qZU5C<QvIfcTs$6-(OTkCqEGsm!50|!f_feS2hP?J0c5!JFq5LCeTRO@0 z6UFYI-%OtJ4a+ah3J5xa*+T{qJczLS=0($#?PGUJ*9nEy?CrPBeu2}_LOBhn>}=d* zDp!HB-&t1ucDVP=sWfjb9m&L<f{Fj#2=?slc23t;VOIZ8cb0|t206fD9dfIZ4SlDx 
zF^!ZAn5;{5bE=RgV(v%a+m&p`JCR053M_FX?ri0e&CfIz3|HB`cLD?3R;ojrN6Hx` zkxk*nonn=4!W9-?mf%;o8>c~x;EKbT_1oW9WOf~96U$7(#i?v*Su69$P%zZT8Wyh% z=?CQjOtl;nK*_?4h~rJcRCcYbg}xR`k#e9K0u6uWMHcj~so)Fnlq(^!C~P2sq=S}F zteq=~rs1(>GBrd#cIo+`aMK}>$x5ynK<o51q)!t))p|1w<HPPEMRGgZbJC^3T@I%O zIKiS}al?|a0JWfzhml2B7kI95$TSjfK-s=`jlEh@Acm6zP~CBEFI0!qn52;63m8tr zMd_I+JrI>0A#w>=)?abqZySvJn1}iU8HrBT>%65sGU_srnU)2%{N(G#q;RJS_5svJ zs>0y<908P9@e2Xj`)f9SSxaF~Ia|4`kC4=a)h!DYdQW1Gtcfu6(9T$PlrQ-FJPHJ> z;v<ZF^lPjAn3LECF69TIR=0N{uv0QjLbzc(CVv)+Mkvj)<CoYKRpkF#`WnBF)H5*$ z(+L;0>L~evdL82d%@K0_8z?qO%CYa5JQ=YIlafpGcuPw$=Qs7*EeOY@T#Bm>qM0uT zkxK|GTVV`diX<rSY-mHGHF6%j=;~&$FdaUJVPs^NSNQqe_(B~@-pE__QYCej%;~)X zVZtz0^4=w3=}<PdTx$R2&uVR-?L%s%$u$_!su?B$$FFmTbqA^uZZq^BN129fgtS<a z?C>3nS~*C#a*Qop8RyXhyL+nbSV9Hfu_hg0->qyTOdresTsgoidMw0qWYoQnh-%p# zW5eE$j=Dca8D*|?a4#sMP2{06biV_iUKJq#1Nn<F?DO|yoi7!G342^I3tZLI+evu{ zkHh0mmA#eNt%SNySdUd9UC(@i1#K41V%(*LDXC%n*$i$V)1>q1372e!@wqSQyP!!r zu`%#;F59vyCUgYSNq>Agqa-8?lzRWB4B{GV<ouKD;i~t98`Ig+58e~H4`${M(~K*( zk}hO*waHUKHL4U(x3Izw?+8owvdJIyZ<5yo=2JUbdc);5TN>qEKvi)@w6SdajNSaG ztxz<I1+MPr*J>1UF6maQEb1M_9D01|`Xf8KIw<y4EPq&2=6*)UUHl^@-u}Rz%83~~ zF<WWd9%_U`H54J#Hi9Bx(#Jy@&(3RY276jUFXewKU;C8Vt%-CS{~8idG8tOK+N>F+ zm+DZ&Zq-!Y0C~ixY{Qyn&40w&hULM3tabjGz<f_JXF#>&DmUp%YNX9_NOC0$=qb5n z-Oh(=TzH?a<B=x%zNRr?V?6WvI6#<shQ)r|S(vqnP5#&<EWgd({y12eb(?MdxS9WT zythCcebED)5*y>yv}Sicju8%;ng6<-Li{jRvo6-Kp>fA+Yctg1xInAyC(M0)xZs_} z;?}nprfgsn)|);0ZG71y*EX;Z)=vp2!XT&fE1g{>`|k^Lot*xb^z!XA7QUf?89$2< zW_-vxeKuPt8_d4`EF*Zv5j0y2RQ`E*&<X3%)+)yuC@E0s(%xdBTM~o^gIK>UkwV%K zHfPJL!t=rG`z@V>S=Gz71`BC#ve2z<go%S$|E-<d429JQ2fNZ9^$R;(xuPsHHy6>w z>-~~(3JmEmy2(jQ_)*SW$PR3aWq)k-7M!fi;q#S($NOyU=ZV7P0qo-EuLbvm*+wZF zt+ZHyzo@d)$`)C~lD9<*e^s#w+k!e)^rwMgG2lCf>@+-!!^aRb%e=6(V=Mb-fi4yz zf2?jjVR!sd9$bL@I}4r*+27m7vn!rXJI8MCZ&z@59*v|Pp7OUUVU%~Ngh}3_5>fIx zm57tyR|r@6U6qj{FIEXrE>MX~d6r5HmZzvhww$XH<K<y0ktp|7i99)dUV)PPY&k_? 
zNrg0j+NktmqN7y0jObvMhS?1EZk1k3w7p7iCc0rRB`CqV48>ig_YhsH(iKFXQE6Ha z<$`L}bCNv!Rr);9J5;)s=nX0@6TMQU>xo{f(hWoxsx(>NWs6F~j*5pmDh<;<CugZN zUDC;!DqR3`Gbg91o+zRdRXUF7Sd~sB+N9DcMEk0=NVJnmXA=E<j#B$PqU%+9Hqp0K zx{&DeD!rKKnmJ1QOJ(G#Py<#Hy-TIX6TMlbHxvD#O79@LOr`e_U8K?#M9)^~YN98r z^hu(#ReCMa{Z;xrUjUD&dTJ@4lS<1($EkEZ(PouyAlj(Xq<qV6D(yrxr_!E8Kg6>R zi~}RlvPzqXzO2$wM4tp*03mu8ARkf#5{cfU(qyrhx2m*A^jek9Bzn0@4<>rCN@o*2 zPo>8bou|@yMCYpXY@!FNbRp<La)#<zOaUn>T}E^pm0n47luEB9I#{JQ6YZ(eJBYSd z={-a@%w|iz4(aprYFuyW<WsYiNrr1L_hTxjLgjdZQ>k*cshq*!JXSd$svI10-S4TK zC9_$>*QSDbs&AAUZbJABmD5}0M1eD2<s_>d*d^UZsGJy;lL$_Kl@p+HQo!l1avW8T z2u_O1`SW$92IIl$RiLtNs;oS)(p1h*DrYu09aYYE3P)K~z-*y1KU0~sx`1g?nJZK# zEi+*HsLVoQ(m@>GZE3HDVc}+ed-NVLFwiw>2fJ6Wggr6BA1he)p6hJ-r<<G_pV;q= z+xt0Ra0$mpQCdtZ3|YwSZ9moFrFCcc=mCuKrVW-(-0I@OH9-WsL|61#V<>$B<UXIg zCGHDUC{UpgfP&F6V+wI>k$+?BS3eoT*FqMvaZqai)B&k1>zh{!{)A0QJ7+EeS(gF} z#h;%dmgN2$;Pz4!4y->^Rvn;j9{d+o|7r41miw=ziYV4hC(@v2U^0qQ*;=ZYbTlJJ z*M2k%WJByNKDglURGw_t+KYDIi%BeSe^X<Yn%DrqcZkW+S8q1MZ?Ce!`-44lKY^Ny z9oY$Q@{D)^v%jzVC~RmymEdy$N!Mo-yT0G-4^G`_IeE2WMKMF#5z-63V8P#xOHAF2 z=0GDUsP>kk!IyT2{R+Anlq#H-Y=(Ecp|mBsRZ#x{(k}MRx9v<P&_#5s&<|Xjp&LXp zq+0ls6o1=p%<Di?VL%e=a3DhXY6}~FAW(dt(!@U8NvyQliKW4K_ynzOdzzB$9y|<6 zmvG~>M)4z@zQbCk1~Mh^DjmsW{m1OP106yXQ&giyJnlU-kc-l&t~NvSG#0Wcs34^u zJ{nZ|b0BP0xNFj_dlOmF6QVS;m@3H`(jOVpZMpsfvf1JB?o37DZO|9fu>j*M33{T< zaD|GXbfLb8QTLS^yb++3J)FWwW1B+(RGadyibU-QdKxMnM|*`LRhyxK_$2VUDzJG3 z`6=8%2|wQ$K1dB8|04XijTC=RV|aTIuyZ5gzYsvp8G#aotK#^4woy)>MpTJz52Wfm zQTgpe;8tJv2?#v2+eFb})*79Wbj^Z;C(3rK3l++}ltQF>o8e3s*0CbpJzSAIVQFJC zEa<|@DtwzcX&fKif>9ilg04#NRDhmJyI<HqMef>#9jk~CnoVZ+D_VOb!eZ7q{SQg3 zWu++Gkl5_X9v&;Cmm%R2`>ArapqE&W@53W9alus=F6l&IiF+6F`N-#%yqK4{OW51r zM~(e#DyHW|vG`QJqkMQ0=q%CtwD$F`D9I#fKD67Sh?hit+Ii)>j|_Sm(IHi-B#NFt zt%L5_YApL0BFE$->qyUs5z6|kcGkDV*$s}|c?k<X7!(tZ%#}G2{EDJ=A1T2}!LIIS z*{LzWP7T;m!bTr#8(4{O?MAMmE;(Jl9oMY*OjO?3mVI$BS}6H~T{+m+ZS;1e3LRV0 zl`UkUhgvjmfoS20vV#)a55<?+V#Tm<yV8L=51)wLgb{w(7G^orvtW`B%5qF@V<0V8 
zUvw1P83J2u;c$s*Uz_?U&C#h)U$hf!d3m`q*73S53+rozGR}2&knAwTx4Yz4n=ZNO z^~(?91c_=;V)GFTnzeIE(Ib5k?aDU8^UfR>KF<uB#1%}WtZppjhX`TaXDs)Jc>7O` zFc+@+p=o3+2po*`MPJg)upO)mR^QRmhgW22l3hviUq2*=9ZlA59P~xKlv#xL#v)#C zny3;#e9BS}2MF=WEbDN4Vfv?R#o=(_Y$vwo@Cf0}cxJ2$7joiRQdN}j@n$xrDl*_y zN3_NH+jmh?*k3Yk*MUGfIh)z~syBs8o0$8N2;r3^*1LMY@B~LD_=b06t&bcR?sZ`N z(HP<UM%MCZfPc3R%8|j<4o^?hlKw8(Q-{@vOJpOCwhjCNhZOC`;37!ezwLQKJJA>I z+2==t`y5PEn{MZGoF(R&MZ8<zfbQ7?(c7~jNN)h~sh+!!GR^EOl@3a=xTY4)=K}Rb z@eq-FC|Lwk&u1@UfyaV`Yol2EW2x~R0<0#eqzfC%aQmf6auDX-f0!h*HjqhD@C|2& zkD1!dco9o}3nUU!ZIX+^(OhJcoUU@^^p}EX3&|$grY2BuS;#VL{0+tHk;{A-K1E!6 zYnH0@Y+g-ZK^GI=2a>7kQJN9g;4v_teG`WEr?q8CdukOjudQuMT|{d+29`no97itP zS-c5Q8&xgRYNUlC`N&gV1jtXJdSe$QqXdQsJ`l|sIpSbV@u>wF%5K0uo!M0Q>1(#I zuo-KZ87f52#sxsvCf!D%L6;)xzZw7Mf}w5iP6e>W2ePu``d&YE{pSTht6k`=Qp3_k zbOCVq-!1?)L-7CR0-(4nyLbGb7XXJAyvVZ80_6gr>xogOGTeVyuj}jZIq*?2rxMQ# zD$;W*M7!!&^<Nh`?{H&>PBgfgi>Mj##tbif*mtuG?8?cO!WAoS2ign!P3+a54hv@% z;c8%<{Y-3wJKs2!%L~>OxE3%8zb|C`86Tmjm2wI2<wBamomwfE08124a4Wh55Zc{h zAD(%GFJSI-gYCCrMI!q@pT@+w@xuIIGX4v3+u4^t#|pxBR{L|4z;_>GeE%2Q|C7Ni z<g7_Z=)_(*n<O+H1PlMgz-Y(+ZrzXmoaLQsF4(s2{NS7u@1C0fqKS@M*pBmN_h4Vt zoGdY3vM!%pKOYt|I0m~xnmLh}*nloHS7s_%@m&zR4yHTJia)L#R(wB}dZB&%Qj0om za)FxZbr~iXEcvsNl5awLm^t;|Ecx<#m^OZ_;)1=f)`uOw5T5wg%*L3qx0;x!4pcWP zAVzAi{1hTIH&4A39XXSQT@2UP&n1})>XnwyJFNG`&dsfNqpVN<#y6n7UWnIqL4jH( zxu0Bt0yPzL=;Oc3E7;KuVea}DLh^?**rN?$Li<U~e`9EeZ#K~wZDE{+eW5gR6k?)X zENx*%l-DAN?yRE7!$=NB?40vr<ujdJw24)23}Mnn*O;M-m=>q!KjWnQdz{u3Co0BK zlgIO%6Y^6ip_B*S^PiCK!Sjziu71LNKWQo8Hx$x7NfBzx*_)pXX#Z^pH3dDp#KW$d zOp1_Y&ZE!1XJbmIs8bp{clrbjm<rK)gcm!w_b*}HH%$;)_hnygnl0QnvsRmvg+tTW z)XnL_7&F_uIZ|-w!)iB+!v0J^JLdLdpo5>h=NT@I5(}ew;o=yUyYRIpzxTpYkT|7r zDcD1n?O3@P7}`>>YceZb>c?&^93k9DW@(F>2;U~}oUkZ^cWZ?UBbtH!XE8G>tzFF5 zrxW8ru}%MWfDL!^iGKPbPZ0XAJ@8uzW%4Lw&YBfXw4aFuovkQ3B(OX-ta!1#a|25C zYjGhj{I!Tpc*`t|o5tRG%P9OA%z~0TI7Q>G2A>)Pv#jJ0r@wGJgGX4wY+-V^J-v)& zZ5Cg(|KlMDzmj!A_NzPhmAo$qeY&tssUaS6{^%Dq5k{jXt=O82E*=N5c+4Cr<~+eT 
zIZ?!RUknxSRPWryreRgsgs~m{2}Lp35%;uy@)0bS+TM0`6bru8)+2~wPY`o1Aa=7N zHu6%Wux1E*^HP+_PmTQ-=BJw-U5xw5{x74)j$+3znWG%Qp-FKyyD{Oqo*3n=liahd zNVvI}^Aq;M$yOG4Il{0XLL|KWg|Hl+&H7w!oh?9^?p|==(nD(Vi*g#W&P#CP^4pa5 zc9vhkwpgb6@h&kV!E@hQJ+<6H=tPgt2c2s2)RvauwwI0wvd=<jcz9OPLvs5CvH_l0 z4-M?b<tSm!BIb4_Sa|NsqOZh+t}j$9zK+u2+h0R5LEpcDnE1Q~`h?FSHuFl)0uwQ; zb5V+LoL3q%{RArlwmR3e((!y6eZOC4XX~nE{u{QPZpHWVb4eb*Z2{3oU9nfXseT{$ z>+tE;aYg8r!w3>f67a%dD3<CTF~RlXp7m18eeyb~rTDmxy>c~12wcb}Up1K&ix8GY z7(y#7GsPTwihrvRXQ~qWlU7}2A=`R&obclUX1F#kENl2nja$ybDt@poSCj^%&@aWv z4?43CuEl%%7h|e8K(LC7rdE5F#jah8b2|d@(vcC^um@$apxWs|tv7qSHb_YDVq0oM zeS(S*%LOvZvn)uxH;i4b%@!6fWSy@E3X>PIq1R19UvD<=`bhV`hQ1iBONKJ}da!US zi8<T|^{gLHD&b$L`sGz(QrkP@S(_VChS{@dt-zU4Jx!O2&hnLMZ2FD(&eI`IA3<st z_qsmtql43_CDP<gA0u9gZt`eUB4;gi=^Jmz%ho7bexCef2)llxZE|Pq91!|-9HmrY z!KoOFW?NbDKFT+#?}7tVp;MaBYE|gqIM(lGypXV(y>&BA7(R~ez1du7+L7J5nHF?+ zEZrzMQTu&Pk&*P=x*16M^jOyQR-cg>DpOx1{*BNa{9J;$8b4Q1?bNdbKXIcQh3IJ= zurgZm>C4R}AK7dN1SAF>!2!SRAYdP&7D1{5D10Kqd(tlhoDfTW9c0J(?7^*w=HGcJ zqtOGqmArdUV}H_eeIb#?q76T8g2*+Atn<HuU1to0QGxs$6WN%5#R$(6*|LAVpS$on zy{=N9sb;2?zJ^~y!UfhOaFvVJWKp^z#^7voN&j{w3TQKIc16v<qbD4tgK#v&FBahA zpY>qUV^>U$*78u~q`mFx4N~G*MR@?*{7YoS`Wo6(Y?u=()kGHr)m@Se5V9uf@?GV+ z0qp)S#)7{vo0WUuYWkG_F?@9c-WZN+7*YBeg?qDBEu6lnKUQThKF6x5sG=Xb(V`#* z*~G`!MB53`ir;RClot)4^3YcTaydxlls5y3;+G2`pfB=3Do7kpvyoQwT3eV>b$!ue zbX`KlZ30CL%^)2v<ofGu<F7w<8H>ENt>r;Kn{5xB<?37{-S?A`Fi-niqdiN$S&0PD zl`eag1K%xpAL;sJvCKP$g57=a!JQoyt>*-~+l1~wH~s<gBP6P&Zv`omc1%gCzwoes z6!y?l#DJ;mK87J|Uwdf)-HxYZYR1A;<b=c$vESAXL*I}>;wGm<1f1h4$CmHN&ff|1 zosYIa@h;$d9^3@sLwQ)zSh*b93MW&kpYqD``$Z^HoKhrb<YMh%H0`n3kX<6y&qv>< zSlngre(aW<8v9OrDs1EH%Y~{hyYcnqfNRRiOC|;R&#SD@?Ra6rNmhK@+~TnX$^Mtu zm+uVtAFnT$USVf%#|n2&F#9`Z=2mBnjw;8m6shw^<gt#(J_#s=I)Pe>r2~28y1823 zI*awH3l>IavdMLElg~%c_i0D=p+YHXSYS;y!@r=+VtNi7LQQr^)C`i6dpCuFuWH<= zCGnKm5QO8p(jm6U*&uC^bCiFJ@Gn_BQwFH7sP5p#DO2v)o_XFihM3SAWr!4qAHPxr z;_wSbdyyr+OP<bBfAfJ62fsg7*G?5PvBJBCkat&UzQ#x<ilmI@zSG%{cLS2!Pyb&f 
zg8vaj#d=w;y_hPvBgtb;?nQ*0&--sFpldn#+dTH#y%5($=pKAbdQ?tr$EMv2H_iBO zF)*~Y^`kXnr9Inm&)}XZ54-;&VRrsrrm+aBm3Bvo(`1EuN9iKpW+}gg#BG`OLilAl z<s;A-sL@RStiRMVMgEYL{uU^Vn#SJ#Eg<&Qf5iQdiehQPFEsok9KXA_uAtCF)#JFg zHC(IVHVvyayr|)C8rp}e@k|<a(y*_F<25YOaFvGJG(4bU0e4(u{GwrlhVz=L8BEkL zHbV6$YM7>BmWCD$-_&rWhFdkP*6<e%pDI|uaUM<7grhX<q+xdrXK1)c!(|$7*6?c$ z4{3N(!;2dJqG5xE4rVn!Uqa;1g=&nJ8m4KOso`i1XKA=d!{r*T({PuDH5#7L@Ro-4 z8VZqW0fK>)Ki5uUWN0{A!+9Dm)o`7L`!zhO;YAH)4I4Cc(Yh{F!<HJRXgIVf4+R^4 zi#3Mg!9TtcJO*cENGB7QLS<dtO~t$|n!gzMZn%&8;5#ZKI2?#5VNJE4tK?`E><L$o zkGSgX)C$*j<(1?L({>JhFx9;vg;V{czR+K-O-+rKcKR#S{MZ+izc|g0Jzx1d`L`<X zrTOzTzqjT$X?|EQmA{I=RC%@1X!}f!5ugchnm<tUmpxY_1ZjT!WTR36GMmxg$!DrR zRP}O;HGi1quXw74n>2r><_}l=*io_@RDs!=K#JynL-U(7{~XQFY5wP$zu~bO-=_J` zKjwA^34Bx6-3#6M*Ld@QsiQ_u9%HghGv&+}Gs-f?ls98q&X}1qP5Cn?OdV%x7CCul zvvRYZ&nmCb^8w}W>iJ)Huk_^I1h4JpRabs%!FsSyyME<Tx#Bfl`B&cj%&5zFbwXWx z3Qh7ST-hBp=ck}O&@7Gq@_9bINw8#>ulC`)ccfS}1ag~k+<dstv{+ujIxe`2vBEFU zknJ1CagB23DEyo#T-lxS2w%QsW+UG#7C(dvcdaqLuLI|s>%{rCapZjCJ@Xpn+O~jN z3)d*e>m4|Kt|O;!1B`dgD?jPO8w*I9>XP0PLl5qdmg;Yy?OMI`d+F^F6R{DqF^8K7 zzX#WtgR_f0=hDWRbLs2wLS_$YGzc$lo*-R9G%M%EdvUzUjWgwXmG?97sosqVo`v=D zQ6k4}tt<b;z=sxCvQa1<N{2t*QQ#c;50oaPW+WHT%9#tbe8q+4?&3n*?BqiE9h_$- z?ijK;E)Vg%lma<&F7Yo56p_MlZ@~H2amrsnm=ovO#*_1$;dauc!YR)&+aZ(UqZmB* zvW+Oa#c3SZrrXP?UJe515HG|v7XN|Ban9)+7q2C)X7BJKdur!>2tNeZQ<J#>n(NMS zQ#3!X6j_bq<zvtJw800+KJj{`K`DXk9#Hn-8WUIp`Yhaw1ibAz?>6q7_cGVa^8Wt3 z`9<sY?#*$-(K?Ik%9r@_5i~Hj`SYVEHx@q>det(xt#!x-e_jDDT!6)m3&?fl0@{G? 
zYpC}*=~Y4URFaEBv26^)cc+^C$MCm@|FP!h4NjcF0*66oz@Rh4dsUQ|8TsHBCGsE6 zaRcDI@0K4h@?rk=JZC=|?XHYeXN)%|PTv<Jwcf}_`Wsz1W3C%#?C;7MQ5s`k-;>@I zo_XcI0eqaHu@29t;n#HYI4<rk3Rr&Iz=xNA63qKiT@M8FfiH5dT)=S;;NtF<e-gk) zck=Rb;JlEkSG;?LYo1GXV|qJbmN^G^pVGr0?~92>ldV4{8|p+8uG}+_&m7Vi!~bpc zCtN0C5b|ah&fF$|Gyf4-Z>;q@>09BQ=auc5$<yHJD`cvJ#D#O2@p6m~hpDs>ZiZ5e z-vjw3Q~|#rz8zt&Al^^eMR^y2##S8fA#fh?E^$hd7>`Op@lUxWh&TB)7VO8(9QP9( zjpUy;mwy)YkLDQj8JYv`o4e&_gZR*vjd9Ly;kXaCa@+;2`T?Qnk!7y6&L<r!bb0pK zc9}vPN)!>yM>x@>F$E#l!T{bkr7?x;dpXW$pVlGB`G0L_$UKH?Z0PdI0esYp?ix^q zgB%>XYg-^6NUd-*kZ&3h5a-H;wfTe#<3Hp)>pf~;PGNNsIB8%0TQDE}BAw2cIBq1I z)4lTO5I!`KN=ZX`_+^eOf@^XQ9{kyZ-gt%Mx@vyjfZ>Zp!tkX>o_n@iX8Fkg-dJ9U z?*)SJO{+7FS#KxKyT1#j9x&dc!Y$7=yZn3zZ;GIHiL=LI73s+NTiS5`Z4x+tK8|y3 zT&Vc+=uqC2tLMUI80-CNm0_J35bw^JEU#mYoW+^?=X0hu7S6=yaRIgdCk++a5{I)Y zPjjXVmPz~yVB?<yX_fU@0!=S@Ch*Gsz;9dsA7%UhDBJ&kD%<|bDE*(xcKMQU{%HBw z2>!ftH@=@`2L4OmJ*5fH?04rS@|nDMVYaF$H;(ec?A_i`{4{62?e0abc@MrIO49?) z8k#f=*3hV-uZEr)I%&vh*ziczCuI%KYgnUUg@*ez+@oRvw@YK}&~U4Un>E~^;aUwp z)NrMS%QY<1aIuE7HJq$ru7-{2XDNQ1qWWtA85*W&n5bcthDHrNHFVRE(@_3XZGrO| z;zzxdZKJX2Pip*X4J!!g6t_oX?9gzthHEt})38v(JPikHn4)2vhDHrNHFVOD)3D*8 zTF`o60WEg2#;DcsyoM(=tk$qX!#x^q*04;&LJjjYqzov1{BVwfnHr{O81<0rq68Q< z2B)F?K#g!&!!sIIXt+|tLc(&>B0iYa3%zwk9WmJ74G;Q%LX>;@@b=|d?f78b+Ac~o zpZFAeb+VjvQZds-#WG44^~g+C5!P1a*p%k5`}=nMHoJoJf*Me*VPisF98^Az1spjg zCtw-n2%OyS{}U|W3beoFTEcl+!vAlP3PlAa{O8}+U)2uz?}*dPe-HRy_~joZ@skRS z*nBOX9CrzB7k-TIGLU{7v_JSKfqQZIP6NFk*bH}1PN1WJ8E`8>6Ly9@Itp|Oke>DI zz^_^ox<jSP1K$%E2uJd9z}|4#_=&&FAe`Y_1mj!@PaMz+XU5MpZ2(?}^96ki7#X7S zqku;=x(0Xy2X|utWPtO+)C>sYO=@L20fWQUxP)Eds1RwuO$r|~Z!?GsiihWNBTzas zh6t4o_yJrs=ncTbaA!bQ18q%l*agkuFuV!wA?VG(AK=RHOBU6@M>qhR(V`8&5zW=g zWCO#Z)k2tnXA5G`ln^=(jEm*C@t_HBz||U2GvElEgqMP!4GhA#pl%8VhN8z0fi?kG z!JWiUS9}O0E!rMG9Agihi*suf=y|}m;i&dyz>ClxGr(64(HGjFzu~zI+}j>~26{ix zrvowu9Sj`T5gCIX4}2ev68aG6nuOY;pl-luII6t`xEzjT2zO~T;Zr#3V$V)Ekaj}< zpGTr8@C=5l1)U8X1$PT{9&kFG4B7%b3P*|70LLb)H69Oq2uH0&c<U98y9;^3)hYO& 
zl%Q4C0^?HAefU*X!i`<w2Y++d0*-qV$KnilU?k&qz)=RZuX5ZFQEloh;7&Ly^)6s@ z9DC<MJ`p$^j^wj}hVD3UfNvy(Q@R6K0Y`BOAN0Vufy_f-eg^pX7?hh0qGwP1FFr7B zGJz#<6k#cFoks5m+BBN$g-aW_V92)tPJyG$@_=EzRoVpHo{7GPj12VXqo&yj_<ldM zHuxU`3$EcGs(}m)>aPj~1AiNcT0*EEICT(YK<5D;X!Jwi8-rE;JfJ;J-!$(Dx4~6| z-T`brRBgpr;A}Xm7-8-(==pxwdGkO#gp0$#A)K3~_T_TmoZ;vi@aF+5;HZ{_pNv3a z;L{t~`jHrP_)%R#k8E`SdID#{QDzq4yKq#=<v^Rp=SHFXMxp;#BB3&PuEK2wT??F& zgW7;D1WwFFZ9q>3zBxwCcrozjvACiE|2!~yyjDwK?`ar>;Aa9S<#F6|iVIu@rxXGh zG+mVsE&y>4E*7J=9@uOKKA;301)K^;HOK?%W~$BW1l%<rOEP2#yDv}^$^gcoGULII z1@<q*6a-Bu!%@ce3$c2_Z3e#v$Xn6>)O7YBO5kV;Ed`22YEu)s7DF`x-woImjtZ0p zEQX^*7X#O7G-2LbYC42#7prRn;j|^{LYD^|D`6kOvOQkH{LfmduHl5s;i!)ZYc!g$ zpcLZ+5j@}KxIS>5K=%he$77}}(1gd|!+-^S5-60b;~)<B^nDbBWPr<7p({Zz2mbK^ z$DI$r`u`Bbj1M{Ge|b~}JhTQ=5w#@zay_OQ=v_e526YW5q~CQTek_n4LsP4)1=8=} z#evQSI&Fd_4zvmQD_ja_ZZpSy1t)^u1H4p#e=^{?415AdO&a?t*8DBdOTdT4p4$mW z8SDZc*owskbOrDn9Lb*tdVH?7oF~w@9jyl$!jIsT!U3o4Q0YA24mgTiK!o)Rbr2N+ zUA|OlH{j&G7(7Ubu*W_urC2^QfK9$Z%|M%h`Ec2wX9Hh5tPV^Ocm<BSvKDx-3Wt8k z90K}RtA#KEU#~|0Qz>SX=P3FKLS;Z*4ae;V?Fk$JN9*=r;2Moy3p@u$H9HS%{-aum zSm0qeT8OHF5f`xBBCZ+u6C4fjGr$EG(cKvMg%>eZ9>PO~AbkH4`W^xFTRk;b&}X0t z>Hj%Z2%2yM9PM(1-D^REp8<5dh3*CI1hm4DrnC|m_X`#;@CpBhy9}E9mE&5$tt^00 z9Ed#ffF}G@qxS&4WGp6#;0s)T8#^254ZwgqkVhecfrH>U#KmO}H;4G3$&OD}`ZUN8 zl5O0AkxxiAZ=wkc$hb{Hgk)DHnve{^L=%#|mT1E98cjCa#TrdWmRFJ?B&#aXg!eU? 
zEUVskRX!nEIUzGyA=0$~GCY#ukRlU)14kt#Bs(7Q3CYq%G@;Wyl_n$`81V@^Yc$!x zMyT}eQHgwWez%jzPv;AkVb2rLxb|o~{M9eVqKl4c^c&y9_Tu!?U$GkNmF}fKY6HR} zU;0lTf#36`fA&SlBkjiYYOlhNd>Z|Ncy0I7blx$uNj+vP0`{z{SXaI7<T`m>{kpjI ziCA#3yMw=Z?Vh#erd52Dz{QpKTE(CCNWpkRK;F99>k8K`E^qw-?-Nyvcn#~g^-k+O z*BjTH)<><6Tc5dp@cQia<Jae{pS`|t{o?gy>sPMdJ^cgz9pT+L$M;tA0q%QN<n}86 h`Q}%Lkldm2rmOkB<qKEyW&gnsUBkyYpat>we*pmr!(0FW diff --git a/Sources/pyOpenRPA/Orchestrator/RobotRDPActive/RobotRDPActive.py b/Sources/pyOpenRPA/Orchestrator/RobotRDPActive/RobotRDPActive.py index 34add348..f0c66ed9 100644 --- a/Sources/pyOpenRPA/Orchestrator/RobotRDPActive/RobotRDPActive.py +++ b/Sources/pyOpenRPA/Orchestrator/RobotRDPActive/RobotRDPActive.py @@ -15,12 +15,18 @@ def RobotRDPActive(inGSettings): # Global error handler try: ######## Init the RDP List + lNewRDPList = {} for lRDPSessionKeyStrItem in mGSettingsRDPActiveDict["RDPList"]: lConfigurationItem = mGSettingsRDPActiveDict["RDPList"][lRDPSessionKeyStrItem] + lAddToNewRDPDict = True + if "SessionHex" not in lConfigurationItem: lAddToNewRDPDict = False # 2020.08.03 fix: Init the Session hex field. 
If no SessionHex - trash in structure - remove if lConfigurationItem["SessionHex"] is None or lConfigurationItem["SessionHex"] == "": # Minor fix - need for restore existed RDP sessions lConfigurationItem["SessionIsWindowExistBool"] = False # Flag that session is not started lConfigurationItem["SessionIsWindowResponsibleBool"] = False # Flag that session is not started lConfigurationItem["SessionHex"] = " 77777sdfsdf77777dsfdfsf77777777" # Flag that session is not started + if lAddToNewRDPDict: + lNewRDPList[lRDPSessionKeyStrItem] = lConfigurationItem + mGSettingsRDPActiveDict["RDPList"] = lNewRDPList # Update the structure ########## # Run monitor - main loop # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # diff --git a/Sources/pyOpenRPA/__init__.py b/Sources/pyOpenRPA/__init__.py index c16aa7c5..a9791f74 100644 --- a/Sources/pyOpenRPA/__init__.py +++ b/Sources/pyOpenRPA/__init__.py @@ -3,7 +3,7 @@ r""" The OpenRPA package (from UnicodeLabs) """ -__version__ = 'v1.1.14' +__version__ = 'v1.1.15' __all__ = [] __author__ = 'Ivan Maslov <ivan.maslov@unicodelabs.ru>' #from .Core import Robot \ No newline at end of file diff --git a/v1.1.15 b/v1.1.15 new file mode 100644 index 00000000..e69de29b